library(R2MLwiN)

## Locate the MLwiN executable, prompting until an executable path is supplied
mlwin <- getOption("MLwiN_path")
while (file.access(mlwin, mode = 1) != 0) {
  cat("Please specify the root MLwiN folder or the full path to the MLwiN executable:\n")
  mlwin <- scan(what = character(0), sep = "\n")
  mlwin <- gsub("\\", "/", mlwin, fixed = TRUE)
}
options(MLwiN_path = mlwin)

data(tutorial, package = "R2MLwiN")

## Exam scores by sex
boy.normexam <- tutorial$normexam[which(tutorial$sex == "boy")]
girl.normexam <- tutorial$normexam[which(tutorial$sex == "girl")]
tab1 <- cbind(
  c(length(boy.normexam), mean(boy.normexam), sd(boy.normexam)),
  c(length(girl.normexam), mean(girl.normexam), sd(girl.normexam)),
  c(length(tutorial$normexam), mean(tutorial$normexam), sd(tutorial$normexam))
)
colnames(tab1) <- c("0", "1", "TOTAL")
rownames(tab1) <- c("N", "MEANS", "SDs")
formatC(round(tab1, 6))

## Exam scores by banded intake score
c5 <- tutorial$standlrt
intakecat <- rep(0, length(c5))
intakecat[which(c5 > -1)] <- 1
intakecat[which(c5 > -0.5)] <- 2
intakecat[which(c5 > -0.1)] <- 3
intakecat[which(c5 > 0.3)] <- 4
intakecat[which(c5 > 0.7)] <- 5
intakecat[which(c5 > 1.1)] <- 6
normexam <- tutorial$normexam
tab2 <- cbind(
  c(sum(intakecat == 0), mean(normexam[intakecat == 0]), sd(normexam[intakecat == 0])),
  c(sum(intakecat == 1), mean(normexam[intakecat == 1]), sd(normexam[intakecat == 1])),
  c(sum(intakecat == 2), mean(normexam[intakecat == 2]), sd(normexam[intakecat == 2])),
  c(sum(intakecat == 3), mean(normexam[intakecat == 3]), sd(normexam[intakecat == 3])),
  c(sum(intakecat == 4), mean(normexam[intakecat == 4]), sd(normexam[intakecat == 4])),
  c(sum(intakecat == 5), mean(normexam[intakecat == 5]), sd(normexam[intakecat == 5])),
  c(sum(intakecat == 6), mean(normexam[intakecat == 6]), sd(normexam[intakecat == 6])),
  c(length(intakecat), mean(normexam), sd(normexam))
)
colnames(tab2) <- c("0", "1", "2", "3", "4", "5", "6", "TOTAL")
rownames(tab2) <- c("N", "MEANS", "SDs")
formatC(round(tab2, 6))

## Model 1: complex level-1 variation (random slope at the student level);
## EstM = 1 requests MCMC estimation
(mymodel1 <- runMLwiN(normexam ~ 1 + standlrt + (1 + standlrt | student),
                      estoptions = list(EstM = 1), data = tutorial))
trajectories(mymodel1, Range = c(4501, 5000))

## Level-1 variance as a quadratic function of standlrt
l1varfn <- mymodel1@RP["RP1_var_Intercept"] +
  2 * mymodel1@RP["RP1_cov_Intercept_standlrt"] * tutorial$standlrt +
  mymodel1@RP["RP1_var_standlrt"] * tutorial$standlrt^2
plot(sort(tutorial$standlrt), l1varfn[order(tutorial$standlrt)],
     xlab = "standlrt", ylab = "l1varfn", type = "l")
abline(v = 0, lty = "dotted")

## Model 2: random slope at the school level, simple level-1 variance
(mymodel2 <- runMLwiN(normexam ~ 1 + standlrt + (1 + standlrt | school) + (1 | student),
                      estoptions = list(EstM = 1), data = tutorial))
l2varfn <- mymodel2@RP["RP2_var_Intercept"] +
  2 * mymodel2@RP["RP2_cov_Intercept_standlrt"] * tutorial$standlrt +
  mymodel2@RP["RP2_var_standlrt"] * tutorial$standlrt^2
l1varfn <- mymodel2@RP["RP1_var_Intercept"]
plot(sort(tutorial$standlrt), l2varfn[order(tutorial$standlrt)],
     xlab = "standlrt", ylab = "varfns", ylim = c(0, 0.6), type = "l")
abline(h = l1varfn)
abline(v = 0, lty = "dotted")

## Model 3: random slopes at both levels
(mymodel3 <- runMLwiN(normexam ~ 1 + standlrt + (1 + standlrt | school) + (1 + standlrt | student),
                      estoptions = list(EstM = 1), data = tutorial))

## Model 4: as model 3, but remove the level-1 standlrt variance via clre
## (clre rows: level, then the two variables whose (co)variance is removed)
clre <- matrix(, nrow = 3, ncol = 1)
clre[1, 1] <- 1
clre[2, 1] <- "standlrt"
clre[3, 1] <- "standlrt"
(mymodel4 <- runMLwiN(normexam ~ 1 + standlrt + (1 + standlrt | school) + (1 + standlrt | student),
                      estoptions = list(EstM = 1, clre = clre), data = tutorial))

## Model 5: add girl to the fixed part and the level-1 random part,
## removing the level-1 variances of both standlrt and girl
tutorial$girl <- as.integer(tutorial$sex) - 1
clre <- matrix(, nrow = 3, ncol = 2)
clre[1, 1] <- 1
clre[2, 1] <- "standlrt"
clre[3, 1] <- "standlrt"
clre[1, 2] <- 1
clre[2, 2] <- "girl"
clre[3, 2] <- "girl"
(mymodel5 <- runMLwiN(normexam ~ 1 + standlrt + girl +
                        (1 + standlrt | school) + (1 + standlrt + girl | student),
                      estoptions = list(EstM = 1, clre = clre), data = tutorial))

## Level-2 and sex-specific level-1 variance functions for model 5
l2varfn <- mymodel5@RP["RP2_var_Intercept"] +
  2 * mymodel5@RP["RP2_cov_Intercept_standlrt"] * tutorial$standlrt +
  mymodel5@RP["RP2_var_standlrt"] * tutorial$standlrt^2
l1varfnboys <- mymodel5@RP["RP1_var_Intercept"] +
  2 * mymodel5@RP["RP1_cov_Intercept_standlrt"] * tutorial$standlrt
l1varfngirls <- mymodel5@RP["RP1_var_Intercept"] +
  2 * mymodel5@RP["RP1_cov_Intercept_standlrt"] * tutorial$standlrt +
  2 * mymodel5@RP["RP1_cov_Intercept_girl"] +
  2 * mymodel5@RP["RP1_cov_standlrt_girl"] * tutorial$standlrt
plot(sort(tutorial$standlrt), l2varfn[order(tutorial$standlrt)],
     xlab = "standlrt", ylab = "varfns", ylim = c(0, 0.8), type = "l")
lines(sort(tutorial$standlrt), l1varfnboys[order(tutorial$standlrt)])
lines(sort(tutorial$standlrt), l1varfngirls[order(tutorial$standlrt)])
abline(v = 0, lty = "dotted")

## Model 6: as model 5, but with a log link for the level-1 precision (lclo = 1),
## so the level-1 variance functions are back-transformed via 1/exp(.)
(mymodel6 <- runMLwiN(normexam ~ 1 + standlrt + girl +
                        (1 + standlrt | school) + (1 + standlrt + girl | student),
                      estoptions = list(EstM = 1, clre = clre, mcmcMeth = list(lclo = 1)),
                      data = tutorial))
l2varfn <- mymodel6@RP["RP2_var_Intercept"] +
  2 * mymodel6@RP["RP2_cov_Intercept_standlrt"] * tutorial$standlrt +
  mymodel6@RP["RP2_var_standlrt"] * tutorial$standlrt^2
l1varfnboys <- 1/exp(mymodel6@RP["RP1_var_Intercept"] +
                       2 * mymodel6@RP["RP1_cov_Intercept_standlrt"] * tutorial$standlrt)
l1varfngirls <- 1/exp(mymodel6@RP["RP1_var_Intercept"] +
                        2 * mymodel6@RP["RP1_cov_Intercept_standlrt"] * tutorial$standlrt +
                        2 * mymodel6@RP["RP1_cov_Intercept_girl"] +
                        2 * mymodel6@RP["RP1_cov_standlrt_girl"] * tutorial$standlrt)
plot(sort(tutorial$standlrt), l2varfn[order(tutorial$standlrt)],
     xlab = "standlrt", ylab = "varfns", ylim = c(0, 0.8), type = "l")
lines(sort(tutorial$standlrt), l1varfnboys[order(tutorial$standlrt)])
lines(sort(tutorial$standlrt), l1varfngirls[order(tutorial$standlrt)])
abline(v = 0, lty = "dotted")
context("inspect_types pair of dataframes") data("starwars", package = "dplyr") data("band_instruments", package = "dplyr") data("storms", package = "dplyr") data(mtcars, airquality) test_that("Output with two identical df inputs data frame", { expect_is(inspect_types(mtcars, mtcars), "data.frame") expect_is(inspect_types(band_instruments, band_instruments), "data.frame") expect_is(inspect_types(starwars, starwars), "data.frame") expect_is(inspect_types(storms, storms), "data.frame") expect_is(inspect_types(airquality, airquality), "data.frame") }) test_that("Output with two different inputs data frame", { set.seed(10) expect_is(inspect_types(mtcars, mtcars %>% dplyr::sample_n(100, replace = T)), "data.frame") expect_is(inspect_types(band_instruments, band_instruments %>% dplyr::sample_n(100, replace = T)) , "data.frame") expect_is(inspect_types(starwars, starwars %>% dplyr::sample_n(100, replace = T)), "data.frame") expect_is(inspect_types(storms, storms %>% dplyr::sample_n(100, replace = T)), "data.frame") expect_is(inspect_types(airquality, airquality%>% dplyr::sample_n(100, replace = T)), "data.frame") })
test_that("test resample", { df <- data.frame( sex = as.factor(c(rep("M", 5), rep("F", 5), rep("N", 5))), target = c(1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1) ) MN <- sum(df$sex == "M" & df$target == 0) MP <- sum(df$sex == "M" & df$target == 1) FN <- sum(df$sex == "F" & df$target == 0) FP <- sum(df$sex == "F" & df$target == 1) NN <- sum(df$sex == "N" & df$target == 0) NP <- sum(df$sex == "N" & df$target == 1) weights <- reweight(df$sex, df$target) wMP <- weights[1] wMN <- weights[5] wFP <- weights[10] wFN <- weights[6] wNN <- weights[13] wNP <- weights[15] E_MP <- round(MP * wMP) E_MN <- round(MN * wMN) E_FN <- round(FN * wFN) E_FP <- round(FP * wFP) E_NP <- round(NP * wNP) E_NN <- round(NN * wNN) df_2 <- df[resample(df$sex, df$target), ] MN_2 <- sum(df_2$sex == "M" & df_2$target == 0) MP_2 <- sum(df_2$sex == "M" & df_2$target == 1) FN_2 <- sum(df_2$sex == "F" & df_2$target == 0) FP_2 <- sum(df_2$sex == "F" & df_2$target == 1) NN_2 <- sum(df_2$sex == "N" & df_2$target == 0) NP_2 <- sum(df_2$sex == "N" & df_2$target == 1) expect_equal(E_MP, MP_2) expect_equal(E_MN, MN_2) expect_equal(E_FP, FP_2) expect_equal(E_FN, FN_2) expect_equal(E_NP, MP_2) expect_equal(E_NN, NN_2) df <- data.frame( sex = as.factor(c(rep("M", 5), rep("F", 5), rep("N", 5))), target = c(1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1), name = as.character(1:15), probs = c(0.9, 0.82, 0.56, 0.78, 0.45, 0.12, 0.48, 0.63, 0.48, 0.88, 0.34, 0.12, 0.34, 0.49, 0.9), stringsAsFactors = FALSE ) df_3 <- df[resample(df$sex, df$target, type = "preferential", probs = df$probs), ] expect_equal(sort(as.numeric(df_3$name)), c(1, 2, 5, 5, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 15)) expect_error(resample(df$sex, df$target, type = "preferential", probs = df$probs, cutoff = 12)) expect_error(resample(df$sex, df$target, type = "preferential", probs = df$probs, cutoff = c(0.3, 0.4))) })
Wilks <- function(object, ...) {
  UseMethod("Wilks")
}

Wilks.cancor <- function(object, ...) {
  if (!inherits(object, "cancor")) stop("Not a cancor object")
  ev <- 1 - object$cancor^2
  n <- object$dim$n
  p <- object$dim$p
  q <- object$dim$q
  k <- min(p, q)
  m <- n - 3/2 - (p + q)/2
  w <- rev(cumprod(rev(ev)))
  df1 <- df2 <- Fstat <- vector("numeric", k)
  # Rao's F approximation, peeling off one canonical correlation per step
  for (i in 1:k) {
    s <- sqrt((p^2 * q^2 - 4)/(p^2 + q^2 - 5))
    si <- 1/s
    df1[i] <- p * q
    df2[i] <- m * s - p * q/2 + 1
    r <- (1 - w[i]^si)/w[i]^si
    Fstat[i] <- r * df2[i]/df1[i]
    p <- p - 1
    q <- q - 1
  }
  pv <- pf(Fstat, df1, df2, lower.tail = FALSE)
  tests <- cbind(CanR = object$cancor, w, Fstat, df1, df2, pv)
  colnames(tests) <- c("CanR", "LR test stat", "approx F", "numDF", "denDF", "Pr(> F)")
  tests <- structure(as.data.frame(tests),
                     heading = paste("\nTest of H0: The canonical correlations in the",
                                     "\ncurrent row and all that follow are zero\n"),
                     class = c("anova", "data.frame"))
  tests
}

Wilks.candisc <- function(object, ...) {
  ev <- object$eigenvalues
  n <- nrow(object$scores)
  g <- object$dfh + 1
  p <- nrow(object$coeffs.std)
  rank <- object$rank
  r <- 1:rank
  LR <- rep(0, rank)
  for (i in seq(r)) {
    LR[i] <- prod(1/(1 + ev[i:rank]))
  }
  df1 <- (p - r + 1) * (g - r)
  nu <- sqrt(((p - r + 1)^2 * (g - r)^2 - 4)/((p - r + 1)^2 + (g - r)^2 - 5))
  df2 <- nu * (n - (p + g + 2)/2) - (p - r + 1) * (g - r)/2 + 1
  Lnu <- LR^(1/nu)
  # Use Fstat rather than F to avoid masking the logical constant
  Fstat <- (1 - Lnu) * df2/(Lnu * df1)
  pv <- pf(Fstat, df1, df2, lower.tail = FALSE)
  tests <- cbind(LR, Fstat, df1, round(df2, digits = 2), pv)
  colnames(tests) <- c("LR test stat", "approx F", "numDF", "denDF", "Pr(> F)")
  tests <- structure(as.data.frame(tests),
                     heading = paste("\nTest of H0: The canonical correlations in the",
                                     "\ncurrent row and all that follow are zero\n"),
                     class = c("anova", "data.frame"))
  tests
}
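# Minimal runnable sketch of the Wilks() dispatch on a "cancor" object. The list
# below mimics only the fields Wilks.cancor() reads ($cancor and $dim); a real
# object would come from candisc::cancor(), which is an assumption here, not
# part of the code above.
fake_cc <- structure(
  list(cancor = c(0.7, 0.4, 0.2),
       dim = list(n = 100, p = 3, q = 3)),
  class = "cancor"
)
Wilks(fake_cc)  # one test per canonical correlation, largest first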
library(ranger)
library(randomForest)

test_that('trainsetBias works for ranger & classification tree', {
  set.seed(42L)
  trainID <- sample(150, 120)
  rfobj <- ranger(Species ~ ., iris[trainID, ], keep.inbag = TRUE)
  tidy.RF <- tidyRF(rfobj, iris[trainID, -5], iris[trainID, 5])
  trainset.bias <- trainsetBias(tidy.RF)
  expect_equal(dim(trainset.bias), c(1, 3))
  expect_equal(dimnames(trainset.bias), list('Bias', levels(iris$Species)))
})

test_that('trainsetBias works for randomForest & classification tree', {
  set.seed(42L)
  trainID <- sample(150, 120)
  rfobj <- randomForest(Species ~ ., iris[trainID, ], keep.inbag = TRUE)
  tidy.RF <- tidyRF(rfobj, iris[trainID, -5], iris[trainID, 5])
  trainset.bias <- trainsetBias(tidy.RF)
  expect_equal(dim(trainset.bias), c(1, 3))
  expect_equal(dimnames(trainset.bias), list('Bias', levels(iris$Species)))
})

test_that('trainsetBias works for ranger & regression tree', {
  set.seed(42L)
  trainID <- sample(32, 25)
  rfobj <- ranger(mpg ~ ., mtcars[trainID, ], keep.inbag = TRUE)
  tidy.RF <- tidyRF(rfobj, mtcars[trainID, -1], mtcars[trainID, 1])
  trainset.bias <- trainsetBias(tidy.RF)
  expect_equal(dim(trainset.bias), c(1, 1))
  expect_equal(dimnames(trainset.bias), list('Bias', 'Response'))
})

test_that('trainsetBias works for randomForest & regression tree', {
  set.seed(42L)
  trainID <- sample(32, 25)
  rfobj <- randomForest(mpg ~ ., mtcars[trainID, ], keep.inbag = TRUE)
  tidy.RF <- tidyRF(rfobj, mtcars[trainID, -1], mtcars[trainID, 1])
  trainset.bias <- trainsetBias(tidy.RF)
  expect_equal(dim(trainset.bias), c(1, 1))
  expect_equal(dimnames(trainset.bias), list('Bias', 'Response'))
})
require(sde)
set.seed(123)

# Successive refinements of a Brownian bridge: each level doubles the grid by
# filling in a new bridge between every pair of adjacent points
W <- vector(mode = "list", length = 14)
W[[1]] <- BBridge(1, 1, 0, 1, N = 2)
for (i in 1:13) {
  cat(paste(i, "\n"))
  n <- length(W[[i]])
  t <- time(W[[i]])
  w <- as.numeric(W[[i]])
  tmp <- w[1]
  for (j in 1:(n - 1)) {
    tmp.BB <- BBridge(w[j], w[j + 1], t[j], t[j + 1], N = 2)
    tmp <- c(tmp, as.numeric(tmp.BB[2:3]))
  }
  W[[i + 1]] <- ts(tmp, start = 0, deltat = 1/(2^(i + 1)))
}

min.w <- min(unlist(W)) - 0.5
max.w <- max(unlist(W)) + 0.5
opar <- par(no.readonly = TRUE)
par(mfrow = c(7, 2), mar = c(3, 0, 0, 0))
for (i in 1:14) {
  plot(W[[i]], ylim = c(min.w, max.w), axes = FALSE)
  if (i == 1) axis(1, c(0, 0.5, 1))
  if (i == 2) axis(1, c(0, 0.25, 0.5, 0.75, 1))
  if (i > 2) axis(1, seq(0, 1, by = 0.1))
  text(0.5, 2.2, sprintf("N = %d", 2^i))
}
par(opar)
get_importance <- function(model) {
  importance <- NULL  # quiets R CMD check notes about non-standard evaluation
  x <- NULL           # ensure x exists even if no branch below matches

  # Non-spatial rf / rf_repeat models: per-variable importance
  if ((inherits(model, "rf") && !inherits(model, "rf_spatial")) ||
      (inherits(model, "rf_repeat") && !inherits(model, "rf_spatial"))) {
    x <- model$importance$per.variable
  }

  # Spatial models: use the aggregated spatial-predictor stats when spatial
  # predictors outnumber the non-spatial ones
  if (inherits(model, "rf_spatial")) {
    if (!is.null(model$ranger.arguments$repetitions)) {
      repetitions <- model$ranger.arguments$repetitions
    } else {
      repetitions <- 1
    }
    length.non.spatial.predictors <-
      sum(model$importance$spatial.predictors$variable != "spatial_predictors") / repetitions
    length.spatial.predictors <-
      sum(model$importance$spatial.predictors$variable == "spatial_predictors") / repetitions
    if (length.spatial.predictors >= length.non.spatial.predictors) {
      x <- model$importance$spatial.predictor.stats
    } else {
      x <- model$importance$per.variable
    }
  }

  if (is.null(x)) {
    stop("This model doesn't have a 'variable.importance' slot")
  }

  x <- dplyr::arrange(x, dplyr::desc(importance))
  x
}
alpha.compute <- function(p) {
  # Convert a vector of class probabilities into threshold parameters on the
  # logit scale: alpha[1] = logit(p[1]); later entries are the increments that
  # reconstruct the cumulative probabilities
  alpha <- NULL
  if (!is.vector(p) || any(p < 0) || any(p > 1) || abs(sum(p) - 1) > .Machine$double.eps)
    stop("p must be a vector of probabilities\n")
  if (length(p) == 1) {
    alpha <- NA
  } else {
    if (p[1] == 0) alpha[1] <- -Inf
    else if (p[1] == 1) alpha[1] <- Inf
    else alpha[1] <- logit(p[1])
    for (i in setdiff(1:(length(p) - 1), 1)) {
      cum.p <- sum(p[1:i])
      cum.alpha <- sum(alpha[!is.infinite(alpha)])
      alpha[i] <- ifelse(cum.p == 1, Inf, logit(cum.p) - cum.alpha)
    }
  }
  alpha
}
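# Usage sketch: alpha.compute() relies on a logit() helper defined elsewhere in
# the package; stats::qlogis is assumed as an equivalent stand-in here.
logit <- stats::qlogis
alpha.compute(c(0.2, 0.3, 0.5))
# -> alpha[1] = logit(0.2), and alpha[2] = logit(0.2 + 0.3) - alpha[1],
#    so the cumulative logits are recovered by partial sums of alpha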
probs <- function(x) {
  pr <- plogis(x)
  # Clamp fitted probabilities strictly inside (0, 1)
  if (any(ind <- pr == 1))
    pr[ind] <- 1 - sqrt(.Machine$double.eps)
  if (any(ind <- pr == 0))
    pr[ind] <- sqrt(.Machine$double.eps)
  pr
}
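# Sketch: because probs() keeps its output strictly inside (0, 1), a later
# log() or qlogis() on the result stays finite even when plogis() underflows
# to exactly 0 or overflows to exactly 1 for extreme linear predictors.
probs(c(-800, 0, 800))
# -> c(eps, 0.5, 1 - eps) with eps = sqrt(.Machine$double.eps)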
test_xlin_fits <- function() {
  x <- c(1, 2, 3, 4)
  y <- c(1, 2, 2, 1)
  w <- c(1, 1, 1, 1)
  f <- xlin_fits(x, y, w, 0, length(y) - 1)
  f_lm <- xlin_fits_lm(x, y, w)
  expect_true(max(abs(f - f_lm)) <= 1e-3)
  f_R <- xlin_fits_R(x, y, w)
  expect_equal(f, f_R)
  f_V <- xlin_fits_V(x, y, w)
  expect_equal(f, f_V)
  invisible(NULL)
}

test_xlin_fits()
knitr::opts_chunk$set(
  collapse = TRUE,
  warning = FALSE,
  message = FALSE,
  comment = "#>"
)
library(grainchanger)
library(sf)
library(ggplot2)

# Build a coarse polygon grid over the categorical landscape
coarse_dat <- cat_ls %>%
  st_bbox() %>%
  st_as_sfc() %>%
  st_buffer(-4) %>%
  st_make_grid(cellsize = 19) %>%
  st_sf()

landscapetools::show_landscape(cat_ls) +
  geom_sf(data = coarse_dat, alpha = 0.5)

# Moving-window aggregation: Shannon diversity in a 3-cell rectangular window
coarse_dat$shdi_3 <- winmove_agg(coarse_dat = coarse_dat,
                                 fine_dat = cat_ls,
                                 d = 3,
                                 type = "rectangle",
                                 win_fun = shdi,
                                 agg_fun = mean,
                                 is_grid = FALSE,
                                 lc_class = 1:4)

ggplot(coarse_dat, aes(fill = shdi_3)) +
  geom_sf() +
  theme_bw()

# g_sf: a grid object assumed to be created earlier in the vignette
g_sf$shei_4 <- winmove_agg(coarse_dat = g_sf,
                           fine_dat = cat_ls,
                           d = 4,
                           type = "rectangle",
                           win_fun = shei,
                           agg_fun = mean,
                           is_grid = FALSE,
                           lc_class = 1:4)

library(sf)
library(ggplot2)

coarse_dat <- st_read(system.file("shape/poly_sf.shp", package = "grainchanger"))

# Direct aggregation without a moving window
coarse_dat$var_range <- nomove_agg(coarse_dat = coarse_dat,
                                   fine_dat = cont_ls,
                                   agg_fun = var_range,
                                   is_grid = FALSE)

ggplot(coarse_dat, aes(fill = var_range)) +
  geom_sf() +
  theme_bw()

function_overview <- data.frame(
  `Function Name` = c("prop", "shdi", "shei", "range"),
  `Description` = c("Calculate the proportion of a given class",
                    "Calculate the Shannon diversity",
                    "Calculate the Shannon evenness",
                    "Calculate the range of values"),
  `Additional arguments` = c("lc_class (numeric)", "lc_class (numeric)",
                             "lc_class (numeric)", "")
)
knitr::kable(function_overview)

torus <- create_torus(cat_ls, 5)
landscapetools::show_landscape(torus)
test_that("can't compare zoned-times with different zones", { x <- as_zoned_time(sys_days(0), "America/New_York") y <- as_zoned_time(sys_days(0), "UTC") expect_snapshot_error(x > y) }) test_that("can compare zoned-times with same zone", { x <- as_zoned_time(sys_days(0:1), "America/New_York") expect_false(x[1] > x[2]) expect_true(x[1] < x[2]) }) test_that("normal print method works", { x <- as_zoned_time(as_sys_time(year_month_day(2019, 1:5, 1)), "America/New_York") expect_snapshot(x) }) test_that("can limit with `max`", { x <- as_zoned_time(as_sys_time(year_month_day(2019, 1:5, 1)), "America/New_York") expect_snapshot(print(x, max = 2)) expect_snapshot(print(x, max = 4)) expect_snapshot(print(x, max = 5)) expect_snapshot(print(x, max = 6)) }) test_that("`max` defaults to `getOption('max.print')` but can be overridden", { local_options(max.print = 3) x <- as_zoned_time(as_sys_time(year_month_day(2019, 1:5, 1)), "America/New_York") expect_snapshot(x) expect_snapshot(print(x, max = 4)) expect_snapshot(print(x, max = 5)) }) test_that("as.character() works", { expect <- "2019-01-01T01:02:03-05:00[America/New_York]" x <- zoned_time_parse_complete(expect) expect_identical(as.character(x), expect) }) test_that("can parse what we format with seconds precision zoned time", { zone <- "America/New_York" x <- as_zoned_time(as_naive_time(year_month_day(2019, 1, 1)), zone) expect_identical( zoned_time_parse_complete(format(x)), x ) }) test_that("can parse subsecond zoned time", { zone <- "America/New_York" x <- "2019-01-01T01:02:03.123-05:00[America/New_York]" y <- "2019-01-01T01:02:03.1234-05:00[America/New_York]" z <- "2019-01-01T01:02:03.123456789-05:00[America/New_York]" expect_identical( zoned_time_parse_complete(x, precision = "millisecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 2, 3, 123, subsecond_precision = "millisecond")), zone) ) expect_identical( zoned_time_parse_complete(y, precision = "microsecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 2, 3, 123400, subsecond_precision = "microsecond")), zone) ) expect_identical( zoned_time_parse_complete(z, precision = "nanosecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 2, 3, 123456789, subsecond_precision = "nanosecond")), zone) ) }) test_that("multiple formats can be used", { zone <- "America/New_York" x <- c( "1970-10-25 05:30:00-05:00[America/New_York]", "1970/10/25 05:30:00-05:00[America/New_York]" ) formats <- c( "%Y-%m-%d %H:%M:%S%Ez[%Z]", "%Y/%m/%d %H:%M:%S%Ez[%Z]" ) expect_identical( zoned_time_parse_complete(x, format = formats), as_zoned_time( as_naive_time(year_month_day(1970, 10, 25, 05, 30, c(00, 00))), zone ) ) }) test_that("cannot parse nonexistent time", { zone <- "America/New_York" x <- "1970-04-26T02:30:00-05:00[America/New_York]" expect_warning( expect_identical( zoned_time_parse_complete(x), as_zoned_time(naive_seconds(NA), zone) ) ) expect_snapshot(zoned_time_parse_complete(x)) }) test_that("ambiguous times are resolved by the offset", { zone <- "America/New_York" x <- c( "1970-10-25T01:30:00-04:00[America/New_York]", "1970-10-25T01:30:00-05:00[America/New_York]" ) expect_identical( zoned_time_parse_complete(x), as_zoned_time( as_naive_time(year_month_day(1970, 10, 25, 01, 30, c(00, 00))), zone, ambiguous = c("earliest", "latest") ) ) }) test_that("offset must align with unique offset", { zone <- "America/New_York" x <- "2019-01-01T01:02:03-03:00[America/New_York]" expect_warning( expect_identical( zoned_time_parse_complete(x), as_zoned_time(naive_seconds(NA), zone) ) ) 
expect_snapshot(zoned_time_parse_complete(x)) }) test_that("offset must align with one of two possible ambiguous offsets", { zone <- "America/New_York" x <- c( "1970-10-25T01:30:00-03:00[America/New_York]", "1970-10-25T01:30:00-06:00[America/New_York]" ) expect_warning( expect_identical( zoned_time_parse_complete(x), as_zoned_time(naive_seconds(c(NA, NA)), zone) ) ) expect_snapshot(zoned_time_parse_complete(x)) }) test_that("cannot have differing zone names", { x <- c( "2019-01-01T01:02:03-05:00[America/New_York]", "2019-01-01T01:02:03-08:00[America/Los_Angeles]" ) expect_snapshot_error(zoned_time_parse_complete(x)) }) test_that("zone name must be valid", { x <- "2019-01-01T01:02:03-05:00[America/New_Yor]" expect_snapshot_error(zoned_time_parse_complete(x)) }) test_that("empty input uses UTC time zone ( expect_identical( zoned_time_parse_complete(character()), as_zoned_time(naive_seconds(), "UTC") ) expect_identical( zoned_time_parse_complete(character(), precision = "nanosecond"), as_zoned_time(as_naive_time(duration_nanoseconds()), "UTC") ) }) test_that("all `NA`s uses UTC time zone ( expect_identical( zoned_time_parse_complete(c(NA_character_, NA_character_)), as_zoned_time(naive_seconds(c(NA, NA)), "UTC") ) }) test_that("all failures uses UTC time zone ( expect_warning( expect_identical( zoned_time_parse_complete(c("foo", "bar")), as_zoned_time(naive_seconds(c(NA, NA)), "UTC") ) ) }) test_that("`x` is translated to UTF-8", { x <- "2019-f\u00E9vrier-01 01:02:03-05:00[America/New_York]" x <- iconv(x, from = "UTF-8", to = "latin1") locale <- clock_locale("fr") format <- "%Y-%B-%d %H:%M:%S%Ez[%Z]" expect_identical(Encoding(x), "latin1") expect_identical(Encoding(locale$labels$month[2]), "UTF-8") expect_identical( zoned_time_parse_complete(x, format = format, locale = locale), as_zoned_time(as_naive_time(year_month_day(2019, 2, 1, 1, 2, 3)), "America/New_York") ) }) test_that("leftover subseconds result in a parse failure", { x <- "2019-01-01T01:01:01.1238-05:00[America/New_York]" expect_identical( zoned_time_parse_complete(x, precision = "microsecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 1, 1, 123800, subsecond_precision = "microsecond")), "America/New_York") ) expect_warning( expect_identical( zoned_time_parse_complete(x, precision = "millisecond"), as_zoned_time(naive_seconds(NA) + duration_milliseconds(NA), zone = "UTC") ), class = "clock_warning_parse_failures" ) }) test_that("parsing rounds parsed subsecond components more precise than the resulting container ( x <- "2019-01-01 01:01:01.1238-05:00[America/New_York]" expect_identical( zoned_time_parse_complete(x, precision = "millisecond", format = "%Y-%m-%d %H:%M:%7S%Ez[%Z]"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 1, 1, 124, subsecond_precision = "millisecond")), "America/New_York") ) }) test_that("parsing fails when undocumented rounding behavior would result in invalid 60 second component ( x <- "2019-01-01 01:01:59.550-05:00[America/New_York]" expect_warning( expect_identical( zoned_time_parse_complete(x, precision = "second", format = "%Y-%m-%d %H:%M:%6S%Ez[%Z]"), as_zoned_time(as_naive_time(year_month_day(NA, NA, NA, NA, NA, NA)), zone = "UTC") ), class = "clock_warning_parse_failures" ) }) test_that("can parse with abbreviation and zone name", { expect_identical( zoned_time_parse_abbrev("2019-01-01 01:02:03 EST", "America/New_York"), zoned_time_parse_complete("2019-01-01T01:02:03-05:00[America/New_York]") ) }) test_that("can parse when abbreviation is an offset", { expect_identical( 
zoned_time_parse_abbrev("2019-01-01 01:02:03 +11", "Australia/Lord_Howe"), zoned_time_parse_complete("2019-01-01T01:02:03+11:00[Australia/Lord_Howe]") ) expect_identical( zoned_time_parse_abbrev("2019-10-01 01:02:03 +1030", "Australia/Lord_Howe"), zoned_time_parse_complete("2019-10-01T01:02:03+10:30[Australia/Lord_Howe]") ) }) test_that("can parse at more precise precisions", { expect_identical( zoned_time_parse_abbrev("2019-01-01 01:02:03.123 EST", "America/New_York", precision = "millisecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 2, 3, 123, subsecond_precision = "millisecond")), "America/New_York") ) expect_identical( zoned_time_parse_abbrev("2019-01-01 01:02:03.123456 EST", "America/New_York", precision = "nanosecond"), as_zoned_time(as_naive_time(year_month_day(2019, 1, 1, 1, 2, 3, 123456000, subsecond_precision = "nanosecond")), "America/New_York") ) }) test_that("abbreviation is used to resolve ambiguity", { x <- c( "1970-10-25 01:30:00 EDT", "1970-10-25 01:30:00 EST" ) expect <- c( "1970-10-25T01:30:00-04:00[America/New_York]", "1970-10-25T01:30:00-05:00[America/New_York]" ) expect_identical( zoned_time_parse_abbrev(x, "America/New_York"), zoned_time_parse_complete(expect) ) }) test_that("nonexistent times are NAs", { expect_warning( expect_identical( zoned_time_parse_abbrev("1970-04-26 02:30:00 EST", "America/New_York"), as_zoned_time(sys_seconds(NA), "America/New_York") ) ) }) test_that("abbreviation must match the one implied from naive + time zone name lookup", { x <- "1970-01-01 00:00:00 FOOBAR" expect_warning( expect_identical( zoned_time_parse_abbrev(x, "America/New_York"), as_zoned_time(sys_days(NA), "America/New_York") ) ) x <- "1970-01-01 00:00:00 EDT" expect_warning( expect_identical( zoned_time_parse_abbrev(x, "America/New_York"), as_zoned_time(sys_days(NA), "America/New_York") ) ) expect_snapshot(zoned_time_parse_abbrev(x, "America/New_York")) }) test_that("%Z must be used", { x <- "1970-01-01" expect_snapshot_error( zoned_time_parse_abbrev(x, "America/New_York", format = "%Y-%m-%d") ) }) test_that("%z can be parsed (but is ignored really)", { expect <- zoned_time_parse_complete("1970-01-01T00:00:00-05:00[America/New_York]") x <- "1970-01-01 00:00:00-05:00 EST" expect_identical( zoned_time_parse_abbrev(x, "America/New_York", format = "%Y-%m-%d %H:%M:%S%Ez %Z"), expect ) }) test_that("%z that is incorrect technically slips through unnoticed", { expect <- zoned_time_parse_complete("1970-01-01T00:00:00-05:00[America/New_York]") x <- "1970-01-01 00:00:00-02:00 EST" expect_identical( zoned_time_parse_abbrev(x, "America/New_York", format = "%Y-%m-%d %H:%M:%S%Ez %Z"), expect ) }) test_that("%z must parse correctly if included", { expect <- as_zoned_time(sys_days(NA), "America/New_York") x <- "1970-01-01 00:00:00-0a:00 EST" expect_warning( result <- zoned_time_parse_abbrev(x, "America/New_York", format = "%Y-%m-%d %H:%M:%S%Ez %Z") ) expect_identical(result, expect) }) test_that("multiple formats can be attempted", { x <- c("1970-01-01 EST", "1970-01-01 05:06:07 EST") formats <- c("%Y-%m-%d %H:%M:%S %Z", "%Y-%m-%d %Z") expect <- as_zoned_time( as_naive_time(year_month_day(1970, 1, 1, c(0, 5), c(0, 6), c(0, 7))), "America/New_York" ) expect_identical( zoned_time_parse_abbrev(x, "America/New_York", format = formats), expect ) }) test_that("NA parses correctly", { expect_identical( zoned_time_parse_abbrev(NA_character_, "America/New_York"), as_zoned_time(sys_seconds(NA), "America/New_York") ) expect_identical( zoned_time_parse_abbrev(NA_character_, 
"America/New_York", precision = "nanosecond"), as_zoned_time(as_sys_time(duration_nanoseconds(NA)), "America/New_York") ) }) test_that("`x` is translated to UTF-8", { x <- "2019-f\u00E9vrier-01 01:02:03-05:00[EST]" x <- iconv(x, from = "UTF-8", to = "latin1") locale <- clock_locale("fr") format <- "%Y-%B-%d %H:%M:%S%Ez[%Z]" expect_identical(Encoding(x), "latin1") expect_identical(Encoding(locale$labels$month[2]), "UTF-8") expect_identical( zoned_time_parse_abbrev(x, "America/New_York", format = format, locale = locale), as_zoned_time(as_naive_time(year_month_day(2019, 2, 1, 1, 2, 3)), "America/New_York") ) }) test_that("zoned-times don't support arithmetic", { x <- as_zoned_time(as_naive_time(year_month_day(2019, 1, 1)), "America/New_York") expect_snapshot_error(add_years(x, 1)) expect_snapshot_error(add_quarters(x, 1)) expect_snapshot_error(add_months(x, 1)) expect_snapshot_error(add_weeks(x, 1)) expect_snapshot_error(add_days(x, 1)) expect_snapshot_error(add_hours(x, 1)) expect_snapshot_error(add_minutes(x, 1)) expect_snapshot_error(add_seconds(x, 1)) expect_snapshot_error(add_milliseconds(x, 1)) expect_snapshot_error(add_microseconds(x, 1)) expect_snapshot_error(add_nanoseconds(x, 1)) }) test_that("ptype is correct", { zones <- c("UTC", "America/New_York", "") for (zone in zones) { for (precision in precision_names()) { precision <- validate_precision_string(precision) if (precision < PRECISION_SECOND) { next } x <- duration_helper(0L, precision) x <- as_zoned_time(as_naive_time(x), zone) ptype <- duration_helper(integer(), precision) ptype <- as_zoned_time(as_naive_time(ptype), zone) expect_identical(vec_ptype(x), ptype) } } }) test_that("is.nan() works", { zone <- "America/New_York" x <- as_zoned_time(naive_days(c(2019, NA)), zone) expect_identical(is.nan(x), c(FALSE, FALSE)) }) test_that("is.finite() works", { zone <- "America/New_York" x <- as_zoned_time(naive_days(c(2019, NA)), zone) expect_identical(is.finite(x), c(TRUE, FALSE)) }) test_that("is.infinite() works", { zone <- "America/New_York" x <- as_zoned_time(naive_days(c(2019, NA)), zone) expect_identical(is.infinite(x), c(FALSE, FALSE)) }) test_that("precision: can get the precision", { zone <- "America/New_York" expect_identical(zoned_time_precision(as_zoned_time(as_naive_time(duration_seconds(2:5)), zone)), "second") expect_identical(zoned_time_precision(as_zoned_time(as_naive_time(duration_nanoseconds(2:5)), zone)), "nanosecond") }) test_that("precision: can only be called on zoned-times", { expect_snapshot_error(zoned_time_precision(duration_days())) })
skip_if_not_pandoc <- function(ver = NULL) {
  if (!rmarkdown::pandoc_available(ver)) {
    msg <- if (is.null(ver)) {
      "Pandoc is not available"
    } else {
      sprintf("Version of Pandoc is lower than %s.", ver)
    }
    skip(msg)
  }
}

skip_if_pandoc <- function(ver = NULL) {
  if (rmarkdown::pandoc_available(ver)) {
    msg <- if (is.null(ver)) {
      "Pandoc is available"
    } else {
      sprintf("Version of Pandoc is greater than %s.", ver)
    }
    skip(msg)
  }
}

local_rmd_file <- function(..., .env = parent.frame()) {
  path <- withr::local_tempfile(.local_envir = .env, fileext = ".Rmd")
  xfun::write_utf8(c(...), path)
  path
}

local_render <- function(input, ..., .env = parent.frame()) {
  skip_if_not_pandoc()
  output_file <- withr::local_tempfile(.local_envir = .env)
  rmarkdown::render(input, output_file = output_file, quiet = TRUE, ...)
}

local_pandoc_convert <- function(text, from = "markdown", options = NULL, ..., .env = parent.frame()) {
  skip_if_not_pandoc()
  rmd <- local_rmd_file(text)
  out <- withr::local_tempfile(.local_envir = .env)
  rmarkdown::pandoc_convert(rmd, from = from, output = out,
                            options = c("--wrap", "preserve", options), ...)
  xfun::read_utf8(out)
}

.render_and_read <- function(input, ...) {
  skip_if_not_pandoc()
  res <- local_render(input, ...)
  xfun::read_utf8(res)
}
"SpatialPoints" = function(coords, proj4string = CRS(as.character(NA)), bbox = NULL) { coords = coordinates(coords) colNames = dimnames(coords)[[2]] if (is.null(colNames)) colNames = paste("coords.x", 1:(dim(coords)[2]), sep = "") rowNames = dimnames(coords)[[1]] dimnames(coords) = list(rowNames, colNames) if (is.null(bbox)) bbox <- .bboxCoords(coords) new("SpatialPoints", coords = coords, bbox = bbox, proj4string = proj4string) } .bboxCoords = function(coords) { stopifnot(nrow(coords) > 0) bbox = t(apply(coords, 2, range)) dimnames(bbox)[[2]] = c("min", "max") as.matrix(bbox) } setMethod("coordinates", "matrix", function(obj) { if (!is.numeric(obj)) stop("cannot derive coordinates from non-numeric matrix") storage.mode(obj) <- "double" if (any(is.na(obj))) stop("NA values in coordinates") if (any(!is.finite(obj))) stop("non-finite coordinates") obj } ) setMethod("coordinates", "data.frame", function(obj)coordinates(as.matrix(obj))) setMethod("coordinates", "list", function(obj) coordinates(as.data.frame(obj))) asWKTSpatialPoints = function(x, digits = getOption("digits")) { data.frame(geometry = paste("POINT(",unlist(lapply(data.frame( t(signif(coordinates(x),digits = digits))), paste, collapse=" ")),")",sep="")) } "print.SpatialPoints" <- function(x, ..., digits = getOption("digits"), asWKT = .asWKT) { cat("SpatialPoints:\n") if (asWKT) print(asWKTSpatialPoints(x, digits)) else print(x@coords) pst <- paste(strwrap(paste( "Coordinate Reference System (CRS) arguments:", slot(slot(x, "proj4string"), "projargs"))), collapse="\n") cat(pst, "\n") } setMethod("show", "SpatialPoints", function(object) print.SpatialPoints(object)) plot.SpatialPoints = function(x, pch = 3, axes = FALSE, add = FALSE, xlim = NULL, ylim = NULL, ..., setParUsrBB=FALSE, cex = 1, col = 1, lwd = 1, bg = 1) { if (! add) plot(as(x, "Spatial"), axes = axes, xlim = xlim, ylim = ylim, ..., setParUsrBB=setParUsrBB) cc = coordinates(x) points(cc[,1], cc[,2], pch = pch, cex = cex, col = col, lwd = lwd, bg = bg) } setMethod("plot", signature(x = "SpatialPoints", y = "missing"), function(x,y,...) plot.SpatialPoints(x,...)) points.SpatialPoints = function(x, y = NULL, ...) points(coordinates(x), ...) setMethod("coordinates", "SpatialPoints", function(obj) obj@coords) as.data.frame.SpatialPoints = function(x, row.names, optional, ...) 
data.frame(x@coords) setAs("SpatialPoints", "data.frame", function(from) as.data.frame(from)) row.names.SpatialPoints <- function(x) { ret = dimnames(slot(x, "coords"))[[1]] if (is.null(ret)) seq_len(nrow(slot(x, "coords"))) else ret } "row.names<-.SpatialPoints" <- function(x, value) { dimnames(slot(x, "coords"))[[1]] <- value x } setMethod("[", "SpatialPoints", function(x, i, j, ..., drop = TRUE) { if (!missing(j)) warning("j index ignored") if (is.character(i)) i <- match(i, row.names(x)) else if (is(i, "Spatial")) i = !is.na(over(x, geometry(i))) if (any(is.na(i))) stop("NAs not permitted in row index") x@coords = x@coords[i, , drop = FALSE] if (drop && nrow(x@coords)) x@bbox = .bboxCoords(x@coords) x }) setMethod("coordnames", signature(x = "SpatialPoints"), function(x) dimnames(x@coords)[[2]]) setReplaceMethod("coordnames", signature(x = "SpatialPoints", value = "character"), function(x, value) { dimnames(x@bbox)[[1]] = value dimnames(x@coords)[[2]] = value x } ) length.SpatialPoints = function(x) { nrow(x@coords) } setMethod("$", "SpatialPoints", function(x, name) { if (name %in% coordnames(x)) return(x@coords[,name]) if (!("data" %in% slotNames(x))) stop("no $ method for object without attributes") x@data[[name]] } )
dotouchgen <- function(indelow1, indeupp1, indelow2, indeupp2, direction) {
  # Do two axis-aligned boxes touch, ignoring the dimension `direction`?
  # Boxes are given by their lower (indelow*) and upper (indeupp*) corners.
  epsi <- 0
  d <- length(indelow1)
  touch <- TRUE
  i <- 1
  while (i <= d) {
    if ((i != direction) &&
        ((indelow1[i] > indeupp2[i] + epsi) || (indeupp1[i] < indelow2[i] - epsi))) {
      touch <- FALSE
    }
    i <- i + 1
  }
  return(touch)
}
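# Sketch: two unit boxes in 2D, sharing the face x = 1. With direction = 1 the
# x-axis is ignored, so only the y-ranges need to overlap (intervals here are
# treated as closed, since the comparisons above are strict).
dotouchgen(c(0, 0), c(1, 1), c(1, 0), c(2, 1), direction = 1)  # TRUE: shared face
dotouchgen(c(0, 0), c(1, 1), c(0, 2), c(1, 3), direction = 1)  # FALSE: gap in y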
setOldClass(c("RecLinkResult", "RecLinkData")) setOldClass("RecLinkClassif")
get_eurostat_raw <- function(id) {
  base <- getOption("eurostat_url")
  url <- paste0(base,
                "estat-navtree-portlet-prod/BulkDownloadListing?sort=1&file=data%2F",
                id, ".tsv.gz")

  tfile <- tempfile()
  on.exit(unlink(tfile))
  utils::download.file(url, tfile)

  dat <- readr::read_tsv(gzfile(tfile), na = ":",
                         col_types = readr::cols(.default = readr::col_character()))

  # A valid bulk-download table has at least two columns and one row
  if (ncol(dat) < 2 || nrow(dat) < 1) {
    msg <- paste(". Some datasets (for instance the comext type) are not",
                 "accessible via the eurostat interface. You can try to search",
                 "the data manually from the comext database at",
                 "http://epp.eurostat.ec.europa.eu/newxtweb/ or bulk download facility at",
                 "http://ec.europa.eu/eurostat/estat-navtree-portlet-prod/BulkDownloadListing",
                 "or annual Excel files",
                 "http://ec.europa.eu/eurostat/web/prodcom/data/excel-files-nace-rev.2")
    if (grepl("does not exist or is not readable", dat[1])) {
      stop(id, " does not exist or is not readable", msg)
    } else {
      stop(paste("Could not download ", id, msg))
    }
  }

  dat
}
source("incl/start.R") library(future) supportedStrategies <- function(...) future:::supportedStrategies() isWin32 <- FALSE availCores <- 2L message("*** Demos ...") message("*** Mandelbrot demo ...") if (!isWin32) { options(future.demo.mandelbrot.nrow = 2L) options(future.demo.mandelbrot.resolution = 50L) options(future.demo.mandelbrot.delay = FALSE) for (cores in 1:availCores) { message(sprintf("Testing with %d cores ...", cores)) options(mc.cores = cores) for (strategy in supportedStrategies(cores)) { message(sprintf("- plan('%s') ...", strategy)) plan(strategy) demo("mandelbrot", package = "progressr", ask = FALSE) message(sprintf("- plan('%s') ... DONE", strategy)) } message(sprintf("Testing with %d cores ... DONE", cores)) } } else { message(" - This demo requires R (>= 3.2.0). Skipping test. (Skipping also on Win32 i386 for speed)") } message("*** Mandelbrot demo ... DONE") message("*** Demos ... DONE") source("incl/end.R")
test_that("can pivot all cols (unspecified) to long", { df <- data.table(x = 1:2, y = 3:4) pivot_df <- dt_pivot_longer(df)[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, cols = c(x,y)), name, value) expect_named(pivot_df, c("name", "value")) expect_equal(pivot_df$name, tidyr_df$name) expect_equal(pivot_df$value, tidyr_df$value) }) test_that("can pivot all cols (specified) to long", { df <- data.table(x = 1:2, y = 3:4) pivot_df <- dt_pivot_longer(df, cols = c(x,y))[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, cols = c(x, y)), name, value) expect_named(pivot_df, c("name", "value")) expect_equal(pivot_df$name, tidyr_df$name) expect_equal(pivot_df$value, tidyr_df$value) }) test_that("can select a single column", { df <- data.table(x = 1:2, y = 3:4) pivot_df <- dt_pivot_longer(df, cols = x)[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, cols = x), name, value) expect_named(pivot_df, c("y", "name", "value")) expect_equal(pivot_df$name, tidyr_df$name) expect_equal(pivot_df$value, tidyr_df$value) pivot_df2 <- dt_pivot_longer(df, cols = c(x))[order(name, value)] expect_equal(pivot_df, pivot_df2) }) test_that("preserves original keys", { df <- data.table(x = 1:2, y = 2, z = 1:2) pivot_df <- dt_pivot_longer(df, cols = c(y, z))[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, c(y, z)), name, value) expect_named(pivot_df, c("x", "name", "value")) expect_equal(pivot_df$x, tidyr_df$x) }) test_that("can drop missing values", { df <- data.table(x = c(1, NA), y = c(NA, 2)) pivot_df <- dt_pivot_longer(df, c(x,y), values_drop_na = TRUE)[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, c(x,y), values_drop_na = TRUE), name, value) expect_equal(pivot_df$name, c("x", "y")) expect_equal(pivot_df$value, tidyr_df$value) }) test_that("... args to melt", { df <- data.table(x = c(1, 2), y = c(2,2)) expect_named(dt_pivot_longer(df, c(x,y), verbose = TRUE), c("name", "value")) }) test_that("testing removal of multiple columns", { df <- data.table(x = c(1, 2), y = c(2,2), z = c(1,1)) expect_named(dt_pivot_longer(df, c(-x)), c("x", "name", "value")) expect_named(dt_pivot_longer(df, -x), c("x", "name", "value")) expect_named(dt_pivot_longer(df, c(-x,-y)), c("x", "y", "name", "value")) expect_warning(dt_pivot_longer(df, c(-x,-y,-z))) }) test_that("stops if given vector", { df <- data.table(x = c(1, 2), y = c(2,2)) expect_error(dt_pivot_longer(df$x, c(x,-y))) }) test_that("works with select helpers", { df <- data.table(x = 1:2, y = 2, z = 1:2) pivot_df <- dt_pivot_longer(df, cols = c(dt_starts_with("y"), dt_contains("z")))[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, c(dplyr::starts_with("y"), dplyr::contains("z"))), name, value) expect_named(pivot_df, c("x", "name", "value")) expect_equal(pivot_df$x, tidyr_df$x) }) test_that("a single helper works outside of c() call", { df <- data.table(x = 1:2, y = 3:4) pivot_df <- dt_pivot_longer(df, cols = dt_everything())[order(name, value)] tidyr_df <- dplyr::arrange(tidyr::pivot_longer(df, cols = dplyr::everything()), name, value) expect_named(pivot_df, c("name", "value")) expect_equal(pivot_df$name, tidyr_df$name) expect_equal(pivot_df$value, tidyr_df$value) })
context("nifti data") img01 <- nifti(array(1:64, c(4,4,4,1)), datatype=4) ximg01 = img01 img02 <- nifti(array(64:1, c(4,4,4,1)), datatype=4) test_that("as.nifti", { expect_s4_class(as.nifti(img01), "nifti") anlz_img = as.anlz(img01) expect_s4_class(anlz_img, "anlz") expect_s4_class(img01 + anlz_img, "nifti") expect_s4_class(img01 + anlz_img, "nifti") }) test_that("operators", { expect_false(is.afni(img01)) expect_false(is.anlz(img01)) expect_true(is.nifti(img01 + img02)) expect_true(is.nifti(sqrt(2) * img01)) expect_true(is.nifti(img02 / pi)) }) test_that("extract and assign char", { funcs = c("data_type", "db_name", "regular", "dim_info", "descrip", "aux_file", "intent_name", "magic") func = funcs[1] for (func in funcs) { assign_func = paste0(func, "<-") t_value = basename(tempfile()) expect_silent(do.call(func, args = list(img01))) expect_silent({ img01 = do.call(assign_func, list(object = img01, value = t_value)) }) expect_equal(do.call(func, list(img01)), t_value) } }) test_that("extract and assign numeric", { funcs = c("sizeof_hdr", "extents", "session_error", "dim_", "intent_p1", "intent_p2", "intent_p3", "intent_code", "datatype", "bitpix", "slice_start", "pixdim", "vox_offset", "scl_slope", "scl_inter", "slice_end", "slice_code", "xyzt_units", "cal_max", "cal_min", "slice_duration", "toffset", "glmax", "glmin", "qform_code", "sform_code", "quatern_b", "quatern_c", "quatern_d", "qoffset_x", "qoffset_y", "qoffset_z", "srow_x", "srow_y", "srow_z", "extender", "img_data" ) func = funcs[1] for (func in funcs) { img01 = ximg01 assign_func = paste0(func, "<-") expect_silent({ t_value = do.call(func, args = list(img01)) }) expect_silent({ img01 = do.call(assign_func, list(object = img01, value = t_value)) }) expect_equal(do.call(func, list(img01)), t_value) } })
GetMinMap <- function(mapping, leftbias = TRUE, verbose = FALSE) {
  l1 <- nrow(mapping$path1)
  l2 <- nrow(mapping$path2)
  l1.b <- 2 * l1 - 1
  l2.b <- 2 * l2 - 1
  leastcostchain <- matrix(0, l1.b, l2.b)

  for (i in 1:(l1.b)) {
    if (verbose) print(paste(i, "of", l1.b))
    for (j in 1:(l2.b)) {
      curcost <- mapping$linkcost[i, j]
      if (i == 1 & j == 1) {
        leastcostchain[i, j] <- mapping$linkcost[i, j]
      } else if (even(i) & even(j)) {
        leastcostchain[i, j] <- Inf
      } else {
        # Candidate predecessor cells: left, diagonal, up
        if (odd(i) & odd(j)) {
          prevL <- c(i, j - 1)
          prevD <- c(i - 2, j - 2)
          prevU <- c(i - 1, j)
        } else if (even(i) & odd(j)) {
          prevL <- c(i, j - 2)
          prevD <- c(0, 0)
          prevU <- c(i - 1, j)
        } else if (odd(i) & even(j)) {
          prevL <- c(i, j - 1)
          prevD <- c(0, 0)
          prevU <- c(i - 2, j)
        }

        # A move is legal when it reproduces the least-cost difference
        if (all(prevL > 0)) {
          diff <- mapping$leastcost[i, j] - mapping$leastcost[prevL[1], prevL[2]]
          delta <- Cost(mapping$path1, mapping$path2, i, j, prevL[1], prevL[2], mapping$opposite)
          pathLlegal <- abs(diff - delta) < .000001
        } else {
          pathLlegal <- FALSE
        }
        if (all(prevU > 0)) {
          diff <- mapping$leastcost[i, j] - mapping$leastcost[prevU[1], prevU[2]]
          delta <- Cost(mapping$path1, mapping$path2, i, j, prevU[1], prevU[2], mapping$opposite)
          pathUlegal <- abs(diff - delta) < .000001
        } else {
          pathUlegal <- FALSE
        }
        if (all(prevD > 0)) {
          diff <- mapping$leastcost[i, j] - mapping$leastcost[prevD[1], prevD[2]]
          delta <- Cost(mapping$path1, mapping$path2, i, j, prevD[1], prevD[2], mapping$opposite)
          pathDlegal <- abs(diff - delta) < .000001
        } else {
          pathDlegal <- FALSE
        }

        if (leftbias) {
          picks <- c(pathLlegal, pathDlegal, pathUlegal)
        } else {
          picks <- c(pathUlegal, pathDlegal, pathLlegal)
        }

        if (pathLlegal) costL <- leastcostchain[prevL[1], prevL[2]] else costL <- Inf
        if (pathUlegal) costU <- leastcostchain[prevU[1], prevU[2]] else costU <- Inf
        if (pathDlegal) costD <- leastcostchain[prevD[1], prevD[2]] else costD <- Inf

        # Tie-breaking order depends on leftbias
        if (leftbias) {
          costs <- c(costL, costD, costU)
          prevs <- rbind(prevL, prevD, prevU)
        } else {
          costs <- c(costU, costD, costL)
          prevs <- rbind(prevU, prevD, prevL)
        }

        argmin <- which.min(costs)
        bestprev <- prevs[argmin, ]
        leastcostchain[i, j] <- curcost + costs[argmin]
        mapping$bestpath[i, j, ] <- bestprev
      }
    }
  }

  mapping$leastcostchain <- leastcostchain
  mapping$minmap <- TRUE
  mapping$minlinkcost <- leastcostchain[nrow(leastcostchain), ncol(leastcostchain)]
  mapping
}
acf2msd <- function(acf) {
  # Convert an autocovariance sequence into the corresponding MSD via the
  # second-difference recursion; guards keep short inputs from misindexing
  N <- length(acf)
  msd <- rep(NA, N)
  msd[1] <- acf[1]
  if (N >= 2) msd[2] <- 2 * (acf[2] + msd[1])
  if (N >= 3) {
    for (ii in 3:N) {
      msd[ii] <- 2 * (acf[ii] + msd[ii - 1]) - msd[ii - 2]
    }
  }
  msd
}
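# Round-trip sketch: acf2msd() inverts the relation
# acf[k] = (msd[k+1] - 2*msd[k] + msd[k-1]) / 2. msd2acf() below is a
# hypothetical helper written here only to illustrate that inverse; it is not
# part of the package code above.
msd2acf <- function(msd) {
  N <- length(msd)
  acf <- rep(NA, N)
  acf[1] <- msd[1]
  if (N >= 2) acf[2] <- (msd[2] - 2 * msd[1]) / 2
  if (N >= 3) {
    for (ii in 3:N) {
      acf[ii] <- (msd[ii] - 2 * msd[ii - 1] + msd[ii - 2]) / 2
    }
  }
  acf
}
msd <- (1:10)^0.8                       # a toy subdiffusive MSD curve
all.equal(acf2msd(msd2acf(msd)), msd)   # TRUE: the recursions are inverses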
myiAWvar.TM <- function(value, memSubjects, trim.alpha = 0.25) {
  u.memSubjects <- sort(unique(memSubjects))
  if (length(u.memSubjects) != 2) {
    stop("memSubjects must take 2 and only 2 values\n")
  }
  if (!identical(u.memSubjects, c(0, 1))) {
    stop("memSubjects must only take values 0 or 1\n")
  }
  if (length(value) != length(memSubjects)) {
    stop("value must have the same length as memSubjects\n")
  }

  pos1 <- which(memSubjects == 1)
  pos0 <- which(memSubjects == 0)
  value1 <- value[pos1]
  value0 <- value[pos0]
  var1 <- var(value1, na.rm = TRUE)
  var0 <- var(value0, na.rm = TRUE)

  # Absolute deviations from the group trimmed means
  m.value1 <- mean(value1, na.rm = TRUE, trim = trim.alpha)
  m.value0 <- mean(value0, na.rm = TRUE, trim = trim.alpha)
  value1.2 <- abs(value1 - m.value1)
  value0.2 <- abs(value0 - m.value0)
  z <- rep(NA, length(value))
  z[pos1] <- value1.2
  z[pos0] <- value0.2

  # Score-type chi-squared statistic for equality of spread
  ybar <- mean(memSubjects, na.rm = TRUE)
  U2 <- sum((memSubjects - ybar) * z, na.rm = TRUE)
  zbar <- mean(z, na.rm = TRUE)
  varU2 <- ybar * (1 - ybar) * sum((z - zbar)^2, na.rm = TRUE)
  T2 <- U2^2 / varU2
  pval <- 1 - pchisq(T2, df = 1)

  # Sign the statistic so that a negative value flags var1 < var0
  if (var1 < var0) {
    T2 <- -T2
  }

  res <- c(pval, T2)
  names(res) <- c("pvalue", "stat")
  return(res)
}
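# Toy usage sketch (simulated data, not from the package): compare the spread
# of `value` between the 0/1 membership groups defined above.
set.seed(1)
value <- c(rnorm(20, sd = 1), rnorm(20, sd = 2))
memSubjects <- rep(c(0, 1), each = 20)
myiAWvar.TM(value, memSubjects)
# the stat is typically positive here, since group 1 is the more variable one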
tw_get_message_media <- function(message_sid) {
  base_url <- "https://api.twilio.com/"
  ua <- user_agent("https://github.com/seankross/twilio")
  path <- paste("2010-04-01", "Accounts", get_sid(), "Messages", message_sid,
                "Media.json", sep = "/")
  url <- modify_url(base_url, path = path)
  resp <- GET(url, ua, authenticate(get_sid(), get_token()))

  if (http_type(resp) != "application/json") {
    stop("Twilio API did not return JSON.", call. = FALSE)
  }

  parsed <- fromJSON(content(resp, "text", encoding = "UTF-8"),
                     simplifyVector = FALSE)
  check_status(resp)

  media <- map(parsed$media_list, function(x) {
    structure(
      list(sid = x$sid,
           message_sid = x$parent_sid,
           content_type = x$content_type),
      class = "twilio_media"
    )
  })

  # Resolve the (redirected) download URL for each media item
  for (i in seq_along(media)) {
    media_path <- paste("2010-04-01", "Accounts", get_sid(), "Messages",
                        message_sid, "Media", media[[i]]$sid, sep = "/")
    media_url <- modify_url(base_url, path = media_path)
    media_resp <- GET(media_url, authenticate(get_sid(), get_token()))
    media[[i]]$url <- media_resp$url
  }

  media
}

print.twilio_media <- function(x, ...) {
  cat("URL: ", x$url, "\n", "Type: ", x$content_type, "\n", sep = "")
  invisible(x)
}
summarise_nm_model <- function(file, model, software, rounding) { sum <- dplyr::bind_rows( sum_software(software), sum_version(model, software), sum_file(file), sum_run(file), sum_directory(file), sum_reference(model, software), sum_timestart(model, software), sum_timestop(model, software), sum_probn(model, software), sum_label(model, software), sum_description(model, software), sum_input_data(model, software), sum_nobs(model, software), sum_nind(model, software), sum_nsim(model, software), sum_simseed(model, software), sum_subroutine(model, software), sum_runtime(model, software), sum_covtime(model, software), sum_term(model, software), sum_warnings(model, software), sum_errors(model, software), sum_nsig(model, software), sum_condn(model, software, rounding), sum_nesample(model, software), sum_esampleseed(model, software), sum_ofv(model, software), sum_method(model, software), sum_shk(model, software, 'eps', rounding), sum_shk(model, software, 'eta', rounding) ) tmp <- sum %>% dplyr::filter(.$problem != 0) if (nrow(tmp) == 0) return(sum) tmp %>% tidyr::complete(!!!rlang::syms(c('problem', 'label')), fill = list(subprob = 0, value = 'na')) %>% dplyr::bind_rows(dplyr::filter(sum, sum$problem == 0)) %>% dplyr::arrange_at(.vars = c('problem', 'label', 'subprob')) %>% dplyr::mutate(descr = dplyr::case_when( .$label == 'software' ~ 'Software', .$label == 'version' ~ 'Software version', .$label == 'file' ~ 'Run file', .$label == 'run' ~ 'Run number', .$label == 'dir' ~ 'Run directory', .$label == 'ref' ~ 'Reference model', .$label == 'probn' ~ 'Problem number', .$label == 'timestart' ~ 'Run start time', .$label == 'timestop' ~ 'Run stop time', .$label == 'descr' ~ 'Run description', .$label == 'label' ~ 'Run label', .$label == 'data' ~ 'Input data', .$label == 'nobs' ~ 'Number of observations', .$label == 'nind' ~ 'Number of individuals', .$label == 'nsim' ~ 'Number of simulations', .$label == 'simseed' ~ 'Simulation seed', .$label == 'subroutine' ~ 'ADVAN', .$label == 'runtime' ~ 'Estimation runtime', .$label == 'covtime' ~ 'Covariance step runtime', .$label == 'term' ~ 'Termination message', .$label == 'warnings' ~ 'Run warnings', .$label == 'errors' ~ 'Run errors', .$label == 'nsig' ~ 'Number of significant digits', .$label == 'condn' ~ 'Condition number', .$label == 'nesample' ~ 'Number of ESAMPLE', .$label == 'esampleseed' ~ 'ESAMPLE seed number', .$label == 'ofv' ~ 'Objective function value', .$label == 'method' ~ 'Estimation method', .$label == 'epsshk' ~ 'Epsilon shrinkage', .$label == 'etashk' ~ 'Eta shrinkage')) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'descr', 'label', 'value')) } sum_tpl <- function(label, value) { dplyr::tibble(problem = 0, subprob = 0, label = label, value = value) } sum_software <- function(software) { sum_tpl('software', software) } sum_version <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$problem == 0) %>% dplyr::filter(stringr::str_detect(.$code, 'NONLINEAR MIXED EFFECTS MODEL PROGRAM')) if (nrow(x) == 0) return(sum_tpl('version', 'na')) sum_tpl('version', stringr::str_match(x$code, 'VERSION\\s+(.+)$')[, 2]) } } sum_file <- function(file) { sum_tpl('file', basename(file)) } sum_run <- function(file) { sum_tpl('run', update_extension(basename(file), '')) } sum_directory <- function(file) { sum_tpl('dir', dirname(file)) } sum_reference <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$problem == 0) %>% dplyr::filter(stringr::str_detect(tolower(.$comment), 
stringr::regex('based on\\s*:', ignore_case = TRUE))) if (nrow(x) == 0) return(sum_tpl('ref', 'na')) sum_tpl('ref', stringr::str_match(x$comment, ':\\s*(.+)$')[1, 2]) } } sum_timestart <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::slice(1) %>% dplyr::filter(stringr::str_detect(.$code, '\\s+\\d{2}:\\d{2}:\\d{2}\\s+')) if (nrow(x) == 0) return(sum_tpl('timestart', 'na')) sum_tpl('timestart', x$code) } } sum_timestop <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::slice(nrow(model)) %>% dplyr::filter(stringr::str_detect(.$code, '\\s+\\d{2}:\\d{2}:\\d{2}\\s+')) if (nrow(x) == 0) return(sum_tpl('timestop', 'na')) sum_tpl('timestop', x$code) } } sum_probn <- function(model, software) { if (software == 'nonmem') { x <- unique(model$problem[model$problem != 0]) if (length(x) == 0) return(sum_tpl('probn', 'na')) dplyr::tibble( problem = x, subprob = 0, label = 'probn', value = as.character(x)) } } sum_label <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'pro') if (nrow(x) == 0) return(sum_tpl('label', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'label', value = as.character(.$code)) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_description <- function(model, software) { if (software == 'nonmem') { x <- dplyr::filter(.data = model, model$level == 0) start <- which(stringr::str_detect(tolower(x$comment), stringr::regex('2.\\s*description\\s*:', ignore_case = TRUE))) if (length(start) == 1) { end <- which(stringr::str_detect(tolower(x$comment), '(3|x\\d)\\.\\s*\\w+')) end <- end[(end - start) > 0] end <- ifelse(length(end) == 0, nrow(x), min(end) - 1) x <- dplyr::slice(.data = x, seq(start, end)) %>% {stringr::str_replace(.$comment, '^\\s*;\\s*', '')} %>% stringr::str_c(collapse = ' ') %>% {sum_tpl('descr', stringr::str_match(., ':\\s*(.+)$')[, 2])} return(value = x) } sum_tpl('descr', 'na') } } sum_input_data <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'dat') %>% dplyr::distinct(!!rlang::sym('level'), .keep_all = TRUE) if (nrow(x) == 0) return(sum_tpl('data', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'data', value = stringr::str_match(.$code, '^\\s*?([^\\s]+)\\s+')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_nobs <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('TOT. NO. OF OBS RECS'))) if (nrow(x) == 0) return(sum_tpl('nobs', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'nobs', value = stringr::str_extract(.$code, '\\d+')) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_nind <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('TOT. NO. 
OF INDIVIDUALS'))) if (nrow(x) == 0) return(sum_tpl('nind', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'nind', value = stringr::str_extract(.$code, '\\d+')) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_nsim <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'sim') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('NSUB'))) if (nrow(x) == 0) return(sum_tpl('nsim', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'nsim', value = stringr::str_match(.$code, 'NSUB.*=\\s*(\\d+)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_simseed <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'sim') %>% dplyr::filter(stringr::str_detect(.$code, '\\(\\d+\\)')) if (nrow(x) == 0) return(sum_tpl('simseed', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'simseed', value = stringr::str_match(.$code, '\\((\\d+)\\)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_subroutine <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'sub') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('ADVAN'))) if (nrow(x) == 0) return(sum_tpl('subroutine', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'subroutine', value = stringr::str_match(.$code, 'ADVAN(\\d+)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_runtime <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, 'Elapsed estimation\\s+time')) if (nrow(x) == 0) return(sum_tpl('runtime', 'na')) x %>% dplyr::group_by_at(.vars = 'problem') %>% dplyr::mutate(subprob = (1:n()) - 1) %>% dplyr::ungroup() %>% dplyr::mutate(label = 'runtime', value = as.ctime(stringr::str_match(.$code, '([\\.\\d]+)')[, 2])) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_covtime <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, 'Elapsed covariance\\s+time in seconds:\\s+\\d')) if (nrow(x) == 0) return(sum_tpl('covtime', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'covtime', value = as.ctime(stringr::str_match(.$code, '([\\.\\d]+)')[, 2])) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_term <- function(model, software) { if (software == 'nonmem') { x <- dplyr::filter(model, model$subroutine == 'lst') start <- which(stringr::str_detect(x$code, stringr::fixed('0MINIMIZATION'))) end <- which(stringr::str_detect(x$code, stringr::fixed(" NO. OF FUNCTION EVALUATIONS USED:"))) if (length(start) == 0 | length(end) == 0 | length(start)!=length(end)) return(sum_tpl('term', 'na')) x %>% dplyr::slice(purrr::map2(start, end, ~seq(.x,.y)) %>% purrr::flatten_int()) %>% dplyr::group_by_at(.vars = 'problem') %>% tidyr::nest() %>% dplyr::ungroup() %>% dplyr::mutate(value = purrr::map_chr(.$data, function(y) { drop <- min(which(stringr::str_detect(y$code, 'NO. 
OF'))) dplyr::slice(.data = y, seq(1, (drop - 1))) %>% {stringr::str_trim(.$code)} %>% stringr::str_trunc(width = 56) %>% stringr::str_c(collapse = '\n') %>% stringr::str_replace('0MINIM', 'MINIM')})) %>% dplyr::mutate(subprob = 0, label = 'term') %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_warnings <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'oth') %>% dplyr::filter(stringr::str_detect(.$code, 'WARNINGS AND ERRORS|\\(WARNING')) if (nrow(x) == 0) return(sum_tpl('warnings', 'na')) x %>% dplyr::mutate(problem = stringr::str_match(.$code, 'FOR PROBLEM\\s+(\\d+)')[, 2]) %>% tidyr::fill(!!rlang::sym('problem')) %>% dplyr::mutate(problem = as.numeric(.$problem)) %>% dplyr::filter(!stringr::str_detect(.$code, 'FOR PROBLEM\\s+(\\d+)')) %>% dplyr::mutate(code = stringr::str_trim(.$code)) %>% dplyr::mutate(code = stringr::str_trunc(.$code, width = 56)) %>% dplyr::distinct(!!!rlang::syms(c('problem', 'code'))) %>% dplyr::group_by_at(.vars = 'problem') %>% tidyr::nest() %>% dplyr::ungroup() %>% dplyr::mutate(value = purrr::map_chr(.$data, ~stringr::str_c(.$code, collapse = '\n'))) %>% dplyr::mutate(subprob = 0, label = 'warnings') %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_errors <- function(model, software) { if (software == 'nonmem') { sum_tpl('errors', 'na') } } sum_nsig <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('NO. OF SIG. DIGITS'))) if (nrow(x) == 0) return(sum_tpl('nsig', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'nsig', value = stringr::str_match(.$code, ':\\s+([\\.\\d]+)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_condn <- function(model, software, rounding) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::slice(which(stringr::str_detect(.$code, stringr::fixed('EIGENVALUES OF COR'))) + 4) if (nrow(x) == 0) return(sum_tpl('condn', 'na')) x %>% dplyr::group_by_at(.vars = 'problem') %>% tidyr::nest() %>% dplyr::ungroup() %>% dplyr::mutate(subprob = 0, label = 'condn', value = purrr::map_chr(.$data, function(x) { stringr::str_trim(x$code, side = 'both') %>% stringr::str_split(pattern = '\\s+') %>% purrr::flatten_chr() %>% as.numeric() %>% {max(.)/min(.)} %>% round(digits = rounding) %>% as.character()})) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_nesample <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'tab') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('ESAMPLE'))) if (nrow(x) == 0) return(sum_tpl('nesample', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'nesample', value = stringr::str_match(.$code, 'ESAMPLE\\s*=\\s*(\\d+)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_esampleseed <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'tab') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('SEED'))) if (nrow(x) == 0) return(sum_tpl('esampleseed', 'na')) x %>% dplyr::mutate(subprob = 0, label = 'esampleseed', value = stringr::str_match(.$code, 'SEED\\s*=\\s*(\\d+)')[, 2]) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) } } sum_ofv <- function(model, software) { if (software == 'nonmem') { x <- model %>% 
dplyr::filter(.$subroutine == 'lst') %>% dplyr::filter(stringr::str_detect(.$code, stringr::fixed('#OBJV'))) if (nrow(x) == 0) return(sum_tpl('ofv', 'na')) x %>% dplyr::mutate(value = stringr::str_match(.$code, '\\*\\s+(.+)\\s+\\*')[, 2]) %>% dplyr::group_by_at(.vars = 'problem') %>% dplyr::mutate(subprob = (1:n()) - 1, label = 'ofv') %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) %>% dplyr::ungroup() } }
sum_method <- function(model, software) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine %in% c('sim', 'est')) %>% dplyr::filter(stringr::str_detect(.$code, 'METH|NSUB')) if (nrow(x) == 0) return(sum_tpl('method', 'na')) x %>% dplyr::mutate(value = stringr::str_match(.$code, 'METH[OD]*\\s*=\\s*([^\\s]+)')[, 2], inter = stringr::str_detect(.$code, '\\sINTER'), lapl = stringr::str_detect(.$code, '\\sLAPLA'), like = stringr::str_detect(.$code, '\\sLIKE')) %>% dplyr::mutate(value = dplyr::if_else(.$subroutine == 'sim', 'sim', .$value)) %>% dplyr::mutate(value = dplyr::case_when(.$value %in% c('0', 'ZERO') ~ 'FO', .$value == '1' ~ 'FOCE', stringr::str_detect(.$value, 'COND') ~ 'FOCE', TRUE ~ tolower(.$value))) %>% dplyr::mutate(value = stringr::str_c(stringr::str_to_lower(.$value), dplyr::if_else(.$inter, '-i', ''), dplyr::if_else(.$lapl, ' laplacian', ''), dplyr::if_else(.$like, ' likelihood', ''))) %>% dplyr::group_by_at(.vars = 'problem') %>% dplyr::mutate(subprob = (1:n()) - 1, label = 'method') %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) %>% dplyr::ungroup() } }
sum_shk <- function(model, software, type, rounding) { if (software == 'nonmem') { x <- model %>% dplyr::filter(.$subroutine == 'lst') %>% dplyr::group_by_at(.vars = 'problem') %>% tidyr::nest() %>% dplyr::ungroup() %>% dplyr::mutate(start = purrr::map_int(.x = .$data, .f = function(x) { stringr::str_c(stringr::str_to_upper(type), 'SHRINK[^V]') %>% stringr::regex(ignore_case = TRUE) %>% {stringr::str_detect(string = x$code, pattern = .)} %>% which() %>% {ifelse(length(.) == 0, NA_integer_, .)} })) %>% dplyr::filter(!is.na(.$start)) if (nrow(x) == 0) return(sum_tpl(stringr::str_c(type, 'shk'), 'na')) x <- x %>% dplyr::mutate(rows = purrr::map2(.x = .$data, .y = .$start, .f = function(x, start) { x$code[start:nrow(x)] %>% {start + (which.max(stringr::str_detect(., '^\\s+\\D')[-1]) - 1)} %>% {seq(start, .)}})) %>% dplyr::mutate(code = purrr::map2_chr(.x = .$data, .y = .$rows, ~stringr::str_c(.x$code[.y], collapse = ' '))) %>% dplyr::mutate(code = stringr::str_match(.$code, '\\Q(%)\\E:*\\s*(.+)')[, 2]) %>% dplyr::mutate(code = stringr::str_split(.$code, '\\s+')) %>% dplyr::mutate(value = purrr::map(.$code, ~round(as.numeric(.), digits = rounding)), grouping = purrr::map(.$code, ~stringr::str_c(' [', seq_along(.), ']', sep = ''))) %>% dplyr::group_by_at(.vars = 'problem') %>% dplyr::mutate(subprob = (1:n()) - 1) %>% dplyr::ungroup() if (tidyr_new_interface()) { x <- x %>% tidyr::unnest(cols = dplyr::one_of('value', 'grouping')) } else { x <- x %>% tidyr::unnest(dplyr::one_of('value', 'grouping')) } x %>% dplyr::filter(.$value != 100) %>% dplyr::mutate(value = stringr::str_c(.$value, .$grouping)) %>% dplyr::group_by_at(.vars = c('problem', 'subprob')) %>% tidyr::nest() %>% dplyr::ungroup() %>% dplyr::mutate(label = stringr::str_c(type, 'shk'), value = purrr::map_chr(.$data, ~stringr::str_c(.$value, collapse = ', '))) %>% dplyr::select(dplyr::one_of('problem', 'subprob', 'label', 'value')) %>% dplyr::ungroup() } }
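# --- Hedged usage sketch (not part of the package source) ---
# The sum_* extractors above all expect `model` to be a parsed-model table
# with `problem`, `subroutine`, and `code` columns; sum_tpl() (defined
# elsewhere in the package) is only hit when nothing matches, so this demo
# avoids it. Assumes dplyr, tibble, and stringr are installed.
library(dplyr)  # the pipelines above use %>% and n()
demo_model <- tibble::tibble(
  problem    = c(1L, 1L),
  subroutine = c("sim", "lst"),
  code       = c("$SIMULATION (20130407) NSUB=100",
                 "#OBJV:********  1234.567  ********"))
sum_nsim(demo_model, software = "nonmem")  # value: "100"
sum_ofv(demo_model, software = "nonmem")   # value: "1234.567"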
plot.stFit = function( x, type='density', stData=NULL, coord.s=NULL, coord.knot=NULL, text.size=NULL, axis.text.size=NULL, title.text.size=NULL, burn = 1, signif.telecon = F, p = 1, local.covariate=NULL, lwd=NULL, facet.signif = 3, stat.smooth.bw=NULL, stat.smooth.degree=NULL, dots=NULL, ...) { stFit = x dots = c(dots, list(...)) dots = dots[!duplicated(dots)] for(x in setdiff(names(formals(eval(match.call()[[1]]))), c('dots', '...'))) { if(x %in% names(dots)) { assign(eval(x), dots[[x]]) } } match.opts = c('traceplot', 'density', 'pairs', 'teleconnection', 'beta', 'teleconnection_knot', 'teleconnection_knot_transect', 'teleconnection_knot_influence', 'teleconnection_knot_local', 'eof_alpha') type = match.opts[pmatch(type, match.opts)] if( type %in% c('traceplot', 'density', 'pairs') ) { if(stFit$varying) { listInd = which(!(names(stFit$parameters$samples) %in% c('beta', 'T'))) res.df = data.frame(stFit$parameters$samples[listInd]) } else { res.df = data.frame(stFit$parameters$samples) } maxIt = nrow(res.df) res.df = res.df[burn:maxIt,] res.df$Iteration = burn:maxIt if(!is.null(stFit$parameters$beta.names)) { colnames(res.df)[ 1:ncol(stFit$parameters$samples$beta)] = stFit$parameters$beta.names[1:ncol(stFit$parameters$samples$beta)] } res.plottable = melt(res.df, id.vars = 'Iteration', variable.name = 'param', value.name = 'Value') } ret = NULL if( type=='traceplot' ) { ret = ggplot(res.plottable, aes(x=Iteration, y=Value)) + geom_line() + facet_wrap(~param, scales='free') } else if( type=='density' ) { ret = ggplot(res.plottable, aes(x=Value)) + geom_density() + facet_wrap(~param, scales='free') + ylab('Posterior density') } else if( type=='pairs' ) { plot(res.df) } else if( type=='teleconnection' ) { if(is.null(stData)) { stop('stData object required for plotting estimated teleconnection effects.') } coord.s = unlist(coord.s) stData$alpha = stFit$alpha$summary$alpha stData$alpha_signif = stFit$alpha$summary$signif ret = plot.stData(stData, type='teleconnection', lab.teleconnection = 'alpha', dots=dots, signif.telecon = signif.telecon, ...) + ggtitle('Estimated teleconnection effects') } else if(type=='teleconnection_knot_local') { if(is.null(stData)) { stop('stData object required for plotting estimated teleconnection effects.') } stData$alpha_knots = stFit$alpha_knots$summary$alpha stData$alpha_knots_signif = stFit$alpha_knots$summary$signif stData$coords.knots = stFit$coords.knots ret = plot.stData(stData, 'teleconnection_knot_local', lab.teleconnection = expression(hat(alpha)), coord.r = coord.knot, dots=dots, ...) + ggtitle('Estimated teleconnection effects') } else if( type=='teleconnection_knot' ) { if(is.null(stData)) { stop('stData object required for plotting estimated teleconnection effects.') } stData$alpha_knots = stFit$alpha_knots$summary$alpha stData$alpha_knots_signif = stFit$alpha_knots$summary$signif stData$coords.knots = stFit$coords.knots ret = plot.stData(stData, 'teleconnection_knot', lab.teleconnection = expression(hat(alpha)), dots=dots, ...) 
+ ggtitle('Estimated teleconnection effects') } else if( type == 'teleconnection_knot_influence' ) { if(is.null(stData)) { stop('stData object required for plotting estimated teleconnection effects.') } coord.knot = matrix(unlist(coord.knot), nrow=1) Dz_to_knots = rdist.earth(stData$coords.r, coord.knot, miles=stFit$miles) c_full = maternArray(Dz_to_knots, scale = 1, range = mean(stFit$parameters$samples$rho_r[-(1:burn)]), smoothness = stFit$priors$cov.r$smoothness, nugget = 0) stData$coords.knots = stFit$coords.knots stData$Z = c_full stData$Z.lab = expression(italic(Cor[alpha])) ret = plot.stData(stData, 'remote', coords.knots = coord.knot, dots=dots, ...) + ggtitle('Remote covariate influence on knot index') } else if( type == 'teleconnection_knot_transect' ) { coord.s = unlist(coord.s) df = stFit$alpha_knots$summary %>% filter(lat.Y == coord.s[2]) %>% mutate(lon.Z = signif(lon.Z, 3), lat.Z = signif(lat.Z, 3)) lon.trans = lon_trans() lat.trans = lat_trans() df$lon.Z = factor(lon.trans$format(df$lon.Z), levels = lon.trans$format(sort(unique(df$lon.Z %% 360)))) df$lat.Z = factor(lat.trans$format(df$lat.Z), levels = lat.trans$format(sort(unique(df$lat.Z), decreasing = T))) if(!is.null(local.covariate)) { df = df %>% left_join(local.covariate, by=c('lon.Y', 'lat.Y')) } if(is.null(lwd)) lwd=1 ret = ggplot(df, aes(x = lon.Y, y = alpha)) + geom_ribbon(aes(ymin = lower, ymax=upper), fill = 'grey70') if(is.null(stat.smooth.bw)) { ret = ret + geom_line(lwd=lwd) } else { ret = ret + stat_smooth(span=stat.smooth.bw, se=F, col=1, method='loess', method.args=list(degree=stat.smooth.degree)) } ret = ret + geom_hline(yintercept = 0, lty=2, alpha=.6, lwd=lwd ) + facet_grid(lat.Z ~ lon.Z) + ylab('Teleconnection effect') + scale_x_continuous('Transect longitude', trans = lon_trans()) + ggtitle(paste('Teleconnection effects along', lat.trans$format(coord.s[2]), 'transect')) if(!is.null(text.size)) ret = ret + theme( text = element_text(size=text.size)) if(!is.null(axis.text.size)) ret = ret + theme( axis.text = element_text(size=axis.text.size)) if(!is.null(title.text.size)) ret = ret + theme( plot.title = element_text(size=title.text.size)) if(!is.null(local.covariate)) { ret = ret + geom_line(aes(x=lon.Y, y=x), col=2, alpha=.7, lwd=lwd) } ret.grob = ggplotGrob(ret) label.bg = rectGrob(gp = gpar(col = NA, fill = gray(6/8))) label.textgp = gpar(fontsize=ifelse(axis.text.size, axis.text.size, 14), col = gray(.1)) right.pos = max(ret.grob$layout[which(ret.grob$layout$name=='strip-right'),]$r) ylab.range = ret.grob$layout[which(ret.grob$layout$name=='ylab'),c('t','b')] ret.grob = gtable_add_cols(ret.grob, ret.grob$widths[right.pos], right.pos) ret.grob = gtable_add_grob(ret.grob, list(label.bg, textGrob("Teleconnection latitude", rot = -90, gp = label.textgp)), ylab.range$t, right.pos+1, ylab.range$b, right.pos+1, name = paste(runif(2))) top.pos = min(ret.grob$layout[which(ret.grob$layout$name=='strip-top'),]$t) xlab.range = ret.grob$layout[which(ret.grob$layout$name=='xlab'),c('l','r')] ret.grob = gtable_add_rows(ret.grob, ret.grob$heights[top.pos], top.pos-1) ret.grob = gtable_add_grob(ret.grob, list(label.bg, textGrob("Teleconnection longitude", gp = label.textgp)), top.pos, xlab.range$l, top.pos, xlab.range$r, name = paste(runif(2))) ret.grob = gtable_add_cols(ret.grob, unit(1/8, "line"), right.pos) ret.grob = gtable_add_rows(ret.grob, unit(1/8, "line"), top.pos) grid.newpage() grid.draw(ret.grob) ret = NULL } else if( type=='beta' ) { if(is.null(stData)) { stop('stData object required for plotting 
estimated spatially varying coefficients.') } betaH = colMeans(stFit$parameters$samples$beta[-(1:burn), seq(from = p, to = prod(dim(stData$X)[1:2]), by = ncol(stData$X) )]) stData$Y[,1] = betaH stData$Y.lab = 'Coefficient' ret = plot.stData(stData, 'response') + ggtitle('Estimated spatially varying coefficient') } if(!is.null(text.size)) ret = ret + theme( text = element_text(size=text.size)) if(!is.null(axis.text.size)) ret = ret + theme( axis.text = element_text(size=axis.text.size)) if(!is.null(ret)) ret }
projdepth <- function(x, z = NULL, options = list()) { if (missing(x)) { stop("Input argument x is required.") } x <- data.matrix(x) if (!is.numeric(x)) { stop("The input argument x must be a numeric data matrix.") } n1 <- nrow(x) p1 <- ncol(x) if (n1 > sum(complete.cases(x))) { stop("Missing values in x are not allowed.") } if (is.null(z)) { z <- x } z <- data.matrix(z) if (!is.numeric(z)) { stop("The input argument z must be a numeric data matrix.") } n2 <- nrow(z) p2 <- ncol(z) if (p1 != p2) { stop("The data dimension has to be the same for x and z.") } if (n2 > sum(complete.cases(z))) { stop("Missing values in z are not allowed.") } if (is.null(options)) { options <- list() } if (!is.list(options)) { stop("options must be a list") } tol <- 1e-7 scaled.x <- scale(x) temp <- attributes(scaled.x) column.sd <- temp[["scaled:scale"]] if (sum(column.sd <= 1e-14) > 0) { warning("One of the variables has zero standard deviation. Check the data matrix x.") returned.result <- list(depthX = NULL, depthZ = NULL, cutoff = NULL, flagX = NULL, flagZ = NULL, singularSubsets = NULL, dimension = sum(column.sd > 1e-14), hyperplane = as.numeric(column.sd <= 1e-14), inSubspace = NULL) class(returned.result) <- c("mrfDepth", "projdepth") return(returned.result) } w1 <- try(svd(scaled.x / sqrt(n1 - 1)), silent = TRUE) if (!is.list(w1)) { warning("The singular-value decomposition of the data matrix x could not be computed.") returned.result <- list(depthX = NULL, depthZ = NULL, cutoff = NULL, flagX = NULL, flagZ = NULL, singularSubsets = NULL, dimension = NULL, hyperplane = NULL, inSubspace = NULL) class(returned.result) <- c("mrfDepth", "projdepth") return(returned.result) } if (min(w1$d) < tol) { warning("An exact fit was found. Check the output for more details.") returned.result <- list(depthX = NULL, depthZ = NULL, cutoff = NULL, flagX = NULL, flagZ = NULL, singularSubsets = NULL, dimension = sum(w1$d > tol), hyperplane = w1$v[, which(w1$d == min(w1$d))[1]], inSubspace = NULL) class(returned.result) <- c("mrfDepth", "projdepth") return(returned.result) } original <- options(warn = 1) result <- outlyingness(x = x, z = z, options = options) options(warn = original$warn) if (!is.null(result$hyperplane)) { returned.result <- list(depthX = NULL, depthZ = NULL, cutoff = NULL, flagX = NULL, flagZ = NULL, singularSubsets = NULL, dimension = NULL, hyperplane = result[["hyperplane"]], inSubspace = result[["inSubspace"]]) class(returned.result) <- c("mrfDepth", "projdepth") return(returned.result) } else { returned.result <- list(depthX = 1 / (1 + result[["outlyingnessX"]]), depthZ = 1 / (1 + result[["outlyingnessZ"]]), cutoff = 1 / (1 + result[["cutoff"]]), flagX = result[["flagX"]], flagZ = result[["flagZ"]], singularSubsets = result[["singularSubsets"]], dimension = NULL, hyperplane = result[["hyperplane"]], inSubspace = NULL) class(returned.result) <- c("mrfDepth", "projdepth") return(returned.result) } }
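# Hedged example: projdepth() above delegates to outlyingness(); this sketch
# assumes the mrfDepth package (which provides outlyingness) is attached.
library(mrfDepth)
set.seed(1)
x <- matrix(rnorm(200), ncol = 2)  # reference sample
z <- matrix(rnorm(20), ncol = 2)   # query points
res <- projdepth(x, z)
summary(res$depthZ)  # depths in (0, 1]; larger values are more central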
unbump_version <- function() { unbump_version_impl() }
print.summary.survexp <- function(x, digits = max(options()$digits - 4, 3), ...) { savedig <- options(digits=digits) on.exit(options(savedig)) if (!is.null(cl<- x$call)) { cat("Call: ") dput(cl) cat("\n") } omit <- x$na.action if (length(omit)) cat(naprint(omit), "\n") mat <- cbind(x$time, x$n.risk, x$surv) if (is.matrix(x$n.risk)) cnames <- c("time", paste("nrisk", 1:ncol(x$n.risk), sep='')) else cnames <- c("time", "n.risk") if (is.matrix(x$surv)) ncurve <- ncol(x$surv) else ncurve <- 1 if (ncurve==1) { cnames <- c(cnames, "survival") } else cnames <- c(cnames, paste("survival", seq(ncurve), sep='')) if (!is.matrix(mat)) mat <- matrix(mat, nrow=1) if (!is.null(mat)) { dimnames(mat) <- list(rep("", nrow(mat)), cnames) if (is.null(x$strata)) print(mat) else { strata <- x$strata for (i in levels(strata)) { who <- (strata==i) cat(" ", i, "\n") print(mat[who,]) cat("\n") } } } else stop("There are no observations to print.") invisible(x) }
las = clip_rectangle(megaplot, 684766.4, 5017773, 684866.4, 5017973) xall = grid_metrics(las, .stdmetrics, 20) test_that("stdmetric returns the same result as .stdmetric", { y = grid_metrics(las, stdmetrics(X,Y,Z, Intensity, ReturnNumber, Classification), 20) expect_identical(xall, y) }) test_that("stdmetric_i returns the same result as .stdmetric_i", { x = grid_metrics(las, .stdmetrics_i, 20) y = grid_metrics(las, stdmetrics_i(Intensity, Z, Classification), 20) expect_identical(x,y) }) test_that("stdmetric_i returns the same result as stdmetric", { y = grid_metrics(las, .stdmetrics_i, 20) cols = names(y) x = raster::subset(xall, cols) x@data@isfactor <- y@data@isfactor <- FALSE dimnames(x@data@values) <- dimnames(y@data@values) expect_identical(x,y) }) test_that("stdmetric_z returns the same result as stdmetric", { y = grid_metrics(las, .stdmetrics_z, 20) cols = names(y) x = raster::subset(xall,cols) x@data@isfactor <- y@data@isfactor <- FALSE dimnames(x@data@values) <- dimnames(y@data@values) expect_identical(x,y) }) test_that("stdmetric_rn returns the same result as stdmetric", { y = grid_metrics(las, .stdmetrics_rn, 20) cols = names(y) x = raster::subset(xall,cols) x@data@isfactor <- y@data@isfactor <- FALSE dimnames(x@data@values) <- dimnames(y@data@values) expect_identical(x,y) }) test_that("stdmetric_pulse works", { las = retrieve_pulses(las) expect_error(grid_metrics(las, .stdmetrics_pulse, 20), NA) })
`csv2rwl` <- function(fname,...) { dat <- read.table(fname, header=TRUE, sep=",",...) rownames(dat) <- as.character(dat[,1]) dat <- dat[,-1] class(dat) <- c("rwl","data.frame") dat }
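# Usage sketch for csv2rwl(): the first CSV column is assumed to hold the
# years, which become the row names of the returned "rwl" data frame.
tf <- tempfile(fileext = ".csv")
write.csv(data.frame(year = 1950:1954,
                     ser1 = c(0.50, 0.72, 0.61, 0.83, 0.94),
                     ser2 = c(0.45, 0.66, 0.58, 0.79, 0.88)),
          tf, row.names = FALSE)
rw <- csv2rwl(tf)
class(rw)     # "rwl" "data.frame"
rownames(rw)  # "1950" ... "1954"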
gquad_base <- function(a){ aSpace <- gsub("-", "", a) a <- aSpace if(nchar(a) < 11) return(data.frame("sequence_position" = "-", "sequence" = "-", "sequence_length" = "-", "likeliness" = "-")) a4 <- "(?=(GG\\w{7,50}GG))" a5 <- gregexpr(a4, a, ignore.case = TRUE, perl = TRUE) if(a5[[1]][1] == -1){ resultClean5 <- data.frame("sequence_position" = "-", "sequence" = "-", "sequence_length" = "-", "likeliness" = "-") return(resultClean5) }else{ sequence_position <- a5[[1]][1:length(a5[[1]])] sequence_length <- as.vector(attr(a5[[1]], "capture.length")) a_end = sequence_position + sequence_length - 1 sequence <- substring(a, sequence_position, a_end) b4 <- "(?=(G{2,7}[ACGTURYSWKMBDHVN]{1,40}G{2,7}[ACGTURYSWKMBDHVN]{1,40}G{2,7}[ACGTURYSWKMBDHVN]{1,40}G{2,7}))" c4 <- "G{2,7}([ACGTURYSWKMBDHVN]{1,40}?)G{2,7}([ACGTURYSWKMBDHVN]{1,40}?)G{2,7}([ACGTURYSWKMBDHVN]{1,40}?)G{2,7}" sequenceCount <- 0 sequenceAll <- "" sequenceAll2 <- "" sequence_positionAll <- "" sequence_positionAll2 <- "" for (x in sequence){ sequenceCount <- sequenceCount + 1 b5 <- gregexpr(b4, sequence[sequenceCount], ignore.case = TRUE, perl = TRUE) sequence_positionb <- b5[[1]][1:length(b5[[1]])] sequence_positionReal <- sequence_position[sequenceCount] + sequence_positionb - 1 sequence_lengthb <- as.vector(attr(b5[[1]], "capture.length")) b_end = sequence_positionReal + sequence_lengthb - 1 sequenceb <- substring(a, sequence_positionReal, b_end) sequenceAll <- c(sequenceAll, sequenceb) sequence_positionAll <- c(sequence_positionAll, sequence_positionReal) c5 <- gregexpr(c4, sequence[sequenceCount], ignore.case = TRUE, perl = TRUE) sequence_positionc <- c5[[1]][1:length(c5[[1]])] sequence_positionReal2 <- sequence_position[sequenceCount] + sequence_positionc - 1 c6 <- regmatches(sequence[sequenceCount], c5) sequencec <- c6[[1]][1:length(c6[[1]])] sequence_lengthc <- nchar(sequencec) c_end = sequence_positionReal2 + sequence_lengthc - 1 sequenceAll2 <- c(sequenceAll2, sequencec) sequence_positionAll2 <- c(sequence_positionAll2, sequence_positionReal2) } sequenceLengthAll <- nchar(sequenceAll) resultOnway <- unique(data.frame(sequence_positionAll, sequenceAll, sequenceLengthAll)) resultClean1 <- subset(resultOnway,sequence_positionAll!="") resultClean2 <- subset(resultClean1,sequenceAll!="") sequenceLengthAll2 <- nchar(sequenceAll2) resultOnway2 <- unique(data.frame(sequence_positionAll2, sequenceAll2, sequenceLengthAll2)) resultClean1b <- subset(resultOnway2,sequence_positionAll2!="") resultClean2b <- subset(resultClean1b,sequenceAll2!="") if(nrow(resultClean2) > 0){ colnames(resultClean2) <- c("sequence_position", "sequence", "sequence_length") }else{ resultClean2 <- data.frame("sequence_position" = "-", "sequence" = "-", "sequence_length" = "-") } if(nrow(resultClean2b) > 0){ colnames(resultClean2b) <- c("sequence_position", "sequence", "sequence_length") }else{ resultClean2b <- data.frame("sequence_position" = "-", "sequence" = "-", "sequence_length" = "-") } resultClean3 <- rbind(resultClean2, resultClean2b) resultClean3 <- unique(resultClean3) if(nrow(resultClean3) > 1){ resultClean4 <- resultClean3[!(resultClean3$sequence == "-"),] }else{ resultClean4 <- resultClean3 } if(nrow(resultClean4[!(resultClean4$sequence == "-"),]) > 0){ resultClean5 <- resultClean4[order(as.integer(as.character(resultClean4[,1])), -as.integer(as.character(resultClean4[,3]))),] rownames(resultClean5) <- 1:nrow(resultClean5) intresult <- resultClean5 intresult$end <- as.integer(as.character(intresult[,1])) + as.integer(as.character(intresult[,3])) - 1 
nRowsintresult <- nrow(intresult) startQ <- 1 q <- 1 k <- 0 for(i in 1:nRowsintresult){ k <- k + 1 if(as.integer(as.character(intresult[k,1])) > as.integer(as.character(intresult[q,4]))){ q <- k startQ <- c(startQ,q) } } resultClean6 <- resultClean5[startQ, ] rownames(resultClean6) <- 1:nrow(resultClean6) }else{ resultClean6 <- data.frame("sequence_position" = "-", "sequence" = "-", "sequence_length" = "-") } likeliness <- "" for(sequ in resultClean6$sequence){ if(sequ == "-"){ sequlikely <- "-" }else{ if((gregexpr("^G{3,7}\\w{1,7}G{3,7}\\w{1,7}G{3,7}\\w{1,7}G{3,7}$", sequ, ignore.case = TRUE))[[1]][1] == 1){ sequlikely <- "**" }else{ sequlikely <- "*" } } likeliness <- c(likeliness, sequlikely) } likeliness <- likeliness[2:length(likeliness)] resultClean6$likeliness <- likeliness return(resultClean6) } } gquad_main <- function(b){ if(length(b) == 1){ b <- gsub("[\r\n]", "", b) b <- gsub(" ", "", b) if(grepl("[^acgturyswkmbdhvnACGTURYSWKMBDHVN-]", b)){ b1 <- data.frame("sequence_position" = "!", "sequence" = "Error: Non-nucleotide character(s) in input", "sequence_length" = "!", "likeliness" = "!") return(b1) }else{ b1 <- gquad_base(b) return(b1) } }else{ input_pos = 0 q <- data.frame("input_ID" = integer(0), "sequence_position" = character(0), "sequence" = character(0), "sequence_length" = character(0)) for(i in b){ i <- gsub("[\r\n]", "", i) i <- gsub(" ", "", i) if(grepl("[^acgturyswkmbdhvnACGTURYSWKMBDHVN-]", i)){ b1 <- data.frame("sequence_position" = "!", "sequence" = "Error: Non-nucleotide character(s) in input", "sequence_length" = "!", "likeliness" = "!") }else{ b1 <- gquad_base(i) } input_pos = input_pos + 1 b2 <- cbind(input_ID = input_pos, b1) b2[,c(2,4)] <- sapply(b2[,c(2,4)],as.character) q <- rbind(q, b2) } return(q) } } gquad <- function(x, xformat = "default"){ if(xformat == "default"){ x1 <- gquad_main(x) return(x1) } if(xformat == "GenBank"){ x2 <- read.GenBank(x, as.character = TRUE) x3 <- sapply(x2, paste, collapse="") x4 <- gquad_main(x3) return(x4) } if(xformat == "fasta"){ x5 <- read.fasta(x) x6 <- getSequence(x5, as.string = TRUE) x7 <- unlist(x6) x8 <- gquad_main(x7) return(x8) }else{ stop("Unacceptable option for argument 'xformat'") } }
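# Hedged demo of the top-level gquad() wrapper with the default input format:
# a human telomeric repeat scanned for putative G-quadruplex-forming motifs
# (expected to score "**" under the likeliness rule above).
gquad("GGGTTAGGGTTAGGGTTAGGG")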
bcontSurvGunivMIXED <- function(params, respvec, VC, ps, AT = FALSE){ p1 <- p2 <- pdf1 <- pdf2 <- c.copula.be2 <- c.copula.be1 <- c.copula2.be1be2 <- NA monP <- monP1 <- monP2 <- k <- 0; V <- list() etad <- etas1 <- etas2 <- l.ln <- NULL params1 <- params[1:VC$X1.d2] params1[VC$mono.sm.pos] <- exp( params1[VC$mono.sm.pos] ) eta1 <- VC$X1%*%params1 Xd1P <- VC$Xd1%*%params1 indN <- as.numeric(Xd1P < 0) Xd1P <- ifelse(Xd1P < VC$min.dn, VC$min.dn, Xd1P ) der.par1 <- der2.par1 <- params1 der.par1[-c( VC$mono.sm.pos )] <- 1 der2.par1[-c( VC$mono.sm.pos )] <- 0 pd1 <- probmS(eta1, VC$margins[1], min.dn = VC$min.dn, min.pr = VC$min.pr, max.pr = VC$max.pr) p1 <- pd1$pr dS1eta1 <- pd1$dS d2S1eta1 <- pd1$d2S d3S1eta1 <- pd1$d3S if( any(unique(VC$cens) == "I") ){ eta2 <- VC$X2%*%params1 pd2 <- probmS(eta2, VC$margins[1], min.dn = VC$min.dn, min.pr = VC$min.pr, max.pr = VC$max.pr) p2 <- pd2$pr dS2eta2 <- pd2$dS d2S2eta2 <- pd2$d2S d3S2eta2 <- pd2$d3S dereta2derb1 <- t(t(VC$X2)*der.par1) }else{ eta2 <- rep(0.1, length(eta1)) pd2 <- probmS(eta2, VC$margins[1], min.dn = VC$min.dn, min.pr = VC$min.pr, max.pr = VC$max.pr) p2 <- pd2$pr dS2eta2 <- pd2$dS d2S2eta2 <- pd2$d2S d3S2eta2 <- pd2$d3S dereta2derb1 <- t(t(VC$X2)*der.par1) } l.par <- VC$weights*( VC$indvU*( log(-dS1eta1) + log(Xd1P) ) + VC$indvR*log(p1) + VC$indvL*log(1-p1) + VC$indvI*log(mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)) ) res <- -sum(l.par) der2eta1dery1b1 <- t(t(VC$Xd1)*der.par1) dereta1derb1 <- t(t(VC$X1)*der.par1) dl.dbe1 <- -VC$weights*( VC$indvU*( c((dS1eta1*Xd1P)^-1)*(c(d2S1eta1*Xd1P)*dereta1derb1 + c(dS1eta1)*der2eta1dery1b1) ) + VC$indvR*c(p1^-1*dS1eta1)*dereta1derb1 + VC$indvL*-c((1-p1)^-1*dS1eta1)*dereta1derb1 + VC$indvI*c(mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr))^-1*(c(dS1eta1)*dereta1derb1-c(dS2eta2)*dereta2derb1) ) G <- colSums(dl.dbe1) H <- -( crossprod(c(VC$weights*VC$indvU*(-dS1eta1^-2*d2S1eta1^2 + dS1eta1^-1*d3S1eta1))*dereta1derb1, dereta1derb1) + diag( colSums( t( t( c(VC$weights*VC$indvU*dS1eta1^-1*d2S1eta1)*VC$X1)*der2.par1 ) ) ) + diag( colSums( t( t(VC$weights*VC$indvU*c(Xd1P^-1)*VC$Xd1)*der2.par1 ) ) ) + crossprod(VC$weights*VC$indvU*c(-Xd1P^-2)*der2eta1dery1b1, der2eta1dery1b1) + crossprod(c(VC$weights*VC$indvR*(-p1^-2*dS1eta1^2+p1^-1*d2S1eta1))*dereta1derb1, dereta1derb1) + diag( colSums( t( t(c(VC$weights*VC$indvR*p1^-1*dS1eta1)*VC$X1)*der2.par1 ) ) ) - crossprod(c(VC$weights*VC$indvL*((1-p1)^-2*dS1eta1^2+(1-p1)^-1*d2S1eta1))*dereta1derb1, dereta1derb1) - diag( colSums( t( t(c(VC$weights*VC$indvL*(1-p1)^-1*dS1eta1)*VC$X1)*der2.par1 ) ) ) + crossprod(c(VC$weights*VC$indvI*(mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-1*d2S1eta1))*dereta1derb1, dereta1derb1) + crossprod(c(VC$weights*VC$indvI*(-mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-1*d2S2eta2))*dereta2derb1, dereta2derb1) + diag( colSums( t( t(c(VC$weights*VC$indvI*mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-1*dS1eta1)*VC$X1)*der2.par1 ) ) ) + diag( colSums( t( t(c(VC$weights*VC$indvI*-mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-1*dS2eta2)*VC$X2)*der2.par1 ) ) ) + crossprod(c(VC$weights*VC$indvI*(-mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-2*dS1eta1^2))*dereta1derb1, dereta1derb1) + crossprod(c(VC$weights*VC$indvI*(-mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-2*dS2eta2^2))*dereta2derb1, dereta2derb1) + crossprod(c(VC$weights*VC$indvI*(mm(p1-p2, min.pr = VC$min.pr, max.pr = VC$max.pr)^-2*dS1eta1*dS2eta2))*dereta1derb1, dereta2derb1) + crossprod(c(VC$weights*VC$indvI*(mm(p1-p2, min.pr = VC$min.pr, max.pr = 
VC$max.pr)^-2*dS1eta1*dS2eta2))*dereta2derb1, dereta1derb1) ) if(VC$extra.regI == "pC") H <- regH(H, type = 1) S.h <- ps$S.h + monP2 S.h1 <- 0.5*crossprod(params, ps$S.h)%*%params + monP S.h2 <- S.h%*%params + monP1 S.res <- res res <- S.res + S.h1 G <- G + S.h2 H <- H + S.h if(VC$extra.regI == "sED") H <- regH(H, type = 2) list(value=res, gradient=G, hessian=H, S.h=S.h, S.h1=S.h1, S.h2=S.h2, indN = indN, V = V, l=S.res, l.ln = l.ln, l.par=l.par, ps = ps, k = VC$my.env$k, monP2 = monP2, params1 = params1, eta1=eta1, p1 = p1, p2 = p2, pdf1 = -dS1eta1, pdf2 = -dS2eta2, c.copula.be2 = c.copula.be2, c.copula.be1 = c.copula.be1, dl.dbe1 = NULL, dl.dbe2 = NULL, dl.dteta.st = NULL) }
ryt_get_video_details_helper <- function( video_id, fields = c('contentDetails', 'fileDetails', 'id', 'liveStreamingDetails', 'localizations', 'player', 'processingDetails', 'recordingDetails', 'snippet', 'statistics', 'status', 'suggestions', 'topicDetails') ) { fields <- paste0(fields, collapse = ",") out <- request_build( method = "GET", params = list( id = video_id, part = fields ), token = ryt_token(), path = 'youtube/v3/videos', base_url = 'https://www.googleapis.com/' ) ans <- request_retry( out, encode = 'json' ) resp <- response_process(ans) result <- tibble(items = resp$items) %>% unnest_wider(.data$items) nested_fields <- select(result, where(is.list)) %>% names() nested_fields <- nested_fields[!nested_fields %in% c("tags", "topicDetails")] while ( length(nested_fields) > 0 ) { for ( col in nested_fields ) { if (col == "tags") next result_t <- try(unnest_wider(result, col), silent = T) if ( 'try-error' %in% class(result_t) ) { result <- unnest_wider(result, col, names_sep = '_') } else { result <- result_t } } nested_fields <- select(result, where(is.list)) %>% names() nested_fields <- nested_fields[!nested_fields %in% c("tags", "topicDetails")] } return(result) }
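# Hypothetical call sketch (left unevaluated: it needs a valid OAuth token
# from ryt_token() plus gargle's request_build/request_retry helpers at run
# time; 'dQw4w9WgXcQ' is just a placeholder video id):
# details <- ryt_get_video_details_helper(
#   video_id = 'dQw4w9WgXcQ',
#   fields   = c('id', 'snippet', 'statistics'))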
getPortfolio.fPFOLIOVAL <- function(object) object@portfolio
getWeights.fPFOLIOVAL <- function(object) object@portfolio$weights
getCovRiskBudgets.fPFOLIOVAL <- function(object) object@portfolio$covRiskBudgets
getTargetReturn.fPFOLIOVAL <- function(object) object@portfolio$targetReturn
getTargetRisk.fPFOLIOVAL <- function(object) object@portfolio$targetRisk
getAlpha.fPFOLIOVAL <- function(object) object@portfolio$targetAlpha
getRiskFreeRate.fPFOLIOVAL <- function(object) object@portfolio$riskFreeRate
getNFrontierPoints.fPFOLIOVAL <- function(object) object@portfolio$nFrontierPoints
getStatus.fPFOLIOVAL <- function(object) object@portfolio$status
setConstructorS3("RawCopyNumberModel", function(...) { extend(CopyNumberChromosomalModel(...), "RawCopyNumberModel") }) setMethodS3("getAsteriskTags", "RawCopyNumberModel", function(this, ...) { "" }, protected=TRUE) setMethodS3("getSetTag", "RawCopyNumberModel", function(this, ...) { "raw" }, protected=TRUE)
iris.panel = histogram( ~ Sepal.Length | Species, xlab = "Sepal Length", ylab = "Density", layout = c(3, 1), data = iris, type = "density", panel = function(x, ...) { panel.histogram(x, ...) panel.mathdensity( dmath = dnorm, col = "red", lwd = 2, args = list(mean = mean(x), sd = sd(x)) ) } ) print(iris.panel)
prediction.survreg <- function(model, data = find_data(model, parent.frame()), at = NULL, type = c("response", "lp", "quantile", "uquantile"), calculate_se = TRUE, ...) { type <- match.arg(type) data <- data if (missing(data) || is.null(data)) { pred <- predict(model, type = type, se.fit = TRUE, ...) pred <- make_data_frame(fitted = pred[["fit"]], se.fitted = pred[["se.fit"]]) } else { if (is.null(at)) { out <- data } else { out <- build_datalist(data, at = at, as.data.frame = TRUE) at_specification <- attr(out, "at_specification") } tmp <- predict(model, newdata = out, type = type, se.fit = TRUE, ...) pred <- make_data_frame(out, fitted = tmp[["fit"]], se.fitted = tmp[["se.fit"]]) } vc <- NA_real_ structure(pred, class = c("prediction", "data.frame"), at = if (is.null(at)) at else at_specification, type = type, call = if ("call" %in% names(model)) model[["call"]] else NULL, model_class = class(model), row.names = seq_len(nrow(pred)), vcov = vc, jacobian = NULL, weighted = FALSE) }
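# Hedged sketch: in practice this method is reached through the prediction()
# generic; find_data(), make_data_frame(), and build_datalist() above are
# assumed to be the prediction package's helpers. With the prediction and
# survival packages installed:
library(survival)
library(prediction)
fit <- survreg(Surv(futime, fustat) ~ ecog.ps, data = ovarian)
head(prediction(fit, type = "lp"))  # fitted linear predictors plus SEs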
startupSpeed.ensembleData <- function (x) { if (is.null(startup <- attr( x, "startupSpeed"))) { class(x) <- "data.frame" x$startup } else startup }
context("examples") dexample=system.file("examples", package="kvh") fexample=system(sprintf("ls -1 %s/*.kvh", dexample), intern=TRUE) saved=readRDS("res.RData") res=list() test_that("example files", for (f in fexample) { bf=basename(f) res[[bf]] <<- kvh_read(f) expect_equivalent(res[[bf]], saved[[bf]], info=f) } ) test_that("obj_by_keys", { expect_equivalent(kvh::obj_by_keys(res[["hello_salut.kvh"]], c("salutation", "en")), "Hello, world!") expect_null(kvh::obj_by_keys(res[["hello_salut.kvh"]], c("salutation", "zz"))) }) wd=tempdir(TRUE) fn=tempfile(pattern = "file", tmpdir = wd, fileext = ".kvh") fc=file(fn, "wb") cat(" a \t b \n", file=fc) close(fc) res=kvh::kvh_read(fn, strip_white=TRUE) test_that("strip_white", expect_equivalent(res[["a"]], "b", info="strip spaces") ) fc=file(fn, "wb") cat("a\tb\n\n\t\t \r\nc\td\n", file=fc) close(fc) res=kvh::kvh_read(fn, skip_blank=TRUE) test_that("skip_blank", { expect_equivalent(res[["a"]], "b", info="before blank line") expect_equivalent(res[["c"]], "d", info="after blank line") expect_equivalent(length(res), 3, info="total skip blank") }) fc=file(fn, "wb") cat("a\tb close(fc) res=kvh::kvh_read(fn, comment_str=" test_that("comment strip & skip_blank", { expect_equivalent(res[["a"]], "b", info="before comment strip & skip blank") expect_equivalent(res[["c"]], "d", info="after comment strip & skip blank") expect_equivalent(length(res), 2, info="total comment strip & skip blank") }) fc=file(fn, "wb") cat("a\tb1\tb2\\\tb2_too\tb3\n", file=fc) close(fc) res=kvh::kvh_read(fn, split_str="\t") test_that("split value", { expect_equivalent(res[["a"]], c("b1", "b2\tb2_too", "b3")) }) fn2=tempfile(pattern = "file", tmpdir = wd, fileext = ".kvh") fc=file(fn, "wb") cat("a\t file://", fn2, " \n", sep="", file=fc) close(fc) fc=file(fn2, "wb") cat("k\tv\nbad nested ref\t file://", fn, " \n", sep="", file=fc) close(fc) res=kvh::kvh_read(fn, strip_white=TRUE, follow_url=TRUE) test_that("split value", { expect_equivalent(res[["a"]], list(k="v", `bad nested ref`=paste0("file://", fn))) expect_warning(kvh::kvh_read(fn, strip_white=TRUE, follow_url=TRUE), "kvh_read: detected circular reference to file ") }) unlink(fn)
context("NMcheckColnames") NMdata_filepath <- function(...) { system.file(..., package = "NMdata") } test_that("basic",{ fileRef <- "testReference/NMcheckColnames_1.rds" file.lst <- system.file("examples/nonmem/xgxr001.lst" ,package="NMdata") res1 <- NMcheckColnames(file=file.lst) expect_equal_to_reference(res1,fileRef,version=2) }) test_that("results as data.frame",{ fileRef <- "testReference/NMcheckColnames_2.rds" file.lst <- system.file("examples/nonmem/xgxr001.lst" ,package="NMdata") res1 <- NMcheckColnames(file=file.lst,as.fun=as.data.frame) expect_equal_to_reference(res1,fileRef,version=2) })
stats_Pillai <- function (eig, q, df.res) { test <- sum(eig/(1 + eig)) p <- length(eig) s <- min(p, q) n <- 0.5 * (df.res - p - 1) m <- 0.5 * (abs(p - q) - 1) tmp1 <- 2 * m + s + 1 tmp2 <- 2 * n + s + 1 c(test, (tmp2/tmp1 * test)/(s - test), s * tmp1, s * tmp2) } stats_Wilks <- function (eig, q, df.res) { test <- prod(1/(1 + eig)) p <- length(eig) tmp1 <- df.res - 0.5 * (p - q + 1) tmp2 <- (p * q - 2)/4 tmp3 <- p^2 + q^2 - 5 tmp3 <- if (tmp3 > 0) sqrt(((p * q)^2 - 4)/tmp3) else 1 c(test, ((test^(-1/tmp3) - 1) * (tmp1 * tmp3 - 2 * tmp2))/p/q, p * q, tmp1 * tmp3 - 2 * tmp2) } stats_HL <- function (eig, q, df.res) { test <- sum(eig) p <- length(eig) m <- 0.5 * (abs(p - q) - 1) n <- 0.5 * (df.res - p - 1) s <- min(p, q) tmp1 <- 2 * m + s + 1 tmp2 <- 2 * (s * n + 1) c(test, (tmp2 * test)/s/s/tmp1, s * tmp1, tmp2) } stats_Roy <- function (eig, q, df.res) { p <- length(eig) test <- max(eig) tmp1 <- max(p, q) tmp2 <- df.res - tmp1 + q c(test, (tmp2 * test)/tmp1, tmp1, tmp2) }
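# Worked check of the four MANOVA statistics above. Inputs are illustrative:
# eig holds the nonzero eigenvalues of E^{-1}H, q the hypothesis df, and
# df.res the residual df; each helper returns
# c(statistic, approximate F, numerator df, denominator df).
eig <- c(0.40, 0.05)
stats_Pillai(eig, q = 2, df.res = 27)
stats_Wilks(eig, q = 2, df.res = 27)
stats_HL(eig, q = 2, df.res = 27)
stats_Roy(eig, q = 2, df.res = 27)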
innerprod.im <- function(A, B, outsideA = NA, outsideB = NA, na.replace = TRUE, method = "cubature"){ integrationrectangle <- union.owin(Frame(A), Frame(B)) if (is.na(outsideA) && is.na(outsideB)){return(NA)} if (is.na(outsideA) && (outsideB != 0)){return(NA)} if (is.na(outsideB) && (outsideA != 0)){return(NA)} if ((!is.na(outsideA)) && (outsideA != 0) && (outsideB != 0)) {return(Inf)} if ((!is.na(outsideB)) && (outsideB != 0) && (outsideA != 0)) {return(Inf)} if ((!is.na(outsideA)) && (outsideA == 0)){integrationrectangle <- intersect.owin(integrationrectangle, Frame(A))} if ((!is.na(outsideB)) && (outsideB == 0)){integrationrectangle <- intersect.owin(integrationrectangle, Frame(B))} if (method == "cubature"){ intresult <- innerprod.cub(A, B, outsideA, outsideB, integrationrectangle, na.replace = na.replace)$integral } else if (method == "harmonisesum") { intresult <- integration_trad(A, B, outsideA, outsideB, integrationrectangle, na.replace = na.replace) } else { stop(paste("Method", method, "did not match available methods.")) } return(intresult) } integration_trad <- function(A, B, outsideA, outsideB, integrationrectangle, na.replace){ if (na.replace){ A[is.na(as.matrix(A))] <- outsideA B[is.na(as.matrix(B))] <- outsideB } harmgrid <- as.mask(integrationrectangle, eps = c(min(A$xstep, B$xstep), min(A$ystep, B$ystep))) A2 <- as.im(A, xy = harmgrid) A2[setminus.owin(integrationrectangle, Frame(A))] <- outsideA B2 <- as.im(B, xy = harmgrid) B2[setminus.owin(integrationrectangle, Frame(B))] <- outsideB prdimg <- eval.im(A2 * B2, harmonize = FALSE) return(sum(prdimg[, ]) * prdimg$xstep * prdimg$ystep) } innerprod.cub <- function(A, B, outsideA, outsideB, integrationrectangle, na.replace, tol = 1E-3){ if (requireNamespace("cubature") != TRUE){ stop("Cubature package must be installed to integrate using method cubature") } if (na.replace){ integrand <- function(arg){ outA <- matrix(A[list(x = arg[1, ], y = arg[2, ]), drop = FALSE], ncol = ncol(arg)) outA <- replace(outA, is.na(outA), outsideA) outB <- matrix(B[list(x = arg[1, ], y = arg[2, ]), drop = FALSE], ncol = ncol(arg)) outB <- replace(outB, is.na(outB), outsideB) return(outA * outB) } } else if (!na.replace){ tmpfunA <- as.function.im(A) tmpfunB <- as.function.im(B) integrand <- function(arg){ insideA <- inside.owin(x = arg[1, ], y = arg[2, ], w = Window(A)) insideB <- inside.owin(x = arg[1, ], y = arg[2, ], w = Window(B)) outA <- matrix(outsideA, nrow = 1, ncol = ncol(arg), byrow = FALSE) if (sum(insideA) > 0){ outA[, insideA] <- matrix(tmpfunA(arg[1, insideA], arg[2, insideA]), ncol = sum(insideA)) } outB <- matrix(outsideB, nrow = 1, ncol = ncol(arg), byrow = FALSE) if (sum(insideB) > 0){ outB[, insideB] <- matrix(tmpfunB(arg[1, insideB], arg[2, insideB]), ncol = sum(insideB)) } return(outA * outB) } } out <- cubature::cubintegrate(f = integrand, lower = c(integrationrectangle$xrange[[1]], integrationrectangle$yrange[[1]]), upper = c(integrationrectangle$xrange[[2]], integrationrectangle$yrange[[2]]), relTol = tol, method = "pcubature", nVec = 1024) return(out) }
print.NMAoutlier.measures <- function(x, digits = 4, details = FALSE, ...) { chkclass(x, "NMAoutlier.measures") chknumeric(digits, min = 0, single = TRUE) chklogical(details) Mydata <- x$dat if (details) { cat("Original data:\n") datamatrix <- cbind(formatN(as.numeric(Mydata[, 1]), digits), formatN(as.numeric(Mydata[, 2]), digits), as.character(Mydata[, 3]), as.character(Mydata[, 4]), as.character(Mydata[, 5])) prmatrix(datamatrix, rowlab = paste(c(1:length(Mydata[, 1])), ""), collab = c("TE", "seTE", "studylab", "treat1", "treat2"), quote = FALSE, right = TRUE) } if (x$measure == "simple") { cat("\n") cat("Simple outlier detection measures (for each included study):\n") cat("\n") prmatrix(cbind(formatN(x$eraw, digits), formatN(x$estand, digits), formatN(x$estud, digits), formatN(x$Mahalanobis.distance, digits), formatN(x$leverage, digits)), collab = c("raw", "standardized", "studentized", "mahalanobis", "leverage"), rowlab = c(unique(as.character(Mydata[, 3]))), quote = FALSE, right = TRUE) } else if (x$measure == "deletion") { cat("\n") cat("Outlier detection measures considering deletion:\n") cat("\n") cat(paste("Raw, standardized, Studentized deleted residuals for", "each included study in the network:\n")) cat("\n") prmatrix(cbind(formatN(x$eraw.deleted, digits), formatN(x$estand.deleted, digits), formatN(x$estud.deleted, digits)), collab = c("Raw deleted residual", "Standardized deleted residual", "Studentized deleted residual"), rowlab = c(unique(as.character(Mydata[, 3]))), quote = FALSE, right = TRUE) cat("\n") cat(paste("'Leave one out' measures for each included study", "in the network:\n")) cat("\n") prmatrix(cbind(formatN(x$w.leaveoneout, digits), formatN(x$H.leaveoneout, digits), formatN(x$heterog.leaveoneout, digits)), collab = c("Weight 'leave one out'", "Leverage 'leave one out'", "Heterogeneity 'leave one out'"), rowlab = c(unique(as.character(Mydata[, 3]))), quote = FALSE, right = TRUE) cat("\n") cat(paste("Cook's distance and COVRATIO considering deletion of", "study in the network:\n")) cat("\n") prmatrix(cbind(formatN(x$Cooks.distance, digits), formatN(x$Covratio, digits)), collab = c("Cook's distance", "COVRATIO"), rowlab = c(unique(as.character(Mydata[, 3]))), quote = FALSE, right = TRUE) cat("\n") cat("R statistics considering deletion of study in the network:\n") cat("\n") prmatrix(cbind(formatN(x$Rheterogeneity, digits), formatN(x$RQtotal, digits), formatN(x$RQhet, digits), formatN(x$RQinc, digits)), collab = c("R statistic for heterogeneity", "R statistic for Qtotal", "R statistic for Qheterogeneity", "R statistic for Qinconsistency"), rowlab = c(unique(as.character(Mydata[, 3]))), quote = FALSE, right = TRUE) cat("\n") cat(paste("DFbetas for each treatment versus the reference", "considering deletion of study in the network:\n")) cat("\n") prmatrix(formatN(t(x$DFbetas), digits), rowlab = c(unique(as.character(Mydata[, 3]))), collab = rownames(x$DFbetas), quote = FALSE, right = TRUE) } invisible(NULL) }
.deal.ties <- function(ny, i, tie.action, tie.limit, warn = FALSE) { maxi <- max(i) mini <- min(i) if (maxi-mini > tie.limit * ny) { warning(paste("encountered a tie, and the difference between minimal and maximal value is > length('x') * 'tie.limit'", "the distribution could be multimodal", sep="\n"), call. = FALSE) } switch(tie.action, mean = mean(i), median = stats::median(i), max = maxi, min = mini, stop(sprintf("invalid value '%s' for argument 'tie.action'", tie.action), call. = FALSE)) }
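# Sketch of .deal.ties(): collapse tied candidate indices i (from a series of
# length ny) to a single index according to tie.action.
.deal.ties(ny = 100, i = c(40, 42), tie.action = "mean", tie.limit = 0.05)  # 41
# A spread wider than ny * tie.limit additionally warns about possible
# multimodality before the chosen summary is returned.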
plot.nhat <- function( x, ci=TRUE, smooth=TRUE, occasions=-1, smubass=5,...){ y <- x$n.hat occasion <- cumsum( c(1,x$intervals ) ) ns <- length( y ) if( length(names(y)) != ns ) names(y) <- 1:ns nms <- names(y) if( any(occasions <= 0) ){ occasions <- 1:length(y) } occasions <- occasions[ occasions != 1 ] y <- y[occasions] occasion <- occasion[occasions] nms <- nms[occasions] n.hat <- y if( ci ){ lower.ci <- x$n.hat.lower[occasions] upper.ci <- x$n.hat.upper[occasions] plot( range(occasion), range(n.hat,lower.ci,upper.ci), type="n", xaxt="n", xlab="Occasion", ylab="N estimate", ...) axis( side=1, at=occasion, labels=nms ) lines(occasion, lower.ci, type="l", lty=2) lines(occasion, upper.ci, type="l", lty=2) lines(occasion, n.hat, type="b") } else { plot(occasion, n.hat, type="b", xaxt="n", ...) axis(1, at=occasion, labels=nms ) } if( smooth & exists("supsmu") ){ sm <- supsmu( occasion, n.hat, bass=smubass ) lines( sm, lwd=3, col=3 ) } else { sm <- NA } invisible(sm) }
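# Hedged demo of plot.nhat() on a hand-built list mimicking the expected
# object structure (n.hat, intervals, and the CI fields are assumed
# components; real objects would come from an abundance-estimation fit):
est <- list(n.hat = c(100, 120, 115, 130), intervals = c(1, 1, 1),
            n.hat.lower = c(90, 105, 100, 115),
            n.hat.upper = c(110, 135, 130, 145))
plot.nhat(est, smooth = FALSE)  # CI band drawn as dashed lines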
cds2 = function (t, T, tr, r, tint, int, R = 0.005, ...) { if (!is.numeric(t)) stop("time t must be a numeric vector") if (!is.numeric(int)) stop("intensity int must be a numeric vector") if (!is.numeric(r)) stop("rates r must be a numeric vector") new.r <- stats::approx(x = tr, y = r, xout = t, method = "linear", rule = 2)$y new.int <- stats::approx(x = tint, y = int, xout = t, method = "linear", rule = 2)$y cds1 <- cds(t = t, r = new.r, int = new.int, R = R, ...) Q <- stats::approx(x = t, y = cds1$Survival, xout = T, rule = 2)$y price <- stats::approx(x = t, y = cds1$Price, xout = T, rule = 2)$y premium <- stats::approx(x = t, y = cds1$PremiumLeg, xout = T, rule = 2)$y protection <- stats::approx(x = t, y = cds1$ProtectionLeg, xout = T, rule = 2)$y out <- data.frame(T = T, Q = Q, premium = premium, protection = protection, rate = R * protection/premium, price = price) names(out) <- c("T", "Survival", "PremiumLeg", "ProtectionLeg", "Rate", "Price") return(out) }
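# Hypothetical usage (left unevaluated because the cds() pricer that cds2()
# wraps is defined elsewhere, not in this snippet): interpolate the rate and
# hazard-intensity curves onto a quarterly grid t and report quotes at T.
# cds2(t = seq(0.25, 5, 0.25), T = c(1, 3, 5),
#      tr = c(1, 5), r = c(0.010, 0.020),
#      tint = c(1, 5), int = c(0.020, 0.030), R = 0.005)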
setMethodS3("drawCnRegions", "HaarSeg", function(object, ...) { cnr <- extractCopyNumberRegions(object, ...) drawLevels(cnr, ...) }) setMethodS3("extractCopyNumberRegions", "HaarSeg", function(object, ...) { output <- object$output regions <- output$SegmentsTable data <- object$data x <- data$x starts <- regions[,1] counts <- regions[,2] ends <- starts+counts-1 starts <- x[starts] ends <- x[ends] means <- regions[,3] CopyNumberRegions( chromosome=data$chromosome, start=starts, stop=ends, mean=means, count=counts ) }) setMethodS3("extractRawCopyNumbers", "HaarSeg", function(object, ...) { data <- object$data RawCopyNumbers(cn=data$M, x=data$x, chromosome=data$chromosome) })
.Mpsi.R.names <- c('bisquare', 'lqq', 'welsh', 'opt', 'hampel', 'ggw', 'mopt') .Mpsi.M.names <- c('huber') .Mpsi.names <- c(R= .Mpsi.R.names, M= .Mpsi.M.names) .regularize.Mpsi <- function(psi, redescending = TRUE) { stopifnot(is.character(psi), length(psi) == 1) psi <- tolower(psi) psi <- switch(psi, 'tukey'= , 'biweight'= "bisquare", psi) nms <- if(redescending) .Mpsi.R.names else .Mpsi.names if (is.na(i <- pmatch(psi, nms))) stop(gettextf("'psi' should be one of %s", paste(dQuote(nms), collapse=", ")), domain = NA) nms[i] } .Mpsi.tuning.defaults <- list( 'huber' = 1.345 , 'bisquare' = 4.685061 , 'welsh' = 2.11 , 'ggw' = c(-0.5, 1.5, .95, NA) , 'lqq' = c(-0.5, 1.5, .95, NA) , 'opt' = c(a = 0.01317965, lower = 0.03305454, upper = 3.003281, c = 1.0, "Psi_Opt(lower)" = -0.0005459033, "rho(Inf)" = 3.331370) , 'hampel' = c(1.5, 3.5, 8) * 0.9016085 , 'mopt' = c(a = 0.01316352, normConst = 1.05753107, upper = 3.00373940, c = 1.0, "Psi_Opt(1)" = 0.46057111, "rho(Inf)" = 3.53690811) ) .Mpsi.tuning.default <- function(psi) { if(is.null(p <- .Mpsi.tuning.defaults[[psi]])) stop(gettextf("invalid 'psi'=%s; possibly use .regularize.Mpsi(%s)", psi, "psi, redescending=FALSE"), domain=NA) p } .Mchi.tuning.defaults <- list( 'bisquare' = 1.54764 , 'welsh' = 0.5773502 , 'ggw' = c(-0.5, 1.5, NA, .50) , 'lqq' = c(-0.5, 1.5, NA, .50) , 'opt' = c(a = 0.01317965, lower = 0.03305454, upper = 3.003281, c = 0.2618571, "Psi_Opt(lower)" = -0.0005459033, "rho(Inf)" = 3.331370) , 'hampel' = c(1.5, 3.5, 8) * 0.2119163 , 'mopt' = c(a = 0.01316352, normConst = 1.05753107, upper = 3.00373940, c = 0.38124404, "Psi_Opt(1)" = 0.46057111, "rho(Inf)" = 3.53690811) ) .Mchi.tuning.default <- function(psi) { if(is.null(p <- .Mchi.tuning.defaults[[psi]])) stop(gettextf("invalid 'psi'=%s; possibly use .regularize.Mpsi(%s)", psi, "psi"), domain=NA) p } lmrob.control <- function(seed = NULL, nResample = 500, tuning.chi = NULL, bb = 0.5, tuning.psi = NULL, max.it = 50, groups = 5, n.group = 400, k.fast.s = 1, best.r.s = 2, k.max = 200, maxit.scale = 200, k.m_s = 20, refine.tol = 1e-7, rel.tol = 1e-7, solve.tol = 1e-7, trace.lev = 0, mts = 1000, subsampling = c("nonsingular", "simple"), compute.rd = FALSE, method = 'MM', psi = 'bisquare', numpoints = 10, cov = NULL, split.type = c("f", "fi", "fii"), fast.s.large.n = 2000, eps.outlier = function(nobs) 0.1 / nobs, eps.x = function(maxx) .Machine$double.eps^(.75)*maxx, compute.outlier.stats = method, warn.limit.reject = 0.5, warn.limit.meanrw = 0.5, ...) 
{ p.ok <- missing(psi) if (missing(cov) || is.null(cov)) cov <- if(method %in% c('SM', 'MM')) ".vcov.avar1" else ".vcov.w" if(!p.ok) psi <- .regularize.Mpsi(psi) subsampling <- match.arg(subsampling) compute.const <- (psi %in% c('ggw', 'lqq')) if(is.null(tuning.chi)) tuning.chi <- .Mchi.tuning.default(psi) else if(compute.const) tuning.chi <- .psi.const(tuning.chi, psi) if(is.null(tuning.psi)) tuning.psi <- .Mpsi.tuning.default(psi) else if(compute.const) tuning.psi <- .psi.const(tuning.psi, psi) c(list(setting = NULL, seed = as.integer(seed), nResample=nResample, psi=psi, tuning.chi=tuning.chi, bb=bb, tuning.psi=tuning.psi, max.it=max.it, groups=groups, n.group=n.group, best.r.s=best.r.s, k.fast.s=k.fast.s, k.max=k.max, maxit.scale=maxit.scale, k.m_s=k.m_s, refine.tol=refine.tol, rel.tol=rel.tol, solve.tol=solve.tol, trace.lev=trace.lev, mts=mts, subsampling=subsampling, compute.rd=compute.rd, method=method, numpoints=numpoints, cov=cov, split.type = match.arg(split.type), fast.s.large.n=fast.s.large.n, eps.outlier = eps.outlier, eps.x = eps.x, compute.outlier.stats = sub("^MM$", "SM", compute.outlier.stats), warn.limit.reject = warn.limit.reject, warn.limit.meanrw = warn.limit.meanrw), list(...)) } lmrob.control.neededOnly <- function(control) { if(is.null(control)) return(control) switch(sub("^(S|M-S).*", "\\1", control$method), S = { control$k.m_s <- NULL control$split.type <- NULL if (length(residuals) <= control$fast.s.large.n) { control$groups <- NULL control$n.group <- NULL } }, `M-S` = { control$refine.tol <- NULL control$groups <- NULL control$n.group <- NULL control$best.r.s <- NULL control$k.fast.s <- NULL }, { control$tuning.chi <- NULL control$bb <- NULL control$refine.tol <- NULL control$nResample <- NULL control$groups <- NULL control$n.group <- NULL control$best.r.s <- NULL control$k.fast.s <- NULL control$k.max <- NULL control$k.m_s <- NULL control$split.type <- NULL control$mts <- NULL control$subsampling <- NULL } ) if (!grepl("D", control$method)) control$numpoints <- NULL if (control$method == 'SM') control$method <- 'MM' control } lmrob.fit.MM <- function(x, y, control) .Defunct("lmrob.fit(*, control) with control$method = 'SM'") lmrob.fit <- function(x, y, control, init=NULL, mf=NULL) { if(!is.matrix(x)) x <- as.matrix(x) if (control$method == "MM") control$method <- "SM" est <- if (is.null(init)) { if ((M1 <- substr(control$method,1,1)) != 'S') { warning(gettextf("Initial estimator '%s' not supported; using S-estimator instead", M1), domain = NA) substr(control$method,1,1) <- 'S' } init <- lmrob.S(x, y, control = control, mf = mf) 'S' } else { stopifnot(is.list(init)) if (is.null(init$converged)) init$converged <- TRUE if (is.null(init$control)) { init$control <- control M <- init$control$method <- 'l' } else if(!length(M <- init$control$method) || !nzchar(M)) M <- "l" M } stopifnot(is.numeric(init$coef), length(init$coef) == ncol(x), is.numeric(init$scale), init$scale >= 0) if (est != 'S' && control$cov == '.vcov.avar1') { warning( ".vcov.avar1 can only be used when initial estimator is S; using .vcov.w instead") control$cov <- ".vcov.w" } trace.lev <- control$trace.lev if (init$converged) { method <- sub(paste0("^", est), '', control$method) if(trace.lev) { cat(sprintf("init converged (remaining method = \"%s\") -> coef=\n", method)) print(init$coef) } for (step in strsplit(method,'')[[1]]) { est <- paste0(est, step) init <- switch(step, D = lmrob..D..fit(init, x, mf = mf, control=control, method = init$control$method), M = lmrob..M..fit(x = x, y = y, obj = init, 
mf = mf, control=control, method = init$control$method), stop('only M and D are steps supported after "init" computation')) if(trace.lev) { cat(sprintf("step \"%s\" -> new coef=\n", step)); print(init$coef) } if (!init$converged) { warning(gettextf( "%s-step did NOT converge. Returning unconverged %s-estimate", step, est), domain = NA) } } } if (is.null(init$qr)) init$qr <- qr(x * sqrt(init$rweights)) if (is.null(init$rank)) init$rank <- init$qr$rank control$method <- est init$control <- control init$cov <- if (init$scale == 0) { matrix(0, ncol(x), ncol(x), dimnames=list(colnames(x), colnames(x))) } else if (is.null(x)) { NA } else { if (is.null(control$cov) || control$cov == "none") NA else { lf.cov <- if (!is.function(control$cov)) get(control$cov, mode='function') else control$cov lf.cov(init, x) } } df <- NROW(y) - init$rank init$degree.freedom <- init$df.residual <- df init } globalVariables("r", add=TRUE) .vcov.w <- function(obj, x=obj$x, scale=obj$scale, cov.hubercorr=ctrl$cov.hubercorr, cov.dfcorr=ctrl$cov.dfcorr, cov.resid=ctrl$cov.resid, cov.corrfact=ctrl$cov.corrfact, cov.xwx=ctrl$cov.xwx) { ctrl <- obj$control if (is.null(cov.hubercorr)) cov.hubercorr <- !grepl('D', ctrl$method) else if (!is.logical(cov.hubercorr)) stop(':.vcov.w: cov.hubercorr must be logical (or NULL)') valid.corrfact <- c('tau', 'empirical', 'asympt', 'hybrid', 'tauold') if (is.null(cov.corrfact)) { cov.corrfact <- if (cov.hubercorr) 'empirical' else 'tau' } else if(length(cov.corrfact) != 1 || is.na(match(cov.corrfact, valid.corrfact))) stop(":.vcov.w: cov.corrfact must be one of ", paste(dQuote(valid.corrfact), collapse=', ')) valid.dfcorr <- c("mean", "none", "mn.vc", "varc", "mn.df") if (is.null(cov.dfcorr)) { cov.dfcorr <- if (cov.hubercorr || cov.corrfact %in% c('tau', 'hybrid')) "mn.vc" else "mean" } else if(length(cov.dfcorr) != 1 || is.na(match(cov.dfcorr, valid.dfcorr))) stop(":.vcov.w: cov.dfcorr must be one of ", paste(dQuote(valid.dfcorr), collapse=', ')) valid.cov.resid <- c('final', 'initial', 'trick') if (is.null(cov.resid)) cov.resid <- 'final' else if (length(cov.resid) != 1 || is.na(match(cov.resid, valid.cov.resid))) stop(":.vcov.w: cov.resid must be one of ", paste(dQuote(valid.cov.resid), collapse=', ')) if (is.null(cov.xwx)) cov.xwx <- TRUE else if (!is.logical(cov.xwx)) stop(':.vcov.w: cov.xwx must be logical (or NULL)') if (is.null(x)) x <- model.matrix(obj) psi <- ctrl$psi if (is.null(psi)) stop('parameter psi is not defined') c.psi <- if (cov.resid == 'initial') ctrl$tuning.chi else if (ctrl$method %in% c('S', 'SD')) ctrl$tuning.chi else ctrl$tuning.psi if (!is.numeric(c.psi)) stop("parameter 'tuning.psi' is not numeric") if (cov.resid == 'final' && (class(obj)[1] == 'lmrob.S')) warning(":.vcov.w: ignoring cov.resid == final since est != final") if (is.null(scale)) { warning(":.vcov.w: scale missing, using D scale") scale <- lmrob..D..fit(obj)$scale } n <- NROW(x) w <- if (cov.xwx) obj$rweights else rep(1,n) if (!is.qr(obj$qr) || !cov.xwx) obj$qr <- qr(x * sqrt(w)) p <- if (is.null(obj$rank)) obj$qr$rank else obj$rank cinv <- if(is.qr(obj$qr)) tryCatch(tcrossprod(solve(qr.R(obj$qr))), error = function(e)e) if(inherits(cinv, 'error')) cinv <- matrix(NA,p,p) if (cov.corrfact == 'asympt') { if(cov.hubercorr) warning("option 'cov.hubercorr' is ignored for cov.corrfact = \"asympt\"") corrfact <- if (psi == 'ggw') { if ( isTRUE(all.equal(c.psi, c(-.5, 1.0, 0.95, NA)))) 1.052619 else if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.95, NA)))) 1.0525888644 else if (isTRUE(all.equal(c.psi, c(-.5, 
1.0, 0.85, NA)))) 1.176479 else if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.85, NA)))) 1.176464 else lmrob.E(psi(r)^2, ctrl) / lmrob.E(r*psi(r), ctrl)^2 } else if (isTRUE(all.equal(c.psi, .Mpsi.tuning.default(psi)))) { switch(psi, bisquare = 1.0526317574, welsh = 1.0526704649, opt = 1.0526419204, hampel = 1.0526016980, lqq = 1.0526365291, stop(':.vcov.w: unsupported psi function')) } else lmrob.E(psi(r)^2, ctrl) / lmrob.E(r*psi(r), ctrl)^2 varcorr <- 1 } else { rstand <- if (cov.resid == 'initial') { lobj <- if (grepl('[DT]$',ctrl$method)) obj$init$init else obj$init resid(lobj) / lobj$scale } else if (cov.resid == 'trick') { obj$init$resid / obj$init$scale } else obj$resid / scale tau <- if (cov.corrfact %in% c('tau', 'hybrid', 'tauold')) { if (!is.null(obj$tau)) obj$tau else if (!is.null(obj$init$tau)) obj$init$tau else stop("(tau / hybrid / tauold): tau not found in 'obj'") } else rep(1,n) rstand <- rstand / tau r.psi <- Mpsi(rstand, c.psi, psi) r.psipr <- Mpsi(rstand, c.psi, psi, deriv = 1) if (any(is.na(r.psipr))) warning(":.vcov.w: Caution. Some psi'() are NA") mpp2 <- (mpp <- mean(r.psipr, na.rm=TRUE))^2 hcorr <- if (cov.hubercorr) { vpp <- sum((r.psipr - mpp)^2) / n (1 + p/n * vpp/mpp2)^2 } else 1 varcorr <- if (cov.corrfact == 'tau' && any(tau != 1)) 1 / mean(tau^2) else n / (n - p) if (cov.corrfact == 'hybrid') { mpp2 <- if (psi == 'ggw') { if ( isTRUE(all.equal(c.psi, c(-.5, 1.0, 0.95, NA)))) 0.7598857 else if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.95, NA)))) 0.6817983 else if (isTRUE(all.equal(c.psi, c(-.5, 1.0, 0.85, NA)))) 0.4811596 else if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.85, NA)))) 0.411581 else lmrob.E(r*psi(r), ctrl)^2 } else if (isTRUE(all.equal(c.psi, .Mpsi.tuning.default(psi)))) switch(psi, bisquare = 0.5742327, welsh = 0.5445068, opt = 0.8598825, hampel = 0.6775217, lqq = 0.6883393, stop(':.vcov.w: unsupported psi for "hybrid" correction factor')) else lmrob.E(r*psi(r), ctrl)^2 } corrfact <- mean({ if (cov.corrfact == 'tauold') 1 else tau^2 } * r.psi^2)/mpp2 * hcorr } sscorr <- switch(cov.dfcorr, "mean" = mean(w), "mn.vc" = mean(w) * varcorr, "none" = 1, "varc" = varcorr, "mn.df" = mean(w)^2 / (1 - p / sum(w)), stop("invalid 'cov.dfcorr': ", cov.dfcorr)) structure(scale^2 * sscorr * corrfact * cinv, weights = w, scale = scale, scorr = sscorr, corrfact = corrfact) } .vcov.avar1 <- function(obj, x=obj$x, posdef.meth = c("posdefify", "orig")) { stopifnot(is.list(ctrl <- obj$control)) if (!is.null(ctrl$method) && !ctrl$method %in% c('SM', 'MM')) stop('.vcov.avar1() supports only SM or MM estimates') psi <- chi <- ctrl$psi if (is.null(psi)) stop('parameter psi is not defined') stopifnot(is.numeric(c.chi <- ctrl$tuning.chi), is.numeric(c.psi <- ctrl$tuning.psi)) r0 <- obj$init$resid r <- resid(obj) scale <- obj$scale if (is.null(x)) x <- model.matrix(obj) bb <- 1/2 n <- length(r) stopifnot(n == length(r0), is.matrix(x), n == nrow(x)) p <- ncol(x) r.s <- r / scale r0.s <- r0 / scale w <- Mpsi(r.s, cc = c.psi, psi = psi, deriv = 1) w0 <- Mchi(r0.s, cc = c.chi, psi = chi, deriv = 1) x.wx <- crossprod(x, x * w) if(inherits(A <- tryCatch(solve(x.wx) * scale, error=function(e)e), "error")) { warning("X'WX is almost singular. Consider rather using cov = \".vcov.w\"") A <- tryCatch(solve(x.wx, tol = 0) * scale, error=function(e)e) if(inherits(A, "error")) stop("X'WX is singular. 
Rather use cov = \".vcov.w\"") } a <- A %*% (crossprod(x, w * r.s) / mean(w0 * r0.s)) w <- Mpsi( r.s, cc = c.psi, psi = psi) w0 <- Mchi(r0.s, cc = c.chi, psi = chi) Xww <- crossprod(x, w*w0) u1 <- A %*% crossprod(x, x * w^2) %*% (n * A) u2 <- a %*% crossprod(Xww, A) u3 <- A %*% tcrossprod(Xww, a) u4 <- mean(w0^2 - bb^2) * tcrossprod(a) ret <- (u1 - u2 - u3 + u4)/n ev <- eigen(ret, symmetric = TRUE) if (any(neg.ev <- ev$values < 0)) { posdef.meth <- match.arg(posdef.meth) if(ctrl$trace.lev) message("fixing ", sum(neg.ev), " negative eigen([",p,"])values") Q <- ev$vectors switch(posdef.meth, "orig" = { levinv <- solve(Q) cov.eb <- levinv %*% ret %*% Q cov.eb[, neg.ev] <- 0 ret <- Q %*% cov.eb %*% levinv }, "posdefify" = { lam <- ev$values lam[neg.ev] <- 0 o.diag <- diag(ret) ret <- Q %*% (lam * t(Q)) D <- sqrt(pmax.int(0, o.diag)/diag(ret)) ret[] <- D * ret * rep(D, each = p) }, stop("invalid 'posdef.meth': ", posdef.meth)) } attr(ret,"weights") <- w / r.s attr(ret,"eigen") <- ev ret } lmrob..M..fit <- function (x = obj$x, y = obj$y, beta.initial = obj$coef, scale = obj$scale, control = obj$control, obj, mf = obj$model, method = obj$control$method) { c.psi <- .psi.conv.cc(control$psi, control$tuning.psi) ipsi <- .psi2ipsi(control$psi) stopifnot(is.matrix(x)) n <- nrow(x) p <- ncol(x) if (is.null(y) && !is.null(obj$model)) y <- model.response(obj$model, "numeric") stopifnot(length(y) == n, length(c.psi) > 0, c.psi[-5] >= 0, scale >= 0, length(beta.initial) == p) ret <- .C(R_lmrob_MM, x = as.double(x), y = as.double(y), n = as.integer(n), p = as.integer(p), beta.initial = as.double(beta.initial), scale = as.double(scale), coefficients = double(p), residuals = double(n), iter = as.integer(control$max.it), c.psi = as.double(c.psi), ipsi = as.integer(ipsi), loss = double(1), rel.tol = as.double(control$rel.tol), converged = logical(1), trace.lev = as.integer(control$trace.lev), mts = as.integer(control$mts), ss = .convSs(control$subsampling) )[c("coefficients", "scale", "residuals", "loss", "converged", "iter")] ret$fitted.values <- drop(x %*% ret$coefficients) names(ret$coefficients) <- colnames(x) names(ret$residuals) <- rownames(x) ret$rweights <- lmrob.rweights(ret$residuals, scale, control$tuning.psi, control$psi) ret$control <- control if (!missing(obj)) { if (!grepl('M$', method)) { method <- paste0(method, 'M') } if (!is.null(obj$call)) { ret$call <- obj$call ret$call$method <- method } if (method %in% c('SM', 'MM')) { ret$init.S <- obj } else { ret$init <- obj[intersect(names(obj), c("coefficients", "scale", "residuals", "loss", "converged", "iter", "rweights", "fitted.values", "control", "ostats", "init.S", "init", "kappa", "tau"))] class(ret$init) <- 'lmrob' ret <- c(ret, obj[intersect(names(obj), c("df.residual", "degree.freedom", "xlevels", "terms", "model", "x", "y", "na.action", "contrasts", "MD"))]) } ret$qr <- qr(x * sqrt(ret$rweights)) ret$rank <- ret$qr$rank if (!is.null(obj$cov)) { if (!method %in% c('SM', 'MM') && ret$control$cov == '.vcov.avar1') ret$control$cov <- '.vcov.w' lf.cov <- if (!is.function(ret$control$cov)) get(ret$control$cov, mode='function') else ret$control$cov ret$cov <- lf.cov(ret, x) } if (!is.null(obj$assign)) ret$assign <- obj$assign if (method %in% control$compute.outlier.stats) ret$ostats <- outlierStats(ret, x, control) } class(ret) <- "lmrob" ret } lmrob.S <- function (x, y, control, trace.lev = control$trace.lev, mf = NULL) { if (!is.matrix(x)) x <- as.matrix(x) n <- nrow(x) p <- ncol(x) nResample <- as.integer(control$nResample) groups <- 
as.integer(control$groups) nGr <- as.integer(control$n.group) large_n <- (n > control$fast.s.large.n) if (large_n) { if (nGr <= p) stop("'control$n.group' must be larger than 'p' for 'large_n' algorithm") if (nGr * groups > n) stop("'groups * n.group' must be smaller than 'n' for 'large_n' algorithm") if (nGr <= p + 10) warning("'control$n.group' is not much larger than 'p', probably too small") } if (length(seed <- control$seed) > 0) { if (exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { seed.keep <- get(".Random.seed", envir = .GlobalEnv, inherits = FALSE) on.exit(assign(".Random.seed", seed.keep, envir = .GlobalEnv)) } assign(".Random.seed", seed, envir = .GlobalEnv) } bb <- as.double(control$bb) c.chi <- .psi.conv.cc(control$psi, control$tuning.chi) best.r <- as.integer(control$best.r.s) stopifnot(length(c.chi) > 0, c.chi[-5] >= 0, length(bb) > 0, length(best.r) > 0, best.r >= 1, length(y) == n, n > 0) b <- .C(R_lmrob_S, x = as.double(x), y = as.double(y), n = as.integer(n), p = as.integer(p), nResample = nResample, scale = double(1), coefficients = double(p), as.double(c.chi), .psi2ipsi(control$psi), bb, best_r = best.r, groups = groups, n.group = nGr, k.fast.s = as.integer(control$k.fast.s), k.iter = as.integer(control$k.max), maxit.scale = as.integer(control$maxit.scale), refine.tol = as.double(control$refine.tol), inv.tol = as.double(control$solve.tol), converged = logical(1), trace.lev = as.integer(trace.lev), mts = as.integer(control$mts), ss = .convSs(control$subsampling), fast.s.large.n = as.integer(if (large_n) control$fast.s.large.n else n+1) )[c("coefficients", "scale", "k.iter", "converged")] scale <- b$scale if (scale < 0) stop("C function R_lmrob_S() exited prematurely") if (scale == 0) warning("S-estimated scale == 0: Probably exact fit; check your data") if(trace.lev) { cat(sprintf("lmrob.S(): scale = %g; coeff.=\n", scale)); print(b$coefficients) } b$fitted.values <- x %*% b$coefficients b$residuals <- setNames(drop(y - b$fitted.values), rownames(x)) names(b$coefficients) <- colnames(x) b$rweights <- lmrob.rweights(b$residuals, scale, control$tuning.chi, control$psi) control$method <- 'S' b$control <- control if (identical(parent.frame(), .GlobalEnv)) b$call <- match.call() class(b) <- 'lmrob.S' if ("S" %in% control$compute.outlier.stats) b$ostats <- outlierStats(b, x, control) b } lmrob..D..fit <- function(obj, x=obj$x, control = obj$control, mf = obj$model, method = obj$control$method) { if (is.null(control)) stop('lmrob..D..fit: control is missing') if (!obj$converged) stop('lmrob..D..fit: prior estimator did not converge, stopping') if (is.null(x)) x <- model.matrix(obj) w <- obj$rweights if (is.null(w)) stop('lmrob..D..fit: robustness weights undefined') if (is.null(obj$residuals)) stop('lmrob..D..fit: residuals undefined') r <- obj$residuals psi <- control$psi if (is.null(psi)) stop('lmrob..D..fit: parameter psi is not defined') c.psi <- .psi.conv.cc(psi, if (method %in% c('S', 'SD')) control$tuning.chi else control$tuning.psi) if (!is.numeric(c.psi)) stop('lmrob..D..fit: parameter tuning.psi is not numeric') obj$init <- obj[names(obj)[na.omit(match( c("coefficients","scale", "residuals", "loss", "converged", "iter", "ostats", "rweights", "fitted.values", "control", "init.S", "init"), names(obj)))]] obj$init.S <- NULL if (is.null(obj$kappa)) obj$kappa <- lmrob.kappa(obj, control) kappa <- obj$kappa if (is.null(obj$tau)) obj$tau <- lmrob.tau(obj, x, control) tau <- obj$tau scale.1 <- sqrt(sum(w * r^2) / kappa / sum(tau^2*w)) ret <- 
.C(R_find_D_scale, r = as.double(r), kappa = as.double(kappa), tau = as.double(tau), length = as.integer(length(r)), scale = as.double(scale.1), c = as.double(c.psi), ipsi = .psi2ipsi(psi), type = 3L, rel.tol = as.double(control$rel.tol), k.max = as.integer(control$k.max), converged = logical(1))[c("converged", "scale")] obj$scale <- if(ret$converged) ret$scale else NA obj$converged <- ret$converged if (!grepl('D$', method)) { method <- method if (method == 'MM') method <- 'SM' method <- paste0(method, 'D') } if (!is.null(obj$call)) obj$call$method <- method obj$control <- control class(obj) <- "lmrob" if (!is.null(obj$cov)) { if (control$cov == '.vcov.avar1') control$cov <- '.vcov.w' lf.cov <- if (!is.function(control$cov)) get(control$cov, mode='function') else control$cov obj$cov <- lf.cov(obj, x) } if (method %in% control$compute.outlier.stats) obj$ostats <- outlierStats(obj, x, control) obj } globalVariables(c("psi", "wgt", "r"), add=TRUE) lmrob.kappa <- function(obj, control = obj$control) { if (is.null(control)) stop('control is missing') if (control$method %in% c('S', 'SD')) control$tuning.psi <- control$tuning.chi fun.min <- function(kappa) lmrob.E(psi(r)*r - kappa*wgt(r), control = control) uniroot(fun.min, c(0.1, 1))$root } lmrob.tau <- function(obj, x=obj$x, control = obj$control, h, fast = TRUE) { if(is.null(control)) stop("'control' is missing") if(missing(h)) h <- if (is.null(obj$qr)) .lmrob.hat(x, obj$rweights) else .lmrob.hat(wqr = obj$qr) if (fast && !control$method %in% c('S', 'SD')) { c.psi <- control$tuning.psi tfact <- tcorr <- NA switch(control$psi, opt = if (isTRUE(all.equal(c.psi, 1.060158))) { tfact <- 0.94735878 tcorr <- -0.09444537 }, bisquare = if (isTRUE(all.equal(c.psi, 4.685061))) { tfact <- 0.9473684 tcorr <- -0.0900833 }, welsh = if (isTRUE(all.equal(c.psi, 2.11))) { tfact <- 0.94732953 tcorr <- -0.07569506 }, ggw = if (isTRUE(all.equal(c.psi, c(-.5, 1.0, 0.95, NA)))) { tfact <- 0.9473787 tcorr <- -0.1143846 } else if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.95, NA)))) { tfact <- 0.94741036 tcorr <- -0.08424648 }, lqq = if (isTRUE(all.equal(c.psi, c(-.5, 1.5, 0.95, NA)))) { tfact <- 0.94736359 tcorr <- -0.08594805 }, hampel = if (isTRUE(all.equal(c.psi, c(1.35241275, 3.15562975, 7.212868)))) { tfact <- 0.94739770 tcorr <- -0.04103958 }, {}) if (!is.na(tfact)) return(sqrt(1 - tfact*h) * (tcorr*h + 1)) } kappa <- if(is.null(obj$kappa)) lmrob.kappa(obj, control) else obj$kappa psi <- control$psi if (is.null(psi)) stop('parameter psi is not defined') cpsi <- if (control$method %in% c('S', 'SD')) control$tuning.chi else control$tuning.psi cpsi <- .psi.conv.cc(psi, cpsi) ipsi <- .psi2ipsi(psi) inta <- function(r) .Mpsi(r, cpsi, ipsi)^2 * dnorm(r) intb <- function(r) .Mpsi(r, cpsi, ipsi, deriv = 1) * dnorm(r) ta <- integrate(inta, -Inf,Inf)$value tb <- integrate(intb, -Inf,Inf)$value hu <- unique(h) nu <- length(hu) tau <- numeric(length=nu) tc <- ta/tb^2 gh <- ghq(control$numpoints) ghz <- gh$nodes ghw <- gh$weights for (i in 1:nu) { s <- sqrt(tc*(hu[i]-hu[i]^2)) tc2 <- hu[i]/tb fun <- function(w, v, sigma.i) { t <- (v - tc2*.Mpsi(v, cpsi, ipsi) + w*s)/sigma.i psi.t <- .Mpsi(t, cpsi, ipsi) (psi.t*t - kappa*psi.t/t) * dnorm(v)*dnorm(w) } wint <- function(v, sigma.i) { sapply(v, function(v.j) sum(fun(ghz, v.j, sigma.i)*ghw)) } vint <- function(sigma.i) { sum(wint(ghz, sigma.i)*ghw) } tau[i] <- uniroot(vint, c(if (hu[i] < 0.9) 3/20 else 1/16, 1.1))$root } tau[match(h, hu)] } lmrob.tau.fast.coefs <- function(cc, psi) { ctrl <- lmrob.control(tuning.psi = cc, psi = psi) 
levs <- seq(0, 0.8, length.out = 80) taus <- lmrob.tau(list(), control=ctrl, h=levs, fast=FALSE) ta <- lmrob.E(psi(r)^2, ctrl, use.integrate = TRUE) tb <- lmrob.E(psi(r, 1), ctrl, use.integrate = TRUE) tfact <- 2 - ta/tb^2 taus.0 <- sqrt(1 - tfact * levs) tcorr <- coef(robustbase::lmrob(taus / taus.0 - 1 ~ levs - 1)) c(tfact = tfact, tcorr = tcorr) } lmrob.hatmatrix <- function(x, w = rep(1, NROW(x)), wqr = qr(sqrt(w) * x), names = FALSE) { H <- tcrossprod(qr.qy(wqr, diag(1, NROW(x), wqr$rank))) if(names && !is.null(rnms <- dimnames(wqr$qr)[[1L]])) dimnames(H) <- list(rnms,rnms) H } .lmrob.hat <- function(x, w = rep(1, NROW(x)), wqr = qr(sqrt(w) * x), names = TRUE) { if (missing(wqr) && !is.matrix(x)) x <- as.matrix(x) h <- pmin(1, rowSums(qr.qy(wqr, diag(1, NROW(wqr$qr), wqr$rank))^2)) if(names && !is.null(rnms <- dimnames(wqr$qr)[[1L]])) names(h) <- rnms h } hatvalues.lmrob <- function(model, ...) { if (is.null(wqr <- model$qr)) .lmrob.hat(model$x, model$rweights) else .lmrob.hat(wqr = wqr) } .psi2ipsi <- function(psi) { psi <- .regularize.Mpsi(psi, redescending=FALSE) i <- match(psi, c( 'huber', 'bisquare', 'welsh', 'opt', 'hampel', 'ggw', 'lqq', 'mopt' )) if(is.na(i)) stop("internal logic error in psi() function name: ", psi, " Please report!") i - 1L } .psi.conv.cc <- function(psi, cc) { if (!is.character(psi) || length(psi) != 1) stop("argument 'psi' must be a string (denoting a psi function)") if(!is.numeric(cc)) stop("tuning constant 'cc' is not numeric") switch(tolower(psi), 'ggw' = { if ( isTRUE(all.equal(cc, c(-.5, 1 , 0.95, NA)))) return(1) else if (isTRUE(all.equal(cc, c(-.5, 1 , 0.85, NA)))) return(2) else if (isTRUE(all.equal(cc, c(-.5, 1. , NA, 0.5)))) return(3) else if (isTRUE(all.equal(cc, c(-.5, 1.5, 0.95, NA)))) return(4) else if (isTRUE(all.equal(cc, c(-.5, 1.5, 0.85, NA)))) return(5) else if (isTRUE(all.equal(cc, c(-.5, 1.5, NA, 0.5)))) return(6) else if (length(cc) == 5 && cc[1] == 0 || (length(cc <- attr(cc, 'constants')) == 5 && cc[1] == 0)) return(cc) else stop('Coefficients for ',psi,' function incorrectly specified.\n', 'Use c(minimal slope, b, efficiency, breakdown point) or c(0, a,b,c, max_rho).') }, 'lqq' = { if (isTRUE(all.equal(cc, c(-.5, 1.5, 0.95, NA)))) return(c(1.4734061, 0.9822707, 1.5)) else if (isTRUE(all.equal(cc, c(-.5, 1.5, NA, 0.5)))) return(c(0.4015457, 0.2676971, 1.5)) else if (length(cc) == 3 || length(cc <- attr(cc, 'constants')) == 3) return(cc) else stop('Coefficients for ',psi,' function incorrectly specified.\n', 'Use c(minimal slope, b, efficiency, breakdown point) [2 cases only] or c(b, c, s)') }, 'hampel' = { if (length(cc) != 3) stop('Coef. for Hampel psi function not of length 3') }, 'opt' = { if (length(cc) != 6) stop('Coef. for Optimal psi function not of length 6') }, 'mopt' = { if (length(cc) != 6) stop('Coef. for Modified Optimal psi function not of length 6') }, { if (length(cc) != 1) stop('Coef. for psi function ', psi,' not of length 1') }) return(cc) } .psi.ggw.mxs <- function(a, b, c, tol = .Machine$double.eps^0.25) { ipsi <- .psi2ipsi('ggw') ccc <- c(0, a, b, c, 1) optimize(.Mpsi, c(c, max(a+b+2*c, 0.5)), ccc=ccc, ipsi=ipsi, deriv = 1, tol = tol) } .psi.ggw.ms <- function(a, b, c, tol = .Machine$double.eps^0.25) .psi.ggw.mxs(a, b, c, tol=tol)[["objective"]] .psi.ggw.finda <- function(ms, b, c, tol = .Machine$double.eps^0.25, maxiter = 1000, ms.tol = tol / 64,...)
{ val <- uniroot(function(a) .psi.ggw.ms(1/a, b, c, tol=ms.tol) - ms, c(200, if (b > 1.4) 1/400 else if (b > 1.3) 1/50 else 1/20), tol=tol, maxiter=maxiter) 1/val$root } .psi.ggw.eff <- function(a, b, c) { ipsi <- .psi2ipsi('ggw') ccc <- c(0, a, b, c, 1) lmrob.E(.Mpsi(r, ccc, ipsi, deriv=1), use.integrate = TRUE)^2 / lmrob.E(.Mpsi(r, ccc, ipsi) ^2, use.integrate = TRUE) } .psi.ggw.bp <- function(a, b, c, ...) { ipsi <- .psi2ipsi('ggw') abc <- c(0, a, b, c) nc <- integrate(.Mpsi, 0, Inf, ccc = c(abc, 1), ipsi=ipsi, ...)$value lmrob.E(.Mchi(r, ccc = c(abc, nc), ipsi), use.integrate = TRUE) } .psi.ggw.findc <- function(ms, b, eff = NA, bp = NA, subdivisions = 100L, rel.tol = .Machine$double.eps^0.25, abs.tol = rel.tol, tol = .Machine$double.eps^0.25, ms.tol = tol/64, maxiter = 1000) { c. <- if (!is.na(eff)) { if (!is.na(bp)) warning('tuning constants for ggw psi: both eff and bp specified, ignoring bp') tryCatch(uniroot(function(x) .psi.ggw.eff(.psi.ggw.finda(ms, b, x, ms.tol=ms.tol), b, x) - eff, c(0.15, if (b > 1.61) 1.4 else 1.9), tol=tol, maxiter=maxiter)$root, error=function(e)e) } else { if (is.na(bp)) stop("neither breakdown point 'bp' nor efficiency 'eff' specified") tryCatch(uniroot(function(x) .psi.ggw.bp(.psi.ggw.finda(ms, b, x, ms.tol=ms.tol), b, x) - bp, c(0.08, if (ms < -0.4) 0.6 else 0.4), tol=tol, maxiter=maxiter)$root, error=function(e)e) } if (inherits(c., 'error')) stop(gettextf('unable to find constants for "ggw" psi function: %s', c.$message), domain=NA) a <- .psi.ggw.finda(ms, b, c., ms.tol=ms.tol) nc <- integrate(.Mpsi, 0, Inf, ccc= c(0, a, b, c., 1), ipsi = .psi2ipsi('ggw'))$value c(0, a, b, c., nc) } lmrob.efficiency <- function(psi, cc, ...) { ipsi <- .psi2ipsi(psi) ccc <- .psi.conv.cc(psi, cc=cc) integrate(function(x) .Mpsi(x, ccc=ccc, ipsi=ipsi, deriv=1)*dnorm(x), -Inf, Inf, ...)$value^2 / integrate(function(x) .Mpsi(x, ccc=ccc, ipsi=ipsi)^2 *dnorm(x), -Inf, Inf, ...)$value } lmrob.bp <- function(psi, cc, ...) integrate(function(x) Mchi(x, cc, psi)*dnorm(x), -Inf, Inf, ...)$value .psi.lqq.findc <- function(ms, b.c, eff = NA, bp = NA, interval = c(0.1, 4), subdivisions = 100L, rel.tol = .Machine$double.eps^0.25, abs.tol = rel.tol, tol = .Machine$double.eps^0.25, maxiter = 1000) { bcs <- function(cc) c(b.c*cc, cc, 1-ms) t.fun <- if (!is.na(eff)) { if (!is.na(bp)) warning("tuning constants for \"lqq\" psi: both 'eff' and 'bp' specified, ignoring 'bp'") function(c) lmrob.efficiency('lqq', bcs(c), subdivisions=subdivisions, rel.tol=rel.tol, abs.tol=abs.tol) - eff } else { if (is.na(bp)) stop('Error: neither breakdown point nor efficiency specified') function(c) lmrob.bp('lqq', bcs(c), subdivisions=subdivisions, rel.tol=rel.tol, abs.tol=abs.tol) - bp } c. <- tryCatch(uniroot(t.fun, interval=interval, tol=tol, maxiter=maxiter)$root, error=function(e)e) if (inherits(c., 'error')) stop(gettextf('unable to find constants for "lqq" psi function: %s', c.$message), domain=NA) else bcs(c.) 
} .psi.const <- function(cc, psi) { switch(psi, "ggw" = { if (!(isTRUE(all.equal(cc, c(-.5, 1, 0.95, NA))) || isTRUE(all.equal(cc, c(-.5, 1, 0.85, NA))) || isTRUE(all.equal(cc, c(-.5, 1, NA, 0.5))) || isTRUE(all.equal(cc, c(-.5, 1.5, 0.95, NA))) || isTRUE(all.equal(cc, c(-.5, 1.5, 0.85, NA))) || isTRUE(all.equal(cc, c(-.5, 1.5, NA, 0.5))))) { attr(cc, 'constants') <- .psi.ggw.findc(ms=cc[[1]], b=cc[[2]], eff=cc[[3]], bp=cc[[4]]) } }, "lqq" = { if (!(isTRUE(all.equal(cc, c(-.5, 1.5, 0.95, NA))) || isTRUE(all.equal(cc, c(-.5, 1.5, NA, 0.5))))) { attr(cc, 'constants') <- .psi.lqq.findc(ms=cc[[1]], b.c=cc[[2]], eff=cc[[3]], bp=cc[[4]]) } }, stop("method for psi function ", psi, " not implemented")) cc } Mpsi <- function(x, cc, psi, deriv=0) { x[] <- .Call(R_psifun, x, .psi.conv.cc(psi, cc), .psi2ipsi(psi), deriv) x } .Mpsi <- function(x, ccc, ipsi, deriv=0) .Call(R_psifun, x, ccc, ipsi, deriv) Mchi <- function(x, cc, psi, deriv=0) { x[] <- .Call(R_chifun, x, .psi.conv.cc(psi, cc), .psi2ipsi(psi), deriv) x } .Mchi <- function(x, ccc, ipsi, deriv=0) .Call(R_chifun, x, ccc, ipsi, deriv) Mwgt <- function(x, cc, psi) { x[] <- .Call(R_wgtfun, x, .psi.conv.cc(psi, cc), .psi2ipsi(psi)) x } .Mwgt <- function(x, ccc, ipsi) .Call(R_wgtfun, x, ccc, ipsi) .Mwgt.psi1 <- function(psi, cc = .Mpsi.tuning.default(psi)) { ipsi <- .psi2ipsi(psi) ccc <- .psi.conv.cc(psi, cc) function(x, deriv = 0) if(deriv) .Mpsi(x, ccc, ipsi, deriv=deriv) else .Mwgt(x, ccc, ipsi) } MrhoInf <- function(cc, psi) { cc <- .psi.conv.cc(psi, cc) .Call(R_rho_inf, cc, .psi2ipsi(psi)) } .MrhoInf <- function(ccc, ipsi) .Call(R_rho_inf, ccc, ipsi) lmrob.rweights <- function(resid, scale, cc, psi, eps = 16 * .Machine$double.eps) { if (scale == 0) { m <- max(ar <- abs(resid)) if(m == 0) numeric(length(ar)) else as.numeric(ar < eps * m) } else Mwgt(resid / scale, cc, psi) } lmrob.E <- function(expr, control, dfun = dnorm, use.integrate = FALSE, obj, ...)
{ expr <- substitute(expr) if (missing(control) && !missing(obj)) control <- obj$control lenvir <- if (!missing(control)) { psi <- control$psi if (is.null(psi)) stop('parameter psi is not defined') c.psi <- control[[if (control$method %in% c('S', 'SD')) "tuning.chi" else "tuning.psi"]] if (!is.numeric(c.psi)) stop('tuning parameter (chi/psi) is not numeric') list(psi = function(r, deriv = 0) Mpsi(r, c.psi, psi, deriv), chi = function(r, deriv = 0) Mchi(r, c.psi, psi, deriv), wgt = function(r) Mwgt(r, c.psi, psi)) } else list() pf <- parent.frame() FF <- function(r) eval(expr, envir = c(list(r = r), lenvir), enclos = pf) * dfun(r) if (isTRUE(use.integrate)) { integrate(FF, -Inf,Inf, ...)$value } else { gh <- ghq(if(is.null(control$numpoints)) 13 else control$numpoints) sum(gh$weights * FF(gh$nodes)) } } ghq <- function(n = 1, modify = TRUE) { n <- as.integer(n) if(n<0) stop("need non-negative number of nodes") if(n==0) return(list(nodes=numeric(0), weights=numeric(0))) i1 <- seq_len(n-1L) muzero <- sqrt(pi) b <- sqrt(i1/2) A <- numeric(n*n) A[(n+1)*(i1-1)+2] <- b A[(n+1)*i1] <- b dim(A) <- c(n,n) vd <- eigen(A,symmetric=TRUE) n..1 <- n:1L w <- vd$vectors[1, n..1] w <- muzero * w^2 x <- vd$values[n..1] list(nodes=x, weights= if (modify) w*exp(x^2) else w) } .convSs <- function(ss) switch(ss, "simple"= 0L, "nonsingular"= 1L, stop("unknown setting for parameter ss")) outlierStats <- function(object, x = object$x, control = object$control, epsw = control$eps.outlier, epsx = control$eps.x, warn.limit.reject = control$warn.limit.reject, warn.limit.meanrw = control$warn.limit.meanrw ) { rw <- object$rweights if (is.function(epsw)) epsw <- epsw(nobs(object, use.fallback = TRUE)) if (!is.numeric(epsw) || length(epsw) != 1) stop("'epsw' must be numeric(1) or a function of nobs(obj.) which returns a numeric(1)") rj <- abs(rw) < epsw if (NROW(x) != length(rw)) stop("number of rows in 'x' and length of 'object$rweights' must be the same") if (is.function(epsx)) epsx <- epsx(max(abs(x))) if (!is.numeric(epsx) || length(epsx) != 1) stop("'epsx' must be numeric(1) or a function of max(abs(x)) which returns a numeric(1)") xnz <- abs(x) > epsx cc <- function(idx) { nnz <- sum(idx) Fr <- sum(rj[idx]) c(N.nonzero = nnz, N.rejected = Fr, Ratio = Fr / nnz, Mean.RobWeight = mean(rw[idx])) } report <- t(apply(cbind(Overall=TRUE, xnz[, colSums(xnz) < NROW(xnz)]), 2, cc)) shout <- FALSE lbr <- rep.int(FALSE, nrow(report)) if (!is.null(warn.limit.reject)) { lbr <- report[, "Ratio"] >= warn.limit.reject shout <- any(lbr & !is.na(lbr)) } if (!is.null(warn.limit.meanrw)) { lbr <- lbr | report[, "Mean.RobWeight"] <= warn.limit.meanrw shout <- shout || any(lbr & !is.na(lbr)) } if (shout) { nbr <- rownames(report)[lbr] attr(report, "warning") <- paste("Possible local breakdown of", paste0("'", nbr, "'", collapse=", ")) warning("Detected possible local breakdown of ", control$method, "-estimate in ", if (length(nbr) > 1) paste(length(nbr), "coefficients") else "coefficient", " ", paste0("'", nbr, "'", collapse=", "), ".", if ("KS2014" %in% control$setting) "" else "\nUse lmrob argument 'setting=\"KS2014\"' to avoid this problem." ) } report }
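## A minimal sanity-check sketch for lmrob.E() and ghq() above (assumes the
## definitions in this file are loaded): with r ~ N(0, 1), E[r^2] = 1 and
## E[r^4] = 3; use.integrate = TRUE sidesteps the need for a 'control' list.
stopifnot(all.equal(lmrob.E(r^2, use.integrate = TRUE), 1, tolerance = 1e-4),
          all.equal(lmrob.E(r^4, use.integrate = TRUE), 3, tolerance = 1e-4))
## ghq() returns Gauss-Hermite nodes/weights; with modify = TRUE the weights
## absorb exp(x^2), so sum(w * f(x) * dnorm(x)) approximates E[f(r)].
gh <- ghq(13)
sum(gh$weights * gh$nodes^2 * dnorm(gh$nodes))  # approximately 1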
as.integer.lfactor <- function(x, ...) { as.integer(as.factor.lfactor(x)) }
skellam.reg <- function(y, x) { n <- length(y) x <- stats::model.matrix( ~., data.frame(x) ) p <- dim(x)[2] skelreg <- function(pa) { b1 <- pa[1:p] ; b2 <- pa[ -c(1:p) ] a1 <- x %*% b1 ; a2 <- x %*% b2 lam1 <- exp(a1) ; lam2 <- exp(a2) a <- 2 * sqrt(lam1 * lam2) sum(lam1 + lam2) - 0.5 * sum(y * (a1 - a2) ) - sum( log( besselI(a, y) ) ) } oldwarn <- options(warn = -1) ; on.exit(options(oldwarn)) mod <- stats::nlm(skelreg, stats::rnorm(2 * p), iterlim = 5000 ) mod <- stats::nlm(skelreg, mod$estimate, iterlim = 5000 ) mod <- stats::optim(mod$estimate, skelreg, hessian = TRUE, control = list(maxit = 5000) ) b1 <- mod$par[1:p] ; b2 <- mod$par[ -c(1:p) ] s <- diag( solve(mod$hessian) ) s1 <- sqrt(s[1:p]) ; s2 <- sqrt(s[ -c(1:p) ]) param1 <- cbind(b1, s1, b1 / s1, stats::pchisq( (b1 / s1)^2, 1, lower.tail = FALSE) ) param2 <- cbind(b2, s2, b2 / s2, stats::pchisq( (b2 / s2)^2, 1, lower.tail = FALSE) ) rownames(param1) <- rownames(param2) <- colnames(x) colnames(param1) <- colnames(param2) <- c("Estimate", "Std. Error", "Wald value", "p-value") list(loglik = -mod$value, param1 = param1, param2 = param2) }
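## A minimal usage sketch for skellam.reg() with simulated data (hypothetical
## example, not from package docs): the Skellam log-likelihood is
## log P(Y = y) = -(lam1 + lam2) + (y/2) * (a1 - a2) + log I_y(2 * sqrt(lam1 * lam2)),
## so y below is a difference of two Poisson counts whose log-means are linear in x0.
set.seed(1)
x0 <- rnorm(300)
y0 <- rpois(300, exp(0.6 + 0.3 * x0)) - rpois(300, exp(0.2 - 0.4 * x0))
fit <- skellam.reg(y0, x0)
fit$param1  # Wald table for the lambda1 (positive-count) linear predictor
fit$param2  # Wald table for the lambda2 linear predictor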
summary.LMmixed<-function(object,...){ cat("Call:\n") print(object$call) cat("\nCoefficients:\n") cat("\nMass probabilities:\n") print(round(object$la,4)) if(!is.null(object$sela)){ cat("\nStandard errors for the mass probabilities:\n") print(round(object$sela,4)) } cat("\nInitial probabilities:\n") print(round(object$Piv,4)) if(!is.null(object$sePiv)){ cat("\nStandard errors for the initial probabilities:\n") print(round(object$sePiv,4)) } cat("\nTransition probabilities:\n") print(round(object$Pi,4)) if(!is.null(object$sePi)){ cat("\nStandard errors for the transition probabilities:\n") print(round(object$sePi,4)) } cat("\nConditional response probabilities:\n") print(round(object$Psi,4)) if(!is.null(object$sePsi)){ cat("\nStandard errors for the conditional response probabilities:\n") print(round(object$sePsi,4)) } }
read_file <- function(path) { n <- file.info(path)$size readChar(path, n, TRUE) }
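## A tiny round-trip sketch for read_file(): slurp a whole file into a single
## string (the third readChar() argument is useBytes = TRUE).
tmp <- tempfile(fileext = ".txt")
writeLines(c("hello", "world"), tmp)
cat(read_file(tmp))
unlink(tmp)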
read1 = read.csv(file="./data/iris.csv",header = TRUE,sep = ",") str(read1) class(read1) read2 = read.table(file="./data/iris.csv",header = TRUE,sep = ",") str(read2) class(read2) read3 = read.delim(file="./data/iris.csv",header = TRUE,sep = ",") str(read3) class(read3)
verify_tinytest_used <- function() { if (!dir.exists("tests") || !file.exists(file.path("tests", "tinytest.R")) || !dir.exists(file.path("inst", "tinytest"))) { stop("tinytest is not currently used, please set it up, e.g. by tinytest::setup_tinytest()") } return(TRUE) }
print.matreg <- function(x, digits=x$digits, signif.stars=getOption("show.signif.stars"), signif.legend=signif.stars, ...) { mstyle <- .get.mstyle("crayon" %in% .packages()) .chkclass(class(x), must="matreg") if (missing(digits)) { digits <- .get.digits(xdigits=x$digits, dmiss=TRUE) } else { digits <- .get.digits(digits=digits, xdigits=x$digits, dmiss=FALSE) } if (!exists(".rmspace")) cat("\n") if (x$test == "t") { res.table <- data.frame(estimate=.fcf(c(x$tab$beta), digits[["est"]]), se=.fcf(x$tab$se, digits[["se"]]), tval=.fcf(x$tab$tval, digits[["test"]]), df=round(x$tab$df,2), pval=.pval(x$tab$pval, digits[["pval"]]), ci.lb=.fcf(x$tab$ci.lb, digits[["ci"]]), ci.ub=.fcf(x$tab$ci.ub, digits[["ci"]]), stringsAsFactors=FALSE) } else { res.table <- data.frame(estimate=.fcf(c(x$tab$beta), digits[["est"]]), se=.fcf(x$tab$se, digits[["se"]]), zval=.fcf(x$tab$zval, digits[["test"]]), pval=.pval(x$tab$pval, digits[["pval"]]), ci.lb=.fcf(x$tab$ci.lb, digits[["ci"]]), ci.ub=.fcf(x$tab$ci.ub, digits[["ci"]]), stringsAsFactors=FALSE) } rownames(res.table) <- rownames(x$tab) signif <- symnum(x$tab$pval, corr=FALSE, na=FALSE, cutpoints=c(0, 0.001, 0.01, 0.05, 0.1, 1), symbols = c("***", "**", "*", ".", " ")) if (signif.stars) { res.table <- cbind(res.table, signif) colnames(res.table)[ncol(res.table)] <- "" } tmp <- capture.output(print(res.table, quote=FALSE, right=TRUE, print.gap=2)) .print.table(tmp, mstyle) if (signif.legend) { cat("\n") cat(mstyle$legend("---\nSignif. codes: "), mstyle$legend(attr(signif, "legend"))) cat("\n") } if (!exists(".rmspace")) cat("\n") invisible() }
matrixInputVind <- function (inputId, label, data, dataOn, NT, NTnames, nb.IS) { tagList( singleton(tags$head(tags$link(rel = "stylesheet", type = "text/css", href = "css/tableinput.css"), tags$script(src = "js/tableinput.js")) ), tags$div(class = "control-group tableinput-container", tags$table(id = inputId, class = "tableinput data table table-bordered table-condensed", tags$colgroup( lapply(names(data), function(name) { tags$col(`data-name` = name, `data-field` = name,`data-type` = "numeric") }) ), tags$thead( tags$tr(lapply(names(data), function(name) { tags$th(withMathJax(name)) })) ), tags$tbody(lapply(1:nrow(data), function(row) { tags$tr( lapply(1:ncol(data), function(col) { cellColor <- ifelse(col <= row & dataOn[row, col], ifelse(col == row,"CoVarMat_Var","CoVarMat_Covar"),"CoVarMat_noInput") cellHide <- ifelse(col <= row & dataOn[row, col], "","invisible") tags$td(class=cellColor,div(class = cellHide, contenteditable="true", as.character(data[row, col]))) }) ) })) ) ) ) }
chinese_utf8 <- function(x){ res <- list() for (i in seq_along(x)) { f <- capture.output(tmcn::catUTF8(x[i])) t <- paste0('tmcn::toUTF8("', f, '")') res <- c(res, list(parse(text = t))) } names(res) <- x res }
startOptim <- function(n.ints, t.alpha){ out <- c(rep(0, n.ints - 1), rep(t.alpha, n.ints)) out }
knitr::opts_chunk$set( collapse = TRUE, comment = "#>" ) library(rgtmx)
ladder_moment <- function(simulations, level) { if(level < 0) stop("Level must be positive") sim <- id <- sim_id <- NULL kendall_rw <- simulations$simulation kendall_rw <- dplyr::group_by(kendall_rw, sim_id) kendall_rw <- dplyr::mutate(kendall_rw, id = seq_len(dplyr::n())) all_sims <- dplyr::ungroup(dplyr::distinct(kendall_rw, sim_id)) kendall_rw <- dplyr::filter(kendall_rw, sim > level) kendall_rw <- dplyr::summarise(kendall_rw, ladder_moment = min(id)) kendall_rw <- dplyr::ungroup(kendall_rw) all_sims <- dplyr::left_join(all_sims, kendall_rw, by = "sim_id") class(all_sims) <- c("kendall_barrier_crossing", class(all_sims)) all_sims } ladder_height <- function(simulations, level) { if(level < 0) stop("Level must be positive") sim <- id <- sim_id <- NULL kendall_rw <- simulations$simulation kendall_rw <- dplyr::group_by(kendall_rw, sim_id) kendall_rw <- dplyr::mutate(kendall_rw, id = seq_len(dplyr::n())) all_sims <- dplyr::ungroup(dplyr::distinct(kendall_rw, sim_id)) kendall_rw <- dplyr::filter(kendall_rw, sim > level) kendall_rw <- dplyr::summarise(kendall_rw, ladder_moment = min(sim)) kendall_rw <- dplyr::ungroup(kendall_rw) all_sims <- dplyr::left_join(all_sims, kendall_rw, by = "sim_id") class(all_sims) <- c("kendall_barrier_crossing", class(all_sims)) all_sims } print.kendall_barrier_crossing <- function(x, ...) { quantiles <- quantile(x$ladder_moment, na.rm = TRUE, probs = seq(0, 1, by = 0.1)) cat("Mean of the distribution: ", mean(x$ladder_moment, na.rm = TRUE), "\n") cat("Standard deviation of the distribution: ", sd(x$ladder_moment, na.rm = TRUE), "\n") cat("Number of observations: ", max(x$sim_id), "\n") cat("Times the level was not crossed: ", sum(!is.finite(x$ladder_moment)), "\n") cat("Quantiles of the distribution: \n") print(quantiles) invisible(x) } plot.kendall_barrier_crossing <- function(x, ...) { mean_value <- mean(x$ladder_moment, na.rm = TRUE) ggplot2::ggplot(x, ggplot2::aes_string(x = 'ladder_moment')) + ggplot2::geom_histogram() + ggplot2::geom_vline(xintercept = mean_value) + ggplot2::theme_bw() + ggplot2::xlab("First ladder moments") + ggplot2::ylab("Count") }
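## A minimal sketch with a mock 'simulations' object; the structure (a list
## whose $simulation element has columns sim_id and sim) is inferred from the
## code above, not from package documentation.
if (requireNamespace("dplyr", quietly = TRUE)) {
  set.seed(1)
  mock <- list(simulation = data.frame(
    sim_id = rep(seq_len(50), each = 100),
    sim = as.vector(apply(matrix(rexp(50 * 100), nrow = 100), 2, cumsum))
  ))
  print(ladder_moment(mock, level = 50))  # first index at which each path exceeds 50
}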
csu_bar_top <- function(df_data, var_value, var_bar, group_by=NULL, nb_top = 10, plot_title=NULL, plot_subtitle=NULL, xtitle= NULL, label_by=NULL, color=NULL, digits = 1) { core.error_variable(df_data, var_value, csu_bar_top) core.error_variable(df_data, var_bar, csu_bar_top, type="") dt <- data.table(df_data) if (!is.null(label_by) & is.null(group_by)) { stop(paste0("There is no group_by variable defined (group_by=NULL) to use with the label_by defined: See documentation: Help(", deparse(substitute(csu_bar_top)), ")")) } csu_ratio = 1 csu_bar_label_size = 5 line_size <- 0.4 text_size <- 14 setnames(dt, var_value, "CSU_ASR") setnames(dt, var_bar, "CSU_BAR") bool_group <- !is.null(group_by) if (bool_group) { core.error_variable(df_data, group_by, csu_bar_top, type="") setnames(dt, group_by, "CSU_BY") if (!is.factor(dt$CSU_BY)) { dt[,CSU_BY:=as.factor(CSU_BY)] } if (length(levels(dt$CSU_BY)) != 2) { stop(paste0("group_by variable must have only 2 values: See documentation: Help(", deparse(substitute(csu_bar_top)), ")")) } if (!is.null(label_by)) { dt[,CSU_BY:=factor(CSU_BY, labels=label_by)] } dt[, CSU_ASR:= as.double(CSU_ASR)] dt_test <- dt[, c("CSU_BAR","CSU_BY"), with=FALSE] } else { dt_test <- dt[, c("CSU_BAR"), with=FALSE] } if (nrow(dt_test) > nrow(unique(dt_test))) { stop(paste0("There is more than 1 value for each bar: See documentation: Help(", deparse(substitute(csu_bar_top)), ")")) } dt <- core.csu_dt_rank(dt, var_value = "CSU_ASR", var_rank = "CSU_BAR",number = nb_top) dt_return <- dt dt$CSU_BAR <-core.csu_legend_wrapper(dt$CSU_BAR, 15) if (bool_group) { dt[CSU_BY==levels(dt$CSU_BY)[[1]], asr_plot:= CSU_ASR*(-1)] dt[CSU_BY==levels(dt$CSU_BY)[[2]], asr_plot:= CSU_ASR] } else { dt[, asr_plot:= CSU_ASR] } dt$CSU_BAR <- factor(dt$CSU_BAR) factor_order <- unique(dt[, c("CSU_BAR", "CSU_RANK"), with=FALSE]) dt$CSU_BAR <- factor(dt$CSU_BAR, levels = rev(setkeyv(factor_order, "CSU_RANK")$CSU_BAR)) if (bool_group) { tick_minor_list <- core.csu_tick_generator(max = max(dt$CSU_ASR), 0)$tick_list nb_tick <- length(tick_minor_list) tick_space <- tick_minor_list[nb_tick] - tick_minor_list[nb_tick-1] if ((tick_minor_list[nb_tick] - max(dt$CSU_ASR))/tick_space < 1/4){ tick_minor_list[nb_tick+1] <- tick_minor_list[nb_tick] + tick_space } tick_major <- tick_minor_list[1:length(tick_minor_list) %% 2 == 1] tick_major_list <- c(rev(-tick_major),tick_major[tick_major!=0]) tick_label <- c(rev(tick_major),tick_major[tick_major!=0]) tick_minor_list <- c(rev(-tick_minor_list),tick_minor_list[tick_minor_list!=0]) } else { tick_major_list <- core.csu_tick_generator(max = max(dt$CSU_ASR), 0)$tick_list nb_tick <- length(tick_major_list) tick_space <- tick_major_list[nb_tick] - tick_major_list[nb_tick-1] if ((tick_major_list[nb_tick] - max(dt$CSU_ASR))/tick_space < 1/4){ tick_major_list[nb_tick+1] <- tick_major_list[nb_tick] + tick_space } tick_minor_list <-seq(tick_major_list[1],tail(tick_major_list,1),tick_space/2) tick_label <- tick_major_list } dt$asr_label <- dt$CSU_ASR + (tick_space*0.1) dt$asr_round <- format(round(dt$CSU_ASR, digits = digits), nsmall = digits) if (bool_group){ dt[CSU_BY==levels(dt$CSU_BY)[[1]], asr_label:= asr_label*(-1)] csu_plot <- ggplot(dt, aes(CSU_BAR, asr_plot, fill=CSU_BY)) if (is.null(color)) { color <- hue_pal()(2) } } else { csu_plot <- ggplot(dt, aes(CSU_BAR, asr_plot, fill=CSU_BAR)) if (is.null(color)) { color <- rep(hue_pal()(1), length(levels(dt$CSU_BAR))) } else if (color[1] %in% names(dt)){ dt_label_order <- setkey(unique(dt[, c("CSU_ASR",color, "CSU_RANK"), 
with=FALSE]), CSU_RANK) color <- as.character(rev(dt_label_order[[color]])) } else if (length(color) != length(levels(dt$CSU_BAR))) { color <- rep(color[1], length(levels(dt$CSU_BAR))) } } csu_plot <- csu_plot + geom_bar(stat="identity", width = 0.8)+ geom_hline(yintercept = 0, colour="black",size = line_size) if (bool_group){ csu_plot <- csu_plot+ geom_text(data=dt[asr_label > 0, ],aes(CSU_BAR, asr_label,label=asr_round), size = csu_bar_label_size, hjust = 0)+ geom_text(data=dt[asr_label < 0, ],aes(CSU_BAR, asr_label,label=asr_round), size = csu_bar_label_size, hjust = 1)+ coord_flip(ylim = c(tick_minor_list[1]-(tick_space*0.25),tick_minor_list[length(tick_minor_list)]+(tick_space*0.25)), expand = TRUE) } else { csu_plot <- csu_plot+ geom_text(aes(CSU_BAR, asr_label,label=asr_round), size = csu_bar_label_size, hjust = 0)+ coord_flip(ylim = c(0,tick_major_list[length(tick_major_list)]+(tick_space*0.25)), expand = TRUE) } csu_plot <- csu_plot+ scale_y_continuous(name = xtitle, breaks=tick_major_list, minor_breaks = tick_minor_list, labels=tick_label)+ scale_fill_manual(name="", values= color, drop = FALSE)+ labs(title = plot_title, subtitle = plot_subtitle)+ theme( aspect.ratio = csu_ratio, plot.background= element_blank(), panel.background = element_blank(), panel.grid.major.y= element_blank(), panel.grid.major.x= element_line(colour = "grey70",size = line_size), panel.grid.minor.x= element_line(colour = "grey70",size = line_size), plot.title = element_text(size=18, margin=margin(0,0,15,0),hjust = 0.5), plot.subtitle = element_text(size=16, margin=margin(0,0,15,0),hjust = 0.5), plot.caption = element_text(size=12, margin=margin(15,0,0,0)), plot.margin=margin(20,20,20,20), axis.title = element_text(size=text_size), axis.title.x=element_text(margin=margin(10,0,0,0)), axis.title.y = element_blank(), axis.text = element_text(size=text_size, colour = "black"), axis.text.x = element_text(size=text_size), axis.text.y = element_text(size=text_size), axis.ticks.x= element_line(colour = "black", size = line_size), axis.ticks.y= element_blank(), axis.ticks.length = unit(0.2, "cm"), axis.line.y = element_blank(), axis.line.x = element_line(colour = "black", size = line_size, linetype = "solid") ) if (bool_group){ csu_plot <- csu_plot + theme( legend.key = element_rect(fill="transparent"), legend.position = "bottom", legend.text = element_text(size = text_size), legend.key.height = unit(0.6,"cm"), legend.key.width =unit(1.5,"cm"), legend.margin = margin(0, 0, 0, 0) ) } else { csu_plot <- csu_plot + theme(legend.position = "none") } print(csu_plot) setnames(dt_return, "CSU_BAR",var_bar) setnames(dt_return, "CSU_ASR", var_value) setnames(dt_return, "CSU_RANK","cancer_rank") setkeyv(dt_return, c("cancer_rank",var_bar)) if (bool_group) { setnames(dt_return, "CSU_BY", group_by) } return(dt_return) }
wrap.rotation <- function(input){ if (is.array(input)){ if (!check_3darray(input, symmcheck=FALSE)){ stop("* wrap.rotation : input does not follow the size requirement as described.") } N = dim(input)[3] tmpdata = list() for (n in 1:N){ tmpdata[[n]] = input[,,n] } } else if (is.list(input)){ tmpdata = input } else { stop("* wrap.rotation : input should be either a 3d array or a list.") } if (!check_list_eqsize(tmpdata, check.square=TRUE)){ stop("* wrap.rotation : elements are not of same size.") } N = length(tmpdata) for (n in 1:N){ single_rotcheck(tmpdata[[n]], n) } output = list() output$data = tmpdata output$size = dim(tmpdata[[1]]) output$name = "rotation" return(structure(output, class="riemdata")) } single_rotcheck <- function(x, id=0){ p = nrow(x) if (nrow(x)!=ncol(x)){ stop(paste0("* wrap.rotation : ",id,"-th element is not a square matrix.")) } if ((norm((t(x)%*%x)-diag(p),"F")/sqrt(p) >= 1e-10)){ stop(paste0("* wrap.rotation : ",id,"-th element does not satisfy X'*X = I.")) } if (abs(base::det(x)-1) >= 1e-10){ stop(paste0("* wrap.rotation : ",id,"-th element's determinant is not close to 1.")) } return(TRUE) }
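## A quick self-check sketch: a rotation about the z-axis satisfies both
## orthogonality and det = 1, so single_rotcheck() accepts it.
th <- pi / 7
Rz <- rbind(c(cos(th), -sin(th), 0), c(sin(th), cos(th), 0), c(0, 0, 1))
stopifnot(single_rotcheck(Rz))
## wrap.rotation() additionally needs the check_3darray()/check_list_eqsize()
## helpers defined elsewhere in the package, so it is not exercised here.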
clearpars <- function(x, ind){ x$std[ind] <- NA x$se[ind] <- NA x$p[ind] <- NA x$se_boot[ind] <- NA x$p_boot[ind] <- NA x$mi[ind] <- NA x$pmi[ind] <- NA x$epc[ind] <- NA x$mi_free[ind] <- NA x$pmi_free[ind] <- NA x$epc_free[ind] <- NA x$mi_equal[ind] <- NA x$pmi_equal[ind] <- NA x$epc_equal[ind] <- NA x }
ttp <- function(...) { ttestPower(...) }
"sacCer3"
Density.Feature <- function(test_seq){ if(!methods::is(test_seq, "DNAStringSet")){stop("The dataset must be of class DNAStringSet")} if(length(unique(width(test_seq)))>1){stop("Each sequence must be of equal length")} zz <- as.character(test_seq) my.den <- function (dat){ ss <- unlist(strsplit(dat, split="")) ss[ss=="T"] <- "X" z <- vector(mode="numeric", length=length(ss)) IA <- as.numeric(which(ss=="A")) lA <- length(IA) seA <- sequence(lA) z[IA] <- seA/IA IT <- as.numeric(which(ss=="X")) lT <- length(IT) seT <- sequence(lT) z[IT] <- seT/IT IG <- as.numeric(which(ss=="G")) lG <- length(IG) seG <- sequence(lG) z[IG] <- seG/IG IC <- as.numeric(which(ss=="C")) lC <- length(IC) seC <- sequence(lC) z[IC] <- seC/IC z } x1 <- round(t(sapply(zz, my.den)),3) return(x1) }
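## A minimal sketch, assuming Biostrings is installed and attached (the
## function calls width() and the DNAStringSet as.character() method): two
## equal-length sequences yield a 2 x width matrix of running densities.
if (requireNamespace("Biostrings", quietly = TRUE)) {
  suppressPackageStartupMessages(library(Biostrings))
  seqs <- DNAStringSet(c("ACGTACGT", "TTGGCCAA"))
  Density.Feature(seqs)
}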
find.snps.in.regions <- function(stat, options){ if(is.null(options$excluded.regions)){ return(NULL) } msg <- paste('Removing SNPs in specified regions:', date()) if(options$print) message(msg) exc.reg <- options$excluded.regions nfiles <- length(stat) exc.snps <- NULL comment <- NULL for(i in 1:nfiles){ if(any(is.na(stat[[i]]$Chr))){ msg <- 'Column \'Chr\' is missing or has NA in summary.files but options$excluded.regions is specified' stop(msg) } if(any(is.na(stat[[i]]$Pos))){ msg <- 'Column \'Pos\' is missing or has NA in summary.files but options$excluded.regions is specified' stop(msg) } for(j in 1:nrow(exc.reg)){ id <- which(stat[[i]]$Chr == exc.reg$Chr[j]) if(length(id) == 0){ next } st <- stat[[i]][id, c('SNP', 'Pos')] id <- which(st$Pos >= exc.reg$Start[j] & st$Pos <= exc.reg$End[j]) if(length(id) == 0){ next } exc.snps <- c(exc.snps, st$SNP[id]) com <- paste0('Chr_', exc.reg$Chr[j], '_Start_', exc.reg$Start[j], '_End_', exc.reg$End[j]) comment <- c(comment, rep(com, length(id))) } } if(!is.null(exc.snps)){ tmp <- data.frame(exc.snps, comment, stringsAsFactors = FALSE) dup <- duplicated(tmp) tmp <- tmp[!dup, , drop = FALSE] exc.snps <- tmp$exc.snps comment <- tmp$comment } list(exc.snps = exc.snps, comment = comment) }
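## A minimal sketch with mock inputs (shapes inferred from the code above):
## 'stat' is a list of per-file data frames with SNP/Chr/Pos columns, and
## options$excluded.regions holds Chr/Start/End windows to drop.
mock.stat <- list(data.frame(SNP = paste0("rs", 1:5), Chr = c(1, 1, 2, 2, 2),
                             Pos = c(100, 250, 50, 300, 400), stringsAsFactors = FALSE))
mock.opts <- list(print = FALSE, excluded.regions = data.frame(Chr = 2, Start = 100, End = 350))
find.snps.in.regions(mock.stat, mock.opts)  # flags rs4 with its region label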
`samples.id` <- function(x,id.cols=c("sample","repli")){ mmnt.lines <- which ( x[[3]][,"sample_type"]=="measurement") temp <- NULL for (i in id.cols){ temp <- paste(temp,as.character(x[[3]][mmnt.lines,i]),sep="") } identifier <- unique(temp) replis <- (length(temp)/length(identifier)) count <- length(identifier) print(paste("found",count,"individual samples, spotted in",replis ,"replicates")) return(identifier) }
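## A minimal sketch with a mock object; the expected layout (x[[3]] is a data
## frame with sample_type plus the id columns) is inferred from the code above.
mock.x <- list(NULL, NULL, data.frame(sample_type = rep("measurement", 4),
                                      sample = c("A", "A", "B", "B"),
                                      repli = c(1, 2, 1, 2)))
samples.id(mock.x, id.cols = "sample")  # 2 samples, each spotted in 2 replicates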
predict.fcrr <- function(object, newdata, getBootstrapVariance = TRUE, var.control = varianceControl(B = 100, useMultipleCores = FALSE), type = "none", alpha = 0.05, tL = NULL, tU = NULL, ...){ if(!inherits(object, "fcrr")) { stop("Object 'object' must be of class 'fcrr'") } if(is.null(object$breslowJump)) { stop("Breslow jumps were not calculated. Please re-run model with 'getBreslowJumps = TRUE'") } if(is.null(object$df)) { stop("Ordered data frame not returned. Please re-run model with 'returnDataFrame = TRUE'") } if(!(type %in% c("none", "bands", "interval"))) { type = "none" warning("type is incorrectly specified. Valid options are 'bands', 'interval', 'none'. Set to 'none'") } if(alpha <= 0 || alpha >= 1) { alpha = 0.05 warning("alpha is incorrectly specified. Set to 0.05") } if(is.null(tL)) tL <- min(object$uftime) if(is.null(tU)) tU <- max(object$uftime) if(tL <= 0 || tL >= max(object$uftime)) { tL <- min(object$uftime) warning("tL is incorrectly specified (cannot be nonpositive or larger than the largest observed event time). Set to smallest observed event time") } if(tU <= 0 || tU <= min(object$uftime)) { tU <- max(object$uftime) warning("tU is incorrectly specified (cannot be nonpositive or smaller than the smallest observed event time). Set to largest observed event time") } min.idx = min(which(object$uftime >= tL)) max.idx = max(which(object$uftime <= tU)) if (length(object$coef) == length(newdata)) { CIF.hat <- cumsum(exp(sum(newdata * object$coef)) * object$breslowJump[, 2]) CIF.hat <- 1 - exp(-CIF.hat) } else { stop("Parameter dimension of 'newdata' does not match dimension of '$coef' from object.") } res <- data.frame(ftime = object$uftime, CIF = CIF.hat, lower = NA, upper = NA) if(getBootstrapVariance) { controls = var.control B <- controls$B seed <- controls$seed mcores <- controls$mcores if(mcores) `%mydo%` <- `%dopar%` else `%mydo%` <- `%do%` i <- NULL set.seed(seed) seeds = sample.int(2^25, B, replace = FALSE) ftime <- object$df$ftime fstatus <- object$df$fstatus n <- length(ftime) X <- as.matrix(object$df[, -(1:2)]) CIF.boot <- foreach(i = seeds, .combine = 'rbind', .packages = "fastcmprsk") %mydo% { set.seed(i) bsamp <- sample(n, n, replace = TRUE) fit.bs <- fastCrr(Crisk(ftime[bsamp], fstatus[bsamp]) ~ X[bsamp, ], variance = FALSE, ...) CIF.bs <- 1 - exp(-cumsum(exp(sum(newdata * fit.bs$coef)) * fit.bs$breslowJump[, 2])) return(evalstep(fit.bs$breslowJump$time, stepf = CIF.bs, subst = 1E-16, newtime = object$uftime)) } CIF.hat <- log(-log(CIF.hat)) CIF.boot <- log(-log(CIF.boot)) CIF.sd <- apply(CIF.boot, 2, sd) if(type == "bands") { sup <- apply(CIF.boot, 1, function(x) max((abs(x - CIF.hat) / CIF.sd)[min.idx:max.idx])) z.stat <- quantile(sup, 1 - alpha / 2) llim <- CIF.hat + z.stat * CIF.sd ulim <- CIF.hat - z.stat * CIF.sd res <- data.frame(ftime = object$uftime, CIF = exp(-exp(CIF.hat)), lower = exp(-exp(llim)), upper = exp(-exp(ulim))) } else if (type == "interval") { llim <- CIF.hat + qnorm(1 - alpha / 2) * CIF.sd ulim <- CIF.hat - qnorm(1 - alpha / 2) * CIF.sd res <- data.frame(ftime = object$uftime, CIF = exp(-exp(CIF.hat)), lower = exp(-exp(llim)), upper = exp(-exp(ulim))) } } res <- subset(res, res$ftime >= tL & res$ftime <= tU) class(res) <- "predict.fcrr" res$type <- type return(res) }
read_rrd <- function(filename) { filename <- normalizePath(filename) dat <- .smart_import_rrd(filename) for (i in seq_along(dat)){ dat[[i]] <- as_tibble(dat[[i]]) dat[[i]][["timestamp"]] <- as.POSIXct(dat[[i]][["timestamp"]], origin = "1970-01-01") } dat } describe_rrd <- function(filename){ filename <- normalizePath(filename) .describe_rrd(filename) invisible(NULL) } read_rra <- function(filename, cf, step, n_steps, start, end = Sys.time()){ assert_that(is.character(cf)) assert_that(cf %in% c("AVERAGE", "MIN", "MAX", "LAST")) assert_that(is.time(end)) assert_that(is.numeric(step)) assert_that(is.integer(as.integer(step))) if ( (missing(start) || is.null(start)) && (missing(n_steps) || is.null(n_steps)) ) { stop("You must specify one of n_steps or start") } if (missing(start) || is.null(start)) { assert_that(is.numeric(n_steps)) assert_that(n_steps > 0) start <- end - (n_steps * step) } if (missing(n_steps) || is.null(n_steps)) { assert_that(is.time(start)) assert_that(start < end) } filename <- normalizePath(filename) start <- as.numeric(start) end <- as.numeric(end) step <- as.integer(step) cf <- match.arg(cf, c("AVERAGE", "MIN", "MAX", "LAST")) dat <- .import_rrd(filename, cf, start, end, step) dat <- as_tibble(dat) dat[["timestamp"]] <- as.POSIXct(dat[["timestamp"]], origin = "1970-01-01") dat } importRRD <- function(filename, cf = NULL, start = NULL, end = NULL, step = NULL){ if (any(!is.null(cf), !is.null(start), !is.null(end), !is.null(step))) { .Deprecated("read_rra") read_rra(filename = filename, cf = cf, start = start, end = end, step = step) } else { .Deprecated("read_rrd") read_rrd(filename = filename) } } is.POSIXct <- function(x){ inherits(x, "POSIXct") } is.time <- function(x){ is.POSIXct(x) || is.numeric(x) }
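## Hypothetical call pattern ('mydb.rrd' is a placeholder path): the readers
## need an existing RRDtool file, so the calls are guarded on its presence.
rrd_file <- "mydb.rrd"
if (file.exists(rrd_file)) {
  describe_rrd(rrd_file)  # prints the file's RRA structure
  avg <- read_rra(rrd_file, cf = "AVERAGE", step = 300L, n_steps = 288)
  head(avg)
}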
pocc <- function(x, size, space, prob = 1, approx = FALSE, log.p = FALSE, lower.tail = TRUE) { if (!is.numeric(x)) stop('Error: Argument x is not numeric') if (!is.numeric(size)) stop('Error: Size parameter is not numeric') if (!is.numeric(space)) stop('Error: Space parameter is not numeric') if (!is.numeric(prob)) stop('Error: Probability parameter is not numeric') if (!is.logical(approx)) stop('Error: approx option is not a logical value') if (!is.logical(log.p)) stop('Error: log.p option is not a logical value') if (!is.logical(lower.tail)) stop('Error: lower.tail option is not a logical value') if (length(size) != 1) stop('Error: Size parameter should be a single number') if (length(space) != 1) stop('Error: Space parameter should be a single number') if (length(prob) != 1) stop('Error: Probability parameter should be a single number') if (length(approx) != 1) stop('Error: approx option should be a single logical value') if (length(log.p) != 1) stop('Error: log.p option should be a single logical value') if (length(lower.tail) != 1) stop('Error: lower.tail option should be a single logical value') n <- as.integer(size) if (space == Inf) { m <- Inf } else { m <- as.integer(space) } if (size != n) stop('Error: Size parameter is not an integer') if (n < 0) stop('Error: Size parameter must be non-negative') if (space != m) stop('Error: Space parameter is not an integer') if (m <= 0) stop('Error: Space parameter must be positive') if ((prob < 0)|(prob > 1)) stop('Error: Probability parameter is not between zero and one') MAX <- min(n, m) CUMOCC <- rep(-Inf, length(x)) if ((n == 0)|(prob == 0)) { IND <- (x >= 0) CUMOCC[IND] <- 0 if (!lower.tail) { CUMOCC <- VGAM::log1mexp(-CUMOCC) } if (log.p) { return(CUMOCC) } else { return(exp(CUMOCC)) } } if (m == Inf) { CUMOCC <- pbinom(x, size = n, prob = prob, lower.tail = lower.tail, log.p = TRUE) if (log.p) { return(CUMOCC) } else { return(exp(CUMOCC)) } } if (!approx) { SCALE <- m*(1-prob)/prob LOGSTIRLING <- matrix(-Inf, nrow = n+1, ncol = MAX+1) LOGSTIRLING[1,1] <- 0 if ((SCALE > 0)&(n > 0)) { for (nn in 1:n) { LOGSTIRLING[nn+1, 1] <- nn*log(SCALE) } } for (nn in 1:n) { for (kk in 1:MAX) { T1 <- log(kk + SCALE) + LOGSTIRLING[nn, kk+1] T2 <- LOGSTIRLING[nn, kk] LOGSTIRLING[nn+1, kk+1] <- matrixStats::logSumExp(c(T1, T2)) } } LOGS <- rep(-Inf, MAX+1) for (k in 0:MAX) { LOGS[k+1] <- n*log(prob) - n*log(m) + lchoose(m,k) + lfactorial(k) + LOGSTIRLING[n+1, k+1] } LOGS <- LOGS - matrixStats::logSumExp(LOGS) } if (approx) { E1 <- (1 - prob/m)^n E2 <- (1 - 2*prob/m)^n MEAN <- m*(1 - E1) VAR <- m*((m-1)*E2 + E1 - m*E1^2) LOGS <- dnorm(0:MAX, mean = MEAN, sd = sqrt(VAR), log = TRUE) LOGS <- LOGS - matrixStats::logSumExp(LOGS) } CUMLOGS <- rep(-Inf, MAX+1) CUMLOGS[1] <- LOGS[1] for (k in 1:MAX) { CUMLOGS[k+1] <- matrixStats::logSumExp(c(CUMLOGS[k], LOGS[k+1])) } for (i in 1:length(x)) { xx <- floor(x[i]) if ((xx >= 0)&(xx <= MAX)) { CUMOCC[i] <- CUMLOGS[xx+1] } if (xx > MAX) { CUMOCC[i] <- 0 } } if (!lower.tail) { CUMOCC <- VGAM::log1mexp(-CUMOCC) } if (log.p) { CUMOCC } else { exp(CUMOCC) } }
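## A small sketch: the CDF of the number of occupied bins after dropping 10
## balls into 4 bins with the default allocation probability of 1 (needs
## matrixStats; VGAM is only touched for upper tails).
if (requireNamespace("matrixStats", quietly = TRUE)) {
  print(pocc(0:4, size = 10, space = 4))  # non-decreasing, reaching 1 at x = 4
}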
kcpRS <- function(data, RS_fun, RS_name, wsize = 25, nperm = 1000, Kmax = 10, alpha = .05, varTest = FALSE, ncpu = 1) { UseMethod("kcpRS") }
sfaStep <- function (sfaList, arg, step=NULL, method=NULL){ if(!is.null(arg)){ arg<-as.matrix(arg) } if (is.null(method)){ if (!is.null(step) && (step=="sfa")){ method = "SVDSFA"; } else{ method = "TIMESERIES"; } } if (sfaList$deg==1){ sfaList<-sfa1Step(sfaList, arg, step, method);} else{ sfaList<-sfa2Step(sfaList, arg, step, method);} return(sfaList) } sfa2Step <- function (sfaList, arg=NULL, step=NULL, method=NULL){ if(is.null(sfaList$opts$epsC)){epsC<-1e-7}else{epsC<-sfaList$opts$epsC} if(!is.null(step)) { oldStep=sfaList$step if (oldStep=="init" & (step=="preprocessing")){ print("Start preprocessing"); if (substr(sfaList$ppType, 1, 3)=="PCA"){ sfaList$lcov=lcovCreate(ncol(arg)); } else{ sfaList$sfa1List=sfa1Create(sfaList$ppRange); } } else if (oldStep=="preprocessing" & (step=="expansion")){ print("Close preprocessing"); if(sfaList$ppType=="SFA1"){ sfaList$sfa1List=sfaStep(sfaList$sfa1List, NULL, "sfa") sfaList$W0=sfaList$sfa1List$SF; sfaList$D0=sfaList$sfa1List$DSF; sfaList$avg0=sfaList$sfa1List$avg0; sfaList$tlen0=sfaList$sfa1List$tlen0; sfaList$sfa1List=NULL; } else{ sfaList$lcov=lcovFix(sfaList$lcov) if(sfaList$ppType=="PCA"){ print("Whitening and dimensionality reduction (PCA)"); pcaResult=lcovPca(sfaList$lcov,sfaList$ppRange) sfaList$W0=pcaResult$W; sfaList$DW0=pcaResult$DW; sfaList$D0=pcaResult$D; sfaList$avg0=sfaList$lcov$avg; sfaList$tlen0=sfaList$lcov$tlen; sfaCheckCondition(sfaList$lcov$COVMTX, "input") } else if(sfaList$ppType=="PCA2"){ print("Whitening and dimensionality reduction (PCA2)"); pcaResult=lcovPca2(sfaList$lcov,sfaList$ppRange) sfaList$W0=pcaResult$W; sfaList$DW0=pcaResult$DW; sfaList$D0=pcaResult$D; ppRange=length(which(colSums(t(sfaList$W0))!=0)); sfaList$ppRange=ppRange sfaList$xpRange=sfaList$xpDimFun(ppRange) sfaList$sfaRange=min(cbind(sfaList$xpRange,sfaList$sfaRange)); sfaList$W0=sfaList$W0[1:ppRange,]; sfaList$avg0=sfaList$lcov$avg; sfaList$tlen0=sfaList$lcov$tlen; } else if(sfaList$ppType=="PCAVAR"){ print("unit variance w/o dimensionality reduction (PCAVAR)"); varmat = diag(diag(sfaList$lcov$COVMTX)); sfaList$W0 = varmat^(-0.5); sfaList$avg0=sfaList$lcov$avg; sfaList$tlen0=sfaList$lcov$tlen; } sfaList$lcov=NULL; } print("Init expansion step"); xpSize=sfaList$xpRange; sfaList$xp=lcovCreate(xpSize); sfaList$diff=lcovCreate(xpSize); } else if (oldStep=="expansion" & (step=="sfa")){ print("Close expansion step"); sfaList$xp=lcovFix(sfaList$xp); sfaList$avg1=sfaList$xp$avg; sfaList$tlen1=sfaList$xp$tlen; xpsize=sfaList$xpRange sfaList$diff=lcovFix(sfaList$diff); print("Perform Slow Feature Analysis") sfaInt=sfaGetIntRange(sfaList$sfaRange); if(method=="GENEIG" ){ stop("GENEIG method is not implemented in rSFA package. 
Please choose method SVDSFA instead.") } if(method=="SVDSFA"){ print("Using alternate [WisSej02] approach for SFA-calculation ..."); pcaResult<-lcovPca2(sfaList$xp); S<-pcaResult$W BD<-pcaResult$D C = S %*% sfaList$diff$COVMTX %*% t(S); resvd=svd(C,nu=0,nv=ncol(C)) W1=resvd$v; D1=resvd$d; SF1 = t(S)%*%W1; sfaList$SF = SF1; sfaList$BD = BD; sfaList$myS=S; D=D1; } B = sfaList$xp$COVMTX; rankB = qr(B)$rank; print(paste("rank of B = ",rankB)); sfaList$rankB = rankB; sfaList$myB = B; idx=t(order(D)); lammax=max(D); print(paste("epsC*lammax= ",epsC*lammax)); if(method=="SVDSFA"){ idx = idx[which(abs(D[idx])>rep(epsC*lammax,length(D[idx])))]; sfaInt = 1:length(idx); sfaList$sfaRange = length(idx); } sfaList$DSF<-t(D[idx[sfaInt]]); sfaList$SF<-t(sfaList$SF[,idx[sfaInt]]); sfaList$cp=NULL; sfaList$diff=NULL; print("SFA2 closed"); } else if (!(oldStep==step)){ warning("Unknown Step Sequence in sfa2Step") return(sfaList) } sfaList$step=step; } if(sfaList$step=="preprocessing"){ if(substr(sfaList$ppType, 1, 3)=="PCA"){ sfaList$lcov=lcovUpdate(sfaList$lcov,arg); } else{ sfaList$sfa1List=sfaStep(sfaList$sfa1List, arg, "preprocessing") } } if(sfaList$step=="expansion"){ arg=arg-matrix(sfaList$avg0,customSize(arg,1),length(sfaList$avg0),byrow=T) arg=sfaList$sfaExpandFun(sfaList, arg %*% t(sfaList$W0)); sfaList$xp=lcovUpdate(sfaList$xp,arg); if(method=="TIMESERIES"){ sfaList$diff=lcovUpdate(sfaList$diff, sfaTimediff(arg,sfaList$axType)); } else if (method=="CLASSIF"){ K = customSize(arg,1); lt = customSize(arg,2); if(K<2){ stop("This class has less than two training records. Expansion can not run, pattern difference can not be calculated") } pdiff = NULL; for (k in 1:(K-1)){ pdiff = rbind(pdiff, matrix(t(arg[k,]),K-k,lt,byrow=TRUE) - arg[(k+1):K,]); if (k%%100==0) { sfaList$diff=lcovUpdate(sfaList$diff, pdiff); pdiff=NULL; } } sfaList$diff=lcovUpdate(sfaList$diff, pdiff); } else{ warning(paste(method," is not an allowed method in expansion step")); } } return(sfaList) } sfa1Step <- function (sfaList, arg=NULL, step=NULL, method=NULL){ if(!is.null(step)) { oldStep=sfaList$step if (oldStep=="init" & (step=="preprocessing")){ print("Start preprocessing"); sfaList$lcov=lcovCreate(ncol(arg)); sfaList$diff=sfaList$lcov; } else if (oldStep=="preprocessing" & (step=="sfa")){ print("Close preprocessing"); sfaList$lcov=lcovFix(sfaList$lcov); sfaList$avg0=sfaList$lcov$avg; sfaList$tlen0=sfaList$lcov$tlen; print("Perform slow feature analysis"); if(length(sfaList$sfaRange)==1){ sfaInt=1:sfaList$sfaRange; } else{ sfaInt=sfaList$sfaRange[1]:sfaList$sfaRange[2]; } pcaResult<-lcovPca2(sfaList$lcov); S<-pcaResult$W C = S %*% sfaList$diff$COVMTX %*% t(S); resvd=svd(C,nu=0,nv=ncol(C)) W1=resvd$v; D=resvd$d; sfaList$SF = t(S)%*%W1; idx=t(order(D)); lammax=max(D); if(is.null(sfaList$opts$epsC)){epsC<-0}else{epsC<-sfaList$opts$epsC} print(paste("epsC*lammax= ",epsC*lammax)); idx = idx[which( abs(D[idx])>rep(epsC*lammax,length(D[idx])))]; sfaInt = 1:length(idx); sfaList$DSF<-t(D[idx[sfaInt]]); sfaList$SF<-t(sfaList$SF[,idx[sfaInt]]); sfaList$lcov=NULL; sfaList$diff=NULL; print("SFA1 closed"); } else if (!(oldStep==step)){ warning("Unknown Step Sequence in sfa1Step") return(sfaList) } sfaList$step=step; } if(sfaList$step=="preprocessing"){ sfaList$lcov=lcovUpdate(sfaList$lcov,arg); if(method=="TIMESERIES"){ sfaList$diff=lcovUpdate(sfaList$diff, sfaTimediff(arg,sfaList$axType)); } else if (method=="CLASSIF"){ K = customSize(arg,1); lt = customSize(arg,2); pdiff = NULL; for (k in 1:(K-1)){ pdiff = rbind(pdiff, 
matrix(t(arg[k,]),K-k,lt,byrow=TRUE) - arg[(k+1):K,]); } sfaList$diff=lcovUpdate(sfaList$diff, pdiff); } else{ stop(paste(method," is not an allowed method in expansion step")); } } return(sfaList) }
library(imagerExtra) test_that("OCR", { if (requireNamespace("tesseract", quietly = TRUE)) { notim <- 1 im <- boats gim <- grayscale(im) gim2 <- imrep(gim, 2) %>% imappend(., "z") im_NA <- as.cimg(matrix(NA, 100, 100)) im_char <- as.cimg(matrix("A", 100, 100)) impix <- boats %>% as.pixset gimpix <- gim %>% as.pixset gim2pix <- gim2 %>% as.pixset impix_NA <- im_NA%>% as.pixset expect_error(OCR(notim)) expect_error(OCR(im)) expect_error(OCR(gim2)) expect_error(OCR(im_NA)) expect_error(OCR(im_char)) expect_error(OCR(impix)) expect_error(OCR(gim2pix)) expect_error(OCR(impix_NA)) expect_error(OCR_data(notim)) expect_error(OCR_data(im)) expect_error(OCR_data(gim2)) expect_error(OCR_data(im_NA)) expect_error(OCR_data(im_char)) expect_error(OCR_data(impix)) expect_error(OCR_data(gim2pix)) expect_error(OCR_data(impix_NA)) } })
geom_streamline <- function(mapping = NULL, data = NULL, stat = "streamline", position = "identity", ..., L = 5, min.L = 0, res = 1, S = NULL, dt = NULL, xwrap = NULL, ywrap = NULL, skip = 1, skip.x = skip, skip.y = skip, n = NULL, nx = n, ny = n, jitter = 1, jitter.x = jitter, jitter.y = jitter, arrow.angle = 6, arrow.length = 0.5, arrow.ends = "last", arrow.type = "closed", arrow = grid::arrow(arrow.angle, grid::unit(arrow.length, "lines"), ends = arrow.ends, type = arrow.type), lineend = "butt", na.rm = TRUE, show.legend = NA, inherit.aes = TRUE) { ggplot2::layer( data = data, mapping = mapping, stat = stat, geom = GeomStreamline, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list( L = L, min.L = min.L, res = res, xwrap = xwrap, ywrap = ywrap, dt = dt, S = S, arrow = arrow, lineend = lineend, na.rm = na.rm, skip.x = skip.x, skip.y = skip.y, nx = nx, ny = ny, jitter.x = jitter.x, jitter.y = jitter.y, ... ) ) } stat_streamline <- function(mapping = NULL, data = NULL, geom = "streamline", position = "identity", ..., L = 5, min.L = 0, res = 1, S = NULL, dt = NULL, xwrap = NULL, ywrap = NULL, skip = 1, skip.x = skip, skip.y = skip, n = NULL, nx = n, ny = n, jitter = 1, jitter.x = jitter, jitter.y = jitter, arrow.angle = 6, arrow.length = 0.5, arrow.ends = "last", arrow.type = "closed", arrow = grid::arrow(arrow.angle, grid::unit(arrow.length, "lines"), ends = arrow.ends, type = arrow.type), lineend = "butt", na.rm = TRUE, show.legend = NA, inherit.aes = TRUE) { ggplot2::layer( data = data, mapping = mapping, stat = StatStreamline, geom = geom, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = list( L = L, min.L = min.L, res = res, dt = dt, S = S, xwrap = xwrap, ywrap = ywrap, arrow = arrow, lineend = lineend, na.rm = na.rm, skip.x = skip.x, skip.y = skip.y, nx = nx, ny = ny, jitter.x = jitter.x, jitter.y = jitter.y, ... 
) ) } StatStreamline <- ggplot2::ggproto("StatStreamline", ggplot2::Stat, required_aes = c("x", "y", "dx", "dy"), setup_params = function(data, params) { m <- with(data, mean(Mag(dx, dy), na.rm = TRUE)) r <- min(ggplot2::resolution(data$x, zero = FALSE), ggplot2::resolution(data$y, zero = FALSE)) if (is.null(params$dt)) params$dt <- r/m/params$res if (is.null(params$S)) params$S <- ceiling(params$L/params$dt/m/2) if (params$S == 1) { warningf("Performing only 1 integration step, please consider increasing the resolution.") } return(params) }, compute_group = function(data, scales, dt = 0.1, S = 3, skip.x = 1, skip.y = 1, nx = 10, ny = 10, jitter.x = 1, jitter.y = 1, xwrap = NULL, ywrap = NULL, min.L = 0, L = NULL, res = NULL, no.cache = FALSE) { if (no.cache == TRUE) memoise::forget(streamline.f) data <- streamline.f(data, dt = dt, S = S, skip.x = skip.x, skip.y = skip.y, nx = nx, ny = ny, jitter.x = jitter.x, jitter.y = jitter.y, xwrap = xwrap, ywrap = ywrap) distance <- data[, .(dx = diff(x), dy = diff(y)), by = line] distance <- distance[, .(distance = sum(sqrt(dx^2 + dy^2))), by = line] keep <- distance[distance >= min.L, line] return(data.table::setDF(data[line %in% keep])) } ) GeomStreamline <- ggplot2::ggproto("GeomStreamline", ggplot2::GeomPath, default_aes = ggplot2::aes(colour = "black", size = 0.5, linetype = 1, alpha = NA), draw_panel = function(data, panel_params, coord, arrow = NULL, lineend = "butt", linejoin = "round", linemitre = 1, na.rm = FALSE) { if (!anyDuplicated(data$group)) { messagef("%s: Each group consists of only one observation.\nDo you need to adjust the group aesthetic?", "geom_path") } data <- data[order(data$group), , drop = FALSE] munched <- ggplot2::coord_munch(coord, data, panel_params) rows <- stats::ave(seq_len(nrow(munched)), munched$group, FUN = length) munched <- munched[rows >= 2, ] if (nrow(munched) < 2) return(ggplot2::zeroGrob()) attr <- plyr::ddply(munched, "group", function(df) { linetype <- unique(df$linetype) data.frame( solid = identical(linetype, 1) || identical(linetype, "solid"), constant = nrow(unique(df[, c("alpha", "colour","size", "linetype")])) == 1 ) }) solid_lines <- all(attr$solid) constant <- all(attr$constant) if (!solid_lines && !constant) { stopf("%s: If you are using dotted or dashed lines, colour, size and linetype must be constant over the line.", "geom_streamline", call. 
= FALSE) } n <- nrow(munched) group_diff <- munched$group[-1] != munched$group[-n] start <- c(TRUE, group_diff) end <- c(group_diff, TRUE) if (!constant) { if (!is.null(arrow)) { mult <- end&munched$end mult <- mult[!start] if ("simpleUnit" %in% class(grid::unit(1, "mm"))) { arrow$length <- mult*arrow$length[1] } else { arrow$length <- grid::unit(as.numeric(arrow$length)[1]*mult, attr(arrow$length, "unit")) } } grid::segmentsGrob( munched$x[!end], munched$y[!end], munched$x[!start], munched$y[!start], default.units = "native", arrow = arrow, gp = grid::gpar( col = scales::alpha(munched$colour, munched$alpha)[!end], fill = scales::alpha(munched$colour, munched$alpha)[!end], lwd = munched$size[!end] * .pt, lty = munched$linetype[!end], lineend = lineend, linejoin = linejoin, linemitre = linemitre ) ) } else { id <- match(munched$group, unique(munched$group)) if (!is.null(arrow)) { mult <- as.numeric(munched$end)[start] if ("simpleUnit" %in% class(grid::unit(1, "mm"))) { arrow$length <- mult*arrow$length[1] } else { arrow$length <- grid::unit(as.numeric(arrow$length)[1]*mult, attr(arrow$length, "unit")) } } grid::polylineGrob( munched$x, munched$y, id = id, default.units = "native", arrow = arrow, gp = grid::gpar( col = scales::alpha(munched$colour, munched$alpha)[start], fill = scales::alpha(munched$colour, munched$alpha)[start], lwd = munched$size[start] * .pt, lty = munched$linetype[start], lineend = lineend, linejoin = linejoin, linemitre = linemitre ) ) } } ) streamline <- function(field, dt = 0.1, S = 3, skip.x = 1, skip.y = 1, nx = NULL, ny = NULL, jitter.x = 1, jitter.y = 1, xwrap = NULL, ywrap = NULL) { field <- data.table::copy(data.table::as.data.table(field)) is.grid <- with(field, .is.regular_grid(x, y)) if (!is.grid) { stopf("'x' and 'y' do not define a regular grid.") } data.table::setorder(field, x, y) circ.x <- !is.null(xwrap) circ.y <- !is.null(ywrap) if (circ.x) field <- suppressWarnings(WrapCircular(field, "x", xwrap)) if (circ.y) field <- suppressWarnings(WrapCircular(field, "y", ywrap)) field <- field[!is.na(dx) & !is.na(dy)] rx <- ggplot2::resolution(as.numeric(field$x), zero = FALSE) ry <- ggplot2::resolution(as.numeric(field$y), zero = FALSE) range.x <- range(field$x) range.y <- range(field$y) matrix <- .tidy2matrix(field, x ~ y, value.var = "dx", fill = 0) dx.field <- list(x = matrix$rowdims$x, y = matrix$coldims$y, z = matrix$matrix) matrix <- .tidy2matrix(field, x ~ y, value.var = "dy", fill = 0) dy.field <- list(x = matrix$rowdims$x, y = matrix$coldims$y, z = matrix$matrix) force.fun <- function(X) { X[, 1] <- .fold(X[, 1], 1, range.x, circ.x)[[1]] X[, 2] <- .fold(X[, 2], 1, range.y, circ.y)[[1]] dx <- fields::interp.surface(dx.field, X) dy <- fields::interp.surface(dy.field, X) return(cbind(dx = dx, dy = dy)) } if (is.null(nx)) { xs <- JumpBy(dx.field$x, skip.x + 1) } else { xs <- seq(range.x[1], range.x[2], length.out = nx) } if (is.null(ny)) { ys <- JumpBy(dx.field$y, skip.y + 1) } else { ys <- seq(range.y[1], range.y[2], length.out = ny) } if ((is.null(nx) && is.null(ny))) { points <- data.table::as.data.table(field[x %in% xs & y %in% ys, .(x = x, y = y)]) } else { points <- data.table::as.data.table(expand.grid(x = xs, y = ys)) } set.seed(42) points[, x := x + rnorm(.N, 0, rx)*jitter.x] points[, y := y + rnorm(.N, 0, ry)*jitter.y] points[, group := 1:.N] points[, piece := 1] points[, step := 0] points[, end := FALSE] if (circ.x == TRUE){ points[, x := .fold(x, 1, range.x, circ.x)[[1]]] } else { points[, x := ifelse(x > range.x[2], range.x[2], x)] points[, x 
:= ifelse(x < range.x[1], range.x[1], x)] } if (circ.y == TRUE){ points[, y := .fold(y, 1, range.y, circ.y)[[1]]] } else { points[, y := ifelse(y > range.y[2], range.y[2], y)] points[, y := ifelse(y < range.y[1], range.y[1], y)] } as.list.matrix <- function(x, ...) { list(x[, 1], x[, 2]) } points[, c("dx", "dy") := as.list(force.fun(cbind(x, y)))] points <- points[abs(dx) + abs(dy) != 0 & !is.na(dx) & !is.na(dy)] points[, sign := 1] points_forw <- data.table::copy(points) points_forw[, sign := 1] points_back <- data.table::copy(points) points_back[, sign := -1] accum_forw <- vector(mode = "list", length = S) accum_back <- vector(mode = "list", length = S) for (s in 1:S) { points_forw <- points_forw[dx + dy != 0] points_back <- points_back[dx + dy != 0] points_forw[, c("x", "y") := runge_kutta4(x, y, force.fun, dt, piece, list(range.x, range.y), c(circ.x, circ.y))] points_forw[, step := s] points_forw[, c("dx", "dy") := as.list(force.fun(cbind(x, y)))] points_back[, c("x", "y") := runge_kutta4(x, y, force.fun, -dt, piece, list(range.x, range.y), c(circ.x, circ.y))] points_back[, step := -s] points_back[, c("dx", "dy") := as.list(force.fun(cbind(x, y)))] points_forw <- points_forw[!is.na(dx) & !is.na(dy)] points_back <- points_back[!is.na(dx) & !is.na(dy)] accum_forw[[s]] <- points_forw accum_back[[S - s + 1]] <- points_back } points <- data.table::rbindlist(c(accum_back, list(points), accum_forw)) range.select <- function(sign, range) { ifelse(sign == 1, range[2], range[1]) } points[, step2 := step] if (circ.x == TRUE) { points <- points[, .approx_order(x, y, range.x), by = group] points[, piece := as.numeric(data.table::rleid(x %between% range.x)), by = group] points[, c("dx", "dy") := as.list(force.fun(cbind(x, y)))] points[, step := seq_along(x), by = group] points[, x := .fold(x, 1, range.x, circ.x)[[1]]] } if (circ.y == TRUE) { points <- points[, .approx_order(y, x, range.y), by = group] points[, piece := as.numeric(data.table::rleid(y %between% range.y)), by = group] points[, c("dx", "dy") := as.list(force.fun(cbind(x, y)))] points[, step := seq_along(y), by = group] points[, y := .fold(y, 1, range.y, circ.y)[[1]]] } points[, end := seq_len(.N) < .N/2, by = .(group, piece)] points_last <- points[end == FALSE, ] points_first <- rbind(points[end == TRUE, ], points_last[, head(.SD, 1), by = .(group, piece)]) points_first[, end := TRUE] points <- rbind(points_first, points_last) points[, group := interaction(group, piece, end)] points[, line := group] return(points[, .(x, y, group, piece, end, step, dx, dy, line)]) } .fold <- function(x, piece, range, circular = TRUE) { if (circular) { R <- diff(range) piece <- ifelse(x > range[2], piece + 1, piece) x <- ifelse(x > range[2], x - R, x) piece <- ifelse(x < range[1], piece + 1, piece) x <- ifelse(x < range[1], x + R, x) } else { x <- ifelse(x > range[2], NA, x) x <- ifelse(x < range[1], NA, x) } return(list(x, piece)) } streamline.f <- memoise::memoise(streamline) .approx_order <- function(x, y, extra.x) { rx <- ggplot2::resolution(x, zero = FALSE) extra.x <- c(extra.x - rx/100000, extra.x + rx/100000) for (i in seq_along(extra.x)) { ind <- which(diff(x < extra.x[i]) != 0) + 1 if (length(ind) != 0) { val <- rep(extra.x[i], length(ind)) new_x <- vector(mode = "numeric", length(x) + length(val)) new_y <- new_x new_x[-ind] <- x new_x[ind] <- val new_y[-ind] <- y new_y[ind] <- y[ind-1] + (extra.x[i] - x[ind-1]) * (diff(y)/diff(x))[ind - 1] x <- new_x y <- new_y } } return(list(x = x, y = y)) }
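# Usage sketch (hedged): these ggproto objects would normally be exposed through
# a geom_streamline()/stat_streamline() constructor defined elsewhere in the
# package. With a regular grid of velocity components (wind, lon, lat, u and v
# are hypothetical names) the layer could be built directly:
#   ggplot2::ggplot(wind, ggplot2::aes(lon, lat, dx = u, dy = v)) +
#     ggplot2::layer(stat = StatStreamline, geom = GeomStreamline,
#                    data = NULL, mapping = NULL, position = "identity",
#                    inherit.aes = TRUE, params = list(res = 2, S = 3))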
between. <- function(x, left, right) { between(x = x, lower = left, upper = right) }
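# Usage sketch (hedged): this is a thin compatibility shim that exposes
# left/right argument names and forwards to whatever `between(x, lower, upper)`
# helper is in scope (e.g. a data.table-style one):
#   between.(5, left = 1, right = 10)   # TRUE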
fluidPage <- function(..., title = NULL, responsive = TRUE, theme = NULL) { bootstrapPage(div(class = "container-fluid", ...), title = title, responsive = responsive, theme = theme) } fluidRow <- function(...) { div(class = "row-fluid", ...) }
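# Usage sketch (hedged, Bootstrap-2-era shiny assumed given the "row-fluid"
# class): a fluid page holding one row of two span columns.
#   ui <- fluidPage(title = "demo",
#                   fluidRow(div(class = "span6", "left"),
#                            div(class = "span6", "right")))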
isNanVectorOrNull <- function(argument, default = NULL, stopIfNot = FALSE, n = NA, message = NULL, argumentName = NULL) {
  # pass the caller-supplied length check through instead of hard-coding n = NA,
  # which silently ignored the wrapper's own `n` argument
  checkarg(argument, "N", default = default, stopIfNot = stopIfNot, nullAllowed = TRUE, n = n,
           zeroAllowed = FALSE, negativeAllowed = FALSE, positiveAllowed = FALSE,
           nonIntegerAllowed = TRUE, naAllowed = FALSE, nanAllowed = TRUE, infAllowed = FALSE,
           message = message, argumentName = argumentName)
}
asymdesign <- function(I, beta = 0.3, betaspend, alpha = 0.05, p_0, p_1, K, tol = 1e-6) {
  temp1 = check.asymdesign(I, beta, betaspend, alpha, p_0, p_1, K, tol)
  I = temp1$I
  betaspend = temp1$betaspend
  K = temp1$K
  lowerbounds = rep(0, K)
  betaspend = betaspend * beta
  u_K = stats::qnorm(1 - alpha)
  n_K = ceiling(p_1 * (1 - p_1) * (((u_K - stats::qnorm(beta)) / (p_1 - p_0))^2))
  n.I = ceiling(n_K * I)
  lowerbounds[1] = stats::qnorm(betaspend[1]) + (p_1 - p_0) * sqrt(n.I[1] / p_1 / (1 - p_1))
  problow = betaspend
  flag = FALSE  # initialise so the K == 2 case (which skips the interim loop) does not hit an undefined `flag` below
  if (K > 2) {
    for (k in 2:(K - 1)) {
      temp1 = bound1(k, lowerbounds[1:(k - 1)], u_K, n.I[1:k], p_1, p_0, betaspend[k], tol)
      flag = temp1$flag
      if (flag) {
        lowerbounds[k:K] = u_K
        break
      }
      lowerbounds[k] = temp1$l_k
      problow[k] = temp1$error
    }
  }
  lowerbounds[K] = u_K
  if (!flag) {
    mean1 = (p_1 - p_0) * sqrt(n.I / p_1 / (1 - p_1))
    lowerlimits = c(lowerbounds[1:(K - 1)], -Inf)
    sigma = matrix(0, K, K)
    for (i in 1:K) {
      for (j in 1:K) {
        sigma[i, j] = mean1[min(i, j)] / mean1[max(i, j)]
      }
    }
    problow[K] = mvtnorm::pmvnorm(lower = lowerlimits, upper = c(rep(Inf, (K - 1)), u_K), mean = mean1, sigma = sigma)[1]
  } else {
    problow = asymprob1(n.I, lowerbounds, p_0, p_1, K)
  }
  t = 0
  while ((beta < sum(problow)) & (t <= 30)) {
    n_K = n_K + 1
    n.I = ceiling(n_K * I)
    problow = asymprob1(n.I, lowerbounds, p_0, p_1, K)
    t = t + 1
  }
  if (beta < sum(problow)) stop('cannot converge with the current tol.')
  probhi = asymprob2(n.I, lowerbounds, K)
  probhi = 1 - sum(probhi)
  x = list(I = I, beta = beta, betaspend = betaspend, alpha = alpha, p_0 = p_0, p_1 = p_1, K = K, tol = tol,
           n.I = n.I, u_K = u_K, lowerbounds = lowerbounds, problow = problow, probhi = probhi,
           power = 1 - sum(problow))
  class(x) = "asymdesign"
  return(x)
}
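# Usage sketch (hedged; check.asymdesign(), bound1(), asymprob1() and
# asymprob2() are package internals not shown here). A three-look design with
# equally spaced information fractions and flat beta spending:
#   des <- asymdesign(I = c(1/3, 2/3, 1), betaspend = rep(1/3, 3),
#                     p_0 = 0.3, p_1 = 0.5, K = 3)
#   des$n.I          # per-look sample sizes
#   des$lowerbounds  # futility boundaries on the z scale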
weight_omega <- function (result) { if (result$estimand == "ATEcombined") { if (result$method == "score") { omega <- result$ps^result$alpha + (1 - result$ps)^result$alpha } else if (result$method == "cb") { omega <- 1 / (1 - result$ps) + 1 / result$ps } else if (result$method == "both") { omega <- (result$ps^result$alpha + (1 - result$ps)^result$alpha) * result$scratio + (1 / (1 - result$ps) + 1 / result$ps) * (1 - result$scratio) } } else if (result$estimand == "ATE") { ps1 <- result$ps[1, ] ps2 <- result$ps[2, ] if (result$method == "score") { omega1 <- ps1^result$alpha omega2 <- (1 - ps2)^result$alpha } else if (result$method == "cb") { omega1 <- 1 / (1 - ps1) omega2 <- 1 / ps2 } else if (result$method == "both") { omega1 <- ps1^result$alpha * result$scratio[1] + 1 / (1 - ps1) * (1 - result$scratio[1]) omega2 <- (1 - ps2)^result$alpha * result$scratio[2] + 1 / ps2 * (1 - result$scratio[2]) } omega <- rbind(omega1 = omega1, omega2 = omega2) } else { if (result$method == "score") { omega <- result$ps^result$alpha } else if (result$method == "cb") { omega <- 1 / (1 - result$ps) } else if (result$method == "both") { omega <- result$ps^result$alpha * result$scratio + 1 / (1 - result$ps) * (1 - result$scratio) } } omega }
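# Worked example (hedged: the `result` structure is assumed from the code
# above; any estimand other than "ATE"/"ATEcombined", e.g. "ATT", takes the
# final branch, where method = "score" gives omega = ps^alpha):
weight_omega(list(estimand = "ATT", method = "score", ps = c(0.2, 0.8), alpha = 1))
# [1] 0.2 0.8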
setMethod("yFromRow", signature(object="SpatRaster", row="numeric"), function(object, row) { object@ptr$yFromRow(row - 1) } ) setMethod(xFromCol, signature(object="SpatRaster", col="numeric"), function(object, col) { object@ptr$xFromCol(col - 1) } ) setMethod(colFromX, signature(object="SpatRaster", x="numeric"), function(object, x) { cols <- object@ptr$colFromX(x) + 1 cols[cols==0] <- NA cols } ) setMethod(rowFromY, signature(object="SpatRaster", y="numeric"), function(object, y) { rows <- object@ptr$rowFromY(y) + 1 rows[rows==0] <- NA rows } ) setMethod(cellFromXY, signature(object="SpatRaster", xy="matrix"), function(object, xy) { stopifnot(ncol(xy) == 2) object@ptr$cellFromXY(xy[,1], xy[,2]) + 1 } ) setMethod(cellFromXY, signature(object="SpatRaster", xy="data.frame"), function(object, xy) { stopifnot(ncol(xy) == 2) object@ptr$cellFromXY(xy[,1], xy[,2]) + 1 } ) setMethod(cellFromRowCol, signature(object="SpatRaster", row="numeric", col="numeric"), function(object, row, col) { object@ptr$cellFromRowCol(row-1, col-1) + 1 } ) setMethod(cellFromRowColCombine, signature(object="SpatRaster", row="numeric", col="numeric"), function(object, row, col) { object@ptr$cellFromRowColCombine(row-1, col-1) + 1 } ) setMethod(xyFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { xy <- object@ptr$xyFromCell(cell-1) xy <- do.call(cbind, xy) colnames(xy) <- c("x", "y") xy } ) setMethod(yFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { xyFromCell(object, cell)[,2] } ) setMethod(xFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { xyFromCell(object, cell)[,1] } ) setMethod(rowColFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { rc <- object@ptr$rowColFromCell(cell-1) rc <- do.call(cbind, rc) rc[rc < 0] <- NA rc+1 } ) setMethod(rowFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { rowColFromCell(object, cell)[,1] } ) setMethod(colFromCell, signature(object="SpatRaster", cell="numeric"), function(object, cell) { rowColFromCell(object, cell)[,2] } )
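# Usage sketch (assumes the terra generics/classes these methods extend):
# cells are numbered row-major and 1-based on the R side, while the C++
# pointer API is 0-based -- hence the +1/-1 shifts above. For example:
#   r <- terra::rast(nrows = 10, ncols = 10)
#   cellFromRowCol(r, 2, 3)   # 13, i.e. (2 - 1) * 10 + 3
#   rowColFromCell(r, 13)     # row 2, col 3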
library(forecast) SeasonalityTest <- function(input, ppy){ tcrit <- 1.645 if (length(input)<3*ppy){ test_seasonal <- FALSE }else{ xacf <- acf(input, plot = FALSE)$acf[-1, 1, 1] clim <- tcrit/sqrt(length(input)) * sqrt(cumsum(c(1, 2 * xacf^2))) test_seasonal <- ( abs(xacf[ppy]) > clim[ppy] ) if (is.na(test_seasonal)==TRUE){ test_seasonal <- FALSE } } return(test_seasonal) } Theta.fit <- function(input, fh, theta, curve, model, seasonality , plot=FALSE){ if (theta<0){ theta <- 2 } if (fh<1){ fh <- 1 } outtest <- naive(input, h=fh)$mean if (theta==0){ wses <- 0 }else{ wses <- (1/theta) } wlrl <- (1-wses) ppy <- frequency(input) if (seasonality=="N"){ des_input <- input ; SIout <- rep(1, fh) ; SIin <- rep(1, length(input)) }else if (seasonality=="A"){ Dec <- decompose(input, type="additive") des_input <- input-Dec$seasonal SIin <- Dec$seasonal SIout <- head(rep(Dec$seasonal[(length(Dec$seasonal)-ppy+1):length(Dec$seasonal)], fh), fh) }else{ Dec <- decompose(input, type="multiplicative") des_input <- input/Dec$seasonal SIin <- Dec$seasonal SIout <- head(rep(Dec$seasonal[(length(Dec$seasonal)-ppy+1):length(Dec$seasonal)], fh), fh) } if (min(des_input)<=0){ curve <- "Lrl" ; model <- "A" } observations <- length(des_input) xs <- c(1:observations) xf = xff <- c((observations+1):(observations+fh)) dat=data.frame(des_input=des_input, xs=xs) newdf <- data.frame(xs = xff) if (curve=="Exp"){ estimate <- lm(log(des_input)~xs) thetaline0In <- exp(predict(estimate))+input-input thetaline0Out <- exp(predict(estimate, newdf))+outtest-outtest }else{ estimate <- lm(des_input ~ poly(xs, 1, raw=TRUE)) thetaline0In <- predict(estimate)+des_input-des_input thetaline0Out <- predict(estimate, newdf)+outtest-outtest } if (model=="A"){ thetalineT <- theta*des_input+(1-theta)*thetaline0In }else if ((model=="M")&(all(thetaline0In>0)==T)&(all(thetaline0Out>0)==T)){ thetalineT <- (des_input^theta)*(thetaline0In^(1-theta)) }else{ model<-"A" thetalineT <- theta*des_input+(1-theta)*thetaline0In } sesmodel <- ses(thetalineT, h=fh) thetaline2In <- sesmodel$fitted thetaline2Out <- sesmodel$mean if (model=="A"){ forecastsIn <- as.numeric(thetaline2In*wses)+as.numeric(thetaline0In*wlrl)+des_input-des_input forecastsOut <- as.numeric(thetaline2Out*wses)+as.numeric(thetaline0Out*wlrl)+outtest-outtest }else if ((model=="M")& (all(thetaline2In>0)==T)&(all(thetaline2Out>0)==T)& (all(thetaline0In>0)==T)&(all(thetaline0Out>0)==T)){ forecastsIn <- ((as.numeric(thetaline2In)^(1/theta))*(as.numeric(thetaline0In)^(1-(1/theta))))+des_input-des_input forecastsOut <- ((as.numeric(thetaline2Out)^(1/theta))*(as.numeric(thetaline0Out)^(1-(1/theta))))+outtest-outtest }else{ model<-"A" thetalineT <- theta*des_input+(1-theta)*thetaline0In sesmodel <- ses(thetalineT,h=fh) thetaline2In <- sesmodel$fitted thetaline2Out <- sesmodel$mean forecastsIn <- as.numeric(thetaline2In*wses)+as.numeric(thetaline0In*wlrl)+des_input-des_input forecastsOut <- as.numeric(thetaline2Out*wses)+as.numeric(thetaline0Out*wlrl)+outtest-outtest } if (seasonality=="A"){ forecastsIn <- forecastsIn+SIin forecastsOut <- forecastsOut+SIout }else{ forecastsIn <- forecastsIn*SIin forecastsOut <- forecastsOut*SIout } for (i in 1:length(forecastsOut)){ if (forecastsOut[i]<0){ forecastsOut[i] <- 0 } } if (plot==TRUE){ united <- cbind(input,forecastsOut) for (ik in 1:(observations+fh)){ united[ik,1] = sum(united[ik,2],united[ik,1], na.rm = TRUE) } plot(united[,1],col="black",type="l",main=paste("Model:",model,",Curve:",curve,",Theta:",theta),xlab="Time",ylab="Values", 
ylim=c(min(united[,1])*0.85,max(united[,1])*1.15)) lines(forecastsIn, col="green") ; lines(forecastsOut, col="green") lines(thetaline2In, col="blue") ; lines(thetaline2Out, col="blue") lines(thetaline0In, col="red") ; lines(thetaline0Out, col="red") } output=list(fitted=forecastsIn,mean=forecastsOut, fitted0=thetaline0In,mean0=thetaline0Out, fitted2=thetaline2In,mean2=thetaline2Out, model=paste(seasonality,model,curve,c(round(theta,2)))) return(output) } FourTheta<- function(input, fh){ base <- mean(input) ; input <- input/base molist <- c("M","A") ; trlist <- c("Lrl","Exp") ppy <- frequency(input) ; ST <- F if (ppy>1){ ST <- SeasonalityTest(input, ppy) } if (ST==T){ selist <- c("M","A") listnames <- c() for (i in 1:length(selist)){ for (ii in 1:length(molist)){ for (iii in 1:length(trlist)){ listnames <- c(listnames,paste(selist[i], molist[ii], trlist[iii])) } } } }else{ listnames <- c() for (ii in 1:length(molist)){ for (iii in 1:length(trlist)){ listnames <- c(listnames, paste("N", molist[ii], trlist[iii])) } } } modellist <- NULL for (i in 1:length(listnames)){ modellist[length(modellist)+1] <- list(c(substr(listnames,1,1)[i], substr(listnames,3,3)[i], substr(listnames,5,7)[i])) } errorsin <- c() ; models <- NULL optfun <- function(x, input, fh, curve, model, seasonality){ mean(abs(Theta.fit(input=input, fh, theta=x, curve, model, seasonality , plot=FALSE)$fitted-input)) } for (j in 1:length(listnames)){ optTheta <- optimize(optfun, c(1:3), input=input, fh=fh, curve=modellist[[j]][3], model=modellist[[j]][2], seasonality=modellist[[j]][1])$minimum fortheta <- Theta.fit(input=input, fh=fh, theta=optTheta, curve=modellist[[j]][3], model=modellist[[j]][2], seasonality=modellist[[j]][1], plot=F) models[length(models)+1] <- list(fortheta) errorsin <- c(errorsin, mean(abs(input-fortheta$fitted))) } selected.model <- models[[which.min(errorsin)]] description <- selected.model$model output <- list(fitted=selected.model$fitted*base,mean=selected.model$mean*base, description=description) return(output) }
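# Usage sketch: AirPassengers is monthly (frequency 12) and longer than three
# full periods, so SeasonalityTest() is consulted and the full grid of
# seasonality/model/curve combinations is searched:
#   fc <- FourTheta(AirPassengers, fh = 12)
#   fc$description   # e.g. "M M Exp 2.5" -- seasonality, model, curve, theta
#   fc$mean          # 12-step-ahead point forecasts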
code_barrel <- function(df) { df$barrel <- with(df, ifelse(hit_angle <= 50 & hit_speed >= 98 & hit_speed * 1.5 - hit_angle >= 117 & hit_speed + hit_angle >= 124, 1, 0)) return(df) }
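# Worked example: a 99 mph / 25 degree ball clears all four cuts
# (25 <= 50, 99 >= 98, 99 * 1.5 - 25 = 123.5 >= 117, 99 + 25 = 124 >= 124),
# while an 85 mph ball fails the exit-velocity cut:
code_barrel(data.frame(hit_speed = c(99, 85), hit_angle = c(25, 10)))$barrel
# [1] 1 0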
getError=function(f,interval,type,delta,relative){ if (type=="lower") { if (relative){ error=abs((f-interval[1])/delta)*100 }else{ error=abs((f-interval[1])) } } else { if (relative){ error=abs((f-interval[2])/delta)*100 }else{ error=abs((f-interval[2])) } } return(error) }
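# Worked example: with relative = TRUE the error is reported as a percentage
# of `delta`, here abs((2.5 - 2) / 1) * 100 = 50:
getError(f = 2.5, interval = c(2, 3), type = "lower", delta = 1, relative = TRUE)
# [1] 50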
bassPCA <- function(xx = NULL, y = NULL, dat = NULL, n.pc = NULL, perc.var = 99, n.cores = 1, parType = "fork", center = TRUE, scale = FALSE, ...) {
  if (is.null(dat)) dat <- bassPCAsetup(xx, y, n.pc, perc.var, center, scale)
  return(bassBasis(dat, n.cores, parType = parType, ...))
}
bassPCAsetup <- function(xx, y, n.pc = NULL, perc.var = 99, center = TRUE, scale = FALSE) {
  if (perc.var > 100 | perc.var < 0) stop('perc.var must be between 0 and 100')
  n <- nrow(xx)
  y <- as.matrix(y)
  xx <- as.data.frame(xx)
  if (nrow(y) == 1 | ncol(y) == 1) stop('univariate y: use bass instead of bassPCA')
  if (nrow(y) != nrow(xx)) y <- t(y)
  if (nrow(y) != nrow(xx)) stop('x,y dimension mismatch')
  if (ncol(y) == nrow(y)) warning("Caution: because y is square, please ensure that each row of x corresponds to a row of y (and not a column)")
  if (!is.null(n.pc)) {
    if (n.pc > nrow(y)) warning('n.pc too large, using all PCs instead')
  }
  if (class(center) == 'logical' & length(center) == 1) {
    y.m <- colMeans(y)
    if (!center) y.m <- rep(0, ncol(y))
  } else if (class(center) == 'numeric' & length(center) == ncol(y)) {
    y.m <- center
  } else {
    stop("center parameter wrong dimension")
  }
  if (class(scale) == 'logical' & length(scale) == 1) {
    y.s <- apply(y, 2, sd)
    if (!scale) y.s <- rep(1, ncol(y))
  } else if (class(scale) == 'numeric' & length(scale) == ncol(y)) {
    y.s <- scale
  } else {
    stop("scale parameter wrong dimension")
  }
  yc <- t(scale(y, center = y.m, scale = y.s))
  S <- svd(yc)
  if (is.null(n.pc)) {
    ev <- S$d^2
    n.pc <- which(cumsum(ev / sum(ev)) * 100 > perc.var)[1]
  }
  basis <- S$u[, 1:n.pc, drop = FALSE] %*% diag(S$d[1:n.pc], nrow = n.pc)
  newy <- t(S$v[, 1:n.pc, drop = FALSE])
  trunc.error <- basis %*% newy - yc
  ret <- list(xx = xx, y = y, n.pc = n.pc, basis = basis, newy = newy, trunc.error = trunc.error, y.m = y.m, y.s = y.s, ev = S$d^2)
  class(ret) <- 'bassPCAsetup'
  return(ret)
}
bassBasis <- function(dat, n.cores = 1, parType = 'fork', ...) {
  if (n.cores > parallel::detectCores()) warning(paste0("Specified n.cores = ", n.cores, '. 
Proceeding with n.cores = min(n.cores,dat$n.pc,detectCores()) = ',min(n.cores,dat$n.pc,parallel::detectCores()))) n.cores<-min(n.cores,dat$n.pc,parallel::detectCores()) if(n.cores==1){ mod.list<-lapply(1:dat$n.pc,function(i) bass(dat$xx,dat$newy[i,],...)) } else if(parType=='socket'){ cl <- parallel::makeCluster(n.cores,setup_strategy = "sequential") mod.list<-parallel::parLapply(cl,1:dat$n.pc,function(i) bass(dat$xx,dat$newy[i,],...)) parallel::stopCluster(cl) } else if(parType=='fork'){ mod.list<-parallel::mclapply(1:dat$n.pc,function(i) bass(dat$xx,dat$newy[i,],...),mc.cores = n.cores,mc.preschedule = F) } ret<-list(mod.list=mod.list,dat=dat) class(ret)<-'bassBasis' return(ret) } predict.bassBasis<-function(object,newdata,mcmc.use=NULL,trunc.error=FALSE,nugget=T,n.cores=1,parType="fork",...){ if(is.null(mcmc.use)){ mcmc.use<-1:((object$mod.list[[1]]$nmcmc-object$mod.list[[1]]$nburn)/object$mod.list[[1]]$thin) } if(n.cores==1){ newy.pred<-array(unlist(lapply(1:object$dat$n.pc,function(i) predict1mod(object$mod.list[[i]],newdata,mcmc.use,nugget,...))),dim=c(length(mcmc.use),nrow(newdata),object$dat$n.pc)) out<-array(unlist(lapply(1:length(mcmc.use),function(i) predict1mcmc(matrix(newy.pred[i,,],ncol=object$dat$n.pc,nrow=nrow(newdata)),object$dat))),dim=c(length(object$dat$y.m),nrow(newdata),length(mcmc.use))) } else if(parType=='socket'){ cl <- parallel::makeCluster(min(n.cores,object$dat$n.pc,parallel::detectCores()),setup_strategy = "sequential") parallel::clusterExport(cl,varlist=c("newdata"),envir=environment()) newy.pred<-array(unlist(parallel::parLapply(cl,1:object$dat$n.pc,function(i) predict1mod(object$mod.list[[i]],newdata,mcmc.use,nugget,...))),dim=c(length(mcmc.use),nrow(newdata),object$dat$n.pc)) out<-array(unlist(parallel::parLapply(cl,1:length(mcmc.use),function(i) predict1mcmc(matrix(newy.pred[i,,],ncol=object$dat$n.pc,nrow=nrow(newdata)),object$dat))),dim=c(length(object$dat$y.m),nrow(newdata),length(mcmc.use))) parallel::stopCluster(cl) } else if(parType=='fork'){ newy.pred<-array(unlist(parallel::mclapply(1:object$dat$n.pc,function(i) predict1mod(object$mod.list[[i]],newdata,mcmc.use,nugget,...),mc.cores=n.cores)),dim=c(length(mcmc.use),nrow(newdata),object$dat$n.pc)) out<-array(unlist(parallel::mclapply(1:length(mcmc.use),function(i) predict1mcmc(matrix(newy.pred[i,,],ncol=object$dat$n.pc,nrow=nrow(newdata)),object$dat),mc.cores=n.cores)),dim=c(length(object$dat$y.m),nrow(newdata),length(mcmc.use))) } out<-aperm(out,c(3,2,1)) if(trunc.error) out<-out+array(truncErrSampN(length(mcmc.use)*nrow(newdata),object$dat$trunc.error),dim=c(length(mcmc.use),nrow(newdata),length(object$dat$y.m))) return(out) } predict1mcmc<-function(mat,dat){ if(is.null(dim(mat))) mat<-t(mat) dat$basis%*%t(mat)*dat$y.s + dat$y.m } predict1mod<-function(mod,newdata,mcmc.use,nugget,...){ pmat<-predict(mod,newdata,mcmc.use=mcmc.use,...) 
if(nugget) pmat<-pmat+rnorm(length(mcmc.use),0,sqrt(mod$s2[mcmc.use])) pmat } predict_fast.bassBasis<-function(object,newdata,n.cores=1,mcmc.use,trunc.error=FALSE,...){ newy.pred<-array(unlist(parallel::mclapply(1:object$dat$n.pc,function(i) predict1mod_fast(object$mod.list[[i]],newdata,mcmc.use,...),mc.cores=min(n.cores,object$dat$n.pc))),dim=c(length(mcmc.use),nrow(newdata),object$dat$n.pc)) out<-array(unlist(parallel::mclapply(1:length(mcmc.use),function(i) predict1mcmc(newy.pred[i,,],object$dat),mc.cores=min(n.cores,length(mcmc.use)))),dim=c(length(object$dat$y.m),nrow(newdata),length(mcmc.use))) out<-aperm(out,c(3,2,1)) return(out) } predict1mod_fast<-function(mod,newdata,mcmc.use,...){ pmat<-predict_fast(mod,newdata,mcmc.use=mcmc.use,...) pmat } truncErrSampN<-function(n,te.mat){ t(te.mat[,sample.int(nrow(te.mat),n,replace=T)]) } sobolBasis<-function(mod,int.order,prior=NULL,mcmc.use=NULL,nind=NULL,n.cores=1,parType='fork',plot=F,verbose=T){ if(is.null(mcmc.use)) mcmc.use<-length(mod$mod.list[[1]]$s2) bassMod<-mod$mod.list[[1]] pdescat<-sum(bassMod$pdes)+sum(bassMod$pcat) if(is.null(prior)) prior<-list() if(length(prior)<pdescat){ for(i in (length(prior)+1):pdescat) prior[[i]]<-list(dist=NA) } for(i in 1:pdescat){ if(is.null(prior[[i]])) prior[[i]]<-list(dist=NA) if(is.na(prior[[i]]$dist)){ prior[[i]]<-list() prior[[i]]$dist<-'uniform' } } if(bassMod$func){ if(is.null(prior.func)){ prior.func<-list() for(i in 1:bassMod$pfunc){ prior.func[[i]]<-list() prior.func[[i]]$dist<-'uniform' } } for(i in 1:length(prior.func)) class(prior.func[[i]])<-prior.func[[i]]$dist } for(i in 1:length(prior)) class(prior[[i]])<-prior[[i]]$dist if(bassMod$cat){ which.cat<-which(bassMod$cx=='factor') prior.cat<-list() for(i in 1:length(which.cat)){ prior.cat[i]<-prior[which.cat[i]] } prior[which.cat]<-NULL } else{ prior.cat<-NULL } if(bassMod$des){ for(i in 1:length(prior)){ if(is.null(prior[[i]]$trunc)){ prior[[i]]$trunc<-c(0,1) } else{ prior[[i]]$trunc<-scale.range(prior[[i]]$trunc,bassMod$range.des[,i]) } if(prior[[i]]$dist %in% c('normal','student')){ prior[[i]]$mean<-scale.range(prior[[i]]$mean,bassMod$range.des[,i]) prior[[i]]$sd<-prior[[i]]$sd/(bassMod$range.des[2,i]-bassMod$range.des[1,i]) if(prior[[i]]$dist == 'normal'){ prior[[i]]$z<-pnorm((prior[[i]]$trunc[2]-prior[[i]]$mean)/prior[[i]]$sd) - pnorm((prior[[i]]$trunc[1]-prior[[i]]$mean)/prior[[i]]$sd) } else{ prior[[i]]$z<-pt((prior[[i]]$trunc[2]-prior[[i]]$mean)/prior[[i]]$sd,prior[[i]]$df) - pt((prior[[i]]$trunc[1]-prior[[i]]$mean)/prior[[i]]$sd,prior[[i]]$df) } cc<-sum(prior[[i]]$weights*prior[[i]]$z) prior[[i]]$weights<-prior[[i]]$weights/cc } } } tl<-list(prior=prior) pc.mod<-mod$mod.list pcs<-mod$dat$basis if(verbose) cat('Start',timestamp(quiet = T),'\n') p<-pc.mod[[1]]$p if(int.order>p){ int.order<-p warning("int.order > number of inputs, changing to int.order = number of inputs") } u.list<-lapply(1:int.order,function(i) combn(1:p,i)) ncombs.vec<-unlist(lapply(u.list,ncol)) ncombs<-sum(ncombs.vec) nxfunc<-nrow(pcs) sob<-ints<-list() n.pc<-ncol(pcs) w0<-unlist(lapply(1:n.pc,function(pc) get.f0(prior,pc.mod,pc,mcmc.use))) f0r2<-(pcs%*%w0)^2 max.nbasis<-max(unlist(lapply(pc.mod,function(x) x$nbasis[mcmc.use]))) C1Basis.array<-array(dim=c(n.pc,p,max.nbasis)) for(i in 1:n.pc){ nb<-pc.mod[[i]]$nbasis[mcmc.use] mcmc.mod.usei<-pc.mod[[i]]$model.lookup[mcmc.use] for(j in 1:p){ for(k in 1:nb){ C1Basis.array[i,j,k]<-C1Basis(prior,pc.mod,j,k,i,mcmc.mod.usei) } } } u.list1<-list() for(i in 1:int.order) u.list1<-c(u.list1,split(u.list[[i]], 
col(u.list[[i]]))) if(verbose) cat('Integrating',timestamp(quiet = T),'\n') u.list.temp<-c(list(1:p),u.list1) if(n.cores==1){ ints1.temp<-lapply(u.list.temp,function(x) func.hat(prior,x,pc.mod,pcs,mcmc.use,f0r2,C1Basis.array)) } else if(parType=='socket'){ cl <- parallel::makeCluster(min(n.cores,parallel::detectCores()),setup_strategy = "sequential") parallel::clusterExport(cl,varlist=c("prior","x","pc.mod","pcs","mcmc.use","f0r2","C1Basis.array"),envir=environment()) ints1.temp<-parallel::parLapply(cl,u.list.temp,function(x) func.hat(prior,x,pc.mod,pcs,mcmc.use,f0r2,C1Basis.array)) parallel::stopCluster(cl) } else if(parType=='fork'){ ints1.temp<-parallel::mclapply(u.list.temp,function(x) func.hat(prior,x,pc.mod,pcs,mcmc.use,f0r2,C1Basis.array),mc.cores=n.cores,mc.preschedule = F) } V.tot<-ints1.temp[[1]] ints1<-ints1.temp[-1] ints<-list() ints[[1]]<-do.call(cbind,ints1[1:ncol(u.list[[1]])]) if(int.order>1){ for(i in 2:int.order) ints[[i]]<-do.call(cbind,ints1[sum(ncombs.vec[1:(i-1)])+1:ncol(u.list[[i]])]) } sob[[1]]<-ints[[1]] if(verbose) cat('Shuffling',timestamp(quiet = T),'\n') if(length(u.list)>1){ for(i in 2:length(u.list)){ sob[[i]]<-matrix(nrow=nxfunc,ncol=ncol(ints[[i]])) for(j in 1:ncol(u.list[[i]])){ cc<-rep(0,nxfunc) for(k in 1:(i-1)){ ind<-which(apply(u.list[[k]],2,function(x) all(x%in%u.list[[i]][,j]))) cc<-cc+(-1)^(i-k)*rowSums(ints[[k]][,ind]) } sob[[i]][,j]<-ints[[i]][,j]+cc } } } if(is.null(nind)) nind<-ncombs sob.comb.var<-do.call(cbind,sob) vv<-colMeans(sob.comb.var) ord<-order(vv,decreasing = T) cutoff<-vv[ord[nind]] if(nind>length(ord)) cutoff<-min(vv) use<-sort(which(vv>=cutoff)) V.other<-V.tot-rowSums(sob.comb.var[,use]) use<-c(use,ncombs+1) sob.comb.var<-t(cbind(sob.comb.var,V.other)) sob.comb<-t(t(sob.comb.var)/c(V.tot)) sob.comb.var<-sob.comb.var[use,,drop=F] sob.comb<-sob.comb[use,,drop=F] dim(sob.comb)<-c(1,length(use),nxfunc) dim(sob.comb.var)<-c(1,length(use),nxfunc) names.ind<-c(unlist(lapply(u.list,function(x) apply(x,2,paste,collapse='x',sep=''))),'other') names.ind<-names.ind[use] if(verbose) cat('Finish',timestamp(quiet = T),'\n') ret<-list(S=sob.comb,S.var=sob.comb.var,Var.tot=V.tot,names.ind=names.ind,xx=seq(0,1,length.out = nxfunc),func=T) class(ret)<-'bassSob' if(plot) plot(ret) return(ret) } func.hat<-function(prior,u,pc.mod,pcs,mcmc.use,f0r2,C1Basis.array){ res<-rep(0,nrow(pcs)) n.pc<-length(pc.mod) for(i in 1:n.pc){ res<-res+pcs[,i]^2*Ccross(prior,pc.mod,i,i,u,mcmc.use,C1Basis.array) if(i<n.pc){ for(j in (i+1):n.pc){ res<-res+2*pcs[,i]*pcs[,j]*Ccross(prior,pc.mod,i,j,u,mcmc.use,C1Basis.array) } } } return(res-f0r2) } Ccross<-function(prior,pc.mod,i,j,u,mcmc.use=1,C1Basis.array){ p<-pc.mod[[1]]$p mcmc.mod.usei<-pc.mod[[i]]$model.lookup[mcmc.use] mcmc.mod.usej<-pc.mod[[j]]$model.lookup[mcmc.use] Mi<-pc.mod[[i]]$nbasis[mcmc.use] Mj<-pc.mod[[j]]$nbasis[mcmc.use] mat<-matrix(nrow=Mi,ncol=Mj) a0i<-pc.mod[[i]]$beta[mcmc.use,1] a0j<-pc.mod[[j]]$beta[mcmc.use,1] f0i<-get.f0(prior,pc.mod,i,mcmc.use) f0j<-get.f0(prior,pc.mod,j,mcmc.use) out<- a0i*a0j + a0i*(f0j-a0j) + a0j*(f0i-a0i) if(Mi>0 & Mj>0){ ai<-pc.mod[[i]]$beta[mcmc.use,1+1:Mi] aj<-pc.mod[[j]]$beta[mcmc.use,1+1:Mj] for(mi in 1:Mi){ for(mj in 1:Mj){ temp1<-ai[mi]*aj[mj] temp2<-temp3<-1 for(l in (1:p)[-u]){ temp2<-temp2*C1Basis.array[i,l,mi]*C1Basis.array[j,l,mj] } for(l in u){ temp3<-temp3*C2Basis(prior,pc.mod,l,mi,mj,i,j,mcmc.mod.usei,mcmc.mod.usej) } out<-out+temp1*temp2*temp3 } } } if(length(out)==0) browser() return(out) } C1Basis<-function(prior,pc.mod,l,m,pc,mcmc.mod.use){ 
if(l<=pc.mod[[pc]]$pdes){ int.use.l<-which(pc.mod[[pc]]$vars.des[mcmc.mod.use,m,]==l) if(length(int.use.l)==0) return(1) s<-pc.mod[[pc]]$signs[mcmc.mod.use,m,int.use.l] t.ind<-pc.mod[[pc]]$knotInd.des[mcmc.mod.use,m,int.use.l] t<-pc.mod[[pc]]$xx.des[t.ind,l] q<-pc.mod[[pc]]$degree if(s==0) return(0) cc<-const(signs=s,knots=t,degree=q) if(s==1){ a<-max(prior[[l]]$trunc[1],t) b<-prior[[l]]$trunc[2] if(b<t) return(0) out<-intabq1(prior[[l]],a,b,t,q)/cc } else{ a<-prior[[l]]$trunc[1] b<-min(prior[[l]]$trunc[2],t) if(t<a) return(0) out<-intabq1(prior[[l]],a,b,t,q)*(-1)^q/cc } if(out< -1e-15) browser() return(out) } else{ l.cat<-l-pc.mod[[pc]]$pdes int.use.l<-which(pc.mod[[pc]]$vars.cat[mcmc.mod.use,m,]==l.cat) if(length(int.use.l)==0) return(1) lD1<-pc.mod[[pc]]$sub.size[mcmc.mod.use,m,int.use.l] nlevels<-pc.mod[[pc]]$nlevels[l.cat] return(lD1/nlevels) } } C2Basis<-function(prior,pc.mod,l,m1,m2,pc1,pc2,mcmc.mod.use1,mcmc.mod.use2){ if(l<=pc.mod[[pc1]]$pdes){ int.use.l1<-which(pc.mod[[pc1]]$vars.des[mcmc.mod.use1,m1,]==l) int.use.l2<-which(pc.mod[[pc2]]$vars.des[mcmc.mod.use2,m2,]==l) if(length(int.use.l1)==0 & length(int.use.l2)==0) return(1) if(length(int.use.l1)==0) return(C1Basis(prior,pc.mod,l,m2,pc2,mcmc.mod.use2)) if(length(int.use.l2)==0) return(C1Basis(prior,pc.mod,l,m1,pc1,mcmc.mod.use1)) q<-pc.mod[[pc1]]$degree s1<-pc.mod[[pc1]]$signs[mcmc.mod.use1,m1,int.use.l1] s2<-pc.mod[[pc2]]$signs[mcmc.mod.use2,m2,int.use.l2] t.ind1<-pc.mod[[pc1]]$knotInd.des[mcmc.mod.use1,m1,int.use.l1] t.ind2<-pc.mod[[pc2]]$knotInd.des[mcmc.mod.use2,m2,int.use.l2] t1<-pc.mod[[pc1]]$xx.des[t.ind1,l] t2<-pc.mod[[pc2]]$xx.des[t.ind2,l] if(t2<t1){ temp<-t1 t1<-t2 t2<-temp temp<-s1 s1<-s2 s2<-temp } return(C22Basis(prior[[l]],t1,t2,s1,s2,q,m1,m2,pc1,pc2)) } else{ l.cat<-l-pc.mod[[pc1]]$pdes int.use.l1<-which(pc.mod[[pc1]]$vars.cat[mcmc.mod.use1,m1,]==l.cat) int.use.l2<-which(pc.mod[[pc2]]$vars.cat[mcmc.mod.use2,m2,]==l.cat) if(length(int.use.l1)==0 & length(int.use.l2)==0) return(1) if(length(int.use.l1)==0) return(C1Basis(prior,pc.mod,l,m2,pc2,mcmc.mod.use2)) if(length(int.use.l2)==0) return(C1Basis(prior,pc.mod,l,m1,pc1,mcmc.mod.use1)) sub1<-pc.mod[[pc1]]$sub.list[[mcmc.mod.use1]][[m1]][[int.use.l1]] sub2<-pc.mod[[pc2]]$sub.list[[mcmc.mod.use2]][[m2]][[int.use.l2]] if(is.na(sub1[1]) & is.na(sub2[1])) browser() nlevels<-pc.mod[[pc1]]$nlevels[l.cat] return(length(intersect(sub1,sub2))/nlevels) } } C22Basis<-function(prior,t1,t2,s1,s2,q,m1,m2,pc1,pc2){ cc<-const(signs=c(s1,s2),knots=c(t1,t2),degree=q) if((s1*s2)==0){ return(0) } if(s1==1){ if(s2==1){ return(intabq2(prior,t2,1,t1,t2,q)/cc) } else{ return(intabq2(prior,t1,t2,t1,t2,q)*(-1)^q/cc) } } else{ if(s2==1){ return(0) } else{ return(intabq2(prior,0,t1,t1,t2,q)/cc) } } } get.f0<-function(prior,pc.mod,pc,mcmc.use){ mcmc.mod.use<-pc.mod[[pc]]$model.lookup[mcmc.use] out<-pc.mod[[pc]]$beta[mcmc.use,1] if(pc.mod[[pc]]$nbasis[mcmc.use] > 0){ for(m in 1:pc.mod[[pc]]$nbasis[mcmc.use]){ out1<-pc.mod[[pc]]$beta[mcmc.use,1+m] for(l in 1:pc.mod[[pc]]$p){ out1<-out1*C1Basis(prior,pc.mod,l,m,pc,mcmc.mod.use) } out<-out+out1 } } return(out) } rmnorm<-function(mu, S){ mu+c(rnorm(length(mu))%*%chol(S)) } calibrate.bassBasis<-function(mod,y,a,b,nmcmc,verbose=T){ p<-ncol(mod$dat$xx) ny<-length(y) ns<-mod$mod.list[[1]]$nmcmc-mod$mod.list[[1]]$nburn theta<-matrix(nrow=nmcmc,ncol=p) s2<-rep(NA,nmcmc) theta[1,]<-.5 pred.curr<-predict(mod,theta[1,,drop=F],mcmc.use=sample(ns,size=1),trunc.error=F) s2[1]<-1/rgamma(1,ny/2+a,b+sum((y-pred.curr)^2)) eps<-1e-10 cc<-2.4^2/p S<-diag(p)*eps 
count<-0 for(i in 2:nmcmc){ s2[i]<-1/rgamma(1,ny/2+a,b+sum((y-pred.curr)^2)) theta[i,]<-theta[i-1,] if(i>300){ mi<-1 S<-cov(theta[mi:(i-1),])*cc+diag(eps*cc,p) } theta.cand<-rmnorm(theta[i-1,],S) if(any(theta.cand<0 | theta.cand>1)) alpha<- -9999 else{ pred.cand<-predict(mod,t(theta.cand),mcmc.use=sample(ns,size=1),trunc.error=F) alpha<- -.5/s2[i]*(sum((y-pred.cand)^2)-sum((y-pred.curr)^2)) } if(log(runif(1))<alpha){ theta[i,]<-theta.cand count<-count+1 } pred.curr<-predict(mod,theta[i,,drop=F],mcmc.use=sample(ns,size=1),trunc.error=F) if(verbose & i%%100==0){ pr<-c('MCMC iteration',i,myTimestamp(),'count:',count) cat(pr,'\n') } } return(list(theta=theta,s2=s2,count=count)) } calibrateIndep.bassBasis<-function(mod,y,a,b,nmcmc,verbose=T){ p<-ncol(mod$dat$xx) ny<-length(y) ns<-mod$mod.list[[1]]$nmcmc-mod$mod.list[[1]]$nburn theta<-matrix(nrow=nmcmc,ncol=p) s2<-rep(NA,nmcmc) theta[1,]<-.5 pred.curr<-predict(mod,theta[1,,drop=F],mcmc.use=sample(ns,size=1),trunc.error=F) s2[1]<-1/rgamma(1,ny/2+a,b+sum((y-pred.curr)^2)) count<-rep(0,p) for(i in 2:nmcmc){ s2[i]<-1/rgamma(1,ny/2+a,b+sum((y-pred.curr)^2)) theta[i,]<-theta[i-1,] for(j in 1:p){ theta.cand<-theta[i,] theta.cand[j]<-runif(1) pred.cand<-predict(mod,t(theta.cand),mcmc.use=sample(ns,size=1),trunc.error=F) alpha<- -.5/s2[i]*(sum((y-pred.cand)^2)-sum((y-pred.curr)^2)) if(log(runif(1))<alpha){ theta[i,]<-theta.cand count[j]<-count[j]+1 } } pred.curr<-predict(mod,theta[i,,drop=F],mcmc.use=sample(ns,size=1),trunc.error=F) if(verbose & i%%100==0){ pr<-c('MCMC iteration',i,myTimestamp(),'count:',count) cat(pr,'\n') } } return(list(theta=theta,s2=s2,count=count)) } plot.prior<-function(prior,plot=TRUE,n=1000,...){ xx<-seq(prior$trunc[1],prior$trunc[2],length.out=n) if(prior$dist=='uniform'){ out<-dunif(xx,prior$trunc[1],prior$trunc[2]) z<-1 } if(prior$dist=='normal'){ out<-0 z<-0 for(i in 1:length(prior$weights)){ zi<-pnorm(prior$trunc[2],prior$mean[i],prior$sd[i]) - pnorm(prior$trunc[1],prior$mean[i],prior$sd[i]) z<-z+zi*prior$weights[i] out<-out+prior$weights[i]*dnorm(xx,prior$mean[i],prior$sd[i]) } } if(prior$dist=='student'){ out<-0 z<-0 for(i in 1:length(prior$weights)){ zi<-pt((prior$trunc[2]-prior$mean[i])/prior$sd[i],prior$df[i]) - pt((prior$trunc[1]-prior$mean[i])/prior$sd[i],prior$df[i]) z<-z+zi*prior$weights[i] out<-out+prior$weights[i]*(dt((xx-prior$mean[i])/prior$sd[i],prior$df[i])/prior$sd[i]) } } if(plot) plot(xx,out/z,...) return(cbind(xx,out/z)) } sample.prior<-function(prior,n){ p<-length(prior) out<-matrix(nrow=n,ncol=p) for(i in 1:p){ if(prior[[i]]$dist=='uniform'){ out[,i]<-runif(n,prior[[i]]$trunc[1],prior[[i]]$trunc[2]) } else{ ncomp<-length(prior[[i]]$weights) comp<-sample(1:ncomp,size=n,prob=prior[[i]]$weights,replace=T) if(prior[[i]]$dist=='normal') out[,i]<-suppressWarnings(truncdist::rtrunc(n,spec='norm',a=(prior[[i]]$trunc[1]-prior[[i]]$mean[comp])/prior[[i]]$sd[comp],b=(prior[[i]]$trunc[2]-prior[[i]]$mean[comp])/prior[[i]]$sd[comp])*prior[[i]]$sd[comp]+prior[[i]]$mean[comp]) if(prior[[i]]$dist=='student') out[,i]<-truncdist::rtrunc(n,spec='t',df=prior[[i]]$df[comp],a=(prior[[i]]$trunc[1]-prior[[i]]$mean[comp])/prior[[i]]$sd[comp],b=(prior[[i]]$trunc[2]-prior[[i]]$mean[comp])/prior[[i]]$sd[comp])*prior[[i]]$sd[comp]+prior[[i]]$mean[comp] } } out }
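# Usage sketch (hedged): bass() itself is supplied by the BASS package and is
# not shown here; xx is an n x p input matrix and y an n x m functional
# response. xx.new and y.obs are hypothetical test inputs / observed data:
#   dat <- bassPCAsetup(xx, y, perc.var = 99)
#   mod <- bassBasis(dat, n.cores = 1)
#   pred <- predict(mod, newdata = xx.new)   # dispatches to predict.bassBasis
#   cal <- calibrate.bassBasis(mod, y.obs, a = 1, b = 1, nmcmc = 1000)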
library(testthat) context("Subsetting test") p <- maxample("pop") a <- as.array(p) mv <- getOption("magclass.verbosity") on.exit(options(magclass.verbosity = mv)) options(magclass.verbosity = 2) test_that("single element subsetting works", { expect_identical(p[11], a[11]) expect_identical(p[3], a[3]) }) test_that("multi element subsetting works", { expect_equivalent(as.array(p[3, , ]), a[3, , , drop = FALSE]) expect_equivalent(as.array(p["FSU", , ]), a[4, , , drop = FALSE]) expect_equivalent(as.array(p[, 2005, ]), a[, 2, , drop = FALSE]) expect_equivalent(as.array(p[as.factor("PAS"), , "B1"]), a[9, , 2, drop = FALSE]) expect_equivalent(as.array(p["PAS", "y2005", "B1"]), a[9, 2, 2, drop = FALSE]) expect_equivalent(as.array(p[c("CPA", "CPA"), , ]), a[c(2, 2), , , drop = FALSE]) expect_equivalent(as.array(p[list(c("CPA", "CPA")), , ]), a[c(2, 2), , , drop = FALSE]) expect_equivalent(as.array(p[list(i = c("CPA", "CPA")), , ]), a[c(2, 2), , , drop = FALSE]) expect_equivalent(as.array(p[c("EUR", "CPA"), , ]), a[c(3, 2), , , drop = FALSE]) expect_equivalent(as.array(p[list(c("EUR", "CPA")), , ]), a[c(3, 2), , , drop = FALSE]) expect_equivalent(as.array(p[list(i = c("EUR", "CPA")), , ]), a[c(3, 2), , , drop = FALSE]) expect_equivalent(as.array(p[character(0), character(0), character(0)]), a[NULL, NULL, NULL, drop = FALSE]) expect_identical(p[, NULL, ], p) }) test_that("subsetting via dim argument works", { expect_identical(p[, 1:3, ], p[1:3, dim = 2]) expect_identical(p[, , "B1"], p["B1", dim = 3]) expect_identical(p[c("FSU", "EUR"), , ], p[c("FSU", "EUR"), dim = 1]) expect_error(p[1, 2, dim = 2], "Only single dimension selection allowed") expect_error(p[1, 2, 3, dim = 2], "Only single dimension selection allowed") expect_error(p[1, dim = 1.2], "Invalid dim selection") p3 <- p2 <- p expect_silent(p2[, , "A2"] <- 99) expect_silent(p3["A2", dim = 3] <- 99) expect_identical(p2, p3) expect_silent(p2[c("FSU", "EUR"), , ] <- 42) expect_silent(p3[c("FSU", "EUR"), dim = 1] <- 42) expect_identical(p2, p3) expect_silent(p2[, 2015, ] <- -99) expect_silent(p3[2015, dim = 2] <- -99) expect_identical(p2, p3) expect_error(p3[1, 2, dim = 2] <- 1, "Only single dimension selection allowed") expect_error(p3[1, 2, 3, dim = 2] <- 1, "Only single dimension selection allowed") expect_error(p[1, dim = 1.2] <- 1, "Invalid dim selection") }) test_that("boolean subsetting works", { expect_identical(p[p > 1000], p[p[10:1, , ] > 1000]) p2 <- p[, 1, 1] expect_identical(p[p2 > 1000, , ], p[as.vector(p2 > 1000), , ]) expect_identical(p[p2[10:1, , ] > 1000, , ], p[as.vector(p2 > 1000), , ]) p2 <- p[1, , 1] expect_identical(p[, p2 > 1000, ], p[, as.vector(p2 > 1000), ]) expect_identical(p[, p2[, 16:1, ] > 1000, ], p[, as.vector(p2 > 1000), ]) expect_identical(p[, p2[, c(16:10, 1:9), ] > 1000, ], p[, as.vector(p2 > 1000), ]) }) test_that("error detection works", { expect_error(p[, , "A3"], "out of bounds") expect_error(p[, , list("A3")], "out of bounds") expect_error(p[, , list(scenario = "A3")], "out of bounds") expect_error(p[, , list(blub = "A2")], "subdimension does not exist") names(dimnames(p)) <- NULL expect_error(p[, , list(scenario = "A2")], "subdimension does not exist \\(missing set names\\)") dimnames(p)[[3]] <- NULL expect_error(p[, , "A2"], "Missing element names") }) test_that("invert argument works", { expect_identical(p[-1, , ], p["AFR", invert = TRUE]) expect_identical(p[-1, , ], p["AFR", , invert = TRUE]) expect_identical(p[-1, , ], p["AFR", , , invert = TRUE]) expect_identical(p[-9, -4, ], p["PAS", 2025, invert 
= TRUE]) expect_identical(p[-9, -4, ], p["PAS", 2025, , invert = TRUE]) expect_identical(p[, -4, ], p[, 2025, , invert = TRUE]) expect_identical(p[-1:-3, , ], p[1:3, , , invert = TRUE]) }) test_that("drop works", { a <- maxample("animal") expect_identical(getItems(a[, , , drop = TRUE], dim = 3)[1], "rabbit.black") }) test_that("pmatch argument works", { expect_identical(getItems(p[, list("y1"), , pmatch = TRUE], 2), "y1995") expect_identical(getItems(p[, list(as.factor("y1")), , pmatch = TRUE], 2), "y1995") expect_identical(getItems(p[, "y1", , pmatch = TRUE], 2), "y1995") expect_error(getItems(p[, "y1", , pmatch = "right"], 2), "out of bounds") expect_identical(getItems(p[, "y1", , pmatch = "left"], 2), "y1995") expect_error(getItems(p[, "05", , pmatch = "left"], 2), "out of bounds") expect_identical(getItems(p[, "05", , pmatch = "right"], 2), c("y2005", "y2105")) }) test_that("multiple subdimensions work", { getItems(p, "j", maindim = 1) <- 1:10 expect_identical(p["AFR", , ], p[1, , ]) expect_identical(p[list(i = "CPA"), , ], p[2, , ]) expect_silent(p[list(i = "AFR"), , ] <- 99) expect_equal(as.vector(p["AFR", 1, 1]), 99) expect_silent(p[list(i = "AFR"), , list(scenario = "A2")] <- 100) expect_equal(as.vector(p["AFR", 1, "A2"]), 100) expect_silent(p[list("AFR"), 2145, list("A2")] <- 101) expect_equal(as.vector(p["AFR", 16, "A2"]), 101) t <- c(1995, 2005) yt <- paste0("y", t) expect_identical(p[, list(yt), ], p[, t, ]) expect_identical(p[, list(t), ], p[, yt, ]) expect_identical(getYears(p[, list(t), , invert = TRUE]), setdiff(getYears(p), yt)) }) test_that("value assignment works", { a <- maxample("animal") expect_silent(a[, NULL, as.factor("rabbit")] <- as.magpie(99)) expect_true(all(a[, , "rabbit"] == 99)) expect_silent(a[as.factor("NLD"), as.factor(c("april", "june")), as.factor("rabbit")] <- 12) expect_true(all(a["NLD", "june", "rabbit"] == 12)) b <- a expect_silent(b[, , ] <- 0) expect_true(all(b[, , ] == 0)) expect_silent(b[, , ] <- as.magpie(99)) expect_true(all(b[, , ] == 99)) expect_message(b[1:2, 1, 1] <- 1:2, "Dangerous replacement") expect_error(b[1:2, 1:2, 1:2] <- 1:7, "Different replacement length!") expect_silent(b["NLD", c("april", "june"), list("rabbit", "black")] <- a["NLD", c("april", "june"), list("rabbit", "black")]) expect_identical(b["NLD", c("april", "june"), list("rabbit", "black")], a["NLD", c("april", "june"), list("rabbit", "black")]) }) test_that("data.frame subsetting works", { a <- maxample("animal") df <- data.frame(getItems(a, 3, split = TRUE, full = TRUE), stringsAsFactors = FALSE) w <- c(1, 3, 4) expect_identical(getItems(a[df[w, ]], 3), getItems(a, 3)[w]) expect_identical(getItems(a[df[3:1][w, ]], 3), getItems(a, 3)[w]) expect_identical(getItems(a[df[3:2][w, ]], 3), getItems(a, 3)[w]) df$blub <- paste0("bl", 1:dim(df)[1]) expect_identical(getItems(a[df[w, ]], 3), paste(getItems(a, 3), df$blub, sep = ".")[w]) df2 <- df df2$ble <- paste0("ble", 1:dim(df2)[1]) expect_identical(getItems(a[df2[w, ]], 3), paste(getItems(a, 3), df2$blub, df2$ble, sep = ".")[w]) df$species <- NULL expect_identical(getItems(a[df[1, ]], 3), c("animal.rabbit.black.bl1", "animal.bird.black.bl1")) expect_identical(getItems(a[df[w, ]], 3), c("animal.rabbit.black.bl1", "animal.bird.black.bl1", "animal.rabbit.black.bl3", "animal.bird.black.bl3", "animal.bird.red.bl4")) df2 <- df df2$type <- NULL expect_identical(getItems(a[df2[1, ]], 3), c("animal.rabbit.black.bl1", "animal.bird.black.bl1")) df[3, 1] <- "car" expect_message(b <- a[df[w, ]], "elements were added") 
expect_identical(getItems(b, 3), c("animal.rabbit.black.bl1", "animal.bird.black.bl1", "animal.bird.red.bl4", "car.NA.black.bl3")) expect_true(all(is.na(b[, , "car.NA.black.bl3"]))) df[4, 1] <- "house" expect_message(b <- a[df[w, ]], "elements were added") expect_identical(getItems(b, 3), c("animal.rabbit.black.bl1", "animal.bird.black.bl1", "car.NA.black.bl3", "house.NA.red.bl4")) df1 <- data.frame(getItems(a, 1, split = TRUE, full = TRUE)) expect_identical(getItems(a[df1[w, ]], 1), getItems(a, 1)[w]) df2 <- data.frame(getItems(a, 2, split = TRUE, full = TRUE)) expect_identical(getItems(a[df2[w, ][c(3, 1, 2)]], 2), getItems(a, 2)[w]) names(df2)[2] <- names(df2)[1] expect_error(a[df2], "more than once") names(df2)[2] <- "country" expect_error(a[df2], "must only contain subdimensions with a shared main dimension") names(df2) <- paste0("bla", seq_along(df2)) expect_error(a[df2], "None of the dimensions in the mapping could be found") names(dimnames(a)) <- NULL expect_error(a[df], "must have names") p <- maxample("pop") df <- data.frame(getItems(p, 3, split = TRUE, full = TRUE), stringsAsFactors = FALSE) df$blub <- paste0("bla", seq_len(nrow(df))) expect_identical(getItems(p[df], 3), paste0(getItems(p, 3), ".", df$blub)) df$scenario <- c("C1", "D2") expect_identical(getItems(p[df], 3), c("C1.bla1", "D2.bla2")) expect_true(all(is.na(p[df]))) p0 <- p[, , NULL] expect_true(all(is.na(p0[df]))) expect_identical(getItems(p0[df], 3), c("C1.bla1", "D2.bla2")) }) test_that("duplicates detection works", { a <- maxample("animal") expect_warning(a[, c(1, 1, 2, 3), ][, "y2000.april.20", ], "contain duplicates") })
"fertil3"
aml_score_data <- function(symbol, timeframe, path_model, path_data, path_sbxm, path_sbxs) {
  requireNamespace("readr", quietly = TRUE)
  requireNamespace("h2o", quietly = TRUE)
  f_name <- paste0("AI_RSIADX", symbol, timeframe, ".csv")
  full_path <- file.path(path_data, f_name)
  x <- readr::read_csv(full_path, col_names = FALSE, col_types = readr::cols())
  x1 <- head(x, 1)[, -c(1:3)]
  m_name <- paste0("DL_Regression", "-", symbol, "-", timeframe)
  m_path <- file.path(path_model, m_name)
  ModelR <- h2o::h2o.loadModel(path = m_path)
  recent_ML <- h2o::as.h2o(x = x1, destination_frame = "recent_ML")
  # wrap the prediction directly instead of relying on an unattached magrittr pipe
  result_R <- as.data.frame(h2o::h2o.predict(ModelR, recent_ML))
  rownames(result_R) <- symbol
  names(result_R) <- symbol
  file_string <- paste0("AI_M", timeframe, "_Change", symbol, ".csv")
  readr::write_csv(result_R, file.path(path_sbxm, file_string))
  readr::write_csv(result_R, file.path(path_sbxs, file_string))
}
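# Usage sketch (all paths hypothetical; requires a live h2o cluster plus the
# CSV and model artifacts written by the companion training step):
#   h2o::h2o.init()
#   aml_score_data(symbol = "EURUSD", timeframe = 60,
#                  path_model = "models", path_data = "data",
#                  path_sbxm = "sandbox_m", path_sbxs = "sandbox_s")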
group_2d_grid <- function(zargs, glabs = NULL, sep = "\n", loc = c(0.5, 0.5), draw = FALSE, ...) { check_zargs(zargs, "turns", "vars", "num", "ispace") turns <- zargs$turns vars <- zargs$vars num <- zargs$num ii <- range(vars[num,]) ii <- if(turns[num-1] == "u" || turns[num] == "u") rev(ii) else ii if(is.null(glabs)) { glabs <- extract_2d(zargs)$glabs } else { len.groups <- length(unlist(zargs$x, recursive = FALSE)) if(length(glabs) != len.groups) stop("length(glabs) has to equal the number ",len.groups," of variables in all groups together; consider rep()") } labs <- paste0(glabs[ii], collapse = sep) vp <- vport(zargs$ispace) res <- textGrob(label = labs, x = loc[1], y = loc[2], default.units = "npc", name = "group_2d", gp = gpar(...), vp = vp) if(draw) grid.draw(res) invisible(res) } points_2d_grid <- function(zargs, type = c("p", "l", "o"), pch = NULL, size = 0.02, box = FALSE, box.width = 1, box.height = 1, group... = list(cex = 0.66), draw = FALSE, ...) { r <- extract_2d(zargs) xlim <- r$xlim ylim <- r$ylim x <- as.matrix(r$x) y <- as.matrix(r$y) same.group <- r$same.group check_zargs(zargs, "ispace") res <- if(same.group) { vp <- vport(zargs$ispace, xlim = xlim, ylim = ylim) if(box) gBox <- rectGrob(x = 0.5, y = 0.5, width = box.width, height = box.height, just = "centre", default.units = "npc", name = "box_2d", gp = gpar(...), vp = vp) type <- match.arg(type) switch(type, "p" = { if(is.null(pch)) pch <- 21 gPoints <- pointsGrob(x = x, y = y, pch = pch, size = unit(size, units = "npc"), default.units = "native", name = "points_2d", gp = gpar(...), vp = vp) if(box) { gTree(children = gList(gBox, gPoints)) } else { gTree(children = gList(gPoints)) } }, "l" = { gLines <- linesGrob(x = x, y = y, default.units = "native", name = "lines_2d", gp = gpar(...), vp = vp) if(box) { gTree(children = gList(gBox, gLines)) } else { gTree(children = gList(gLines)) } }, "o" = { if(is.null(pch)) pch <- 20 gLines <- linesGrob(x = x, y = y, default.units = "native", name = "lines_2d", gp = gpar(...), vp = vp) gPoints <- pointsGrob(x = x, y = y, pch = pch, size = unit(size, units = "npc"), default.units = "native", name = "points_2d", gp = gpar(...), vp = vp) if(box) { gTree(children = gList(gBox, gLines, gPoints)) } else { gTree(children = gList(gLines, gPoints)) } }, stop("Wrong 'type'")) } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } qq_2d_grid <- function(zargs, do.line = TRUE, lines... = NULL, pch = NULL, size = 0.02, box = FALSE, box.width = 1, box.height = 1, group... = list(cex = 0.66), draw = FALSE, ...) 
{ r <- extract_2d(zargs) xlim <- r$xlim ylim <- r$ylim x <- r$x y <- r$y same.group <- r$same.group check_zargs(zargs, "ispace") res <- if(same.group) { vp <- vport(zargs$ispace, xlim = xlim, ylim = ylim) sx <- sort(x) sy <- sort(y) lenx <- length(sx) leny <- length(sy) if (leny < lenx) sx <- approx(1L:lenx, sx, n = leny)$y if (leny > lenx) sy <- approx(1L:leny, sy, n = lenx)$y if(is.null(pch)) pch <- 21 gPoints <- pointsGrob(x = sx, y = sy, pch = pch, size = unit(size, units = "npc"), default.units = "native", name = "points_2d", gp = gpar(...), vp = vp) groblist <- list(gPoints) if(do.line) { qx <- quantile(x, probs = c(0.25, 0.75), na.rm = TRUE, names = FALSE) qy <- quantile(y, probs = c(0.25, 0.75), na.rm = TRUE, names = FALSE) slope <- diff(qy) / diff(qx) intercept <- qy[1] - qx[1] * slope xvals <- seq(xlim[1], xlim[2], length.out = 1024) yvals <- slope * xvals + intercept ok <- (xlim[1] <= xvals) & (xvals <= xlim[2]) & (ylim[1] <= yvals) & (yvals <= ylim[2]) vals <- cbind(xvals, yvals)[ok, ] x0.x1 <- c(vals[1,1], vals[nrow(vals),1]) y0.y1 <- c(vals[1,2], vals[nrow(vals),2]) gLines <- linesGrob(x = x0.x1, y = y0.y1, default.units = "native", name = "lines_2d", gp = gpar(...), vp = vp) groblist <- c(list(gLines), groblist) } if(box) { gBox <- rectGrob(x = 0.5, y = 0.5, width = box.width, height = box.height, just = "centre", default.units = "npc", name = "box_2d", gp = gpar(...), vp = vp) groblist <- c(list(gBox), groblist) } gTree(children = do.call(gList, groblist)) } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } density_2d_grid <- function(zargs, ngrids = 25, ccol = NULL, clwd = 1, clty = 1, box = FALSE, box.width = 1, box.height = 1, group... = list(cex = 0.66), draw = FALSE, ...) { r <- extract_2d(zargs) xlim <- r$xlim ylim <- r$ylim x <- r$x y <- r$y same.group <- r$same.group check_zargs(zargs, "ispace") res <- if(same.group) { data <- na.omit(data.frame(x, y)) colnames(data) <- c("x", "y") dens <- kde2d(data$x, data$y, n = ngrids, lims = c(xlim, ylim)) contours <- contourLines(dens$x, dens$y, dens$z) levels <- sapply(contours, function(contour) contour$level) nLevels <- length(levels) uniqueLevels <- unique(levels) nuLevels <- length(uniqueLevels) if(is.null(ccol)) { basecol <- c("grey80", "grey0") palette <- colorRampPalette(basecol, space = "Lab") ccol <- palette(nuLevels) } ccol <- rep_len(ccol, nuLevels) clwd <- rep_len(clwd, nuLevels) clty <- rep_len(clty, nuLevels) ccol. <- numeric(nLevels) clwd. <- numeric(nLevels) clty. <- numeric(nLevels) for (i in 1:nuLevels) { idx <- (1:nLevels)[levels == uniqueLevels[i]] ccol.[idx] <- ccol[i] clwd.[idx] <- clwd[i] clty.[idx] <- clty[i] } vp <- vport(zargs$ispace, xlim = xlim, ylim = ylim, x = x, y = y) if(box) gBox <- rectGrob(x = 0.5, y = 0.5, width = box.width, height = box.height, just = "centre", default.units = "npc", name = "box_2d", gp = gpar(...), vp = vp) contourGrobs <- lapply(1:length(contours), function(i) { contour <- contours[[i]] linesGrob(x = contour$x, y = contour$y, gp = gpar(col = ccol.[i], lwd = clwd.[i], lty = clty.[i], ...), default.units = "native", name = paste0("contour_",i), vp = vp) }) if(box) { gTree(children = do.call(gList, args = c(contourGrobs, list(gBox)))) } else { gTree(children = do.call(gList, args = contourGrobs)) } } else { args <- c(list(zargs = zargs), group...) 
do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } axes_2d_grid <- function(zargs, angle = 30, length = unit(0.05, "npc"), type = "open", eps = 0.02, group... = list(cex = 0.66), draw = FALSE, ...) { r <- extract_2d(zargs) xlim <- r$xlim ylim <- r$ylim x <- r$x y <- r$y same.group <- r$same.group check_zargs(zargs, "ispace") res <- if(same.group) { vp <- vport(zargs$ispace, xlim = xlim, ylim = ylim, x = x, y = y) x.grob <- linesGrob(x = unit(c(-eps, 1+eps), "npc"), y = unit(c(-eps, -eps), "npc"), arrow = arrow(angle = angle, length = length, ends = "last", type = type), name = "x_axis_2d", gp = gpar(...), vp = vp) y.grob <- linesGrob(x = unit(c(-eps, -eps), "npc"), y = unit(c(-eps, 1+eps), "npc"), arrow = arrow(angle = angle, length = length, ends = "last", type = type), name = "y_axis_2d", gp = gpar(...), vp = vp) gTree(children = gList(x.grob, y.grob)) } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } arrow_2d_grid <- function(zargs, loc = c(0.5, 0.5), angle = 60, length = 0.2, group... = list(cex = 0.66), draw = FALSE, ...) { r <- extract_2d(zargs) same.group <- r$same.group check_zargs(zargs, "num", "turns", "ispace") turn.out <- zargs$turns[zargs$num] res <- if(same.group) { vp <- vport(zargs$ispace) arrow <- zenarrow(turn.out, angle = angle, length = length, coord.scale = 1) arr <- loc + arrow linesGrob(x = arr[1,], y = arr[2,], default.units = "npc", name = "arrow_2d", gp = gpar(...), vp = vp) } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } rect_2d_grid <- function(zargs, loc = c(0.5, 0.5), width = 1, height = 1, group... = list(cex = 0.66), draw = FALSE, ...) { r <- extract_2d(zargs) same.group <- r$same.group check_zargs(zargs, "ispace") res <- if(same.group) { vp <- vport(zargs$ispace) rectGrob(x = loc[1], y = loc[2], width = width, height = height, default.units = "npc", name = "rect_2d", gp = gpar(...), vp = vp) } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } label_2d_grid <- function(zargs, loc = c(0.98, 0.05), label = NULL, cex = 0.66, just = c("right", "bottom"), rot = 0, box = FALSE, box.width = 1, box.height = 1, group... = list(cex = cex), draw = FALSE, ...) { r <- extract_2d(zargs) same.group <- r$same.group vlabs <- r$vlabs check_zargs(zargs, "vars", "num", "ispace") vars <- zargs$vars num <- zargs$num res <- if(same.group) { xlab <- vlabs[vars[num, 1]] ylab <- vlabs[vars[num, 2]] if(is.null(label)) label <- paste0("(",xlab,", ",ylab,")") vp <- vport(zargs$ispace) gText <- textGrob(label = label, x = loc[1], y = loc[2], just = just, rot = rot, default.units = "npc", name = "label_2d", gp = gpar(cex = cex, ...), vp = vp) if(box) { gBox <- rectGrob(x = 0.5, y = 0.5, width = box.width, height = box.height, default.units = "npc", name = "box_2d", gp = gpar(...), vp = vp) gTree(children = gList(gBox, gText)) } else { gTree(children = gList(gText)) } } else { args <- c(list(zargs = zargs), group...) do.call(group_2d_grid, args) } if(draw) grid.draw(res) invisible(res) } layout_2d_grid <- function(zargs, ...) label_2d_grid(zargs, loc = c(0.5, 0.5), just = "centre", box = TRUE, group... = list(...), ...)
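# Usage sketch (hedged): in zenplots these *_2d_grid() functions are meant to
# be handed to zenplot() as its 2d plotting callback, which supplies `zargs`:
#   zenplot(iris[, 1:4], plot2d = points_2d_grid, pkg = "grid")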
recommenderRegistry$seal_entries()
graph_from_data_frame <- function(d, directed=TRUE, vertices=NULL) { d <- as.data.frame(d) if (!is.null(vertices)) { vertices <- as.data.frame(vertices) } if (ncol(d) < 2) { stop("the data frame should contain at least two columns") } if (any(is.na(d[,1:2]))) { warning("In `d' `NA' elements were replaced with string \"NA\"") d[,1:2][ is.na(d[,1:2]) ] <- 'NA' } if (!is.null(vertices) && any(is.na(vertices[,1]))) { warning("In `vertices[,1]' `NA' elements were replaced with string \"NA\"") vertices[,1][is.na(vertices[,1])] <- 'NA' } names <- unique( c(as.character(d[,1]), as.character(d[,2])) ) if (!is.null(vertices)) { names2 <- names vertices <- as.data.frame(vertices) if (ncol(vertices) < 1) { stop("Vertex data frame contains no rows") } names <- as.character(vertices[,1]) if (any(duplicated(names))) { stop("Duplicate vertex names") } if (any(! names2 %in% names)) { stop("Some vertex names in edge list are not listed in vertex data frame") } } g <- make_empty_graph(n=0, directed=directed) attrs <- list(name=names) if (!is.null(vertices)) { if (ncol(vertices) > 1) { for (i in 2:ncol(vertices)) { newval <- vertices[,i] if (inherits(newval, "factor")) { newval <- as.character(newval) } attrs[[ names(vertices)[i] ]] <- newval } } } g <- add_vertices(g, length(names), attr=attrs) from <- as.character(d[,1]) to <- as.character(d[,2]) edges <- rbind(match(from, names), match(to,names)) attrs <- list() if (ncol(d) > 2) { for (i in 3:ncol(d)) { newval <- d[,i] if (inherits(newval, "factor")) { newval <- as.character(newval) } attrs[[ names(d)[i] ]] <- newval } } g <- add_edges(g, edges, attr=attrs) g } from_data_frame <- function(...) constructor_spec(graph_from_data_frame, ...) graph_from_edgelist <- function(el, directed=TRUE) { if (!is.matrix(el) || ncol(el) != 2) { stop("graph_from_edgelist expects a matrix with two columns") } if (nrow(el) == 0) { res <- make_empty_graph(directed=directed) } else { if (is.character(el)) { names <- unique(as.character(t(el))) ids <- seq(names) names(ids) <- names res <- graph( unname(ids[t(el)]), directed=directed) rm(ids) V(res)$name <- names } else { res <- graph( t(el), directed=directed ) } } res } from_edgelist <- function(...) constructor_spec(graph_from_edgelist, ...)
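# Usage sketch: a character edge list produces named vertices, while a numeric
# matrix is taken as vertex ids directly:
#   el <- cbind(c("a", "b"), c("b", "c"))
#   g <- graph_from_edgelist(el)   # directed edges a->b and b->c
#   V(g)$name                      # "a" "b" "c"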
tidypredict_fit <- function(model) { UseMethod("tidypredict_fit") } tidypredict_fit.pm_regression <- function(model) { build_fit_formula(model) } tidypredict_fit.pm_tree <- function(model) { build_fit_formula_rf(model) } tidypredict_fit.pm_xgb <- function(model) { build_fit_formula_xgb(model) }
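# Usage sketch (hedged): the pm_* classes are assumed to be attached by the
# package's parse_model() step, so the generic dispatches to the matching
# formula builder:
#   pm <- parse_model(lm(mpg ~ wt, data = mtcars))   # a pm_regression spec
#   tidypredict_fit(pm)   # an R expression reproducing the linear predictor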
# Helper to avoid repeating the same plotting block for each output flavour;
# assumes `m` (the benchmark data), `graphit()` and `testtype` are defined
# earlier in the session.
plot_all <- function(dir, poly, hist) {
  plot_one <- function(file_suffix, ...) {
    file <- paste0(dir, paste(testtype, file_suffix, sep = "_"), ".png")
    cat("file=", file, "\n")
    png(filename = file)
    graphit(m, ..., i_hist = hist, i_poly = poly)
    dev.off()
  }
  testname <- "write"
  for (users in c(1, 4, 16)) {
    plot_one(paste(testname, "users", users, sep = "_"),
             i_name = testname, i_users = users,
             i_title = paste(testtype, testname, "users=", users))
  }
  for (bs in c("1K", "8K", "128K")) {
    plot_one(paste(testname, "bs", bs, sep = "_"),
             i_name = testname, i_bs = bs,
             i_title = paste(testtype, testname, "bs=", bs))
  }
  testname <- "randread"
  plot_one(paste(testname, "bs_8K", sep = "_"),
           i_name = testname, i_bs = "8K",
           i_title = paste(testtype, testname, "bs=8K"))
  testname <- "read"
  plot_one(paste(testname, "users_1", sep = "_"),
           i_name = testname, i_users = 1,
           i_title = paste(testtype, testname, "users=1"))
  plot_one(paste(testname, "bs_1M", sep = "_"),
           i_name = testname, i_bs = "1M",
           i_title = paste(testtype, testname, "bs=1M"))
}
plot_all(dir = "C:\\Temp\\", poly = 1, hist = 0)
plot_all(dir = "C:\\Temp\\hist_", poly = 0, hist = 1)
require(arm)
require(car)
require(corrplot)   # needed for corrplot() below
require(downloader)
require(foreign)
require(ggplot2)
require(GGally)

# Download the Quality of Government codebook and cross-sectional dataset
if (!file.exists(file <- "data/qog_codebook.pdf"))
  download("http://www.qogdata.pol.gu.se/codebook/codebook_standard_20110406.pdf",
           file, mode = "wb")
if (!file.exists(file <- "data/qog_cs.dta")) {
  download("http://www.qogdata.pol.gu.se/data/qog_std_cs.dta", file, mode = "wb")
}
qog.d <- read.dta(file)
names(qog.d)[1:50]

# Correlation matrix of the UNDP indicators, plus several pairs plots
qog.x <- qog.d[, grepl("undp_", names(qog.d))]
qog.x <- na.omit(qog.x)
cor(qog.x)
corrplot(cor(qog.x))   # corrplot() expects a correlation matrix, not raw data
pairs(qog.x)
scatterplotMatrix(qog.x, spread = FALSE, lty.smooth = 2)
ggpairs(qog.x)

qog <- with(qog.d, data.frame(cname = cname,
                              ccode = ccodealp,
                              fertility = wdi_fr,
                              gdpcapita = wdi_gdpc,
                              education = bl_asyf25,
                              democracy = gol_polreg))
qog <- na.omit(qog)
qplot(data = qog, y = fertility, x = education, label = ccode, geom = "text")

# Simple linear model of fertility on education
m1 <- with(qog, lm(fertility ~ education))
summary(m1)
names(m1)
display(m1, digits = 1)
display(m1, detail = TRUE)
coef(m1)
coef(m1)[1]
coef(m1)[2]

# Scatterplot with the fitted line, fitted values, and residual segments
g <- qplot(data = qog, y = fertility, x = education, geom = "point")
g
g <- g + geom_abline(intercept = coef(m1)[1], slope = coef(m1)[2])
g
qog$yhat <- fitted.values(m1)
g <- g + geom_point(y = qog$yhat, color = "blue")
g
str(within(qog, rd <- ifelse(yhat > fertility, "overpredicted", "underpredicted")))
g <- g + geom_segment(y = fitted.values(m1), yend = qog$fertility,
                      x = qog$education, xend = qog$education, color = "blue")
g
qplot(y = residuals(m1), x = fitted.values(m1), geom = "point") +
  geom_hline(yintercept = 0) + geom_smooth()
qplot(x = residuals(m1), geom = "density")

# Add log GDP per capita and the democracy indicator
m2 <- with(qog, lm(fertility ~ education + log(gdpcapita) + democracy))
display(m2, digits = 1)
display(m2, detail = TRUE)
library("matrixStats") fcns <- list( weightedVar = c(rowWeightedVars, colWeightedVars), weightedSd = c(rowWeightedSds, colWeightedSds), weightedMad = c(rowWeightedMads, colWeightedMads) ) source("utils/validateIndicesFramework.R") dimnames <- list(letters[1:6], LETTERS[1:6]) for (fcn in names(fcns)) { cat(sprintf("subsetted tests on matrix %s()...\n", fcn)) row_fcn <- fcns[[fcn]][[1]] col_fcn <- fcns[[fcn]][[2]] for (mode in c("numeric", "integer")) { x <- matrix(runif(6 * 6, min = -6, max = 6), nrow = 6, ncol = 6) w <- runif(6, min = 0, max = 6) storage.mode(x) <- mode storage.mode(w) <- mode if (mode == "numeric") w[1] <- Inf for (setDimnames in c(TRUE, FALSE)) { if (setDimnames) dimnames(x) <- dimnames else dimnames(x) <- NULL count <- 0L for (rows in index_cases) { for (cols in index_cases) { count <- count + 1L na.rm <- c(TRUE, FALSE)[count %% 2 + 1] useNames <- c(NA, TRUE, FALSE)[count %% 3 + 1] validateIndicesTestMatrix_w(x, w, rows, cols, ftest = row_fcn, fsure = row_fcn, na.rm = na.rm, useNames = useNames) validateIndicesTestMatrix_w(x, w, rows, cols, fcoltest = col_fcn, fsure = row_fcn, na.rm = na.rm, useNames = useNames) } } } } cat(sprintf("%s()...DONE\n", fcn)) }
library(SimRVPedigree)
context("get_nextEvent")

data("AgeSpecific_Hazards")

test_that("If current age > max birth age and disease status = 1, next event is death", {
  expect_equal(get_nextEvent(current_age = 46, disease_status = 1,
                             RV_status = 1,
                             hazard_rates = hazard(AgeSpecific_Hazards),
                             GRR = 5, carrier_prob = 0.02,
                             lambda_birth = 0.05,
                             birth_range = c(18, 45))[[2]],
               "Death")
})
[ { "title": "Differential Privacy Mini-series from Win-Vector", "href": "http://blog.revolutionanalytics.com/2015/11/differential-privacy-mini-series-from-win-vector.html" }, { "title": "Soil Series Query for SoilWeb", "href": "https://casoilresource.lawr.ucdavis.edu/" }, { "title": "Update: Extending Commodity time series", "href": "https://systematicinvestor.wordpress.com/2013/07/04/update-extending-commodity-time-series/" }, { "title": "What is R, really?", "href": "https://web.archive.org/web/http://pineda-krch.com/2011/03/31/what-is-r-really/" }, { "title": "How to ask for R help", "href": "http://blog.revolutionanalytics.com/2014/01/how-to-ask-for-r-help.html" }, { "title": "Age of U.S. President Candidates", "href": "http://factbased.blogspot.com/2016/01/age-of-us-president-candidates.html" }, { "title": "Fixing R’s design flaws in a new version of pqR", "href": "https://radfordneal.wordpress.com/2016/06/25/fixing-rs-design-flaws-in-a-new-version-of-pqr/" }, { "title": "Data Driven Journalism", "href": "http://blog.revolutionanalytics.com/2010/12/data-journalism.html" }, { "title": "Take the ggplot2 user survey", "href": "http://blog.revolutionanalytics.com/2011/02/take-the-ggplot2-user-survey.html" }, { "title": "New R User Group in Kansas City", "href": "http://blog.revolutionanalytics.com/2011/01/new-r-user-group-in-kansas-city.html" }, { "title": "Fast Company explains why R is good for business", "href": "http://blog.revolutionanalytics.com/2014/05/r-at-facebook-datasong-etc.html" }, { "title": "Monitoring Progress Inside a Foreach Loop", "href": "http://viksalgorithms.blogspot.com/2012/02/monitoring-progress-inside-foreach-loop.html" }, { "title": "OpenCPU release 1.4.5: configurable webhooks", "href": "https://www.opencpu.org/posts/opencpu-release-1-4-5/" }, { "title": "Postdoc position in computational Bayesian statistics", "href": "https://xianblog.wordpress.com/2010/10/14/postdoc-position-in-computational-bayesian-statistics/" }, { "title": "In case you missed it: May 2014 Roundup", "href": "http://blog.revolutionanalytics.com/2014/06/in-case-you-missed-it-may-2014-roundup.html" }, { "title": "The joy and martyrdom of trying to be a Bayesian", "href": "http://www.petrkeil.com/?p=1515" }, { "title": "Tips and Tricks for Getting Started with R", "href": "http://jmichaelrosenberg.com/" }, { "title": "R Resources", "href": "https://web.archive.org/web/http://www.stattler.com/article/r-resources" }, { "title": "Analysis of Cable Morning Trade Strategy", "href": "http://www.exegetic.biz/blog/2013/05/analysis-of-cable-morning-trade-strategy/" }, { "title": "Choropleths Made Easy!", "href": "http://rstats.posterous.com/choropleths-made-easy" }, { "title": "Functions from functions", "href": "http://www.stat.tamu.edu/site-directory/?q=node%2F51" }, { "title": "Even the tiniest error messages can indicate an invalid statistical analysis", "href": "http://realizationsinbiostatistics.blogspot.com/2015/11/even-tiniest-error-messages-can.html" }, { "title": "“The next big thing”, R, and Statistics in the cloud", "href": "https://www.r-statistics.com/2010/04/r-the-next-big-thing-and-statistics-in-the-cloud/" }, { "title": "R Code Example for Neural Networks", "href": "http://econometricsense.blogspot.com/2010/12/r-code-example-for-neural-networks.html" }, { "title": "Bioenergetics in R", "href": "https://web.archive.org/web/https://fishr.wordpress.com/2015/07/07/bioenergetics-in-r/" }, { "title": "Using R.Net in an Excel Add in", "href": 
"https://web.archive.org/web/http://sharpstatistics.co.uk/csharp/using-r-net-in-an-excel-add-in/" }, { "title": "Berlin Marathon 2014 Participants", "href": "http://blog.tafkas.net/2013/11/12/berlin-marathon-2014-participants/" }, { "title": "Rentrez 1_0 released", "href": "http://ropensci.org/blog/2015/09/24/rentrez-1_0-release/" }, { "title": "Going Bananas "href": "https://aschinchon.wordpress.com/2015/08/03/going-bananas-2-a-needle-in-a-haystack/" }, { "title": "Presentations of the eighth Torino R net meeting – 17 Sep 2014", "href": "http://torinor.net/2015/05/11/presentations-of-the-eighth-torino-r-net-meeting-17-sep-2014/" }, { "title": "How to calculate with dates and hours in R", "href": "https://danganothererror.wordpress.com/2010/07/24/how-to-calculate-with-dates-and-hours-in-r/" }, { "title": "Did she know we were writing a book?", "href": "http://www.win-vector.com/blog/2016/09/did-she-know-we-were-writing-a-book/" }, { "title": "Getting help with R", "href": "http://www.gettinggeneticsdone.com/2009/07/getting-help-with-r.html" }, { "title": "Matt Dowle’s data.table Talk from useR! 2014", "href": "http://datascience.la/matt-dowles-data-table-talk-from-user-2014/" }, { "title": "First Milano R net meeting details", "href": "https://web.archive.org/web/http://www.milanor.net/blog/?p=29" }, { "title": "Forbes: Top 20 influencers in Big Data", "href": "http://blog.revolutionanalytics.com/2012/02/forbes-top-20-influencers-in-big-data.html" }, { "title": "R Courses at Newcastle", "href": "https://csgillespie.wordpress.com/2016/04/22/r-courses-at-newcastle/" }, { "title": "What is probabilistic truth?", "href": "https://bayesianbiologist.com/2013/05/18/what-is-probabilistic-truth/" }, { "title": "NFL Code on Github", "href": "https://web.archive.org/web/http://pirategrunt.com/2013/01/02/nfl-code-on-github/" }, { "title": "Custom Summary Stats as Dataframe or List", "href": "http://thebiobucket.blogspot.com/2012/03/custom-summary-stats-as-dataframe-or.html" }, { "title": "Using the rasterVis package for raster plotting (in R)", "href": "https://biologyforfun.wordpress.com/2013/05/31/using-the-rastervis-package-for-raster-plotting-in-r/" }, { "title": "Poor man’s integration – a simulated visualization approach", "href": "https://beckmw.wordpress.com/2013/04/29/poor-mans-integration-a-simulated-visualization-approach/" }, { "title": "y-aware scaling in context", "href": "http://www.win-vector.com/blog/2016/06/y-aware-scaling-in-context/" }, { "title": "x[[c(5,3)]]", "href": "https://kbroman.wordpress.com/2013/04/02/xc53/" }, { "title": "Is Hillary Clinton a Progressive? An Investigation Using Statistical Methods", "href": "https://ntguardian.wordpress.com/2016/10/25/hillary-clinton-progressive-statistics/" }, { "title": "GSoC 2015: Tracking changes in performance metrics of R Code", "href": "https://techandmortals.wordpress.com/2015/05/19/gsoc-2015-tracking-changes-in-performance-metrics-of-r-code/" }, { "title": "How to make beautiful bubble charts with R", "href": "http://blog.revolutionanalytics.com/2010/11/how-to-make-beautiful-bubble-charts-with-r.html" }, { "title": "A Tale of Two Frontiers", "href": "http://timelyportfolio.blogspot.com/2011/12/tale-of-two-frontiers.html" }, { "title": "On Unpublished Software", "href": "https://confounding.net/2012/02/09/on-unpublished-software/" }, { "title": "R Web Application – “Hello World” using RApache (~7min video tutorial)", "href": "https://www.r-statistics.com/2010/02/r-web-application-hello-world-using-rapache-7min-video-tutorial/" } ]
funs <- function(..., .args = list()) {
  lifecycle::deprecate_warn("0.8.0", "funs()", details = paste_line(
    "Please use a list of either functions or lambdas: ",
    "",
    "  list(mean = mean, median = median)",
    "",
    "  tibble::lst(mean, median)",
    "",
    "  list(~ mean(., trim = .2), ~ median(., na.rm = TRUE))"
  ))

  dots <- enquos(...)
  default_env <- caller_env()
  funs <- map(dots, function(quo) as_fun(quo, default_env, .args))
  new_funs(funs)
}

new_funs <- function(funs) {
  attr(funs, "have_name") <- any(names2(funs) != "")

  # Auto-name the functions from their call expressions
  temp <- map(funs, function(fn) node_car(quo_get_expr(fn)))
  temp <- exprs_auto_name(temp)
  names(funs) <- names(temp)

  class(funs) <- "fun_list"
  funs
}
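# Hedged usage sketch (not part of the source above): calling funs() now
# warns and points to plain lists of functions or lambdas; assumes
# dplyr >= 0.8.0 is attached.
library(dplyr)
# Deprecated style that triggers the warning defined above:
# summarise_all(mtcars, funs(mean, median))
# Recommended replacement:
summarise_all(mtcars, list(mean = mean, median = median))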
`equateSGP` <- function(tmp.data, state, current.year, equating.method) {

  VALID_CASE <- YEAR <- CONTENT_AREA <- GRADE <- V1 <- V2 <- NULL
  tmp.list <- equate.list <- list()

  # Rounding precision for the equating interval (defaults to whole scores)
  equate.interval.digits <- SGP::SGPstateData[[state]][["Assessment_Program_Information"]][["Assessment_Transition"]][["Equate_Interval_Digits"]]
  if (is.null(equate.interval.digits)) equate.interval.digits <- 0

  # The assessment-transition year in SGPstateData supersedes the argument
  current.year <- SGP::SGPstateData[[state]][["Assessment_Program_Information"]][["Assessment_Transition"]][["Year"]]
  prior.year <- tail(head(sort(unique(tmp.data[['YEAR']])), -1L), 1L)

  current.year.data <- tmp.data[VALID_CASE=="VALID_CASE" & YEAR==current.year]
  prior.year.data <- tmp.data[VALID_CASE=="VALID_CASE" & YEAR==prior.year]
  setkey(current.year.data, CONTENT_AREA, GRADE)
  setkey(prior.year.data, CONTENT_AREA, GRADE)

  # CONTENT_AREA/GRADE combinations present in both years
  current.year.uniques <- unique(current.year.data[VALID_CASE=="VALID_CASE"], by=key(current.year.data))[, c("CONTENT_AREA", "GRADE"), with=FALSE]
  prior.year.uniques <- unique(prior.year.data[VALID_CASE=="VALID_CASE"], by=key(prior.year.data))[, c("CONTENT_AREA", "GRADE"), with=FALSE]
  content_areas.for.equate <- intersect(unique(current.year.uniques[['CONTENT_AREA']]), unique(prior.year.uniques[['CONTENT_AREA']]))
  unique.content.by.grade <- lapply(content_areas.for.equate, function(x) intersect(current.year.uniques[x]$GRADE, prior.year.uniques[x]$GRADE))
  names(unique.content.by.grade) <- content_areas.for.equate

  get.my.knots.boundaries.path <- function(content_area, year) {
    tmp.path.knots.boundaries <- paste(content_area, year, sep=".")
    tmp.knots.boundaries.names <- names(SGP::SGPstateData[[state]][["Achievement"]][["Knots_Boundaries"]])
    tmp.knots.boundaries.years <- sapply(strsplit(tmp.knots.boundaries.names, "[.]"), function(x) x[2])
    if (any(!is.na(tmp.knots.boundaries.years))) {
      if (year %in% tmp.knots.boundaries.years) {
        return(paste0("[['", content_area, ".", year, "']]"))
      } else {
        if (year==sort(c(year, tmp.knots.boundaries.years))[1L]) {
          return(paste0("[['", content_area, "']]"))
        } else {
          return(paste0("[['", content_area, ".", rev(sort(tmp.knots.boundaries.years))[1L], "']]"))
        }
      }
    } else {
      return(paste0("[['", tmp.path.knots.boundaries, "']][['", content_area, "']]"))
    }
  }

  equateSGP_INTERNAL <- function(prior.year.data, current.year.data) {
    current.year.data.range <- round(range(current.year.data[['SCALE_SCORE']], na.rm=TRUE), digits=equate.interval.digits)
    prior.year.data.range <- round(range(prior.year.data[['SCALE_SCORE']], na.rm=TRUE), digits=equate.interval.digits)

    # Frequency table of rounded scores on an evenly spaced score scale
    get.freqtab <- function(data, data.range) {
      freqtab(as.numeric(as.character(round(data[['SCALE_SCORE']], digits=equate.interval.digits))),
              scales=as.numeric(as.character(round(seq(data.range[1L], data.range[2L], by=0.1^equate.interval.digits), digits=equate.interval.digits))))
    }

    # For each requested method, equate in both directions
    for (equate.type.iter in equating.method) {
      equate.list[[toupper(equate.type.iter)]] <- list(
        NEW_TO_OLD=equate(get.freqtab(current.year.data, current.year.data.range),
                          get.freqtab(prior.year.data, prior.year.data.range),
                          type=equate.type.iter),
        OLD_TO_NEW=equate(get.freqtab(prior.year.data, prior.year.data.range),
                          get.freqtab(current.year.data, current.year.data.range),
                          type=equate.type.iter))
    }
    return(equate.list)
  }

  for (content_area.iter in names(unique.content.by.grade)) {
    for (grade.iter in unique.content.by.grade[[content_area.iter]]) {
      tmp.name <- paste(content_area.iter, current.year, sep=".")
      tmp.grade <- paste("GRADE", grade.iter, sep="_")
      tmp.list[[tmp.name]][[tmp.grade]] <- equateSGP_INTERNAL(
        prior.year.data[list(content_area.iter, grade.iter)],
        current.year.data[list(content_area.iter, grade.iter)])

      # Turn each concordance table into interpolation functions, averaging
      # duplicated score points so approxfun() receives unique x values
      for (equate.type.iter in equating.method) {
        tmp.equate <- tmp.list[[tmp.name]][[tmp.grade]][[toupper(equate.type.iter)]]
        equate.dt <- data.table(V1=tmp.equate[['NEW_TO_OLD']][['concordance']][['scale']],
                                V2=tmp.equate[['NEW_TO_OLD']][['concordance']][['yx']])
        equate.dt.newtoold <- equate.dt[, list(V2=mean(V2, na.rm=TRUE)), by=V1]
        tmp.equate[['NEW_TO_OLD']]$interpolated_function <-
          approxfun(equate.dt.newtoold[['V1']], equate.dt.newtoold[['V2']], rule=2)
        equate.dt.oldtonew <- equate.dt[, list(V1=mean(V1, na.rm=TRUE)), by=V2]
        tmp.equate[['OLD_TO_NEW']]$interpolated_function <-
          approxfun(equate.dt.oldtonew[['V2']], equate.dt.oldtonew[['V1']], rule=2)
        tmp.list[[tmp.name]][[tmp.grade]][[toupper(equate.type.iter)]] <- tmp.equate
      }
    }
  }
  return(tmp.list)
}
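# Hedged sketch (not from the SGP package) of the concordance-interpolation
# step used above, on synthetic scores; assumes the equate and data.table
# packages are installed, and all values are made up.
library(equate)
library(data.table)
set.seed(1)
old.scores <- freqtab(round(rnorm(500, mean = 500, sd = 25)), scales = 400:600)
new.scores <- freqtab(round(rnorm(500, mean = 520, sd = 25)), scales = 400:600)
eq <- equate(new.scores, old.scores, type = "equipercentile")
conc <- data.table(V1 = eq$concordance$scale, V2 = eq$concordance$yx)
conc <- conc[, list(V2 = mean(V2, na.rm = TRUE)), by = V1]  # collapse duplicates
new_to_old <- approxfun(conc$V1, conc$V2, rule = 2)         # NEW_TO_OLD mapping
new_to_old(c(480, 520, 560))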