rJython <- function( jython.jar = NULL, modules = NULL ){ stopifnot(require(rJava)) system.file. <- function(...) { s <- system.file(...) if (.Platform$OS.type == "windows") gsub("\\", "/", s, fixed = TRUE) else s } if( is.null( jython.jar ) ) jython.jar <- Sys.getenv("RJYTHON_JYTHON") if (is.null(jython.jar) || jython.jar == "") jython.jar <- system.file.("jython.jar", package = "rJython") tmp.dir.parm <- paste0( "-Djava.io.tmpdir=", tempdir() ) .jinit(jython.jar, parameters = tmp.dir.parm ) rJython <- .jnew("org.python.util.PythonInterpreter") rJython$exec( "import sys" ) if (is.character(modules)) modules <- as.list(modules) modules <- c( modules, list( system.file.( package = "rJython" ) ) ) modules <- lapply( modules, function( module ) paste( "sys.path.append(", module, ");", sep = '"' ) ) lapply( modules, rJython$exec ) rJython$exec( "import simplejson as json" ) rJython }
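# Minimal usage sketch for rJython() above; assumes the rJython package and its bundled jython.jar are available, so treat it as illustrative rather than tested.
# jy <- rJython()
# jy$exec("x = 1 + 1")   # run a Python statement inside the interpreter
# jy$get("x")            # fetch the Python object back as a Java reference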
simrec <- function(N, fu.min, fu.max, cens.prob = 0, dist.x = "binomial", par.x = 0, beta.x = 0, dist.z = "gamma", par.z = 0, dist.rec, par.rec, pfree = 0, dfree = 0) { if ((cens.prob > 0) & (fu.min != fu.max)) { warning( paste0( "The censoring scheme for parameters cens.prob greater than 0 and fu.min != fu.max is undefined. \n", "The package properly implements two censoring schemes depending on parameters cens.prob, fu.min, and fu.max: \n", "a) cens.prob>0 and fu.min=fu.max: follow-up ends at time fu.max with a probability of 1-cens.prob and follow-up ends uniformly distributed in [0, fu.max] with a probability of cens.prob. \n", "b) cens.prob=0 and fu.min<fu.max: follow-up ends uniformly distributed in [0,fu.max] for each subject." ) ) } ID <- c(1:N) if (cens.prob < 0 || cens.prob > 1) { stop("cens.prob must be a probability between 0 and 1") } if (fu.min > fu.max || fu.min < 0) { stop("fu.min must be a non-negative value smaller or equal fu.max") } fu <- rbinom(N, 1, cens.prob) nr.cens <- sum(fu) if (nr.cens == 0) { fu <- runif(N, min = fu.min, max = fu.max) } else { index.cens <- which(fu == 1) fu[-index.cens] <- runif((N - nr.cens), min = fu.min, max = fu.max) fu[index.cens] <- runif(nr.cens, min = 0, max = fu.max) } if (length(beta.x) != length(dist.x)) { stop("dimensions of beta.x and dist.x differ") } if (length(beta.x) != length(par.x)) { stop("dimensions of beta.x and par.x differ") } nr.cov <- length(beta.x) x <- matrix(0, N, nr.cov) for (i in 1:nr.cov) { dist.x[i] <- match.arg(dist.x[i], choices = c("binomial", "normal")) if (dist.x[i] == "binomial") { if (length(par.x[[i]]) != 1) { stop("par.x has wrong dimension") } if (par.x[[i]] < 0 || par.x[[i]] > 1) { stop("par.x must be a probability between 0 and 1 for the binomial distributed covariate") } x[, i] <- c(rbinom(N, 1, par.x[[i]])) } else { if (length(par.x[[i]]) != 2) { stop("par.x has wrong dimension") } mu.x <- par.x[[i]][1] sigma.x <- par.x[[i]][2] x[, i] <- c(rnorm(N, mean = mu.x, sd = sigma.x)) } } z <- rep(1, N) dist.z <- match.arg(dist.z, choices = c("gamma", "lognormal")) if (length(par.z) != 1) { stop("par.z has wrong dimension") } if (par.z < 0) { stop("par.z must be non-negative") } if (par.z != 0) { if (dist.z == "gamma") { aGamma <- 1 / par.z z <- rgamma(N, shape = aGamma, scale = 1 / aGamma) } else { mu.z <- log(1 / sqrt(par.z + 1)) sigma.z <- sqrt(log(par.z + 1)) z <- exp(rnorm(N, mean = mu.z, sd = sigma.z)) } } dist.rec <- match.arg(dist.rec, choices = c("weibull", "lognormal", "gompertz", "step")) if (dist.rec == "lognormal") { if (length(par.rec) != 2) { stop("par.rec has wrong dimension") } mu <- par.rec[1] sigma <- par.rec[2] if (any(beta.x != 0)) { warning("lognormal together with covariates specified: this does not define the usual lognormal model! 
see help for details") } } else if (dist.rec == "weibull") { if (length(par.rec) != 2) { stop("par.rec has wrong dimension") } lambda <- par.rec[1] nu <- par.rec[2] } else if (dist.rec == "gompertz") { if (length(par.rec) != 2) { stop("par.rec has wrong dimension") } lambdag <- par.rec[1] alpha <- par.rec[2] } else if (dist.rec == "step") { if (length(par.rec) != 3) { stop("par.rec has wrong dimensions") } fc <- par.rec[1] sc <- par.rec[2] jump <- par.rec[3] jumpinv <- jump * fc } if (length(pfree) != 1) { stop("pfree has wrong dimension") } if (length(dfree) != 1) { stop("dfree has wrong dimension") } if (pfree < 0 || pfree > 1) { stop("pfree must be a probability between 0 and 1") } U <- runif(N) Y <- (-1) * log(U) * exp((-1) * x %*% beta.x) * 1 / z if (dist.rec == "lognormal") { t <- exp(qnorm(1 - exp((-1) * Y)) * sigma + mu) } else if (dist.rec == "weibull") { t <- ((lambda)^(-1) * Y)^(1 / nu) } else if (dist.rec == "gompertz") { t <- (1 / alpha) * log((alpha / lambdag) * Y + 1) } else if (dist.rec == "step") { t <- rep(NA, N) indexTr1 <- which(Y <= jumpinv) if (length(indexTr1 > 0)) { t[indexTr1] <- Y[indexTr1] / fc } indexTr2 <- which(Y > jumpinv) if (length(indexTr2 > 0)) { t[indexTr2] <- (Y[indexTr2] - (fc - sc) * jump) / sc } } T <- matrix(t, N, 1) dirty <- rep(TRUE, N) T1 <- NULL while (any(dirty)) { pd <- rbinom(N, 1, pfree) U <- runif(N) Y <- (-1) * log(U) * exp((-1) * x %*% beta.x) * 1 / z t1 <- t + pd * dfree if (dist.rec == "lognormal") { t <- (t1 + exp(qnorm(1 - exp(log(1 - pnorm((log(t1) - mu) / sigma)) - Y)) * sigma + mu) - (t1)) } else if (dist.rec == "weibull") { t <- (t1 + ((Y + lambda * (t1)^(nu)) / lambda)^(1 / nu) - (t1)) } else if (dist.rec == "gompertz") { t <- (t1 + ((1 / alpha) * log((alpha / lambdag) * Y + exp(alpha * t1))) - (t1)) } else if (dist.rec == "step") { indexTr3 <- which((t1 <= jump) & (Y <= (jump - t1) * fc)) if (length(indexTr3 > 0)) { t[indexTr3] <- t1[indexTr3] + Y[indexTr3] / fc } indexTr4 <- which((t1 <= jump) & (Y > (jump - t1) * fc)) if (length(indexTr4 > 0)) { t[indexTr4] <- t1[indexTr4] + (Y[indexTr4] + (fc - sc) * (t1[indexTr4] - jump)) / sc } indexTr5 <- which(t1 > jump) if (length(indexTr5 > 0)) { t[indexTr5] <- t1[indexTr5] + Y[indexTr5] / sc } } T1 <- cbind(T1, ifelse(dirty, t1, NA)) dirty <- ifelse(dirty, (t(t) < fu) & (t(t1) < fu), dirty) if (!any(dirty)) break T <- cbind(T, ifelse(dirty, t, NA)) } start.t <- cbind(0, T1) start.t <- as.vector(t(start.t)) tab.start.t <- start.t[!is.na(start.t)] stop.t <- cbind(T, NA) d <- apply(!is.na(T), 1, sum) f <- d + 1 for (i in 1:N) { stop.t[i, f[i]] <- fu[i] } stop.t <- as.vector(t(stop.t)) tab.stop.t <- stop.t[!is.na(stop.t)] e <- NULL for (i in 1:N) { e <- cbind(e, t(rep(1, d[i])), 0) } tab.ID <- rep(ID, f) tab.X <- x[rep(1:nrow(x), f), ] tab.Z <- rep(z, f) tab.Fu <- rep(fu, f) w <- tab.start.t > tab.stop.t v <- rep(0, length(w)) for (i in 1:length(w)) { if (w[i]) { v[i - 1] <- 1 } } l <- tab.stop.t > tab.Fu for (i in 1:length(l)) { if (l[i]) { tab.stop.t[i] <- tab.Fu[i] e[i] <- 0 } } tab <- cbind(tab.ID, tab.X, tab.Z, tab.start.t, tab.stop.t, t(e), tab.Fu) for (i in 1:length(w)) { if (w[i]) { tab[i, ] <- rep(NA, nr.cov + 6) } } tab <- data.frame(id = tab[, 1], x = tab[, 2:(nr.cov + 1)], z = tab[, (nr.cov + 2)], start = tab[, (nr.cov + 3)], stop = tab[, (nr.cov + 4)], status = tab[, (nr.cov + 5)], fu = tab[, (nr.cov + 6)]) tab <- na.omit(tab) return(tab) }
base.lmadd <- lm(temp ~ location + treatment, data = Ice1930 %>% filter(phase == "b")) anova(base.lmadd) plot(base.lmadd, which = c(5, 2))
EMsimulation <- function(eta, alpha, sigma, samplesize, experiments, compact_flag=FALSE, C0=5, C1=2, C2=9){ etasize <- dim(eta)[1] alphasize <- dim(alpha)[1] classsize <- dim(eta)[2] eta_est <- array(0,c(experiments,etasize,classsize)) alpha_est <- array(0,c(experiments,alphasize,classsize)) sigma_est <- array(0,c(experiments)) Covmatrix_est <- array(0,c(experiments,2*alphasize+etasize+1,2*alphasize+etasize+1)) for(i in 1:experiments) { set.seed(i) X <- matrix(c(matrix(1,samplesize), rnorm(samplesize*(etasize-1))+1),samplesize,etasize) Z <- matrix(c(matrix(1,samplesize),rbinom(prob=1/2,size=1,n=samplesize), rnorm(samplesize*(alphasize-2))+1),samplesize,alphasize) W <- Wgenerate(alpha=alpha,eta=eta,X=X,Z=Z,sigma=sigma) set.seed(i) eta_initial <- matrix((C1+0.001),etasize,classsize) eta_initial[,classsize] <- 0 eta_initial[1,] <- 0 alpha_initial<- matrix(rnorm(alphasize*classsize),alphasize,classsize) sigma_initial <- 1 EMtheta <- EMalgorithm(X=W$X,Z=W$Z,Y=W$Y,classsize=classsize, etat=eta_initial,alphat=alpha_initial,sigmat=sigma_initial, learning_rate=0.01,regular_parameter_eta=0.001, max_iteration=1000,max_iteration_eta=10000, compact_flag,C0,C1,C2) eta_est[i,,] <- EMtheta$eta alpha_est[i,,] <- EMtheta$alpha sigma_est[i] <- EMtheta$sigma } return(list(eta=eta_est,alpha=alpha_est,sigma=sigma_est)) }
dtriang <- function(x, min=0, max=1) { if (!is.numeric(x)) stop("'x' must be numeric.") if (!is.numeric(min)) stop("'min' must be numeric.") if (!is.numeric(max)) stop("'max' must be numeric.") if (min==max) stop("Endpoints cannot be equal.") if (min>max) {temp=min; min=max; max=temp} ; c=(min+max)/2 2*(x-min)/(max-min)/(c-min)*(min<=x & x<=c) + 2*(max-x)/(max-min)/(max-c)*(c<x & x<=max) }
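# Quick check for dtriang() above: the triangular density with mode at the midpoint should integrate to 1 over [min, max] and vanish outside the support.
curve(dtriang(x, min = 0, max = 2), from = -0.5, to = 2.5, ylab = "density")
integrate(dtriang, lower = 0, upper = 2, min = 0, max = 2)  # ~ 1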
relevel <- function(x, ref, ...) UseMethod("relevel") relevel.default <- function(x, ref, ...) stop("'relevel' only for (unordered) factors") relevel.ordered <- function(x, ref, ...) stop("'relevel' only for unordered factors") relevel.factor <- function(x, ref, ...) { lev <- levels(x) if(length(ref) != 1L) stop("'ref' must be of length one") if(is.character(ref)) ref <- match(ref, lev) if(is.na(ref)) stop("'ref' must be an existing level") nlev <- length(lev) if(ref < 1 || ref > nlev) stop(gettextf("ref = %d must be in 1L:%d", ref, nlev), domain = NA) factor(x, levels = lev[c(ref, seq_along(lev)[-ref])]) }
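# Example for relevel() above: moving level "b" to the front makes it the reference level for model contrasts.
f <- factor(c("a", "b", "c", "b"))
levels(relevel(f, ref = "b"))  # "b" "a" "c"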
makeRLearner.surv.cv.CoxBoost = function() { makeRLearnerSurv( cl = "surv.cv.CoxBoost", package = "!CoxBoost", par.set = makeParamSet( makeIntegerLearnerParam(id = "maxstepno", default = 100L, lower = 0L), makeIntegerLearnerParam(id = "K", default = 10L, lower = 1L), makeDiscreteLearnerParam(id = "type", default = "verweij", values = c("verweij", "naive")), makeLogicalLearnerParam(id = "parallel", default = FALSE, tunable = FALSE), makeLogicalLearnerParam(id = "upload.x", default = FALSE, tunable = FALSE), makeLogicalLearnerParam(id = "multicore", default = FALSE, tunable = FALSE), makeIntegerVectorLearnerParam(id = "unpen.index"), makeLogicalLearnerParam(id = "standardize", default = TRUE), makeNumericLearnerParam(id = "penalty", lower = 0), makeDiscreteLearnerParam(id = "criterion", default = "pscore", values = c("pscore", "score", "hpscore", "hscore")), makeNumericLearnerParam(id = "stepsize.factor", default = 1, lower = 0), makeLogicalLearnerParam(id = "trace", default = FALSE, tunable = FALSE) ), properties = c("numerics", "factors", "weights"), name = "Cox Proportional Hazards Model with Componentwise Likelihood based Boosting, tuned for the optimal number of boosting steps", short.name = "cv.CoxBoost", note = "Factors automatically get converted to dummy columns, ordered factors to integer.", callees = c("cv.CoxBoost", "CoxBoost") ) } trainLearner.surv.cv.CoxBoost = function(.learner, .task, .subset, .weights = NULL, penalty = NULL, unpen.index = NULL, ...) { data = getTaskData(.task, subset = .subset, target.extra = TRUE, recode.target = "surv") info = getFixDataInfo(data$data, factors.to.dummies = TRUE, ordered.to.int = TRUE) if (is.null(penalty)) { penalty = 9 * sum(data$target[, 2L]) } pars = c(list( time = data$target[, 1L], status = data$target[, 2L], x = as.matrix(fixDataForLearner(data$data, info)), penalty = penalty, weights = .weights ), list(...)) rm(data) res = do.call(CoxBoost::cv.CoxBoost, pars) res$optimal.step if (res$optimal.step == 0L) { warning("Could not determine the optimal step number in cv.CoxBoost") } pars = insert(pars, list(stepno = res$optimal.step)) pars$maxstepno = NULL attachTrainingInfo(do.call(CoxBoost::CoxBoost, pars), info) } predictLearner.surv.cv.CoxBoost = function(.learner, .model, .newdata, ...) { info = getTrainingInfo(.model) .newdata = as.matrix(fixDataForLearner(.newdata, info)) as.numeric(predict(.model$learner.model, newdata = .newdata, type = "lp")) }
q_scad <- function(theta,lambda,a=3.7) { p<-length(theta) theta<-abs(theta) b1<-rep(0,p) b1[theta>lambda]<-1 b2<-rep(0,p) b2[theta<(lambda*a)]<-1 lambda*(1-b1)+((lambda*a)-theta)*b2/(a-1)*b1 }
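# Example for q_scad() above, the derivative of the SCAD penalty: it equals lambda up to the threshold lambda, decays linearly on (lambda, a*lambda], and is zero beyond a*lambda.
q_scad(c(0.5, 2, 5), lambda = 1)  # 1, (3.7 - 2)/2.7 ~ 0.63, 0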
setMethod("read_quadtree", signature(x = "character"), function(x) { qt <- new("Quadtree") qt@ptr <- readQuadtreeCpp(x) return(qt) } ) setMethod("write_quadtree", signature(x = "character", y = "Quadtree"), function(x, y) { writeQuadtreeCpp(y@ptr, x) } )
"summary.regul" <- function(object, ...) structure(object, class=c("summary.regul", class(object)))
"joint.curvesCOP2" <- function(cop=NULL, para=NULL, type=c("and", "or"), probs=c(0.5, 0.8, 0.90, 0.96, 0.98, 0.99, 0.995, 0.998), zero2small=TRUE, small=1E-6, divisor=100, delv=0.001, ...) { type <- match.arg(type) "orfunc" <- function(u, v=NULL, LHS=NULL, cop=NULL, para=NULL, ...) { RHS <- duCOP(u, v, cop = cop, para = para, ...) return(LHS - RHS) } zz <- new.env() for(t in probs) { if(type == "and") { delta <- diff(range(c(t+delv, 1-delv)))/divisor tmp <- NULL try(tmp <- seq(t+delv, 1-delv, by=delta), silent=FALSE) if(is.null(tmp)) { warning("trying to compensate for 'by' error by reversing") try(tmp <- seq(t+delv, 1-delv, by=-delta), silent=FALSE) } v <- c(t, t+delv/100, t+delv/10, t+delv/5, t+delv/2, tmp, 1-delv/2, 1-delv/5, 1-delv/10, 1-delv/100, 1) v <- sort(unique(v)) u <- sapply(1:length(v), function(i) { COPinv(cop=cop, v[i], t, para=para) }) if(zero2small) u[u == 0] <- small if(zero2small) u[u == 1] <- 1 - small uv <- data.frame(U=u, V=v) assign(as.character(t), uv, envir=zz) } else if(type == "or") { delta <- diff(range(c(0, t)))/divisor v <- c(0, delv/100, delv/10, delv/5, delv/2, seq(delv,t, by=delta), t-delv/2, t-delv/5, t-delv/10, t-delv/100, t) v <- sort(unique(v)) u <- vector(mode="numeric", length(v)) for(i in 1:length(v)) { lo <- duCOP(0,0, cop=cop, para=para, ...) if(is.na(lo) | ! is.finite(lo)) lo <- .Machine$double.eps my.rt <- NULL try(my.rt <- uniroot(orfunc, interval=c(lo,t), v=v[i], LHS=t, cop=cop, para=para, ...), silent=TRUE) if(is.null(my.rt)) { u[i] <- NA } else if(length(my.rt$root) != 0) { u[i] <- my.rt$root } else { u[i] <- NA } } if(zero2small) u[u == 0] <- small if(zero2small) u[u == 1] <- 1 - small uv <- data.frame(U=u, V=v) assign(as.character(t), uv, envir=zz) } else { stop("should not be here in logic") } } zzz <- as.list(zz); rm(zz) return(zzz) }
R_hat = function(samples, parts = 2){ samples = as.matrix(samples) no_chains = ncol(samples) length_chains = nrow(samples) sub_chains = list() for(c in 1:no_chains) sub_chains = c(sub_chains, split(samples[,c], cut(1:length_chains, parts))) L = length_chains/parts chain_means = sapply(sub_chains,mean) grand_mean = mean(chain_means) B = 1/(parts-1)*sum((chain_means-grand_mean)^2) chain_variances = sapply(sub_chains,var) W = sum(chain_variances)/parts R_hat = ((L-1)/L*W+B)/W return(R_hat) }
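# Example for R_hat() above: two well-mixed chains of independent standard-normal draws should give a value close to 1.
set.seed(1)
R_hat(matrix(rnorm(2000), ncol = 2), parts = 2)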
set_LoadProfile = function(profile, verbose = FALSE) { profilemsg = "Profile format not supported. " terminatemsg = "Terminating program..." if(is.null(profile)) { stop(paste(profilemsg, terminatemsg)) } if (dim(as.matrix(profile))[1] > 1) { stop(paste(profilemsg, terminatemsg)) } params_vec = c("load_s_shape", "load_s_scale", "load_p_shape", "load_p_scale", "R0", "alpha_d", "alpha_l", "load_d_mean", "load_d_sd", "gamma", "mean_Ts", "mean_Te", "mean_Tp") for (param in params_vec) { cur_param = tryCatch( { as.double(profile[param]) }, error = function(error_msg) { datanfmsg = paste("Dataframe provided does not contain:", param) message(datanfmsg) return(NULL) } ) if (is.null(cur_param)) { next } if (!is.na(cur_param) && !is.infinite(cur_param)) { modifyLoadP(param, cur_param, as.integer(verbose)) } else { paramerrormsg = paste("The parameter", param, "is not available") message(paramerrormsg) } } if (verbose) { cat("Finish setting load profile parameters") } } sample_load_profile = data.frame("load_s_shape" = 3.122, "load_s_scale" = 0.0481, "load_p_shape" = 0.826, "load_p_scale" = 0.1023, "R0" = 2722, "alpha_d" = 1.25, "alpha_l" = 1.5, "load_d_mean" = 1, "load_d_sd" = 0.1, "gamma" = 0.25, "mean_Ts" = 10, "mean_Te" = 1, "mean_Tp" = 0.03835)
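# Hypothetical call for set_LoadProfile() above, using the sample_load_profile data frame defined with it; requires the package's compiled modifyLoadP() backend, so shown commented out.
# set_LoadProfile(sample_load_profile, verbose = TRUE)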
identity_transformer <- function(text, envir) { eval(parse(text = text, keep.source = FALSE), envir) }
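# Example for identity_transformer() above (the glue-style default transformer): the embedded text is parsed and evaluated in the supplied environment.
identity_transformer("1 + 1", environment())  # 2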
durations <- function(x, trim = FALSE, units = c("secs", "mins", "hours", "days")) { stopifnot(is.timeSeries(x)) Title <- x@title Documentation <- x@documentation if (x@format == "counts") stop(as.character(match.call())[1], " is for time series and not for signal series.") units <- match.arg(units) pos <- time(x) dur <- c(NA, diff(as.integer(difftime(pos, pos[1], units = units[1])))) ans <- timeSeries(data = dur, charvec = pos, units = "Duration") if (trim) ans <- ans[-1, ] ans@title <- Title ans@documentation <- Documentation ans }
library(quokar) library(quantreg) library(ggplot2) library(gridExtra) library(purrr) library(tidyr) library(dplyr) library(robustbase) x <- sort(runif(100)) y1 <- 40*x + x*rnorm(100, 0, 10) df <- data.frame(y1, x) add_outlier <- data.frame(y1 = c(60,61,62), x = c(0.71, 0.73,0.75)) df_o <- rbind(df, add_outlier) model1 <- lm(y1 ~ x, data = df) model2 <- lm(y1 ~ x, data = df_o) coeff_lm <- c(model1$coef[2], model2$coef[2]) inter_lm <- c(model1$coef[1], model2$coef[1]) flag <- c("without-outlier", "with-outlier") line_lm <- data.frame(coeff_lm, inter_lm, flag) ggplot(df_o, aes(x = x, y = y1)) + geom_point(alpha = 0.1) + geom_abline(data = line_lm, aes(intercept = inter_lm, slope = coeff_lm, colour = flag)) coef1 <- rq(y1 ~ x, tau = c(0.1, 0.5, 0.9), data = df, method = "br")$coef rq_coef1 <- data.frame(intercept = coef1[1, ], coef = coef1[2, ], tau_flag =colnames(coef1)) coef2 <- rq(y1 ~ x, tau = c(0.1, 0.5, 0.9),data = df_o, method = "br")$coef rq_coef2 <- data.frame(intercept = coef2[1, ], coef = coef2[2, ], tau_flag =colnames(coef2)) ggplot(df_o) + geom_point(aes(x = x, y = y1), alpha = 0.1) + geom_abline(data = rq_coef1, aes(intercept = intercept, slope = coef, colour = tau_flag))+ geom_abline(data = rq_coef2, aes(intercept = intercept, slope = coef, colour = tau_flag)) x <- sort(runif(100)) y2 <- 40*x + x*rnorm(100, 0, 10) df <- data.frame(y2, x) add_outlier <- data.frame(y2 = c(1,2,3), x = c(0.71, 0.73,0.75)) df_o <- rbind(df, add_outlier) model1 <- lm(y2 ~ x, data = df) model2 <- lm(y2 ~ x, data = df_o) coeff_lm <- c(model1$coef[2], model2$coef[2]) inter_lm <- c(model1$coef[1], model2$coef[1]) flag <- c("without-outlier", "with-outlier") line_lm <- data.frame(coeff_lm, inter_lm, flag) ggplot(df_o, aes(x = x, y = y2)) + geom_point(alpha = 0.1) + geom_abline(data = line_lm, aes(intercept = inter_lm, slope = coeff_lm, colour = flag)) coef1 <- rq(y2 ~ x, tau = c(0.1, 0.5, 0.9), data = df, method = "br")$coef rq_coef1 <- data.frame(intercept = coef1[1, ], coef = coef1[2, ], tau_flag = colnames(coef1)) coef2 <- rq(y2 ~ x, tau = c(0.1, 0.5, 0.9), data = df_o, method = "br")$coef rq_coef2 <- data.frame(intercept = coef2[1, ], coef = coef2[2, ], tau_flag = colnames(coef2)) ggplot(df_o) + geom_point(aes(x = x, y = y2), alpha = 0.1) + geom_abline(data = rq_coef1, aes(intercept = intercept, slope = coef, colour = tau_flag))+ geom_abline(data = rq_coef2, aes(intercept = intercept, slope = coef, colour = tau_flag)) x <- sort(runif(100)) y <- 40*x + x*rnorm(100, 0, 10) selectedX <- sample(50:100,5) y2<- y y2[selectedX] <- x[1:5]*rnorm(5, 0, 10) y3 <- y2 y3[selectedX] <- y2[selectedX] - 10 y4 <- y3 y4[selectedX] <- y3[selectedX] - 10 df <- data.frame(x, y, y2, y3, y4) df_m <- df %>% gather(variable, value, -x) ggplot(df_m, aes(x = x, y=value)) + geom_point(alpha = 0.5) + xlab("x") + ylab("y") + facet_wrap(~variable, ncol=2, scale = "free_y") + geom_quantile(quantiles = seq(0.1, 0.9, 0.1), colour = "purple") + geom_smooth(method = "lm", se = FALSE, colour = "orange") coefs <- 2:5 %>% map(~ rq(df[, .] 
~ x, data = df, seq(0.1, 0.9, 0.1))) %>% map_df(~ as.data.frame(t(as.matrix(coef(.))))) colnames(coefs) <- c("intercept", "slope") tau <- rep(seq(0.1, 0.9, by = 0.1), 4) model <- paste('rq', rep(1:4, each = 9), sep="") df_m1 <- data.frame(model, tau, coefs) df_mf <- df_m1 %>% gather(variable, value, -c(model, tau)) ggplot(df_mf, aes(x = tau, y = value, colour = model)) + geom_point() + geom_line() + facet_wrap(~ variable, scale = "free_y") + xlab('quantiles') + ylab('coefficients') n <- 100 set.seed(101) x1 <- sort(rnorm(n, 0, 1)) x2 <- sort(rnorm(n, 1, 2)) y <- 40*(x1 + x2) + x1*rnorm(100, 0, 10) + x2*rnorm(100, 0, 10) selectedX <- sample(50:100,5) y2<- y y2[selectedX] <- x1[1:5]*rnorm(5, 0, 10) + x2[1:5]*rnorm(5, 0, 10) y3 <- y2 y3[selectedX] <- y2[selectedX] - 100 y4 <- y3 y4[selectedX] <- y3[selectedX] - 100 df <- data.frame(y, y2, y3, y4, x1, x2) coefs <- 1:4 %>% map(~ rq(df[, .] ~ x1 + x2, data = df, seq(0.1, 0.9, 0.1))) %>% map_df(~ as.data.frame(t(as.matrix(coef(.))))) colnames(coefs) <- c("intercept", "slope_x1", "slope_x2") tau <- rep(seq(0.1, 0.9, by = 0.1), 4) model <- paste('rq', rep(1:4, each = 9), sep="") df_m1 <- data.frame(model, tau, coefs) df_mf <- df_m1 %>% gather(variable, value, -c(model, tau)) ggplot(df_mf, aes(x = tau, y = value, colour = model)) + geom_point() + geom_line() + facet_wrap(~ variable, scale = "free_y") + xlab('quantiles') + ylab('coefficients') x <- sort(runif(100)) y <- 40*x + x*rnorm(100, 0, 10) selectedIdx <- sample(50:100,5) df <- data.frame(y) df$y2 <- y df$x <- x df$y2[selectedIdx] <- df$x[1:5]*rnorm(5, 0, 10) df$x2 <- x df$x2[selectedIdx] <- df$x2[selectedIdx] + 0.2 df$x3 <- df$x2 df$x3[selectedIdx] <- df$x3[selectedIdx] + 0.2 df$x4 <- df$x3 df$x4[selectedIdx] <- df$x4[selectedIdx] + 0.2 df_m <- df %>% gather(variable, value, -y, -y2) ggplot(df_m, aes(x = value, y=y2)) + geom_point() + xlab("x") + ylab("y") + facet_wrap(~variable, ncol=2, scale = "free") + geom_quantile(quantiles = seq(0.1, 0.9, 0.1)) coefs <- 3:6 %>% map(~ rq(df$y2 ~ df[, .], data = df, seq(0.1, 0.9, 0.1))) %>% map_df(~ as.data.frame(t(as.matrix(coef(.))))) colnames(coefs) <- c("intercept", "slope") tau <- rep(seq(0.1, 0.9, by = 0.1), 4) model <- paste('rq', rep(1:4, each = 9), sep="") df_m1 <- data.frame(model, tau, coefs) df_mf <- df_m1 %>% gather(variable, value, -c(model, tau)) ggplot(df_mf, aes(x = tau, y = value, colour = model)) + geom_point() + geom_line() + facet_wrap(~ variable, scale = "free_y") + xlab('quantiles') + ylab('coefficients') x <- sort(runif(100)) y <- 40*x + x*rnorm(100, 0, 10) selectedX1 <- sample(50:100, 5) y_number_y1 <- y y_number_y1[selectedX1] <- x[1:5]*rnorm(5, 0, 10) selectedX2 <- sample(50:100, 10) y_number_y2 <- y y_number_y2[selectedX2] <- x[1:10]*rnorm(10, 0, 10) selectedX3 <- sample(50:100, 15) y_number_y3 <- y y_number_y3[selectedX3] <- x[1:15]*rnorm(15, 0, 10) df <- data.frame(x, y, y_number_y1, y_number_y2, y_number_y3) df_m <- df %>% gather(variable, value, -x) ggplot(df_m, aes(x=x, y=value)) + geom_point() + xlab("x") + ylab("y") + facet_wrap(~variable, ncol=2) + geom_quantile(quantiles = seq(0.1, 0.9, 0.1)) data(ais) ais_female <- subset(ais, Sex == 1) ais_female_o <- ais_female[-75, ] coef1 <- lm(BMI ~ LBM, data = ais_female)$coef coef2 <- lm(BMI ~ LBM, data = ais_female_o)$coef coefs <- c(coef1[2], coef2[2]) inters <- c(coef1[1], coef2[1]) flag <- c("with-outlier", "without-outlier") coef_o <- data.frame(coefs, inters, flag) ggplot(ais_female, aes(x = LBM, y = BMI)) + geom_point(alpha = 0.1) + geom_abline(data = coef_o, 
aes(intercept = inters, slope = coefs, colour = flag)) coef1 <- rq(BMI ~ LBM, tau = c(0.1, 0.5, 0.9), data = ais_female, method = "br")$coef rq_coef1 <- data.frame(intercept = coef1[1, ], coef = coef1[2, ], tau_flag = colnames(coef1)) coef2 <- rq(BMI ~ LBM, tau = c(0.1, 0.5, 0.9), data = ais_female_o, method = "br")$coef rq_coef2 <- data.frame(intercept = coef2[1, ], coef = coef2[2, ], tau_flag = colnames(coef2)) ggplot(ais_female) + geom_point(aes(x = LBM, y = BMI), alpha = 0.1) + geom_abline(data = rq_coef1, aes(intercept = intercept, slope = coef, colour = tau_flag))+ geom_abline(data = rq_coef2, aes(intercept = intercept, slope = coef, colour = tau_flag)) data(ais) tau <- c(0.1, 0.5, 0.9) ais_female <- subset(ais, Sex == 1) br <- rq(BMI ~ LBM, tau = tau, data = ais_female, method = 'br') coef <- br$coef br_result <- frame_br(br, tau) origin_obs <- br_result$all_observation use_obs <- br_result$fitting_point ggplot(origin_obs, aes(x = value, y = y)) + geom_point(alpha = 0.1) + geom_abline(slope = coef[2, 1], intercept = coef[1,1], colour = "gray") + geom_abline(slope = coef[2, 2], intercept = coef[1,2], colour = "gray") + geom_abline(slope = coef[2, 3], intercept = coef[1,3], colour = "grey") + ylab('y') + xlab('x') + facet_wrap(~variable, scales = "free_x", ncol = 2) + geom_point(data = use_obs, aes(x = value, y = y, group = tau_flag, colour = tau_flag, shape = obs)) br <- rq(BMI ~ LBM + Bfat , tau = tau, data = ais_female, method = 'br') tau <- c(0.1, 0.5, 0.9) br_result <- frame_br(br, tau) origin_obs <- br_result$all_observation use_obs <- br_result$fitting_point ggplot(origin_obs, aes(x = value, y = y)) + geom_point(alpha = 0.1) + ylab('y') + xlab('x') + facet_wrap(~variable, scales = "free_x", ncol = 2) + geom_point(data = use_obs, aes(x = value, y = y, group = tau_flag, colour = tau_flag, shape = obs)) tau <- c(0.1, 0.5, 0.9) fn <- rq(BMI ~ LBM, data = ais_female, tau = tau, method = 'fn') fn_obs <- frame_fn_obs(fn, tau) head(fn_obs) fn1 <- fn_obs[,1] case <- 1: length(fn1) fn1 <- cbind(case, fn1) m <- data.frame(y = ais_female$BMI, x1 = ais_female$LBM,fn1) p <- length(attr(fn$coefficients, "dimnames")[[1]]) m_f <- m %>% gather(variable, value, -case, -fn1, -y) mf_a <- m_f %>% group_by(variable) %>% arrange(variable, desc(fn1)) %>% filter(row_number() %in% 1:p) p1 <- ggplot(m_f, aes(x = value, y = y)) + geom_point(alpha = 0.1) + geom_point(data = mf_a, size = 3, colour = "purple") + facet_wrap(~variable, scale = "free_x") + xlab("x") fn2 <- fn_obs[,2] case <- 1: length(fn2) fn2 <- cbind(case, fn2) m <- data.frame(y = ais_female$BMI, x1 = ais_female$LBM, fn2) p <- length(attr(fn$coefficients, "dimnames")[[1]]) m_f <- m %>% gather(variable, value, -case, -fn2, -y) mf_a <- m_f %>% group_by(variable) %>% arrange(variable, desc(fn2)) %>% filter(row_number() %in% 1:p ) p2 <- ggplot(m_f, aes(x = value, y = y)) + geom_point(alpha = 0.1) + geom_point(data = mf_a, size = 3, colour = "blue", alpha = 0.5) + facet_wrap(~variable, scale = "free_x") + xlab("x") fn3 <- fn_obs[ ,3] case <- 1: length(fn3) fn3 <- cbind(case, fn3) m <- data.frame(y = ais_female$BMI, x1 = ais_female$LBM, fn3) p <- length(attr(fn$coefficients, "dimnames")[[1]]) m_f <- m %>% gather(variable, value, -case, -fn3, -y) mf_a <- m_f %>% group_by(variable) %>% arrange(variable, desc(fn3)) %>% filter(row_number() %in% 1:p ) p3 <- ggplot(m_f, aes(x = value, y = y)) + geom_point(alpha = 0.1) + geom_point(data = mf_a, size = 3, colour = "orange") + facet_wrap(~variable, scale = "free_x") + xlab("x") grid.arrange(p1, p2, p3, ncol 
= 3) tau <- c(0.1, 0.5, 0.9) fn <- rq(BMI ~ LBM, data = ais_female, tau = tau, method = 'fn') fn_obs <- frame_fn_obs(fn, tau) p <- 2 obs <- data.frame(cbind(fn_obs,id = 1:nrow(fn_obs))) selected <- NULL for(i in 1:3){ data <- obs[order(obs[,i],decreasing = T),c(i,4)][1:p,] data <- cbind(data,idx=1:p) colnames(data) <- c("value","id","idx") data = cbind(data,type=rep(colnames(obs)[i],p)) if(is.null(selected)){ selected = data }else{ selected = rbind(selected,data) } } selected$value = round(selected$value,3) ggplot(selected,aes(x=idx,y=value,colour=type))+ geom_point(aes(size=value),alpha=0.5)+ geom_text(aes(label = id), hjust = 0, vjust= 0)+ facet_wrap( ~ type,scale="free_y") x <- rep(1:25, 20) y <- SSlogis(x, 10, 12, 2) * rnorm(500, 1, 0.1) Dat <- data.frame(x = x, y = y) formula <- y ~ SSlogis(x, Aysm, mid, scal) nlrq_m <- frame_nlrq(formula, data = Dat, tau = c(0.1, 0.5, 0.9)) weights <- nlrq_m$weights m <- data.frame(Dat, weights) m_f <- m %>% gather(tau_flag, value, -x, -y) ggplot(m_f, aes(x = x, y = y, colour = tau_flag)) + geom_point(aes(size = value), alpha = 0.5) + facet_wrap(~tau_flag) x <- matrix(ais_female$LBM, ncol = 1) y <- ais_female$BMI tau = c(0.1, 0.5, 0.9) ald_data <- frame_ald(y, x, tau, smooth = 10, error = 1e-6, iter = 2000) ggplot(ald_data) + geom_line(aes(x = r, y = d, group = obs, colour = tau_flag)) + facet_wrap(~tau_flag, ncol = 1,scales = "free_y") + xlab('') + ylab('Asymmetric Laplace Distribution Density Function') ais_female <- subset(ais, Sex == 1) tau <- c(0.1, 0.5, 0.9) object <- rq(BMI ~ LBM + Bfat, data = ais_female, tau = tau) plot_distance <- frame_distance(object, tau = c(0.1, 0.5, 0.9)) distance <- plot_distance[[1]] head(distance) cutoff_v <- plot_distance[[2]] cutoff_v cutoff_h <- plot_distance[[3]] cutoff_h n <- nrow(object$model) case <- rep(1:n, length(tau)) distance <- cbind(case, distance) distance$residuals <- abs(distance$residuals) distance1 <- distance %>% filter(tau_flag == 'tau0.1') p1 <- ggplot(distance1, aes(x = rd, y = residuals)) + geom_point() + geom_hline(yintercept = cutoff_h[1], colour = "red") + geom_vline(xintercept = cutoff_v, colour = "red") + geom_text(data = subset(distance1, residuals > cutoff_h[1]| rd > cutoff_v), aes(label = case), hjust = 0, vjust = 0) + xlab("Robust Distance") + ylab("|Residuals|") distance2 <- distance %>% filter(tau_flag == 'tau0.5') p2 <- ggplot(distance1, aes(x = rd, y = residuals)) + geom_point() + geom_hline(yintercept = cutoff_h[2], colour = "red") + geom_vline(xintercept = cutoff_v, colour = "red") + geom_text(data = subset(distance1, residuals > cutoff_h[2]| rd > cutoff_v), aes(label = case), hjust = 0, vjust = 0) + xlab("Robust Distance") + ylab("|Residuals|") distance3 <- distance %>% filter(tau_flag == 'tau0.9') p3 <- ggplot(distance1, aes(x = rd, y = residuals)) + geom_point() + geom_hline(yintercept = cutoff_h[3], colour = "red") + geom_vline(xintercept = cutoff_v, colour = "red") + geom_text(data = subset(distance1, residuals > cutoff_h[3]| rd > cutoff_v), aes(label = case), hjust = 0, vjust = 0) + xlab("Robust Distance") + ylab("|Residuals|") grid.arrange(p1, p2, p3, ncol = 3) ais_female <- subset(ais, Sex == 1) y <- ais_female$BMI x <- cbind(1, ais_female$LBM, ais_female$Bfat) tau <- c(0.1, 0.5, 0.9) case <- rep(1:length(y), length(tau)) GCD <- frame_mle(y, x, tau, error = 1e-06, iter = 10000, method = 'cook.distance') GCD_m <- cbind(case, GCD) ggplot(GCD_m, aes(x = case, y = value )) + geom_point() + facet_wrap(~variable, scale = 'free_y') + geom_text(data = subset(GCD_m, value > 
mean(value) + 2*sd(value)), aes(label = case), hjust = 0, vjust = 0) + xlab("case number") + ylab("Generalized Cook Distance") QD <- frame_mle(y, x, tau, error = 1e-06, iter = 100, method = 'qfunction') QD_m <- cbind(case, QD) ggplot(QD_m, aes(x = case, y = value)) + geom_point() + facet_wrap(~variable, scale = 'free_y')+ geom_text(data = subset(QD_m, value > mean(value) + sd(value)), aes(label = case), hjust = 0, vjust = 0) + xlab('case number') + ylab('Qfunction Distance') ais_female <- subset(ais, Sex == 1) y <- ais_female$BMI x <- matrix(c(ais_female$LBM, ais_female$Bfat), ncol = 2, byrow = FALSE) tau <- c(0.1, 0.5, 0.9) case <- rep(1:length(y), length(tau)) prob <- frame_bayes(y, x, tau, M = 500, burn = 100, method = 'bayes.prob') prob_m <- cbind(case, prob) ggplot(prob_m, aes(x = case, y = value )) + geom_point() + facet_wrap(~variable, scale = 'free') + geom_text(data = subset(prob_m, value > mean(value) + 2*sd(value)), aes(label = case), hjust = 0, vjust = 0) + xlab("case number") + ylab("Mean probability of posterior distribution") kl <- frame_bayes(y, x, tau, M = 5, burn = 1, method = 'bayes.kl') kl_m <- cbind(case, kl) ggplot(kl_m, aes(x = case, y = value)) + geom_point() + facet_wrap(~variable, scale = 'free')+ geom_text(data = subset(kl_m, value > mean(value) + 2*sd(value)), aes(label = case), hjust = 0, vjust = 0) + xlab('case number') + ylab('Kullback-Leibler')
source(file = "../create_testing_df.R") testdf <- create.testing.df() test_that("error for wrong value of stepsize", { expect_error(suppressWarnings(datsteps(testdf, stepsize = "test")), "stepsize") }) test_that("warning for larger than recommended stepsize", { expect_warning(datsteps(testdf, stepsize = 25), "recommended") }) data("DAT_df") test_that("warning for dating in wrong order", { expect_warning(datsteps(DAT_df), regexp = "wrong order") }) testdf <- create.testing.df() testdf[, 3] <- sample(-200:0, nrow(testdf)) testdf[, 4] <- sample(1:200, nrow(testdf)) testdf[1, 3:4] <- c(4, 4) test_that("warning when begin and end have the same value", { expect_warning(datsteps(testdf), regexp = "the same value") expect_failure(expect_warning(datsteps(testdf, stepsize = 1), regexp = "larger than the range of")) })
get_emission=function(wide,days) { if(!is.data.frame(wide)) stop("wide must be a data.frame.") tmp=.check_bad_transition(wide,days) split_list=list() for(nm in days) { col_str=wide[[nm]] splt=sub("[", "", col_str, fixed=TRUE) splt=sub("]", "", splt, fixed=TRUE) splt=strsplit( splt, ",", fixed=TRUE) split_list[[nm]]=mapply(function(x,v) { ret=tryCatch(as.numeric(x), warning=function(e) stop(paste("Could not process niad os value:", v,"."))) if(any(is.na(ret))) { if(length(ret)>1) stop(paste("Could not process niad os value of:", v,".")) return(c(1,8)) } else if(any(ret<1 | ret>8 | round(ret)!=ret)){ stop(paste("Niad os value must be integer between 1 and 8. Found value of:", v,".")) } else if(length(ret)>2) { stop(paste("Could not process niad os value of:", v,".")) } else if(length(ret)==2) { if(ret[1]==ret[2]) stop(paste("Could not process niad os value of:", v,".")) return(c(min(ret),max(ret))) } else return(ret) }, splt,col_str, SIMPLIFY = FALSE) } valuelist=unique(do.call(c, split_list)) tochar=function(x) sapply(x, function(x) paste(x,collapse="_")) emissionNum=lapply(valuelist, function(x) { ret=rep(0,8) if(length(x)==1) { ret[x]=1 } else { ret[x[1]:x[2]]=1 } return(ret) }) names(emissionNum)=tochar(valuelist) Em=array(0,c(nrow(wide),length(days),8)) Emlst=lapply(split_list, function(x) do.call(rbind,emissionNum[tochar(x)])) for(i in 1:length(days)) Em[,i,]=Emlst[[i]] return(Em) } .check_bad_transition=function(wide,days) { tmp=wide[,days] for(i in 1:nrow(wide)) { v = unlist(tmp[i,]) pos8 = which(v==8) if(length(pos8)>0) { first=pos8[1] flgmaybe8=grepl("8", v, fixed=TRUE) | is.na(v) if(any(!flgmaybe8[first:length(days)])) stop(paste("Cannot transition out of 8:\n", paste(wide[i,], collapse=","))) } } return(TRUE) }
d_station <- function(fsq, op = "C", sta=NA, ty_st = NA, name_st=NA, name_fld=NA, value_fld=NA, bku = TRUE) { if (!file.exists(fsq)) return(warning("\nThis data base doesn't exist, Verify!\n")) if (!(op %in% c("C", "M", "R", "c", "m", "r"))) return(warning("\nOperation missing or not authorized\n")) if((op %in% c("C", "c")) && (is.na(sta) || is.na(name_st) || is.na(ty_st))) return(warning("\nStation id, type and name must be completed!\n")) if((op %in% c("C", "c")) && !(ty_st %in% c("M","H"))) return(warning("\nStation type and name must be H (hydro) or M (Meteo)\n")) if((op %in% c("M", "R", "m", "r")) && (is.na(sta))) return(warning("\nStation id must be completed!\n")) conn <- dbConnect(SQLite(),fsq) ltable <- dbListTables(conn) dbDisconnect(conn) if(!("ST" %in% ltable)) return(warning("\nNo table 'ST'.\n")) if (op %in% c("C","c","M", "m")){ if(length(name_fld) != length(value_fld)) return(warning("\nThe lists field names and field values must have the same length.\n")) if(length(name_fld)==1 && is.na(name_fld)) name_fld <- NA else { for (i in 1:length(name_fld)){ if(!(name_fld[i] %in% c( "Ordre", "Id_Secondaire", "Id_Tertiaire", "Type_Meteo", "Nom", "Pays", "Zone", "SousZone", "GrandBassin", "Bassin", "PetitBassin", "Riviere", "Gestionnaire", "Critere_Texte", "Nom_Observateur", "Adresse", "Commentaire", "District", "Localite","Latitude", "Longitude", "Altitude", "Superficie_bv", "Mois_Debut_Hydro", "Debut_Activite", "Critere_Numerique", "Activite", "Critere_OuiNon", "Critere_OuiNon2", "Teletransmission", "Enregistreur", "Flag"))) return(warning("\nField name not authorized.\n")) if(name_fld[i] %in% c( "Ordre", "Id_Secondaire", "Id_Tertiaire", "Type_Meteo", "Nom", "Pays", "Zone", "SousZone", "GrandBassin", "Bassin", "PetitBassin", "Riviere", "Gestionnaire", "Critere_Texte", "Nom_Observateur", "Adresse", "Commentaire", "District", "Localite")) value_fld[i] <- as.character(value_fld[i]) if(name_fld[i] %in% c( "Latitude", "Longitude", "Altitude", "Superficie_bv", "Debut_Activite", "Critere_Numerique")) value_fld[i] <- as.numeric(value_fld[i]) if(name_fld[i] %in% c("Altitude", "Mois_Debut_Hydro")) value_fld[i] <- as.integer(value_fld[i]) if(name_fld[i] %in% c("Activite", "Critere_OuiNon", "Critere_OuiNon2", "Teletransmission", "Enregistreur", "Flag")) value_fld[i] <- as.logical(value_fld[i]) } } } conn <- dbConnect(SQLite(),fsq) selection <- paste ("SELECT * FROM ST") xxt <-dbGetQuery(conn, selection) dbDisconnect(conn) if (op %in% c("C","c") && sta %in% xxt$Id_Station) return(warning("\nStation ", sta, " already exists and its data are conserved.\n")) if (op %in% c("M","m", "R", "r") && !(sta %in% xxt$Id_Station)) return(warning("\nThe station ", sta, " doesn't exist in the station table.\n")) if (bku == TRUE) d_backup(fsq) if (op %in% c("C","c")) { conn <- dbConnect(SQLite(),fsq) station <- list( Ordre = as.character(NA), Type_Station = as.character(ty_st), Id_Station = as.character(sta), Id_Secondaire = as.character(NA), Id_Tertiaire = as.character(NA), Type_Meteo = as.character(NA), Nom = as.character(name_st), Pays = as.character(NA), Zone = as.character(NA), SousZone = as.character(NA), GrandBassin = as.character(NA), Bassin = as.character(NA), PetitBassin = as.character(NA), Riviere = as.character(NA), Gestionnaire = as.character(NA), Latitude = as.numeric(NA), Longitude = as.numeric(NA), Altitude = as.integer(NA), Superficie_bv = as.numeric(NA), Mois_Debut_Hydro = as.integer(NA), Debut_Activite = as.numeric(NA), Activite = as.logical(NA), Critere_OuiNon = as.logical(NA), 
Critere_OuiNon2 = as.logical(NA), Critere_Numerique = as.numeric(NA), Critere_Texte = as.character(NA), Nom_Observateur = as.character(NA), Adresse = as.character(NA), Teletransmission = as.logical(NA), Enregistreur = as.logical(NA), Fictive = as.logical(NA), Commentaire = as.character(NA), Flag = as.logical(NA), District = as.character(NA), Localite = as.character(NA) ) if(length(name_fld) == 1 && is.na(name_fld)) name_fld <- NA else for(i in 1:length(station)){ j <- 0 repeat { j <- j+1 if(j > length(name_fld)) break if(name_fld[j] == names(station[i])) station [i] <- value_fld [j] } } station <- as.data.frame(station, stringsAsFactors = FALSE) dbWriteTable(conn, "ST", station, append = TRUE) message("\nStation ",station$Nom," with id ", sta, " created.") dbDisconnect(conn) } if (op %in% c("M","m")) { conn <- dbConnect(SQLite(),fsq) sta1 <- paste0("'",sta,"'") selection <- paste("SELECT * FROM ST WHERE Id_Station = ", sta1) station <- dbGetQuery(conn, selection) if (!is.na(name_st)) station[7] <- name_st if(length(name_fld) != 1) { for (j in 1:length(value_fld)) { for (i in 1:length(station)) { if(name_fld[j] == names(station[i])) k <- i } station[1, k] <- value_fld[j] } } else { if (!is.na(name_fld)){ for (i in 1:length(station)) { if(name_fld == names(station[i])) k <- i } station[1, k] <- value_fld } } selection <- paste ("DELETE FROM ST WHERE Id_Station = ", sta1) rs <- dbSendQuery(conn, selection) dbClearResult(rs) dbWriteTable(conn, "ST", station, append = TRUE) message("\nStation ",name_st," with id ", sta, " modified.") } if (op %in% c("R","r")){ conn <- dbConnect(SQLite(),fsq) lstab <- c("SS", "WL", "DI", "WE", "PR", "QU","ST") sta1 = paste0("'",as.character(sta),"'") for(i in 1:length(lstab)) { selection <- paste ("DELETE FROM", lstab[i], " WHERE Id_Station = ", sta1) rs <- dbSendQuery(conn, selection) dbClearResult(rs) } message("\nStation ",name_st," with id ", sta, " is removed with all its data.") dbDisconnect(conn) } }
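# Hypothetical call sketch for d_station() above (file name and station id are illustrative; requires RSQLite and an existing data base containing an ST table):
# d_station("my_hydro.sqlite", op = "C", sta = "S001", ty_st = "H", name_st = "Gauge one", bku = FALSE)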
neg_log_likelihood_le_t_ineq_h_k = function(t, fitls, M_vec,init_lambdas = rep(0, times = length(fitls)-1), maxeval = 10000) { k = length(fitls) iter = 1:k iter_without_last = 1:(k-1) d = lapply(fitls, FUN = function (f) { f$n.event[f$time <= t & f$n.event != 0] }) r = lapply(fitls, FUN = function (f) { f$n.risk[f$time <= t & f$n.event != 0] }) A = lapply(iter, FUN = function (iter) { r[[iter]]-d[[iter]] }) D = c() init_weights = c() for (i in 1:k){ D[i] = max(d[[i]] - r[[i]]) / M_vec[i] + 0.0001 if (i == 1 ){ init_weights[i] = list(d[[i]] / (r[[i]] + (M_vec[i] * init_lambdas[i]))) }else if (i == k){ init_weights[i] = list(d[[i]] / (r[[i]] - (M_vec[i] * init_lambdas[i-1]))) }else{ init_weights[i] = list(d[[i]] / (r[[i]] + (M_vec[i] * (init_lambdas[i] - init_lambdas[i-1])))) } } init_weights = as.numeric(unlist(init_weights)) lgth_d = unlist(lapply(iter, FUN =function(iter){ length(d[[iter]]) })) local_opts = list("algorithm" = "NLOPT_LD_MMA", "xtol_rel" = 1.0e-10) opts_used = list("algorithm" = "NLOPT_LD_AUGLAG", "xtol_rel" = 1.0e-10, "maxeval" = maxeval, "local_opts" = local_opts) nlopt = nloptr(x0 = init_weights, eval_f = fnc_objective_h, eval_grad_f = gnc_objective_h, lb = rep(0.0, times = cumsum(lgth_d[1:k])[k]), ub = rep(1.0, times = cumsum(lgth_d[1:k])[k]), eval_g_ineq = constraints_h, eval_jac_g_ineq = jac_constraints_h, opts = opts_used, k = k, iter = iter, iter_without_last = iter_without_last, d = d, r = r, A = A, D = D, M_vec = M_vec) nlopt$Ds = D nlopt$resultEEs = constraints_h(nlopt$solution, k, iter, iter_without_last, d, r, A, D, M_vec) lambda = c() lambda[1] = (d[[1]][1] / nlopt$solution[1] - r[[1]][1]) / M_vec[1] if (k > 2){ for (j in 2:(k-1)){ lambda[j] = lambda[j-1] - ( r[[j]][1] - d[[j]][1] / nlopt$solution[1 + cumsum(lgth_d[1:k])[j] - lgth_d[j]]) / M_vec[j] } } nlopt$lambdas = lambda return (nlopt) }
context("show object methods") test_that("show webdata", { expect_output(show(readRDS("data/test_webdata_fabric.rds"))) }) test_that("show webgeom", { expect_output(show(readRDS("data/test_webgeom_WI.rds"))) }) test_that("show simplegeom", { expect_output(show(readRDS("data/test_simplegeom_two_points.rds"))) expect_error(simplegeom(c(-88.6, 45.2,99))) }) test_that("show datagroup", { testthat::skip_on_cran() expect_output(show(readRDS("data/test_query_webdata.rds"))) }) test_that("show geojob", { expect_output(show(readRDS("data/test_email_gj.rds"))) }) test_that("show webprocess", { testthat::skip_on_cran() expect_output(show(readRDS("data/test_webprocess_knife.rds"))) })
.logdistrfit <- function(x,wt) { xbar = sum(wt*x) fn <- function(p) xbar + p/((1-p)*log(1-p)) uniroot(fn,c(1e-10,1-1e-10))$root }
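# Example for .logdistrfit() above: recover the parameter p of a logarithmic (log-series) distribution from a weighted sample mean; wt is assumed normalised so that sum(wt) == 1.
x <- 1:6
wt <- c(0.5, 0.25, 0.12, 0.07, 0.04, 0.02)
.logdistrfit(x, wt)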
inputsList <- list() dbFilesRecordList <- list() FormatRecordList <- list() updateSelectizeInput(session, "InputSiteName", choices = sitenames, server = TRUE) updateSelectizeInput(session, "InputParentName", choices = input_names, server = TRUE) updateSelectizeInput(session, "InputFormatName", choices = formats, server = TRUE) observeEvent(input$createInput, { inputsList$siteName <- input$InputSiteName inputsList$siteID <- sites %>% dplyr::filter(sitename %in% inputsList$siteName) %>% pull(id) inputsList$parentName <- input$InputParentName inputsList$parentID <- inputs %>% dplyr::filter(name %in% inputsList$parentName) %>% pull(id) inputsList$formatName <- input$InputFormatName inputsList$formatID <- formats_sub %>% dplyr::filter(name %in% inputsList$formatName) %>% pull(id) inputsList$Name <- input$InputName inputsList$StartDate <- input$InputStartDate inputsList$StartTime <- input$StartTimeInput inputsList$EndDate <- input$InputEndDate inputsList$EndTime <- input$EndTimeInput inputsList$Timezone <- input$Timezone inputsList$Notes <- input$InputNotes output$summInputs <- renderPrint({print(inputsList)}) }) updateSelectizeInput(session, "InputMachineName", choices = machines, server = TRUE) observeEvent(input$createDBFilesRecord, { dbFilesRecordList$machine <- input$InputMachineName dbFilesRecordList$machineID <- machines_sub %>% dplyr::filter(hostname %in% dbFilesRecordList$machine) %>% pull(id) dbFilesRecordList$filePath <- input$InputFilePath dbFilesRecordList$fileName <- input$InputFileName output$dbFilesRecordOut <- renderPrint({print(dbFilesRecordList)}) }) updateSelectizeInput(session, "MimetypeName", choices = mimetypes, server = TRUE) observeEvent(input$createFormatRecord, { FormatRecordList$mimetypeName <- input$MimetypeName FormatRecordList$mimetypeID <- mimetype_sub %>% dplyr::filter(type_string %in% FormatRecordList$mimetypeName) %>% pull(id) FormatRecordList$NewMimeType <- input$NewMimeType FormatRecordList$NewFormatName <- input$NewFormatName FormatRecordList$HeaderBoolean <- input$HeaderBoolean FormatRecordList$SkipLines <- input$SkipLines FormatRecordList$FormatNotes <- input$FormatNotes output$FormatRecordOut <- renderPrint({print(FormatRecordList)}) })
ex <- function(x, discrete = FALSE, var = "q") { rhs <- if (discrete) "update" else "deriv" sprintf("%s\ninitial(%s) <- 1\n%s(%s) <- 1", x, var, rhs, var) }
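# Example for ex() above: it wraps a code fragment with an initial condition and either a deriv() (continuous) or update() (discrete) equation in odin-style syntax.
cat(ex("r <- 0.1"))
cat(ex("r <- 0.1", discrete = TRUE, var = "n"))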
test_that("prep_min_obs_level works", { load(system.file("extdata/meta_data.RData", package = "dataquieR"), envir = environment()) load(system.file("extdata/study_data.RData", package = "dataquieR"), envir = environment()) label_col <- LABEL util_prepare_dataframes() expect_warning( x <- prep_min_obs_level(ds1, group_vars = "USR_BP_0", min_obs_in_subgroup = 50), regexp = paste("The following levels: .+USR_559.+ have < 50", "observations and are disregarded"), perl = TRUE, all = TRUE ) expect_equal(nrow(study_data) - nrow(x), 29) expect_error( x <- prep_min_obs_level(ds1, group_vars = character(0), min_obs_in_subgroup = 50 ), regexp = paste(".+group_vars.+ is required to name exactly one variable."), perl = TRUE ) expect_error( x <- prep_min_obs_level(ds1, group_vars = NULL, min_obs_in_subgroup = 50 ), regexp = paste(".+group_vars.+ is required to be a character.* argument."), perl = TRUE ) expect_warning( expect_error( x <- prep_min_obs_level(ds1, group_vars = letters, min_obs_in_subgroup = 50 ), regexp = paste(".+group_vars.+ = .+a.+ is not a variable."), perl = TRUE ), regexp = sprintf("(%s)", paste("Subsets based only on one variable possible.")), perl = TRUE, all = TRUE ) expect_warning( x <- prep_min_obs_level(ds1, group_vars = "USR_BP_0", min_obs_in_subgroup = NA), regexp = sprintf("(%s|%s)", paste("The following levels: .+USR_559.+ have < 30", "observations and are disregarded"), paste("argument .+min_obs_in_subgroup.+ was missing,", "not of length 1 or NA, setting to its default, 30")), perl = TRUE, all = TRUE ) expect_equal(nrow(study_data) - nrow(x), 29) ds1. <- ds1 ds1.$USR_BP_0 <- NA expect_warning( expect_error( x <- prep_min_obs_level(ds1., group_vars = "USR_BP_0", min_obs_in_subgroup = NA), regexp = paste("For .+group_vars.+ = .+USR_BP_0.+,", "observations cannot be counted."), perl = TRUE ), regexp = sprintf("(%s|%s)", paste("The following levels: .+USR_559.+ have < 30", "observations and are disregarded"), paste("argument .+min_obs_in_subgroup.+ was missing,", "not of length 1 or NA, setting to its default, 30")), perl = TRUE, all = TRUE ) ds1. <- subset(ds1, USR_BP_0 != "USR_559") expect_silent( x <- prep_min_obs_level(ds1., group_vars = "USR_BP_0", min_obs_in_subgroup = 50) ) expect_equal(nrow(ds1.) - nrow(x), 0) })
setClass(Class = "Survey.LT", representation = representation(transect = "Line.Transect", perpendicular.truncation = "numeric"), contains = "Survey" ) setMethod( f="initialize", signature="Survey.LT", definition=function(.Object, population, transect, perp.truncation){ .Object@population <- population .Object@transect <- transect .Object@perpendicular.truncation <- perp.truncation valid <- validObject(.Object, test = TRUE) if(is.character(valid)){ stop(paste(valid), call. = FALSE) } return(.Object) } ) setValidity("Survey.LT", function(object){ return(TRUE) } ) setMethod( f="run.survey", signature="Survey.LT", definition=function(object, region = NULL){ population <- object@population line.transect <- object@transect poss.distances <- calc.perp.dists(population, line.transect) if(!is.null(poss.distances$distance)){ object@population@population$distance <- poss.distances$distance } dist.data <- simulate.detections(poss.distances, object@population@detectability) if(nrow(dist.data) == 0){ warning("No detections", immediate. = TRUE, call. = FALSE) return(object) } all.col.names <- names(object@population@population) cov.param.names <- all.col.names[!all.col.names %in% c("object", "x", "y", "Region.Label", "Sample.Label", "scale.param", "shape.param", "individual")] dist.data <- dist.data[,c("object", "individual", "Region.Label", "Sample.Label", "distance", "x", "y", cov.param.names)] sample.table <- data.frame(Region.Label = line.transect@samplers$strata, Sample.Label = line.transect@samplers$transect, Effort = sf::st_length(line.transect@samplers)) if(!is.null(region)){ region.table <- data.frame(Region.Label = region@region.name, Area = region@area) sample.table <- dplyr::left_join(sample.table, region.table, by = "Region.Label") } dist.data <- dplyr::full_join(dist.data, sample.table, by = c("Sample.Label", "Region.Label")) index <- order(dist.data$Sample.Label) dist.data <- dist.data[index,] object@dist.data <- dist.data return(object) } )
library(sp23design) trueParameters <- list(p0 = 0.3, p1 = 0.3, pdiffHyp=0.3, theta = list( alpha = 0, beta = 0, gamma = 0), baselineLambda = 0.35, etaHyp = 0.25) trialParameters <- list(minimumNumberOfEvents = 20, minimumIncreaseInV = 0.2, numberRecruitedEachYear = c(80, 120, 160, 160), followupTime = 3, adminCensoringTime = 7, interimLookTime = c(1, 2, 3, 5, 7), type1ErrorForResponse = 0.05, type2ErrorForResponse = 0.01, glrBoundarySidedness = "one", type1Error = 0.05, type2Error = 0.10, epsType1 = 1/3, epsType2 = 1/3) sp23Design <- generateSP23Design(trueParameters, trialParameters) trialHistory <- exploreSP23Design(sp23Design, numberOfSimulations=1000, rngSeed=9872831) result <- analyzeSP23Design(sp23Design, trialHistory)$designSummary cat("numberOfTimesH0RIsRejectedAtFirstLook", result[["numberOfTimesH0RIsRejectedAtFirstLook"]], "\n") cat("numberOfTimesH0RIsRejected", result[["numberOfTimesH0RIsRejected"]], "\n") cat("numberOfTimesStoppedForFutility", result[["numberOfTimesStoppedForFutility"]], "\n") cat("numberOfTimesH0SIsAccepted", result[["numberOfTimesH0SIsAccepted"]], "\n") cat("numberOfTimesH0SIsRejected", result[["numberOfTimesH0SIsRejected"]], "\n") cat("numberOfTimesFutilityDecidedAtLastLook", result[["numberOfTimesFutilityDecidedAtLastLook"]], "\n") cat("numberOfTimesTrialEndedAtLook", result[["numberOfTimesTrialEndedAtLook"]], "\n") cat("avgExitTime", result[["avgExitTime"]], "\n")
ranktruncated <- function(pValues, K, silent = FALSE){ L <- length(pValues) if (K > L){ stop("K must be smaller than L") } index <- order(pValues) rindex <- order(index) spval <- pValues[index] w <- prod(spval[1:K]) w.pvalues <- pValues[rindex] p.used <- data.frame(Position=index[1:K], pValue=spval[1:K]) awt <- function(w,t,K){ if (w<=t^K){ s <- c(0:(K-1)) num1 <- K*log(t)-log(w) num2 <- w * sum(num1^s/factorial(s)) } if (w > t^K) { num2<- t^K } return(num2) } fac1 <- choose(L, K+1)*(K+1) t <- seq(0.001,0.999,0.001) terg <- c() for (i in 1:length(t)){ terg[i] <- (1-t[i])^(L-K-1)*awt(w,t[i],K) } distribution <- fac1*mean(terg) p1 <- (distribution>1) distribution[p1] <- 1 if (! silent) { cat("Rank truncated product of the", K, "smallest p-values: statistic =", w, ", p-value =", distribution, "\n") } result <- data.frame(Statistic = w, p.Value=distribution) return(list(Used.pValue=p.used, RTP=result)) }
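# Example for ranktruncated() above: combine the K smallest of ten uniform p-values into a single rank-truncated product test.
set.seed(42)
ranktruncated(runif(10), K = 3, silent = TRUE)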
var_tests = function(xvar, group_by, dat = NULL, hush = FALSE, sep = ', ') { if (typeof(dat) == "character") { dat = eval(parse(text = dat)) } validate_args(match.call(), list(val_arg(xvar, c('num', 'char')), val_arg(dat, c('null', 'df'), 1))) if (!is.null(dat)) { if (typeof(xvar) == 'character') { checkcol(names(dat), xvar) xvar = dat[[xvar]] } if (typeof(group_by) == 'character') { group_by = eval(parse( text = paste0( 'with(data = dat, paste(', paste(group_by, collapse = ','), ", sep = '", sep, "'))" ) )) } } group_by = as.factor(as.character(group_by)) df_mes = data.frame(xvar = xvar, group_by = group_by) df_sds = aggr_neat( df_mes, 'xvar', group_by = 'group_by', method = function(x) { c(sd = stats::sd(x, na.rm = TRUE), n = length(stats::na.omit(x))) } ) sds_zip = paste(paste0( df_sds$aggr_group, ': n = ', ro(df_sds$x.n, 2, signi = TRUE), ', SD = ', ro(df_sds$x.sd, 2) ), collapse = '; ') lev_med = car::leveneTest(y = xvar, group = group_by) fk_med = stats::fligner.test(x = xvar, g = group_by) if (hush == FALSE) { prnt( " ", sds_zip, ".\n Brown-Forsythe: F(", lev_med$Df[1], ",", lev_med$Df[2], ")", " = ", ro(lev_med$`F value`[1], 2), ", p = ", ro(lev_med$`Pr(>F)`[1], 3), "; Fligner-Killeen: X2(", fk_med$parameter, ")", " = ", ro(fk_med$statistic, 3), ", p = ", ro(fk_med$p.value, 3), '.' ) } invisible(list( df_sds = df_sds, p_BF = lev_med$`Pr(>F)`[1], p_FK = fk_med$p.value )) }
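# Example call sketch for var_tests() above; it relies on its package's internal helpers (validate_args(), aggr_neat(), ro(), prnt()), so this is illustrative only.
# var_tests(mtcars$mpg, group_by = mtcars$am)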
test_that("sw_* lists haven't changed", { expect_known_value(sw_people, reference_file("sw_people.rds")) expect_known_value(sw_films, reference_file("sw_films.rds")) expect_known_value(sw_planets, reference_file("sw_planets.rds")) expect_known_value(sw_species, reference_file("sw_species.rds")) expect_known_value(sw_vehicles, reference_file("sw_vehicles.rds")) expect_known_value(sw_starships, reference_file("sw_starships.rds")) })
front41Est <- function( command = ifelse( .Platform$OS.type == "windows", "front41.exe", "front41.bin" ), ... ) { front41Ins <-front41WriteInput( ... ) fullCommand <- paste( command, front41Ins$insFile ) front41Messages <- system( fullCommand, intern = TRUE ) front41Out <- front41ReadOutput( front41Ins$outFile ) front41Out$input <- front41Ins front41Out$messages <- front41Messages return( front41Out ) }
marginaleffects <- function(model, newdata = NULL, variables = NULL, vcov = TRUE, type = "response", ...) { scall <- substitute(newdata) if (is.call(scall)) { lcall <- as.list(scall) fun_name <- as.character(scall)[1] if (fun_name %in% c("datagrid", "typical", "counterfactual")) { if (!any(c("model", "newdata") %in% names(lcall))) { lcall <- c(lcall, list("model" = model)) newdata <- eval.parent(as.call(lcall)) } } else if (fun_name == "visualisation_matrix") { if (!"x" %in% names(lcall)) { lcall <- c(lcall, list("x" = insight::get_data(model))) newdata <- eval.parent(as.call(lcall)) } } } attributes_newdata <- attributes(newdata) idx <- c("class", "row.names", "names", "data", "reference") idx <- !names(attributes_newdata) %in% idx attributes_newdata <- attributes_newdata[idx] model <- sanity_model(model = model, newdata = newdata, variables = variables, vcov = vcov, type = type, return_data = return_data, ...) sanity_type(model = model, type = type, calling_function = "marginaleffects") newdata <- sanity_newdata(model, newdata) variables <- sanity_variables(model, newdata, variables) vcov <- sanitize_vcov(model, vcov) if (!"rowid" %in% colnames(newdata)) { newdata$rowid <- 1:nrow(newdata) } variables_vec <- unlist(variables[names(variables) %in% c("conditional")]) mfx_list <- list() mfx_list <- list() se_mean_list <- list() draws_list <- list() J_list <- list() J_mean_list <- list() for (predt in type) { for (v in variables_vec) { mfx <- get_dydx(model = model, variable = v, newdata = newdata, type = predt, ...) mfx$type <- predt if (!is.null(attr(mfx, "posterior_draws"))) { draws_list <- c(draws_list, list(attr(mfx, "posterior_draws"))) J <- J_mean <- NULL } else if (!is.null(vcov)) { idx <- intersect(colnames(mfx), c("type", "group", "term", "contrast")) idx <- mfx[, idx, drop = FALSE] se <- standard_errors_delta(model, vcov = vcov, type = predt, FUN = standard_errors_delta_marginaleffects, newdata = newdata, index = idx, variable = v) mfx$std.error <- as.numeric(se) J <- attr(se, "J") J_mean <- attr(se, "J_mean") } else { J <- J_mean <- NULL } mfx_list <- c(mfx_list, list(mfx)) J_list <- c(J_list, list(J)) J_mean_list <- c(J_mean_list, list(J_mean)) } } out <- bind_rows(mfx_list) J <- do.call("rbind", J_list) J_mean <- bind_rows(J_mean_list) if ("contrast" %in% colnames(J_mean)) { J_mean$contrast <- ifelse(is.na(J_mean$contrast), "", J_mean$contrast) } if (!is.null(J_mean) && !is.null(vcov)) { idx <- !colnames(J_mean) %in% c("type", "group", "term", "contrast") tmp <- J_mean[, !idx, drop = FALSE] J_mean_mat <- as.matrix(J_mean[, idx, drop = FALSE]) colnames(J_mean_mat) <- colnames(J) if (any(colnames(J_mean_mat) != colnames(vcov))) { tmp <- NULL warning("The variance covariance matrix and the Jacobian do not match. 
`marginaleffects` is unable to compute standard errors using the delta method.") } else { V <- colSums(t(J_mean_mat %*% vcov) * t(J_mean_mat)) tmp$std.error <- sqrt(V) } se_at_mean_gradient <- tmp } else { se_at_mean_gradient <- NULL } draws <- do.call("rbind", draws_list) if (!is.null(draws)) { if (!"conf.low" %in% colnames(out)) { tmp <- apply(draws, 1, get_hdi) out[["std.error"]] <- NULL out[["dydx"]] <- apply(draws, 1, stats::median) out[["conf.low"]] <- tmp[1, ] out[["conf.high"]] <- tmp[2, ] } } return_data <- sanitize_return_data() if (isTRUE(return_data)) { out <- left_join(out, newdata, by = "rowid") } stubcols <- c("rowid", "type", "group", "term", "contrast", "dydx", "std.error", sort(grep("^predicted", colnames(newdata), value = TRUE))) cols <- intersect(stubcols, colnames(out)) cols <- unique(c(cols, colnames(out))) out <- out[, cols] if ("group" %in% colnames(out) && all(out$group == "main_marginaleffect")) { out$group <- NULL } if ("contrast" %in% colnames(out)) { if (all(is.na(out$contrast))) { out$contrast <- NULL } else { out$contrast[is.na(out$contrast)] <- "" } } row.names(out) <- NULL out <- as.data.frame(out) class(out) <- c("marginaleffects", class(out)) attr(out, "posterior_draws") <- draws attr(out, "model") <- model attr(out, "type") <- type attr(out, "model_type") <- class(model)[1] attr(out, "variables") <- variables attr(out, "J") <- J attr(out, "J_mean") <- J_mean attr(out, "se_at_mean_gradient") <- se_at_mean_gradient for (a in names(attributes_newdata)) { attr(out, paste0("newdata_", a)) <- attributes_newdata[[a]] } return(out) } meffects <- marginaleffects
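# Usage sketch for marginaleffects() above, following the marginaleffects package interface: unit-level slopes for each regressor of a fitted model.
# mod <- lm(mpg ~ hp + wt, data = mtcars)
# mfx <- marginaleffects(mod)
# head(mfx)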
xport.namestr <- function( var, varName, varNum, varPos, varLength, varLabel="", fName="", fLength=0, fDigits=0, just=c("left","right"), iName="", iLength=0, iDigits=0 ) { if(is.factor(var)) var <- as.character(var) isChar = is.character(var) if(missing(varLength)) if(isChar) varLength <- max(nchar(var, "bytes", keepNA=FALSE)) else varLength <- 8 if( missing(varLabel) || is.null(varLabel) ) varLabel <- "" just <- match.arg(just) if(just=="left") justVal <- 0 else justVal <- 1 varName <- gsub("\\.","_", make.names(varName)) .C("fill_namestr", isChar = as.integer(isChar), nlng = as.integer(varLength), nvar0 = as.integer(varNum), nname = toupper(as.character(varName)), nlabel = as.character(varLabel), nform = toupper(as.character(fName)), nfl = as.integer(fLength), nfd = as.integer(fDigits), nfj = as.integer(justVal), niform = toupper(as.character(iName)), nifl = as.integer(iLength), nifd = as.integer(iDigits), npos = as.integer(varPos), PACKAGE="SASxport" ) .Call("getRawBuffer", PACKAGE="SASxport") }
ssdata <- function(sys1) {
    # inherits() is safer than class(sys1) == "...", which fails for
    # objects carrying more than one class attribute
    if (inherits(sys1, "ss")) {
        return(sys1)
    } else if (inherits(sys1, "tf")) {
        return(tf2ss(sys1))
    } else if (inherits(sys1, "zpk")) {
        return(zp2ss(sys1))
    } else {
        stop("ssdata: sys1 must be of class tf, ss or zpk")
    }
}
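## Sketch: converting a transfer function to state space, assuming the tf()
## constructor from the same control-systems toolbox is available.
sys_tf <- tf(1, c(1, 2, 1))   # hypothetical system 1 / (s^2 + 2s + 1)
sys_ss <- ssdata(sys_tf)      # dispatched through tf2ss()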
data{ n <- length(energy) n.activities <- max(activity) n.subjects <- max(subject) } model{ for (i in 1:n) { energy[i] ~ dnorm(mu[i], tau) mu[i] <- b0 + b1[activity[i]] + b2[subject[i]] residual[i] <- mu[i] - energy[i] } for (j in 1:(n.activities - 1)) { b1[j] ~ dnorm(0, tau.1) } b1[3] <- 0 for (k in 1:(n.subjects - 1)) { b2[k] ~ dnorm(0, tau.1) } b2[n.subjects] <- -sum(b2[1:(n.subjects - 1)]) b0 ~ dnorm(0, tau.1) tau ~ dgamma(0.002, 0.002) tau.1 ~ dgamma(0.002, 0.002) }
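# Driver sketch for the BUGS/JAGS model above, a normal ANOVA with a corner
# constraint on activities (b1[3] <- 0) and sum-to-zero subject effects.
# Assumes rjags is installed, the model text is saved to "energy-model.bug"
# (an assumed file name), and energy/activity/subject vectors exist:
# library(rjags)
# jm <- jags.model("energy-model.bug",
#                  data = list(energy = energy, activity = activity, subject = subject),
#                  n.chains = 3)
# update(jm, n.iter = 1000)
# samp <- coda.samples(jm, variable.names = c("b0", "b1", "b2"), n.iter = 5000)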
rbinTrait <- function(n=1, phy, beta, alpha, X = NULL, model = c("LogReg")) {
    model = match.arg(model)
    if (is.null(n)) stop("n needs to be an integer (number of replicates)")
    if (length(n) > 1) stop("n needs to be an integer (number of replicates)")
    n = as.numeric(n)
    if (!inherits(phy, "phylo")) stop('object "phy" is not of class "phylo".')
    if (is.null(phy$edge.length)) stop("the tree has no branch lengths.")
    if (is.null(phy$tip.label)) stop("the tree has no tip labels.")
    phy = reorder(phy, "pruningwise")
    ntip <- length(phy$tip.label)
    N <- dim(phy$edge)[1]
    ROOT <- ntip + 1L
    anc <- phy$edge[, 1]
    des <- phy$edge[, 2]
    d = length(beta)
    if ((d == 1) & (!is.null(X))) stop("The design matrix is not needed when the coefficient is a scalar.")
    if (d > 1) {
        if (is.null(X)) stop("there are no independent variables: the design matrix X is missing.")
        X = as.matrix(X)
        if (length(beta) != ncol(X)) stop("the number of columns in the design matrix does not match the length of the vector of coefficients.")
        if (nrow(X) != ntip) stop("the number of rows in the design matrix does not match the number of tips in the tree.")
        if (is.null(rownames(X))) {
            warning("independent variables have no tip labels, order assumed to be the same as in the tree.\n")
            data.names = phy$tip.label
        } else data.names = rownames(X)
        order = match(data.names, phy$tip.label)
        if (sum(is.na(order)) > 0) stop("data names do not match with the tip labels.\n")
        g = X %*% beta
        mu = as.vector(exp(g)/(1 + exp(g)))
        p = mean(mu)
    } else {
        g = as.numeric(beta)
        p = exp(g)/(1 + exp(g))
    }
    q = 1 - p
    y <- matrix(0, ntip + phy$Nnode, n)
    el = phy$edge.length
    y[ROOT, ] = as.numeric(runif(n) < p)
    for (i in N:1) {
        j0 = (y[anc[i], ] == 0)
        y[des[i], j0] = as.numeric(runif(sum(j0)) < (p - p*exp(-el[i]*alpha)))
        y[des[i], !j0] = as.numeric(runif(sum(!j0)) < (p + q*exp(-el[i]*alpha)))
    }
    y <- y[1:ntip, ]
    if (d > 1) {
        et = matrix(pmin(mu/p, (1 - mu)/(1 - p)), ntip, n)
        b = matrix(as.numeric(mu > p), ntip, n)
        ij0 = (y == 0)
        y[ij0] = as.numeric(runif(sum(ij0)) < (b[ij0] - b[ij0]*et[ij0]))
        y[!ij0] = as.numeric(runif(sum(!ij0)) < (b[!ij0] + (1 - b[!ij0])*et[!ij0]))
    }
    if (n == 1) names(y) <- phy$tip.label else rownames(y) <- phy$tip.label
    return(y)
}
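## Sketch: simulating one binary trait along a random coalescent tree
## (assumes the ape package; alpha controls the switching rate).
tree <- ape::rcoal(50)
trait <- rbinTrait(n = 1, phy = tree, beta = -0.5, alpha = 1)
table(trait)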
setClass("fTHETA", representation( call = "call", data = "list", theta = "data.frame", title = "character", description = "character") ) setMethod("show", "fTHETA", function(object) { x = object cat("\nTitle:\n ", x@title, "\n", sep = "") cat("\nCall:\n ", deparse(x@call), "\n", sep = "") cat("\nExtremal Index:\n") print(object@theta) cat("\nDescription:\n ", x@description, sep = "") cat("\n\n") invisible() }) thetaSim = function(model = c("max", "pair"), n = 1000, theta = 0.5) { model = match.arg(model) model = model[1] X = rep(0, n) if (model == "max") { eps = 1/(-log(runif(n))) X[1] = theta*eps[1] for ( i in 2:n ) X[i] = max( (1-theta)*X[i-1], theta*eps[i] ) } else if (model == "pair") { theta = 0.5 eps = rexp(n+1) for ( i in 1:n ) X[i] = max(eps[i], eps[i+1]) } X = as.ts(X) attr(X, "control") = c(model = model, theta = as.character(theta)) X } blockTheta = function (x, block = 22, quantiles = seq(0.950, 0.995, length = 10), title = NULL, description = NULL) { stopifnot(is.numeric(block)) X = as.vector(x) ordered = sort(X) k = floor(length(X)/block) n = k*block X = matrix(X[1:(k*block)], ncol = block, byrow = TRUE) thresholds = ordered[floor(quantiles*length(X))] theta1 = rep(0, times = length(quantiles)) run = 0 keepK = keepN = NULL for ( u in thresholds ) { run = run + 1 N = length(X[X > u]) K = floor(sum(sign(apply(X, 1, max) - u) + 1) / 2) if (K/k < 1) { theta1[run] = (k/n) * log(1-K/k) / log(1-N/n) } else { theta1[run] = NA } keepK = c(keepK, K) keepN = c(keepN, N) } ans = data.frame(quantiles = quantiles, thresholds = thresholds, N = keepN, K = keepK, theta = theta1) if (is.null(title)) title = "Extremal Index from Block Method" if (is.null(description)) description = description() new("fTHETA", call = match.call(), data = list(x = x, block = block), theta = ans, title = title, description = description) } clusterTheta = function (x, block = 22, quantiles = seq(0.950, 0.995, length = 10), title = NULL, description = NULL) { stopifnot(is.numeric(block)) X = as.vector(x) ordered = sort(X) k = floor(length(X)/block) n = k*block X = matrix(X[1:(k*block)], ncol = block, byrow = TRUE) thresholds = ordered[floor(quantiles*length(X))] theta2 = rep(0, times = length(quantiles)) run = 0 keepK = keepN = NULL for ( u in thresholds ) { run = run + 1 N = length(X[X > u]) K = floor(sum(sign(apply(X, 1, max) - u) + 1) / 2) theta2[run] = K/N keepK = c(keepK, K) keepN = c(keepN, N) } ans = data.frame(quantiles = quantiles, thresholds = thresholds, N = keepN, K = keepK, theta = theta2) if (is.null(title)) title = "Extremal Index from Reciprocal Cluster Method" if (is.null(description)) description = description() new("fTHETA", call = match.call(), data = list(x = x, block = block), theta = ans, title = title, description = description) } runTheta = function (x, block = 22, quantiles = seq(0.950, 0.995, length = 10), title = NULL, description = NULL) { stopifnot(is.numeric(block)) X = as.vector(x) ordered = sort(X) k = floor(length(X)/block) n = k*block Count = 1:n X = matrix(X[1:(k*block)], ncol = block, byrow = TRUE) thresholds = ordered[floor(quantiles*length(X))] theta3 = rep(0, times = length(quantiles)) run = 0 keepN = NULL for ( u in thresholds ) { run = run + 1 N = length(X[X > u]) Y = diff(Count[X > u]) Y = Y[Y > block] theta3[run] = length(Y)/N keepN = c(keepN, N) } ans = data.frame(quantiles = quantiles, thresholds = thresholds, N = keepN, theta = theta3) if (is.null(title)) title = "Extremal Index from Run Method" if (is.null(description)) description = description() new("fTHETA", call = 
match.call(), data = list(x = x, block = block), theta = ans, title = title, description = description) } ferrosegersTheta = function (x, quantiles = seq(0.950, 0.995, length= 10), title = NULL, description = NULL) { x = as.vector(x) n = length(x) N = floor(quantiles*n) sorted = sort(x) U = sorted[N] ans = NULL for ( u in U ) { msg = 0 id = x > u N = sum(id) S = (1:n)[id] TT = diff(S) if (!any(TT > 2)) { theta = 2*sum(TT, na.rm = TRUE)^2/((N-1) * sum(TT^2, na.rm = TRUE)) msg = msg + 1 if (theta > 1) { theta = 1 msg = msg + 10 } } else { theta = 2 * sum(TT-1, na.rm = TRUE)^2/((N-1) * sum((TT-1) * (TT-2), na.rm = TRUE)) msg = msg + 100 if (theta > 1) { theta = 1 msg = msg + 1000 } } K = ifelse(round(theta*N) != theta*N, floor(theta*N) + 1, theta*N) T.order = order(TT, na.last = TRUE, decreasing = TRUE) T.ranked = TT[T.order] T.K = T.ranked[K] if (sum(TT == T.K, na.rm = TRUE) > 1) { for (i in 1:K) { K = K - 1 T.K = T.ranked[K] if (sum(TT == T.K, na.rm = TRUE) > 1) { next } else { break } } } ans = rbind(ans, c(T.K, K, msg, theta)) } ans = data.frame(quantiles, U, ans) colnames(ans) = c("Threshold", "Quantiles", "RunLength", "Clusters", "messageNo", "theta") if (is.null(title)) title = "Extremal Index from Ferro-Segers Method" if (is.null(description)) description = description() new("fTHETA", call = match.call(), data = list(x = x), theta = ans, title = title, description = description) } exindexesPlot = function (x, block = 22, quantiles = seq(0.950, 0.995, length = 10), doplot = TRUE, labels = TRUE, ...) { if (!is.numeric(block)) stop("Argument block must be an integer value.") doprint = FALSE blocklength = block resid = as.vector(x) k = floor(length(resid)/blocklength) n = k*blocklength resid1 = resid[1:(k*blocklength)] resid1 = matrix(resid1, ncol = blocklength, byrow = TRUE) ordered1 = sort(resid1) z0 = ordered1[floor(quantiles*length(resid1))] theta1 = theta2 = theta3 = rep(0, times = length(quantiles)) run = 0 for ( z in z0 ) { run = run + 1 N = length(resid1[resid1 > z]) K = floor(sum(sign(apply(resid1, 1, max)-z)+1) / 2) if (K/k < 1) { theta1[run] = (k/n) * log(1-K/k) / log(1-N/n) } else { theta1[run] = NA } theta2[run] = K/N x = 1:n xx = diff(x[resid1 > z]) xx = xx[xx > blocklength] theta3[run] = length(xx)/N if (doprint) { print(c(N, K, quantiles[run], z)) print(c(theta1[run], theta2[run], theta3[run])) } } if (doplot) { plot(quantiles, theta1, xlim = c(quantiles[1], quantiles[length(quantiles)]), ylim = c(0, 1.2), type = "b", pch = 1, xlab = "", ylab = "", main = "", ...) points(quantiles, theta2, pch = 2, col = 3) points(quantiles, theta3, pch = 4, col = 4) if (labels) { title(main = "Extremal Index") title(xlab = "Quantile", ylab = "Theta 1,2,3") mtext("Threshold", side = 3, line = 3) grid() mtext(text = paste("Blocklength: ", as.character(block)), adj = 0, side = 4, cex = 0.7) } } ans = data.frame(quantiles = quantiles, thresholds = z0, theta1 = theta1, theta2 = theta2, theta3 = theta3) ans } exindexPlot = function(x, block = c("monthly", "quarterly"), start = 5, end = NA, doplot = TRUE, plottype = c("thresh", "K"), labels = TRUE, ...) 
{ plottype = match.arg(plottype) reverse = FALSE if (plottype == "K") reverse = TRUE b.maxima = rev(sort(as.vector(blockMaxima(x, block)))) data = as.vector(x) sorted = rev(sort(data)) n = length(sorted) if (!is.numeric(block)) block = round(length(data)/length(b.maxima)) k = round(n/block) un = unique(b.maxima)[-1] K = match(un, b.maxima) - 1 N = match(un, sorted) - 1 if (is.na(end)) end = k cond = (K < end) & (K >= start) un = un[cond] K = K[cond] N = N[cond] theta2 = K/N theta = logb(1 - K/k)/(block * logb(1 - N/n)) ans = data.frame(N = N, K = K, un = un, theta2 = theta2, theta = theta) yrange = range(theta) index = K if (reverse) index = - K if (doplot) { plot(index, theta, ylim = yrange, type = "b", xlab = "", ylab = "", axes = FALSE, ...) IDX = round(seq(1, length(index), length = 10)) axis(1, at = index[IDX], labels = paste(K)[IDX]) axis(2) axis(3, at = index[IDX], labels = paste(format(signif(un, 3)))[IDX]) box() if (labels) { ylabel = paste("theta (", k, " blocks of size ", block, ")", sep = "") title(xlab = "K", ylab = ylabel) mtext("Threshold", side = 3, line = 3) lines(index, theta, col = "steelblue") grid() mtext(text = paste("Blocklength: ", as.character(block)), adj = 0, side = 4, cex = 0.7) } } ans }
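## Sketch: simulate a max-autoregressive series with known extremal index 0.5,
## then recover it with the estimators defined above (package helpers such as
## description() are assumed to be loaded).
x <- thetaSim("max", n = 5000, theta = 0.5)
blockTheta(x, block = 22)      # fTHETA object; theta column should be near 0.5
runTheta(x, block = 22)
exindexesPlot(x, block = 22)   # compares the three estimators graphically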
adm_cmt <- function(x){ dat <- as.list(x)$details$data if(is.null(dat[["block"]])){ v <- NULL } else { datcmt <- dat[dat$block %in% c("CMT", "INIT"),] admcmtname <- datcmt$name[str_detect(tolower(datcmt$options), "adm")] v <- x@Icmt[x@cmtL %in% admcmtname] if(length(v)== 0){ v <- NULL } } return(v) } obs_cmt <- function(x){ dat <- as.list(x)$details$data if(is.null(dat[["block"]])){ v <- NULL } else { datcmt <- dat[dat$block %in% c("CMT", "INIT"),] obscmtname <- datcmt$name[str_detect(tolower(datcmt$options), "obs")] v <- x@Icmt[x@cmtL %in% obscmtname] if(length(v)== 0){ v <- NULL } } return(v) } obs_cmt_data <- function(data){ v <- sort(unique(data[data$mdv==0,]$cmt)) if(length(v)== 0){ v <- NULL } return(v) } fit_cmt <- function(x, data){ cmt_model <- obs_cmt(x) if(is.null(cmt_model)){ return(obs_cmt_data(data)) } else{ return(cmt_model) } } adm_0_cmt <- function(x){ v <- paste0("D_", x@cmtL) %>% map(str_detect, string = x@code) %>% map(any) %>% as.logical() %>% which() if(length(v)== 0){ v <- NULL } return(v) } log_transformation <- function(x){ x@code %>% str_subset("EPS") %>% str_detect("exp *\\(.*EPS") %>% any() } eta_names <- function(x){ parnames <- names(param(x)) v <- parnames[grepl("^ETA\\d+$", parnames)] if(length(v)== 0){ v <- NULL } return(v) } n_eta <- function(x){ length(eta_names(x)) } eta_descr <- function(x){ dat <- as.list(x)$details$data if(is.null(dat[["block"]])){ v <- eta_names(x) } else { datpar <- filter(dat, .data$block=="PARAM") v <- datpar$descr[datpar$name %in% eta_names(x)] } return(v) } mbr_cov_names <- function(x){ as.list(x)$covariates } mbr_cov_refvalues <- function(x){ unlist(x@param[mbr_cov_names(x)]) } mbr_cov_descr <- function(x){ as.list(x)$details$data %>% filter(.data$name %in% mbr_cov_names(x)) %>% mutate(covariate_description = paste0(.data$descr, " (", .data$unit, ")")) %>% pull(.data$covariate_description) }
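## Sketch (assumes a mapbayr/mrgsolve model object whose $CMT block carries
## [ADM]/[OBS] annotations; exmodel() is a hypothetical example loader):
# mod <- exmodel()
# adm_cmt(mod)    # compartment(s) receiving doses
# obs_cmt(mod)    # compartment(s) being observed
# eta_names(mod)  # names of the ETA parameters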
library("knitr") knitr::opts_knit$set(self.contained = FALSE) knitr::opts_chunk$set(tidy = TRUE, collapse=TRUE, comment = " tidy.opts=list(blank=FALSE, width.cutoff=55)) library("ebci") cva(m2=0, kappa=Inf, alpha=0.05)$cv cva(m2=4, kappa=Inf, alpha=0.05)$cv cva(m2=4, kappa=3, alpha=0.05)$cv r <- ebci(formula=theta25~stayer25, data=cz, se=se25, weights=1/se25^2, alpha=0.1) r$delta c(r$mu2, r$kappa) names(r$df) cva(r$df$se[1]^2/r$mu2[1], r$kappa[1], alpha=0.1)$cv*r$df$w_eb[1]*r$df$se[1] r$df$len_eb[1] df <- (cbind(cz[!is.na(cz$se25), ], r$df)) df <- df[df$state=="NY", ] knitr::kable(data.frame(cz=df$czname, unshrunk_estimate=df$theta25, estimate=df$th_eb, lower_ci=df$th_eb-df$len_eb, upper_ci=df$th_eb+df$len_eb), digits=3) mean(r$df$len_eb) mean(r$df$len_pa) mean(r$df$len_us) mean(r$df$len_eb)/mean(r$df$len_pa) mean(r$df$len_eb)/mean(r$df$len_us) mean(r$df$ncov_pa)
centTable <- function(Wmats, scale = TRUE, which.net = "temporal", labels = NULL, relative = FALSE, weighted = TRUE, signed = TRUE){ if(isTRUE(attr(Wmats, "mlGVAR"))){ Wmats <- switch(which.net, between = Wmats$betweenNet, Wmats$fixedNets)} if("SURnet" %in% c(names(Wmats), names(attributes(Wmats)))){ if("SURnet" %in% names(Wmats)){Wmats <- Wmats$SURnet} if(is.numeric(which.net)){which.net <- c("t", "c", "p", 'i')[which.net]} which.net <- match.arg(tolower(which.net), c("temporal", "contemporaneous", "pdc", "interactions")) Wmats <- Wmats[[ifelse(which.net == "contemporaneous", "contemporaneous", ifelse(which.net == 'interactions', 'interactions', 'temporal'))]] if(which.net == "pdc"){Wmats <- Wmats$PDC} if(which.net == 'interactions'){names(Wmats)[1] <- 'adjMat'} } else if(startsWith(tolower(which.net), 'i')){stop('Interaction centrality only supported for temporal networks.')} if("adjMat" %in% names(Wmats)){Wmats <- t(Wmats$adjMat)} if(any(grepl("lag", dimnames(Wmats)))){dimnames(Wmats) <- lapply(dimnames(Wmats), function(z) gsub("[.]lag1.*|[.]y$", "", z))} if(!is.list(Wmats)){Wmats <- list(Wmats)} if(any(sapply(Wmats, ncol) == 1)){stop("Not supported for single-node graphs")} names(Wmats) <- fnames(Wmats, 'graph ') centOut <- lapply(Wmats, centAuto, which.net = which.net, weighted = weighted, signed = signed) for(g in seq_along(centOut)){ if(!is(centOut[[g]], "centrality_auto")){ names(centOut[[g]]) <- fnames(centOut[[g]], 'type ') for(t in seq_along(centOut[[g]])){ if(!is.null(labels)){ centOut[[g]][[t]][["node.centrality"]][["node"]] <- labels } else if(!is.null(rownames(centOut[[g]][[t]][["node.centrality"]]))){ centOut[[g]][[t]][["node.centrality"]][["node"]] <- rownames(centOut[[g]][[t]][["node.centrality"]]) } else { centOut[[g]][[t]][["node.centrality"]][["node"]] <- paste("Node", seq_len(nrow(centOut[[g]][[t]][["node.centrality"]]))) } centOut[[g]][[t]]$node.centrality$graph <- names(centOut)[g] centOut[[g]][[t]]$node.centrality$type <- names(centOut[[g]])[t] } } else { centOut[[g]]$node.centrality$graph <- names(centOut)[g] if(!is.null(labels)){ centOut[[g]][["node.centrality"]][["node"]] <- labels } else if(!is.null(rownames(centOut[[g]][["node.centrality"]]))){ centOut[[g]][["node.centrality"]][["node"]] <- rownames(centOut[[g]][["node.centrality"]]) } else { centOut[[g]][["node.centrality"]][["node"]] <- paste("Node", seq_len(nrow(centOut[[g]][["node.centrality"]]))) } } } isList <- sapply(centOut, function(x) !"centrality_auto" %in% class(x)) if(any(isList)){ for(l in which(isList)){centOut <- c(centOut, centOut[[l]])} centOut <- centOut[-which(isList)] } for(i in seq_along(centOut)){ if(relative | scale){ if(relative & scale){warning("Using 'relative' and 'scale' together is not recommended")} for(j in which(sapply(centOut[[i]][["node.centrality"]], mode) == "numeric")){ if(scale){ centOut[[i]][["node.centrality"]][, j] <- scaleNA(centOut[[i]][["node.centrality"]][, j]) } if(relative){ mx <- max(abs(centOut[[i]][["node.centrality"]][, j]), na.rm = TRUE) if(mx != 0){centOut[[i]][["node.centrality"]][, j] <- centOut[[i]][["node.centrality"]][, j]/mx} } attributes(centOut[[i]][["node.centrality"]][, j]) <- NULL } } } wideCent <- plyr::rbind.fill(lapply(centOut, "[[", "node.centrality")) if(is.null(wideCent$type)){wideCent$type <- NA} longCent <- reshape2::melt(wideCent, variable.name = "measure", id.var = c("graph", "type", "node")) if(any(is.nan(longCent$value))){warning("NaN detected in centrality measures. 
Try relative = FALSE")} return(longCent) } clustTable <- function(Wmats, scale = TRUE, labels = NULL, relative = FALSE, signed = TRUE){ if("SURnet" %in% c(names(Wmats), names(attributes(Wmats)))){ if("SURnet" %in% names(Wmats)){Wmats <- Wmats$SURnet} Wmats <- Wmats$contemporaneous$adjMat } else if("adjMat" %in% names(Wmats)){ Wmats <- Wmats$adjMat } if(any(grepl("lag", dimnames(Wmats)))){ dimnames(Wmats) <- lapply(dimnames(Wmats), function(z) gsub("[.]lag1.*|[.]y$", "", z)) } if(!is.list(Wmats)){Wmats <- list(Wmats)} if(any(sapply(Wmats, ncol) == 1)){stop("Not supported for single-node graphs")} syms <- sapply(Wmats, isSymmetric) if(any(!syms)){ if(all(!syms)){stop("No symmetrical graphs detected")} warning(paste0(sum(!syms), " Nonsymmetrical graph", ifelse(sum(!syms) > 1, "s ", " "), "removed")) Wmats <- Wmats[-which(!syms)] } names(Wmats) <- fnames(Wmats, 'graph ') clustOut <- lapply(Wmats, clustAuto) for(g in seq_along(clustOut)){ if(!is(clustOut[[g]], "clustcoef_auto")){ names(clustOut[[g]]) <- fnames(clustOut[[g]], 'type ') for(t in seq_along(clustOut[[g]])){ if(!is.null(labels)){ clustOut[[g]][[t]][["node"]] <- labels } else if(!is.null(rownames(clustOut[[g]][[t]]))){ clustOut[[g]][[t]][["node"]] <- rownames(clustOut[[g]][[t]]) } else { clustOut[[g]][[t]][["node"]] <- paste("Node", seq_len(nrow(clustOut[[g]][[t]]))) } clustOut[[g]][[t]]$graph <- names(clustOut)[g] clustOut[[g]][[t]]$type <- names(clustOut[[g]])[t] } } else { clustOut[[g]]$graph <- names(clustOut)[g] if(!is.null(labels)){ clustOut[[g]][["node"]] <- labels } else if(!is.null(rownames(clustOut[[g]]))){ clustOut[[g]][["node"]] <- rownames(clustOut[[g]]) } else { clustOut[[g]][["node"]] <- paste("Node", seq_len(nrow(clustOut[[g]]))) } } } isList <- sapply(clustOut, function(x) !"clustcoef_auto" %in% class(x)) if(any(isList)){ for(l in which(isList)){clustOut <- c(clustOut, clustOut[[l]])} clustOut <- clustOut[-which(isList)] } for(i in seq_along(clustOut)){ if(any(grepl("signed_", names(clustOut[[i]])))){ clustOut[[i]] <- clustOut[[i]][, sapply(clustOut[[i]], mode) != "numeric" | grepl("signed_", names(clustOut[[i]])) == signed] names(clustOut[[i]]) <- gsub("signed_", "", names(clustOut[[i]])) } names(clustOut[[i]]) <- gsub("clust", "", names(clustOut[[i]])) if(relative | scale){ if(relative & scale){warning("Using 'relative' and 'scale' together is not recommended")} for(j in which(sapply(clustOut[[i]], mode) == "numeric")){ if(scale){ clustOut[[i]][, j] <- scaleNA(clustOut[[i]][, j]) } if(relative){ mx <- max(abs(clustOut[[i]][, j]), na.rm = TRUE) if(mx != 0){clustOut[[i]][, j] <- clustOut[[i]][, j]/mx} } attributes(clustOut[[i]][, j]) <- NULL } } } WideCent <- plyr::rbind.fill(clustOut) if(is.null(WideCent$type)){WideCent$type <- NA} LongCent <- reshape2::melt(WideCent, variable.name = "measure", id.var = c("graph", "type", "node")) return(LongCent) } centAuto <- function(x, which.net = "temporal", weighted = TRUE, signed = TRUE){ if(isTRUE(attr(x, "mlGVAR"))){ x <- switch(which.net, between = x$betweenNet, x$fixedNets)} if("SURnet" %in% c(names(x), names(attributes(x)))){ if("SURnet" %in% names(x)){x <- x$SURnet} if(is.numeric(which.net)){which.net <- c("t", "c", "p", "i")[which.net]} which.net <- match.arg(tolower(which.net), c("temporal", "contemporaneous", "pdc", "interactions")) x <- x[[ifelse(which.net == "contemporaneous", "contemporaneous", ifelse(which.net == 'interactions', 'interactions', "temporal"))]] if(which.net == "pdc"){x <- x$PDC} if(which.net == 'interactions'){names(x)[1] <- 'adjMat'} } if("adjMat" 
%in% names(x)){x <- t(x$adjMat)}
    if(any(grepl("lag", dimnames(x)))){dimnames(x) <- lapply(dimnames(x), function(z) gsub("[.]lag1.*|[.]y$", "", z))}
    if(is.list(x)){return(lapply(x, centAuto, which.net = which.net, weighted = weighted, signed = signed))}
    if(!weighted){x <- sign(x)}
    if(!signed){x <- abs(x)}
    if(!is.matrix(x)){stop("The input network must be an adjacency or weights matrix")}
    diag(x) <- 0
    directed.gr <- ifelse(isSymmetric.matrix(object = x, tol = 0.000000000001), FALSE, TRUE)
    weighted.gr <- ifelse(all(qgraph::mat2vec(x) %in% c(0, 1)), FALSE, TRUE)
    net_qg <- qgraph::qgraph(x, diag = FALSE, labels = colnames(x), DoNotPlot = TRUE, minimum = 0)
    centr <- qgraph::centrality(net_qg)
    if(directed.gr & !weighted.gr){
        centr1 <- data.frame(cbind(Betweenness = centr$Betweenness, Closeness = centr$Closeness,
                                   InDegree = centr$InDegree, OutDegree = centr$OutDegree,
                                   OutExpectedInfluence = centr$OutExpectedInfluence,
                                   InExpectedInfluence = centr$InExpectedInfluence))
    }
    if(directed.gr & weighted.gr){
        centr1 <- data.frame(cbind(Betweenness = centr$Betweenness, Closeness = centr$Closeness,
                                   InStrength = centr$InDegree, OutStrength = centr$OutDegree,
                                   OutExpectedInfluence = centr$OutExpectedInfluence,
                                   InExpectedInfluence = centr$InExpectedInfluence))
    }
    if(!directed.gr & !weighted.gr){
        centr1 <- data.frame(cbind(Betweenness = centr$Betweenness/2, Closeness = centr$Closeness,
                                   Degree = centr$OutDegree, ExpectedInfluence = centr$OutExpectedInfluence))
    }
    if(!directed.gr & weighted.gr){
        centr1 <- data.frame(cbind(Betweenness = centr$Betweenness/2, Closeness = centr$Closeness,
                                   Strength = centr$OutDegree, ExpectedInfluence = centr$OutExpectedInfluence))
    }
    row.names(centr1) <- colnames(x)
    log <- capture.output({
        graph <- igraph::graph.adjacency(adjmatrix = 1 * (x != 0), mode = ifelse(directed.gr, "directed", "undirected"))
        comps <- igraph::components(graph)
        largcomp <- comps$membership == which.max(comps$csize)
    })
    if(sum(largcomp) < ncol(x) & sum(largcomp) > 1){
        x2 <- x[largcomp, largcomp]
        clos <- qgraph::centrality(qgraph::qgraph(x2, diag = FALSE, labels = colnames(x)[largcomp], DoNotPlot = TRUE, minimum = 0))$Closeness
        centr1$Closeness[largcomp] <- clos
        centr1$Closeness[!largcomp] <- NA
    }
    net_ig_abs <- igraph::graph.adjacency(adjmatrix = abs(1/x), mode = ifelse(directed.gr, "directed", "undirected"),
                                          weighted = ifelse(weighted.gr, list(TRUE), list(NULL))[[1]], diag = FALSE)
    edgebet <- igraph::edge.betweenness(graph = net_ig_abs, directed = directed.gr)
    el <- data.frame(igraph::get.edgelist(graph = net_ig_abs), stringsAsFactors = FALSE)
    edgebet <- merge(el, edgebet, by = 0)
    edgebet$Row.names <- NULL
    names(edgebet) <- c("from", "to", "edgebetweenness")
    edgebet <- edgebet[order(edgebet$edgebetweenness, decreasing = TRUE), ]
    ShortestPathLengths <- centr$ShortestPathLengths
    rownames(ShortestPathLengths) <- colnames(ShortestPathLengths) <- colnames(x)
    Res <- list(node.centrality = centr1, edge.betweenness.centrality = edgebet, ShortestPathLengths = ShortestPathLengths)
    class(Res) <- c("list", "centrality_auto")
    return(Res)
}
clustAuto <- function(x, thresholdWS = 0, thresholdON = 0){
    if("SURnet" %in% c(names(x), names(attributes(x)))){
        if("SURnet" %in% names(x)){x <- x$SURnet}
        x <- x$contemporaneous$adjMat
    } else if("adjMat" %in% names(x)){
        x <- x$adjMat
    }
    if(any(grepl("lag", dimnames(x)))){
        dimnames(x) <- lapply(dimnames(x), function(z) gsub("[.]lag1.*|[.]y$", "", z))
    }
    if(is.list(x)){
        # pass thresholdON through unchanged (was a thresholdWS copy-paste slip)
        return(lapply(x, clustAuto, thresholdWS = thresholdWS, thresholdON = thresholdON))
    }
    dim = dim(x)
    if(is.null(dim) ||
length(dim) != 2){stop("adjacency is not two-dimensional")} if(!is.numeric(x)){stop("adjacency is not numeric")} if(dim[1] != dim[2]){stop("adjacency is not square")} if(max(abs(x - t(x)), na.rm = TRUE) > 0.000000000001){stop("adjacency is not symmetric")} if(min(x, na.rm = TRUE) < -1 || max(x, na.rm = TRUE) > 1){x <- x/max(abs(x))} weighted.gr <- ifelse(all(abs(x) %in% c(0, 1)), FALSE, TRUE) signed.gr <- ifelse(all(x >= 0), FALSE, TRUE) net_ig <- igraph::graph.adjacency( adjmatrix = abs(x), mode = "undirected", weighted = ifelse(weighted.gr, list(TRUE), list(NULL))[[1]], diag = FALSE) cb <- igraph::transitivity(net_ig, type = "barrat", isolates = "zero") cw <- WS(x, thresholdWS) cz <- zhang(x) co <- onnela(x, thresholdON) if(!signed.gr & !weighted.gr){output <- cbind(clustWS = cw[, 1])} if(!signed.gr & weighted.gr){ output <- cbind(clustWS = cw[, 1], clustZhang = cz[, 1], clustOnnela = co[, 1], clustBarrat = cb) } if(signed.gr & !weighted.gr){ output <- cbind(clustWS = cw[, 1], signed_clustWS = cw[, 2]) } if(signed.gr & weighted.gr){ output <- cbind(clustWS = cw[, 1], signed_clustWS = cw[, 2], clustZhang = cz[, 1], signed_clustZhang = cz[, 2], clustOnnela = co[, 1], signed_clustOnnela = co[, 2], clustBarrat = cb) } output[is.na(output)] <- 0 Res <- data.frame(output) class(Res) <- c("data.frame", "clustcoef_auto") rownames(Res) <- colnames(x) return(Res) } centPlot <- function(Wmats, scale = c("z-scores", "raw", "raw0", "relative"), which.net = "temporal", include = "all", labels = NULL, orderBy = NULL, decreasing = FALSE, plot = TRUE, verbose = TRUE, weighted = TRUE, signed = TRUE){ if(isTRUE(attr(Wmats, "mlGVAR"))){ Wmats <- switch(which.net, between = Wmats$betweenNet, Wmats$fixedNets)} if(is.logical(scale)){scale <- ifelse(scale, "z-scores", "raw")} measure <- value <- node <- type <- NULL scale <- tryCatch({match.arg(scale)}, error = function(e){scale}) include0 <- c("Degree", "Strength", "OutDegree", "InDegree", "OutStrength", "InStrength", "Closeness", "Betweenness", "ExpectedInfluence", "OutExpectedInfluence", "InExpectedInfluence") if(all(tolower(include) == "all")){ include <- include0 } else if(isTRUE(attr(Wmats, "SURnet")) & which.net != "contemporaneous"){ include0 <- include0[!grepl("Degree|^S|^E", include0)] include <- include0[grep(paste(tolower(include), collapse = "|"), tolower(include0))] } else if(which.net %in% c("between", "contemporaneous")){ include0 <- include0[!grepl("Degree|^Out|^In", include0)] include <- include0[grep(paste(tolower(include), collapse = "|"), tolower(include0))] } include <- match.arg(include, c("Degree", "Strength", "OutDegree", "InDegree", "OutStrength", "InStrength", "Closeness", "Betweenness", "ExpectedInfluence", "OutExpectedInfluence", "InExpectedInfluence"), several.ok = TRUE) if(scale == "z-scores" & verbose & plot){message("Note: z-scores are shown on x-axis.")} if(scale == "relative" & verbose & plot){message("Note: relative centrality indices are shown on x-axis.")} Long <- centTable(Wmats = Wmats, scale = (scale == "z-scores"), labels = labels, which.net = which.net, relative = (scale == "relative"), weighted = weighted, signed = signed) Long <- subset(Long, measure %in% include) Long$measure <- factor(Long$measure) if(ifelse(is.null(orderBy), FALSE, ifelse(orderBy == "default", TRUE, FALSE))){ nodeLevels <- unique(gtools::mixedsort( as.character(Long$node), decreasing = decreasing)) } else if(!is.null(orderBy)){ nodeLevels <- names(sort(tapply( Long$value[Long$measure == orderBy], Long$node[Long$measure == orderBy], mean), 
decreasing = decreasing)) } else { nodeLevels <- rev(unique(as.character(Long$node))) } Long$node <- factor(as.character(Long$node), levels = nodeLevels) Long <- Long[gtools::mixedorder(Long$node), ] if(length(unique(Long$type)) > 1){ g <- ggplot(Long, aes(x = value, y = node, group = type, colour = type)) } else { g <- ggplot(Long, aes(x = value, y = node, group = type)) } g <- g + geom_path() + xlab("") + ylab("") + geom_point() + theme_bw() if(length(unique(Long$graph)) > 1){ g <- g + facet_grid(graph ~ measure, scales = "free") } else { g <- g + facet_grid(~measure, scales = "free") } if(scale == "raw0"){g <- g + xlim(0, NA)} if(plot){plot(g)} else {invisible(g)} } clustPlot <- function(Wmats, scale = c("z-scores", "raw", "raw0", "relative"), include = "all", labels = NULL, orderBy = NULL, decreasing = FALSE, plot = TRUE, signed = TRUE, verbose = TRUE){ if(is.logical(scale)){scale <- ifelse(scale, "z-scores", "raw")} measure <- value <- node <- type <- NULL scale <- match.arg(scale) if(scale == "z-scores" & verbose & plot){message("Note: z-scores are shown on x-axis.")} if(scale == "relative" & verbose & plot){message("Note: relative centrality indices are shown on x-axis.")} Long <- clustTable(Wmats = Wmats, scale = (scale == "z-scores"), labels = labels, relative = (scale == "relative"), signed = signed) Long$value[!is.finite(Long$value)] <- 0 if(all(include == "all")){include <- c("WS", "Zhang", "Onnela", "Barrat")} include <- match.arg(include, c("WS", "Zhang", "Onnela", "Barrat"), several.ok = TRUE) Long <- subset(Long, measure %in% include) Long$measure <- factor(Long$measure) if(ifelse(is.null(orderBy), FALSE, ifelse(orderBy == "default", TRUE, FALSE))){ nodeLevels <- unique(gtools::mixedsort( as.character(Long$node), decreasing = decreasing)) } else if(!is.null(orderBy)){ nodeLevels <- names(sort(tapply( Long$value[Long$measure == orderBy], Long$node[Long$measure == orderBy], mean), decreasing = decreasing)) } else { nodeLevels <- rev(unique(as.character(Long$node))) } Long$node <- factor(as.character(Long$node), levels = nodeLevels) Long <- Long[gtools::mixedorder(Long$node), ] if(length(unique(Long$type)) > 1){ g <- ggplot(Long, aes(x = value, y = node, group = type, colour = type)) } else { g <- ggplot(Long, aes(x = value, y = node, group = type)) } g <- g + geom_path() + xlab("") + ylab("") + geom_point() + theme_bw() if(length(unique(Long$graph)) > 1){ g <- g + facet_grid(graph ~ measure, scales = "free") } else { g <- g + facet_grid(~measure, scales = "free") } if(scale == "raw0"){g <- g + xlim(0, NA)} if(plot){plot(g)} else {invisible(g)} } plotCentrality <- function(Wmats, which.net = "temporal", scale = TRUE, labels = NULL, plot = TRUE, centrality = "all", clustering = "Zhang"){ if(any(c("ggm", "SURnet", "mlGVAR") %in% names(attributes(Wmats)))){Wmats <- list(net1 = Wmats)} if(all(sapply(Wmats, function(z) isTRUE(attr(z, "mlGVAR"))))){ Wmats <- lapply(Wmats, function(z) switch( which.net, between = z$betweenNet, z$fixedNets)) } if(any(grepl("ggm", lapply(Wmats, function(z) names(attributes(z)))))){which.net <- "contemporaneous"} if(length(unique(lapply(Wmats, checkInclude))) != 1){stop("All networks must be of the same type")} if(is.null(names(Wmats))){names(Wmats) <- paste0("net", seq_along(Wmats))} which.net <- match.arg(tolower(which.net), c("temporal", "contemporaneous", "pdc")) c0 <- c01 <- do.call(rbind, lapply(seq_along(Wmats), function(z){ cbind.data.frame( centTable(Wmats[[z]], scale = scale, which.net = which.net, labels = labels), group = names(Wmats)[z]) 
})) if(all(centrality != "all")){ include0 <- checkInclude(Wmats[[1]], which.net = which.net) include0 <- include0[!grepl(ifelse( which.net != "contemporaneous", "Degree|^S|^E", "Degree|^Out|^In"), include0)] centrality <- include0[grep(paste(tolower( centrality), collapse = "|"), tolower(include0))] c0 <- c01 <- subset(c0, measure %in% centrality) } if(which.net == "contemporaneous" & clustering != FALSE){ c1 <- do.call(rbind, lapply(seq_along(Wmats), function(z){ z1 <- clustTable(Wmats[[z]], scale = scale, labels = labels) z1 <- z1[z1$measure == ifelse(is.logical(clustering), "Zhang", clustering), ] z1$measure <- "Clust. coef." z1$node <- as.character(z1$node) rownames(z1) <- 1:nrow(z1) return(cbind.data.frame(z1, group = names(Wmats)[z])) })) c01 <- rbind(c0, c1) } c01 <- c01[order(c01$node), ] c01 <- c01[order(c01$group), ] rownames(c01) <- 1:nrow(c01) c01$node <- substr(c01$node, 1, 6) if(!plot){ if(which.net == 'contemporaneous' & clustering != FALSE){ out <- list(centrality = c0, clustering = c1, combined = c01) } else { out <- c0 } return(out) } else { g1 <- ggplot(c01, aes(x = value, y = node, group = group, color = group, shape = group)) + geom_path(alpha = 1, size = 1) + geom_point(size = 2) + xlab("") + ylab("") + theme_bw() + facet_grid(. ~ measure, scales = "free") + scale_x_continuous(breaks = c(-1, 0, 1)) + theme(axis.line.x = element_line(colour = "black"), axis.ticks.x = element_line(colour = "black"), axis.ticks.y = element_line(colour = "white", size = 0), axis.text.y = element_text(colour = "black"), axis.text.x = element_text(angle = 45, colour = "black")) g1 } }
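## Sketch: the tables/plots above accept a plain weights matrix (wrapped into a
## list internally). Quick run on a correlation network, assuming the package's
## internal helpers (fnames, scaleNA, WS, zhang, onnela, checkInclude) are loaded:
W <- cor(mtcars[, c("mpg", "disp", "hp", "drat", "wt")])
diag(W) <- 0
head(centTable(W, scale = TRUE))   # long-format centrality table
clustPlot(W, scale = "raw")        # clustering-coefficient profiles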
choropleth.plot <- function(sp, dem = "P0010001", cuts = list("quantile", seq(0, 1, 0.25)),
                            color = list(fun = "hsv", attr = list(h = c(.4, .5, .6, .7), s = .6, v = .6, alpha = 1)),
                            main = NULL, sub = "Quantiles (equal frequency)",
                            legend = list(pos = "bottomleft", title = "Population Count"), type = NULL, ...) {
    color.map <- function(x, dem, y = NULL) {
        l.poly <- length(x@polygons)
        dem.num <- cut(dem, breaks = ceiling(do.call(cuts[[1]], list(x = dem, probs = cuts[[2]]))), dig.lab = 6)
        dem.num[which(is.na(dem.num))] <- levels(dem.num)[1]
        l.uc <- length(table(dem.num))
        if (is.null(y)) {
            col.heat <- do.call(color$fun, color$attr)
        } else {
            col.heat <- y
        }
        dem.col <- cbind(col.heat, names(table(dem.num)))
        colors.dem <- vector(length = l.poly)
        for (i in 1:l.uc) {
            colors.dem[which(dem.num == dem.col[i, 2])] <- dem.col[i, 1]
        }
        list(colors = colors.dem, dem.cut = dem.col[, 2], table.colors = dem.col[, 1])
    }
    colors.use <- color.map(sp, sp[[dem]])
    # use the exact element name rather than relying on partial $-matching
    args <- list(x = sp, ..., col = colors.use$colors)
    do.call("plot", args)
    graphics::title(main = main, sub = sub)
    legend(legend$pos, legend = colors.use$dem.cut, fill = colors.use$table.colors,
           bty = "o", title = legend$title, bg = "white")
}
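## Sketch (assumes a SpatialPolygonsDataFrame with a population column named
## "P0010001", e.g. a tract layer from the UScensus2000 packages):
# data(oregon.tract)   # hypothetical example layer
# choropleth.plot(oregon.tract, dem = "P0010001", main = "Oregon census tracts")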
rsymphonyLP <- function(objective, lower=0, upper=1, linCons, control=list()) { ctrl <- symphonyLPControl() if (length(control) > 0) for (name in names(control)) ctrl[name] = control[name] control <- ctrl N = length(objective) if(length(lower) == 1) { par.lower <- rep(lower, N) } else { par.lower <- lower } if(length(upper) == 1) { par.upper <- rep(upper, N) } else { par.upper <- upper } bounds <- list( lower = list(ind = 1:N, val = par.lower), upper = list(ind = 1:N, val = par.upper)) mat <- linCons[[1]] M <- nrow(mat) lower <- as.vector(linCons[[2]]) upper <- as.vector(linCons[[3]]) if(length(lower) == 1) { lower <- rep(lower, M) } else { lower <- lower } if(length(upper) == 1) { upper <- rep(upper, M) } else { upper <- upper } eqIndex <- which(lower == upper) ineqIndex <- which(lower != upper) eqA <- mat[eqIndex, ] ineqA <- mat[ineqIndex, ] mat <- rbind(eqA, ineqA, ineqA) dir <- c(rep("==", length(eqIndex)), rep("<=", length(ineqIndex)), rep(">=", length(ineqIndex))) rhs <- c(upper[eqIndex], upper[ineqIndex], lower[ineqIndex]) mat <- mat[is.finite(rhs), ] dir <- dir[is.finite(rhs)] rhs <- rhs[is.finite(rhs)] optim <- symphonyLP( obj = objective, mat = mat, dir = dir, rhs = rhs, bounds = bounds, types = NULL, max = FALSE) version <- paste(packageDescription("Rsymphony")[1:3], collapse=" ") value = list( opt = optim, solution = optim$solution, objective = optim$objval, status = optim$status[[1]], message = names(optim$status), solver = paste("R", control$solver), version = version) class(value) <- c("solver", "list") value } symphonyLP <- function(...) { Rsymphony::Rsymphony_solve_LP(...) } symphonyLPControl <- function(solver="symphony", project="r", trace=FALSE) { list(solver=solver, trace=trace) }
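## Sketch: minimize x1 + 2*x2 subject to x1 + x2 = 1 and 0 <= x1 <= 0.8, with
## box bounds [0, 1]; assumes the Rsymphony package is installed.
obj <- c(1, 2)
A <- rbind(c(1, 1), c(1, 0))
sol <- rsymphonyLP(obj, lower = 0, upper = 1, linCons = list(A, c(1, 0), c(1, 0.8)))
sol$solution   # expected optimum: c(0.8, 0.2)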
## Bare return() fragment wrapped in a (hypothetical) accessor so it is runnable;
## the values are the usual int K(t)^2 dt constants for unit-variance kernels.
kernel_constant <- function(kernel = c("gaussian", "rectangular", "triangular",
                                       "epanechnikov", "biweight", "cosine", "optcosine")) {
    kernel <- match.arg(kernel)
    return(switch(kernel,
        gaussian     = 1/(2*sqrt(pi)),
        rectangular  = sqrt(3)/6,
        triangular   = sqrt(6)/9,
        epanechnikov = 3/(5*sqrt(5)),
        biweight     = 5*sqrt(7)/49,
        cosine       = 3/4*sqrt(1/3 - 2/pi^2),
        optcosine    = sqrt(1 - 8/pi^2)*pi^2/16))
}
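## Quick check of two of the constants:
kernel_constant("gaussian")      # 1/(2*sqrt(pi)) ~ 0.2821
kernel_constant("epanechnikov")  # 3/(5*sqrt(5))  ~ 0.2683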
(X <- matrix(c(2, 20, 18, 25), nrow = 2)) fisher.test(X) .seerHome="~/data/SEER" rm(list=ls()) library(RSQLite) m=dbDriver("SQLite") con=dbConnect(m,dbname=file.path(.seerHome,"00/all.db")) dbListTables(con) dbListFields(con,"other") d=dbGetQuery(con, "SELECT * from other where histo3>9979 and histo3<9990") head(d) summary(d) summary(as.factor(d$COD)) MFcnts=summary(as.factor(d$sex)) summary(as.factor(d$surv)) nd=transform(d,dwd=(COD>0) ) summary(as.factor(nd$surv)) library(survival) graphics.off() if(length(grep("linux",R.Version()$os))) windows <- function( ... ) X11( ... ) if(length(grep("darwin",R.Version()$os))) windows <- function( ... ) quartz( ... ) windows(width=9,height=5) par(mfrow=c(1,2),mar=c(4.5,4.1,1,1),cex=1.4,lwd=1.5) rb=c("blue","red") bbr=c("black","blue","red") plot(survfit(Surv(surv,dwd)~sex,data = nd), main="MDS", xlab="Months",ylab="Survival", xlim=c(0,60) , col=rb) print(S<-survdiff(Surv(surv,dwd)~sex,data = nd)) options(digits=10) dput(S) mtext("A",side=3,line=0,cex=1.5,adj=-.3,font=2) legend("bottomleft",expression(paste("P < ",10^-15)),bty="n") legend("topright",c("Cases",paste("Males =",MFcnts[1]),paste("Females =",MFcnts[2])),text.col=bbr,bty="n") d=dbGetQuery(con, "SELECT * from lymyleuk where ICD9=2050") MFcnts=summary(as.factor(d$sex)) nd=transform(d,dwd=(COD>0) ) plot(survfit(Surv(surv,dwd)~sex,data = nd),main="AML", xlab="Months",ylab="Survival", xlim=c(0,60), col=rb) print(S<-survdiff(Surv(surv,dwd)~sex,data = nd)) mtext("B",side=3,line=0,cex=1.5,adj=-.3,font=2) legend("bottomleft",c("P=0.051"),bty="n") legend("topright",c("Cases",paste("Males =",MFcnts[1]),paste("Females =",MFcnts[2])),text.col=bbr,bty="n")
setConstructorS3("MbeiCnPlm", function(..., combineAlleles=FALSE) { extend(MbeiSnpPlm(...), c("MbeiCnPlm", uses(CnPlm())), combineAlleles = combineAlleles ) }) setMethodS3("getAsteriskTags", "MbeiCnPlm", function(this, collapse=NULL, ...) { tags <- NextMethod("getAsteriskTags", collapse=NULL) if (this$combineAlleles) tags <- c(tags, "A+B") tags <- paste(tags, collapse=collapse) tags }, protected=TRUE)
get.percent.age <- function(tree, percent = 0.01) { tree_slice <- slice.tree.sharp(tree, tree$root.time - (percent * tree$root.time)) while(is.null(tree_slice) || Ntip(tree_slice) < 3) { percent <- percent + 0.01 tree_slice <- slice.tree.sharp(tree, tree$root.time - (percent * tree$root.time)) if(percent >= 100) { stop("Impossible to find a starting point to slice the tree. This can happen if the tree has no branch length or has a \"ladder\" structure. You can try to fix that by setting specific slicing times.") break } } return(percent) } adjust.age <- function(FADLAD, ages_tree) { return(ifelse(FADLAD != ages_tree, FADLAD, ages_tree)) } adjust.FADLAD <- function(FADLAD, tree, data) { ages_tree <- tree.age(tree) names_match <- match(rownames(FADLAD), ages_tree[,2]) ages_tree_tmp <- ages_tree[names_match,] ages_tree_FAD <- ages_tree_LAD <- ages_tree_tmp ages_tree_FAD[,1] <- mapply(adjust.age, as.list(FADLAD[,1]), as.list(ages_tree_tmp[,1])) ages_tree_LAD[,1] <- mapply(adjust.age, as.list(FADLAD[,2]), as.list(ages_tree_tmp[,1])) ages_tree_FAD <- rbind(ages_tree_FAD, ages_tree[-names_match,]) ages_tree_LAD <- rbind(ages_tree_LAD, ages_tree[-names_match,]) row_order <- match(rownames(data), ages_tree_FAD$elements) return(list("FAD" = ages_tree_FAD[row_order,], "LAD" = ages_tree_LAD[row_order,])) } chrono.subsets.discrete <- function(data, tree, time, model = NULL, FADLAD, inc.nodes, verbose) { model <- NULL get.interval <- function(interval, time, ages_tree, inc.nodes, verbose) { if(verbose) message(".", appendLF = FALSE) if(inc.nodes) { return( list("elements" = as.matrix(which(ages_tree$FAD$ages >= time[interval+1] & ages_tree$LAD$ages <= time[interval]) ))) } else { one_interval <- which(ages_tree$FAD$ages >= time[interval+1] & ages_tree$LAD$ages <= time[interval]) matching <- match(tree$tip.label, rownames(data[one_interval,])) if(any(is.na(matching))) { elements_out <- list("elements" = as.matrix(one_interval[matching[-which(is.na(matching))]]) ) } else { elements_out <- list("elements" = as.matrix(one_interval[matching])) } return(elements_out) } } ages_tree <- adjust.FADLAD(FADLAD, tree, data) interval_elements <- lapply(as.list(seq(1:(length(time)-1))), get.interval, time, ages_tree, inc.nodes, verbose) names(interval_elements) <- paste(time[-length(time)], time[-1], sep = " - ") for (interval in 1:length(interval_elements)) { if(nrow(interval_elements[[interval]]$elements) == 0) { warning("The interval ", names(interval_elements)[interval], " is empty.", call. 
= FALSE) interval_elements[[interval]]$elements <- matrix(NA) } } return(interval_elements) } chrono.subsets.continuous <- function(data, tree, time, model, FADLAD, inc.nodes = NULL, verbose) { slices_elements <- lapply(as.list(time), get.time.slice, tree, model, verbose) slices_elements <- lapply(slices_elements, match.tree.data, tree, data) if(!is.null(FADLAD)) { slices_elements <- mapply(add.FADLAD, slices_elements, as.list(time), MoreArgs = list(FADLAD = FADLAD, data_rownames = rownames(data)), SIMPLIFY = FALSE) } names(slices_elements) <- time return(slices_elements) } make.origin.subsets <- function(data) { origin <- list("elements" = as.matrix(seq(1:nrow(data)))) origin_subsets <- list("origin" = origin) return(origin_subsets) } cbind.fill <- function(x, y) { if(dim(x)[1] == dim(y)[1]) { return(list("elements" = cbind(x, y))) } else { min_rows <- min(dim(x)[1], dim(y)[1]) output <- cbind(x[1:min_rows, , drop = FALSE], y[1:min_rows, , drop = FALSE]) if(dim(x)[1] == min_rows) { NAs <- cbind(matrix(NA, ncol = dim(x)[2], nrow = dim(y)[1]-min_rows), y[-c(1:min_rows), , drop = FALSE]) } else { NAs <- cbind(x[-c(1:min_rows), , drop = FALSE], matrix(NA, ncol = dim(y)[2], nrow = dim(x)[1]-min_rows)) } return(list("elements" = rbind(output, NAs))) } } recursive.combine.list <- function(list) { if(length(list) == 2) { return(mapply(function(x,y) cbind.fill(x$elements, y$elements), list[[1]], list[[length(list)]], SIMPLIFY = FALSE)) } else { list[[1]] <- mapply(function(x,y) cbind.fill(x$elements, y$elements), list[[1]], list[[length(list)]], SIMPLIFY = FALSE) list[[length(list)]] <- NULL return(recursive.combine.list(list)) } } fast.slice.table <- function(slice, tree) { rounding <- 3 slice_time <- round(tree$root.time - slice, rounding) if(slice_time == 0) { root_edges <- which(tree$edge[,1] == Ntip(tree)+1) return(cbind(tree$edge[root_edges, 1], c(0, 0), tree$edge[root_edges, 2], tree$edge.length[root_edges])) } node_age <- round(castor::get_all_distances_to_root(tree), rounding) crossed_edges <- which((node_age[ tree$edge[, 1] ] < slice_time) & (node_age[tree$edge[, 2] ] >= slice_time)) if(length(crossed_edges) == 0) { return(NULL) } get.sliced.edge <- function(crossed_edge, tree, node_age, slice_time) { return(abs(node_age[tree$edge[crossed_edge, ]] - slice_time)) } sliced_edge_lengths <- t(sapply(crossed_edges, get.sliced.edge, tree, node_age, slice_time)) return(cbind(tree$edge[crossed_edges, 1], sliced_edge_lengths[,1], tree$edge[crossed_edges, 2], sliced_edge_lengths[, 2])) } select.table.tips <- function(table, model) { if(is.null(table)) { return(NA) } switch(model, "acctran" = return(unique(table[,3])), "deltran" = return(unique(table[,1])), "random" = return(unique(apply(table[,c(1,3), drop = FALSE], 1, FUN = function(x) x[sample(c(1,2), 1)]))), "proximity" = return(unique(sapply(1:nrow(table), function(x, table, closest) table[,c(1,3), drop = FALSE][x, closest[x]], table, apply(table[,c(2,4), drop = FALSE], 1, FUN = function(x) which(x == min(x))[1])))), "equal.split" = return(cbind(table[, c(1,3)], 0.5)), "gradual.split" = return(cbind(table[, c(1,3)], 1-(table[, 2]/(table[, 2] + table[, 4])))) ) } get.time.slice <- function(time, tree, model, verbose) { if(verbose) message(".", appendLF = FALSE) is_split <- ifelse(grepl("split", model), TRUE, FALSE) slice <- select.table.tips(fast.slice.table(time, tree), model) if(is.na(slice[1])) { warning("The slice ", time, " is empty.", call. 
= FALSE) } else { if(is_split && ncol(slice) == 2) { slice <- matrix(c(slice[,1], slice[1,2]), nrow = 1) } } return(list("elements" = matrix(slice, ncol = ifelse(is_split, 3, 1)))) } add.FADLAD <- function(time_slice, one_time, FADLAD, data_rownames) { intervals <- (one_time >= FADLAD[,2]) & (one_time <= FADLAD[,1]) if(any(intervals)) { add_tips <- which(data_rownames %in% rownames(FADLAD)[intervals]) if(dim(time_slice$elements)[2] == 1) { time_slice$elements <- matrix(unique(c(time_slice$elements, add_tips))) } else { time_slice$elements <- rbind(time_slice$elements, cbind(matrix(add_tips),matrix(add_tips), 1)) remove.duplicates <- function(time_slice, col) { duplicated_elements <- (duplicated(time_slice$elements[,col]) & time_slice$elements[,3] == 1) if(any(duplicated_elements)) { replace <- which(time_slice$elements[,col] %in% time_slice$elements[duplicated_elements, col][1]) time_slice$elements[replace[1], ] <- time_slice$elements[replace[2], ] time_slice$elements <- time_slice$elements[-replace[2], ] remove.duplicates(time_slice, col) } else { return(time_slice) } } time_slice <- remove.duplicates(time_slice, col = 2) time_slice <- remove.duplicates(time_slice, col = 1) } } return(time_slice) } match.tree.data <- function(elements, tree, data) { if(all(is.na(elements$elements))) { return(elements) } matching <- function(x, tree, data) { return(match(c(tree$tip.label, tree$node.label)[x], rownames(data))) } if(dim(elements$elements)[2] == 1) { elements$elements <- matrix(matching(elements$elements, tree, data), ncol = 1) } else { elements$elements[,c(1,2)] <- apply(elements$elements[,c(1,2)], 2, FUN = matching, tree, data) } return(elements) }
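## Sketch: slicing a dated random tree, assuming ape and castor are installed;
## fast.slice.table() expects a $root.time element on the tree.
set.seed(42)
tree <- ape::rtree(20)
tree$root.time <- max(castor::get_all_distances_to_root(tree))
fast.slice.table(0.1 * tree$root.time, tree)   # edges crossing the slice
select.table.tips(fast.slice.table(0.1 * tree$root.time, tree), model = "proximity")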
plotDist("norm", mean = 75, sd = 10, xlim = c(40, 110)) plotDist("norm", mean = 7.1, sd = 1.1, xlim = c(2.7, 11.5)) plotDist("norm", mean = 0, sd = 0.02, xlim = c(-.07, .07))
"constants_table"
sirt_pem_include_ll_args <- function(ll_args, pem_parm, pem_pars, pem_parameter_index) { for (pp in pem_pars){ ll_args[[ pp ]] <- sirt_pem_extract_parameters( parm=pem_parm, parmgroup=pp, pem_parameter_index=pem_parameter_index ) } return(ll_args) }
Rcpp::sourceCpp('tmp-tests/test-fastMatVec3.cpp') require(bigmemory) X <- big.matrix(5000, 5000, shared = FALSE) X[] <- rnorm(length(X)) y <- rnorm(ncol(X)) require(microbenchmark) print(microbenchmark( test <- armaProdVec(X@address, y, 1:5000 - 1), test2 <- rcppProdVec(X@address, y), times = 10 )) X2 <- big.matrix(5000, 50000, backingfile = "tmp", backingpath = ".") for (i in 1:50) { X2[, 1:1000 + (i - 1) * 1000] <- rnorm(length(X2) / 50) } y2 <- rnorm(ncol(X2)) print(microbenchmark( test3 <- armaProdVec(X2@address, y2, 1:5000 - 1), test4 <- rcppProdVec(X2@address, y2), times = 10 ))
if (0){ error.plot <- function (summ, label){ colors <- c("blue", "green", "red") round <- summ[,"round"] binary <- summ[,"Error.binary"] distance <- summ[,"Error.distance"] vote <- summ[,"vote"] par (mar=c(5,5,4,1)+.1) plot (jitt(round,.2), jitt(binary,.04), xlab="time", ylab="Error rate", pch=16, cex=.7, cex.lab=1.5, cex.axis=1.5, cex.main=1.5,ylim=c(-.03,1.03), main=paste("error counting,", label), col=colors[vote]) temp <- binned.means (round, binary) par (mar=c(5,5,4,1)+.1) plot (jitt(round,.2), jitt(distance,1), xlab="time", ylab="Error distance", pch=16, cex=.7, cex.lab=1.5, cex.axis=1.5, cex.main=1.5, main=paste("error weighting,", label), col=colors[vote]) temp <- binned.means (round, distance) } errcount2 <- function (value.i, vote.i, round.i, cut1.i, cut2.i){ n <- length(value.i) n.cutpairs <- length(cut1.i) value <- array (value.i, c(n, n.cutpairs)) vote <- array (vote.i, c(n,n.cutpairs)) round <- array (round.i, c(n,n.cutpairs)) cut1 <- t (array (cut1.i, c(n.cutpairs,n))) cut2 <- t (array (cut2.i, c(n.cutpairs,n))) expected <- ifelse (value<=cut1, 1, ifelse (value<=cut2, 2, 3)) w <- round*(round+max(round)) err.rate <- apply (vote!=expected, 2, mean) w.err.rate <- apply (w*(vote!=expected), 2, mean) / apply (w, 2, mean) error <- ifelse (vote==expected, 0, ifelse (vote==1, value-cut1, ifelse (vote==3, cut2-value, ifelse (expected==1, cut1-value, value-cut2)))) err.mean <- apply (error, 2, mean) w.err.mean <- apply (w*error, 2, mean) / apply (w, 2, mean) cbind (err.rate, err.mean, w.err.rate, w.err.mean) } binned.means <- function (x, y, breaks=NULL, add=0, plot=T, ...){ if (is.null(breaks)){ nbins=floor(sqrt(length(x))) breaks <- quantile (x, seq(0,1,length=(nbins+1))) breaks[nbins+1] <- breaks[nbins+1] + 10^10 } else { nbins <- length(breaks)-1 } xmean <- NULL ymean <- NULL for (i in 1:nbins){ cond <- x>=breaks[i] & x<breaks[i+1] xmean <- c(xmean, mean(x[cond])) ymean <- c(ymean, mean(y[cond])) if (plot) lines (breaks[i:(i+1)], add + rep(ymean[i],2), ...) } if (plot){ for (i in 2:nbins){ lines (rep(breaks[i],2), add + ymean[(i-1):i], ...) 
} } return (list (xmean=xmean, ymean=ymean)) } empirical <- function (summ, label){ value <- summ[,"value"] vote <- summ[,"vote"] plot (c(0,100), c(0,1), xlab="value", ylab="", type="n", xaxs="i", yaxs="i", cex.lab=1.3, cex.axis=1.3, cex.main=1.3, main=label) binned.means (value, ifelse(vote<3,1,0), breaks=seq(0,100,5), add=.002, col="darkgray") binned.means (value, ifelse(vote<2,1,0), breaks=seq(0,100,5), add=-.002, col="darkgray") text (10, .1, "1", cex=2) text (50, .5, "2", cex=2) text (90, .9, "3", cex=2) lines (c(0,100),c(0,0)) lines (c(0,100),c(1,1)) } monotonic <- function (data, rule="down", rounds=c(0,100)){ players <- as.vector(unique (data[,"person"])) players <- players[(players!="NA") & (players!="")] n.players <- length(players) best.cuts <- array (NA, c(n.players,12)) dimnames (best.cuts) <- list (players, c( "cut1.rate", "cut2.rate", "err.rate", "cut1.mean", "cut2.mean", "err.mean", "cut1.rate.w", "cut2.rate.w", "err.rate.w", "cut1.mean.w", "cut2.mean.w", "err.mean.w")) for (i in 1:n.players){ pl <- players[i] ok <- data[,"person"]==pl & data[,"proposal"]==1 & data[,"round"]>=rounds[1] & data[,"round"]<=rounds[2] ok[is.na(ok)] <- F round <- as.vector(data[ok,"round"]) value <- abs(as.numeric(as.vector(data[ok,"value"]))) vote <- abs(as.numeric(as.vector(data[ok,"vote"]))) cut.poss <- c(0,sort(as.vector(unique(value)))+.5) n.cut.poss <- length(cut.poss) cut1.all <- NULL cut2.all <- NULL errs.all <- NULL if (rule=="down"){ for (i1 in 1:n.cut.poss){ cut1 <- rep (cut.poss[i1], n.cut.poss+1-i1) cut2 <- cut.poss[i1:n.cut.poss] cut1.all <- c (cut1.all, cut1) cut2.all <- c (cut2.all, cut2) errs.all <- rbind(errs.all, errcount2 (value, vote, round, cut1, cut2)) } } else if (rule=="up"){ for (i2 in n.cut.poss:1){ cut1 <- cut.poss[i2:1] cut2 <- rep (cut.poss[i2], i2) cut1.all <- c (cut1.all, cut1) cut2.all <- c (cut2.all, cut2) errs.all <- rbind(errs.all, errcount2 (value, vote, round, cut1, cut2)) } } best.rate <- argmin (errs.all[,1])$argmin[1] best.mean <- argmin (errs.all[,2])$argmin[1] best.rate.w <- argmin (errs.all[,3])$argmin[1] best.mean.w <- argmin (errs.all[,4])$argmin[1] best.cuts[i,] <- c ( cut1.all[best.rate], cut2.all[best.rate], errs.all[best.rate,1], cut1.all[best.mean], cut2.all[best.mean], errs.all[best.mean,2], cut1.all[best.rate.w], cut2.all[best.rate.w], errs.all[best.rate.w,3], cut1.all[best.mean.w], cut2.all[best.mean.w], errs.all[best.mean.w,4]) } return (best.cuts) } } simple.and.nash <- function (data, rounds=c(0,100), simple=c(33.3,66.7), nash=c(50,50)){ players <- as.vector(unique (data[,"person"])) players <- players[(players!="NA") & (players!="")] n.players <- length(players) output <- array (NA, c(n.players,2)) err.mean.simple <- rep(NA, n.players) err.mean.nash <- rep(NA, n.players) ll.simple <- rep(NA, n.players) ll.nash <- rep(NA, n.players) n <- rep(NA, n.players) for (i in 1:n.players){ pl <- players[i] ok <- data[,"person"]==pl & data[,"proposal"]==1 & data[,"round"]>=rounds[1] & data[,"round"]<=rounds[2] ok[is.na(ok)] <- F round <- as.vector(data[ok,"round"]) value <- abs(as.numeric(as.vector(data[ok,"value"]))) vote <- abs(as.numeric(as.vector(data[ok,"vote"]))) err.mean.simple[i] <- mean (!((value<simple[1] & vote==1) | (value>simple[1] & value<simple[2] & vote==2) |(value>simple[2] & vote==3))) err.mean.nash[i] <- mean(!((value<nash[1] & vote==1) | (value>nash[1] & value<nash[2] & vote==2) |(value>nash[2] & vote==3))) ll.simple[i] <- ll (sum(ok), err.mean.simple[i]) ll.nash[i] <- ll (sum(ok), err.mean.nash[i]) n[i] <- sum(ok) } 
ll.simple.total <- ll(sum(n),sum(err.mean.simple*n)/sum(n)) ll.nash.total <- ll(sum(n),sum(err.mean.nash*n)/sum(n)) print(n) print (c(round(ll.simple.total,0), round(ll.nash.total,0))) output <- cbind (err.mean.simple, err.mean.nash, ll.simple, ll.nash) return (output) } if (0){ monotonic.aggregate <- function (data, rule="down", rounds=c(0,100)){ players <- as.vector(unique (data[,"person"])) players <- players[(players!="NA") & (players!="")] n.players <- length(players) best.cuts <- rep (NA, 12) names (best.cuts) <- c( "cut1.rate", "cut2.rate", "err.rate", "cut1.mean", "cut2.mean", "err.mean", "cut1.rate.w", "cut2.rate.w", "err.rate.w", "cut1.mean.w", "cut2.mean.w", "err.mean.w") ok <- data[,"proposal"]==1 & data[,"round"]>=rounds[1] & data[,"round"]<=rounds[2] ok[is.na(ok)] <- F round <- as.vector(data[ok,"round"]) value <- abs(as.numeric(as.vector(data[ok,"value"]))) vote <- abs(as.numeric(as.vector(data[ok,"vote"]))) cut.poss <- c(0,sort(as.vector(unique(value)))+.5) n.cut.poss <- length(cut.poss) cut1.all <- NULL cut2.all <- NULL errs.all <- NULL if (rule=="down"){ for (i1 in 1:n.cut.poss){ cut1 <- rep (cut.poss[i1], n.cut.poss+1-i1) cut2 <- cut.poss[i1:n.cut.poss] cut1.all <- c (cut1.all, cut1) cut2.all <- c (cut2.all, cut2) errs.all <- rbind(errs.all, errcount2 (value, vote, round, cut1, cut2)) } } else if (rule=="up"){ for (i2 in n.cut.poss:1){ cut1 <- cut.poss[i2:1] cut2 <- rep (cut.poss[i2], i2) cut1.all <- c (cut1.all, cut1) cut2.all <- c (cut2.all, cut2) errs.all <- rbind(errs.all, errcount2 (value, vote, round, cut1, cut2)) } } best.rate <- argmin (errs.all[,1])$argmin[1] best.mean <- argmin (errs.all[,2])$argmin[1] best.rate.w <- argmin (errs.all[,3])$argmin[1] best.mean.w <- argmin (errs.all[,4])$argmin[1] best.cuts <- c ( cut1.all[best.rate], cut2.all[best.rate], errs.all[best.rate,1], cut1.all[best.mean], cut2.all[best.mean], errs.all[best.mean,2], cut1.all[best.rate.w], cut2.all[best.rate.w], errs.all[best.rate.w,3], cut1.all[best.mean.w], cut2.all[best.mean.w], errs.all[best.mean.w,4]) return (best.cuts) } argmin <- function(a){ m <- min (a, na.rm=T) i <- (1:length(a))[a==m] list (min=m, argmin=i) } summarize <- function (data, cuts, rounds=c(0,100)){ players <- as.vector(unique (data[,"person"])) players <- players[(players!="NA") & (players!="")] n.players <- length(players) summ <- NULL for (i in 1:n.players){ ok <- data[,"person"]==players[i] & data[,"Proposal"]==1 & data[,"round"]>=rounds[1] & data[,"round"]<=rounds[2] ok[is.na(ok)] <- F round <- as.vector(data[ok,"round"]) value <- abs(as.numeric(as.vector(data[ok,"value"]))) vote <- abs(as.numeric(as.vector(data[ok,"vote"]))) ord <- order(value) cbind (round,value,vote)[ord,] exb <- error.binary (value, vote, cuts[i,"cut1.rate"], cuts[i,"cut2.rate"])$expected exd <-error.distance (value, vote, cuts[i,"cut1.mean"], cuts[i,"cut2.mean"])$expected eb <- error.binary (value, vote, cuts[i,"cut1.rate"], cuts[i,"cut2.rate"])$error ed <-error.distance (value, vote, cuts[i,"cut1.mean"], cuts[i,"cut2.mean"])$error summ <- rbind (summ, cbind (rep(i,sum(ok)), round, value, vote, exb, exd, eb, ed)) } dimnames (summ) <- list (NULL, c("person", "round", "value", "vote", "Expected.binary", "Expected.distance", "Error.binary", "Error.distance")) return (summ) } postscript ("aggregate.ps", height=3, horizontal=F) par (mfrow=c(1,3), oma=c(0,0,3,0)) for (i in 1:3){ data <- data.by.gamesize[[i]] J <- length(unique(data[,"person"])) label <- paste (gamesize[i], "-player games\n(data from ", J, " subjects)", sep="") empirical (data, 
label) } mtext ("Empirical votes, averaging over all persons in each experiment", outer=T) dev.off() postscript ("errors.logit.ps", height=2.5, horizontal=F) par (mfrow=c(1,4), oma=c(0,0,3,0)) for (i in 1:4){ data <- data.by.gamesize[[i]] label <- paste (gamesize[i], "-player games", sep="") if (i==4) label <- "Model fit to random votes" c1 <- data[,"cutoff.12"] c2 <- data[,"cutoff.23"] value <- data[,"value"] predicted <- ifelse (value<c1, 1, ifelse (value<=c2, 2, 3)) vote <- data[,"vote"] person <- data[,"person"] err.rates <- NULL for (p in unique(person)){ err.rates <- c(err.rates, mean((predicted!=vote)[person==p],na.rm=T)) } hist (err.rates, breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) } mtext ("Histograms of individual persons' error rates\n(cutpoints estimated from logit model)", outer=T) dev.off() postscript ("errors.min.ps", height=2.5, horizontal=F) par (mfrow=c(1,4), oma=c(0,0,3,0)) for (i in 1:4){ label <- paste (gamesize[i], "-player games", sep="") if (i==4) label <- "Model fit to random votes" hist (cuts[[i]][,"err.rate"], breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) } mtext ("Histograms of individual persons' error rates\n(cutpoints estimated to minimize each person's error rates)", outer=T) dev.off() postscript ("errors.min.univ.ps", height=4.5, width=6.5, horizontal=F) par (mfcol=c(2,3), oma=c(0,0,3,0)) for (i in 1:3){ label <- paste (gamesize[i], "-player games\n(Caltech students)", sep="") id <- as.numeric(substr(row.names(cuts[[i]]),1,1)) caltech <- school[id]==1 ucla <- school[id]==2 hist (cuts[[i]][caltech,"err.rate"], breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) label <- paste (gamesize[i], "-player games\n(UCLA students)", sep="") hist (cuts[[i]][ucla,"err.rate"], breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) } mtext ("Histograms of individual persons' error rates, by university\n(cutpoints estimated to minimize each person's error rates)", outer=T) dev.off() postscript ("errors.min.halves.ps", height=4.5, width=6.5, horizontal=F) par (mfcol=c(2,3), oma=c(0,0,3,0)) for (i in 1:3){ label <- paste (gamesize[i], "-player games\n(first 10 trials)", sep="") hist (cuts.part1[[i]][,"err.rate"], breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) label <- paste (gamesize[i], "-player games\n(trials 11 and later)", sep="") hist (cuts.part2[[i]][,"err.rate"], breaks=seq(0,1,.05), xlab="Error rate", ylab="", main=label) } mtext ("Histograms of individual persons' error rates, early and late trials\n(cutpoints estimated to minimize each person's error rates)", outer=T) dev.off() postscript ("errors.dist.ps", height=2.5, horizontal=F) par (mfrow=c(1,4), oma=c(0,0,3,0)) for (i in 1:4){ label <- paste (gamesize[i], "-player games", sep="") if (i==4) label <- "Model fit to random votes" hist (cuts[[i]][,"err.mean"], breaks=seq(0,30,2), xlab="Avg error dist", ylab="", main=label) } mtext ("Histograms of individual persons' average error distances\n(cutpoints estimated to minimize each person's average error distance)", outer=T) dev.off() postscript ("errors.dist.univ.ps", height=4.5, width=6.5, horizontal=F) par (mfcol=c(2,3), oma=c(0,0,3,0)) for (i in 1:3){ label <- paste (gamesize[i], "-player games\n(Caltech students)", sep="") id <- as.numeric(substr(row.names(cuts[[i]]),1,1)) caltech <- school[id]==1 ucla <- school[id]==2 hist (cuts[[i]][caltech,"err.mean"], breaks=seq(0,30,2), xlab="Avg error distance", ylab="", main=label) label <- paste (gamesize[i], "-player games\n(UCLA students)", sep="") hist 
(cuts[[i]][ucla,"err.mean"], breaks=seq(0,30,2), xlab="Avg error distance", ylab="", main=label) } mtext ("Histograms of individual persons' average error distances, by university\n(cutpoints estimated to minimize each person's average error distance)", outer=T) dev.off() postscript ("errors.dist.halves.ps", height=4.5, width=6.5, horizontal=F) par (mfcol=c(2,3), oma=c(0,0,3,0)) for (i in 1:3){ label <- paste (gamesize[i], "-player games\n(first 10 trials)", sep="") hist (cuts.part1[[i]][,"err.mean"], breaks=seq(0,30,2), xlab="Avg error distance", ylab="", main=label) label <- paste (gamesize[i], "-player games\n(trials 11 and later)", sep="") hist (cuts.part2[[i]][,"err.mean"], breaks=seq(0,30,2), xlab="Avg error distance", ylab="", main=label) } mtext ("Histograms of individual persons' average error distances, early and late trials\n(cutpoints estimated to minimize each person's average error distance)", outer=T) dev.off() theory <- rbind (c(50,50), c(35,67), c(45,55)) postscript ("cutpoints.logit.ps", height=3, horizontal=F) par (mfrow=c(1,4), oma=c(0,0,3,0)) for (i in 1:4){ data <- data.by.gamesize[[i]] label <- paste (gamesize[i], "-player game", sep="") if (i==4) label <- "Model fit to random votes" round <- data[,"round"] c1 <- data[round==1,"cutoff.12"] c2 <- data[round==1,"cutoff.23"] sch <- data[round==1,"school"] par (pty="s") plot (c(0,100), c(0,100), xlab="1-2 cutpoint", ylab="2-3 cutpoint", main=label, type="n", xaxt="n", yaxt="n") axis (1, seq(0,100,25)) axis (2, seq(0,100,25)) abline (0,1,lty=2,lwd=.5) if (i==4) points (c1, c2, pch=20, cex=.5) else { points (c1[sch==1], c2[sch==1], col="black", pch=20, cex=.5) points (c1[sch==2], c2[sch==2], col="darkgray", pch=20, cex=.5) text (40,8,"Caltech students",adj=0,col="black", cex=.8) text (40,1,"UCLA students",adj=0,col="darkgray", cex=.8) points (theory[i,1], theory[i,2], pch=21, cex=1.5) } } mtext ("Individuals' cutpoints, estimated from logit model\n(circles show theoretical equilibrium values)", outer=T) dev.off() postscript ("cutpoints.min.ps", height=3, horizontal=F) par (mfrow=c(1,4), oma=c(0,0,3,0)) for (i in 1:4){ label <- paste (gamesize[i], "-player games", sep="") if (i==4) label <- "Model fit to random votes" c1 <- cuts[[i]][,"cut1.rate"] c2 <- cuts[[i]][,"cut2.rate"] data <- data.by.gamesize[[i]] round <- data[,"round"] sch <- data[round==1,"school"] par (pty="s") plot (c(0,100), c(0,100), xlab="1-2 cutpoint", ylab="2-3 cutpoint", main=label, type="n", xaxt="n", yaxt="n") axis (1, seq(0,100,25)) axis (2, seq(0,100,25)) abline (0,1,lty=2,lwd=.5) if (i==4) points (c1, c2, pch=20, cex=.5) else { points (c1[sch==1], c2[sch==1], col="black", pch=20, cex=.5) points (c1[sch==2], c2[sch==2], col="darkgray", pch=20, cex=.5) text (40,8,"Caltech students",adj=0,col="black", cex=.8) text (40,1,"UCLA students",adj=0,col="darkgray", cex=.8) points (theory[i,1], theory[i,2], pch=21, cex=1.5) } } mtext ("Individuals' cutpoints, estimated to minimize error rates\n(circles show theoretical equilibrium values)", outer=T) dev.off() postscript ("cutpoints.min.halves.ps", height=5.3, horizontal=F) par (mfrow=c(2,3), oma=c(0,0,3,0), pty="s") for (j in 1:2){ if (j==1) cat ("first 10 trials:\n") else if (j==2) cat ("trials 11 and later:\n") for (i in 1:3){ if (i==1) cat ("2-player games:\n") else if (i==2) cat ("3-player games:\n") else if (i==3) cat ("6-player games:\n") if (j==1){ c1 <- cuts.part1[[i]][,"cut1.rate"] c2 <- cuts.part1[[i]][,"cut2.rate"] label <- paste (gamesize[i], "-player games\n(first 10 trials)", sep="") } else{ c1 <-
cuts.part2[[i]][,"cut1.rate"] c2 <- cuts.part2[[i]][,"cut2.rate"] label <- paste (gamesize[i], "-player games\n(trials 11 and later)", sep="") } data <- data.by.gamesize[[i]] round <- data[,"round"] sch <- data[round==1,"school"] plot (c(0,100), c(0,100), xlab="1-2 cutpoint", ylab="2-3 cutpoint", main=label, type="n", xaxt="n", yaxt="n") axis (1, seq(0,100,25)) axis (2, seq(0,100,25)) abline (0,1,lty=2,lwd=.5) cat ("caltech cutpoint 1:", c1[sch==1],"\n") cat ("ucla cutpoint 1:", c1[sch==2],"\n") cat ("caltech cutpoint 2:", c2[sch==1],"\n") cat ("ucla cutpoint 2:", c2[sch==2],"\n") points (c1[sch==1], c2[sch==1], col="black", pch=20, cex=.5) points (c1[sch==2], c2[sch==2], col="darkgray", pch=20, cex=.5) text (40,8,"Caltech students",adj=0,col="black", cex=.8) text (40,1,"UCLA students",adj=0,col="darkgray", cex=.8) points (theory[i,1], theory[i,2], pch=21, cex=1.5) } } mtext ("Individuals' cutpoints, estimated to minimize error rates,\nearly and late trials", outer=T) dev.off() postscript ("sampledata.ps", height=8, horizontal=F) par (mfrow=c(3,3), oma=c(0,0,3,0)) plotted <- c(101, 106, 409, 303, 405, 504, 705, 112) story <- c("Perfectly monotonic", "Approximately monotonic", "One aberrant observation", "One fuzzy and one sharp cutpoint", "Only 1's and 3's", "Almost only 3's", "No 3's", "Nearly random") for (i in 1:length(plotted)){ ok <- data.all[,"person"]==plotted[i] data <- data.all[ok,] c12 <- c (data[1,"cutoff.12"], data[1,"cutoff.23"]) s <- data[1,"sd.logit"] plot (data[,"value"], data[,"vote"], xlim=c(0,100), ylim=c(1,3), xlab="Value", ylab="Vote", main=story[i], yaxt="n") axis (2, 1:3) temp <- seq (0,100,.1) prob <- array (NA, c(length(temp),n.cut+1)) expected <- rep (NA, length(temp)) prob[,1] <- 1 - invlogit ((temp-c12[1])/s) expected <- 1*prob[,1] for (i.cut in 2:n.cut){ prob[,i.cut] <- invlogit ((temp-c12[i.cut-1])/s) - invlogit ((temp-c12[i.cut])/s) expected <- expected + i.cut*prob[,i.cut] } prob[,n.cut+1] <- invlogit ((temp-c12[n.cut])/s) expected <- expected + (n.cut+1)*prob[,n.cut+1] lines (temp, expected, lwd=.5) for (i.cut in 1:n.cut) lines (rep(c12[i.cut],2), i.cut+c(0,1), lwd=.5) } mtext ("Data from some example individuals (vertical lines show estimated cutpoints,\nand curves show expected responses from fitted robust logit models)", outer=T) dev.off() }
gg_session <- function(gg_pkg=NULL) { if (is.null(gg_pkg)) { gg_pkg <- names(which(sapply(tools::package_dependencies(names(utils::sessionInfo()[["otherPkgs"]])), function(x) any(grepl("ggplot2", x))))) gg_pkg <- c("ggplot2", gg_pkg[!grepl("ggedit", gg_pkg)]) } fn <- unlist(sapply(gg_pkg, function(x) { y <- ls(sprintf("package:%s", x)) y <- y[!grepl("[^[:alnum:][:space:]_]", y)] sprintf("%s::%s", x, y) }, USE.NAMES = FALSE)) x <- sapply(fn, function(x) { string <- capture.output(eval(parse(text = x))) y <- regmatches(string, gregexpr("geom = (.*?),|stat = (.*?),|position = (.*?),", string)) gsub("[,]", "", unlist(y)) }) x1 <- x[lapply(x, length) > 0] x1 <- x1[sapply(x1, function(x) any(grepl("stat", x)))] y <- lapply(x1, function(y) y[!y %in% c( "geom = geom", "stat = stat", "position = position", "position = title.position", "position = label.position", "position = NULL", "stat = list(NULL)", "position = list(NULL)" )]) out <- purrr::map_df(y[sapply(y, length) > 0], .f = function(x) { data.frame(do.call("rbind", strsplit(x, " = ")), stringsAsFactors = FALSE)}, .id = "fn")%>% tidyr::spread(!!rlang::sym('X1'),!!rlang::sym('X2'))%>% dplyr::filter(complete.cases(!!rlang::sym('.')))%>% mutate_all(as.character) out[, c("position", "stat", "geom")] <- sapply(c("position", "stat", "geom"), function(x) { y <- which(grepl('"', out[[x]])) z <- sapply(strsplit(gsub('"', "", out[[x]][y]), "_"), function(a) { paste0(gsub("(^|[[:space:]])([[:alpha:]])", "\\1\\U\\2", c(x, a), perl = TRUE), collapse = "") }) out[[x]][y] <- z out[[x]] }) out$pkg <- gsub(":(.*?)$", "", out$fn) out$fn <- gsub("^(.*?):", "", out$fn) out$stat[which(!grepl('^Stat',out$stat))] <- sprintf('Stat%s',out$stat[which(!grepl('^Stat',out$stat))]) out }
getGeneFromKO <- function(KOList){ return(getGeneFromKGene(getKGeneFromKO(KOList))) }
.onLoad <- function(lib, pkg) { .initialize() if (is.null(getOption("fileEditor"))) { if (interactive()) { if (is_win()) { pfdir <- Sys.getenv("ProgramFiles") if (pfdir == "") pfdir <- "c:\\program files" file_editor <- paste0(pfdir, "\\Notepad++\\notepad++.exe") if (!file.exists(file_editor)) file_editor <- "notepad" } else if (is_mac()) { if (length(suppressWarnings(system("which edit", intern = TRUE)))) { file_editor <- "bbedit" } else { file_editor <- "textedit" } } else { if (length(suppressWarnings(system("which gedit", intern = TRUE)))) { file_editor <- "gedit" } else if (length(suppressWarnings(system("which kate", intern = TRUE)))) { file_editor <- "kate" } else { file_editor <- Sys.getenv("EDITOR") if (!nzchar(file_editor)) file_editor <- Sys.getenv("VISUAL") if (!nzchar(file_editor)) file_editor <- "vi" } } options(fileEditor = file_editor) } else options(fileEditor = "") } } .initialize <- function(replace = TRUE) { if (is.null(getOption("svGUI.methods"))) options(svGUI.methods = c("AIC", "anova", "confint", "BIC", "formula", "head", "hist", "logLik", "plot", "predict", "residuals", "summary", "tail", "vcov")) } .gettext <- function(msg, domain = "R") ngettext(1, msg, "", domain = domain) .gettextf <- function(fmt, ..., domain = "R") sprintf(ngettext(1, fmt, "", domain = domain), ...)
module_ui_group_selector_table <- function(id) { ns <- shiny::NS(id) shiny::tagList( DT::DTOutput(ns("grouptable")) ) } module_server_group_selector_table <- function(input, output, session, df, df_label, ...) { ns <- session$ns group_table <- dplyr::summarise(df(), `Group` = as.character(unique(.data$.dcrindex)), `n obs.` = dplyr::n() ) %>% dplyr::relocate(.data$Group) if (identical(dim(dplyr::group_data(df())), as.integer(c(1, 1)))) { group_table <- data.frame( `Group` = df_label, `n obs.` = nrow(df()), stringsAsFactors = FALSE ) } output$grouptable <- DT::renderDT(group_table, rownames = FALSE, ... ) }
trajectoryab <- function(tstart, tend, ustart, uend, xstart, xend, step) { tstart <- as.numeric(tstart) tend <- as.numeric(tend) ustart <- as.numeric(ustart) uend <- as.numeric(uend) xstart <- as.numeric(xstart) xend <- as.numeric(xend) ab <- xabparam( tstart = tstart, tend = tend, ustart = ustart, uend = uend, xstart = xstart, xend = xend) a <- ab[1] b <- ab[2] tseq <- seq(tstart, tend, by = step) tlen <- length(tseq) xseq <- rep(NA, tlen) useq <- rep(NA, tlen) for(i in 1:tlen) useq[i] <- uab(u0 = ustart, a, b, tseq[i], t0 = tstart) for(i in 1:tlen) xseq[i] <- xab(x0 = xstart, u0 = ustart, a, b, tseq[i], t0 = tstart) lines(tseq, xseq, col = "black") return(list(ab, tseq, useq, xseq)) }
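# Hedged usage sketch for trajectoryab(): xabparam(), uab() and xab() are
# assumed to be the package's own helpers, and all numbers below are
# illustrative only. trajectoryab() draws onto an existing device with
# lines(), so open a canvas first.
plot(c(0, 10), c(0, 5), type = "n", xlab = "t", ylab = "x")
traj <- trajectoryab(tstart = 0, tend = 10, ustart = 0, uend = 1, xstart = 0, xend = 5, step = 0.1)
str(traj)  # list(ab, tseq, useq, xseq)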
predictsmcure <- function(object, newX, newZ,model=c("ph","aft"), ...) { call <- match.call() if(!inherits(object, "smcure")) stop("Object must be results of smcure") model <- match.arg(model) if(is.vector(newZ)) newZ=as.matrix(newZ) newZ=cbind(1,newZ) if(is.vector(newX)) newX=as.matrix(newX) s0=as.matrix(object$s,ncol=1) n=nrow(s0) uncureprob=exp(object$b%*%t(newZ))/(1+exp(object$b%*%t(newZ))) scure=array(0,dim=c(n,nrow(newX))) t=array(0,dim=c(n,nrow(newX))) spop=array(0,dim=c(n,nrow(newX))) if(model=='ph') {ebetaX=exp(object$beta%*%t(newX)) for( i in 1:nrow(newZ)) {scure[,i]=s0^ebetaX[i]} for (i in 1:n){ for (j in 1:nrow(newX)){ spop[i,j]=uncureprob[j]*scure[i,j]+(1-uncureprob[j]) } } prd=cbind(spop,Time=object$Time) } if(model=='aft') { newX=cbind(1,newX) ebetaX=exp(object$beta%*%t(newX)) for( i in 1:nrow(newX)) {t[,i]=ebetaX[i]*exp(object$error)} for (i in 1:n){ for (j in 1:nrow(newX)){ spop[i,j]=uncureprob[j]*s0[i]+(1-uncureprob[j]) } } prd=cbind(spop=spop,Time=t) } structure(list(call=call,newuncureprob=uncureprob,prediction=prd),class="predictsmcure") }
translate.params <- function(x,params.sub){ params = colnames(x$samples[[1]]) params.simple.sub = unique(sapply(strsplit(params.sub, "\\["), "[", 1)) params.simple <- unique(sapply(strsplit(params, "\\["), "[", 1)) n = length(params.simple.sub) if(sum(params.simple.sub%in%params.simple)!=n){stop('One or more specified parameters are not in model output.\n')} params.sub.1 <- sapply(strsplit(params.sub, "\\]"), "[", 1) params.2 <- sapply(strsplit(params.sub.1, "\\["), "[", 2) expand <- sapply(strsplit(params, "\\["), "[", 1) dim = get.dim(params) gen.samp.mat <- function(x){ out = x for(i in 1:length(x)){ if(!is.na(x[[i]][1])){ if(length(x[[i]])>1){ out[[i]] = array(params[expand==names(x)[i]],dim=x[[i]]) } if(length(x[[i]])==1){ out[[i]] = params[expand==names(x)[i]] } } else {out[[i]] = NA} } return(out) } mats = gen.samp.mat(dim) mats.sub = mats[params.simple.sub] index=1 params.new = character() for (i in 1:length(params.sub)){ if(!is.na(mats.sub[i])||!is.na(params.2[i])){ if(params.sub[i]==params.simple.sub[i]){ st = paste('mats.sub$',params.simple.sub[i],"[]",sep="") } else { st = paste('mats.sub$',params.simple.sub[i],"[",params.2[i],']',sep="") } ind = eval(parse(text=st)) params.new[index:(index+length(ind)-1)] = ind index = index+length(ind) } else { params.new[index]=params.sub[i] index=index+1 } } return(params.new) }
context("QuantilePG") test_that("quantilePG works as expected for various levels",{ source("load-ref.R") lev.ok.all <- c(0.25,0.5,0.75) lev.ok.1 <- c(0.25) lev.ok.2 <- c(0.5,0.75) lev.err.1 <- c("non numeric",0.5) lev.err.2 <- c(0.5,1.5) V.qr.ref.1 <- array(V.qr.ref[,,,1], dim=c(64,3,3,1)) V.fft.ref.1 <- array(V.fft.ref[,,,1], dim=c(64,3,3,1)) qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, type="qr") V.qr <- getValues(qPG.qr) expect_that(dim(V.qr),equals(c(64,3,3,1))) expect_that(V.qr,equals(V.qr.ref.1)) qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, type="clipped") V.fft <- getValues(qPG.fft) expect_that(dim(V.fft),equals(c(64,3,3,1))) expect_that(V.fft,equals(V.fft.ref.1)) V.qr.ref.1 <- array(V.qr.ref[,1,1,1], dim=c(64,1,1,1)) V.fft.ref.1 <- array(V.fft.ref[,1,1,1], dim=c(64,1,1,1)) qPG.qr <- quantilePG(Y, levels.1=lev.ok.1, type="qr") V.qr.1 <- getValues(qPG.qr) expect_that(dim(V.qr.1),equals(c(64,1,1,1))) expect_that(V.qr.1,equals(V.qr.ref.1)) qPG.fft <- quantilePG(Y, levels.1=lev.ok.1, type="clipped") V.fft.1 <- getValues(qPG.fft) expect_that(dim(V.fft.1),equals(c(64,1,1,1))) expect_that(V.fft.1,equals(V.fft.ref.1)) V.qr.ref.1 <- array(V.qr.ref[,1,2:3,1], dim=c(64,1,2,1)) V.fft.ref.1 <- array(V.fft.ref[,1,2:3,1], dim=c(64,1,2,1)) qPG.qr <- quantilePG(Y, levels.1=lev.ok.1, levels.2=lev.ok.2, type="qr") V.qr <- getValues(qPG.qr) expect_that(dim(V.qr),equals(c(64,1,2,1))) expect_that(V.qr,equals(V.qr.ref.1)) qPG.fft <- quantilePG(Y, levels.1=lev.ok.1, levels.2=lev.ok.2, type="clipped") V.fft <- getValues(qPG.fft) expect_that(dim(V.fft),equals(c(64,1,2,1))) expect_that(V.fft,equals(V.fft.ref.1)) expect_that(quantilePG(Y,levels.1=lev.err.1,type="qr"),throws_error()) expect_that(quantilePG(Y,levels.1=lev.err.2,type="clipped"),throws_error()) }) test_that("quantilePG works as expected for various frequencies",{ source("load-ref.R") lev.ok.all <- c(0.25,0.5,0.75) freq.init.all <- 2*pi*(0:63)/64 qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.all, type="qr") qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.all, type="clipped") V.qr.ref.1 <- array(V.qr.ref[,,,1], dim=c(64,3,3,1)) V.fft.ref.1 <- array(V.fft.ref[,,,1], dim=c(64,3,3,1)) freq.call.part <- 2*pi*((0:63)/64+1/256) expect_that(V.qr <- getValues(qPG.qr, frequencies=freq.call.part), gives_warning()) expect_that(dim(V.qr),equals(c(64,3,3,1))) expect_that(V.qr, equals(V.qr.ref.1)) expect_that(V.fft <- getValues(qPG.fft, frequencies=freq.call.part), gives_warning()) expect_that(dim(V.fft),equals(c(64,3,3,1))) expect_that(V.fft, equals(V.fft.ref.1)) freq.call.part <- 2*pi*(0:31)/32 V.qr <- getValues(qPG.qr, frequencies=freq.call.part) expect_that(dim(V.qr),equals(c(32,3,3,1))) expect_that(V.qr[,,,1], equals(V.qr.ref[1+2*(0:31),,,1])) V.fft <- getValues(qPG.fft, frequencies=freq.call.part) expect_that(dim(V.fft),equals(c(32,3,3,1))) expect_that(V.fft[,,,1], equals(V.fft.ref[1+2*(0:31),,,1])) freq.init.part <- 2*pi*(0:31)/32 qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.part, type="qr") qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.part, type="clipped") freq.call.all <- 2*pi*(0:63)/64 expect_that(V.qr <- getValues(qPG.qr, frequencies=freq.call.all), gives_warning()) expect_that(dim(V.qr),equals(c(64,3,3,1))) expect_that(V.qr[1+2*(0:31),,,1], equals(V.qr.ref[1+2*(0:31),,,1])) expect_that(V.fft <- getValues(qPG.fft, frequencies=freq.call.all), gives_warning()) expect_that(dim(V.fft),equals(c(64,3,3,1))) expect_that(V.fft[1+2*(0:31),,,1], 
equals(V.fft.ref[1+2*(0:31),,,1])) freq.init.beg <- 2*pi*(0:15)/64 freq.call.all <- 2*pi*(0:63)/64 qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.beg, type="qr") expect_that(V.qr <- getValues(qPG.qr, frequencies = freq.call.all), gives_warning()) expect_that(dim(V.qr),equals(c(64,3,3,1))) expect_that(V.qr[1:33,,,1], equals(V.qr.ref[c(1:16,rep(16,17)),,,1])) expect_that(V.qr[34:64,,,1], equals(Conj(V.qr.ref[c(rep(16,16),16:2),,,1]))) qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init.beg, type="clipped") expect_that(V.fft <- getValues(qPG.fft, frequencies = freq.call.all), gives_warning()) expect_that(dim(V.fft),equals(c(64,3,3,1))) expect_that(V.fft[1:33,,,1], equals(V.fft.ref[c(1:16,rep(16,17)),,,1])) expect_that(V.fft[34:64,,,1], equals(Conj(V.fft.ref[c(rep(16,16),16:2),,,1]))) freq.init <- 2*pi*(0:63)/64 freq.call <- 2*pi*c(64,32,128)/64 qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init, type="qr") V.qr <- getValues(qPG.qr, frequencies = freq.call) expect_that(dim(V.qr),equals(c(3,3,3,1))) expect_that(V.qr[,,,1], equals(V.qr.ref[c(1,33,1),,,1])) qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, frequencies=freq.init, type="clipped") V.fft <- getValues(qPG.fft, frequencies = freq.call) expect_that(dim(V.fft),equals(c(3,3,3,1))) expect_that(V.fft[,,,1], equals(V.fft.ref[c(1,33,1),,,1])) }) test_that("quantilePG works as expected with bootstrapping",{ source("load-ref.R") lev.ok.all <- c(0.25,0.5,0.75) set.seed(2581) qPG.qr <- quantilePG(Y, levels.1=lev.ok.all, type="qr", B=1, l=8, type.boot="mbb") V.qr <- getValues(qPG.qr) expect_that(dim(V.qr),equals(c(64,3,3,2))) expect_that(V.qr,equals(V.qr.ref)) qPG.fft <- quantilePG(Y, levels.1=lev.ok.all, type="clipped", B=1, l=8, type.boot="mbb") V.fft <- getValues(qPG.fft) expect_that(dim(V.fft),equals(c(64,3,3,2))) expect_that(V.fft,equals(V.fft.ref)) })
context("vis_expect") dat_test <- tibble::tribble( ~x, ~y, -1, "A", 0, "B", 1, "C", NA, NA ) vis_expect_plot <- vis_expect(dat_test, ~ .x == -1) vis_expect_plot_show_perc_true <- vis_expect(dat_test, ~ .x == -1, show_perc = FALSE) test_that("vis_expect creates the right plot",{ skip_on_cran() ver <- as.character(gdtools::version_freetype()) cat(sprintf("FreeType version: %s\n", ver)) vdiffr::expect_doppelganger("vis_expect vanilla", vis_expect_plot) vdiffr::expect_doppelganger("vis_expect show perc true", vis_expect_plot_show_perc_true) }) test_that("vis_expect fails when an object of the wrong class is provided", { testthat::expect_error(vis_expect(AirPassengers, ~.x < 20)) })
as.BibEntry <- function(x){ if (!length(x)) return(x) if (inherits(x, 'BibEntry')){ class(x) <- c('BibEntry', 'bibentry') }else if (inherits(x, 'bibentry')){ att <- attributes(x) x <- lapply(unclass(x), function(y){ attr(y, "dateobj") <- ProcessDates(y) if (!length(attr(y, "key"))) attr(y, "key") <- CreateBibKey(y[['title']], y[['author']], y[['year']]) check <- try(.BibEntryCheckBibEntry1(y), TRUE) if (inherits(check, 'try-error')){ message(gettextf('Ignoring entry titled %s because %s', dQuote(y[['title']]), strsplit(check, '\\n[[:space:]]*')[[1]][2])) return(NULL) } y }) x <- x[!vapply(x, is.null, FALSE)] if (length(x)){ attributes(x) <- att class(x) <- c('BibEntry', 'bibentry') } }else if (is.character(x)){ if (is.na(x['bibtype']) || is.na(x['key'])) stop("Object of class character must have entries named bibtype and key.") x <- as.list(x) attr(x, 'entry') <- x$bibtype attr(x, 'key') <- x$key x$bibtype <- NULL x$key <- NULL x <- MakeBibEntry(x, FALSE) }else if(is.data.frame(x)){ .fields <- colnames(x) if (is.null(x$bibtype)) stop("data.frame must have column for 'bibtype'.") keys <- rownames(x) if (keys[1L] == '1') warning('rownames of data.frame not meaningful for creating keys') y <- vector('list', length(x)) for (i in seq_len(nrow(x))){ na.ind <- which(!is.na(x[i, ])) y[[i]] <- as.BibEntry(c(setNames(as.character(x[i, na.ind]), .fields[na.ind]), key = keys[i])) } y <- MakeCitationList(y) return(y) }else if(is.list(x)){ if(length(x) == 1L && !is.null(attr(x, 'bibtype'))){ class(x) <- c('BibEntry', 'bibentry') }else if (!is.null(x$dateobj)){ x <- RelistBibEntry(x) }else if (!is.null(attr(x[[1L]], 'bibtype'))){ class(x) <- c('BibEntry', 'bibentry') }else{ if (length(x[[1L]]) == 1L){ x <- do.call(BibEntry, x) }else{ x <- lapply(x, function(...) do.call(BibEntry, as.list(...))) x <- do.call("c", x) } } }else{ classes <- paste(class(x), collapse = ", ") stop(gettextf("Cannot coerce object of class %s to BibEntry", sQuote(classes))) } x <- MakeKeysUnique(x) return(x) } is.BibEntry <- function(x){ inherits(x, "BibEntry") }
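# Minimal sketch of the character-vector branch of as.BibEntry(); the field
# values below are invented. The vector must contain 'bibtype' and 'key'
# entries, otherwise the function stops.
ref <- as.BibEntry(c(bibtype = "Article", key = "doe2020", title = "An Example Title", author = "Jane Doe", journaltitle = "Journal of Examples", year = "2020"))
is.BibEntry(ref)  # TRUE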
ladreg = function(y,X, intercept =1, alpha=0.05, print=1){ if(intercept == 0){ fit = ladfit(X, y, intercept = F) Xmat =X SEAm = sum(abs(y)) fator=0 } if(intercept == 1){ fit = ladfit(X, y, intercept = T) Xmat =cbind(1,X) SEAm = sum(abs(y-median(y))) fator=1 } r= cbind(residuals(fit)) coef1 = cbind(coef(fit)) aju = y-r n = nrow(X) - ncol(X) -fator m = (n+1)/2 -sqrt(n) res = cbind(sort(r)) res1 = matrix(0, nrow(res) -nrow(coef1),1) def = matrix(0, nrow(coef1),1) j=1 for(i in 1:nrow(r)) if(r[i] == 0) ((def[j] = i) & (j = j+1)) j=1 for(i in 1:nrow(res)) if(res[i] !=0) ((res1[j] = res[i]) & (j = j+1)) pos1 = n-m +1 pos2 = m e1 = round(pos1) e2 = round(pos2) if (e1 > n) (e1 = n) if(e2 == 0) (e2 = 1) tao = sqrt(n)*(res[e1] -res1[e2])/4 SEA = sum(abs(res)) XX = solve(t(Xmat)%*%Xmat) DX = diag(XX) IC = matrix(0, nrow(coef1),2) TH = matrix(0, nrow(coef1),2) aux = cbind(sqrt(DX)) DP = tao*aux IC[,1] = coef1 -qnorm(0.975)*tao*aux IC[,2] = coef1 +qnorm(0.975)*tao*aux E = matrix(0, nrow(coef1), 1) for(i in 1:ncol(Xmat)) E[i] = coef1[i]/(tao*aux[i]) E = abs(E) TH[,1] = E TH[,2] = 2*(1-pnorm(E,0,1)) EReg = matrix(0,1,2) reg = (SEAm -SEA)/(tao/2) EReg[1,1] = reg EReg[1,2] = 1-stats::pchisq(reg, ncol(X)) EAM = SEA/length(y) RSEA = SEAm -SEA R2 = RSEA/(RSEA +(nrow(X) -ncol(X) -fator)*tao/2) R3 = RSEA/(RSEA +(nrow(X) -ncol(X) -fator)*EAM/2) Raju = 1 -(1-R3)*(nrow(y)/(nrow(X) -ncol(X) -fator)) SEAP = 0 for(i in 1:nrow(X)) { X1 = matrix(0, nrow(X) -1, ncol(X)) y1 = matrix(0, nrow(X) -1, 1) k=1 for(j in 1:nrow(X)) if (j != i) (X1[k,] = X[j,]) & (y1[k] = y[j]) & (k=k+1) if(intercept == 0) (fit1 =ladfit(X1,y1, intercept = F)) if(intercept == 1) (fit1 =ladfit(X1,y1, intercept = T)) coefaux = cbind(coef(fit1)) yaju = Xmat[i,]%*%coefaux parc = abs(y[i] -yaju) SEAP = SEAP +parc } print(def) if(print == 1) { OAR = cbind(y, aju, r) print(OAR) } tao = round(tao, digits=4) coef1 = round(coef1, digits=4) print(coef1) DP = round(DP, digits=4) print(DP) IC = round(IC, digits =4) print(IC) TH = round(TH, digits =4) print(TH) EReg = round(EReg, digits =4) print(EReg) SEA = round(SEA, digits=4) EAM = round(EAM, digits =4) R2 = round(R2, digits=4) SEAP =round(SEAP, digits=4) FIT = list("OAR" = OAR, "tao" = tao, "coef" = coef1, "DP" = DP, "IC" = IC, "TH" = TH, "EReg" = EReg, "SEA" = SEA, "EAM" = EAM, "R2" = R2, "SEAP" = SEAP) return(FIT) }
library(RoughSets) data(RoughSetData) decision.table <- RoughSetData$hiring.dt control <- list(t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), type.aggregation = c("t.tnorm", "lukasiewicz")) reduct.1 <- FS.quickreduct.FRST(decision.table, type.method = "fuzzy.dependency", type.QR = "fuzzy.QR", control = control) control <- list(t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), type.aggregation = c("t.tnorm", "lukasiewicz")) reduct.2 <- FS.quickreduct.FRST(decision.table, type.method = "fuzzy.boundary.reg", type.QR = "fuzzy.QR", control = control) control <- list(alpha = 0.9, q.some = c(0.1, 0.6), q.most = c(0.2, 1), type.aggregation = c("t.tnorm", "lukasiewicz")) reduct.3 <- FS.quickreduct.FRST(decision.table, type.method = "vqrs", type.QR = "fuzzy.QR", control = control) control <- list(t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), m.owa = 3, type.aggregation = c("t.tnorm","lukasiewicz")) reduct.4 <- FS.quickreduct.FRST(decision.table, type.method = "owa", type.QR = "fuzzy.QR", control = control) control <- list(t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), type.rfrs = "k.trimmed.min", type.aggregation = c("t.tnorm", "lukasiewicz"), k.rfrs = 0) reduct.5 <- FS.quickreduct.FRST(decision.table, type.method = "rfrs", type.QR = "fuzzy.QR", control = control) control <- list(alpha = 1, t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), type.aggregation = c("t.tnorm", "lukasiewicz")) reduct.6 <- FS.quickreduct.FRST(decision.table, type.method = "min.positive.reg", type.QR = "fuzzy.QR", control = control) control <- list(alpha.precision = 0.05, t.implicator = "lukasiewicz", type.aggregation = c("t.tnorm", "lukasiewicz"), type.relation = c("tolerance", "eq.1")) reduct.7 <- FS.quickreduct.FRST(decision.table, type.method = "fvprs", type.QR = "fuzzy.QR", control = control) control <- list(t.implicator = "lukasiewicz", type.relation = c("tolerance", "eq.1"), beta.quasi = 0.05, type.aggregation = c("t.tnorm", "lukasiewicz")) reduct.8 <- FS.quickreduct.FRST(decision.table, type.method = "beta.pfrs", type.QR = "fuzzy.QR", control = control) control <- list(alpha = 1) reduct.9 <- FS.quickreduct.FRST(decision.table, type.method = "fuzzy.discernibility", type.QR = "fuzzy.QR", control = control)
test_that("returns the message sent", { skip_if(!getOption("depigner.dev.test_telegram_bot")) skip_if(Sys.getenv("R_telegram_bot_name") != "cl_r_bot") start_bot_for_chat("Depigner test") expect_equal( send_to_telegram("test-send_to_telegram"), "test-send_to_telegram" ) options(depigner.bot = NULL) options(depigner.chat_id = NULL) }) test_that("returns the ggplot sent", { skip_if(!getOption("depigner.dev.test_telegram_bot")) skip_if(Sys.getenv("R_telegram_bot_name") != "cl_r_bot") start_bot_for_chat("Depigner test") gg <- ggplot2::qplot(data = mtcars, x = cyl, y = hp, main = "Test") expect_equal(send_to_telegram(gg), gg) options(depigner.bot = NULL) options(depigner.chat_id = NULL) })
vertex.pca.j <- function(data.sym) { data.sym.vertex <- vertex.interval.new.j(data.sym) data.sym.center <- centers.interval.j(data.sym) data.sym.center <- data.sym.center$centers data.sym.center <- scale(data.sym.center) mean.var <- attr(data.sym.center, "scaled:center") desv.var <- attr(data.sym.center, "scaled:scale") N <- data.sym$N M <- data.sym$M sym.data.vertex.matrix.cent <- data.sym.vertex$vertex for (i in 1:M) { sym.data.vertex.matrix.cent[, i] <- (sym.data.vertex.matrix.cent[, i] - mean.var[i]) / desv.var[i] } dim.vertex <- dim(data.sym.vertex$vertex)[1] tot.individuals <- N + dim.vertex data.sym.matrix <- rbind(data.sym.center, sym.data.vertex.matrix.cent) pca.centers <- FactoMineR::PCA(X = data.sym.matrix, scale.unit = FALSE, ind.sup = (N + 1):tot.individuals, ncp = M, graph = FALSE) data.sym.cent <- data.frame.to.RSDA.inteval.table.j(sym.scale.interval( data.sym, mean.var, desv.var )) res <- sym.interval.vertex.pca.j(data.sym.cent) class(res$Sym.Components) <- "sym.data.table" res$Sym.Components <- to.v3(res$Sym.Components) return(res) }
gov_layout <- function(..., inputID = "main", size="full"){ govLayout <- shiny::tags$div( id = inputID, class="govuk-width-container govuk-main-wrapper", shiny::tags$div( id = paste0(inputID,"_sub"), class=paste0("govuk-grid-column-",size), ...) ) attachDependency(govLayout) }
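# Hypothetical use of gov_layout() inside a shiny UI. "full" is one of the
# GOV.UK grid widths that the size argument is pasted into
# ("full", "one-half", "two-thirds", ...); it is assumed valid here, and
# attachDependency() is assumed to come from the same package.
ui <- shiny::fluidPage(
  gov_layout(shiny::tags$h1("Service name", class = "govuk-heading-xl"), inputID = "main", size = "full")
)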
# NOTE: the original default for cols was lost in extraction; two colors
# (dead cell, live cell) are assumed here so the signature parses and runs.
Life <- function( n.rows=40, n.cols=40, n.cycles=100, sleep.time=0.12, cols=c("white", "black"), random=TRUE, rnd.threshold=0.3) { .shiftmatrix <- function(mx, dr, dc) { nr <- nrow(mx) nc <- ncol(mx) if (abs(dr) >= nr || abs(dc) >= nc) { mx <- matrix(0, nrow = nr, ncol = nc) ; return(mx) } if (dr > 0) { mx <- rbind(mat.or.vec(dr, nc), mx) mx <- mx[1:nr,] } else if (dr < 0) { mx <- rbind(mx, mat.or.vec(-dr, nc)) mx <- mx[(1 - dr):(nr - dr),] } if (dc > 0) { mx <- cbind(mat.or.vec(nr, dc), mx) mx <- mx[,1:nc] } else if (dc < 0) { mx <- cbind(mx, mat.or.vec(nr, -dc)) mx <- mx[,(1 - dc):(nc - dc)] } return(mx) } .lifecycle <- function(mx) { mx0 <- matrix(0, nrow = nrow(mx), ncol = ncol(mx)) for (n in (-1:1)) { for (m in (-1:1)) { if (n !=0 || m !=0) mx0 <- mx0 + .shiftmatrix(mx, n, m) } } mx[mx0 > 3 | mx0 < 2] <- 0 mx[mx0 == 3] <- 1 return(mx) } .board <- matrix(0, nrow=n.rows, ncol=n.cols) old.par <- par(mar=rep(0, 4)) if(random) { .board[runif(n.rows * n.cols, 0, 1) < rnd.threshold] <- 1 image(.board, col=cols) grid(n.cols, n.rows, lty=1) } else { image(.board, col=cols) grid(n.cols, n.rows, lty=1) points(0, 0, pch=15, cex=2, col="red") while(TRUE) { click <- locator(1) click$x <- floor(click$x * n.cols) + 1 click$y <- floor(click$y * n.rows) + 1 click$x[click$x > n.cols] <- n.cols click$y[click$y > n.rows] <- n.rows click$x[click$x < 1] <- 1 click$y[click$y < 1] <- 1 if(click$x == 1 & click$y == 1) break sel <- as.matrix(as.data.frame(click)) .board[sel] <- as.numeric(.board[sel] == 0) image(.board, col=cols) grid(n.cols, n.rows, lty=1) points(0, 0, pch=15, cex=2, col="red") } } for (i in (1:n.cycles)) { Sys.sleep(sleep.time) .board <- .lifecycle(.board) image(.board, col=cols) grid(n.cols, n.rows, lty=1) } par(old.par) }
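# Quick demo run on an interactive graphics device; all parameter values are
# illustrative only: a 30x30 random board evolved for 50 cycles.
Life(n.rows = 30, n.cols = 30, n.cycles = 50, sleep.time = 0.05, random = TRUE, rnd.threshold = 0.3)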
simulate_allele_count_data <- function(allele.frequencies,sample.sizes,phi.parameter=NULL,invariant.loci.tolerance=10,allele.frequency.numerical.shift=1e-10){ populations <- nrow(allele.frequencies) loci <- ncol(allele.frequencies) simulated.allele.counts <- matrix(0,nrow=populations,ncol=loci) if(is.null(phi.parameter)){ invariant.loci <- apply(simulated.allele.counts,2,identify_invariant_loci) i <- 0 while(any(invariant.loci=="TRUE") && i < invariant.loci.tolerance){ simulated.allele.counts <- matrix( rbinom(n=populations*loci, size=sample.sizes, prob=allele.frequencies), nrow=populations,ncol=loci) invariant.loci <- apply(simulated.allele.counts,2,identify_invariant_loci) i <- i + 1 } } if(!is.null(phi.parameter)){ invariant.loci <- apply(simulated.allele.counts,2,identify_invariant_loci) i <- 0 while(any(invariant.loci=="TRUE") && i < invariant.loci.tolerance){ allele.frequencies[which(allele.frequencies == 0)] <- allele.frequency.numerical.shift allele.frequencies[which(allele.frequencies == 1)] <- 1-allele.frequency.numerical.shift simulated.allele.counts <- matrix( rbetabinom(n=populations*loci, size=sample.sizes, prob=allele.frequencies, shape1=phi.parameter*allele.frequencies, shape2=phi.parameter*(1-allele.frequencies)), nrow=populations,ncol=loci) invariant.loci <- apply(simulated.allele.counts,2,identify_invariant_loci) i <- i + 1 } } if(i == invariant.loci.tolerance){ warning("your data matrix contains invariant loci") } return(simulated.allele.counts) }
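# Illustrative call with toy inputs; identify_invariant_loci() and
# rbetabinom() are assumed to come from the same package/namespace, and the
# frequency and sample-size values are made up.
freqs <- matrix(runif(5 * 20), nrow = 5, ncol = 20)  # 5 populations, 20 loci
sizes <- matrix(20, nrow = 5, ncol = 20)             # 20 sampled alleles per cell
counts <- simulate_allele_count_data(freqs, sizes)   # binomial sampling when phi.parameter is NULL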
lon_lat_fine <- function(lon, lat, nf) { nlon <- length(lon) nlat <- length(lat) lone <- c(2 * lon[1] - lon[2], lon, 2 * tail(lon, 1) - tail(lon, 2)[1]) late <- c(2 * lat[1] - lat[2], lat, 2 * tail(lat, 1) - tail(lat, 2)[1]) xc <- 0:(nlon + 1) yc <- 0:(nlat + 1) xf <- seq(0.5 + 1 / (2 * nf), nlon + 0.5 - 1 / (2 * nf), 1 / nf) yf <- seq(0.5 + 1 / (2 * nf), nlat + 0.5 - 1 / (2 * nf), 1 / nf) lonf <- approx(xc, lone, xf, method = "linear") latf <- approx(yc, late, yf, method = "linear") return(list(lon = lonf$y, lat = latf$y)) }
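# Refining a 2-degree grid by a factor nf = 4: plain numeric vectors in,
# a list with elements lon and lat out. Values are illustrative.
fine <- lon_lat_fine(seq(0, 10, by = 2), seq(40, 50, by = 2), nf = 4)
length(fine$lon)  # 4 times as many points as the input longitude vector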
registerCores <- function(numberCores){ cluster <- parallel::makeCluster(numberCores) doParallel::registerDoParallel(cluster) invisible(cluster) }
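# Typical pairing with foreach/%dopar%; assumes the parallel/doParallel/foreach
# stack is installed. Capturing the (invisibly returned) cluster lets the
# caller shut it down cleanly afterwards.
library(foreach)
cl <- registerCores(2)
res <- foreach(i = 1:4, .combine = c) %dopar% i^2
parallel::stopCluster(cl)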
setClass(Class = "OutcomeNoFit", contains = c("TxObj")) setGeneric(name = ".newOutcomeFit", def = function(moMain, moCont, txObj, iter, ...) { standardGeneric(".newOutcomeFit") }) setGeneric(name = ".predictAll", def = function(object, newdata, ...) { standardGeneric(".predictAll") }) setGeneric(name = ".predictMu", def = function(object, data, ...) { standardGeneric(".predictMu") }) setGeneric(name = "outcome", def = function(object, ...) { standardGeneric("outcome") }) NULL setMethod(f = ".newOutcomeFit", signature = c(moMain = "NULL", moCont = "NULL", txObj = "TxObj", iter = "NULL"), definition = function(moMain, moCont, txObj, iter, data, response, suppress) { if (!suppress ) cat("No outcome regression performed.\n") return( new(Class = "OutcomeNoFit", txObj) ) }) setMethod(f = ".predictAll", signature = c(object = "OutcomeNoFit", newdata = "data.frame"), definition = function(object, newdata) { superset <- .getSuperset(object = object@txInfo) prediction <- matrix(data = 0.0, nrow = nrow(x = newdata), ncol = length(x = superset), dimnames = list(NULL, superset)) optimalTx <- rep(x = NA, times = nrow(x = newdata)) return( list("optimalTx" = optimalTx, "decisionFunc" = prediction) ) }) setMethod(f = ".predictMu", signature = c(object = "OutcomeNoFit", data = "data.frame"), definition = function(object, data, ...) { return( .predictAll(object = object, newdata = data)$decisionFunc ) }) setMethod(f = "outcome", signature = c(object = "OutcomeNoFit"), definition = function(object, ...) { return( NA ) }) setMethod(f = "coef", signature = c(object = "OutcomeNoFit"), definition = function(object, ...) { return( NA ) }) setMethod(f = "fitObject", signature = c(object = "OutcomeNoFit"), definition = function(object, ...) { return( NA ) }) setMethod(f = "plot", signature = c(x = "OutcomeNoFit"), definition = function(x, suppress=FALSE, ...) { return( NULL ) }) setMethod(f = "predict", signature = c(object = "OutcomeNoFit"), definition = function(object, ...) { return( NULL ) }) setMethod(f = "print", signature = c(x = "OutcomeNoFit"), definition = function(x, ...) { print(x = NA) }) setMethod(f = "show", signature = c(object = "OutcomeNoFit"), definition = function(object) { show(object = NA) }) setMethod(f = "summary", signature = c(object = "OutcomeNoFit"), definition = function(object, ...) { return( NA ) })
if(getOption_IsoriX("example_maxtime") > 30) { GNIPDataDEagg <- prepsources(data = GNIPDataDE) GermanFit <- isofit(data = GNIPDataDEagg, mean_model_fix = list(elev = TRUE, lat_abs = TRUE)) GermanScape <- isoscape(raster = ElevRasterDE, isofit = GermanFit) GermanScape plot(GermanScape) PlotMean <- plot(x = GermanScape, which = "mean", plot = FALSE) PlotMeanPredVar <- plot(x = GermanScape, which = "mean_predVar", plot = FALSE) PlotMeanResidVar <- plot(x = GermanScape, which = "mean_residVar", plot = FALSE) PlotMeanRespVar <- plot(x = GermanScape, which = "mean_respVar", plot = FALSE) print(PlotMean, split = c(1, 1, 2, 2), more = TRUE) print(PlotMeanPredVar, split = c(2, 1, 2, 2), more = TRUE) print(PlotMeanResidVar, split = c(1, 2, 2, 2), more = TRUE) print(PlotMeanRespVar, split = c(2, 2, 2, 2), more = FALSE) }
compute_gradient <- function(theta, fun, step = 1e-6, ...){ temp <- sapply(1:length(theta), FUN = compute_gradient_coordinate, theta = theta, fun = function(x){fun(x,...)}, step = step) names(temp) <- names(theta) temp }
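# Sketch of a call, assuming compute_gradient_coordinate() (the per-coordinate
# finite-difference helper used above) is defined alongside this function.
theta <- c(a = 1, b = 2)
compute_gradient(theta, fun = function(x) sum(x^2))  # approximately c(a = 2, b = 4)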
loglik_penalty <- function(tau, y, type = c("h", "hh", "s"), is.non.negative = FALSE) { stopifnot(is.numeric(y), is.logical(is.non.negative)) type <- match.arg(type) yy <- y tau <- complete_tau(tau) zz <- normalize_by_tau(yy, tau) switch(type, h = { if (tau["delta"] == 0) { penalty <- 0 } else { uu <- W_delta(zz, delta = tau["delta"]) penalty <- sum(-tau["delta"]/2 * uu^2 - log(1 + tau["delta"] * uu^2)) } }, hh = { if (all(tau[grepl("delta", names(tau))] == 0)) { penalty <- 0 } else { uu <- W_2delta(zz, delta = tau[c("delta_l", "delta_r")]) ind <- (uu < 0) penalty <- sum(-tau["delta_l"]/2 * uu[ind]^2) + sum(-tau["delta_r"]/2 * uu[!ind]^2) - sum(log(1 + tau["delta_l"] * uu[ind]^2)) - sum(log(1 + tau["delta_r"] * uu[!ind]^2)) } }, s = { if (tau["gamma"] == 0) { penalty <- 0 } else { if (is.non.negative) { penalty <- sum(log_deriv_W(tau["gamma"] * zz, branch = 0)) } else { penalty <- NA } } }) return(penalty) }
word_coverage <- function(object, corpus, ...) UseMethod("word_coverage") word_coverage.sbo_dictionary <- function(object, corpus, ...) { .preprocess <- attr(object, ".preprocess") EOS <- attr(object, "EOS") wfreqs <- kgram_freqs(corpus, 1, object, .preprocess, EOS)[[1]] wfreqs <- arrange(wfreqs, .data$w1)[["n"]] names(wfreqs) <- c(as.character(object), "<EOS>", "<UNK>") wfreqs <- c("<EOS>" = wfreqs[["<EOS>"]], wfreqs[-(length(wfreqs) - 1)]) wfreqs <- cumsum(wfreqs / sum(wfreqs)) wfreqs <- head(wfreqs, -1) return(new_word_coverage(wfreqs)) } word_coverage.character <- function(object, corpus, .preprocess = identity, EOS = "", ...) { dict <- as_sbo_dictionary(object, .preprocess = .preprocess, EOS = EOS) return(word_coverage(dict, corpus = corpus)) } word_coverage_sbo_generic <- function(object, corpus, ...) { dict <- attr(object, "dict") .preprocess <- attr(object, ".preprocess") EOS <- attr(object, "EOS") dict <- as_sbo_dictionary(dict, .preprocess = .preprocess, EOS = EOS) return(word_coverage(dict, corpus = corpus)) } word_coverage.sbo_kgram_freqs <- function(object, corpus, ...) return(word_coverage_sbo_generic(object, corpus = corpus)) word_coverage.sbo_predictions <- function(object, corpus, ...) return(word_coverage_sbo_generic(object, corpus = corpus))
'.ursaCacheDir' <- function() { fpath <- getOption("ursaCacheDir") if (!dir.exists(fpath)) dir.create(fpath) fpath } '.ursaCacheFile' <- function(pattern="ursaCache") { .normalizePath(tempfile(tmpdir=.ursaCacheDir(),pattern=pattern)) } '.ursaCacheInventory' <- function() file.path(.ursaCacheDir(),"ursaCache_inventory.txt") '.ursaCacheVisits' <- function() file.path(.ursaCacheDir(),"ursaCache_visits.txt") '.ursaCacheDirClear' <- function(size=getOption("ursaCacheSize") ,age=getOption("ursaCacheAge") ,count=10000,completely=FALSE) { if (!is.numeric(size)) size <- 16 if (!is.numeric(age)) age <- 7 fpath <- .ursaCacheDir() if (!file.exists(fpath)) return(invisible(NULL)) if (completely) { if (!dir.exists(fpath)) return(invisible(NULL)) file.remove(.dir(path=fpath,pattern=as.list(args(.ursaCacheFile))$pattern ,full.names=TRUE)) if (develHtmlWidgets <- TRUE) { file.remove(.dir(path=fpath,pattern="^htmlwidgets.+\\.html$",full.names=TRUE)) dhw <- file.path(fpath,"htmlwidgets") if (dir.exists(dhw)) unlink(dhw) } unlink(fpath) return(invisible(NULL)) } inventory <- .ursaCacheInventory() if (!file.exists(inventory)) { return(.ursaCacheDirClear(completely=TRUE)) } was <- try(utils::read.table(inventory,sep=",",encoding="UTF-8")) if (inherits(was,"try-error")) { message("cache was removed completely due to damaged structure") return(.ursaCacheDirClear(completely=TRUE)) } colnames(was) <- c("time","stamp","visits","size","src","dst") was <- was[rev(seq(nrow(was))),] was0 <- was was$src <- NULL was$time <- as.POSIXct(was$time,format="%Y-%m-%dT%H:%M:%SZ",tz="UTC") t0 <- as.POSIXct(as.numeric(Sys.time()),origin="1970-01-01",tz="UTC") was$p1 <- unclass(difftime(t0,was$time,units="days")) was$p2 <- cumsum(was$size/1024) was$p3 <- row(was[,1,drop=FALSE]) ind <- which(was$p1>age | was$p2>size*1024*1024 | was$p3>count) if (!length(ind)) return(invisible(NULL)) if (length(ind)==nrow(was)) { message("cache was removed completely") return(.ursaCacheDirClear(completely=TRUE)) } dst <- file.path(fpath,was0$dst[ind]) dst <- dst[file.exists(dst)] if (FALSE) { print(was) print(c(size=size,age=age,count=count)) print(c(toRemove=dst)) q() } file.remove(dst) dst <- paste0(dst,".hdr") dst <- dst[file.exists(dst)] file.remove(dst) was0 <- was0[-ind,] was0 <- was0[rev(seq(nrow(was0))),] .ursaCacheWrite(was0,append=FALSE) return(invisible(NULL)) } '.ursaCacheExpired' <- function(value) { if (is.character(value)) { if (!.lgrep("\\-",value)) value <- paste0("-",value) if (!.lgrep("\\d\\s\\D",value)) value <- gsub("(^.*\\d)(\\D.+$)","\\1 \\2",value) expired <- as.integer(tail(seq(Sys.time(),len=2,by=value),1)) attr(expired,"cache") <- TRUE } else { expired <- as.integer(tail(seq(Sys.time(),len=2,by="-1 month"),1)) attr(expired,"cache") <- value } expired } '.ursaCacheDownload' <- function(src,dst,method,quiet=FALSE,cache=TRUE ,mode="w",headers=NULL) { enc <- "UTF-8" inventory <- .ursaCacheInventory() src0 <- src if (.lgrep("\\{..+}",src)) { dom <- unlist(strsplit(.gsub2("\\{(.+)\\}","\\1",gsub("\\{.\\}","",src)),"")) src <- unname(sapply(sample(dom),function(x) .gsub("{.+}",x,src0))) } if (missing(dst)) dst <- NULL expired <- .ursaCacheExpired(cache) cache <- attr(expired,"cache") if (cache) { if (file.exists(inventory)) { was <- utils::read.table(inventory,sep=",",encoding=enc) colnames(was) <- c("time","stamp","visits","size","src","dst") if (is.character(dst)) { stop("dst") } ind <- tail(which(!is.na(match(was$src,src0))),1) if ((length(ind))&&(!is.na(ind))) { if (was$stamp[ind]>=expired) { dst <- 
file.path(.ursaCacheDir(),was$dst[ind]) } } } } if ((is.null(dst))||(!file.exists(dst))) { if (!length(src)) return(NULL) if (is.null(dst)) dst <- if (cache) .ursaCacheFile() else tempfile() for (i in seq_along(src)) { ret <- try(download.file(url=URLencode(iconv(src[i],to="UTF-8")) ,destfile=dst,method=method,quiet=quiet,mode=mode ,headers=headers)) if (!inherits(ret,"try-error")) break } if (inherits(ret,"try-error")) return(ret) if (cache) .ursaCacheWrite(.ursaCacheRecord(dst,src=src0),append=TRUE) } else if (cache) { Fout <- file(.ursaCacheVisits(),"at") writeLines(basename(dst),Fout) close(Fout) } dst } '.ursaCacheRaster' <- function(src,unpack=c("none","gzip","bzip2"),reset=FALSE) { enc <- "UTF-8" unpack <- match.arg(unpack) finfo <- file.info(src) ftime <- as.integer(finfo$mtime) fsize <- finfo$size dst <- NULL inventory <- .ursaCacheInventory() ind <- NA if (file.exists(inventory)) { was <- utils::read.table(inventory,sep=",",encoding=enc) colnames(was) <- c("time","stamp","visits","size","src","dst") if (is.character(dst)) { stop("dst") } if (FALSE) { wasP <- was wasP$src <- substr(wasP$src,1,12) print(wasP) } if (FALSE) { ind1 <- match(.normalizePath(src),was$src) ind2 <- match(ftime,was$stamp) ind3 <- ind2 if (!anyNA(c(ind1,ind2,ind3))&&(ind1==ind2)&&(ind2==ind3)) { dst <- file.path(.ursaCacheDir(),was$dst[ind1[1]]) ind <- ind1 } } else { ind1 <- which(!is.na(match(was$src,.normalizePath(src)))) ind2 <- which(!is.na(match(was$stamp,ftime))) ta <- table(c(ind1,ind2)) ta <- ta[ta==2] if (length(ta)) { ind <- as.integer(names(ta)) dst <- file.path(.ursaCacheDir(),was$dst[ind[1]]) } } } if (reset) { if (!is.null(dst)) { was <- was[-ind,] file.remove(dst) dst <- NULL } else reset <- FALSE } if (unpack!="none") { if ((!is.null(dst))&&(!envi_exists(dst))) dst <- NULL if (is.null(dst)) { dst <- .ursaCacheFile() if (unpack %in% c("gzip","bzip2")) { if (unpack=="gzip") { system2("gzip",c("-f -d -c",.dQuote(src)),stdout=dst,stderr=FALSE) } else if (unpack=="bzip2") system2("bzip2",c("-f -d -c",.dQuote(src)),stdout=dst,stderr=FALSE) if (debugExact <- F) { str(src) str(dst) cat("------------\n") str(envi_list(src,exact=TRUE)) cat("------------\n") q() } if (length(listE <- envi_list(src,exact=TRUE))) file.copy(paste0(listE,".hdr"),paste0(dst,".hdr"),copy.date=TRUE) } da <- .ursaCacheRecord(dst,src=.normalizePath(src),ftime=ftime) if (reset) da <- rbind(was,da) .ursaCacheWrite(da,append=!reset) } else { Fout <- file(.ursaCacheVisits(),"at") writeLines(basename(dst),Fout) close(Fout) } } dst } '.ursaCacheRecord' <- function(dst,src=NULL,ftime=NULL) { if (.lgrep("^file:///",dst)) dst <- .gsub("^file:///","",dst) if (is.null(src)) src <- basename(dst) if (is.null(ftime)) ftime <- file.mtime(dst) da <- data.frame(time=format(Sys.time(),"%Y-%m-%dT%H:%M:%SZ",tz="UTC") ,stamp=as.integer(ftime) ,visits=0L ,size=file.size(dst) ,src=src ,dst=basename(dst) ) da } '.ursaCacheWrite' <- function(da,append=TRUE) { inventory <- .ursaCacheInventory() utils::write.table(da,quote=TRUE,col.names=FALSE,row.name=FALSE,sep="," ,file=inventory,append=append,fileEncoding="UTF-8") } '.ursaCacheRead' <- function(fname) { inventory <- .ursaCacheInventory() if (!file.exists(inventory)) return(NULL) was <- utils::read.table(inventory,sep=",",encoding="UTF-8") stopifnot(ncol(was)==6) colnames(was) <- c("time","stamp","visits","size","src","dst") was } '.ursaCacheFind' <- function(loc) { was <- .ursaCacheRead() if (is.null(was)) return(0L) ind <- match(loc,was$src) if (is.na(ind)) { ind <- match(basename(loc),was$dst) } if 
(is.na(ind)) return(0L) ind } '.atOnceCacheRebuildAndForget' <- function() { a <- utils::read.table("_inventory.txt",sep=",",dec=".") a <- data.frame(a[,1:2],B=0,a[,3:5]) str(a) utils::write.table(a,"_inventory.new",sep=",",dec=".",col.names=FALSE,row.names=FALSE) }
"countdata"
expected <- c(0, 0) test(id=12, code={ argv <- structure(list(x = c(0, 0)), .Names = "x") do.call('sample', argv); }, o = expected);
context("A basic workflow gets correct results.") suppressPackageStartupMessages(library(survey)) data(api) source("utilities.R") dstrata_srvyr <- apistrat %>% as_survey(strata = stype, weights = pw) srvyr_results <- dstrata_srvyr %>% summarise(api99_mn = survey_mean(api99, vartype = c("se", "var", "ci")), api99_tot = survey_total(api99, vartype = c("se", "var", "ci"))) dstrata_survey <- svydesign(ids = ~1, strata = ~stype, weights = ~pw, data = apistrat) survey_mn <- svymean(~api99, dstrata_survey) survey_tot <- svytotal(~api99, dstrata_survey) test_that("srvyr and survey get same mean (overall)", expect_equal(survey_mn[[1]], srvyr_results[[1]][[1]])) test_that("srvyr and survey get same mean var (overall)", expect_equal(attr(survey_mn, "var")[[1]], srvyr_results[[3]][[1]])) test_that("srvyr and survey get same mean CIs (overall)", expect_equal(confint(survey_mn, df = degf(dstrata_survey))[1:2], c(srvyr_results[[4]][[1]], srvyr_results[[5]][[1]]))) test_that("srvyr and survey get same total (overall)", expect_equal(survey_tot[[1]], srvyr_results[[6]][[1]])) test_that("srvyr and survey get same total var (overall)", expect_equal(attr(survey_tot, "var")[[1]], srvyr_results[[8]][[1]])) test_that("srvyr and survey get same total CIs (overall)", expect_equal(confint(survey_tot, df = degf(dstrata_survey))[1:2], c(srvyr_results[[9]][[1]], srvyr_results[[10]][[1]]))) srvyr_grouped_results <- dstrata_srvyr %>% group_by(stype) %>% summarise(api99_mn = survey_mean(api99, vartype = c("se", "var", "ci")), api99_tot = survey_total(api99, vartype = c("se", "var", "ci"))) survey_grouped_results_mn <- svyby(~api99, ~stype, dstrata_survey, svymean, vartype = c("se", "var", "ci")) survey_grouped_results_mn[, c("ci_l", "ci_u")] <- confint(survey_grouped_results_mn, df = degf(dstrata_survey)) survey_grouped_results_tot <- svyby(~api99, ~stype, dstrata_survey, svytotal, vartype = c("se", "var", "ci")) survey_grouped_results_tot[, c("ci_l", "ci_u")] <- confint(survey_grouped_results_tot, df = degf(dstrata_survey)) test_that("srvyr and survey get same mean (grouped)", expect_equal(survey_grouped_results_mn$api99, srvyr_grouped_results$api99_mn)) test_that("srvyr and survey get same mean var (grouped)", expect_equal(survey_grouped_results_mn$var, srvyr_grouped_results$api99_mn_var)) test_that("srvyr and survey get same mean lower CIs (grouped)", expect_equal(survey_grouped_results_mn$ci_l, srvyr_grouped_results$api99_mn_low)) test_that("srvyr and survey get same mean upper CIs (grouped)", expect_equal(survey_grouped_results_mn$ci_u, srvyr_grouped_results$api99_mn_upp)) test_that("srvyr and survey get same total (grouped)", expect_equal(survey_grouped_results_tot$api99, srvyr_grouped_results$api99_tot)) test_that("srvyr and survey get same total var (grouped)", expect_equal(survey_grouped_results_tot$var, srvyr_grouped_results$api99_tot_var)) test_that("srvyr and survey get same total lower CIs (grouped)", expect_equal(survey_grouped_results_tot$ci_l, srvyr_grouped_results$api99_tot_low)) test_that("srvyr and survey get same total upper CIs (grouped)", expect_equal(survey_grouped_results_tot$ci_u, srvyr_grouped_results$api99_tot_upp))
setClass( Class = "PhenotypicModel", representation = representation( name= "character", period = "numeric", aAGamma = "function", numbersCopy = "numeric", numbersPaste = "numeric", initialCondition = "function", paramsNames = "character", constraints = "function", params0 = "numeric", tipLabels = "character", tipLabelsSimu = "character", comment = "character" ), prototype=prototype( name = "BMtest", period = c(0,1,2,3,4,5,6), aAGamma = function(i, params){ functiona <- function(t){ return(rep(0,i+1)) } matrixA <- diag(0, i+1) functionGamma <- function(t){ return(diag(params[1], i+1)) } return(list(a=functiona, A=matrixA, Gamma=functionGamma)) }, numbersCopy = c(1, 1, 2, 1, 2, 5), numbersPaste = c(2, 3, 4, 5, 6, 7), initialCondition = function(params){ return(list(mean=c(0,0), var=c(0,0))) }, paramsNames = c("sigma"), constraints = function(params){ return(params[1] > 0) }, params0 = c(1), tipLabels = c("A", "B", "C", "D", "E", "F", "G"), tipLabelsSimu = c("A", "B", "C", "D", "E", "F", "G"), comment = "Toy model defined by default" ), validity=function(object){ if( length(object@numbersCopy) != length(object@numbersPaste) ){ stop("[PhenotypicModel : validation] \n The sequence of positions of branching lineages \n and the sequence of new positions for the traits in the newly born lineages\n should have the same length.") } if( length(object@numbersCopy) != length(object@period) ){ stop("[PhenotypicModel : validation] \n The sequence of positions of branching lineages \n and the sequence of time periods \n should have the same length.") } if( length(object@params0) != length(object@paramsNames) ){ stop("[PhenotypicModel : validation] \n There should be the same number of default parameters \n and parameter names.") } return(TRUE) } ) setClass( Class = "PhenotypicACDC", representation = representation( matrixCoalescenceTimes="matrix" ), contains="PhenotypicModel" ) setClass( Class = "PhenotypicADiag", representation = representation(), contains="PhenotypicModel" ) setClass( Class = "PhenotypicBM", representation = representation( matrixCoalescenceTimes="matrix" ), contains="PhenotypicModel" ) setClass( Class = "PhenotypicDD", representation = representation( matrixCoalescenceJ="matrix", nLivingLineages="numeric" ), contains="PhenotypicModel" ) setClass( Class = "PhenotypicGMM", representation = representation( n1="numeric", n2="numeric" ), contains="PhenotypicModel" ) setClass( Class = "PhenotypicOU", representation = representation( matrixCoalescenceTimes="matrix" ), contains="PhenotypicModel" ) setClass( Class = "PhenotypicPM", representation = representation(), contains="PhenotypicModel" ) setMethod( f="[", signature="PhenotypicModel", definition=function(x,i,j,drop){ switch( EXPR=i, "name"={return(x@name)}, "period"={return(x@period)}, "aAGamma"={return(x@aAGamma)}, "numbersCopy"={return(x@numbersCopy)}, "numbersPaste"={return(x@numbersPaste)}, "initialCondition"={return(x@initialCondition)}, "paramsNames"={return(x@paramsNames)}, "constraints"={return(x@constraints)}, "params0"={return(x@params0)}, "tipLabels"={return(x@tipLabels)}, "tipLabelsSimu"={return(x@tipLabelsSimu)}, "comment"={return(x@comment)}, stop("This variable name does not exist!") ) } ) setReplaceMethod( f="[", signature="PhenotypicModel", definition=function(x,i,j,value){ switch( EXPR=i, "name"={x@name <- value}, "period"={x@period <- value}, "aAGamma"={x@aAGamma <- value}, "numbersCopy"={x@numbersCopy <- value}, "numbersPaste"={x@numbersPaste <- value}, "initialCondition"={x@initialCondition <- value},
"paramsNames"={x@paramsNames <- value}, "constraints"={x@constraints <- value}, "params0"={x@params0 <- value}, "tipLabels"={x@tipLabels <- value}, "tipLabelsSimu"={x@tipLabelsSimu <- value}, "comment"={x@comment <- value}, stop("This variable name does not exist !") ) validObject(x) return(x) } ) setMethod( f="print", signature="PhenotypicModel", definition=function(x, ...){ cat("****************************************************************\n") cat("*** Object of Class PhenotypicModel *** \n") cat("*** Name of the model : ") print(x@name) cat("*** Parameters of the model : ") print(x@paramsNames) cat("*** Description : ") cat(x@comment) cat(paste("\n*** Epochs : the model is cut into ", length(x@period), " parts. \n")) print(x@period) cat("*** Lineages branching (to be copied at the end of the corresponding period) :\n") print(x@numbersCopy) cat("*** Positions of the new trait at the end of each period :\n") print(x@numbersPaste) cat("*** Initial condition :\n") print(x@initialCondition) cat("*** Vectors a_i, A_i, Gamma_i on each period i : \n") print(x@aAGamma) cat("*** Constraints on the parameters : \n") print(x@constraints) cat("*** Defaut parameter values : ") print(x@params0) cat("*** Tip labels : \n") print(x@tipLabels) cat("*** Tip labels for simulations : \n") print(x@tipLabelsSimu) cat("****************************************************************\n") } ) setMethod( f="show", signature="PhenotypicModel", definition=function(object){ cat("****************************************************************\n") cat("*** Object of Class PhenotypicModel *** \n") cat("*** Name of the model : ") print(object@name) cat("*** Parameters of the model : ") print(object@paramsNames) cat("*** Description : ") cat(object@comment) cat(paste("\n*** Periods : the model is cut into ", length(object@period), " parts. \n")) cat("For more details on the model, call : print(PhenotypicModel)\n") cat("****************************************************************\n") } )
set.seed(123) x <- stats::rnorm(100) lgl <- x > 0 cp <- crisp(x, lgl) test_that("ens() works", { expect_type(ens(cp), "double") expect_length(ens(cp), 1L) expect_lte(ens(cp), length(x)) expect_gte(ens(cp), 1L) })
population2sample.test <- function(popEst1, popEst2, alpha = 0.05, c0 = 0.1, MBT = 3000){ if (!inherits(popEst1, 'popEst') || !inherits(popEst2, 'popEst')) stop("The arguments popEst1 and popEst2 require 'popEst' class inputs!\n") EstAll1 = popEst1$coef EstAll2 = popEst2$coef n = nrow(popEst1[['ind.est']][[1]][['asym.ex']]) p = nrow(EstAll1) MC1 = length(popEst1[['ind.est']]) MC2 = length(popEst2[['ind.est']]) Mp = p * (p - 1) / 2 EstVec1 = matrix(0, MC1, Mp) EstVec2 = matrix(0, MC2, Mp) for (i in 1 : MC1){ Est = popEst1[['ind.est']][[i]][['coef']] EstVec1[i,] = Est[lower.tri(Est)] } for (i in 1 : MC2){ Est = popEst2[['ind.est']][[i]][['coef']] EstVec2[i,] = Est[lower.tri(Est)] } EstVecCenter1 = scale(EstVec1, scale = FALSE) EstVecCenter2 = scale(EstVec2, scale = FALSE) TestAllstandard1 = EstAll1[lower.tri(EstAll1)] TestAllstandard2 = EstAll2[lower.tri(EstAll2)] EstAll = EstAll1 - EstAll2 BTAllsim = matrix(0, Mp, MBT) for (i in 1 : MBT){ temp1 = rnorm(MC1) temp2 = rnorm(MC2) BTAllsim[, i] = (MC1)^(-0.5) * colSums(temp1 * EstVecCenter1) - (MC1)^(0.5) * colMeans(temp2 * EstVecCenter2) } SignalID=c() TestPro = TestProstandard = TestAllstandard1 - TestAllstandard2 BTPro = BTAllsim repeat{ PCtemp = TestPro TPStemp = round(TestProstandard, 5) PCmaxIndex0 = order(-abs(TPStemp))[1] PCmaxIndex = which(TPStemp %in% c(TPStemp[PCmaxIndex0], -TPStemp[PCmaxIndex0])) SignalIDtemp = c() for (q in 1 : length(PCmaxIndex)){ SignalIDtemp1 = which(abs(EstAll - PCtemp[PCmaxIndex[q]]) == min(abs(EstAll - PCtemp[PCmaxIndex[q]])), arr.ind = TRUE) SignalIDtemp = rbind(SignalIDtemp, SignalIDtemp1) } SignalID = rbind(SignalID, SignalIDtemp) TestPro = TestPro[-PCmaxIndex] BTPro = BTPro[-PCmaxIndex, ] TestProstandard = TestProstandard[-PCmaxIndex] TestStatPro = sqrt(MC1) * max(abs(TestProstandard)) BTAllsimPro = c() for (i in 1 : MBT){ BTAllsimPro[i] = max(abs(BTPro[, i])) } QPro = sort(BTAllsimPro)[(1 - alpha) * MBT] if (TestStatPro < QPro) break } aug = ceiling(c0 * dim(SignalID)[1] / (2 * (1 - c0))) PCtemp = TestPro TPStemp = round(TestProstandard, 5) PCmaxIndex0 = order(-abs(TPStemp))[1 : aug] PCmaxIndex = which(TPStemp %in% c(TPStemp[PCmaxIndex0], -TPStemp[PCmaxIndex0])) SignalIDtemp = c() for (q in 1 : length(PCmaxIndex)){ SignalIDtemp1 = which(abs(EstAll - PCtemp[PCmaxIndex[q]]) == min(abs(EstAll - PCtemp[PCmaxIndex[q]])), arr.ind = TRUE) SignalIDtemp = rbind(SignalIDtemp, SignalIDtemp1) } SignalID = rbind(SignalID, SignalIDtemp) recovery = matrix(0, p, p) recovery[SignalID[,1]+(SignalID[,2]-1)*p]=1 return(recovery) }
evouniparam <- function(phyl, comm, method = c("hill", "tsallis", "renyi"), q = 2, tol = 1e-8){ ow <- options("warn") tre <- .checkphyloarg(phyl) phyl1 <- tre$phyl.phylo A <- write.tree(phyl1) if(!substr(A, nchar(A)-1, nchar(A))==");"){ phyl1 <- read.tree(text=paste0("(", substr(A,1,nchar(A)-1), ");")) options(warn = -1) } method <- method[1] reduceTree <- function(tree){ C <- vcv.phylo(tree, model = "Brownian") if(min(diag(C))<1){ warning("The phylogenetic tree was re-scaled so that the shortest distance from tip to root is equal to 1") tree$edge.length <- tree$edge.length/min(diag(C)) } return(tree) } if(method!="hill") phyl1 <- reduceTree(phyl1) hi <- diag(vcv.phylo(phyl1, model = "Brownian")) phylstar <- starTree(phyl1$tip.label, hi) if(length(q)==1){ vnum <- evodivparam(phyl1, comm, method, q, tol) vden <- evodivparam(phylstar, comm, method, q, tol) v <- vnum/vden v[vnum < tol] <- 0 class(v) <- "evouniparam" options(ow) return(v) } if ( length(q) > 1){ tabnum <- evodivparam(phyl1, comm, method, q, tol)$div tabden <- evodivparam(phylstar, comm, method, q, tol)$div tab1 <- as.matrix(tabnum)/as.matrix(tabden) tab1[as.matrix(tabnum) < tol] <- 0 listtotale <- list() listtotale$q <- q listtotale$uni <- as.data.frame(tab1) class(listtotale) <- "evouniparam" options(ow) return(listtotale) } }
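# Heavily hedged toy call: a random 5-tip coalescent tree from ape and a
# 2-site abundance matrix with Poisson counts. .checkphyloarg(),
# evodivparam() and starTree() are assumed available from the surrounding
# package and its dependencies; all numbers are illustrative.
library(ape)
phy <- rcoal(5)
comm <- matrix(rpois(10, lambda = 3), nrow = 2, dimnames = list(NULL, phy$tip.label))
evouniparam(phy, comm, method = "hill", q = 2)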
valid.limits <- function(plist, no.ord, no.norm){ validate.plist(plist, no.ord) minmat = maxmat= diag(length(plist) + no.norm) for (r in 2:nrow(minmat) ) { for (c in 1:(r-1) ){ if(r != c) { if (r<=length(plist) & c<=length(plist) ) { minmax = LimitforOO(plist[[r]], plist[[c]]) } else if (r>length(plist) & c>length(plist) ) { minmax = c(-1,1) } else if (r>length(plist) & c<=length(plist) ){ minmax = LimitforON(plist[[ c ]]) } minmat[r,c] = minmax[1] maxmat[r,c] = minmax[2] rm(minmax) } } } minmat= minmat + t(minmat) ; diag(minmat)=1 maxmat= maxmat + t(maxmat) ; diag(maxmat)=1 return (list(lower=minmat, upper=maxmat) ) }
mediation.effect.bar.plot <- function(x, mediator, dv, main = "Mediation Effect Bar Plot",
                                      width = 1, left.text.adj = 0, right.text.adj = 0,
                                      rounding = 3, file = "", save.pdf = FALSE,
                                      save.eps = FALSE, save.jpg = FALSE, ...) {
  Mediation.Results <- mediation(x = x, mediator = mediator, dv = dv, conf.level = .95)
  observed.c <- Mediation.Results$Y.on.X$Regression.Table[2, 1]
  observed.c.prime <- Mediation.Results$Y.on.X.and.M$Regression.Table[2, 1]
  max.possible.c <- sqrt(var(dv))/sqrt(var(x))
  if (observed.c < 0) max.possible.c <- -max.possible.c
  if (width < 1) width <- .5*(1 - width)
  if (width > 1) width <- .5*(1 + width)
  if (save.pdf == TRUE) {
    if (save.eps == TRUE) stop("Only one file format for saving figure may be used at a time (you have both PDF and EPS specified).")
    if (save.jpg == TRUE) stop("Only one file format for saving figure may be used at a time (you have both PDF and JPG specified).")
  }
  if (save.eps == TRUE) {
    if (save.jpg == TRUE) stop("Only one file format for saving figure may be used at a time (you have both EPS and JPG specified).")
  }
  if (save.pdf == TRUE | save.eps == TRUE | save.jpg == TRUE) {
    no.file.name <- FALSE
    if (file == "") {
      file <- "mediation.effect.bar.plot"
      no.file.name <- TRUE
    }
  }
  if (save.pdf == TRUE) pdf(file = paste(file, ".pdf", sep = ""), ...)
  # EPS output needs the postscript device; the original jpeg() call would
  # silently have written a JPEG with an .eps extension.
  if (save.eps == TRUE) postscript(file = paste(file, ".eps", sep = ""), ...)
  if (save.jpg == TRUE) jpeg(filename = paste(file, ".jpg", sep = ""), ...)
  plot(c(-2, 2), seq(0, 1), ylab = "", xlab = "", xaxt = "n", yaxt = "n", bty = "n",
       type = "n", main = main, ...)
  segments(x0 = -.5*width, y0 = 0, x1 = -.5*width, y1 = 1)
  segments(x0 = .5*width, y0 = 0, x1 = .5*width, y1 = 1)
  segments(x0 = .5*width, y0 = 0, x1 = -.5*width, y1 = 0)
  segments(x0 = .5*width, y0 = 1, x1 = -.5*width, y1 = 1)
  segments(x0 = .5*width, y0 = observed.c/max.possible.c, x1 = -.5*width, y1 = observed.c/max.possible.c)
  segments(x0 = .5*width, y0 = observed.c.prime/max.possible.c, x1 = -.5*width, y1 = observed.c.prime/max.possible.c)
  rect(xleft = -.5*width, ybottom = 0, xright = .5*width, ytop = observed.c.prime/max.possible.c,
       density = 10, angle = 45, border = NA)
  rect(xleft = -.5*width, ybottom = observed.c.prime/max.possible.c, xright = .5*width,
       ytop = observed.c/max.possible.c, density = 10, angle = 135, border = NA)
  # The adjustments are applied once: the default position is used when the
  # user passes 0, otherwise the user value offsets the default. The original
  # pair of independent if() blocks applied the default twice.
  if (left.text.adj == 0) {
    left.text.adj <- -.5*width - (.5*width/3)
  } else {
    left.text.adj <- -.5*width - (.5*width/3) + left.text.adj
  }
  if (right.text.adj == 0) {
    right.text.adj <- .5*width + (.5*width/20)
  } else {
    right.text.adj <- .5*width + (.5*width/20) + right.text.adj
  }
  use.this <- round(max.possible.c, rounding)
  text(x = right.text.adj*1.3, y = 1, bquote(paste(plain("max possible"), phantom(x), italic(c) == .(use.this))))
  use.this <- round(observed.c, rounding)
  text(x = left.text.adj, y = observed.c/max.possible.c, bquote(paste(plain(observed), phantom(x), italic(c) == .(use.this))))
  use.this <- round(observed.c.prime, rounding)
  text(x = left.text.adj, y = observed.c.prime/max.possible.c, bquote(paste(plain(observed), phantom(x), italic(c), phantom(x), plain(prime) == .(use.this))))
  use.this <- round(observed.c - observed.c.prime, rounding)
  text(x = right.text.adj, y = observed.c/max.possible.c - observed.c.prime/max.possible.c, bquote(italic(ab) == .(use.this)))
  segments(x0 = right.text.adj*.6, y0 = observed.c/max.possible.c, x1 = right.text.adj*.6, y1 = observed.c.prime/max.possible.c)
  segments(x0 = right.text.adj*.6, y0 = observed.c/max.possible.c, x1 = right.text.adj*.55, y1 = observed.c/max.possible.c)
  segments(x0 = right.text.adj*.6, y0 = observed.c.prime/max.possible.c, x1 = right.text.adj*.55, y1 = observed.c.prime/max.possible.c)
  text(x = right.text.adj*.8, y = 0, "zero")
  if (save.pdf == TRUE) {
    dev.off()
    if (no.file.name == TRUE) print(paste("'mediation.effect.bar.plot.pdf' file saved at the directory", getwd()))
  }
  if (save.eps == TRUE) {
    dev.off()
    if (no.file.name == TRUE) print(paste("'mediation.effect.bar.plot.eps' file saved at the directory", getwd()))
  }
  if (save.jpg == TRUE) {
    dev.off()
    if (no.file.name == TRUE) print(paste("'mediation.effect.bar.plot.jpg' file saved at the directory", getwd()))
  }
}
tar_test("tar_network() works", { tar_script( list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) ) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), targets_only = TRUE ) out$vertices <- out$vertices[order(out$vertices$name), ] rownames(out$vertices) <- NULL exp <- data_frame( name = c("z", "y1", "y2"), type = "stem", status = "outdated", seconds = NA_real_, bytes = NA_real_, children = NA_real_ ) exp <- exp[order(exp$name), ] rownames(exp) <- NULL expect_equiv(out$vertices, exp) out$edges <- out$edges[order(out$edges$from), ] rownames(out$edges) <- NULL exp <- data_frame(from = c("y1", "y2"), to = "z") exp <- exp[order(exp$from), ] rownames(exp) <- NULL expect_equiv(out$edges, exp) }) tar_test("targets_only = FALSE", { tar_script({ x <- 1L envir <- environment() tar_option_set(envir = envir) list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) }) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), targets_only = FALSE ) expect_true("x" %in% out$vertices$name) }) tar_test("allow", { tar_script({ x <- 1L envir <- environment() tar_option_set(envir = envir) list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) }) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), allow = "z" ) expect_equal(out$vertices$name, "z") }) tar_test("exclude", { tar_script({ x <- 1L envir <- environment() tar_option_set(envir = envir) list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) }) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), targets_only = TRUE, exclude = c("y1", "z") ) expect_equal(out$vertices$name, "y2") }) tar_test("names", { tar_script({ x <- 1L envir <- environment() tar_option_set(envir = envir) list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) }) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), names = "y1", targets_only = TRUE ) expect_equal(out$vertices$name, "y1") }) tar_test("names and shortcut", { tar_script({ x <- 1L envir <- environment() tar_option_set(envir = envir) list( tar_target(y1, 1 + 1), tar_target(y2, 1 + 1), tar_target(z, y1 + y2) ) }) tar_make(callr_function = NULL) out <- tar_network( callr_function = NULL, callr_arguments = list(show = FALSE), names = "z", targets_only = TRUE, shortcut = TRUE ) expect_equal(out$vertices$name, "z") }) tar_test("custom script and store args", { skip_on_cran() expect_equal(tar_config_get("script"), path_script_default()) expect_equal(tar_config_get("store"), path_store_default()) tar_script(tar_target(x, "y"), script = "example/script.R") out <- tar_network( script = "example/script.R", store = "example/store", callr_function = NULL ) expect_true(is.list(out)) expect_false(file.exists("_targets.yaml")) expect_equal(tar_config_get("script"), path_script_default()) expect_equal(tar_config_get("store"), path_store_default()) expect_false(file.exists(path_script_default())) expect_false(file.exists(path_store_default())) expect_true(file.exists("example/script.R")) expect_false(file.exists("example/store")) tar_config_set(script = "x") expect_equal(tar_config_get("script"), "x") expect_true(file.exists("_targets.yaml")) }) tar_test("custom script and store args with callr function", { skip_on_cran() expect_equal(tar_config_get("script"), path_script_default()) expect_equal(tar_config_get("store"), path_store_default()) tar_script(tar_target(x, "y"), script = 
"example/script.R") out <- tar_network( script = "example/script.R", store = "example/store" ) expect_true(is.list(out)) expect_false(file.exists("_targets.yaml")) expect_equal(tar_config_get("script"), path_script_default()) expect_equal(tar_config_get("store"), path_store_default()) expect_false(file.exists(path_script_default())) expect_false(file.exists(path_store_default())) expect_true(file.exists("example/script.R")) expect_false(file.exists("example/store")) tar_config_set(script = "x") expect_equal(tar_config_get("script"), "x") expect_true(file.exists("_targets.yaml")) })
congruent_hbds_model = function(age_grid, PSR, PDR, lambda_psi,
                                lambda = NULL, mu = NULL, psi = NULL, Reff = NULL, removal_rate = NULL,
                                lambda0 = NULL, CSA_ages = NULL, CSA_pulled_probs = NULL, CSA_PSRs = NULL,
                                splines_degree = 1, ODE_relative_dt = 0.001, ODE_relative_dy = 1e-4){
    NCSA = (if(is.null(CSA_ages)) 0 else length(CSA_ages))
    if((NCSA==0) && (!is.null(CSA_pulled_probs)) && (length(CSA_pulled_probs)>0)) return(list(success=FALSE, error="No CSA ages were provided, but CSA_pulled_probs were"))
    if((NCSA>0) && is.null(CSA_pulled_probs)) return(list(success=FALSE, error="Missing CSA_pulled_probs"))
    if((NCSA>0) && (length(CSA_pulled_probs)!=NCSA)) return(list(success=FALSE, error=sprintf("Expected %d CSA_pulled_probs, but instead got %d",NCSA,length(CSA_pulled_probs))))
    if((NCSA>0) && is.null(CSA_PSRs)) return(list(success=FALSE, error="Missing CSA_PSRs"))
    if((NCSA>0) && (length(CSA_PSRs)!=NCSA)) return(list(success=FALSE, error=sprintf("Expected %d CSA_PSRs, but instead got %d",NCSA,length(CSA_PSRs))))
    if(is.null(CSA_ages)){
        CSA_ages         = numeric(0)
        CSA_pulled_probs = numeric(0)
        CSA_PSRs         = numeric(0)
    }
    NG = length(age_grid)
    if(is.null(PSR)){
        PSR = rep(0,times=NG)
    }else if(length(PSR)==1){
        PSR = rep(PSR,times=NG)
    }else if(length(PSR)!=NG){
        return(list(success=FALSE, error=sprintf("Expected %d PSR values, but instead got %d",NG,length(PSR))))
    }
    if(is.null(PDR)){
        PDR = rep(0,times=NG)
    }else if(length(PDR)==1){
        PDR = rep(PDR,times=NG)
    }else if(length(PDR)!=NG){
        return(list(success=FALSE, error=sprintf("Expected %d PDR values, but instead got %d",NG,length(PDR))))
    }
    if(is.null(lambda_psi)){
        lambda_psi = rep(0,times=NG)
    }else if(length(lambda_psi)==1){ # fixed: previously tested length(psi), so a scalar lambda_psi was never recycled
        lambda_psi = rep(lambda_psi,times=NG)
    }else if(length(lambda_psi)!=NG){
        return(list(success=FALSE, error=sprintf("Expected %d lambda_psi values, but instead got %d",NG,length(lambda_psi))))
    }
    if(is.null(lambda) && is.null(mu) && is.null(psi) && is.null(Reff) && is.null(removal_rate)) return(list(success=FALSE, error="Expecting either lambda, mu, psi, Reff or removal_rate"))
    if(sum(!c(is.null(lambda),is.null(mu),is.null(psi),is.null(Reff),is.null(removal_rate)))>1) return(list(success=FALSE, error="Only one of lambda, mu, psi, Reff or removal_rate must be provided"))
    if((!is.null(lambda)) && (!is.null(lambda0))) return(list(success=FALSE, error="lambda0 must not be provided if lambda is provided"))
    if((!is.null(mu)) && is.null(lambda0)) return(list(success=FALSE, error="lambda0 must be provided when mu is provided"))
    if((!is.null(psi)) && (!is.null(lambda0))) return(list(success=FALSE, error="lambda0 must not be provided if psi is provided"))
    if((!is.null(Reff)) && is.null(lambda0)) return(list(success=FALSE, error="lambda0 must be provided when Reff is provided"))
    if((!is.null(removal_rate)) && is.null(lambda0)) return(list(success=FALSE, error="lambda0 must be provided when removal_rate is provided"))
    if(!is.null(lambda)){
        if(length(lambda)==1){
            lambda = rep(lambda,times=NG)
        }else if(length(lambda)!=NG){
            return(list(success=FALSE, error=sprintf("Expected %d lambda values, but instead got %d",NG,length(lambda))))
        }
        if(NCSA>0) return(list(success=FALSE, error="Providing lambda to define a model is only available in the absence of CSAs"))
    }
    if(!is.null(mu)){
        if(length(mu)==1){
            mu = rep(mu,times=NG)
        }else if(length(mu)!=NG){
            return(list(success=FALSE, error=sprintf("Expected %d mu values, but instead got %d",NG,length(mu))))
        }
        if(NCSA>0) return(list(success=FALSE, error="Providing mu to define a model is only available in the absence of CSAs"))
    }
    if(!is.null(psi)){
        if(length(psi)==1){
            psi = rep(psi,times=NG)
        }else if(length(psi)!=NG){
            return(list(success=FALSE, error=sprintf("Expected %d psi values, but instead got %d",NG,length(psi))))
        }
    }
    if(!is.null(Reff)){
        if(length(Reff)==1){
            Reff = rep(Reff,times=NG)
        }else if(length(Reff)!=NG){
            return(list(success=FALSE, error=sprintf("Expected %d Reff values, but instead got %d",NG,length(Reff))))
        }
        if(NCSA>0) return(list(success=FALSE, error="Providing Reff to define a model is only available in the absence of CSAs"))
    }
    if(!is.null(removal_rate)){
        if(length(removal_rate)==1){
            removal_rate = rep(removal_rate,times=NG)
        }else if(length(removal_rate)!=NG){
            return(list(success=FALSE, error=sprintf("Expected %d removal_rate values, but instead got %d",NG,length(removal_rate))))
        }
        if(NCSA>0) return(list(success=FALSE, error="Providing removal_rate to define a model is only available in the absence of CSAs"))
    }
    if(!(splines_degree %in% c(1,2,3))) return(list(success = FALSE, error = sprintf("Invalid splines_degree (%d): Expected one of 1,2,3.",splines_degree)))
    results = get_congruent_HBDS_CPP(CSA_ages            = CSA_ages,
                                     CSA_pulled_probs    = CSA_pulled_probs,
                                     CSA_PSRs            = CSA_PSRs,
                                     age_grid            = age_grid,
                                     PSRs                = PSR,
                                     PDRs                = PDR,
                                     lambda_psis         = lambda_psi,
                                     lambdas             = (if(is.null(lambda)) numeric(0) else lambda),
                                     mus                 = (if(is.null(mu)) numeric(0) else mu),
                                     psis                = (if(is.null(psi)) numeric(0) else psi),
                                     Reffs               = (if(is.null(Reff)) numeric(0) else Reff),
                                     removal_rates       = (if(is.null(removal_rate)) numeric(0) else removal_rate),
                                     lambda0             = (if(is.null(lambda0)) 0 else lambda0),
                                     splines_degree      = splines_degree,
                                     ODE_relative_dt     = ODE_relative_dt,
                                     ODE_relative_dy     = ODE_relative_dy,
                                     runtime_out_seconds = -1)
    if(!results$success) return(list(success=FALSE, error=results$error))
    return(list(success       = TRUE,
                valid         = results$valid,
                ages          = age_grid,
                lambda        = results$lambdas,
                mu            = results$mus,
                psi           = results$psis,
                lambda_psi    = results$lambda_psis,
                Reff          = results$Reffs,
                removal_rate  = results$removal_rates,
                Pmissing      = results$Pmissings,
                CSA_probs     = results$CSA_probs,
                CSA_Pmissings = results$CSA_Pmissings))
}
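## Minimal sketch of calling congruent_hbds_model() (castor); the age grid and
## rate values are arbitrary illustration numbers. Supplying lambda (and no
## lambda0) pins down one member of the congruence class, per the checks above.
age_grid <- seq(0, 10, length.out = 50)
model <- congruent_hbds_model(age_grid   = age_grid,
                              PSR        = 1,    # pulled speciation rate
                              PDR        = 0,    # pulled diversification rate
                              lambda_psi = 0.1,
                              lambda     = 1.5)
if (model$success) head(data.frame(age = model$ages, lambda = model$lambda, mu = model$mu))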
bmdplotwithgradient <- function(extendedres, BMDtype = c("zSD", "xfold"), xmin, xmax, y0shift = TRUE, facetby, facetby2, shapeby, npoints = 50, line.size, point.size = 1, ncol4faceting, limits4colgradient, lowercol = "darkblue", uppercol = "darkred", add.label = FALSE, label.size = 2, BMD_log_transfo = FALSE) { BMDtype <- match.arg(BMDtype, c("zSD", "xfold")) if (missing(extendedres) | !is.data.frame(extendedres)) stop("The first argument of bmdplotwithgradient must be a dataframe (see ?bmdplotwithgradient for details).") cnames <- colnames(extendedres) if (BMDtype == "zSD") { if (any(!is.element(c("id", "model", "b", "c", "d", "e", "f", "BMD.zSD"), cnames))) stop("The first argument of bmdplotwithgradient must be a dataframe containing at least columns named id, model, b, c, d, e, f and BMD.zSD.") BMD2plot <- data.frame(x = extendedres$BMD.zSD, id = extendedres$id) } else { if (any(!is.element(c("id", "model", "b", "c", "d", "e", "f", "BMD.xfold"), cnames))) stop("The first argument of bmdplotwithgradient must be a dataframe containing at least columns named id, model, b, c, d, e, f and BMD.xfold.") BMD2plot <- data.frame(x = extendedres$BMD.xfold, id = extendedres$id) } if (BMD_log_transfo) { if (missing(xmin)) { xmin <- min(BMD2plot$x[is.finite(BMD2plot$x) & BMD2plot$x != 0]) } else { if (xmin == 0) { warning(strwrap(prefix = "\n", initial = "\n", "When using a log scale for the BMD plot, it is not possible to fix xmin at 0. If the default value does not suit you, you can define a strictly positive value for xmin.")) xmin <- min(BMD2plot$x[is.finite(BMD2plot$x) & BMD2plot$x != 0]) } } } else { if (missing(xmin)) xmin <- 0 } if (missing(xmax)) { xmax <- max(BMD2plot$x[is.finite(BMD2plot$x)]) } if (!missing(shapeby)) { if (!is.character(shapeby)) stop("shapeby should be a character string for the name of the column coding for the point shape.") if (!is.element(shapeby, cnames)) stop("shapeby should be a character string corresponding to the name of a column of extendedres, the dataframe given in input.") BMD2plot$shapeby <- extendedres[, shapeby] } ntot <- nrow(BMD2plot) if (!missing(facetby)) { if (!is.character(facetby)) stop("facetby should be a character string for the name of the column used for facetting.") if (!is.element(facetby, cnames)) stop("facetby should be a character string corresponding to the name of a column of extendedres, the dataframe given in input.") BMD2plot$facetby <- extendedres[, facetby] if (!missing(facetby2)) { if (!is.character(facetby2)) stop("facetby2 should be a character string for the name of the column used for facetting.") if (!is.element(facetby2, cnames)) stop("facetby2 should be a character string corresponding to the name of a column of extendedres, the dataframe given in input.") BMD2plot$facetby2 <- extendedres[, facetby2] BMD2plot$group <- paste(extendedres[, facetby], extendedres[, facetby2], sep = "_") } else { BMD2plot$group <- BMD2plot$facetby } if (missing(line.size)) line.size <- 24 / max(table(BMD2plot$group)) uniqueby <- unique(BMD2plot$group) n.uniqueby <- length(uniqueby) BMD2plot$ECDF <- rep(0, ntot) for (i in 1:n.uniqueby) { indi <- which(BMD2plot$group == uniqueby[i]) ntoti <- length(indi) BMD2plot$ECDF[indi] <- (rank(BMD2plot$x[indi], ties.method = "first") - 0.5) / ntoti } g <- ggplot(data = BMD2plot, mapping = aes_(x = quote(x), y = quote(ECDF), label = quote(id))) if (missing(facetby2)) g <- g + facet_wrap(~ facetby) else g <- g + facet_grid(facetby2 ~ facetby) } else { if (missing(line.size)) line.size <- 24 / nrow(BMD2plot) 
BMD2plot$ECDF <- (rank(BMD2plot$x, ties.method = "first") - 0.5) / ntot g <- ggplot(data = BMD2plot, mapping = aes_(x = quote(x), y = quote(ECDF), label = quote(id))) } if (BMD_log_transfo) { x2plot <- 10^seq(log10(xmin), log10(xmax), length.out = npoints) } else { x2plot <- seq(xmin, xmax, length.out = npoints) } ns <- nrow(extendedres) N <- ns * npoints curves2plot <- data.frame(x = rep(x2plot, ns), id = rep(BMD2plot$id, each = npoints), ECDF = rep(BMD2plot$ECDF, each = npoints), signal = numeric(length = N)) for (i in 1:ns) { modeli <- extendedres$model[i] if (modeli == "linear") { b <- extendedres$b[i] d <- extendedres$d[i] curves2plot$signal[(i-1)*npoints + 1:npoints] <- flin(x2plot, b = extendedres$b[i], d = extendedres$d[i]) - extendedres$y0[i]*y0shift } else if (modeli == "exponential") { curves2plot$signal[(i-1)*npoints + 1:npoints] <- fExpo(x2plot, b = extendedres$b[i], d = extendedres$d[i], e = extendedres$e[i]) - extendedres$y0[i]*y0shift } else if (modeli == "Hill") { curves2plot$signal[(i-1)*npoints + 1:npoints] <- fHill(x2plot, b = extendedres$b[i], c = extendedres$c[i], d = extendedres$d[i], e = extendedres$e[i]) - extendedres$y0[i]*y0shift } else if (modeli == "Gauss-probit") { curves2plot$signal[(i-1)*npoints + 1:npoints] <- fGauss5p(x2plot, b = extendedres$b[i], c = extendedres$c[i], d = extendedres$d[i], e = extendedres$e[i], f = extendedres$f[i]) - extendedres$y0[i]*y0shift } else if (modeli == "log-Gauss-probit") { curves2plot$signal[(i-1)*npoints + 1:npoints] <- fLGauss5p(x2plot, b = extendedres$b[i], c = extendedres$c[i], d = extendedres$d[i], e = extendedres$e[i], f = extendedres$f[i]) - extendedres$y0[i]*y0shift } } if (!missing(facetby)) { curves2plot$facetby <- rep(extendedres[, facetby], each = npoints) if (!missing(facetby2)) { curves2plot$facetby2 <- rep(extendedres[, facetby2], each = npoints) } } gg <- g + geom_line(data = curves2plot, mapping = aes_(x = quote(x), y = quote(ECDF), group = quote(id), color = quote(signal)), size = line.size) if (!missing(facetby)) { if (!missing(facetby2)) { gg <- gg + facet_grid(facetby2 ~ facetby) } else { if (missing(ncol4faceting)) { gg <- gg + facet_wrap(~ facetby) } else { gg <- gg + facet_wrap(~ facetby, ncol = ncol4faceting) } } } if (missing(limits4colgradient)) { gg <- gg + scale_colour_gradient2(low = lowercol, mid = "white", high = uppercol, midpoint = 0, space = "Lab", na.value = "grey50", guide = "colourbar", aesthetics = "colour") } else { gg <- gg + scale_colour_gradient2(low = lowercol, mid = "white", high = uppercol, midpoint = 0, space = "Lab", na.value = "grey50", guide = "colourbar", aesthetics = "colour", limits = limits4colgradient) } if (!missing(shapeby)) { gg <- gg + geom_point(data = BMD2plot, mapping = aes_(shape = quote(shapeby)), size = point.size) } else { gg <- gg + geom_point(data = BMD2plot, size = point.size) } gg <- gg + theme_classic() if(add.label) { if (!missing(shapeby)) warning(strwrap(prefix = "\n", initial = "\n", "The type of points will not be seen when points are replaced by labels. You should omit it in this case.")) gg <- gg + geom_label(size = label.size) } if (BMD_log_transfo) gg <- gg + scale_x_log10() gg <- gg + xlab("BMD") return(gg) }
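## Sketch of the intended workflow feeding bmdplotwithgradient(); the function
## names below follow the DRomics documentation (which also defines flin(),
## fExpo(), fHill(), fGauss5p(), fLGauss5p()) and are assumptions here, so treat
## this as a hedged outline rather than runnable code:
## library(DRomics)
## f <- drcfit(itemselect(microarraydata("data.txt")))  # dose-response fits
## r <- bmdcalc(f)                                       # adds BMD.zSD / BMD.xfold
## extendedres <- r$res
## bmdplotwithgradient(extendedres, BMDtype = "zSD", BMD_log_transfo = TRUE)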
googlesheets4::gs4_deauth() req <- googlesheets4::request_generate( endpoint = "sheets.spreadsheets.get", params = list( spreadsheetId = "DOES_NOT_EXIST", fields = "spreadsheetId" ) ) resp <- googlesheets4::request_make(req) stopifnot(httr::status_code(resp) == 404) saveRDS( gargle:::redact_response(resp), testthat::test_path( "fixtures", "sheets-spreadsheets-get-nonexistent-sheet-id_404.rds" ), version = 2 ) gargle::response_process(resp)
fastFilter <- function(RNA, lowest_percentile_mean = 0.2, lowest_percentile_variance = 0.2, var.func = "var"){
  RNA = as.matrix(RNA)
  rowIQRs <- function(eSet) {
    numSamp <- ncol(eSet)
    lowQ <- rowQ(eSet, floor(0.25 * numSamp))
    upQ <- rowQ(eSet, ceiling(0.75 * numSamp))
    upQ - lowQ
  }
  varFilter <- function(eset, var.cutoff = 0.5, filterByQuantile = TRUE, var.func) {
    # Test the value of var.func (a character string such as "var" or "IQR");
    # the original deparse(substitute(var.func)) always yielded "var.func" here,
    # so the fast rowIQRs() branch was unreachable.
    if (is.character(var.func) && var.func == "IQR") {
      message("Using row-wise IQR for calculating the variances.")
      vars <- rowIQRs(eset)
    } else {
      message("Calculating the variances.")
      vars <- apply(eset, 1, var.func)
    }
    if (filterByQuantile) {
      if (0 < var.cutoff && var.cutoff < 1) {
        quant = quantile(vars, probs = var.cutoff)
        selected = !is.na(vars) & vars > quant
      } else stop("Cutoff Quantile has to be between 0 and 1.")
    } else {
      selected <- !is.na(vars) & vars > var.cutoff
    }
    return(selected)
  }
  message("Note: For RNA data, we assume the input matrix (data frame) is arranged with:")
  message(" Rows: Genes; Columns: Samples.")
  geneID = rownames(RNA)
  percentile = lowest_percentile_mean
  if (percentile > 0){
    RNAmean = apply(RNA, 1, mean)
    RNA_filtered1 = RNA[RNAmean > quantile(RNAmean, percentile), ]
    geneID_filtered1 = geneID[RNAmean > quantile(RNAmean, percentile)]
  } else {
    RNA_filtered1 = RNA
    geneID_filtered1 = geneID
  }
  message(sprintf("(%d genes, %d samples) after removing the lowest %.2f%% by mean expression.",
                  dim(RNA_filtered1)[1], dim(RNA_filtered1)[2], percentile*100))
  percentile = lowest_percentile_variance
  if (percentile > 0){
    if (dim(RNA_filtered1)[2] > 3){
      index <- varFilter(eset = RNA_filtered1, var.cutoff = percentile, var.func = var.func)
      RNA_filtered2 = RNA_filtered1[index, ]
      geneID_filtered2 = geneID_filtered1[index]
    } else {
      message("Cannot compute the order statistics on a matrix with fewer than 4 columns; skipping the variance filter.")
      RNA_filtered2 = RNA_filtered1
      geneID_filtered2 = geneID_filtered1
    }
  } else {
    RNA_filtered2 = RNA_filtered1
    geneID_filtered2 = geneID_filtered1
  }
  message(sprintf("(%d genes, %d samples) after removing the lowest %.2f%% by expression variance.",
                  dim(RNA_filtered2)[1], dim(RNA_filtered2)[2], lowest_percentile_variance*100))
  return(RNA_filtered2)
}
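## Self-contained check of fastFilter() on simulated expression data.
set.seed(7)
RNA <- matrix(rnorm(200 * 10, mean = 5), nrow = 200,
              dimnames = list(paste0("gene", 1:200), paste0("sample", 1:10)))
filtered <- fastFilter(RNA, lowest_percentile_mean = 0.2,
                       lowest_percentile_variance = 0.2)
dim(filtered)  # about 200 * 0.8 * 0.8 = 128 genes retained, 10 samples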
context("pocket_delete") test_that("missing consumer key causes error", { expect_error( pocket_delete(item_ids = c("foobarid"), consumer_key = "", access_token = "faketoken"), regexp = "^POCKET_CONSUMER_KEY does not exist as environment variable.", class = "usethis_error" ) }) test_that("missing access token causes error", { expect_error( pocket_delete(item_ids = c("foobarid"), consumer_key = "fakekey", access_token = ""), regexp = "^POCKET_ACCESS_TOKEN does not exist as environment variable.", class = "usethis_error" ) }) with_mock_api({ test_that("pocket_delete - success generates message", { time_stub <- "2020-03-14 12:51:02 CET" with_mock( Sys.time = function() time_stub, expect_message( pocket_delete(item_ids = c("foobarid"), consumer_key = "fakekey", access_token = "faketoken"), regexp = "Action was successful for the items: foobarid" ) ) }) }) with_mock_api({ test_that("pocket_delete - two successes", { time_stub <- "2020-03-14 12:51:02 CET" with_mock( Sys.time = function() time_stub, expect_message( pocket_delete(item_ids = c("faz", "bar"), consumer_key = "fakekey", access_token = "faketoken"), regexp = "Action was successful for the items: faz, bar" ) ) }) }) with_mock_api({ test_that("pocket_delete - one success, one error", { time_stub <- "2020-03-14 12:51:02 CET" with_mock( Sys.time = function() time_stub, expect_warning( pocket_delete(item_ids = c("foo", "bar"), consumer_key = "fakekey", access_token = "faketoken"), regexp = "Action on bar failed with error: some error occurred" ) ) }) })
library(lme4) test_that("Checking for random/fixed effects works", { m <- glmer(bush ~ 1 + edu + (black|state), data = polls, family = binomial(link = "logit")) expect_error(extract_eq(m)) }) d <- arrests totes <- tapply(d$arrests, d$precinct, sum) tot_arrests <- data.frame(precinct = as.numeric(names(totes)), total_arrests = totes) d <- merge(d, tot_arrests, by = "precinct") test_that("colorizing works", { suppressWarnings( m <- lme4::glmer(stops ~ eth + total_arrests + (1|precinct), data = d, family = poisson(link = "log")) ) expect_snapshot_output( extract_eq( m, swap_var_names = c( stops = "Stops", "eth" = "Ethnicity", "total_arrests" = "Total Arrests" ), var_colors = c( eth = "red", stops = "blue" ), var_subscript_colors = c( eth = "purple" ) ) ) expect_warning( extract_eq( m, swap_var_names = c( stops = "Stops", "eth" = "Ethnicity", "total_arrests" = "Total Arrests" ), var_colors = c( stops = "blue", eth = "red" ), var_subscript_colors = c( eth = "purple" ), greek_colors = rainbow(7) ) ) }) test_that("Renaming Variables works", { suppressWarnings( m6 <- lme4::glmer(stops ~ eth + total_arrests + (1|precinct), data = d, family = poisson(link = "log")) ) expect_snapshot_output( extract_eq( m6, swap_var_names = c( "eth" = "Ethnicity", "total_arrests" = "Total Arrests" ), swap_subscript_names = c( "black" = "Black", "hispanic" = "Hispanic/Latino", "white" = "White" ) ) ) }) test_that("Standard Poisson regression models work", { p1 <- glmer(stops ~ eth + (1|precinct), data = arrests, family = poisson(link = "log")) expect_snapshot_output(extract_eq(p1)) suppressWarnings( p_complicated <- glmer(stops ~ eth*total_arrests + (eth|precinct), data = d, family = poisson(link = "log")) ) expect_snapshot_output(extract_eq(p_complicated)) }) test_that("Poisson regression models with an offset work", { p_offset1 <- glmer(stops ~ eth + (1|precinct), data = arrests, family = poisson(link = "log"), offset = log(arrests)) expect_snapshot_output(extract_eq(p_offset1)) suppressWarnings( p_offset_complicated <- glmer(stops ~ eth*total_arrests + (eth|precinct), data = d, family = poisson(link = "log"), offset = log(arrests)) ) expect_snapshot_output(extract_eq(p_offset_complicated)) }) test_that("Binomial Logistic Regression models work", { m <- glmer(bush ~ 1 + black + female + edu + (black|state), data = polls, family = binomial(link = "logit")) expect_snapshot_output(extract_eq(m)) })
robregbelow <- function(behavior,phaseX, v1,v2){ t1<-table(phaseX) tmaxA<-t1[names(t1)==v1] startA<-match(v1,phaseX) endA<-tmaxA+startA-1 A<-behavior[startA:endA] meanA=mean(A,na.rm=T) x1=(c(seq(1:tmaxA))) regA<-rlm(A~x1) rA<-residuals(regA) yA<-regA$coefficients[1] BetaA<-regA$coefficient[2] tmaxB<-t1[names(t1)==v2] startB<-match(v2,phaseX) endB<-tmaxB+startB-1 B=(behavior[startB:endB]) x2=(c(seq(1:tmaxB))) cdcl<-c(A,NA,B) y<-na.omit(cdcl) total=length(y) iv=(1:total) end<-which(is.na(cdcl)) iv<-insert(iv,NA,end) x2=iv[end+1:total] regc<-rlm(cdcl~iv) x2<-na.omit(x2) Byhat<-yA+iv*BetaA Byhat<-na.omit(Byhat) yhatA<-Byhat[startA:endA] startB<-startB-1 endB<-endB-1 yhatB<-Byhat[startB:endB] maxy=which.max(cdcl) dzone<- y< Byhat len1=length(A) len2=length(B) pA<-rep(v1,len1) pB<-rep(v2,len2) p<- c(pA,pB) tm<-table(dzone,p) ctbl<-cbind(tm[,v1],tm[,v2]) print(ctbl) print(prop.table(ctbl,1)*100) print(prop.table(ctbl,2)*100) c1<-chisq.test(ctbl,correct=FALSE) f1<-fisher.test(ctbl,alternative = "two.sided") print(c1) print(f1) t1<-table(phaseX) tmaxA<-t1[names(t1)==v1] startA<-match(v1,phaseX) endA<-tmaxA+startA-1 A<-behavior[startA:endA] meanA=mean(A,na.rm=T) x1=(c(seq(1:tmaxA))) regA<-rlm(A~x1) rA<-residuals(regA) yA<-regA$coefficients[1] BetaA<-regA$coefficient[2] tmaxB<-t1[names(t1)==v2] startB<-match(v2,phaseX) endB<-tmaxB+startB-1 B=(behavior[startB:endB]) cdcl<-c(A,NA,B) y<-na.omit(cdcl) total=length(y) iv=(1:total) end<-which(is.na(cdcl)) iv<-insert(iv,NA,end) x2=iv[end+1:total] regc<-rlm(cdcl~iv) x2<-na.omit(x2) Byhat<-yA+x2*BetaA Byhat<-na.omit(Byhat) yhatA<-Byhat[startA:endA] startB<-startB-1 endB<-endB-1 yhatB<-Byhat[startB:endB] maxy=which.max(cdcl) max<-cdcl[maxy]+1 numx<-sum(!is.na(cdcl))+3 par(mfrow=c(3,3)) maxy=which.max(behavior) max<-behavior[maxy]+1 numx<-sum(!is.na(behavior))+3 layout(rbind(1,2), heights=c(4,1)) plot(iv,cdcl, ylim=c(0,max),lwd=2,type="o",col="red", bty="l",xlab="time", ylab="behavior", main="Regression Line" ) abline(reg=regA,col='Blue',lty="dashed") par(mar=c(1, 1, 1, 1)) plot.new() legend("center", c("regression line"),lty=c("dashed"), col = c("blue"), lwd = 1,ncol=2,bty ="n") }
spatial_data <- function(win = spatstat.geom::unit.square(), sim_total = 2, x_case, y_case, samp_case = c("uniform", "MVN", "CSR", "IPP"), samp_control = c("uniform", "systematic","MVN", "CSR","IPP", "clustered"), x_control = NULL, y_control = NULL, n_case = NULL, n_control = NULL, npc_control = NULL, r_case = NULL, r_control = NULL, s_case = NULL, s_control = NULL, l_case = NULL, l_control = NULL, e_control = NULL, ...) { if (length(x_case) != length(y_case)) { stop("There is at least one missing coordinate") } if (length(n_case) == 1) { l <- vector('list', length(x_case)) for (i in 1:length(x_case)) { l[[i]] <- n_case } n_case <- unlist(l) } if (length(r_case) == 1) { l <- vector('list', length(x_case)) for (i in 1:length(x_case)) { l[[i]] <- r_case } r_case <- unlist(l) } if (length(s_case) == 1) { l <- vector('list', length(x_case)) for (i in 1:length(x_case)) { l[[i]] <- s_case } s_case <- unlist(l) } if (length(l_case) == 1) { l <- vector('list', length(x_case)) for (i in 1:length(x_case)) { l[[i]] <- l_case } l_case <- l } if (samp_control == "MVN" & length(n_control) == 1) { l <- vector('list', length(x_control)) for (i in 1:length(x_control)) { l[[i]] <- round(n_control/length(x_control)) } n_control <- unlist(l) } if (length(s_control) == 1) { l <- vector('list', length(x_control)) for (i in 1:length(x_control)) { l[[i]] <- s_control } s_control <- unlist(l) } rcluster_case <- function(x0, y0, rad, n, scalar, lamb, wind, types = "case", ...) { if (samp_case == "uniform"){ x <- spatstat.core::runifdisc(n = n, radius = rad, centre = c(x0, y0), win = wind, ...) } if (samp_case == "MVN"){ x1 <- rep(x0, n) y1 <- rep(y0, n) x2 <- x1 + stats::rnorm(n, 0, scalar) y2 <- y1 + stats::rnorm(n, 0, scalar) x <- spatstat.geom::ppp(x2, y2, window = wind) } if (samp_case == "CSR"){ win_case <- spatstat.geom::disc(radius = rad, centre = c(0.5, 0.5), ...) l <- n / (diff(win_case$xrange)*diff(win_case$yrange)) x <- spatstat.core::rpoispp(lambda = l, win = win_case, ...) x <- spatstat.geom::shift(x, c(x0 - 0.5, y0 - 0.5)) } if (samp_case == "IPP"){ if (class(lamb) != "function") { stop("The argument 'l_case' should be an intensity function") } win_case <- spatstat.geom::disc(radius = rad, centre = c(0.5, 0.5), ...) x <- spatstat.core::rpoispp(lambda = lamb, win = win_case, ...) x <- spatstat.geom::shift(x, c(x0 - 0.5, y0 - 0.5)) } spatstat.geom::marks(x) <- types return(x) } rcluster_control <- function(x0, y0, scalar, n, lamb, ex, nclust, rad, types = "control", wind, ...) { if (samp_control == "uniform"){ x <- spatstat.core::runifpoint(n, win = wind, ...) } if (samp_control == "systematic") { x <- spatstat.geom::rsyst(nx = sqrt(n), win = wind, ...) } if (samp_control == "MVN"){ x1 <- rep(x0, n) y1 <- rep(y0, n) x2 <- x1 + stats::rnorm(n, 0, scalar) y2 <- y1 + stats::rnorm(n, 0, scalar) x <- spatstat.geom::ppp(x2, y2, window = wind) } if (samp_control == "CSR") { l <- n / (diff(wind$xrange)*diff(wind$yrange)) x <- spatstat.core::rpoispp(lambda = l, win = wind, ...) } if (samp_control == "IPP") { if (class(lamb) != "function") { stop("The argument 'l_control' should be an intensity function") } x <- spatstat.core::rpoispp(lambda = lamb, win = wind, ...) } if (samp_control == "clustered") { control_clustering <- function(x0, y0, radius, n) { X <- spatstat.core::runifdisc(n, radius, centre = c(x0, y0)) return(X) } x <- spatstat.core::rNeymanScott(kappa = lamb, expand = ex, rcluster = control_clustering, n = nclust, radius = rad, win = wind, ...) 
} spatstat.geom::marks(x) <- types return(x) } pppCase <- vector('list', length(x_case)) pppControl <- vector('list', length(x_control)) pppList <- vector('list', length(sim_total)) for (i in 1:length(x_case)){ x1 <- rcluster_case(x0 = x_case[i], y0 = y_case[i], rad = r_case[i], n = n_case[i], scalar = s_case[i], lamb = l_case[[i]], wind = win, ...) pppCase[[i]] <- x1 } pppCase <- spatstat.geom::as.solist(pppCase) x <- spatstat.geom::superimpose(pppCase) for (j in 1:sim_total) { if(samp_control == "MVN") { for (i in 1:length(x_control)) { y1 <- rcluster_control(x0 = x_control[i], y0 = y_control[i], radius = NULL, n = n_control[i], scalar = s_control[i], lamb =NULL, wind = win, ...) pppControl[[i]] <- y1 } pppControl <- spatstat.geom::as.solist(pppControl) y <- spatstat.geom::superimpose(pppControl) } else { y <- rcluster_control(x0 = NULL, y0 = NULL, n = n_control, nclust = npc_control, rad = r_control, ex = e_control, lamb = l_control, scalar = NULL, wind = win, ...) } z <- spatstat.geom::superimpose(y, x) spatstat.geom::marks(z) <- as.factor(spatstat.geom::marks(z)) pppList[[j]] <- z } pppList <- spatstat.geom::as.solist(pppList) return(pppList) }
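## Example call of spatial_data(), assuming the spatstat.geom / spatstat.core
## dependencies used above are installed; parameter values are illustrative.
set.seed(1)
sims <- spatial_data(x_case = 0.5, y_case = 0.5,
                     samp_case = "MVN", samp_control = "uniform",
                     n_case = 50, s_case = 0.05,
                     n_control = 200, sim_total = 1)
plot(sims[[1]])  # cases and controls as one marked point pattern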
square.kernel <- function(d, bandwidth) as.numeric(abs(d) <= bandwidth)
gaussian.kernel <- function(d, bandwidth) exp(-(d^2) / (2 * bandwidth^2))
gaussian.square.kernel <- function(d, bandwidth) exp(-(d^2) / (2 * bandwidth^2)) * as.numeric(abs(d) <= bandwidth)
triangular.kernel <- function(d, bandwidth) ((bandwidth - abs(d)) / bandwidth) * as.numeric(abs(d) <= bandwidth)
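## The four kernels evaluated on a shared grid of distances, as a quick sanity
## check: all peak at d = 0 and vanish beyond the bandwidth, except the pure
## Gaussian, which has unbounded support.
d <- seq(-2, 2, by = 0.5)
rbind(square     = square.kernel(d, bandwidth = 1),
      gaussian   = round(gaussian.kernel(d, bandwidth = 1), 3),
      gauss.sq   = round(gaussian.square.kernel(d, bandwidth = 1), 3),
      triangular = triangular.kernel(d, bandwidth = 1))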
makePriceN4Function = function() { makeSingleObjectiveFunction( name = "Price Function N. 4", id = "price04_2d", fn = function(x) { assertNumeric(x, len = 2L, any.missing = FALSE, all.missing = FALSE) (2 * x[1]^3 * x[2] - x[2]^3)^2 + (6 * x[1] - x[2]^2 + x[2])^2 }, par.set = makeNumericParamSet( len = 2L, id = "x", lower = c(-500, -500), upper = c(500, 500), vector = TRUE ), tags = attr(makePriceN4Function, "tags"), global.opt.params = matrix( c(0, 0, 2, 4, 1.464, -2.506), ncol = 2L, byrow = TRUE), global.opt.value = 0 ) } class(makePriceN4Function) = c("function", "smoof_generator") attr(makePriceN4Function, "name") = c("Price Function N. 4") attr(makePriceN4Function, "type") = c("single-objective") attr(makePriceN4Function, "tags") = c("single-objective", "continuous", "differentiable", "non-separable", "non-scalable", "multimodal")
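## Sanity check of the generator, assuming the smoof package (which provides
## makeSingleObjectiveFunction() and makeNumericParamSet()) is attached.
fn <- makePriceN4Function()
fn(c(0, 0))  # 0, a global optimum
fn(c(2, 4))  # also 0: (2 * 2^3 * 4 - 4^3)^2 + (6 * 2 - 4^2 + 4)^2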
varClass <- function(x){ xAttrib <- lapply(x, attributes) p <- ncol(x) x.types <- character(p) for (t.co in 1:p){ if (is.null(xAttrib[[t.co]])){ x.types[t.co] <- 'numeric' } else { x.types[t.co] <- 'factor' } } return(x.types) }
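## varClass() decides 'numeric' vs 'factor' purely from column attributes:
## plain numeric vectors carry no attributes, factors carry levels and class.
df <- data.frame(age = c(21, 35, 42),
                 group = factor(c("a", "b", "a")))
varClass(df)  # "numeric" "factor"
## Caveat: any column with attributes (e.g. a labelled numeric or a Date) is
## reported as 'factor' by this heuristic.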
kRp.POS.tags <- function(lang=get.kRp.env(lang=TRUE), list.classes=FALSE, list.tags=FALSE, tags=c("words", "punct", "sentc")){ if(is.na(sum(match(tags, c("words", "punct", "sentc"))))){ stop(simpleError("Invalid tags declared (must be at least one of \"words\", \"punct\" or \"sentc\")!")) } else {} if(!identical(lang, "kRp")){ lang <- is.supported.lang(lang, support="treetag") } else {} all.POS.tags <- as.list(as.environment(.koRpus.env))[["langSup"]][["kRp.POS.tags"]][["tags"]][[lang]] if(is.null(all.POS.tags) & !identical(lang, "kRp")){ stop(simpleError("No tags found for this language!")) } else { tag.class.def.words <- all.POS.tags[["tag.class.def.words"]] tag.class.def.punct <- all.POS.tags[["tag.class.def.punct"]] tag.class.def.sentc <- all.POS.tags[["tag.class.def.sentc"]] } tag.class.def.words.kRp <- matrix(c( "word.kRp", "word", "Word (kRp internal)", "no.kRp", "number", "Number (kRp internal)", "abbr.kRp", "abbreviation", "Abbreviation (kRp internal)", "unk.kRp", "unknown", "Unknown (kRp internal)" ), ncol=3, byrow=TRUE, dimnames=list(c(),c("tag","wclass","desc"))) tag.class.def.punct.kRp <- matrix(c( ",kRp", "comma", "Comma (kRp internal)", "(kRp", "punctuation", "Opening bracket (kRp internal)", ")kRp", "punctuation", "Closing bracket (kRp internal)", "''kRp", "punctuation", "Quote (kRp internal)", "-kRp", "punctuation", "Punctuation (kRp internal)", "hon.kRp", "punctuation", "Headline begins (kRp internal)", "p.kRp", "punctuation", "Paragraph (kRp internal)" ), ncol=3, byrow=TRUE, dimnames=list(c(),c("tag","wclass","desc"))) tag.class.def.sentc.kRp <- matrix(c( ".kRp", "fullstop", "Sentence ending punctuation (kRp internal)", "hoff.kRp", "fullstop", "Headline ends (kRp internal)" ), ncol=3, byrow=TRUE, dimnames=list(c(),c("tag","wclass","desc"))) tag.class.def.words.uni <- matrix(c( "ADJ", "adjective", "Adjective (universal POS tags)", "ADP", "adposition", "Adposition (universal POS tags)", "ADV", "adverb", "Adverb (universal POS tags)", "AUX", "auxiliary", "Auxiliary (universal POS tags)", "CCONJ", "conjunction", "Coordinating conjunction (universal POS tags)", "DET", "determiner", "Determiner (universal POS tags)", "INTJ", "interjection", "Interjection (universal POS tags)", "NOUN", "noun", "Noun (universal POS tags)", "NUM", "numeral", "Numeral (universal POS tags)", "PART", "particle", "Particle (universal POS tags)", "PRON", "pronoun", "Pronoun (universal POS tags)", "PROPN", "name", "Proper noun (universal POS tags)", "SCONJ", "conjunction", "Subordinating conjunction (universal POS tags)", "SYM", "symbol", "Symbol (universal POS tags)", "VERB", "verb", "Verb (universal POS tags)", "X", "other", "Not assigned a real POS category (universal POS tags)" ), ncol=3, byrow=TRUE, dimnames=list(c(),c("tag","wclass","desc"))) tag.class.def.punct.uni <- matrix(c( "PUNCT", "punctuation", "Punctuation (universal POS tags)" ), ncol=3, byrow=TRUE, dimnames=list(c(),c("tag","wclass","desc"))) tag.wanted <- paste("tag.class.def", tags, sep=".") if(identical(lang, "kRp")){ tag.wanted.kRp <- paste(tag.wanted, "kRp", sep=".") } else { tag.wanted.kRp <- c() for(x in tag.wanted){ tag.wanted.kRp <- c(tag.wanted.kRp, x, paste(x, "kRp", sep=".")) } } tag.definition <- c() for(x in tag.wanted.kRp){ stopifnot(exists(x, inherits=FALSE)) tag.definition <- rbind(tag.definition, get(x, inherits=FALSE)) if("words" %in% tags & "tag.class.def.words.kRp" %in% x){ tag.definition <- rbind( tag.definition, tag.class.def.words.uni[!tag.class.def.words.uni[,"tag"] %in% tag.definition[,"tag"],] ) } else {} 
if("punct" %in% tags & "tag.class.def.punct.kRp" %in% x){ tag.definition <- rbind( tag.definition, tag.class.def.punct.uni[!tag.class.def.punct.uni[,"tag"] %in% tag.definition[,"tag"],] ) } else {} } if(isTRUE(list.classes) & !isTRUE(list.tags)){ tag.definition <- unique(tag.definition[,"wclass"]) } else {} if(isTRUE(list.tags)){ tag.definition <- unique(tag.definition[,"tag"]) } else {} return(tag.definition) }
clv.template.controlflow.predict <- function(clv.fitted, verbose, user.newdata, ...){
  # The slot accesses below were mangled by an email-obfuscation artifact
  # ("[email protected]") and are restored to the clv.fitted S4 slots.
  clv.controlflow.check.prediction.params(clv.fitted = clv.fitted)
  if(!is.null(user.newdata)){
    clv.controlflow.check.newdata(clv.fitted = clv.fitted, user.newdata = user.newdata, ...)
    clv.fitted@clv.data <- copy(user.newdata)
    clv.fitted <- clv.model.process.newdata(clv.model = clv.fitted@clv.model, clv.fitted = clv.fitted, verbose = verbose)
  }
  clv.controlflow.predict.check.inputs(clv.fitted = clv.fitted, verbose = verbose, ...)
  dt.predictions <- clv.controlflow.predict.build.result.table(clv.fitted = clv.fitted, verbose = verbose, ...)
  dt.predictions <- clv.model.predict(clv.model = clv.fitted@clv.model, clv.fitted = clv.fitted, dt.predictions = dt.predictions, verbose = verbose, ...)
  setkeyv(dt.predictions, "Id")
  has.actuals <- clv.controlflow.predict.get.has.actuals(clv.fitted, dt.predictions = dt.predictions)
  dt.predictions <- clv.controlflow.predict.add.actuals(clv.fitted = clv.fitted, dt.predictions = dt.predictions, has.actuals = has.actuals, verbose = verbose, ...)
  dt.predictions <- clv.controlflow.predict.post.process.prediction.table(clv.fitted = clv.fitted, has.actuals = has.actuals, dt.predictions = dt.predictions, verbose = verbose, ...)
  dt.predictions[]
  return(dt.predictions)
}
sanitize.plot.dots = function(dots, meaningless) { if (any(names(dots) %in% meaningless)) warning("arguments ", paste(meaningless, collapse = ", "), " will be silently ignored.") for (m in meaningless) dots[[m]] = NULL return(dots) } check.colour = function(col, num = 1, expand = FALSE, labels) { if (is.list(col)) col = unlist(col) if (length(col) %!in% c(1, num)) { if (num == 1) stop(sprintf("%s must be a single colour.", deparse(substitute(col)))) else stop(sprintf("%s must be a single colour or a vector of %d colours.", deparse(substitute(col)), num)) } valid = sapply(col, function(col) tryCatch(is.matrix(col2rgb(col)), error = function(x) { FALSE })) if (any(!valid)) stop("invalid colour identifier(s) in ", deparse(substitute(col)), " :", paste0(" '", col[!valid], "'")) if (expand) { if (length(col) == 1) col = rep(col, num) if (!missing(labels)) { if (is.null(names(col))) col = structure(col, names = labels) else if (!setequal(names(col), labels)) stop("colours specified with unknown names", paste0(" '", col[names(col) %!in% labels], "'")) } } return(col) } check.lty = function(lty) { lty.strings = c("blank", "solid", "dashed", "dotted", "dotdash", "longdash", "twodash") if (length(lty) > 1) stop(sprintf("%s must be a single line type identifier.", deparse(substitute(lty)))) if ((lty %!in% 0:6) && (lty %!in% lty.strings)) stop(sprintf("%s is not a valid line type identifier.", deparse(substitute(lty)))) } check.quantile.grid = function(grid) { if (is.logical(grid)) { if (identical(grid, TRUE)) grid = c(0, 0.25, 0.50, 0.75) else grid = NULL } else if (is.probability.vector(grid, zero = TRUE) && (length(grid) >= 1)) { if (anyDuplicated(grid)) warning("duplicated grid points.") } else { stop("the grid is defined by one or more numbers between zero and one.") } return(grid) }
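## Tiny demonstration of the argument checkers. '%!in%' is an internal operator
## of the surrounding package (e.g. bnlearn); it is re-defined here only to
## make the sketch self-contained.
`%!in%` <- function(x, table) !(x %in% table)
check.colour("tomato")                              # validated single colour
check.colour(c("red", "blue"), num = 2,
             expand = TRUE, labels = c("A", "B"))   # named, expanded palette
check.quantile.grid(TRUE)                           # default grid 0, .25, .5, .75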