if (requiet("testthat") && requiet("performance") && requiet("glmmTMB")) {
data(Salamanders)
m1 <- glm(count ~ spp + mined, family = poisson, data = Salamanders)
m2 <- glmmTMB(
count ~ mined + spp + (1 | site),
family = poisson,
data = Salamanders
)
test_that("check_overdispersion", {
expect_equal(
check_overdispersion(m1),
structure(
list(
chisq_statistic = 1873.71012423995,
dispersion_ratio = 2.94608510100621,
residual_df = 636L,
p_value = 3.26607509162498e-122
),
class = "check_overdisp"
),
tolerance = 1e-3
)
})
test_that("check_overdispersion", {
expect_equal(
check_overdispersion(m2),
structure(
list(
chisq_statistic = 1475.87512547128,
dispersion_ratio = 2.32421279601777,
residual_df = 635L,
p_value = 8.41489530177729e-69
),
class = "check_overdisp"
),
tolerance = 1e-3
)
})
}
pmc_internal <-
function(X, alpha = 0.05){
out <- data.frame(
k = integer(),
P.R. = integer(),
P = numeric(),
MESOR = numeric(),
CI.M = numeric(),
Amplitude = numeric(),
Lo.CI.A = numeric(),
Hi.CI.A = numeric(),
PHI = numeric(),
Lo.CI.PHI = numeric(),
Hi.CI.PHI = numeric(),
errMsg = numeric()
)
printP = numeric()
sigma_matrix <- matrix(0, nrow = 3, ncol = 3)
k <- nrow(X)
df <- k - 1
  if (df > 1) {
    beta <- X$amp * cos(X$phi * pi / 180)
    gamma <- -X$amp * sin(X$phi * pi / 180)
beta_hat <- mean(beta)
gamma_hat <- mean(gamma)
sigma_matrix[1,1] <- sum((X$mesor - mean(X$mesor))^2)/df
sigma_matrix[1,2] <- sum((X$mesor - mean(X$mesor)) %*% (beta - mean(beta)))/df
sigma_matrix[1,3] <- sum((X$mesor - mean(X$mesor)) %*% (gamma - mean(gamma)))/df
sigma_matrix[2,2] <- sum((beta - mean(beta))^2)/df
sigma_matrix[2,3] <- sum((beta - mean(beta)) %*% (gamma - mean(gamma)))/df
sigma_matrix[3,3] <- sum((gamma - mean(gamma))^2)/df
    sigma_matrix[lower.tri(sigma_matrix)] <- t(sigma_matrix)[lower.tri(sigma_matrix)]
pr <- mean(X$pr)
mesor <- mean(X$mesor)
acr <- -phsrd(beta, gamma)
amp <- module(beta, gamma)
tval <- qt(1-alpha/2, df)
cim <- tval * sqrt(sigma_matrix[1,1]/k)
c22 <- (sigma_matrix[2,2] * beta_hat^2 + 2*sigma_matrix[2,3]*beta_hat*gamma_hat + sigma_matrix[3,3] * gamma_hat^2) / (k * amp^2)
    if (c22 > 0) {
      cia <- tval * sqrt(c22)
    } else {
      cia <- -0.1
    }
c23 <- (-(sigma_matrix[2,2] - sigma_matrix[3,3]) * (beta_hat*gamma_hat) + sigma_matrix[2,3]*(beta_hat^2 - gamma_hat^2)) / (k * amp^2)
c33 <- (sigma_matrix[2,2] * gamma_hat^2 - 2 * sigma_matrix[2,3] * beta_hat * gamma_hat + sigma_matrix[3,3]*beta_hat^2) / (k * amp^2)
an2 <- amp^2 - (c22*c33 - c23^2) * (tval^2)/c33
    if (an2 < 0) {
      phi1 <- 0
      phi2 <- 0
    } else {
      den <- (amp^2) - c22 * (tval^2)
      an2 <- tval * sqrt(c33) * sqrt(an2)
      an1 <- c23 * (tval^2)
      phi1 <- phase(phix = (an1 + an2) / den, c33, c23, acr)
      phi2 <- phase(phix = (an1 - an2) / den, c33, c23, acr)
    }
r <- sigma_matrix[2,3]/sqrt(sigma_matrix[2,2]*sigma_matrix[3,3])
fval <- k*(k-2)/(2*(k-1) * (1-r^2)) *
(beta_hat^2/sigma_matrix[2,2]
-2*r*beta_hat*gamma_hat/sqrt(sigma_matrix[2,2]*sigma_matrix[3,3])
+gamma_hat^2/sigma_matrix[3,3]
)
p <- pf(fval, df1 = 2, df2 = k - 2, lower.tail = FALSE)
out[1,"k"] <- as.integer(k)
out[1,"P.R."] <- round(mean(X$pr),1)
    printP <- round(p, 4)
    printP[printP < .0005] <- "<.001"
    out[1, "P"] <- printP
out[1,"MESOR"] <- signif(mesor, 6)
out[1,"CI.M"] <- signif(cim, 4)
out[1,"Amplitude"] <- signif(amp, 5)
amp_cia<-amp - cia
out[1,"Lo.CI.A"] <- ifelse(p < alpha, signif(amp_cia, 5), NA)
out[1,"Hi.CI.A"] <- ifelse(p < alpha, signif(amp + cia, 5), NA)
out[1,"PHI"] <- round(acr, 1)
out[1,"Lo.CI.PHI"] <- ifelse(p < alpha, round(phi1, 1), NA)
out[1,"Hi.CI.PHI"] <- ifelse(p < alpha, round(phi2, 1), NA)
if (amp_cia < 0 && p<=alpha) {
out[1,"errMsg"]<-paste("Warn1")
}
    if (amp < 1e-12) {
out[1,"PHI"]<-NA
out[1,"Lo.CI.PHI"] <- NA
out[1,"Hi.CI.PHI"] <- NA
out[1,"errMsg"]<-paste(out[1,"errMsg"],"Warn3")
}
}
else {
out[1,"k"] <- as.integer(k)
out[1,"P.R."] <- NA
out[1,"P"]<-NA
out[1,"MESOR"] <- NA
out[1,"CI.M"] <- NA
out[1,"Amplitude"] <- NA
out[1,"Lo.CI.A"] <- NA
out[1,"Hi.CI.A"] <- NA
out[1,"PHI"] <- NA
out[1,"Lo.CI.PHI"] <- NA
out[1,"Hi.CI.PHI"] <- NA
out[1,"errMsg"]<-paste("Warn2")
}
  return(out)
}
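# A minimal usage sketch (hypothetical data): pmc_internal() expects one row
# per study/subject with columns `amp`, `phi` (acrophase in degrees), `mesor`
# and `pr` (percent rhythm), and relies on the package-internal helpers
# phsrd(), module() and phase() being available:
# X <- data.frame(amp = c(1.1, 0.9, 1.3), phi = c(-120, -110, -130),
#                 mesor = c(10.2, 11.0, 9.5), pr = c(40, 35, 45))
# pmc_internal(X, alpha = 0.05)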
LearnerDens = R6::R6Class("LearnerDens",
inherit = Learner,
public = list(
initialize = function(id, param_set = ParamSet$new(),
predict_types = "cdf", feature_types = character(),
properties = character(), data_formats = "data.table",
packages = character(),
man = NA_character_) {
super$initialize(
id = id, task_type = "dens", param_set = param_set,
predict_types = predict_types, feature_types = feature_types, properties = properties,
data_formats = data_formats, packages = c("mlr3proba", packages), man = man)
}
)
)
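# LearnerDens is the abstract base class for density estimation learners in
# mlr3proba: it fixes task_type = "dens", defaults to the "cdf" predict type
# and appends "mlr3proba" to the required packages; concrete learners subclass
# it and supply their own ParamSet.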
P_Rmask <-
function (x, k)
{
SUM <- 0 * x
for (i in 0:floor(k/2)) SUM <- SUM + (-1)^i/2^k *
exp(lgamma(2 * k - 2 * i + 1) - lgamma(i + 1) -
lgamma(k - i + 1) - lgamma(k - 2 * i + 1)) * x^(k - 2 * i)
return(SUM)
}
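# P_Rmask() evaluates the explicit-sum form of the Legendre polynomial P_k(x),
# computing each coefficient through differences of lgamma() terms so the
# factorials never overflow for large k. Self-contained check:
# P_Rmask(c(-1, 0, 1), k = 3)   # -1  0  1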
describe("metaObserve", isolate({
it("basically works", {
e1 <- environment()
x <- 0
mo <- metaObserve({
x <<- 1
})
mo1 <- metaObserve({
e2 <- environment()
expect_false(identical(e1, e2))
})
shiny:::flushReact()
expect_identical(x, 1)
expect_equal(unclass(withMetaMode(mo())), quote( x <<- 1 ))
})
it("basically works 2", {
e1 <- environment()
x <- 0
mo <- metaObserve2({
e2 <- environment()
expect_false(identical(e1, e2))
x <<- x + 1
metaExpr({
x <<- ..(x + 1)
})
})
shiny:::flushReact()
expect_identical(x, 2)
res <- withMetaMode(mo())
expect_equal(unclass(res), quote( x <<- 4 ))
})
it("obeys scoping rules", {
outer <- environment()
i <- 0
mo <- metaObserve({
inner <- environment()
expect_false(identical(inner, outer))
i <<- i + 1
})
shiny:::flushReact()
expect_identical(i, 1)
mo2 <- metaObserve2({
inner <- environment()
expect_false(identical(inner, outer))
i <<- i + 1
metaExpr({
innermost <- environment()
expect_true(identical(innermost, inner))
i <<- i + 1
})
})
shiny:::flushReact()
expect_identical(i, 3)
withMetaMode(mo2())
expect_identical(i, 4)
})
}))
library(CHNOSZ)
di <- c("Cu+2", "Ni+2", "Co+2", "Mn+2", "Zn+2", "Cd+2")
di1 <- c("Cu(Gly)+", "Ni(Gly)+", "Co(Gly)+", "Mn(Gly)+", "Zn(Gly)+", "Cd(Gly)+")
di2 <- c("Cu(Gly)2", "Ni(Gly)2", "Co(Gly)2", "Mn(Gly)2", "Zn(Gly)2", "Cd(Gly)2")
mo <- c("Au+", "Ag+", "Na+", "Tl+", "Cu+")
mo1 <- c("Au(Gly)", "Ag(Gly)", "Na(Gly)", "Tl(Gly)", "Cu(Gly)")
mo2 <- c("Au(Gly)2-", "Ag(Gly)2-", "Na(Gly)2-", "Tl(Gly)2-", "Cu(Gly)2-")
T <- seq(0, 150, 10)
logK_di1 <- logK_di2 <- logK_mo1 <- logK_mo2 <- list()
for(i in 1:length(di1)) logK_di1[[i]] <- subcrt(c(di[i], "glycinate", di1[i]), c(-1, -1, 1), T = T)$out$logK
for(i in 1:length(di2)) logK_di2[[i]] <- subcrt(c(di[i], "glycinate", di2[i]), c(-1, -2, 1), T = T)$out$logK
for(i in 1:length(mo1)) logK_mo1[[i]] <- subcrt(c(mo[i], "glycinate", mo1[i]), c(-1, -1, 1), T = T)$out$logK
for(i in 1:length(mo2)) logK_mo2[[i]] <- subcrt(c(mo[i], "glycinate", mo2[i]), c(-1, -2, 1), T = T)$out$logK
add.OBIGT("OldAA")
logK_di1_SK95 <- logK_di2_SK95 <- list()
for(i in 1:length(di1)) logK_di1_SK95[[i]] <- subcrt(c(di[i], "glycinate", di1[i]), c(-1, -1, 1), T = T)$out$logK
for(i in 1:length(di2)) logK_di2_SK95[[i]] <- subcrt(c(di[i], "glycinate", di2[i]), c(-1, -2, 1), T = T)$out$logK
reset()
opar <- par(no.readonly = TRUE)
layout(matrix(1:6, byrow = TRUE, nrow = 2), widths = c(2, 2, 1))
par(mar = c(4, 3.2, 2.5, 0.5), mgp = c(2.1, 1, 0), las = 1, cex = 0.8)
xlab <- axis.label("T")
ylab <- axis.label("logK")
matplot(T, sapply(logK_di1, c), type = "l", lwd = 2, lty = 1, xlab = xlab, ylab = ylab)
matplot(T, sapply(logK_di1_SK95, c), type = "l", lwd = 2, lty = 2, add = TRUE)
legend(-9, 7.7, c("Azadi et al., 2019", "Shock and Koretsky, 1995"), lty = c(1, 2), bty = "n", cex = 1)
mtext(expression(M^"+2" + Gly^"-" == M*(Gly)^"+"), line = 0.5)
matplot(T, sapply(logK_di2, c), type = "l", lwd = 2, lty = 1, xlab = xlab, ylab = ylab)
matplot(T, sapply(logK_di2_SK95, c), type = "l", lwd = 2, lty = 2, add = TRUE)
legend(-9, 14, c("Azadi et al., 2019", "Shock and Koretsky, 1995"), lty = c(1, 2), bty = "n", cex = 1)
mtext(expression(M^"+2" + 2*Gly^"-" == M*(Gly)[2]), line = 0.5)
plot.new()
par(xpd = NA)
legend("right", as.expression(lapply(di, expr.species)), lty = 1, col = 1:6, bty = "n", cex = 1.2, lwd = 2)
text(0, 1, "metal-\nglycinate\ncomplexes", cex = 1.3, font = 2)
par(xpd = FALSE)
matplot(T, sapply(logK_mo1, c), type = "l", lwd = 2, lty = 1, xlab = xlab, ylab = ylab)
mtext(expression(M^"+" + Gly^"-" == M*(Gly)), line = 0.5)
matplot(T, sapply(logK_mo2, c), type = "l", lwd = 2, lty = 1, xlab = xlab, ylab = ylab)
mtext(expression(M^"+" + 2*Gly^"-" == M*(Gly)[2]^"-"), line = 0.5)
plot.new()
par(xpd = NA)
legend("right", as.expression(lapply(mo, expr.species)), lty = 1, col = 1:5, bty = "n", cex = 1.2, lwd = 2)
par(xpd = FALSE)
layout(matrix(1))
par(opar)
tally(~ cut(Sepal.Length, breaks = 2:10), data = iris)
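# tally() here is mosaic's formula-interface tabulation: cut() bins
# Sepal.Length at the integer breaks 2..10 and tally() counts the iris
# observations falling in each bin.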
degross_lpostBasic = function(phi,tau,n.i,degross.data,
use.moments=rep(TRUE,4), freq.min=20, diag.only=FALSE,
gradient=FALSE,
penalize=TRUE,aa=2,bb=1e-6,pen.order=3){
if (missing(n.i)) n.i = NULL
K = length(phi)
Dd = diff(diag(K),diff=pen.order)
Pd = t(Dd) %*% Dd
freq.j = degross.data$freq.j
m.tot = sum(freq.j)
J = length(freq.j)
m.j = degross.data$m.j
small.to.big = degross.data$small.to.big
ui = degross.data$ui
delta = diff(ui[1:2])
B.i = degross.data$B.i
eta.i = c(B.i %*% phi)
temp = exp(eta.i)
pi.i = temp / sum(temp)
gamma.j = c(rowsum(pi.i,small.to.big))
M.j = matrix(nrow=J,ncol=8)
rownames(M.j) = paste("Bin",1:J,sep="")
colnames(M.j) = paste("mu",1:8,sep="")
M.j[,1] = rowsum(ui*pi.i,small.to.big) / gamma.j
for (r in 2:8){
M.j[,r] = rowsum((ui-M.j[small.to.big,1])^r*pi.i,small.to.big) / gamma.j
}
if (gradient){
Bi.tilde = t(t(B.i) - c(apply(pi.i*B.i,2,sum)))
PiBi.tilde = pi.i*Bi.tilde
if (!is.null(n.i)) Score.ni = c(t(B.i)%*%(n.i-m.tot*pi.i))
else Score.ni = NULL
Score.mj = Score.lprior = rep(0,ncol(B.i))
for (j in 1:J){
idx = which(small.to.big == j)
Score.mj = Score.mj + freq.j[j]/gamma.j[j]*c(apply(PiBi.tilde[idx,],2,sum))
}
}
R = 4
dmujr.k = array(dim=c(J,R,K))
if (sum(use.moments) > 0){
invSigma.j = array(dim=c(J,4,4))
for (j in 1:J){
idx = which(small.to.big == j)
if (gradient){
dmujr.k[j,1,] = c(apply((ui[idx]-M.j[j,1])*pi.i[idx]*Bi.tilde[idx,],2,sum)/gamma.j[j])
for (r in 2:4){
dmujr.k[j,r,] = c(apply((ui[idx]-M.j[j,1])^r*pi.i[idx]*Bi.tilde[idx,],2,sum)) / gamma.j[j]
temp = sum((ui[idx]-M.j[j,1])^(r-1)*pi.i[idx])
dmujr.k[j,r,] = dmujr.k[j,r,] - r * temp * dmujr.k[j,1,] / gamma.j[j]
temp2 = c(apply(pi.i[idx]*Bi.tilde[idx,],2,sum)/gamma.j[j])
dmujr.k[j,r,] = dmujr.k[j,r,] - M.j[j,r] * temp2
}
}
idx2 = which((freq.j[j] >= freq.min) & (!is.na(m.j[j,])) & use.moments)
if (length(idx2) > 0){
Mat = Sigma_fun(M.j[j,1:8])
if (diag.only) Mat = diag(diag(Mat))
invSigma.j[j,,] = freq.j[j] * solve(Mat)
if (gradient){
Mat = matrix(invSigma.j[j,idx2,idx2],nrow=length(idx2))
temp2 = matrix(dmujr.k[j,idx2,],nrow=length(idx2))
temp = Mat %*% temp2
Score.lprior = Score.lprior + matrix(m.j[j,idx2]-M.j[j,idx2],nrow=1) %*% temp
}
}
}
}
if (gradient){
Score.lprior = c(Score.lprior)
if (penalize){
Score.lprior = Score.lprior - tau * c(Pd%*%phi)
}
if (!is.null(n.i)) Score.ni = Score.ni + Score.lprior
Score.mj = Score.mj + Score.lprior
}
llik.ni = NULL
if (!is.null(n.i)) llik.ni = sum(n.i*log(pi.i))
llik.mj = sum(freq.j * log(gamma.j))
moments.penalty = 0
if (sum(use.moments) > 0){
for (j in 1:J){
idx2 = which((freq.j[j] >= freq.min) & (!is.na(m.j[j,])) & use.moments)
      if (length(idx2) > 0){
temp = matrix(invSigma.j[j,idx2,idx2],nrow=length(idx2))
eig.vals = svd(temp)$d ; eig.vals = eig.vals[eig.vals > 1e-4]
moments.penalty = moments.penalty + .5*sum(log(eig.vals))
moments.penalty = moments.penalty -.5 * sum(c(m.j[j,idx2]-M.j[j,idx2]) * c(temp %*% c(m.j[j,idx2]-M.j[j,idx2])))
}
}
}
penalty = 0
if (penalize){
penalty = (aa+.5*nrow(Dd))*log(tau) -tau*(bb+.5*sum(phi*c(Pd%*%phi)))
}
lpost.ni = llik.ni + moments.penalty + penalty
lpost.mj = llik.mj + moments.penalty + penalty
res = list()
res$lpost.ni = lpost.ni
res$lpost.mj = lpost.mj
res$llik.ni = llik.ni
res$llik.mj = llik.mj
res$moments.penalty = moments.penalty
res$penalty = penalty
if (gradient){
if (!is.null(n.i)) res$Score.ni = Score.ni
res$Score.mj = Score.mj
res$Score.lprior = Score.lprior
}
res$M.j = M.j
res$pi.i = pi.i
res$ui = ui ; res$delta = delta
res$gamma.j = gamma.j
res$tau = tau
res$phi = phi
res$n.i = n.i
res$freq.j = freq.j
return(res)
}
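# Usage sketch (assumes the data structures of the degross package):
# degross.data is expected to bundle the grouped frequencies freq.j, the
# tabulated sample moments m.j, the fine grid ui with its B-spline basis B.i,
# and the small.to.big map from grid bins to groups; phi holds the K spline
# coefficients and tau the roughness-penalty parameter. A hypothetical call
# requesting gradients:
# fit <- degross_lpostBasic(phi = rep(0, ncol(obj$B.i)), tau = 100,
#                           degross.data = obj, gradient = TRUE)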
context("perform_function")
test_that("Apply a calculation",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit) %>%
sprinkle(row = 2, cols = 2:3, fn = quote(round(value * -1, 2)))
x <- perform_function(x$body)
expect_equal(x$value[x$row == 2 & x$col %in% 2:3],
c("-1.24", "-0.38"))
})
test_that("Apply a string manipulation",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit) %>%
sprinkle(cols = 1, fn = quote(gsub("factor[(]gear[)]", "Gears: ", value)))
x <- perform_function(x$body)
expect_equal(x$value[x$row %in% 5:6 & x$col == 1],
c("Gears: 4", "Gears: 5"))
})
context("Check calculate_covariate_drift() function")
test_that("Type of data in the explainer",{
library("DALEX")
library("ranger")
predict_function <- function(m,x,...) predict(m, x, ...)$predictions
model_old <- ranger(m2.price ~ ., data = apartments)
d <- calculate_residuals_drift(model_old,
apartments_test[1:4000,], apartments_test[4001:8000,],
apartments_test$m2.price[1:4000], apartments_test$m2.price[4001:8000],
predict_function = predict_function)
expect_true("covariate_drift" %in% class(d))
expect_true(all(dim(d) == c(1,2)))
})
ecdfmeans.m <- function(x, groups, r) {
ecdf.ls <- by(x, INDICES=groups, FUN=stats::ecdf, simplify=FALSE)
sapply(ecdf.ls, FUN = function(x) { x(r) }, simplify=TRUE)
}
ecdfcontrasts.m <- function(x, groups, r) {
k <- nlevels(groups)
gnames <- levels(groups)
ecdf.ls <- by(x, INDICES=groups, FUN=stats::ecdf, simplify=FALSE)
cont <- matrix(0, nrow=length(r), ncol=k*(k-1)/2)
cont.names <- vector(length=k*(k-1)/2)
counter <- 1
for(i in 1:(k-1)) for(j in (i+1):k) {
cont[, counter] <- ecdf.ls[[i]](r) - ecdf.ls[[j]](r)
cont.names[counter] <- paste(gnames[i], gnames[j], sep="-")
counter <- counter+1
}
colnames(cont) <- cont.names
cont
}
GET.necdf <- function(x, r = seq(min(unlist((lapply(x, min)))), max(unlist((lapply(x, max)))), length=100),
contrasts = FALSE, nsim, ...) {
  if(!is.list(x) || length(x) < 2) stop("At least two groups should be provided.")
x.lengths <- as.numeric(lapply(x, FUN = length))
if(!is.null(names(x))) groups <- rep(names(x), times=x.lengths)
else groups <- rep(1:length(x), times=x.lengths)
groups <- factor(groups, levels=unique(groups))
gnames <- levels(groups)
if(missing(nsim)) {
if(!contrasts) {
nsim <- length(x)*1000 - 1
}
else {
J <- length(x)
nsim <- (J*(J-1)/2)*1000 - 1
}
message("Creating ", nsim, " permutations.\n", sep="")
}
if(!contrasts) fun <- ecdfmeans.m
else fun <- ecdfcontrasts.m
x <- unlist(x)
obs <- fun(x, groups, r)
sim <- replicate(nsim, fun(x, sample(groups, size=length(groups), replace=FALSE), r), simplify = "array")
complabels <- colnames(obs)
csets <- list()
for(i in 1:ncol(obs)) {
csets[[i]] <- create_curve_set(list(r = r,
obs = obs[,i],
sim_m = sim[,i,]))
}
names(csets) <- complabels
res <- global_envelope_test(csets, alternative="two.sided", ..., nstep=1)
if(!contrasts)
res <- envelope_set_labs(res, xlab = expression(italic(x)),
ylab = expression(italic(hat(F)(x))))
else
res <- envelope_set_labs(res, xlab = expression(italic(x)),
ylab = expression(italic(hat(F)[i](x)-hat(F)[j](x))))
attr(res, "contrasts") <- contrasts
attr(res, "labels") <- complabels
attr(res, "call") <- match.call()
res
}
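# Usage sketch (relies on the GET package for create_curve_set() and
# global_envelope_test()): x is a named list with one sample vector per group,
# and group labels are permuted to build the graphical envelope, e.g.
# x <- list(a = rnorm(50), b = rnorm(50, mean = 0.5))
# res <- GET.necdf(x, nsim = 999)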
blktrace <- function(blk,X,Z,parbarrier=NULL){
if(is.null(parbarrier)){
trXZ <- 0
for(p in 1:nrow(blk)){
if(blk[[p,1]] == "s"){
if(length(blk[[p,2]]) == 1){
trXZ <- trXZ + sum(sum(X[[p,1]] * Z[[p,1]]))
}else{
xx <- mexsvec(blk[p,,drop=FALSE],as.matrix(X[[p,1]]),0)
zz <- mexsvec(blk[p,,drop=FALSE],as.matrix(Z[[p,1]]))
trXZ <- trXZ + sum(xx * zz)
}
}else{
trXZ <- trXZ + sum(X[[p,1]]*Z[[p,1]])
}
}
}else{
trXZ <- 0
for(p in 1:nrow(blk)){
if(base::norm(parbarrier[[p,1]], type="2") == 0){
if(blk[[p,1]] == "s"){
if(length(blk[[p,2]]) == 1){
trXZ <- trXZ + sum(sum(X[[p,1]] * Z[[p,1]]))
}else{
xx <- mexsvec(blk[p,,drop=FALSE],as.matrix(X[[p,1]]),0)
zz <- mexsvec(blk[p,,drop=FALSE],as.matrix(Z[[p,1]]))
trXZ <- trXZ + sum(xx * zz)
}
}else{
trXZ <- trXZ + sum(X[[p,1]]*Z[[p,1]])
}
}else{
idx <- which(parbarrier[[p,1]] == 0)
if(length(idx) > 0){
if(blk[[p,1]] == "s"){
sumXZ <- colSums(X[[p,1]] * Z[[p,1]])
ss <- c(0,cumsum(blk[[p,2]]))
for(k in 1:length(idx)){
idxtmp <- c((ss[idx[k]]+1):ss[idx[k]+1])
trXZ <- trXZ + sum(sumXZ[idxtmp])
}
}else if(blk[[p,1]] == "q"){
tmp <- qops(blk,p,X[[p]],Z[[p]],1)
trXZ <- trXZ + sum(tmp[idx])
}else if(blk[[p,1]] == "l"){
trXZ <- trXZ + sum(X[[p,1]][idx] * Z[[p,1]][idx])
}
}
}
}
}
return(trXZ)
}
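# blktrace() accumulates the inner product sum_p <X_p, Z_p> over an
# SDPT3-style block structure ("s" semidefinite, "q" second-order cone,
# "l" linear); when barrier parameters are supplied, only blocks or
# coordinates with a zero barrier contribute to the trace.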
Update_a0 <-
function(last.params){
new.params <- last.params
gamma_rate_part <- a0_gibbs_rate(last.params$thetas,last.params$covariance,last.params$a0)
new.params$a0 <- rgamma(1,shape=(1+(last.params$loci*last.params$k)/2),rate=(1+gamma_rate_part))
new.params$covariance <- Covariance(new.params$a0,last.params$aD,last.params$aE,last.params$a2,last.params$D,last.params$E,last.params$delta)
new.params$prior_prob_alpha0 <- Prior_prob_alpha0(new.params$a0)
new.params$LnL_thetas_vec <- Likelihood_thetas(last.params$thetas,new.params$covariance)
new.params$a0_moves <- last.params$a0_moves + 1
return(new.params)
}
jacobi.g.weight <- function( x, p, q )
{
n <- length( x )
y <- rep( 0, n )
for ( i in 1:n ) {
if ( ( x[i] > 0 ) && ( x[i] < 1 ) ) {
t1 <- ( 1 - x[i] ) ^ ( p - q )
t2 <- ( x[i] ) ^ ( q - 1 )
y[i] <- t1 * t2
}
}
return( y )
}
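# jacobi.g.weight() returns the Jacobi G weight (1 - x)^(p - q) * x^(q - 1)
# on the open interval (0, 1), and 0 elsewhere, e.g.
# jacobi.g.weight(seq(0, 1, by = 0.25), p = 2, q = 1.5)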
tenant <- Sys.getenv("AZ_TEST_TENANT_ID")
app <- Sys.getenv("AZ_TEST_NATIVE_APP_ID")
if(tenant == "" || app == "")
skip("OneDrive for Business tests skipped: Microsoft Graph credentials not set")
if(!interactive())
skip("OneDrive for Business tests skipped: must be in interactive session")
tok <- get_test_token(tenant, app, c("Files.ReadWrite.All", "User.Read"))
if(is.null(tok))
skip("OneDrive for Business tests skipped: no access to tenant")
drv <- try(call_graph_endpoint(tok, "me/drive"), silent=TRUE)
if(inherits(drv, "try-error"))
skip("OneDrive for Business tests skipped: service not available")
od <- ms_drive$new(tok, tenant, drv)
test_that("OneDrive for Business works",
{
expect_is(od, "ms_drive")
lst <- od$list_items()
expect_is(lst, "data.frame")
newfolder <- make_name()
expect_silent(od$create_folder(newfolder))
src <- "../resources/file.json"
dest <- file.path(newfolder, basename(src))
newsrc <- tempfile()
expect_silent(od$upload_file(src, dest))
expect_silent(od$download_file(dest, newsrc))
expect_true(files_identical(src, newsrc))
item <- od$get_item(dest)
expect_is(item, "ms_drive_item")
pager <- od$list_files(newfolder, filter=sprintf("name eq '%s'", basename(src)), n=NULL)
expect_is(pager, "ms_graph_pager")
lst1 <- pager$value
expect_is(lst1, "data.frame")
expect_identical(nrow(lst1), 1L)
expect_silent(od$set_item_properties(dest, name="newname"))
expect_silent(item$sync_fields())
expect_identical(item$properties$name, "newname")
expect_silent(item$update(name=basename(dest)))
expect_identical(item$properties$name, basename(dest))
expect_silent(item$delete(confirm=FALSE))
expect_silent(od$delete_item(newfolder, confirm=FALSE))
})
test_that("Drive item methods work",
{
root <- od$get_item("/")
expect_is(root, "ms_drive_item")
rootp <- root$get_parent_folder()
expect_is(rootp, "ms_drive_item")
expect_equal(rootp$properties$name, "root")
tmpname1 <- make_name(10)
folder1 <- root$create_folder(tmpname1)
expect_is(folder1, "ms_drive_item")
expect_true(folder1$is_folder())
folder1p <- folder1$get_parent_folder()
expect_equal(folder1p$properties$name, "root")
tmpname2 <- make_name(10)
folder2 <- folder1$create_folder(tmpname2)
expect_is(folder2, "ms_drive_item")
expect_true(folder2$is_folder())
folder2p <- folder2$get_parent_folder()
expect_equal(folder2p$properties$name, folder1$properties$name)
src <- write_file()
expect_silent(file1 <- root$upload(src))
expect_is(file1, "ms_drive_item")
expect_false(file1$is_folder())
expect_error(file1$create_folder("bad"))
file1p <- file1$get_parent_folder()
expect_equal(file1p$properties$name, "root")
file1_0 <- root$get_item(basename(src))
expect_is(file1_0, "ms_drive_item")
expect_false(file1_0$is_folder())
expect_identical(file1_0$properties$name, file1$properties$name)
dest1 <- tempfile()
expect_silent(file1$download(dest1))
expect_true(files_identical(src, dest1))
expect_silent(file2 <- folder1$upload(src))
expect_is(file2, "ms_drive_item")
file2p <- file2$get_parent_folder()
expect_equal(file2p$properties$name, folder1$properties$name)
dest2 <- tempfile()
expect_silent(file2$download(dest2))
expect_true(files_identical(src, dest2))
dest3 <- tempfile()
expect_silent(file3 <- folder2$upload(src, basename(dest3)))
expect_is(file3, "ms_drive_item")
expect_identical(file3$properties$name, basename(dest3))
expect_silent(file3$download(dest3))
expect_true(files_identical(src, dest3))
file3_1 <- folder1$get_item(file.path(tmpname2, basename(dest3)))
expect_is(file3_1, "ms_drive_item")
expect_identical(file3_1$properties$name, file3$properties$name)
lst0 <- root$list_files()
expect_is(lst0, "data.frame")
lst0_f <- root$list_files(info="name", full_names=TRUE)
expect_type(lst0_f, "character")
expect_true(all(substr(lst0_f, 1, 1) == "/"))
lst0_1 <- root$list_files(tmpname1)
lst1 <- folder1$list_files()
expect_identical(lst0_1, lst1)
lst1_f <- folder1$list_files(tmpname2, info="name", full_names=TRUE)
expect_type(lst1_f, "character")
expect_true(all(grepl(paste0("^", tmpname2), lst1_f)))
expect_silent(file3$delete(confirm=FALSE))
expect_silent(folder2$delete(confirm=FALSE))
expect_silent(file2$delete(confirm=FALSE))
expect_silent(folder1$delete(confirm=FALSE))
expect_silent(file1$delete(confirm=FALSE))
})
test_that("Methods work with filenames with special characters",
{
test_name <- paste(make_name(5), "plus spaces and áccénts")
src <- write_file(fname=file.path(tempdir(), test_name))
expect_silent(od$upload_file(src, basename(src)))
expect_silent(item <- od$get_item(basename(test_name)))
expect_true(item$properties$name == basename(test_name))
expect_silent(item$delete(confirm=FALSE))
})
test_that("Nested folder creation/deletion works",
{
f1 <- make_name(10)
f2 <- make_name(10)
f3 <- make_name(10)
it12 <- od$create_folder(file.path(f1, f2))
expect_is(it12, "ms_drive_item")
it1 <- od$get_item(f1)
expect_is(it1, "ms_drive_item")
replicate(30, it1$upload(write_file()))
it123 <- it1$create_folder(file.path(f2, f3))
expect_is(it123, "ms_drive_item")
expect_silent(it1$delete(confirm=FALSE, by_item=TRUE))
})
context("run_solver")
if (interactive()) {
test_that("run_solver", {
x = random_instance(size = 100)
for (method in get_solvers()) {
res = run_solver(x, method = method)
}
})
}
globalVariables("func")
reduce <- function(x, f, init) {
Reduce(f, x, init)
}
drop_nulls <- function(x) {
if (length(x) == 0) {
x
} else {
x[!vapply(x, is.null, logical(1))]
}
}
str_conjoin <- function(x, con = "or") {
if (length(x) == 1) {
return(as.character(x))
}
if (length(x) == 2) {
return(paste(x[1], con, x[2]))
}
paste0(paste(x[-length(x)], collapse = ", "), ", ", con, " ", x[length(x)])
}
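# e.g. str_conjoin(c("x", "y", "z"))          # "x, y, or z"
#      str_conjoin(c("x", "y"), con = "and")  # "x and y"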
str_collate <- function(..., collapse = " ") {
args <- unique(c(...))
if (is.null(args)) {
return(NULL)
}
args <- args[nzchar(args) & !is_na(args)]
if (length(args) == 0) {
return(NULL)
}
paste(args, collapse = collapse)
}
str_re <- function(string, pattern, len0 = TRUE) {
if (length(string) == 0) {
return(len0)
}
grepl(paste0("^(?:", pattern, ")$"), string)
}
generate_id <- function(prefix) {
paste(c(prefix, sample(1000, 2, TRUE)), collapse = "-")
}
named_values <- function(x) {
x[names2(x) != ""]
}
unnamed_values <- function(x) {
x[names2(x) == ""]
}
s3_class_add <- function(x, new) {
class(x) <- unique(c(new, class(x)))
x
}
processDeps <- function(tags, session) {
ui <- takeSingletons(tags, session$singletons, desingleton = FALSE)$ui
ui <- surroundSingletons(ui)
dependencies <- lapply(
resolveDependencies(findDependencies(ui)),
createWebDependency
)
names(dependencies) <- NULL
dependencies
}
plot.outlier<- function(x,...) {
o.outlier<- x
neighb<- o.outlier$neigh.dist
dmm<- o.outlier$olddim[1]
par(omi=c(1,0,1.5,0))
o.h<- hist(neighb,xlab="Nearest neighbour distance",main="",col=gray(0.8),labels=T,ylim=c(0,dmm*0.6),mgp=c(2,0.5,0))
colwidths<- o.h$mids[2]-o.h$mids[1]
drange<- range(neighb)
ddrange<- range(neighb)[2]-range(neighb)[1]
xline<- seq(drange[1],drange[2],ddrange/100)
yline<- dnorm(xline,mean=mean(neighb),sd=sd(neighb))*length(neighb)*colwidths
lines(xline,yline,col="red3",lwd=1.2)
x.c<- o.outlier$pco.points[,1]
y.c<- o.outlier$pco.points[,2]
thresh<- o.outlier$threshold
par(omi=c(0,0,0,0))
plot(x.c,y.c,asp=1,type="n",cex.axis=0.7,cex.lab=1.0,tcl=-0.3,mgp=c(2,0.5,0))
points(x.c[neighb <= thresh],y.c[neighb <= thresh],pch=16,cex=0.6,col=gray(0.85))
points(x.c[neighb > thresh],y.c[neighb > thresh],pch=16,cex=0.6,col="red4")
abline(h=0,v=0,lwd=1.0,col="black")
legend("bottomleft",c("outliers","other data points"),pch=c(16,16),col=c("red4",gray(0.85)),bty="n",cex=0.8)
}
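# plot.outlier() is an S3 plot method: it draws a histogram of
# nearest-neighbour distances with a fitted normal overlay, then a principal
# coordinates scatter in which points whose neighbour distance exceeds
# x$threshold are flagged in dark red; x is expected to carry $neigh.dist,
# $olddim, $pco.points and $threshold.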
context("compare_stages")
DD <- generate_linear_dataset(5, 10)
levels(DD$C) <- c("a", "b")
levels(DD$X3) <- c("qqqq", "pppp")
mod1 <- full(DD, join_unobserved = FALSE)
mod2 <- indep(DD, join_unobserved = FALSE)
test_that("compare_stages correctly returns TRUE/FALSE", {
if (!requireNamespace("pkg", quietly = TRUE)) {
methods <- c("naive", "stages")
} else {
methods <- c("naive", "hamming", "stages")
}
for (m in methods) {
expect_true(compare_stages(mod1, mod1, method = !!m))
expect_true(compare_stages(mod2, mod2, method = !!m))
expect_false(compare_stages(mod1, mod2, method = !!m))
expect_false(compare_stages(mod2, mod1, method = !!m))
}
})
test_that("hamming_stages", {
expect_true(hamming_stages(mod1, mod2) >= 0)
})
test_that("inclusion_stages works properly", {
comparison <- inclusions_stages(mod1, mod2)
for (i in 2:(NCOL(DD) - 1)) {
expect_true(NROW(comparison[[i]]) > NROW(comparison[[i - 1]]))
}
})
test_that("inclusion_stages works symmetrically", {
comparison1 <- inclusions_stages(mod1, mod2)
comparison2 <- inclusions_stages(mod2, mod1)
for (i in 1:(NCOL(DD) - 1)) {
expect_true(NROW(comparison1[[i]]) == NROW(comparison2[[i]]))
}
})
set.seed(1234)
nbNodes <- 90
nbBlocks <- 3
blockProp <- c(.5, .25, .25)
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (undirected Bernoulli model, no covariate)", {
means <- diag(.4, 3) + 0.05
connectParam <- list(mean = means)
mySampler <- SimpleSBM$new('bernoulli', nbNodes, FALSE, blockProp, connectParam)
expect_error(SimpleSBM$new('bernouilli',nbNodes, FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', -10 , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', c(1,2) , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, -2, connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, c(0,1), connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, blockProp, list(mean = matrix( 2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, blockProp, list(mean = matrix(-2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, blockProp, list(mean = matrix(runif(nbBlocks**2), nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('bernoulli', nbNodes, FALSE, blockProp, list(mean = matrix(0, nbBlocks - 1, nbBlocks))))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'bernoulli')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1)/2)
expect_equal(mySampler$connectParam$mean, means)
expect_null(mySampler$connectParam$var)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(mySampler$expectation >= 0, na.rm = TRUE))
expect_true(all(mySampler$expectation <= 1, na.rm = TRUE))
expect_true(isSymmetric(mySampler$networkData))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(!mySampler$directed)
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (directed Bernoulli model, no covariate)", {
means <- matrix(runif(nbBlocks**2), nbBlocks, nbBlocks)
connectParam <- list(mean = means)
mySampler <- SimpleSBM$new('bernoulli', nbNodes, TRUE, blockProp, connectParam)
expect_error(SimpleSBM$new('bernouilli',nbNodes, TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernouilli',nbNodes, FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', -10 , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', c(1,2) , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, TRUE, -2, connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, TRUE, c(0,1), connectParam))
expect_error(SimpleSBM$new('bernoulli', nbNodes, TRUE, blockProp, list(mean = matrix( 2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('bernoulli', nbNodes, TRUE, blockProp, list(mean = matrix(-2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('bernoulli', nbNodes, TRUE, blockProp, list(mean = matrix(0, nbBlocks - 1, nbBlocks))))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'bernoulli')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1))
expect_equal(mySampler$connectParam$mean, means)
expect_null(mySampler$connectParam$var)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(mySampler$expectation >= 0, na.rm = TRUE))
expect_true(all(mySampler$expectation <= 1, na.rm = TRUE))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(!isSymmetric(mySampler$networkData))
expect_true(mySampler$directed)
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (undirected Poisson model, no covariate)", {
means <- diag(15., 3) + 5
connectParam <- list(mean = means)
mySampler <- SimpleSBM$new('poisson', nbNodes, FALSE, blockProp, connectParam)
expect_error(SimpleSBM$new('poison' , nbNodes, FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', -10 , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', c(1,2) , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, FALSE, -2, connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, FALSE, c(0,1), connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, FALSE, blockProp, list(mean = matrix(-2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('poisson', nbNodes, FALSE, blockProp, list(mean = round(40 * matrix(runif(nbBlocks**2), nbBlocks, nbBlocks)))))
expect_error(SimpleSBM$new('poisson', nbNodes, FALSE, blockProp, list(mean = matrix(2 , nbBlocks - 1, nbBlocks))))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'poisson')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1)/2)
expect_equal(mySampler$connectParam$mean, means)
expect_null(mySampler$connectParam$var)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(mySampler$expectation >= 0, na.rm = TRUE))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(isSymmetric(mySampler$networkData))
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (directed Poisson model, no covariate)", {
means <- round(40 * matrix(runif(nbBlocks**2), nbBlocks, nbBlocks))
connectParam <- list(mean = means)
mySampler <- SimpleSBM$new('poisson', nbNodes, TRUE, blockProp, connectParam)
expect_error(SimpleSBM$new('poison' , nbNodes, TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', -10 , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', c(1,2) , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, TRUE, -2, connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, TRUE, c(0,1), connectParam))
expect_error(SimpleSBM$new('poisson', nbNodes, TRUE, blockProp, list(mean = matrix(-2, nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('poisson', nbNodes, TRUE, blockProp, list(mean = matrix(2 , nbBlocks - 1, nbBlocks))))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'poisson')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1))
expect_equal(mySampler$connectParam$mean, means)
expect_null(mySampler$connectParam$var)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(mySampler$expectation >= 0, na.rm = TRUE))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(!isSymmetric(mySampler$networkData))
expect_true(mySampler$directed)
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (undirected Gaussian model, no covariate)", {
means <- diag(15., 3) + 5
connectParam <- list(mean = means, var = 2)
mySampler <- SimpleSBM$new('gaussian', nbNodes, FALSE, blockProp, connectParam)
expect_error(SimpleSBM$new('normal' , nbNodes, FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', -10 , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', c(1,2) , FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, -2, connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, c(0,1), connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, blockProp, list(mean = means, var = -1)))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, blockProp, list(mean = matrix(runif(nbBlocks**2), nbBlocks, nbBlocks))))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, blockProp, list(mean = matrix(2 , nbBlocks - 1, nbBlocks), var = 1)))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'gaussian')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1)/2)
expect_equal(mySampler$connectParam$mean, means)
expect_equal(mySampler$connectParam$var, 2)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(isSymmetric(mySampler$networkData))
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
test_that("Construction, fields access and other basics work in class SimpleSBM_Sampler (directed Gaussian model, no covariate)", {
means <- matrix(runif(nbBlocks**2), nbBlocks, nbBlocks)
connectParam <- list(mean = means, var = 2)
mySampler <- SimpleSBM$new('gaussian', nbNodes, TRUE, blockProp, connectParam)
expect_error(SimpleSBM$new('normal' , nbNodes, TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, FALSE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', -10 , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', c(1,2) , TRUE, blockProp, connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, TRUE, -2, connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, TRUE, c(0,1), connectParam))
expect_error(SimpleSBM$new('gaussian', nbNodes, TRUE, blockProp, list(var = -1, mean = means)))
expect_error(SimpleSBM$new('gaussian', nbNodes, TRUE, blockProp, list(var = 1, mean = matrix(2 , nbBlocks - 1, nbBlocks))))
expect_true(inherits(mySampler, "SBM"))
expect_true(inherits(mySampler, "SimpleSBM"))
expect_equal(mySampler$modelName, 'gaussian')
expect_equal(unname(mySampler$nbNodes), nbNodes)
expect_equal(mySampler$nbDyads, nbNodes*(nbNodes - 1))
expect_equal(mySampler$connectParam$mean, means)
expect_equal(mySampler$connectParam$var, 2)
mySampler$rMemberships(store = TRUE)
mySampler$rEdges(store = TRUE)
expect_equal(dim(mySampler$expectation), c(nbNodes, nbNodes))
expect_true(all(is.na(diag(mySampler$networkData))))
expect_true(!isSymmetric(mySampler$networkData))
expect_true(mySampler$directed)
expect_equal(mySampler$blockProp, blockProp)
expect_equal(mySampler$nbBlocks, nbBlocks)
expect_equal(dim(mySampler$indMemberships), c(nbNodes, nbBlocks))
expect_equal(sort(unique(mySampler$memberships)), 1:nbBlocks)
expect_equal(length(mySampler$memberships), nbNodes)
expect_equal(mySampler$nbCovariates, 0)
expect_equal(mySampler$covarList, list())
expect_equal(mySampler$covarParam, numeric(0))
expect_equal(mySampler$covarEffect, numeric(0))
})
Stage1.notebook<-function() {
one<-inclus<-samplingtypeFrame<-run1<-NULL
initializeDialog(title="Stage 1 Sampling")
variables2<-names(get(activeDataSet()))
PSUselection<-ifelse(is.na(match("PSU_SN", variables2)), NA, match("PSU_SN", variables2)-1)
PSUBox<-variableListBox2(top, variableList=variables2, title="Select the PRIMARY SAMPLING UNIT variable:",
initialSelection=PSUselection)
tkgrid(getFrame(PSUBox), sticky="w")
strataselection<-ifelse(is.na(match("STRATUM_ID", variables2)), NA, match("STRATUM_ID", variables2)-1)
strataBox<-variableListBox2(top, variableList=variables2, title="Select the STAGE 1 STRATIFICATION variable:",
initialSelection=strataselection)
tkblank(top)
tkgrid(getFrame(strataBox), sticky="w")
inclusselection<-ifelse(is.na(match("ST1_PROB", variables2)), NA, match("ST1_PROB", variables2)-1)
inclusBox<-variableListBox2(top, variableList=variables2, title="Select the STAGE 1 INCLUSION PROBABILITY variable:",
initialSelection=inclusselection)
tkblank(top)
tkgrid(getFrame(inclusBox), sticky="w")
radioButtons(top, name="samplingtype", buttons=c("Maxentropy","system"), values=c(1,2),
labels=c("Maximum entropy sampling (recommended) ","Systematic Sampling"),
title="What type of sampling do you want to do?",right.buttons=FALSE)
tkblank(top)
tkgrid(samplingtypeFrame, sticky="w")
RandomValue<-tclVar("1")
RandomCB<-tkcheckbutton(top, text="Randomize the PSU order within each stratum")
tkconfigure(RandomCB, variable=RandomValue)
tkblank(top)
tkgrid(RandomCB, sticky="w")
onOK<-function(){
stage1df<-activeDataSet()
PSU <- as.double(tkcurselection(PSUBox$listbox))+1
strata <- as.double(tkcurselection(strataBox$listbox))+1
inclus<- as.double(tkcurselection(inclusBox$listbox))+1
RandomPSU<-tclvalue(RandomValue)
if (tclvalue(samplingtypeVariable)=="1") {samplingtype<-"UPmaxentropy"} else {samplingtype<-"UPsystematic"}
if (length(PSU)==0) {check.fn2("Primary Sampling Unit")
return()}
if (length(strata)==0) {check.fn2("STRATIFICATION")
return()}
if (length(inclus)==0) {check.fn2("First Stage Inclusion Probability")
return()}
closeDialog()
doItAndPrint(paste("Stage1Sample <- stage1Sample.fn(stage1df=",stage1df,", PSU=",PSU,", strata=",strata,", inclus=",inclus, ", samplingtype=",samplingtype,", randomPSU=", RandomPSU,")\n",sep=""))
activeDataSet("Stage1Sample")
}
OKCancelHelp2(window=top, onHelp=Stage1onHelp)
tkgrid(buttonsFrame, columnspan="2", sticky="w")
dialogSuffix(rows=3, columns=2, window=top, focus=buttonsFrame)
}
stage1Sample.fn <-function(stage1df, PSU, strata, inclus, samplingtype, randomPSU){
x<-stage1df
x<-x[order(x[,strata],x[,PSU]),]
y<-unique(x[,c(PSU,strata,inclus)])
PSU2<-match(names(x)[PSU], names(y))
strata2<-match(names(x)[strata],names(y))
inclus2<-match(names(x)[inclus],names(y))
y$strata.id<-as.numeric(y[,strata2])
y$I<-ifelse(y[,inclus2]==1,1,0)
sample<-y[which(y[,inclus2]<1),]
if (nrow(y)!=nrow(sample)) {
message("Sampling has proceeded, however this dataset contains PSU(s) with inclusion probabilities = 1")
}
total<-max(sample$strata.id)
pb<-tkProgressBar(title = "progress bar", min = 0,
max = total, width = 300)
selectionI<-matrix(NA, nrow=total, ncol=1)
for (i in 1: max(sample$strata.id)){
selectionI[i]<-list(samplingtype(sample[,inclus2][which(sample$strata.id==i)]))
setTkProgressBar(pb, i, label=paste( round(i/total*100, 0),"% done"))
}
close(pb)
sample$I<-c(selectionI,recursive=TRUE)
sample<-rbind(subset(sample,sample$I==1),y[which(y$I==1),])
Selected.PSU<-x[which(x[,PSU] %in% sample[,PSU2]),]
if (randomPSU==1){
Selected.PSU$rand<-runif(nrow(Selected.PSU))
rrr<-tapply(Selected.PSU$rand, FUN=min, INDEX=factor(Selected.PSU[,PSU]))
Selected.PSU$rrr<-rrr[match(Selected.PSU[,PSU], rownames(rrr))]
Selected.PSU<-Selected.PSU[order(Selected.PSU[,strata], Selected.PSU$rrr),]
}
row.names(Selected.PSU)<-Selected.PSU$I<-Selected.PSU$strata.id<-Selected.PSU$PSUorder<-Selected.PSU$rand<-Selected.PSU$rrr<-NULL
return(Selected.PSU)
}
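# stage1Sample.fn() draws one PSU sample per stratum with the chosen
# unequal-probability design (samplingtype is expected to be UPmaxentropy or
# UPsystematic from the sampling package), keeps certainty PSUs whose
# inclusion probability equals 1, and optionally randomises PSU order within
# strata before returning the selected rows.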
summary.dfm <- function(object, ci = .95, ...){
tt <- NROW(object[["x"]])
m <- NCOL(object[["x"]])
n <- NCOL(object[["factor"]]) / tt
n_lambda <- m * n
p <- object$specifications$lags["p"]
measure_names <- dimnames(object$x)[[2]]
fac_names <- paste0("Factor.", 1:n)
dim_names <- list(measure_names, fac_names)
ci_low <- (1 - ci) / 2
ci_high <- 1 - ci_low
temp <- summary(object[["lambda"]], quantiles = c(ci_low, .5, ci_high))
means <- matrix(temp$statistics[, "Mean"], m)
sds <- matrix(temp$statistics[, "SD"], m)
naive_sd <- matrix(temp$statistics[, "Naive SE"], m)
ts_sd <- matrix(temp$statistics[, "Time-series SE"], m)
q_low <- matrix(temp$quantiles[, 1], m)
median <- matrix(temp$quantiles[, 2], m)
q_high <- matrix(temp$quantiles[, 3], m)
if (!is.null(means)) {
dimnames(means) <- dim_names
dimnames(sds) <- dim_names
dimnames(naive_sd) <- dim_names
dimnames(ts_sd) <- dim_names
dimnames(q_low) <- dim_names
dimnames(median) <- dim_names
dimnames(q_high) <- dim_names
}
result <- list(lambda = list(means = means,
median = median,
sd = sds,
naivesd = naive_sd,
tssd = ts_sd,
q_lower = q_low,
q_upper = q_high))
means <- NULL
median <- NULL
sds <- NULL
naive_sd <- NULL
ts_sd <- NULL
q_low <- NULL
q_high <- NULL
temp <- summary(object[["factor"]], quantiles = c(ci_low, .5, ci_high))
for (i in 1:n) {
if ("numeric" %in% class(temp$statistics)) {
means <- cbind(means, matrix(temp$statistics["Mean"], tt))
sds <- cbind(sds, matrix(temp$statistics["SD"], tt))
naive_sd <- cbind(naive_sd, matrix(temp$statistics["Naive SE"], tt))
ts_sd <- cbind(ts_sd, matrix(temp$statistics["Time-series SE"], tt))
q_low <- cbind(q_low, matrix(temp$quantiles[1], tt))
median <- cbind(median, matrix(temp$quantiles[2], tt))
q_high <- cbind(q_high, matrix(temp$quantiles[3], tt))
} else {
means <- cbind(means, matrix(temp$statistics[i + n * 0:(tt - 1), "Mean"], tt))
sds <- cbind(sds, matrix(temp$statistics[i + n * 0:(tt - 1), "SD"], tt))
naive_sd <- cbind(naive_sd, matrix(temp$statistics[i + n * 0:(tt - 1), "Naive SE"], tt))
ts_sd <- cbind(ts_sd, matrix(temp$statistics[i + n * 0:(tt - 1), "Time-series SE"], tt))
q_low <- cbind(q_low, matrix(temp$quantiles[i + n * 0:(tt - 1), 1], tt))
median <- cbind(median, matrix(temp$quantiles[i + n * 0:(tt - 1), 2], tt))
q_high <- cbind(q_high, matrix(temp$quantiles[i + n * 0:(tt - 1), 3], tt))
}
}
dim_names <- list(1:tt, fac_names)
if (!is.null(means)) {
dimnames(means) <- dim_names
dimnames(sds) <- dim_names
dimnames(naive_sd) <- dim_names
dimnames(ts_sd) <- dim_names
dimnames(q_low) <- dim_names
dimnames(median) <- dim_names
dimnames(q_high) <- dim_names
}
result[["factor"]] = list(means = means,
median = median,
sd = sds,
naivesd = naive_sd,
tssd = ts_sd,
q_lower = q_low,
q_upper = q_high)
if (!is.null(object$sigma_u)) {
temp <- summary(object$sigma_u, quantiles = c(ci_low, .5, ci_high))
means <- matrix(temp$statistics[, "Mean"], m)
sds <- matrix(temp$statistics[, "SD"], m)
naive_sd <- matrix(temp$statistics[, "Naive SE"], m)
ts_sd <- matrix(temp$statistics[, "Time-series SE"], m)
q_low <- matrix(temp$quantiles[, 1], m)
median <- matrix(temp$quantiles[, 2], m)
q_high <- matrix(temp$quantiles[, 3], m)
}
dim_names <- list(measure_names, NULL)
if (!is.null(means)) {
dimnames(means) <- dim_names
dimnames(sds) <- dim_names
dimnames(naive_sd) <- dim_names
dimnames(ts_sd) <- dim_names
dimnames(q_low) <- dim_names
dimnames(median) <- dim_names
dimnames(q_high) <- dim_names
}
result[["sigma_u"]] = list(means = means,
median = median,
sd = sds,
naivesd = naive_sd,
tssd = ts_sd,
q_lower = q_low,
q_upper = q_high)
if (p > 0) {
temp <- summary(object[["a"]], quantiles = c(ci_low, .5, ci_high))
if ("numeric" %in% class(temp$statistics)) {
means <- matrix(temp$statistics["Mean"], n)
sds <-matrix(temp$statistics["SD"], n)
naive_sd <- matrix(temp$statistics["Naive SE"], n)
ts_sd <- matrix(temp$statistics["Time-series SE"], n)
q_low <- matrix(temp$quantiles[1], n)
median <- matrix(temp$quantiles[2], n)
q_high <- matrix(temp$quantiles[3], n)
} else {
means <- matrix(temp$statistics[, "Mean"], n)
sds <- matrix(temp$statistics[, "SD"], n)
naive_sd <- matrix(temp$statistics[, "Naive SE"], n)
ts_sd <- matrix(temp$statistics[, "Time-series SE"], n)
q_low <- matrix(temp$quantiles[, 1], n)
median <- matrix(temp$quantiles[, 2], n)
q_high <- matrix(temp$quantiles[, 3], n)
}
    # label and store the factor VAR coefficient ("a") summaries
    dim_names <- list(fac_names, NULL)
    dimnames(means) <- dim_names
    dimnames(sds) <- dim_names
    dimnames(naive_sd) <- dim_names
    dimnames(ts_sd) <- dim_names
    dimnames(q_low) <- dim_names
    dimnames(median) <- dim_names
    dimnames(q_high) <- dim_names
    result[["a"]] <- list(means = means,
                          median = median,
                          sd = sds,
                          naivesd = naive_sd,
                          tssd = ts_sd,
                          q_lower = q_low,
                          q_upper = q_high)
  }
if (!is.null(object$sigma_v)) {
temp <- summary(object$sigma_v, quantiles = c(ci_low, .5, ci_high))
if (n == 1) {
means <- matrix(temp$statistics["Mean"], n)
sds <- matrix(temp$statistics["SD"], n)
naive_sd <- matrix(temp$statistics["Naive SE"], n)
ts_sd <- matrix(temp$statistics["Time-series SE"], n)
q_low <- matrix(temp$quantiles[1], n)
median <- matrix(temp$quantiles[2], n)
q_high <- matrix(temp$quantiles[3], n)
} else {
means <- matrix(temp$statistics[, "Mean"], n)
sds <- matrix(temp$statistics[, "SD"], n)
naive_sd <- matrix(temp$statistics[, "Naive SE"], n)
ts_sd <- matrix(temp$statistics[, "Time-series SE"], n)
q_low <- matrix(temp$quantiles[, 1], n)
median <- matrix(temp$quantiles[, 2], n)
q_high <- matrix(temp$quantiles[, 3], n)
}
}
dim_names <- list(fac_names, NULL)
if (!is.null(means)) {
dimnames(means) <- dim_names
dimnames(sds) <- dim_names
dimnames(naive_sd) <- dim_names
dimnames(ts_sd) <- dim_names
dimnames(q_low) <- dim_names
dimnames(median) <- dim_names
dimnames(q_high) <- dim_names
}
result[["sigma_v"]] = list(means = means,
median = median,
sd = sds,
naivesd = naive_sd,
tssd = ts_sd,
q_lower = q_low,
q_upper = q_high)
result$specifications <- object$specifications
result$specifications$ci <- paste(c(ci_low, ci_high) * 100, "%", sep = "")
class(result) <- "summary.dfm"
return(result)
}
context("gen-method")
test_that("__and__", {
x <- torch_tensor(TRUE)
y <- x$`__and__`(x)
expect_tensor(y)
expect_equal_to_tensor(y, x)
x <- torch_tensor(c(TRUE, FALSE))
y <- x$`__and__`(TRUE)
expect_tensor(y)
expect_equal_to_tensor(y, x)
})
test_that("add", {
x <- torch_tensor(1L, dtype = torch_long())
expect_equal_to_r(x$add(1L)$to(dtype = torch_int()), 2L)
x <- torch_tensor(1)
expect_equal_to_r(x$add(1), 2)
})
test_that("clamp", {
x <- torch_randn(5)
expect_error(x$clamp(1), regexp = NA)
})
test_that("clone", {
x <- torch_randn(10, 10)
y <- x$clone()
expect_equal_to_tensor(x, y)
expect_true(! x$storage()$data_ptr() == y$storage()$data_ptr())
})
test_that("item", {
x <- torch_tensor(1)
expect_equal(x$item(), 1)
x <- torch_tensor(1L)
expect_equal(x$item(), 1L)
x <- torch_tensor(TRUE)
expect_equal(x$item(), TRUE)
x <- torch_tensor(1.5)
expect_equal(x$item(), 1.5)
x <- torch_tensor(1.5, dtype = torch_double())
expect_equal(x$item(), 1.5)
})
test_that("new_full", {
x <- torch_randn(2,2)
expect_equal_to_tensor(
x$new_full(c(3,3), 1),
torch_ones(3,3)
)
})
test_that("permute", {
x <- torch_randn(2,3,4)
y <- x$permute(c(3,2,1))
expect_tensor_shape(y, c(4,3,2))
expect_error(
x$permute(c(2,1, 0)),
regex = "Indexing starts at 1 but found a 0.",
fixed = TRUE
)
})
expected <- eval(parse(text="logical(0)"));
test(id=0, code={
argv <- eval(parse(text="list(logical(0), NULL)"));
.Internal('comment<-'(argv[[1]], argv[[2]]));
}, o=expected);
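# Generated regression test: assigning a NULL comment to logical(0) via the
# internal `comment<-` should return the vector unchanged, i.e. logical(0).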
min_flow = function(MO2, min_pO2 = 90, pO2_in = 100, temp = 25, sal = 35, atm_pres = 1013.25){
o2_conc = as.numeric(marelac::gas_satconc(S = sal, t = temp, P = measurements::conv_unit(atm_pres, 'mbar', 'bar'), species = 'O2'))
cO2_in = (pO2_in / 100) * o2_conc
return((MO2 / 60) / (cO2_in - o2_conc * (min_pO2 / 100)))
}
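# Example (units follow marelac::gas_satconc(), i.e. micromol per litre, with
# MO2 per hour, so the returned flow is litres per minute): the defaults
# describe 25 C seawater at 35 psu, e.g.
# min_flow(MO2 = 300)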
library(CHNOSZ)
T <- 25:125
ntop <- 5
lcex <- 0.8
width <- 380
height <- 380
rubisco <- c("RBL_BRAJA","A6YF84_9PROT","A1E8R4_9CHLO","A8C9T6_9MYCO","A3EQE1_9BACT","A5CKC7_9CHRO",
"RBL_SYNJA","Q6JAI0_9RHOD","RBL_METJA","A3DND9_STAMF","A1RZJ5_THEPD","RBL_PYRHO")
rubisco.organisms <- c("a-proteobacterium-R","b-proteobacterium","Bracteacoccus","Mycobacterium",
"Leptospirillum","Cyanobium","Synechococcus","Cyanidiales",
"Methanococcus-R","Desulfurococcus","Thermofilum","Pyrococcus")
accoaco <- c("Q9F7M8_PRB01","ACCA_DEIRA","A6CDM2_9PLAN","A4AGS7_9ACTN","ACCA_CAUCR","A1VC70_DESVV",
"A6VIX9_METM7","Q2JSS7_SYNJA","A0GZU2_9CHLR","A7WGI1_9AQUI","Q05KD0_HYDTH","ACCA_AQUAE")
accoaco.organisms <- c("g-proteobacterium","Deinococcus","Planctomyces","Actinobacterium",
"a-proteobacterium-A","d-proteobacterium","Methanococcus-A","Synechococcus",
"Chloroflexus","Hydrogenobaculum","Hydrogenobacter","Aquifex")
organisms <- c(rubisco.organisms,accoaco.organisms)
col <- rep(c(rep("blue",6),rep("red",6)),2)
pch <- c(rep(c(0:2,5:7),2),rep(c(15:20),2))
res <- length(T)
if(res==1) ido <- 1 else {
if(!"png" %in% dir()) stop("directory 'png' not present")
else if(length(dir("png")) > 0) stop("directory 'png' not empty")
png(filename="png/Rplot%04d.png",width=width,height=height)
ido <- c(rep(1,15),1:res,rep(res,20))
}
basis(c("CO2","H2O","NH3","H2","H2S","H+"),
c("aq","liq","aq","aq","aq","aq"),c(-3,0,-4,-6,-7,-7))
species(c(rubisco,accoaco))
get.logaH2 <- function(T) return(-11+T*3/40)
H2 <- get.logaH2(T)
if(res==1) {
basis("H2",H2)
a <- affinity(T=T)
} else a <- affinity(T=T,H2=H2)
e <- equilibrate(a, normalize=TRUE)
rank <- 1:length(e$loga.equil)
for(i in 1:length(ido)) {
if(i%%20 == 0) cat("\n") else cat(".")
loga <- numeric()
for(j in 1:length(e$loga.equil)) loga <- c(loga, e$loga.equil[[j]][ido[i]])
if(i > 4) myrank4 <- myrank3
if(i > 3) myrank3 <- myrank2
if(i > 2) myrank2 <- myrank1
if(i > 1) myrank1 <- myrank
order <- order(loga,decreasing=TRUE)
myrank <- rank(loga)
cex <- rep(1.2,24)
if(i > 4) {
ichanged <- myrank3 != myrank4
cex[ichanged[order]] <- cex[ichanged[order]] + 0.1
}
if(i > 3) {
ichanged <- myrank2 != myrank3
cex[ichanged[order]] <- cex[ichanged[order]] + 0.2
}
if(i > 2) {
ichanged <- myrank1 != myrank2
cex[ichanged[order]] <- cex[ichanged[order]] + 0.3
}
if(i > 1) {
ichanged <- myrank != myrank1
cex[ichanged[order]] <- cex[ichanged[order]] + 0.4
}
plot(rank,loga[order],col=col[order],pch=pch[order],
ylab=expression(log~italic(a)),cex=cex,cex.main=1,cex.lab=1,cex.axis=1)
myT <- format(round(T,1))[ido[i]]
myH2 <- format(round(H2,2))[ido[i]]
title(main=substitute(list(X~degree*C, log*italic(a)[paste(H2)]==Y),
list(X=myT,Y=myH2)))
legend("topright",legend=c(paste("top",ntop),organisms[order[1:ntop]]),
pch=c(NA,pch[order[1:ntop]]),col=c(NA,col[order[1:ntop]]),
pt.cex=c(NA,cex[1:ntop]),cex=lcex)
order <- order(loga)
legend("bottomleft",legend=c(paste("low",ntop),organisms[order[ntop:1]]),
pch=c(NA,pch[order[ntop:1]]),col=c(NA,col[order[ntop:1]]),
pt.cex=c(NA,cex[24:(24-ntop+1)]),cex=lcex)
}
if(res > 1) {
cat("\n")
dev.off()
cat("anim.carboxylase: converting to animated GIF...\n")
outfile <- "carboxylase.gif"
syscmd <- paste("convert -loop 0 -delay 10 png/*.png png/", outfile, sep = "")
cat(paste(syscmd,"\n"))
if(.Platform$OS.type=="unix") sres <- system(syscmd)
else sres <- shell(syscmd)
if(sres==0) cat(paste("anim.carboxylase: animation is at png/",outfile,"\n",sep=""))
else {
cat("anim.carboxylase: error converting to animated GIF\n")
cat("anim.carboxylase: check that 'convert' tool from ImageMagick is in your PATH\n")
}
}
one_forward <- function(vect) {
m <- sum(vect)
if (length(vect) == 1) return(vect)
for (i in (length(vect) - 1):1) {
if (i == 1) {
vect[i] <- vect[i] + 1
return(c(vect[1:i], rep(1, m - sum(vect[1:i]))))
}
else if (vect[i - 1] > vect[i]) {
vect[i] <- vect[i] + 1
return(c(vect[1:i], rep(1, m - sum(vect[1:i]))))
}
}
}
groups <- function(nelem) {
  vect <- rep(1, nelem)
  l <- list(vect)
while (length(vect) > 1) {
vect <- one_forward(vect)
l <- c(l, list(vect))
}
return(l)
}
perm_categories <- function(powvect) {
K <- length(powvect)
nall <- sum(powvect)
prev_vect <- paste(rep(0, nall), collapse = "")
spaces_left <- c(nall - c(0, cumsum(powvect)[-K]))
for (k in 1:K) {
ind_next <- combn(1:spaces_left[k], powvect[k])
prev_vect <- add_letter(ind_next, prev_vect, letters[k])
}
return(prev_vect)
}
add_letter <- function(ind_next, prev_vect, letter) {
next_vect <- matrix(nrow = length(prev_vect), ncol = ncol(ind_next))
for (i in 1:length(prev_vect)) {
for (j in 1:ncol(ind_next)) {
old <- strsplit(prev_vect[i], split = "")[[1]]
not_filled <- which(old == "0")
old[not_filled[ind_next[, j]]] <- letter
next_vect[i, j] <- paste(old, collapse = "")
}
}
return(as.character(next_vect))
}
sort_str <- function(str) {
return(paste(sort((strsplit(str, split = "")[[1]])), collapse = ""))
}
calculate_coef <- function(str_vect, K, excl) {
coef_numer(str_vect, K, excl)/coef_denom(str_vect)
}
coef_numer <- function(str_vect, K, excl) {
coefs <- matrix(nrow = K, ncol = length(str_vect))
for (k in 1:K) {
nn <- nchar(gsub(excl[k], "", str_vect))
cn <- c(0, cumsum(nn))[- (length(nn) + 1)]
coefs[k, ] <- choose(sum(nn) - cn, nn)
}
return(prod(coefs))
}
coef_denom <- function(str_vect) {
if (length(str_vect) == 1) return(1)
repeats <- duplicated(str_vect)
k <- 1
multiples <- NULL
for (i in 2:(length(str_vect))) {
if (repeats[i]) k <- k + 1
else {
multiples <- c(multiples, k)
k <- 1
}
}
multiples <- c(multiples, k)
return(prod(factorial(multiples)))
}
all_groups <- function(powvect) {
K <- length(powvect)
m <- sum(powvect)
groupings <- groups(m)
perms <- perm_categories(powvect)
excl <- paste("[^", letters[1:K], "]", sep = "")
mat <- matrix("", ncol = m + 2)
for (g in 1:length(groupings)) {
grp <- groupings[[g]]
for (p in 1:length(perms)) {
permgroup <- substring(perms[p],
c(1, cumsum(grp[-length(grp)]) + 1),
cumsum(grp))
if (any(permgroup == "a")) next
newrow <- sort(sapply(permgroup, sort_str))
mat <- rbind(mat, c(newrow, rep("", m - length(newrow)),
length(newrow),
calculate_coef(newrow, K, excl)))
}
}
all_empty <- apply(mat, 2, function(s) all(s == ""))
mat <- unique(mat[, !all_empty])[-1, ]
if (!inherits(mat, "matrix"))
mat <- matrix(mat, nrow = 1)
colnames(mat) <- c(paste("group", 1:(ncol(mat) - 2)), "d", "coef")
return(mat)
}
combine_coef <- function(char_mat) {
if (all(char_mat == "")) return(char_mat)
J <- ncol(char_mat)
if (all(dim(char_mat) == c(1, 3)))
return(data.frame(mu = str_to_moment(char_mat[1, 1]), k = 1, coef = 1))
num_mat <- t(apply(char_mat[, 1:(J-2)], 1, str_to_moment))
mu_vect <- apply(num_mat, 1, vect_to_one)
coef_vect <- tapply(char_mat[, "coef"], mu_vect,
function(x) sum(as.numeric(x)))
d_vect <- tapply(char_mat[, "d"], mu_vect, unique)
df_d <- data.frame(mu = names(d_vect), d = d_vect)
df_coef <- data.frame(mu = names(coef_vect), coef = coef_vect)
return(merge(df_d, df_coef, by = "mu"))
}
str_to_moment <- function(str_vect) {
sapply(str_vect, function(str) {
sum(match(strsplit(str, split = "")[[1]], letters))
})
}
vect_to_one <- function(vect) {
vect <- sort(vect[vect != 0])
return(paste(vect, collapse = ":"))
}
one_grouping <- function(vect3elem, smpsize) {
if (all(vect3elem == "")) return("")
mu <- strsplit(vect3elem[1], split = ":")
mutab <- rev(table(mu))
muexpr <- paste("mu", names(mutab), "^", mutab, "*",
sep = "", collapse = "")
muexpr <- substr(muexpr, start = 1, stop = nchar(muexpr) - 1)
k <- as.numeric(vect3elem[2])
nexpr <- smpsize
if (k > 1) {
for (i in 1:(k-1)) {
nexpr <- paste(nexpr, "*(", smpsize, "-", i, ")",
sep = "", collapse = "")
}
}
coef_n_mu <- paste(vect3elem[3], "*", nexpr, "*", muexpr, " + ",
sep = "", collapse = "")
return(coef_n_mu)
}
one_combination <- function(powvect, smpsize = "n") {
if (!length(powvect)) return("1")
  if (powvect[1] == 1 && sum(powvect[-1]) == 0) return("0")
if (!sum(powvect)) return("1")
res <- all_groups(powvect)
combined_res <- combine_coef(res)
combined_res[, 1] <- as.character(combined_res[, 1])
vect <- apply(combined_res, 1, one_grouping, smpsize = smpsize)
str <- paste(vect, collapse = "")
return(paste(" (", substr(str, start = 1, stop = nchar(str) - 2),
") / ", smpsize, "^", sum(powvect), sep = ""))
} |
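# Usage sketch (not run): reading the helpers above, one_combination(p)
# builds a string expression, in the sample size n and the central
# moments mu2, mu3, ..., for E[prod_k S_k^p[k]] / n^sum(p) with
# S_k = sum_i (X_i - mu)^k. The simplest case is the variance of the
# sample mean:
if (FALSE) {
  one_combination(c(2))  # "(1*n*mu2^1) / n^2" up to spacing, i.e. mu2/n
}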
context("HttpClient: query")
test_that("query works", {
skip_on_cran()
cli <- HttpClient$new(url = hb())
aa <- cli$get('get', query = list(hello = "world"))
expect_is(aa, "HttpResponse")
expect_match(aa$url, "hello")
expect_match(aa$url, "world")
expect_match(jsonlite::fromJSON(aa$parse())$url, "hello")
expect_match(jsonlite::fromJSON(aa$parse())$url, "world")
})
test_that("query - multiple params of same name work", {
skip_on_cran()
cli <- HttpClient$new(url = hb())
aa <- cli$get('get', query = list(hello = 5, hello = 6))
expect_is(aa, "HttpResponse")
expect_equal(length(gregexpr("hello", aa$url)[[1]]), 2)
expect_equal(
length(gregexpr("hello", jsonlite::fromJSON(aa$parse())$url)[[1]]), 2)
})
test_that("query - length 0 query list works", {
skip_on_cran()
cli <- HttpClient$new(url = hb())
aa <- cli$get('get', query = list())
expect_is(aa, "HttpResponse")
expect_false(grepl("\\?", aa$url))
}) |
getFittedArima <- function(object, x, y){
arma = object$arma
coef = object$coefficients
n = length(y)
  coef_reg = coef[seq_len(ncol(x))]
coef_arma = coef[(ncol(x)+1):length(coef)]
fitted_regression <- as.numeric(x %*% coef_reg)
error_regression = as.numeric(y) - fitted_regression
trarma <- ArimaTransf(coef_arma, arma)
max.order = arma[1] + arma[5] * arma[3]
res <- ArimaEstimation(y - x %*% coef_reg,
arma, trarma[[1L]], trarma[[2L]],
                         max.order, TRUE)[[2]]
residuals = res
fitted_arima = error_regression - residuals
list(
fitted = fitted_arima + fitted_regression,
residuals = residuals, fitted_arima = fitted_arima,
fitted_regression = fitted_regression, residuals_regression = error_regression
)
} |
plotCount <- function(x, ...) {
x <- data.frame(x)
stopifnot(
ncol(x) == 2,
is.numeric(x[, 2]),
    isTRUE(all.equal(as.integer(x[, 2]), x[, 2])),
length(x[, 1]) == length(unique(x[, 1]))
)
p <-
ggplot2::ggplot(x) +
ggplot2::aes_string(colnames(x)[1], colnames(x)[2]) +
ggplot2::geom_bar(stat = "identity", ...)
p
} |
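# Usage sketch (not run): plotCount() wants two columns, unique labels
# and integer-valued counts; the data below are made up for
# illustration, and ... is forwarded to geom_bar().
if (FALSE) {
  counts <- data.frame(group = c("a", "b", "c"), n = c(3L, 5L, 2L))
  plotCount(counts, fill = "grey40")
}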
library(covidcast)
library(dplyr)
cli <- suppressMessages(
covidcast_signal(data_source = "fb-survey", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-07",
geo_type = "county")
)
knitr::kable(head(cli))
summary(cli)
cli <- suppressMessages(
covidcast_signal(data_source = "fb-survey", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-07",
geo_type = "state")
)
knitr::kable(head(cli))
cli <- suppressMessages(
covidcast_signal(data_source = "fb-survey", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-07",
geo_type = "county", geo_value = "42003")
)
knitr::kable(head(cli))
plot(cli, plot_type = "line",
title = "Survey results in Allegheny County, PA")
name_to_fips("Allegheny")
name_to_cbsa("Pittsburgh")
county_fips_to_name("42003")
cbsa_to_name("38300")
meta <- covidcast_meta()
knitr::kable(head(meta))
covidcast_signal(data_source = "doctor-visits", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-01",
geo_type = "state", geo_values = "pa", as_of = "2020-05-07")
covidcast_signal(data_source = "doctor-visits", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-01",
geo_type = "state", geo_values = "pa")
covidcast_signal(data_source = "doctor-visits", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-01",
geo_type = "state", geo_values = "pa",
issues = c("2020-05-01", "2020-05-15")) %>%
knitr::kable()
covidcast_signal(data_source = "doctor-visits", signal = "smoothed_cli",
start_day = "2020-05-01", end_day = "2020-05-07",
geo_type = "state", geo_values = "pa", lag = 7) %>%
knitr::kable()
covidcast_signal(data_source = "doctor-visits", signal = "smoothed_cli",
start_day = "2020-05-03", end_day = "2020-05-03",
geo_type = "state", geo_values = "pa",
issues = c("2020-05-09", "2020-05-15")) %>%
knitr::kable() |
Ohit <- function(X, y, Kn = NULL, c1 = 5, HDIC_Type = "HDBIC", c2 = 2, c3 = 2.01, intercept = TRUE){
if (!is.vector(y)) stop("y should be a vector")
if (!is.matrix(X)) stop("X should be a matrix")
n = nrow(X)
p = ncol(X)
if (n != length(y)) stop("the number of observations in y is not equal to the number of rows of X")
if (n == 1) stop("the sample size should be greater than 1")
if (is.null(Kn)) K = max(1, min(floor(c1 * sqrt(n / log(p))), p))
else{
    if ((Kn < 1) || (Kn > p)) stop(paste("Kn should be between 1 and ", p, sep = ""))
if ((Kn - floor(Kn)) != 0) stop("Kn should be a positive integer")
K = Kn
}
dy = y - mean(y)
dX = apply(X, 2, function(x) x - mean(x))
Jhat = sigma2hat = rep(0, K)
XJhat = matrix(0, n, K)
u = as.matrix(dy)
xnorms = sqrt(colSums((dX) ^ 2))
aSSE = (abs(t(u) %*% dX) / xnorms)
Jhat[1] = which.max(aSSE)
XJhat[, 1] = (dX[, Jhat[1]] / sqrt(sum((dX[, Jhat[1]]) ^ 2)))
u = u - XJhat[, 1] %*% t(XJhat[, 1]) %*% u
sigma2hat[1] = mean(u ^ 2)
if (K > 1){
for (k in 2:K) {
aSSE = (abs(t(u) %*% dX) / xnorms)
aSSE[Jhat[1:(k-1)]] = 0
Jhat[k] = which.max(aSSE)
rq = dX[, Jhat[k]] - XJhat[, 1:(k-1)] %*% t(XJhat[, 1:(k-1)]) %*% dX[, Jhat[k]]
XJhat[, k] = (rq / sqrt(sum((rq) ^ 2)))
u = u - XJhat[, k] %*% t(XJhat[, k]) %*% u
sigma2hat[k] = mean(u ^ 2)
}
}
if ((HDIC_Type != "HDAIC") & (HDIC_Type != "HDBIC") & (HDIC_Type != "HDHQ")) stop("HDIC_Type should be \"HDAIC\", \"HDBIC\" or \"HDHQ\"")
if (HDIC_Type == "HDAIC") omega_n = c2
if (HDIC_Type == "HDBIC") omega_n = log(n)
if (HDIC_Type == "HDHQ") omega_n = c3 * log(log(n))
hdic = (n * log(sigma2hat))+((1:K) * omega_n * (log(p)))
kn_hat = which.min(hdic)
benchmark = hdic[kn_hat]
J_HDIC = sort(Jhat[1:kn_hat])
J_Trim = Jhat[1:kn_hat]
trim_pos = rep(0, kn_hat)
if (kn_hat > 1){
for (l in 1:(kn_hat-1)){
JDrop1 = J_Trim[-l]
fit = lm(dy~.-1, data = data.frame(dX[, JDrop1]))
uDrop1 = fit$residuals
HDICDrop1 = (n * log(mean(uDrop1 ^ 2)))+((kn_hat - 1) * omega_n * (log(p)))
if (HDICDrop1 > benchmark) trim_pos[l] = 1
}
trim_pos[kn_hat] = 1
J_Trim = J_Trim[which(trim_pos==1)]
}
J_Trim = sort(J_Trim)
X_HDIC = as.data.frame(as.matrix(X[, J_HDIC]))
X_Trim = as.data.frame(as.matrix(X[, J_Trim]))
X = data.frame(X)
colnames(X_HDIC) = names(X)[J_HDIC]
colnames(X_Trim) = names(X)[J_Trim]
if (intercept == TRUE){
fit_HDIC = lm(y~., data = X_HDIC)
fit_Trim = lm(y~., data = X_Trim)
}else{
fit_HDIC = lm(y~.-1, data = X_HDIC)
fit_Trim = lm(y~.-1, data = X_Trim)
}
betahat_HDIC = summary(fit_HDIC)
betahat_Trim = summary(fit_Trim)
return(list("n" = n, "p" = p, "Kn" = K, "J_OGA" = Jhat, "HDIC" = hdic, "J_HDIC" = J_HDIC, "J_Trim" = J_Trim, "betahat_HDIC" = betahat_HDIC, "betahat_Trim" = betahat_Trim))
} |
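# Usage sketch (not run): a small simulated regression with p = 50
# candidate predictors of which only the first two matter; J_HDIC and
# J_Trim should typically recover columns 1 and 2.
if (FALSE) {
  set.seed(1)
  n <- 100; p <- 50
  X <- matrix(rnorm(n * p), n, p)
  y <- 3 * X[, 1] - 2 * X[, 2] + rnorm(n)
  fit <- Ohit(X, y)
  fit$J_HDIC
  fit$J_Trim
}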
require(OpenMx)
varNames <- c('x_T1','x_T2')
dataMZ <- mxData(matrix(c(1,.8,.8,1), nrow = 2, ncol=2,
dimnames = list(varNames,varNames)), type="cov", numObs=100)
dataDZ <- mxData(matrix(c(1,.5,.5,1), nrow = 2, ncol=2,
dimnames = list(varNames,varNames)), type="cov", numObs=100)
h = mxMatrix(name="h", "Full", .5, free=FALSE, nrow=1, ncol=1)
a = mxMatrix(name="a", "Full", .6, free=TRUE, labels='a_r1c1', nrow=1, ncol=1)
c = mxMatrix(name="c", "Full", .6, free=TRUE, labels='c_r1c1', nrow=1, ncol=1)
e = mxMatrix(name="e", "Full", .6, free=TRUE, labels='e_r1c1', nrow=1, ncol=1)
A = mxAlgebra(a * t(a), name="A")
C = mxAlgebra(c * t(c), name="C")
E = mxAlgebra(e * t(e), name="E")
cMZ = mxAlgebra(name="cMZ", rbind(cbind(A+C+E,A+C) ,cbind(A+C,A+C+E)))
cDZ = mxAlgebra(name="cDZ", rbind(cbind(A+C+E,h%x%A+C),cbind(h%x%A+C,A+C+E)))
objMZ <- mxExpectationNormal("cMZ", dimnames = varNames)
objDZ <- mxExpectationNormal("cDZ", dimnames = varNames)
MZ <- mxModel("MZ", dataMZ, a,c,e, A,C,E, cMZ , objMZ, mxFitFunctionML())
DZ <- mxModel("DZ", dataDZ, a,c,e, A,C,E, cDZ,h, objDZ, mxFitFunctionML())
model <- mxModel("both", MZ, DZ, mxFitFunctionMultigroup(c("MZ", "DZ")))
m1 <- mxRun(model)
summary(m1)$parameters
expectedACE <- c(.6, .2, .2) * 99/100
observedACE <- c(m1$MZ.A$result, m1$MZ.C$result, m1$MZ.E$result)
omxCheckCloseEnough(expectedACE, observedACE, epsilon = 10 ^ -4) |
source(file.path(Sys.getenv("DIRNAME"), "needs.R"))
needs(jsonlite)
run <- function(dataIn) {
input <- unname(dataIn[[1]])
.e <- as.environment(list(
path = dataIn[[2]],
    out = modifyList(list(x = NULL, auto_unbox = TRUE),
                     dataIn[[3]], keep.null = TRUE)
))
lockBinding(".e", environment())
captured <- tryCatch(capture.output({
    temp <- source(.e$path, local = TRUE)$value
}), error = function(err) err)
unlockBinding(".e", environment())
if (inherits(captured, "error")) {
msg <- conditionMessage(captured)
cat("Error in R script", .e$path, "\n", sQuote(msg), file = stderr())
    return(invisible(FALSE))
}
.e$out$x <- if (is.null(temp)) {
""
} else {
temp
}
do.call(toJSON, .e$out)
}
suppressWarnings(
run(fromJSON(Sys.getenv("input")))
) |
"Batting2016" |
psPoisson <- function(x, y, xl = min(x), xr = max(x), nseg = 10, bdeg = 3,
pord = 2, lambda = 1, wts = NULL, show = FALSE, iter = 100, xgrid = 100) {
m <- length(x)
B <- bbase(x, xl = xl, xr = xr, nseg = nseg, bdeg = bdeg)
n <- dim(B)[2]
P <- sqrt(lambda) * diff(diag(n), diff = pord)
nix <- rep(0, n - pord)
z <- log(y + 0.01)
  if (is.null(wts)) {  # missing() would skip an explicitly passed NULL
wts <- rep(1, m)
}
for (it in 1:iter) {
mu <- exp(z)
w <- mu
u <- (y - mu) / w + z
wtprod <- c(wts * w, (nix + 1))
f <- lsfit(rbind(B, P), c(u, nix), intercept = FALSE, wt = wtprod)
beta <- f$coef
znew <- B %*% beta
dz <- max(abs(z - znew))
z <- znew
if (dz < 1e-06) {
break
}
if (show) {
print(c(it, dz))
}
}
if (it > (iter - 1)) {
cat(paste("Did NOT converge, iter >", iter))
warning(paste("Did NOT converge"))
}
dev <- 2 * sum(y * log((y + 1e-09) / mu))
qr <- f$qr
h <- hat(qr)[1:m]
ed <- sum(h)
aic <- dev + 2 * ed
dispersion <- dev / (m - ed)
if (length(xgrid) == 1) {
xgrid <- seq(xl, xr, length = xgrid)
}
Bu <- bbase(xgrid, xl = xl, xr = xr, nseg = nseg, bdeg = bdeg)
zu <- Bu %*% beta
ygrid <- zu
mugrid <- exp(zu)
R <- qr.R(qr)
L <- forwardsolve(t(R), t(Bu))
v2 <- colSums(L * L)
se_eta <- sqrt(v2)
pp <- list(
xl = xl, xr = xr, aic = aic, x = x, y = y, B = B, P = P,
muhat = mu, nseg = nseg, bdeg = bdeg, dev = dev,
pord = pord, pcoef = beta, lambda = lambda,
    effdim = ed, dispersion = dispersion,
family = "poisson", link = "log", wts = wts,
xgrid = xgrid, ygrid = ygrid, mugrid = mugrid, se_eta = se_eta
)
class(pp) <- "pspfit"
return(pp)
} |
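# Usage sketch (not run): psPoisson() needs the B-spline basis builder
# bbase() on the search path (it is not defined here); the data below
# are simulated for illustration.
if (FALSE) {
  set.seed(2)
  x <- seq(0, 1, length = 200)
  y <- rpois(200, lambda = exp(1 + sin(2 * pi * x)))
  fit <- psPoisson(x, y, nseg = 20, lambda = 0.1)
  plot(x, y)
  lines(fit$xgrid, fit$mugrid, lwd = 2)
}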
context("substituting_formla")
test_that("error checks work", {
expect_error(
as_substituting_formula(a~b, 1),
regexp="`substitutions` must be a list",
fixed=TRUE
)
expect_error(
substituting_formula(a~b, b~1, b~2),
regexp="The left hand side of substitution 1 and 2 are identical and no left hand sides may match",
fixed=TRUE
)
expect_error(
substituting_formula(a~b, ~1, b~2),
regexp="All substitution formulae must be 2-sided",
fixed=TRUE
)
expect_equal(
substituting_formula(a~b, b~1, c~2),
structure(
list(
base=a~b,
substitutions=list(b~1, c~2)
),
class="substituting_formula"
)
)
})
test_that("formula are created correctly", {
expect_equal(
formula(substituting_formula(a~b, b~c*d, d~e+f)),
a~c*(e+f)
)
expect_equal(
as.formula(substituting_formula(a~b, b~c*d, d~e+f), env=emptyenv()),
`environment<-`(a~c*(e+f), emptyenv())
)
}) |
test_that("unSurv, colnames default", {
df1 <- dataCensored[dataCensored$station=="CB3.3C","chla"][1:30]
df2 <- unSurv(df1)
qc_col <- c("lo", "hi", "type")
expect_equal(colnames(df2), qc_col)
})
test_that("unSurv, colnames user", {
df1 <- dataCensored[dataCensored$station=="CB3.3C","chla"][1:30]
df3 <- unSurv(df1, "LOW", "HIGH")
qc_col <- c("LOW", "HIGH", "type")
expect_equal(colnames(df3), qc_col)
}) |
addtable2plot<-function(x,y=NULL,table,lwd=par("lwd"),bty="n",
bg=par("bg"),cex=1,xjust=0,yjust=1,xpad=0.1,ypad=0.5,box.col=par("fg"),
text.col=par("fg"),display.colnames=TRUE,display.rownames=FALSE,
hlines=FALSE,vlines=FALSE,title=NULL) {
if(dev.cur() == 1)
stop("Cannot add table unless a graphics device is open")
if(is.null(y)) {
if(is.character(x)) {
tablepos<-get.tablepos(x)
x<-tablepos$x
y<-tablepos$y
xjust<-tablepos$xjust
yjust<-tablepos$yjust
}
else {
if(is.null(x$y)) stop("both x and y coordinates must be given")
y<-x$y
x<-x$x
}
}
droptop<-ifelse(any(c("topleft","top","topright") %in% x),1,0)
tabdim<-dim(table)
if(tabdim[1] == 1) hlines<-FALSE
if(tabdim[2] == 1) vlines<-FALSE
if(is.null(dim(bg))) bg<-matrix(bg,nrow=tabdim[1],ncol=tabdim[2])
column.names<-colnames(table)
if(is.null(column.names) && display.colnames)
column.names<-1:tabdim[2]
row.names<-rownames(table)
if(is.null(row.names) && display.rownames)
row.names<-1:tabdim[1]
if(par("xlog")) x<-log10(x)
cellwidth<-rep(0,tabdim[2])
if(display.colnames) {
for(column in 1:tabdim[2])
cellwidth[column]<-max(strwidth(c(column.names[column],
format(table[,column])),cex=cex))*(1+xpad)
nvcells<-tabdim[1]+1
}
else {
nvcells<-tabdim[1]
for(column in 1:tabdim[2])
cellwidth[column]<-max(strwidth(format(table[,column]),cex=cex))*(1+xpad)
}
if(display.rownames) {
nhcells<-tabdim[2]+1
rowname.width<-max(strwidth(row.names,cex=cex))*(1+xpad)
}
else {
nhcells<-tabdim[2]
rowname.width<-0
}
if(par("ylog")) y<-log10(y)
cellheight<-
max(strheight(c(column.names,row.names,as.vector(unlist(table))),
cex=cex))*(1+ypad)
if(!is.null(title) & droptop) y<-y-cellheight
ytop<-y+yjust*nvcells*cellheight
oldpar<-par(xlog=FALSE,ylog=FALSE,xpd=TRUE)
if(display.colnames) {
xleft<-x+display.rownames*rowname.width-xjust*(sum(cellwidth)+rowname.width)
for(column in 1:tabdim[2]) {
text(xleft+cellwidth[column]*0.5,
ytop-0.5*cellheight,column.names[column],cex=cex,col=text.col)
xleft<-xleft+cellwidth[column]
}
}
for(row in 1:tabdim[1]) {
xleft<-x-xjust*(sum(cellwidth)+rowname.width)
if(display.rownames) {
text(xleft+0.5*rowname.width,
ytop-(row+display.colnames-0.5)*cellheight,
row.names[row],cex=cex,col=text.col)
xleft<-xleft+rowname.width
}
for(column in 1:tabdim[2]) {
rect(xleft,ytop-(row+display.colnames-1)*cellheight,
xleft+cellwidth[column],ytop-(row+display.colnames)*cellheight,
col=bg[row,column],border=bg[row,column])
text(xleft+0.5*cellwidth[column],
ytop-(row+display.colnames-0.5)*cellheight,
table[row,column],cex=cex,col=text.col)
xleft<-xleft+cellwidth[column]
}
}
if(vlines) {
xleft<-x+display.rownames*rowname.width-xjust*(sum(cellwidth)+rowname.width)
segments(xleft+cumsum(cellwidth[-tabdim[2]]),
ytop-display.colnames*cellheight,
xleft+cumsum(cellwidth[-tabdim[2]]),
ytop-(display.colnames+tabdim[1])*cellheight)
}
if(hlines) {
xleft<-x+display.rownames*rowname.width-xjust*(sum(cellwidth)+rowname.width)
segments(xleft,
ytop-display.colnames*cellheight-cumsum(rep(cellheight,tabdim[1]-1)),
xleft+sum(cellwidth),
ytop-display.colnames*cellheight-cumsum(rep(cellheight,tabdim[1]-1)))
}
if(!is.null(title)) {
xleft<-x-xjust*(sum(cellwidth)+rowname.width)
text(xleft+(rowname.width+sum(cellwidth))/2,ytop+cellheight/2,title,
cex=cex,col=text.col)
}
if(bty == "o") {
xleft<-x+display.rownames*rowname.width-xjust*(sum(cellwidth)+rowname.width)
rect(xleft,ytop-(tabdim[1]+display.colnames)*cellheight,
xleft+sum(cellwidth),ytop-display.colnames*cellheight)
}
par(oldpar)
} |
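# Usage sketch (not run): a table drawn at a keyword position; assumes
# the companion helper get.tablepos() used above is on the search path,
# and an open plot to draw into.
if (FALSE) {
  plot(1:10, type = "n")
  addtable2plot("topleft", table = head(mtcars[, 1:3]),
                display.rownames = TRUE, title = "mtcars (head)")
}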
covEstimation <- function(rets, control = list()) {
if (missing(rets)) {
stop("rets is missing")
}
if (!is.matrix(rets)) {
stop("rets must be a (T x N) matrix")
}
ctr <- .ctrCov(control)
if (ctr$type[1] == "naive") {
Sigma <- .naiveCov(rets = rets)
} else if (ctr$type[1] == "ewma") {
Sigma <- .ewmaCov(rets = rets, lambda = ctr$lambda)
} else if (ctr$type[1] == "lw") {
Sigma <- .lwCov(rets = rets)
} else if (ctr$type[1] == "factor") {
Sigma <- .factorCov(rets = rets, K = ctr$K)
} else if (ctr$type[1] == "const") {
Sigma <- .constCov(rets = rets)
} else if (ctr$type[1] == "cor") {
Sigma <- .corCov(rets = rets)
} else if (ctr$type[1] == "oneparm") {
Sigma <- .oneparmCov(rets = rets)
} else if (ctr$type[1] == "diag") {
Sigma <- .diagCov(rets = rets)
} else if (ctr$type[1] == "large") {
Sigma <- .largeCov(rets = rets)
} else if (ctr$type[1] == "bs") {
Sigma <- .bsCov(rets = rets)
} else {
stop("control$type is not well defined")
}
return(Sigma)
}
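# Usage sketch (not run): every estimator takes a T x N matrix of
# returns; the returns below are simulated for illustration.
if (FALSE) {
  set.seed(3)
  rets <- matrix(rnorm(60 * 4, sd = 0.01), nrow = 60, ncol = 4)
  covEstimation(rets)                                  # sample covariance
  covEstimation(rets, control = list(type = "lw"))     # Ledoit-Wolf shrinkage
  covEstimation(rets, control = list(type = "ewma", lambda = 0.94))
}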
.ctrCov <- function(control = list()) {
if (!is.list(control)) {
stop("control must be a list")
}
if (length(control) == 0) {
control <- list(type = "naive", lambda = 0.94, K = 1)
}
nam <- names(control)
if (!("type" %in% nam) || is.null(control$type)) {
control$type <- c("naive", "ewma", "lw", "factor", "rtm", "const",
"cor", "oneparm", "diag", "large", "bs")
}
if (!("lambda" %in% nam) || is.null(control$lambda)) {
control$lambda <- 0.94
}
if (!("K" %in% nam) || is.null(control$K)) {
control$K <- 1
}
return(control)
}
.naiveCov <- function(rets) {
Sigma <- cov(rets)
return(Sigma)
}
.ewmaCov <- function(rets, lambda) {
t <- nrow(rets)
Sigma <- cov(rets)
mu <- colMeans(rets)
shiftRets <- sweep(rets, 2, mu, "-")
for (i in 1:t) {
r <- as.double(shiftRets[i, ])
r2 <- outer(r, r)
Sigma <- (1 - lambda)/(1 - lambda^t) * r2 + lambda * Sigma
}
return(Sigma)
}
.constCov <- function(rets) {
n <- dim(rets)[2]
tmpMat <- matrix(rep(1, n^2), ncol = n)
rho <- mean(cor(rets)[lower.tri(tmpMat, diag = FALSE)])
R <- rho * tmpMat
diag(R) <- 1
std <- apply(rets, 2, sd)
diagStd <- diag(std)
Sigma <- diagStd %*% R %*% diagStd
return(Sigma)
}
.factorCov <- function(rets, K) {
  std <- apply(rets, 2, sd)
  fa <- factanal(rets, K)  # fit once and reuse loadings and uniquenesses
  R <- tcrossprod(fa$loadings) + diag(fa$uniquenesses)
diagStd <- diag(std)
Sigma <- diagStd %*% R %*% diagStd
return(Sigma)
}
.lwCovElement <- function(rets, type) {
t <- dim(rets)[1]
n <- dim(rets)[2]
mu <- colMeans(rets)
shiftRets <- sweep(rets, 2, mu, "-")
y <- shiftRets^2
if (type == "large" || type == "lw") {
mkt <- rowMeans(shiftRets)
smple <- cov(cbind(rets, mkt)) * (t - 1)/t
covmkt <- smple[1:n, n + 1]
covmkt_ <- matrix(rep(covmkt, n), ncol = n, byrow = FALSE)
varmkt <- as.numeric(smple[n + 1, n + 1])
smple <- smple[-(n + 1), -(n + 1)]
prior <- outer(covmkt, covmkt)/varmkt
diag(prior) <- diag(smple)
lwCovElement <- list(t = t, n = n, mu = mu, shiftRets = shiftRets,
y = y, smple = smple, mkt = mkt, covmkt = covmkt, covmkt_ = covmkt_,
varmkt = varmkt, prior = prior)
} else {
smple <- (1/t) * crossprod(shiftRets)
lwCovElement <- list(t = t, n = n, mu = mu, shiftRets = shiftRets,
y = y, smple = smple, mkt = NULL, covmkt = NULL, covmkt_ = NULL,
varmkt = NULL, prior = NULL)
}
return(lwCovElement)
}
.lwCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "lw")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- lwCovElement$mu
shiftRets <- lwCovElement$shiftRets
mkt <- lwCovElement$mkt
covmkt <- lwCovElement$covmkt
varmkt <- lwCovElement$varmkt
smple <- lwCovElement$smple
prior <- lwCovElement$prior
y <- lwCovElement$y
z <- sweep(shiftRets, 1, mkt, "*")
phiMat <- crossprod(y)/t - 2 * crossprod(shiftRets) * smple/t + smple^2
phi <- sum(apply(phiMat, 2, sum))
rhoMat1 <- 1/t * sweep(crossprod(y, z), 2, covmkt, "*")/varmkt
rhoMat3 <- 1/t * crossprod(z) * outer(covmkt, covmkt)/varmkt^2
rhoMat <- 2 * rhoMat1 - rhoMat3 - prior * smple
diag(rhoMat) <- diag(phiMat)
rho <- sum(apply(rhoMat, 2, sum))
gamma <- norm(smple - prior, "F")^2
kappa <- (phi - rho)/gamma
shrinkage <- pmax(0, pmin(1, kappa/t))
Sigma <- shrinkage * prior + (1 - shrinkage) * smple
return(Sigma)
}
.largeCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "large")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- lwCovElement$mu
shiftRets <- lwCovElement$shiftRets
y <- lwCovElement$y
mkt <- lwCovElement$mkt
covmkt <- lwCovElement$covmkt
covmkt_ <- lwCovElement$covmkt_
varmkt <- lwCovElement$varmkt
smple <- lwCovElement$smple
prior <- lwCovElement$prior
z <- sweep(shiftRets, 1, mkt, "*")
d <- 1/n * norm(smple - prior, "F")^2
r2 <- 1/n/t^2 * sum(apply(crossprod(y, y), 2, sum)) - 1/n/t * sum(apply(smple^2,
2, sum))
phidiag <- 1/n/t^2 * sum(apply(y^2, 2, sum)) - 1/n/t * sum(diag(smple)^2)
v1 <- 1/t^2 * crossprod(y, z) - 1/t * covmkt_ * smple
phioff1 <- 1/n * sum(apply(v1 * t(covmkt_), 2, sum))/varmkt - 1/n *
sum(diag(v1) * covmkt)/varmkt
v3 <- 1/t^2 * crossprod(z, z) - 1/t * varmkt * smple
phioff3 <- 1/n * sum(apply(v3 * tcrossprod(covmkt, covmkt), 2, sum))/varmkt^2 -
1/n * sum(diag(v3) * covmkt^2)/varmkt^2
phioff <- 2 * phioff1 - phioff3
phi <- phidiag + phioff
Sigma <- (r2 - phi)/d * prior + (1 - (r2 - phi)/d) * smple
return(Sigma)
}
.corCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "cor")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- lwCovElement$mu
shiftRets <- lwCovElement$shiftRets
y <- lwCovElement$y
smple <- lwCovElement$smple
var <- diag(smple)
sqrtvar <- sqrt(var)
outerSqrtVar <- outer(sqrtvar, sqrtvar)
rBar <- (sum(sum(smple/outerSqrtVar)) - n)/(n * (n - 1))
prior <- rBar * outerSqrtVar
diag(prior) <- var
phiMat <- crossprod(y)/t - 2 * crossprod(shiftRets) * smple/t + smple^2
phi <- sum(apply(phiMat, 2, sum))
term1 <- crossprod(shiftRets^3, shiftRets)/t
term2 <- sweep(smple, 1, diag(smple), "*")
rhoMat <- term1 - term2
diag(rhoMat) <- 0
rho <- sum(diag(phiMat)) + rBar * sum(sum(outer(1/sqrtvar, sqrtvar) *
rhoMat))
gamma <- norm(smple - prior, type = "F")^2
kappa <- (phi - rho)/gamma
shrinkage <- pmax(0, pmin(1, kappa/t))
Sigma <- shrinkage * prior + (1 - shrinkage) * smple
return(Sigma)
}
.diagCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "diag")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- lwCovElement$mu
shiftRets <- lwCovElement$shiftRets
y <- lwCovElement$y
smple <- lwCovElement$smple
prior <- diag(diag(smple))
phiMat <- crossprod(y)/t - 2 * (crossprod(shiftRets)) * smple/t + smple^2
phi <- sum(apply(phiMat, 2, sum))
rho <- sum(diag(phiMat))
gamma <- norm(smple - prior, "F")^2
kappa <- (phi - rho)/gamma
shrinkage <- pmax(0, pmin(1, kappa/t))
Sigma <- shrinkage * prior + (1 - shrinkage) * smple
return(Sigma)
}
.oneparmCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "oneparm")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- lwCovElement$mu
shiftRets <- lwCovElement$shiftRets
smple <- lwCovElement$smple
y <- lwCovElement$y
meanvar <- mean(diag(smple))
prior <- meanvar * diag(n)
phiMat <- crossprod(y)/t - 2 * (crossprod(shiftRets)) * smple/t + smple^2
phi <- sum(apply(phiMat, 2, sum))
gamma <- norm(smple - prior, type = "F")^2
kappa <- phi/gamma
shrinkage <- pmax(0, pmin(1, kappa/t))
Sigma <- shrinkage * prior + (1 - shrinkage) * smple
return(Sigma)
}
.bsCov <- function(rets) {
lwCovElement <- .lwCovElement(rets, type = "lw")
t <- lwCovElement$t
n <- lwCovElement$n
mu <- colMeans(rets)
Sigma <- cov(rets)
invSigma <- solve(Sigma)
i <- rep(1, n)
invSigmai <- crossprod(invSigma, i)
w_min <- (invSigmai)/as.numeric(crossprod(i, invSigmai))
mu_min <- crossprod(mu, w_min)
invSigmaMu <- crossprod(invSigma, mu - mu_min)
phi <- (n + 2)/((n + 2) + t * crossprod(mu - mu_min, invSigmaMu))
phi <- max(min(phi, 1), 0)
tau <- t * phi/(1 - phi)
Sigma <- Sigma * (1 + 1/(t + tau)) +
tau/(t * (t + 1 + tau)) * outer(i, i)/as.numeric(crossprod(i, invSigmai))
return(Sigma)
} |
context("size_factor")
test_that("size factor works", {
m <- matrix(1:16, ncol=4)
expect_error(estimateSizeFactorsForMatrix(m, geoMeans=1:5))
expect_error(estimateSizeFactorsForMatrix(m, geoMeans=rep(0,4)))
expect_error(estimateSizeFactorsForMatrix(m, controlGenes="foo"))
estimateSizeFactorsForMatrix(m, geoMeans=1:4)
estimateSizeFactorsForMatrix(m, controlGenes=1:2)
nm <- m / exp(rowMeans(log(m)))
true.sf <- c(2,1,1,.5)
counts <- sweep(2*m, 2, true.sf, "*")
dds <- DESeqDataSetFromMatrix(counts, data.frame(x=1:4), ~1)
dds <- estimateSizeFactors(dds, normMatrix=nm)
expect_equal((normalizationFactors(dds)/nm)[1,], true.sf)
set.seed(1)
true.sf <- 2^(rep(c(-2,-1,0,0,1,2),each=2))
dmr <- function(x) 0.01
dds <- makeExampleDESeqDataSet(sizeFactors=true.sf, n=100, dispMeanRel=dmr)
cts <- counts(dds)
idx <- cbind(seq_len(nrow(cts)), sample(ncol(dds), nrow(cts), replace=TRUE))
cts[idx] <- 0L
cts[1,1] <- 1000000L
counts(dds) <- cts
dds <- estimateSizeFactors(dds, type="poscounts")
sf <- sizeFactors(dds)
coefs <- coef(lm(sf ~ true.sf))
expect_true(abs(coefs[1]) < .1)
expect_true(abs(coefs[2] - 1) < .1)
dds <- estimateSizeFactors(dds, type="iterate")
sf <- sizeFactors(dds)
coefs <- coef(lm(sf ~ true.sf))
expect_true(abs(coefs[1]) < .1)
expect_true(abs(coefs[2] - 1) < .1)
}) |
on.convex.hull<-function(tri.obj,x,y)
{
if(!inherits(tri.obj,"tri"))
stop("tri.obj must be of class \"tri\"")
if(length(x)!=length(y))
stop("x and y must be of same length")
n<-length(x)
if(n==0)
stop("length of x (resp. y) is 0")
ans<-.Fortran("onhull",
as.double(x),
as.double(y),
as.integer(n),
as.double(tri.obj$x),
as.double(tri.obj$y),
as.integer(tri.obj$n),
as.integer(tri.obj$tlist),
as.integer(tri.obj$tlptr),
as.integer(tri.obj$tlend),
onhull=logical(n),
eps=as.double(1E-15),
PACKAGE = "tripack")
ans$onhull
} |
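# Usage sketch (not run): assumes tripack's tri.mesh() to build the
# triangulation object; the result is one logical per queried point.
if (FALSE) {
  set.seed(4)
  tr <- tri.mesh(runif(20), runif(20))
  on.convex.hull(tr, c(0.5, 2), c(0.5, 2))
}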
belex <- function(ticker, from = NULL, to = NULL)
{
w <- getOption("warn")
options(warn = -1)
ticker <- tolower(ticker)
urlHTML <- function(ticker)
{
if (ticker == "belex15")
{
url.I <- "http://www.belex.rs/trgovanje/indeksi/"
url.II <-"/istorijski/3y"
url.ticker <- ticker
url <- paste(url.I, url.ticker, url.II, sep="")
return(url)
}
else
{
url.I <- "http://www.belex.rs/trgovanje/istorijski/"
url.II <-"/3y"
url.ticker <- ticker
url <- paste(url.I, url.ticker, url.II, sep="")
return(url)
}
}
urlCSV <- function(ticker)
{
if (ticker == "belex15")
{
url.csv.I <- "http://www.belex.rs/xml/istorijski_indeks_csv.php?simbol="
url.csv.ticker <- toupper(ticker)
url.csv.II <- "&lang=srb"
url.csv <- paste(url.csv.I, url.csv.ticker, url.csv.II, sep="")
return(url.csv)
}
else
{
url.csv.I <- "http://www.belex.rs/xml/istorijski_csv.php?simbol="
url.csv.ticker <- ticker
url.csv.II <- "&lang=srb"
url.csv <- paste(url.csv.I, url.csv.ticker, url.csv.II, sep="")
return(url.csv)
}
}
dataHTML <- function(url.HTML)
{
if (ticker=="belex15")
{
tables <- XML::readHTMLTable(url.HTML)
data.fac <- tables[[2]]
data.mat <- as.matrix(data.fac)
data.mat[ ,2:8] <- gsub("[.]","", data.mat[ ,2:8])
data.mat[ ,2:8] <- gsub("[,]",".", data.mat[ ,2:8])
data.mat <- cbind(data.mat[ ,1:2], data.mat[ ,5:8])
colnames(data.mat) <- c("Date", "Close", "Open", "Low", "High", "Volume")
data.mat.num <- apply(data.mat[ ,2:6], 2,as.numeric)
Date <- as.Date(data.mat[ ,1], format='%d.%m.%Y')
data.final <- data.frame(Date, data.mat.num)
bool <- data.final$Date > "2012-12-31"
data.final <- data.final[bool, 1:6]
return(data.final)
}
else
{
tables <- XML::readHTMLTable(url.HTML)
data.fac <- tables[[2]]
data.mat <- as.matrix(data.fac)
data.mat[ ,2:14] <- gsub("[.]","", data.mat[ ,2:14])
data.mat <- data.mat[,-c(3,4,10,13,14)]
data.mat.num <- apply(data.mat[ ,2:9], 2,as.numeric)
Date <- as.Date(data.mat[ ,1], format='%d.%m.%Y')
data.final <- data.frame(Date, data.mat.num)
bool <- data.final$Date > "2012-12-31"
data.final <- data.final[bool, ]
colnames(data.final) <- c("Date", "Close", "Transactions", "Volume", "Open", "Low", "High","Total Bid","Total Ask")
return(data.final)
}
}
dataCSV <- function(url.CSV)
{
if (ticker=="belex15")
{
data.csv <- read.csv(url.CSV, header=TRUE, sep=";")
data.csv.mat <- as.matrix(data.csv)
data.csv.mat[ ,2:8] <- gsub("[.]","", data.csv.mat[ ,2:8])
data.csv.mat[ ,2:8] <- gsub("[,]",".", data.csv.mat[ ,2:8])
data.csv.mat <- cbind(data.csv.mat[,1:2], data.csv.mat[ ,4:7])
data.csv.mat.num <- apply(data.csv.mat[ ,2:6], 2,as.numeric)
Date <- as.Date(data.csv.mat[ ,1], format='%d.%m.%Y')
data.csv.final <- data.frame(Date, data.csv.mat.num)
colnames(data.csv.final) <- c("Date", "Close", "Open", "Low", "High", "Volume")
return(data.csv.final)
}
else
{
data.csv <- read.csv(url.CSV, header=TRUE, sep=";")
data.csv.mat <- as.matrix(data.csv)
data.csv.mat[ ,2:12] <- gsub(",00","", data.csv.mat[ ,2:12])
data.csv.mat[ ,2:12] <- gsub("[.]00","", data.csv.mat[ ,2:12])
data.csv.mat[ ,2:12] <- gsub("[.]","", data.csv.mat[ ,2:12])
data.csv.mat.num <- apply(data.csv.mat[ ,2:12], 2,as.numeric)
Date <- as.Date(data.csv.mat[,1], format='%d.%m.%Y')
data.csv.final <- data.frame(Date, data.csv.mat.num[,-c(2,10,11)])
colnames(data.csv.final) <- c("Date", "Close", "Transactions", "Volume", "Open", "Low", "High", "Total Bid","Total Ask")
return(data.csv.final)
}
}
dataMerge <- function(data.HTML, data.CSV)
{
data.merge <- rbind(data.HTML, data.CSV)
data.merge <- data.merge[with(data.merge, order(Date)), ]
return(data.merge)
}
removeNA <- function(data.merge)
{
close.na <- is.na(data.merge$Close)
datum.na <- is.na(data.merge[ ,1])
data.merge <- data.merge[!close.na, 1:ncol(data.merge)]
data.merge <- data.merge[!datum.na, 1:ncol(data.merge)]
return(data.merge)
}
  dataSample <- function(data.merge, from, to)
  {
    # Restrict to the requested date window (open-ended when from/to is
    # NULL), then drop rows with missing dates or closing prices.
    if (!is.null(from))
      data.merge <- data.merge[data.merge[ ,1] >= from, ]
    if (!is.null(to))
      data.merge <- data.merge[data.merge[ ,1] <= to, ]
    data.merge <- removeNA(data.merge)
    return(data.merge)
  }
url.HTML <- urlHTML(ticker)
url.CSV <- urlCSV(ticker)
data.HTML <- dataHTML(url.HTML)
data.CSV <- dataCSV(url.CSV)
data.Merge <- dataMerge(data.HTML, data.CSV)
data.Sample <- dataSample(data.Merge, from, to)
data.Sample <- data.Sample[!duplicated(data.Sample$Date), ]
row.names(data.Sample) <- 1:dim(data.Sample)[1]
cat("Download Complete!\n")
cat("\nTicker:", toupper(ticker),"\n")
cat("From:", as.character(data.Sample$Date[1]),"\n")
cat("To:", as.character(data.Sample$Date[dim(data.Sample)[1]]),"\n")
cat("No rows:", dim(data.Sample)[1],"\n")
data <- list(ticker = toupper(ticker),
from = data.Sample$Date[1],
to = data.Sample$Date[dim(data.Sample)[1]],
nrows = dim(data.Sample)[1],
data = data.Sample)
options(warn = w)
return(data)
} |
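# Usage sketch (not run): downloads Belgrade Stock Exchange data, so it
# needs network access; "belex15" is the index series handled specially
# above.
if (FALSE) {
  b15 <- belex("belex15", from = "2013-01-01", to = "2013-06-30")
  head(b15$data)
}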
context("Testing the phylogenetic diversity measures")
test_that("Answers match up with Leinster-Cobbold Appendix A", {
tree <- ape::read.tree(text = "(A:2,B:2)R:1;")
partition <- c(0.6, 0.4)
names(partition) <- tree$tip.label
partition <- check_partition(partition)
ps <- phy_struct(tree, partition)
structure_matrix <- ps$structure
T_bar <- ps$tbar
similarity <- phy2branch(tree, partition)
meta <- metacommunity(partition, similarity)
expect_equivalent(sum(tree$edge.length) + tree$root.edge,
unlist(metadiv(raw_gamma(meta), 0)$diversity * T_bar))
expect_equivalent(c(meta@type_abundance),
c(0.4, 0.2, (2 / 3) * 0.4, (1 / 3) * 0.4))
expect_equivalent(meta@similarity,
rbind(c(1, 1, 0, 0), rep(1, 4), c(0, 0, 1, 1), rep(1, 4)))
tree <- ape::read.tree(text = "(A:1,B:2)R:1;")
partition <- c(0.6, 0.4)
names(partition) <- tree$tip.label
partition <- check_partition(partition)
ps <- phy_struct(tree, partition)
structure_matrix <- ps$structure
T_bar <- ps$tbar
similarity <- phy2branch(tree, partition)
meta <- metacommunity(partition, similarity)
expect_equivalent(sum(tree$edge.length) + tree$root.edge,
unlist(metadiv(raw_gamma(meta), 0)$diversity * T_bar))
expect_equivalent(c(meta@type_abundance),
c(0.25, 0.25, (2 / 2.4) * 0.4, (1 / 2.4) * 0.4))
expect_equivalent(meta@similarity,
rbind(c(1.2, 1.2, 0, 0), c(1.2, 1.2, 0.8, 0.8),
c(0, 0, 0.8, 0.8), c(1.2, 1.2, 0.8, 0.8)))
})
test_that("pmatrix is correct when tips belong to the same subcommunities", {
tree <- ape::read.tree(text="(A:2,B:2)R:1;")
partition <- cbind(A = c(1, 1), B = c(1, 0))
partition <- partition / sum(partition)
row.names(partition) <- tree$tip.label
similarity <- phy2branch(tree, partition)
meta <- metacommunity(partition, similarity)
expect_equivalent(meta@type_abundance,
cbind(A = c(2, 1, 2, 1), B = c(2, 1, 0, 0)) / 9)
tree2 <- ape::rtree(10)
partition2 <- cbind(A = sample(10), B = sample(10))
partition2 <- partition2 / sum(partition2)
row.names(partition2) <- tree2$tip.label
similarity2 <- phy2branch(tree2, partition2)
meta2 <- metacommunity(partition2, similarity2)
expect_equivalent(sum(meta2@type_abundance), 1)
}) |
netstat <- function() {
os <- Sys.info()['sysname']
switch(os,
Windows = {
system("netstat -n -a", intern = TRUE)
},
{
system("netstat -n -a", intern = TRUE)
}
)
} |
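# Usage sketch (not run): output is the raw, platform-dependent text of
# `netstat -n -a`.
if (FALSE) {
  head(netstat())
}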
context("classif.matrix")
data(centaurea)
centaurea = suppressWarnings(naMeanSubst(centaurea))
centaurea = deletePopulation(centaurea, populationName = c("LIP", "PREL"))
trainingSet = deletePopulation(centaurea, populationName = "SOK")
SOK = keepPopulation(centaurea, populationName = "SOK")
test_that("correct input", {
c = suppressWarnings(classif.lda(centaurea))
expect_error(classif.matrix(c, level = "ds"), "Invalid level of grouping. Consider using \"taxon\", \"pop\" or \"indiv\"")
})
test_that("classif.lda", {
c = suppressWarnings(classif.lda(centaurea))
m = classif.matrix(c, level = "taxon")
expect_equal(paste(colnames(m), collapse = ","), "Taxon,N,as.hybr,as.ph,as.ps,as.st,correct,correct[%]")
expect_equal(paste(rownames(m), collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1], collapse = ","), "hybr,ph,ps,st,Total")
m = classif.matrix(c, level = "pop")
expect_equal(paste(colnames(m), collapse = ","), "Population,Taxon,N,as.hybr,as.ph,as.ps,as.st,correct,correct[%]")
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "BABL,BABU,BOL,BRT,BUK")
expect_equal(paste(m[,1][30:32], collapse = ","), "VIT,VOL,Total")
m = classif.matrix(c, level = "indiv")
expect_equal(paste(colnames(m), collapse = ","), "ID,Population,Taxon,classification,as.hybr,as.ph,as.ps,as.st,correct")
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "RTE1,RTE2,RTE3,RTE4,RTE5" )
expect_equal(m[,1][612], "KOT2295")
})
test_that("classif.knn", {
c = suppressWarnings(classif.knn(centaurea, k = 6))
m = classif.matrix(c, level = "taxon")
expect_equal(paste(colnames(m), collapse = ","), "Taxon,N,as.hybr,as.ph,as.ps,as.st,correct,correct[%]")
expect_equal(paste(rownames(m), collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1], collapse = ","), "hybr,ph,ps,st,Total")
m = classif.matrix(c, level = "pop")
expect_equal(paste(colnames(m), collapse = ","), "Population,Taxon,N,as.hybr,as.ph,as.ps,as.st,correct,correct[%]")
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "BABL,BABU,BOL,BRT,BUK" )
expect_equal(paste(m[,1][30:32], collapse = ","), "VIT,VOL,Total")
m = classif.matrix(c, level = "indiv")
expect_equal(paste(colnames(m), collapse = ","), "ID,Population,Taxon,classification,Proportion.of.the.votes.for.the.winning.class,correct")
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "RTE1,RTE2,RTE3,RTE4,RTE5" )
expect_equal(paste(m[,1][612], collapse = ","), "KOT2295")
})
test_that("classifSamp.lda", {
c = suppressWarnings(classifSample.lda(SOK, trainingSet))
m = classif.matrix(c, level = "taxon")
expect_equal(paste(colnames(m), collapse = ","), "Taxon,N,as.hybr,as.ph,as.ps,as.st")
expect_equal(paste(rownames(m), collapse = ","), "1,2")
expect_equal(paste(m[,1], collapse = ","), "ps,Total")
m = classif.matrix(c, level = "pop")
expect_equal(paste(colnames(m), collapse = ","), "Population,Taxon,N,as.hybr,as.ph,as.ps,as.st")
expect_equal(paste(rownames(m), collapse = ","), "1,2")
expect_equal(paste(m[,1], collapse = ","), "SOK,Total" )
m = classif.matrix(c, level = "indiv")
expect_equal(paste(colnames(m), collapse = ","), "ID,Population,Taxon,classification,as.hybr,as.ph,as.ps,as.st" )
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "SOK388,SOK389,SOK390,SOK391,SOK392")
})
test_that("classifSamp.knn", {
c = suppressWarnings(classifSample.knn(SOK, trainingSet, k = 1))
m = classif.matrix(c, level = "taxon")
expect_equal(paste(colnames(m), collapse = ","), "Taxon,N,as.hybr,as.ph,as.ps,as.st")
expect_equal(paste(rownames(m), collapse = ","), "1,2")
expect_equal(paste(m[,1], collapse = ","), "ps,Total")
m = classif.matrix(c, level = "pop")
expect_equal(paste(colnames(m), collapse = ","), "Population,Taxon,N,as.hybr,as.ph,as.ps,as.st" )
expect_equal(paste(rownames(m), collapse = ","), "1,2")
expect_equal(paste(m[,1], collapse = ","), "SOK,Total" )
m = classif.matrix(c, level = "indiv")
expect_equal(paste(colnames(m), collapse = ","), "ID,Population,Taxon,classification,Proportion.of.the.votes.for.the.winning.class" )
expect_equal(paste(rownames(m)[1:5], collapse = ","), "1,2,3,4,5")
expect_equal(paste(m[,1][1:5], collapse = ","), "SOK388,SOK389,SOK390,SOK391,SOK392")
}) |
library(skynet)
context("Test Download T100")
test_that("Download T100", {
skip_on_cran()
download_t100(2011, "seg")
expect_output(str(nrow(T100_2011_seg)), "250828")
expect_length(T100_2011_seg, 17)
})
test_that("Download T100", {
skip_on_cran()
download_t100(2011, "mkt")
expect_output(str(nrow(T100_2011_mkt)), "194371")
expect_length(T100_2011_mkt, 13)
}) |
checkPairwise = function(x, plot = TRUE, labels = FALSE, LRthreshold = 1000, ...) {
kEst = ibdEstimate(x, verbose = FALSE)
if(is.ped(x)) {
kTrue = kappaIBD(x)
} else {
kTrue = do.call(rbind, lapply(x, kappaIBD))
nPed = length(x)
for(i in seq_len(nPed - 1)) for(j in seq(i+1, nPed))
for(a in labels(x[[i]]))
kTrue = rbind(kTrue, data.frame(id1 = a, id2 = labels(x[[j]]),
kappa0 = 1, kappa1 = 0, kappa2 = 0,
stringsAsFactors = FALSE))
}
kMerge = merge(kEst, kTrue, by = 1:2)
k0 = kMerge$k0
k2 = kMerge$k2
kappa0 = kMerge$kappa0
kappa2 = kMerge$kappa2
kMerge$LR = vapply(1:nrow(kMerge), function(i) {
ids = kMerge[i, 1:2]
loglik1 = .IBDlikelihood(x, ids = ids, kappa = c(k0[i], k2[i]), log = TRUE)
loglik2 = .IBDlikelihood(x, ids = ids, kappa = c(kappa0[i], kappa2[i]), log = TRUE)
exp(loglik1 - loglik2)
}, FUN.VALUE = 1)
if(plot) {
kStr = paste(kappa0, kappa2, sep = "-")
kFac = factor(kStr, levels = unique(kStr[order(kappa0, kappa2)]))
levels(kFac)[levels(kFac) == "0-0"] = "Parent-offspring"
levels(kFac)[levels(kFac) == "0.25-0.25"] = "Full siblings"
levels(kFac)[levels(kFac) == "0.5-0"] = "Half/Uncle/Grand"
levels(kFac)[levels(kFac) == "0.75-0"] = "First cousins"
levels(kFac)[levels(kFac) == "1-0"] = "Unrelated"
levels(kFac)[levels(kFac) == "NA-NA"] = "NA (inbred)"
cols = pchs = as.integer(kFac) + 1
nlev = nlevels(kFac)
legcol = legpch = seq_len(nlev) + 1
legcex = rep(1, nlev)
legtxt = levels(kFac)
err = kMerge$LR > LRthreshold
    if(any(err, na.rm = TRUE)) {
legcol = c(legcol, NA, 1)
legpch = c(legpch, NA, 1)
legcex = c(legcex, NA, 3)
legtxt = c(legtxt, NA, sprintf("LR > %d", LRthreshold))
}
ribd::ibdTriangle(...)
ribd::showInTriangle(kMerge[1:6], col = cols, pch = pchs, labels = labels, new = FALSE)
points(k0[err], k2[err], pch = 1, lwd = 2, cex = 3)
legend("topright", title = " According to pedigree", title.adj = 0,
bg = "whitesmoke", legend = legtxt, col = legcol, pch = legpch,
pt.cex = legcex, lty = NA, lwd = 2)
}
kMerge
} |
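# Usage sketch (not run): checkPairwise() expects pedigrees with marker
# data in the pedsuite sense (ibdEstimate, kappaIBD, ribd are used
# above); the simulation call below is an assumption for illustration.
if (FALSE) {
  x <- pedtools::nuclearPed(3)
  x <- forrel::markerSim(x, N = 200, alleles = 1:5, seed = 1)
  checkPairwise(x)
}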
read.settings <- function(inputfile = "pecan.xml") {
if (inputfile == "") {
PEcAn.logger::logger.warn(
"settings files specified as empty string;",
"\n\t\tthis may be caused by an incorrect argument to system.file.")
}
loc <- which(commandArgs() == "--settings")
if (length(loc) != 0) {
for (idx in loc) {
if (!is.null(commandArgs()[idx + 1])
&& file.exists(commandArgs()[idx + 1])) {
PEcAn.logger::logger.info("Loading --settings=", commandArgs()[idx + 1])
xml <- XML::xmlParse(commandArgs()[idx + 1])
break
}
}
} else if (file.exists(Sys.getenv("PECAN_SETTINGS"))) {
PEcAn.logger::logger.info(
"Loading PECAN_SETTINGS=",
Sys.getenv("PECAN_SETTINGS"))
xml <- XML::xmlParse(Sys.getenv("PECAN_SETTINGS"))
} else if (!is.null(inputfile) && file.exists(inputfile)) {
PEcAn.logger::logger.info("Loading inpufile=", inputfile)
xml <- XML::xmlParse(inputfile)
} else if (file.exists("pecan.xml")) {
PEcAn.logger::logger.info("Loading ./pecan.xml")
xml <- XML::xmlParse("pecan.xml")
} else {
PEcAn.logger::logger.severe("Could not find a pecan.xml file")
}
settings <- XML::xmlToList(xml)
settings <- as.Settings(settings)
settings <- expandMultiSettings(settings)
if (!is.null(settings$Rlib)) {
.libPaths(settings$Rlib)
}
return(invisible(settings))
} |
R2JAGS <-
function(model, data, nchains, inits, burnin, nodes, update, verbose){
old.pb <- options("jags.pb")
on.exit(options(old.pb))
options(jags.pb = "none")
wrap(model)
if (any(inits == "random")) inits <- NULL
mod <-
jags.model(file = "modelTempFile.txt",
data = data,
n.chains = nchains,
n.adapt = 1000,
quiet = !verbose)
unlink("modelTempFile.txt")
update(mod, n.iter = burnin, progress.bar = "none")
samples <- coda.samples(mod, nodes, n.iter = update, thin = 1,
progress.bar = "none")
dic <- dic.samples(mod, n.iter = update, thin = 1, type = "pD",
progress.bar = "none")
return(list(mcmc.list = samples, dic = dic))
} |
lologVariational <- function(formula,
nReplicates = 5L,
dyadInclusionRate = NULL,
edgeInclusionRate = NULL,
targetFrameSize = 500000) {
lolik <- createLatentOrderLikelihood(formula)
nReplicates <- as.integer(nReplicates)
dyadIndependent <- lolik$getModel()$isIndependent(TRUE, TRUE)
dyadIndependentOffsets <-
lolik$getModel()$isIndependent(TRUE, FALSE)
allDyadIndependent <-
all(dyadIndependent) & all(dyadIndependentOffsets)
  if (allDyadIndependent && nReplicates != 1L) {
cat(
"\n Model is dyad independent. Replications are redundant. Setting nReplicates <- 1L.\n"
)
nReplicates <- 1L
}
network <- lolik$getModel()$getNetwork()
n <- network$size()
ndyads <- n * (n - 1)
nedges <- lolik$getModel()$getNetwork()$nEdges()
if (!network$isDirected())
ndyads <- ndyads / 2
if (is.null(edgeInclusionRate)){
edgeInclusionRate <- min( floor(targetFrameSize / 2), nedges) / nedges
}
if (is.null(dyadInclusionRate)) {
dyadInclusionRate <- min(1, (targetFrameSize - edgeInclusionRate * nedges) / (ndyads - nedges + 1) )
}
samples <-
lolik$variationalModelFrameMulti(nReplicates, dyadInclusionRate, edgeInclusionRate)
predictors <- lapply(samples, function(x)
as.data.frame(x[[2]],
col.names = 1:length(x[[2]])))
predictors <- do.call(rbind, predictors)
outcome <- do.call(c, lapply(samples, function (x)
x[[1]]))
selectionProb <- do.call(c, lapply(samples, function (x)
x[[3]]))
logFit <-
glm(
outcome ~ as.matrix(predictors) - 1,
family = "binomial",
weights = 1 / selectionProb
)
theta <- logFit$coefficients
lolik$setThetas(theta)
names(theta) <- names(lolik$getModel()$statistics())
result <- list(
method = "variational",
formula = formula,
theta = theta,
vcov = vcov(logFit) * nReplicates,
nReplicates = nReplicates,
dyadInclusionRate = dyadInclusionRate,
edgeInclusionRate = edgeInclusionRate,
allDyadIndependent = allDyadIndependent,
likelihoodModel = lolik,
outcome = outcome,
predictors = predictors
)
class(result) <- c("lologVariationalFit", "lolog", "list")
result
}
print.lologVariationalFit <- function(x, ...) {
if (x$allDyadIndependent)
cat("MLE Coefficients:\n")
else
cat("Variational Inference Coefficients:\n")
print(x$theta)
if (x$dyadInclusionRate != 1) {
cat("Inclusion rate:", x$dyadInclusionRate, "\n")
}
  if (!x$allDyadIndependent)
    cat("Note: estimates are variational approximations.\n")  # assumed wording; the original string was truncated
}
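# Usage sketch (not run): `net` stands for a network object and the
# right-hand side for lolog model terms; both are assumptions used only
# to illustrate the formula interface.
if (FALSE) {
  fit <- lologVariational(net ~ edges)
  print(fit)
}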
d_TE <- function(x, theta, g_scaled, d_V, Lambda, log = FALSE) {
if (length(theta) != (sqrt(length(Lambda)) + 1)) {
stop(paste("theta and Lambda do not have sizes p and (p - 1) x (p - 1),",
"respectively."))
}
d_U <- function(x, log) d_ACG(x = x, Lambda = Lambda, log = log)
log_dens <- d_tang_norm(x = x, theta = theta, g_scaled = g_scaled,
d_V = d_V, d_U = d_U, log = TRUE)
return(switch(log + 1, exp(log_dens), log_dens))
}
r_TE <- function(n, theta, r_V, Lambda) {
if (length(theta) != (sqrt(length(Lambda)) + 1)) {
stop(paste("theta and Lambda do not have sizes p and (p - 1) x (p - 1),",
"respectively."))
}
r_U <- function(x) r_ACG(n = n, Lambda = Lambda)
return(r_tang_norm(n = n, theta = theta, r_V = r_V, r_U = r_U))
} |
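# Usage sketch (not run): theta is a unit p-vector and Lambda the
# (p-1) x (p-1) shape matrix of the ACG tangent component; r_V can be
# any sampler of the cosine component on (-1, 1) -- the uniform choice
# below is arbitrary, for illustration only.
if (FALSE) {
  samp <- r_TE(n = 10, theta = c(0, 0, 1),
               r_V = function(n) runif(n, -1, 1),
               Lambda = diag(2))
}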
pisaedTable1REF <- c("",
"Formula: math ~ st04q01 + st20q01 ",
"",
"Plausible values: 5",
"jrrIMax: 1",
"Weight variable: 'w_fstuwt'",
"Variance method: jackknife",
"JK replicates: 80",
"full data n: 4978",
"n used: 4873",
"",
"",
"Summary Table:",
" st04q01 st20q01 N WTD_N PCT SE(PCT) MEAN SE(MEAN)",
" Female Country of test 2234 1575416.5 92.425753 0.8038984 481.1794 4.019205",
" Female Other country 180 129104.6 7.574247 0.8038984 463.6272 10.355889",
" Male Country of test 2272 1611412.6 91.796017 0.9558501 486.8819 3.853613",
" Male Other country 187 144015.0 8.203983 0.9558501 474.1468 9.474347"
)
plm1REF <- c(" (Intercept) st29q06Agree st29q06Disagree st29q06Strongly disagree sc01q01Private ",
" 506.993125 -21.828757 -32.381549 -52.944871 2.408131 "
)
pgap1REF <- c("Call: gap(variable = \"math\", data = usaINT2012, groupA = st04q01 == ",
" \"Male\", groupB = st04q01 == \"Female\", weightVar = \"w_fstuwt\")",
"",
"Labels:",
" group definition nFullData nUsed",
" A st04q01 == \"Male\" 4978 2525",
" B st04q01 == \"Female\" 4978 2453",
"",
"Percentage:",
" pctA pctAse pctB pctBse diffAB covAB diffABse diffABpValue dofAB",
" 50.98087 0.7182871 49.01913 0.7182871 1.961734 -0.5159363 1.436574 0.174861 110.0328",
"",
"Results:",
" estimateA estimateAse estimateB estimateBse diffAB covAB diffABse diffABpValue dofAB",
" 483.647 3.800262 478.9953 3.92109 4.651662 11.01904 2.789059 0.09911296 83.14098"
)
al1REF <- c("",
"AchievementVars: cpro",
"aggregateBy: st04q01",
"",
"Achievement Level Cutpoints:",
"358.49 423.42 488.35 553.28 618.21 683.14 ",
"",
"Plausible values: 5",
"jrrIMax: 1",
"Weight variable: 'w_fstuwt'",
"Variance method: jackknife",
"JK replicates: 80",
"full data n: 5177",
"n used: 5177",
"",
"",
"Discrete",
" cpro_Level st04q01 N wtdN Percent StandardError",
" Below Proficiency Level 1 Female 95.4 1541.697 3.539350 0.7086689",
" At Proficiency Level 1 Female 234.2 3815.988 8.760555 0.8224039",
" At Proficiency Level 2 Female 524.6 8669.991 19.904132 0.9013387",
" At Proficiency Level 3 Female 771.0 12722.017 29.206570 1.3662494",
" At Proficiency Level 4 Female 644.6 10711.838 24.591702 1.3001839",
" At Proficiency Level 5 Female 300.8 4985.134 11.444621 1.1955623",
" At Proficiency Level 6 Female 66.4 1112.086 2.553071 0.5394964",
" Below Proficiency Level 1 Male 68.8 1083.360 2.616423 0.5358135",
" At Proficiency Level 1 Male 167.6 2578.450 6.227218 0.6980039",
" At Proficiency Level 2 Male 383.8 6222.924 15.028992 1.1775214",
" At Proficiency Level 3 Male 648.8 10591.764 25.580183 1.3363213",
" At Proficiency Level 4 Male 696.6 11515.983 27.812266 1.7109083",
" At Proficiency Level 5 Male 430.6 7036.873 16.994760 1.1910320",
" At Proficiency Level 6 Male 143.8 2376.777 5.740157 0.6795269"
)
pgap2REF <- c("gapList",
"Call: gap(variable = \"math\", data = usaINT2012, groupA = st04q01 == ",
" \"Male\", groupB = st04q01 == \"Female\", percentiles = c(50, ",
" 90), weightVar = \"w_fstuwt\", pctMethod = \"symmetric\")",
"",
"Labels:",
" group definition",
" A st04q01 == \"Male\"",
" B st04q01 == \"Female\"",
"",
"Percentage:",
" pctA pctAse pctB pctBse diffAB covAB diffABse diffABpValue dofAB",
" 50.98087 0.7182871 49.01913 0.7182871 1.961734 -0.5159363 1.436574 0.174861 110.0328",
"",
"Results:",
" percentiles estimateA estimateAse estimateB estimateBse diffAB covAB diffABse diffABpValue dofAB",
" 50 480.7124 4.075956 474.6266 4.498829 6.085824 11.494158 3.723515 0.1064350 73.66955",
" 90 605.1735 3.942558 595.3941 8.603019 9.779404 7.893203 8.588906 0.2579161 89.16957")
pvREF <- c("There are 10 subject scale(s) or subscale(s) in this edsurvey.data.frame:",
"'math' subject scale or subscale with 5 plausible values (the default).",
" The plausible value variables are: 'pv1math', 'pv2math', 'pv3math', 'pv4math', and 'pv5math'",
"",
"'macc' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1macc', 'pv2macc', 'pv3macc', 'pv4macc', and 'pv5macc'",
"",
"'macq' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1macq', 'pv2macq', 'pv3macq', 'pv4macq', and 'pv5macq'",
"",
"'macs' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1macs', 'pv2macs', 'pv3macs', 'pv4macs', and 'pv5macs'",
"",
"'macu' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1macu', 'pv2macu', 'pv3macu', 'pv4macu', and 'pv5macu'",
"",
"'mape' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1mape', 'pv2mape', 'pv3mape', 'pv4mape', and 'pv5mape'",
"",
"'mapf' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1mapf', 'pv2mapf', 'pv3mapf', 'pv4mapf', and 'pv5mapf'",
"",
"'mapi' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1mapi', 'pv2mapi', 'pv3mapi', 'pv4mapi', and 'pv5mapi'",
"",
"'read' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1read', 'pv2read', 'pv3read', 'pv4read', and 'pv5read'",
"",
"'scie' subject scale or subscale with 5 plausible values.",
" The plausible value variables are: 'pv1scie', 'pv2scie', 'pv3scie', 'pv4scie', and 'pv5scie'",
"")
swREF <- c("There is 1 full sample weight in this edsurvey.data.frame:",
" 'w_fstuwt' with 80 JK replicate weights (the default).",
" Jackknife replicate weight variables associated with the full sample weight 'w_fstuwt':",
" 'w_fstr1', 'w_fstr2', 'w_fstr3', 'w_fstr4', 'w_fstr5', 'w_fstr6', 'w_fstr7', 'w_fstr8', 'w_fstr9', 'w_fstr10', 'w_fstr11', 'w_fstr12', 'w_fstr13', 'w_fstr14', 'w_fstr15', 'w_fstr16', 'w_fstr17', 'w_fstr18', 'w_fstr19', 'w_fstr20', 'w_fstr21', 'w_fstr22', 'w_fstr23', 'w_fstr24', 'w_fstr25', 'w_fstr26', 'w_fstr27', 'w_fstr28', 'w_fstr29', 'w_fstr30', 'w_fstr31', 'w_fstr32', 'w_fstr33', 'w_fstr34', 'w_fstr35', 'w_fstr36', 'w_fstr37',",
" 'w_fstr38', 'w_fstr39', 'w_fstr40', 'w_fstr41', 'w_fstr42', 'w_fstr43', 'w_fstr44', 'w_fstr45', 'w_fstr46', 'w_fstr47', 'w_fstr48', 'w_fstr49', 'w_fstr50', 'w_fstr51', 'w_fstr52', 'w_fstr53', 'w_fstr54', 'w_fstr55', 'w_fstr56', 'w_fstr57', 'w_fstr58', 'w_fstr59', 'w_fstr60', 'w_fstr61', 'w_fstr62', 'w_fstr63', 'w_fstr64', 'w_fstr65', 'w_fstr66', 'w_fstr67', 'w_fstr68', 'w_fstr69', 'w_fstr70', 'w_fstr71', 'w_fstr72', 'w_fstr73', 'w_fstr74',",
" 'w_fstr75', 'w_fstr76', 'w_fstr77', 'w_fstr78', 'w_fstr79', and 'w_fstr80'",
"")
scREF <- c("Achievement Levels:",
" Mathematics: 357.77, 420.07, 482.38, 544.68, 606.99, 669.3",
" Problem Solving: 358.49, 423.42, 488.35, 553.28, 618.21, 683.14",
" Reading: 262.04, 334.75, 407.47, 480.18, 552.89, 625.61, 698.32") |
context("Testing dCJS-related functions.")
test_that("dCJS_ss works",
{
x <- c(1, 0, 1, 0, 0)
probSurvive <- 0.6
probCapture <- 0.4
probX <- dCJS_ss(x, probSurvive, probCapture, len = 5)
correctProbX <- probSurvive * (1 - probCapture) *
probSurvive * (probCapture) *
(probSurvive^2 * (1 - probCapture)^2 +
probSurvive * (1 - probCapture) * (1 - probSurvive) +
(1 - probSurvive))
expect_equal(probX, correctProbX)
lProbX <- dCJS_ss(x, probSurvive, probCapture, log = TRUE, len = 5)
lCorrectProbX <- log(correctProbX)
expect_equal(lProbX, lCorrectProbX)
CdCJS_ss <- compileNimble(dCJS_ss)
CprobX <- CdCJS_ss(x, probSurvive, probCapture, len = 5)
expect_equal(CprobX, probX)
ClProbX <- CdCJS_ss(x, probSurvive, probCapture, log = TRUE, len = 5)
expect_equal(ClProbX, lProbX)
nc <- nimbleCode({
x[1:5] ~ dCJS_ss(probSurvive, probCapture, len = 5)
probSurvive ~ dunif(0,1)
probCapture ~ dunif(0,1)
})
m <- nimbleModel(nc, data = list(x = x),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
m$calculate()
MlProbX <- m$getLogProb("x")
expect_equal(MlProbX, lProbX)
cm <- compileNimble(m)
cm$calculate()
CMlProbX <- cm$getLogProb("x")
expect_equal(CMlProbX, lProbX)
xNA <- c(NA, NA, NA, NA, NA)
mNA <- nimbleModel(nc, data = list(x = xNA),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
mNAConf <- configureMCMC(mNA)
mNAConf$addMonitors('x')
mNA_MCMC <- buildMCMC(mNAConf)
cmNA <- compileNimble(mNA, mNA_MCMC)
set.seed(0)
cmNA$mNA_MCMC$run(10)
expect_true(all(!is.na(as.matrix(cmNA$mNA_MCMC$mvSamples)[,"x[2]"])))
set.seed(1)
nSim <- 10
xSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
xSim[i,] <- rCJS_ss(1, probSurvive, probCapture, len = length(x))
set.seed(1)
CrCJS_ss <- compileNimble(rCJS_ss)
CxSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
CxSim[i,] <- CrCJS_ss(1, probSurvive, probCapture, len = length(x))
expect_identical(xSim, CxSim)
simNodes <- m$getDependencies(c('probSurvive', 'probCapture'), self = FALSE)
mxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
m$simulate(simNodes, includeData = TRUE)
mxSim[i,] <- m$x
}
expect_identical(mxSim, xSim)
CmxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
cm$simulate(simNodes, includeData = TRUE)
CmxSim[i,] <- cm$x
}
expect_identical(CmxSim, mxSim)
})
test_that("dCJS_sv works",
{
x <- c(1, 0, 1, 0, 0)
probSurvive <- 0.6
probCapture <- c(1, 0.25, 0.6, 0.4, 0.8)
probX <- dCJS_sv(x, probSurvive, probCapture)
correctProbX <- probSurvive * (1 - probCapture[2]) *
probSurvive * (probCapture[3]) *
(probSurvive^2 * (1 - probCapture[4]) * (1 - probCapture[5]) +
probSurvive * (1 - probCapture[4]) * (1 - probSurvive) +
(1 - probSurvive))
expect_equal(probX, correctProbX)
lProbX <- dCJS_sv(x, probSurvive, probCapture, log = TRUE)
lCorrectProbX <- log(correctProbX)
expect_equal(lProbX, lCorrectProbX)
CdCJS_sv <- compileNimble(dCJS_sv)
CprobX <- CdCJS_sv(x, probSurvive, probCapture)
expect_equal(CprobX, probX)
ClProbX <- CdCJS_sv(x, probSurvive, probCapture, log = TRUE)
expect_equal(ClProbX, lProbX)
nc <- nimbleCode({
x[1:5] ~ dCJS_sv(probSurvive, probCapture[1:5], len = 5)
probSurvive ~ dunif(0,1)
for (i in 1:5) {
probCapture[i] ~ dunif(0,1)
}
})
m <- nimbleModel(nc, data = list(x = x),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
m$calculate()
MlProbX <- m$getLogProb("x")
expect_equal(MlProbX, lProbX)
cm <- compileNimble(m)
cm$calculate()
CMlProbX <- cm$getLogProb("x")
expect_equal(CMlProbX, lProbX)
xNA <- c(NA, NA, NA, NA, NA)
mNA <- nimbleModel(nc, data = list(x = xNA),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
mNAConf <- configureMCMC(mNA)
mNAConf$addMonitors('x')
mNA_MCMC <- buildMCMC(mNAConf)
cmNA <- compileNimble(mNA, mNA_MCMC)
set.seed(10)
cmNA$mNA_MCMC$run(5)
expect_true(all(!is.na(as.matrix(cmNA$mNA_MCMC$mvSamples)[,"x[1]"])))
set.seed(1)
nSim <- 10
xSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
xSim[i,] <- rCJS_sv(1, probSurvive, probCapture, len = length(x))
set.seed(1)
CrCJS_sv <- compileNimble(rCJS_sv)
CxSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
CxSim[i,] <- CrCJS_sv(1, probSurvive, probCapture, len = length(x))
expect_identical(xSim, CxSim)
simNodes <- m$getDependencies(c('probSurvive', 'probCapture'), self = FALSE)
mxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for (i in 1:nSim) {
m$simulate(simNodes, includeData = TRUE)
mxSim[i,] <- m$x
}
expect_identical(mxSim, xSim)
CmxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for (i in 1:nSim) {
cm$simulate(simNodes, includeData = TRUE)
CmxSim[i,] <- cm$x
}
expect_identical(CmxSim, mxSim)
})
test_that("dCJS_vs works",
{
x <- c(1, 0, 1, 0, 0)
probSurvive <- c(0.8, 0.45, 0.4, 0.7)
probCapture <- 0.6
probX <- dCJS_vs(x, probSurvive, probCapture)
correctProbX <- probSurvive[1] * (1 - probCapture) *
probSurvive[2] * probCapture *
(probSurvive[3] * probSurvive[4] * (1 - probCapture)^2 +
probSurvive[3] * (1 - probCapture) * (1 - probSurvive[4]) +
(1 - probSurvive[3]))
expect_equal(probX, correctProbX)
lProbX <- dCJS_vs(x, probSurvive, probCapture, log = TRUE)
lCorrectProbX <- log(correctProbX)
expect_equal(lProbX, lCorrectProbX)
CdCJS_vs <- compileNimble(dCJS_vs)
CprobX <- CdCJS_vs(x, probSurvive, probCapture)
expect_equal(CprobX, probX)
ClProbX <- CdCJS_vs(x, probSurvive, probCapture, log = TRUE)
expect_equal(ClProbX, lProbX)
nc <- nimbleCode({
x[1:5] ~ dCJS_vs(probSurvive[1:4], probCapture, len = 5)
probCapture ~ dunif(0,1)
for (i in 1:4) {
probSurvive[i] ~ dunif(0,1)
}
})
m <- nimbleModel(nc, data = list(x = x),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
m$calculate()
MlProbX <- m$getLogProb("x")
expect_equal(MlProbX, lProbX)
cm <- compileNimble(m)
cm$calculate()
CMlProbX <- cm$getLogProb("x")
expect_equal(CMlProbX, lProbX)
xNA <- c(NA, NA, NA, NA, NA)
mNA <- nimbleModel(nc, data = list(x = xNA),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
mNAConf <- configureMCMC(mNA)
mNAConf$addMonitors('x')
mNA_MCMC <- buildMCMC(mNAConf)
cmNA <- compileNimble(mNA, mNA_MCMC)
set.seed(5)
cmNA$mNA_MCMC$run(10)
expect_true(all(!is.na(as.matrix(cmNA$mNA_MCMC$mvSamples)[,"x[1]"])))
set.seed(1)
nSim <- 10
xSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
xSim[i,] <- rCJS_vs(1, probSurvive, probCapture, len = length(x))
set.seed(1)
CrCJS_vs <- compileNimble(rCJS_vs)
CxSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
CxSim[i,] <- CrCJS_vs(1, probSurvive, probCapture, len = length(x))
expect_identical(xSim, CxSim)
simNodes <- m$getDependencies(c('probSurvive', 'probCapture'), self = FALSE)
mxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
m$simulate(simNodes, includeData = TRUE)
mxSim[i,] <- m$x
}
expect_identical(mxSim, xSim)
CmxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
cm$simulate(simNodes, includeData = TRUE)
CmxSim[i,] <- cm$x
}
expect_identical(CmxSim, mxSim)
})
test_that("dCJS_vv works",
{
x <- c(1, 0, 1, 0, 0)
probSurvive <- c(0.6, 0.5, 0.4, 0.55)
probCapture <- c(1, 0.45, 0.5, 0.55, 0.6)
len <- 5
probX <- dCJS_vv(x, probSurvive, probCapture, len)
correctProbX <-
probSurvive[1] * (1 - probCapture[2]) *
probSurvive[2] * (probCapture[3]) *
((probSurvive[3] * (1 - probCapture[4]) *
probSurvive[4] * (1 - probCapture[5])) +
(probSurvive[3] * (1 - probCapture[4]) *
(1 - probSurvive[4])) +
(1 - probSurvive[3]))
expect_equal(probX, correctProbX)
lProbX <- dCJS_vv(x, probSurvive, probCapture, log = TRUE)
lCorrectProbX <- log(correctProbX)
expect_equal(lProbX, lCorrectProbX)
CdCJS_vv <- compileNimble(dCJS_vv)
CprobX <- CdCJS_vv(x, probSurvive, probCapture)
expect_equal(CprobX, probX)
ClProbX <- CdCJS_vv(x, probSurvive, probCapture, len = 5, log = TRUE)
expect_equal(ClProbX, lProbX)
nc <- nimbleCode({
x[1:5] ~ dCJS_vv(probSurvive[1:4], probCapture[1:5], len = 5)
for (i in 1:4) {
probSurvive[i] ~ dunif(0,1)
probCapture[i] ~ dunif(0,1)
}
})
m <- nimbleModel(nc, data = list(x = x),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
m$calculate()
MlProbX <- m$getLogProb("x")
expect_equal(MlProbX, lProbX)
cm <- compileNimble(m)
cm$calculate()
CMlProbX <- cm$getLogProb("x")
expect_equal(CMlProbX, lProbX)
xNA <- c(NA, NA, NA, NA, NA)
mNA <- nimbleModel(nc, data = list(x = xNA),
inits = list(probSurvive = probSurvive,
probCapture = probCapture))
mNAConf <- configureMCMC(mNA)
mNAConf$addMonitors('x')
mNA_MCMC <- buildMCMC(mNAConf)
cmNA <- compileNimble(mNA, mNA_MCMC)
set.seed(5)
cmNA$mNA_MCMC$run(10)
expect_true(all(!is.na(as.matrix(cmNA$mNA_MCMC$mvSamples)[,"x[1]"])))
set.seed(1)
nSim <- 10
xSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
xSim[i,] <- rCJS_vv(1, probSurvive, probCapture, len = length(x))
set.seed(1)
CrCJS_vv <- compileNimble(rCJS_vv)
CxSim <- array(NA, dim = c(nSim, length(x)))
for(i in 1:nSim)
CxSim[i,] <- CrCJS_vv(1, probSurvive, probCapture, len = length(x))
expect_identical(xSim, CxSim)
simNodes <- m$getDependencies(c('probSurvive', 'probCapture'), self = FALSE)
mxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
m$simulate(simNodes, includeData = TRUE)
mxSim[i,] <- m$x
}
expect_identical(mxSim, xSim)
CmxSim <- array(NA, dim = c(nSim, length(x)))
set.seed(1)
for(i in 1:nSim) {
cm$simulate(simNodes, includeData = TRUE)
CmxSim[i,] <- cm$x
}
expect_identical(CmxSim, mxSim)
})
test_that("dCJS errors", {
expect_error(
dCJS_ss(x = c(1,0,1,0,0), probCapture = 0.4, probSurvive = 0.5, len = 3)
)
expect_error(
dCJS_ss(x = c(0,0,1,0,0), probCapture = 0.4, probSurvive = 0.5)
)
expect_error(
dCJS_vs(x = c(1,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4, 0.4), len = 2)
)
expect_error(
dCJS_vs(x = c(1,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4))
)
expect_error(
dCJS_vs(x = c(0,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4, 0.4))
)
expect_error(
dCJS_sv(x = c(1,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4, 0.4), probSurvive = 0.1, len = 6)
)
expect_error(
dCJS_sv(x = c(1,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4), probSurvive = 0.8)
)
expect_error(
dCJS_sv(x = c(0,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4, 0.4), probSurvive = 0.1)
)
expect_error(
dCJS_vv(x = c(1,0,1,0,0), probCapture = c(1,0,1,0.3,0.3), probSurvive = c(0.9, 0.9))
)
expect_error(
dCJS_vv(x = c(1,0,1,0,0), probCapture = c(1,0,1), probSurvive = c(0.9, 0.9, 0.1, 0.1))
)
expect_error(
dCJS_vv(x = c(1,0,1,0,0), probCapture = c(1,0,1,0,0),
probSurvive = c(0.9, 0.9, 0.1, 0.1), len = 2)
)
expect_error(
dCJS_vv(x = c(0,0,1,0,0), probCapture = c(1,0,1,0,0),
probSurvive = c(0.9, 0.9, 0.1, 0.1))
)
CdCJS_ss <- compileNimble(dCJS_ss)
CdCJS_sv <- compileNimble(dCJS_sv)
CdCJS_vs <- compileNimble(dCJS_vs)
CdCJS_vv <- compileNimble(dCJS_vv)
expect_error(
CdCJS_ss(x = c(1,0,1,0,0), probCapture = 0.4, probSurvive = 0.5, len = 3)
)
expect_error(
CdCJS_ss(x = c(0,0,1,0,0), probCapture = 0.4, probSurvive = 0.5)
)
expect_error(
CdCJS_vs(x = c(1,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4, 0.4), len = 3)
)
expect_error(
CdCJS_vs(x = c(1,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4))
)
expect_error(
CdCJS_vs(x = c(0,0,1,0,0), probCapture = 0.1, probSurvive = c(0.9, 0.9, 0.4, 0.4))
)
expect_error(
CdCJS_sv(x = c(1,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4, 0.4), probSurvive = 0.1, len = 3)
)
expect_error(
CdCJS_sv(x = c(1,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4), probSurvive = 0.8)
)
expect_error(
CdCJS_sv(x = c(0,0,1,0,0), probCapture = c(1,0.9, 0.9, 0.4, 0.4), probSurvive = 0.1)
)
expect_error(
CdCJS_vv(x = c(1,0,1,0,0), probCapture = c(1,0,1,0.3,0.3), probSurvive = c(0.9, 0.9))
)
expect_error(
CdCJS_vv(x = c(1,0,1,0,0), probCapture = c(0,1), probSurvive = c(0.9, 0.9, 0.1, 0.1))
)
expect_error(
CdCJS_vv(x = c(1,0,1,0,0), probCapture = c(1,0,1,0,0),
probSurvive = c(0.9, 0.9, 0.1, 0.1), len = 2)
)
expect_error(
CdCJS_vv(x = c(0,0,1,0,0), probCapture = c(1,0,1,0,0),
probSurvive = c(0.9, 0.9, 0.1, 0.1))
)
}) |
source("ESEUR_config.r")
library("foreign")
pc7=read.dta(paste0(ESEUR_dir, "ecosystems/Berndt/pc7699a.dta"))
pc_1987=subset(pc7, year < 1988)
RAPID CHANGE IN THE PERSONAL COMPUTER MARKET
A QUALITY-ADJUSTED HEDONIC PRICE INDEX, 1976-1987
by
JEREMY MICHAEL COHEN
MSc thesis
1) RAM - The amount of Random Access Memory (RAM) standard on each
PC model, measured in K (1024) bytes.
2) Maximum RAM - The maximum amount of RAM that can fit on the system
board (mother-board) of each PC model, measured in K bytes (logical
groupings of eight on/off bits). This figure does not include possible RAM
increases due to expansion cards, since RAM access is typically faster on
the system board than on expansion cards, placing a premium on the amount
of fast system board RAM that the system can have. Also, expansion slots
are typically multi-functional and not restricted to just RAM cards, so the
amount of expansion card based RAM can be quite variable for most PC models.
3) ROM - The amount of Read Only Memory (ROM) standard in each PC
model, measured in K bytes. Since ROM usually contains diagnostics and
low-level operating system routines, this variable may well serve as a
surrogate for the sophistication of the software packages in the PC.
4) MHz - The clock speed of each PC model, measured in megahertz (millions of
cycles per second). This is one of the main indicators (along with processor
type) of the throughput of a PC.
5) Hard Disk - The amount of storage on the hard disk (if one exists) in each
PC, measured in M (1024*1024) bytes. A hard disk is often the dividing line
between a home system and an office system, though this distinction has blurred
in recent years. 332 of the 1108 models in the data set had hard disks.
6) Hard Disk Access Speed - The average time it takes to retrieve a byte of
information from the hard disk, measured in milliseconds. This information
would be more useful if it were supplemented by the seek and rotation times of
the hard disks, but it was extremely difficult to obtain this data.
7) Floppy Disk - The amount of storage that the floppy disk drives, if any
exist, are capable of reading or writing to a floppy disk, measured in K bytes.
This includes the flexible 8", 5.25" and 3.5" media and excludes fixed media.
8) Number of Floppy Disk Drives - The number of floppy disk drives standard
on each PC model. A variable equivalent to this one for hard disk drives is
not necessary, since all of the PC models examined had either zero or one hard
disks, while the PCs had either zero, one, or two floppy disk drives.
9) Slots (8 bit) - The number of eight bit expansion slots available within each
PC model for expansion boards.
10) Slots (16 bit) - The number of sixteen bit slots available within each PC
model for expansion boards.
11) Slots (32 bit) - The number of thirty-two bit slots available within each PC
model for expansion boards.
Note: The above three "slot" variables posed a few problems. First, both PC
ads and reviews often failed to state whether any of the slots mentioned were
already filled within the standard setup. To be consistent I have used the
total number of slots in the system, irrespective of whether the slots may
have been initially filled. This may not be optimal, but it was the best
solution given the available data. Since a machine would normally not have
more than one or two slots initially filled, this should not cause any major
problems. Another problem is that some ads and reviews specified only the
number of slots, not the size of the slots. I resolved this problem through
the use of multiple data sources (particularly the Dataquest guide). In the
few cases where the situation was still unresolved, I assumed that the size
of the slots was the same as the size of the PC's processor chip. This is a
reasonably safe assumption that should not have any negative effects on
the regression analysis.
12) Size - The size of each PC model, measured in cubic inches. This includes
the system unit but normally excludes the monitor and keyboard. These
components are included, however, if they are integral to the system
unit (e.g., the IBM Convertible).
13) Weight - The weight of each PC model, measured in pounds. This includes
the system unit but normally does not include the weight of the monitor or
keyboard. These components are included, however, if they are integral to
the system unit (e.g., the Apple Macintosh).
14) Age - The number of years since a given PC model was first introduced. A
model has an age of zero in its initial year. This variable ranges from zero to
seven (for the Radio Shack Color Computer) and provides useful information
on the effects of longevity on pricing.
Note that while the specifications of many PC models changed over time, as
long as the model name remained constant the model was considered to be the
same as the model from the previous year.
Age Number of Data Points
0 649
1 257
2 118
3 46
4 25
5 9
6 3
7 1
----
1108
Note: The hard disk size, number of floppy disk drives, number of slots, and
age variables had the value one added to them so that it would be possible to
take their natural log during the regression analysis.
Dummy Variables
The twenty-six dummy variables are divided into eight subdivisions, as
described above. The variables and subdivisions are:
I) Processor Type - All of the PCs in my study have either eight bit,
sixteen bit or thirty-two bit processors, this being an indication of how
much data the system can process at a given time. The higher this number,
ceteris paribus, the greater the throughput of the system.
A few processor chips, such as the 68000, can manipulate differing amounts of
data depending on the operation. In this case I have grouped such multiple-size
chips in the lower applicable group, both because this is how these chips are
normally viewed and because the throughput of the chip is restricted by
its lowest grouping.
15) DProc16 - 1 if the system has a sixteen bit processor chip, 0 otherwise.
16) DProc32 - 1 if the system has a thirty-two bit processor chip, 0 otherwise.
Thus, the base case for this subdivision is having an eight bit processor chip.
In my 1108 data point sample, 540 of the PCs had eight bit processor chips,
506 had sixteen bit processor chips, and 62 had thirty-two bit processor chips.
II) Monitor Type - While many PCs are sold without a monitor, they are also
sometimes sold either with a black and white (B&W) or a color monitor.
17) DBW - 1 if the system comes with a B&W monitor, 0 otherwise.
18) DColor - 1 if the system comes with a color monitor, 0 otherwise.
Thus, the base case for this subdivision is not having a monitor. In my 1108
data point set, 605 of the PCs had no monitor, 478 had a B&W monitor and
25 had a color monitor.
III) Portability - Some PCs, often called "portables" or "convertibles," are
made small and light enough to be portable. These PCs often also have special
features such as battery power capability and an integral monitor.
19) DPortable - 1 if the system is meant to be portable, 0 otherwise.
Thus, the base case for this subdivision is not being portable. In my 1108
point data set, 937 of the systems were not portable, while 171 were
explicitly portable.
IV) Additional Technical Features - Some PCs have extra hardware that is costly
enough to have a significant effect on their overall price, yet rare enough
not to be considered a standard item. Some examples include modems, printers,
or an extra monitor.
20) DExtra - 1 if the system has a significant piece of additional hardware,
0 otherwise. Thus, the base case for this subdivision is not having any
additional equipment.
V) Price Type - In my sample I have both list prices and discount prices. This
allows an analysis of discount pricing and also provides more overall
variability in the pricing data. In particular, it is not unusual to see a
PC's list price remain steady over 2-3 years while its discount price may
drop steadily over this time.
21) DDiscount - 1 if the system price is discounted, 0 otherwise.
The base case for this subdivision is having a list price. In my sample, 841 of
the systems had list price information and 267 had discount prices.
VI) Manufacturer - In my data set I have PCs from 114 different companies. In
this subdivision, I attempt to discover any differences in pricing among seven
major PC manufacturers. While any pricing discrepancies may possibly be
accounted for by intangibles such as quality and reliability or more tangible
items such as warranties and included software, these discrepancies may
also be an indicator of a company's overall pricing policy.
22) DApple - 1 if the PC is made by Apple, 0 otherwise.
23) DCommo - 1 if the PC is made by Commodore, 0 otherwise.
24) DCompa - 1 if the PC is made by Compaq, 0 otherwise.
25) DIBM - 1 if the PC is made by IBM, 0 otherwise.
26) DNEC - 1 if the PC is made by NEC, 0 otherwise.
27) DPCLim - 1 if the PC is made by PC Limited, 0 otherwise.
28) DRadio - 1 if the PC is made by Radio Shack, 0 otherwise.
The base case for this subdivision is to be manufactured by one of the 107 other
PC companies. Of the 1108 PC data points, 62 were made by Apple, 40 by
Commodore, 59 by Compaq, 94 by IBM, 36 by NEC, 21 by PC Limited, 85 by Radio
Shack, and 711 by other companies. Thus, 35.8% (397 of 1108) of the models were
built by these seven manufacturers.
VII) Date - The heart of a hedonic pricing study is the set of yearly dummy variables.
The data in my sample runs from 1976 to 1987, resulting in eleven of these dummy
variables. The parameter coefficients obtained for these variables will be
directly used to construct the hedonic price index.
29) D77 - 1 if this model/price data point is from 1977, 0 otherwise.
30) D78 - 1 if this model/price data point is from 1978, 0 otherwise.
31) D79 - 1 if this model/price data point is from 1979, 0 otherwise.
32) D80 - 1 if this model/price data point is from 1980, 0 otherwise.
33) D81 - 1 if this model/price data point is from 1981, 0 otherwise.
34) D82 - 1 if this model/price data point is from 1982, 0 otherwise.
35) D83 - 1 if this model/price data point is from 1983, 0 otherwise.
36) D84 - 1 if this model/price data point is from 1984, 0 otherwise.
37) D85 - 1 if this model/price data point is from 1985, 0 otherwise.
38) D86 - 1 if this model/price data point is from 1986, 0 otherwise.
39) D87 - 1 if this model/price data point is from 1987, 0 otherwise.
The base case for this subdivision is a model/price data point from 1976. The
number of data points from each year is as follows:
Year   Number of Data Points
1976 11
1977 17
1978 15
1979 28
1980 45
1981 40
1982 54
1983 85
1984 130
1985 109
1986 178
1987 396
----
1108
VIII) Pre/Post IBM Date - IBM revolutionized the PC industry when it released
its PC line at the end of 1981. Since this may well have affected overall
industry pricing, I examined it using the following dummy variable:
40) DP82 - 1 if the system data point is from after 1981, 0 otherwise.
The base case for this subdivision is for the data point to be from before 1982.
Of the 1108 data points, 156 are from before 1982, while 952 are from 1982 or later.
Overall Base Case
The overall base case (in which all of the dummy variables equal 0) is: a PC
model with an eight bit processor, no monitor, non-portable, with no extra
hardware features, list price, not from one of the seven specified
manufacturers, from 1976, and from before the late 1981 IBM announcement
date for its PCs.
Omitted Variables
There were several independent variables that I considered using in the initial
regression equation but eventually chose not to use, for various reasons.
These variables are:
1) Hard Disk Seek Speed - This information proved to be very difficult to obtain
for the early PC models, since it was rarely reported either in advertisements
or in product reviews. However, it should prove to be highly correlated with
the hard disk access speed variable, which is included in the regression.
Thus, I conjecture that this variable would not have added much new
information to the analysis.
2) Floppy Disk Access/Seek Speed - This information was difficult to obtain for
recent PC models and often impossible for the earlier models. Thus, I was not
able to gather enough entries for this variable to make it worthwhile for
inclusion in the regression.
3) Asynchronous (Asynch.) Card - While the information on whether each PC
came with an Asynch. card was often available, I decided that this variable
would likely not add much to the regression analysis, since the cost/value of
an asynch. card is quite low (on the order of $20-40).
4) Clock/Calendar Card - Similar to the asynch. card, the low cost/value of a
clock/calendar card (about $20) implied that it was not worth adding it to the
regression, even though this information was often available.
# Drop the generated (gen*) and dummy (d*) columns to recover the original variables.
orig=pc7[, !grepl("^gen", names(pc7))]
orig=orig[, !grepl("^d", names(orig))]
context("Test: biomart()")
test_that("The biomart() interface works properly..",{
skip_on_cran()
skip_on_travis()
expect_output(getMarts())
expect_output(
biomart(
genes = "GUCA2A",
mart = "ENSEMBL_MART_ENSEMBL",
dataset = "hsapiens_gene_ensembl",
attributes = c("start_position", "end_position", "description"),
filters = "hgnc_symbol"
)[1, 1:3]
)
}) |
summary.ssanova <- function(object,diagnostics=FALSE,...)
{
y <- model.response(object$mf,"numeric")
w <- model.weights(object$mf)
offset <- model.offset(object$mf)
if (is.null(offset)) offset <- rep(0,length(y))
mf <- object$mf
if (!is.null(object$random)) mf$random <- object$random$z
res <- y - predict(object,mf)
fitted <- as.numeric(y-res)
sigma <- sqrt(object$varht)
if (!is.null(w)) {
r.squared <- sum(w*(fitted-sum(w*fitted)/sum(w))^2)
r.squared <- r.squared/sum(w*(y-sum(w*y)/sum(w))^2)
}
else r.squared <- var(fitted)/var(y)
if (is.null(w)) rss <- sum(res^2)
else rss <- sum(w*res^2)
obj.wk <- object
obj.wk$d[] <- 0
if (!is.null(model.offset(obj.wk$mf))) obj.wk$mf[,"(offset)"] <- 0
penalty <- sum(obj.wk$c*predict(obj.wk,obj.wk$mf[object$id.basis,]))
penalty <- as.vector(10^object$nlambda*penalty)
if (!is.null(object$random)) {
p.ran <- t(object$b)%*%object$random$sigma$fun(object$zeta,
object$random$sigma$env)%*%object$b
penalty <- penalty + p.ran
}
if (is.null(object$partial)) labels.p <- NULL
else labels.p <- labels(object$partial$mt)
if (diagnostics) {
comp <- NULL
p.dec <- NULL
for (label in c(object$terms$labels,labels.p)) {
if (label=="1") next
if (label=="offset") next
comp <- cbind(comp,predict(object,object$mf,inc=label))
jk <- sum(obj.wk$c*predict(obj.wk,obj.wk$mf[object$id.basis,],inc=label))
p.dec <- c(p.dec,10^object$nlambda*jk)
}
term.label <- object$terms$labels[object$terms$labels!="1"]
term.label <- term.label[term.label!="offset"]
term.label <- c(term.label,labels.p)
if (!is.null(object$random)) {
comp <- cbind(comp,predict(object,mf,inc=NULL))
p.dec <- c(p.dec,p.ran)
term.label <- c(term.label,"random")
}
fitted.off <- fitted-offset
comp <- cbind(comp,yhat=fitted.off,y=fitted.off+res,e=res)
if (any(outer(term.label,c("yhat","y","e"),"==")))
warning("gss warning in summary.ssanova: avoid using yhat, y, or e as variable names")
colnames(comp) <- c(term.label,"yhat","y","e")
if (!is.null(w))
comp <- sqrt(w)*comp - outer(sqrt(w),apply(w*comp,2,sum))/sum(w)
else comp <- sweep(comp,2,apply(comp,2,mean))
comp1 <- comp[,c(term.label,"yhat")]
decom <- t(comp1) %*% comp1[,"yhat"]
names(decom) <- c(term.label,"yhat")
decom <- decom[term.label]/decom["yhat"]
corr <- t(comp)%*%comp
corr <- t(corr/sqrt(diag(corr)))/sqrt(diag(corr))
norm <- apply(comp,2,function(x){sqrt(sum(x^2))})
cosines <- rbind(corr[c("y","e"),],norm)
rownames(cosines) <- c("cos.y","cos.e","norm")
corr <- corr[term.label,term.label,drop=FALSE]
if (qr(corr)$rank<dim(corr)[2])
kappa <- rep(Inf,len=dim(corr)[2])
else kappa <- as.numeric(sqrt(diag(solve(corr))))
rough <- p.dec / penalty
names(kappa) <- names(rough) <- term.label
}
else decom <- kappa <- cosines <- rough <- NULL
z <- list(call=object$call,method=object$method,fitted=fitted,residuals=res,
sigma=sigma,r.squared=r.squared,rss=rss,penalty=penalty,
pi=decom,kappa=kappa,cosines=cosines,roughness=rough)
class(z) <- "summary.ssanova"
z
} |
NULL
soccerPitchHalf <- function(lengthPitch = 105, widthPitch = 68, arrow = c("none", "r", "l"), theme = c("light", "dark", "grey", "grass"), title = NULL, subtitle = NULL, data = NULL) {
start<-end<-NULL
if(theme[1] == "grass") {
fill1 <- "
fill2 <- "
colPitch <- "grey85"
arrowCol <- "white"
colText <- "white"
} else if(theme[1] == "light") {
fill1 <- "white"
fill2 <- "white"
colPitch <- "grey60"
arrowCol = "black"
colText <- "black"
} else if(theme[1] %in% c("grey", "gray")) {
fill1 <- "
fill2 <- "
colPitch <- "white"
arrowCol <- "white"
colText <- "black"
} else {
fill1 <- "
fill2 <- "
colPitch <- "
arrowCol <- "
colText <- "
}
lwd <- 0.5
border <- c(12, 6, 1, 6)
lines <- (lengthPitch + border[2] + border[4]) / 13
boxes <- data.frame(start = lines * 0:12 - border[4], end = lines * 1:13 - border[2])[seq(2, 12, 2),]
p <- ggplot(data) +
geom_rect(aes(xmin = -border[4], xmax = widthPitch + border[2], ymin = lengthPitch/2 - border[3], ymax = lengthPitch + border[1]), fill = fill1) +
geom_rect(data = boxes, aes(ymin = start, ymax = end, xmin = -border[4], xmax = widthPitch + border[2]), fill = fill2) +
geom_rect(aes(xmin = 0, xmax = widthPitch, ymin = lengthPitch/2, ymax = lengthPitch), fill = NA, col = colPitch, lwd = lwd) +
geom_arc(aes(x0 = widthPitch/2, y0 = lengthPitch/2, r = 9.15, start = pi/2, end = -pi/2), col = colPitch, lwd = lwd) +
geom_circle(aes(x0 = widthPitch/2, y0 = lengthPitch/2, r = 0.25), fill = colPitch, col = colPitch, lwd = lwd) +
geom_segment(aes(x = 0, y = lengthPitch/2, xend = widthPitch, yend = lengthPitch/2), col = colPitch, lwd = lwd) +
geom_arc(aes(x0 = widthPitch/2, y0 = lengthPitch - 11, r = 9.15, start = pi * 0.705, end = 1.295 * pi), col = colPitch, lwd = lwd) +
geom_rect(aes(xmin = widthPitch/2 - 20.15, xmax = widthPitch/2 + 20.15, ymin = lengthPitch - 16.5, ymax = lengthPitch), fill = NA, col = colPitch, lwd = lwd) +
geom_circle(aes(x0 = widthPitch/2, y0 = lengthPitch - 11, r = 0.25), fill = colPitch, col = colPitch, lwd = lwd) +
geom_rect(aes(xmin = (widthPitch/2) - 9.16, xmax = (widthPitch/2) + 9.16, ymin = lengthPitch - 5.5, ymax = lengthPitch), fill = NA, col = colPitch, lwd = lwd) +
geom_rect(aes(xmin = (widthPitch/2) - 3.66, xmax = (widthPitch/2) + 3.66, ymin = lengthPitch, ymax = lengthPitch + 2), fill = NA, col = colPitch, lwd = lwd) +
coord_fixed(ylim = c(lengthPitch/2 - border[3], lengthPitch + border[1])) +
theme(rect = element_blank(),
line = element_blank(),
axis.text = element_blank(),
axis.title = element_blank())
theme_buffer <- ifelse(theme[1] == "light", 0, 4)
if(!is.null(title) & !is.null(subtitle)) {
p <- p +
draw_text(title,
x = widthPitch/2, y = lengthPitch + 10, hjust = 0.5, vjust = 1,
size = 15, fontface = 'bold', col = colText) +
draw_text(subtitle,
x = widthPitch/2, y = lengthPitch + 6.5, hjust = 0.5, vjust = 1,
size = 13, col = colText) +
theme(plot.margin = unit(c(-0.7,-1.4,-0.7,-1.4), "cm"))
} else if(!is.null(title) & is.null(subtitle)) {
p <- p +
draw_text(title,
x = widthPitch/2, y = lengthPitch + 6.5, hjust = 0.5, vjust = 1,
size = 15, fontface = 'bold', col = colText) +
theme(plot.margin = unit(c(-1.2,-1.4,-0.7,-1.4), "cm"))
} else if(is.null(title) & !is.null(subtitle)) {
p <- p +
draw_text(subtitle,
x = widthPitch/2, y = lengthPitch + 6.5, hjust = 0.5, vjust = 1,
size = 13, col = colText) +
theme(plot.margin = unit(c(-1.2,-1.4,-0.7,-1.4), "cm"))
} else if(is.null(title) & is.null(subtitle)){
p <- p +
theme(plot.margin = unit(c(-1.85,-1.4,-0.7,-1.4), "cm"))
}
return(p)
} |
"inner.prod" <-
function(x,y,mass=NULL) {
x <- as.matrix(x); y <- as.matrix(y);
dx <- dim(x); dy <- dim(y);
if(dx[1]!=dy[1])
stop("inner.prod: unequal vector lengths")
if(dx[2]>1 && dy[2]>1) {
if(dx[2]!=dy[2])
stop("inner.prod: unequal vector lengths")
}
if(dx[2]==1)
x <- as.numeric(x)
if(dy[2]==1)
y <- as.numeric(y)
if(!is.null(mass)) {
if (dx[1] != (length(mass)*3))
stop("inner.prod: incorrect length of mass")
}
if(is.null(mass))
mass <- 1
else
mass <- rep(mass,each=3)
if(is.matrix(x) || is.matrix(y))
return(colSums((x*y)*mass^2))
else
return(sum(x*y*mass^2))
} |
print.multcomp_table <- function(x, ...) {
cat("Effect: ",attr(x,"effect_name"), ".\n",sep="")
cat("Alternative Hypothesis: ",attr(x,"alternative"), ".\n",sep="")
cat("Statistic: ",attr(x,"test"),"(",paste(attr(x,"df"),collapse=", "),")", ".\n",sep="")
cat("Resampling Method: ",attr(x,"method"), ".\n",sep="")
cat("Type of Resampling: ",attr(x,"type"), ".\n",sep="")
cat("Number of Dependant Variables: ",attr(x,"nDV"), ".\n",sep="")
cat("Number of Resamples: ",attr(x,"np"), ".\n",sep="")
cat("Multiple Comparisons Procedure: ",attr(x,"multcomp"), ".\n",sep="")
if(attr(x,"multcomp") == "clustermass"){
cat("Threshold: ",attr(x,"threshold"),".\n",sep="")
cat("Mass Function: ",attr(x,"fun_name"),".\n",sep="")
}
if(attr(x,"table_type")=="cluster"){
if(attr(x,"multcomp")=="clustermass"){
cat("Table of clusters.\n")
}else if(attr(x,"multcomp")!="clustermass"){
cat("Table of pseudo-clusters.\n")
}
}else if(attr(x,"table_type")=="full"){
cat("Table of all tests.\n")
}
cat("\n")
if(!is.null(attr(x,"nocluster"))){
if(attr(x,"nocluster")){
cat("No cluster above the threshold.\n")}else{
print.data.frame(x)}
}else{
print.data.frame(x)}
cat("\n\n")
} |
mkExcelTsd=function(seerSet,tsdn,outDir="~/Results",outName=NULL,flip=FALSE) {
if (length(tsdn)>1) {
cat("collapsing brks vector to a tsdn string\n")
tsdn=paste0("b",paste(tsdn,collapse="_"))
}
if (is.null(seerSet$L)) stop("seerSet L field is empty. Please run tsd on your seerSet object!") else
L=seerSet$L[[tsdn]]
if (!dir.exists(outDir)) dir.create(outDir,recursive=T)
unlink(f<-paste0(outDir,"/",ifelse(is.null(outName),paste0(seerSet$bfn,tsdn),outName),ifelse(flip,"Flipped",""),".xlsx"))
wb <- createWorkbook()
hs1=createStyle(fgFill="
OL=NULL
intvs=names(L[[L$trtS[1]]][["Obs"]])
sheetS=L$firstS
if (flip) sheetS=seerSet$secondS
for (icanc in sheetS) {
addWorksheet(wb,icanc)
M=NULL
for (R in L$trtS) {
D=NULL
for (intv in intvs) {
if (flip) {
O=L[[R]][["Obs"]][[intv]][,icanc,drop=FALSE]
E=L[[R]][["Exp"]][[intv]][,icanc,drop=FALSE]
} else {
O=L[[R]][["Obs"]][[intv]][icanc,,drop=FALSE]
E=L[[R]][["Exp"]][[intv]][icanc,,drop=FALSE]
}
RR=O/E
LL=qchisq(.025,2*O) /(2*E)
UL=qchisq(.975,2*O+2)/(2*E)
if (flip) col=as.data.frame(round(cbind(RR,LL,UL,O=O,E=E),2)) else
col=as.data.frame(round(cbind(t(RR),t(LL),t(UL),O=t(O),E=t(E)),2))
names(col)=c("RR","LL","UL","O","E")
D=cbind(D,paste0(col$RR,"(",col$LL,",",col$UL,") O=",O))
}
colnames(D)=paste(intvs,"after",R)
rownames(D)=rownames(col)
M=cbind(M,D)
}
if (flip) writeData(wb, icanc,cbind("1st cancer"=L$firstS,M), headerStyle = hs1) else
writeData(wb, icanc, cbind("2nd cancer"=seerSet$secondS,M), headerStyle = hs1)
OL[[icanc]]=M
setColWidths(wb,icanc, cols = 1:(dim(M)[2]+1), widths = "auto")
freezePane(wb,icanc,firstRow = TRUE, firstCol = TRUE)
}
saveWorkbook(wb,file=f,overwrite = TRUE)
cat("Workbook was written to",f,"\n")
invisible(OL)
} |
describeC <- function(x, w, data, digits=3, printC=FALSE)
{
if(missing(x)) stop("Oops. You need to specify the variables to be analyzed. To see how to use this function, try example(descibeC) or help(describeC).")
if(!missing(w)) w.name = deparse(substitute(w))
check.value(digits, valuetype="numeric")
if(!missing(data))
{
variables.list <- as.list(substitute(x))
if(length(variables.list)>1) variables.list <- variables.list[-1]
if(is.matrix(data)) data <- data.frame(data)
variable.names = NULL
variable.set = data.frame(matrix(NA, nrow=nrow(data), ncol=length(variables.list)))
for(i in 1:length(variables.list))
{
variable.names[i] <- deparse(variables.list[[i]])
variable.set[, i] <- vector.from.data(variables.list[[i]], data)
}
colnames(variable.set) <- variable.names
if(!missing(w)) w <- vector.from.data(substitute(w), data)
}
if(missing(data))
{
if(!is.list(x))
{
variables.list <- NULL
variables.list[[1]] = substitute(x)
}
if(is.list(x))
{
variables.list <- as.list(substitute(x))
if(length(variables.list)>1) variables.list <- variables.list[-1]
}
variable.names = NULL
variable.set = data.frame(matrix(NA, ncol=length(variables.list), nrow=length(unlist(x)) / length(variables.list)))
for(i in 1:length(variables.list))
{
variable.names[i] <- deparse(variables.list[[i]])
variable.set[, i] <- eval(variables.list[[i]])
}
variable.set <- data.frame(variable.set, stringsAsFactors=TRUE)
colnames(variable.set) <- variable.names
}
if(!missing(w))
{
weighted = TRUE
w.length <- length(w)
if (nrow(variable.set) != w.length) stop(paste(gettext("X variables and", domain = "R-descr"), w.name, gettext("have different lengths.", domain = "R-descr")))
}
else
{
w <- rep(1, nrow(variable.set))
weighted = FALSE
}
check.variable(w, vartype="numeric")
w.full <- w
w.rawsum <- sum(w, na.rm=T)
descriptives.names = c("Observed values", "Missing values", "Unique values", "Class",
"Mean", "Median", "Mode",
"Variance", "Standard deviation", "Minimum", "Maximum", "Range", "First quartile (25%)",
"Third quartile (75%)", "Interquartile range (IQR)", "Skewness", "Kurtosis")
descriptives.df <- data.frame(matrix(NA, nrow=length(descriptives.names), ncol=length(variable.names)))
rownames(descriptives.df) <- descriptives.names
colnames(descriptives.df) <- variable.names
factormessage = NULL
for(i in 1: length(variable.names))
{
x.name <- variable.names[i]
x <- variable.set[, x.name]
k <- grep(FALSE, (is.na(x) | is.na(w.full)))
x <- x[k]
w <- w.full[k]
x.nonmissing <- sum(w)
x.missing <- w.rawsum - x.nonmissing
if(Hmisc::all.is.numeric(x, extras=c(NA)) & !is.numeric(x))
{
transform.warning <- paste("Note:", x.name, "is not a numeric (interval-level) variable, but can be analyzed as one. Do you want to analyze", x.name, "as numeric?\nEnter y to analyze", x.name,"as numeric or any other key to leave it as is.")
if(tolower(ask(transform.warning))=="y") x = as.numeric(x)
}
unique <- length(unique(x))
if(unique == length(x)) mode = "None"
else
{
if(is.numeric(x)) mode <- round(as.numeric(wtd.mode(x=x, w=w)), digits)
else mode <- wtd.mode(x=x, w=w)
}
mode <- paste(mode, collapse=", ")
if(is.numeric(x)) median <- round(as.numeric(wtd.median(x=x, w=w)), digits)
if(is.ordered(x)) median <- wtd.median(x=x, w=w)
if(is.numeric(x))
{
mean <- stats::weighted.mean(x=x, w=w, na.rm = T)
var <- Hmisc::wtd.var(x=x, weights=w)
sd <- sqrt(var)
min <- min(x)
max <- max(x)
range <- (max - min)
quantiles <- Hmisc::wtd.quantile(x=x, weights=w, probs=c(.10,.25,.50,.75,.90))
Q1 <- quantiles["25%"]
Q3 <- quantiles["75%"]
IQR <- Q3 - Q1
skewness <- wtd.skewness(x, w)
kurtosis <- wtd.kurtosis(x, w)
}
descriptives.df["Observed values", x.name] <- round(x.nonmissing, digits)
descriptives.df["Missing values", x.name] <- round(x.missing, digits)
descriptives.df["Unique values", x.name] <- unique
descriptives.df["Class", x.name] <- paste(class(x), collapse=", ")
if(is.numeric(x)) descriptives.df["Mean", x.name] <- round(mean, digits)
if(is.numeric(x) || is.ordered(x)) descriptives.df["Median", x.name] <- median
descriptives.df["Mode", x.name] <- mode
if(is.numeric(x))
{
descriptives.df["Variance", x.name] <- round(var, digits)
descriptives.df["Standard deviation", x.name] <- round(sd, digits)
descriptives.df["Minimum", x.name] <- round(min, digits)
descriptives.df["Maximum", x.name] <- round(max, digits)
descriptives.df["Range", x.name] <- round(range, digits)
descriptives.df["First quartile (25%)", x.name] <- round(Q1, digits)
descriptives.df["Third quartile (75%)", x.name] <- round(Q3, digits)
descriptives.df["Interquartile range (IQR)", x.name] <- round(IQR, digits)
descriptives.df["Skewness", x.name] <- round(skewness, digits)
descriptives.df["Kurtosis", x.name] <- round(kurtosis, digits)
}
if(is.factor(x) && !is.ordered(x)) factormessage <- paste(factormessage, "Note: ", x.name, " was analyzed as an unordered factor.\nTo evaluate as ordered factor, try x=list(as.ordered(", x.name, "))\n", sep="")
}
printrows <- NULL
for(j in 1:nrow(descriptives.df)) if(!all(is.na(descriptives.df[j, ]))) printrows = c(printrows, j)
descriptives.df <- data.frame(descriptives.df[printrows, ])
descriptives.df[is.na(descriptives.df)] <- ""
rownames(descriptives.df) <- descriptives.names[printrows]
colnames(descriptives.df) <- variable.names
main.heading <- headingbox("Descriptive Statistics", width=75, marker="=")
if(printC==TRUE) printC(main.heading)
variable.names.string <- paste(variable.names, collapse=", ")
table_caption <- paste("Descriptive Statistics for", variable.names.string)
if(weighted) table_caption <- paste(table_caption, ", weighted by ", w.name, sep="")
print(knitr::kable(format(descriptives.df, drop0trailing=F, nsmall=digits, digits=digits), format="simple", align="r", caption=table_caption))
if(printC==TRUE) printC(knitr::kable(format(descriptives.df, drop0trailing=F, nsmall=digits, digits=digits), caption=printCaption(table_caption), format="html"))
cat("\n")
slightpause()
if(!is.null(factormessage)) message(factormessage)
if(printC==T) printC(match.call(expand.dots = FALSE))
invisible(descriptives.df)
}
|
pski <- function(binomN, count, Tski, g) {
result <- 1.0
if (binomN == -1) {
if (any(abs(Tski-1) > 1e-10)) {
g <- 1 - (1 - g)^Tski
}
if (count>0)
result <- g
else
result <- 1 - g
}
else if (binomN == 0) {
if (count == 0)
result <- exp(-Tski * g)
else
result <- dpois(count, Tski * g, FALSE)
}
else if (binomN == 1) {
result <- dbinom (count, round(Tski), g, FALSE)
}
else if (binomN > 1) {
if (abs(Tski-1) > 1e-10) {
g <- 1 - (1 - g)^Tski
}
result <- dbinom (count, binomN, g, FALSE)
}
else stop("binomN < -1 not allowed")
result
}
convolvemq <- function (
j,
kernelp,
edgecode,
mqarray,
pjm
)
{
mm <- nrow(mqarray)
kn <- ncol(mqarray)
workpjm <- numeric(mm)
for (m in 1:mm) {
if (edgecode == 2) {
q <- mqarray[m,] + 1
q <- q[q>0]
sump <- sum(kernelp[kn * (j-1) + q], na.rm = T)
}
else {
sump <- 1.0
}
if (is.na(sump)) stop("convolvemq: NA kernel probability sum")
if (sump>0) {
for (q in 1:kn) {
mq <- mqarray[m,q] + 1
if (mq > 0) {
if (mq>mm) stop("mq > mm")
workpjm[mq] <- workpjm[mq] + pjm[m] * kernelp[q,j]
}
}
}
}
workpjm
}
prw <- function (n, j, x, kk, binomN, cumss, w, PIA, Tsk, gk, h, p0, hindex, pjm) {
dead <- FALSE
for (s in (cumss[j]+1):cumss[j+1]) {
if (binomN == -2) {
wi <- w[n, s]
if (wi < 0) dead <- TRUE
k <- abs(wi)
if (k < 1) {
OK <- h[x,,hindex[n,s]] > 1e-8
pjm[OK] <- pjm[OK] * p0[x,OK,hindex[n,s]]
}
else {
c <- PIA[1,n, s, k, x]
if (c >= 1) {
pjm <- pjm * Tsk[k,s] * (1-p0[x,,hindex[n,s]]) * gk[c, k, ] / h[x,,hindex[n,s]]
}
}
}
else {
for (k in 1:kk) {
c <- PIA[1,n,s,k,x]
if (c >= 1) {
count <- w[n,s,k]
if (count<0) {count <- -count; dead <- TRUE }
pjm <- pjm * pski(binomN,count,Tsk[k,s], gk[c,k,])
}
}
}
if (dead) break;
}
pjm
}
prwisecr <- function (type, n, x, nc, jj, kk, mm, nmix, cumss, w, fi, li, gk,
openval, PIA, PIAJ, binomN, Tsk, intervals, h, hindex,
CJSp1, moveargsi, movementcode, sparsekernel, edgecode,
usermodel, kernel = NULL, mqarray = NULL, cellsize = NULL,
r0) {
p0 <- if (binomN == -2) exp(-h) else 1
phij <- getphij (n, x, openval, PIAJ, intervals)
if (movementcode > 1) {
moveargsi <- pmax(moveargsi,0)
moveargs <- getmoveargs (n, x, openval, PIAJ, intervals, moveargsi)
kernelp <- fillkernelC ( jj, movementcode-2, sparsekernel, kernel,
usermodel, cellsize, r0, moveargsi, moveargs, normalize = TRUE)
}
if(type==6) {
minb <- fi[n]
cjs <- 1 - CJSp1
}
else {
minb <- 1
cjs <- 0
beta <- getbeta (type, n, x, openval, PIAJ, intervals, phij)
}
maxb <- fi[n]
mind <- abs(li[n])
maxd <- jj
if (li[n] < 0) maxd <- mind
pdt <- 0
pdotbd <- 1.0
for (b in minb:maxb) {
for (d in mind:maxd) {
if (type==6) {
pbd <- 1
}
else {
pbd <- beta[b]
pdotbd <- 1
}
if (b<d) pbd <- pbd * prod(phij[b:(d-1)])
if ((li[n]>0) & (d<jj))
pbd <- pbd * (1-phij[d])
prwi <- 1.0
if (d >= (b+cjs)) {
if (movementcode == 0) {
alpha <- rep(1.0/mm,mm)
for (j in (b+cjs):d) {
alpha <- prw(n, j, x, kk, binomN, cumss, w, PIA, Tsk, gk, h, p0, hindex, alpha)
}
prwi <- sum(alpha)
}
else if ( movementcode == 1) {
prwi <- 1.0
for (j in (b+cjs):d) {
alpha <- rep(1.0/mm,mm)
alpha <- prw(n, j, x, kk, binomN, cumss, w, PIA, Tsk, gk, h, p0, hindex, alpha)
prwi <- prwi * sum(alpha)
}
}
else {
alpha <- rep(1.0/mm, mm)
alpha <- prw (n, b+cjs, x, kk, binomN, cumss, w, PIA, Tsk, gk, h, p0, hindex, alpha)
if (d>(b+cjs)) {
for (j in (b+cjs+1):d) {
alpha <- convolvemq(j-1, kernelp, edgecode, mqarray, alpha)
alpha <- prw(n, j, x, kk, binomN, cumss, w, PIA, Tsk, gk, h, p0, hindex, alpha)
}
}
prwi <- sum(alpha)
}
}
pdt <- pdt + pbd * prwi / pdotbd
}
}
pdt
} |
acontext("chunk vars")
test_that("produce as many chunk files as specified", {
viz <- list(iris=ggplot()+
geom_point(aes(Petal.Width, Sepal.Length, showSelected=Species),
data=iris, chunk_vars="Species", validate_params = FALSE))
tdir <- tempfile()
dir.create(tdir)
tsv.files <- Sys.glob(file.path(tdir, "*.tsv"))
expect_equal(length(tsv.files), 0)
animint2dir(viz, tdir, open.browser=FALSE)
tsv.files <- Sys.glob(file.path(tdir, "*.tsv"))
expect_equal(length(tsv.files), 3)
viz <- list(iris=ggplot()+
geom_point(aes(Petal.Width, Sepal.Length, showSelected=Species),
data=iris, chunk_vars=character(), validate_params = FALSE))
tdir <- tempfile()
dir.create(tdir)
tsv.files <- Sys.glob(file.path(tdir, "*.tsv"))
expect_equal(length(tsv.files), 0)
animint2dir(viz, tdir, open.browser=FALSE)
tsv.files <- Sys.glob(file.path(tdir, "*.tsv"))
expect_equal(length(tsv.files), 1)
})
test_that("produce informative errors for bad chunk_vars", {
viz <- list(iris=ggplot()+
geom_point(aes(Petal.Width, Sepal.Length, showSelected=Species),
data=iris, chunk_vars="species", validate_params = FALSE))
expect_error({
animint2dir(viz, open.browser=FALSE)
}, "invalid chunk_vars species; possible showSelected variables: Species")
viz <- list(iris=ggplot()+
geom_point(aes(Petal.Width, Sepal.Length, showSelected=Species),
data=iris, chunk_vars=NA, validate_params = FALSE))
expect_error({
animint2dir(viz, open.browser=FALSE)
}, paste("chunk_vars must be a character vector;",
"use chunk_vars=character() to specify 1 chunk"), fixed=TRUE)
})
data(breakpoints, package = "animint")
only.error <- subset(breakpoints$error,type=="E")
only.segments <- subset(only.error,bases.per.probe==bases.per.probe[1])
signal.colors <- c(estimate="
latent="
breakpointError <-
list(signal=ggplot()+
geom_point(aes(position, signal, showSelected=bases.per.probe),
data=breakpoints$signals)+
geom_line(aes(position, signal), colour=signal.colors[["latent"]],
data=breakpoints$imprecision)+
geom_segment(aes(first.base, mean, xend=last.base, yend=mean,
showSelected=segments,
showSelected2=bases.per.probe),
colour=signal.colors[["estimate"]],
data=breakpoints$segments)+
geom_vline(aes(xintercept=base,
showSelected=segments,
showSelected2=bases.per.probe),
colour=signal.colors[["estimate"]],
linetype="dashed",
data=breakpoints$breaks),
error=ggplot()+
geom_vline(aes(xintercept=segments, clickSelects=segments),
data=only.segments, lwd=17, alpha=1/2)+
geom_line(aes(segments, error, group=bases.per.probe,
clickSelects=bases.per.probe),
data=only.error, lwd=4))
bytes.used <- function(file.vec, apparent.size = FALSE){
file.str <- paste(file.vec, collapse=" ")
if(apparent.size){
cmd <- paste("ls -l", file.str, "| awk '{print $5}'")
} else{
cmd <- paste("du -k", file.str, "| awk '{print $1 * 1024}'")
}
tryCatch({
du.lines <- system(cmd, intern=TRUE)
as.integer(sub("\t.*", "", du.lines))
}, error=function(e){
rep(NA_integer_, length(file.vec))
})
}
test.paths <-
c(tempfile=tempfile(),
HOME=file.path(Sys.getenv("HOME"), "ANIMINT_TEST_FOO"),
getwd=file.path(getwd(),"ANIMINT_TEST_FOO"))
for(f in test.paths){
unlink(f)
cat("foo", file=f)
}
du.bytes <- bytes.used(test.paths)
apparent.bytes <- bytes.used(test.paths, apparent.size = TRUE)
byte.df <- data.frame(du.bytes, apparent.bytes,
file.size=file.size(test.paths),
test.paths)
test_that("default chunks are at least 4KB", {
tdir <- tempfile()
dir.create(tdir)
tsv.files <- Sys.glob(file.path(tdir, "*.tsv"))
expect_equal(length(tsv.files), 0)
expect_no_warning({
animint2dir(breakpointError, tdir, open.browser=FALSE)
})
tsv.files <- Sys.glob(file.path(tdir, ".+chunk[0-9]+.tsv"))
geom <- sub("_.*", "", basename(tsv.files))
files.by.geom <- split(tsv.files, geom)
for(files in files.by.geom){
if(length(files) > 1){
info <- file.info(files)
expect_true(all(4096 < info$size))
}
}
}) |
chebyshev.c.quadrature.rules <- function( n, normalized = FALSE )
{
if ( n <= 0 )
stop( "highest order is not positive" )
if ( n != round( n ) )
stop( "highest order is not an integer" )
recurrences <- chebyshev.c.recurrences( n, normalized )
inner.products <- chebyshev.c.inner.products( n )
return( quadrature.rules( recurrences, inner.products ) )
} |
stochprof.results.rLNLN <-
function(prev.result,TY,show.plots=T,plot.title="",pdf.file,fix.mu=F) {
if (is.null(prev.result) || (nrow(prev.result)==0)) {
print("stochprof.results: File contains only the header.")
return(NULL)
}
results <- prev.result
these.names <- colnames(results)
m <- ncol(results)/TY - 2
inf.indices <- which(results[,"target"]>=10^7)
if (length(inf.indices)>0) {
results <- results[-inf.indices,,drop=F]
}
if (nrow(results)==0) {
print("stochprof.results: dataset contains only infinite target values.")
return(NULL)
}
mu.min <- apply(X=results[,TY:((m+1)*TY-1),drop=F],FUN=min,MARGIN=1)
mu.max <- apply(X=results[,TY:((m+1)*TY-1),drop=F],FUN=max,MARGIN=1)
smallmu.indices <- which(mu.min<(-50))
largemu.indices <- which(mu.max>10)
mu.indices <- union(smallmu.indices,largemu.indices)
if ((length(mu.indices)>0) && (length(mu.indices)<nrow(results))) {
results <- results[-mu.indices,,drop=F]
}
nod.p <- 4
nod.all <- 3
nod.target <- 4
if (TY>1) {
p.indices <- 1:(TY-1)
}
else {
p.indices <- NULL
}
target.indices <- (m+2)*TY
results_dup <- results
results_dup[,p.indices] <- round(results_dup[,p.indices],nod.p)
results_dup[,-c(p.indices,target.indices)] <- round(results_dup[,-c(p.indices,target.indices)],nod.all)
results_dup[,target.indices] <- round(results_dup[,target.indices],nod.target)
results <- results[!duplicated(results_dup,MARGIN=1),]
results <- results[order(results[,"target"]),,drop=F]
indices <- which(results[,"target"]<=log(5) + (results[,"target"])[1])
minlength <- 50
maxlength <- 5000
if (length(indices)<minlength) {
indices <- 1:min(minlength,nrow(results))
}
else if (length(indices)>maxlength) {
indices <- 1:min(maxlength,nrow(results))
}
results <- results[indices,,drop=F]
if (show.plots) {
best <- results[1,,drop=F]
index.set <- 1:(ncol(results)-1)
if (fix.mu) {
index.set <- index.set[-(TY:((m+1)*TY-1))]
}
if (missing(pdf.file)) {
par(ask=T)
}
else {
pdf(pdf.file)
}
for (i in index.set) {
plot(results[,i],-results[,"target"],xlab=these.names[i],ylab="log likelihood",main=plot.title,cex.axis=1.5,cex.lab=1.5,pch=1,lwd=3)
abline(v=best[i],lwd=2,col="red")
}
if (missing(pdf.file)) {
par(ask=F)
}
else {
dev.off()
}
}
return(results)
} |
dt__get <- function(data, key) {
data[[key, exact = TRUE]]
}
dt__set <- function(data, key, value) {
data[[key]] <- value
data
} |
library(shiny.fluent)
if (interactive()) {
shinyApp(
ui = div(
DefaultButton.shinyInput("toggleCallout", text = "Toggle Callout"),
reactOutput("callout")
),
server = function(input, output) {
show <- reactiveVal(FALSE)
observeEvent(input$toggleCallout, show(!show()))
output$callout <- renderReact({
if (show()) {
Callout(
tags$div(
style = "margin: 10px",
"Callout contents"
)
)
}
})
}
)
} |
recreg <- function(formula,data=data,cause=1,death.code=c(2),cens.code=1,cens.model=~1,
weights=NULL,offset=NULL,Gc=NULL,...)
{
cl <- match.call()
m <- match.call(expand.dots = TRUE)[1:3]
special <- c("strata", "cluster","offset")
Terms <- terms(formula, special, data = data)
m$formula <- Terms
m[[1]] <- as.name("model.frame")
m <- eval(m, parent.frame())
Y <- model.extract(m, "response")
if (class(Y)!="EventCens") stop("Expected a 'EventCens'-object")
if (ncol(Y)==2) {
exit <- Y[,1]
entry <- NULL
status <- Y[,2]
} else {
entry <- Y[,1]
exit <- Y[,2]
status <- Y[,3]
cens <- Y[,4]
}
id <- strata <- NULL
if (!is.null(attributes(Terms)$specials$cluster)) {
ts <- survival::untangle.specials(Terms, "cluster")
pos.cluster <- ts$terms
Terms <- Terms[-ts$terms]
id <- m[[ts$vars]]
} else pos.cluster <- NULL
if (!is.null(stratapos <- attributes(Terms)$specials$strata)) {
ts <- survival::untangle.specials(Terms, "strata")
pos.strata <- ts$terms
Terms <- Terms[-ts$terms]
strata <- m[[ts$vars]]
strata.name <- ts$vars
} else { strata.name <- NULL; pos.strata <- NULL}
if (!is.null(offsetpos <- attributes(Terms)$specials$offset)) {
ts <- survival::untangle.specials(Terms, "offset")
Terms <- Terms[-ts$terms]
offset <- m[[ts$vars]]
}
X <- model.matrix(Terms, m)
if (!is.null(intpos <- attributes(Terms)$intercept))
X <- X[,-intpos,drop=FALSE]
if (ncol(X)==0) X <- matrix(nrow=0,ncol=0)
res <- c(recreg01(data,X,entry,exit,status,cens,id,strata,offset,weights,
strata.name, cens.model=cens.model, cause=cause,
death.code=death.code,cens.code=cens.code,Gc=Gc,...),
list(call=cl,model.frame=m,formula=formula,strata.pos=pos.strata,
cluster.pos=pos.cluster,n=nrow(X),nevent=sum(status==cause))
)
class(res) <- c("phreg","recreg")
return(res)
}
recreg01 <- function(data,X,entry,exit,status,cens,id=NULL,strata=NULL,offset=NULL,weights=NULL,
strata.name=NULL,beta,stderr=1,method="NR",no.opt=FALSE,propodds=NULL,profile=0,
case.weights=NULL,cause=1,death.code=2,cens.code=1,Gc=NULL,cens.model=~+1,augmentation=0,cox.prep=FALSE,...) {
p <- ncol(X)
if (missing(beta)) beta <- rep(0,p)
if (p==0) X <- cbind(rep(0,length(exit)))
cause.jumps <- which(status==cause)
max.jump <- max(exit[cause.jumps])
other <- which((status %in% death.code ) & (exit< max.jump))
n <- length(exit)
if (is.null(strata)) {
strata <- rep(0,length(exit))
nstrata <- 1
strata.level <- NULL
} else {
strata.level <- levels(strata)
ustrata <- sort(unique(strata))
nstrata <- length(ustrata)
strata.values <- ustrata
if (is.numeric(strata))
strata <- fast.approx(ustrata,strata)-1
else {
strata <- as.integer(factor(strata,labels=seq(nstrata)))-1
}
}
if (is.null(entry)) entry <- rep(0,length(exit))
trunc <- (any(entry>0))
if (is.null(offset)) offset <- rep(0,length(exit))
if (is.null(weights)) weights <- rep(1,length(exit))
if (is.null(case.weights)) case.weights <- rep(1,length(exit))
strata.call <- strata
if (!is.null(id)) {
ids <- unique(id)
nid <- length(ids)
if (is.numeric(id))
id <- fast.approx(ids,id)-1
else {
id <- as.integer(factor(id,labels=seq(nid)))-1
}
} else { id <- as.integer(seq_along(entry))-1; nid <- nrow(X); }
id.orig <- id+1;
whereC <- which(cens==cens.code)
time <- exit
statusC <- c(cens==cens.code)
data$id <- id
data$exit__ <- exit
data$entry__ <- entry
data$statusC <- statusC
cens.strata <- cens.nstrata <- NULL
if (length(whereC)>0) {
if (is.null(Gc)) {
kmt <- TRUE
if (class(cens.model)[1]=="formula") {
formC <- update.formula(cens.model,Surv(entry__,exit__,statusC)~ . +cluster(id))
cens.model <- phreg(formC,data)
}
if (cens.model$p>0) kmt <- FALSE
Pcens.model <- predict(cens.model,data,times=exit,individual.time=TRUE,se=FALSE,km=kmt)
Stime <- Pcens.model$surv <- c(Pcens.model$surv)
nCstrata <- cens.model$nstrata
cens.strata <- Pcens.model$strata
} else {
formC <- NULL
Stime <- Gc
Pcens.model <- list(time=exit,surv=Gc,strata=0)
nCstrata <- 1
cens.strata <- rep(0,length(exit))
}
} else {
formC <- NULL
Stime <- Gc <- rep(1,length(exit))
Pcens.model <- list(time=exit,surv=Gc,strata=0)
nCstrata <- 1
cens.strata <- rep(0,length(exit))
}
Zcall <- cbind(status,cens.strata,Stime,cens)
stat1 <- 1*(status==cause)
xx2 <- .Call("FastCoxPrepStrata",entry,exit,stat1,X,id,trunc,strata,weights,offset,Zcall,case.weights,PACKAGE="mets")
xx2$nstrata <- nstrata
jumps <- xx2$jumps+1
jumptimes <- xx2$time[jumps]
strata1jumptimes <- xx2$strata[jumps]
Xj <- xx2$X[jumps,,drop=FALSE]
if (length(whereC)>0) {
if (is.null(Gc)) {
whereaJ <- fast.approx(c(0,cens.model$cumhaz[,1]),jumptimes,type="left")
Gts <- vecAllStrata(cens.model$cumhaz[,2],cens.model$strata.jump,cens.model$nstrata)
Gts <- apply(rbind(0,Gts),2,diff)
GtsAl<- Gts <- apply(Gts,2,function(x) exp(cumsum(log(1-x))))
Gts <- rbind(1,Gts)[whereaJ,]
Gts[is.na(Gts)] <- 0
Gjumps <- Gts
} else Gts <- Gjumps <- c(1,Pcens.model$surv)[fast.approx(c(0,Pcens.model$time),jumptimes,type="left")]
} else {
Gts <- Gjumps <- rep(1,length(jumptimes))
}
if (length(other)>=1) {
trunc <- TRUE
weightso <- weights[other]/Stime[other]
timeoo <- rep(max(exit)+1,length(other))
statuso <- rep(1,length(other))
Xo <- X[other,,drop=FALSE]
offseto <- offset[other]
entryo <- exit[other]
ido <- id[other]
stratao <- strata[other]
if (nCstrata>1) {
Cstratao <- cens.strata[other]
Zcall <- matrix(Cstratao,length(other),1)
} else {
Cstratao <- rep(0,length(other))
Zcall <- matrix(0,1,1);
}
xx <- .Call("FastCoxPrepStrata",entryo,timeoo,statuso,Xo,
ido,trunc,stratao,weightso,offseto,Zcall,case.weights[other],PACKAGE="mets")
xx$nstrata <- nstrata
timeo <- xx$time
if (nCstrata>1) xxCstrata <- c(xx$Z) else xxCstrata <- rep(0,length(timeo))
where <- indexstratarightR(timeo,xx$strata,jumptimes,strata1jumptimes,nstrata)
}
obj <- function(pp,all=FALSE) {
if (length(other)>=1) {
if (nCstrata==1) {
rr <- c(xx$sign*exp(xx$X %*% pp + xx$offset)*xx$weights)
S0no <- c(0,revcumsumstrata(rr,xx$strata,xx$nstrata))
S1no <- rbind(0,apply(xx$X*rr,2,revcumsumstrata,xx$strata,xx$nstrata))
S2no <- rbind(0,apply(xx$XX*rr,2,revcumsumstrata,xx$strata,xx$nstrata));
Gjumps <- c(Gjumps)
S0no <- Gjumps*S0no[where+1]
S1no <- Gjumps*S1no[where+1,,drop=FALSE]
S2no <- Gjumps*S2no[where+1,,drop=FALSE]
} else {
ff <- function(x,strata,nstrata,strata2,nstrata2)
{
x <- rbind(0,revcumsum2strata(x,strata,nstrata,strata2,nstrata2)$mres)
x <- x[where+1,]
x <- apply(x*Gts,1,sum)
return(x)
}
rr <- c(xx$sign*exp(xx$X %*% pp + xx$offset)*xx$weights)
S0no <- ff(rr,xx$strata,xx$nstrata,xxCstrata,nCstrata)
S1no <- apply(xx$X*rr,2,ff,xx$strata,xx$nstrata,xxCstrata,nCstrata);
S2no <- apply(xx$XX*rr,2,ff,xx$strata,xx$nstrata,xxCstrata,nCstrata);
}
} else { Gjumps <- S0no <- S1no <- S2no <- 0}
rr2 <- c(xx2$sign*exp(xx2$X %*% pp + xx2$offset)*xx2$weights)
rr2now <- c(xx2$sign*exp(xx2$X %*% pp + xx2$offset))
S0oo <- revcumsumstrata(rr2,xx2$strata,xx2$nstrata)
S1oo <- apply(xx2$X*rr2,2,revcumsumstrata,xx2$strata,xx2$nstrata);
S2oo <- apply(xx2$XX*rr2,2,revcumsumstrata,xx2$strata,xx2$nstrata);
S0oo <- S0oo[jumps,]
S1oo <- S1oo[jumps,,drop=FALSE]
S2oo <- S2oo[jumps,,drop=FALSE]
S0 <- c(S0oo+S0no)
S1 <- S1oo+S1no
E <- S1/S0
weightsJ <- xx2$weights[jumps]
caseweightsJ <- xx2$caseweights[jumps]
strataJ <- xx2$strata[jumps]
rr2now <- rr2now[jumps]
U <- (Xj-E)
ploglik <- (log(rr2now)-log(S0))*weightsJ*caseweightsJ;
if (!is.null(propodds)) {
pow <- c(.Call("cumsumstrataPOR",weightsJ,S0,strataJ,nstrata,propodds,rr2now,PACKAGE="mets")$pow);
DLam <-.Call("DLambetaR",weightsJ,S0,E,Xj,strataJ,nstrata,propodds,rr2now,PACKAGE="mets")$res;
Dwbeta <- DLam*rr2now+(pow-1)*Xj
DUadj <- .Call("vecMatMat",Dwbeta,U,PACKAGE="mets")$vXZ
}
Ut <- caseweightsJ*weightsJ*U
Et2 <- .Call("vecMatMat",E,E,PACKAGE="mets")$vXZ
S2S0 <- (S2oo+S2no)/S0
DUt <- -(S2S0-Et2)
if (!is.null(propodds)) {
Ut <- pow*Ut
S0 <- S0/pow
DUt <- pow*DUt
DUt <- DUt+DUadj
if (profile==1) {
Ut <- Ut+c(ploglik)*Dwbeta
DUt <- DUt
}
ploglik <- pow*ploglik
}
U <- apply(Ut,2,sum)
DUt <- caseweightsJ*weightsJ*DUt
DU <- -matrix(apply(DUt,2,sum),p,p)
ploglik <- sum(ploglik)
U <- U+augmentation
out <- list(ploglik=ploglik,gradient=U,hessian=-DU,cox.prep=xx2,
hessiantime=DUt,weightsJ=weightsJ,caseweightsJ=caseweightsJ,
jumptimes=jumptimes,strata=strataJ,nstrata=nstrata,S0s=cbind(S0oo,S0no),
time=jumptimes,S0=S0/(caseweightsJ*weightsJ),S2S0=S2S0,E=E,U=Ut,X=Xj,Gjumps=Gjumps)
if (all)
return(out)
else
with(out,structure(-ploglik, gradient=-gradient, hessian=-hessian))
}
if (length(jumps)==0) no.opt <- TRUE
opt <- NULL
if (p>0) {
if (no.opt==FALSE) {
if (tolower(method)=="nr") {
opt <- lava::NR(beta,obj,...)
opt$estimate <- opt$par
} else {
opt <- nlm(obj,beta,...)
opt$method <- "nlm"
}
cc <- opt$estimate; names(cc) <- colnames(X)
if (stderr==2) return(cc)
val <- c(list(coef=cc),obj(opt$estimate,all=TRUE))
} else val <- c(list(coef=beta),obj(beta,all=TRUE))
} else {
no.opt <- TRUE
val <- obj(0,all=TRUE)
}
beta.s <- val$coef
if (is.null(beta.s)) beta.s <- 0
opt <- val
if (p>0) {
S0i <- rep(0,length(xx2$strata))
S0i[jumps] <- 1/opt$S0
Z <- xx2$X
U <- E <- matrix(0,nrow(Z),p)
E[jumps,] <- opt$E
U[jumps,] <- opt$U
cumhazA <- cumsumstratasum(S0i,xx2$strata,xx2$nstrata,type="all")
cumhaz <- c(cumhazA$sum)
rr <- c(xx2$sign*exp(Z %*% beta.s + xx2$offset))
if (!is.null(propodds)) {
cumhazm <- c(cumhazA$lagsum)
S0star <- cumsumstrata(rr/(1+rr*cumhazm),xx2$strata,xx2$nstrata)
}
EdLam0 <- apply(E*S0i,2,cumsumstrata,xx2$strata,xx2$nstrata)
MGt <- U[,drop=FALSE]-(Z*cumhaz-EdLam0)*rr*c(xx2$weights)
mid <- max(xx2$id)
UU <- apply(MGt,2,sumstrata,xx2$id,mid+1)
if (length(other)>=1) {
otherxx2 <- which((xx2$Z[,1] %in% death.code) & xx2$sign==1)
statusxx2 <- xx2$Z[,1]
rr0 <- xx2$sign
jumpsC <- which((xx2$Z[,4]==cens.code) & xx2$sign==1)
strataCxx2 <- xx2$Z[,2]
S0iC2 <- S0iC <- rep(0,length(xx2$status))
S0rrr <- revcumsumstrata(rr0,strataCxx2,nCstrata)
S0iC[jumpsC] <- 1/S0rrr[jumpsC]
S0iC2[jumpsC] <- 1/S0rrr[jumpsC]^2
Gcxx2 <- exp(cumsumstrata(log(1-S0iC),strataCxx2,nCstrata))
Gstart <- rep(1,nCstrata)
dstrata <- mystrata(data.frame(strataCxx2,xx2$strata))
ndstrata <- attr(dstrata,"nlevel")
lastt <- tailstrata(dstrata-1,ndstrata)
ll <- cumsum2strata(Gcxx2,S0i,strataCxx2,nCstrata,xx2$strata,xx2$nstrata,Gstart)
Htsj <- ll$res[lastt][dstrata]-ll$res
fff <- function(x) {
cx <- cumsum2strata(Gcxx2,x*S0i,strataCxx2,nCstrata,xx2$strata,xx2$nstrata,Gstart)
cx <- cx$res[lastt][dstrata]-cx$res
return(cx)
}
EHtsj <- apply(E,2,fff)
rrx2 <- rr[otherxx2]*xx2$weights[otherxx2]/xx2$Z[otherxx2,3]
MGt2 <- -(Z[otherxx2,,drop=FALSE]*Htsj[otherxx2,]-EHtsj[otherxx2,,drop=FALSE])*rrx2
UU2 <- apply(MGt2,2,sumstrata,xx2$id[otherxx2],mid+1)
UU <- UU+UU2
}
if ((stderr==1) & (length(other)>=1) & (length(whereC)>0)) {
Xos <- matrix(0,nrow(Z),ncol(Z));
Xos[otherxx2,] <- Z[otherxx2,]*rrx2
rrx <- rep(0,nrow(Z))
rrx[otherxx2] <- rrx2
rrsx <- cumsumstrata(rrx,strataCxx2,nCstrata)
Xos <- apply(Xos,2,cumsumstrata,strataCxx2,nCstrata)
q2 <- (Xos*c(Htsj)-EHtsj*c(rrsx))
sss <- headstrata(dstrata-1,ndstrata)
fff <- function(x) {
gtstart <- x[sss]
cx <- cumsum2strata(x,S0iC2,dstrata-1,ndstrata,strataCxx2,nCstrata,gtstart)$res
return(cx)
}
EdLam0q2 <- apply(q2,2,fff)
MGc <- q2*S0iC-EdLam0q2*c(xx2$sign)
MGc <- apply(MGc,2,sumstrata,xx2$id,mid+1)
} else MGc <- 0
iH <- - tryCatch(solve(opt$hessian),error=function(e) matrix(0,nrow(opt$hessian),ncol(opt$hessian)) )
Uiid <- (UU+MGc) %*% iH
UUiid <- UU %*% iH
var1 <- crossprod(UUiid)
varmc <- crossprod(Uiid)
} else {varmc <- var1 <- 0; MGc <- iH <- UUiid <- Uiid <- NULL}
strata <- xx2$strata[jumps]
cumhaz <- cbind(opt$time,cumsumstrata(1/opt$S0,strata,nstrata))
colnames(cumhaz) <- c("time","cumhaz")
if (no.opt==FALSE & p!=0) {
DLambeta.t <- apply(opt$E/c(opt$S0),2,cumsumstrata,strata,nstrata)
varbetat <- rowSums((DLambeta.t %*% iH)*DLambeta.t)
} else varbetat <- 0
var.cumhaz <- cumsumstrata(1/opt$S0^2,strata,nstrata)+varbetat
se.cumhaz <- cbind(jumptimes,(var.cumhaz)^.5)
colnames(se.cumhaz) <- c("time","se.cumhaz")
out <- list(coef=beta.s,var=varmc,se.coef=diag(varmc)^.5,iid.naive=UUiid,
iid=Uiid,ncluster=nid,
ihessian=iH,hessian=opt$hessian,var1=var1,se1.coef=diag(var1)^.5,
ploglik=opt$ploglik,gradient=opt$gradient,
cumhaz=cumhaz, se.cumhaz=se.cumhaz,MGciid=MGc,
strata=xx2$strata,
nstrata=nstrata,strata.name=strata.name,strata.level=strata.level,
propodds=propodds,
S0=opt$S0,E=opt$E,S2S0=opt$S2S0,time=opt$time,Ut=opt$U,
jumps=jumps,exit=exit,p=p,S0s=val$S0s,
no.opt=no.opt,
Pcens.model=Pcens.model,Gjumps=Gjumps,cens.code=cens.code,cause=cause
)
if (cox.prep) out <- c(out,list(cox.prep=xx2))
return(out)
}
EventCens <- function(time,time2=TRUE,cause=NULL,cens=NULL,cens.code=0,...) {
out <- cbind(time,time2,cause,cens)
colnames(out) <- c("entry","exit","cause","cens")
class(out) <- "EventCens"
attr(out,"cens.code") <- cens.code
return(out)
}
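# Usage sketch (values are illustrative, not from the source): build an
# entry/exit/cause/censoring outcome matrix for two subjects.
if (FALSE) {
  ec <- EventCens(time = c(0, 0), time2 = c(2.5, 4.0),
                  cause = c(1, 0), cens = c(0, 1))
  head(ec)
}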
simRecurrentCox <- function(n,cumhaz,cumhaz2,death.cumhaz=NULL,X=NULL,r1=NULL,r2=NULL,rd=NULL,rc=NULL,...)
{
if (is.null(r1)) r1 <- rep(1,n)
if (is.null(r2)) r2 <- rep(1,n)
if (is.null(rd)) rd <- rep(1,n)
if (is.null(rc)) rc <- rep(1,n)
base1 <- cumhaz
if (is.null(death.cumhaz)) stop("Modification for death, otherwise use simRecurrentII\n")
if (is.null(X)) stop("X must be given to link with simulated data\n");
St <- exp(-Cpred(rbind(c(0,0),death.cumhaz),base1[,1])[,2])
rds <- unique(rd)
dtt <- diff(c(0,base1[,1]))
dbase1 <- diff(c(0,base1[,2]))
data <- c()
XX <- c()
nks <- 0
for (rdss in rds) {
lam1ms <- (dbase1)/St^rdss
where <- which(rd==rdss)
nk <- length(where)
cumhaz1 <- cbind(base1[,1],cumsum(lam1ms))
datss <- simRecurrentII(nk,cumhaz1,cumhaz2,death.cumhaz=t(t(death.cumhaz)*c(1,rdss)),
r1=r1[where],r2=NULL,rd=NULL,rc=rc[where],...)
Xs <- X[where,,drop=FALSE][datss$id,,drop=FALSE]
XX <- rbind(XX,Xs)
datss$id <- datss$id+nks
nks <- nks+nk
data <- rbind(data,as.matrix(datss))
}
rownames(XX) <- NULL
rownames(data) <- NULL
return(list(data=data,X=XX))
}
simMarginalMeanCox <- function(n,cens=3/5000,k1=0.1,k2=0,bin=1,Lam1=NULL,Lam2=NULL,LamD=NULL,beta1=rep(0,2),betad=rep(0,2),betac=rep(0,2),...)
{
revnr <- death <- status <- NULL
if (bin==1) X <- matrix(rbinom(n*2,1,0.5),n,2) else X <- matrix(rnorm(n*2),n,2)
colnames(X) <- paste("X",1:2,sep="")
r1 <- exp( X %*% beta1)
rd <- exp( X %*% betad)
rc <- exp( X %*% betac)
if (is.null(Lam2)) Lam2 <- Lam1;
rr <- simRecurrentCox(n,t(t(Lam1)*c(1,k1)),cumhaz2=t(t(Lam1)*c(1,k2)),
death.cumhaz=LamD,X=X,cens=cens,r1=r1,rd=rd,rc=rc,...)
rr <- as.data.frame(cbind(rr$data,rr$X))
dsort(rr) <- ~id+start
nid <- max(rr$id)
rr$revnr <- revcumsumstrata(rep(1,nrow(rr)),rr$id-1,nid)
rr$cens <- 0
rr <- dtransform(rr,cens=1,revnr==1 & death==0)
rr <- dtransform(rr,statusG=status)
rr <- dtransform(rr,statusG=0,status==2)
rr <- dtransform(rr,statusG=2,death==1)
if (bin==0) dcut(rr,breaks=4) <- X1g~X1 else rr$X1g <- rr$X1
if (bin==0) dcut(rr,breaks=4) <- X2g~X2 else rr$X2g <- rr$X2
return(rr)
} |
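# Usage sketch (hypothetical inputs): the cumulative hazards are assumed to
# be two-column (time, cumhaz) matrices, and the helpers simRecurrentII()
# and Cpred() used above must be available. Shapes below are illustrative.
if (FALSE) {
  tt <- seq(0, 5000, length.out = 51)
  Lam1 <- cbind(tt, 0.5 * tt / 5000)  # recurrent-event cumulative hazard
  LamD <- cbind(tt, 0.2 * tt / 5000)  # death cumulative hazard
  rr <- simMarginalMeanCox(n = 200, Lam1 = Lam1, LamD = LamD, k1 = 0.1)
  head(rr)
}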
capitalizer <-
function(text, caps.list = NULL, I.list = TRUE, apostrophe.remove = FALSE) {
I_list <- c("I'm", "I'll", "I'd", "I've", "I")
idf <- data.frame(from = sapply(I_list, function(x) strip(x,
apostrophe.remove = FALSE)), to = I_list)
rownames(idf) <- 1:nrow(idf)
if (!I.list) {
idf <- NULL
}
names <- data.frame(from = tolower(caps.list), to = gsub("(\\w)(\\w*)",
"\\U\\1\\L\\2", tolower(caps.list), perl = T))
names2 <- data.frame(from = paste(names$from, "'s", sep = ""),
to = paste(names$to, "'s", sep = ""))
idf <- rbind(idf, names, names2)
idf$from <- as.character(idf$from)
idf$to <- as.character(idf$to)
subber <- function(x) ifelse(x %in% idf$from, idf[match(x,
idf$from), "to"], x)
unlist(lapply(text, subber))
} |
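# Usage sketch (assumes the strip() helper referenced above is available):
# restore "I" contractions and user-supplied proper nouns after lower-casing.
if (FALSE) {
  capitalizer(c("i'm", "flying", "to", "london"), caps.list = "london")
  # expected: "I'm" "flying" "to" "London"
}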
library(testthat)
require(OpenMx)
context("WLS acov")
data(Bollen)
got <- mxGenerateData(Bollen[, 1:8], nrows=10)
omxCheckEquals(nrow(got), 10)
manvar <- names(Bollen[, 1:8])
lval <- matrix(
c(1, 0,
1, 0,
1, 0,
1, 0,
0, 1,
0, 1,
0, 1,
0, 1),
byrow=TRUE,
ncol=2, nrow=8)
lfre <- matrix(as.logical(lval), ncol=2)
lfre[1, 1] <- FALSE
lfre[5, 2] <- FALSE
llab <- matrix(c(paste("lam", 1:4, sep=""), rep(NA, 8), paste("lam", 1:4, sep="")), ncol=2)
lx <- mxMatrix(name="Lam", values=lval, free=lfre, ncol=2, nrow=8, labels=llab, dimnames=list(manvar, c("F1", "F2")))
td <- mxMatrix(name="Theta", type="Symm", ncol=8,
values=
c(.8, 0, 0, 0, .2, 0, 0, 0,
.8, 0, .2, 0, .2, 0, 0,
.8, 0, 0, 0, .2, 0,
.8, 0, 0, 0, .2,
.8, 0, 0, 0,
.8, 0, .2,
.8, 0,
.8),
free=c(T,F,F,F,T,F,F,F,
T,F,T,F,T,F,F,
T,F,F,F,T,F,
T,F,F,F,T,
T,F,F,F,
T,F,T,
T,F,
T),
dimnames=list(manvar, manvar)
)
diag(td$labels) <- paste("var", 1:8, sep="")
selMat <- matrix(
c(5,1,
4,2,
6,2,
7,3,
8,4,
8,6), ncol=2, byrow=TRUE)
td$labels[selMat] <- paste("cov", c(51, 42, 62, 73, 84, 86), sep="")
td$labels[selMat[,2:1]] <- paste("cov", c(51, 42, 62, 73, 84, 86), sep="")
ph <- mxMatrix(name="Phi", type="Symm", ncol=2, free=T, values=c(.8, .2, .8), labels=paste("phi", c(1, 12, 2), sep=""), dimnames=list(c("F1", "F2"), c("F1", "F2")))
wlsMod <- mxModel(
"Test case for WLS Objective function from Bollen 1989",
lx, ph, td,
mxExpectationLISREL(LX=lx$name, PH=ph$name, TD=td$name),
mxFitFunctionWLS(),
mxData(Bollen[, 1:8], 'raw')
)
wlsRun <- mxRun(wlsMod)
omxCheckTrue(is.null(wlsRun$output$calculatedHessian))
dwlsMod <- mxModel(wlsMod, mxFitFunctionWLS("DWLS"))
dwlsRun <- mxRun(dwlsMod)
mxdw <- omxAugmentDataWithWLSSummary(mxd=wlsMod$data, type="DWLS")
dwlsMod2 <- dwlsMod
dwlsMod2$data <- mxData(
mxdw$observedStats$cov, numObs = 75, means = NA, type = "acov",
fullWeight=mxdw$observedStats$asymCov * 75,
acov=mxdw$observedStats$useWeight)
dwlsRun2 <- mxRun(dwlsMod2)
expect_equivalent(coef(dwlsRun) - coef(dwlsRun2),
rep(0, length(coef(wlsRun))))
dwlsMod3 <- dwlsMod
dwlsMod3$data <- mxData(
observedStats = mxdw$observedStats, numObs = 75)
dwlsRun3 <- mxRun(dwlsMod3)
expect_equivalent(coef(dwlsRun) - coef(dwlsRun3),
rep(0, length(coef(wlsRun))))
mxw <- omxAugmentDataWithWLSSummary(mxd=wlsMod$data)
wlsMod2 <- wlsMod
wlsMod2$data <- mxData(
mxw$observedStats$cov, numObs = 75, means = NA, type = "acov",
fullWeight=mxw$observedStats$asymCov * 75,
acov=mxw$observedStats$useWeight)
wlsRun2 <- mxRun(wlsMod2)
expect_equivalent(coef(wlsRun) - coef(wlsRun2),
rep(0, length(coef(wlsRun))))
wlsMod3 <- wlsMod
wlsMod3$data <- mxData(
observedStats = mxw$observedStats, numObs = 75)
wlsRun3 <- mxRun(wlsMod3)
expect_equivalent(coef(wlsRun) - coef(wlsRun3),
rep(0, length(coef(wlsRun)))) |
QTLModelCIM <- function(x, mppData, trait, cross.mat, Q.eff, VCOV,
cof.list, cof.part, plot.gen.eff){
QTL <- inc_mat_QTL(x = x, mppData = mppData, Q.eff = Q.eff, order.MAF = TRUE)
QTL.el <- dim(QTL)[2]
ref.name <- colnames(QTL)
cof.mat <- do.call(cbind, cof.list[which(cof.part[x, ])])
if(is.null(cof.mat)){ cof.mat <- rep(0, length(mppData$geno.id)); cof.el <- 1
} else { cof.el <- dim(cof.mat)[2] }
if(VCOV == "h.err"){
model <- tryCatch(expr = lm(trait ~ - 1 + cross.mat + cof.mat
+ QTL), error = function(e) NULL)
if (is.null(model)){
if(plot.gen.eff) {
if(Q.eff == "cr"){ results <- c(0, rep(1, mppData$n.cr))
} else { results <- c(0, rep(1, mppData$n.par)) }
} else { results <- 0 }
} else {
if(!("QTL" %in% rownames(anova(model)))){
if(plot.gen.eff) {
if(Q.eff == "cr"){ results <- c(0, rep(1, mppData$n.cr))
} else { results <- c(0, rep(1, mppData$n.par)) }
} else { results <- 0 }
} else {
results <- -log10(anova(model)$Pr[which(rownames(anova(model))=="QTL")])
if(plot.gen.eff){
gen.eff <- QTL_pval(mppData = mppData, model = model,
Q.eff = Q.eff, x = x)
results <- c(results, gen.eff)
}
}
}
} else if ((VCOV == "h.err.as") || (VCOV == "cr.err")){
} else if ((VCOV == "pedigree") || (VCOV == "ped_cr.err")){
}
return(results)
} |
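# Minimal sketch of the score computed above (toy data, names illustrative):
# the profile value at a position is -log10 of the QTL term's ANOVA p-value
# from a fixed-effects model.
if (FALSE) {
  set.seed(1)
  toy <- data.frame(trait = rnorm(40), QTL = rnorm(40))
  m <- lm(trait ~ QTL, data = toy)
  -log10(anova(m)$Pr[rownames(anova(m)) == "QTL"])
}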
set.seed(100)
n <- 10000
n_perturb <- n
Z_sample <- abs(rnorm(n=n))
Z_sample <- sort(Z_sample, decreasing=TRUE)
Z_sample[1:n_perturb] <- Z_sample[1:n_perturb] + rep(0.15, n_perturb)
p_values <- 1-pchisq(Z_sample^2, df=1)
observed <- sort(-log10(p_values), decreasing=TRUE)
expected <- -log10((1:n)/(n+1))
plot(expected, observed)
abline(0,1)
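# Companion diagnostic (not part of the original script): the genomic
# inflation factor lambda, the median observed chi-square statistic divided
# by the null chi-square(1) median; values above 1 reflect the perturbation.
lambda <- median(Z_sample^2) / qchisq(0.5, df = 1)
lambda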
library(GEint)
beta_list <- list(1, 1, 1, 0, c(1,1), 1)
rho_list <- list(0.1, c(0.1, 0.1), c(0.1,0.1), 0.1, 0.1, c(0.1, 0.1))
prob_G <- 0.3
cov_Z <- matrix(data=c(1, 0.2, 0.2, 1), nrow=2, ncol=2)
cov_W <- 1
normal_assumptions <- GE_bias_normal_squaredmis(beta_list=beta_list, rho_list=rho_list, prob_G=prob_G, cov_Z=cov_Z, cov_W=cov_W)
cov_list <- normal_assumptions$cov_list
cov_mat_list <- normal_assumptions$cov_mat_list
mu_list <- normal_assumptions$mu_list
HOM_list <- normal_assumptions$HOM_list
no_assumptions <- GE_bias(beta_list, cov_list, cov_mat_list, mu_list, HOM_list)
unlist(no_assumptions)
unlist(normal_assumptions$alpha_list)
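# The two vectors printed above should agree up to numerical error:
# GE_bias_normal_squaredmis() derives the moments implied by its normality
# assumptions, so feeding them to GE_bias() reproduces the same solutions.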
set.seed(100)
n <- 500
Y_continuous <- rnorm(n=n)
Y_binary <- rbinom(n=n, size=1, prob=0.5)
E <- rnorm(n=n)
G <- rbinom(n=n, size=2, prob=0.3)
design_mat <- cbind(1, G, E, G*E)
GE_BICS(outcome=Y_continuous, design_mat=design_mat, desired_coef=4, outcome_type='C')
GE_BICS(outcome=Y_binary, design_mat=design_mat, desired_coef=4, outcome_type='D') |
visBoxplotAdv <- function(formula, data, orientation=c("vertical","horizontal"), method=c("center","hex","square","swarm"), corral=c("none","gutter","wrap","random","omit"), corralWidth, cex=1, spacing=1, breaks=NULL, labels, at=NULL, add=FALSE, log=FALSE, xlim=NULL, ylim=NULL, xlab=NULL, ylab=NULL, pch=c("circles","thermometers","pies")[1], col=graphics::par("col"), bg=NA, pwpch=NULL, pwcol=NULL, pwbg=NULL, pwpie=NULL, do.plot=TRUE, do.boxplot=TRUE, boxplot.notch=FALSE, boxplot.border="#888888", boxplot.col="transparent", ...)
{
if(missing(formula) || (length(formula)!=3)){
stop("The input formula is missing or incorrect!")
}
if(is.null(pwpie) || length(pwpie) != nrow(data)) pwpie <- rep(1, nrow(data))
method <- match.arg(method)
corral <- match.arg(corral)
orientation <- match.arg(orientation)
m <- match.call(expand.dots=FALSE)
if (is.matrix(eval(m$data, parent.frame()))){
m$data <- as.data.frame(data)
}
if(is.null(rownames(data))){
rnames <- 1:nrow(data)
}else{
rnames <- rownames(data)
}
rownames(data) <- rnames
m[[1]] <- as.name("model.frame")
flag <- names(m) %in% c("pwpch","pwcol","pwbg","pwpie")
m <- m[union(1:3,which(flag))]
mf <- eval(m, parent.frame())
response <- attr(attr(mf, "terms"), "response")
f <- mf[-response]
f <- f[names(f) %in% attr(attr(mf, "terms"), "term.labels")]
if(!is.null(mf$'(pwpch)')){
pwpch <- split(mf$'(pwpch)', f)
}
if(!is.null(mf$'(pwcol)')){
pwcol <- split(mf$'(pwcol)', f)
}
if(!is.null(mf$'(pwbg)')){
pwbg <- split(mf$'(pwbg)',f)
}
if(!is.null(mf$'(pwpie)')){
pwpie <- split(mf$'(pwpie)',f)
}
pwrnames <- split(rnames,f)
dlab <- as.character(formula)[2]
glab <- as.character(formula)[3]
x <- split(x=mf[[response]], f=f)
n.groups <- length(x)
if(length(cex) > 1) {
stop('the parameter "cex" must have length 1')
}
if(missing(labels) || is.null(labels)) {
if(is.null(names(x))) {
if(n.groups == 1) {
labels <- NA
}else{
labels <- 1:n.groups
}
}else{
labels <- names(x)
}
}else{
labels <- rep(labels, length.out=n.groups)
}
if (is.null(at)){
at <- 1:n.groups
}else if (length(at) != n.groups){
stop(gettextf("'at' must have length equal to %d, the number of groups", n.groups), domain = NA)
}
unlistGroup <- function(x, nms=names(x)){
rep(nms, sapply(x, length))
}
x.val <- unlist(x)
x.gp <- unlistGroup(x, nms=labels)
if((range(x.val, finite=T)[1] <= 0) && log) warning('values <= 0 omitted from logarithmic plot')
n.obs <- length(x.val)
n.obs.per.group <- sapply(x, length)
if(log) {
dlim <- 10 ^ (grDevices::extendrange(log10(x.val[x.val > 0])))
}else{
dlim <- grDevices::extendrange(x.val, f=0.01)
}
glim <- c(min(at)-0.5, max(at)+0.5)
if(orientation=="horizontal") {
if(is.null(ylim)){
ylim <- glim
}
if(is.null(xlim)){
xlim <- dlim
}
if(is.null(xlab)){
xlab <- dlab
}
if(is.null(ylab)){
ylab <- glab
}
}else if(orientation=="vertical"){
if(is.null(xlim)){
xlim <- glim
}
if(is.null(ylim)){
ylim <- dlim
}
if(is.null(ylab)){
ylab <- dlab
}
if(is.null(xlab)){
xlab <- glab
}
}
if(is.null(pwpch)) {
pch.out <- unlistGroup(x, nms=rep(pch, length.out=n.groups))
}else{
if(is.list(pwpch)) {
names(pwpch) <- names(x)
stopifnot(all(sapply(pwpch, length)==n.obs.per.group))
pch.out <- unlist(pwpch)
}else{
pch.out <- pwpch
}
}
stopifnot(length(pch.out) == n.obs)
if(is.null(pwcol)) {
col.out <- unlistGroup(x, nms=rep(col, length.out=n.groups))
} else {
if(is.list(pwcol)) {
names(pwcol) <- names(x)
stopifnot(all(sapply(pwcol, length) == n.obs.per.group))
col.out <- unlist(pwcol)
} else {
col.out <- pwcol
}
}
stopifnot(length(col.out) == n.obs)
if(is.null(pwbg)) {
bg.out <- unlistGroup(x, nms=rep(bg, length.out=n.groups))
} else {
if(is.list(pwbg)) {
names(pwbg) <- names(x)
stopifnot(all(sapply(pwbg, length) == n.obs.per.group))
bg.out <- unlist(pwbg)
} else {
bg.out <- pwbg
}
}
stopifnot(length(bg.out) == n.obs)
if(is.null(pwpie)) {
pie.out <- unlistGroup(x, nms=rep(1, length.out=n.groups))
} else {
if(is.list(pwpie)) {
names(pwpie) <- names(x)
stopifnot(all(sapply(pwpie, length) == n.obs.per.group))
pie.out <- unlist(pwpie)
} else {
pie.out <- pwpie
}
}
stopifnot(length(pie.out) == n.obs)
names(pwrnames) <- names(x)
rnames.out <- unlist(pwrnames)
if(do.plot & !add) {
plot(xlim, ylim, type='n', axes=F, log=ifelse(log, ifelse(orientation=="horizontal", 'x', 'y'), ''), xlab=xlab, ylab=ylab)
}
sizeMultiplier <- graphics::par('cex') * cex * spacing
if(orientation=="horizontal") {
size.g <- graphics::yinch(0.08, warn.log = FALSE) * sizeMultiplier
size.d <- graphics::xinch(0.08, warn.log = FALSE) * sizeMultiplier
} else {
size.g <- graphics::xinch(0.08, warn.log = FALSE) * sizeMultiplier
size.d <- graphics::yinch(0.08, warn.log = FALSE) * sizeMultiplier
}
calculateSwarm <- function(x, dsize, gsize) {
if(length(x) == 0) return(numeric(0))
out <- data.frame(x = x / dsize, y = 0, i = seq(along = x))
out <- out[order(out$x), ]
if(nrow(out) > 1) {
for (i in 2:nrow(out)) {
xi <- out$x[i]
yi <- out$y[i]
pre <- out[1:(i - 1), ]
wh <- xi - pre$x < 1
wh[is.na(wh)] <- FALSE
if(any(wh)) {
pre <- pre[wh, ]
pre <- pre[order(abs(pre$y)), ]
poty.off <- sqrt(1 - ((xi - pre$x) ^ 2))
poty <- c(0, pre$y + poty.off, pre$y - poty.off)
poty.bad <- sapply(poty, function(y) {
any(((xi - pre$x) ^ 2 + (y - pre$y) ^ 2) < 0.999)
})
poty[poty.bad] <- Inf
out$y[i] <- poty[which.min(abs(poty))]
} else {
out$y[i] <- 0
}
}
}
out <- out[order(out$i), ]
out[is.na(out$x), 'y'] <- NA
out$y * gsize
}
swarmX <- function(x, y, xsize = graphics::xinch(0.08, warn.log = FALSE), ysize = graphics::yinch(0.08, warn.log = FALSE), log = NULL, cex = graphics::par("cex")){
if(is.null(log)) {
log <- paste(ifelse(graphics::par('xlog'), 'x', ''), ifelse(graphics::par('ylog'), 'y', ''), sep = '')
}
xlog <- 'x' %in% strsplit(log, NULL)[[1L]]
ylog <- 'y' %in% strsplit(log, NULL)[[1L]]
xy <- grDevices::xy.coords(x = x, y = y, recycle = TRUE, log = log)
stopifnot((length(unique(xy$x)) <= 1))
if(xlog) xy$x <- log10(xy$x)
if(ylog) xy$y <- log10(xy$y)
x.new <- xy$x + calculateSwarm(xy$y, dsize = ysize * cex, gsize = xsize * cex)
out <- data.frame(x = x.new, y = y)
if(xlog) out$x <- 10 ^ out$x
out
}
swarmY <- function(x, y, xsize = graphics::xinch(0.08, warn.log = FALSE), ysize = graphics::yinch(0.08, warn.log = FALSE), log = NULL, cex = graphics::par("cex")) {
if(is.null(log)) {
log <- paste(ifelse(graphics::par('xlog'), 'x', ''), ifelse(graphics::par('ylog'), 'y', ''), sep = '')
}
xlog <- 'x' %in% strsplit(log, NULL)[[1L]]
ylog <- 'y' %in% strsplit(log, NULL)[[1L]]
xy <- grDevices::xy.coords(x = x, y = y, recycle = TRUE, log = log)
stopifnot((length(unique(xy$y)) <= 1))
if(xlog) xy$x <- log10(xy$x)
if(ylog) xy$y <- log10(xy$y)
y.new <- xy$y + calculateSwarm(xy$x, dsize = xsize * cex, gsize = ysize * cex)
out <- data.frame(x = x, y = y.new)
if(ylog) out$y <- 10 ^ out$y
out
}
floating.pie.asp <- function (xpos, ypos, x, edges = 200, radius = 1, col = NULL, startpos = 0, ...){
u <- graphics::par("usr")
user.asp <- diff(u[3:4])/diff(u[1:2])
p <- graphics::par("pin")
inches.asp <- p[2]/p[1]
asp <- user.asp/inches.asp
if (!is.numeric(x) || any(is.na(x) | x < 0))
stop("floating.pie: x values must be non-negative")
x <- c(0, cumsum(x)/sum(x))
dx <- diff(x)
nx <- length(dx)
col <- if (is.null(col))
grDevices::rainbow(nx)
else rep(col, length.out = nx)
if (length(i <- which(dx == 1))) {
graphics::symbols(xpos, ypos, circles = radius, inches = FALSE, add = TRUE, bg = col[i], fg = col[which(dx == 0)])
}else {
bc <- 2 * pi * (x[1:nx] + dx/2) + startpos
for (i in seq_len(nx)) {
n <- max(2, floor(edges * dx[i]))
t2p <- 2 * pi * seq(x[i], x[i + 1], length = n) + startpos
xc <- c(cos(t2p) * radius + xpos, xpos)
yc <- c(sin(t2p) * radius * asp + ypos, ypos)
graphics::polygon(xc, yc, col = col[i], ...)
}
}
}
if(method == 'swarm') {
if(orientation=="horizontal") {
g.offset <- lapply(x, function(a) swarmY(x = a, y = rep(0, length(a)), cex = sizeMultiplier)$y)
} else {
g.offset <- lapply(x, function(a) swarmX(x = rep(0, length(a)), y = a, cex = sizeMultiplier)$x)
}
d.pos <- x
} else {
if(method == 'hex') size.d <- size.d * sqrt(3) / 2
if(log) {
if(is.null(breaks)){
breaks <- 10 ^ seq(log10(dlim[1]), log10(dlim[2]) + size.d, by = size.d)
}
if(length(breaks) == 1 && is.na(breaks[1])) {
d.index <- x
d.pos <- x
} else {
mids <- 10 ^ ((log10(head(breaks, -1)) + log10(tail(breaks, -1))) / 2)
d.index <- lapply(x, cut, breaks = breaks, labels = FALSE)
d.pos <- lapply(d.index, function(a) mids[a])
}
} else {
if(is.null(breaks)){
breaks <- seq(dlim[1], dlim[2] + size.d, by = size.d)
}
if(length(breaks) == 1 && is.na(breaks[1])) {
d.index <- x
d.pos <- x
} else {
mids <- (head(breaks, -1) + tail(breaks, -1)) / 2
d.index <- lapply(x, cut, breaks = breaks, labels = FALSE)
d.pos <- lapply(d.index, function(a) mids[a])
}
}
x.index <- lapply(d.index, function(v) {
if(length(stats::na.omit(v)) == 0) return(v)
v.s <- lapply(split(v, v), seq_along)
if(method == 'center')
v.s <- lapply(v.s, function(a) a - mean(a))
else if(method == 'square')
v.s <- lapply(v.s, function(a) a - floor(mean(a)))
else if(method == 'hex') {
odd.row <- (as.numeric(names(v.s)) %% 2) == 1
v.s[odd.row] <- lapply(v.s[odd.row], function(a) a - floor(mean(a)) - 0.25)
v.s[!odd.row] <- lapply(v.s[!odd.row], function(a) a - ceiling(mean(a)) + 0.25)
}
unsplit(v.s, v)
})
g.offset <- lapply(1:n.groups, function(i) x.index[[i]] * size.g)
}
if(corral != 'none') {
if(missing(corralWidth)) {
if(n.groups > 1) {
corralWidth <- min(at[-1] - at[-n.groups]) - (2 * size.g)
} else {
corralWidth <- 2 * (min(diff(c(graphics::par('usr')[1], at, graphics::par('usr')[2]))) - size.g)
}
} else {
stopifnot(length(corralWidth) == 1)
stopifnot(corralWidth > 0)
}
halfCorralWidth <- corralWidth / 2
if(corral == 'gutter') {
g.offset <- lapply(g.offset, function(zz) pmin(halfCorralWidth, pmax(-halfCorralWidth, zz)))
}
if(corral == 'wrap') {
g.offset <- lapply(g.offset, function(zz) ((zz + halfCorralWidth) %% (halfCorralWidth * 2)) - halfCorralWidth)
}
if(corral == 'random') {
g.offset <- lapply(g.offset, function(zz) ifelse(zz > halfCorralWidth | zz < -halfCorralWidth, stats::runif(length(zz), -halfCorralWidth, halfCorralWidth), zz))
}
if(corral == 'omit') {
g.offset <- lapply(g.offset, function(zz) ifelse(zz > halfCorralWidth, NA, ifelse(zz < -halfCorralWidth, NA, zz)))
}
}
g.pos <- lapply(1:n.groups, function(i) at[i] + g.offset[[i]])
out <- data.frame(x = unlist(g.pos),
y = unlist(d.pos),
pch = pch.out,
col = col.out,
bg = bg.out,
pie = pie.out,
x.orig = x.gp,
y.orig = x.val,
stringsAsFactors = FALSE
)
rownames(out) <- rnames.out
tmp <- data.frame(ind=1:nrow(out), x.orig=out$x.orig, y=out$y, col=out$col, pie=out$pie)
ordering <- tmp[with(tmp, order(x.orig,y,col,pie)),]$ind
a <- out[ordering, ]
b <- split(a, a$x.orig)
for(i in 1:length(b)){
c <- b[[i]]
d <- split(c, c$y)
for(j in 1:length(d)){
e <- d[[j]]
e$x <- sort(e$x)
d[[j]] <- e
}
ttmp <- d[[1]]
if(length(d)>=2){
for(k in 2:length(d)){
ttmp <- rbind(ttmp, d[[k]])
}
}
b[[i]] <- ttmp
}
out <- b[[1]]
if(length(out)>=2){
for(i in 2:length(b)){
out <- rbind(out, b[[i]])
}
}
ind <- match(rownames(data), rownames(out))
out <- out[ind,]
if(do.plot) {
if(orientation=="horizontal") {
if(is.numeric(out$pch)){
graphics::points(out$y, out$x, pch=out$pch, col=out$col, bg=out$bg, cex=cex)
}else{
for(i in 1:length(out$pch)){
pch_tmp <- out$pch[i]
if(pch_tmp == 'circles'){
graphics::symbols(out$y[i], out$x[i], circles=min(size.g, size.d)/2, inches=F, fg=out$col[i], bg=out$bg[i], add=T)
}else if(pch_tmp == 'thermometers'){
graphics::symbols(out$y[i], out$x[i], thermometers=cbind(0.4*size.d, 0.8*size.g, out$pie[i]), inches=F, fg=out$col[i], bg=out$bg[i], add=T)
}else if(pch_tmp == 'pies'){
floating.pie.asp(xpos=out$y[i], ypos=out$x[i], x=cbind(out$pie[i], 1-out$pie[i]), edges=200, radius=0.8*max(size.g, size.d)/2, col=cbind(out$col[i], "transparent"), startpos=0, border=out$col[i])
}else{
                    graphics::points(out$y[i], out$x[i], pch=out$pch[i], col=out$col[i], bg=out$bg[i], cex=cex)
}
}
}
if(!add) {
graphics::axis(1, ...)
graphics::axis(2, at=at, labels=labels, tick=FALSE, ...)
graphics::box(...)
}
if(do.boxplot){
graphics::boxplot(formula=formula, data=data, at=at, names=labels, outline=F, add=T, horizontal=T, notch=boxplot.notch, border=boxplot.border, col=boxplot.col)
}
}else if(orientation=="vertical") {
if(is.numeric(out$pch)){
graphics::points(out$x, out$y, pch=out$pch, col=out$col, bg=out$bg, cex=cex)
}else{
for(i in 1:length(out$pch)){
pch_tmp <- out$pch[i]
if(pch_tmp == 'circles'){
graphics::symbols(out$x[i], out$y[i], circles=min(size.g, size.d)/2, inches=F, fg=out$col[i], bg=out$bg[i], add=T)
}else if(pch_tmp == 'thermometers'){
graphics::symbols(out$x[i], out$y[i], thermometers=cbind(0.4*size.g, 0.8*size.d, out$pie[i]), inches=F, fg=out$col[i], bg=out$bg[i], add=T)
}else if(pch_tmp == 'pies'){
floating.pie.asp(xpos=out$x[i], ypos=out$y[i], x=cbind(out$pie[i], 1-out$pie[i]), edges=200, radius=0.8*min(size.g, size.d)/2, col=cbind(out$col[i], "transparent"), startpos=0, border=out$col[i])
}else{
graphics::points(out$x[i], out$y[i], pch=out$pch[i], col=out$col[i], bg=out$bg[i], cex=cex)
}
}
}
if(!add) {
graphics::axis(2, ...)
graphics::axis(1, at=at, labels=labels, tick=FALSE, ...)
graphics::box(...)
}
if(do.boxplot){
graphics::boxplot(formula=formula, data=data, at=at, names=labels, outline=F, add=T, horizontal=F, notch=boxplot.notch, border=boxplot.border, col=boxplot.col)
}
}
}
invisible(out)
} |
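# Usage sketch (hypothetical data): point-wise colors are passed via 'pwcol'
# and resolved inside the formula's data, mirroring how the pw* arguments
# are captured through model.frame() above.
if (FALSE) {
  df <- data.frame(value = rnorm(60),
                   grp = rep(c("A", "B", "C"), each = 20),
                   hit = sample(c("red", "grey"), 60, replace = TRUE))
  visBoxplotAdv(value ~ grp, data = df, method = "center", pwcol = hit)
}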
addHooksToPrint <- function(class = "ggplot",
repoDir = aoptions("repoDir"),
repo = aoptions("repo"),
user = aoptions("user"),
branch = "master",
subdir = aoptions("subdir"),
format = "markdown"
){
stopifnot( is.character( class ),
is.character( repoDir ),
(is.null(repo) || is.character( repo )),
is.character( user ) )
for (class1 in class) {
namespace <- gsub(grep(getAnywhere(paste0("print.",class1))$where,
pattern="namespace:", value=T),
pattern="namespace:", replacement="")
if (length(namespace) == 0) {
stop(paste0("The function print.", class1, " has not been found. Evaluation stopped for further classes."))
}
}
if (!file.exists( repoDir ))
createLocalRepo(repoDir)
setLocalRepo(repoDir)
for (class1 in class) {
namespace <- gsub(grep(getAnywhere(paste0("print.",class1))$where,
pattern="namespace:", value=T),
pattern="namespace:", replacement="")
if (is.null(repo)) {
fun <- paste0('function(x, ..., artifactName = deparse(substitute(x))) {
hash <- saveToRepo(x, artifactName = artifactName)
cat("Load: [",hash,"](", repoDir, "/gallery/",hash,".rda)\n", sep="")
',namespace,':::print.',class1,'(x, ...)
}')
} else {
fun <- paste0('function(x, ..., artifactName = deparse(substitute(x))) {
hash <- saveToRepo(x, artifactName = artifactName)
al <- alink(hash, repo = "',repo,'", user = "',user,'", subdir = "',subdir,'", format = "',format,'")
cat("Load: ", al, "\n", sep="")
',namespace,':::print.',class1,'(x, ...)
}')
}
fun <- eval(parse(text=fun))
veryDirtyHack <- 1
assign(paste0("print.", class1), fun, pos=veryDirtyHack)
}
invisible(NULL)
} |
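# Usage sketch (repo directory, repo and user names are illustrative):
# redirect print() for ggplot objects so each printed plot is archived to a
# local repository and a Load link is emitted alongside it.
if (FALSE) {
  addHooksToPrint(class = "ggplot", repoDir = "arepo",
                  repo = "graphGallery", user = "pbiecek")
}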
`keyfitz` <-
function(b0,nMx){
nax12 <- c(0,0)
nax12[1] <- b0[1] + b0[2] *nMx[1]
nax12[2] <- 1.5
return(nax12)
} |
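# Example with illustrative coefficients (the source does not fix b0):
# nax[1] = b0[1] + b0[2] * nMx[1] approximates a(0); the second separation
# factor is held at 1.5.
if (FALSE) {
  keyfitz(b0 = c(0.07, 1.7), nMx = c(0.05, 0.02))
  # expected: 0.155 1.500
}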
mandel.kh <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
UseMethod("mandel.kh")
}
mandel.kh.default <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
if( is.vector(x) ) {
if( !is.null(m) ) {
fm <- factor(m)
if( is.null(g) ) {
if( !all( (tm<-table(m)) == tm[1] ) ) {
stop("g must be present if x is a vector and group sizes in m are unequal")
} else {
g<-factor( ave(1:length(m), m, FUN=function(x) seq(length(x))) )
xw <- .to.wide(x,g, m)
}
} else {
xw <- .to.wide(x, g, m)
}
x <- xw[ , 2:ncol(xw), drop=FALSE]
g <- xw$g
} else {
x <- data.frame(x=x)
}
} else {
stop(sprintf("mandel.kh does not support objects of type %s", class(x)) )
}
mkh<-mandel.kh(x=x, g=g, m=m, na.rm=na.rm, rowname=rowname, type=type, method=method, n=n, ...)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.kh.array <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
if( length(dim(as.array(x))) ==1 ) {
x <- as.vector(x)
} else if( length(dim(as.array(x))) ==2 ){
x <- as.data.frame(x)
} else {
stop("mandel.kh does not support arrays with more than 2 dimensons")
}
mkh<-mandel.kh(x=x, g=g, m=m, na.rm=na.rm, rowname=rowname, type=type, method=method, n=n, ...)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.kh.matrix <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
mkh<-mandel.kh(x=as.data.frame(x), g=g, m=m, na.rm=na.rm, rowname=rowname, type=type, method=method, n=n, ...)
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.kh.data.frame <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
method <- match.arg(method)
h <- function(y, na.rm=T) {
return( (y-mean(y, na.rm=na.rm))/sd(y, na.rm=na.rm) )
}
k <- function(y, na.rm=T) {
y.omit<-na.omit(y)
pooled.sd <- sqrt(sum(y.omit^2)/length(y.omit))
return( y/pooled.sd )
}
h.robust <- function(y, na.rm=T, ...) {
y.omit<- if(na.rm) na.omit(y) else y
H <- hubers(y.omit, ...)
return( (y-H$mu)/H$s )
}
k.robust <- function(y, degfree, na.rm=T) {
y.omit<- if(na.rm) na.omit(y) else y
pooled.sd <- algS(y.omit, degfree)
return( y/pooled.sd )
}
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
if( is.null(g) ) {
was.null.g <- TRUE
if(is.null(rownames(x))) {
g <- paste(rowname, format(1:nrow(x)), sep="")
g<-factor(g, levels=g)
} else {
g <- factor(rownames(x), levels=rownames(x))
}
} else {
was.null.g <- FALSE
}
if(type[1]=="k" && is.na(n) ) {
if( !was.null.g ) {
n.all <- aggregate(x, by=list(g=g),
FUN=function(x) sum(!is.na(x)) )
if(ncol(n.all)>2)
n.all <- stack(n.all[,2:ncol(n.all)])
else
names(n.all) <- c("g", "value")
n <- median(n.all$value[n.all$value>0], na.rm=na.rm)
}
if(is.na(n) || n <=1) {
stop("n must be specified and >1 with type=='k' and only one value per group" )
}
}
if(type[1]=="h") {
x <- aggregate(x, by=list(g=g), FUN=mean, na.rm=na.rm)
mkh <- if(method=="robust") {
as.data.frame( lapply(x[,2:ncol(x), drop=FALSE], h.robust, na.rm=na.rm, ...) )
} else {
as.data.frame( lapply(x[,2:ncol(x), drop=FALSE], h, na.rm=na.rm) )
}
} else if(type[1]=="k") {
x <- aggregate(x, by=list(g=g),
FUN=function(x, na.rm) {
if(length(x)==1)
x
else sd(x, na.rm=na.rm)
},
na.rm=na.rm )
if(method=="robust") {
mkh <- as.data.frame( lapply(x[,2:ncol(x), drop=FALSE], k.robust, na.rm=na.rm, degfree=n-1) )
} else {
mkh <- as.data.frame( lapply(x[,2:ncol(x), drop=FALSE], k, na.rm=na.rm) )
}
} else {
stop("type must be one of 'h' or 'k'")
}
row.names(mkh) <- as.character(x[[1]])
attr(mkh, "mandel.type") <- type[1]
attr(mkh, "mandel.method") <- method[1]
attr(mkh, "grouped.by") <- name.g
attr(mkh, "n") <- n
class(mkh) <- c("mandel.kh", class(mkh))
return(mkh)
}
mandel.kh.ilab <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, type=c("h", "k"), method=c("classical", "robust"), n=NA, ...) {
if(missing(g)) {
g<-x$data$org
name.g <- if(is.null(rowname))
"Organisation"
else
rowname
} else {
name.g <- if(is.null(rowname))
deparse(substitute(g))
else
rowname
}
if(missing(m)||is.null(m)) m <- x$data$measurand
  mkh<-mandel.kh(x=x$data$x, g=g, m=m, na.rm=na.rm, rowname=rowname, type=type, method=method, n=n, ...)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.h <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
UseMethod("mandel.h")
}
mandel.k <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
UseMethod("mandel.k")
}
mandel.h.default <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
mkh<-mandel.kh(x=x, g=g, m=m, na.rm=na.rm, rowname=rowname, type="h", method=method, n=n, ...)
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.k.default <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
mkh<-mandel.kh(x=x, g=g, m=m, na.rm=na.rm, rowname=rowname, type="k", method=method, n=n, ...)
name.g <- .get.mandel.rowname(deparse(substitute(g)), rowname)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.h.ilab <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
if(missing(g)) {
g<-x$data$org
name.g <- if(is.null(rowname))
"Organisation"
else
rowname
} else {
name.g <- if(is.null(rowname))
deparse(substitute(g))
else
rowname
}
if(missing(m)||is.null(m)) m <- x$data$measurand
mkh<-mandel.kh(x=x$data$x, g=g, m=m, na.rm=na.rm, rowname=rowname, type="h", method=method, n=n, ...)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
mandel.k.ilab <- function(x, g=NULL, m=NULL, na.rm=T, rowname=NULL, method=c("classical", "robust"), n=NA, ...) {
if(missing(g)) {
g<-x$data$org
name.g <- if(is.null(rowname))
"Organisation"
else
rowname
} else {
name.g <- if(is.null(rowname))
deparse(substitute(g))
else
rowname
}
if(missing(m)||is.null(m)) m <- x$data$measurand
mkh<-mandel.kh(x=x$data$x, g=g, m=m, na.rm=na.rm, rowname=rowname, type="k", method=method, n=n, ...)
attr(mkh, "grouped.by") <- name.g
return(mkh)
}
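# Usage sketch (toy data; requires the package's gplot() helper used by the
# plot method defined below): compute Mandel's h across row groups of a
# data frame and draw the default indicator lines.
if (FALSE) {
  set.seed(42)
  xd <- as.data.frame(matrix(rnorm(40), nrow = 8,
                             dimnames = list(paste0("Lab", 1:8), NULL)))
  hstats <- mandel.h(xd)
  plot(hstats)
}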
plot.mandel.kh <- function(x, probs=c(0.95, 0.99), main,
xlab=attr(x, "grouped.by"), ylab= attr(x, "mandel.type") ,
ylim=NULL, las=1, axes=TRUE, cex.axis=1, frame.plot = axes,
lwd=1, lty=1,col=par("col"),
col.ind=1, lty.ind=c(2,1), lwd.ind=1,
separators=TRUE, col.sep="lightgrey", lwd.sep=1, lty.sep=1,
zero.line=TRUE, lwd.zero=1, col.zero=1, lty.zero=1,
p.adjust="none", ...) {
if(missing(main) )
main <- paste( deparse(substitute(x)), " - Mandel's",
attr(x, "mandel.type"),
if(attr(x, "mandel.method") == "robust") "(Robust variant)"
)
ni<-ncol(x)
ng <- nrow(x)
mids <- gplot(x, main=main, xlab=xlab, ylab=ylab,
ylim=ylim, las=las, axes=axes, cex.axis=cex.axis,
frame.plot=frame.plot, lwd=lwd, lty=lty, col=col,
separators=separators, col.sep=col.sep,
lwd.sep=lwd.sep, lty.sep=lty.sep,
zero.line=zero.line, lwd.zero=lwd.zero,
col.zero=col.zero, lty.zero=lty.zero, ...)
if( !is.na(probs[1]) ) {
if(p.adjust != "none" ) {
probs <- 1 - p.adjust(1-probs, method=p.adjust, n = ng * ncol(x))
}
if(attr(x, "mandel.type") == "h" ) {
probs <- 1 - (1 - probs)/2
probs <- c(probs, 1-probs)
if(attr(x, "mandel.method") == "classical" ) {
abline(h=qmandelh(probs, ng), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
abline(h=qnorm(probs), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
} else {
if(attr(x, "mandel.method") == "classical" ) {
abline(h=qmandelk(probs, ng, attr(x, "n")), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
abline(h=sqrt(qf(probs, attr(x, "n")-1, Inf)), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
}
}
return(invisible(mids))
}
barplot.mandel.kh <- function(height, probs=c(0.95, 0.99), main,
xlab=attr(height, "grouped.by"), ylab=attr(height, "mandel.type"),
separators=TRUE, zero.line=TRUE, ylim, p.adjust="none", frame.plot = TRUE,
... ,
col.ind=1, lty.ind=c(2,1), lwd.ind=1,
col.sep="lightgrey", lwd.sep=1, lty.sep=1,
lwd.zero=1, col.zero=1, lty.zero=1) {
if(missing(main) )
main <- paste( deparse(substitute(height)), " - Mandel's",
attr(height, "mandel.type"),
if(attr(height, "mandel.method") == "robust") "(Robust variant)"
)
ng <- nrow(height)
if(missing(ylim)) ylim <- range(pretty(c(0, na.omit(stack(height))$values)))
mids <- barplot(t(as.matrix(height)), beside=TRUE,
ylim=ylim, main=main, xlab=xlab, ylab=ylab, ...)
if(separators) {
mid.max<-mids[nrow(mids), ]
abline(v=c(0.5, mid.max+1), col=col.sep, lty=lty.sep, lwd=lwd.sep)
}
if(zero.line) abline(h=0, col=col.zero, lwd=lwd.zero, lty=lty.zero)
if(frame.plot) box()
if( !is.na(probs[1]) ) {
if(p.adjust != "none" ) {
probs <- 1 - p.adjust(1-probs, method=p.adjust, n = ng * ncol(height))
}
if(attr(height, "mandel.type") == "h" ) {
probs <- 1 - (1 - probs)/2
probs <- c(probs, 1-probs)
if(attr(height, "mandel.method") == "classical" ) {
abline(h=qmandelh(probs, ng), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
abline(h=qnorm(probs), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
} else {
if(attr(height, "mandel.method") == "classical" ) {
abline(h=qmandelk(probs, ng, attr(height, "n")), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
        abline(h=sqrt(qf(probs, attr(height, "n")-1, Inf)), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
}
}
return(invisible(mids))
}
boxplot.mandel.kh <- function(x, probs=c(0.95, 0.99), main,
xlab=attr(x, "grouped.by"), ylab=attr(x, "mandel.type"),
separators=FALSE, zero.line=TRUE, ylim, p.adjust="none",
frame.plot = TRUE, horizontal=FALSE, at,
... ,
col.ind=1, lty.ind=c(2,1), lwd.ind=1,
col.sep="lightgrey", lwd.sep=1, lty.sep=1,
lwd.zero=1, col.zero=1, lty.zero=1,
outlier.labels=row.names(x), cex.lab=0.7, col.lab=1,
adj=NULL, pos=NULL, srt=0 ) {
if(missing(main) )
main <- paste( deparse(substitute(x)), " - Mandel's",
attr(x, "mandel.type"),
if(attr(x, "mandel.method") == "robust") "(Robust variant)"
)
ng <- nrow(x)
if(missing(at)) at <- 1:ncol(x)
if(missing(ylim)) ylim <- range(pretty(c(0, na.omit(stack(x))$values)))
bx <- boxplot(as.matrix(x), horizontal=horizontal, at=at,
ylim=ylim, main=main, xlab=xlab, ylab=ylab, ...)
if(separators) {
if(length(at)> 1 ) {
offset.at <- diff(at[1:2])/2
sep.at <-c(at[1]-offset.at, at[1]+offset.at, at[-1]+diff(at)/2)
} else {
sep.at <- at+c(-0.5,0.5)
}
if(horizontal)
abline(h=sep.at, col=col.sep, lty=lty.sep, lwd=lwd.sep)
else
abline(v=sep.at, col=col.sep, lty=lty.sep, lwd=lwd.sep)
}
if(zero.line) abline(h=0, col=col.zero, lwd=lwd.zero, lty=lty.zero)
if(frame.plot) box()
if( !is.na(probs[1]) ) {
if(p.adjust != "none" ) {
probs <- 1 - p.adjust(1-probs, method=p.adjust, n = ng * ncol(x))
}
if(attr(x, "mandel.type") == "h" ) {
probs <- 1 - (1 - probs)/2
probs <- c(probs, 1-probs)
if(attr(x, "mandel.method") == "classical" ) {
if(horizontal)
abline(v=qmandelh(probs, ng), lty=lty.ind, col=col.ind, lwd=lwd.ind)
else
abline(h=qmandelh(probs, ng), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
if(horizontal)
abline(v=qnorm(probs), lty=lty.ind, col=col.ind, lwd=lwd.ind)
else
abline(h=qnorm(probs), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
} else {
if(attr(x, "mandel.method") == "classical" ) {
if(horizontal)
abline(v=qmandelk(probs, ng, attr(x, "n")), lty=lty.ind, col=col.ind, lwd=lwd.ind)
else
abline(h=qmandelk(probs, ng, attr(x, "n")), lty=lty.ind, col=col.ind, lwd=lwd.ind)
} else {
if(horizontal)
            abline(v=sqrt(qf(probs, attr(x, "n")-1, Inf)), lty=lty.ind, col=col.ind, lwd=lwd.ind)
else
            abline(h=sqrt(qf(probs, attr(x, "n")-1, Inf)), lty=lty.ind, col=col.ind, lwd=lwd.ind)
}
}
}
if( ifelse(is.logical(outlier.labels[1]),outlier.labels[1], !is.na(outlier.labels[1]) ) ) {
if(is.logical(outlier.labels[1])) {
outlier.labels <- row.names(x)
}
out.index <- rep(NA, length(bx$out))
for(i in 1:length(bx$out)) {
out.index[i] <- which.min( abs( x[,bx$group[i]] - bx$out[i] ) )
}
if(is.null(pos) && is.null(adj)) pos <- 4
if(horizontal)
text(bx$out, at[bx$group], outlier.labels[out.index],
cex=cex.lab, col=col.lab, pos=pos, adj=adj, srt=srt)
else
text(at[bx$group], bx$out, outlier.labels[out.index],
cex=cex.lab, col=col.lab, pos=pos, adj=adj, srt=srt)
}
return(invisible(bx))
}
qmandelk <- function(p, g, n, lower.tail = TRUE, log.p = FALSE) {
sqrt( g * qbeta( p, (n-1)/2, (g-1)*(n-1)/2, lower.tail=lower.tail, log.p=log.p) )
}
pmandelk <- function(q, g, n, lower.tail = TRUE, log.p = FALSE) {
pbeta( q^2 / g, (n-1)/2, (g-1)*(n-1)/2, lower.tail=lower.tail, log.p=log.p)
}
dmandelk <- function(x, g, n, log = FALSE) {
  d <- 2 * x * dbeta( x^2 / g, (n-1)/2, (g-1)*(n-1)/2) / g
  if (log) log(d) else d
}
rmandelk <- function(B, g, n) {
sqrt( g * rbeta( B, (n-1)/2, (g-1)*(n-1)/2) )
}
qmandelh <- function(p, g, lower.tail = TRUE, log.p = FALSE) {
  ((g-1)/sqrt(g))*(2*qbeta(p, (g-2)/2, (g-2)/2, lower.tail = lower.tail, log.p = log.p)-1)
}
pmandelh <- function(q, g, lower.tail = TRUE, log.p = FALSE) {
  pbeta( (1+q*sqrt(g)/(g-1))/2, (g-2)/2, (g-2)/2, lower.tail = lower.tail, log.p = log.p)
}
dmandelh <- function(x, g, log = FALSE) {
  d <- dbeta( (1+x*sqrt(g)/(g-1))/2, (g-2)/2, (g-2)/2) / (2*(g-1)/sqrt(g))
  if (log) log(d) else d
}
rmandelh <- function(B, g) {
((g-1)/sqrt(g))*(2*rbeta(B, (g-2)/2, (g-2)/2)-1)
}
.get.mandel.rowname <- function(g, rowname=NULL) {
rv <- if(g=="NULL" | g=="") {
if(is.null(rowname))
"Row"
else
rowname
} else {
g
}
return(rv)
}
.to.wide <- function(x, g, m) {
if(!is.factor(g)) g <- factor(g)
  if(!is.factor(m)) m <- factor(m)
n.per.g <- tapply(x, g:m, length)
n <- max(n.per.g, na.rm=TRUE)
d <- data.frame(g = factor(rep(levels(g), each=n), levels=levels(g)))
  row.names(d) <- paste(d$g, rep(1:n, length.out=length(d$g)), sep=":")
n.x <- paste(g, ave(x, g, m, FUN=function(x) 1:length(x)), sep=":")
for(nn in levels(m) ) {
subx <- x[m==nn]
names(subx) <- n.x[m==nn]
d[[nn]] <- subx[row.names(d)]
}
return(d)
} |
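# Quick consistency check for the h quantile/CDF pair defined above:
# pmandelh() should invert qmandelh() for any number of groups g > 2.
if (FALSE) {
  p <- c(0.9, 0.95, 0.99)
  all.equal(pmandelh(qmandelh(p, g = 10), g = 10), p)  # TRUE
}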
AutoXGBoostFunnelCARMA <- function(data,
GroupVariables = NULL,
BaseFunnelMeasure = NULL,
ConversionMeasure = NULL,
ConversionRateMeasure = NULL,
CohortPeriodsVariable = NULL,
CalendarDate = NULL,
CohortDate = NULL,
EncodingMethod = "credibility",
OutputSelection = c('Importances', 'EvalPlots', 'EvalMetrics', 'Score_TrainData'),
WeightsColumnName = NULL,
TruncateDate = NULL,
PartitionRatios = c(0.70,0.20,0.10),
TimeUnit = c("day"),
CalendarTimeGroups = c("day","week","month"),
CohortTimeGroups = c("day","week","month"),
TransformTargetVariable = TRUE,
TransformMethods = c("Identity","YeoJohnson"),
AnomalyDetection = list(tstat_high = 3, tstat_low = -2),
Jobs = c("Evaluate","Train"),
SaveModelObjects = TRUE,
ModelID = "Segment_ID",
ModelPath = NULL,
MetaDataPath = NULL,
DebugMode = FALSE,
CalendarVariables = c("wday","mday","yday","week","isoweek","month","quarter","year"),
HolidayGroups = c("USPublicHolidays","EasterGroup","ChristmasGroup","OtherEcclesticalFeasts"),
HolidayLookback = NULL,
CohortHolidayLags = c(1L, 2L, 7L),
CohortHolidayMovingAverages = c(3L, 7L),
CalendarHolidayLags = c(1L, 2L, 7L),
CalendarHolidayMovingAverages = c(3L, 7L),
ImputeRollStats = -0.001,
CalendarLags = list("day" = c(1L, 7L, 21L), "week" = c(1L, 4L, 52L), "month" = c(1L, 6L, 12L)),
CalendarMovingAverages = list("day" = c(1L, 7L, 21L), "week" = c(1L, 4L, 52L), "month" = c(1L, 6L, 12L)),
CalendarStandardDeviations = NULL,
CalendarSkews = NULL,
CalendarKurts = NULL,
CalendarQuantiles = NULL,
CalendarQuantilesSelected = "q50",
CohortLags = list("day" = c(1L, 7L, 21L), "week" = c(1L, 4L, 52L), "month" = c(1L, 6L, 12L)),
CohortMovingAverages = list("day" = c(1L, 7L, 21L), "week" = c(1L, 4L, 52L), "month" = c(1L, 6L, 12L)),
CohortStandardDeviations = NULL,
CohortSkews = NULL,
CohortKurts = NULL,
CohortQuantiles = NULL,
CohortQuantilesSelected = "q50",
PassInGrid = NULL,
GridTune = FALSE,
BaselineComparison = "default",
MaxModelsInGrid = 25L,
MaxRunMinutes = 180L,
MaxRunsWithoutNewWinner = 10L,
GridEvalMetric = 'mae',
NumOfParDepPlots = 1L,
NThreads = parallel::detectCores(),
TreeMethod = 'hist',
EvalMetric = 'MAE',
LossFunction = 'reg:squarederror',
Trees = 1000L,
LearningRate = 0.3,
MaxDepth = 9L,
MinChildWeight = 1.0,
SubSample = 1.0,
ColSampleByTree = 1.0) {
if(!is.null(ModelPath)) if(!dir.exists(file.path(ModelPath))) dir.create(ModelPath)
  if(!is.null(MetaDataPath)) if(!dir.exists(file.path(MetaDataPath))) dir.create(MetaDataPath)
ArgsList <- list()
ArgsList[["Algorithm"]] <- "XGBoost"
ArgsList[["GroupVariables"]] <- GroupVariables
ArgsList[["BaseFunnelMeasure"]] <- BaseFunnelMeasure
ArgsList[["ConversionMeasure"]] <- ConversionMeasure
ArgsList[["CohortPeriodsVariable"]] <- CohortPeriodsVariable
ArgsList[["CalendarDate"]] <- CalendarDate
ArgsList[["CohortDate"]] <- CohortDate
ArgsList[["AnomalyDetection"]] <- AnomalyDetection
ArgsList[["TimeUnit"]] <- TimeUnit
ArgsList[["CalendarTimeGroups"]] <- CalendarTimeGroups
ArgsList[["CohortTimeGroups"]] <- CohortTimeGroups
ArgsList[["WeightsColumnName"]] <- WeightsColumnName
if(is.null(MetaDataPath)) if(!is.null(ModelPath)) MetaDataPath <- ModelPath
ArgsList[["NThreads"]] <- NThreads
ArgsList[["ModelID"]] <- ModelID
ArgsList[["ModelPath"]] <- ModelPath
ArgsList[["MetaDataPath"]] <- MetaDataPath
ArgsList[["Jobs"]] <- Jobs
ArgsList[["EncodingMethod"]] <- EncodingMethod
ArgsList[["TransformTargetVariable"]] <- TransformTargetVariable
ArgsList[["CalendarVariables"]] <- CalendarVariables
ArgsList[["HolidayGroups"]] <- HolidayGroups
ArgsList[["HolidayLookback"]] <- HolidayLookback
ArgsList[["ImputeRollStats"]] <- ImputeRollStats
ArgsList[["CohortHolidayLags"]] <- CohortHolidayLags
ArgsList[["CohortHolidayMovingAverages"]] <- CohortHolidayMovingAverages
ArgsList[["CalendarHolidayLags"]] <- CalendarHolidayLags
ArgsList[["CalendarHolidayMovingAverages"]] <- CalendarHolidayMovingAverages
ArgsList[["CalendarLags"]] <- CalendarLags
ArgsList[["CalendarMovingAverages"]] <- CalendarMovingAverages
ArgsList[["CalendarStandardDeviations"]] <- CalendarStandardDeviations
ArgsList[["CalendarSkews"]] <- CalendarSkews
ArgsList[["CalendarKurts"]] <- CalendarKurts
ArgsList[["CalendarQuantiles"]] <- CalendarQuantiles
ArgsList[["CalendarQuantilesSelected"]] <- CalendarQuantilesSelected
ArgsList[["CohortLags"]] <- CohortLags
ArgsList[["CohortMovingAverages"]] <- CohortMovingAverages
ArgsList[["CohortStandardDeviations"]] <- CohortStandardDeviations
ArgsList[["CohortSkews"]] <- CohortSkews
ArgsList[["CohortKurts"]] <- CohortKurts
ArgsList[["CohortQuantiles"]] <- CohortQuantiles
ArgsList[["CohortQuantilesSelected"]] <- CohortQuantilesSelected
ArgsList[["PartitionRatios"]] <- if(!is.null(PartitionRatios)) PartitionRatios else c(0.70,0.20,0.10)
if(tolower(TimeUnit) %chin% c("day","days")) {
TimeUnit <- "days"
} else if(tolower(TimeUnit) %chin% c("week","weeks")) {
TimeUnit <- "weeks"
} else if(tolower(TimeUnit) %chin% c("month","months")) {
TimeUnit <- "months"
} else if(tolower(TimeUnit) %chin% c("quarter","quarters")) {
TimeUnit <- "quarters"
} else if(tolower(TimeUnit) %chin% c("year","years")) {
TimeUnit <- "years"
}
ArgsList[["PassInGrid"]] <- PassInGrid
ArgsList[["GridTune"]] <- GridTune
ArgsList[["BaselineComparison"]] <- BaselineComparison
ArgsList[["MaxModelsInGrid"]] <- MaxModelsInGrid
ArgsList[["MaxRunMinutes"]] <- MaxRunMinutes
ArgsList[["MaxRunsWithoutNewWinner"]] <- MaxRunsWithoutNewWinner
ArgsList[["LossFunction"]] <- LossFunction
ArgsList[["GridEvalMetric"]] <- GridEvalMetric
ArgsList[["EvalMetric"]] <- EvalMetric
ArgsList[["TreeMethod"]] <- TreeMethod
ArgsList[["Trees"]] <- Trees
ArgsList[["LearningRate"]] <- LearningRate
ArgsList[["MaxDepth"]] <- MaxDepth
ArgsList[["MinChildWeight"]] <- MinChildWeight
ArgsList[["SubSample"]] <- SubSample
ArgsList[["ColSampleByTree"]] <- ColSampleByTree
ArgsList[[paste0("Min","-", eval(CalendarDate))]] <- data[, min(get(CalendarDate))][[1L]]
ArgsList[[paste0("Max","-", eval(CalendarDate))]] <- data[, max(get(CalendarDate))][[1L]]
TimerDataEval <- data.table::data.table(Process = rep("a", 25L), Time = rep(999, 25L))
TimerDataTrain <- data.table::data.table(Process = rep("a", 25L), Time = rep(999, 25L))
if(!data.table::is.data.table(data)) data.table::setDT(data)
if(!(tolower(TimeUnit) %chin% c("1min","5min","10min","15min","30min","hour"))) {
if(is.character(data[[eval(CalendarDate)]])) {
x <- data[1L, get(CalendarDate)]
x1 <- lubridate::guess_formats(x, orders = c("mdY", "BdY", "Bdy", "bdY", "bdy", "mdy", "dby", "Ymd", "Ydm"))
data[, eval(CalendarDate) := as.Date(get(CalendarDate), tryFormats = x1)]
}
if(is.character(data[[eval(CohortDate)]])) {
x <- data[1L, get(CohortDate)]
x1 <- lubridate::guess_formats(x, orders = c("mdY", "BdY", "Bdy", "bdY", "bdy", "mdy", "dby", "Ymd", "Ydm"))
data[, eval(CohortDate) := as.Date(get(CohortDate), tryFormats = x1)]
}
} else {
data[, eval(CalendarDate) := as.POSIXct(get(CalendarDate))]
data[, eval(CohortDate) := as.POSIXct(get(CohortDate))]
}
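  # If no cohort-period column is supplied, derive it as the elapsed time
  # (in TimeUnit units) from the calendar date to the cohort date.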
if(is.null(CohortPeriodsVariable)) {
data[, CohortPeriods := as.numeric(difftime(time1 = get(CohortDate), time2 = get(CalendarDate), units = eval(TimeUnit)))]
CohortPeriodsVariable <- "CohortPeriods"
ArgsList[["CohortPeriodsVariable"]] <- CohortPeriodsVariable
}
if(!is.null(GroupVariables)) {
data[, paste0("Temp_", CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(GroupVariables, CohortPeriodsVariable)]
data[, paste0("Temp_", CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(GroupVariables, CalendarDate)]
data[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(GroupVariables)]
ArgsList[[paste0("Temp_", CohortPeriodsVariable)]] <- paste0("Temp_", CohortPeriodsVariable)
ArgsList[[paste0("Temp_", CalendarDate)]] <- paste0("Temp_", CalendarDate)
ArgsList[["GroupVar"]] <- "GroupVar"
} else {
data[, paste0("Temp_", CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(CohortPeriodsVariable)]
data[, paste0("Temp_", CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(CalendarDate)]
ArgsList[[paste0("Temp_", CohortPeriodsVariable)]] <- paste0("Temp_", CohortPeriodsVariable)
ArgsList[[paste0("Temp_", CalendarDate)]] <- paste0("Temp_", CalendarDate)
}
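  # If no conversion-rate column is supplied, derive it as conversions over
  # the first base funnel measure, with zero-denominator rows set to zero.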
if(is.null(ConversionRateMeasure)) {
data[, ConversionRate := data.table::fifelse(get(BaseFunnelMeasure[1L]) == 0, 0, get(ConversionMeasure) / get(BaseFunnelMeasure[1L]))]
ConversionRateMeasure <- "ConversionRate"
ArgsList[["ConversionRateMeasure"]] <- ConversionRateMeasure
} else {
ArgsList[["ConversionRateMeasure"]] <- ConversionRateMeasure
}
if("GroupVar" %chin% names(data)) {
drop <- setdiff(names(data), c("GroupVar", GroupVariables, paste0("Temp_", CohortPeriodsVariable), paste0("Temp_", CalendarDate), BaseFunnelMeasure, ConversionMeasure, ConversionRateMeasure, CohortPeriodsVariable, CalendarDate, CohortDate))
} else {
drop <- setdiff(names(data), c(paste0("Temp_", CohortPeriodsVariable), paste0("Temp_", CalendarDate), BaseFunnelMeasure, ConversionMeasure, ConversionRateMeasure, CohortPeriodsVariable, CalendarDate, CohortDate))
}
if(!identical(drop, character(0))) data.table::set(data, j = c(drop), value = NULL)
for(proc in Jobs) {
proc <- tolower(proc)
if(proc %chin% c("evaluate","eval","evaluation")) {
data1 <- data.table::copy(data)
} else {
data <- data1
rm(data1)
}
if(DebugMode) print("FE: CreateCalendarVariables() CalendarDate and CohortDate ----")
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::CreateCalendarVariables(data, DateCols = c(eval(CalendarDate)), AsFactor = FALSE, TimeUnits = CalendarVariables))
data <- RemixAutoML::CreateCalendarVariables(data, DateCols = c(eval(CohortDate)), AsFactor = FALSE, TimeUnits = CalendarVariables)
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 2L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 2L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 2L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 2L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(DebugMode) print("FE: CreateHolidayVariables() CalendarDate ----")
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::CreateHolidayVariables(data, DateCols = eval(CalendarDate), LookbackDays = if(!is.null(HolidayLookback)) HolidayLookback else LB(TimeUnit), HolidayGroups = HolidayGroups, Holidays = NULL, Print = FALSE))
data.table::setnames(data, old = "HolidayCounts", new = paste0(CalendarDate,"HolidayCounts"))
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 3L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 3L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 3L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 3L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(DebugMode) print("FE: CreateHolidayVariables() CohortDate ----")
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::CreateHolidayVariables(data, DateCols = eval(CohortDate), LookbackDays = if(!is.null(HolidayLookback)) HolidayLookback else LB(TimeUnit), HolidayGroups = eval(HolidayGroups), Holidays = NULL, Print = FALSE))
data.table::setnames(data, old = "HolidayCounts", new = paste0(CohortDate, "HolidayCounts"))
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 4L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 4L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 4L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 4L, j = "Process", value = "
}
if(DebugMode) print("AnomalyDetection for all CohortDates ----")
if(!is.null(AnomalyDetection)) {
temp <- data[, list(ConversionCheck = sum(get(ConversionMeasure))), by = eval(CohortDate)]
data.table::setnames(temp, eval(CohortDate), eval(CalendarDate))
temp1 <- data[, list(Leads = max(get(BaseFunnelMeasure[1]))), by = eval(CalendarDate)]
temp <- merge(temp, temp1, by = eval(CalendarDate), all = FALSE); rm(temp1)
temp <- temp[, ConversionRate := ConversionCheck / (Leads + 1)][, .SD, .SDcols = c(eval(CalendarDate), "ConversionRate")]
temp <- RemixAutoML::CreateCalendarVariables(data = temp, DateCols = eval(CalendarDate), AsFactor = FALSE, TimeUnits = "wday")
temp <- RemixAutoML::GenTSAnomVars(data = temp, ValueCol = "ConversionRate", GroupVars = paste0(CalendarDate,"_wday"), DateVar = eval(CalendarDate), HighThreshold = AnomalyDetection$tstat_high, LowThreshold = AnomalyDetection$tstat_low, KeepAllCols = TRUE, IsDataScaled = FALSE)
temp <- temp[, .SD, .SDcols = c(eval(CalendarDate), "AnomHigh","AnomLow")]
if(!is.null(temp)) {
data <- merge(data, temp, by.x = eval(CohortDate), by.y = eval(CalendarDate), all.x = TRUE)
data[is.na(AnomHigh), AnomHigh := 0]
data[is.na(AnomLow), AnomLow := 0]
} else {
ArgsList[["AnomalyDetection"]] <- NULL
}
rm(temp)
}
if(DebugMode) print("DM: Type Casting CalendarDate to Character to be used as a Grouping Variable ----")
x <- system.time(gcFirst = FALSE, data.table::set(data, j = eval(CalendarDate), value = as.character(data[[eval(CalendarDate)]])))
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 6L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 6L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 6L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 6L, j = "Process", value = "
}
if(DebugMode) print('Feature Engineering: Add Target Transformation ----')
if(TransformTargetVariable) {
TransformResults <- AutoTransformationCreate(data, ColumnNames=c(BaseFunnelMeasure[1L], ConversionMeasure), Methods=TransformMethods, Path=NULL, TransID='Trans', SaveOutput=FALSE)
data <- TransformResults$Data; TransformResults$Data <- NULL
TransformObject <- TransformResults$FinalResults; rm(TransformResults)
data[, eval(ConversionRateMeasure) := data.table::fifelse(get(BaseFunnelMeasure[1L]) == 0, 0, get(ConversionMeasure) / get(BaseFunnelMeasure[1L]))]
} else {
TransformObject <- NULL
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(DebugMode) print("DM: Sort data by GroupVariables, CalendarDate and then by CohortPeriodsVariable ----")
x <- system.time(gcFirst = FALSE, data.table::setorderv(data, cols = c(GroupVariables, CalendarDate, CohortPeriodsVariable), rep(1L, length(c(GroupVariables, CalendarDate, CohortPeriodsVariable)))))
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 5L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 5L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 5L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 5L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(DebugMode) print("FE: AutoLagRollStats() ConversionMeasure with CalendarDate as a Grouping Variable ----")
if(proc %in% c("evaluate","evaluation","eval","train","training")) {
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::AutoLagRollStats(
data = data,
DateColumn = CohortDate,
Targets = c(ConversionMeasure, ConversionRateMeasure),
HierarchyGroups = NULL,
IndependentGroups = paste0("Temp_", CalendarDate),
TimeUnit = TimeUnit,
TimeGroups = CohortTimeGroups,
TimeUnitAgg = TimeUnit,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = FALSE,
Lags = CohortLags,
MA_RollWindows = CohortMovingAverages,
SD_RollWindows = CohortStandardDeviations,
Skew_RollWindows = CohortSkews,
Kurt_RollWindows = CohortKurts,
Quantile_RollWindows = CohortQuantiles,
Quantiles_Selected = CohortQuantilesSelected,
Debug = FALSE))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 7L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 7L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 7L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 7L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
}
if(DebugMode) print("FE: AutoLagRollStats() CohortDate HolidayCounts with CalendarDate as a Grouping Variable ----")
if(proc %in% c("evaluate","evaluation","eval","train","training") && !is.null(CohortHolidayLags)) {
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::AutoLagRollStats(
data = data,
DateColumn = CohortDate,
Targets = paste0(CohortDate, "HolidayCounts"),
HierarchyGroups = NULL,
IndependentGroups = paste0("Temp_", CalendarDate),
TimeUnit = TimeUnit,
TimeGroups = TimeUnit,
TimeUnitAgg = TimeUnit,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = FALSE,
Lags = CohortHolidayLags,
MA_RollWindows = CohortHolidayMovingAverages,
SD_RollWindows = NULL,
Skew_RollWindows = NULL,
Kurt_RollWindows = NULL,
Quantile_RollWindows = NULL,
Quantiles_Selected = NULL,
Debug = FALSE))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 7L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 7L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 7L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 7L, j = "Process", value = "
}
x <- system.time(gcFirst = FALSE, data.table::set(data, j = eval(CalendarDate), value = as.Date(data[[eval(CalendarDate)]])))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 8L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 8L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 8L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 8L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
}
if(DebugMode) print("FE: AutoLagRollStats() BaseFunnelMeasure OVER CalendarDate ----")
if(proc %chin% c("evaluate","evaluation","eval","training","train")) {
for(bfm in seq_along(BaseFunnelMeasure)) {
if("GroupVar" %chin% names(data)) {
temp <- data[, lapply(.SD, data.table::first), .SDcols = c(BaseFunnelMeasure[bfm]), by = c("GroupVar", eval(CalendarDate))]
} else {
temp <- data[, lapply(.SD, data.table::first), .SDcols = c(BaseFunnelMeasure[bfm]), by = c(eval(CalendarDate))]
}
x <- system.time(gcFirst = FALSE, temp <- RemixAutoML::AutoLagRollStats(
data = temp,
DateColumn = CalendarDate,
Targets = BaseFunnelMeasure[bfm],
HierarchyGroups = NULL,
IndependentGroups = if(!"GroupVar" %chin% names(temp)) NULL else "GroupVar",
TimeGroups = CalendarTimeGroups,
TimeUnitAgg = TimeUnit,
TimeUnit = TimeUnit,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = FALSE,
Lags = CalendarLags,
MA_RollWindows = CalendarMovingAverages,
SD_RollWindows = CalendarStandardDeviations,
Skew_RollWindows = CalendarSkews,
Kurt_RollWindows = CalendarKurts,
Quantile_RollWindows = CalendarQuantiles,
Quantiles_Selected = CalendarQuantilesSelected,
Debug = FALSE))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 9L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 9L, j = "Process", value = paste0("
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 9L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 9L, j = "Process", value = paste0("
}
if("GroupVar" %chin% names(data)) {
data.table::setkeyv(temp, c("GroupVar", CalendarDate))
data.table::setkeyv(data, c("GroupVar", CalendarDate))
} else {
data.table::setkeyv(temp, c(CalendarDate))
data.table::setkeyv(data, c(CalendarDate))
}
keep <- setdiff(names(temp), names(data))
data[temp, paste0(keep) := mget(paste0("i.", keep))]
}
}
if(DebugMode) print("FE: ModelDataPrep() Impute Numeric Columns from AutoLagRollStats() ----")
x <- system.time(gcFirst = FALSE, data <- RemixAutoML::ModelDataPrep(
data = data,
Impute = TRUE,
CharToFactor = FALSE,
FactorToChar = FALSE,
IntToNumeric = TRUE,
DateToChar = FALSE,
RemoveDates = FALSE,
MissFactor = "0",
MissNum = ImputeRollStats,
IgnoreCols = NULL))
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 10L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 10L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 10L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 10L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
x <- system.time(gcFirst = FALSE, if(SaveModelObjects) data.table::fwrite(data, file = file.path(MetaDataPath, paste0(ModelID, "_ModelDataReady.csv"))))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 12L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 12L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 12L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 12L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(!exists("data")) data <- data.table::fread(file = file.path(MetaDataPath, paste0(ModelID, "_ModelDataReady.csv")))
if(proc %chin% c("evaluate","eval")) {
data.table::set(TimerDataEval, i = 13L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 13L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 13L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 13L, j = "Process", value = "
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
if(!all(class(data[[eval(CohortPeriodsVariable)]]) %chin% "numeric")) data[, eval(CohortPeriodsVariable) := as.numeric(as.character(get(CohortPeriodsVariable)))]
if(!all(class(data[[eval(CalendarDate)]]) %chin% "Date")) data[, eval(CalendarDate) := as.Date(get(CalendarDate))]
if(!all(class(data[[eval(CohortDate)]]) %chin% "Date")) data[, eval(CohortDate) := as.Date(get(CohortDate))]
if(!is.null(TruncateDate)) data <- data[get(CalendarDate) >= eval(TruncateDate)]
if(DebugMode) print("DM: Partition Data ----")
if(proc %chin% c("evaluate","eval","evaluation")) {
x <- system.time(gcFirst = FALSE, DataSets <- RemixAutoML::AutoDataPartition(
data = data,
NumDataSets = 3L,
Ratios = PartitionRatios,
PartitionType = "random",
StratifyColumnNames = if("GroupVar" %chin% names(data)) "GroupVar" else NULL,
TimeColumnName = NULL))
data.table::set(TimerDataEval, i = 15L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 15L, j = "Process", value = "
TrainData <- DataSets$TrainData
ValidationData <- DataSets$ValidationData
TestData <- DataSets$TestData
rm(DataSets)
}
if(DebugMode) print("ML: LightGBMRegression() ----")
if(proc %chin% c("evaluate","eval","training","train")) {
if(proc %chin% c("evaluate","eval")) {
Features <- names(TrainData)[!names(TrainData) %chin% c(eval(CalendarDate),eval(CohortDate),eval(ConversionMeasure),eval(ConversionRateMeasure),paste0("Temp_", CalendarDate),paste0("Temp_", CohortPeriodsVariable))]
idcols <- names(TrainData)[!names(TrainData) %in% c(Features, ConversionMeasure, ConversionRateMeasure)]
if(ModelID %chin% names(TrainData)) Features <- Features[!Features %chin% ModelID]
} else {
Features <- names(data)[!names(data) %chin% c(eval(CalendarDate),eval(CohortDate),eval(ConversionMeasure),eval(ConversionRateMeasure),paste0("Temp_", CalendarDate),paste0("Temp_", CohortPeriodsVariable))]
idcols <- names(data)[!names(data) %in% c(Features, ConversionMeasure, ConversionRateMeasure)]
if(ModelID %chin% names(data)) Features <- Features[!Features %chin% ModelID]
}
if(proc %chin% c("eval","evaluation","evaluate")) NTrees <- Trees
x <- system.time(gcFirst = FALSE, TestModel <- RemixAutoML::AutoXGBoostRegression(
ModelID = paste0(ModelID,"_", proc, "_"),
model_path = ModelPath,
metadata_path = MetaDataPath,
SaveModelObjects = FALSE,
ReturnModelObjects = TRUE,
NThreads = NThreads,
WeightsColumnName = WeightsColumnName,
OutputSelection = OutputSelection,
DebugMode = DebugMode,
SaveInfoToPDF = FALSE,
ReturnFactorLevels = TRUE,
EncodingMethod = EncodingMethod,
data = if(proc %chin% c("eval", "evaluate")) TrainData else data,
TrainOnFull = if(proc %chin% c("eval", "evaluate")) FALSE else TRUE,
ValidationData = if(proc %chin% c("eval", "evaluate")) ValidationData else NULL,
TestData = if(proc %chin% c("eval", "evaluate")) TestData else NULL,
TargetColumnName = ConversionRateMeasure,
FeatureColNames = Features,
PrimaryDateColumn = CohortDate,
IDcols = idcols,
TransformNumericColumns = NULL,
Methods = TransformMethods,
NumOfParDepPlots = NumOfParDepPlots,
PassInGrid = PassInGrid,
GridTune = GridTune,
MaxModelsInGrid = MaxModelsInGrid,
MaxRunsWithoutNewWinner = MaxRunsWithoutNewWinner,
MaxRunMinutes = MaxRunMinutes,
BaselineComparison = BaselineComparison,
grid_eval_metric = GridEvalMetric,
eval_metric = EvalMetric,
LossFunction = LossFunction,
TreeMethod = TreeMethod,
Trees = NTrees,
eta = LearningRate,
max_depth = MaxDepth,
min_child_weight = MinChildWeight,
subsample = SubSample,
colsample_bytree = ColSampleByTree))
if(SaveModelObjects) {
if(proc %chin% c("evaluate","eval","evaluation")) {
data.table::set(TimerDataEval, i = 16L, j = "Time", value = x[[3L]])
data.table::set(TimerDataEval, i = 16L, j = "Process", value = "
} else if(proc %chin% c("training","train")) {
data.table::set(TimerDataTrain, i = 16L, j = "Time", value = x[[3L]])
data.table::set(TimerDataTrain, i = 16L, j = "Process", value = "
}
}
if(proc %chin% c("eval","evaluate")) {
TreeCount <- TestModel$Model$niter
if(!is.null(TreeCount)) NTrees <- TreeCount else NTrees <- Trees
ArgsList[["Trees"]] <- NTrees
} else {
NTrees <- Trees
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
TestModel[["TransformationResults"]] <- TransformObject
if(SaveModelObjects) {
if(proc %chin% c("evaluate","eval","evaluation")) {
save(TestModel, file = file.path(ModelPath, paste0(ModelID, "_Evaluation.Rdata")))
} else if(proc %chin% c("training","train")) {
save(TestModel, file = file.path(ModelPath, paste0(ModelID, "_FinalTrain.Rdata")))
}
}
if(proc %chin% c("evaluate","eval","evaluation")) {
rm(TestModel)
} else {
for(zz in names(TestModel)) {
if(!zz %chin% c("Model", "ColNames", "TransformationResults", "FactorLevelsList")) {
TestModel[[zz]] <- NULL
}
}
}
gc()
}
SaveTimers(SaveModelObjectss = SaveModelObjects, procs = proc, TimerDataEvals = TimerDataEval, TimerDataTrains = TimerDataTrain, MetaDataPaths = MetaDataPath, ModelIDs = ModelID)
}
if(SaveModelObjects) save(ArgsList, file = file.path(ModelPath, paste0(ModelID, "_ArgsList.Rdata")))
return(list(ModelOutput = TestModel, ArgsList = ArgsList))
}
AutoXGBoostFunnelCARMAScoring <- function(TrainData,
ForwardLookingData = NULL,
TrainEndDate = NULL,
ForecastEndDate = NULL,
ArgsList = NULL,
TrainOutput = NULL,
ModelPath = NULL,
MaxCohortPeriod = NULL,
DebugMode = FALSE) {
if(!is.null(TrainOutput)) {
for(zz in seq_along(TrainOutput)) if(!names(TrainOutput)[zz] %chin% c("Model", "ColNames", "TransformationResults", "FactorLevelsList")) TrainOutput[[zz]] <- NULL
}
options(warn = -1)
TrainData <- ModelDataPrep(data=TrainData, Impute=FALSE, CharToFactor=FALSE, FactorToChar=FALSE, IntToNumeric=FALSE, LogicalToBinary=FALSE, DateToChar=FALSE, IDateConversion=TRUE, RemoveDates=FALSE, MissFactor="0", MissNum=-1, IgnoreCols=NULL)
ForwardLookingData <- ModelDataPrep(data = ForwardLookingData, Impute=FALSE, CharToFactor=FALSE, FactorToChar=FALSE, IntToNumeric=FALSE, LogicalToBinary=FALSE, DateToChar=FALSE, IDateConversion=TRUE, RemoveDates=FALSE, MissFactor="0", MissNum=-1, IgnoreCols=NULL)
if(DebugMode) print("Forecasting start and end periods ----")
if(is.null(TrainEndDate)) TrainEndDate <- TrainData[, max(get(ArgsList$CalendarDate), na.rm = TRUE)]
if(is.null(ForecastEndDate)) ForecastEndDate <- ForwardLookingData[, max(get(ArgsList$CalendarDate), na.rm = TRUE)]
if(DebugMode) print("DE: Add GroupVar ----")
if(!is.null(ArgsList[["GroupVar"]])) {
TrainData[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CohortPeriodsVariable)]
TrainData[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
TrainData[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables)]
} else {
TrainData[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CohortPeriodsVariable)]
TrainData[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CalendarDate)]
}
if(DebugMode) print('Feature Engineering: Add Target Transformation ----')
if(ArgsList$TransformTargetVariable) {
TrainData <- AutoTransformationScore(ScoringData = TrainData, FinalResults = TrainOutput$TransformationResults[ColumnName == eval(ArgsList$ConversionMeasure)], Type = "Apply")
ForwardLookingData <- AutoTransformationScore(ScoringData = ForwardLookingData, FinalResults = TrainOutput$TransformationResults[ColumnName == eval(ArgsList$BaseFunnelMeasure[1L])], Type = "Apply")
}
FC_Period <- 0L
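# Iterative scoring loop: each pass appends the next cohort rows, rebuilds the
# lag / rolling features, scores the newly added records, and advances
# TrainEndDate until it reaches ForecastEndDate.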
while(TrainEndDate < ForecastEndDate) {
IterationStart <- Sys.time()
if(DebugMode) print("Increment FC_Period ----")
FC_Period <- FC_Period + 1L
for(bla in seq_len(5L)) print(paste0("Working on Forecast for period: ", FC_Period, " ::: Periods left to forecast: ", difftime(ForecastEndDate, TrainEndDate)))
if(DebugMode) print("Convert to date ----")
if(!all(class(TrainData[[eval(ArgsList$CalendarDate)]]) %chin% "Date")) TrainData[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
if(!all(class(TrainData[[eval(ArgsList$CohortDate)]]) != "Date")) TrainData[, eval(ArgsList$CohortDate) := as.Date(get(ArgsList$CohortDate))]
if(DebugMode) print("Add indicator variable for AutoLagRollStatsScoring() so it knows what to score and what not to. A value of 1 indicates that the record should be scored. All others are not scored----")
TrainData[, ScoreRecords := 2]
if(DebugMode) print("Type conversion ----")
if(class(TrainData[[eval(ArgsList$CohortPeriodsVariable)]]) == "factor") TrainData[, eval(ArgsList$CohortPeriodsVariable) := as.numeric(as.character(get(ArgsList$CohortPeriodsVariable)))]
if(DebugMode) print("Create single future value for all cohorts ----")
maxct <- TrainData[, list(max(get(ArgsList$CohortPeriodsVariable)), data.table::first(ScoreRecords)), by = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
data.table::setnames(maxct, c("V1","V2"), c(ArgsList$CohortPeriodsVariable, "ScoreRecords"))
maxct[, eval(ArgsList$CohortPeriodsVariable) := get(ArgsList$CohortPeriodsVariable) + 1L]
maxct[, eval(ArgsList$CohortDate) := as.Date(get(ArgsList$CalendarDate)) + lubridate::days(get(ArgsList$CohortPeriodsVariable))]
data.table::setkeyv(maxct, cols = c(ArgsList$GroupVariables, ArgsList$CalendarDate))
if(!is.null(ArgsList[["GroupVar"]])) {
maxct[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CohortPeriodsVariable)]
maxct[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
maxct[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables)]
} else {
maxct[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CohortPeriodsVariable)]
maxct[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CalendarDate)]
}
if(DebugMode) print("DE: Subset TrainData and update TrainData ----")
ForwardLookingData <- ForwardLookingData[get(ArgsList$CalendarDate) > max(maxct[[eval(ArgsList$CalendarDate)]])]
if(!is.null(ArgsList$GroupVariables)) {
NextFCPeriod <- ForwardLookingData[get(ArgsList$CalendarDate) == min(get(ArgsList$CalendarDate))]
} else {
NextFCPeriod <- ForwardLookingData[1L]
}
NextFCPeriod[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
NextFCPeriod[, eval(ArgsList$CohortDate) := as.Date(get(ArgsList$CalendarDate))]
NextFCPeriod[, ScoreRecords := 1]
NextFCPeriod[, eval(ArgsList$CohortPeriodsVariable) := 0]
NextFCPeriod[, eval(ArgsList$ConversionRateMeasure) := 0.0]
if(!is.null(ArgsList[["GroupVar"]])) {
NextFCPeriod[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CohortPeriodsVariable)]
NextFCPeriod[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
NextFCPeriod[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables)]
} else {
NextFCPeriod[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CohortPeriodsVariable)]
NextFCPeriod[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CalendarDate)]
}
if(!is.null(ArgsList$GroupVariables)) {
temp <- data.table::copy(NextFCPeriod)
temp <- temp[, .SD, .SDcols = c(ArgsList$GroupVariables, ArgsList$CalendarDate, ArgsList$BaseFunnelMeasure)]
ForwardLookingData <- data.table::fsetdiff(x = ForwardLookingData, y = temp)
} else {
ForwardLookingData <- ForwardLookingData[2L:.N]
}
if(DebugMode) print("DE: Merge BaseFunnelMeasures to TrainData if they aren't in there ----")
if(!all(ArgsList$BaseFunnelMeasure %chin% names(TrainData))) {
data.table::setkeyv(x = TrainData, c(ArgsList$GroupVariables, ArgsList$CalendarDate))
data.table::setkeyv(x = ForwardLookingData, c(ArgsList$GroupVariables, ArgsList$CalendarDate))
TrainData[ForwardLookingData, eval(ArgsList$BaseFunnelMeasure) := mget(paste0("i.", ArgsList$BaseFunnelMeasure))]
}
if(DebugMode) print("DE: Aggregate Data ----")
temp <- TrainData[, lapply(.SD, data.table::first), .SDcols = c(ArgsList$BaseFunnelMeasure), keyby = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
if(DebugMode) print("Merge TrainData ----")
if(!any(class(temp[[ArgsList$CalendarDate]]) %chin% "Date")) temp[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
maxct[temp, eval(ArgsList$BaseFunnelMeasure) := mget(paste0("i.", ArgsList$BaseFunnelMeasure))]
maxct[, eval(ArgsList$ConversionMeasure) := 0]
maxct[, eval(ArgsList$ConversionRateMeasure) := 0]
maxct[, ScoreRecords := 1]
if(!is.null(ArgsList[["GroupVar"]])) {
maxct[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CohortPeriodsVariable)]
maxct[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables, ArgsList$CalendarDate)]
maxct[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$GroupVariables)]
} else {
maxct[, paste0("Temp_", ArgsList$CohortPeriodsVariable) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CohortPeriodsVariable)]
maxct[, paste0("Temp_", ArgsList$CalendarDate) := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(ArgsList$CalendarDate)]
}
maxct <- data.table::rbindlist(list(maxct, NextFCPeriod), use.names = TRUE, fill = TRUE)
for(xxxx in seq_len(ncol(maxct))) data.table::set(maxct, i = which(is.na(maxct[[xxxx]])), j = xxxx, value = 0)
if(DebugMode) print("DE: Remove CohortPeriods beyond MaxCohortPeriod ----")
maxct <- maxct[get(ArgsList$CohortPeriodsVariable) <= MaxCohortPeriod]
ScoreDate <- maxct[, max(get(ArgsList$CalendarDate))]
if(DebugMode) print("DE: Stack onto modeling TrainData for ArgsList$ModelID ----")
TrainData <- data.table::rbindlist(list(TrainData, maxct), fill = TRUE, use.names = TRUE)
TrainData[, eval(ArgsList$ConversionRateMeasure) := data.table::fifelse(get(ArgsList$BaseFunnelMeasure[1L]) == 0, 0, get(ArgsList$ConversionMeasure) / get(ArgsList$BaseFunnelMeasure[1L]))]
rm(maxct)
if(DebugMode) print("FE: Calendar & Holiday Variables ----")
TrainData <- RemixAutoML::CreateCalendarVariables(TrainData, DateCols = c(eval(ArgsList$CalendarDate)), AsFactor = FALSE, TimeUnits = ArgsList$CalendarVariables)
TrainData <- RemixAutoML::CreateCalendarVariables(TrainData, DateCols = c(eval(ArgsList$CohortDate)), AsFactor = FALSE, TimeUnits = ArgsList$CalendarVariables)
TrainData <- RemixAutoML::CreateHolidayVariables(TrainData, DateCols = c(ArgsList$CalendarDate), LookbackDays = if(!is.null(ArgsList$HolidayLookback)) ArgsList$HolidayLookback else LB(ArgsList$TimeUnit), HolidayGroups = ArgsList$HolidayGroups, Holidays = NULL, Print = FALSE)
data.table::setnames(TrainData, old = "HolidayCounts", new = paste0(ArgsList$CalendarDate,"HolidayCounts"))
TrainData <- RemixAutoML::CreateHolidayVariables(TrainData, DateCols = c(ArgsList$CohortDate), LookbackDays = if(!is.null(ArgsList$HolidayLookback)) ArgsList$HolidayLookback else LB(ArgsList$TimeUnit), HolidayGroups = ArgsList$HolidayGroups, Holidays = NULL, Print = FALSE)
data.table::setnames(TrainData, old = "HolidayCounts", new = paste0(ArgsList$CohortDate,"HolidayCounts"))
data.table::setorderv(TrainData, cols = c(ArgsList$CalendarDate,eval(ArgsList$CohortPeriodsVariable)), c(1L, 1L))
if(DebugMode) print("Add Anomaly detection zeros ----")
if(!is.null(ArgsList[["AnomalyDetection"]])) TrainData[, ":=" (AnomHigh = 0, AnomLow = 0)]
if(DebugMode) print("FE: ConversionMeasure OVER CohortDate ----")
temp <- data.table::copy(TrainData)
data.table::set(temp, j = ArgsList$CalendarDate, value = as.character(temp[[ArgsList$CalendarDate]]))
temp <- RemixAutoML::AutoLagRollStatsScoring(
data = temp,
DateColumn = ArgsList$CohortDate,
Targets = c(ArgsList$ConversionMeasure, ArgsList$ConversionRateMeasure),
RowNumsID = "ScoreRecords",
RowNumsKeep = 1,
HierarchyGroups = NULL,
IndependentGroups = paste0("Temp_", ArgsList$CalendarDate),
TimeUnit = ArgsList$TimeUnit,
TimeGroups = ArgsList$CohortTimeGroups,
TimeUnitAgg = ArgsList$TimeUnit,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = TRUE,
Lags = ArgsList$CohortLags,
MA_RollWindows = ArgsList$CohortMovingAverages,
SD_RollWindows = ArgsList$CohortStandardDeviations,
Skew_RollWindows = ArgsList$CohortSkews,
Kurt_RollWindows = ArgsList$CohortKurts,
Quantile_RollWindows = ArgsList$CohortQuantiles,
Quantiles_Selected = ArgsList$CohortQuantilesSelected,
Debug = TRUE)
temp[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
data.table::setkeyv(temp, c(ArgsList$GroupVariables, ArgsList$CalendarDate))
data.table::setkeyv(TrainData, c(ArgsList$GroupVariables, ArgsList$CalendarDate))
TrainData[temp, paste0(setdiff(names(temp), names(TrainData))) := mget(paste0("i.", setdiff(names(temp), names(TrainData))))]
rm(temp)
if(DebugMode) print("FE: CohortDateHolidayCounts OVER CohortDate ----")
temp <- data.table::copy(TrainData)
data.table::set(temp, j = eval(ArgsList$CalendarDate), value = as.character(temp[[eval(ArgsList$CalendarDate)]]))
temp <- RemixAutoML::AutoLagRollStatsScoring(
data = temp,
DateColumn = ArgsList$CohortDate,
Targets = paste0(ArgsList$CohortDate,"HolidayCounts"),
RowNumsID = "ScoreRecords",
RowNumsKeep = 1,
HierarchyGroups = NULL,
IndependentGroups = paste0("Temp_", ArgsList$CalendarDate),
TimeUnit = ArgsList$TimeUnit,
TimeGroups = ArgsList$TimeUnit,
TimeUnitAgg = ArgsList$TimeUnit,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = TRUE,
Lags = ArgsList$CohortHolidayLags,
MA_RollWindows = ArgsList$CohortHolidayMovingAverages,
SD_RollWindows = NULL,
Skew_RollWindows = NULL,
Kurt_RollWindows = NULL,
Quantile_RollWindows = NULL,
Quantiles_Selected = NULL,
Debug = TRUE)
temp[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
data.table::setkeyv(temp, c(ArgsList$GroupVariables, ArgsList$CalendarDate))
TrainData[temp, paste0(setdiff(names(temp), names(TrainData))) := mget(paste0("i.", setdiff(names(temp), names(TrainData))))]
rm(temp)
if(DebugMode) print("FE: BaseFunnelMeasure OVER CalendarDate ----")
for(bfm in seq_along(ArgsList$BaseFunnelMeasure)) {
if("GroupVar" %chin% names(TrainData)) {
temp <- TrainData[, lapply(.SD, data.table::first), .SDcols = c(ArgsList$BaseFunnelMeasure[bfm]), by = c("GroupVar", eval(ArgsList$CalendarDate))]
} else {
temp <- TrainData[, lapply(.SD, data.table::first), .SDcols = c(ArgsList$BaseFunnelMeasure[bfm]), by = c(eval(ArgsList$CalendarDate))]
}
temp[, ScoreRecords := data.table::fifelse(get(ArgsList$CalendarDate) == eval(ScoreDate), 1, 2)]
if(!any(class(temp[[ArgsList$CalendarDate]]) %chin% "Date")) data.table::set(temp, j = eval(ArgsList$CalendarDate), value = as.Date(temp[[eval(ArgsList$CalendarDate)]]))
temp <- RemixAutoML::AutoLagRollStatsScoring(
data = temp,
DateColumn = ArgsList$CalendarDate,
Targets = ArgsList$BaseFunnelMeasure[bfm],
HierarchyGroups = NULL,
IndependentGroups = if(!"GroupVar" %chin% names(temp)) NULL else "GroupVar",
TimeGroups = ArgsList[["CalendarTimeGroups"]],
TimeUnit = ArgsList[["TimeUnit"]],
TimeUnitAgg = ArgsList[["TimeUnit"]],
RowNumsID = "ScoreRecords",
RowNumsKeep = 1,
TimeBetween = NULL,
RollOnLag1 = TRUE,
Type = "Lag",
SimpleImpute = TRUE,
Lags = ArgsList[["CalendarLags"]],
MA_RollWindows = ArgsList[["CalendarMovingAverages"]],
SD_RollWindows = ArgsList[["CalendarStandardDeviations"]],
Skew_RollWindows = ArgsList[["CalendarSkews"]],
Kurt_RollWindows = ArgsList[["CalendarKurts"]],
Quantile_RollWindows = ArgsList[["CalendarQuantiles"]],
Quantiles_Selected = ArgsList[["CalendarQuantilesSelected"]],
Debug = TRUE)
temp[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
if("GroupVar" %chin% names(temp)) {
data.table::setkeyv(temp, c("GroupVar", ArgsList$CalendarDate))
data.table::setkeyv(TrainData, c("GroupVar", ArgsList$CalendarDate))
} else {
data.table::setkeyv(temp, ArgsList$CalendarDate)
data.table::setkeyv(TrainData, ArgsList$CalendarDate)
}
TrainData[temp, paste0(setdiff(names(temp), names(TrainData))) := mget(paste0("i.", setdiff(names(temp), names(TrainData))))]
rm(temp)
}
if(DebugMode) print("DE: Model data prep ----")
TrainData <- RemixAutoML::ModelDataPrep(
data = TrainData,
Impute = TRUE,
CharToFactor = FALSE,
FactorToChar = FALSE,
IntToNumeric = TRUE,
DateToChar = FALSE,
RemoveDates = FALSE,
MissFactor = "0",
MissNum = ArgsList$ImputeRollStats,
IgnoreCols = NULL)
if(DebugMode) print("DE: Type Change: CorhortDaysOut as numeric and the dates as Dates ----")
if(!all(class(TrainData[[ArgsList$CohortPeriodsVariable]]) %chin% "numeric")) TrainData[, eval(ArgsList$CohortPeriodsVariable) := as.numeric(as.character(get(ArgsList$CohortPeriodsVariable)))]
if(!all(class(TrainData[[ArgsList$CalendarDate]]) %chin% "Date")) TrainData[, eval(ArgsList$CalendarDate) := as.Date(get(ArgsList$CalendarDate))]
if(!all(class(TrainData[[ArgsList$CohortDate]]) %chin% "Date")) TrainData[, eval(ArgsList$CohortDate) := as.Date(get(ArgsList$CohortDate))]
if(is.null(TrainOutput) && FC_Period == 1L) load(file = file.path(normalizePath(eval(ArgsList$ModelPath)), paste0(ArgsList$ModelID, "_FinalTrain.Rdata")))
if(DebugMode) print("ML: Score Model ----")
temp1 <- data.table::copy(TrainData)
temp <- temp1[ScoreRecords == 1]
Features <- TrainOutput$ColNames[[1L]]
temp <- RemixAutoML::AutoXGBoostScoring(
FactorLevelsList = TrainOutput$FactorLevelsList,
TargetType = "regression",
ScoringData = temp,
FeatureColumnNames = Features,
IDcols = names(temp)[!names(temp) %chin% Features],
EncodingMethod = ArgsList$EncodingMethod,
ReturnShapValues = FALSE,
ModelObject = TrainOutput$Model,
ModelPath = if(is.null(TrainOutput)) ArgsList$ModelPath else NULL,
ModelID = ArgsList$ModelID,
ReturnFeatures = FALSE,
TransformNumeric = FALSE,
BackTransNumeric = FALSE,
TargetColumnName = "Rate",
TransformationObject = TrainOutput$TransformationResults,
TransID = NULL,
TransPath = NULL,
MDP_Impute = FALSE,
MDP_CharToFactor = FALSE,
MDP_RemoveDates = TRUE,
MDP_MissFactor = "0",
MDP_MissNum = -1)
if(DebugMode) print("DE: Update forecast TrainData ----")
temp1[ScoreRecords == 1, eval(ArgsList$ConversionRateMeasure) := temp[which(Predictions < 0), Predictions := 0][[1L]]]
temp1[ScoreRecords == 1, eval(ArgsList$ConversionMeasure) := get(ArgsList$ConversionRateMeasure) * get(ArgsList$BaseFunnelMeasure[1L])]
if(!is.null(ArgsList$GroupVariables)) {
temp1 <- temp1[ScoreRecords == 1, .SD, .SDcols = c("GroupVar", eval(ArgsList$GroupVariables), eval(ArgsList$CalendarDate),eval(ArgsList$CohortDate),eval(ArgsList$CohortPeriodsVariable),eval(ArgsList$BaseFunnelMeasure),eval(ArgsList$ConversionMeasure),eval(ArgsList$ConversionRateMeasure))]
TrainData <- data.table::rbindlist(list(TrainData[ScoreRecords != 1, .SD, .SDcols = c("GroupVar", eval(ArgsList$GroupVariables), eval(ArgsList$CalendarDate),eval(ArgsList$CohortDate),eval(ArgsList$CohortPeriodsVariable),eval(ArgsList$BaseFunnelMeasure),eval(ArgsList$ConversionMeasure),eval(ArgsList$ConversionRateMeasure))], temp1), fill = TRUE, use.names = TRUE)
} else {
temp1 <- temp1[ScoreRecords == 1, .SD, .SDcols = c(eval(ArgsList$CalendarDate),eval(ArgsList$CohortDate),eval(ArgsList$CohortPeriodsVariable),eval(ArgsList$BaseFunnelMeasure),eval(ArgsList$ConversionMeasure),eval(ArgsList$ConversionRateMeasure))]
TrainData <- data.table::rbindlist(list(TrainData[ScoreRecords != 1, .SD, .SDcols = c(eval(ArgsList$CalendarDate),eval(ArgsList$CohortDate),eval(ArgsList$CohortPeriodsVariable),eval(ArgsList$BaseFunnelMeasure),eval(ArgsList$ConversionMeasure),eval(ArgsList$ConversionRateMeasure))], temp1), fill = TRUE, use.names = TRUE)
}
if(!is.null(ModelPath)) data.table::fwrite(TrainData, file = file.path(ArgsList$ModelPath, paste0(ArgsList$ModelID, "_Forecasts.csv")))
TrainEndDate <- TrainData[, max(get(ArgsList$CalendarDate))]
IterationEnd <- Sys.time()
for(ggg in 1:5) print(difftime(time1 = IterationEnd, time2 = IterationStart, units = "secs"))
}
if(ArgsList$TransformTargetVariable) {
TrainData <- AutoTransformationScore(ScoringData=TrainData, FinalResults=TrainOutput$TransformationResults, Type='Inverse', TransID=NULL, Path=NULL)
TrainData[, eval(ArgsList$ConversionRateMeasure) := data.table::fifelse(get(ArgsList$BaseFunnelMeasure[1L]) == 0, 0, get(ArgsList$ConversionMeasure) / get(ArgsList$BaseFunnelMeasure[1L]))]
}
options(warn = 1)
return(TrainData)
} |
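# Hypothetical usage sketch for AutoXGBoostFunnelCARMAScoring(); the objects
# Output, ModelData and FutureLeads are illustrative stand-ins, with ArgsList
# and TrainOutput taken from the matching training run. Kept inert on purpose.
if(FALSE) {
  Forecasts <- AutoXGBoostFunnelCARMAScoring(
    TrainData = ModelData,
    ForwardLookingData = FutureLeads,
    ArgsList = Output$ArgsList,
    TrainOutput = Output$ModelOutput,
    MaxCohortPeriod = 60,
    DebugMode = FALSE)
}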
find_umax <- function(x , alternative = 'two-sided' ,
t = 0.05 , confidence = 0.95, common.effect = FALSE ){
if(is.numeric(confidence)){
if ( (confidence >= 1) | (confidence <= 0) ) stop("confidence must be a number between 0 - 1.")
}else{
stop("confidence must be a number between 0 - 1.")
}
if (common.effect){
t <- NULL
message( "Performing Replicability analysis with the common-effect assumption" )
}else{
if(is.null(t)){
stop("Error: Must specify truncation threshold t <= 1 .
For replicability-analysis with common-effect assumption, set common.effect = TRUE ")
}
if( (t<= 0)|(t>1) ){
stop("Error: Truncation threshold t must be a positive velue < = 1")
}
if(t == 1) message( "Performing Replicability analysis via the original Pearson's test" )
}
na.pvs <- all(is.na(x$pval))
na.zvs <- (sum(!is.na(x$zval))==0 )
if ( na.zvs & na.pvs ){
warning('Please supply valid p-values or z-values.')
return( list ( u_max = NULL , worst.case = NULL , side = NULL , r.value = NULL )[c(2,1,3,4)] )
}
alpha = (1 - confidence)
Do.truncated.umax = ifelse(is.null(t) , F , t < 1 )
Alpha.tilde = ifelse(is.null(t) , 1 , t )
Comb.random = !common.effect
chkclass(x, "meta")
twoSided <- (alternative == 'two-sided')
nstudlab <- sum(!is.na(x$pval))
nstudlab <- ifelse( na.pvs , sum(!is.na(x$zval)) , nstudlab )
pv.greater <- ifelse(common.effect , x$zval.fixed , x$zval.random )
if(!is.na(pv.greater)){
pv.greater <- pnorm( pv.greater , lower.tail = F )
}else{
pv.greater <- ifelse(common.effect , x$pval.fixed , x$pval.random )
TE.sign <- ifelse(common.effect , x$TE.fixed , x$TE.random )
pv.greater <- ifelse(TE.sign > 0 , pv.greater/2 , 1-pv.greater/2)
}
pv.less <- 1 - pv.greater
rvl <- rvg <- 1
ul <- ug <- u_max <- 0
meta_ug <- meta_ul <- meta_ul_last_sig <- meta_ug_last_sig <- NULL
if( !common.effect ){
meta_ul_prev <- meta_ug_prev <- NULL
if ( alternative != 'greater' ){
u1 <- 1
meta_ul <- metaRvalue.onesided.U(x,u = u1 ,comb.fixed = F , comb.random = T,
alternative = 'less',
do.truncated.umax = Do.truncated.umax ,
alpha.tilde = t )
rvl <- meta_ul$pvalue.onesided
while( (u1 < nstudlab ) & ( rvl <= alpha / (1 + twoSided )) ){
meta_ul_prev <- meta_ul
u1 <- u1 + 1
meta_ul <- metaRvalue.onesided.U(x,u = u1 ,comb.fixed = F , comb.random = T,
alternative = 'less',
do.truncated.umax = Do.truncated.umax ,
alpha.tilde = t )
rvl <- meta_ul$pvalue.onesided
}
ul <- u1
if ( (rvl > alpha / (1 + twoSided )) ){
if(is.null(meta_ul_prev)){
ul <- 0
}else{
rvl <- meta_ul_prev$pvalue.onesided
meta_ul <- meta_ul_prev
ul <- u1-1
}
}
u_max <- ul ; names( u_max) <- 'u^L'
rvalue <- rvl
worst.case.meta <- meta_ul
side = 'less'
rep.text <- paste0('out of ' , nstudlab , ' studies, ', ul ,
' with decreased effect.')
}
if ( alternative != 'less' ){
u1 <- 1
meta_ug = metaRvalue.onesided.U(x,u = u1 , comb.fixed = F , comb.random = T,
alternative = 'greater',
do.truncated.umax = Do.truncated.umax ,
alpha.tilde = t )
rvg <- meta_ug$pvalue.onesided
while((u1 < nstudlab )&( rvg <= alpha / (1 + twoSided )) ){
meta_ug_prev <- meta_ug
u1 <- u1 + 1
meta_ug <- metaRvalue.onesided.U(x,u = u1 ,comb.fixed = F , comb.random = T,
alternative = 'greater',
do.truncated.umax = Do.truncated.umax,
alpha.tilde = t )
rvg <- meta_ug$pvalue.onesided
}
ug <- u1
if (rvg > alpha / (1 + twoSided ) ){
if ( is.null(meta_ug_prev) ){
ug <- 0
}else{
rvg <- meta_ug_prev$pvalue.onesided
meta_ug <- meta_ug_prev
ug <- u1-1
}
}
u_max <- ug ; names( u_max) <- 'u^R'
rvalue <- rvg
worst.case.meta <- meta_ug
side <- 'greater'
rep.text <- paste0('out of ' , nstudlab , ' studies, ' , ug , ' with increased effect.')
}
if(is.null(rvl)) { rvl <- 1 ; ul <- 0 }
if(is.null(rvg)) { rvg <- 1 ; ug <- 0 }
names(rvalue) <- 'r^R'
if ( alternative == 'two-sided' ){
if( (ul > ug) | ((ul == ug)&(rvl<rvg)) ){
u_max <- ul
worst.case.meta <- meta_ul
side <- 'less'
names(rvalue) <- 'r^L'
names( u_max) <- 'u^L'
}
u_max <- c(u_max , ul , ug )
names(u_max) <- c('u_max' , 'u^L', 'u^R')
rvalue <- 2*min( c( rvl, rvg, 0.5 ))
rvalue <- c( rvalue , rvl , rvg )
names( rvalue ) <- c( 'r.value' , 'r^L' , 'r^R')
rep.text <- paste0('out of ' , nstudlab , ' studies: ', ul ,
' with decreased effect, and ', ug , ' with increased effect.')
worst.case.meta$pvalue.onesided <- min( c( 0.5 , rvl , rvg) )*2
}
names(side) <- 'Direction of the stronger signal'
return(list(worst.case = (worst.case.meta$worst.case)$studlab,
side = side , u_max = u_max , r.value = round(rvalue,digits = 4) ,
Replicability_Analysis = unname(rep.text)))
}
ul <- 0 ; meta_ul <- NULL
if ( alternative != 'greater' ){
u1 <- 1 ; u2 <- nstudlab
final_ul <- NULL
meta_ul_last_sig <- meta_ul <-
metaRvalue.onesided.U(x,u = 1 ,comb.fixed = T , comb.random = F,
alternative = 'less', do.truncated.umax = F ,
alpha.tilde = t )
if( pv.less > alpha / (1 + twoSided )) {
meta_ul_last_sig <- NULL
final_ul <-
list(u_max = 0 , worst.case = (meta_ul$worst.case)$studlab,
side = 'less' , r.value = meta_ul$pvalue.onesided )
}
meta_ul <- metaRvalue.onesided.U(x,u = u2 ,comb.fixed =T , comb.random = F,
alternative = 'less',
do.truncated.umax = F ,
alpha.tilde = t)
rvl <- meta_ul$pvalue.onesided
if(rvl <= alpha / (1 + twoSided )) {
final_ul <-
list(u_max = u2 , worst.case = (meta_ul$worst.case)$studlab,
side = 'less' , r.value = round( rvl ,digits = 4) )
}
if ( is.null(final_ul)){
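# Binary search over u: u1 tracks the largest u whose one-sided r-value is
# still significant; shrink [u1, u2] until the midpoint meets u2.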
u_mid <- ceiling( (u1 + u2)/2 )
while ( u_mid != u2 ){
meta_ul <- metaRvalue.onesided.U(x,u = u_mid ,comb.fixed = T , comb.random = F,
alternative = 'less',
do.truncated.umax = F ,
alpha.tilde = t)
if ( meta_ul$pvalue.onesided < alpha / (1 + twoSided ) ){
u1 <- u_mid
meta_ul_last_sig <- meta_ul
}else{
u2 <- u_mid
}
u_mid <- ceiling( (u1 + u2)/2 )
}
ul <- u1
meta_ul <- meta_ul_last_sig
rvl <- meta_ul$pvalue.onesided
side <- 'less'
rep.text <- paste0('out of ' , nstudlab , ' studies, ', ul ,
' with decreased effect.')
final_ul <-
list(u_max = ul , worst.case = (meta_ul$worst.case)$studlab,
side = 'less' ,
r.value = round( rvl ,digits = 4) ,
Replicability_Analysis = unname(rep.text) )
}
if( alternative == 'less'){
return(final_ul[c(2,1,3:5)])
}
}
ug <- 0 ; meta_ug <- NULL
if ( alternative != 'less' ){
u1 <- 1 ; u2 <- nstudlab
final_ug <- NULL
meta_ug <- meta_ug_last_sig <-
metaRvalue.onesided.U(x,u = 1 ,comb.fixed = T , comb.random = F,
alternative = 'greater', do.truncated.umax = F ,
alpha.tilde = t )
if( pv.greater > alpha / (1 + twoSided )) {
meta_ug_last_sig <- NULL
final_ug <-
list(u_max = 0 , worst.case = (meta_ug$worst.case)$studlab,
side = 'greater' , r.value = meta_ug$pvalue.onesided )
}
meta_ug = metaRvalue.onesided.U(x,u = u2 ,comb.fixed = T , comb.random = F,
alternative = 'greater',
do.truncated.umax = F ,
alpha.tilde = t )
rvg <- meta_ug$pvalue.onesided
if(rvg <= alpha / (1 + twoSided )) {
final_ug <-
list(u_max = u2 , worst.case = (meta_ug$worst.case)$studlab,
side = 'greater' , r.value = round( rvg ,digits = 4) )
}
if (is.null(final_ug)) {
u_mid <- ceiling( (u1 + u2)/2 )
while ( u_mid != u2 ){
meta_ug <- metaRvalue.onesided.U(x,u = u_mid ,comb.fixed = T , comb.random = F,
alternative = 'greater',
do.truncated.umax = F ,
alpha.tilde = t )
if ( meta_ug$pvalue.onesided < alpha / (1 + twoSided ) ){
u1 <- u_mid
meta_ug_last_sig <- meta_ug
}else{
u2 <- u_mid
}
u_mid <- ceiling( (u1 + u2)/2 )
}
ug <- u1
meta_ug <- meta_ug_last_sig
rvg <- meta_ug$pvalue.onesided
rep.text <- paste0('out of ' , nstudlab , ' studies, ', ug ,
' with increased effect.')
final_ug <-
list(u_max = ug , worst.case = (meta_ug$worst.case)$studlab,
side = 'greater' ,
r.value = round( rvg ,digits = 4) ,
Replicability_Analysis = unname(rep.text))
}
final <- final_ug
side = 'greater'
if( alternative == 'greater'){
return(final_ug[c(2,1,3:5)])
}
}
if(is.null(rvl)) { rvl <- 1 ; ul <- 0 }
if(is.null(rvg)) { rvg <- 1 ; ug <- 0 }
if ( alternative == 'two-sided' ){
if( (ul > ug) | ((ul == ug)&(rvl<rvg)) ){
final <- final_ul
side <- 'less'
}
final$r.value <- round( min( min(rvl,rvg)*2 ,1 ) ,digits = 4)
final <- final[c(2,1,3,4)]
return(final)
}
} |
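# Hypothetical usage sketch, assuming a 'meta' object such as one returned by
# meta::metagen() (field names like zval.fixed follow the meta version this
# code was written against). Kept inert on purpose.
if(FALSE) {
  library(meta)
  m <- metagen(TE = c(0.30, 0.45, 0.10), seTE = c(0.10, 0.15, 0.12),
               studlab = c("study1", "study2", "study3"))
  find_umax(m, alternative = "two-sided", t = 0.05, confidence = 0.95)
}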
add_l2 <- function(data, data_l2, cvar = "case") {
for(i in seq_along(data)) {
id <- which(data_l2[[cvar]] == names(data)[i])
if (length(id) > 1) {
stop("Multiple matches for a casename in the L1 dataset")
}
if (length(id) == 1) {
data[[i]] <- cbind(
data[[i]],
data_l2[id, -which(names(data_l2) == cvar)],
row.names = NULL
)
}
}
data
} |
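# Minimal sketch of add_l2(): 'data' is a named list of case-level (L1) data
# frames and 'data_l2' holds one row of L2 variables per case. All names below
# are illustrative. Kept inert on purpose.
if(FALSE) {
  cases <- list(
    Anna = data.frame(time = 1:3, values = c(4, 6, 8)),
    Ben  = data.frame(time = 1:3, values = c(5, 5, 7))
  )
  l2 <- data.frame(case = c("Anna", "Ben"), age = c(24, 31))
  add_l2(cases, l2, cvar = "case")
}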
drawGroup <-
function(xy, center=FALSE,
xyTopLeft=TRUE, bb=FALSE, bbMin=FALSE, bbDiag=FALSE, minCirc=FALSE,
maxSpread=FALSE, meanDist=FALSE, confEll=FALSE, CEP=FALSE, ringID=FALSE,
valueID=TRUE,
doRob=FALSE, level=0.95, scaled=TRUE, caliber=9,
dstTarget, conversion, unit="unit", alpha=0.5, target) {
UseMethod("drawGroup")
}
drawGroup.data.frame <-
function(xy, center=FALSE,
xyTopLeft=TRUE, bb=FALSE, bbMin=FALSE, bbDiag=FALSE, minCirc=FALSE,
maxSpread=FALSE, meanDist=FALSE, confEll=FALSE, CEP=FALSE, ringID=FALSE,
valueID=TRUE,
doRob=FALSE, level=0.95, scaled=TRUE, caliber=9,
dstTarget, conversion, unit="unit", alpha=0.5, target) {
if(missing(dstTarget)) {
dstTarget <- if(hasName(xy, "distance")) {
xy[["distance"]]
} else {
NA_real_
}
}
if(missing(conversion)) {
conversion <- determineConversion(xy)
}
if(missing(target) &&
hasName(xy, "target") &&
length(unique(xy[["target"]])) == 1L) {
target <- unique(xy[["target"]])
}
xy <- getXYmat(xy, xyTopLeft=xyTopLeft, center=center)
xyTopLeft <- FALSE
center <- FALSE
drawGroup(xy=xy, center=center, xyTopLeft=xyTopLeft, bb=bb, bbMin=bbMin,
bbDiag=bbDiag, minCirc=minCirc, maxSpread=maxSpread,
meanDist=meanDist, confEll=confEll, CEP=CEP, ringID=ringID,
valueID=valueID,
doRob=doRob, level=level, scaled=scaled, caliber=caliber,
dstTarget=dstTarget, conversion=conversion, unit=unit,
alpha=alpha, target=target)
}
drawGroup.default <-
function(xy, center=FALSE,
xyTopLeft=TRUE, bb=FALSE, bbMin=FALSE, bbDiag=FALSE, minCirc=FALSE,
maxSpread=FALSE, meanDist=FALSE, confEll=FALSE, CEP=FALSE, ringID=FALSE,
valueID=TRUE,
doRob=FALSE, level=0.95, scaled=TRUE, caliber=9,
dstTarget, conversion, unit="unit", alpha=0.5, target) {
if(!is.matrix(xy)) { stop("xy must be a matrix") }
if(!is.numeric(xy)) { stop("xy must be numeric") }
if(ncol(xy) != 2L) { stop("xy must have two columns") }
if(!is.numeric(caliber)) { stop("caliber must be numeric") }
if(caliber <= 0) { stop("caliber must be > 0") }
if(!all(is.numeric(level))) { stop("level must be numeric") }
if(any(level <= 0)) { stop("level must be > 0") }
if(!is.numeric(alpha)) { stop("alpha must be numeric") }
if((alpha < 0) || (alpha > 1)) { stop("alpha must be in [0,1]") }
if(center) {
warning("Centering only works for data frames, ignored here")
}
unit <- match.arg(tolower(unit),
choices=c("unit", "m", "cm", "mm", "yd", "ft", "in",
"deg", "moa", "smoa", "rad", "mrad", "mil"))
CEP <- as.character(CEP)
if(CEP != "FALSE") {
CEPtype <- if(CEP != "TRUE") { CEP } else { "CorrNormal" }
}
levelTo01 <- function(level) {
if(level >= 1) {
while(level >= 1) { level <- level / 100 }
warning(c("level must be in (0,1) and was set to ", level))
}
level
}
level <- vapply(level, levelTo01, numeric(1))
if(missing(dstTarget)) {
dstTarget <- NA_real_
}
if(missing(conversion)) {
conversion <- NA_character_
}
haveTarget <- if(missing(target) || is.null(target) ||
is.na(target) || (tolower(target) == "none")) {
FALSE
} else {
if(length(unique(target)) > 1L) {
warning("will use only 1st target")
}
target <- target[1]
TRUE
}
N <- nrow(xy)
if(N < 4L) {
haveRob <- FALSE
if(doRob) {
warning("We need >= 4 points for robust estimations")
}
} else {
haveRob <- TRUE
}
res <- vector("list", 0)
unitDst <- getUnits(conversion, first=TRUE)
unitXY <- getUnits(conversion, first=FALSE)
if(xyTopLeft) { xy[ , 2] <- -xy[ , 2] }
xyNew <- if(unit == "unit") {
unitXYnew <- unitXY
convFac <- getConvFac(paste0("mm2", unitXYnew))
calSize <- convFac * caliber/2
xy
} else if(tolower(unit) %in% c("deg", "moa", "smoa", "rad", "mrad", "mil")) {
unitXYnew <- unit
calSize <- getMOA(caliber/2, dst=dstTarget, conversion=paste0(unitDst, "2mm"), type=unit)
sign(xy) * getMOA(abs(xy), dst=dstTarget, conversion=conversion, type=unit)
} else {
unitXYnew <- unit
convFac <- getConvFac(paste0("mm2", unitXYnew))
calSize <- convFac * caliber/2
xy2xyNew <- getConvFac(paste0(unitXY, "2", unitXYnew))
xy2xyNew * xy
}
calSize <- unique(calSize)
res$xy <- xyNew
X <- xyNew[ , 1]
Y <- xyNew[ , 2]
if(all(is.na(X)) || all(is.na(Y))) {
if(nrow(na.omit(xy)) > 0L) {
stop("No non-missing coordinates after unit conversion\n Please supply units via 'conversion'")
} else {
stop("No non-missing coordinates")
}
}
axisLimsX <- numeric(0)
axisLimsY <- numeric(0)
if(haveRob && doRob) {
rob <- robustbase::covMcd(xyNew, cor=FALSE)
ctr <- rob$center
} else {
ctr <- colMeans(xyNew)
}
res$ctr <- ctr
if(bb) {
bBox <- getBoundingBox(xyNew)
res$bb <- bBox
axisLimsX <- c(axisLimsX, bBox$pts[c(1, 3)])
axisLimsY <- c(axisLimsY, bBox$pts[c(2, 4)])
}
if(bbMin) {
bBoxMin <- getMinBBox(xyNew)
res$bbMin <- bBoxMin
axisLimsX <- c(axisLimsX, bBoxMin$pts[ , 1])
axisLimsY <- c(axisLimsY, bBoxMin$pts[ , 2])
}
if(bbDiag) {
if(bb || !any(c(bb, bbMin))) {
bBox <- getBoundingBox(xyNew)
res$bbDiag <- bBox$diag
}
if(bbMin) {
res$bbMinDiag <- bBoxMin$diag
}
}
if(minCirc) {
mCirc <- getMinCircle(xyNew)
res$minCirc <- mCirc
axisLimsX <- c(axisLimsX, mCirc$ctr[1] + mCirc$rad,
mCirc$ctr[1] - mCirc$rad)
axisLimsY <- c(axisLimsY, mCirc$ctr[2] + mCirc$rad,
mCirc$ctr[2] - mCirc$rad)
}
if(maxSpread) {
maxPD <- getMaxPairDist(xyNew)
res$maxPairDist <- maxPD$d
}
if(meanDist) {
meanDstCtr <- mean(getDistToCtr(xyNew))
res$meanDist <- meanDstCtr
}
if(confEll) {
if(doRob && haveRob) {
cEll <- lapply(level, function(x) {
getConfEll(xyNew, level=x, dstTarget=dstTarget, conversion=conversion, doRob=TRUE) })
cEll <- lapply(cEll, function(x) {
x$ctr <- x$ctrRob
x$size <- x$sizeRob
x })
} else {
cEll <- lapply(level, function(x) {
getConfEll(xyNew, level=x, dstTarget=dstTarget, conversion=conversion, doRob=FALSE) })
}
cEllCopy <- lapply(cEll, function(x) {
x$ctrRob <- NULL
x$covRob <- NULL
x$sizeRob <- NULL
x$shapeRob <- NULL
x$size <- x$size["unit", ]
axisLimsX <- c(axisLimsX, x$ctr[1] + x$size["semi-major"],
x$ctr[1] - x$size["semi-major"])
axisLimsY <- c(axisLimsY, x$ctr[2] + x$size["semi-major"],
x$ctr[2] - x$size["semi-major"])
x })
res$confEll <- cEllCopy
}
if(CEP != "FALSE") {
CEPres <- getCEP(xyNew, CEPlevel=level, dstTarget=dstTarget,
conversion=conversion, type=CEPtype, accuracy=FALSE)
res$CEP <- vapply(CEPres$CEP, function(x) { x["unit", CEPtype] }, numeric(1) )
axisLimsX <- c(axisLimsX, vapply(CEPres$CEP, function(x) {
c(CEPres$ctr[1] + x["unit", CEPtype], CEPres$ctr[1] - x["unit", CEPtype])
}, numeric(2)))
axisLimsY <- c(axisLimsY, vapply(CEPres$CEP, function(x) {
c(CEPres$ctr[2] + x["unit", CEPtype], CEPres$ctr[2] - x["unit", CEPtype])
}, numeric(2)))
}
xLims <- range(c(X, axisLimsX))
yLims <- range(c(Y, axisLimsY))
cols1 <- c(ctr=rgb(255, 0, 255, maxColorValue=255),
bb=rgb(228, 26, 28, maxColorValue=255),
bbMin=rgb( 55, 126, 184, maxColorValue=255),
bbDiag=rgb( 77, 175, 74, maxColorValue=255),
bbMinDiag=rgb(152, 78, 163, maxColorValue=255),
minCirc=rgb(255, 127, 0, maxColorValue=255),
maxSpread=rgb(255, 255, 51, maxColorValue=255),
meanDist=rgb(166, 86, 40, maxColorValue=255),
confEll=rgb(247, 129, 191, maxColorValue=255),
CEP=rgb(153, 153, 153, maxColorValue=255))
cols2 <- c(ctr=rgb(255, 0, 255, maxColorValue=255),
bb=rgb(166, 206, 227, maxColorValue=255),
bbMin=rgb( 31, 120, 180, maxColorValue=255),
bbDiag=rgb(178, 223, 138, maxColorValue=255),
bbMinDiag=rgb( 51, 160, 44, maxColorValue=255),
minCirc=rgb(251, 154, 153, maxColorValue=255),
maxSpread=rgb(227, 26, 28, maxColorValue=255),
meanDist=rgb(253, 191, 111, maxColorValue=255),
confEll=rgb(255, 127, 0, maxColorValue=255),
CEP=rgb(202, 178, 214, maxColorValue=255))
pointCol <- if(haveTarget) {
cols <- cols1
trgt <- getTarget(target, unit="cm", dstTarget=dstTarget, conversion=conversion)
if(hasName(trgt, "colPt")) {
adjustcolor(trgt$colPt, alpha.f=alpha)
} else {
if(all(is.na(unlist(trgt$inUnit)))) {
cols <- cols2
rgb(0, 0, 0, alpha)
} else {
rgb(1, 1, 1, alpha)
}
}
} else {
cols <- cols2
rgb(0, 0, 0, alpha)
}
legText <- character(0)
legCol <- character(0)
legLty <- numeric(0)
legLwd <- numeric(0)
legPch <- numeric(0)
dstTargetPlot <- paste(unique(round(na.omit(dstTarget))), collapse=", ")
unitXYnew <- unique(unitXYnew)
unitXYnewPlot <- paste(na.omit(unitXYnew), collapse=", ")
plot(Y ~ X, asp=1, type="n", main="Group (x,y)-coordinates",
xlim=xLims, ylim=yLims,
sub=paste("distance:", dstTargetPlot, unitDst),
xlab=paste0("X [", na.omit(unitXYnew), "]"),
ylab=paste0("Y [", na.omit(unitXYnew), "]"))
if(haveTarget) {
res$target <- drawTarget(target, unit=unitXYnew, dstTarget=dstTarget,
conversion=conversion, add=TRUE, cex=1.5)
} else {
abline(v=0, h=0, col="lightgray")
}
if(scaled && !is.na(calSize)) {
symbols(Y ~ X, asp=1, main="(x,y)-coordinates", add=TRUE,
circles=rep(calSize, N), inches=FALSE,
fg=rgb(0.3, 0.3, 0.3, alpha), bg=pointCol)
} else {
points(Y ~ X, pch=20, col=pointCol)
}
points(ctr[1], ctr[2], col=cols["ctr"], pch=4, lwd=2, cex=2)
legText <- c(legText, "center")
legCol <- c(legCol, cols["ctr"])
legLty <- c(legLty, NA)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, 4)
if(bb) {
drawBox(bBox, fg=cols["bb"], lwd=2)
legText <- c(legText, "bounding box")
legCol <- c(legCol, cols["bb"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
txtPosW_X <- bBox$pts["xleft"] + 0.15*(bBox$pts["xright"] - bBox$pts["xleft"])
txtPosW_Y <- bBox$pts["ytop"] + strheight("1234")
txtPosH_X <- bBox$pts["xleft"] - strheight("1234")
txtPosH_Y <- bBox$pts["ybottom"] + 0.85*(bBox$pts["ytop"] - bBox$pts["ybottom"])
text(x=txtPosW_X, y=txtPosW_Y, labels=round(bBox$width, 2), col=cols["bb"], adj=c(0.5, 0.5))
text(x=txtPosH_X, y=txtPosH_Y, labels=round(bBox$height, 2), col=cols["bb"], adj=c(0.5, 0.5), srt=90)
}
}
if(bbMin) {
drawBox2(bBoxMin, fg=cols["bbMin"], lwd=2)
legText <- c(legText, "min bounding box")
legCol <- c(legCol, cols["bbMin"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
dPts <- diff(bBoxMin$pts)
idxMax <- which.max(rowSums(dPts^2))
idxMin <- which.min(rowSums(dPts^2))
eMax <- dPts[idxMax, ]
eMin <- dPts[idxMin, ]
eMaxUp <- eMax * sign(eMax[2])
eMinUp <- eMin * sign(eMin[2])
degMax <- atan2(eMaxUp[2], eMaxUp[1])*180 / pi
degMin <- atan2(eMinUp[2], eMinUp[1])*180 / pi
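# Rotate the min bounding box into axis-aligned coordinates, compute the
# label positions there, then rotate those positions back by the same angle.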
ang <- -bBoxMin$angle * pi/180
rotMat <- matrix(c(cos(ang), sin(ang), -sin(ang), cos(ang)), nrow=2)
bbMin90 <- t(rotMat %*% t(bBoxMin$pts))
idxMax <- which.max(rowSums(bbMin90^2))
idxMin <- which.min(rowSums(bbMin90^2))
txtPosW_X <- min(bbMin90[ , 1]) + 0.15*(max(bbMin90[ , 1]) - min(bbMin90[ , 1]))
txtPosW_Y <- max(bbMin90[ , 2]) + strheight("1234")
txtPosH_X <- min(bbMin90[ , 1]) - strheight("1234")
txtPosH_Y <- min(bbMin90[ , 2]) + 0.85*(max(bbMin90[ , 2]) - min(bbMin90[ , 2]))
txtPosWH_XY <- matrix(c(txtPosW_X, txtPosW_Y, txtPosH_X, txtPosH_Y), byrow=TRUE, nrow=2)
ang <- bBoxMin$angle * pi/180
rotMat <- matrix(c(cos(ang), sin(ang), -sin(ang), cos(ang)), nrow=2)
txtPos12_XY <- t(rotMat %*% t(txtPosWH_XY))
text(x=txtPos12_XY[1, 1], y=txtPos12_XY[1, 2], labels=round(bBoxMin$width, 2),
col=cols["bbMin"], adj=c(0.5, 0.5), srt=degMax)
text(x=txtPos12_XY[2, 1], y=txtPos12_XY[2, 2], labels=round(bBoxMin$height, 2),
col=cols["bbMin"], adj=c(0.5, 0.5), srt=degMin)
}
}
if(bbDiag) {
if(bb || !any(c(bb, bbMin))) {
segments(x0=bBox$pts["xleft"], y0=bBox$pts["ybottom"],
x1=bBox$pts["xright"], y1=bBox$pts["ytop"],
col=cols["bbDiag"], lwd=2)
legText <- c(legText, "bound box diag")
legCol <- c(legCol, cols["bbDiag"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
dPts <- c(bBox$pts["xright"]-bBox$pts["xleft"], bBox$pts["ytop"]-bBox$pts["ybottom"])
dPtsU <- dPts / sqrt(sum(dPts^2))
dPtsUO <- c(dPtsU[2], -dPtsU[1])
txtPos_X <- bBox$pts["xleft"] + 0.75*(bBox$pts["xright"] - bBox$pts["xleft"]) + strheight("1234")*dPtsUO[1]
txtPos_Y <- bBox$pts["ybottom"] + 0.75*(bBox$pts["ytop"] - bBox$pts["ybottom"]) + strheight("1234")*dPtsUO[2]
eUp <- dPts * sign(dPts[2])
deg <- atan2(eUp[2], eUp[1])*180 / pi
text(x=txtPos_X, y=txtPos_Y, labels=round(bBox$diag, 2),
col=cols["bbDiag"], srt=deg, adj=c(0,0))
}
}
if(bbMin) {
segments(x0=bBoxMin$pts[1, 1], y0=bBoxMin$pts[1, 2],
x1=bBoxMin$pts[3, 1], y1=bBoxMin$pts[3, 2],
col=cols["bbMinDiag"], lwd=2)
legText <- c(legText, "min bound box diag")
legCol <- c(legCol, cols["bbMinDiag"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
dPts <- diff(bBoxMin$pts)
dPtsU <- dPts / sqrt(sum(dPts^2))
dPtsUO <- c(dPtsU[2], -dPtsU[1])
txtPos_X <- bBoxMin$pts[1, 1] + 0.75*(bBoxMin$pts[3, 1] - bBoxMin$pts[1, 1]) + strheight("1234")*dPtsUO[1]
txtPos_Y <- bBoxMin$pts[1, 2] + 0.75*(bBoxMin$pts[3, 2] - bBoxMin$pts[1, 2]) + strheight("1234")*dPtsUO[2]
dPts <- diff(bBoxMin$pts[c(1, 3), ])
eUp <- dPts * sign(dPts[2])
deg <- atan2(eUp[2], eUp[1])*180 / pi
text(x=txtPos_X, y=txtPos_Y, labels=round(bBoxMin$diag, 2),
col=cols["bbDiag"], srt=deg, adj=c(0.5, 0.5))
}
}
}
if(minCirc) {
drawCircle(mCirc, fg=cols["minCirc"], lwd=2)
legText <- c(legText, "min circle")
legCol <- c(legCol, cols["minCirc"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
txtPos_X_top <- 0
txtPos_Y_top <- mCirc$rad + strheight("1234")
ang <- pi/4
rotMat <- matrix(c(cos(ang), sin(ang), -sin(ang), cos(ang)), nrow=2)
txtPos_XY <- rotMat %*% matrix(c(txtPos_X_top, txtPos_Y_top), nrow=2)
txtPos_X <- mCirc$ctr[1] + txtPos_XY[1, 1]
txtPos_Y <- mCirc$ctr[2] + txtPos_XY[2, 1]
text(x=txtPos_X, y=txtPos_Y, labels=round(mCirc$rad, 2),
srt=180*ang/pi, col=cols["minCirc"], adj=c(0.5, 0.5))
}
}
if(maxSpread) {
segments(x0=xyNew[maxPD$idx[1], 1], y0=xyNew[maxPD$idx[1], 2],
x1=xyNew[maxPD$idx[2], 1], y1=xyNew[maxPD$idx[2], 2],
col=cols["maxSpread"], lwd=2)
legText <- c(legText, "max spread")
legCol <- c(legCol, cols["maxSpread"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
m <- xyNew[maxPD$idx, ]
dPts <- diff(m)
dPtsU <- dPts / sqrt(sum(dPts^2))
dPtsUO <- c(dPtsU[2], -dPtsU[1])
txtPos_X <- m[1, 1] + 0.75*dPts[1] + strheight("1234")*dPtsUO[1]
txtPos_Y <- m[1, 2] + 0.75*dPts[2] + strheight("1234")*dPtsUO[2]
eUp <- dPts * sign(dPts[2])
deg <- atan2(eUp[2], eUp[1])*180 / pi
text(x=txtPos_X, y=txtPos_Y, labels=round(maxPD$d, 2),
col=cols["maxSpread"], srt=deg, adj=c(0,0))
}
}
if(meanDist) {
drawCircle(ctr, radius=meanDstCtr, fg=cols["meanDist"], lwd=2)
legText <- c(legText, "mean dist to ctr")
legCol <- c(legCol, cols["meanDist"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
txtPos_X_top <- 0
txtPos_Y_top <- meanDstCtr + strheight("1234")
ang <- -pi/4
rotMat <- matrix(c(cos(ang), sin(ang), -sin(ang), cos(ang)), nrow=2)
txtPos_XY <- rotMat %*% matrix(c(txtPos_X_top, txtPos_Y_top), nrow=2)
txtPos_X <- ctr[1] + txtPos_XY[1, 1]
txtPos_Y <- ctr[2] + txtPos_XY[2, 1]
text(x=txtPos_X, y=txtPos_Y, labels=round(meanDstCtr, 2),
srt=180*ang/pi, col=cols["meanDist"], adj=c(0.5, 0.5))
}
}
if(confEll) {
ellCtrL <- lapply(cEll, function(x) { drawEllipse(x, pch=4, fg=cols["confEll"], lwd=2) })
legText <- c(legText, paste("conf ellipse", paste(level, collapse=" ")))
legCol <- c(legCol, cols["confEll"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
idx <- which.max(ellCtrL[[1]][ , 1])
txtPos_X <- ellCtrL[[1]][idx, 1] + strheight("1234")
txtPos_Y <- ellCtrL[[1]][idx, 2]
label <- paste0(c(round(cEllCopy[[1]]$size["semi-major"], 2),
round(cEllCopy[[1]]$size["semi-minor"], 2)),
collapse="; ")
text(x=txtPos_X, y=txtPos_Y, labels=label,
srt=-90, col=cols["confEll"], adj=c(0.5, 0.5))
}
}
if(CEP != "FALSE") {
lapply(CEPres$CEP, function(x) {
drawCircle(CEPres$ctr, x["unit", CEPtype], fg=cols["CEP"], lwd=2) })
legText <- c(legText, paste("CEP", CEPtype, paste(level, collapse=" ")))
legCol <- c(legCol, cols["CEP"])
legLty <- c(legLty, 1)
legLwd <- c(legLwd, 2)
legPch <- c(legPch, NA)
if(valueID) {
txtPos_X <- CEPres$ctr[1]
txtPos_Y <- CEPres$ctr[2] + CEPres$CEP[[1]]["unit", ] + strheight("1234")
text(x=txtPos_X, y=txtPos_Y, labels=signif(CEPres$CEP[[1]]["unit", ], 2),
col=cols["CEP"], adj=c(0.5, 0.5))
}
}
if(ringID && haveTarget) {
rc <- simRingCount(xy, target=target, caliber=caliber, unit=unique(unitXY))
res$ringCount <- with(rc, c(count=count, max=max))
with(rc, text(xyNew[,1], xyNew[,2], label=levels(rings)[rings],
adj=c(0.5, 0.5), col="darkgreen"))
}
legend(x="bottomleft", legend=legText, col=legCol, lty=legLty,
lwd=legLwd, pch=legPch, bg=rgb(1, 1, 1, 0.7))
return(invisible(res))
} |
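# Hypothetical usage sketch; drawGroup() leans on package helpers such as
# getBoundingBox() and getMinCircle() defined elsewhere, and the conversion
# string "yd2in" is an assumed example value. Kept inert on purpose.
if(FALSE) {
  set.seed(1)
  xy <- cbind(rnorm(10, sd = 2), rnorm(10, sd = 2))
  drawGroup(xy, xyTopLeft = FALSE, bb = TRUE, minCirc = TRUE, confEll = TRUE,
            dstTarget = 100, conversion = "yd2in", caliber = 9)
}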
cut.folder <- function(x, breaks, labels = NULL, include.lowest = FALSE, right = TRUE,
dig.lab = 3L, ordered_result = FALSE, cutcol = NULL, ...) {
x <- as.data.frame(x, stringsAsFactors = TRUE)
result <- cut.data.frame(x, breaks = breaks, labels = labels, include.lowest = include.lowest,
right = right, dig.lab = dig.lab, ordered_result = ordered_result,
cutcol = cutcol)
return(as.folder(result))
} |
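# Minimal sketch, assuming the 'folder' class and the cut.data.frame() /
# as.folder() helpers defined elsewhere in this package; the as.folder() call
# below is an assumed constructor. Kept inert on purpose.
if(FALSE) {
  f <- as.folder(data.frame(g = rep(c("g1", "g2"), each = 5), x = rnorm(10)))
  cut(f, breaks = 3)
}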
SummarySurvey <- function(design = NULL, variables = NULL, conf.level = 0.95, rnd = 3) {
if (length(variables) != length(names(design$variables))) {
stop('The length of variables argument must be equal to the length of names(design$variables)')
}
match1 <- names(design$variables)
match2 <- c('psu.id', 'ssu.id', 'pop.size', 'psu.size')
matches <- which(!is.na(match(match1, match2)))
variables[matches] <- ''
z <- abs(round(qnorm((1 - conf.level) / 2, 0, 1), 2))
vrs <- design$variables
out <- NULL
for (i in 1:length(variables)) {
if (variables[i] == 'total') {
tmp <- svytotal(~ vrs[, i], design, na.rm = T, deff = T)
tmp1 <- as.matrix(cbind(tmp, SE(tmp), confint(tmp),
deff(tmp), cv(tmp) * z * 100), nr = 1)
ci <- attributes(confint(tmp, level = conf.level))$dimnames[[2]]
rownames(tmp1) <- paste0('Total_', names(vrs)[i])
out <- rbind(out, tmp1)
}
if (variables[i] == 'mean') {
tmp <- svymean(~ vrs[, i], design, na.rm = T, deff = T)
tmp1 <- as.matrix(cbind(tmp, SE(tmp), confint(tmp),
deff(tmp), cv(tmp) * z * 100), nr = 1)
ci <- attributes(confint(tmp, level = conf.level))$dimnames[[2]]
rownames(tmp1) <- paste0('Mean_', names(vrs)[i])
out <- rbind(out, tmp1)
}
if (variables[i] == 'prop') {
tmp <- svymean(~ vrs[, i], design, na.rm = T, deff = T)
tmp1 <- as.matrix(cbind(tmp, SE(tmp), confint(tmp),
deff(tmp), (confint(tmp)[, 2] - tmp[1]) * 100), nr = 1)
ci <- attributes(confint(tmp, level = conf.level))$dimnames[[2]]
rownames(tmp1) <- paste0('Prop_', rownames(tmp1))
rownames(tmp1) <- gsub('vrs\\[, i\\]', paste0(names(vrs)[i]), rownames(tmp1))
out <- rbind(out, tmp1)
}
}
colnames(out) <- c('Estimate', 'SE', ci[1], ci[2], 'Deff', 'Error (%)')
if ('simple' %in% names(design)) {
out <- out[ , -5]
}
ifelse (is.na(rnd), return(out), return(round(out, rnd)))
} |
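# Hypothetical usage sketch with the survey package: 'variables' must have one
# entry per column of design$variables ('total', 'mean', 'prop' or ''); columns
# named psu.id, ssu.id, pop.size or psu.size are blanked automatically. Kept
# inert on purpose.
if(FALSE) {
  library(survey)
  dat <- data.frame(psu.id = rep(1:10, each = 4), pop.size = 400, wt = 10,
                    dogs = rpois(40, 1.5), vacc = rbinom(40, 1, 0.7))
  design <- svydesign(ids = ~psu.id, weights = ~wt, data = dat)
  SummarySurvey(design, variables = c("", "", "", "total", "prop"))
}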
continuous.LR <- function(raw.pvalues, alpha = 0.05, zeta = 0.5, adaptive = TRUE, critical.values = FALSE){
if(is.null(alpha) || is.na(alpha) || !is.numeric(alpha) || alpha < 0 || alpha > 1)
stop("'alpha' must be a probability between 0 and 1!")
if(is.null(zeta) || is.na(zeta) || !is.numeric(zeta) || zeta < 0 || zeta > 1)
stop("'zeta' must be a probability between 0 and 1!")
m <- length(raw.pvalues)
a <- floor(alpha * 1:m) + 1
o <- order(raw.pvalues)
sorted.pvals <- raw.pvalues[o]
if(adaptive){
y <- pmin(1, cummax(sorted.pvals * (m - 1:m + a) / a))
}else{
y <- pmin(1, cummax(sorted.pvals * m / a))
}
idx <- which(y > zeta)
if(length(idx)){
m.rej <- min(idx) - 1
if (m.rej){
idx <- which(raw.pvalues <= sorted.pvals[m.rej])
pvec.rej <- raw.pvalues[idx]
}else{
idx <- numeric(0)
pvec.rej <- numeric(0)
}
}else{
m.rej <- m
idx <- 1:m
pvec.rej <- raw.pvalues
}
if(critical.values){
if(adaptive){
crit.constants <- zeta * a / (m - 1:m + a)
}else{
crit.constants <- zeta * a / m
}
}
output <- list(Rejected = pvec.rej, Indices = idx, Num.rejected = m.rej)
ro <- order(o)
output$Adjusted = y[ro]
if(critical.values) output$Critical.values = crit.constants
alg <- "Continuous Lehmann-Romano procedure"
output$Method <- if(!adaptive) paste("Non-Adaptive", alg) else alg
output$FDP.threshold <- alpha
output$Exceedance.probability <- zeta
output$Adaptive <- adaptive
output$Data <- list()
output$Data$raw.pvalues <- raw.pvalues
output$Data$data.name <- deparse(substitute(raw.pvalues))
class(output) <- "FDX"
return(output)
}
LR <- function(raw.pvalues, alpha = 0.05, zeta = 0.5, critical.values = FALSE){
return(continuous.LR(raw.pvalues, alpha, zeta, TRUE, critical.values))
}
NLR <- function(raw.pvalues, alpha = 0.05, zeta = 0.5, critical.values = FALSE){
return(continuous.LR(raw.pvalues, alpha, zeta, FALSE, critical.values))
} |
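## Usage sketch with hypothetical p-values: LR() runs the adaptive and NLR()
## the non-adaptive continuous Lehmann-Romano procedure, which bounds the
## false discovery proportion so that P(FDP > alpha) <= zeta.
# p <- c(0.0001, 0.002, 0.03, 0.2, 0.7)
# LR(p)$Rejected    # p-values of the rejected hypotheses
# NLR(p)$Adjusted   # adjusted p-values, returned in the original order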
library(shiny)
library(shiny.semantic)
library(shiny.router)
menu <- (
div(class = "ui vertical menu",
div(class = "item",
div(class = "header", "Demo"),
div(class = "menu",
a(class = "item", href = route_link("/"), icon("home"), "Page"),
a(class = "item", href = route_link("other"), icon("clock"), "Other"),
a(class = "item", href = route_link("ui"), icon("clock"), "UI")
)
)
)
)
page_content <- function(title, content) {
div(
h1(title),
p(content)
)
}
root_page <- page_content("Home page", actionButton("button", "Click me!"))
other_page <- page_content("Some other page", textInput("text", "Type something here"))
ui_page <- page_content("UI page", uiOutput("oko"))
router <- make_router(
  # register the home page under "/" to match route_link("/") in the menu
  route("/", root_page),
  route("other", other_page),
  route("ui", ui_page)
)
ui <- semanticPage(
title = "Router demo",
div(class = "ui container",
style = "margin-top: 1em",
div(class = "ui grid",
div(class = "four wide column",
menu
),
div(class = "twelve wide column",
router$ui
)
)
)
)
server <- shinyServer(function(input, output, session) {
router$server(input, output, session)
output$oko <- renderUI({
div("Hello there")
})
})
shinyApp(ui, server) |
search_openresult_inelan <- function(x,
s,
resultNr,
openOriginalEafFileIfAvailable=FALSE) {
	if (missing(x)) {
		stop("Corpus object in parameter 'x' is missing.")
	} else if (class(x)[[1]] != "corpus") {
		stop("Parameter 'x' needs to be a corpus object.")
	}
	if (missing(s)) {
		stop("Search object in parameter 's' is missing.")
	} else if (class(s)[[1]] != "search") {
		stop("Parameter 's' needs to be a search object.")
	}
if (missing(resultNr)) {stop("Number of the search result 'resultNr' is missing.") }
path.elan<- getOption("act.path.elan", default="")
if(path.elan=="") {
stop("ELAN not found. Please set the path to the ELAN executable in the option 'act.path.elan' using options(act.path.elan='PATHTOYOURELANEXECUTABLE')")
} else {
if(!file.exists(path.elan)) {
stop("ELAN not found. Please set the path to the ELAN executable in the option 'act.path.elan' using options(act.path.elan='PATHTOYOURELANEXECUTABLE')")
}
}
t <- x@transcripts[[s@results$transcript.name[resultNr]]]
if (is.null(t)) {
stop("Transcript not found in corpus object'.")
}
file.path.eaf <- ""
file.path.pfsx <- ""
if(openOriginalEafFileIfAvailable) {
if([email protected]=="eaf") {
if(file.exists([email protected])) {
file.path.eaf <- [email protected]
}
}
}
if (file.path.eaf == "" ) {
file.path.eaf <- file.path(tempdir(), stringr::str_c(t@name, ".eaf", collapse=""))
act::export_eaf(t, file.path.eaf)
if(openOriginalEafFileIfAvailable) {
warning("Original .eaf file has not been found. A temporary .eaf file has been created")
}
}
pfsx<- '<?xml version="1.0" encoding="UTF-8"?>
<preferences version="1.1"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://www.mpi.nl/tools/elan/Prefs_v1.1.xsd">
<pref key="SelectionBeginTime">
<Long>%s</Long>
</pref>
<pref key="SelectionEndTime">
<Long>%s</Long>
</pref>
<pref key="TimeScaleBeginTime">
<Long>%s</Long>
</pref>
<pref key="MediaTime">
<Long>%s</Long>
</pref>
</preferences>'
	startMilliSec <- round(s@results$startSec[resultNr]*1000, 0)
	endMilliSec   <- round(s@results$endSec[resultNr]*1000, 0)
	pfsx.1 <- sprintf(pfsx, startMilliSec, endMilliSec, max(0, startMilliSec-1000), startMilliSec)
file.path.pfsx<- stringr::str_replace(file.path.eaf, pattern='\\.eaf', replacement=".pfsx")
fileConn <- file(file.path.pfsx, open="wb")
writeBin(charToRaw(pfsx.1), fileConn, endian="little")
close(fileConn)
for (i in 1:10) {
if(file.exists(file.path.pfsx)) {
break
}
Sys.sleep(0.02)
}
if(file.exists(file.path.pfsx)) {
if (helper_detect_os()=="windows" ){
cmd <- sprintf("%s %s", shQuote(path.elan), shQuote(file.path.eaf))
} else {
cmd <- sprintf("open %s -a %s", shQuote(file.path.eaf), shQuote(path.elan))
}
rslt <- system(cmd, wait=FALSE)
}
} |
rTableICC.RxC <-
function(p=NULL,theta,M,row.margins=NULL,col.margins=NULL,sampling="Multinomial",N=1,lambda=NULL,zero.clusters=FALSE,print.regular=TRUE,
print.raw=FALSE) UseMethod("rTableICC.RxC") |
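## Note: this is only the S3 generic; UseMethod() dispatches on the class of
## the first argument, so a call such as rTableICC.RxC(p = probs, theta = th,
## M = 2) is handled by a method like rTableICC.RxC.default() defined
## elsewhere in the package.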
library(httk)
calc_css(chem.name="endrin")
calc_stats(dtxsid="DTXSID0020442",days=3)
quit("no") |
kweights <- function (x, kernel = c("Truncated", "Bartlett", "Parzen", "Tukey-Hanning",
"Quadratic Spectral"), normalize = FALSE)
{
kernel <- match.arg(kernel)
if (normalize) {
ca <- switch(kernel, Truncated = 2, Bartlett = 2/3, Parzen = 0.539285,
`Tukey-Hanning` = 3/4, `Quadratic Spectral` = 1)
}
else ca <- 1
switch(kernel, Truncated = {
ifelse(ca * x > 1, 0, 1)
}, Bartlett = {
ifelse(ca * x > 1, 0, 1 - abs(ca * x))
}, Parzen = {
ifelse(ca * x > 1, 0, ifelse(ca * x < 0.5, 1 - 6 * (ca *
x)^2 + 6 * abs(ca * x)^3, 2 * (1 - abs(ca * x))^3))
}, `Tukey-Hanning` = {
ifelse(ca * x > 1, 0, (1 + cos(pi * ca * x))/2)
}, `Quadratic Spectral` = {
y <- 6 * pi * x/5
ifelse(x < 1e-04, 1, 3 * (1/y)^2 * (sin(y)/y - cos(y)))
})
} |
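## Example: with normalize = FALSE (so ca = 1), Bartlett weights decay
## linearly from 1 at x = 0 to 0 at x >= 1:
kweights(c(0, 0.25, 0.5, 1), kernel = "Bartlett")  # 1.00 0.75 0.50 0.00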
'BIOMOD_ModelingOptions' <- function(
GLM = NULL,
GBM = NULL,
GAM = NULL,
CTA = NULL,
ANN = NULL,
SRE = NULL,
FDA = NULL,
MARS = NULL,
RF = NULL,
MAXENT.Phillips = NULL
){
opt <- new('BIOMOD.Model.Options')
if(!is.null(GLM)){
if(!is.null(GLM$type)) { opt@GLM$type <- GLM$type }
if(!is.null(GLM$interaction.level)) { opt@GLM$interaction.level <- GLM$interaction.level }
if(!is.null(GLM$myFormula)) { opt@GLM$myFormula <- GLM$myFormula }
if(!is.null(GLM$test)) { opt@GLM$test <- GLM$test }
if(!is.null(GLM$family)) {
fam.test <- TRUE
if(inherits(GLM$family, 'family')){
opt@GLM$family <- GLM$family
} else{
if( is.character(GLM$family)){
if(! unlist(strsplit(GLM$family,"[/(]"))[1] %in% c('binomial', 'gaussian', 'Gamma', 'inverse.gaussian', 'poisson', 'quasi', 'quasibinomial', 'quasipoisson')){ fam.test <- FALSE}
if(grepl(')', GLM$family)){
opt@GLM$family <- eval(parse(text=GLM$family))
} else{
opt@GLM$family <- eval(parse(text=paste(GLM$family,"()", sep="")))
}
} else{ fam.test <- FALSE }
}
if(!fam.test){
cat("\n!!! invalid GLM$family given -> binomial(link = 'logit') was automatically set up !!!")
opt@GLM$family <- binomial(link = 'logit')
}
}
if(!is.null(GLM$mustart)) { opt@GLM$mustart <- GLM$mustart }
if(!is.null(GLM$control)) { opt@GLM$control <- GLM$control }
}
if(!is.null(GBM)){
if(!is.null(GBM$distribution )) { opt@GBM$distribution <- GBM$distribution }
if(!is.null(GBM$n.trees )) { opt@GBM$n.trees <- GBM$n.trees }
if(!is.null(GBM$interaction.depth )) { opt@GBM$interaction.depth <- GBM$interaction.depth }
if(!is.null(GBM$n.minobsinnode )) { opt@GBM$n.minobsinnode <- GBM$n.minobsinnode }
if(!is.null(GBM$shrinkage )) { opt@GBM$shrinkage <- GBM$shrinkage }
if(!is.null(GBM$bag.fraction )) { opt@GBM$bag.fraction <- GBM$bag.fraction }
if(!is.null(GBM$train.fraction )) { opt@GBM$train.fraction <- GBM$train.fraction }
if(!is.null(GBM$cv.folds )) { opt@GBM$cv.folds <- GBM$cv.folds }
if(!is.null(GBM$keep.data )) { opt@GBM$keep.data <- GBM$keep.data }
if(!is.null(GBM$verbose )) { opt@GBM$verbose <- GBM$verbose }
if(!is.null(GBM$perf.method )) { opt@GBM$perf.method <- GBM$perf.method }
if(!is.null(GBM$n.cores)) { opt@GBM$n.cores <- GBM$n.cores } else { opt@GBM$n.cores <- NULL }
}
if(!is.null(GAM)){
if(!is.null(GAM$algo )) { opt@GAM$algo <- GAM$algo }
if(!is.null(GAM$type )) { opt@GAM$type <- GAM$type }
if(!is.null(GAM$k )) { opt@GAM$k <- GAM$k } else{
if(opt@GAM$algo == 'GAM_gam'){
opt@GAM$k <- 4
} else{
opt@GAM$k <- -1
}
}
if(!is.null(GAM$interaction.level )) { opt@GAM$interaction.level <- GAM$interaction.level }
if(!is.null(GAM$myFormula )) { opt@GAM$myFormula <- GAM$myFormula }
if(!is.null(GAM$family)) {
fam.test <- TRUE
if(inherits(GAM$family, 'family')){
opt@GAM$family <- GAM$family
} else{
if( is.character(GAM$family)){
if(! unlist(strsplit(GAM$family,"[/(]"))[1] %in% c('binomial', 'gaussian', 'Gamma', 'inverse.gaussian', 'poisson', 'quasi', 'quasibinomial', 'quasipoisson')){ fam.test <- FALSE}
if(grepl(')', GAM$family)){
opt@GAM$family <- eval(parse(text=GAM$family))
} else{
opt@GAM$family <- eval(parse(text=paste(GAM$family,"()", sep="")))
}
} else{ fam.test <- FALSE }
}
if(!fam.test){
cat("\n!!! invalid GAM$family given -> binomial(link = 'logit') was automatically set up !!!")
opt@GAM$family <- binomial(link = 'logit')
}
}
if(is.null(GAM$control )) {
if(opt@GAM$algo == 'GAM_gam'){
requireNamespace('gam', quietly = TRUE)
opt@GAM$control <- gam::gam.control()
} else{ opt@GAM$control <- mgcv::gam.control() }
} else{
user.control.list <- GAM$control
if(opt@GAM$algo == 'GAM_gam'){
default.control.list <- gam::gam.control()
} else{
default.control.list <- mgcv::gam.control()
}
control.list <- lapply(names(default.control.list), function(x){
if(x %in% names(user.control.list)){
return(user.control.list[[x]])
} else {
return(default.control.list[[x]])
}
})
names(control.list) <- names(default.control.list)
opt@GAM$control <- control.list
}
if(!is.null(GAM$method )) { opt@GAM$method <- GAM$method }
if(!is.null(GAM$optimizer )) { opt@GAM$optimizer <- GAM$optimizer }
if(!is.null(GAM$select )) { opt@GAM$select <- GAM$select }
if(!is.null(GAM$knots )) { opt@GAM$knots <- GAM$knots }
if(!is.null(GAM$paraPen )) { opt@GAM$paraPen <- GAM$paraPen }
} else{
if(opt@GAM$algo == 'GAM_gam'){
opt@GAM$control <- gam::gam.control()
opt@GAM$k <- 4
} else{
opt@GAM$control <- mgcv::gam.control()
opt@GAM$k <- -1
}
}
if(!is.null(CTA)){
if(!is.null(CTA$method )) { opt@CTA$method <- CTA$method }
if(!is.null(CTA$parms )) { opt@CTA$parms <- CTA$parms }
if(!is.null(CTA$control )) { opt@CTA$control <- CTA$control }
if(!is.null(CTA$cost )) { opt@CTA$cost <- CTA$cost }
}
if(!is.null(ANN)){
if(!is.null(ANN$NbCV )) { opt@ANN$NbCV <- ANN$NbCV }
if(!is.null(ANN$size )) { opt@ANN$size <- ANN$size }
if(!is.null(ANN$decay )) { opt@ANN$decay <- ANN$decay }
if(!is.null(ANN$rang )) { opt@ANN$rang <- ANN$rang }
if(!is.null(ANN$maxit )) { opt@ANN$maxit <- ANN$maxit }
}
if(!is.null(SRE)){
if(!is.null(SRE$quant )) { opt@SRE$quant <- SRE$quant }
}
if(!is.null(FDA)){
if(!is.null(FDA$method )) { opt@FDA$method <- FDA$method }
if(!is.null(FDA$add_args )) { opt@FDA$add_args <- FDA$add_args }
}
if(!is.null(MARS)){
if(!is.null(MARS$type)) { opt@MARS$type <- MARS$type }
if(!is.null(MARS$interaction.level)) { opt@MARS$interaction.level <- MARS$interaction.level }
if(!is.null(MARS$myFormula)) { opt@MARS$myFormula <- MARS$myFormula }
if(!is.null(MARS$nk )) { opt@MARS$nk <- MARS$nk }
if(!is.null(MARS$penalty )) { opt@MARS$penalty <- MARS$penalty }
if(!is.null(MARS$thresh )) { opt@MARS$thresh <- MARS$thresh }
if(!is.null(MARS$nprune )) { opt@MARS$nprune <- MARS$nprune }
if(!is.null(MARS$pmethod )) { opt@MARS$pmethod <- MARS$pmethod }
}
if(!is.null(RF)){
if(!is.null(RF$type )) { opt@RF$type <- RF$type }
if(!is.null(RF$ntree )) { opt@RF$ntree <- RF$ntree }
if(!is.null(RF$mtry )) { opt@RF$mtry <- RF$mtry }
if(!is.null(RF$nodesize )) { opt@RF$nodesize <- RF$nodesize }
if(!is.null(RF$maxnodes )) { opt@RF$maxnodes <- RF$maxnodes }
}
if(!is.null(MAXENT.Phillips)){
if(!is.null(MAXENT.Phillips$path_to_maxent.jar )) {
[email protected]$path_to_maxent.jar <- normalizePath(sub("maxent.jar", "", MAXENT.Phillips$path_to_maxent.jar))
} else {[email protected]$path_to_maxent.jar <- getwd()}
if(!is.null(MAXENT.Phillips$memory_allocated )) { [email protected]$memory_allocated <- MAXENT.Phillips$memory_allocated }
if(!is.null(MAXENT.Phillips$background_data_dir )) { [email protected]$background_data_dir <- MAXENT.Phillips$background_data_dir }
if(!is.null(MAXENT.Phillips$maximumbackground )) { [email protected]$maximumbackground <- MAXENT.Phillips$maximumbackground }
if(!is.null(MAXENT.Phillips$maximumiterations )) { [email protected]$maximumiterations <- MAXENT.Phillips$maximumiterations }
if(!is.null(MAXENT.Phillips$visible )) { [email protected]$visible <- MAXENT.Phillips$visible }
if(!is.null(MAXENT.Phillips$linear )) { [email protected]$linear <- MAXENT.Phillips$linear }
if(!is.null(MAXENT.Phillips$quadratic )) { [email protected]$quadratic <- MAXENT.Phillips$quadratic }
if(!is.null(MAXENT.Phillips$product )) { [email protected]$product <- MAXENT.Phillips$product }
if(!is.null(MAXENT.Phillips$threshold )) { [email protected]$threshold <- MAXENT.Phillips$threshold }
if(!is.null(MAXENT.Phillips$hinge )) { [email protected]$hinge <- MAXENT.Phillips$hinge }
if(!is.null(MAXENT.Phillips$lq2lqptthreshold )) { [email protected]$lq2lqptthreshold <- MAXENT.Phillips$lq2lqptthreshold }
if(!is.null(MAXENT.Phillips$l2lqthreshold )) { [email protected]$l2lqthreshold <- MAXENT.Phillips$l2lqthreshold }
if(!is.null(MAXENT.Phillips$hingethreshold )) { [email protected]$hingethreshold <- MAXENT.Phillips$hingethreshold }
if(!is.null(MAXENT.Phillips$beta_threshold )) { [email protected]$beta_threshold <- MAXENT.Phillips$beta_threshold }
if(!is.null(MAXENT.Phillips$beta_categorical )) { [email protected]$beta_categorical <- MAXENT.Phillips$beta_categorical }
if(!is.null(MAXENT.Phillips$beta_lqp )) { [email protected]$beta_lqp <- MAXENT.Phillips$beta_lqp }
if(!is.null(MAXENT.Phillips$beta_hinge )) { [email protected]$beta_hinge <- MAXENT.Phillips$beta_hinge }
if(!is.null(MAXENT.Phillips$betamultiplier )) { [email protected]$betamultiplier <- MAXENT.Phillips$betamultiplier }
if(!is.null(MAXENT.Phillips$defaultprevalence )) { [email protected]$defaultprevalence <- MAXENT.Phillips$defaultprevalence }
} else{
[email protected]$path_to_maxent.jar <- getwd()
}
test <- as.logical(validObject(object = opt, test = TRUE, complete = FALSE))
if(!test){
cat("\n\n!!! NULL object returned because of invalid parameters given !!!")
return(NULL)
}
return(opt)
}
Print_Default_ModelingOptions <- function(){
  cat('\n Default modeling options. Copy, change what you want, and paste it as an argument to BIOMOD_ModelingOptions\n\n')
opt_tmp <- BIOMOD_ModelingOptions()
print(opt_tmp)
} |
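## Usage sketch: pass per-algorithm option lists and leave the rest NULL to
## keep the defaults (argument values here are illustrative only):
# myOptions <- BIOMOD_ModelingOptions(
#   GLM = list(type = "quadratic", interaction.level = 0),
#   RF  = list(ntree = 500)
# )
# Print_Default_ModelingOptions()  # print the defaults to copy and edit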
options(width = 80)
library(knitr)
knit_hooks$set(pngquant = hook_pngquant)
pngquant <- "--speed=1 --quality=0-25"
if (!nzchar(Sys.which("pngquant"))) pngquant <- NULL
logK <- "log <i>K</i>"
hires <- FALSE
res1.lo <- 150
res1.hi <- 256
res1 <- ifelse(hires, res1.hi, res1.lo)
res2.lo <- 200
res2.hi <- 400
res2 <- ifelse(hires, res2.hi, res2.lo)
library(CHNOSZ)
reset()
cat("```")
cat("\n")
cat(paste0(ifelse(hires, "
cat("\n")
cat(paste0(ifelse(hires, "", "
cat("\n")
cat(paste0(ifelse(hires, "
cat("\n")
cat(paste0(ifelse(hires, "", "
cat("\n")
cat("```")
par(mfrow = c(1, 2))
basis("CHNOS+")
species(c("CH4", "CO2", "HCO3-", "CO3-2"))
aC <- affinity(pH = c(0, 14), O2 = c(-75, -60))
dC <- diagram(aC, dx = c(0, 1, 0, 0), dy = c(0, 1, 0, 0))
species(c("H2S", "HS-", "HSO4-", "SO4-2"))
aS <- affinity(aC)
dS <- diagram(aS, add = TRUE, col = 4, col.names = 4, dx = c(0, -0.5, 0, 0))
aCS <- mash(dC, dS)
srt <- c(0, 0, 90, 0, 0, 0, 90, 0, 0, 0)
cex.names <- c(1, 1, 0.8, 1, 1, 1, 1, 1, 1, 1)
dy <- c(0, 0, 0, -0.2, 0, 0, 0, 0, 0, 0)
diagram(aCS, srt = srt, cex.names = cex.names, dy = dy)
legend("topright", legend = lTP(25, 1), bty = "n")
Fe.cr <- c(Fe = 0, FeO = -1.72768, Fe3O4 = -1.85838, Fe2O3 = -1.90708)
V.cr <- c(V = 0, V2O3 = -2.52787, V3O5 = -2.52516, VO2 = -2.48447, V3O7 = -2.32789, V2O5 = -2.29480)
natom.Fe <- sapply(makeup(names(Fe.cr)), sum)
Fe.cr <- Fe.cr * natom.Fe
natom.V <- sapply(makeup(names(V.cr)), sum)
V.cr <- V.cr * natom.V
eVtoJ <- function(eV) eV * 1.602176634e-19 * 6.02214076e23
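## Unit check for eVtoJ(): multiplying by the elementary charge
## (1.602176634e-19 J/eV) and the Avogadro constant (6.02214076e23 /mol)
## converts eV per atom to J per mole, so eVtoJ(1) is about 96485.33 J/mol.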
Fe.cr <- eVtoJ(Fe.cr)
V.cr <- eVtoJ(V.cr)
Fe.aq <- 1000 * c("Fe+2" = -78.90, "Fe+3" = -4.7, "FeO2-2" = -295.3,
"FeOH+" = -277.4, "FeOH+2" = -229.41, "HFeO2-" = -377.7,
"Fe(OH)2+" = -438.0, "Fe(OH)3" = -659.3,
"FeO2-" = -368.2,
"FeO4-2" = -322.2
)
V.aq <- 1000 * c("VO+2" = -446.4, "VO2+" = -587.0, "VO3-" = -783.6, "VO4-3" = -899.0,
"V2O7-4" = -1719, "HVO4" = -745.1, "HVO4-2" = -974.8,
"VOH2O2+3" = -523.4, "VO2H2O2+" = -746.3, "V2HO7-3" = 1792.2, "V2H3O7-" = -1863.8,
"HV10O28-5" = -7702, "H2V10O28-4" = -7723
)
Fe3O4 <- 1000 * -1015.4
V3O5 <- 1000 * -1803
Fe.corr <- (Fe.cr["Fe3O4"] - Fe3O4) / 3
V.corr <- (V.cr["V3O5"] - V3O5) / 2
nFe <- sapply(makeup(names(Fe.aq)), "[", "Fe")
Fe.aq <- Fe.aq + nFe * Fe.corr
nV <- sapply(makeup(names(V.aq)), "[", "V")
V.aq <- V.aq + nV * V.corr
modfun <- function(x, state) sapply(seq_along(x), function(i) {
mod.OBIGT(names(x)[i], formula = names(x)[i], state = state, E_units = "J", G = x[i])
})
iFe.cr <- modfun(Fe.cr, "cr")
iFe.aq <- modfun(Fe.aq, "aq")
iV.cr <- modfun(V.cr, "cr")
iV.aq <- modfun(V.aq, "aq")
FeV.cr <- c(FeV = -0.12815, FeV3 = -0.17114, Fe3V = -0.12832, Fe2V4O13 = -2.23967, FeVO4 = -1.75573)
natom.FeV <- sapply(makeup(names(FeV.cr)), sum)
FeV.cr <- FeV.cr * natom.FeV
FeV.cr <- eVtoJ(FeV.cr)
iFeV.cr <- modfun(FeV.cr, "cr")
par(mfrow = c(1, 3))
loga.Fe <- -5
loga.V <- -5
basis(c("VO+2", "Fe+2", "H2O", "e-", "H+"))
species(c(iFe.aq, iFe.cr))
species(1:length(iFe.aq), loga.Fe)
aFe <- affinity(pH = c(4, 10, res1), Eh = c(-1.5, 0, res1))
dFe <- diagram(aFe, plot.it = FALSE)
species(c(iV.aq, iV.cr))
species(1:length(iV.aq), loga.V)
aV <- affinity(aFe)
dV <- diagram(aV, plot.it = FALSE)
species(iFeV.cr)
aFeV <- affinity(aFe)
dFeV <- diagram(aFeV, plot.it = FALSE, bold = TRUE)
a11 <- mix(dFe, dV, dFeV, c(1, 1))
iV2O3 <- info("V2O3")
iFeO <- info("FeO", "cr")
iFe3V <- info("Fe3V")
srt <- rep(0, nrow(a11$species))
srt[a11$species$ispecies == paste(iFeO, iV2O3, sep = ",")] <- 90
srt[a11$species$ispecies == paste(iV2O3, iFe3V, sep = ",")] <- -13
diagram(a11, min.area = 0.01, srt = srt)
title("Fe:V = 1:1")
label.figure(lTP(25, 1), xfrac = 0.12)
a13 <- mix(dFe, dV, dFeV, c(1, 3))
srt <- rep(0, nrow(a13$species))
srt[a13$species$ispecies == paste(iFeO, iV2O3, sep = ",")] <- 90
srt[a13$species$ispecies == paste(iV2O3, iFe3V, sep = ",")] <- -13
diagram(a13, min.area = 0.01, srt = srt)
title("Fe:V = 1:3")
a15 <- mix(dFe, dV, dFeV, c(1, 5))
iFeV3 <- info("FeV3")
srt <- rep(0, nrow(a15$species))
srt[a15$species$ispecies == paste(iFeO, iV2O3, sep = ",")] <- 90
srt[a15$species$ispecies == paste(iV2O3, iFe3V, sep = ",")] <- -13
srt[a15$species$ispecies == paste(iV2O3, iFeV3, sep = ",")] <- -13
diagram(a15, min.area = 0.01, srt = srt)
title("Fe:V = 1:5")
layout(t(matrix(1:3)), widths = c(1, 1, 0.2))
par(cex = 1)
basis(c("VO+2", "Fe+2", "H2O", "e-", "H+"))
species(c(iFe.aq, iFe.cr))$name
species(1:length(iFe.aq), loga.Fe)
aFe <- affinity(pH = c(0, 14, res2), Eh = c(-1.5, 2, res2))
dFe <- diagram(aFe, plot.it = FALSE)
species(c(iV.aq, iV.cr))$name
species(1:length(iV.aq), loga.V)
aV <- affinity(aFe)
dV <- diagram(aV, plot.it = FALSE)
species(iFeV.cr)
aFeV <- affinity(aFe)
dFeV <- diagram(aFeV, plot.it = FALSE, bold = TRUE)
a11 <- mix(dFe, dV, dFeV, c(1, 1))
iV2O3 <- info("V2O3")
iFe3V <- info("Fe3V")
iVO4m3 <- info("VO4-3")
iFe2O3 <- info("Fe2O3")
srt <- rep(0, nrow(a11$species))
srt[a11$species$ispecies == paste(iV2O3, iFe3V, sep = ",")] <- -13
srt[a11$species$ispecies == paste(iFe2O3, iVO4m3, sep = ",")] <- 90
d11 <- diagram(a11, min.area = 0.01, srt = srt)
water.lines(d11, col = "orangered")
species("FeVO4")
aFeVO4 <- affinity(aFe)
aFeVO4_vs_stable <- aFeVO4$values[[1]] - d11$predominant.values
diagram(a11, fill = NA, names = FALSE, limit.water = FALSE)
opar <- par(usr = c(0, 1, 0, 1))
col <- rev(hcl.colors(128, palette = "YlGnBu", alpha = 0.8))
image(aFeVO4_vs_stable, col = col, add = TRUE)
par(opar)
diagram(a11, fill = NA, add = TRUE, names = FALSE)
water.lines(d11, col = "orangered")
thermo.axis()
imax <- arrayInd(which.max(aFeVO4_vs_stable), dim(aFeVO4_vs_stable))
pH <- d11$vals$pH[imax[1]]
Eh <- d11$vals$Eh[imax[2]]
points(pH, Eh, pch = 10, cex = 2, lwd = 2, col = "gold")
stable <- d11$names[d11$predominant[imax]]
text(pH, Eh, stable, adj = c(0.5, -1), cex = 1.2, col = "gold")
par(mar = c(3, 0, 2.5, 2.7))
plot.new()
levels <- 1:length(col)
plot.window(xlim = c(0, 1), ylim = range(levels), xaxs = "i", yaxs = "i")
rect(0, levels[-length(levels)], 1, levels[-1L], col = rev(col), border = NA)
box()
arange <- rev(range(aFeVO4_vs_stable))
Jrange <- convert(convert(arange, "G"), "J")
eVrange <- Jrange / 1.602176634e-19 / 6.02214076e23 / 6
ylim <- formatC(eVrange, digits = 3, format = "f")
axis(4, at = range(levels), labels = ylim)
mtext(quote(Delta*italic(G)[pbx]*", eV/atom"), side = 4, las = 0, line = 1)
plot(1:10)
imax <- arrayInd(which.max(aFeVO4_vs_stable), dim(aFeVO4_vs_stable))
pH <- d11$vals$pH[imax[1]]
Eh <- d11$vals$Eh[imax[2]]
points(pH, Eh, pch = 10, cex = 2, lwd = 2, col = "gold")
stable <- d11$names[d11$predominant[imax]]
text(pH, Eh, stable, adj = c(0.3, 2), cex = 1.2, col = "gold")
range(aFeVO4_vs_stable[d11$predominant == d11$predominant[imax]])
b <- basis(c("Fe2O3", "Fe2V4O13", "O2"))
cal_mol <- subcrt("FeVO4", 1, T = 25)$out$G
convert(cal_mol, "logK")
J_mol <- convert(cal_mol, "J")
eV_mol <- J_mol / 1.602176634e-19
eV_atom <- eV_mol / 6.02214076e23 / 6
round(eV_atom, 3)
stopifnot(round(eV_atom, 3) == 0.415)
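## Arithmetic above: subcrt() returns the Gibbs energy in cal/mol here
## (hence the 'cal_mol' name); converting to J/mol, dividing by the
## elementary charge gives eV/mol, and dividing by the Avogadro constant
## and the 6 atoms in FeVO4 gives about 0.415 eV/atom.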
reset()
logaH2S <- -2
T <- 200
pH <- c(0, 14, res2)
O2 <- c(-48, -33, res2)
basis(c("Cu+", "pyrite", "H2S", "oxygen", "H2O", "H+"))
basis("H2S", logaH2S)
S.aq <- c("H2S", "HS-", "HSO4-", "SO4-2")
Fe.cr <- c("pyrite", "pyrrhotite", "magnetite", "hematite")
Fe.abbrv <- c("Py", "Po", "Mag", "Hem")
species(Fe.cr)
mFe <- mosaic(S.aq, pH = pH, O2 = O2, T = T)
diagram(mFe$A.bases, lty = 2, col = 4, col.names = 4, italic = TRUE, dx = c(0, 1, 0, 0), dy = c(-1.5, 0, 1, 0))
dFe <- diagram(mFe$A.species, add = TRUE, lwd = 2, names = Fe.abbrv, dx = c(0, 0.5, 0, 0), dy = c(-1, 0, 0.5, 0))
FeCu.cr <- c("chalcopyrite", "bornite")
Cu.cr <- c("copper", "cuprite", "tenorite", "chalcocite", "covellite")
FeCu.abbrv <- c("Ccp", "Bn", "Cu", "Cpr", "Tnr", "Cct", "Cv")
species(c(FeCu.cr, Cu.cr))
mFeCu <- mosaic(list(S.aq, Fe.cr), pH = pH, O2 = O2,
T = T, stable = list(NULL, dFe$predominant))
diagram(mFeCu$A.species, add = TRUE, col = 2, col.names = 2, bold = TRUE, names = FeCu.abbrv, dy = c(0, 0, 0, 0, 0, 1, 0))
col <- c("
diagram(mFeCu$A.species, add = TRUE, col = col, lwd = 2, col.names = col, bold = TRUE, names = FeCu.abbrv)
TPS <- c(describe.property(c("T", "P"), c(T, "Psat")), expression(sum(S) == 0.01*m))
legend("topright", TPS, bty = "n")
title("Cu-Fe-S-O-H (minerals only)", font.main = 1)
pH <- c(0, 14, res2)
O2 <- c(-48, -33, res2)
T <- 200
logmS <- -2
m_NaCl <- 0.1
logm_aq <- -6
S.aq <- c("H2S", "HS-", "HSO4-", "SO4-2")
Fe.cr <- c("pyrite", "pyrrhotite", "magnetite", "hematite")
Fe.abbrv <- c("Py", "Po", "Mag", "Hem")
FeCu.cr <- c("chalcopyrite", "bornite")
Cu.cr <- c("copper", "cuprite", "tenorite", "chalcocite", "covellite")
FeCu.abbrv <- c("Ccp", "Bn", "Cu", "Cpr", "Tnr", "Cct", "Cv")
iFe.aq <- retrieve("Fe", c("S", "O", "H", "Cl"), "aq")
Fe.aq <- info(iFe.aq)$name
iCu.aq <- retrieve("Cu", c("S", "O", "H", "Cl"), "aq")
Cu.aq <- info(iCu.aq)$name
TPexpr <- describe.property(c("T", "P"), c(T, "Psat"))
Sexpr <- as.expression(bquote(sum(S) == .(10^logmS)*m))
NaClexpr <- as.expression(bquote(NaCl == .(m_NaCl)*m))
aqexpr <- as.expression(bquote("("*aq*")"[italic(i)] == 10^.(logm_aq)*m))
basis(c("Cu+", "pyrite", "H2S", "oxygen", "H2O", "H+", "Cl-"))
basis("H2S", logmS)
nacl <- NaCl(T = T, P = "Psat", m_tot = m_NaCl)
basis("Cl-", log10(nacl$m_Cl))
species(Fe.cr)
species(iFe.aq, logm_aq, add = TRUE)
mFe <- mosaic(S.aq, pH = pH, O2 = O2, T = T, IS = nacl$IS)
dFe <- diagram(mFe$A.species, lwd = 0, names = FALSE)
species(c(FeCu.cr, Cu.cr))
species(iCu.aq, logm_aq, add = TRUE)
DG <- convert(-logm_aq, "G", T = convert(T, "K"))
G.orig <- info(iFe.aq)$G
G.new <- G.orig + DG
mod.OBIGT(iFe.aq, G = G.new)
predom <- dFe$predominant
ipredom <- sort(unique(as.numeric(predom)))
for(i in seq_along(ipredom)) predom[dFe$predominant == ipredom[i]] <- i
Fe.predom <- c(Fe.cr, Fe.aq)[ipredom]
mFeCu <- mosaic(list(S.aq, Fe.predom), pH = pH, O2 = O2, T = T, stable = list(NULL, predom), IS = nacl$IS)
bold <- c(rep(TRUE, length(FeCu.abbrv)), rep(FALSE, length(Cu.aq)))
names <- c(FeCu.abbrv, Cu.aq)
srt <- dx <- dy <- rep(0, length(names))
cex <- rep(1, length(names))
dx[names == "Cu"] <- -1.5
dx[names == "Bn"] <- 1.4
dx[names == "CuHS"] <- 1
dx[names == "Cu+"] <- -0.5
dy[names == "Cu"] <- 3
dx[names == "Cct"] <- -2
dy[names == "Cct"] <- 4
dy[names == "CuHS"] <- 1
dy[names == "Bn"] <- -0.9
dx[names == "CuCl2-"] <- -1
dy[names == "CuCl2-"] <- 2
cex[names == "Bn"] <- 0.8
srt[names == "Bn"] <- 85
diagram(mFeCu$A.species, add = TRUE, col = 2, col.names = 2, names = names, bold = bold, dx = dx, dy = dy, cex.names = cex, srt = srt)
text(12.3, -47, "Cu", col = 2, font = 2)
col.names <- col <- rep(NA, nrow(mFeCu$A.species$species))
col[3] <- "
col.names[1] <- "
diagram(mFeCu$A.species, add = TRUE, col = col, lwd = 2, col.names = col.names, bold = TRUE, names = names, fill = NA)
diagram(mFe$A.bases, add = TRUE, lty = 2, col = 4, names = FALSE, fill = NA)
bold <- c(rep(TRUE, length(Fe.abbrv)), rep(FALSE, length(Fe.aq)))
names <- c(Fe.abbrv, Fe.aq)
srt <- dx <- dy <- rep(0, length(names))
cex <- rep(1, length(names))
dy[names == "Hem"] <- 0.5
dy[names == "Mag"] <- -0.8
dx[names == "Hem"] <- -0.5
dx[names == "Mag"] <- 0.25
dx[names == "Fe+2"] <- 0.5
dx[names == "FeO2-"] <- 1
dy[names == "FeO2-"] <- -3
dx[names == "HFeO2-"] <- -0.5
dy[names == "HFeO2-"] <- 1
srt[names == "Mag"] <- 83
srt[names == "FeSO4"] <- 90
diagram(mFe$A.species, add = TRUE, lwd = 2, names = names, bold = bold, dx = dx, dy = dy, cex.names = cex, srt = srt, fill = NA)
legend("topright", c(TPexpr, Sexpr, NaClexpr, aqexpr), bty = "n")
title("Cu-Fe-S-O-H-Cl (minerals and aqueous species)", font.main = 1)
OBIGT()
minerals <- list(Fe.cr = Fe.cr, Cu.cr = Cu.cr, FeCu.cr = FeCu.cr)
aqueous <- list(S.aq = S.aq, Fe.aq = Fe.aq, Cu.aq = Cu.aq)
allspecies <- c(minerals, aqueous)
iall <- lapply(allspecies, info)
allkeys <- lapply(iall, function(x) thermo.refs(x)$key)
allkeys
allyears <- lapply(iall, function(x) thermo.refs(x)$year)
o <- order(unlist(allyears))
cat(paste(paste0("@", unique(unlist(allkeys)[o])), collapse = "; "))
par(mfrow = c(2, 2))
logaH2S <- -2
T <- 200
pH <- c(0, 14)
O2 <- c(-60, -25)
basis(c("Cu+", "Fe+2", "H2S", "oxygen", "H2O", "H+"))
basis("H2S", logaH2S)
S.aq <- c("H2S", "HS-", "HSO4-", "SO4-2")
Fe.cr <- c("pyrite", "pyrrhotite", "magnetite", "hematite")
Cu.cr <- c("copper", "cuprite", "tenorite", "chalcocite", "covellite")
FeCu.cr <- c("chalcopyrite", "bornite")
species(Fe.cr)
mFe <- mosaic(S.aq, pH = pH, O2 = O2, T = T)
diagram(mFe$A.bases, lty = 2, col = 4, col.names = 4, italic = TRUE, dx = c(0, 1, 0, 0), dy = c(-1, 0, -2, 0))
names <- info(info(Fe.cr))$abbrv
dFe <- diagram(mFe$A.species, add = TRUE, names = names)
species(Cu.cr)
mCu <- mosaic(S.aq, pH = pH, O2 = O2, T = T)
names <- info(info(Cu.cr))$abbrv
col.names <- rep(2, length(names))
col.names[1] <- 0
dCu <- diagram(mCu$A.species, add = TRUE, col = 2, col.names = col.names, names = names)
text(12, -55, "Cu", col = 2)
legend("topright", legend = lTP(T, "Psat"), bty = "n")
title(paste("Fe-S-O-H and Cu-S-O-H; Total S =", 10^logaH2S, "m"))
species(FeCu.cr)
mFeCu <- mosaic(S.aq, pH = pH, O2 = O2, T = T)
names <- info(info(FeCu.cr))$abbrv
dFeCu <- diagram(mFeCu$A.species, plot.it = FALSE, names = names)
fill <- function(a) {
ifelse(grepl("Ccp", a$species$name), "
ifelse(grepl("Bn", a$species$name), "
)}
srt <- function(a) ifelse(a$species$name %in% c("Mag+Bn", "Mag+Ccp"), 80, 0)
cex.names <- function(a) ifelse(a$species$name %in% c("Mag+Bn", "Mag+Ccp", "Mag+Cct"), 0.8, 1)
dx <- function(a) sapply(a$species$name, switch, "Mag+Bn" = 0.15, "Mag+Cct" = 0.5, 0)
dy <- function(a) sapply(a$species$name, switch, "Py+Bn" = -1, "Po+Bn" = -0.8, "Po+Cu" = -0.8, "Mag+Ccp" = -1, 0)
a11 <- mix(dFe, dCu, dFeCu)
diagram(a11, fill = fill(a11), srt = srt(a11), min.area = 0.01, dx = dx(a11), dy = dy(a11), cex.names = cex.names(a11))
title("Fe:Cu = 1:1")
a21 <- mix(dFe, dCu, dFeCu, c(2, 1))
diagram(a21, fill = fill(a21), srt = srt(a21), min.area = 0.01, dx = dx(a21), dy = dy(a21), cex.names = cex.names(a21))
title("Fe:Cu = 2:1")
a12 <- mix(dFe, dCu, dFeCu, c(1, 2))
diagram(a12, fill = fill(a12), srt = srt(a12), min.area = 0.01, dx = dx(a12), dy = dy(a12), cex.names = cex.names(a12))
title("Fe:Cu = 1:2")
T <- 125
layout(matrix(c(1, 2, 3, 3), nrow = 2), widths = c(1, 1.5))
basis(c("copper", "hematite", "S2", "oxygen", "H2O", "H+", "Cl-"))
bFe <- species(c("hematite", "magnetite", "pyrite"))$name
aFe <- affinity(S2 = c(-34, -10, res1), O2 = c(-55, -40, res1), T = T)
oFe <- order(aFe$species$S2 - aFe$species$O2)
fill <- terrain.colors(length(oFe), alpha = 0.2)[oFe]
abbrv <- info(aFe$species$ispecies)$abbrv
dFe <- diagram(aFe, names = abbrv, fill = fill)
title("Fe-S-O-H")
bCu <- species(c("copper", "chalcocite", "covellite", "chalcopyrite", "bornite"))$name
mCu <- mosaic(bFe, S2 = c(-34, -10, res1), O2 = c(-55, -40, res1),
T = T, stable = list(dFe$predominant))
oCu <- order(mCu$A.species$species$S2 - mCu$A.species$species$O2)
fill <- terrain.colors(length(oCu), alpha = 0.2)[oCu]
abbrv <- info(mCu$A.species$species$ispecies)$abbrv
dCu <- diagram(mCu$A.species, names = abbrv, fill = fill, dx = c(0, 0, 0, 0, 1.8))
title("Cu-Fe-S-O-H")
aFeCu <- mash(dFe, dCu)
names <- aFeCu$species$name
dx <- dy <- srt <- rep(0, length(names))
cex <- rep(1, length(names))
cex[names %in% c("Hem+Ccp", "Hem+Cv")] <- 0.8
srt[names %in% c("Mag+Cu", "Hem+Cu")] <- 90
srt[names %in% c("Mag+Bn", "Hem+Bn")] <- 63
srt[names %in% c("Mag+Ccp", "Hem+Ccp")] <- 68
srt[names %in% c("Py+Bn", "Py+Cv")] <- 90
dx[names == "Hem+Ccp"] <- -0.4
dy[names == "Hem+Ccp"] <- -0.5
oFeCu <- order(aFeCu$species$S2 - aFeCu$species$O2)
fill <- terrain.colors(length(oFeCu), alpha = 0.2)[oFeCu]
diagram(aFeCu, cex.names = cex, srt = srt, dx = dx, dy = dy, fill = fill)
legend("topleft", legend = lTP(T, "Psat"), bg = "white")
title("Cu-Fe-S-O-H")
par(mfrow = c(1, 3))
basis("pH", 6)
m_tot <- 80000 / mass("Cl") / 1000
calc <- NaCl(T = T, m_tot = m_tot)
basis("Cl-", log10(calc$m_Cl))
species("copper")
iaq <- retrieve("Cu", c("O", "H", "Cl", "S"), "aq")
mfun <- function() {
s <- solubility(iaq, bases = list(bFe, bCu), S2 = c(-34, -10, res1), O2 = c(-55, -40, res1),
T = T, IS = calc$IS, stable = list(dFe$predominant, dCu$predominant))
s <- convert(s, "ppm")
diagram(aFeCu, names = NA, col = "gray", fill = fill)
diagram(s, type = "loga.balance", levels = 10^(-3:3), add = TRUE)
diagram(s, type = "loga.balance", levels = 35, add = TRUE, lwd = 3, col = 6, contour.method = NA)
}
mfun()
title("Cu (ppm)")
logK <- subcrt(c("CuCl2-", "Cu+", "Cl-"), c(-1, 1, 2), T = T)$out$logK
dlogK <- logK - -5.2
dG <- convert(dlogK, "G", T = convert(T, "K"))
newG <- info(info("CuCl2-"))$G + dG
mod.OBIGT("CuCl2-", G = newG)
logK <- subcrt(c("CuCl3-2", "Cu+", "Cl-"), c(-1, 1, 3), T = T)$out$logK
dlogK <- logK - -5.6
dG <- convert(dlogK, "G", T = convert(T, "K"))
newG <- info(info("CuCl3-2"))$G + dG
mod.OBIGT("CuCl3-2", G = newG)
mfun()
title("Cu (ppm)", line = 1.7)
CuCl2 <- expr.species("CuCl2-")
CuCl3 <- expr.species("CuCl3-2")
title(bquote("Helgeson (1969)"~.(CuCl2)~and~.(CuCl3)), line = 0.9)
basis(c("S2", "copper", "hematite", "oxygen", "H2O", "H+", "Cl-"))
basis("pH", 6)
species("S2")
iaq <- info("SO4-2")
s <- solubility(iaq, S2 = c(-34, -10, res1), O2 = c(-55, -40, res1), T = T, IS = calc$IS, in.terms.of = "SO4-2")
s <- convert(s, "ppm")
diagram(aFeCu, names = NA, col = "gray", fill = fill)
diagram(s, type = "loga.balance", levels = 10^(-3:3), add = TRUE)
diagram(s, type = "loga.balance", levels = 35, add = TRUE, lwd = 3, col = 6, contour.method = NA)
title(bquote(bold(.(expr.species("SO4-2"))~"(ppm)")))
calc$IS
log10(calc$m_Cl * calc$gam_Cl)
subcrt(c("CuCl2-", "Cu+", "Cl-"), c(-1, 1, 2), T = T)$out$logK
subcrt(c("CuCl3-2", "Cu+", "Cl-"), c(-1, 1, 3), T = T)$out$logK
reset()
subcrt(c("CuCl2-", "Cu+", "Cl-"), c(-1, 1, 2), T = T)$out$logK
subcrt(c("CuCl3-2", "Cu+", "Cl-"), c(-1, 1, 3), T = T)$out$logK
names(iCu.aq)
mat <- matrix(c(1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 5), byrow = TRUE, nrow = 2)
layout(mat)
par(font.main = 1)
basis(c("Fe+2", "Cu+", "hydrogen sulfide", "oxygen", "H2O", "H+"))
xlab <- ratlab("Fe+2", "Cu+")
species(c("pyrite", "pyrrhotite", "magnetite", "hematite"))
aFe <- affinity("Fe+2" = c(0, 12), O2 = c(-40, -16), T = 400, P = 2000)
names <- info(aFe$species$ispecies)$abbrv
dFe <- diagram(aFe, xlab = xlab, names = names)
title(bquote("Only Fe; 1° balance:" ~ .(expr.species(dFe$balance))))
label.plot("A")
species(c("covellite", "chalcocite", "tenorite", "cuprite"))
aCu <- affinity(aFe)
names <- info(aCu$species$ispecies)$abbrv
dCu <- diagram(aCu, xlab = xlab, names = names)
title(bquote("Only Cu; 1° balance:" ~ .(expr.species(dCu$balance))))
label.plot("B")
species(c("chalcopyrite", "bornite"))
aFeCu <- affinity(aFe)
names <- info(aFeCu$species$ispecies)$abbrv
dFeCu <- diagram(aFeCu, xlab = xlab, balance = "H+", names = names)
title(bquote("Only Fe+Cu; 1° balance:" ~ .(expr.species(dFeCu$balance))))
label.plot("C")
ad1 <- rebalance(dFe, dCu, balance = "H+")
names <- info(ad1$species$ispecies)$abbrv
d1 <- diagram(ad1, xlab = xlab, balance = 1, names = names)
title(bquote("Only Fe or Cu; 2° balance:" ~ .(expr.species("H+"))))
label.plot("D")
d1$values <- c(dFe$values, dCu$values)
ad2 <- rebalance(d1, dFeCu, balance = "H+")
names <- info(ad2$species$ispecies)$abbrv
diagram(ad2, xlab = xlab, balance = 1, names = names)
title(bquote("Fe and/or Cu; 2° balance:" ~ .(expr.species("H+"))))
label.plot("E")
db <- describe.basis(ibasis = 3)
leg <- lex(lTP(400, 2000), db)
legend("bottomleft", legend = leg, bty = "n")
basis(c("Fe+2", "Cu+", "hydrogen sulfide", "oxygen", "H2O", "H+"))
basis("H2S", 2)
species(c("pyrite", "magnetite", "hematite", "covellite", "tenorite",
"chalcopyrite", "bornite"))
a <- affinity("Cu+" = c(-8, 2, 500), "Fe+2" = c(-4, 12, 500), T = 400, P = 2000)
names <- info(a$species$ispecies)$abbrv
d <- diagram(a, xlab = ratlab("Cu+"), ylab = ratlab("Fe+2"), balance = "O2", names = names)
title(bquote("Cu-Fe-S-O-H; 1° balance:" ~ .(expr.species(d$balance))))
species(c("pyrrhotite", "ferrous-oxide", "chalcocite", "cuprite"))
asat <- affinity(a)
names <- asat$species$name
names[2] <- "ferrous oxide"
diagram(asat, type = "saturation", add = TRUE, lty = 2, col = 4, names = names)
legend("topleft", legend = lTP(400, 2000), bty = "n")
Keff <- function(T = 25, pH = 7) {
len <- max(length(T), length(pH))
T <- rep(T, length.out = len)
pH <- rep(pH, length.out = len)
aH <- 10^(-pH)
logK1 <- subcrt(c("acetic acid", "NH3", "acetamide", "H2O"), c(-1, -1, 1, 1), T = T)$out$logK
logK2 <- subcrt(c("acetic acid", "acetate", "H+"), c(-1, 1, 1), T = T)$out$logK
logK3 <- subcrt(c("NH3", "H+", "NH4+"), c(-1, -1, 1), T = T)$out$logK
K1 <- 10^logK1
K2 <- 10^logK2
K3 <- 10^logK3
Keff <- K1 * (1 + K2 / aH) ^ -1 * (1 + K3 * aH) ^ -1
Keff
}
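## Keff() corrects the neutral-species equilibrium
##   acetic acid + NH3 = acetamide + H2O            (K1)
## for pH-dependent speciation: K2/aH gives the acetate fraction and
## K3*aH the NH4+ fraction, so
##   Keff = K1 / ((1 + K2/aH) * (1 + K3*aH)),
## i.e. K1 scaled by the fractions of total acetate and total ammonia
## present as the neutral reactants.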
res <- 128
pH <- seq(0, 14, length.out = res)
T <- 100
logKeff <- log10(Keff(pH = pH, T = T))
logAc <- log10(0.01)
logAm <- log10(0.001)
logAcAm <- logKeff + logAc + logAm
a_N <- 0.001
a_C <- 2 * 0.01
loga_N <- log10(a_N)
loga_C <- log10(a_C)
basis(c("CO2", "NH3", "O2", "H2O", "H+"))
basis("NH3", loga_N)
species(c("acetamide", "acetic acid", "acetate"))
m <- mosaic(c("NH3", "NH4+"), pH = c(0, 14, res), T = T)
e <- equilibrate(m, loga.balance = loga_C)
diagram(e, ylim = c(-8, -0), lty = 0, names = FALSE)
abline(v = 6, col = "gray60", lty = 5)
lines(pH, logAcAm, col = 4, lwd = 6, lty = 2)
diagram(e, add = TRUE, lty = c(2, 3, 1, 2, 3), lwd = c(1, 1, 2, 1, 1), dx = c(-0.2, 0.2, -2.5, 0, 0), dy = c(0.1, 0.1, -1, 0.1, 0.1), srt = c(0, 0, 52, 0, 0))
tN <- paste("Total N in basis species =", format(a_N, scientific = FALSE), "m")
tC <- paste("Total C in formed species =", format(a_C, scientific = FALSE), "m")
title(main = paste(tN, tC, sep = "\n"), font.main = 1)
legend("topright", legend = lTP(T, "Psat"), bty = "n")
stopifnot(all.equal(as.numeric(e$loga.equil[[3]]), logAcAm, tol = 1e-3, scale = 1)) |
delete_object <- function(object, bucket, quiet = TRUE, ...) {
if (missing(bucket)) {
bucket <- get_bucketname(object)
}
object <- get_objectkey(object)
if (length(object) == 1) {
r <- s3HTTP(verb = "DELETE",
bucket = bucket,
path = paste0("/", object),
...)
return(TRUE)
} else {
xml <- xml2::read_xml(paste0('<?xml version="1.0" encoding="UTF-8"?><Delete><Quiet>', tolower(quiet),'</Quiet></Delete>'))
for (i in seq_along(object)) {
xml2::xml_add_child(xml, xml2::read_xml(paste0("<Object><Key>", get_objectkey(object[[i]]), "</Key></Object>")))
}
tmpfile <- tempfile()
on.exit(unlink(tmpfile))
xml2::write_xml(xml, tmpfile)
md <- base64enc::base64encode(digest::digest(file = tmpfile, raw = TRUE))
r <- s3HTTP(verb = "POST",
bucket = bucket,
query = list(delete = ""),
request_body = tmpfile,
headers = list(`Content-Length` = formatSize(file.size(tmpfile)),
`Content-MD5` = md),
...)
return(TRUE)
}
} |
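## Usage sketch (hypothetical bucket and keys; assumes AWS credentials are
## configured as for other aws.s3 calls). A single key issues a DELETE
## request; several keys are batched into one "POST ?delete" request whose
## XML body is authenticated with a Content-MD5 header, as built above.
# delete_object("path/to/file.csv", bucket = "my-bucket")
# delete_object(c("a.csv", "b.csv"), bucket = "my-bucket")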
originize_dir <-
function(
path = getwd(),
pkgs = getOption("origin.pkgs", .packages()),
recursive = TRUE,
exclude_files = NULL,
overwrite =
getOption("origin.overwrite", TRUE),
ask_before_applying_changes =
getOption("origin.ask_before_applying_changes", TRUE),
ignore_comments =
getOption("origin.ignore_comments", TRUE),
check_conflicts =
getOption("origin.check_conflicts", TRUE),
check_base_conflicts =
getOption("origin.check_base_conflicts", TRUE),
path_to_local_functions =
getOption("origin.path_to_local_functions", NULL),
check_local_conflicts =
getOption("origin.check_local_conflicts", TRUE),
add_base_packages =
getOption("origin.add_base_packages", FALSE),
excluded_functions =
getOption("origin.excluded_functions", list()),
verbose =
getOption("origin.verbose", FALSE),
use_markers =
getOption("origin.use_markers_for_logging", TRUE)
) {
if (!check_base_conflicts && add_base_packages) {
stop("When adding base packages checking for ",
"potential conflicts is required!")
}
files <- list_files(path = path,
exclude_folders = c("renv", "packrat",
".git", ".Rproj"),
full.names = TRUE,
include.dirs = FALSE,
recursive = recursive,
pattern = "\\.R$",
ignore.case = TRUE)
if (!is.null(exclude_files)) {
if (any(!exclude_files %in% files)) {
stop("File to exclude not in given path\n",
exclude_files[!exclude_files %in% files])
}
files <- files[!files %in% exclude_files]
}
n_files <- length(files)
if (n_files > 20) {
cat(sprintf("You are about to originize %s files.\nProceed?", n_files))
if (interactive()) {
answer <- utils::menu(choices = c("YES", "NO", "Show files"))
} else {
answer <- 1
}
if (answer == 2) {
stop("Execution halted")
} else if (answer == 3) {
print(files)
cat("\nProceed?")
if (interactive()) {
answer2 <- utils::menu(choices = c("YES", "NO"))
} else {
answer2 <- 1
}
if (answer2 == 2) {
stop("Execution halted")
}
}
}
scripts <- suppressWarnings(lapply(files, readLines))
empty_scripts <- vapply(X = scripts,
FUN = length,
FUN.VALUE = integer(1)) == 0
if (all(empty_scripts)) {
message("All provided scripts are empty")
return(invisible(NULL))
} else if (any(empty_scripts)) {
scripts <- scripts[!empty_scripts]
files <- files[!empty_scripts]
}
originize_wrap(scripts = scripts,
files = files,
type = "writeLines",
pkgs = pkgs,
overwrite = overwrite,
ask_before_applying_changes = ask_before_applying_changes,
ignore_comments = ignore_comments,
check_conflicts = check_conflicts,
check_base_conflicts = check_base_conflicts,
add_base_packages = add_base_packages,
excluded_functions = excluded_functions,
verbose = verbose,
use_markers = use_markers,
path_to_local_functions = path_to_local_functions,
check_local_conflicts = check_local_conflicts)
return(invisible(NULL))
} |
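## Usage sketch (hypothetical paths): prefix function calls with their
## package namespace in every .R file below a directory.
# originize_dir(path = "myproject/R",
#               pkgs = c("data.table", "dplyr"),
#               ask_before_applying_changes = TRUE)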
context("helper functions")
if (exists("iris") == TRUE) {
dataset = iris[,1:4]
} else {
dataset <- rbind(matrix(rnorm(200, mean = 1, sd = 0.3), ncol = 4),
matrix(rnorm(200, mean = 2, sd = 0.3), ncol = 4),
matrix(rnorm(200, mean = 3, sd = 0.3), ncol = 4))
}
k = 4
l = 3
C = dataset[sample(nrow(dataset),k),]
x = dataset[sample(nrow(dataset),1),]
dXC = dist_sqr_XC(dataset,C)
Q = Q_reorder(dXC)
test_that("C_zero is same width as dataset",
{
Cz = C_zero(dataset, k, method = "random", c_type = "clustroid")
expect_equal(ncol(Cz),ncol(dataset))
})
test_that("C_zero is length k",
{
Cz = C_zero(dataset, k, method = "random", c_type = "clustroid")
expect_equal(nrow(Cz),k)
})
test_that("dist_sqr_xC is length 1",
{
dsxC = dist_sqr_xC(x, C)
expect_equal(nrow(dsxC), 1)
})
test_that("dist_sqr_xC is width 2",
{
dsxC = dist_sqr_xC(x, C)
expect_equal(ncol(dsxC), 2)
})
test_that("dist_sqr_XC is same length as dataset",
{
dsXC = dist_sqr_XC(dataset, C)
expect_equal(nrow(dsXC), nrow(dataset))
})
test_that("dist_sqr_xC is width 2",
{
dsXC = dist_sqr_XC(dataset, C)
expect_equal(ncol(dsXC), 2)
}) |
NBSI2<-function(ref.infor=NULL,que.infor=NULL,ref.env=NULL,que.env=NULL,
barcode.identi.result,model="RF",variables="ALL",
en.vir=NULL,bak.vir=NULL){
spe.identified<-as.character(barcode.identi.result[,2])
  spe.identified.uniq<-unique(spe.identified)
  if ((is.null(ref.infor) & is.null(ref.env)) |
      (is.null(que.infor) & is.null(que.env))){
stop("Please check the input data!")
}else if (is.null(ref.env) == FALSE & is.null(que.env) == FALSE){
eff.samp.env<-ref.env[,-c(1:2)]
que.vari<-que.env[,-c(1:3)]
if (nrow(que.vari) == 1){
que.vari<-que.vari[,colnames(que.vari) %in% colnames(eff.samp.env)]
que.vari<-as.data.frame(que.vari)
}else{
que.vari<-apply(que.vari,MARGIN=2,as.integer)
que.vari<-que.vari[,colnames(que.vari) %in% colnames(eff.samp.env)]
}
}else if (is.null(ref.env) == TRUE & is.null(que.env) == TRUE){
spe.identified.uniq<-unique(ref.infor$species)
ref.range<-data.frame()
for (su in 1:length(spe.identified.uniq)){
prese.lonlat<-ref.infor[ref.infor$species %in% spe.identified.uniq[su],]
range.lon=min(diff(range(prese.lonlat[,4])),(abs(min(prese.lonlat[,4])-(-180))+abs(max(prese.lonlat[,4])-180)))
range.lat=diff(range(prese.lonlat[,5]))
ref.range<-rbind(ref.range,cbind(range.lon,range.lat))
}
lon.mean<-mean(ref.range[-which(ref.range[,1] == 0),1])
lat.mean<-mean(ref.range[-which(ref.range[,2] == 0),2])
if (is.null(en.vir) == T){
cat("Environmental layers downloading ... ")
envir<-raster::getData("worldclim",download=TRUE,
var="bio",res=10)
en.vir<-raster::brick(envir)
cat("Done!\n")
if (is.null(bak.vir) != T){
warning("The random background points and their environmental
variables will be recreated from the downloaded
environmental layer!")
}
cat("Background extracting ... ")
back<-dismo::randomPoints(mask=en.vir,n=5000,ext=NULL,extf=1.1,
excludep=TRUE,prob=FALSE,cellnumbers=FALSE,tryf=3,warn=2,
lonlatCorrection=TRUE)
bak.vir<-raster::extract(en.vir,back)
cat("Done!\n")
}else{
if (is.null(bak.vir) == T){
cat("Background extracting ... ")
back<-dismo::randomPoints(mask=en.vir,n=5000,ext=NULL,extf=1.1,excludep=TRUE,
prob=FALSE,cellnumbers=FALSE,tryf=3,warn=2,
lonlatCorrection=TRUE)
bak.vir<-raster::extract(en.vir,back)
cat("Done!\n")
}
}
samp.env<-data.frame()
samp.points<-data.frame()
for (su in 1:length(spe.identified.uniq)){
prese.lonlat<-ref.infor[ref.infor$species %in% spe.identified.uniq[su],]
present.points<-pseudo.present.points(prese.lonlat[,3:5],10,lon.mean,lat.mean,en.vir,map=F)
      present.points[,1]<-gsub("Simulation",present.points[1,1],present.points[,1])
samp.points<-rbind(samp.points,present.points)
prese.env<-raster::extract(en.vir,present.points[,2:3])
if (!all(is.na(prese.env[,1])==FALSE)){
nonerr.env<-prese.env[-which(is.na(prese.env[,1])==TRUE),]
if (is.null(dim(nonerr.env))==TRUE){
prese.env<-t(as.data.frame(nonerr.env))
row.names(prese.env)=NULL
}else if (dim(nonerr.env)[1]==0){
stop ("Please check the coordinate of ",spe.identified.uniq[su]," !\n")
}else{
prese.env<-nonerr.env
}
}
spe.env<-cbind(Species=as.character(spe.identified.uniq[su]),prese.env)
samp.env<-rbind(samp.env,spe.env)
}
samp.env[,2:ncol(samp.env)]<-apply(samp.env[,2:ncol(samp.env)],FUN=as.integer,MARGIN=2)
eff.samp.env<-samp.env
if (variables == "SELECT"){
for (eff in 1:ncol(eff.samp.env)){
rs<-abs(stats::cor(eff.samp.env[,-1]))
cor.vir<-c()
for (r in 1:nrow(rs)){
tmp<-rs[r,which(rs[r,] >= 0.9 & rs[r,] != 1)]
if (length(tmp) != 0){
cor.vir<-c(cor.vir,names(tmp))
}
}
        freq.vir<-table(cor.vir)
if (length(freq.vir) != 0){
max.freq.vir<-names(freq.vir[freq.vir == max(freq.vir)])
max.freq.vir<-max.freq.vir[sample(1:length(max.freq.vir),size=1)]
eff.samp.env<-eff.samp.env[,-which(colnames(eff.samp.env)==max.freq.vir)]
}else{
break
}
}
      eff.list<-colnames(eff.samp.env)[-1]
}
que.vari<-raster::extract(en.vir,que.infor[,4:5])
if (nrow(que.vari) == 1){
que.vari<-que.vari[,colnames(que.vari) %in% colnames(eff.samp.env)]
que.vari<-as.data.frame(t(que.vari))
}else{
que.vari<-apply(que.vari,MARGIN=2,as.integer)
que.vari<-que.vari[,colnames(que.vari) %in% colnames(eff.samp.env)]
}
}else{
stop("There is no matching ecological variable information between REF and QUE!")
}
model<-gsub("randomforest|RandomForest|randomForest","RF",model)
model<-gsub("maxent|Maxent","MAXENT",model)
spe.niche<-list()
niche.ref.prob<-list()
for (siu in 1:length(spe.identified.uniq)){
prese.env<-eff.samp.env[gsub(".+,","",eff.samp.env[,1]) %in% spe.identified.uniq[siu],-1]
prese.env<-stats::na.omit(prese.env)
if (dim(prese.env)[1]==1){
prese.env<-rbind(prese.env,prese.env)
warning ("The model may not be accurate because there is only one record of ",spe.identified.uniq[siu],"!\n")
}
if (is.null(ref.env) == FALSE & is.null(que.env) == FALSE){
ref.mean<-apply(prese.env,FUN=mean,MARGIN=2)
ref.sd<-apply(prese.env,FUN=stats::sd,MARGIN=2)
ref.range<-apply(prese.env,FUN=max,MARGIN=2)-apply(prese.env,FUN=min,MARGIN=2)
for (rs in 1:length(ref.sd)){
if (ref.sd[rs] == 0){
ref.sd[rs]<-apply(eff.samp.env[,-1],FUN=stats::sd,MARGIN=2)[rs]
}
if (ref.range[rs] == 0){
ref.range[rs]<-ref.sd[rs]*2
}
}
q.01<-stats::qnorm(0.01,mean=ref.mean,sd=ref.sd)
q.99<-stats::qnorm(0.99,mean=ref.mean,sd=ref.sd)
bak.env<-matrix(nrow=5000,ncol=ncol(prese.env))
for (en in 1:ncol(prese.env)){
tmp.left<-stats::runif(2500,
min=(q.01[en]-2*ref.range[en]),
max=q.01[en]-ref.range[en])
tmp.right<-stats::runif(2500,
min=q.99[en]+ref.range[en],
max=q.99[en]+2*ref.range[en])
tmp.ab<-c(tmp.left,tmp.right)
bak.env[,en]<-tmp.ab
}
colnames(bak.env)<-colnames(prese.env)
if (model == "RF"){
mod<-niche.Model.Build(prese=NULL,absen=NULL,
prese.env=prese.env,absen.env=NULL,
model="RF",
bak.vir=bak.env)
}else if (model == "MAXENT"){
mod<-niche.Model.Build(prese=NULL,absen=NULL,
prese.env=prese.env,absen.env=NULL,
model="MAXENT",
bak.vir=bak.env)
}
}else if (is.null(ref.env) == TRUE & is.null(que.env) == TRUE){
if (model == "RF"){
mod<-niche.Model.Build(prese=NULL,absen=NULL,
prese.env=prese.env,absen.env=NULL,
model="RF",
bak.vir=bak.vir,en.vir=en.vir)
}else if (model == "MAXENT"){
mod<-niche.Model.Build(prese=NULL,absen=NULL,
prese.env=prese.env,absen.env=NULL,
model="MAXENT",
bak.vir=bak.vir,en.vir=en.vir)
}
}
spe.niche[[siu]]<-mod$model
spe.var<-apply(prese.env,FUN=as.numeric,MARGIN=2)
spe.var<-as.data.frame(spe.var)
if (model == "RF"){
ref.HSI<-stats::predict(mod$model,spe.var,type="prob")
niche.ref.prob[[siu]]<-min(ref.HSI[,2])
}else if (model == "MAXENT"){
ref.HSI<-dismo::predict(mod$model,spe.var,args='outputformat=logistic')
niche.ref.prob[[siu]]<-min(ref.HSI)
}
}
result<-data.frame()
for(n in 1:nrow(que.vari)){
if (is.null(ref.env) == FALSE & is.null(que.env) == FALSE){
que.ID<-as.character(que.env[n,2])
ref.index<-grep(que.ID,barcode.identi.result[,1])
target.spe<-as.character(barcode.identi.result[ref.index,2])
spe_in_ref<-as.character(ref.env[grep(target.spe,ref.env$species),]$species)
}else{
que.ID<-as.character(que.infor[n,2])
ref.index<-grep(que.ID,barcode.identi.result[,1])
target.spe<-as.character(barcode.identi.result[ref.index,2])
spe_in_ref<-as.character(ref.infor[grep(target.spe,ref.infor$species),]$species)
}
if (length(spe_in_ref) == 0){
warning ("The identified species ",target.spe,
" doesn't exist in ref.infor!",
" Skipping the niche-based procedure ",
que.ID," ...\n")
Pb<-as.numeric(as.character(barcode.identi.result[n,3]))
res0<-cbind(Pb,ref.prob=NA,que.prob=NA,CF=NA,Pbe=NA,NicoB.prob=NA)
res0<-cbind(as.character(barcode.identi.result[n,1]),target.spe,res0)
result<-rbind(result,res0)
}else{
spe.index<-grep(paste(target.spe,"$",sep=""),spe.identified.uniq,fixed=F)
model.spe<-spe.niche[[spe.index]]
if (nrow(que.vari) == 1){
que.niche<-as.data.frame(que.vari)
}else{
que.niche<-t(as.matrix(que.vari[n,]))
que.niche<-as.data.frame(que.niche)
}
if (all(is.na(que.niche)) == TRUE){
stop ("Please check the coordinate of ",que.ID," !\n")
}else{
if (model == "RF"){
que.HSI<-stats::predict(model.spe,que.niche,type="prob")
que.prob<-que.HSI[,2]
ref.prob<-niche.ref.prob[[spe.index]]
}else if (model == "MAXENT"){
que.HSI<-dismo::predict(model.spe,que.niche,args='outputformat=logistic')
que.prob<-que.HSI
ref.prob<-niche.ref.prob[[spe.index]]
}
}
Pb<-as.numeric(as.character(barcode.identi.result[n,3]))
CF=que.prob/ref.prob
NicoB.prob=Pb*CF
if (CF > 1){ CF=1 }
if (NicoB.prob > 1){ NicoB.prob=1 }
res0<-cbind(Pb,ref.prob,que.prob,CF,Pbe=NA,NicoB.prob)
res0<-cbind(as.character(barcode.identi.result[n,1]),target.spe,round(res0,4))
result<-rbind(result,res0)
}
}
colnames(result)<-c("queID","species.identified","P(Bk)","min(P(EK))","P(Ek)",
"Prob(Sid).TSI","Prob(Sid).CI.cor","Prob(Sid).CI.unc")
result<-as.data.frame(result[,-7])
return(result)
} |
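## The data frame returned by NBSI2() pairs the barcode-based probability
## P(Bk) with a niche-based correction factor CF = P(Ek)/min(P(Ek)) (the
## query's habitat suitability relative to the least suitable reference
## record); Prob(Sid) = P(Bk) * CF, with both ratios capped at 1. Queries
## whose identified species is absent from the reference data keep NA in
## the niche columns.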
.msg <- function(verbose, ...) {
if (verbose) {
message(...)
}
} |
get_daymet <- function(template,
label,
elements = NULL,
years = NULL,
raw.dir = "./RAW/DAYMET",
extraction.dir = paste0("./EXTRACTIONS/", label, "/DAYMET"),
                       force.redo = FALSE) {
raw.dir <- normalizePath(paste0(raw.dir,"/."), mustWork = FALSE)
extraction.dir <- normalizePath(paste0(extraction.dir,"/."), mustWork = FALSE)
dir.create(raw.dir, showWarnings = FALSE, recursive = TRUE)
dir.create(extraction.dir, showWarnings = FALSE, recursive = TRUE)
all.elements <- c("dayl", "prcp", "srad", "swe", "tmax", "tmin", "vp")
if (is.null(elements))
elements <- all.elements
elements <- tolower(elements)
missing.elements <- setdiff(elements, all.elements)
if (length(missing.elements) > 0)
warning("Elements not available: ", paste(missing.elements, collapse = ", "))
elements <- setdiff(elements, missing.elements)
if (length(elements) == 0)
stop("No elements available")
all.years <- 1980:(lubridate::year(Sys.time()) - 1)
if (is.null(years))
years <- all.years
missing.years <- setdiff(years, all.years)
if (length(missing.years) > 0)
warning("Years not available: ", paste(missing.years, collapse = ", "))
years <- setdiff(years, missing.years)
if (length(years) == 0)
stop("No years available")
out.files <- paste0(extraction.dir, "/", label, "_DAYMET_", elements, "_", min(years), "-", max(years), ".tif")
if (!force.redo & all(file.exists(out.files)) & file.exists(paste0(extraction.dir, "/", label, "_DAYMET_layer_names.Rds"))) {
extracted.DAYMET <- out.files %>% lapply(FUN = function(x) {
out <- raster::brick(x)
names(out) <- readr::read_rds(paste0(extraction.dir, "/", label, "_DAYMET_layer_names.Rds"))
return(out)
})
names(extracted.DAYMET) <- elements
return(extracted.DAYMET)
}
daymet_tiles <- FedData::daymet_tiles
template.latlon <- template %>% sp::spTransform(raster::projection(daymet_tiles))
tile.ids <- daymet_tiles$TileID[!is.na(daymet_tiles %over% template.latlon)]
tile.ids <- tile.ids[!is.na(tile.ids)]
tile.ids <- unique(tile.ids)
message("Area of interest includes ", length(tile.ids), " DAYMET tile(s).")
tiles <- lapply(tile.ids, function(tile) {
return(FedData::get_daymet_tile(template = template,
elements = elements,
years = years,
tileID = tile,
raw.dir = raw.dir))
})
names(tiles) <- tile.ids
if (length(tiles) > 1) {
message("Mosaicking DAYMET tiles.")
tiles <- foreach::foreach(element = elements) %do% {
utils::flush.console()
these.tiles <- lapply(tiles, "[[", element)
these.tiles$fun <- mean
names(these.tiles)[1:2] <- c("x", "y")
out.tiles <- do.call(raster::mosaic, these.tiles)
names(out.tiles) <- names(these.tiles$x)
out.tiles
}
} else {
tiles <- tiles[[1]]
}
names(tiles) <- elements
tiles %>% mapply(x = ., y = names(tiles), FUN = function(x, y) {
raster::writeRaster(x, paste0(extraction.dir, "/", label, "_DAYMET_", y, "_", min(years), "-", max(years), ".tif"), datatype = "FLT4S",
options = c("COMPRESS=DEFLATE", "ZLEVEL=9", "INTERLEAVE=BAND"), overwrite = T, setStatistics = FALSE)
})
tiles[[1]] %>% names() %>% readr::write_rds(paste0(extraction.dir, "/", label, "_DAYMET_layer_names.Rds"))
return(tiles)
}
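## get_daymet() returns a named list of RasterBricks (one per element), with
## layer names restored from the saved *_layer_names.Rds file on cached runs;
## tiles are mosaicked with mean() where the area of interest spans more
## than one DAYMET tile.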
download_daymet_tile <- function(tileID, elements, years, raw.dir) {
out <- foreach::foreach(element = elements) %:%
foreach::foreach(year = sort(years), .combine = "c") %do% {
destdir <- paste0(raw.dir, "/", tileID, "/", year)
dir.create(destdir,
recursive = TRUE,
showWarnings = FALSE)
url <- paste0("https://thredds.daac.ornl.gov/thredds/fileServer/ornldaac/1328/tiles/", year, "/", tileID, "_", year, "/",
element, ".nc")
download_data(url = url,
destdir = destdir,
timestamping = FALSE,
nc = TRUE) %>%
normalizePath(mustWork = T)
}
names(out) <- elements
return(out)
}
get_daymet_tile <- function(template, tileID, elements = NULL, years = NULL, raw.dir) {
tmpdir <- tempfile()
if (!dir.create(tmpdir))
stop("failed to create my temporary directory")
message("(Down)Loading DAYMET tile ", tileID)
all.elements <- c("dayl", "prcp", "srad", "swe", "tmax", "tmin", "vp")
elements <- tolower(elements)
if (is.null(elements))
elements <- all.elements
missing.elements <- setdiff(elements, all.elements)
if (length(missing.elements) > 0)
warning("Elements not available: ", paste(missing.elements, collapse = ", "))
elements <- setdiff(elements, missing.elements)
if (length(elements) == 0)
stop("No elements available")
all.years <- 1980:(lubridate::year(Sys.time()) - 1)
if (is.null(years))
years <- all.years
missing.years <- setdiff(years, all.years)
if (length(missing.years) > 0)
warning("Years not available: ", paste(missing.years, collapse = ", "))
years <- setdiff(years, missing.years)
if (length(years) == 0)
stop("No years available")
files <- download_daymet_tile(tileID = tileID, elements = elements, years = years, raw.dir = raw.dir)
tiles <- foreach::foreach(element = files) %do% {
tile <- foreach::foreach(file = element) %do% raster::brick(file)
tile %<>% raster::stack(quick = TRUE)
if (!is.null(template)) {
tile <- tryCatch(tile %>%
raster::crop(template %>%
sp::spTransform(tile %>%
raster::projection() %>%
sp::CRS()),
snap = "out"),
error = function(e) {
tile %>%
raster::crop(template %>%
sp::spTransform(tile %>%
raster::projection() %>%
sp::CRS()))
})
}
}
names(tiles) <- elements
unlink(tmpdir, recursive = TRUE)
return(tiles)
}
"daymet_tiles" |
members <-
function(x,...) UseMethod("members") |
SRE.fit <- function(object, n_EM = 100L, tol = 0.01, method = c("EM", "TMB"),
lambda = 0, print_lik = FALSE, optimiser = nlminb,
known_sigma2fs = NULL, taper = NULL,
simple_kriging_fixed = TRUE, ...) {
tmp <- list(...)
if(!is.null(tmp$SRE_model)) {
object <- tmp$SRE_model
warning("The argument 'SRE_model' is deprecated: Please use 'object'")
}
method <- match.arg(method)
optimiser <- match.fun(optimiser)
if (!is.null(known_sigma2fs)) object@sigma2fshat <- known_sigma2fs
if (method == "TMB" & object@K_type == "block-exponential") {
answer <- user_decision("You have selected method = 'TMB' and K_type = 'block-exponential'. While this combination is allowed, it is significantly more computationally demanding than K_type = 'precision'. Please enter Y if you would like to continue with the block-exponential formulation, or N if you would like to change to the sparse precision matrix formulation.\n")
if (answer == "N") {
object@K_type <- "precision"
cat("Setting K-type = 'precision'.\n")
}
}
.check_args2(n_EM = n_EM, tol = tol, lambda = lambda,
method = method, print_lik = print_lik,
fs_by_spatial_BAU = object@fs_by_spatial_BAU,
response = object@response, K_type = object@K_type, link = object@link,
known_sigma2fs = known_sigma2fs,
BAUs = object@BAUs,
optimiser = optimiser, taper = taper,
simple_kriging_fixed = simple_kriging_fixed,
...)
object@simple_kriging_fixed <- simple_kriging_fixed
object <- .SRE.fit(object = object, n_EM = n_EM, tol = tol,
method = method, lambda = lambda, print_lik = print_lik,
optimiser = optimiser, known_sigma2fs = known_sigma2fs, taper = taper, ...)
return(object)
}
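## Hedged sketch of a typical fitting call. The construction of the SRE object
## follows the usual FRK workflow (see the FRK vignettes); the basis/BAU
## settings below are illustrative assumptions, not taken from this file.
if (FALSE) {
library(FRK); library(sp)
data(meuse); coordinates(meuse) <- ~x+y
S <- SRE(f = log(zinc) ~ 1, data = list(meuse),
basis = auto_basis(plane(), meuse, nres = 2),
BAUs = auto_BAUs(plane(), data = meuse, cellsize = c(100, 100)))
S <- SRE.fit(S, n_EM = 50, tol = 0.01, method = "EM")
## or, with the sparse precision-matrix formulation:
S <- SRE.fit(S, method = "TMB")
}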
.SRE.fit <- function(object, n_EM, tol, method, lambda,
print_lik, optimiser, known_sigma2fs, taper, ...) {
if(method == "EM") {
object <- .EM_fit(object = object, n_EM = n_EM, lambda = lambda,
tol = tol, print_lik = print_lik, known_sigma2fs = known_sigma2fs, taper = taper)
} else if (method == "TMB") {
object <- .TMB_fit(object, optimiser = optimiser, known_sigma2fs = known_sigma2fs, taper = taper, ...)
} else {
stop("No other estimation method implemented yet. Please use method = 'EM' or method = 'TMB'.")
}
object@method <- method
return(object)
}
.EM_fit <- function(object, n_EM, lambda, tol, print_lik, known_sigma2fs, taper) {
info_fit <- list()
info_fit$time <- system.time({
n <- nbasis(object)
X <- object@X
info_fit$method <- "EM"
llk <- rep(0,n_EM)
if(opts_FRK$get("progress"))
pb <- utils::txtProgressBar(min = 0, max = n_EM, style = 3)
if (!is.null(taper)) {
beta <- .tapering_params(D_matrices = object@D_basis, taper = taper)
T_beta <- .T_beta_taper_matrix(D_matrices = object@D_basis, beta = beta)
info_fit$taper <- taper
} else {
T_beta <- 1
info_fit$taper <- "NULL"
}
for(i in 1:n_EM) {
llk[i] <- loglik(object)
object <- .SRE.Estep(object)
object <- .SRE.Mstep(object, lambda, known_sigma2fs, T_beta)
if(opts_FRK$get("progress"))
utils::setTxtProgressBar(pb, i)
if(i>1)
if(abs(llk[i] - llk[i-1]) < tol) {
cat("Minimum tolerance reached\n")
break
}
}
if(opts_FRK$get("progress")) close(pb)
info_fit$num_iterations <- i
if(object@sigma2fshat == 0) {
info_fit$sigma2fshat_equal_0 <- 1
if(opts_FRK$get("verbose") > 0)
message("sigma2fs is being estimated to zero.
This might because of an incorrect binning
procedure or because too much measurement error
is being assumed (or because the latent
field is indeed that smooth, but unlikely).")
} else {
info_fit$sigma2fshat_equal_0 <- 0
}
if(i == n_EM) {
cat("Maximum EM iterations reached\n")
info_fit$converged <- 0
} else {
info_fit$converged <- 1
}
info_fit$plot_lik <- list(x = 1:i, llk = llk[1:i],
ylab = "log likelihood",
xlab = "EM iteration")
if(print_lik & !is.na(tol)) {
plot(1:i, llk[1:i],
ylab = "log likelihood",
xlab = "EM iteration")
}
})
object@info_fit <- info_fit
return(object)
}
.SRE.Estep <- function(Sm) {
if (Sm@fs_model == "ind")
.SRE.Estep.ind(Sm)
else stop("The E-step is currently implemented only for the independent fs-variation model")
}
.loglik.ind <- function(Sm) {
S <- Sm@S
K <- Sm@Khat
chol_K <- chol(K)
Kinv <- chol2inv(chol_K)
resid <- Sm@Z - Sm@X %*% Sm@alphahat
N <- length(Sm@Z)
D <- Sm@sigma2fshat*Sm@Vfs + Sm@Ve
if(isDiagonal(D)) {
D <- Diagonal(x = D@x)
cholD <- sqrt(D)
cholDinvT <- solve(cholD)
} else {
cholD <- chol(D)
cholDinvT <- t(solve(cholD))
}
S_Dinv_S <- crossprod(cholDinvT %*% S)
R <- chol(Kinv + S_Dinv_S)
log_det_SigmaZ <- logdet(R) +
determinant(K,logarithm = TRUE)$modulus +
logdet(cholD)
rDinv <- crossprod(cholDinvT %*% resid,cholDinvT)
quad_bit <- crossprod(cholDinvT %*% resid) - tcrossprod(rDinv %*% S %*% solve(R))
llik <- -0.5 * N * log(2*pi) -
0.5 * log_det_SigmaZ -
0.5 * quad_bit
return(as.numeric(llik))
}
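## A minimal sketch (toy matrices, not package code) verifying the matrix
## determinant lemma used in the log-likelihood above:
## det(S K S' + D) = det(Kinv + S' Dinv S) * det(K) * det(D).
if (FALSE) {
set.seed(1)
S <- matrix(rnorm(20), 5, 4)
K <- crossprod(matrix(rnorm(16), 4, 4)) + diag(4)
D <- diag(runif(5, 1, 2))
lhs <- determinant(S %*% K %*% t(S) + D)$modulus
rhs <- determinant(solve(K) + t(S) %*% solve(D) %*% S)$modulus +
determinant(K)$modulus + determinant(D)$modulus
all.equal(as.numeric(lhs), as.numeric(rhs))  # TRUE
}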
.SRE.Mstep <- function(Sm, lambda = 0, known_sigma2fs, T_beta) {
if (Sm@fs_model == "ind")
.SRE.Mstep.ind(Sm, lambda = lambda, known_sigma2fs = known_sigma2fs, T_beta = T_beta)
else stop("The M-step is currently implemented only for the independent fs-variation model")
}
.SRE.Estep.ind <- function(Sm) {
alpha <- Sm@alphahat
K <- Sm@Khat
Kinv <- Sm@Khat_inv
sigma2fs <- Sm@sigma2fshat
D <- sigma2fs*Sm@Vfs + Sm@Ve
if(isDiagonal(D)) {
D <- Diagonal(x=D@x)
cholD <- sqrt(D)
cholDinv <- solve(cholD)
Dinv <- solve(D)
} else {
cholD <- Matrix::chol(D)
cholDinv <- solve(cholD)
Dinv <- chol2inv(cholD)
}
Q_eta <- (crossprod(t(cholDinv) %*% Sm@S) + Kinv)
S_eta <- chol2inv(chol(Q_eta))
mu_eta <- (S_eta) %*%(t(Sm@S) %*% Dinv %*% (Sm@Z - Sm@X %*% alpha))
Sm@mu_eta <- mu_eta
Sm@S_eta <- S_eta
Sm@Q_eta <- Q_eta
Sm
}
.SRE.Mstep.ind <- function(Sm, lambda = 0, known_sigma2fs, T_beta) {
mu_eta <- Sm@mu_eta
S_eta <- Sm@S_eta
alpha <- Sm@alphahat
sigma2fs <- Sm@sigma2fshat
K <- .update_K(Sm,method=Sm@K_type,
lambda = lambda)
K <- K * T_beta
Khat_inv <- chol2inv(chol(K))
homoscedastic <- all((a <- diag(Sm@Ve)) == a[1]) &
all((b <- diag(Sm@Vfs)) == b[1]) &
isDiagonal(Sm@Vfs)
diagonal_mats <- isDiagonal(Sm@Ve) & isDiagonal(Sm@Vfs)
if(!is.null(known_sigma2fs)) {
est_sigma2fs <- FALSE
sigma2fs_new <- known_sigma2fs
} else {
est_sigma2fs <- TRUE
}
if(!all(diag(Sm@Vfs) == 0))
if(!diagonal_mats) {
J <- function(sigma2fs) {
if(sigma2fs < 0) {
return(Inf)
} else {
D <- sigma2fs*Sm@Vfs + Sm@Ve
Dinv <- chol2inv(chol(D))
DinvV <- Dinv %*% Sm@Vfs
DinvVDinv <- Dinv %*% Sm@Vfs %*% Dinv
alpha <- solve(t(Sm@X) %*% Dinv %*% Sm@X) %*%
t(Sm@X) %*% Dinv %*%
(Sm@Z - Sm@S %*% mu_eta)
resid <- Sm@Z - Sm@X %*% alpha
Dinvr <- DinvVDinv %*% resid
DinvS <- DinvVDinv %*% Sm@S
tr1 <- tr(DinvV)
tr2 <- sum(diag2(DinvS %*% (S_eta + tcrossprod(mu_eta)),t(Sm@S)) -
2*diag2(DinvS %*% mu_eta,t(resid)) +
diag2(Dinvr,t(resid)))
-(-0.5*tr1 +0.5*tr2)
}
}
} else {
R_eta <- chol(S_eta + tcrossprod(mu_eta))
S_R_eta <- Sm@S %*% t(R_eta)
Omega_diag1 <- rowSums(S_R_eta^2)
J <- function(sigma2fs) {
if(sigma2fs < 0) {
return(Inf)
} else {
D <- sigma2fs*Sm@Vfs + Sm@Ve
Dinv <- solve(D)
DinvV <- Dinv %*% Sm@Vfs
alpha <- solve(t(Sm@X) %*% Dinv %*% Sm@X) %*%
t(Sm@X) %*% Dinv %*%
(Sm@Z - Sm@S %*% mu_eta)
resid <- Sm@Z - Sm@X %*% alpha
Omega_diag <- Omega_diag1 -
2*diag2(Sm@S %*% mu_eta, t(resid)) +
diag2(resid,t(resid))
Omega_diag <- Diagonal(x=Omega_diag)
return(-(-0.5*tr(DinvV) + 0.5*tr(DinvV %*% Dinv %*% Omega_diag)))
}
}
}
if(!all(diag(Sm@Vfs) == 0)) {
if(!homoscedastic) {
if(est_sigma2fs) {
amp_factor <- 10; OK <- 0
while(!OK) {
amp_factor <- amp_factor * 10
if(!(sign(J(sigma2fs/amp_factor)) == sign(J(sigma2fs*amp_factor)))) OK <- 1
if(amp_factor > 1e9) {
OK <- 1
}
}
if(amp_factor > 1e9) {
sigma2fs_new <- 0
} else {
sigma2fs_new <- stats::uniroot(f = J,
interval = c(sigma2fs/amp_factor,
sigma2fs*amp_factor))$root
}
}
D <- sigma2fs_new*Sm@Vfs + Sm@Ve
if(isDiagonal(D)) {
D <- Diagonal(x=D@x)
Dinv <- solve(D)
} else {
Dinv <- chol2inv(chol(D))
}
alpha <- solve(t(Sm@X) %*% Dinv %*% Sm@X) %*%
t(Sm@X) %*% Dinv %*% (Sm@Z - Sm@S %*% mu_eta)
} else {
alpha <- solve(t(Sm@X) %*% Sm@X) %*%
t(Sm@X) %*% (Sm@Z - Sm@S %*% mu_eta)
resid <- Sm@Z - Sm@X %*% alpha
if(est_sigma2fs) {
Omega_diag <- Omega_diag1 -
2*diag2(Sm@S %*% mu_eta, t(resid)) +
diag2(resid,t(resid))
Omega_diag <- Diagonal(x=Omega_diag)
sigma2fs_new <- 1/b[1]*(sum(Omega_diag)/length(Sm@Z) - a[1])
if(sigma2fs_new < 0) {
sigma2fs_new <- 0
}
}
}
}
if(all(diag(Sm@Vfs) == 0)) {
alpha <- solve(t(Sm@X) %*% solve(Sm@Ve) %*% Sm@X) %*% t(Sm@X) %*%
solve(Sm@Ve) %*% (Sm@Z - Sm@S %*% mu_eta)
if(est_sigma2fs) sigma2fs_new <- 0
}
Sm@Khat <- K
Sm@Khat_inv <- Khat_inv
Sm@alphahat <- alpha
Sm@sigma2fshat <- sigma2fs_new
return(Sm)
}
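## A toy sketch of the bracket-expansion strategy used above: widen the
## interval geometrically until the score changes sign, then call uniroot().
if (FALSE) {
J <- function(x) x - 2  # toy score function with root at 2
x0 <- 0.5
amp <- 10
while (sign(J(x0 / amp)) == sign(J(x0 * amp)) && amp <= 1e9) amp <- amp * 10
if (amp <= 1e9) uniroot(J, c(x0 / amp, x0 * amp))$root  # ~2
}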
.update_K <- function(Sm,method="unstructured",
S_eta= NULL,mu_eta = NULL,
lambda = 0) {
if (is.null(S_eta)) S_eta <- Sm@S_eta
if (is.null(mu_eta)) mu_eta <- Sm@mu_eta
if(method == "unstructured") {
K <- .regularise_K(Sm, lambda = lambda)
} else if (method == "block-exponential") {
all_res <- count_res(Sm)
eta2 <- lapply(1:nrow(all_res),function(i) {
idx <- which(data.frame(Sm@basis)$res == i)
S_eta[idx,idx] +
tcrossprod(mu_eta[idx])
})
omega <- lapply(1:nrow(all_res),
function(i) {
ni <- all_res[i,]$n
idx <- which(data.frame(Sm@basis)$res == i)
Ki <- Sm@Khat[idx,idx]/Sm@Khat[idx[1],idx[1]]
Ki_inv <- chol2inv(chol(Ki))
ni / sum(diag2(Ki_inv,eta2[[i]]))
})
f_tau <- function(tau_i,i) {
if(any(tau_i <= 1e-10)) {
Inf
} else {
idx <- which(data.frame(Sm@basis)$res == i)
if(is(Sm@basis,"TensorP_Basis")) {
Ki1 <- exp(-Sm@D_basis$Basis2[[1]]/tau_i[2])
Ki2 <- exp(-Sm@D_basis$Basis1[[i]]/tau_i[1])
Ki <- kronecker(Ki1,Ki2)
Qi1 <- chol2inv(chol(Ki1))
Qi2 <- chol2inv(chol(Ki2))
Ki_inv <- kronecker(Qi1,Qi2)
R1 <- chol(Qi1)
R2 <- chol(Qi2)
det_part <- 0.5*(nrow(R2)*logdet(R1) + nrow(R1)*logdet(R2))
-as.numeric(det_part - omega[[i]]/2*sum(diag2(Ki_inv,eta2[[i]],symm=TRUE)))
} else {
Ki <- exp(-Sm@D_basis[[i]]/tau_i)
Ki_inv <- chol2inv(chol(Ki))
-as.numeric(0.5*determinant(Ki_inv)$modulus -
omega[[i]]/2*sum(diag2(Ki_inv,eta2[[i]],symm=TRUE)))
}
}
}
gr_f_tau <- function(tau_i,i) {
idx <- which(Sm@basis@df$res == i)
if(is(Sm@basis,"TensorP_Basis")) {
Ki1 <- exp(-Sm@D_basis$Basis2[[1]]/tau_i[2])
Ki2 <- exp(-Sm@D_basis$Basis1[[i]]/tau_i[1])
Ki <- kronecker(Ki1,Ki2)
Qi1 <- chol2inv(chol(Ki1))
Qi2 <- chol2inv(chol(Ki2))
Ki_inv <- kronecker(Qi1,Qi2)
dKi <- kronecker(Ki1,(Sm@D_basis$Basis1[[i]]/(tau_i[1]^2))*Ki2)
dKit <- kronecker((Sm@D_basis$Basis2[[1]]/(tau_i[2]^2))*Ki1,Ki2)
} else {
Ki <- exp(-Sm@D_basis[[i]]/tau_i)
dKi <- (Sm@D_basis[[i]]/(tau_i^2))*exp(-Sm@D_basis[[i]]/tau_i)
Ki_inv <- chol2inv(chol(Ki))
}
tau_i1 <- -(-0.5*sum(diag2(dKi,Ki_inv)) +
0.5*omega[[i]]*sum(diag2(eta2[[i]]%*% Ki_inv,
dKi %*% Ki_inv)))
tau_i1 <- as.numeric(tau_i1)
if(length(tau_i) == 1) {
return(tau_i1)
} else {
tau_i2 <- -(-0.5*sum(diag2(dKit,Ki_inv)) +
0.5*omega[[i]]*sum(diag2(eta2[[i]]%*% Ki_inv,
dKit %*% Ki_inv)))
tau_i2 <- as.numeric(tau_i2)
return(c(tau_i1,tau_i2))
}
}
max_l <- max(unlist(Sm@D_basis[[1]]))
tau <- lapply(1:nrow(all_res),
function(i) {
idx <- which(Sm@basis@df$res == i)
Ki <- Sm@Khat[idx,idx]/Sm@Khat[idx[1],idx[1]]
if(is(Sm@basis,"TensorP_Basis")) {
par_init <- ifelse(all_res$n[i]>1,
max(-Sm@D_basis$Basis1[[i]][1,2]/log(Ki[1,2]),1e-9),
1e-9)
par_init[2] <- max(-Sm@D_basis$Basis2[[1]][1,2]/log(Ki[1,1+count_res(Sm@basis@Basis1)$n[i]]),1e-9)
if(par_init[2] == 1e-9) par_init[2] <- 1
} else {
par_init <- ifelse(all_res$n[i]>1,
max(-Sm@D_basis[[i]][1,2]/log(Ki[1,2]),1e-9),
1e-9)
}
if(par_init[1] == 1e-9) par_init[1] <- max_l/10
suppressWarnings(optim(par = par_init,
fn = f_tau,
gr = gr_f_tau,
i=i,control=list(maxit=100L,reltol=1e-4))$par)
})
K <- lapply(1:nrow(all_res),
function(i) {
if(is(Sm@basis,"TensorP_Basis")) {
Ki <- kronecker(exp(-Sm@D_basis$Basis2[[1]]/tau[[i]][2]),
exp(-Sm@D_basis$Basis1[[i]]/tau[[i]][1]))/omega[[i]]
} else {
Ki <- exp(-Sm@D_basis[[i]]/tau[[i]])/omega[[i]]
}
})
K <- do.call("bdiag",K)
idx_all <- unlist(lapply(1:nrow(all_res),
function(i) which(Sm@basis@df$res == i)))
K <- reverse_permute(K,idx_all)
if( opts_FRK$get("verbose") > 0) {
cat(" Estimates of omega: ",unlist(omega)," ")
cat(" Estimates of tau: ",unlist(tau)," ")
}
}
return(K)
}
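## A minimal sketch of one per-resolution block built above: an exponential
## correlation matrix (a Kronecker product for a tensor-product basis) scaled
## by 1/omega_i. The distances, tau, and omega below are illustrative.
if (FALSE) {
D1 <- as.matrix(dist(1:3))  # spatial distances, 3 basis functions
D2 <- as.matrix(dist(1:4))  # temporal distances, 4 basis functions
tau <- c(1.5, 2); omega <- 0.8
K_i <- kronecker(exp(-D2 / tau[2]), exp(-D1 / tau[1])) / omega
dim(K_i)  # 12 x 12
}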
.regularise_K <- function(Sm,S_eta= NULL,mu_eta = NULL, lambda = 0) {
if (is.null(S_eta)) S_eta <- Sm@S_eta
if (is.null(mu_eta)) mu_eta <- Sm@mu_eta
if(any(lambda > 0)) {
if(length(lambda) == 1) {
reg_matrix <- lambda*Diagonal(nrow(S_eta))
} else {
reg_matrix <- Diagonal(x = do.call("c",
apply(count_res(Sm),1,
function(x) rep(lambda[x[1]],x[2]))))
}
Q <- chol2inv(chol(S_eta + tcrossprod(mu_eta))) + reg_matrix
K <- chol2inv(chol(Q))
} else {
K <- S_eta + tcrossprod(mu_eta)
}
K
}
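## A toy sketch of the ridge-style regularisation above: adding lambda to the
## inverted moment estimate and inverting back shrinks the eigenvalues of K
## from e to e / (1 + lambda * e).
if (FALSE) {
set.seed(1)
E <- crossprod(matrix(rnorm(25), 5, 5)) + diag(5)  # stands in for S_eta + mu mu'
lambda <- 0.5
K_reg <- solve(solve(E) + lambda * diag(5))
norm(K_reg, "2") < norm(E, "2")  # TRUE
}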
.TMB_fit <- function(object, optimiser, known_sigma2fs, taper, ...) {
C_O <- .constructC_O(object)
X_O <- .constructX_O(object)
S_O <- .constructS_O(object)
info_fit <- list()
info_fit$time <- system.time({
info_fit$method <- "TMB"
K_type <- object@K_type
if (K_type == "precision") {
sp_basis <- if (is(object@basis,"TensorP_Basis")) object@basis@Basis1 else object@basis
if (!sp_basis@regular || is(manifold(sp_basis), "sphere")) {
K_type <- "precision-block-exponential"
} else {
K_type <- "neighbour"
}
}
if (is.null(taper) && (K_type %in% c("block-exponential", "precision-block-exponential"))) {
cat("The argument taper was not specified. Since we are using method = 'TMB'
with either i) a covariance matrix (K_type = 'block-exponential')
or ii) irregular basis functions (object@basis@regular = 0) or iii) a
non-plane manifold, we must use tapering for computational reasons.
Setting taper = 3.\n")
taper <- 3
}
info_fit$taper <- taper
parameters <- .TMB_initialise(object, K_type = K_type, C_O = C_O, X_O = X_O, S_O = S_O)
data <- .TMB_data_prep(object, sigma2fs_hat = exp(parameters$logsigma2fs),
K_type = K_type, taper = taper,
C_O = C_O, X_O = X_O, S_O = S_O)
ns <- dim(object@BAUs)[1]
if (object@fs_by_spatial_BAU) {
data$sigma2fs_hat <- rep(data$sigma2fs_hat, ns)
parameters$logsigma2fs <- rep(parameters$logsigma2fs, ns)
}
if (!is.null(known_sigma2fs)) {
data$fix_sigma2fs <- 1
data$sigma2fs_hat <- known_sigma2fs
parameters$logsigma2fs <- log(known_sigma2fs)
}
parameters$logsigma2 <- pmin(pmax(parameters$logsigma2, -4), 8)
parameters$logtau <- pmin(pmax(parameters$logtau, -4), 8)
parameters$logdelta <- pmin(pmax(parameters$logdelta, -4), 8)
parameters$logsigma2_t <- pmin(pmax(parameters$logsigma2_t, -4), 8)
if (any(sapply(data, function(x) any(length(x) == 0) || any(is.na(x)) || any(is.null(x)))))
stop("Something has gone wrong in the data preparation for TMB: Some entries are numeric(0), NA, or NULL. Please contact the package maintainer.")
if (any(sapply(parameters, function(x) any(length(x) == 0) || any(is.na(x)) || any(is.null(x)))))
stop("Something has gone wrong in the parameter initialisation for TMB: Some entries are numeric(0), NA, or NULL. Please contact the package maintainer.")
if (any(data$nnz < 0) || any(data$col_indices < 0) || any(data$row_indices < 0))
stop("Something has gone wrong in construction of the precision matrix of the basis-function coefficients: We have negative row-indices, col-indices, or total non-zeros: Please contact the package maintainer. ")
if (!.zero_range(c(length(data$x), length(data$col_indices), length(data$row_indices), sum(data$nnz))))
stop("Something has gone wrong in construction of the precision matrix of the basis-function coefficients: The number of row-indices, col-indices, or non-zeros is inconsistent. Please contact the package maintainer. ")
if(!.zero_range(c(length(object@Z), length(data$Z), nrow(data$C_O), nrow(data$X_O) , nrow(data$S_O))))
stop("Something has gone wrong in the data preparation for TMB: The dimensions of the C, X, or S matrix is inconsistent with the number of observations. Please contact the package maintainer.")
spatial_basis <- if (is(object@basis, "TensorP_Basis")) object@basis@Basis1 else object@basis
if(!.zero_range(c(nbasis(spatial_basis), max(data$row_indices + 1), max(data$col_indices + 1), sum(data$r_si))))
stop("Something has gone wrong in the data preparation for TMB: The number of basis functions and the matrix indices are inconsistent. Please contact the package maintainer.")
if (!(K_type %in% c("neighbour", "block-exponential", "precision-block-exponential")))
stop("Internal error: K_type is not one of neighbour, block-exponential, or precision-block-exponential. Please contact the package maintainer.")
obj <- MakeADFun(data = data,
parameters = parameters,
random = c("random_effects"),
DLL = "FRK",
silent = !opts_FRK$get("verbose"))
obj$env$tracepar <- opts_FRK$get("verbose")
cat("Optimising with TMB...\n")
fit <- optimiser(obj$par, obj$fn, obj$gr, ...)
info_fit$iterations <- fit$iterations
info_fit$convergence <- fit$convergence
info_fit$message <- fit$message
cat("Optimisation completed.\n")
cat("Extracting estimates of the parameters and the joint precision matrix of the random effects from TMB...\n")
par <- obj$env$last.par.best
estimates <- split(par, names(par))
s <- length(estimates$random_effects)
if (object@simple_kriging_fixed) {
Q_posterior <- obj$env$spHess(par = obj$env$last.par.best, random = TRUE)
} else {
Q_posterior <- sdreport(obj, getJointPrecision = TRUE)$jointPrecision
retain_idx <- rownames(Q_posterior) %in% c("alpha", "random_effects")
Q_posterior <- Q_posterior[retain_idx, retain_idx]
}
cat("Extraction completed.\n")
r <- nbasis(object)
mstar <- ncol(C_O)
object@alphahat <- as(estimates$alpha, "Matrix")
object@mu_eta <- as(estimates$random_effects[1:r], "Matrix")
if (object@include_fs) {
object@mu_xi <- as(estimates$random_effects[(r+1):(r + mstar)], "Matrix")
} else {
object@mu_xi <- as(rep(0, mstar), "Matrix")
}
object@sigma2fshat <- unname(exp(estimates$logsigma2fs))
object@Q_posterior <- Q_posterior
object@phi <- unname(exp(estimates$logphi))
object@log_likelihood <- -obj$fn()
if(object@sigma2fshat == 0) {
info_fit$sigma2fshat_equal_0 <- 1
if(opts_FRK$get("verbose") > 0)
message("sigma2fs is being estimated to zero.
This might because of an incorrect binning procedure or because too much
measurement error is being assumed (or because the latent field is indeed
that smooth, but unlikely).")
} else {
info_fit$sigma2fshat_equal_0 <- 0
}
})
object@info_fit <- info_fit
return(object)
}
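## Hedged sketch: any optimiser exposing nlminb's (start, objective, gradient,
## ...) interface can be passed down to the optimiser(obj$par, obj$fn, obj$gr,
## ...) call above. The wrapper below is illustrative, not from the package.
if (FALSE) {
optim_bfgs <- function(par, fn, gr, ...) optim(par, fn, gr, method = "BFGS", ...)
S <- SRE.fit(S, method = "TMB", optimiser = optim_bfgs)
}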
.TMB_initialise <- function(object, K_type, C_O, X_O, S_O) {
cat("Initialising parameters and random effects...\n")
nres <- max(object@basis@df$res)
r <- object@basis@n
Y_O <- .compute_Y_O(object, C_O = C_O)
l <- list()
l$alpha <- solve(t(X_O) %*% X_O) %*% t(X_O) %*% Y_O
if (object@response %in% c("poisson", "binomial", "negative-binomial")) {
l$phi <- 1
} else if (object@response == "gaussian") {
l$phi <- mean(diag(object@Ve))
} else {
l$phi <- var(as.numeric(object@Z))
}
l$sigma2 <- var(as.vector(Y_O)) * (0.1)^(0:(nres - 1))
l$tau <- (1 / 3)^(1:nres)
if (K_type != "block-exponential") {
l$sigma2 <- 1 / l$sigma2
l$tau <- 1 / l$tau
}
if (K_type == "separable") {
l$sigma2 <- rep(l$sigma2, 2)
l$tau <- rep(l$tau, 2)
l$logdelta <- 1
} else if (K_type == "precision-block-exponential") {
l$logdelta <- rnorm(nres)
} else {
l$logdelta <- 1
}
l$sigma2_t <- 1
l$rho_t <- 0.1
simple_init <- TRUE
if (simple_init) {
l$sigma2fs <- 0.05
mstar <- length(Y_O)
l$xi_O <- rnorm(mstar, 0, sqrt(l$sigma2fs))
if (K_type != "block-exponential") {
sigma2 <- 1 / l$sigma2
}
temporal <- is(object@basis,"TensorP_Basis")
if (temporal) {
spatial_basis <- object@basis@Basis1
temporal_basis <- object@basis@Basis2
r_ti <- as.vector(table(temporal_basis@df$res))
} else {
spatial_basis <- object@basis
r_ti <- 1
}
r_si <- as.vector(table(spatial_basis@df$res))
sigma2_long <- rep(rep(sigma2, times = r_si), r_ti)
l$eta <- rnorm(r, 0, sqrt(sigma2_long))
} else {
for (iteration_dummy in 1:5) {
l <- .TMB_initialise_MAP_estimates(l, object, S_O, r, Y_O, X_O)
}
}
transform_minus_one_to_one_inverse <- function(x) -0.5 * log(2 / (x + 1) - 1)
return(list(
alpha = as.vector(l$alpha),
logphi = log(l$phi),
logsigma2 = log(l$sigma2),
logtau = log(l$tau),
logdelta = l$logdelta,
logsigma2_t = log(l$sigma2_t),
frho_t = transform_minus_one_to_one_inverse(l$rho_t),
logsigma2fs = log(l$sigma2fs),
random_effects = c(as.vector(l$eta), if(object@include_fs) as.vector(l$xi_O))
))
}
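## A quick sketch showing that transform_minus_one_to_one_inverse() above is
## atanh(), the inverse of the (-1, 1)-valued transform applied to rho_t.
if (FALSE) {
f_inv <- function(x) -0.5 * log(2 / (x + 1) - 1)
all.equal(f_inv(0.3), atanh(0.3))  # TRUE
all.equal(tanh(f_inv(0.3)), 0.3)   # TRUE
}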
.TMB_initialise_MAP_estimates <- function(l, object, S_O, r, Y_O, X_O) {
regularising_weight <- if (!is.null(l$sigma2fs)) l$sigma2fs else l$sigma2[1]
if (regularising_weight == 0) {
warning("In initialisation stage, the regularising_weight is 0; setting it to 1. This is probably not an issue, but feel free to contact the package maintainer.")
regularising_weight <- 1
}
QInit <- .sparse_Q_block_diag(object@basis@df,
kappa = l$sigma2,
rho = l$tau)$Q
mat <- Matrix::t(S_O) %*% S_O / regularising_weight + QInit
mat_inv <- tryCatch(expr = {
if (r > 4000) {
mat_L <- sparseinv::cholPermute(Q = mat)
sparseinv::Takahashi_Davis(Q = mat, cholQp = mat_L$Qpermchol, P = mat_L$P)
} else {
solve(mat)
}},
error = function(w){
cat("Initialisation phase: could not invert mat, just using diag(r)\n")
diag(r)
}
)
l$eta <- (1 / regularising_weight) * mat_inv %*% Matrix::t(S_O) %*% (Y_O - X_O %*% l$alpha)
l$xi_O <- Y_O - X_O %*% l$alpha - S_O %*% l$eta
l$sigma2fs <- var(as.vector(l$xi_O))
return(l)
}
.compute_Y_O <- function(object, C_O) {
Z <- as.vector(object@Z)
k_Z <- object@k_Z
k_BAU_O <- object@k_BAU_O
if (object@response %in% c("binomial", "negative-binomial") & object@link %in% c("logit", "probit", "cloglog")) {
f <- .link_fn(kind = "prob_to_Y", link = object@link)
h <- .link_fn(kind = "mu_to_prob", response = object@response)
} else {
g <- .link_fn(kind = "mu_to_Y", link = object@link)
}
Z0 <- Z
if (object@link %in% c("log", "sqrt")) {
Z0[Z <= 0] <- 0.1
} else if (object@response == "negative-binomial" & object@link %in% c("logit", "probit", "cloglog")) {
Z0[Z == 0] <- 0.1
} else if (object@response == "binomial" & object@link %in% c("logit", "probit", "cloglog")) {
Z0 <- Z + 0.1 * (Z == 0) - 0.1 * (Z == k_Z)
} else if (object@link %in% c("inverse-squared", "inverse")) {
Z0[Z == 0] <- 0.05
}
mu_Z <- Z0
mu_O <- .compute_mu_O(object, C_O, mu_Z)
k_BAU_O[k_BAU_O == 0] <- 1
mu_O <- mu_O + 0.05 * (mu_O == 0) - 0.05 * (mu_O == k_BAU_O)
if (object@response %in% c("binomial", "negative-binomial") & object@link %in% c("logit", "probit", "cloglog")) {
Y_O <- f(h(mu_O, k_BAU_O))
} else if (object@response == "negative-binomial" & object@link %in% c("log", "sqrt")) {
Y_O <- g(mu_O / k_BAU_O)
} else {
Y_O <- g(mu_O)
}
return(Y_O)
}
.compute_mu_O <- function(object, C_O, mu_Z) {
m <- nrow(C_O)
mstar <- ncol(C_O)
obs_BAUs_df <- object@BAUs@data[object@obsidx, ]
mu_O <- vector(mode = "list", length = mstar)
for (Bj in 1:m) {
w_j <- C_O[Bj, ]
idx <- which(w_j > 0)
if (object@response %in% c("binomial", "negative-binomial")) {
w_j <- obs_BAUs_df$k_BAU[idx]
} else {
w_j <- w_j[idx]
}
mu_O_j <- w_j / sum(w_j) * mu_Z[Bj]
for (i in 1:length(idx)) {
mu_O[[idx[i]]] <- c(mu_O[[idx[i]]], mu_O_j[i])
}
}
if (object@response %in% c("binomial", "negative-binomial")) {
mu_O <- sapply(mu_O, min)
} else {
mu_O <- sapply(mu_O, mean)
}
return(mu_O)
}
.TMB_data_prep <- function (object, sigma2fs_hat, K_type, taper, C_O, X_O, S_O) {
cat("Preparing data for TMB...\n")
obsidx <- observed_BAUs(object)
sigma2e <- if (object@response == "gaussian") diag(object@Ve) else -1
data <- list(Z = as.vector(object@Z),
X_O = X_O, S_O = S_O, C_O = C_O,
K_type = K_type, response = object@response, link = object@link,
k_BAU_O = object@k_BAU_O, k_Z = object@k_Z,
temporal = as.integer(is(object@basis,"TensorP_Basis")),
fs_by_spatial_BAU = object@fs_by_spatial_BAU, sigma2e = sigma2e,
BAUs_fs = object@BAUs$fs[obsidx])
ns <- dim(object@BAUs)[1]
if (data$temporal) {
spatial_dist_matrix <- object@D_basis[[1]]
spatial_basis <- object@basis@Basis1
data$r_t <- object@basis@Basis2@n
} else {
spatial_dist_matrix <- object@D_basis
spatial_basis <- object@basis
data$r_t <- 1
}
data$spatial_BAU_id <- (obsidx - 1) %% ns
data$r_si <- as.vector(table(spatial_basis@df$res))
data$beta <- data$nnz <- data$row_indices <- data$col_indices <-
data$x <- data$n_r <- data$n_c <- -1
if (K_type %in% c("block-exponential", "precision-block-exponential")) {
tmp <- .cov_tap(spatial_dist_matrix, taper = taper)
data$beta <- tmp$beta
R <- as(tmp$D_tap, "dgTMatrix")
data$nnz <- tmp$nnz
data$row_indices <- R@i
data$col_indices <- R@j
data$x <- R@x
} else if (K_type == "neighbour") {
tmp <- .sparse_Q_block_diag(spatial_basis@df, kappa = 0, rho = 1)
R <- as(tmp$Q, "dgTMatrix")
data$nnz <- tmp$nnz
data$row_indices <- R@i
data$col_indices <- R@j
data$x <- R@x
} else if (K_type == "separable") {
for (i in unique(spatial_basis@df$res)) {
tmp <- spatial_basis@df[spatial_basis@df$res == i, ]
data$n_r[i] <- length(unique(tmp$loc1))
data$n_c[i] <- length(unique(tmp$loc2))
}
}
data$sigma2fs_hat <- sigma2fs_hat
if (!any(tabulate(object@Cmat@i + 1) == 1)) {
cat("There no observations are associated with a single BAU (i.e., all observations are associated with multiple BAUs). This makes the fine-scale variance parameter very difficult to estimate, so we will estimate it offline and fix for the remainder of model fitting; this estimate may be inaccurate.\n")
data$fix_sigma2fs <- as.integer(1)
if (object@fs_by_spatial_BAU)
stop("We do not allow each spatial BAU to have its own fine-scale variance parameter when no observations are associated with a single BAU (i.e., all observations are associated with multiple BAUs).")
} else {
data$fix_sigma2fs <- as.integer(0)
if (!all(tabulate(object@Cmat@i + 1) == 1))
cat("Some (but not all) observations are associated with multiple BAUs. Estimation of the fine-scale variance parameter will be done using TMB, but there should be a reasonable number of observations associated with a single BAU so that the fine-scale variance parameter can be estimated accurately.\n")
}
data$include_fs <- as.integer(object@include_fs)
return(data)
} |
n <- 100; p1 <- 15; p2 <- 10
maxcancor <- 0.9
set.seed(0)
perm1 <- sample(1:p1, size = p1)
Sigma1 <- autocor(p1, 0.7)[perm1, perm1]
blockind <- sample(1:3, size = p2, replace = TRUE)
Sigma2 <- blockcor(blockind, 0.7)
mu <- rbinom(p1+p2, 1, 0.5)
trueidx1 <- c(rep(1, 3), rep(0, p1-3))
trueidx2 <- c(rep(1, 2), rep(0, p2-2))
simdata <- GenerateData(n=n, trueidx1 = trueidx1, trueidx2 = trueidx2, maxcancor = maxcancor,
Sigma1 = Sigma1, Sigma2 = Sigma2,
copula1 = "exp", copula2 = "cube",
muZ = mu,
type1 = "trunc", type2 = "trunc",
c1 = rep(1, p1), c2 = rep(0, p2)
)
X1 <- simdata$X1
X2 <- simdata$X2
range(colMeans(X1 == 0))
range(colMeans(X2 == 0)) |