# Mean element-wise ratio of k_hyd(x) to k_hyd(y), ignoring NAs.
k_rel <- function(x, y) {
  rel <- k_hyd(x) / k_hyd(y)
  toRet <- mean(rel, na.rm = TRUE)
  return(toRet)
} |
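# --- Hedged usage sketch for k_rel() above. k_hyd() is not defined in this
# snippet, so the toy stand-in below is purely hypothetical and only makes the
# example runnable; k_rel() averages the element-wise ratio of the two results.
k_hyd <- function(v) 0.1 * v                  # hypothetical placeholder
k_rel(c(1, 2, 4), c(2, 2, 2))                 # mean(c(0.5, 1, 2)) = 1.1667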
print.tbl <- function(x, width = NULL, ..., n = NULL, max_extra_cols = NULL,
max_footer_lines = NULL) {
print_tbl(
x, width, ...,
n = n, max_extra_cols = max_extra_cols, max_footer_lines = max_footer_lines
)
}
print_tbl <- function(x, width = NULL, ...,
n_extra = NULL,
n = NULL, max_extra_cols = NULL, max_footer_lines = NULL) {
if (!is.null(n_extra)) {
deprecate_soft(
"1.6.2", "pillar::print(n_extra = )", "pillar::print(max_extra_cols = )",
user_env = caller_env(2)
)
if (is.null(max_extra_cols)) {
max_extra_cols <- n_extra
}
}
writeLines(format(
x,
width = width, ...,
n = n, max_extra_cols = max_extra_cols, max_footer_lines = max_footer_lines
))
invisible(x)
}
format.tbl <- function(x, width = NULL, ...,
n = NULL, max_extra_cols = NULL, max_footer_lines = NULL) {
format_tbl(
x, width, ...,
n = n, max_extra_cols = max_extra_cols, max_footer_lines = max_footer_lines
)
}
format_tbl <- function(x, width = NULL, ...,
n_extra = NULL,
n = NULL, max_extra_cols = NULL, max_footer_lines = NULL) {
check_dots_empty(action = signal)
if (!is.null(n_extra)) {
deprecate_soft(
"1.6.2", "pillar::format(n_extra = )", "pillar::format(max_extra_cols = )",
user_env = caller_env(2)
)
if (is.null(max_extra_cols)) {
max_extra_cols <- n_extra
}
}
force(x)
num_colors(forget = TRUE)
setup <- tbl_format_setup(x,
width = width, ...,
n = n,
max_extra_cols = max_extra_cols,
max_footer_lines = max_footer_lines
)
header <- tbl_format_header(x, setup)
body <- tbl_format_body(x, setup)
footer <- tbl_format_footer(x, setup)
c(header, body, footer)
}
format_comment <- function(x, width) {
if (length(x) == 0L) {
return(character())
}
  map_chr(x, wrap, prefix = "# ", width = width)
}
NBSP <- "\U00A0"
wrap <- function(..., indent = 0, prefix = "", width) {
x <- paste0(..., collapse = "")
wrapped <- strwrap2(x, width - get_extent(prefix), indent)
wrapped <- paste0(prefix, wrapped)
wrapped <- gsub(NBSP, " ", wrapped)
paste0(wrapped, collapse = "\n")
}
strwrap2 <- function(x, width, indent) {
fansi::strwrap_ctl(x, width = max(width, 0), indent = indent, exdent = indent + 2)
} |
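# --- Hedged usage sketch for strwrap2() above (requires the fansi package).
# Wrap a footer-style comment to 30 characters with a 2-character hanging
# indent; wrap()/format_comment() build on this plus pillar-internal helpers
# (get_extent, map_chr) that are not shown here.
strwrap2("Groups are kept together when wrapping long footer comments.",
         width = 30, indent = 0)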
bosplot1 <- function(object){
m <- object@m
nb.V <- length(unique(object@zr))
gammas <- rep(0,nb.V)
for(i in 1:nb.V){
gammas[i] <- length(which(object@zr==i))/length(object@zr)
}
if(object@name == "ClassifM"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,1:ncol(object@xhat[[id]])]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='classification',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Classif"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,
sort(object@zc[[id]],index.return=TRUE)$ix]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='classification',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
nb.W <- length(unique(object@zc[[id]]))
sum.rho <- 0
if(nb.W!=1){
for(i in 1:(nb.W-1)){
rho <- length(which(object@zc[[id]]==i))/length(object@zc[[id]])
sum.rho <- sum.rho + rho
abline(v=sum.rho,lwd=3, col="red")
}
}
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Coclust"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,
sort(object@zc[[id]],index.return=TRUE)$ix]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='co-clustering',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
nb.W <- length(unique(object@zc[[id]]))
sum.rho <- 0
if(nb.W!=1){
for(i in 1:(nb.W-1)){
rho <- length(which(object@zc[[id]]==i))/length(object@zc[[id]])
sum.rho <- sum.rho + rho
abline(v=sum.rho,lwd=3, col="red")
}
}
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Clust"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,1:ncol(object@xhat[[id]])]
image(t(tmp),xaxt='n',yaxt='n', main='clustering',cex.main=1.5,col = gray(m[id]:1/m[id]))
par(xpd=TRUE, mar=c(2,1,4,7))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
}
bosplot <- function(object){
par(xpd=TRUE, mar=c(2,1,4,7))
par(xpd=FALSE)
m <- object@m
nb.V <- length(unique(object@zr))
gammas <- rep(0,nb.V)
for(i in 1:nb.V){
gammas[i] <- length(which(object@zr==i))/length(object@zr)
}
if(object@name == "ClassifM"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,1:ncol(object@xhat[[id]])]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='classification',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Classif"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,
sort(object@zc[[id]],index.return=TRUE)$ix]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='classification',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
nb.W <- length(unique(object@zc[[id]]))
sum.rho <- 0
if(nb.W!=1){
for(i in 1:(nb.W-1)){
rho <- length(which(object@zc[[id]]==i))/length(object@zc[[id]])
sum.rho <- sum.rho + rho
abline(v=sum.rho,lwd=3, col="red")
}
}
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Coclust"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,
sort(object@zc[[id]],index.return=TRUE)$ix]
par(xpd=TRUE, mar=c(2,1,4,7))
image(t(tmp),xaxt='n',yaxt='n', main='co-clustering',cex.main=1.5,col = gray(m[id]:1/m[id]))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
nb.W <- length(unique(object@zc[[id]]))
sum.rho <- 0
if(nb.W!=1){
for(i in 1:(nb.W-1)){
rho <- length(which(object@zc[[id]]==i))/length(object@zc[[id]])
sum.rho <- sum.rho + rho
abline(v=sum.rho,lwd=3, col="red")
}
}
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
if(object@name == "Clust"){
D <- length(m)
par(mfrow=c(1,D))
for(id in 1:D){
tmp <- object@xhat[[id]][sort(object@zr,index.return=TRUE)$ix,1:ncol(object@xhat[[id]])]
image(t(tmp),xaxt='n',yaxt='n', main='clustering',cex.main=1.5,col = gray(m[id]:1/m[id]))
par(xpd=TRUE, mar=c(2,1,4,7))
legend(x=1.01,y=0.65,legend = 1:m[id],col = gray(m[id]:1/m[id]),pch=15,bty="n",cex=1)
par(xpd=FALSE)
sum.gammas <- 0
for(i in 1:(nb.V-1)){
sum.gammas <- sum.gammas + gammas[i]
abline(h=sum.gammas,lwd=3, col="red")
}
}
}
} |
fmgamma <- function(x, M, pvector = NULL, std.err = TRUE,
method = "BFGS", control = list(maxit = 10000), finitelik = TRUE, ...) {
call <- match.call()
check.n(M)
if (M == 1) stop("use fitdistr instead")
np = 3*M
check.quant(x, allowna = TRUE, allowinf = TRUE)
check.logic(std.err)
check.optim(method)
check.control(control)
check.logic(finitelik)
if (any(!is.finite(x))) {
warning("non-finite cases have been removed")
x = x[is.finite(x)]
}
if (any(x <= 0)) {
warning("non-positive values have been removed")
x = x[x > 0]
}
check.quant(x)
n = length(x)
if (is.null(pvector)) {
if (is.unsorted(x)) {
x = sort(x)
} else {
if (x[1] > x[length(x)])
x = rev(x)
}
nM = rmultinom(1, n, rep(1/M, M))
i = 1
maxi = 10
while (any(nM <= 2) & (i < maxi)) {
nM = rmultinom(1, n, rep(1/M, M))
i = i + 1
}
if (i == maxi) stop("sample is too small; a user-specified initial value (pvector) is needed")
approxmle = function(x) {
s = log(mean(x)) - mean(log(x))
k = (3 - s + sqrt((s - 3) ^ 2 + 24 * s))/12/s
c(k, mean(x)/k)
}
mgparam = simplify2array(tapply(x, INDEX = rep(1:M, times = nM), FUN = approxmle, simplify=TRUE))
pvector = c(mgparam[1,], mgparam[2,], nM[-M]/n)
}
if (length(pvector) != (np - 1))
stop(paste("initial parameter vector must be of length", np - 1))
check.nparam(pvector, nparam = np - 1, allownull = FALSE)
if ((method == "L-BFGS-B") | (method == "BFGS")) finitelik = TRUE
maxit = 1000
abstol = 1e-8
mgshape = pvector[1:M]
mgscale = pvector[(M + 1):(2*M)]
mgweight = pvector[(2*M + 1):(3*M - 1)]
mgweight = c(mgweight, 1 - sum(mgweight))
mui = mgshape * mgscale
if (any(diff(mui) <= 0))
stop("initial parameter vector does not satisfy constraint of strictly increasing component means")
nllh = rep(NA, maxit + 1)
nllh[1] = 0
EMresults = as.data.frame(matrix(NA, nrow = maxit, ncol = 3*M + 3))
names(EMresults) = c("nllh", paste("mgshape", 1:M, sep = ""), paste("mgscale", 1:M, sep = ""),
paste("mgweight", 1:M, sep = ""), "nllh.cond.weights", "conv.cond.weights")
i = 1
nllh[i + 1] = nlmgamma(pvector, x, M)
if (is.infinite(nllh[i + 1])) stop("initial parameter values are invalid")
EMresults[i,] = c(nllh[i + 1], mgshape, mgscale, mgweight, NA, NA)
while ((abs(nllh[i + 1] - nllh[i]) > abstol) & (i < maxit)) {
i = i + 1
Mshape = matrix(mgshape, nrow = n, ncol = M, byrow = TRUE)
Mscale = matrix(mgscale, nrow = n, ncol = M, byrow = TRUE)
Mprob = t(mgweight * t(dgamma(x, Mshape, scale = Mscale)))
tau = Mprob / rowSums(Mprob)
mgweight = colMeans(tau)
fit = try(optim(par = c(mgshape, mgscale), fn = nlEMmgamma,
tau = tau, mgweight = mgweight, x = x, M = M,
finitelik = finitelik, method = method, control = control, hessian = TRUE, ...))
if (inherits(fit, "try-error")) stop("Maximisation step failed, try new initial values")
conv = TRUE
if ((fit$convergence != 0) | (abs(fit$value) >= 1e6)) {
conv = FALSE
warning("check convergence")
}
mgshape = fit$par[1:M]
mgscale = fit$par[(M + 1):(2*M)]
nllh[i + 1] = nlmgamma(c(mgshape, mgscale, mgweight[-M]), x, M)
EMresults[i,] = c(nllh[i], mgshape, mgscale, mgweight, fit$value, fit$convergence)
}
if (i == maxit) {
warning("Maximum number of iterations reached; M may be too large or the starting values may be poor")
} else {
print(paste(i, "iterations of EM algorithm"))
}
EMresults = EMresults[1:i,]
if ((fit$convergence != 0) | any(fit$par == pvector[1:(2*M)]) |
any(mgweight[-M] == pvector[(2*M + 1):(3*M - 1)]) | (abs(fit$value) >= 1e6)) {
conv = FALSE
warning("check convergence")
}
if (std.err) {
qrhess = qr(fit$hessian)
if (qrhess$rank != ncol(qrhess$qr)) {
warning("observed information matrix is singular")
se = NULL
invhess = NULL
} else {
invhess = solve(qrhess)
vars = diag(invhess)
if (any(vars <= 0)) {
warning("observed information matrix is singular")
invhess = NULL
se = NULL
} else {
se = sqrt(vars)
}
}
} else {
invhess = NULL
se = NULL
}
list(call = call, x = as.vector(x), init = as.vector(pvector),
optim = fit, conv = conv, cov = invhess, mle = fit$par, se = se,
nllh = nllh[i + 1], n = n, M = M, mgshape = mgshape, mgscale = mgscale, mgweight = mgweight,
EMresults = EMresults, posterior = tau)
}
lmgamma <- function(x, mgshape, mgscale, mgweight, log = TRUE) {
check.quant(x, allowna = TRUE, allowinf = TRUE)
check.logic(log)
if (is.list(mgshape)) mgshape = unlist(mgshape)
if (is.list(mgscale)) mgscale = unlist(mgscale)
if (is.list(mgweight)) mgweight = unlist(mgweight)
check.param(mgshape, allowvec = TRUE)
check.param(mgscale, allowvec = TRUE)
check.param(mgweight, allowvec = TRUE)
M = check.inputn(c(length(mgshape), length(mgshape), length(mgweight)), allowscalar = TRUE)
if (any(!is.finite(x))) {
warning("non-finite cases have been removed")
x = x[is.finite(x)]
}
if (any(x <= 0)) {
warning("non-positive values have been removed")
x = x[x > 0]
}
check.quant(x)
n = length(x)
mui = mgshape * mgscale
if (any(mgscale <= 0) | any(mgshape <= 0) | any(diff(mui) <= 0) |
any(mgweight <= 0) | (sum(mgweight) > 1)) {
l = -Inf
} else {
l = sum(dmgamma(x, mgshape, mgscale, mgweight, log = TRUE))
}
if (!log) l = exp(l)
l
}
nlmgamma <- function(pvector, x, M, finitelik = FALSE) {
check.n(M)
if (M == 1) stop("use dgamma instead")
np = 3*M
check.nparam(pvector, nparam = np - 1)
check.quant(x, allowna = TRUE, allowinf = TRUE)
check.logic(finitelik)
mgshape = pvector[1:M]
mgscale = pvector[(M + 1):(2*M)]
mgweight = pvector[(2*M + 1):(3*M - 1)]
mgweight = c(mgweight, 1 - sum(mgweight))
nllh = -lmgamma(x, mgshape, mgscale, mgweight)
if (finitelik & is.infinite(nllh)) {
nllh = sign(nllh) * 1e6
}
nllh
}
nlEMmgamma <- function(pvector, tau, mgweight, x, M, finitelik = FALSE) {
check.n(M)
if (M == 1) stop("use dgamma instead")
np.noweight = 2*M
check.nparam(pvector, nparam = np.noweight)
check.nparam(mgweight, nparam = M)
check.prob(mgweight)
check.quant(x, allowna = TRUE, allowinf = TRUE)
check.logic(finitelik)
if (any(!is.finite(x))) {
warning("non-finite cases have been removed")
x = x[is.finite(x)]
}
if (any(x <= 0)) {
warning("non-positive values have been removed")
x = x[x > 0]
}
check.quant(x)
n = length(x)
if (!is.matrix(tau)) stop("Posterior probabilities (tau) must be nxM probability matrix")
if (any(dim(tau) != c(n, M))) stop("Posterior probabilities (tau) must be nxM probability matrix")
if (any((tau < 0) | (tau > 1))) stop("Posterior probabilities (tau) must be nxM probability matrix")
mgshape = pvector[1:M]
mgscale = pvector[(M + 1):(2*M)]
mui = mgshape * mgscale
if (any(mgscale <= 0) | any(mgshape <= 0) | any(diff(mui) <= 0)) {
nllh = Inf
} else {
Mshape = matrix(mgshape, nrow = n, ncol = M, byrow = TRUE)
Mscale = matrix(mgscale, nrow = n, ncol = M, byrow = TRUE)
nllh = -sum(tau * log(t(mgweight * t(dgamma(x, Mshape, scale = Mscale)))))
}
if (finitelik & is.infinite(nllh)) {
nllh = sign(nllh) * 1e6
}
nllh
} |
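# --- Standalone sketch of the closed-form gamma "approximate MLE" that
# fmgamma() above uses (per mixture component) to build a default pvector.
# This re-implementation only needs base R and mirrors the approxmle() helper.
approx_gamma_mle <- function(x) {
  s <- log(mean(x)) - mean(log(x))
  k <- (3 - s + sqrt((s - 3)^2 + 24 * s)) / (12 * s)
  c(shape = k, scale = mean(x) / k)
}
set.seed(42)
approx_gamma_mle(rgamma(500, shape = 2, scale = 3))  # roughly shape 2, scale 3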
get_afl_fixture <- function(season = NULL, round_number = NULL, comp = "AFLM") {
.Deprecated("fetch_fixture_afl")
fetch_fixture_afl(season, round_number, comp)
} |
geom_confidence_tern <- function(mapping = NULL, data = NULL, stat = "ConfidenceTern",position = "identity",
...,
lineend = "butt", linejoin = "round", linemitre = 1,
na.rm = FALSE, show.legend = NA, inherit.aes = TRUE) {
layer(
data = data,
mapping = mapping,
stat = stat,
geom = GeomConfidenceTern,
position = position,
show.legend = show.legend,
inherit.aes = inherit.aes,
params = list(
lineend = lineend,
linejoin = linejoin,
linemitre = linemitre,
na.rm = na.rm,
...
)
)
}
geom_confidence <- function(...){
tern_dep('1.0.6.1', "Deprecated due to package naming standards; replaced by geom_confidence_tern")
geom_confidence_tern(...)
}
GeomConfidenceTern <- ggproto(
"GeomConfidenceTern",
GeomPath,
  default_aes = aes(colour = "#3366FF")  # assumed value (ggplot2's standard path-line blue); the original aes() defaults were cut off here
) |
optimbase.hasbounds <- function(this=NULL) {
maxl <- length(this$boundsmax)
minl <- length(this$boundsmin)
hasbounds <- (maxl!=0 | minl!=0)
return(hasbounds)
} |
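# --- Hedged usage sketch for optimbase.hasbounds(): a plain list stands in
# for a full optimbase object, assuming only the boundsmin/boundsmax fields
# are consulted (as the body above suggests).
optimbase.hasbounds(list(boundsmin = c(0, 0), boundsmax = c(1, 1)))   # TRUE
optimbase.hasbounds(list(boundsmin = NULL, boundsmax = NULL))         # FALSE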
ParamHelpersParamSet <- function(session, paramset) {
getRequires <- function(depname) {
conds <- paramset$deps[get("id") == depname]
cond.expressions <- mapply(conditionAsExpression, conds$cond, conds$on)
Reduce(function(x, y) substitute(x && y, list(x = x, y = y)), cond.expressions)
}
data <- imap(paramset$params, function(param, pname) {
switch(param$class,
ParamLgl = list("makeLogicalParam", list(id = pname, requires = getRequires(pname))),
ParamInt = list("makeIntegerParam", list(id = pname, lower = param$lower, upper = param$upper, requires = getRequires(pname))),
ParamDbl = list("makeNumericParam", list(id = pname, lower = param$lower, upper = param$upper, requires = getRequires(pname))),
ParamFct = list("makeDiscreteParam", list(id = pname, values = param$levels, requires = getRequires(pname)))
)
})
encall(session, data, expr = {
ParamHelpers::makeParamSet(params = lapply(data, function(pcon) {
do.call(get(pcon[[1]], getNamespace("ParamHelpers"), mode = "function"), pcon[[2]])
}))
})
}
conditionAsExpression <- function(condition, on) {
UseMethod("conditionAsExpression")
}
conditionAsExpression.CondAnyOf <- function(condition, on) {
substitute(!is.na(on) & x %in% rhs, list(on = as.symbol(on), rhs = condition$rhs))
}
conditionAsExpression.CondEqual <- function(condition, on) {
substitute(!is.na(on) & x == rhs, list(on = as.symbol(on), rhs = condition$rhs))
} |
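# --- Hedged usage sketch for the conditionAsExpression() S3 generic above,
# using a hand-built stand-in for a paradox-style CondEqual condition
# (assumption: dispatch only needs the class attribute and the rhs field).
cond <- structure(list(rhs = "rbf"), class = "CondEqual")
conditionAsExpression(cond, on = "kernel")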
"pwm.beta2alpha" <-
function(pwm) {
nmom <- length(pwm)
nm1 <- nmom - 1
otherpwm <- vector(mode="numeric", length=nmom)
for(r in 0:nm1) {
otherpwm[r+1] <- sum(sapply(0:r, function(k) { return((-1)^k*choose(r,k)*pwm[k+1]) }))
}
names(otherpwm) <- sapply(0:nm1, function(k) {
return(gsub("$", k, "Alpha", perl=TRUE)) } )
return(otherpwm)
}
"pwm.alpha2beta" <-
function(pwm) {
beta <- pwm.beta2alpha(pwm)
names(beta) <- sapply(0:(length(beta)-1), function(k) {
return(gsub("$", k, "Beta", perl=TRUE)) } )
return(beta)
} |
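# --- Usage sketch for the PWM conversions above: the beta -> alpha map is its
# own inverse, so applying pwm.alpha2beta() to the result recovers the input
# (the numbers below are arbitrary illustrative values, not real moments).
betas <- c(15.0, 8.5, 6.2, 4.9)
(alphas <- pwm.beta2alpha(betas))
pwm.alpha2beta(alphas)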
setMethod(
f="InFlux_by_PoolName",
signature=c(
func='function'
,destinationName='character'
)
,def=function(func,destinationName){
new(
'InFlux_by_PoolName'
,destinationName=PoolName(destinationName)
,func=func
)
}
)
setMethod(
f="by_PoolIndex"
,signature=signature(
obj='InFlux_by_PoolName'
,poolNames='character'
,timeSymbol='character'
)
,definition=function(obj,poolNames,timeSymbol){
fl_by_index<-new(
"InFlux_by_PoolIndex"
,destinationIndex=PoolIndex(
obj@destinationName
,poolNames=poolNames
)
,func=by_PoolIndex(
obj@func
,poolNames=poolNames
,timeSymbol=timeSymbol
)
)
fl_by_index
}
) |
library(plumber)
knitr::opts_chunk$set(
collapse = FALSE,
  comment = "#>"
)
code_chunk <- function(output, language=""){
cat(paste0("```",language,"\n"))
cat(output)
cat("\n```\n")
}
json_serialize <- function(obj){
jsonlite::toJSON(obj, auto_unbox = FALSE, pretty = TRUE)
} |
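# --- Hedged usage sketch for the vignette helpers above: serialise an R
# object to pretty JSON and emit it as a fenced chunk (requires jsonlite,
# which plumber already depends on).
code_chunk(json_serialize(list(msg = "hello", n = 1:3)), language = "json")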
set.seed(0)
library(ReinforcementLearning)
data("tictactoe")
head(tictactoe, 5)
states <- c("s1", "s2", "s3", "s4")
actions <- c("up", "down", "left", "right")
env <- gridworldEnvironment
print(env)
data <- sampleExperience(N = 1000,
env = env,
states = states,
actions = actions)
head(data)
control <- list(alpha = 0.1, gamma = 0.5, epsilon = 0.1)
model <- ReinforcementLearning(data,
s = "State",
a = "Action",
r = "Reward",
s_new = "NextState",
control = control)
computePolicy(model)
print(model)
summary(model)
data_unseen <- data.frame(State = c("s1", "s2", "s1"),
stringsAsFactors = FALSE)
data_unseen$OptimalAction <- predict(model, data_unseen$State)
data_unseen
data_new <- sampleExperience(N = 1000,
env = env,
states = states,
actions = actions,
actionSelection = "epsilon-greedy",
model = model,
control = control)
model_new <- ReinforcementLearning(data_new,
s = "State",
a = "Action",
r = "Reward",
s_new = "NextState",
control = control,
model = model)
print(model_new)
plot(model_new)
cat("......X.B")
cat("| . | . | . |
|------------------|
| . | . | . |
|------------------|
| X | . | B |")
cat('.XXBB..XB XXBB.B.X. .XBB..BXX BXX...B.. ..XB..... XBXBXB...
"c1" "c5" "c5" "c4" "c5" "c9"')
cat("| . | X | X |
|------------------|
| B | B | . |
|------------------|
| . | X | B |")
cat("| c1 | c2 | c3 |
|---------------------|
| c4 | c5 | c6 |
|---------------------|
| c7 | c8 | c9 |") |
`mefaTables` <-
function(xtab, dframe, margin, index=NULL, drop.index=FALSE, xtab.fixed=TRUE)
{
if (margin != 1 && margin != 2)
stop("'margin' should be 1 or 2")
if (NCOL(dframe) == 1) {
dframe <- data.frame(x=dframe, copy=dframe)
onecol <- TRUE
} else onecol <- FALSE
if (margin == 1) {
rank.orig <- c(1:nrow(xtab))[order(rownames(xtab))]
nam.orig <- rownames(xtab)
xtab <- xtab[order(rownames(xtab)), ]
xnam <- rownames(xtab)
} else {
rank.orig <- c(1:ncol(xtab))[order(colnames(xtab))]
nam.orig <- colnames(xtab)
xtab <- xtab[, order(colnames(xtab))]
xnam <- colnames(xtab)}
if (!is.null(index))
rownames(dframe) <- dframe[, index]
dnam <- rownames(dframe)
if (xtab.fixed) {
dsub <- dframe[dnam %in% xnam, ]
dsub <- dsub[order(rownames(dsub)), ]
xsub <- xtab
rank.final <- rank.orig
} else {
int <- intersect(dnam, xnam)
dsub <- dframe[dnam %in% int, ]
dsub <- dsub[order(rownames(dsub)), ]
rank.final <- rank.orig[nam.orig %in% int]
xsub <- if (margin == 1)
xtab[xnam %in% int, ] else xtab[, xnam %in% int]
}
xsub <- if (margin == 1)
xsub[order(rank.final), ] else xsub[, order(rank.final)]
dsub <- dsub[order(rank.final), ]
nsub <- if (margin == 1)
rownames(xsub) else colnames(xsub)
if (!identical(rownames(dsub), nsub))
stop("names do not match")
if (!is.null(index) && drop.index)
dsub[, index] <- NULL
if (onecol)
dsub <- as.data.frame(x=dsub[,1])
return(list(xtab=xsub, dtab=dsub))
} |
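# --- Hedged usage sketch for mefaTables(): link a samples-by-species table to
# a sample attribute frame by row names (toy data; margin = 1 matches rows of
# xtab against rownames of dframe).
xt <- matrix(1:6, nrow = 3, dimnames = list(c("s1", "s2", "s3"), c("spA", "spB")))
df <- data.frame(habitat = c("forest", "meadow", "forest"),
                 row.names = c("s1", "s2", "s3"))
str(mefaTables(xt, df, margin = 1))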
context("nhl_url_standings")
testthat::test_that(
"nhl_url_standings with no params and expand",
testthat::expect_equal(
nhl_url_standings(),
paste0(baseurl, "standings")
)
)
testthat::test_that(
"nhl_url_standings with seasons",
testthat::expect_equal(
nhl_url_standings(seasons = 2015:2016),
paste0(baseurl, "standings?", c("season=20152016", "season=20162017"))
)
)
testthat::test_that(
"nhl_url_standings with seasons and expand",
testthat::expect_equal(
nhl_url_standings(seasons = 2015:2016, expand = "standings.record"),
paste0(
baseurl, "standings?",
c("season=20152016", "season=20162017"),
"&expand=standings.record"
)
)
)
testthat::test_that(
"nhl_url_standings with seasons and expand",
testthat::expect_equal(
nhl_url_standings(
seasons = 2015:2016,
standingsTypes = c("regularSeason", "byDivision"),
expand = "standings.record"
),
paste0(
baseurl, "standings?", c(
"season=20152016&standingsType=regularSeason",
"season=20162017&standingsType=regularSeason",
"season=20152016&standingsType=byDivision",
"season=20162017&standingsType=byDivision"
),
"&expand=standings.record"
)
)
) |
library(DendSer)
library(idendr0)
d <- dist(scale(iris[, -5]))
h <- hclust(d)
PC1 <- prcomp(iris[,-5], scale = TRUE)$x[, 1]
iris.with.pc1 <- cbind(iris, PC1)
cat('original dendrogram\n')
idendro(h, iris.with.pc1)
h$order <- dser(h, PC1, cost = costLS)
cat('reordered dendrogram\n')
idendro(h, iris.with.pc1) |
donothing <- function() {} |
ncaa_baseball_pbp <- function(game_info_url) {
payload <- game_info_url %>%
xml2::read_html() %>%
rvest::html_elements("
rvest::html_attr("href") %>%
as.data.frame() %>%
dplyr::rename(pbp_url_slug = .data$`.`) %>%
dplyr::mutate(pbp_url = paste0("https://stats.ncaa.org", .data$pbp_url_slug)) %>%
dplyr::pull(.data$pbp_url)
pbp_payload <- payload %>%
xml2::read_html()
game_info <- pbp_payload %>%
rvest::html_elements("table:nth-child(7)") %>%
rvest::html_table() %>%
as.data.frame() %>%
tidyr::spread(.data$X1, .data$X2)
game_info <- dplyr::rename_with(game_info,~gsub(":", "", .x)) %>%
janitor::clean_names() %>%
dplyr::mutate(game_date = substr(.data$game_date, 1, 10))
att <- !any(grepl("attendance", colnames(game_info)))  # TRUE when no attendance column was scraped
if (att) {
game_info$attendance <- NA
} else {
game_info <- game_info %>%
dplyr::mutate(attendance = as.numeric(gsub(",", "", .data$attendance)))
}
table_list <- pbp_payload %>%
rvest::html_elements("[class='mytable']")
condition <- table_list %>%
lapply(function(x) nrow(as.data.frame(x %>%
rvest::html_table())) > 3)
table_list_innings <- table_list[which(unlist(condition))]
table_list_innings <- table_list_innings %>%
setNames(seq(1,length(table_list_innings)))
teams <- tibble::tibble(away = (table_list_innings[[1]] %>%
rvest::html_table() %>%
as.data.frame())[1,1],
home = (table_list_innings[[1]] %>%
rvest::html_table() %>%
as.data.frame())[1,3])
mapped_table <- purrr::map(.x = table_list_innings,
~format_baseball_pbp_tables(.x, teams = teams)) %>%
dplyr::bind_rows(.id = "inning")
mapped_table[1,2] <- ifelse(mapped_table[1,2] == "",
"0-0", mapped_table[1,2])
mapped_table <- mapped_table %>%
dplyr::mutate(score = ifelse(.data$score == "", NA, .data$score)) %>%
tidyr::fill(.data$score, .direction = "down")
mapped_table <- mapped_table %>%
dplyr::mutate(
inning_top_bot = ifelse(teams$away == .data$batting, "top", "bot"),
attendance = game_info$attendance,
date = game_info$game_date,
location = game_info$location) %>%
dplyr::select(
.data$date, .data$location, .data$attendance,
.data$inning, .data$inning_top_bot, dplyr::everything())
return(mapped_table)
}
get_ncaa_baseball_pbp <- ncaa_baseball_pbp
format_baseball_pbp_tables <- function(table_node, teams) {
table <- (table_node %>%
rvest::html_table() %>%
as.data.frame() %>%
dplyr::filter(!grepl(pattern = "R:", x = .data$X1)) %>%
dplyr::mutate(batting = ifelse(.data$X1 != "", teams$away, teams$home)) %>%
dplyr::mutate(fielding = ifelse(.data$X1 != "", teams$home, teams$away)))[-1,] %>%
tidyr::gather(key = "X1", value = "value", -c("batting", "fielding", "X2")) %>%
dplyr::rename(score = .data$X2) %>%
dplyr::filter(.data$value != "")
table <- table %>%
dplyr::rename(description = .data$value) %>%
dplyr::select(-.data$X1)
return(table)
} |
nrel_params <- function(status = NULL, access = NULL, fuel_type = NULL,
cards_accepted = NULL, owner_type = NULL,
federal_agency = NULL, cng_fill_type = NULL,
cng_psi = NULL, cng_vehicle_class = NULL,
e85_has_blender_pump = NULL, ev_network = NULL,
ev_charging_level = NULL, ev_connector_type = NULL,
ev_connector_type_operator = NULL, lng_vehicle_class = NULL,
state = NULL, zip = NULL, country = NULL, limit = NULL,
location = NULL, latitude = NULL, longitude = NULL, radius = NULL) {
params = as.list(match.call())
params[[1]] <- NULL
return(params)
}
altfuel_api <- function(api_key,
endpoint,
params = list(NULL)) {
nrel_url <- "https://developer.nrel.gov"
params <- update_params(params, list(api_key = api_key))
endpoint <- paste(endpoint, "json", sep = ".")
q_url <- httr::modify_url(nrel_url,
path = endpoint,
query = params)
ua <- httr::user_agent("https://github.com/burch-cm/altfuelr")
res <- httr::GET(q_url, ua)
if (httr::http_type(res) != "application/json") {
stop("API did not return json", call. = FALSE)
}
parsed <- jsonlite::fromJSON(httr::content(res, "text"), simplifyVector = TRUE)
if (httr::status_code(res) != 200) {
stop(
sprintf(
"NREL API request failed [%s]\n%s\n<%s>",
httr::status_code(res),
parsed$message,
parsed$documentation_url
),
call. = FALSE
)
}
iro <- structure(
list(
content = parsed,
parameters = params,
response = res
),
class = "nrel_api"
)
return(iro)
}
stations <- function(x) {
if (class(x) != "nrel_api") {
stop(
sprintf(
"Object x must be of class nrel_api, not %s",
class(x)
),
call. = FALSE
)
}
if (!("content" %in% names(x))) {
stop(
print("Content not found; object is improperly formatted.")
)
}
if (names(x$content)[1] == "alt_fuel_station") {
agency <- data.frame(federal_agency_id = x$content$alt_fuel_station$federal_agency$id,
federal_agency_code = x$content$alt_fuel_station$federal_agency$code,
federal_agency_name = x$content$alt_fuel_station$federal_agency$name)
x$content$alt_fuel_station$federal_agency <- NULL
x.df <- cbind(purrr::map_df(x$content$alt_fuel_station, rbind), agency)
return(x.df)
} else {
return(x$content$fuel_stations)
}
}
count_results <- function(x) {
if (class(x) != "nrel_api") {
stop(
sprintf(
"Object x must be of class nrel_api, not %s",
class(x)
),
call. = FALSE
)
}
if (!("station_counts" %in% names(x$content))) {
stop(
print("This object does not contain station counts.")
)
}
n_stations <- unlist(x$content$station_counts$fuels)
fuel_type <- stringr::str_replace_all(names(n_stations), "\\.", "_") %>%
stringr::str_replace("ELEC_total", "ELEC_outlets") %>%
stringr::str_remove("_stations") %>%
stringr::str_replace("_total", "_stations")
df <- dplyr::tibble(fuel_type, n_stations)
return(df)
}
all_stations <- function(api_key, params = nrel_params(limit = "all")) {
altfuel_api(api_key, endpoint = "/api/alt-fuel-stations/v1", params)
}
station_by_id <- function(api_key, station_id) {
id_endpoint = paste0("/api/alt-fuel-stations/v1/", station_id)
altfuel_api(api_key, endpoint = id_endpoint, params = nrel_params(limit = 1))
}
last_updated <- function(api_key) {
x <- altfuel_api(api_key, endpoint = "/api/alt-fuel-stations/v1/last-updated")
lubridate::ymd_hms(x$content$last_updated)
}
nearest_stations <- function(api_key, location = NULL, latitude = NULL, longitude = NULL,
radius = NULL, params = nrel_params(radius = 'infinite')) {
.params <- update_params(params, list(location = location,
latitude = latitude,
longitude = longitude,
radius = radius))
altfuel_api(api_key, endpoint = "/api/alt-fuel-stations/v1/nearest", params = .params)
}
alt_fuel_near <- function(api_key, location = NULL, miles = 5, fuel_type = NULL) {
q_params <- nrel_params(fuel_type = fuel_type)
stns <- nearest_stations(api_key, location, radius = miles)
if (stns$content$total_results < 1) return (FALSE)
n <- stns %>%
stations() %>%
dplyr::rename("fuel_type" = "fuel_type_code") %>%
dplyr::group_by(fuel_type) %>%
dplyr::count()
if (!is.null(fuel_type)) n <- dplyr::filter(n, .data$fuel_type == .env$fuel_type)
n <-
dplyr::pull(n) %>%
sum(na.rm = TRUE)
if (n > 0) {
return (TRUE)
} else {
return (FALSE)
}
} |
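# --- Hedged usage sketch for nrel_params(): it just captures its named
# arguments, so the result can be inspected (or passed to altfuel_api()) with
# no API key and no network access.
str(nrel_params(state = "CO", fuel_type = "ELEC", limit = 5))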
bounding_wkt <- function(min_x, min_y, max_x, max_y, values = NULL) {
if (is.null(values)) {
return(bounding_wkt_points(min_x, max_x, min_y, max_y))
}
if (is.list(values)) {
return(bounding_wkt_list(values))
}
if (is.vector(values) && length(values) == 4) {
return(bounding_wkt_list(list(values)))
}
stop("values must be NULL, a list or a length-4 vector")
} |
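# --- bounding_wkt() above delegates to compiled helpers (bounding_wkt_points /
# bounding_wkt_list) that are not shown here. A minimal pure-R sketch of the
# kind of string the point form is expected to produce, assuming a closed
# counter-clockwise POLYGON ring:
bbox_wkt <- function(min_x, min_y, max_x, max_y) {
  sprintf("POLYGON((%1$s %2$s,%3$s %2$s,%3$s %4$s,%1$s %4$s,%1$s %2$s))",
          min_x, min_y, max_x, max_y)
}
bbox_wkt(10, 12, 14, 16)  # "POLYGON((10 12,14 12,14 16,10 16,10 12))"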
cuninormlike <- function(parm, nXvar, nuZUvar, nvZVvar, uHvar,
vHvar, Yvar, Xvar, S) {
beta <- parm[1:(nXvar)]
delta <- parm[(nXvar + 1):(nXvar + nuZUvar)]
phi <- parm[(nXvar + nuZUvar + 1):(nXvar + nuZUvar + nvZVvar)]
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- Yvar - as.numeric(crossprod(matrix(beta), t(Xvar)))
ll <- -Wu + log(pnorm((exp(Wu) + S * epsilon)/exp(Wv/2)) -
pnorm(S * epsilon/exp(Wv/2)))
return(ll)
}
cstuninorm <- function(olsObj, epsiRes, S, nuZUvar, uHvar, nvZVvar,
vHvar) {
m2 <- moment(epsiRes, order = 2)
m4 <- moment(epsiRes, order = 4)
if ((m2^2 - m4) < 0) {
theta <- (abs(120 * (3 * m2^2 - m4)))^(1/4)
varu <- theta^2/12
} else {
theta <- (120 * (3 * m2^2 - m4))^(1/4)
varu <- theta^2/12
}
if ((m2 - varu) < 0) {
varv <- abs(m2 - varu)
} else {
varv <- m2 - varu
}
dep_u <- 1/2 * log(((epsiRes^2 - varv) * 12)^2) * 1/2
dep_v <- 1/2 * log((epsiRes^2 - varu)^2)
reg_hetu <- if (nuZUvar == 1) {
lm(log(varu) ~ 1)
} else {
lm(dep_u ~ ., data = as.data.frame(uHvar[, 2:nuZUvar]))
}
if (any(is.na(reg_hetu$coefficients)))
stop("At least one of the OLS coefficients of 'uhet' is NA: ",
paste(colnames(uHvar)[is.na(reg_hetu$coefficients)],
collapse = ", "), ". This may be caused by a singular matrix arising from perfect multicollinearity.",
call. = FALSE)
reg_hetv <- if (nvZVvar == 1) {
lm(log(varv) ~ 1)
} else {
lm(dep_v ~ ., data = as.data.frame(vHvar[, 2:nvZVvar]))
}
if (any(is.na(reg_hetv$coefficients)))
stop("At least one of the OLS coefficients of 'vhet' is NA: ",
paste(colnames(vHvar)[is.na(reg_hetv$coefficients)],
collapse = ", "), ". This may be caused by a singular matrix arising from perfect multicollinearity.",
call. = FALSE)
delta <- coefficients(reg_hetu)
names(delta) <- paste0("Zu_", colnames(uHvar))
phi <- coefficients(reg_hetv)
names(phi) <- paste0("Zv_", colnames(vHvar))
if (names(olsObj)[1] == "(Intercept)") {
beta <- c(olsObj[1] + S * theta/2, olsObj[-1])
} else {
beta <- olsObj
}
return(c(beta, delta, phi))
}
cgraduninormlike <- function(parm, nXvar, nuZUvar, nvZVvar, uHvar,
vHvar, Yvar, Xvar, S) {
beta <- parm[1:(nXvar)]
delta <- parm[(nXvar + 1):(nXvar + nuZUvar)]
phi <- parm[(nXvar + nuZUvar + 1):(nXvar + nuZUvar + nvZVvar)]
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- Yvar - as.numeric(crossprod(matrix(beta), t(Xvar)))
epsiv <- S * (epsilon)/exp(Wv/2)
epsiu <- exp(Wu) + S * (epsilon)
epsiuv <- (epsiu)/exp(Wv/2)
depsiv <- dnorm(epsiv)
depsiuv <- dnorm(epsiuv)
pepsiv <- pnorm(epsiv)
pepsiuv <- pnorm(epsiuv)
sigx1 <- 0.5 * (S * depsiv * (epsilon)) - 0.5 * (depsiuv *
(epsiu))
sigx2 <- depsiv - depsiuv
sigx3 <- pepsiuv - pepsiv
sigx4 <- exp(Wv/2) * (sigx3)
depsiuvx2 <- depsiuv * exp(Wu)
gradll <- (cbind(sweep(Xvar, MARGIN = 1, STATS = S * (sigx2)/(sigx4),
FUN = "*"), sweep(uHvar, MARGIN = 1, STATS = (depsiuvx2/(sigx4) -
1), FUN = "*"), sweep(vHvar, MARGIN = 1, STATS = (sigx1)/(sigx4),
FUN = "*")))
return(gradll)
}
chessuninormlike <- function(parm, nXvar, nuZUvar, nvZVvar, uHvar,
vHvar, Yvar, Xvar, S) {
beta <- parm[1:(nXvar)]
delta <- parm[(nXvar + 1):(nXvar + nuZUvar)]
phi <- parm[(nXvar + nuZUvar + 1):(nXvar + nuZUvar + nvZVvar)]
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- Yvar - as.numeric(crossprod(matrix(beta), t(Xvar)))
epsiv <- S * (epsilon)/exp(Wv/2)
epsiu <- exp(Wu) + S * (epsilon)
epsiuv <- (epsiu)/exp(Wv/2)
depsiv <- dnorm(epsiv)
depsiuv <- dnorm(epsiuv)
pepsiv <- pnorm(epsiv)
pepsiuv <- pnorm(epsiuv)
sigx1 <- 0.5 * (S * depsiv * (epsilon)) - 0.5 * (depsiuv *
(epsiu))
sigx2 <- depsiv - depsiuv
sigx3 <- pepsiuv - pepsiv
sigx4 <- exp(Wv/2) * (sigx3)
depsiuvx2 <- depsiuv * exp(Wu)
sigx5 <- exp(Wv/2)^3 * (sigx3)
hessll <- matrix(nrow = nXvar + nuZUvar + nvZVvar, ncol = nXvar +
nuZUvar + nvZVvar)
hessll[1:nXvar, 1:nXvar] <- crossprod(sweep(Xvar, MARGIN = 1,
STATS = S^2 * ((S * depsiv * (epsilon) - depsiuv * (epsiu))/(sigx5) -
(sigx2)^2/(sigx4)^2), FUN = "*"), Xvar)
hessll[1:nXvar, (nXvar + 1):(nXvar + nuZUvar)] <- crossprod(sweep(Xvar,
MARGIN = 1, STATS = S * ((epsiu)/(sigx5) - (sigx2)/(sigx4)^2) *
depsiuvx2, FUN = "*"), uHvar)
hessll[1:nXvar, (nXvar + nuZUvar + 1):(nXvar + nuZUvar + nvZVvar)] <- crossprod(sweep(Xvar,
MARGIN = 1, STATS = S * ((0.5 * (depsiv * (S^2 * (epsilon)^2/exp(Wv/2)^2 -
1)) - 0.5 * (((epsiu)^2/exp(Wv/2)^2 - 1) * depsiuv))/(sigx4) -
(sigx1) * (sigx2)/(sigx4)^2), FUN = "*"), vHvar)
hessll[(nXvar + 1):(nXvar + nuZUvar), (nXvar + 1):(nXvar +
nuZUvar)] <- crossprod(sweep(uHvar, MARGIN = 1, STATS = ((1 -
exp(Wu) * epsiuv/exp(Wv/2))/(sigx4) - depsiuvx2/(sigx4)^2) *
depsiuvx2, FUN = "*"), uHvar)
hessll[(nXvar + 1):(nXvar + nuZUvar), (nXvar + nuZUvar + 1):(nXvar +
nuZUvar + nvZVvar)] <- crossprod(sweep(uHvar, MARGIN = 1,
STATS = -(((sigx1)/(sigx4)^2 + 0.5 * ((1 - epsiuv^2)/(sigx4))) *
depsiuvx2), FUN = "*"), vHvar)
hessll[(nXvar + nuZUvar + 1):(nXvar + nuZUvar + nvZVvar), (nXvar +
nuZUvar + 1):(nXvar + nuZUvar + nvZVvar)] <- crossprod(sweep(vHvar,
MARGIN = 1, STATS = ((0.25 * (S^3 * depsiv * (epsilon)^3) -
0.25 * (depsiuv * (epsiu)^3))/(sigx5) - (0.5 * (sigx4) +
sigx1) * (sigx1)/(sigx4)^2), FUN = "*"), vHvar)
hessll[lower.tri(hessll)] <- t(hessll)[lower.tri(hessll)]
return(hessll)
}
uninormAlgOpt <- function(start, olsParam, dataTable, S, nXvar,
uHvar, nuZUvar, vHvar, nvZVvar, Yvar, Xvar, method, printInfo,
itermax, stepmax, tol, gradtol, hessianType, qac) {
startVal <- if (!is.null(start))
start else cstuninorm(olsObj = olsParam, epsiRes = dataTable[["olsResiduals"]],
S = S, uHvar = uHvar, nuZUvar = nuZUvar, vHvar = vHvar,
nvZVvar = nvZVvar)
startLoglik <- sum(cuninormlike(startVal, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, uHvar = uHvar, vHvar = vHvar,
Yvar = Yvar, Xvar = Xvar, S = S))
if (method %in% c("bfgs", "bhhh", "nr", "nm")) {
maxRoutine <- switch(method, bfgs = function(...) maxBFGS(...),
bhhh = function(...) maxBHHH(...), nr = function(...) maxNR(...),
nm = function(...) maxNM(...))
method <- "maxLikAlgo"
}
mleObj <- switch(method, ucminf = ucminf(par = startVal,
fn = function(parm) -sum(cuninormlike(parm, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S)),
gr = function(parm) -colSums(cgraduninormlike(parm, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S)),
hessian = 0, control = list(trace = if (printInfo) 1 else 0, maxeval = itermax,
stepmax = stepmax, xtol = tol, grtol = gradtol)),
maxLikAlgo = maxRoutine(fn = cuninormlike, grad = cgraduninormlike,
hess = chessuninormlike, start = startVal, finalHessian = if (hessianType ==
2) "bhhh" else TRUE, control = list(printLevel = if (printInfo) 2 else 0,
iterlim = itermax, reltol = tol, tol = tol, qac = qac),
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S), sr1 = trust.optim(x = startVal, fn = function(parm) -sum(cuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), gr = function(parm) -colSums(cgraduninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), method = "SR1", control = list(maxit = itermax,
cgtol = gradtol, stop.trust.radius = tol, prec = tol,
report.level = if (printInfo) 2 else 0, report.precision = 1L)),
sparse = trust.optim(x = startVal, fn = function(parm) -sum(cuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), gr = function(parm) -colSums(cgraduninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), hs = function(parm) as(-chessuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S), "dgCMatrix"), method = "Sparse", control = list(maxit = itermax,
cgtol = gradtol, stop.trust.radius = tol, prec = tol,
report.level = if (printInfo) 2 else 0, report.precision = 1L,
preconditioner = 1L)), mla = mla(b = startVal, fn = function(parm) -sum(cuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), gr = function(parm) -colSums(cgraduninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), hess = function(parm) -chessuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S), print.info = printInfo, maxiter = itermax,
epsa = gradtol, epsb = gradtol), nlminb = nlminb(start = startVal,
objective = function(parm) -sum(cuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), gradient = function(parm) -colSums(cgraduninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)), hessian = function(parm) -chessuninormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S), control = list(iter.max = itermax, trace = if (printInfo) 1 else 0,
eval.max = itermax, rel.tol = tol, x.tol = tol)))
if (method %in% c("ucminf", "nlminb")) {
mleObj$gradient <- colSums(cgraduninormlike(mleObj$par,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S))
}
mlParam <- if (method %in% c("ucminf", "nlminb")) {
mleObj$par
} else {
if (method == "maxLikAlgo") {
mleObj$estimate
} else {
if (method %in% c("sr1", "sparse")) {
names(mleObj$solution) <- names(startVal)
mleObj$solution
} else {
if (method == "mla") {
mleObj$b
}
}
}
}
if (hessianType != 2) {
if (method %in% c("ucminf", "nlminb"))
mleObj$hessian <- chessuninormlike(parm = mleObj$par,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)
if (method == "sr1")
mleObj$hessian <- chessuninormlike(parm = mleObj$solution,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
uHvar = uHvar, vHvar = vHvar, Yvar = Yvar, Xvar = Xvar,
S = S)
}
mleObj$logL_OBS <- cuninormlike(parm = mlParam, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, uHvar = uHvar, vHvar = vHvar,
Yvar = Yvar, Xvar = Xvar, S = S)
mleObj$gradL_OBS <- cgraduninormlike(parm = mlParam, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, uHvar = uHvar, vHvar = vHvar,
Yvar = Yvar, Xvar = Xvar, S = S)
return(list(startVal = startVal, startLoglik = startLoglik,
mleObj = mleObj, mlParam = mlParam))
}
cuninormeff <- function(object, level) {
beta <- object$mlParam[1:(object$nXvar)]
delta <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nuZUvar)]
phi <- object$mlParam[(object$nXvar + object$nuZUvar + 1):(object$nXvar +
object$nuZUvar + object$nvZVvar)]
Xvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 1)
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
vHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 3)
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- model.response(model.frame(object$formula, data = object$dataTable)) -
as.numeric(crossprod(matrix(beta), t(Xvar)))
theta <- exp(Wu)
u1 <- -exp(Wv/2) * ((dnorm((theta + object$S * epsilon)/exp(Wv/2)) -
dnorm(object$S * epsilon/exp(Wv/2)))/(pnorm((theta +
object$S * epsilon)/exp(Wv/2)) - pnorm(object$S * epsilon/exp(Wv/2)))) -
object$S * epsilon
u2 <- exp(Wv/2) * (dnorm(object$S * epsilon/exp(Wv/2))/(1 -
pnorm(object$S * epsilon/exp(Wv/2))) - object$S * epsilon/exp(Wv/2))
uLB <- exp(Wv/2) * qnorm((1 - level)/2 * pnorm((theta + object$S *
epsilon)/exp(Wv/2)) + (1 - (1 - level)/2) * pnorm(object$S *
epsilon/exp(Wv/2))) - object$S * epsilon
uUB <- exp(Wv/2) * qnorm((1 - (1 - level)/2) * pnorm((theta +
object$S * epsilon)/exp(Wv/2)) + (1 - level)/2 * pnorm(object$S *
epsilon/exp(Wv/2))) - object$S * epsilon
m <- ifelse(-theta < object$S * epsilon & object$S * epsilon <
0, -object$S * epsilon, ifelse(object$S * epsilon >=
0, 0, theta))
if (object$logDepVar == TRUE) {
teJLMS1 <- exp(-u1)
teJLMS2 <- exp(-u2)
teMO <- exp(-m)
teBC1 <- exp(object$S * epsilon + exp(Wv)/2) * (pnorm((object$S *
epsilon + theta)/exp(Wv/2) + exp(Wv/2)) - pnorm(object$S *
epsilon/exp(Wv/2) + exp(Wv/2)))/(pnorm((theta + object$S *
epsilon)/exp(Wv/2)) - pnorm(object$S * epsilon/exp(Wv/2)))
teBC2 <- exp(object$S * epsilon + exp(Wv)/2) * (1 - pnorm(object$S *
epsilon/exp(Wv/2) + exp(Wv/2)))/(1 - pnorm(object$S *
epsilon/exp(Wv/2)))
teBCLB <- exp(-uUB)
teBCUB <- exp(-uLB)
res <- bind_cols(u1 = u1, u2 = u2, uLB = uLB, uUB = uUB,
teJLMS1 = teJLMS1, teJLMS2 = teJLMS2, m = m, teMO = teMO,
teBC1 = teBC1, teBC2 = teBC2, teBCLB = teBCLB, teBCUB = teBCUB)
} else {
res <- bind_cols(u1 = u1, u2 = u2, uLB = uLB, uUB = uUB,
m = m)
}
return(res)
}
cmarguninorm_Eu <- function(object) {
delta <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nuZUvar)]
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
margEff <- kronecker(matrix(delta[2:object$nuZUvar], nrow = 1),
matrix(sqrt(3)/2 * exp(Wu/2), ncol = 1))
colnames(margEff) <- paste0("Eu_", colnames(uHvar)[-1])
return(margEff)
}
cmarguninorm_Vu <- function(object) {
delta <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nuZUvar)]
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
margEff <- kronecker(matrix(delta[2:object$nuZUvar], nrow = 1),
matrix(exp(Wu), ncol = 1))
colnames(margEff) <- paste0("Vu_", colnames(uHvar)[-1])
return(margEff)
} |
.useNThreads = function(nThreads = 0)
{
if (nThreads==0)
{
nt.env = Sys.getenv(.threadAllowVar, unset = NA);
if (is.na(nt.env)) return(1);
if (nt.env=="") return(1);
if (nt.env=="ALL_PROCESSORS") return (.nProcessorsOnline());
nt = suppressWarnings(as.numeric(nt.env));
if (!is.finite(nt)) return(2);
return(nt);
} else
return (nThreads);
}
.nProcessorsOnline = function()
{
n = detectCores();
if (!is.numeric(n)) n = 2;
if (!is.finite(n)) n = 2;
if (n<1) n = 2;
n;
}
allowWGCNAThreads = function(nThreads = NULL)
{
disableWGCNAThreads()
if (is.null(nThreads)) nThreads = .nProcessorsOnline();
if (!is.numeric(nThreads) || nThreads < 2)
stop("nThreads must be numeric and at least 2.");
if (nThreads > .nProcessorsOnline())
printFlush(paste("Warning in allowWGCNAThreads: Requested number of threads is higher than the number\n",
"of available processors (or cores). Using too many threads may degrade code",
"performance. It is recommended that the number of threads be no more than the number\n",
"of available processors.\n"))
printFlush(paste("Allowing multi-threading with up to", nThreads, "threads."));
pars = list(nThreads);
names(pars) = .threadAllowVar;
do.call(Sys.setenv, pars);
invisible(nThreads);
}
disableWGCNAThreads = function()
{
Sys.unsetenv(.threadAllowVar);
pars = list(1)
names(pars) = .threadAllowVar
do.call(Sys.setenv, pars)
if (exists(".revoDoParCluster", where = ".GlobalEnv"))
{
stopCluster(get(".revoDoParCluster", pos = ".GlobalEnv"));
}
registerDoSEQ();
}
.checkAvailableMemory = function()
{
size = 0;
res = .C("checkAvailableMemoryForR", size = as.double(size), PACKAGE = "WGCNA")
res$size;
}
blockSize = function(matrixSize, rectangularBlocks = TRUE, maxMemoryAllocation = NULL, overheadFactor = 3)
{
if (is.null(maxMemoryAllocation))
{
maxAlloc = .checkAvailableMemory();
} else {
maxAlloc = maxMemoryAllocation/8;
}
maxAlloc = maxAlloc/overheadFactor;
if (rectangularBlocks)
{
blockSz = floor(maxAlloc/matrixSize);
} else
blockSz = floor(sqrt(maxAlloc));
return( min (matrixSize, blockSz) )
}
enableWGCNAThreads = function(nThreads = NULL)
{
nCores = detectCores();
if (is.null(nThreads))
{
if (nCores < 4) nThreads = nCores else nThreads = nCores - 1;
}
if (!is.numeric(nThreads) || nThreads < 2)
stop("nThreads must be numeric and at least 2.")
if (nThreads > nCores)
printFlush(paste("Warning in enableWGCNAThreads: Requested number of threads is higher than the number\n",
"of available processors (or cores). Using too many threads may degrade code",
"performance. It is recommended that the number of threads be no more than the number\n",
"of available processors.\n"))
printFlush(paste("Allowing parallel execution with up to", nThreads, "working processes."))
pars = list(nThreads)
names(pars) = .threadAllowVar
do.call(Sys.setenv, pars)
registerDoParallel(nThreads);
invisible(nThreads)
}
WGCNAnThreads = function()
{
n = suppressWarnings(as.numeric(as.character(Sys.getenv(.threadAllowVar, unset = 1))));
if (is.na(n)) n = 1;
if (length(n)==0) n = 1;
n;
}
allocateJobs = function(nTasks, nWorkers)
{
if (is.na(nWorkers))
{
warning("In function allocateJobs: 'nWorkers' is NA. Will use 1 worker.");
nWorkers = 1;
}
n1 = floor(nTasks/nWorkers);
n2 = nTasks - nWorkers*n1;
allocation = list();
start = 1;
for (t in 1:nWorkers)
{
end = start + n1 - 1 + as.numeric(t<=n2);
if (start > end)
{
allocation[[t]] = numeric(0);
} else allocation[[t]] = c(start:end);
start = end+1;
}
allocation;
}
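# --- Usage sketch for allocateJobs(): split 10 tasks across 3 workers; the
# remainder goes to the earliest workers.
allocateJobs(10, 3)   # list(1:4, 5:7, 8:10)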
|
context("class level lsm_contig_sd metric")
landscapemetrics_class_landscape_value <- lsm_c_contig_sd(landscape)
test_that("lsm_c_contig_sd is typestable", {
expect_is(lsm_c_contig_sd(landscape), "tbl_df")
expect_is(lsm_c_contig_sd(landscape_stack), "tbl_df")
expect_is(lsm_c_contig_sd(landscape_brick), "tbl_df")
expect_is(lsm_c_contig_sd(landscape_list), "tbl_df")
})
test_that("lsm_c_contig_sd returns the desired number of columns", {
expect_equal(ncol(landscapemetrics_class_landscape_value), 6)
})
test_that("lsm_c_contig_sd returns in every column the correct type", {
expect_type(landscapemetrics_class_landscape_value$layer, "integer")
expect_type(landscapemetrics_class_landscape_value$level, "character")
expect_type(landscapemetrics_class_landscape_value$class, "integer")
expect_type(landscapemetrics_class_landscape_value$id, "integer")
expect_type(landscapemetrics_class_landscape_value$metric, "character")
expect_type(landscapemetrics_class_landscape_value$value, "double")
}) |
srstepwise=function(x, times, delta, sle=0.15, sls=0.15, dist='lognormal') {
x=cbind(x)
P=ncol(x)
in.=c()
out.=1:P
step=TRUE
H=0
L=P
y=Surv(time=times, event=delta)
while(step) {
fits=as.list(1:L)
A=matrix(nrow=L, ncol=H+1)
for(i in 1:L) {
if(H==0)
fits[[i]]=survreg(y~x[ , out.[i]], dist=dist)
else
fits[[i]]=survreg(y~x[ , out.[i]]+x[ , in.], dist=dist)
A[i, ]=pnorm(-abs(coef(fits[[i]])[2:(H+2)]/
sqrt(diag(vcov(fits[[i]]))[2:(H+2)])))
}
j=which(order(A[ , 1])==1)[1]
if(A[j, 1]<sle) {
in.=c(out.[j], in.)
H=H+1
out.=out.[-j]
L=L-1
if(L==0) step=FALSE
}
else if(H==0) step=FALSE
else {
fits[[1]]=survreg(y~x[ , in.], dist=dist)
A=A[ , -(H+1), drop=FALSE]
A[1, ]=pnorm(-abs(coef(fits[[1]])[2:(H+1)]/
sqrt(diag(vcov(fits[[1]]))[2:(H+1)])))
j=1
step=FALSE
}
if(H>0) {
k=which(order(-A[j, ])==1)[1]
if(A[j, k]>=sls) {
out.=c(out., in.[k])
L=L+1
in.=in.[-k]
H=H-1
if(step && k==1) step=FALSE
else step=TRUE
}
}
}
return(in.)
} |
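# --- Hedged usage sketch for srstepwise() (the survival package it relies on
# must be attached). Toy data in which every column is strongly predictive,
# so the search only takes forward steps; expect all four column indices back.
library(survival)
set.seed(1)
x <- matrix(rnorm(800), ncol = 4)
times <- rexp(200, rate = exp(x %*% c(1, 1, 1, 1)))
delta <- rep(1, 200)
srstepwise(x, times, delta)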
GWN <- function(n, sigma) {
epsilon <- rnorm(n = n, mean = 0, sd = sigma)
return(epsilon)
} |
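# --- Usage sketch for GWN(): five Gaussian white-noise draws with sd = 2.
set.seed(123)
GWN(n = 5, sigma = 2)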
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL |
run_swmm <- function(inp,
rpt = NULL,
out = NULL,
exec = NULL,
stdout = "",
wait = TRUE) {
if (is.null(exec)) exec <- getOption("swmmr.exec")
stopifnot(file.exists(inp), file.exists(exec))
dirn <- base::dirname(inp)
filename <- sub("^([^.]*).*", "\\1", base::basename(inp))
if (is.null(rpt)) {
rpt <- file.path(dirn, paste(filename, "rpt", sep = "."))
}
if (is.null(out)) {
out <- file.path(dirn, paste(filename, "out", sep = "."))
}
base::system2(command = exec,
args = c(inp, rpt, out),
stdout = stdout,
stderr = stdout,
wait = wait,
minimized = FALSE,
invisible = TRUE)
utils::flush.console()
invisible(list(inp = normalizePath(inp),
rpt = normalizePath(rpt),
out = normalizePath(out)))
} |
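# --- run_swmm() needs a SWMM executable, so no simulation is run here; this
# standalone sketch just illustrates how it derives the default .rpt/.out
# paths from the .inp file, using the same sub() pattern as above.
inp <- "C:/models/example1.inp"        # hypothetical path
stem <- sub("^([^.]*).*", "\\1", basename(inp))
file.path(dirname(inp), paste(stem, c("rpt", "out"), sep = "."))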
fixpt.skewAR <- function(thetav,y,x,z,time,ind,distr,pAR,lb,lu,parallelphi,parallelnu,
diagD,skewind){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
Gammab <- Dmatrix(thetav[(p+2):(p+1+q2)])
Deltab<-thetav[(p+2+q2):(p+1+q2+q1)]
piAR<-thetav[(p+2+q2+q1):(p+1+q2+q1+pAR)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+1+q2+q1+pAR))]
res_emj = revert_list(tapply(1:N,ind,emjAR,y=y, x=x, z=z,time=time, beta1=beta1, Gammab=Gammab,
Deltab=Deltab, sigmae=sigmae,piAR=piAR, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
sum5 = Reduce("+",res_emj$sum5)
ut2j = unlist(res_emj$ut2j,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
Gammab<-sum4/m
Deltab<-sum5/sum(ut2j)
D1<-Gammab+Deltab%*%t(Deltab);sD1 <- solve(D1)
if ((t(Deltab)%*%sD1%*%Deltab)>=1) Deltab<-Deltab/as.numeric(sqrt(t(Deltab)%*%sD1%*%Deltab+1e-4))
lambda<-matrix.sqrt(sD1)%*%Deltab/as.numeric(sqrt(1-t(Deltab)%*%sD1%*%Deltab))
if (diagD||(sum(skewind)<q1)) {
lambda<-lambda*skewind
if (diagD) D1 <-diag(diag(D1))
delta<-lambda/as.numeric(sqrt(1+t(lambda)%*%lambda))
Deltab<-matrix.sqrt(D1)%*%delta
Gammab<-D1-Deltab%*%t(Deltab)
}
if (parallelphi) {
piAR<- optimParallel(piAR,lcAR,gr = NULL,method = "L-BFGS-B", lower =rep(-.9999,pAR),
upper = rep(.9999,pAR),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else {
piAR<- optim(piAR,lcAR,gr = NULL,method = "L-BFGS-B", lower =rep(-.9999,pAR),
upper = rep(.9999,pAR),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroAR(y, x, z,time, ind, beta1, sigmae,estphit(piAR), D1, lambda, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
} else {
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,Gammab[upper.tri(Gammab, diag = T)],Deltab,piAR,nu)
}
fixpt.skewUNC <- function(thetav,y,x,z,ind,distr,lb,lu,parallelnu,diagD,skewind){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
Gammab <- Dmatrix(thetav[(p+2):(p+1+q2)])
Deltab<-thetav[(p+2+q2):(p+1+q2+q1)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+1+q2+q1))]
res_emj = revert_list(tapply(1:N,ind,emj,y=y, x=x, z=z, beta1=beta1, Gammab=Gammab,
Deltab=Deltab, sigmae=sigmae, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
sum5 = Reduce("+",res_emj$sum5)
ut2j = unlist(res_emj$ut2j,use.names = F)
uj = unlist(res_emj$uj,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
Gammab<-sum4/m
Deltab<-sum5/sum(ut2j)
D1<-Gammab+Deltab%*%t(Deltab)
sD1 <- solve(D1)
if ((t(Deltab)%*%sD1%*%Deltab)>=1) Deltab<-Deltab/as.numeric(sqrt(t(Deltab)%*%sD1%*%Deltab+1e-4))
lambda<-matrix.sqrt(sD1)%*%Deltab/as.numeric(sqrt(1-t(Deltab)%*%sD1%*%Deltab))
if (diagD||(sum(skewind)<q1)) {
lambda<-lambda*skewind
if (diagD) D1 <-diag(diag(D1))
delta<-lambda/as.numeric(sqrt(1+t(lambda)%*%lambda))
Deltab<-matrix.sqrt(D1)%*%delta
Gammab<-D1-Deltab%*%t(Deltab)
}
logvero1<-function(nu){logvero(y, x, z, ind, beta1, sigmae, D1, lambda, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else {
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,Gammab[upper.tri(Gammab, diag = T)],Deltab,nu)
}
fixpt.skewCS <- function(thetav,y,x,z,ind,distr,lb,lu,parallelphi,parallelnu,diagD,skewind){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
Gammab <- Dmatrix(thetav[(p+2):(p+1+q2)])
Deltab<-thetav[(p+2+q2):(p+1+q2+q1)]
phiCS<-thetav[(p+2+q2+q1)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+2+q2+q1))]
res_emj = revert_list(tapply(1:N,ind,emjCS,y=y, x=x, z=z, beta1=beta1, Gammab=Gammab,
Deltab=Deltab, sigmae=sigmae,phiCS=phiCS, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
sum5 = Reduce("+",res_emj$sum5)
ut2j = unlist(res_emj$ut2j,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
Gammab<-sum4/m
Deltab<-sum5/sum(ut2j)
D1<-Gammab+Deltab%*%t(Deltab);sD1 <- solve(D1)
if ((t(Deltab)%*%sD1%*%Deltab)>=1) Deltab<-Deltab/as.numeric(sqrt(t(Deltab)%*%sD1%*%Deltab+1e-4))
lambda<-matrix.sqrt(sD1)%*%Deltab/as.numeric(sqrt(1-t(Deltab)%*%sD1%*%Deltab))
if (diagD||(sum(skewind)<q1)) {
lambda<-lambda*skewind
if (diagD) D1 <-diag(diag(D1))
delta<-lambda/as.numeric(sqrt(1+t(lambda)%*%lambda))
Deltab<-matrix.sqrt(D1)%*%delta
Gammab<-D1-Deltab%*%t(Deltab)
}
if (parallelphi) {
phiCS <- optimParallel(phiCS,lcCS,gr = NULL,method = "L-BFGS-B", lower =0,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else{
phiCS <- optim(phiCS,lcCS,gr = NULL,method = "L-BFGS-B", lower =0,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroCS(y, x, z, ind, beta1, sigmae,phiCS, D1, lambda, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else{
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,Gammab[upper.tri(Gammab, diag = T)],Deltab,phiCS,nu)
}
fixpt.skewDEC <- function(thetav,y,x,z,time,ind,distr,lb,lu,luDEC,parallelphi,
parallelnu,diagD,skewind){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
Gammab <- Dmatrix(thetav[(p+2):(p+1+q2)])
Deltab<-thetav[(p+2+q2):(p+1+q2+q1)]
phiDEC<-thetav[(p+2+q2+q1)]
thetaDEC<-thetav[(p+3+q2+q1)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+3+q2+q1))]
res_emj = revert_list(tapply(1:N,ind,emjDEC,y=y, x=x, z=z,time=time, beta1=beta1, Gammab=Gammab,
Deltab=Deltab, sigmae=sigmae,phiDEC=phiDEC,thetaDEC=thetaDEC, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
sum5 = Reduce("+",res_emj$sum5)
ut2j = unlist(res_emj$ut2j,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
Gammab<-sum4/m
Deltab<-sum5/sum(ut2j)
D1<-Gammab+Deltab%*%t(Deltab);sD1 <- solve(D1)
if ((t(Deltab)%*%sD1%*%Deltab)>=1) Deltab<-Deltab/as.numeric(sqrt(t(Deltab)%*%sD1%*%Deltab+1e-4))
lambda<-matrix.sqrt(sD1)%*%Deltab/as.numeric(sqrt(1-t(Deltab)%*%sD1%*%Deltab))
if (diagD||(sum(skewind)<q1)) {
lambda<-lambda*skewind
if (diagD) D1 <-diag(diag(D1))
delta<-lambda/as.numeric(sqrt(1+t(lambda)%*%lambda))
Deltab<-matrix.sqrt(D1)%*%delta
Gammab<-D1-Deltab%*%t(Deltab)
}
if (parallelphi) {
parDEC<- optimParallel(c(phiDEC,thetaDEC),lcDEC,gr = NULL,method = "L-BFGS-B", lower =rep(0.0001,2),
upper = c(.9999,luDEC),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else{
parDEC<- optim(c(phiDEC,thetaDEC),lcDEC,gr = NULL,method = "L-BFGS-B", lower =rep(0.0001,2),
upper = c(.9999,luDEC),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
phiDEC<-parDEC[1]
thetaDEC<-parDEC[2]
logvero1<-function(nu){logveroDEC(y, x, z,time, ind, beta1, sigmae,phiDEC,thetaDEC, D1, lambda, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,control = list(fnscale=-1))$par
} else{
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,Gammab[upper.tri(Gammab, diag = T)],Deltab,phiDEC,thetaDEC,nu)
}
fixpt.skewCAR1 <- function(thetav,y,x,z,time,ind,distr,lb,lu,parallelphi,parallelnu,
diagD,skewind){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
Gammab <- Dmatrix(thetav[(p+2):(p+1+q2)])
Deltab<-thetav[(p+2+q2):(p+1+q2+q1)]
phiDEC<-thetav[(p+2+q2+q1)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+2+q2+q1))]
res_emj = revert_list(tapply(1:N,ind,emjCAR1,y=y, x=x, z=z,time=time, beta1=beta1, Gammab=Gammab,
Deltab=Deltab, sigmae=sigmae,phiDEC=phiDEC,distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
sum5 = Reduce("+",res_emj$sum5)
ut2j = unlist(res_emj$ut2j,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
Gammab<-sum4/m
Deltab<-sum5/sum(ut2j)
D1<-Gammab+Deltab%*%t(Deltab);sD1 <- solve(D1)
if ((t(Deltab)%*%sD1%*%Deltab)>=1) Deltab<-Deltab/as.numeric(sqrt(t(Deltab)%*%sD1%*%Deltab+1e-4))
lambda<-matrix.sqrt(sD1)%*%Deltab/as.numeric(sqrt(1-t(Deltab)%*%sD1%*%Deltab))
if (diagD||(sum(skewind)<q1)) {
lambda<-lambda*skewind
if (diagD) D1 <-diag(diag(D1))
delta<-lambda/as.numeric(sqrt(1+t(lambda)%*%lambda))
Deltab<-matrix.sqrt(D1)%*%delta
Gammab<-D1-Deltab%*%t(Deltab)
}
if (parallelphi){
phiDEC<- optimParallel(phiDEC,lcCAR1,gr = NULL,method = "L-BFGS-B", lower =0.0001,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else{
phiDEC<- optim(phiDEC,lcCAR1,gr = NULL,method = "L-BFGS-B", lower =0.0001,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroCAR1(y, x, z,time, ind, beta1, sigmae,phiDEC, D1, lambda, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else{
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,Gammab[upper.tri(Gammab, diag = T)],Deltab,phiDEC,nu)
}
fixpt.AR <- function(thetav,y,x,z,time,ind,distr,pAR,lb,lu,parallelphi,parallelnu,diagD){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
D1 <- Dmatrix(thetav[(p+2):(p+1+q2)])
piAR<-thetav[(p+2+q2):(p+1+q2+pAR)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+1+q2+pAR))]
res_emj = revert_list(tapply(1:N,ind,emjARs,y=y, x=x, z=z,time=time, beta1=beta1, D1=D1,
sigmae=sigmae,piAR=piAR, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
D1<-sum4/m
if (diagD) D1 <-diag(diag(D1))
if (parallelphi){
piAR<- optimParallel(piAR,lcAR,gr = NULL,method = "L-BFGS-B", lower =rep(-.9999,pAR),
upper = rep(.9999,pAR),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else{
piAR<- optim(piAR,lcAR,gr = NULL,method = "L-BFGS-B", lower =rep(-.9999,pAR),
upper = rep(.9999,pAR),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroARs(y = y,x = x, z = z,time = time,ind = ind,
beta1 = beta1,sigmae = sigmae,phiAR = estphit(piAR),
D1 = D1, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else{
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,D1[upper.tri(D1, diag = T)],piAR,nu)
}
fixpt.UNC <- function(thetav,y,x,z,ind,distr,lb,lu,parallelnu,diagD){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
D1 <- Dmatrix(thetav[(p+2):(p+1+q2)])
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+1+q2))]
res_emj = revert_list(tapply(1:N,ind,emjs,y=y, x=x, z=z, beta1=beta1, D1=D1,
sigmae=sigmae, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
uj = unlist(res_emj$uj,use.names = F)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
D1<-sum4/m
if (diagD) D1 <-diag(diag(D1))
logvero1<-function(nu){logveros(y, x, z, ind, beta1, sigmae, D1, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else{
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,D1[upper.tri(D1, diag = T)],nu)
}
fixpt.CS <- function(thetav,y,x,z,ind,distr,lb,lu,parallelphi,parallelnu,diagD){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
D1 <- Dmatrix(thetav[(p+2):(p+1+q2)])
phiCS<-thetav[(p+2+q2)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+2+q2))]
res_emj = revert_list(tapply(1:N,ind,emjCSs,y=y, x=x, z=z, beta1=beta1, D1=D1,
sigmae=sigmae,phiCS=phiCS, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
D1<-sum4/m
if (diagD) D1 <-diag(diag(D1))
if (parallelphi) {
phiCS <- optimParallel(phiCS,lcCS,gr = NULL,method = "L-BFGS-B", lower =0,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else {
phiCS <- optim(phiCS,lcCS,gr = NULL,method = "L-BFGS-B", lower =0,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroCSs(y, x, z, ind, beta1, sigmae,phiCS, D1, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
} else {
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,D1[upper.tri(D1, diag = T)],phiCS,nu)
}
fixpt.DEC <- function(thetav,y,x,z,time,ind,distr,lb,lu,luDEC,parallelphi,parallelnu,diagD){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
D1 <- Dmatrix(thetav[(p+2):(p+1+q2)])
phiDEC<-thetav[(p+2+q2)]
thetaDEC<-thetav[(p+3+q2)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+3+q2))]
res_emj = revert_list(tapply(1:N,ind,emjDECs,y=y, x=x, z=z,time=time, beta1=beta1, D1=D1,
sigmae=sigmae,phiDEC=phiDEC,thetaDEC=thetaDEC, distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
D1<-sum4/m
if (diagD) D1 <-diag(diag(D1))
if (parallelphi) {
parDEC<- optimParallel(c(phiDEC,thetaDEC),lcDEC,gr = NULL,method = "L-BFGS-B", lower =rep(0.0001,2),
upper = c(.9999,luDEC),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else {
parDEC<- optim(c(phiDEC,thetaDEC),lcDEC,gr = NULL,method = "L-BFGS-B", lower =rep(0.0001,2),
upper = c(.9999,luDEC),control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
phiDEC<-parDEC[1]; thetaDEC<-parDEC[2]
logvero1<-function(nu){logveroDECs(y, x, z,time, ind, beta1, sigmae,phiDEC,thetaDEC, D1, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
} else {
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb,
upper = lu,control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,D1[upper.tri(D1, diag = T)],phiDEC,thetaDEC,nu)
}
fixpt.CAR1 <- function(thetav,y,x,z,time,ind,distr,lb,lu,parallelphi,parallelnu,diagD){
p<-ncol(x);q1<-ncol(z);q2 <- q1*(q1+1)/2;N<-length(y)
m <- n_distinct(ind)
beta1<-matrix(thetav[1:p],ncol=1)
sigmae<-as.numeric(thetav[p+1])
D1 <- Dmatrix(thetav[(p+2):(p+1+q2)])
phiDEC<-thetav[(p+2+q2)]
if (distr=="sn") {
nu <- NULL
} else nu<-thetav[-(1:(p+2+q2))]
res_emj = revert_list(tapply(1:N,ind,emjCAR1s,y=y, x=x, z=z,time=time, beta1=beta1, D1=D1,
sigmae=sigmae,phiDEC=phiDEC,distr=distr,nu=nu))
sum1 = Reduce("+",res_emj$sum1)
sum2 = Reduce("+",res_emj$sum2)
sum3 = sum(unlist(res_emj$sum3))
sum4 = Reduce("+",res_emj$sum4)
beta1<-solve(sum1)%*%sum2
sigmae<-as.numeric(sum3)/N
D1<-sum4/m
if (diagD) D1 <-diag(diag(D1))
if (parallelphi) {
phiDEC<- optimParallel(phiDEC,lcCAR1,gr = NULL,method = "L-BFGS-B", lower =0.0001,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
} else {
phiDEC<- optim(phiDEC,lcCAR1,gr = NULL,method = "L-BFGS-B", lower =0.0001,
upper = .9999,control = list(fnscale=-1),beta1=beta1,sigmae=sigmae,
y=y,x=x,z=z,time=time,ind=ind,u=res_emj$uj,ub=res_emj$ubj,ub2=res_emj$ub2j)$par
}
logvero1<-function(nu){logveroCAR1s(y, x, z,time, ind, beta1, sigmae,phiDEC, D1, distr, nu)}
if (distr=="sn"){ nu<-NULL} else {
if (parallelnu) {
nu <- optimParallel(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
} else {
nu <- optim(nu,(logvero1),gr = NULL,method = "L-BFGS-B", lower =lb, upper = lu,
control = list(fnscale=-1))$par
}
}
c(beta1,sigmae,D1[upper.tri(D1, diag = T)],phiDEC,nu)
} |
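## prepareModel: fills the supplied environment 'env' with bookkeeping for a panel
## choice data set: the number of respondents (gNP), observation counts per respondent
## (TIMES), a respondent index per row (gIDS), the respondent IDs (respIDs) and, when
## random coefficients are requested via gVarNamesNormal, the starting values A (taken
## from svN), the individual-level coefficient matrix B (A replicated across
## respondents) and the prior covariance Dmat.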
prepareModel = function(env) {
env$gNP <- length(unique(env$choicedata$ID))
env$Choice <- env$choicedata$Choice
env$gNOBS <- dim(env$choicedata)[1]
env$TIMES <- matrix(0, nrow = env$gNP, ncol = 1)
env$TIMES[, 1] <- aggregate(env$choicedata$ID, by = list(env$choicedata$ID), length)[, 2]
env$gIDS <- unlist(as.vector(mapply(rep, 1:env$gNP, env$TIMES)))
env$respIDs <- unique(env$choicedata$ID)
if (length(env$gVarNamesNormal) > 0) {
env$A <- matrix(0, nrow = env$gNIV, ncol = 1)
env$B <- matrix(0, nrow = env$gNP, ncol = env$gNIV)
env$Dmat <- env$priorVariance * diag(env$gNIV)
env$A[, 1] <- env$svN
env$B <- 1 + env$B
env$B <- env$B * matrix(t(env$A), nrow = env$gNP, ncol = env$gNIV, byrow = T)
}
} |
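## getSampleBasedOnUnmaskedData: if not supplied, estimates the marginal mean (mu),
## scale (s) and correlation matrix (rho_X) of the underlying variables from the
## noise-masked vectors, fits kernel density estimates to the unmasked vectors, maps
## rho_X to a Gaussian-copula ("Nataf") correlation matrix via rho_0() and Gauss-Hermite
## quadrature, and finally draws 'size' observations by pushing multivariate normal
## draws through the estimated marginal quantile functions (qkdeSorted).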
getSampleBasedOnUnmaskedData <- function( meansOfNoises, meansOfSquaredNoises, maskedVectors, unmaskedVectors, mu, s, rho_X, cores = 1, size, verbose = -1) {
numberOfVectors <- length(maskedVectors)
if(missing(mu)) {
mu <- lapply(1:numberOfVectors, FUN = function(i) {
mean(maskedVectors[[i]])/meansOfNoises[[i]]
})}
if(missing(s)) {
s <- lapply(1:numberOfVectors, FUN = function(i) {
sqrt((mean(maskedVectors[[i]]^2)-(meansOfSquaredNoises[[i]])*mean(maskedVectors[[i]])^2/(meansOfNoises[[i]])^2)/(meansOfSquaredNoises[[i]]))
})}
if(missing(rho_X)) {
rho_X <- matrix(1,numberOfVectors,numberOfVectors)
for(i in 1:numberOfVectors) {
for(j in 1:numberOfVectors) {
if(i != j) {
rho_X[i,j] <- (cov(maskedVectors[[i]],maskedVectors[[j]])/((meansOfNoises[[i]])*(meansOfNoises[[j]])))/(s[[i]]*s[[j]])
rho_X[j,i] <- rho_X[i,j]
}
}
}
}
if(verbose > 1) {
print(mu)
print(s)
print(rho_X)
}
if(verbose > 0) {
print("finished estimating mu, s and rho_X if missing")
}
G_Point7<-c(-3.75043971768,-2.36675941078,-1.1544053948,0, 1.1544053948, 2.36675941078,3.75043971768 )
GH_Quadrature<-c(0.000548268858737,0.0307571239681,0.240123178599,0.457142857143,0.240123178599,
0.0307571239681, 0.000548268858737 )
if(length(unmaskedVectors) != numberOfVectors) {
stop("meansOfNoises and unmaskedVectors must be the same length")
}
if(length(mu) != numberOfVectors) {
stop("meansOfNoises and mu must be the same length")
}
if(length(s) != numberOfVectors) {
stop("meansOfNoises and s must be the same length")
}
fhat <- lapply(1:numberOfVectors, FUN = function(i) {
return(ks::kde(x=unmaskedVectors[[i]], binned = TRUE))
})
if (verbose > 1) {
print("calculating Nataf_rho matrix")
}
Nataf_rho <- matrix(rep(1,(numberOfVectors^2)),nrow=numberOfVectors,ncol=numberOfVectors)
Nataf_rho[upper.tri(Nataf_rho, diag = TRUE)] <- NA
Nataf_rho <- parallel::mclapply(1:numberOfVectors, mc.cores = cores, FUN = function(j) {
return(lapply(1:numberOfVectors, FUN = function(i) {
if (verbose > 1) {
print("row and column")
print(i)
print(j)
}
if(!is.na(Nataf_rho[i,j])) {
return(rho_0(unmaskedVectors[[j]], unmaskedVectors[[i]], mu[[j]],mu[[i]],s[[j]],s[[i]], rho_X[i,j], fhat[[j]], fhat[[i]], G_Point7, GH_Quadrature, verbose))
} else {
return(NA)
}}))
})
Nataf_rho <- unlist(Nataf_rho)
Nataf_rho <- matrix(Nataf_rho,nrow=numberOfVectors,ncol=numberOfVectors)
diag(Nataf_rho) <- 1
  # mirror the lower triangle onto the upper triangle; taking the transpose keeps the
  # matrix symmetric for any number of vectors
  Nataf_rho[upper.tri(Nataf_rho, diag = FALSE)] <- t(Nataf_rho)[upper.tri(Nataf_rho, diag = FALSE)]
Mmu<-rep(0,numberOfVectors)
Erho_0<-Nataf_rho
ZfinalOutput<- mvrnorm(n = size, mu=Mmu, Sigma=Erho_0, tol = 1e-6, empirical = FALSE, EISPACK = FALSE)
finalOutput<- parallel::mclapply(1:numberOfVectors, mc.cores = cores, FUN=function(i){
return(qkdeSorted(pnorm(ZfinalOutput[,i]),fhat[[i]]))
})
return(finalOutput)
} |
domain_numeric <- function(from, to) {
  # build the range object with a regular left assignment instead of `->` into a name
  # that shadows base::range
  rng <- list(type = "numeric",
              from = from,
              to = to)
  class(rng) <- c("value_range", class(rng))
  rng
}
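## Illustrative use (not run): build a numeric range object spanning [0, 1].
## r <- domain_numeric(0, 1)
## str(r)  # a list with $type "numeric", $from 0, $to 1 and class c("value_range", "list")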
qqPlotGestalt <-
function (distribution = "norm", param.list = list(mean = 0,
sd = 1), estimate.params = FALSE, est.arg.list = NULL, sample.size = 10,
num.pages = 2, num.plots.per.page = 4, nrow = ceiling(num.plots.per.page/2),
plot.type = "Q-Q", plot.pos.con = switch(dist.abb, norm = ,
lnorm = , lnormAlt = , lnorm3 = 0.375, evd = 0.44, 0.4),
equal.axes = (qq.line.type == "0-1" || estimate.params),
margin.title = NULL, add.line = FALSE, qq.line.type = "least squares",
duplicate.points.method = "standard", points.col = 1, line.col = 1,
line.lwd = par("cex"), line.lty = 1, digits = .Options$digits,
same.window = TRUE, ask = same.window & num.pages > 1, mfrow = c(nrow,
num.plots.per.page/nrow), mar = c(4, 4, 1, 1) + 0.1,
oma = c(0, 0, 7, 0), mgp = c(2, 0.5, 0), ..., main = NULL,
xlab = NULL, ylab = NULL, xlim = NULL, ylim = NULL)
{
if (!((num.plots.per.page%%2) == 0 || num.plots.per.page ==
1))
stop("'num.plots.per.page' must be 1 or an even number")
if ((num.plots.per.page%%nrow) != 0)
stop("'num.plots.per.page' must be a multiple of 'nrow'")
plot.type <- match.arg(plot.type, c("Q-Q", "Tukey Mean-Difference Q-Q"))
duplicate.points.method <- match.arg(duplicate.points.method,
c("standard", "jitter", "number"))
qq.line.type <- match.arg(qq.line.type, c("least squares",
"0-1", "robust"))
par.list <- list(mfrow = mfrow, mar = mar, oma = oma, mgp = mgp)
dev.new()
cex.orig <- par("cex")
par(par.list)
par(cex = 0.75 * cex.orig, mex = 0.75 * cex.orig)
devAskNewPage(ask = ask)
check.gp.list <- checkGraphicsPars(...)
gp.arg.list <- check.gp.list$gp.arg.list
gp.names <- check.gp.list$gp.names
gen.gp.list <- check.gp.list$gen.gp.list
check.da.list <- check.distribution.args(distribution, param.list)
dist.abb <- check.da.list$dist.abb
dist.name <- check.da.list$dist.name
n.dist.params <- check.da.list$n.dist.params
dist.params.names <- check.da.list$dist.params.names
param.list <- check.da.list$param.list
param.list.x <- param.list
if (!is.vector(plot.pos.con, mode = "numeric") || length(plot.pos.con) !=
1 || plot.pos.con < 0 || plot.pos.con > 1)
stop("'plot.pos.con' must be a numeric scalar between 0 and 1")
if (estimate.params) {
if (EnvStats::Distribution.df[dist.abb, "Estimation.Method(s)"] ==
"")
stop(paste("No estimation method available for the",
dist.name, "Distribution"))
if (dist.params.names[n.dist.params] == "ncp" && param.list$ncp !=
0)
stop("No estimation method available for Non-Central Distributions.")
}
r.fcn <- paste("r", dist.abb, sep = "")
q.fcn <- paste("q", dist.abb, sep = "")
if (is.null(margin.title)) {
margin.title.supplied <- FALSE
plot.string <- ifelse(num.plots.per.page == 1, "Plot",
"Plots")
if (any(dist.abb == c("beta", "chisq", "f")) && param.list$ncp ==
0)
margin.title <- paste(plot.type, " ", plot.string,
" for\n", dist.name, "(", paste(paste(dist.params.names[-n.dist.params],
signif(unlist(param.list[-n.dist.params]),
digits), sep = " = "), collapse = ", "),
") Distribution", sep = "")
else margin.title <- paste(plot.type, " ", plot.string,
" for\n", dist.name, "(", paste(paste(dist.params.names,
signif(unlist(param.list), digits), sep = " = "),
collapse = ", "), ") Distribution", sep = "")
}
else margin.title.supplied <- TRUE
if (estimate.params) {
est.fcn <- paste("e", dist.abb, sep = "")
estimation.method <- do.call(est.fcn, c(list(x = do.call(r.fcn,
c(list(n = sample.size), param.list))), est.arg.list))$method
ss.title <- paste("(Sample Size = ", sample.size, "; Estimation Method = ",
estimation.method, ")", sep = "")
}
else ss.title <- paste("(Sample Size = ", sample.size, "; No Parameter Estimation",
")", sep = "")
if (is.null(main))
main <- ""
if (plot.type == "Q-Q") {
if (is.null(xlab))
xlab <- "Quantiles of Assumed Distribution"
if (is.null(ylab))
ylab <- "Random Quantiles"
}
else {
if (is.null(xlab))
xlab <- "Mean of Observed and Fitted Quantiles"
if (is.null(ylab))
ylab <- "Observed-Fitted Quantiles"
}
user.xlim <- xlim
user.ylim <- ylim
for (i in 1:(num.pages * num.plots.per.page)) {
if (i > 1 && ((i%%num.plots.per.page) == 1) && !same.window) {
dev.new()
par(par.list)
par(cex = 0.75 * cex.orig, mex = 0.75 * cex.orig)
}
q.y <- sort(do.call(r.fcn, c(list(n = sample.size), param.list)))
if (estimate.params) {
est.param.vec <- do.call(est.fcn, c(list(x = q.y),
est.arg.list))$parameters
if (!is.null(param.list$ncp))
param.list.x[-n.dist.params] <- est.param.vec
else param.list.x[] <- est.param.vec
}
q.x <- do.call(q.fcn, c(list(ppoints(q.y, a = plot.pos.con)),
param.list.x))
if (plot.type == "Q-Q") {
if (is.null(user.xlim) && is.null(user.ylim) && equal.axes) {
xlim <- range(q.x, q.y)
ylim <- xlim
}
else {
if (is.null(user.xlim))
xlim <- range(q.x)
if (is.null(user.ylim))
ylim <- range(q.y)
}
plot(q.x, q.y, type = "n", ..., main = main, xlab = xlab,
ylab = ylab, xlim = xlim, ylim = ylim)
arg.list <- c(list(x = q.x, y = q.y, method = duplicate.points.method),
gen.gp.list, list(col = points.col))
do.call("points.w.dups", arg.list)
if (add.line)
switch(qq.line.type, `least squares` = {
arg.list <- c(list(a = lm(q.y ~ q.x)), gen.gp.list,
list(col = line.col, lwd = line.lwd, lty = line.lty))
do.call("abline", arg.list)
}, `0-1` = {
arg.list <- c(list(a = 0, b = 1), gen.gp.list,
list(col = line.col, lwd = line.lwd, lty = line.lty))
do.call("abline", arg.list)
}, robust = {
arg.list <- c(list(x = q.x, y = q.y), gen.gp.list,
list(col = line.col, lwd = line.lwd, lty = line.lty))
do.call("qqLine", arg.list)
})
}
else {
q.mean <- (q.x + q.y)/2
q.diff <- q.y - q.x
if (is.null(user.ylim)) {
rqmo2 <- diff(range(q.mean))/2
mqd <- median(q.diff)
ylim.min <- min(min(q.diff), mqd - rqmo2)
ylim.max <- max(max(q.diff), mqd + rqmo2)
ylim <- c(ylim.min, ylim.max)
}
if (is.null(user.xlim))
xlim <- range(q.mean)
plot(q.mean, q.diff, type = "n", ..., main = main,
xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab)
arg.list <- c(list(x = q.mean, y = q.diff, method = duplicate.points.method),
gen.gp.list, list(col = points.col))
do.call("points.w.dups", arg.list)
if (add.line) {
arg.list <- c(list(h = 0), gen.gp.list, list(col = line.col,
lwd = line.lwd, lty = line.lty))
do.call("abline", arg.list)
}
}
if ((i%%num.plots.per.page) == 0 && !margin.title.supplied) {
mtext(margin.title, side = 3, line = 3, outer = TRUE,
cex = 1.25 * cex.orig)
mtext(ss.title, side = 3, line = 0, outer = TRUE,
cex = cex.orig)
}
}
} |
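## makeMOP1Function: generator for the bi-objective MOP1 benchmark; the objective values
## themselves are computed in compiled code via .Call("mof_MOP1", x), so the returned
## function is only usable inside the package that provides that native routine.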
makeMOP1Function = function() {
fn = function(x) {
assertNumeric(x, len = 2L, any.missing = FALSE, all.missing = FALSE)
return(.Call("mof_MOP1", x))
}
makeMultiObjectiveFunction(
name = "MOP1 function",
id = "MOP1",
description = "MOP1 function",
fn = fn,
par.set = makeNumericParamSet(
len = 2L,
id = "x",
lower = -1e5,
upper = 1e5,
vector = TRUE
),
n.objectives = 2L
)
}
class(makeMOP1Function) = c("function", "smoof_generator")
attr(makeMOP1Function, "name") = c("MOP1")
attr(makeMOP1Function, "type") = c("multi-objective")
attr(makeMOP1Function, "tags") = c("multi-objective") |
dfToGeoJSON <-
function(data, name, dest, lat.lon, overwrite) {
if(is(data, "tbl_df")) data <- as.data.frame(data)
if(is.null(lat.lon)) {
lat <- which(names(data)==intersect(names(data), c("y", "Y", "lat", "Lat", "LAT", "latitude", "Latitude", "LATITUDE"))[1])
lon <- which(names(data)==intersect(names(data), c("x", "X", "lon", "Lon", "LON", "long", "Long", "LONG", "longitude", "Longitude", "LONGITUDE"))[1])
if(length(lat)==0 || length(lon)==0) {
lat.lon <- c(1,2)
message("Latitude and longitude not found - columns 1 (", names(data)[1], ") and 2 (", names(data)[2], ") taken instead")
} else {
lat.lon <- c(lat, lon)
message("Columns ", lat, " (", names(data)[lat], ") and ", lon, " (", names(data)[lon], ") detected as latitude and longitude")
}
}
if(length(lat.lon)!=2) stop("'lat.lon' must be a vector of two: c(latitude, longitude)")
if(any(!is.numeric(lat.lon))) {
    if(!any(names(data)==lat.lon[1])) stop("Latitude column not found")
    if(!any(names(data)==lat.lon[2])) stop("Longitude column not found")
lat.lon <- c(which(names(data)==lat.lon[1]), which(names(data)==lat.lon[2]))
}
  if(any(is.na(data[,lat.lon[1]])) || any(is.na(data[,lat.lon[2]]))) stop("Missing values in the coordinate columns")
for(i in 1:ncol(data)) {
if(is(data[,i], "factor")) {
data[,i] <- as.character(data[,i])
message("Column \'", names(data[i]), "\' converted from factor to character type")
}
}
for(i in 1:ncol(data)) {
data[,i] <- gsub("\n","; ",data[,i])
}
path <- paste0(file.path(dest, name), ".geojson")
if(file.exists(path) && !overwrite) stop("Abort - file already exists\n")
cat("{", file=path, sep="\n")
cat(" \"type\": \"FeatureCollection\",", file=path, append=TRUE, sep="\n")
cat(" \"features\": [", file=path, append=TRUE, sep="\n")
for(f in 1:nrow(data)) {
cat(" {", file=path, append=TRUE, sep="\n")
cat(" \"type\": \"Feature\",", file=path, append=TRUE, sep="\n")
if(length(data)>2) {
cat(" \"properties\": {", file=path, append=TRUE, sep="\n")
dat <- data[f,-lat.lon]
if(!is.data.frame(dat)) names(dat) <- names(data)[-lat.lon]
if(length(dat)==1) {
cat(paste0(" \"", names(data)[-lat.lon], "\": \"", dat, "\"\n"), file=path, append=TRUE)
} else {
for(p in 1:length(dat)) {
cat(paste0(" \"", names(dat)[p], "\": \"", dat[p], "\""), file=path, append=TRUE)
if(p==length(dat)) cat("\n", file=path, append=TRUE)
else cat(",", file=path, append=TRUE, sep="\n")
}
}
cat(" },", file=path, append=TRUE, sep="\n")
}
cat(" \"geometry\": {", file=path, append=TRUE, sep="\n")
cat(" \"type\": \"Point\",", file=path, append=TRUE, sep="\n")
cat(paste0(" \"coordinates\": [", data[f,lat.lon[2]], ",", data[f,lat.lon[1]], "]"), file=path, append=TRUE, sep="\n")
cat(" }", file=path, append=TRUE, sep="\n")
if(f==nrow(data)) cat(" }", file=path, append=TRUE, sep="\n")
else cat(" },", file=path, append=TRUE, sep="\n")
}
cat(" ]", file=path, append=TRUE, sep="\n")
cat("}", file=path, append=TRUE, sep="\n")
return(path)
} |
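## Illustrative call (not run; hypothetical data frame, temporary output directory):
## df <- data.frame(lat = c(52.5, 48.9), lon = c(13.4, 2.4), city = c("Berlin", "Paris"))
## dfToGeoJSON(df, name = "cities", dest = tempdir(), lat.lon = c("lat", "lon"),
##             overwrite = TRUE)  # returns the path of the written .geojson file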
NULL
put_axes <- function(quadrant = NULL, col = NULL, size = NULL) {
if (is.null(quadrant)) quadrant <- 1
if (is.null(col)) col <- "gray60"
if (is.null(size)) size <- 0.5
if (quadrant == 0) {
x_lim <- c(-1, 1)
y_lim <- c(-1, 1)
} else if (quadrant == 1) {
x_lim <- c(0, 1)
y_lim <- c(0, 1)
} else if (quadrant == 2) {
x_lim <- c(-1, 0)
y_lim <- c(0, 1)
} else if (quadrant == 3) {
x_lim <- c(-1, 0)
y_lim <- c(-1, 0)
} else if (quadrant == 4) {
x_lim <- c(0, 1)
y_lim <- c(-1, 0)
} else if (quadrant == 12 | quadrant == 21) {
x_lim <- c(-1, 1)
y_lim <- c(0, 1)
} else if (quadrant == 23 | quadrant == 32) {
x_lim <- c(-1, 0)
y_lim <- c(-1, 1)
} else if (quadrant == 34 | quadrant == 43) {
x_lim <- c(-1, 1)
y_lim <- c(-1, 0)
} else if (quadrant == 14 | quadrant == 41) {
x_lim <- c(0, 1)
y_lim <- c(-1, 1)
} else {
stop("Incorrect quadrant argument.")
}
p <- ggplot() +
geom_segment(aes(x = x_lim[1], y = 0, xend = x_lim[2], yend = 0),
col = col, size = size, lineend = "square"
) +
geom_segment(aes(x = 0, y = y_lim[1], xend = 0, yend = y_lim[2]),
col = col, size = size, lineend = "square"
) +
theme(
axis.ticks = element_blank(),
axis.text = element_blank(),
axis.title = element_blank(),
panel.grid = element_blank(),
panel.background = element_blank(),
plot.margin = unit(c(0, 0, 0, 0), "mm")
) +
coord_fixed(xlim = x_lim, ylim = y_lim)
return(p)
}
"put_axes" |
check.homog <- function(h.fct,Z,tol=NULL) {
if (is.null(tol)) {tol <- 1e-9}
nr <- nrow(Z)
nc <- ncol(Z)
x1 <- runif(nr,1,10); x2 <- runif(nr,1,10)
gam <- runif(nc,1,10)
gx1 <- x1*c((Z%*%gam))
gx2 <- x2*c((Z%*%gam))
diff <- h.fct(gx1)*h.fct(x2) - h.fct(gx2)*h.fct(x1)
norm.diff <- sqrt( sum(diff*diff) )
chk <- ""
if (norm.diff > tol) {chk <- paste("h(m) is not Z homogeneous [based on tol=",tol,"]!")}
chk
} |
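## Illustrative check (not run): h(m) = m1*m4 - m2*m3 is Z-homogeneous when Z is a single
## column of ones (scaling m by a constant scales h by the constant squared), so the call
## below should return the empty string "" (the difference is zero up to floating-point
## rounding).
## check.homog(function(m) m[1] * m[4] - m[2] * m[3], Z = matrix(1, 4, 1))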
test_that("rct class works", {
expect_s3_class(rct(), "wk_rct")
expect_output(print(rct(1, 2, 3, 4)), "\\[1 2 3 4\\]")
expect_identical(as_rct(rct(1, 2, 3, 4)), rct(1, 2, 3, 4))
expect_identical(
as_rct(as.matrix(data.frame(xmin = 1, ymin = 2, xmax = 3, ymax = 4))),
rct(1, 2, 3, 4)
)
expect_identical(
as_rct(data.frame(xmin = 1, ymin = 2, xmax = 3, ymax = 4)),
rct(1, 2, 3, 4)
)
expect_identical(
as_rct(matrix(1:4, nrow = 1)),
rct(1, 2, 3, 4)
)
})
test_that("coercion to and from wk* classes works", {
expect_identical(
as_wkt(rct(1, 2, 3, 4)),
wkt("POLYGON ((1 2, 3 2, 3 4, 1 4, 1 2))")
)
expect_identical(
as_wkb(rct(1, 2, 3, 4)),
as_wkb("POLYGON ((1 2, 3 2, 3 4, 1 4, 1 2))")
)
})
test_that("subset-assign works for rct", {
x <- rct(1:2, 2:3, 3:4, 4:5)
x[1] <- rct(NA, NA, NA, NA)
expect_identical(x, c(rct(NA, NA, NA, NA), rct(2, 3, 4, 5)))
})
test_that("rct() propagates CRS", {
x <- rct(1, 2, 3, 4)
wk_crs(x) <- 1234
expect_identical(wk_crs(x[1]), 1234)
expect_identical(wk_crs(c(x, x)), 1234)
expect_identical(wk_crs(rep(x, 2)), 1234)
expect_error(x[1] <- wk_set_crs(x, NULL), "are not equal")
x[1] <- wk_set_crs(x, 1234L)
expect_identical(wk_crs(x), 1234)
}) |
test_that("generatePartialDependenceData", {
m = c(4, 10)
fr = train("regr.rpart", regr.task)
dr = generatePartialDependenceData(fr, input = regr.task,
features = c("lstat", "chas"), interaction = TRUE, n = m)
nfeat = length(dr$features)
nfacet = length(unique(regr.df[["chas"]]))
n = getTaskSize(regr.task)
expect_equal(nrow(dr$data), m[1] * nfeat)
  expect_true(all(dr$data$medv >= min(regr.df$medv) & dr$data$medv <= max(regr.df$medv)))
plotPartialDependence(dr, facet = "chas")
dir = tempdir()
path = file.path(dir, "test.svg")
suppressMessages(ggsave(path))
doc = XML::xmlParse(path)
expect_equal(length(XML::getNodeSet(doc, grey.rect.xpath, ns.svg)), nfacet)
expect_equal(length(XML::getNodeSet(doc, black.circle.xpath, ns.svg)), nfacet * m[1])
dr.df = generatePartialDependenceData(fr, input = regr.df, features = "lstat")
dr = generatePartialDependenceData(fr, input = regr.task,
features = c("lstat", "chas"), interaction = TRUE, individual = TRUE, n = m)
expect_equal(nrow(dr$data), m[1] * nfeat * m[2])
plotPartialDependence(dr, facet = "chas", data = regr.df, p = 1)
suppressMessages(ggsave(path))
doc = XML::xmlParse(path)
expect_equal(length(XML::getNodeSet(doc, grey.rect.xpath, ns.svg)), nfacet)
expect_equal(length(XML::getNodeSet(doc, black.circle.xpath, ns.svg)), n + prod(m) * nfacet)
fc = train("classif.rpart", multiclass.task)
dc = generatePartialDependenceData(fc, input = multiclass.task, features = c("Petal.Width", "Petal.Length"),
fun = function(x) table(x) / length(x), n = m)
nfeat = length(dc$features)
n = getTaskSize(multiclass.task)
plotPartialDependence(dc, data = multiclass.df)
suppressMessages(ggsave(path))
doc = XML::xmlParse(path)
expect_equal(length(XML::getNodeSet(doc, grey.rect.xpath, ns.svg)), nfeat)
expect_equal(length(XML::getNodeSet(doc, red.circle.xpath, ns.svg)) - 1, nfeat * m[1])
expect_equal(length(XML::getNodeSet(doc, blue.circle.xpath, ns.svg)) - 1, nfeat * m[1])
expect_equal(length(XML::getNodeSet(doc, green.circle.xpath, ns.svg)) - 1, nfeat * m[1])
fcp = train(makeLearner("classif.svm", predict.type = "prob"), multiclass.task)
dcp = generatePartialDependenceData(fcp, input = multiclass.task,
features = "Petal.Width",
fun = function(x) quantile(x, c(.025, .5, .975)), n = m)
plotPartialDependence(dcp)
dcp = generatePartialDependenceData(fcp, input = multiclass.task, features = c("Petal.Width", "Petal.Length"),
interaction = TRUE, n = m)
nfacet = length(unique(dcp$data$Petal.Length))
ntarget = length(dcp$target)
plotPartialDependence(dcp, "tile")
dcp = generatePartialDependenceData(fcp, input = multiclass.task, features = c("Petal.Width", "Petal.Length"),
interaction = TRUE, individual = TRUE, n = m)
fs = train("surv.rpart", surv.task)
ds = generatePartialDependenceData(fs, input = surv.task, features = c("x1", "x2"), n = m)
nfeat = length(ds$features)
n = getTaskSize(surv.task)
plotPartialDependence(ds, data = surv.df)
suppressMessages(ggsave(path))
doc = XML::xmlParse(path)
expect_equal(length(XML::getNodeSet(doc, grey.rect.xpath, ns.svg)), nfeat)
expect_equal(length(XML::getNodeSet(doc, black.circle.xpath, ns.svg)), m[1] * nfeat)
pd = generatePartialDependenceData(fr, input = regr.task,
features = c("lstat", "chas"), n = m)
plotPartialDependence(pd)
db = generatePartialDependenceData(fr, input = regr.task, features = c("lstat", "chas"),
interaction = TRUE,
fun = function(x) quantile(x, c(.25, .5, .75)), n = m)
nfacet = length(unique(regr.df[["chas"]]))
n = getTaskSize(regr.task)
expect_equal(colnames(db$data), c("medv", "Function", "lstat", "chas"))
plotPartialDependence(db, facet = "chas", data = regr.df)
suppressMessages(ggsave(path))
doc = XML::xmlParse(path)
expect_equal(length(XML::getNodeSet(doc, grey.rect.xpath, ns.svg)), nfacet * 3)
expect_equal(length(XML::getNodeSet(doc, black.circle.xpath, ns.svg)), nfacet * 3 * m[1] + n * 3)
expect_error(generatePartialDependenceData(fr, input = regr.task, features = c("lstat", "chas"),
derivative = TRUE))
expect_error(generatePartialDependenceData(fr, input = regr.task, features = c("lstat", "chas"),
interaction = TRUE, derivative = TRUE))
fcpb = train(makeLearner("classif.rpart", predict.type = "prob"), binaryclass.task)
bc = generatePartialDependenceData(fcpb, input = binaryclass.task, features = c("V11", "V12"),
individual = TRUE, n = m)
plotPartialDependence(bc)
bc = generatePartialDependenceData(fcpb, input = binaryclass.task, features = c("V11", "V12"), n = m)
plotPartialDependence(bc)
fr = train(makeLearner("regr.ksvm"), regr.task)
pfr = generatePartialDependenceData(fr, input = regr.df, features = c("lstat", "crim"),
derivative = TRUE, individual = FALSE, n = m)
pfri = generatePartialDependenceData(fr, input = regr.df,
features = c("lstat", "crim"),
derivative = TRUE, individual = TRUE, n = m)
fc = train(makeLearner("classif.ksvm", predict.type = "prob"), multiclass.task)
pfc = generatePartialDependenceData(fc, input = multiclass.df,
features = c("Petal.Width", "Petal.Length"),
derivative = TRUE, n = m)
fs = train(makeLearner("surv.coxph"), surv.task)
pfs = generatePartialDependenceData(fs, input = surv.df,
features = c("x1", "x2"),
derivative = TRUE, n = m)
fse = train(makeLearner("regr.lm", predict.type = "se"), regr.task)
pfse = generatePartialDependenceData(fse, input = regr.task, features = c("lstat", "crim"),
bounds = c(-2, 2), n = m)
plotPartialDependence(pfse)
expect_error(plotPartialDependence(ds, geom = "tile"))
tfr = generatePartialDependenceData(fr, regr.df, features = c("lstat", "crim", "chas"),
interaction = TRUE, n = m)
plotPartialDependence(tfr, geom = "tile", facet = "chas", data = regr.df)
tfs = generatePartialDependenceData(fs, surv.df, c("x1", "x2"), interaction = TRUE)
plotPartialDependence(tfs, geom = "tile", data = surv.df)
q = plotPartialDependence(dr, facet = "chas", data = regr.df,
facet.wrap.nrow = 2L)
testFacetting(q, 2L)
q = plotPartialDependence(dr, facet = "chas", facet.wrap.ncol = 2L,
data = regr.df)
testFacetting(q, ncol = 2L)
pd = generatePartialDependenceData(fcp, multiclass.task, "Petal.Width",
individual = TRUE, n = m)
pd = generatePartialDependenceData(fcp, multiclass.task, "Petal.Width",
individual = TRUE, derivative = TRUE, n = m)
pd.der.classif = generatePartialDependenceData(fcp, multiclass.task, "Petal.Width",
derivative = TRUE, n = m)
}) |
summary.CICA <- function(object, ...){
names(object$P) <- 1:length(object$P)
cat('Partitioning matrix P: \n' )
PB <- matrix(0, nrow = length(object$P), ncol = length(unique(object$P)))
for(i in 1:nrow(PB)){
PB[i, object$P[i]] <- 1
}
colnames(PB) <- paste('Cluster',sort(unique(object$P)))
cat('\n')
print(PB)
cat('\n')
cat('Tabulation of clustering: \n')
cat('\n')
tab <- table(object$P)
names(tab) <- paste('Cluster',sort(unique(object$P)))
print( tab )
cat('\n')
cat('Loss function value of optimal solution is: ', object$Loss,'\n')
out <- list()
out$PM <- PB
out$tab <- tab
out$loss <- object$Loss
return(out)
} |
expected <- eval(parse(text="structure(list(names = structure(\"stats\", .Names = \"name\")), .Names = \"names\")"));
test(id=0, code={
argv <- eval(parse(text="list(NULL, structure(list(names = structure(\"stats\", .Names = \"name\")), .Names = \"names\"))"));
do.call(`c`, argv);
}, o=expected); |
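## combineC: computes an inbreeding-coefficient estimate (HWf) and its large-sample
## variance for each row of genotype counts in Xmat and pools them across rows using a
## Rubin's-rules style combination (within-variance W, between-variance B, total
## variance T = W + (m + 1) * B / m), optionally on Fisher's z scale, returning the
## pooled estimate with its p-value and confidence interval.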
combineC <- function (Xmat, alpha = 0.05, fisher = FALSE, varest = 1)
{
fvec <- NULL
varvec <- NULL
m <- nrow(Xmat)
for (i in 1:nrow(Xmat)) {
fhat <- HWf(Xmat[i, ])
pa <- af(Xmat[i, ])
n <- sum(Xmat[i, ])
fvec <- c(fvec, fhat)
if(varest == 1) varf <- ((1-fhat)^2)*(1-2*fhat)/n + fhat*(1-fhat)*(2-fhat)/(2*n*pa*(1-pa)) else
stop("unknown option for parameter varest")
varvec <- c(varvec, varf)
}
if (fisher) {
fvec <- fisherz(fvec)
varvec <- rep(1/(n - 3), length(fvec))
}
MeanTheta <- mean(fvec)
W <- mean(varvec)
B <- var(fvec)
T <- W + (m + 1) * B/m
gamma <- (1 + 1/m) * B/T
v <- (m - 1) * (1 + m * W/((m + 1) * B))^2
stat <- abs(MeanTheta)/sqrt(T)
pvalimp <- 2 * pt(stat, v, lower.tail = FALSE)
r <- (1 + 1/m) * B/W
lambda <- (r + 2/(v + 3))/(r + 1)
gamma <- (1 + 1/m) * B/T
gammaalt <- (r + 2/(v + 3))/(r + 1)
fhatimp <- MeanTheta
if (fisher)
fhatimp <- ifisherz(MeanTheta)
llf <- fhatimp - qt(1 - alpha/2, v) * sqrt(T)
ulf <- fhatimp + qt(1 - alpha/2, v) * sqrt(T)
if (fisher) {
llf <- fhatimp - qnorm(1 - alpha/2) * sqrt(1/(n - 3))
ulf <- fhatimp + qnorm(1 - alpha/2) * sqrt(1/(n - 3))
llf <- ifisherz(llf)
ulf <- ifisherz(ulf)
}
return(list(fhatimp = fhatimp, pvalimp = pvalimp, r = r,
lambda = lambda, llf = llf, ulf = ulf, fvec = fvec, varvec = varvec,
gammaalt = gammaalt))
} |
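## The *.list_of_spills methods below extract overall, directional (to/from), net and
## pairwise spillover measures from every table in spillover_table$list_of_tables, one
## series per frequency band, and return them as zoo time series when dates are
## available; the plot*.list_of_spills counterparts simply plot those series band by band.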
overall.list_of_spills <- function(spillover_table, within = F, ...) {
T <- length(spillover_table$list_of_tables)
n_bands <- (length((spillover_table$list_of_tables[[1]])$bounds)-1)
  if (check_that_it_is_not_fft(spillover_table[[1]][[1]]) & within) warning("The 'within' setting has no effect here: in the DY case, the within and absolute spillovers are the same.")
temp <- lapply(spillover_table$list_of_tables, function(tab) overall(tab, within))
out <- lapply(1:n_bands, function(j) t(t(sapply(1:T, function(i) temp[[i]][[j]]))))
dates <- do.call(c, lapply(spillover_table$list_of_tables, function(i) i$date))
if (length(dates)==nrow(out[[1]])) {
for (i in 1:length(out)) {
out[[i]] <- zoo::zoo(out[[i]], order.by = dates)
}
}
return(out)
}
to.list_of_spills <- function(spillover_table, within = F, ...) {
T <- length(spillover_table$list_of_tables)
n_bands <- (length((spillover_table$list_of_tables[[1]])$bounds)-1)
  if (check_that_it_is_not_fft(spillover_table[[1]][[1]]) & within) warning("The 'within' setting has no effect here: in the DY case, the within and absolute spillovers are the same.")
temp <- lapply(spillover_table$list_of_tables, function(tab) to(tab, within))
out <- lapply(1:n_bands, function(j) t(sapply(1:T, function(i) temp[[i]][[j]])))
dates <- do.call(c, lapply(spillover_table$list_of_tables, function(i) i$date))
if (length(dates)==nrow(out[[1]])) {
for (i in 1:length(out)) {
out[[i]] <- zoo::zoo(out[[i]], order.by = dates)
}
}
return(out)
}
from.list_of_spills <- function(spillover_table, within = F, ...) {
T <- length(spillover_table$list_of_tables)
n_bands <- (length((spillover_table$list_of_tables[[1]])$bounds)-1)
  if (check_that_it_is_not_fft(spillover_table[[1]][[1]]) & within) warning("The 'within' setting has no effect here: in the DY case, the within and absolute spillovers are the same.")
temp <- lapply(spillover_table$list_of_tables, function(tab) from(tab, within))
out <- lapply(1:n_bands, function(j) t(sapply(1:T, function(i) temp[[i]][[j]])))
dates <- do.call(c, lapply(spillover_table$list_of_tables, function(i) i$date))
if (length(dates)==nrow(out[[1]])) {
for (i in 1:length(out)) {
out[[i]] <- zoo::zoo(out[[i]], order.by = dates)
}
}
return(out)
}
net.list_of_spills <- function(spillover_table, within = F, ...) {
T <- length(spillover_table$list_of_tables)
n_bands <- (length((spillover_table$list_of_tables[[1]])$bounds)-1)
  if (check_that_it_is_not_fft(spillover_table[[1]][[1]]) & within) warning("The 'within' setting has no effect here: in the DY case, the within and absolute spillovers are the same.")
temp <- lapply(spillover_table$list_of_tables, function(tab) net(tab, within))
out <- lapply(1:n_bands, function(j) t(sapply(1:T, function(i) temp[[i]][[j]])))
dates <- do.call(c, lapply(spillover_table$list_of_tables, function(i) i$date))
if (length(dates)==nrow(out[[1]])) {
for (i in 1:length(out)) {
out[[i]] <- zoo::zoo(out[[i]], order.by = dates)
}
}
return(out)
}
pairwise.list_of_spills <- function(spillover_table, within = F, ...) {
T <- length(spillover_table$list_of_tables)
n_bands <- (length((spillover_table$list_of_tables[[1]])$bounds)-1)
  if (check_that_it_is_not_fft(spillover_table[[1]][[1]]) & within) warning("The 'within' setting has no effect here: in the DY case, the within and absolute spillovers are the same.")
temp <- lapply(spillover_table$list_of_tables, function(tab) pairwise(tab, within))
out <- lapply(1:n_bands, function(j) t(sapply(1:T, function(i) temp[[i]][[j]])))
dates <- do.call(c, lapply(spillover_table$list_of_tables, function(i) i$date))
if (length(dates)==nrow(out[[1]])) {
for (i in 1:length(out)) {
out[[i]] <- zoo::zoo(out[[i]], order.by = dates)
}
}
return(out)
}
collapseBounds.list_of_spills <- function(spillover_table, which) {
  spillover_table$list_of_tables <- lapply(spillover_table$list_of_tables, function(i) collapseBounds(i, which))
return(spillover_table)
}
plotOverall.list_of_spills <- function(spillover_table, within = F, ...) {
spills <- overall(spillover_table, within)
if (length(spills)==1) {
zoo::plot.zoo(spills[[1]], main = "Overall spillovers", ylab = "")
} else {
for (i in 1:length(spills)) {
zoo::plot.zoo(spills[[i]], main = sprintf("Overall spillovers on band: %.2f to %.2f.", spillover_table$list_of_tables[[1]]$bounds[i], spillover_table$list_of_tables[[1]]$bounds[i+1]), ylab = "")
invisible(readline(prompt="Press [enter] to continue"))
}
}
}
plotTo.list_of_spills <- function(spillover_table, within = F, which = 1:nrow(spillover_table$list_of_tables[[1]]$tables[[1]]), ...) {
spills <- to(spillover_table, within)
if (length(spills)==1) {
zoo::plot.zoo(spills[[1]][,which], main = "To spillovers")
} else {
for (i in 1:length(spills)) {
zoo::plot.zoo(spills[[i]][,which], main = sprintf("To spillovers on band: %.2f to %.2f.", spillover_table$list_of_tables[[1]]$bounds[i], spillover_table$list_of_tables[[1]]$bounds[i+1]))
invisible(readline(prompt="Press [enter] to continue"))
}
}
}
plotFrom.list_of_spills <- function(spillover_table, within = F, which = 1:nrow(spillover_table$list_of_tables[[1]]$tables[[1]]), ...) {
spills <- from(spillover_table, within)
if (length(spills)==1) {
zoo::plot.zoo(spills[[1]][,which], main = "From spillovers")
} else {
for (i in 1:length(spills)) {
zoo::plot.zoo(spills[[i]][,which], main = sprintf("From spillovers on band: %.2f to %.2f.", spillover_table$list_of_tables[[1]]$bounds[i], spillover_table$list_of_tables[[1]]$bounds[i+1]))
invisible(readline(prompt="Press [enter] to continue"))
}
}
}
plotNet.list_of_spills <- function(spillover_table, within = F, which = 1:nrow(spillover_table$list_of_tables[[1]]$tables[[1]]), ...) {
spills <- net(spillover_table, within)
if (length(spills)==1) {
zoo::plot.zoo(spills[[1]][,which], main = "Net spillovers")
} else {
for (i in 1:length(spills)) {
zoo::plot.zoo(spills[[i]][,which], main = sprintf("Net spillovers on band: %.2f to %.2f.", spillover_table$list_of_tables[[1]]$bounds[i], spillover_table$list_of_tables[[1]]$bounds[i+1]))
invisible(readline(prompt="Press [enter] to continue"))
}
}
}
plotPairwise.list_of_spills <- function(spillover_table, within = F, which = 1:ncol(utils::combn(nrow(spillover_table$list_of_tables[[1]]$tables[[1]]), 2)), ...) {
spills <- pairwise(spillover_table, within)
if (length(spills)==1) {
zoo::plot.zoo(spills[[1]][,which], main = "Pairwise spillovers")
} else {
for (i in 1:length(spills)) {
zoo::plot.zoo(spills[[i]][,which], main = sprintf("Pairwise spillovers on band: %.2f to %.2f.", spillover_table$list_of_tables[[1]]$bounds[i], spillover_table$list_of_tables[[1]]$bounds[i+1]))
invisible(readline(prompt="Press [enter] to continue"))
}
}
}
print.list_of_spills <- function(x, ...) {
  cat("Suppressing printing of all the spillover tables; usually it is not a good\n",
      "idea to print them all (too many of them). If you want to do that anyway,\n",
      "use: lapply(\"..name..\", print).\n", sep = "")
}
dc <- function(x, d, ch = "&") {
frac <- function(x, d) {
res <- abs(x - trunc(x))
if (!missing(d)) res <- round(10 ^ d * res)
res
}
d <- max(d, 1)
fr <- frac(x, d)
paste(trunc(x), ch, fr, sep = "")
}
dcn <- function(x, d, ch = "&") {
  d <- max(d, 1)
  # sprintf() is vectorised, so no need for sapply() or eval(parse())
  s <- sprintf(paste0("%.", d, "f"), x)
  gsub("\\.", ch, s)
}
mpf <- function(r, after) {
  # signed, fixed-decimal formatting without eval(parse())
  paste(if (r < 0) "-" else "+", sprintf(paste0("%.", after, "f"), abs(r)))
}
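## Illustrative use of the decimal-separator helpers above (not run):
## dc(3.14159, 2)   # "3&14"   - integer part, separator, rounded fractional digits
## dcn(3.14159, 2)  # "3&14"   - sprintf-based variant
## mpf(-0.257, 2)   # "- 0.26" - signed value with a fixed number of decimals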
plot_vpc <- function(db,
show = NULL,
vpc_theme = NULL,
smooth = TRUE,
log_x = FALSE,
log_y = FALSE,
xlab = NULL,
ylab = NULL,
title = NULL,
verbose = FALSE) {
  if(is.null(vpc_theme) || !inherits(vpc_theme, "vpc_theme")) {
vpc_theme <- new_vpc_theme()
}
idv_as_factor <- is.factor(db$vpc_dat$bin)
if(db$type != "time-to-event") {
show <- replace_list_elements(show_default, show)
if(!is.null(db$stratify)) {
if(length(db$stratify) == 1) {
if(!is.null(db$aggr_obs)) colnames(db$aggr_obs)[match("strat", colnames(db$aggr_obs))] <- db$stratify[1]
if(!is.null(db$vpc_dat)) colnames(db$vpc_dat)[match("strat", colnames(db$vpc_dat))] <- db$stratify[1]
}
if(length(db$stratify) == 2) {
if(!is.null(db$aggr_obs)) {
colnames(db$aggr_obs)[match("strat1", colnames(db$aggr_obs))] <- db$stratify[1]
colnames(db$aggr_obs)[match("strat2", colnames(db$aggr_obs))] <- db$stratify[2]
}
if(!is.null(db$vpc_dat)) {
colnames(db$vpc_dat)[match("strat1", colnames(db$vpc_dat))] <- db$stratify[1]
colnames(db$vpc_dat)[match("strat2", colnames(db$vpc_dat))] <- db$stratify[2]
}
}
}
if (!is.null(db$sim)) {
if(idv_as_factor) db$vpc_dat$bin_mid <- db$vpc_dat$bin
pl <- ggplot2::ggplot(db$vpc_dat, ggplot2::aes(x=bin_mid, group=1))
if(show$sim_median) {
pl <- pl + ggplot2::geom_line(ggplot2::aes(y=q50.med), colour=vpc_theme$sim_median_color, linetype=vpc_theme$sim_median_linetype, size=vpc_theme$sim_median_size)
}
if(show$pi_as_area) {
if (smooth) {
pl <- pl +
ggplot2::geom_ribbon(ggplot2::aes(x=bin_mid, ymin=q5.med, ymax=q95.med), alpha=vpc_theme$sim_median_alpha, fill = vpc_theme$sim_median_fill)
} else {
pl <- pl +
ggplot2::geom_rect(ggplot2::aes(xmin=bin_min, xmax=bin_max, ymin=q5.med, ymax=q95.med), alpha=vpc_theme$sim_median_alpha, fill = vpc_theme$sim_median_fill)
}
} else {
if(show$sim_median_ci) {
if (smooth) {
pl <- pl +
ggplot2::geom_ribbon(ggplot2::aes(x=bin_mid, ymin=q50.low, ymax=q50.up), alpha=vpc_theme$sim_median_alpha, fill = vpc_theme$sim_median_fill)
} else {
pl <- pl +
ggplot2::geom_rect(ggplot2::aes(xmin=bin_min, xmax=bin_max, ymin=q50.low, ymax=q50.up), alpha=vpc_theme$sim_median_alpha, fill = vpc_theme$sim_median_fill)
}
}
if (show$pi) {
pl <- pl +
ggplot2::geom_line(ggplot2::aes(x=bin_mid, y=q5.med), colour=vpc_theme$sim_pi_color, linetype=vpc_theme$sim_pi_linetype, size=vpc_theme$sim_pi_size) +
ggplot2::geom_line(ggplot2::aes(x=bin_mid, y=q95.med), colour=vpc_theme$sim_pi_color, linetype=vpc_theme$sim_pi_linetype, size=vpc_theme$sim_pi_size)
}
if(show$pi_ci && "q5.low" %in% names(db$vpc_dat)) {
if (smooth) {
pl <- pl +
ggplot2::geom_ribbon(ggplot2::aes(x=bin_mid, ymin=q5.low, ymax=q5.up), alpha=vpc_theme$sim_pi_alpha, fill = vpc_theme$sim_pi_fill) +
ggplot2::geom_ribbon(ggplot2::aes(x=bin_mid, ymin=q95.low, ymax=q95.up), alpha=vpc_theme$sim_pi_alpha, fill = vpc_theme$sim_pi_fill)
} else {
pl <- pl +
ggplot2::geom_rect(ggplot2::aes(xmin=bin_min, xmax=bin_max, y=q5.low, ymin=q5.low, ymax=q5.up), alpha=vpc_theme$sim_pi_alpha, fill = vpc_theme$sim_pi_fill) +
ggplot2::geom_rect(ggplot2::aes(xmin=bin_min, xmax=bin_max, y=q95.low, ymin=q95.low, ymax=q95.up), alpha=vpc_theme$sim_pi_alpha, fill = vpc_theme$sim_pi_fill)
}
}
}
} else {
pl <- ggplot2::ggplot(db$aggr_obs)
}
if(!is.null(db$obs)) {
if(idv_as_factor) db$aggr_obs$bin_mid <- db$aggr_obs$bin
if (show$obs_median) {
pl <- pl +
ggplot2::geom_line(data=db$aggr_obs, ggplot2::aes(x=bin_mid, y=obs50),
linetype=vpc_theme$obs_median_linetype,
colour=vpc_theme$obs_median_color,
size=vpc_theme$obs_median_size)
}
if(show$obs_ci && !is.null(db$aggr_obs[["obs5"]])) {
pl <- pl +
ggplot2::geom_line(data=db$aggr_obs, ggplot2::aes(x=bin_mid, y=obs5), linetype=vpc_theme$obs_ci_linetype, colour=vpc_theme$obs_ci_color, size=vpc_theme$obs_ci_size) +
ggplot2::geom_line(data=db$aggr_obs, ggplot2::aes(x=bin_mid, y=obs95), linetype=vpc_theme$obs_ci_linetype, colour=vpc_theme$obs_ci_color, size=vpc_theme$obs_ci_size)
}
if(show$obs_dv) {
pl <- pl + ggplot2::geom_point(data=db$obs, ggplot2::aes(x=idv, y = dv), size=vpc_theme$obs_size, colour=vpc_theme$obs_color, alpha = vpc_theme$obs_alpha, shape = vpc_theme$obs_shape)
}
}
bdat <- data.frame(cbind(x=db$bins, y=NA))
if(show$bin_sep && !idv_as_factor) {
pl <- pl +
ggplot2::geom_rug(data=bdat, sides = "t", ggplot2::aes(x = x, y = y), colour=vpc_theme$bin_separators_color)
}
if(!is.null(xlab)) {
pl <- pl + ggplot2::xlab(xlab)
} else {
pl <- pl + ggplot2::xlab(db$xlab)
}
if(!is.null(ylab)) {
pl <- pl + ggplot2::ylab(ylab)
} else {
pl <- pl + ggplot2::ylab(db$ylab)
}
if (log_x) {
if(!idv_as_factor) pl <- pl + ggplot2::scale_x_log10()
else warning("log_x option has no effect when the IDV is a factor ")
}
if (log_y) {
pl <- pl + ggplot2::scale_y_log10()
}
if(!is.null(db$stratify)) {
if(is.null(db$labeller)) db$labeller <- ggplot2::label_both
if(length(db$stratify) == 1) {
if (db$facet == "wrap") {
pl <- pl + ggplot2::facet_wrap(stats::reformulate(db$stratify[1], NULL), scales = db$scales,
labeller = db$labeller)
} else {
if(length(grep("row", db$facet))>0) {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify[1], NULL), scales = db$scales,
labeller = db$labeller)
} else {
pl <- pl + ggplot2::facet_grid(stats::reformulate(".", db$stratify[1]), scales = db$scales,
labeller = db$labeller)
}
}
} else {
if (db$stratify[1] %in% c(colnames(db$vpc_dat), colnames(db$aggr_obs))) {
if(length(grep("row", db$facet))>0) {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify[1], db$stratify[2]), scales = db$scales,
labeller = db$labeller)
} else {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify[2], db$stratify[1]), scales = db$scales,
labeller = db$labeller)
}
} else {
if ("strat" %in% c(colnames(db$vpc_dat), colnames(db$aggr_obs))) {
} else {
stop ("Stratification unsuccesful.")
}
}
}
}
if(!is.null(db$lloq)) {
pl <- pl + ggplot2::geom_hline(yintercept = db$lloq, colour=vpc_theme$loq_color)
}
if(!is.null(db$uloq)) {
pl <- pl + ggplot2::geom_hline(yintercept = db$uloq, colour=vpc_theme$loq_color)
}
if (!is.null(title)) {
pl <- pl + ggplot2::ggtitle(title)
}
pl <- pl + theme_plain()
return(pl)
} else {
show <- replace_list_elements(show_default_tte, show)
if(!is.null(db$stratify_pars)) {
if(length(db$stratify_pars) == 1) {
if(!is.null(db$obs_km)) db$obs_km[[db$stratify_pars[1]]] <- as.factor(db$obs_km$strat)
if(!is.null(db$sim_km)) db$sim_km[[db$stratify_pars[1]]] <- as.factor(db$sim_km$strat)
if(!is.null(db$all_dat)) db$all_dat[[db$stratify_pars[1]]] <- as.factor(db$all_dat$strat)
}
if(length(db$stratify_pars) == 2) {
if(!is.null(db$obs_km)) {
db$obs_km[[db$stratify_pars[1]]] <- as.factor(db$obs_km$strat1)
db$obs_km[[db$stratify_pars[2]]] <- as.factor(db$obs_km$strat2)
}
if(!is.null(db$sim_km)) {
db$sim_km[[db$stratify_pars[1]]] <- as.factor(db$sim_km$strat1)
db$sim_km[[db$stratify_pars[2]]] <- as.factor(db$sim_km$strat2)
}
}
}
if(!is.null(db$obs_km)) db$obs_km$bin_mid <- c(0, diff(db$obs_km$time))
show$pi_as_area <- TRUE
if(!is.null(db$sim_km)) {
pl <- ggplot2::ggplot(db$sim_km, ggplot2::aes(x=bin_mid, y=qmed))
} else {
pl <- ggplot2::ggplot(db$obs_km, ggplot2::aes(x=bin_mid, y=qmed))
show$sim_median <- FALSE
show$sim_median_ci <- FALSE
show$pi_ci <- FALSE
show$pi_as_area <- FALSE
show$sim_km <- FALSE
}
if(show$sim_km) {
db$all_dat$strat_sim <- paste0(db$all_dat$strat, "_", db$all_dat$i)
transp <- min(.1, 20*(1/length(unique(db$all_dat$i))))
pl <- pl + ggplot2::geom_step(data = db$all_dat, ggplot2::aes(x=bin_mid, y=surv, group=strat_sim), colour=grDevices::rgb(0.2,.53,0.796, transp))
}
if(show$pi_as_area) {
if(smooth) {
if(!is.null(db$stratify_color)) {
pl <- pl + ggplot2::geom_ribbon(data = db$sim_km,
                                        ggplot2::aes(ymin = qmin, ymax = qmax, fill = get(db$stratify_color[1])),
alpha=vpc_theme$sim_median_alpha)
} else {
pl <- pl + ggplot2::geom_ribbon(data = db$sim_km,
ggplot2::aes(ymin = qmin, ymax=qmax),
fill = vpc_theme$sim_median_fill,
alpha=vpc_theme$sim_median_alpha)
}
} else {
if(!is.null(db$stratify_color)) {
pl <- pl + ggplot2::geom_rect(data = db$sim_km,
ggplot2::aes(xmin=bin_min, xmax=bin_max, ymin=qmin, ymax=qmax, fill = get(db$stratify_color[1])),
alpha=vpc_theme$sim_median_alpha)
} else {
pl <- pl + ggplot2::geom_rect(data = db$sim_km,
ggplot2::aes(xmin=bin_min, xmax=bin_max, ymin=qmin, ymax=qmax),
alpha=vpc_theme$sim_median_alpha,
fill = vpc_theme$sim_median_fill)
}
}
} else {
if(!is.null(db$obs)) {
pl <- ggplot2::ggplot(db$obs_km)
}
}
if(!is.null(db$cens_dat) && nrow(db$cens_dat)>0) {
pl <- pl + ggplot2::geom_point(data=db$cens_dat,
ggplot2::aes(x=time, y = y), shape="|", size=2.5)
}
if(show$sim_median) {
if (smooth) {
geom_line_custom <- ggplot2::geom_line
} else {
geom_line_custom <- ggplot2::geom_step
}
pl <- pl + geom_line_custom(linetype="dashed")
}
if(!is.null(db$obs) && show$obs_ci) {
pl <- pl + ggplot2::geom_ribbon(
data=db$obs_km,
ggplot2::aes(x=time, ymin=lower, ymax=upper, group=strat),
fill=vpc_theme$obs_ci_fill, colour = NA)
}
    if (!is.null(db$obs)) {
      chk_tbl <- db$obs_km %>%
        dplyr::group_by(strat) %>%
        dplyr::summarise(t = length(time))
      geom_step_custom <- ggplot2::geom_step
      if (sum(chk_tbl$t <= 1) > 0) {
        # fall back to a plain line when some stratum has too few observations for a step plot
        geom_step_custom <- ggplot2::geom_line
        msg("Warning: some strata in the observed data had zero or one observations, using line instead of step plot. Consider using fewer strata (e.g. using the 'events' argument).", verbose)
      }
      if(!is.null(db$stratify_color)) {
        pl <- pl + geom_step_custom(data = db$obs_km,
                                    ggplot2::aes(x=time, y=surv, colour=get(db$stratify_color[1])), size=.8)
      } else {
        pl <- pl + geom_step_custom(data = db$obs_km,
                                    ggplot2::aes(x=time, y=surv, group=strat), size=.8)
      }
    }
if(!is.null(db$stratify) || db$rtte) {
if(is.null(db$labeller)) db$labeller <- ggplot2::label_both
if (length(db$stratify_pars) == 1 | db$rtte) {
if (db$facet == "wrap") {
pl <- pl + ggplot2::facet_wrap(stats::reformulate(db$stratify_pars[1], NULL), scales = db$scales,
labeller = db$labeller)
} else {
if(length(grep("row", db$facet)) > 0) {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify_pars[1], NULL), scales = db$scales,
labeller = db$labeller)
} else {
pl <- pl + ggplot2::facet_grid(stats::reformulate(".", db$stratify_pars[1]), scales = db$scales,
labeller = db$labeller)
}
}
} else {
if(length(grep("row", db$facet)) > 0) {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify_pars[1], db$stratify_pars[2]), scales = db$scales,
labeller = db$labeller)
} else {
pl <- pl + ggplot2::facet_grid(stats::reformulate(db$stratify_pars[2], db$stratify_pars[1]), scales = db$scales,
labeller = db$labeller)
}
}
}
if(show$bin_sep) {
if(!(class(db$bins) == "logical" && db$bins == FALSE)) {
bdat <- data.frame(cbind(x = db$tmp_bins, y = NA))
pl <- pl + ggplot2::geom_rug(data=bdat, sides = "t", ggplot2::aes(x = x, y = y, group=NA), colour=vpc_theme$bin_separators_color)
}
}
if(!is.null(db$stratify_color)) {
pl <- pl + ggplot2::guides(fill = ggplot2::guide_legend(title=db$stratify_color[1]),
colour = ggplot2::guide_legend(title=db$stratify_color[1]))
}
if(!is.null(xlab)) {
pl <- pl + ggplot2::xlab(xlab)
} else {
pl <- pl + ggplot2::xlab(db$xlab)
}
if(!is.null(ylab)) {
pl <- pl + ggplot2::ylab(ylab)
} else {
pl <- pl + ggplot2::ylab(db$ylab)
}
return(pl)
}
} |
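## ensembleMOScsg0: fits censored, shifted-gamma ("csg0") ensemble MOS models over a
## sliding window of 'trainingDays' training dates, one fit (fitMOScsg0) per forecast
## date, and returns the fitted coefficient arrays (a, B, c, d, q) together with the
## training-period bookkeeping.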
ensembleMOScsg0 <-
function(ensembleData, trainingDays, consecutive = FALSE, dates = NULL,
control = controlMOScsg0(), warmStart = FALSE,
exchangeable = NULL)
{
if (!inherits(ensembleData,"ensembleData")) stop("not an ensembleData object")
call <- match.call()
if(!is.logical(warmStart)) stop("warmStart improperly specified")
if (!is.logical(consecutive)) stop("consecutive improperly specified")
 if (is.list(trainingDays)) trainingDays <- trainingDays[[1]]
if (length(trainingDays) > 1 || trainingDays <= 0
|| (trainingDays - trunc(trainingDays)) != 0)
stop("trainingDays improperly specified")
forecastHour <- ensembleFhour(ensembleData)
lag <- ceiling( forecastHour / 24 )
ensMemNames <- ensembleMemberLabels(ensembleData)
nForecasts <- length(ensMemNames)
exchangeable <- getExchangeable( exchangeable,
ensembleGroups(ensembleData),nForecasts)
M <- apply(ensembleForecasts(ensembleData), 1, function(z) all(is.na(z)))
M <- M | is.na(ensembleVerifObs(ensembleData))
M <- M | is.na(ensembleValidDates(ensembleData))
ensembleData <- ensembleData[!M,]
nObs <- ensembleNobs(ensembleData)
if (!nObs) stop("no observations")
ensDates <- ensembleValidDates(ensembleData)
if (is.null(ensDates)) stop("dates unavailable")
Dates <- as.character(ensDates)
DATES <- sort(unique(Dates))
if (trainingDays > length(DATES))
stop("insufficient training data")
julianDATES <- ymdhTOjul(DATES)
origin <- attr( julianDATES, "origin")
incr <- min(1,min(diff(julianDATES)))
Jdates <- seq(from = julianDATES[trainingDays]+lag*incr,
to = max(julianDATES)+lag*incr, by = incr)
DATEShh <- getHH(DATES)
if (length(DATEShh) != 1)
warning("valid dates do not have a unique forecast hour")
 lD <- unique(sapply(DATES, nchar))
 if (length(lD) != 1)
   stop("all dates in data should have same character length")
if (nullDates <- is.null(dates)) {
dates <- julTOymdh(Jdates, origin = origin, dropHour = (lD == 8))
}
else {
dates <- sort(unique(as.character(dates)))
if (!all(dateCheck(dates)))
stop("improperly specified date(s) in dates argument")
datesHH <- getHH(dates)
if (length(datesHH) != 1)
warning("dates do not have a unique forecast hour")
if (any(datesHH != DATEShh)) stop("specified dates incompatible with data")
    if (length(ld <- unique(sapply(dates,nchar))) != 1)
      stop("all specified dates should have same character length")
if (ld < lD) {
dates <- sapply( dates, function(s) paste(s, "00", sep =""))
}
    else if (ld > lD) {
dates <- sapply( dates, function(s) substring(s, 1, 8))
}
if (any(dates < julTOymdh(min(Jdates),origin=origin,dropHour=(lD == 8)))) {
stop("some dates precede the first training period")
}
if (any(dates > julTOymdh(max(Jdates),origin=origin,dropHour=(lD == 8)))) {
warning("there are dates beyond the last training period")
}
}
juliandates <- ymdhTOjul( dates, origin = origin)
nDates <- length(dates)
a <- array(NA, c(1,nDates))
dimnames(a) <- list("a", dates)
B <- array( NA, c(nForecasts, nDates))
dimnames(B) <- list(ensMemNames, dates)
c <- array( NA, c(1, nDates))
dimnames(c) <- list(c("c"), dates)
d <- array( NA, c(1, nDates))
dimnames(d) <- list(c("d"), dates)
q <- array( NA, c(1, nDates))
dimnames(q) <- list(c("q"), dates)
trainTable <- rep(0, nDates)
names(trainTable) <- dates
nIter <- rep(0, nDates)
names(nIter) <- dates
L <- length(juliandates)
twin <- 1:trainingDays
cat("\n")
if (control$scoringRule == 'crps'){
    warning("options for choosing optimization methods are unavailable. Constrained minimization 'L-BFGS-B' is applied")
}
cat("\n")
l <- 0
for(i in seq(along = juliandates)) {
cat("modeling for date", dates[i], "...")
if(!consecutive){
I <- (juliandates[i]-lag*incr) >= julianDATES
if (!any(I)) stop("insufficient training data")
j <- which(I)[sum(I)]
if (j != l) {
twin <- (j+1) - (1:trainingDays)
D <- as.logical(match(Dates, DATES[twin], nomatch=0))
if (!any(D)) stop("this should not happen")
        fit <- fitMOScsg0(ensembleData[D,], control = control, exchangeable = exchangeable)
        l <- j
      }
}else{
dMax <- DATES[which(juliandates[i] == julianDATES)- lag]
dMin <- DATES[which(juliandates[i] == julianDATES) - lag -
trainingDays + 1]
tDATES <- DATES[as.logical((DATES <= dMax) * (DATES >= dMin))]
D <- as.logical(match(Dates, tDATES, nomatch=0))
if (!any(D)) stop("this should not happen")
fit <- fitMOScsg0(ensembleData[D,], control = control, exchangeable = exchangeable)
}
  trainTable[i] <- sum(D)
q[,i] <- fit$q
c[,i] <- fit$c
d[,i] <- fit$d
a[i] <- fit$a
B[,i] <- fit$B
if (warmStart) {
if (is.null(exchangeable)){
control$start <- list(a = fit$a, B = fit$B, c = fit$c, d = fit$d, q = fit$q)
}else {
control$start <- list(a = fit$a, B = aggregate(fit$B, by=list(exchangeable),mean)[,2], c = fit$c, d = fit$d, q = fit$q)
}
}
cat("\n")
print(round(c(fit$a, fit$B),2))
print(round(c(fit$c, fit$d, fit$q),2))
cat("\n")
}
structure(list(training = c(days = trainingDays, lag = lag,
table = trainTable),
a = a, B = B, c = c, d = d, q = q,
exchangeable = exchangeable),
forecastHour = attr(ensembleData, "forecastHour"),
initializationTime = attr(ensembleData, "initializationTime"),
call = match.call(), class = "ensembleMOScsg0")
} |
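# Example (hedged sketch): typical use follows the ensembleBMA/ensembleMOS
# workflow, fitting the censored-shifted-gamma model over a sliding training
# window of an ensembleData object; object names below are placeholders.
# fits <- ensembleMOScsg0(precipEnsembleData, trainingDays = 30,
#                         control = controlMOScsg0())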
setClass('lcModelCrimCV', contains = 'lcModel')
setMethod('predictForCluster', signature('lcModelCrimCV'), function(
object, newdata, cluster, what = 'mu', ...)
{
assert_that(what %in% c('mu', 'nu', 'mean'))
if (nrow(newdata) == 0) {
return(numeric())
}
newtime = (newdata[[timeVariable(object)]] - object@model$minTime) / object@model$durTime
X = splines::bs(
x = newtime,
degree = getLcMethod(object)$dpolyp,
intercept = TRUE,
Boundary.knots = c(0, 1)
)
Xmat = X %*% object@model$beta
lambdaMat = exp(Xmat)
if (hasName(object@model, 'tau')) {
nuMat = exp(-object@model$tau * t(Xmat)) %>% t()
} else {
Zmat = splines::bs(
x = newtime,
degree = getLcMethod(object)$dpolyl,
intercept = TRUE,
Boundary.knots = c(0, 1)
)
nuMat = exp(Zmat %*% object@model$gamma)
}
nuMat = nuMat / (1 + nuMat)
predMat = switch(what,
mu = lambdaMat,
nu = nuMat,
mean = (1 - nuMat) * lambdaMat)
clusIdx = match(cluster, clusterNames(object))
predMat[, clusIdx]
})
setMethod('postprob', signature('lcModelCrimCV'), function(object) {
pp = object@model$gwt
colnames(pp) = clusterNames(object)
return(pp)
})
logLik.lcModelCrimCV = function(object, ...) {
ll = object@model$llike
attr(ll, 'nobs') = nIds(object)
attr(ll, 'df') = length(coef(object)) + 1
class(ll) = 'logLik'
return(ll)
}
coef.lcModelCrimCV = function(object, ...) {
betaMat = object@model$beta
colnames(betaMat) = clusterNames(object)
rownames(betaMat) = paste0('beta', seq_len(nrow(betaMat)) - 1)
if (hasName(object@model, 'tau')) {
tau = object@model$tau
tauMat = matrix(tau, nrow = length(tau), ncol = nClusters(object))
rownames(tauMat) = paste0('tau', seq_along(tau))
coefMat = rbind(betaMat, tauMat)
} else {
gammaMat = object@model$gamma
rownames(gammaMat) = paste0('gamma', seq_len(nrow(gammaMat)) - 1)
coefMat = rbind(betaMat, gammaMat)
}
return(coefMat)
}
setMethod('converged', signature('lcModelCrimCV'), function(object) {
TRUE
}) |
context("nextItem-MFII")
load("cat_objects.Rdata")
test_that("ltm nextItem MFII calculates correctly", {
ltm_cat@estimation <- "EAP"
ltm_cat@selection <- "MFII"
ltm_cat@answers[1:7] <- c(0, 1, 0, 0, 1, 0, 0)
package_next <- selectItem(ltm_cat)
package_item <- package_next$next_item
package_est <- package_next$estimates[package_next$estimates$q_number == package_item,
"MFII"]
expect_equal(package_item, 27)
expect_equal(round(package_est, 3), 1.972)
})
test_that("grm nextItem MFII calculates correctly", {
grm_cat@estimation <- "EAP"
grm_cat@selection <- "MFII"
grm_cat@answers[1:8] <- c(5, 4, 2, 2, 1, 2, 2, 3)
package_next <- selectItem(grm_cat)
package_item <- package_next$next_item
package_est <- package_next$estimates[package_next$estimates$q_number == package_item,
"MFII"]
expect_equal(package_item, 10)
expect_equal(round(package_est, 3), 4.286)
})
test_that("nextItem MFII is actually the maximum estimate", {
ltm_cat@selection <- "MFII"
ltm_cat@answers[1:5] <- c(1, 0, 1, 1, 1)
grm_cat@selection <- "MFII"
grm_cat@answers[1:5] <- c(5, 4, 2, 2, 5)
gpcm_cat@selection <- "MFII"
gpcm_cat@answers[1:5] <- c(1, 1, 2, 2, 4)
ltm_next <- selectItem(ltm_cat)
grm_next <- selectItem(grm_cat)
gpcm_next <- selectItem(gpcm_cat)
expect_equal(ltm_next$next_item, ltm_next$estimates[which(ltm_next$estimates[, "MFII"] ==
max(ltm_next$estimates[, "MFII"])), "q_number"])
expect_equal(grm_next$next_item, grm_next$estimates[which(grm_next$estimates[, "MFII"] ==
max(grm_next$estimates[, "MFII"])), "q_number"])
expect_equal(gpcm_next$next_item, gpcm_next$estimates[which(gpcm_next$estimates[, "MFII"] ==
max(gpcm_next$estimates[, "MFII"])), "q_number"])
})
test_that("nextItem MFII correctly skips questions", {
ltm_cat@selection <- "MFII"
grm_cat@selection <- "MFII"
gpcm_cat@selection <- "MFII"
ltm_cat@answers[1:10] <- c(rep(-1, 5), 1, 1, 0, 0, 1)
grm_cat@answers[1:5] <- c(-1, -1, 5, 4, 3)
gpcm_cat@answers[1:5] <- c(-1, -1, 5, 4, 3)
ltm_next <- selectItem(ltm_cat)
grm_next <- selectItem(grm_cat)
gpcm_next <- selectItem(gpcm_cat)
expect_equal(nrow(ltm_next$estimates) + sum(!is.na(ltm_cat@answers)),
length(ltm_cat@answers))
expect_equal(nrow(grm_next$estimates) + sum(!is.na(grm_cat@answers)),
length(grm_cat@answers))
expect_equal(nrow(gpcm_next$estimates) + sum(!is.na(gpcm_cat@answers)),
length(gpcm_cat@answers))
}) |
inventorygames <-
function(n=NA,a=NA,d=NA,h=NA,m=NA,r=NA,b=NA,model=c("EOQ","EPQ")){
  model <- model[1]
  if (model=="EOQ"){return(EOQcoo(n,a,d,h,m))}
if (model=="EPQ"){return(EPQcoo(n,a,d,h,m,r,b))}
if (model!="EOQ"&model!="EPQ"){
cat("Only EOQ and EPQ can be analyzed with this function.", sep="\n")
}
} |
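# Example (hedged sketch): a three-firm EOQ cooperative situation. EOQcoo() is
# assumed to be defined elsewhere in this package and all numbers are
# illustrative only.
# inventorygames(n = 3, a = 600, d = c(500, 300, 200), h = c(9.6, 11, 10),
#                model = "EOQ")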
.Random.seed <-
c(403L, 10L, 568137784L, 1476767515L, 1179761785L, 1103537352L,
-607255306L, 2065863985L, 3442019L, -2132719822L, 685150340L,
1319955655L, 869482397L, 1187924756L, -1864001094L, 418672645L,
606774751L, -1642530714L, -726300688L, 1448710419L, 1488228017L,
-465426080L, -31736066L, 1192860297L, -299797061L, 272387978L,
-2051836308L, -1392408593L, 717537317L, -1126575908L, -1682835790L,
1168095661L, 72732007L, -999212402L, -1620434072L, -117933205L,
-194108151L, 582032280L, -1586668858L, -1614856159L, -1832319821L,
1772484130L, -1762408428L, -999560041L, -1392310643L, -1349146716L,
1875171722L, 219080181L, -16459697L, -1927458986L, 1271457280L,
628913987L, -812306719L, 461630544L, 1162212526L, 1077997657L,
-576786325L, -1419491878L, -1706922340L, 1598455967L, -1835558155L,
-2147194996L, -1385994654L, -386123203L, 1381754039L, -885515426L,
-968851816L, 1958149627L, -155852903L, -2046994392L, 1682407638L,
-2145090223L, 1524036611L, -232932462L, 1754700068L, 1124278247L,
-412908483L, -606547276L, 250929434L, -1953918363L, -1796032769L,
1066412934L, -180851440L, -2066528845L, 2016638545L, -667273728L,
1122833118L, -2044919511L, -927847205L, -439362582L, 698461068L,
536145103L, -825961339L, -188358852L, 411115026L, -452461299L,
-331135417L, -1512866770L, -1040588408L, -1071794037L, 215388777L,
1600294456L, 881297126L, 1791517313L, -825700845L, 2055371202L,
377270900L, -1533471625L, 1487568237L, -60743164L, -872005334L,
71530133L, 1907055919L, 759949366L, -1427522464L, -1155927389L,
-734337599L, 402911664L, 1984737486L, -1156796871L, -1102491573L,
-106782214L, -1394402052L, -2040368833L, -343978475L, -1043051220L,
1538063874L, -1870043939L, 1265516503L, 1130863294L, 1012047096L,
1659405019L, -1988429511L, 1759493256L, 1242663990L, 710432881L,
914827939L, -298496014L, 1186734916L, -1649431545L, -1581119011L,
27244244L, 812767738L, -1670810299L, 1315414431L, 1559626534L,
-1476584528L, -100366253L, 1626997617L, -2141121888L, -1475410370L,
438185289L, -1360022533L, -179931190L, 2081286444L, 1456366895L,
46951397L, 1822683804L, -481724686L, -1244780691L, -1254002521L,
-755084594L, -315514840L, -367289301L, -2091880887L, 578586456L,
-2116840954L, -1837007903L, 1611061619L, 1291753954L, 859577684L,
348869719L, 901716045L, -711025692L, -591767094L, -1273454539L,
-14238065L, 1521586070L, -1239919808L, 1568742915L, 718062625L,
-1297766640L, -1399464722L, 1212564633L, -938389205L, 150743066L,
-258858276L, 678527711L, -2136326987L, 1910561356L, -1013717342L,
1645678077L, -583047433L, 1081847838L, -212917544L, 575374907L,
-2031193127L, -983645848L, 704239766L, 1312462353L, 461795267L,
-1531120686L, 796765796L, 2060197287L, -2018405635L, -248486156L,
656585178L, -125725403L, 1293317183L, -83672122L, -120434608L,
-668296077L, 346517649L, -87060288L, 1914706590L, 165934697L,
-1834350949L, 103099306L, -1140338740L, -2106326385L, 470215365L,
914113148L, 1326485202L, 373606989L, 499812103L, 1105939182L,
1215242184L, -28942133L, -1190231767L, 1960705228L, -2092522336L,
1826362754L, -1211905816L, -459507988L, -188705476L, 1971866738L,
1050290288L, 1897136164L, -983454664L, 905989914L, -934802224L,
-943300804L, -558102636L, 2087170946L, 1898637760L, -601194948L,
-825801328L, 1414995490L, 208643336L, -981086580L, -1541996772L,
-1656463214L, -702875264L, -1866815980L, 1558396712L, 1194261050L,
1181908432L, -1270355604L, -1694186572L, 1497249810L, 1562834400L,
-1160976436L, 1121648864L, -1697138654L, -814702296L, -1516642804L,
568420156L, -284695566L, -687654160L, -908046780L, 1000850008L,
-2004160710L, 211314160L, 8230588L, -72976108L, -825900350L,
1419613280L, 1298801532L, -441468976L, -970170078L, -1061184856L,
1027595148L, -2078918468L, -1788522702L, 197413440L, -1486922572L,
-1738866104L, -836999750L, 778936848L, -333335828L, 1244258644L,
1210442002L, 1894272416L, -675216692L, -1175806368L, -1621493822L,
611714856L, 1089695660L, 1571472188L, 731638130L, 1772444464L,
1129499940L, -2106528968L, -968196390L, 1771227024L, 574410492L,
606219156L, -2050588606L, -2100497152L, -89652164L, 1761378640L,
1032376482L, 75530568L, 1667716876L, 939664092L, -617173678L,
1644151936L, 1588052052L, -1987440664L, -1187411718L, 1037691728L,
2002779948L, 1948764532L, -141137006L, 906315872L, -2071663220L,
332913504L, 422042082L, -1729055832L, 1555576012L, -621338628L,
-368335438L, -350251536L, 1144802244L, 435438936L, 727046522L,
642450352L, -646305348L, -204564396L, 1427716034L, -377884640L,
-1581426500L, 627520080L, -1181977630L, -1208860568L, 490079308L,
1659582716L, -1262351374L, 1079232768L, -590192268L, 132361224L,
-436920390L, 1682223568L, 375045100L, 2083570068L, 1494438354L,
-1050447520L, 416804684L, -1030620896L, -605028478L, -2000687384L,
1699823852L, -1653214148L, 1144716146L, 1615093360L, 1548309540L,
27749176L, -1634407270L, -552193328L, 2003837116L, 719515284L,
1847596674L, -1322669888L, -266263748L, 1640520848L, 1955339810L,
638324744L, -923239156L, -239620964L, -2055995886L, 105316224L,
805801492L, -586372568L, 380810554L, -1939633712L, -1594067860L,
125143348L, -2134657390L, 1208519520L, -1430842548L, 575576032L,
-420268126L, -1304508504L, -289952372L, 1099114684L, 1540262258L,
-1903217424L, 930205764L, -16457384L, 2143594042L, 1621932912L,
-1472284612L, 46694164L, -1054431806L, -1048773920L, -16107652L,
-1820000560L, 1477114018L, 502860840L, -835107572L, -257472580L,
-229504718L, -1165276736L, -1821439436L, -456344632L, -1534248774L,
-630560368L, -1932201492L, -1804262828L, -590458734L, -584863968L,
-836507188L, -1161718304L, -1140443454L, 550159400L, 1571803820L,
-2036923204L, 1038649842L, 857444656L, 1841845412L, -2014239048L,
-2062828198L, 991065488L, 1282940L, 1648888084L, 1894900546L,
245979904L, -802080836L, 1936662352L, 869630754L, 48700744L,
1432750604L, -1032853412L, -760841006L, -1185859072L, -1150153388L,
1840921064L, 1640508410L, -1863647024L, 1271401772L, -1419779340L,
1730034066L, -260254624L, 1129096460L, 294802144L, -1658762910L,
-1032880356L, -1669035958L, -2105836481L, -1459725175L, 78477470L,
-1269131956L, -1589054179L, 96735399L, -466493488L, -78440594L,
-227434981L, -1371396227L, 952577386L, -1450753016L, -1511060303L,
107129635L, 1980028964L, -173499694L, -1418621145L, 1089212785L,
915920822L, -1611216044L, 221599205L, -774311953L, 1850511144L,
309753734L, -1798552653L, 793453333L, 1724562802L, -1332526656L,
-1672359383L, 73602715L, 255541100L, 1804576634L, 1329070255L,
-1750008903L, 1302564302L, 1065662748L, 401943693L, -89071369L,
100011008L, 1427271262L, 482300203L, 1324069421L, -1444793702L,
-777810344L, 2087489985L, 428850803L, -1717355628L, -1535528414L,
59170103L, -175465471L, -1220378362L, -121748156L, 321357941L,
92883423L, 1580078392L, -1259182314L, 1413293859L, 1917236453L,
545902850L, 1167967088L, -1976403175L, 1193204491L, 570550268L,
-1487674326L, 1180166239L, 1878343657L, -2092113666L, 791630060L,
-1545882563L, 279070727L, -1014934544L, -2120933490L, 1577778619L,
211966877L, -1596792502L, 1225632104L, 1197514577L, -78619581L,
602817860L, 1540883954L, -1222897721L, 1361899921L, -1211792106L,
1127791988L, 1166769157L, 1648643151L, -1941872568L, -53815834L,
1782475539L, 359402613L, -740510126L, 1782730912L, 929114505L,
1913170363L, 150775308L, 1883996058L, -1645893873L, -1360880295L,
-1571661842L, -745390468L, 959203053L, -910397033L, -1458906528L,
119263294L, -1215579893L, 490381L, 99023034L, -1855095048L, -88869727L,
-1698025389L, 815537012L, 2069915906L, 1717406103L, -1199066271L,
-706041946L, 2112463268L, 1039988821L, 1074425087L, 2129408408L,
1399065270L, 103514947L, 1302429317L, -2001644766L, 1218914320L,
-31566919L, -432753557L, -1165359268L, -214180214L, 1376266879L,
-1807297591L, -460828578L, -1207602420L, -467928355L, 1972507623L,
-379443184L, 928638254L, 1119402459L, -1127915843L, -146486230L,
1219349704L, -1756263951L, -385169565L, 2038906724L, 1189271186L,
-1114623129L, 1813093681L, 922629750L, -1260664940L, 1479693221L,
70624431L, -1002481304L, -1851432634L, -1056933645L, -861583787L,
1052125746L, 574789632L, 870804713L, 2041105243L, 1399770540L,
2059441594L, 1573647215L, -117791495L, 1277213070L, 1049685084L,
-898482483L, -312116681L, -653176640L, -269849276L) |
dapply = function(x, fun, ..., col.names) {
assertFunction(fun)
x = lapply(x, fun, ...)
if (missing(col.names)) {
ns = names2(x)
missing = which(is.na(ns))
if (length(missing))
names(x) = replace(ns, missing, paste0("Var.", missing))
} else {
assertCharacter(col.names, len = length(x), any.missing = FALSE)
names(x) = col.names
}
n = unique(viapply(x, length))
if (length(n) > 1L) {
max.n = max(n)
if (any(max.n %% n))
stop("Arguments imply differing number of rows: ", collapse(n, ", "))
x = lapply(x, rep_len, length.out = max.n)
n = max.n
}
attr(x, "row.names") = seq_len(n)
attr(x, "class") = "data.frame"
return(x)
} |
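# Example (hedged): dapply() builds a bare data.frame from a list, recycling
# shorter elements to the longest length. It relies on helpers from this
# package's imports (assertFunction, names2, viapply, collapse), so the demo
# is guarded.
if (interactive() &&
    all(vapply(c("assertFunction", "names2", "viapply", "collapse"),
               exists, logical(1)))) {
  dapply(list(a = 1:4, b = 1:2), fun = identity)  # b recycled to 1, 2, 1, 2
}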
BIFIE.logistreg <- function( BIFIEobj, dep=NULL, pre=NULL,
formula=NULL, group=NULL, group_values=NULL, se=TRUE,
eps=1E-8, maxiter=100)
{
s1 <- Sys.time()
cl <- match.call()
bifieobj <- BIFIEobj
if (bifieobj$cdata){
formula_vars <- NULL
if (! is.null(formula) ){
formula_vars <- all.vars( formula )
}
varnames <- unique( c( dep, pre, group, "one", formula_vars ) )
bifieobj <- BIFIE.BIFIEcdata2BIFIEdata( bifieobj, varnames=varnames )
}
FF <- Nimp <- bifieobj$Nimp
N <- bifieobj$N
dat1 <- bifieobj$dat1
wgt <- bifieobj$wgt
wgtrep <- bifieobj$wgtrep
varnames <- bifieobj$varnames
RR <- bifieobj$RR
datalistM <- bifieobj$datalistM
fayfac <- bifieobj$fayfac
if ( ! is.null( formula) ){
cat("|*** Data Preparation ")
utils::flush.console()
bifieobj2 <- datalistM
colnames(bifieobj2) <- varnames
if ( is.null(group) ){ group <- "one" ; group_values <- 1 }
bifieobj2 <- as.data.frame( bifieobj2 )
m1 <- stats::model.matrix(formula, data=bifieobj2)
m0 <- m1
m1 <- matrix( NA, nrow=nrow(bifieobj2), ncol=ncol(m0) )
m1[ match( rownames(m0),rownames(bifieobj2) ), ] <- m0
colnames(m1) <- colnames(m0)
dep <- rownames( attr( stats::terms(formula),"factors") )[1]
pre <- colnames( m1 )
datalistM <- as.matrix( cbind( bifieobj2[, dep ], m1, bifieobj2[,group] ) )
varnames <- c( dep, pre, group )
cat("\n")
}
if (RR==1){ RR <- 0 }
if ( ! se ){
wgtrep <- matrix( wgt, ncol=1 )
RR <- 0
}
dep_index <- unlist( sapply( dep, FUN=function(vv){
which( varnames==vv ) } ) )
pre_index <- unlist( sapply( pre, FUN=function(vv){
which( varnames==vv ) } ) )
VV <- length(pre)
wgt_ <- matrix( wgt, ncol=1 )
if ( is.null( group) ){ nogroup <- TRUE } else { nogroup <- FALSE }
cat(paste0( "|", paste0( rep("*", FF), collapse=""), "|\n" ))
if (nogroup){
group <- "one"
group_values <- c(1)
}
group_index <- match( group, varnames )
if ( is.null(group_values) ){
t1 <- bifie_table( datalistM[, group_index ] )
group_values <- sort( as.numeric( paste( names(t1) ) ))
}
res00 <- BIFIE_create_pseudogroup( datalistM, group, group_index, group_values )
    datalistM <- res00$datalistM
    group_index <- res00$group_index
    GR <- res00$GR
    group_values <- res00$group_values
    group <- res00$group
res <- bifiesurvey_rcpp_logistreg( datalist=datalistM, wgt1=wgt_,
wgtrep=as.matrix(wgtrep), dep_index=dep_index-1, pre_index=pre_index-1,
fayfac=fayfac, NI=Nimp, group_index1=group_index-1, group_values=group_values,
eps=eps, maxiter=maxiter )
GG <- length(group_values)
ZZ <- VV+1
p1 <- c( rep("b",VV), "R2" )
p2 <- c( pre, "NA" )
dfr <- data.frame( "parameter"=rep(p1,GG) )
dfr$var <- rep(p2,GG)
if (! nogroup){
dfr$groupvar <- group
dfr$groupval <- rep( group_values, each=ZZ )
}
dfr$Ncases <- rep( rowMeans( res$ncasesM ), each=ZZ )
dfr$Nweight <- rep( rowMeans( res$sumwgtM ), each=ZZ )
dfr <- create_summary_table( res_pars=res$regrcoefL,
parsM=res$regrcoefM, parsrepM=res$regrcoefrepM,
dfr=dfr, BIFIEobj=BIFIEobj )
dfr <- clean_summary_table( dfr=dfr, RR=RR, se=se, Nimp=Nimp )
nogroupL <- rep( nogroup, nrow(dfr) )
parnames <- paste0( dfr$parameter, "_", dfr$var,
ifelse( ! nogroupL, paste0( "_", dfr$groupvar, "_" ), "" ),
ifelse( ! nogroupL, dfr$groupval, "" ) )
dfr <- BIFIE_table_multiple_groupings( dfr, res00 )
s2 <- Sys.time()
timediff <- c(s1, s2)
res1 <- list( stat=dfr, output=res, timediff=timediff,
N=N, Nimp=Nimp, RR=RR, fayfac=fayfac,
NMI=BIFIEobj$NMI, Nimp_NMI=BIFIEobj$Nimp_NMI,
GG=GG, parnames=parnames, CALL=cl)
class(res1) <- "BIFIE.logistreg"
return(res1)
}
summary.BIFIE.logistreg <- function( object, digits=4, ... )
{
BIFIE.summary(object)
cat("Statistical Inference for Logistic Regression \n")
obji <- object$stat
print.object.summary( obji, digits=digits )
} |
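# Example (hedged sketch): with a BIFIEdata object (replicate weights and
# possibly multiply imputed data), a survey-weighted logistic regression can
# be requested via dep/pre or via a formula; names below are placeholders.
# mod <- BIFIE.logistreg(BIFIEobj = bdat, formula = pass ~ female + ses,
#                        group = "country")
# summary(mod)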
SRM_INCLUDE_PARAMETERS_PARM_LIST <- function( parm.table, parm_list,
symm_matrices, include_fixed=FALSE )
{
NPT <- nrow(parm.table)
for (rr in seq_len(NPT) ){
val <- parm.table$est[rr]
val_na <- is.na(val)
if ( ( val_na & include_fixed ) | ( ! val_na) ){
if ( is.na(val) ){
val <- parm.table$fixed[rr]
}
if (parm.table$level[rr] == "U"){
entr <- "parm_list_U"
} else {
entr <- "parm_list_D"
}
mat_rr <- parm.table$mat[rr]
r1 <- parm.table$row[rr]
c1 <- parm.table$col[rr]
group_rr <- parm.table$group[rr]
parm_list[[ entr ]][[ group_rr ]][[ mat_rr ]][ r1, c1 ] <- val
if (mat_rr %in% symm_matrices){
parm_list[[ entr ]][[ group_rr ]][[ mat_rr ]][c1, r1 ] <- val
}
}
}
return(parm_list)
} |
bias.plot <- function(x, abline = TRUE,...) {
vif <- vif(x)
bias2 <- rstats1(x)$bias2
var <- rstats1(x)$var
mse <- rstats1(x)$mse
minmse <- min(mse)
K <- x$K[which.min(mse)]
col = cbind("black", "red", "green")
ridgetrade <- cbind(var,bias2, mse)
if (length(x$K) == 1) {
plot(
x = rep(x$K, length(ridgetrade)),
y = ridgetrade,
      main = "Bias, Variance Tradeoff",
xlab = "Biasing Parameter",
ylab = " ",
col = col,
lwd = 2,
lty = c(1,4,5)
)
legend(
"topright",
legend = c("Var", "Bias^2", "MSE"),
col = col,
lwd = 2,
fill = 1:3,
lty = c(1,4,5),
cex = 0.7,
pt.cex = 0.5,
bty = "o",
bg = "transparent",
y.intersp = 0.4,
x.intersp = 0.3,
merge = TRUE
)
}else
matplot(
x$K,
ridgetrade,
main = 'Bias, Variance Tradeoff',
xlab = 'Biasing Parameter',
ylab= " ",
col = col,
lwd = 2,
type = 'l',
lty = c(1,4, 5)
)
legend(
"topright",
legend = c("Var", "Bias^2", "MSE"),
col = col,
lwd = 2,
fill = 1:3,
lty = c(1,4,5),
cex = 0.7,
pt.cex = 0.5,
bty = "o",
bg = "transparent",
y.intersp = 0.4,
x.intersp = 0.3,
merge = TRUE
)
if (abline) {
abline(v = K, lty = 2)
abline(h = minmse, lty = 2)
text(K,
max(rstats1(x)$mse),
paste(c("min MSE", " at K="), c(round(minmse,3), K ), collapse = ''),
col="red",
pos=4
)
}
} |
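# Example (hedged sketch): bias.plot() expects a ridge fit that exposes vif()
# and rstats1() methods and carries a vector of biasing parameters K
# (an lmridge-style object); sketched call only.
# ridge_fit <- lmridge(y ~ ., data = mydata, K = seq(0, 0.2, 0.005))
# bias.plot(ridge_fit)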
tango.test = function(cases, pop, w, nsim = 0) {
arg_check_tango_test(cases, pop, w, nsim)
N = length(cases)
yplus = sum(cases)
r = cases / yplus
p = pop / sum(pop)
ee = r - p
gof = sum(ee ^ 2)
sa = (crossprod(ee, w - diag(N)) %*% ee)[1, 1]
tstat = gof + sa
vp = diag(p) - tcrossprod(p)
wvp = w %*% vp
wvp2 = wvp %*% wvp
wvp3 = wvp2 %*% wvp
ec = sum(diag(wvp)) / yplus
vc = sum(diag(wvp2)) * 2 / yplus ^ 2
skc = 2 * sqrt(2) * sum(diag(wvp3)) / (sum(diag(wvp2)) ^ (1.5))
dfc = 8 / skc ^ 2
tstat.std = (tstat - ec) / sqrt(vc)
tstat.chisq = dfc + tstat.std * sqrt(2 * dfc)
pvalue.chisq = 1 - stats::pchisq(tstat.chisq, dfc)
out = list(tstat = tstat, gof = gof, sa = sa,
tstat.chisq = tstat.chisq, pvalue.chisq = pvalue.chisq,
dfc = dfc)
if (nsim > 0) {
ysim = stats::rmultinom(n = nsim, size = sum(cases),
prob = pop / sum(pop))
rsim = ysim / yplus
eesim = rsim - p
gof.sim = colSums(eesim ^ 2)
sa.sim = rowSums(crossprod(eesim, w - diag(N)) * t(eesim))
tstat.sim = gof.sim + sa.sim
out$gof.sim = gof.sim
out$sa.sim = sa.sim
out$tstat.sim = tstat.sim
pvalue.sim = (1 + sum(tstat.sim >= tstat)) / (1 + nsim)
out$pvalue.sim = pvalue.sim
}
class(out) = "tango"
return(out)
}
arg_check_tango_test = function(cases, pop, w, nsim) {
N = length(cases)
arg_check_cases(cases, N)
arg_check_pop(pop, N)
arg_check_tango_w(w, N)
arg_check_nsim(nsim)
} |
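# Example (hedged sketch): Tango's statistic needs case counts, populations,
# and a spatial weight matrix, often exp(-d / kappa) on inter-centroid
# distances d. The arg_check_* helpers above come from elsewhere in this
# package, so the demo is doubly guarded; all data are simulated.
if (interactive() && exists("arg_check_cases")) {
  set.seed(42)
  n_reg <- 20
  coords <- cbind(runif(n_reg), runif(n_reg))
  w_demo <- exp(-as.matrix(dist(coords)) / 0.2)
  tango.test(cases = rpois(n_reg, 5), pop = rep(1000, n_reg),
             w = w_demo, nsim = 99)
}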
knitr::opts_chunk$set(
collapse = TRUE,
  comment = "#>"
)
library(prioGene)
net_disease <- deal_net(net,dise_gene)
genes_mat <- get_gene_mat(net_disease)
terms_mat <- get_term_mat(net_disease)
net_disease_term <- get_net_disease_term(genes_mat,net_disease)
node_weight <- get_node_weight(genes_mat)
edge_weight <- get_edge_weight(net_disease_term,terms_mat)
R_0<- get_R_0(dise_gene,node_weight,f=1)
result <- get_R(node_weight, net_disease_term, bet = 0.5, R_0 = R_0, threshold = 10^(-9))
sessionInfo() |
chebyshev.s.inner.products <- function( n )
{
if ( n < 0 )
stop( "negative highest polynomial order" )
if ( n != round( n ) )
stop( "highest polynomial order is not integer" )
inner.products <- rep( pi, n + 1 )
return ( inner.products )
} |
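# Example (hedged): by the construction above, highest order n yields n + 1
# inner products, all equal to pi under this normalization.
if (interactive()) {
  stopifnot(identical(chebyshev.s.inner.products(2), rep(pi, 3)))
}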
"summary.rf.ensembles" <- function(object, ...) {
cat("\nCall:\n", deparse(object$call), "\n")
cat(" Type of random forest: ", object$type, "\n", sep="")
cat(" Number of random forests models: ", object$nrf, "\n", sep="")
cat(" Number of trees: ", object$ntree, "\n",sep="")
cat("No. of variables tried at each split: ", object$mtry, "\n\n", sep="")
if(object$type == "classification") {
if(!is.null(object$confusion)) {
cat(" OOB estimate of error rate: ",
round(object$err.rate*100,2), "%\n", sep="")
cat("Confusion matrix:\n")
print(object$confusion)
if(!is.null(object$test$err.rate)) {
cat(" Test set error rate: ",
round(object$test$err.rate*100,2), "%\n", sep="")
cat("Confusion matrix:\n")
print(object$test$confusion)
}
}
}
if(object$type == "regression") {
if(!is.null(object$mse)) {
cat(" Mean of squared residuals: ", object$mse,
"\n", sep="")
cat(" % Var explained: ",
round(object$rsq, digits=2), "\n", sep="")
if(!is.null(object$test$mse)) {
cat(" Test set MSE: ",
round(object$test$mse, digits=2), "\n", sep="")
cat(" % Var explained: ",
round(object$test$rsq, digits=2), "\n", sep="")
}
}
if (!is.null(object$coefs)) {
cat(" Bias correction applied:\n")
cat(" Intercept: ", object$coefs[1], "\n")
cat(" Slope: ", object$coefs[2], "\n")
}
}
} |
IRT.jackknife <- function (object, repDesign, ...)
{
UseMethod("IRT.jackknife")
} |
rscale.cubinf <-
function(object)
{
famname <- object$family
z <- 1
names(z) <- famname
class(z) <- "cubinf.i"
z} |
ki <-
function(X, dummy=FALSE, pos=NULL)
{
if (dummy == TRUE){X = as.matrix(X[,-c(pos)])}
X = as.matrix(X)
if (dim(X)[2] == 1){
salida = "At least 2 quantitative independent variables are needed (excluding the intercept)"
} else {
ki = array(,dim(X)[2])
for (i in 1:dim(X)[2]){
ki[i] = crossprod(X[,i])/(crossprod(X[,i])-t(X[,i])%*%X[,-i]%*%solve(crossprod(X[,-i]))%*%t(X[,-i])%*%X[,i])
}
if (dim(X)[2] == 2){
salida = ki
} else {
porc1 = (VIF(X)/ki[-1])*100
porc2 = 100 - porc1
salida = list(ki, porc1, porc2)
names(salida) = c("Stewart index", "Proportion of essential collinearity in i-th independent variable (without intercept)", "Proportion of non-essential collinearity in i-th independent variable (without intercept)")
}
}
return(salida)
} |
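# Example (hedged sketch): with exactly two regressors ki() returns only the
# Stewart indices (the VIF()-based essential/non-essential split needs three
# or more columns). Data are simulated purely for illustration.
if (interactive()) {
  set.seed(1)
  X_demo <- cbind(x1 = rnorm(50), x2 = rnorm(50))
  ki(X_demo)
}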
parma.ineq.lpopt = function(ineq, ineqLB, ineqUB, LB, UB, cdim)
{
m = length(LB)
if( is.matrix(ineq) ) {
ineqn = dim(ineq)[1]
ineqcon = cbind(
rbind(ineq, -ineq),
matrix(0, nrow = 2 * ineqn, ncol = cdim) )
ineqcon = rbind( ineqcon,
cbind(diag(m), matrix(0, nrow = m, ncol = cdim) ),
cbind(-1 * diag(m), matrix(0, nrow = m, ncol = cdim) ) )
ineqcon[1:ineqn, m + cdim] = -1 * ineqUB
ineqcon[(ineqn + 1):(2 * ineqn), m + cdim] = 1 * ineqLB
ineqcon[(2 * ineqn + 1):(2 * ineqn + m), m + cdim] = -1 * UB
ineqcon[(2 * ineqn + m + 1):(2 * ineqn + 2 * m), m + cdim] = 1 * LB
ineqB = c( rep(0, 2 * ineqn), rep(0, 2 * m) )
nn = 2 * ineqn + 2 * m
} else{
ineqn = 0
ineqcon = rbind(
cbind(diag(m), matrix(0, nrow = m, ncol = cdim) ),
cbind(-1 * diag(m), matrix(0, nrow = m, ncol = cdim) ) )
ineqcon[1:m, m + cdim] = -1 * UB
ineqcon[(m + 1):(2 * m), m + cdim] = 1 * LB
ineqB = c( rep(0, 2 * m) )
nn = 2 * m
}
return( list( ineqcon = ineqcon, ineqB = ineqB, ineqn = nn, ineqm = m ) )
}
parma.eq.lpopt = function(eq, eqB, reward, rewardB, cdim, budget = 1, m = NULL)
{
eqcon = NULL
eqn = 0
if( !is.null(m) ) eqm = m else eqm = length( reward )
eqB = NULL
if( is.matrix(eq) ){
eqn = dim(eq)[1]
eqm = dim(eq)[2]
if( !is.null(budget) ){
eqcon = rbind( eqcon,
cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim)),
cbind( eq, matrix(0, nrow = eqn, ncol = cdim)))
eqcon[ , eqm + cdim] = c(-budget, -eqB)
eqn = eqn + 1
eqm = eqm
eqB = c( 0, rep(0, eqn) )
} else{
eqcon = rbind( eqcon,
cbind( eq, matrix(0, nrow = eqn, ncol = cdim) ) )
eqcon[ , eqm + cdim] = -eqB
eqn = eqn
eqB = rep(0, eqn)
}
} else{
if( !is.null(budget) ){
eqcon = rbind( eqcon,
cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim)))
eqcon[ , eqm + cdim] = -budget
eqn = 1
eqm = eqm
eqB = 0
}
}
return( list( eqcon = eqcon, eqB = eqB, eqn = eqn, eqm = eqm ) )
}
parma.ineq.lpmin = function(ineq, ineqLB, ineqUB, cdim)
{
if( is.matrix(ineq) ) {
ineqn = dim(ineq)[1]
ineqm = dim(ineq)[2]
ineqcon = cbind(
rbind(ineq, -ineq),
matrix(0, nrow = 2 * ineqn, ncol = cdim) )
ineqB = c( as.numeric(ineqUB), as.numeric(-1 * ineqLB) )
nn = 2 * ineqn
} else{
ineqcon = NULL
nn = 0
ineqm = 0
ineqB = NULL
}
return( list( ineqcon = ineqcon, ineqB = ineqB, ineqn = nn, ineqm = ineqm ) )
}
parma.eq.lpmin = function(eq, eqB, reward, rewardB, cdim, budget = 1, m = NULL)
{
mbench = 0
if( !is.null(rewardB) ){
eqcon = cbind( matrix(reward, nrow = 1),
matrix(mbench, nrow = 1, ncol = 1),
matrix(0, nrow = 1, ncol = cdim - 1) )
eqn = 1
if( !is.null(m) ) eqm = m else eqm = length( reward )
eqB = rewardB
if(is.matrix(eq)){
eqn = dim(eq)[1]
eqm = dim(eq)[2]
if( budget ){
eqcon = rbind( cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim) ),
cbind( eq, matrix(0, nrow = eqn, ncol = cdim) ),
eqcon )
eqn = eqn + 2
eqm = eqm
eqB = c( 1, eqB, rewardB )
} else{
eqcon = rbind( cbind( eq, matrix(0, nrow = eqn, ncol = cdim) ), eqcon )
eqn = eqn + 1
eqB = c( eqB, rewardB )
}
} else{
if( !is.null(budget) ){
eqcon = rbind( cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim) ),
eqcon )
eqn = eqn + 1
eqm = eqm
eqB = c( budget, rewardB )
}
}
} else{
eqcon = NULL
eqn = 0
if( !is.null(m) ) eqm = m else eqm = length( reward )
if( is.matrix(eq) ){
eqn = dim(eq)[1]
eqm = dim(eq)[2]
if( !is.null(budget) ){
eqcon = rbind( cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim) ),
cbind( eq, matrix(0, nrow = eqn, ncol = cdim) ) )
eqn = eqn + 1
eqm = eqm
eqB = c(budget, eqB)
} else{
eqcon = rbind( cbind( eq, matrix(0, nrow = eqn, ncol = cdim) ), eqcon )
eqn = eqn
eqB = eqB
}
} else{
if( !is.null(budget) ){
eqcon = rbind( cbind( matrix(1, nrow = 1, ncol = eqm), matrix(0, nrow = 1, ncol = cdim) ), eqcon )
eqn = 1
eqm = eqm
eqB = budget
}
}
}
return( list( eqcon = eqcon, eqB = eqB, eqn = eqn, eqm = eqm ) )
}
parma.con.qpmin1 = function(eq, eqB, reward, rewardB, ineq, ineqLB, ineqUB, LB, UB, budget, m)
{
Amat = NULL
bvec = NULL
meq = 0
dvec = rep(0, m)
if( !is.null(rewardB) )
{
Amat = rbind( Amat, matrix( reward, ncol = m, nrow = 1) )
meq = meq + 1
bvec = c(bvec, rewardB)
}
if( !is.null(budget) )
{
Amat = rbind( Amat, matrix( c(0, rep(1,m-1)), ncol = m, nrow = 1))
meq = meq + 1
bvec = c(bvec, budget)
}
if( !is.null(eq) ){
neq = length(eqB)
Amat = rbind( Amat, eq)
meq = meq + neq
bvec = c(bvec, eqB)
}
if( !is.null(ineq) ){
nineq = length(ineqLB)
Amat = rbind(Amat,
ineq, -ineq)
bvec = c(bvec, ineqLB, -ineqUB)
}
Amat = rbind(Amat, diag(m), -diag(m))
bvec = c(bvec, if(!is.null(LB)) LB else rep(-1000, m), if(!is.null(UB)) -UB else rep(-1000, m))
return( list(Amat = Amat, bvec = bvec, meq = meq) )
}
parma.con.qpmin2 = function(eq, eqB, reward, rewardB, ineq, ineqLB, ineqUB, LB, UB, budget, m)
{
Amat = NULL
bvec = NULL
meq = 0
dvec = rep(0, m)
if( !is.null(rewardB) )
{
Amat = rbind( Amat, matrix( reward, ncol = m, nrow = 1) )
meq = meq + 1
bvec = c(bvec, rewardB)
}
if( !is.null(budget) )
{
Amat = rbind( Amat, matrix( 1, ncol = m, nrow = 1))
meq = meq + 1
bvec = c(bvec, budget)
}
if( !is.null(eq) ){
neq = length(eqB)
Amat = rbind( Amat, eq)
meq = meq + neq
bvec = c(bvec, eqB)
}
if( !is.null(ineq) ){
nineq = length(ineqLB)
Amat = rbind(Amat,
ineq, -ineq)
bvec = c(bvec, ineqLB, -ineqUB)
}
Amat = rbind(Amat, diag(m), -diag(m))
bvec = c(bvec, if(!is.null(LB)) LB else rep(-1000, m), if(!is.null(UB)) -UB else rep(-1000, m))
return( list(Amat = Amat, bvec = bvec, meq = meq) )
}
parma.con.qpopt = function(eq, eqB, reward, ineq, ineqLB, ineqUB, LB, UB, budget, m)
{
Amat = NULL
bvec = NULL
meq = 0
dvec = rep(0, m)
mbench = 0
Amat = rbind( Amat, matrix( c(mbench, reward), ncol = m, nrow = 1) )
meq = meq + 1
bvec = c(bvec, 1)
Amat = rbind( Amat, matrix( c(budget, rep(1, m-1)), ncol = m, nrow = 1))
meq = meq + 1
bvec = c(bvec, 0)
if( !is.null(eq) ){
neq = length(eqB)
Amat = rbind( Amat, cbind(eqB, eq) )
meq = meq + neq
bvec = c(bvec, rep(0, neq))
}
Amat = rbind(Amat, matrix(c(-1, rep(0, m-1)), ncol = m, nrow = 1))
Amat = rbind(Amat, cbind( LB, diag(m-1)))
Amat = rbind(Amat, cbind(-UB,-diag(m-1)))
bvec = c(bvec, rep(0, 2*(m-1) + 1))
nineq = 2*(m)
if( !is.null(ineq) ){
nineq = nineq+length(ineqLB)
Amat = rbind(Amat,
cbind(ineqLB, ineq), cbind(-ineqUB,-ineq))
bvec = c(bvec, rep(0, 2*length(ineqLB)))
}
return( list(Amat = Amat, bvec = bvec, meq = meq) )
}
parma.ineq.minfun = function(w, optvars, uservars){
cons = NULL
if(!is.null(optvars$ineqfun)){
n = length(optvars$ineqfun)
if(optvars$index[3]==1) cons = ineqfun.target.min(w, optvars, uservars) else cons = NULL
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = c(cons, do.call(optvars$ineqfun[[i]], args = fnlist))
}
} else{
if(optvars$index[3]==1) cons = ineqfun.target.min(w, optvars, uservars) else cons = NULL
}
if(optvars$index[4]==2) cons = c(cons, ineqfun.minminmax(w, optvars, uservars))
return(cons)
}
parma.ineq.mingrad = function(w, optvars, uservars){
cons = NULL
if(!is.null(optvars$ineqgrad)){
n = length(optvars$ineqgrad)
if(optvars$index[3]==1) cons = ineqjac.target.min(w, optvars, uservars) else cons = NULL
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = rbind(cons, do.call(optvars$ineqgrad[[i]], args = fnlist))
}
} else{
if(optvars$index[3]==1) cons = ineqjac.target.min(w, optvars, uservars) else cons = NULL
}
if(optvars$index[4]==2){
if(is.null(cons)) cons = ineqjac.minminmax(w, optvars, uservars) else cons = rbind(cons, ineqjac.minminmax(w, optvars, uservars))
}
return(cons)
}
parma.eq.minfun = function(w, optvars, uservars){
if(!is.null(optvars$eqfun)){
n = length(optvars$eqfun)
if(optvars$index[6]==0) cons = eqfun.budget.min(w, optvars, uservars) else cons = eqfun.leverage.min(w, optvars, uservars)
if(optvars$index[3]==2) cons = c(cons, eqfun.target.min(w, optvars, uservars))
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = c(cons, do.call(optvars$eqfun[[i]], args = fnlist))
}
} else{
if(optvars$index[6]==0) cons = eqfun.budget.min(w, optvars, uservars) else cons = eqfun.leverage.min(w, optvars, uservars)
if(optvars$index[3]==2) cons = c(cons, eqfun.target.min(w, optvars, uservars))
}
return(cons)
}
parma.eq.mingrad = function(w, optvars, uservars){
if(!is.null(optvars$eqgrad)){
n = length(optvars$eqgrad)
if(optvars$index[6]==0) cons = eqjac.budget.min(w, optvars, uservars) else cons = eqjac.leverage.min(w, optvars, uservars)
if(optvars$index[3]==2) cons = rbind(cons, eqjac.target.min(w, optvars, uservars))
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = rbind(cons, do.call(optvars$eqgrad[[i]], args = fnlist))
}
} else{
if(optvars$index[6]==0) cons = eqjac.budget.min(w, optvars, uservars) else cons = eqjac.leverage.min(w, optvars, uservars)
if(optvars$index[3]==2) cons = rbind(cons, eqjac.target.min(w, optvars, uservars))
}
return(cons)
}
parma.ineq.optfun = function(w, optvars, uservars){
cons = NULL
if(!is.null(optvars$ineqfun)){
n = length(optvars$ineqfun)
cons = c(cons, ineqfun.bounds.opt(w, optvars, uservars))
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = c(cons, do.call(optvars$ineqfun[[i]], args = fnlist))
}
} else{
cons = ineqfun.bounds.opt(w, optvars, uservars)
}
if(optvars$index[4]==2) cons = c(cons, ineqfun.optminmax(w, optvars, uservars))
return(cons)
}
parma.ineq.optgrad = function(w, optvars, uservars){
cons = NULL
if(!is.null(optvars$ineqgrad)){
n = length(optvars$ineqgrad)
cons = ineqjac.bounds.opt(w, optvars, uservars)
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = rbind(cons, do.call(optvars$ineqgrad[[i]], args = fnlist))
}
} else{
cons = ineqjac.bounds.opt(w, optvars, uservars)
}
if(optvars$index[4]==2) cons = rbind(cons, ineqjac.optminmax(w, optvars, uservars))
return(cons)
}
parma.eq.optfun = function(w, optvars, uservars){
cons = eqfun.target.opt(w, optvars, uservars)
if(!is.null(optvars$eqfun)){
n = length(optvars$eqfun)
if(optvars$index[6]==0) cons = c(cons, eqfun.budget.opt(w, optvars, uservars)) else cons = c(cons, eqfun.leverage.opt(w, optvars, uservars))
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = c(cons, do.call(optvars$eqfun[[i]], args = fnlist))
}
} else{
if(optvars$index[6]==0) cons = c(cons, eqfun.budget.opt(w, optvars, uservars)) else cons = c(cons, eqfun.leverage.opt(w, optvars, uservars))
}
return(cons)
}
parma.eq.optgrad = function(w, optvars, uservars){
cons = eqjac.target.opt(w, optvars, uservars)
if(!is.null(optvars$eqgrad)){
n = length(optvars$eqgrad)
if(optvars$index[6]==0) cons = rbind(cons, eqjac.budget.opt(w, optvars, uservars)) else cons = rbind(cons, eqjac.leverage.opt(w, optvars, uservars))
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = rbind(cons, do.call(optvars$eqgrad[[i]], args = fnlist))
}
} else{
if(optvars$index[6]==0) cons = rbind(cons, eqjac.budget.opt(w, optvars, uservars)) else cons = rbind(cons, eqjac.leverage.opt(w, optvars, uservars))
}
return(cons)
}
parma.ineq.lpmupm = function(w, optvars, uservars){
if(!is.null(optvars$ineqfun)){
n = length(optvars$ineqfun)
cons = func.ineq.lpmupm(w, optvars, uservars)
fnlist = list(w, optvars, uservars)
names(fnlist) = c("w", "optvars", "uservars")
for(i in 1:n){
cons = c(cons, do.call(optvars$ineqfun[[i]], args = fnlist))
}
} else{
cons = func.ineq.lpmupm(w, optvars, uservars)
}
return(cons)
} |
methods::setClass("GCT",
methods::representation(
mat = "matrix",
rid = "character",
cid = "character",
rdesc = "data.frame",
cdesc = "data.frame",
version = "character",
src = "character"
)
)
methods::setValidity("GCT",
function(object) {
nrows <- nrow(object@mat)
ncols <- ncol(object@mat)
if (nrows != length(object@rid)) {
return("rid must be the same length as number of matrix rows")
}
if (ncols != length(object@cid)) {
return("cid must be the same length as number of matrix columns")
}
if (length(object@cid) > length(unique(object@cid))) {
return("cid must be unique")
}
if (length(object@rid) > length(unique(object@rid))) {
return("rid must be unique")
}
if (nrow(object@cdesc) != ncols & nrow(object@cdesc) != 0) {
return("cdesc must either have 0 rows or the same number of rows as matrix has columns")
}
if (nrow(object@rdesc) != nrows & nrow(object@rdesc) != 0) {
return("rdesc must either have 0 rows or the same number of rows as matrix has rows")
}
else {
return(T)
}
}
)
suppressMessages({
setMethod("show", methods::signature("GCT"), function(object) {
utils::str(object)
})
setMethod("ncol", methods::signature("GCT"), function(x) {
ncol(x@mat)
})
setMethod("nrow", methods::signature("GCT"), function(x) {
nrow(x@mat)
})
setMethod("dim", methods::signature("GCT"), function(x) {
dim(x@mat)
})
setMethod("range", methods::signature("GCT"), function(x, na.rm=F, finite=F) {
range(x@mat, na.rm=na.rm, finite=finite)
})
setMethod("max", methods::signature("GCT"), function(x, na.rm=F) {
max(x@mat, na.rm=na.rm)
})
setMethod("min", methods::signature("GCT"), function(x, na.rm=F) {
min(x@mat, na.rm=na.rm)
})
setMethod("diag", methods::signature("GCT"), function(x) {
diag(x@mat)
})
})
fix.datatypes <- function(meta) {
for (field.name in names(meta)) {
field <- meta[[field.name]]
field.as.numeric <- suppressWarnings(as.numeric(field))
if (!any(is.na(field.as.numeric))) {
field <- field.as.numeric
}
if (is.numeric(field)) {
field.as.integer <- suppressWarnings(as.integer(field))
if (!any(is.na(field.as.integer))) {
diffs <- field - field.as.integer
if (all(diffs == 0)) {
field <- field.as.integer
}
}
}
meta[[field.name]] <- field
}
return(meta)
}
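# Example: purely numeric character fields are coerced to numeric, whole
# numbers are further narrowed to integer, and mixed fields stay character.
if (interactive()) {
  str(fix.datatypes(list(id = c("a", "b"),
                         dose = c("1", "2"),
                         conc = c("0.5", "1.5"))))
}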
read.gctx.meta <- function(gctx_path, dimension="row", ids=NULL) {
if (!file.exists(gctx_path)) {
stop(paste(gctx_path, "does not exist"))
}
if (dimension=="column") dimension <- "col"
if (!(dimension %in% c("row", "col"))) {
stop("dimension can be either row or col")
}
if (dimension == "row") {
name <- "0/META/ROW"
} else {
name <- "0/META/COL"
}
raw_annots <- rhdf5::h5read(gctx_path, name=name)
fields <- names(raw_annots)
annots <- data.frame(matrix(nrow=length(raw_annots[[fields[1]]]), ncol=length(fields)))
names(annots) <- fields
for (i in 1:length(fields)) {
field <- fields[i]
annots[,i] <- as.vector(gsub("\\s*$", "", raw_annots[[field]], perl=T))
}
annots <- fix.datatypes(annots)
if (is.null(ids)) {
ids <- as.character(annots$id)
} else {
ids <- ids
}
annots <- subset_to_ids(annots, ids)
annots$id <- as.character(annots$id)
return(annots)
}
read.gctx.ids <- function(gctx_path, dimension="row") {
if (!file.exists(gctx_path)) {
stop(paste(gctx_path, "does not exist"))
}
if (dimension=="column") dimension <- "col"
if (!(dimension %in% c("row", "col"))) {
stop("dimension can be either row or col")
}
if (dimension == "row") {
name <- "0/META/ROW/id"
} else {
name <- "0/META/COL/id"
}
ids <- gsub("\\s*$", "", rhdf5::h5read(gctx_path, name=name), perl=T)
ids <- as.character(ids)
return(ids)
}
process_ids <- function(ids, all_ids, type="rid") {
if (!is.null(ids)) {
if (is.numeric(ids)) {
idx <- ids
is_invalid_idx <- (idx > length(all_ids)) | (idx <= 0)
invalid_idx <- idx[is_invalid_idx]
if (all(is_invalid_idx)) {
stop(paste("none of the requested", type, "indices were found in the dataset"))
}
if (any(is_invalid_idx)) {
        warning(paste("the following ", type, " are outside the possible range and will be ignored:\n",
paste(invalid_idx, collapse="\n"), sep=""))
}
idx <- idx[!is_invalid_idx]
} else {
idx <- match(ids, all_ids)
if (all(is.na(idx))) {
stop(paste("none of the requested", type, "were found in the dataset"))
}
if (any(is.na(idx))) {
ids_not_found <- ids[is.na(idx)]
warning(paste("the following ", type, " were not found and will be ignored:\n",
paste(ids_not_found, collapse="\n"), sep=""))
}
idx <- idx[!is.na(idx)]
}
} else {
idx <- seq_along(all_ids)
}
id_keep <- as.character(all_ids[idx])
return(list(idx=idx, ids=id_keep))
}
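# Example: character ids are matched against the file's full id universe and
# returned with their positional indices; numeric ids are treated as indices
# directly and out-of-range entries are dropped with a warning.
if (interactive()) {
  str(process_ids(c("g3", "g1"), all_ids = c("g1", "g2", "g3"), type = "rid"))
}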
methods::setMethod("initialize",
signature = "GCT",
definition = function(.Object, mat=NULL, rdesc=NULL, cdesc=NULL, src=NULL, rid=NULL, cid=NULL,
matrix_only=FALSE) {
if (!is.null(mat)) {
.Object@mat <- mat
if (!is.null(rid)) {
.Object@rid <- rid
} else {
.Object@rid <- rownames(mat)
}
if (!is.null(cid)) {
.Object@cid <- cid
} else {
.Object@cid <- colnames(mat)
}
}
if (!is.null(rdesc)) {
.Object@rdesc <- rdesc
}
if (!is.null(cdesc)) {
.Object@cdesc <- cdesc
} else if (!is.null(src)) {
if (! (grepl(".gct$", src) || grepl(".gctx$", src) ))
stop("Either a .gct or .gctx file must be given")
if (grepl(".gct$", src)) {
if ( ! is.null(rid) || !is.null(cid) )
warning(paste("rid and cid values may only be given for .gctx files, not .gct files\n",
"ignoring"))
.Object@src <- src
.Object@version <- scan(src, what = "", nlines = 1, sep = "\t", quiet = TRUE)[1]
dimensions <- scan(src, what = double(0), nlines = 1, skip = 1, sep = "\t", quiet = TRUE)
nrmat <- dimensions[1]
ncmat <- dimensions[2]
if (length(dimensions)==4) {
message("parsing as GCT v1.3")
nrhd <- dimensions[3]
nchd <- dimensions[4]
} else {
message("parsing as GCT v1.2")
nrhd <- 0
nchd <- 0
}
message(paste(src, nrmat, "rows,", ncmat, "cols,", nrhd, "row descriptors,", nchd, "col descriptors"))
header <- scan(src, what = "", nlines = 1, skip = 2, sep = "\t", quote = NULL, quiet = TRUE)
if ( nrhd > 0 ) {
rhd <- header[2:(nrhd+1)]
cid <- header[-(nrhd+1):-1]
col_offset <- 1
}
else {
if (any(grepl("description", header, ignore.case=T))) {
col_offset <- 2
} else {
col_offset <- 1
}
rhd <- NULL
cid <- header[(1+col_offset):length(header)]
}
if ( nchd > 0 ) {
header <- scan(src, what = "", nlines = nchd, skip = 3, sep = "\t",
quote = NULL, quiet = TRUE)
header <- matrix(header, nrow = nchd,
ncol = ncmat + nrhd + 1, byrow = TRUE)
chd <- header[,1]
cdesc <- header[,-(nrhd+1):-1]
if ( nchd == 1 )
cdesc <- t(cdesc)
}
else {
chd = NULL
cdesc <- data.frame(id=cid)
}
mat <- scan(src, what = "", nlines = nrmat,
skip = 3 + nchd, sep = "\t", quote = NULL, quiet = TRUE)
mat <- matrix(mat, nrow = nrmat, ncol = ncmat + nrhd + col_offset,
byrow = TRUE)
rid <- mat[,1]
if ( nrhd > 0 ) {
rdesc <- as.matrix(mat[,2:(nrhd + 1)])
mat <- matrix(as.numeric(mat[,-(nrhd + 1):-1]),
nrow = nrmat, ncol = ncmat)
}
else {
rdesc <- data.frame(id=rid)
mat <- matrix(as.numeric(mat[, (1+col_offset):ncol(mat)]), nrow = nrmat, ncol = ncmat)
}
dimnames(mat) <- list(rid, cid)
if ( nrhd > 0 ) {
dimnames(rdesc) <- list(rid, rhd)
rdesc <- as.data.frame(rdesc, stringsAsFactors = FALSE)
}
if ( nchd > 0 ) {
cdesc <- t(cdesc)
dimnames(cdesc) <- list(cid,chd)
cdesc <- as.data.frame(cdesc, stringsAsFactors = FALSE)
}
.Object@mat <- mat
.Object@rid <- rownames(mat)
.Object@cid <- colnames(mat)
if (!matrix_only) {
.Object@rdesc <- fix.datatypes(rdesc)
.Object@cdesc <- fix.datatypes(cdesc)
.Object@rdesc$id <- rownames(.Object@rdesc)
.Object@cdesc$id <- rownames(.Object@cdesc)
}
}
else {
message(paste("reading", src))
.Object@src <- src
if ( length(rid) == 1 && grepl(".grp$", rid) )
rid <- parse.grp(rid)
if ( length(cid) == 1 && grepl(".grp$", cid) )
cid <- parse.grp(cid)
all_rid <- read.gctx.ids(src, dimension="row")
all_cid <- read.gctx.ids(src, dimension="col")
processed_rids <- process_ids(rid, all_rid, type="rid")
processed_cids <- process_ids(cid, all_cid, type="cid")
.Object@mat <- rhdf5::h5read(src, name="0/DATA/0/matrix",
index=list(processed_rids$idx, processed_cids$idx))
.Object@rid <- processed_rids$ids
.Object@cid <- processed_cids$ids
rownames(.Object@mat) <- processed_rids$ids
colnames(.Object@mat) <- processed_cids$ids
if (!matrix_only) {
.Object@rdesc <- read.gctx.meta(src, dimension="row", ids=processed_rids$ids)
.Object@cdesc <- read.gctx.meta(src, dimension="col", ids=processed_cids$ids)
}
else {
.Object@rdesc <- data.frame(id=.Object@rid, stringsAsFactors = F)
.Object@cdesc <- data.frame(id=.Object@cid, stringsAsFactors = F)
}
if(utils::packageVersion('rhdf5') < "2.23.0") {
rhdf5::H5close()
} else {
rhdf5::h5closeAll()
}
message("done")
}
}
ok <- methods::validObject(.Object)
return(.Object)
}
)
parse.gctx <- function(fname, rid=NULL, cid=NULL, matrix_only=FALSE) {
ds <- methods::new("GCT",
src = fname,
rid = rid,
cid = cid,
matrix_only = matrix_only)
return(ds)
}
append.dim <- function(ofile, mat, extension="gct") {
nc <- ncol(mat)
nr <- nrow(mat)
filename <- basename(ofile)
if (grepl("n[0-9]+x[0-9]+\\.gct", filename)) {
filename <- sub("_n[0-9]+x[0-9]+\\.gct.*", "", filename)
}
filename <- file.path(dirname(ofile),
sprintf('%s_n%dx%d.%s',filename,
nc, nr, extension))
return(filename)
}
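# Example (hedged): the appended token encodes n<ncols>x<nrows>, and an
# existing _n<...>x<...>.gct token is stripped first so repeated writes do
# not stack suffixes.
if (interactive()) {
  append.dim("my_sig", matrix(0, nrow = 978, ncol = 25))
  # "./my_sig_n25x978.gct"
}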
write.gct <- function(ds, ofile, precision=4, appenddim=T, ver=3) {
if (!class(ds)=="GCT") {
stop("ds must be a GCT object")
}
ok <- methods::validObject(ds)
if (appenddim) ofile <- append.dim(ofile, ds@mat, extension="gct")
precision = floor(precision)
cat(sprintf('Saving file to %s\n',ofile))
nr <- nrow(ds@mat)
nc <- ncol(ds@mat)
cat(sprintf('Dimensions of matrix: [%dx%d]\n',nr,nc))
cat(sprintf('Setting precision to %d\n',precision))
if (ver==3) {
ds@cdesc$id <- NULL
ds@rdesc$id <- NULL
nrdesc = dim(ds@rdesc)[2]
ncdesc = dim(ds@cdesc)[2]
colkeys = colnames(ds@cdesc)
    cat(sprintf('#1.3\n%d\t%d\t%d\t%d', nr, nc, nrdesc, ncdesc),
        file=ofile,sep='\n')
cat(paste(c('id',colnames(ds@rdesc),ds@cid),collapse='\t'),
file=ofile,sep='\n',append=T)
filler = 'na'
if (ncdesc > 0) {
for (ii in 1:ncdesc) {
if (is.numeric(ds@cdesc[,ii])) {
cat(paste(c(colkeys[ii],rep(filler,nrdesc),
round(ds@cdesc[,ii],precision)),
collapse='\t'),
file=ofile,sep='\n',append=T)
} else {
cat(paste(c(colkeys[ii],rep(filler,nrdesc),
ds@cdesc[,ii]),
collapse='\t'),
file=ofile,sep='\n',append=T)
}
}
}
for (ii in 1:nr) {
cat(paste(c(ds@rid[ii],
ds@rdesc[ii,],
round(ds@mat[ii,],precision)),collapse='\t'),
sep='\n',file=ofile,append=T)
}
} else {
    cat(sprintf('#1.2\n%d\t%d', nr, nc),
        file=ofile,sep='\n')
cat(paste(c('id','Description',ds@cid),collapse='\t'),
file=ofile,sep='\n',append=T)
for (ii in 1:nr) {
cat(paste(c(ds@rid[ii],
ds@rdesc[ii, 2],
round(ds@mat[ii,],precision)),collapse='\t'),
sep='\n',file=ofile,append=T)
}
}
cat(sprintf('Saved.\n'))
}
write.gctx <- function(ds, ofile, appenddim=T, compression_level=0, matrix_only=F,
max_chunk_kb=1024) {
if (!class(ds)=="GCT") {
stop("ds must be a GCT object")
}
ok <- methods::validObject(ds)
if (appenddim) ofile <- append.dim(ofile, ds@mat, extension="gctx")
if (file.exists(ofile)) {
message(paste(ofile, "exists, removing"))
file.remove(ofile)
}
message(paste("writing", ofile))
rhdf5::h5createFile(ofile)
rhdf5::h5createGroup(ofile, "0")
rhdf5::h5createGroup(ofile, "0/DATA")
rhdf5::h5createGroup(ofile, "0/DATA/0")
rhdf5::h5createGroup(ofile, "0/META")
rhdf5::h5createGroup(ofile, "0/META/COL")
rhdf5::h5createGroup(ofile, "0/META/ROW")
bits_per_element <- switch(storage.mode(ds@mat),
"double" = 64,
"integer" = 32)
elem_per_kb <- max_chunk_kb * 8 / bits_per_element
row_dim <- nrow(ds)
col_dim <- ncol(ds)
row_chunk_size <- min(row_dim, 1000)
col_chunk_size <- min(((max_chunk_kb * elem_per_kb) %/% row_chunk_size), col_dim)
chunking <- c(row_chunk_size, col_chunk_size)
message(paste(c("chunk sizes:", chunking), collapse="\t"))
rhdf5::h5createDataset(ofile, "0/DATA/0/matrix", dim(ds@mat), chunk=chunking, level=compression_level)
rhdf5::h5write.default(ds@mat, ofile, "0/DATA/0/matrix")
rhdf5::h5write.default(as.character(ds@rid), ofile, "0/META/ROW/id")
rhdf5::h5write.default(as.character(ds@cid), ofile, "0/META/COL/id")
if (!matrix_only) {
write.gctx.meta(ofile, ds@cdesc, dimension="column")
write.gctx.meta(ofile, ds@rdesc, dimension="row")
}
if(utils::packageVersion('rhdf5') < "2.23.0") {
rhdf5::H5close()
} else {
rhdf5::h5closeAll()
}
fid <- rhdf5::H5Fopen(ofile)
rhdf5::h5writeAttribute.character("GCTX1.0", fid, "version")
rhdf5::H5Fclose(fid)
}
update.gctx <- function(x, ofile, rid=NULL, cid=NULL) {
stopifnot(is.numeric(x))
if (is.null(rid) && is.null(cid)) {
stop("one of rid or cid must not be NULL")
}
if (is.matrix(x)) {
stopifnot(all(dim(x) == c(length(rid), length(cid))))
} else {
if(!is.null(rid) & !is.null(cid)) {
stop(paste("x is a vector so you can only update in one dimension",
"(only one of rid or cid can be non-NULL)", sep="\n"))
}
if (is.null(rid)) {
stopifnot(length(cid) == length(x))
}
if (is.null(cid)) {
stopifnot(length(rid) == length(x))
}
}
info <- rhdf5::h5dump(ofile, load=F)
dims <- as.numeric(unlist(strsplit(info[["0"]][["DATA"]][["0"]][["matrix"]][["dim"]], " x ")))
all_rid <- cmapR::read.gctx.ids(ofile, dim="row")
all_cid <- cmapR::read.gctx.ids(ofile, dim="col")
validate_integer_ids <- function(ids, maxdim, which_dim) {
stopifnot(all(ids > 0))
out_of_range <- setdiff(ids, seq_len(maxdim))
if (length(out_of_range) > 0) {
stop(paste("the following", which_dim, "indices are out of range\n",
paste(out_of_range, collapse="\n")))
}
}
validate_character_ids <- function(ids, all_ids, which_dim) {
out_of_range <- setdiff(ids, all_ids)
if (length(out_of_range) > 0) {
stop(paste("the following", which_dim, "ids do not exist in the dataset\n",
paste(out_of_range, collapse="\n")))
}
}
if (is.integer(rid)) {
validate_integer_ids(rid, dims[1], "row")
ridx <- rid
}
if (is.integer(cid)) {
validate_integer_ids(cid, dims[2], "column")
cidx <- cid
}
if (is.character(rid)) {
validate_character_ids(rid, all_rid, "row")
ridx <- match(rid, all_rid)
}
if (is.character(cid)) {
validate_character_ids(cid, all_cid, "column")
cidx <- match(cid, all_cid)
}
rhdf5::h5write.default(x, ofile, "0/DATA/0/matrix", index=list(ridx, cidx))
if(utils::packageVersion('rhdf5') < "2.23.0") {
rhdf5::H5close()
} else {
rhdf5::h5closeAll()
}
}
write.gctx.meta <- function(ofile, df, dimension="row") {
path <- if ((dimension=="row")) "0/META/ROW/" else "0/META/COL/"
fields <- names(df)
if (length(fields) > 0) {
for (i in 1:length(fields)) {
field <- fields[i]
if (field == "id") next
v <- df[, i]
      if(inherits(v, "factor") || inherits(v, "AsIs")) {
v <- as.character(v)
}
rhdf5::h5write.default(v, ofile, paste(path, field, sep=""))
}
}
}
parse.grp <- function(fname) {
grp <- scan(fname, what = "", quote = NULL, quiet = TRUE, sep="\n")
return(grp)
}
write.grp <- function(vals, fname) {
if (is.list(vals)) vals <- unlist(vals)
if (!is.vector(vals)) vals <- as.vector(vals)
write(vals, fname, ncolumns=1)
}
parse.gmx <- function(fname) {
tmp <- utils::read.table(fname, sep = "\t",
header = TRUE, stringsAsFactors = FALSE)
L <- list()
for ( n in names(tmp) ) {
values <- tmp[[n]][-1]
remove.idx <- values == ""
values <- values[!remove.idx]
L[[n]] <- list(head = n,
desc = tmp[[n]][1],
len = length(values),
entry = values)
}
return(L)
}
parse.gmt <- function(fname) {
gmt.lines <- scan(fname, what = "", sep = "\n",
quote = NULL, quiet = TRUE)
tmp <- lapply(gmt.lines, function(x) unlist(strsplit(x, "\t")))
mk.gmt.entry <- function(x) {
L <- list()
L[["head"]] <- x[1]
L[["desc"]] <- x[2]
l.entry <- x[-c(1:2)]
idx <- l.entry != ""
L[["entry"]] <- l.entry[idx]
L[["len"]] <- length(L[["entry"]])
return(L)
}
L <- lapply(tmp, function(x) mk.gmt.entry(x))
names(L) <- unlist(lapply(L, function(x) x$head))
return(L)
}
write.gmt <- function(lst, fname) {
if (file.exists(fname)) {
message(paste(fname, "exists, deleting..."))
file.remove(fname)
}
for (i in 1:length(lst)) {
el <- lst[[i]]
ncolumns <- 2 + length(el$entry)
write(c(el$head, el$desc, el$entry), file=fname, sep="\t", append=T, ncolumns=ncolumns)
}
}
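# Example (hedged): a minimal GMT round trip; each row is
# "set name<TAB>description<TAB>members...".
if (interactive()) {
  gmt_demo <- list(setA = list(head = "setA", desc = "demo set",
                               len = 3, entry = c("g1", "g2", "g3")))
  tmp_gmt <- tempfile(fileext = ".gmt")
  write.gmt(gmt_demo, tmp_gmt)
  str(parse.gmt(tmp_gmt))
}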
lxb2mat <- function(lxb_path, columns=c("RID", "RP1"),
newnames=c("barcode_id", "FI")) {
message(paste("reading", lxb_path))
lxb <- suppressWarnings(prada::readFCS(lxb_path))
m <- prada::exprs(lxb)[, columns]
keep_idx <- m[, 1] != 0
m <- m[keep_idx, ]
colnames(m) <- newnames
return(m)
}
write.tbl <- function(tbl, ofile, ...) {
utils::write.table(tbl, file = ofile, sep="\t", quote=F,
col.names=T, row.names=F, ...)
} |
prGetCgroupHeader <- function(x,
cgroup_vec,
n.cgroup_vec,
cgroup_vec.just,
row_no, top_row_style,
rnames,
rowlabel = NULL,
cgroup_spacer_cells,
style_list,
prepped_cell_css,
css_4_cgroup_vec) {
header_str <- "\n\t<tr>"
if (row_no == 1) {
ts <- top_row_style
} else {
ts <- ""
}
if (!is.null(rowlabel)) {
if (row_no == style_list$pos.rowlabel) {
header_str %<>% sprintf(
"%s\n\t\t<th style='%s'>%s</th>",
.,
prGetStyle(
c(`font-weight` = 900),
ts,
attr(prepped_cell_css, "rnames")[1]
),
rowlabel
)
} else {
header_str %<>%
sprintf(
"%s\n\t\t<th style='%s'></th>",
.,
prGetStyle(ts)
)
}
} else if (!prSkipRownames(rnames)) {
header_str %<>% sprintf(
"%s\n\t\t<th style='%s'></th>",
.,
prGetStyle(ts)
)
}
for (i in 1:length(cgroup_vec)) {
if (!is.na(n.cgroup_vec[i])) {
start_column <- ifelse(i == 1,
1,
sum(n.cgroup_vec[1:(i - 1)], na.rm = TRUE) + 1
)
colspan <- n.cgroup_vec[i] +
ifelse(start_column > length(cgroup_spacer_cells) || n.cgroup_vec[i] == 1,
0,
ifelse(start_column == 1,
sum(cgroup_spacer_cells[1:(n.cgroup_vec[i] - 1)]),
ifelse(sum(n.cgroup_vec[1:i], na.rm = TRUE) == ncol(x),
sum(cgroup_spacer_cells[start_column:length(cgroup_spacer_cells)]),
sum(cgroup_spacer_cells[start_column:((start_column - 1) + (n.cgroup_vec[i] - 1))])
)
) * prGetEmptySpacerCellSize(style_list = style_list)
)
header_align <- prGetAlign(cgroup_vec.just,
index = i,
style_list = style_list)
if (nchar(cgroup_vec[i]) == 0) {
header_values <- list(COLSPAN = colspan,
STYLE = prGetStyle(c(`font-weight` = 900),
ts,
header_align,
css_4_cgroup_vec[i]),
CONTENT = "")
} else {
header_values <- list(COLSPAN = colspan,
STYLE = prGetStyle(c(`font-weight` = 900,
`border-bottom` = "1px solid grey"),
ts,
header_align,
css_4_cgroup_vec[i]),
CONTENT = cgroup_vec[i])
}
header_str %<>% paste(str_interp("<th colspan='${COLSPAN}' style='${STYLE}'>${CONTENT}</th>",
header_values),
sep = "\n\t\t")
if (i != sum(!is.na(cgroup_vec))) {
bottom_border_style = str_interp("border-bottom: ${STYLE};",
list(STYLE = style_list$spacer.css.cgroup.bottom.border))
header_str %<>% prAddEmptySpacerCell(style_list = style_list,
cell_style = prGetStyle(bottom_border_style,
ts),
align_style = header_align,
cell_tag = "th")
}
}
}
header_str %<>%
paste0("\n\t</tr>")
return(header_str)
} |
optim.diff.norm <-
function(y,status,weight,param,x=NULL,var.list=NULL)
{
mu <- sigma <- list()
mu.mat.old <- mu.mat <- matrix(NA,nrow=ncol(weight),ncol=ncol(y))
sigma.array.old <- sigma.array <- array(NA,c(ncol(weight),ncol(y),ncol(y)))
for(k in 1:ncol(weight))
{
mu.mat.old[k,] <- param$mu[[k]]
sigma.array.old[k,,] <- param$sigma[[k]]
}
for(j in 1:ncol(y))
{
indivs <- which(is.finite(y[,j]))
weight.aff <- matrix(weight[status==2,],nrow=sum(status==2),ncol=ncol(weight))
weight.miss <- matrix(weight[status==0,],nrow=sum(status==0),ncol=ncol(weight))
weight.aff.j <- matrix(weight.aff[indivs,],nrow=length(indivs),ncol=ncol(weight))
mu.mat[,j] <- t(weight.aff.j)%*%y[indivs,j]
mu.mat[,j] <- mu.mat[,j]+apply(weight.miss,2,sum)*mu.mat.old[,j]
mu.mat[,j] <- mu.mat[,j]/(apply(weight.aff.j,2,sum)+apply(weight.miss,2,sum))
}
for(j1 in 1:ncol(y)) for(j2 in 1:ncol(y))
{
indivs <- intersect(which(is.finite(y[,j1])),which(is.finite(y[,j2])))
weight.aff <- matrix(weight[status==2,],nrow=sum(status==2),ncol=ncol(weight))
weight.miss <- matrix(weight[status==0,],nrow=sum(status==0),ncol=ncol(weight))
weight.aff.j1j2 <- matrix(weight.aff[indivs,],nrow=length(indivs),ncol=ncol(weight))
sigma.array[,j1,j2] <- diag(t(weight.aff.j1j2)%*%(outer(y[indivs,j1],mu.mat[,j1],"-")*outer(y[indivs,j2],mu.mat[,j2],"-")))
sigma.array[,j1,j2] <- sigma.array[,j1,j2]+apply(weight.miss,2,sum)*(sigma.array.old[,j1,j2]+(mu.mat.old[,j1]-mu.mat[,j1])*(mu.mat.old[,j2]-mu.mat[,j2]))
}
sigma.commun <- matrix(apply(sigma.array,c(2,3),sum),nrow=ncol(y),ncol=ncol(y))/sum(weight)
for(k in 1:ncol(weight))
{
mu[[k]] <- mu.mat[k,]
sigma[[k]] <- sigma.commun
}
param <- list("mu"=mu,"sigma"=sigma)
param
} |
cv.test <- function(data, fold = 10L, minnodes = 2L, maxnodes = 10L,
ncores = 1L, ...) {
if (!is.data.frame(data)) {
stop("\"data\" must be a data frame.")
}
if (!is.null(ncores)){
if (!is.numeric(ncores)) {
stop("\"ncores\" should be either NULL or a positive integer.")
}
if (ncores < 1) {
stop("\"ncores\" should be > 1.")
}
}
if (is.null(ncores))
ncores <- parallel::detectCores() - 1
if (ncores > 1) {
cl <- parallel::makeCluster(ncores)
doParallel::registerDoParallel(cl)
`%dodo%` <- foreach::`%dopar%`
} else {
`%dodo%` <- foreach::`%do%`
}
num_obs <- nrow(data)
sse_t <- vector("list", maxnodes - minnodes + 1)
if (fold == 1) {
for (k in minnodes:maxnodes) {
sse_i <-
foreach::foreach(iter = seq_len(num_obs),
.combine = "c",
.inorder = FALSE,
.packages = c("monoClust")) %dodo% {
out <- MonoClust(data[-iter, ], nclusters = k, ...)
pred <- predict.MonoClust(out,
newdata = data[iter, ],
type = "centroid")
return(sum((data[iter, ] - pred[, -1])^2))
}
sse_t[[k - minnodes + 1]] <- c(ncluster = k,
MSE = mean(sse_i),
`Std. Dev.` = stats::sd(sse_i))
}
ret <- list(cv = dplyr::bind_rows(sse_t),
cv.type = "Leave-one-out Cross-validation")
} else {
sse_t <- vector("list", maxnodes - minnodes + 1)
index <- rep(1:fold, num_obs %/% fold + 1)
random_list <- sample(index, num_obs, replace = FALSE)
for (k in minnodes:maxnodes) {
sse_i <-
foreach::foreach(iter = 1:fold,
.combine = "c",
.inorder = FALSE,
.packages = c("monoClust")) %dodo% {
train_set <- data[-which(random_list == iter), ]
test_set <- data[which(random_list == iter), ]
                     train_tree <- MonoClust(train_set, nclusters = k, ...)
pred <- predict.MonoClust(train_tree,
test_set,
type = "centroid")
return(sum((test_set - pred[, -1])^2))
}
sse_t[[k - minnodes + 1]] <- c(ncluster = k,
MSE = mean(sse_i),
`Std. Dev.` = stats::sd(sse_i))
}
ret <- list(cv = dplyr::bind_rows(sse_t),
cv.type = paste0(fold, "-fold Cross-validation"))
}
if (ncores > 1) {
parallel::stopCluster(cl)
}
class(ret) <- "cv.MonoClust"
return(ret)
} |
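# Hedged usage sketch of cv.test() above: it assumes the monoClust package
# (providing MonoClust() and predict.MonoClust()) is attached; the data frame
# and node range below are purely illustrative.
# library(monoClust)
# dat <- as.data.frame(scale(mtcars))
# cv_res <- cv.test(dat, fold = 5, minnodes = 2, maxnodes = 4)
# cv_res$cv  # MSE and its standard deviation per number of clusters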
BaseProvider <- R6::R6Class(
'BaseProvider',
public = list(
random_element = function(x) {
if (length(x) == 0) return('')
if (inherits(x, "character")) if (!any(nzchar(x))) return('')
x[sample.int(n = length(x), size = 1)]
},
random_element_prob = function(x) {
if (length(x) == 0) return('')
if (inherits(x, "character")) if (!any(nzchar(x))) return('')
choices <- names(x)
probs <- unname(unlist(x))
sample(choices, size = 1, prob = probs)
},
random_int = function(min = 0, max = 9999, size = 1) {
stopifnot(max >= min)
num <- max - min + 1
sample.int(n = num, size = size, replace = TRUE) + (min - 1)
},
random_digit = function() {
self$random_element(0:9)
},
random_digit_not_zero = function() {
self$random_element(1:9)
},
random_digit_or_empty = function() {
self$random_element(c(0:9, ""))
},
random_digit_not_zero_or_empty = function() {
self$random_element(c(1:9, ""))
},
random_letter = function() {
self$random_element(c(letters, LETTERS))
},
    numerify = function(text = '###') {
      text <- do_match(text, "#", self$random_digit)
text <- do_match(text, "%", self$random_digit_not_zero)
text <- do_match(text, "!", self$random_digit_or_empty)
text <- do_match(text, "@", self$random_digit_not_zero_or_empty)
return(text)
},
lexify = function(text = '????') {
do_match(text, "?", self$random_letter)
},
    bothify = function(text = '## ??') {
self$lexify(self$numerify(text))
},
check_locale = function(x) check_locale_(x),
randomize_nb_elements = function(number = 10, le = FALSE, ge = FALSE,
min = NULL, max = NULL) {
if (le && ge) return(number)
'_min' = if (ge) 100 else 60
'_max' = if (le) 100 else 140
nb = as.integer(number * self$random_int(`_min`, `_max`) / 100)
if (!is.null(min) && nb < min) nb = min
      if (!is.null(max) && nb > max) nb = max
return(nb)
}
)
)
check_locale_ <- function(x, z = available_locales) {
if (!x %in% z) {
stop(x, ' not in set of available locales', call. = FALSE)
}
}
n_matches <- function(text, pattern) {
tmp <- gregexpr(paste0("\\", pattern), text)[[1]]
if (length(tmp) == 1) {
if (tmp == -1) 0 else tmp
} else {
length(tmp)
}
}
replace_loop <- function(x, pattern, repl) {
for (i in seq_along(repl)) {
x <- sub(paste0("\\", pattern), repl[i], x)
}
return(x)
}
do_match <- function(text, pattern, fun) {
nm <- n_matches(text, pattern)
if (nm > 0) {
pat <- replicate(nm, eval(fun)())
replace_loop(text, pattern, pat)
} else {
return(text)
}
} |
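# Hedged usage sketch of the template helpers above (assumes the R6 class and
# the do_match()/replace_loop() helpers in this file are sourced): '#' becomes
# a random digit and '?' a random letter, so the outputs shown are examples only.
p <- BaseProvider$new()
p$numerify("ID-###")          # e.g. "ID-407"
p$bothify("## ??")            # e.g. "42 qT"
p$random_int(1, 6, size = 3)  # three values between 1 and 6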
test_that("202012141334", {
f <- c('x^2','y^3','z^4')
x <- divergence(f, var = c('x','y','z'))
y <- "2 * x + 3 * y^2 + 4 * z^3"
expect_equal(x,y)
})
test_that("202012141335", {
f <- c('x^2','y^3','z^4')
x <- divergence(f, var = c('x','y','z'), drop = FALSE)
y <- array("2 * x + 3 * y^2 + 4 * z^3")
expect_equal(x,y)
})
test_that("202012141336", {
f <- array(rep(c('x^2','y^3','z^4'), each = 2), dim = c(2,3))
x <- divergence(f, var = c('x','y','z'))
y <- array("2 * x + 3 * y^2 + 4 * z^3", dim = 2)
expect_equal(x,y)
})
test_that("202012141345", {
f <- function(x, y, z) c(x^2, y^3, z^4)
x <- divergence(f, var = c('x' = 1, 'y' = 2, 'z' = 3))
y <- 122
expect_equal(x,y)
})
test_that("202012141346", {
f <- function(x, y, z) c(x^2, y^3, z^4)
x <- divergence(f, var = c('y' = 2, 'z' = 3, 'x' = 1))
y <- 0
expect_equal(x,y)
})
test_that("202012141351", {
f <- function(x) c(x[1]^2, x[2]^3, x[3]^4)
x <- divergence(f, var = c(1, 2, 3))
y <- 122
expect_equal(x,y)
})
test_that("202012141352", {
f <- function(x) c(x[1]^2, x[2]^3, x[3]^4)
x <- divergence(f, var = c(2, 3, 1))
y <- 35
expect_equal(x,y)
})
test_that("202012141353", {
f <- c("r^3","r*z","r*z*sin(phi)")
x <- divergence(f, var = c('r'=10,'phi'=pi/6,'z'=Inf), coordinates = 'cylindrical')
y <- 405
expect_equal(x,y)
})
test_that("202012141410", {
f <- function(r, phi, z) c(r^3,r*z,r*z*sin(phi))
x <- divergence(f, var = c('r'=10,'phi'=pi/6,'z'=100), coordinates = 'cylindrical')
y <- 405
expect_equal(x,y)
})
test_that("202012141411", {
f <- function(x) c(x[1]^3,x[1]*x[3],x[1]*x[3]*sin(x[2]))
x <- divergence(f, var = c(10,pi/6,100), coordinates = 'cylindrical')
y <- 405
expect_equal(x,y)
})
test_that("202012141412", {
f <- function(r, phi, z, extra) if(extra) c(r^3,r*z,r*z*sin(phi))
x <- divergence(f, var = c('r'=10,'phi'=pi/6,'z'=100), coordinates = 'cylindrical', params = list(extra = TRUE))
y <- 405
expect_equal(x,y)
})
test_that("202012141625", {
f <- function(x, extra) if(extra) array(1:6*rep(c(x[1]^3,x[1]*x[3],x[1]*x[3]*sin(x[2])), each = 6), dim = c(1,3,2,3))
x <- divergence(f, var = c(10,pi/6,100), coordinates = 'cylindrical', params = list(extra = TRUE))
y <- 405*array(1:6, dim = c(1,3,2))
expect_equal(x,y)
})
test_that("202012141633", {
f <- c('0', 'r^-2', 'k')
x <- divergence(f, var = c('r','phi','z'), coordinates = 'cylindrical')
y <- "0"
expect_equal(x,y)
})
test_that("202012141647", {
f <- c('r*cos(phi)', '-r*sin(phi)', 'r*z')
x <- divergence(f, var = c('r'=12,'phi'=pi/3,'z'=Inf), coordinates = 'cylindrical')
y <- 12.5
expect_equal(x,y)
})
test_that("202012141704", {
f <- array(c('r','r^2*sin(theta)','r*sin(theta)','0','0','r^2*sin(phi)','0','0','r*cos(theta)'), dim = c(3,3))
x <- divergence(f, var = c('r'=10,'theta'=20,'phi'=30), coordinates = 'spherical')
y <- function(r,theta,phi) array(c(3, 4*r*sin(theta), 3*sin(theta) + r*cos(theta)/sin(theta)*sin(phi)))
expect_equal(x,y(10,20,30))
})
test_that("202012141704", {
f <- function(x) x
x <- divergence(f, 1:100)
y <- 100
expect_equal(x,y)
})
test_that("202012142259", {
f <- function(x) x
x <- divergence(f, 1, coordinates = "spherical")
y <- 1
expect_equal(x,y)
})
test_that("202012302353", {
f <- c('a*x^2', 'y^3', 'z^4')
x <- divergence(f, var = c('x' = 1, 'y' = 2, 'z' = 3), params = list(a = 1))
y <- 122
expect_equal(x,y)
}) |
set_coef <- function(model, coefs) {
UseMethod("set_coef")
}
set_coef.default <- function(model, coefs) {
model[["coefficients"]][names(coefs)] <- coefs
model
} |
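# Minimal sketch of how set_coef() above is meant to be used: overwrite a
# fitted coefficient by name without refitting the model.
m_demo <- lm(mpg ~ wt, data = mtcars)
coef(set_coef(m_demo, c(wt = 0)))["wt"]  # now 0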
create_TRMF = function(dataM,weight=1,normalize=c("none","standard","robust","range"),
normalize.type = c("global","columnwise","rowwise"),
na.action=c("impute","fail")){
dataM = as.matrix(dataM)
if(match.arg(na.action) == "fail"){
if(any(is.na(dataM))){
stop("Missing values in dataM")
}
if(any(is.na(weight))){
stop("Missing values in weight")
}
}
if(any(is.infinite(dataM))){
stop("Infinite values in data matrix")
}
if(any(is.infinite(weight))){
stop("Infinite values in weights")
}
Dims = list(nrows = dim(dataM)[1],ncols = dim(dataM)[2],numTS=0)
normalize = match.arg(normalize)
normalize.type = match.arg(normalize.type)
NormalizedData = NormalizeMatrix(dataM,method=normalize,type=normalize.type)
NormalizedData[is.na(dataM)]=0
HadamardProjection = HadamardProjection4NA(dataM)
weight[is.na(weight)]=0
Weight = weight*HadamardProjection
trmf_object = list(dataM = dataM,NormalizedData = NormalizedData,HadamardProjection=HadamardProjection,
Weight=Weight,Dims = Dims,HasXreg=FALSE)
class(trmf_object) = "TRMF"
return(trmf_object)
}
TRMF_coefficients = function(obj,reg_type =c("l2","nnls","constrain","interval","none"),lambda=0.0001){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_coefficients: Create a valid TRMF object first using create_TRMF()")
}
  if(!is.null(obj$Fm_Settings)){
warning("TRMF_coefficient model already defined, overwriting")
}
type = match.arg(reg_type)
if(!(type %in%c("l2","nnls","constrain","interval","none"))){
stop("TRMF_coefficients: Coefficient regularization type not valid (at least not currently implemented)")
}
if(type=="none"){
lambda=0
}
obj$Fm_Settings = list(type=type,lambda=lambda)
return(obj)
}
TRMF_columns = function(obj,reg_type =c("l2","nnls","constrain","interval","none"),lambda=0.0001){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_columns: Create a valid TRMF object first using create_TRMF()")
}
  if(!is.null(obj$Fm_Settings)){
warning("TRMF_columns model already defined, overwriting")
}
type = match.arg(reg_type)
if(!(type %in%c("l2","nnls","constrain","interval","none"))){
stop("TRMF_columns: columns regularization type not valid (at least not currently implemented)")
}
if(type=="none"){
lambda=0
}
obj$Fm_Settings = list(type=type,lambda=lambda)
return(obj)
}
TRMF_trend = function(obj,numTS = 1,order = 1,lambdaD=1,lambdaA=0.0001,weight=1){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_trend: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(weight))){
stop("TRMF_trend: Infinite values in weights")
}
numTS = as.integer(numTS)
if(numTS<1){
return(obj)
}
if((length(lambdaD)!=1)||(length(lambdaA)!=1)){
stop("TRMF_trend: the regularization parameters (lambda) must be scalars")
}
if(is.null(obj$Xm_models)){
xm_it = 1
obj$Xm_models=list()
}else{
xm_it = length(obj$Xm_models)+1
}
nrows = obj$Dims$nrows
if((length(weight)!=1)&&(length(weight)!=nrows)){
stop("TRMF_trend: weight vector is wrong size")
}
  WeightD = diag(x=lambdaD*weight,nrow=nrows)
Dmat = FiniteDiffM(nrows,order)
Dmat = WeightD%*%Dmat
WeightA = diag(x=lambdaA,nrow=nrows)
XmObj = list(Rm = Dmat,WA = WeightA)
XmObj$model =list(type = "trend",order=order,numTS=numTS)
XmObj$model$name = paste("Order_",order," trend with ",numTS," latent time series",sep="",collapse="")
XmObj$model$colnames = paste("D",round(order,2),"(",1:numTS,")",sep="")
obj$Xm_models[[xm_it]] = XmObj
obj$Dims$numTS=obj$Dims$numTS+numTS
return(obj)
}
TRMF_simple = function(obj,numTS = 1,lambdaA=0.0001,weight=1){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_trend: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(weight))){
stop("TRMF_simple: Infinite values in weights")
}
numTS = as.integer(numTS)
if(numTS<1){
return(obj)
}
if(length(lambdaA)!=1){
stop("TRMF_simple: the regularization parameter (lambda) must be scalar")
}
if(is.null(obj$Xm_models)){
xm_it = 1
obj$Xm_models=list()
}else{
xm_it = length(obj$Xm_models)+1
}
nrows = obj$Dims$nrows
if((length(weight)!=1)&&(length(weight)!=nrows)){
stop("TRMF_simple: weight vector is wrong size")
}
WeightA = diag(x=lambdaA*weight,nrow=nrows)
WeightD = diag(x=0,nrow=nrows)
XmObj = list(Rm = WeightD ,WA = WeightA)
XmObj$model =list(type = "simple",order=0,numTS=numTS)
XmObj$model$name = paste0("L2 regularized with ",numTS," latent time series")
XmObj$model$colnames = paste("L2(",1:numTS,")",sep="")
obj$Xm_models[[xm_it]] = XmObj
obj$Dims$numTS=obj$Dims$numTS+numTS
return(obj)
}
TRMF_seasonal = function(obj,numTS = 1,freq = 12,sumFirst=FALSE,lambdaD=1,lambdaA=0.0001,weight=1){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_seasonal: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(weight))){
stop("TRMF_seasonal: Infinite values in weights")
}
numTS = as.integer(numTS)
if(numTS<1){
return(obj)
}
  if(round(freq) != freq){
    message("TRMF_seasonal: Non-integer frequencies (freq) are rounded to the nearest integer")
    freq = round(freq)
  }
if(freq<1){
stop("TRMF_seasonal: freq value not valid")
}
  if(freq==1){
    message("TRMF_seasonal: freq = 1 has no seasonal structure, consider using TRMF_trend() instead")
  }
if((length(lambdaD)!=1)||(length(lambdaA)!=1)){
stop("TRMF_seasonal: the regularization parameters (lambda) must be scalars")
}
if(is.null(obj$Xm_models)){
xm_it = 1
obj$Xm_models=list()
}else{
xm_it = length(obj$Xm_models)+1
}
nrows = obj$Dims$nrows
if((length(weight)!=1)&&(length(weight)!=nrows)){
stop("TRMF_seasonal: weight vector is wrong size")
}
  WeightD = diag(x=lambdaD*weight,nrow=nrows)
Dmat = Seasonal_DM(nrows,lag=freq,sumFirst=sumFirst)
Dmat = WeightD%*%Dmat
WeightA = diag(x=lambdaA,nrow=nrows)
XmObj = list(Rm = Dmat,WA = WeightA)
XmObj$model =list(type = "seasonal",freq=freq,numTS=numTS)
XmObj$model$name = paste("Frequency = ",freq," seasonal random walk with ",numTS," latent time series",sep="",collapse="")
XmObj$model$colnames = paste("L",freq,"(",1:numTS,")",sep="")
obj$Xm_models[[xm_it]] = XmObj
obj$Dims$numTS=obj$Dims$numTS+numTS
return(obj)
}
TRMF_ar = function(obj,numTS = 1,AR,lambdaD=1,lambdaA=0.0001,weight=1){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_AR: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(weight))){
stop("TRMF_ar: Infinite values in weights")
}
numTS = as.integer(numTS)
if(numTS<1){
return(obj)
}
if((length(lambdaD)!=1)||(length(lambdaA)!=1)){
stop("TRMF_AR: the regularization parameters (lambda) must be scalars")
}
if(is.null(obj$Xm_models)){
xm_it = 1
obj$Xm_models=list()
}else{
xm_it = length(obj$Xm_models)+1
}
nrows = obj$Dims$nrows
if((length(weight)!=1)&&(length(weight)!=nrows)){
stop("TRMF_AR: weight vector is wrong size")
}
  WeightD = diag(x=lambdaD*weight,nrow=nrows)
Amat = ARmat(nrows,AR)
Amat = WeightD%*%Amat
WeightA = diag(x=lambdaA,nrow=nrows)
XmObj = list(Rm = Amat,WA = WeightA)
XmObj$model =list(type = "auto-regressive",parms = AR,numTS=numTS)
XmObj$model$name = paste("Auto-regressive model of order ",length(AR)," with ",numTS," latent time series",sep="",collapse="")
XmObj$model$colnames = paste("AR",length(AR),"(",1:numTS,")",sep="")
obj$Xm_models[[xm_it]] = XmObj
obj$Dims$numTS=obj$Dims$numTS+numTS
return(obj)
}
TRMF_es = function(obj,numTS = 1,alpha=1,es_type=c("single","double"),lambdaD=1,lambdaA=0.0001 ,weight=1){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_ES: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(weight))){
stop("TRMF_es: Infinite values in weights")
}
numTS = as.integer(numTS)
if(numTS<1){
return(obj)
}
if((length(lambdaD)!=1)||(length(lambdaA)!=1)){
stop("TRMF_ES: the regularization parameters (lambda) must be scalars")
}
es_type = match.arg(es_type)
if(es_type=="single"){
order = 1
prefix =""
}else if(es_type=="double"){
order = 2
prefix ="D"
}else{
stop("TRMF_ES: exponential smoothing type not valid")
}
if(is.null(obj$Xm_models)){
xm_it = 1
obj$Xm_models=list()
}else{
xm_it = length(obj$Xm_models)+1
}
nrows = obj$Dims$nrows
if((length(weight)!=1)&&(length(weight)!=nrows)){
stop("TRMF_ES: weight vector is wrong size")
}
  WeightD = diag(x=lambdaD*weight,nrow=nrows)
Dmat = ExpSmMat(nrows,alpha,order)
Dmat = WeightD%*%Dmat
WeightA = diag(x=lambdaA,nrow=nrows)
XmObj = list(Rm = Dmat,WA = WeightA)
XmObj$model =list(type = "ES",alpha=alpha,order=order,numTS=numTS)
XmObj$model$name = paste(es_type," exponential smoothing with ",numTS," latent time series",sep="",collapse="")
XmObj$model$colnames = paste(prefix,"ES",round(alpha,2),"(",1:numTS,")",sep="")
obj$Xm_models[[xm_it]] = XmObj
obj$Dims$numTS=obj$Dims$numTS+numTS
return(obj)
}
TRMF_regression = function(obj,Xreg,type=c("global","columnwise")){
if(is.null(obj)||class(obj) != "TRMF"){
stop("TRMF_Regression: Create a valid TRMF object first using create_TRMF()")
}
if(any(is.infinite(Xreg))){
stop("TRMF_Regression: infinite values in external regressors")
}
if(any(is.na(Xreg))){
stop("TRMF_Regression: Missing values not allowed in external regressors")
}
nrows = obj$Dims$nrows
type = match.arg(type)
dimX = dim(Xreg)
if(type=="global"){
if(is.null(dimX)){
if(length(Xreg) != nrows){
stop("TRMF_Regression: Xreg dimensions are incompatible with the data")
}else{
Xreg = matrix(Xreg,nrow=nrows)
dimX = dim(Xreg)
}
}else{
if(dimX[1] != nrows){
stop("TRMF_Regression: Xreg dimensions are incompatible with the data")
}
}
if(length(dimX)>2){
stop("TRMF_Regression: Xreg has more then 2 dimensions, perhaps you meant to use type='columnwise'")
}
    if(!is.null(obj$xReg_model$GlobalXReg)){
warning("TRMF_Regression: A global external regressor model has already been defined, over-writing...")
}
obj$xReg_model$GlobalXReg = Xreg
cnames = colnames(Xreg)
if(is.null(cnames)){
cnames = paste("gXreg(",1:dimX[2],")",sep="")
}else{
cnames = paste("gXreg(",cnames,")",sep="")
}
obj$xReg_model$gxname = cnames
}else if(type=="columnwise"){
if(is.null(dimX)){
stop("TRMF_Regression: Xreg is not matrix or array, perhaps you meant to use type='global'")
}
if(dimX[1] != nrows){
stop("TRMF_Regression: number of rows of Xreg do not match number of rows of data")
}
if(dimX[2] != obj$Dims$ncols){
stop("TRMF_Regression: number of columns of Xreg do not match number of columns of data")
}
    if(!is.null(obj$xReg_model$ColumnwiseXReg)){
warning("TRMF_Regression: A columnwise external regressor model has already been defined, over-writing...")
}
if(is.na(dimX[3])){dimX[3]=1}
obj$xReg_model$ColumnwiseXReg = array(Xreg,dimX)
obj$xReg_model$cxname = paste("cXreg(",1:dimX[3],")",sep="")
}
return(obj)
}
train.TRMF = function(x,numit=10,...){
obj = x
if(is.null(obj)||class(obj) != "TRMF"){
stop("Not a valid TRMF object")
}
if(is.null(obj$Fm_Settings)){
obj = TRMF_columns(obj)
}
if(is.null(obj$Xm_models)){
obj = TRMF_simple(obj)
}
localEnv = list2env(obj,envir = new.env())
Create_Xreg_Stuff(localEnv)
Create_Fm_Stuff(localEnv)
Create_Xm_Stuff(localEnv)
InitializeALS(localEnv)
if(numit<=0){
Get_XReg_fit(localEnv)
FitXm(localEnv)
}
else{
for(k in 1:numit){
Get_XReg_fit(localEnv)
FitXm(localEnv)
FitFm(localEnv)
}
}
FitAll(localEnv)
newobj=as.list(localEnv)
class(newobj) = class(obj)
return(newobj)
}
impute_TRMF= function(obj){
if(is.null(obj)||class(obj) != "TRMF"){
stop("Not a valid TRMF object")
}
if(is.null(obj$Fit)){
stop("Train TRMF model first using 'train_TRMF()'")
}
newM = obj$dataM
ind = which(is.na(newM))
newM[ind] = obj$Fit$fitted[ind]
return(newM)
} |
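# Hedged end-to-end sketch of the workflow defined above. It assumes the TRMF
# package internals referenced here (e.g. FiniteDiffM(), Create_Xm_Stuff(),
# FitXm()) are available; the matrix Y and the model choices are illustrative.
# Y <- matrix(rnorm(600), nrow = 60, ncol = 10)
# Y[sample(length(Y), 30)] <- NA
# obj <- create_TRMF(Y)
# obj <- TRMF_columns(obj, reg_type = "l2")
# obj <- TRMF_trend(obj, numTS = 2, order = 1)
# obj <- TRMF_seasonal(obj, numTS = 1, freq = 12)
# fit <- train.TRMF(obj, numit = 10)
# Y_imputed <- impute_TRMF(fit)  # NAs replaced by fitted values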
context("Installation of Test Packages")
rm(list = ls())
test_that('Test packages installed correctly',{
testPkgNames <- c("baseballstats", "sartre", "milne", "silverstein")
for (thisTestPkg in testPkgNames) {
expect_true(
object = require(thisTestPkg
, lib.loc = Sys.getenv('PKGNET_TEST_LIB')
, character.only = TRUE)
, info = sprintf("Test package %s is not installed.", thisTestPkg)
)
}
}) |
karaoke<-function(infile=NULL, outfile=NULL, sampf=NULL) {
wobj<-readWave(infile)
wl<-mono(wobj, "left")
wr<-mono(wobj, "right")
wobj<-wl-wr
wobj<-stereo(wobj,wobj)
savewav(wobj,f=sampf,filename=outfile)
} |
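# Hedged usage sketch of karaoke() above (assumes the tuneR/seewave functions
# readWave(), mono(), stereo() and savewav() are attached); the file names and
# sampling rate are purely illustrative.
# karaoke(infile = "song.wav", outfile = "song_karaoke.wav", sampf = 44100)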
source("ESEUR_config.r")
library("lubridate")
bench=read.csv(paste0(ESEUR_dir, "benchmark/EvolvingCompPerf_1963-1967.csv.xz"), as.is=TRUE)
bench$Date.introduced=as.Date(paste0("1 ", sub("/", "/19", bench$Date.introduced)),
origin="1900-01-01", format="%d %m/%Y")
bench$year=year(round_date(bench$Date.introduced, "year"))
plot(bench$Sci.Ops.Sec, bench$Com.Sec.Dol, log="xy")
sp_mod=glm(Com.Sec.Dol ~ log(Sci.Ops.Sec)+as.factor(year), data=bench, family=gaussian(link="log"))
cp_mod=glm(Com.Sec.Dol ~ log(Com.Ops.Sec)+as.factor(year), data=bench, family=gaussian(link="log"))
summary(cp_mod)
sp_mod=glm(Com.Sec.Dol ~ log(Sci.Ops.Sec)+Date.introduced, data=bench, family=gaussian(link="log"))
summary(sp_mod) |
run_LWFB90 <- function(options_b90,
param_b90,
climate,
precip = NULL,
soil = NULL,
output = NULL,
output_fun = NULL,
rtrn_input = TRUE,
rtrn_output = TRUE,
chk_input = TRUE,
run = TRUE,
timelimit = Inf,
verbose = FALSE,
...) {
xfunargs <- list(...)
if (is.function(climate)) {
if (verbose == TRUE) {
message("Applying climate input function")
}
climfunargsnms <- match.arg(methods::formalArgs(climate), names(xfunargs),several.ok = TRUE)
climate <- do.call(climate,xfunargs[climfunargsnms])
}
if (chk_input) {
chk_options()
chk_param()
chk_clim()
chk_soil()
}
climyears <- unique(as.integer(format(climate$dates,"%Y")))
simyears <- seq(from = as.integer(format(options_b90$startdate,"%Y")),
to = as.integer(format(options_b90$enddate,"%Y")),
by = 1)
param_b90$ndays <- as.integer(difftime(options_b90$enddate,options_b90$startdate)) + 1
budburst_leaffall <- calc_vegperiod(dates = climate$dates, tavg = climate$tmean,
out_yrs = simyears,
budburst_method = options_b90$budburst_method,
leaffall_method = options_b90$leaffall_method,
budburstdoy.fixed = param_b90$budburstdoy,
leaffalldoy.fixed = param_b90$leaffalldoy,
species = param_b90$budburst_species,
est.prev = ifelse(length(climyears) <= 5,
length(climyears) - 1, 5))
param_b90$budburstdoy <- budburst_leaffall$start
param_b90$leaffalldoy <- budburst_leaffall$end
if (tolower(options_b90$standprop_input) == "table") {
if (verbose == TRUE) {message("Creating long term stand dynamics from table 'standprop_table'...")}
param_b90 <- standprop_yearly_to_param(param_b90$standprop_table,
param_b90,
out_yrs = simyears)
} else {
if (verbose == TRUE) {message("Creating stand properties from parameters...")}
param_b90$age <- seq(from = param_b90$age_ini + 1,
by = 1, length.out = length(simyears))
}
standprop_daily <- make_standprop(options_b90, param_b90, out_yrs = simyears)
standprop_daily <- standprop_daily[which(standprop_daily$dates >= options_b90$startdate
& standprop_daily$dates <= options_b90$enddate),]
if (verbose == TRUE) {
message("Standproperties created succesfully")
}
climate <- climate[which(climate$dates >= options_b90$startdate
& climate$dates <= options_b90$enddate),]
if (!is.null(precip)){
precip <- precip[which(precip$dates >= options_b90$startdate
& precip$dates <= options_b90$enddate),]
}
if (options_b90$correct_prec == TRUE) {
if (!is.null(precip)) {
warning("Correction of precipitation not possible for sub-daily precipitation data! Doing nothing.")
}
climate$prec <- with(climate, correct_prec(mo, tmean, prec,
station.exposure = param_b90$prec_corr_statexp))
}
if (options_b90$fornetrad == "sunhours") {
climate$globrad <- calc_globrad(climate$dates, climate$sunhours,
param_b90$coords_y)
}
if (!is.null(soil)) {
param_b90[c("soil_nodes","soil_materials" )] <- soil_to_param(soil, options_b90$imodel)
}
if (options_b90$imodel == "MvG") {
param_b90$soil_materials <- param_b90$soil_materials[,c("mat","ths","thr","alpha","npar","ksat","tort","gravel")]
} else {
param_b90$soil_materials <- param_b90$soil_materials[,c("mat","thsat","thetaf","psif","bexp","kf","wetinf","gravel")]
}
param_b90$soil_nodes$psiini <- param_b90$psiini
if (options_b90$root_method != "soilvar") {
param_b90$soil_nodes$rootden <- make_rootden(soilnodes = c(max(param_b90$soil_nodes$upper),
param_b90$soil_nodes$lower),
maxrootdepth = param_b90$maxrootdepth,
method = options_b90$root_method,
beta = param_b90$betaroot,
rootdat = param_b90$rootden_table)
} else {
if (!is.null(soil)) {
param_b90$soil_nodes$rootden <- soil$rootden
} else {
stopifnot(!is.null(param_b90$soil_nodes$rootden))
}
}
if (run) {
if (verbose == TRUE) {
message("Running model..." )
}
start <- Sys.time()
simout <- r_lwfbrook90(
siteparam = data.frame(simyears[1],
as.integer(format(options_b90$startdate, "%j")),
param_b90$coords_y, param_b90$snowini, param_b90$gwatini,
options_b90$prec_interval),
climveg = cbind(climate[, c("yr", "mo", "da","globrad","tmax","tmin",
"vappres","windspeed","prec","mesfl")],
standprop_daily[, c("densef", "height", "lai", "sai", "age")]),
precdat = precip[,c("yr", "mo", "da","ii","prec", "mesfl")],
param = param_to_rlwfbrook90(param_b90, options_b90$imodel),
pdur = param_b90$pdur,
soil_materials = param_b90$soil_materials,
soil_nodes = param_b90$soil_nodes[,c("layer","midpoint", "thick", "mat", "psiini", "rootden")],
output_log = verbose,
timelimit = timelimit
)
chk_errors()
finishing_time <- Sys.time()
simtime <- finishing_time - start
units(simtime) <- "secs"
if (verbose == TRUE) {
message(paste("Simulation successful! Duration:", round(simtime,2), "seconds"))
}
simout$daily_output <- data.table::data.table(simout$daily_output)
data.table::setnames(simout$daily_output, names(simout$daily_output),
c('yr','mo','da','doy','rfal','rint','sfal','sint','rthr','rsno',
'rnet','smlt','snow','swat','gwat','intr', 'ints','evap','tran','irvp',
'isvp','slvp','snvp','pint','ptran','pslvp','flow','seep',
'srfl','slfl','byfl','dsfl','gwfl','vrfln','safrac',
'stres','adef','awat','relawat','nits','balerr', 'slrad',
'solnet', 'lngnet', 'aa', 'asubs'))
simout$layer_output <- data.table::rbindlist(lapply(seq(dim(simout$layer_output)[3]),
function(x) data.frame(simout$layer_output[ , , x])),
idcol = "nl")
data.table::setnames(simout$layer_output, paste0("X", 1:16),
c('yr','mo','da','doy','swati','theta','wetnes','psimi','psiti','infl',
'byfl','tran','slvp','vrfl','dsfl','ntfl'))
simout$layer_output <- simout$layer_output[order(simout$layer_output$yr,
simout$layer_output$doy,
simout$layer_output$nl),]
simres <- list(simulation_duration = simtime,
finishing_time = finishing_time)
simres$model_input <- list(options_b90 = options_b90,
param_b90 = param_b90,
standprop_daily = standprop_daily)
if (is.matrix(output) & all(dim(output) == c(7,5))) {
simres <- c(simres, process_outputs(simout, output))
} else {
simres[names(simout)[-1]] <- simout[-1]
}
if (!is.null(output_fun)) {
if (verbose == TRUE) {
message("Applying custom functions on simulation output...")
}
if (!is.list(output_fun)){
output_fun <- list(output_fun)
}
outfunargs <- list(x = simres, ...)
outfunargsnms <- lapply(output_fun, FUN = function(x,argsnms) {
match.arg(methods::formalArgs(x),
argsnms,
several.ok = TRUE)},
argsnms = names(outfunargs))
simres$output_fun <- tryCatch( {
Map(do.call, output_fun, lapply(outfunargsnms, function(x,args) args[x], args = outfunargs))
},
warning = function(wrn){return(wrn)},
error = function(err){return(err)})
}
if (!rtrn_output) {
if (is.matrix(output) & all(dim(output) == c(7,5))) {
simres <- simres[-which(grepl(".ASC", names(simres), fixed = TRUE))]
} else {
simres <- simres[-which(names(simres) %in% c("daily_output", "layer_output"))]
}
}
if (!rtrn_input) {
simres <- simres[-which(names(simres) == "model_input")]
}
} else {
return(list(options_b90 = options_b90,
param_b90 = param_b90,
standprop_daily = standprop_daily))
}
if (verbose == TRUE) {
message("Finished!")
}
return(simres)
}
chk_errors <- function(){
eval.parent(quote({
if (simout$error_code != 0L) {
if (simout$error_code == 1L) stop("Simulation terminated abnormally: 'initial matrix psi > 0'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 2L) stop("Simulation initialization failed: 'FWETK failed to determine wetness at KF'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 3L) stop("Simulation terminated abnormally: 'inconsistent dates in climate!'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 4L) stop("Simulation terminated abnormally: 'inconsistent dates in precipitation input!'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 5L) stop("Simulation terminated abnormally: 'wrong precipitation interval input!'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 6L) stop("Simulation terminated abnormally: 'negative soil water storage!'
(rerun with verbose = TRUE to see more information)")
if (simout$error_code == 7L) stop("Simulation terminated abnormally: 'water storage exceeds water capacity!'
(rerun with verbose = TRUE to see more information)")
}
}))
}
chk_options <- function(){
eval.parent(quote({
names(options_b90) <- tolower(names(options_b90))
stopifnot(all(names(options_b90) %in% c("startdate","enddate","fornetrad","prec_interval",
"correct_prec","budburst_method",
"leaffall_method", "standprop_input", "standprop_interp",
"use_growthperiod","lai_method","imodel", "root_method")))
options_b90$fornetrad <- match.arg(options_b90$fornetrad, choices = c("globrad","sunhours"))
options_b90$standprop_input <- match.arg(options_b90$standprop_input, choices = c("parameters", "table"))
options_b90$lai_method <- match.arg(options_b90$lai_method, choices = c("b90", "linear", "Coupmodel"))
options_b90$root_method <- match.arg(options_b90$root_method, choices = c("betamodel", "table", "linear", "constant", "soilvar"))
options_b90$imodel <- match.arg(options_b90$imodel, choices = c("MvG", "CH"))
if (!inherits(options_b90$startdate, "Date")) {
stop("Please provide 'options_b90$startdate' as Date-object")}
if (!inherits(options_b90$enddate, "Date")) {
stop("Please provide 'options_b90$enddate' as Date-object")}
if (!(options_b90$startdate < options_b90$enddate)) {
stop("Check options_b90: 'startdate > enddate ")}
if (options_b90$budburst_method %in% c("const", "const.", "constant")) options_b90$budburst_method <- "fixed"
if (options_b90$leaffall_method %in% c("const", "const.", "constant")) options_b90$leaffall_method <- "fixed"
}))
}
chk_param <- function() {
eval.parent(quote({
names(param_b90) <- tolower(names(param_b90))
nms <- c("maxlai","sai","sai_ini","height","height_ini","densef",
"densef_ini","age_ini","winlaifrac","budburst_species","budburstdoy",
"leaffalldoy","shp_budburst","shp_leaffall","shp_optdoy","emergedur","leaffalldur",
"lai_doy","lai_frac","alb","albsn","ksnvp","fxylem",
"mxkpl","lwidth","psicr","nooutf","lpc","cs",
"czs","czr","hs","hr","rhotp","nn",
"maxrlen","initrlen","initrdep","rrad","rgrorate","rgroper",
"maxrootdepth","betaroot","radex","glmax","glmin",
"rm","r5","cvpd","tl","t1","t2",
"th","frintlai","frintsai","fsintlai","fsintsai","cintrl",
"cintrs","cintsl","cintss","infexp","bypar","qfpar",
"qffc","imperv","drain","gsc","gsp","ilayer",
"qlayer","z0s","rstemp","melfac","ccfac","laimlt",
"saimlt","grdmlt","maxlqf","snoden","obsheight","rssa",
"rssb","dtimax","dswmax","dpsimax",
"wndrat","fetch","z0w","zw","zminh","coords_x",
"coords_y","c1","c2","c3","pdur","eslope",
"aspect","dslope","slopelen","intrainini","intsnowini","gwatini",
"snowini","psiini")
if ( any(!nms %in% names(param_b90) )) {
stop(paste("param_b90-list is incomplete. Missing list items:",
paste(nms[which(!nms %in% names(param_b90))], collapse = ", ")))
}
}))
}
chk_clim <- function() {
eval.parent(quote({
names(climate) <- tolower(names(climate))
if (!all(c("dates", "tmax", "tmin", options_b90$fornetrad, "vappres", "windspeed") %in% tolower(names(climate)))) {
stop("Please check column names of 'climate'. Must contain: \n
'dates', 'tmax', 'tmin', 'vappres', 'windspeed', and 'globrad' (or 'sunhours') ")
}
stopifnot(inherits(climate$dates, "Date"))
if (min(climate$dates) > options_b90$startdate | max(climate$dates) < options_b90$enddate){
stop("climate not covering requested simulation period completely.")
}
if (any(!c("yr", "mo", "da") %in% names(climate))) {
climate$yr <- data.table::year(climate$dates)
climate$mo <- data.table::month(climate$dates)
climate$da <- data.table::mday(climate$dates)
}
if (!any( names(climate) == "mesfl") ) {
climate$mesfl <- 0
}
if (is.null(precip) ){
stopifnot("prec" %in% names(climate))
} else {
names(precip) <- tolower(names(precip))
stopifnot(all(c("dates","prec") %in% tolower(names(precip))))
if (!any( names(precip) == "mesfl") ) {
precip$mesfl <- 0
}
if (nrow(climate)*options_b90$prec_interval != nrow(precip)) {
stop("Climate and Precipitation data provided do not fit to the precipitation
interval defined in options_b90$prec_interval.")
} else {
if (options_b90$prec_interval == 1) {
climate$prec <- precip$prec
precip <- NULL
} else {
precip$ii <- rep(1:options_b90$prec_interval,nrow(climate))
precip$yr <- data.table::year(precip$dates)
precip$mo <- data.table::month(precip$dates)
precip$da <- data.table::mday(precip$dates)
climate$prec <- -999
}
}
}
}))
}
chk_soil <- function(){
eval.parent(quote({
if (is.null(soil)) {
if (is.null(param_b90$soil_nodes) | is.null(param_b90$soil_materials)) {
stop("Please provide soil parameters as items 'soil_nodes' and 'soil_materials' via 'param_b90',
when not using 'soil'-argument in run_LWFB90.")
}
names(param_b90$soil_nodes) <- tolower(names(param_b90$soil_nodes))
names(param_b90$soil_materials) <- tolower(names(param_b90$soil_materials))
stopifnot(all(c( "upper", "lower", "mat") %in% names(param_b90$soil_nodes)))
if (anyNA(param_b90$soil_nodes[,c( "upper", "lower", "mat")])) {
stop("No NAs allowed in param_b90$soil_nodes")
}
if (options_b90$imodel == "MvG" ) {
stopifnot(all(c("mat","ths","thr","alpha","npar","ksat","tort","gravel") %in% names(param_b90$soil_materials)))
if (anyNA(param_b90$soil_materials[,c("mat","ths","thr","alpha","npar","ksat","tort","gravel")])) {
stop("No NAs allowed in param_b90$soil_materials!")
}
} else {
stopifnot(all(c("mat","thsat","thetaf","psif","bexp","kf","wetinf","gravel") %in% names(param_b90$soil_materials)))
if (anyNA(param_b90$soil_materials[,c("mat","thsat","thetaf","psif","bexp","kf","wetinf","gravel")])) {
stop("No NAs allowed in param_b90$soil_materials!")
}
}
if (options_b90$root_method == "soilvar" & is.null(param_b90$soil_nodes$rootden)) {
stop("Please provide column 'rootden' in param_b90$soil_nodes when using options_b90$root_method = 'soilvar'.")
}
} else {
names(soil) <- tolower(names(soil))
if (options_b90$imodel == "MvG") {
stopifnot(all(c("upper","lower", "ths","thr","alpha","npar","ksat","tort","gravel") %in% names(soil)))
if (anyNA(soil[,c("upper","lower","ths","thr","alpha","npar","ksat","tort","gravel")])) {
stop("No NAs allowed in 'soil'!")
}
} else {
stopifnot(all(c("upper","lower", "thsat","thetaf","psif","bexp","kf","wetinf","gravel") %in% names(soil)))
if (anyNA(soil[,c("upper","lower","thsat","thetaf","psif","bexp","kf","wetinf","gravel")])) {
stop("No NAs allowed in 'soil'!")
}
}
if (options_b90$root_method == "soilvar" & is.null(soil$rootden)) {
stop("Please provide column 'rootden' in 'soil'-data.frame when using options_b90$root_method = 'soilvar'.")
}
}
}))
}
process_outputs <- function(simout, output) {
adef<-NULL;awat<-NULL;balerr<-NULL;da<-NULL;doy<-NULL;evap<-NULL;flow<-NULL;gwat<-NULL;
intr<-NULL;ints<-NULL;mo<-NULL;nits<-NULL;nl<-NULL;relawat<-NULL;rfal<-NULL;safrac<-NULL;
seep<-NULL;sfal<-NULL;snow<-NULL;stres<-NULL;swat<-NULL;vrfln<-NULL;yr<-NULL;
selection <- rownames(output)[which(rowSums(output) > 0)]
if (any(selection == "Budg")) {
Budg <- simout$daily_output[,c("yr","mo","da","doy","rfal","sfal","flow", "evap", "seep","snow","swat","gwat","intr","ints")]}
if (any(selection == "Flow")){
Flow <- simout$daily_output[,c("yr","mo","da","doy","flow","seep","srfl","slfl","byfl","dsfl","gwfl","vrfln")]}
if (any(selection == "Evap")){
Evap <- simout$daily_output[,c("yr","mo","da","doy","flow","evap","tran","irvp","isvp","slvp","snvp","pint","ptran","pslvp")]}
if (any(selection == "Abov")){
Abov <- simout$daily_output[,c("yr","mo","da","doy","rfal","rint","sfal","sint","rthr","rsno","rnet","smlt","slfl","srfl")]}
if (any(selection == "Belo")){
Belo <- simout$layer_output[,c("yr","mo","da","doy","nl","infl","byfl","tran","slvp","vrfl","dsfl","ntfl")]}
if (any(selection == "Swat")){
Swat <- simout$layer_output[,c("yr","mo","da","doy","nl","swati","theta","wetnes","psimi","psiti")]}
if (any(selection == "Misc")){
Misc <- simout$daily_output[,c("yr","mo","da","doy","vrfln","safrac","stres","adef","awat","relawat","nits","balerr")]}
moutputs <- list()
for (sel in selection) {
X <- get(sel)
if (sel %in% c("Flow", "Evap", "Abov")) {
for (per in rev(colnames(output)[which(output[sel,] == 1)])) {
if (per == "Day") {
moutputs[[paste0(toupper(sel),"DAY.ASC")]] <- X[,lapply(.SD, round, 1), by = list(yr, mo, da, doy)]
}
if (per == "Mon") {
moutputs[[paste0(toupper(sel),"MON.ASC")]] <- X[,lapply(.SD, function(x) {round(sum(x),1)}),
.SDcols = -c("da","doy"), by = list(yr, mo)]
}
if (per == "Ann") {
moutputs[[paste0(toupper(sel),"ANN.ASC")]] <- X[,lapply(.SD, function(x) {round(sum(x),1)}),
.SDcols = -c("mo","da","doy"),by = yr]
}
}
}
if (sel == "Swat") {
for (per in rev(colnames(output)[which(output[sel,] == 1)])) {
if (per == "Day") {
moutputs[[paste0(toupper(sel),"DAY.ASC")]] <- X[,lapply(.SD, round ,3), by = list(yr, mo, da, doy, nl)]
}
if (per == "Mon") {
moutputs[[paste0(toupper(sel),"MON.ASC")]] <- X[,lapply(.SD, function(x) {round(mean(x),3)}),
.SDcols = -c("da","doy"), by = list(yr, mo, nl)]
}
if (per == "Ann") {
moutputs[[paste0(toupper(sel),"ANN.ASC")]] <- X[,lapply(.SD, function(x) {round(mean(x),3)}),
.SDcols = -c("mo","da","doy"),by = list(yr, nl)]
}
}
}
if (sel == "Belo") {
for (per in rev(colnames(output)[which(output[sel,] == 1)])) {
if (per == "Day") {
moutputs[[paste0(toupper(sel),"DAY.ASC")]] <- X[,lapply(.SD, round, 1), by = list(yr, mo, da, doy, nl)]
}
if (per == "Mon") {
moutputs[[paste0(toupper(sel),"MON.ASC")]] <- X[,lapply(.SD, function(x) {round(sum(x),1)}),
.SDcols = -c("da","doy"),by = list(yr, mo, nl)]
}
if (per == "Ann") {
moutputs[[paste0(toupper(sel),"ANN.ASC")]] <- X[,lapply(.SD, function(x) {round(sum(x),1)}),
.SDcols = -c("mo","da","doy"), by = list(yr, nl)]
}
}
}
if (sel == "Budg") {
for (per in rev(colnames(output)[which(output[sel,] == 1)])) {
if (per == "Day") {
moutputs[[paste0(toupper(sel),"DAY.ASC")]] <- X[,list(
prec = round(sum(rfal+sfal),1),
flow = round(sum(flow),1),
evap = round(sum(evap),1),
seep = round(sum(seep),1),
snow = round(snow[which.max(doy)],1),
swat = round(swat[which.max(doy)],1),
gwat = round(gwat[which.max(doy)],1),
intr = round(intr[which.max(doy)],1),
ints = round(ints[which.max(doy)],1)),
by = list(yr, mo, da, doy)]
}
if (per == "Mon") {
moutputs[[paste0(toupper(sel),"MON.ASC")]] <- X[, list(
prec = round(sum(rfal+sfal),1),
flow = round(sum(flow),1),
evap = round(sum(evap),1),
seep = round(sum(seep),1),
snow = round(snow[which.max(doy)],1),
swat = round(swat[which.max(doy)],1),
gwat = round(gwat[which.max(doy)],1),
intr = round(intr[which.max(doy)],1),
ints = round(ints[which.max(doy)],1)),
by = list(yr, mo)]
}
if (per == "Ann") {
moutputs[[paste0(toupper(sel),"ANN.ASC")]] <- X[, list(
prec = round(sum(rfal+sfal),1),
flow = round(sum(flow),1),
evap = round(sum(evap),1),
seep = round(sum(seep),1),
snow = round(snow[which.max(doy)],1),
swat = round(swat[which.max(doy)],1),
gwat = round(gwat[which.max(doy)],1),
intr = round(intr[which.max(doy)],1),
ints = round(ints[which.max(doy)],1)),
by = list(yr)]
}
}
}
if (sel == "Misc") {
for (per in rev(colnames(output)[which(output[sel,] == 1)])) {
if (per == "Day") {
moutputs[[paste0(toupper(sel),"DAY.ASC")]] <- X[, list(
vrfln = round(vrfln,1),
safrac = round(safrac,1),
stres = round(stres,3),
adef = round(adef,3),
awat = round(awat,1),
relawat = round(relawat,3),
nits,
balerr = round(balerr, 3)),
by = list(yr, mo, da, doy)]
}
if (per == "Mon") {
moutputs[[paste0(toupper(sel),"MON.ASC")]] <- X[, list(vrfln = round(sum(vrfln),1),
safrac = round(sum(safrac),1),
stres = round(mean(stres),3),
adef = round(mean(adef),3),
awat = round(mean(awat),1),
relawat = round(mean(relawat),3),
nits = sum(nits),
balerr = round(sum(balerr), 3)),
by = list(yr, mo)]
}
if (per == "Ann") {
moutputs[[paste0(toupper(sel),"ANN.ASC")]] <- X[, list(vrfln = round(sum(vrfln),1),
safrac = round(sum(safrac),1),
stres = round(mean(stres),3),
adef = round(mean(adef),3),
awat = round(mean(awat),1),
relawat = round(mean(relawat),3),
nits = sum(nits),
balerr = round(sum(balerr), 3)),
by = list(yr)]
}
}
}
}
return(moutputs)
} |
discrim_regularized <-
function(mode = "classification", engine = "klaR",
frac_common_cov = NULL, frac_identity = NULL) {
args <- list(
frac_common_cov = rlang::enquo(frac_common_cov),
frac_identity = rlang::enquo(frac_identity)
)
parsnip::new_model_spec(
"discrim_regularized",
args = args,
eng_args = NULL,
mode = mode,
method = NULL,
engine = engine
)
}
print.discrim_regularized <- function(x, ...) {
cat("Regularized Discriminant Model Specification (", x$mode, ")\n\n", sep = "")
parsnip::model_printer(x, ...)
if (!is.null(x$method$fit$args)) {
cat("Model fit template:\n")
print(parsnip::show_call(x))
}
invisible(x)
}
update.discrim_regularized <-
function(object,
frac_common_cov = NULL,
frac_identity = NULL,
fresh = FALSE, ...) {
parsnip::update_dot_check(...)
args <- list(
frac_common_cov = rlang::enquo(frac_common_cov),
frac_identity = rlang::enquo(frac_identity)
)
if (fresh) {
object$args <- args
} else {
null_args <- map_lgl(args, parsnip::null_value)
if (any(null_args))
args <- args[!null_args]
if (length(args) > 0)
object$args[names(args)] <- args
}
parsnip::new_model_spec(
"discrim_regularized",
args = object$args,
eng_args = object$eng_args,
mode = object$mode,
method = NULL,
engine = object$engine
)
}
check_args.discrim_regularized <- function(object) {
args <- lapply(object$args, rlang::eval_tidy)
if (is.numeric(args$frac_common_cov) &&
(args$frac_common_cov < 0 | args$frac_common_cov > 1)) {
stop("The common covariance fraction should be between zero and one", call. = FALSE)
}
if (is.numeric(args$frac_identity) &&
(args$frac_identity < 0 | args$frac_identity > 1)) {
stop("The identity matrix fraction should be between zero and one", call. = FALSE)
}
invisible(object)
} |
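# Hedged usage sketch of the model specification above (assumes the parsnip,
# discrim and klaR packages are installed and loaded); the fraction values are
# illustrative, not recommendations.
# spec <- discrim_regularized(frac_common_cov = 0.5, frac_identity = 0.25)
# parsnip::fit(spec, Species ~ ., data = iris)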
output$hot <- renderRHandsontable({
inputData <- values[["inputData"]]
prices <- inputData[,"Prices \n($/unit)"]
output <- inputData[,grepl("Quantities|Revenue",colnames(inputData), perl=TRUE)]
missPrices <- isTRUE(any(is.na(prices[!is.na(output)])))
if(input$supply == "2nd Score Auction"){colnames(inputData)[grepl("Cost Changes",colnames(inputData))] <-'Post-merger\n Cost Changes\n($/unit)'}
else{colnames(inputData)[grepl("Cost Changes",colnames(inputData))] <-'Post-merger\n Cost Changes\n(Proportion)'}
if(missPrices && input$supply == "2nd Score Auction"){colnames(inputData)[grepl("Margins",colnames(inputData))] <- "Margins\n ($/unit)"}
else{colnames(inputData)[grepl("Margins",colnames(inputData))] <- "Margins\n (p-c)/p"}
if (missPrices && any(grepl("ces|aids", demand(), perl=TRUE), na.rm=TRUE)){colnames(inputData)[grepl("Quantities",colnames(inputData))] <- "Revenues"}
  else{colnames(inputData)[grepl("Revenues",colnames(inputData))] <- "Quantities"}
if (!is.null(inputData))
rhandsontable(inputData, stretchH = "all", contextMenu = FALSE ) %>% hot_col(col = 1:ncol(inputData), valign = "htMiddle") %>%
hot_col(col = which(sapply(inputData, is.numeric)), halign = "htCenter") %>% hot_cols(columnSorting = TRUE)
})
output$hotVertical <- renderRHandsontable({
if (input$mergerTypeVertical == "Upstream"){
inputData <- mergersInputs(nrow = input$addRowsVertical, type = "Vertical", typeVertical = "Upstream")
}
if (input$mergerTypeVertical == "Downstream"){
inputData <- mergersInputs(nrow = input$addRowsVertical, type = "Vertical", typeVertical = "Downstream")
}
if (input$mergerTypeVertical == "Vertical"){
inputData <- mergersInputs(nrow = input$addRowsVertical, type = "Vertical", typeVertical = "Vertical")
}
missPricesDown <- isTRUE(any(is.na(inputData$pricesDown[!is.na(inputData$sharesDown)])))
missPricesUp <- isTRUE(any(is.na(inputData$pricesUp[!is.na(inputData$sharesDown)])))
if(missPricesDown && input$supplyVertical == "2nd Score Auction"){colnames(inputData)[grepl("marginsDown", colnames(inputData))] <- "marginsDown\n ($/unit)"}
else{colnames(inputData)[grepl("marginsDown", colnames(inputData))] <- "marginsDown\n (p-c)/p"}
if(missPricesUp && input$supplyVertical == "2nd Score Auction"){colnames(inputData)[grepl("marginsUp", colnames(inputData))] <- "marginsUp\n ($/unit)"}
else{colnames(inputData)[grepl("marginsUp", colnames(inputData))] <- "marginsUp\n (p-c)/p"}
if (!is.null(inputData))
rhandsontable(inputData, stretchH = "all", contextMenu = FALSE ) %>% hot_col(col = 1:ncol(inputData), valign = "htMiddle") %>%
hot_col(col = which(sapply(inputData, is.numeric)), halign = "htCenter") %>% hot_cols(columnSorting = TRUE)
})
output$results <-
renderTable({
if(input$inTabset != "respanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
mergersSummary(values[["sim"]])
}, na = "", digits = 1)
output$resultsVertical <-
renderTable({
if(input$inTabsetVertical != "respanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
capture.output(result <- summary(valuesVertical[["sim"]], market = TRUE))
result <- as.data.frame(result)
}, na = "", digits = 1)
output$results_shareOut <- renderTable({
if(input$inTabset != "detpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
mergersNoPurch(values[["sim"]])
}, rownames = TRUE, digits = 1, align = "c")
output$results_shareOutVertical <- renderTable({
if(input$inTabsetVertical != "detpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
mergersNoPurch(valuesVertical[["sim"]])
}, rownames = TRUE, digits = 1, align = "c")
output$results_detailed <- renderTable({
if(input$inTabset != "detpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
if(input$supply == "Cournot"){
res <- NULL
capture.output(try(res <- summary(values[["sim"]], revenue= FALSE,market=FALSE),silent=TRUE))
res$product <- res$mcDelta <- NULL
try(colnames(res) <- c("Merging Party","Name","Pre-Merger Price","Post-Merger Price", "Price Change (%)","Pre-Merger Quantity","Post-Merger Quantity", "Output Change (%)"),silent=TRUE)
} else {
isAuction <- grepl("Auction",class(values[["sim"]]))
isRevDemand <- grepl("ces|aids",class(values[["sim"]]),ignore.case = TRUE)
inLevels <- FALSE
missPrice <- any(is.na(values[["sim"]]@prices))
if(isAuction && missPrice){inLevels = TRUE}
capture.output(res <- summary(values[["sim"]], revenue=isRevDemand & missPrice, insideOnly=TRUE, levels=inLevels))
res$Name <- rownames(res)
res$mcDelta <- NULL
res <- res[,c(1, ncol(res), 2 : (ncol(res) - 1))]
res$cmcr <- NA
try(res$cmcr[res$isParty=="*"] <- cmcr(values[["sim"]]))
thesenames <- c("Merging Party","Name","Pre-Merger Price","Post-Merger Price", "Price Change (%)","Pre-Merger Share (%)","Post-Merger Share (%)", "Share Change (%)",'Compensating Marginal Cost Reduction (%)')
colnames(res) <- thesenames
if(all(is.na(res$`Compensating Marginal Cost Reduction (%)`))) res$`Compensating Marginal Cost Reduction (%)` <- NULL
if(inLevels){ colnames(res)[ colnames(res) == "Price Change (%)"] = "Price Change ($/unit)"}
}
res
}, digits = 2)
output$results_detailedVertical <- renderTable({
if(input$inTabsetVertical != "detpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
capture.output(result <- summary(valuesVertical[["sim"]]))
result <- as.data.frame(result)
result$Name <- rownames(result)
result <- result[, c(1, ncol(result), 2:(ncol(result)-1))]
}, digits = 2)
output$results_mktelast <- renderTable({
if(input$inTabset != "elastpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
if(input$pre_elast == "Pre-Merger"){ preMerger = TRUE}
else{preMerger = FALSE}
res <- as.matrix(elast(values[["sim"]], preMerger=preMerger, market = TRUE))
colnames(res) <- "Market"
res
}, rownames = FALSE)
output$results_mktelastVertical <- renderTable({
if(input$inTabsetVertical != "elastpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
if(input$pre_elastVertical == "Pre-Merger"){preMerger = TRUE}
else{preMerger = FALSE}
res <- as.matrix(elast(valuesVertical[["sim"]], preMerger = preMerger, market = TRUE))
colnames(res) <- "Market"
res
}, rownames = FALSE)
output$results_elast <- renderTable({
if(input$inTabset != "elastpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
isCournot <- grepl("Cournot", class(values[["sim"]]))
if(input$pre_elast == "Pre-Merger"){preMerger = TRUE}
else{preMerger = FALSE}
if(!isCournot && input$diversions){
res <- diversion(values[["sim"]], preMerger = preMerger)
}
else{res <- elast(values[["sim"]], preMerger = preMerger)}
if(isCournot){colnames(res) <- "Elasticity"}
res
}, rownames = TRUE)
output$results_elastVertical <- renderTable({
if(input$inTabsetVertical != "elastpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
isCournot <- grepl("Cournot", class(valuesVertical[["sim"]]))
if(input$pre_elastVertical == "Pre-Merger"){preMerger = TRUE}
else{preMerger = FALSE}
if(!isCournot && input$diversionsVertical){
res <- diversion(valuesVertical[["sim"]], preMerger = preMerger)
}
else{res <- elast(valuesVertical[["sim"]], preMerger = preMerger)}
if(isCournot){colnames(res) <- "Elasticity"}
res
}, rownames = TRUE)
output$results_diag_elast <- renderTable({
if(input$inTabset != "diagpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
res <- mergersDiag(values[["sim"]], mktElast = TRUE)
res
}, digits = 2, rownames = FALSE, align = "c")
output$results_diag_elastVertical <- renderTable({
if(input$inTabsetVertical != "diagpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
res <- mergersDiag(valuesVertical[["sim"]], mktElast = TRUE)
res
}, digits = 2, rownames = FALSE, align = "c")
output$results_diagnostics <- renderTable({
if(input$inTabset!= "diagpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
res <- mergersDiag(values[["sim"]])
res
}, digits = 0 ,rownames = TRUE, align = "c")
output$results_diagnosticsVertical <- renderTable({
if(input$inTabsetVertical != "diagpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
res <- mergersDiag(valuesVertical[["sim"]])
res
}, digits = 2, rownames = FALSE, align = "c")
output$overIDText <- renderText({
if(is.null(values[["inputData"]])){return()}
isOverID(input$supply, input$calcElast, values[["inputData"]])
})
output$overIDTextVertical <- renderText({
if(is.null(valuesVertical[["inputData"]])){return()}
})
output$parameters <- renderPrint({
if(input$inTabset != "diagpanel" || input$simulate == 0 || is.null(values[["sim"]])){return()}
print(getParms(values[["sim"]], digits = 2))
})
output$parametersVertical <- renderPrint({
if(input$inTabsetVertical != "diagpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["sim"]])){return()}
print(getParms(valuesVertical[["sim"]], digits = 2))
})
output$results_code <- renderPrint({
if(input$inTabset != "codepanel"){return()}
thisCode <- mergersTemplateCode("Horizontal")
cat(thisCode)
})
output$results_codeVertical <- renderPrint({
if(input$inTabsetVertical != "codepanelVertical"){return()}
thisCode <- mergersTemplateCode("Vertical")
cat(thisCode)
})
output$warnings <- renderText({
if(input$inTabset!= "msgpanel" || input$simulate == 0 || is.null(values[["msg"]]$warning)){return()}
paste(values[["msg"]]$warning, collapse = "\n")
})
output$warningsVertical <- renderText({
if(input$inTabsetVertical != "msgpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["msg"]]$warning)){return()}
paste(valuesVertical[["msg"]]$warning, collapse = "\n")
})
output$errors <- renderText({
if(input$inTabset!= "msgpanel" || input$simulate == 0 || is.null(values[["msg"]]$error)){cat(return())}
paste(values[["msg"]]$error, collapse = "\n")
})
output$errorsVertical <- renderText({
if(input$inTabsetVertical != "msgpanelVertical" || input$simulateVertical == 0 || is.null(valuesVertical[["msg"]]$error)){cat(return())}
paste(valuesVertical[["msg"]]$error, collapse = "\n")
})
output$directionsVertical <- renderUI({
if (input$mergerTypeVertical == "Upstream") {
HTML(paste("Click on the Play button to simulate an upstream merger between 'U1' and 'U2'."))
} else if (input$mergerTypeVertical == "Downstream") {
HTML(paste("Click on the Play button to simulate a downstream merger between 'D1' and 'D2'."))
} else if (input$mergerTypeVertical == "Vertical") {
HTML(paste("Click on the Play button to simulate a vertical merger between 'U1' and 'D1'."))
}
}) |
show_c_source <- function(fun) {
fun <- substitute(fun)
stopifnot(is.call(fun))
name <- as.character(fun[[1]])
if (!(name %in% c(".Internal", ".Primitive"))) {
stop("Only know how to look up .Internal and .Primitive calls",
call. = FALSE)
}
internal_name <- as.character(fun[[2]][[1]])
names <- names_c()
found <- names[names$name == internal_name, , drop = FALSE]
if (nrow(found) != 1) {
stop("Could not find entry for ", internal_name, call. = FALSE)
}
message(internal_name, " is implemented by ", found$cfun,
" with op = ", found$offset)
query <- sprintf("SEXP attribute_hidden %s+repo:wch/r-source&type=Code",
found$cfun)
url <- paste0("https://github.com/search?q=", utils::URLencode(query))
if (interactive()) {
utils::browseURL(url)
} else {
message("Please visit ", url)
}
}
names_c <- function() {
if (exists("names_c", envir = cache)) return(cache$names_c)
lines <- readLines("http://svn.r-project.org/R/trunk/src/main/names.c")
fun_table <- lines[grepl("^[{][\"]", lines)]
fun_table <- gsub("[{}]", "", fun_table)
fun_table <- gsub(",$", "", fun_table)
fun_table <- gsub("/[*].*[*]/", "", fun_table)
  table <- utils::read.csv(text = fun_table, strip.white = TRUE, header = FALSE,
stringsAsFactors = FALSE)
names(table) <- c("name", "cfun", "offset", "eval", "arity", "pp_kind",
"precedence", "rightassoc")
table$eval <- sprintf("%03d", table$eval)
table$rightassoc <- table$rightassoc == 1
cache$names_c <- table
table
}
cache <- new.env(parent = emptyenv()) |
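# Hedged usage sketch of show_c_source() above (requires internet access the
# first time, to download names.c); the lookup mirrors how .Internal calls are
# written in base R.
# show_c_source(.Internal(mean(x)))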
targets::tar_test("tar_git_init() without .gitignore", {
skip_os_git()
store <- targets::tar_config_get("store")
dir.create(store)
expect_false(file.exists(file.path(store, ".git")))
expect_false(file.exists(file.path(store, ".gitignore")))
dir <- getwd()
tar_git_init(store = store)
expect_equal(getwd(), dir)
expect_true(file.exists(file.path(store, ".git")))
expect_false(file.exists(file.path(store, ".gitignore")))
})
targets::tar_test("tar_git_init() .gitattributes under vcs", {
skip_os_git()
store <- targets::tar_config_get("store")
dir.create(store)
expect_false(file.exists(file.path(store, ".gitattributes")))
dir <- getwd()
tar_git_init(store = store)
expect_true(file.exists(file.path(store, ".gitattributes")))
unlink(file.path(store, ".gitattributes"))
status <- tar_git_status_data(store = store)
expect_equal(status$file, ".gitattributes")
expect_equal(status$status, "deleted")
expect_equal(status$staged, FALSE)
})
targets::tar_test("tar_git_init() with git_lfs = FALSE", {
skip_os_git()
store <- targets::tar_config_get("store")
dir.create(store)
expect_false(file.exists(file.path(store, ".gitattributes")))
dir <- getwd()
tar_git_init(store = store, git_lfs = FALSE)
expect_false(file.exists(file.path(store, ".gitattributes")))
})
targets::tar_test("tar_git_init() with .gitignore", {
skip_os_git()
store <- targets::tar_config_get("store")
targets::tar_script(targets::tar_target(x, 1))
targets::tar_make(callr_function = NULL)
writeLines("*", file.path(store, ".gitignore"))
expect_false(file.exists(file.path(store, ".git")))
expect_true(file.exists(file.path(store, ".gitignore")))
dir <- getwd()
tar_git_init(store = store)
expect_equal(getwd(), dir)
expect_true(file.exists(file.path(store, ".git")))
expect_true(file.exists(file.path(store, ".gitignore")))
})
targets::tar_test("tar_git_init() is idempotent", {
skip_os_git()
store <- targets::tar_config_get("store")
dir.create(store)
expect_false(file.exists(file.path(store, ".git")))
expect_false(file.exists(file.path(store, ".gitignore")))
dir <- getwd()
tar_git_init(store = store)
tar_git_init(store = store)
expect_equal(getwd(), dir)
expect_true(file.exists(file.path(store, ".git")))
expect_false(file.exists(file.path(store, ".gitignore")))
}) |
context("parse code")
test_that("create code is character", {
expect_true(create_code() %>% code_as_table() %>% is.data.frame())
}
) |
default_nproc_blas <- function() {
cl <- parallel::makePSOCKcluster(1)
on.exit(parallel::stopCluster(cl), add = TRUE)
parallel::clusterEvalQ(cl, RhpcBLASctl::blas_get_num_procs())[[1]]
}
get_blas_ncores <- function() {
utils::capture.output({
ncores <- RhpcBLASctl::blas_get_num_procs()
})
ncores
}
set_blas_ncores <- function(ncores) {
save <- get_blas_ncores()
utils::capture.output({
RhpcBLASctl::blas_set_num_threads(ncores)
})
invisible(save)
}
assert_cores <- function(ncores) {
bigassertr::assert_one_int(ncores)
if (ncores < 1)
stop2("'ncores' should be at least 1.")
if (ncores > getOption("bigstatsr.ncores.max"))
stop2("You are trying to use more cores than allowed. See `?assert_cores`.")
if (ncores > 1 && getOption("bigstatsr.check.parallel.blas")) {
if (is.null(getOption("default.nproc.blas")))
options(default.nproc.blas = default_nproc_blas())
if (getOption("default.nproc.blas") > 1)
stop2("Two levels of parallelism are used. See `?assert_cores`.")
}
}
nb_cores <- function() {
ncores <- parallelly::availableCores(logical = FALSE)
all_cores <- parallelly::availableCores(logical = TRUE)
max(1L, `if`(ncores < all_cores, ncores, all_cores - 1L))
} |
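# Illustrative usage (sketch; assumes the parallelly, bigassertr and
# RhpcBLASctl packages used above are installed):
# nb_cores()       # physical cores; one core is left free when there is no hyper-threading
# assert_cores(2)  # errors beyond getOption("bigstatsr.ncores.max") or with a parallel BLAS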
context("minor breaks")
l1 <- c(0, 9)
l2 <- -l1
b1 <- extended_breaks()(l1)
b2 <- extended_breaks()(l2)
m1 <- regular_minor_breaks()(b1, l1, n = 2)
m2 <- regular_minor_breaks()(b2, l2, n = 2)
test_that("minor breaks are calculated correctly", {
expect_equal(m1, seq(b1[1], b1[length(b1)], by = 1.25))
expect_equal(m2, seq(b2[1], b2[length(b2)], by = 1.25))
})
test_that("minor breaks for reversed scales are comparable to non-reversed", {
expect_equal(m1, sort(-m2))
}) |
LWnomo1 <- function(values=FALSE, ...) {
bigtix <- function(x, fudge=10, roundingto=c(1, 2, 5)) {
onedigit <- signif(x, 1) - round(x, fudge) == 0
gooddigit <- substring(format(signif(x, 1), sci=TRUE), 1, 1) %in% roundingto
onedigit & gooddigit
}
ep2l <- c(
seq(50, 80, 5), seq(82, 90, 2), seq(91, 95, 1),
seq(95.5, 98, 0.5), seq(98.2, 99, 0.2), seq(99.1, 99.5, 0.1),
seq(99.55, 99.8, 0.05), seq(99.82, 99.9, 0.02), seq(99.91, 99.95, 0.01),
seq(99.955, 99.98, 0.005))
ep1l <- rev(100-ep2l)
ep1l. <- sort(unique(c(range(ep1l), ep1l[bigtix(ep1l)])))
ep2l. <- rev(100 - ep1l.)
chicontl <- 100*
c(seq(0.001, 0.002, 0.0002), seq(0.0025, 0.005, 0.0005),seq(0.006, 0.01, 0.001),
seq(0.012, 0.02, 0.002), seq(0.025, 0.05, 0.005), seq(0.06, 0.1, 0.01),
seq(0.12, 0.2, 0.02), seq(0.25, 0.5, 0.05), seq(0.6, 1, 0.1),
seq(1.2, 2, 0.2))
chicontladj <- chicontl/100
chicontl. <- sort(unique(c(range(chicontl), chicontl[bigtix(chicontl)])))
chicontladj. <- chicontl./100
opmeprange <- 10^((log10(c(0.02, 50)) + log10(c(0.1, 200)))/2)
opmepladj <- sort(unique(c(opmeprange,
seq(0.05, 0.1, 0.01), seq( 0.12, 0.2, 0.02), seq( 0.25, 0.5, 0.05),
seq(0.6, 1, 0.1), seq( 1.2, 2, 0.2), seq( 2.5, 5, 0.5),
seq(6, 10, 1), seq(12, 20, 2), seq(25, 50, 5),
seq(60, 100, 10))))
opmepl <- 2*log10(opmepladj)
opmepladj. <- sort(unique(c(range(opmepladj), opmepladj[bigtix(opmepladj)])))
opmepl. <- 2*log10(opmepladj.)
par(xaxs="i", yaxs="i", mar=c(1, 1.5, 4.5, 0.5), las=1, ...)
plot(0:1, 0:1, type="n", axes=FALSE, xlab="", ylab="")
nlines <- 1.5
inches <- nlines * par("cin")[2] * par("cex") * par("lheight")
mycoord <- diff(grconvertX(c(0, inches), from="inches", to="user"))
axis(2, pos=0.1, at=rescale(log10(ep1l), 0:1), labels=FALSE, tck=-0.01)
axis(2, pos=0.1, at=rescale(log10(ep1l.), 0:1), labels=round(rev(ep2l.), 2))
axis(2, pos=0.1+mycoord, at=rescale(log10(ep1l.), 0:1),
labels=round(ep1l., 2), tick=FALSE, hadj=0)
axis(2, pos=0.5, at=rescale(opmepl, 0:1)[-1], labels=FALSE, tck=-0.01)
axis(2, pos=0.5, at=rescale(opmepl., 0:1)[-1],
labels=round(opmepladj., 3)[-1])
axis(2, pos=0.9, at=rescale(log10(chicontl), 0:1), labels=FALSE, tck=-0.01)
axis(2, pos=0.9, at=rescale(log10(chicontl.), 0:1),
labels=round(chicontladj., 4))
mtext(c("Expected\n% effect", "Observed minus\nexpected % effect",
"(Chi)\U00B2\nfor samples\nof one"), side=3, at=c(0.1, 0.5, 0.9), line=1)
if(values) {
scale1l <- data.frame(x= c(0.1, 0.1), y=0:1, values=c(99.98, 50))
scale1r <- data.frame(x= c(0.1, 0.1), y=0:1, values=c(0.02, 50))
scale2 <- data.frame(x= c(0.5, 0.5), y=0:1, values=c(0.045, 100))
scale3 <- data.frame(x= c(0.9, 0.9), y=0:1, values=c(0.001, 2))
out <- list(scale1l=scale1l, scale1r=scale1r, scale2=scale2, scale3=scale3)
return(out)
}
} |
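# Illustrative usage (sketch; assumes the package's rescale() helper used above
# is available): draw the nomogram and return the scale calibration values.
# scales <- LWnomo1(values = TRUE)
# str(scales)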
model_parameters.bfsl <- function(model,
ci = .95,
ci_method = "residual",
p_adjust = NULL,
verbose = TRUE,
...) {
out <- .model_parameters_generic(
model = model,
ci = ci,
ci_method = ci_method,
merge_by = "Parameter",
p_adjust = p_adjust,
...
)
attr(out, "object_name") <- deparse(substitute(model), width.cutoff = 500)
out
}
standard_error.bfsl <- function(model, ...) {
cf <- stats::coef(model)
params <- data.frame(
Parameter = rownames(cf),
SE = unname(cf[, "Std. Error"]),
stringsAsFactors = FALSE,
row.names = NULL
)
insight::text_remove_backticks(params, verbose = FALSE)
}
degrees_of_freedom.bfsl <- function(model, method = "residual", ...) {
if (is.null(method)) {
method <- "wald"
}
method <- match.arg(tolower(method), choices = c("analytical", "any", "fit", "wald", "residual", "normal"))
if (method %in% c("wald", "residual", "fit")) {
model$df.residual
} else {
degrees_of_freedom.default(model, method = method, ...)
}
} |
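# Illustrative usage (sketch, assuming a fitted model `m` from the 'bfsl'
# package and the .model_parameters_generic() internals used above):
# model_parameters.bfsl(m)
# standard_error.bfsl(m)
# degrees_of_freedom.bfsl(m, method = "residual")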
expect_true(as.mic(8) == as.mic("8"))
expect_true(as.mic("1") > as.mic("<=0.0625"))
expect_true(as.mic("1") < as.mic(">=32"))
expect_true(is.mic(as.mic(8)))
expect_equal(as.double(as.mic(">=32")), 32)
expect_equal(as.numeric(as.mic(">=32")), 32)
expect_equal(as.integer(as.mic(">=32")), 32)
expect_equal(suppressWarnings(as.logical(as.mic("INVALID VALUE"))), NA)
x <- as.mic(c(2, 4))
expect_inherits(x[1], "mic")
expect_inherits(x[[1]], "mic")
expect_inherits(c(x[1], x[9]), "mic")
expect_inherits(unique(x[1], x[9]), "mic")
expect_inherits(droplevels(c(x[1], x[9])), "mic")
x[2] <- 32
expect_inherits(x, "mic")
expect_warning(as.mic("INVALID VALUE"))
pdf(NULL)
expect_silent(barplot(as.mic(c(1, 2, 4, 8))))
expect_silent(plot(as.mic(c(1, 2, 4, 8))))
expect_silent(plot(as.mic(c(1, 2, 4, 8)), expand = FALSE))
expect_silent(plot(as.mic(c(1, 2, 4, 8)), mo = "esco", ab = "cipr"))
if (AMR:::pkg_is_available("ggplot2")) {
expect_inherits(autoplot(as.mic(c(1, 2, 4, 8))), "gg")
expect_inherits(autoplot(as.mic(c(1, 2, 4, 8)), expand = FALSE), "gg")
expect_inherits(autoplot(as.mic(c(1, 2, 4, 8, 32)), mo = "esco", ab = "cipr"), "gg")
}
expect_stdout(print(as.mic(c(1, 2, 4, 8))))
expect_inherits(summary(as.mic(c(2, 8))), c("summaryDefault", "table"))
if (AMR:::pkg_is_available("dplyr", min_version = "1.0.0")) {
expect_stdout(print(tibble(m = as.mic(2:4))))
}
x <- random_mic(50)
x_double <- as.double(gsub("[<=>]+", "", as.character(x)))
suppressWarnings(expect_identical(mean(x), mean(x_double)))
suppressWarnings(expect_identical(median(x), median(x_double)))
suppressWarnings(expect_identical(quantile(x), quantile(x_double)))
suppressWarnings(expect_identical(abs(x), abs(x_double)))
suppressWarnings(expect_identical(sign(x), sign(x_double)))
suppressWarnings(expect_identical(sqrt(x), sqrt(x_double)))
suppressWarnings(expect_identical(floor(x), floor(x_double)))
suppressWarnings(expect_identical(ceiling(x), ceiling(x_double)))
suppressWarnings(expect_identical(trunc(x), trunc(x_double)))
suppressWarnings(expect_identical(round(x), round(x_double)))
suppressWarnings(expect_identical(signif(x), signif(x_double)))
suppressWarnings(expect_identical(exp(x), exp(x_double)))
suppressWarnings(expect_identical(log(x), log(x_double)))
suppressWarnings(expect_identical(log10(x), log10(x_double)))
suppressWarnings(expect_identical(log2(x), log2(x_double)))
suppressWarnings(expect_identical(expm1(x), expm1(x_double)))
suppressWarnings(expect_identical(log1p(x), log1p(x_double)))
suppressWarnings(expect_identical(cos(x), cos(x_double)))
suppressWarnings(expect_identical(sin(x), sin(x_double)))
suppressWarnings(expect_identical(tan(x), tan(x_double)))
if (getRversion() >= "3.1") {
suppressWarnings(expect_identical(cospi(x), cospi(x_double)))
suppressWarnings(expect_identical(sinpi(x), sinpi(x_double)))
suppressWarnings(expect_identical(tanpi(x), tanpi(x_double)))
}
suppressWarnings(expect_identical(acos(x), acos(x_double)))
suppressWarnings(expect_identical(asin(x), asin(x_double)))
suppressWarnings(expect_identical(atan(x), atan(x_double)))
suppressWarnings(expect_identical(cosh(x), cosh(x_double)))
suppressWarnings(expect_identical(sinh(x), sinh(x_double)))
suppressWarnings(expect_identical(tanh(x), tanh(x_double)))
suppressWarnings(expect_identical(acosh(x), acosh(x_double)))
suppressWarnings(expect_identical(asinh(x), asinh(x_double)))
suppressWarnings(expect_identical(atanh(x), atanh(x_double)))
suppressWarnings(expect_identical(lgamma(x), lgamma(x_double)))
suppressWarnings(expect_identical(gamma(x), gamma(x_double)))
suppressWarnings(expect_identical(digamma(x), digamma(x_double)))
suppressWarnings(expect_identical(trigamma(x), trigamma(x_double)))
suppressWarnings(expect_identical(cumsum(x), cumsum(x_double)))
suppressWarnings(expect_identical(cumprod(x), cumprod(x_double)))
suppressWarnings(expect_identical(cummax(x), cummax(x_double)))
suppressWarnings(expect_identical(cummin(x), cummin(x_double)))
suppressWarnings(expect_identical(!x, !x_double))
suppressWarnings(expect_identical(all(x), all(x_double)))
suppressWarnings(expect_identical(any(x), any(x_double)))
suppressWarnings(expect_identical(sum(x), sum(x_double)))
suppressWarnings(expect_identical(prod(x), prod(x_double)))
suppressWarnings(expect_identical(min(x), min(x_double)))
suppressWarnings(expect_identical(max(x), max(x_double)))
suppressWarnings(expect_identical(range(x), range(x_double)))
el1 <- random_mic(50)
el1_double <- as.double(gsub("[<=>]+", "", as.character(el1)))
el2 <- random_mic(50)
el2_double <- as.double(gsub("[<=>]+", "", as.character(el2)))
suppressWarnings(expect_identical(el1 + el2, el1_double + el2_double))
suppressWarnings(expect_identical(el1 - el2, el1_double - el2_double))
suppressWarnings(expect_identical(el1 * el2, el1_double * el2_double))
suppressWarnings(expect_identical(el1 / el2, el1_double / el2_double))
suppressWarnings(expect_identical(el1 ^ el2, el1_double ^ el2_double))
suppressWarnings(expect_identical(el1 %% el2, el1_double %% el2_double))
suppressWarnings(expect_identical(el1 %/% el2, el1_double %/% el2_double))
suppressWarnings(expect_identical(el1 & el2, el1_double & el2_double))
suppressWarnings(expect_identical(el1 | el2, el1_double | el2_double))
suppressWarnings(expect_identical(el1 == el2, el1_double == el2_double))
suppressWarnings(expect_identical(el1 != el2, el1_double != el2_double))
suppressWarnings(expect_identical(el1 < el2, el1_double < el2_double))
suppressWarnings(expect_identical(el1 <= el2, el1_double <= el2_double))
suppressWarnings(expect_identical(el1 >= el2, el1_double >= el2_double))
suppressWarnings(expect_identical(el1 > el2, el1_double > el2_double)) |
knitr::opts_chunk$set(
collapse = TRUE,
  comment = "#>",  # assumed output prefix
out.width="80%",
dpi=120
)
library(lolog)
set.seed(1)
suppressPackageStartupMessages(library(ergm))
data(florentine)
flomarriage
plot(flomarriage)
flomodel.01 <- lolog(flomarriage~edges)
flomodel.01
summary(flomodel.01)
flomodel.02 <- lolog(flomarriage~edges()+triangles(), verbose=FALSE)
summary(flomodel.02)
coef1 = flomodel.02$theta[1]
coef2 = flomodel.02$theta[2]
logodds = coef1 + c(0,1,2) * coef2
expit = function(x) 1/(1+exp(-x))
ps = expit(logodds)
coef1 = round(coef1, 3)
coef2 = round(coef2, 3)
logodds = round(logodds, 3)
ps = round(ps, 3)
class(flomodel.02)
names(flomodel.02)
flomodel.02$theta
flomodel.02$formula
wealth <- flomarriage %v% 'wealth'
wealth
plot(flomarriage, vertex.cex=wealth/25)
flomodel.03 <- lolog(flomarriage~edges+nodeCov('wealth'))
summary(flomodel.03)
wdiff<-outer(flomarriage %v% "wealth", flomarriage %v% "wealth",function(x,y){abs(x-y)>20})
table(wdiff)
flomodel.04 <- lolog(flomarriage~edges+nodeCov('wealth')+edgeCov(wdiff,"inequality"))
summary(flomodel.04)
data(samplk)
ls()
samplk3
plot(samplk3)
sampmodel.01 <- lolog(samplk3~edges+mutual, verbose=FALSE)
summary(sampmodel.01)
data(faux.mesa.high)
mesa <- faux.mesa.high
mesa
plot(mesa, vertex.col='Grade')
mesa %v% "GradeCat" <- as.character(mesa %v% "Grade")
fauxmodel.01 <- lolog(mesa ~edges + nodeMatch('GradeCat') + nodeMatch('Race'))
summary(fauxmodel.01)
fauxmodel.02 <- lolog(mesa ~edges + nodeMatch('GradeCat') + nodeMatch('Race') +
triangles + star(2), verbose=FALSE)
summary(fauxmodel.02)
library(network)
data(lazega)
seniority <- as.numeric(lazega %v% "seniority")
fit <- lolog(lazega ~ edges() + triangles() + nodeCov("cSeniority") +
nodeCov("cPractice") + nodeMatch("gender") + nodeMatch("practice") +
nodeMatch("office") | seniority, verbose=FALSE)
summary(fit)
calculateStatistics(mesa ~ edges + triangles + degree(0:15))
nets <- simulate(flomodel.03,nsim=10)
plot(nets[[1]])
flomodel.04 <- lolog(flomarriage ~ edges() + preferentialAttachment(),
flomarriage ~ star(2), verbose=FALSE)
summary(flomodel.04)
gdeg <- gofit(flomodel.03, flomarriage ~ degree(0:10))
gdeg
plot(gdeg)
gesp <- gofit(flomodel.03, flomarriage ~ esp(0:5))
gesp
plot(gesp) |
library(hamcrest)
test.matchFactorToCharacter <- function() {
x <- factor(c("Yes", "No", "Yes", "Yes"))
assertThat(match(x[2:3], levels(x)), equalTo(c(1,2)))
assertThat(match(levels(x), x[2:3]), equalTo(c(1,2)))
} |
FMANOVA.approximation <- function(formula, dataset, data.fuzzified, sig=0.05, breakpoints= 100, index.var =NA, int.method = "int.simpson", plot = TRUE){
if(is.trfuzzification(data.fuzzified) == TRUE){data.fuzzified <- tr.gfuzz(data.fuzzified, breakpoints = breakpoints)}
if(is.fuzzification(data.fuzzified) == FALSE){stop("Problems with the fuzzification matrix")}
breakpoints <- ncol(data.fuzzified) - 1
v <- c("TrapezoidalFuzzyNumber", "PowerFuzzyNumber", "PiecewiseLinearFuzzyNumber", "DiscontinuousFuzzyNumber", "FuzzyNumber")
if (unique(class(sig) %in% v) == TRUE){sig <- core(sig)[1]} else if (is.alphacuts(sig) == TRUE){sig <- sig[nrow(sig),1]
} else if (is.na(sig) == TRUE){stop("Significance level not defined")}
mf <- model.frame(formula, dataset)
if (ncol(mf) == 2){stop("The FANOVA function should be used for this model")}
if (length(which((lapply(mf, nlevels)[1:ncol(mf)] > 2) == TRUE)) == 0){
data <- as.data.frame(model.matrix(mf, dataset))
} else {
dataset[,] <- lapply(dataset[,], as.numeric)
mf <- model.frame(formula, dataset)
data <- as.data.frame(model.matrix(mf, dataset))
}
Yc <- as.matrix(model.response(mf))
ok <- complete.cases(data, Yc)
data <- data[ok,]
Y <- data.fuzzified
data[,] <- lapply(data[,], factor)
if (colnames(data)[1] != "(Intercept)"){nc = ncol(data)} else if (colnames(data)[1] == "(Intercept)") {nc = ncol(data) - 1}
r <- matrix(rep(0), ncol=1, nrow = nc)
for(u in 2:ncol(data)){r[u-1,1] <- nlevels(data[,u])
}
nt<- matrix(rep(0), ncol=1, nrow=nc)
nt[,] <- nrow(data[,])
ni <- matrix(rep(0), nrow= nc, ncol = max(r))
for(u in 2:(ncol(data))){ni[u-1,] <- table(data[,u])}
b <- breakpoints+1
mat.means <- array(rep(0), dim=c(nc,max(r)*b,2))
dist.mat.means <- matrix(rep(0), nrow = nc, ncol= max(r))
Y.. <- array(rep(0), dim=c(nc,b,2))
for (u in 2:ncol(data)){
WMS <- 0
for(v in 1:r[u-1,1]){
Part.mean <- Fuzzy.sample.mean(Y[which(data[,u]==levels(data[,u])[v]),,])
mat.means[u-1,((b*(v-1)+1):(b*(v-1)+b)),1] <- Part.mean[,1]
mat.means[u-1,((b*(v-1)+1):(b*(v-1)+b)),2] <- rev(Part.mean[,2])
LY.. <- Part.mean*ni[u-1,v]
WMS <- WMS + LY../nt[u-1]
}
Y..[u-1,,1] <- WMS[,1]
Y..[u-1,,2] <- rev(WMS[,2])
}
df <- matrix(rep(0), nrow=nc, ncol =1)
E <- array(rep(0), dim=c(nc,b,2))
T <- array(rep(0), dim=c(nc,b,2))
H <- array(rep(0), dim=c(nc,b,2))
for(u in 2:(ncol(data))){
SE <- 0
ST <- 0
SH <- 0
E11S <- 0
T11S <- 0
H11S <- 0
T11.mean <- cbind(Y..[u-1,,1], rev(Y..[u-1,,2]))
colnames(T11.mean) <- c("L","U")
for(v in 1:r[u-1,1]){
Group <- Y[which(data[,u] == levels(data[,u])[v]),,]
E11 <- array(rep(0), dim=c(nrow(Group),b,2))
T11 <- array(rep(0), dim=c(nrow(Group),b,2))
H11 <- array(rep(0), dim=c(nc,b,2))
E11.mean <- cbind(mat.means[u-1,((b*(v-1)+1)):((b*(v-1)+b)),1], rev(mat.means[u-1,((b*(v-1)+1)):((b*(v-1)+b)),2]))
colnames(E11.mean) <- c("L","U")
for (w in 1:nrow(Group)){
E11.part <- cbind(Group[w,,1], rev(Group[w,,2]))
colnames(E11.part) <- c("L","U")
E11 <- Fuzzy.Difference(E11.part, E11.mean, alphacuts=TRUE, breakpoints = breakpoints)
TE11 <- c(E11[1,1], E11[(breakpoints+1),1], E11[(breakpoints+1),2], E11[1,2])
if (is.unsorted(TE11) == TRUE){TE11 <- sort(TE11)}
E11S <- E11S + (PiecewiseLinearFuzzyNumber(TE11[1], TE11[2], TE11[3], TE11[4]))*(PiecewiseLinearFuzzyNumber(TE11[1], TE11[2], TE11[3], TE11[4]))
T11 <- Fuzzy.Difference(E11.part, T11.mean, alphacuts=TRUE, breakpoints = breakpoints)
TT11 <- c(T11[1,1], T11[(breakpoints+1),1], T11[(breakpoints+1),2], T11[1,2])
if (is.unsorted(TT11) == TRUE){TT11 <- sort(TT11)}
T11S <- T11S + (PiecewiseLinearFuzzyNumber(TT11[1], TT11[2], TT11[3], TT11[4]))*(PiecewiseLinearFuzzyNumber(TT11[1], TT11[2], TT11[3], TT11[4]))
}
H11 <- Fuzzy.Difference(E11.mean, T11.mean, alphacuts=TRUE, breakpoints = breakpoints)
TH11 <- c(H11[1,1], H11[(breakpoints+1),1], H11[(breakpoints+1),2], H11[1,2])
if (is.unsorted(TH11) == TRUE){TH11 <- sort(TH11)}
H11S <- ((PiecewiseLinearFuzzyNumber(TH11[1], TH11[2], TH11[3], TH11[4]))*(PiecewiseLinearFuzzyNumber(TH11[1], TH11[2], TH11[3], TH11[4])))*ni[u-1,v]
SE = SE + E11S
ST = ST + T11S
SH = SH + H11S
H11 <- NULL
T11 <- NULL
E11 <- NULL
}
E[u-1,,1] <- alphacut(SE, seq(0,1,1/breakpoints))[,1]
E[u-1,,2] <- rev(alphacut(SE, seq(0,1,1/breakpoints))[,2])
T[u-1,,1] <- alphacut(ST, seq(0,1,1/breakpoints))[,1]
T[u-1,,2] <- rev(alphacut(ST, seq(0,1,1/breakpoints))[,2])
H[u-1,,1] <- alphacut(SH, seq(0,1,1/breakpoints))[,1]
H[u-1,,2] <- rev(alphacut(SH, seq(0,1,1/breakpoints))[,2])
df[u-1,] <- r[u-1,] - 1
}
Sum.T <- H+E
if(length(grep(':',colnames(data))) != 0){
Ht<- array(rep(0), dim=c(nc,b,2))
SSE <- array(rep(0), dim=c(length(grep(':',colnames(data))),b,2))
SSMAIN <- array(rep(0), dim=c(length(grep(':',colnames(data))),b,2))
Y.Square.obs <- array(rep(0), dim=c(nrow(data),b,2))
for(w in 1:nrow(data)){
Y.obs <- c(Y[w,1,1], Y[w,(breakpoints+1),1], Y[w,1,2], Y[w,(breakpoints+1),2])
if (is.unsorted(Y.obs) == TRUE){Y.obs <- sort(Y.obs)}
Y.obs <- (PiecewiseLinearFuzzyNumber(Y.obs[1], Y.obs[2], Y.obs[3], Y.obs[4]))*(PiecewiseLinearFuzzyNumber(Y.obs[1], Y.obs[2], Y.obs[3], Y.obs[4]))
Y.Square.obs[w,,1] <- alphacut(Y.obs, seq(0,1,1/breakpoints))[,1]
Y.Square.obs[w,,2] <- rev(alphacut(Y.obs, seq(0,1,1/breakpoints))[,2])
}
for(u in grep(':',colnames(data))){
S5 <- 0
S3 <- matrix(rep(0), nrow = b, ncol=2)
S4 <- matrix(rep(0), nrow = b, ncol=2)
SS5 <- matrix(rep(0), nrow = b, ncol=2)
SSInt <- matrix(rep(0), nrow = b, ncol=2)
init <- match(colnames(data)[u], colnames(data)[grep(':',colnames(data))])
j.1 <- match(strsplit(colnames(data)[grep(':',colnames(data))][init], ":")[[1]][1], colnames(data))
k.1 <- match(strsplit(colnames(data)[grep(':',colnames(data))][init], ":")[[1]][2], colnames(data))
S3[,1] <- H[j.1-1,,1] + colSums(Y.Square.obs)[,1]/nt[u-1,1]
S3[,2] <- rev(H[j.1-1,,2] + colSums(Y.Square.obs)[,2]/nt[u-1,1])
S4[,1] <- H[k.1-1,,1] + colSums(Y.Square.obs)[,1]/nt[u-1,1]
S4[,2] <- rev(H[k.1-1,,2] + colSums(Y.Square.obs)[,2]/nt[u-1,1])
for (v in 1:(r[(j.1-1),1]*r[(k.1-1),1])){
mat <- Y.Square.obs[which((data[,j.1]==as.numeric(expand.grid(levels(data[,j.1]), levels(data[,k.1]))[v,1]))&(data[,k.1]==as.numeric(expand.grid(levels(data[,j.1]), levels(data[,k.1]))[v,2]))),,]
SS5[,1] <- colSums(mat)[,1]/nrow(mat)
SS5[,2] <- rev(colSums(mat)[,2]/nrow(mat))
S5 <- S5 + SS5
}
SSquare <- cbind(colSums(Y.Square.obs)[,1], rev(colSums(Y.Square.obs)[,2]))/nt[u-1,1]
SSInt <- Fuzzy.Difference(S5+SSquare,S3+S4, alphacuts = TRUE, breakpoints = breakpoints)
Ht[u-1,,1] <- SSInt[,1]
Ht[u-1,,2] <- rev(SSInt[,2])
SSE[init,,1] <- (Fuzzy.Difference(cbind(T[u-1,,1], rev(T[u-1,,2]))+SSquare, S5, alphacuts = TRUE, breakpoints = breakpoints))[,1]
SSE[init,,2] <- rev(Fuzzy.Difference(cbind(T[u-1,,1], rev(T[u-1,,2]))+SSquare, S5, alphacuts = TRUE, breakpoints = breakpoints)[,2])
SSMAIN[init,,1] <- (Fuzzy.Difference(S3+S4, 2*SSquare, alphacuts = TRUE, breakpoints = breakpoints))[,1]
SSMAIN[init,,2] <- rev(Fuzzy.Difference(S3+S4, 2*SSquare, alphacuts = TRUE, breakpoints = breakpoints)[,2])
df[u-1,] <- (r[j.1-1,1] - 1) * (r[k.1-1,1] - 1)
}
H[(grep(':',colnames(data))[1]-1):nc,,1] <- Ht[(grep(':',colnames(data))[1]-1):nc,,1]
H[(grep(':',colnames(data))[1]-1):nc,,2] <- Ht[(grep(':',colnames(data))[1]-1):nc,,2]
}
if (is.balanced(ni[1:(ncol(mf)-1),]) == FALSE){
seq <- SEQ.ORDERING.APPROXIMATION(scope = formula, data = data, f.response = Y)
E[1:(ncol(data)-1),,] <- seq$E.cond
H[1:(ncol(data)-1),,1] <- rbind(H[1,,1],seq$H.cond[,,1])
H[1:(ncol(data)-1),,2] <- rbind(H[1,,2],seq$H.cond[,,2])
coef.model <- coefficients(seq)
predicted.values <- fitted.values(seq)
residuals <- residuals(seq)
} else{
coef<- array(rep(0), dim=c(nc,max(r)*b,2))
for(u in 2:ncol(data)){
for(v in 1:r[u-1,1]){
Part.mean <- cbind(mat.means[u-1,((b*(v-1)+1):(b*(v-1)+b)),1], rev(mat.means[u-1,((b*(v-1)+1):(b*(v-1)+b)),2]))
colnames(Part.mean) <- c("L", "U")
Y..mean <- cbind(Y..[u-1,,1], rev(Y..[u-1,,2]))
colnames(Y..mean) <- c("L","U")
coef[u-1,((b*(v-1)+1):(b*(v-1)+b)),1] <- Fuzzy.Difference(Part.mean, Y..mean, alphacuts = TRUE, breakpoints = breakpoints)[,1]
coef[u-1,((b*(v-1)+1):(b*(v-1)+b)),2] <- rev(Fuzzy.Difference(Part.mean, Y..mean, alphacuts = TRUE, breakpoints = breakpoints)[,2])
}
}
coef.var <- coef
coef.model <- array(rep(0), dim=c((nc + 1),b,2))
coef.model[1,,1] <- Y..[1,,1]
coef.model[1,,2] <- Y..[1,,2]
coef.model[2:(nrow(coef.var)+1),,1] <- coef.var[1:nrow(coef.var),((b+1):(2*b)),1]
coef.model[2:(nrow(coef.var)+1),,2] <- coef.var[1:nrow(coef.var),((b+1):(2*b)),2]
if(length(grep(':',colnames(data))) != 0){
val.int <- t(t(grep(":",colnames(data))))
coef.int <- array(rep(0), dim=c(length(val.int),b,2))
Scoefi1 <- 0
Scoefi2 <- 0
e=1
for(l in val.int[order(-val.int)]){
for(v in 1:r[u-1,1]){
Scoefi1 <- Scoefi1 + coef[l-1,((b*(v-1)+1):(b*(v-1)+b)),1]
Scoefi2 <- Scoefi2 + coef[l-1,((b*(v-1)+1):(b*(v-1)+b)),2]
}
coef.int[(length(val.int)-e+1),,1] <- Scoefi1
coef.int[(length(val.int)-e+1),,2] <- Scoefi2
coef.var <- coef.var[-(l-1),,]
e=e+1
}
coef.model[(nrow(coef.var)+2):nrow(coef.model),,1] <- coef.int[1:nrow(coef.int),,1]
coef.model[(nrow(coef.var)+2):nrow(coef.model),,2] <- coef.int[1:nrow(coef.int),,2]
}
mat.matrix <- data.frame(as.matrix(model.matrix(formula, data)))
predicted.values <- fuzzy.predicted.values(dataset = mat.matrix, coef.model = coef.model)
residuals <- fuzzy.residuals(data.fuzzified, predicted.values)
}
p <- nc
CSH <- cbind(colSums(H[,,1]), rev(colSums(H[,,2])))/p
if (is.alphacuts(CSH)==FALSE){
    CSH <- sort(c(CSH[1,1], CSH[breakpoints+1,1], CSH[1,2], CSH[breakpoints+1,2]))
CSH <- TrapezoidalFuzzyNumber(CSH[1], CSH[2], CSH[3], CSH[4])}
  CST <- Sum.T[which.max(Sum.T[, breakpoints + 1, 2]), , ]
CST[,2] <- rev(CST[,2])
ME.full <- (alphacut(Fuzzy.Difference(CST, CSH), seq(0,1,1/breakpoints)))/(max(nt)-1-p)
Ft <- qf(1-sig, df1=r-1, df2=max(nt)-sum(df)-1)
CSE <- ME.full
pvalue.manova.model <- pf(CSE[,2], CSH[,2], df1 = p, df2 = max(nt)-sum(df)-1)
plot(pvalue.manova.model, seq(0,1,1/breakpoints), 'l')
pvalue.manova <- array(rep(0), dim=c(nrow(H), breakpoints+1, 2))
MH <- H
ME <- E
for(z in 1:nrow(H)){
MH[z,,1] <- H[z,,1] / df[z,1]
MH[z,,2] <- H[z,,2] / df[z,1]
ST <- cbind(Sum.T[z,,1], rev(Sum.T[z,,2]))
if (is.alphacuts(ST)==FALSE){
      ST <- sort(c(ST[1,1], ST[breakpoints+1,1], ST[1,2], ST[breakpoints+1,2]))
ST <- TrapezoidalFuzzyNumber(ST[1], ST[2], ST[3], ST[4])}
Ft <- qf(1-sig, df1=p, df2=max(nt)-sum(df)-1)
ME[z,,1] <- (alphacut(Fuzzy.Difference(ST, CSH), seq(0,1,1/breakpoints))[,1]/(max(nt)-1-p))*Ft[1]
ME[z,,2] <- (rev(alphacut(Fuzzy.Difference(ST, CSH), seq(0,1,1/breakpoints))[,2])/(max(nt)-1-p))*Ft[1]
H0 <- cbind(MH[z,,1], rev(MH[z,,2]))
colnames(H0) <- c("L","U")
H1 <- cbind(MH[z,,1]+1, rev(MH[z,,2]+1))
colnames(H1) <- c("L","U")
t <- cbind(ME[z,,1], rev(ME[z,,2]))
colnames(t) <- c("L","U")
t.L <- t[,"L"]
t.U <- t[,"U"]
H0.L <- H0[,"L"]
H0.U <- H0[,"U"]
pvalue.manova[z,,1] = (pf( t.L, H0.U, df1 = r-1, df2 =max(nt)-sum(df)-1))
pvalue.manova[z,,2] = rev(pf( sort(t.U, decreasing = TRUE), sort(H0.U), df1 = r-1, df2 =max(nt)-sum(df)-1))
}
if (is.na(index.var)==FALSE){
F.MSTR <- MH
F.MSE <- ME
if (plot == TRUE){
plot(F.MSTR[index.var,,1], seq(0,1,1/breakpoints), type='l', xlim=c(min(F.MSTR[index.var,,],F.MSE[index.var,,]), max(F.MSTR[index.var,,],F.MSE[index.var,,])), col = 'blue', xlab = "x", ylab = "alpha", main="Fuzzy decisions - treatments vs. residuals")
opar <- par(new=TRUE, no.readonly = TRUE)
on.exit(par(opar))
plot(rev(F.MSTR[index.var,,2]), seq(0,1,1/breakpoints), type='l', xlim=c(min(F.MSTR[index.var,,],F.MSE[index.var,,]), max(F.MSTR[index.var,,],F.MSE[index.var,,])), col = 'blue', xlab = "x", ylab = "alpha")
opar <- par(new=TRUE, no.readonly = TRUE)
on.exit(par(opar))
lines(c(F.MSTR[index.var,breakpoints+1,1],F.MSTR[index.var,1,2]), c(1,1), col = "blue")
opar <- par(new=TRUE, no.readonly = TRUE)
on.exit(par(opar))
plot(F.MSE[index.var,,1], seq(0,1,1/breakpoints), type='l', xlim=c(min(F.MSTR[index.var,,],F.MSE[index.var,,]), max(F.MSTR[index.var,,],F.MSE[index.var,,])), col = 'red', xlab = "x", ylab = "alpha")
opar <- par(new=TRUE, no.readonly = TRUE)
on.exit(par(opar))
plot(rev(F.MSE[index.var,,2]), seq(0,1,1/breakpoints), type='l', xlim=c(min(F.MSTR[index.var,,],F.MSE[index.var,,]), max(F.MSTR[index.var,,],F.MSE[index.var,,])), col = 'red', xlab = "x", ylab = "alpha")
opar <- par(new=TRUE, no.readonly = TRUE)
on.exit(par(opar))
lines(c(F.MSE[index.var,breakpoints+1,1],F.MSE[index.var,1,2]), c(1,1), col = "red")
legend("bottomright", legend=c("F.MSE", "F.MSTR"), col=c("red", "blue"), lty=1)
}
Surf.MSTR <- abs(integrate.num(cut=F.MSTR[index.var,,1], alpha=seq(0,1,1/breakpoints), int.method)) + abs(integrate.num(cut=F.MSTR[index.var,,2], alpha=seq(0,1,1/breakpoints), int.method))
Surf.MSE <- abs(integrate.num(cut=F.MSE[index.var,,1], alpha=seq(0,1,1/breakpoints), int.method)) + abs(integrate.num(cut=F.MSE[index.var,,2], alpha=seq(0,1,1/breakpoints), int.method))
convicTR <- Surf.MSTR/(Surf.MSE+Surf.MSTR)
convicE <- Surf.MSE/(Surf.MSE+Surf.MSTR)
if(convicTR >= convicE){
decision <- list(noquote(paste0("Variable index: ", index.var, ". Decision: The null hypothesis (H0) is rejected at the ", sig, " significance level. ")),
noquote(paste0(" Degree of conviction (treatments of ",colnames(mf)[2], ") = ", round(convicTR,5), ".")),
noquote(paste0(" Degree of conviction (residuals) ", round(convicE,5), ".")))
} else {
decision <- list(noquote(paste0("Variable index: ", index.var, ". Decision: The null hypothesis (H0) is not rejected at the ", sig, " significance level. ")),
noquote(paste0(" Degree of conviction (treatments of ",colnames(mf)[2], ") = ", round(convicTR,5), ".")),
noquote(paste0(" Degree of conviction (residuals) ", round(convicE,5), ".")))
}
print(decision)
}
resultFMANOVA = list(formula = formula,
terms = colnames(data),
nlevels = r,
rank = nc,
table = ni,
treatments.SSQ = H,
F.coefficients = F,
pvalue.manova = pvalue.manova,
coefficients = coef.model,
pvalues.coefficients = pvalue.manova,
residuals = residuals,
fitted.values = predicted.values,
total.SSQ.model = CST,
df.total = max(nt)-1,
treatments.SSQ.model = CSH,
error.MSSQ.model = CSE,
df.residuals = max(nt)-1-sum(df),
treatment.SSQ.vars = MH,
df.treatments = df,
residuals.SSQ.vars = ME,
                       F.model = Ft,
pvalue.model = pvalue.manova.model,
int.res = if(length(grep(':',colnames(data))) != 0){list(error.SSQ.int = SSE, main.SSQ.int = SSMAIN)} else{NULL}
)
} |
GeneratePossibleSubsequences <- function(vin, ignoreLenOneSubseq=FALSE){
lout<-list()
if (ignoreLenOneSubseq) {
for(i in 2:length(vin)){
for(j in 1:(length(vin)-(i-1))){
lout[[length(lout)+1]] <- vin[j:(j+i-1)]
}
}
} else {
for(i in 1:length(vin)){
for(j in 1:(length(vin)-(i-1))){
lout[[length(lout)+1]] <- vin[j:(j+i-1)]
}
}
}
lout
} |
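# Illustrative usage: all contiguous subsequences of a vector, shortest first.
# GeneratePossibleSubsequences(c("a", "b", "c"))
# returns list("a", "b", "c", c("a","b"), c("b","c"), c("a","b","c"))
# GeneratePossibleSubsequences(1:3, ignoreLenOneSubseq = TRUE) drops the
# single-element subsequences.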
Person <- setRefClass("Person",
fields = list(ID = 'integer',
raw_responses = 'character',
responses = 'integer',
items_answered = 'integer',
thetas = 'matrix',
thetas_history = 'matrix',
thetas_SE_history = 'matrix',
info_thetas = 'matrix',
demographics = 'data.frame',
item_time = 'numeric',
valid_item = 'logical',
state = 'list',
login_name = 'character',
score = 'logical',
true_thetas = 'numeric',
info_thetas_cov = 'matrix',
clientData = 'list',
terminated_sucessfully = 'logical',
classify_decision='character',
password_attempts = 'integer'),
methods = list(
initialize = function(nfact, nitems, thetas.start_in, score,
theta_SEs, CustomUpdateThetas, Info_thetas_cov, ID = 0L){
'Initialize the person object given background information'
if(missing(nfact)){
} else {
ID <<- ID
password_attempts <<- 0L
true_thetas <<- numeric(0L)
raw_responses <<- as.character(rep(NA, nitems))
responses <<- as.integer(rep(NA, nitems))
valid_item <<- rep(TRUE, nitems)
items_answered <<- as.integer(rep(NA, nitems))
thetas <<- matrix(numeric(nfact), nrow=1L)
thetas_SE_history <<- matrix(theta_SEs, 1L)
score <<- score
item_time <<- numeric(nitems)
login_name <<- character(0L)
if(!is.null(thetas.start_in) && !is.matrix(thetas.start_in))
thetas <<- matrix(thetas.start_in, nrow=1L)
thetas_history <<- matrix(thetas, 1L, nfact)
info_thetas <<- matrix(0, nfact, nfact)
info_thetas_cov <<- Info_thetas_cov
terminated_sucessfully <<- FALSE
classify_decision <<- rep('no decision', nfact)
}
})
)
Person$methods(
Update.info_mats = function(design, test){
'Update the information matrices for previous answered multidimensional IRT models'
set <- c('Drule', 'Trule', 'Erule', 'Wrule', 'Arule', 'APrule',
'DPrule', 'TPrule', 'EPrule', 'WPrule', 'custom')
if(test@nfact > 1L && design@criteria %in% set){
responses2 <- responses
responses2[design@items_not_scored] <- NA
pick <- which(!is.na(responses2))
infos <- lapply(pick, function(x, thetas)
FI(mirt::extract.item(test@mo, x), Theta=thetas), thetas=thetas)
tmp <- matrix(0, nrow(infos[[1L]]), ncol(infos[[1L]]))
for(i in seq_len(length(infos)))
tmp <- tmp + infos[[i]]
info_thetas <<- tmp
}
},
Update_thetas = function(theta, theta_SE){
if(!is.matrix(theta)) theta <- matrix(theta, 1L)
thetas <<- theta
thetas_SE_history <<- rbind(thetas_SE_history, theta_SE)
thetas_history <<- rbind(thetas_history, thetas)
}
) |
`CEMC` <-
function(input,space=NULL,k=NULL,dm="k",kp=0.5,N=NULL, N1=NULL,rho=0.1,
e1=0.1,e2=1,w=0.5,b=0,init.m="p",init.w=0, d.w=NULL,input.par=NULL,
extra=0){
if (missing(input))
stop("You need to input the top-k lists to be aggregated")
if (!is.null(input.par)) {
for (p.n in names(input.par))
assign(p.n,input.par[1,p.n])
}
time.start <- proc.time()
topK.input <- input
space.input <- space
a <- length(input)
k.a <- numeric(a)
for (i in 1:a)
k.a[i] <- length(input[[i]])
item <- sort(unique(unlist(input)))
n <- length(item)
if (is.null(k))
k <- n
else if (k>n)
k <- n
if (extra > 0) {
n <- n + extra
k <- k + extra
}
rank.a <- matrix(0,nrow=n,ncol=a)
if (is.null(space)) {
for (i in 1:a) {
input[[i]] <- match(input[[i]],item)
rank.a[,i] <- 1+k.a[i]
rank.a[input[[i]],i] <- 1:k.a[i]
}
}
else {
for (i in 1:a) {
input[[i]] <- match(input[[i]],item)
space[[i]] <- match(space[[i]],item)
rank.a[space[[i]],i] <- 1+k.a[i]
rank.a[input[[i]],i] <- 1:k.a[i]
}
}
if (is.null(N))
N <- 50 * n
if (is.null(N1))
N1 <- round(0.1 * N)
p <- init.p(input,n,k,init.m,init.w)
p2 <- p
y <- 0
samp2 <- TopKSample.c(.blur(p,b),N1)[1:k,]
iter <- 0
y.count <- 0
if (is.null(d.w))
d.w <- rep(1,a)
repeat {
iter <- iter + 1
samp <- cbind(samp2,TopKSample.c(.blur(p,b),N-N1)[1:k,])
rank.b <- matrix(k+1,nrow=n,ncol=N)
rank.b[cbind(as.vector(samp),rep(1:N,each=k))] <- 1:k
dist <- numeric(N)
for (i in 1:a) {
if (dm=="s")
dist <- dist + Spearman(rank.a[,i],rank.b,k.a[i],k,n) * d.w[i]
else if (dm=="k")
dist <- dist + Kendall2Lists.c(rank.a[,i],rank.b,k.a[i],k,n,kp) * d.w[i]
else stop("Invalid distance measure")
}
y2 <- sort(dist)[round(N*rho)]
samp2 <- samp[,order(dist)[1:N1]]
samp3 <- samp[,dist<=y2]
n.samp3 <- dim(samp3)[2]
for (i in 1:k)
for (j in 1:n)
p2[j,i] <- sum(samp3[i,]==j)/n.samp3
if (k < n)
p2[,k+1] <- 1 - apply(p2[,1:k],1,sum)
if (abs(y-y2) < e2) y.count <- y.count + 1
else y.count <- 0
y <- y2
p <- p*(1-w)+p2*w
if (y.count >= 5) break
}
result <- samp[,order(dist)[1]]
rank.result <- rep(k+1,n)
rank.result[result] <- 1:k
if (k < n)
dimnames(p) <- list(c(item,rep(0,extra)),1:(k+1))
else
dimnames(p) <- list(c(item,rep(0,extra)),1:k)
diff.p <- mean(abs(p[result,1:k] - diag(k)))
uc <- mean(1-p[cbind(result,1:k)])
result.ori <- item[result[result <= n-extra]]
dist.s <- 0
dist.k <- 0
for (i in 1:a) {
dist.s <- dist.s + Spearman(rank.a[,i],rank.result,k.a[i],k,n) * d.w[i]
dist.k <- dist.k + Kendall2Lists.c(rank.a[,i],rank.result,k.a[i],k,n,kp) *d.w[i]
}
time.end <- proc.time()
time.use <- sum((time.end-time.start)[-3])
list(TopK=result.ori,ProbMatrix=p,
input.par=data.frame(k=k,dm=I(dm),kp=kp,N=N,N1=N1,extra=extra,
rho=rho,e1=e1,e2=e2,w=w,b=b,init.m=I(init.m),init.w=init.w))
}
`TopKSample` <-
function(p,N) {
k <- dim(p)[2]
n <- dim(p)[1]
item <- 1:n
samp <- matrix(0,nrow=k,ncol=N)
i <- 1
fail <- FALSE
repeat {
samp[1,i] <- which(rmultinom(1,1,p[,1])==1)
for (j in 2:k) {
remain <- item[-samp[1:(j-1),i]]
if (sum(p[remain,j]) == 0) {
fail <- TRUE
break
}
samp[j,i] <- remain[which(rmultinom(1,1,p[remain,j])==1)]
}
if (!fail) i <- i + 1
if (i == N+1) break
}
samp
}
`TopKSample.c` <-
function(p,N) {
k <- dim(p)[2]
n <- dim(p)[1]
samp <- matrix(0,nrow=k,ncol=N)
seed <- round(runif(1,10000,3000000))
samp[] <- .C("topksamplec",as.double(p),as.integer(k),as.integer(n),
as.integer(N),samp=as.integer(samp),as.integer(seed))$samp
samp
}
`.blur` <-
function(p,b) {
n <- dim(p)[1]
k <- dim(p)[2]
p2 <- matrix(0,nrow=n,ncol=k)
p2[,1] <- p[,1]*(1-b) + p[,2]*b
for (i in 2:(k-1))
p2[,i] <- p[,i-1]*b+p[,i]*(1-2*b)+p[,i+1]*b
p2[,k] <- p[,k]*(1-b) + p[,k-1]*b
p2
}
`.cc.rank` <-
function(input.list) {
n.list <- length(input.list)
nn.list <- unlist(lapply(input.list, length))
item <- unique(unlist(input.list))
n.item <- length(item)
score <- matrix(0,nrow=n.item, ncol=n.list)
for (i in 1:n.list) {
list.code <- match(input.list[[i]],item)
score[,i] <- nn.list[i]+1
score[list.code,i] <- 1:nn.list[i]
score[,i] <- score[,i]/nn.list[i]
}
c.score <- apply(score,1,sum)
item[order(c.score)]
}
`init.p` <-
function(topK,n,k,init.m="p",init.w=0) {
if (k < n) {
p.u <- matrix(1/n,nrow=n,ncol=k+1)
p.u[,k+1] <- 1-k/n
}
else {
p.u <- matrix(1/n,nrow=n,ncol=k)
}
a <- length(topK)
if (init.m %in% c("p","s")) {
p.e <- matrix(0,nrow=n,ncol=n)
for (i in 1:a) {
k.a <- length(topK[[i]])
p.e[cbind(topK[[i]],1:k.a)] <- 1/a + p.e[cbind(topK[[i]],1:k.a)]
if (k.a < n) {
p.e[(1:n)[-match(topK[[i]],1:n)],(k.a+1):n] <-
1/((n-k.a)*a) + p.e[(1:n)[-match(topK[[i]],1:n)],(k.a+1):n]
}
}
}
else if (init.m %in% c("cp","cs")) {
p.e <- matrix(0,nrow=n,ncol=n)
c.list <- .cc.rank(topK)
p.e[cbind(c.list,1:n)] <- 1
}
else stop("Invalid initialization method")
if (init.m %in% c("s","cs")) {
dist <- 0
for (i in 1:(a-1)) {
for (j in (i+1):a) {
dist <- dist + Spearman(topK[[i]],topK[[j]],n)
}
}
dist.avg <- dist/(a*(a-1)/2)/n
normal.c <- sum(dnorm(1:n,mean=(n+1)/2,sd=dist.avg/2))
weight <- matrix(0,nrow=n,ncol=n)
for (i in 1:n) {
weight[,i] <- dnorm(1:n,mean=i,sd=dist.avg/2)/normal.c
weight[i,i] <- weight[i,i]+(1-sum(weight[,i]))
}
p.e <- p.e %*% weight
}
if (k < n)
p.e <- cbind(p.e[,1:k],1-apply(p.e[,1:k],1,sum))
p.u*(1-init.w) + p.e*init.w
}
`Kendall2Lists` <-
function(rank.a,rank.b,k.a,k.b,n,p=0) {
if (is.vector(rank.b)) {
n.b <- 1
}
else {
n.b <- ncol(rank.b)
}
dist <- numeric(n.b)
d.a <- rank.a - rep(rank.a,each=n)
for (i in 1:n.b) {
if (n.b == 1) d.b <- rank.b - rep(rank.b,each=n)
else d.b <- rank.b[,i] - rep(rank.b[,i],each=n)
count <- table(c(sign(d.a)*sign(d.b),-1,0,1))
dist[i] <- (count["-1"]-1)*1/2 + (count["1"]-1)*0/2 + (count["0"]-n-1)*p/2
}
dist
}
`Kendall2Lists.c` <-
function(rank.a,rank.b,k.a,k.b,n,p=0) {
if (is.vector(rank.b))
n.b <- 1
else
n.b <- ncol(rank.b)
dist <- numeric(n.b)
.C("kendall2c",as.integer(rank.a),as.integer(rank.b),as.integer(n),
as.integer(n.b),as.integer(k.a),as.integer(k.b),
as.double(p),dist=as.double(dist))$dist
}
`Spearman` <-
function(rank.a,rank.b,k.a,k.b,n) {
if (is.vector(rank.b)) {
n.b <- 1
}
else {
n.b <- ncol(rank.b)
}
if (n.b == 1)
d <- sum(abs(rank.a-rank.b)*((rank.a<=k.a) | (rank.b<=k.b)))
else
d <- apply(abs(rank.a-rank.b)*((rank.a<=k.a) | (rank.b<=k.b)),2,sum)
d
} |
rstandard.rma.uni <- function(model, digits, type="marginal", ...) {
mstyle <- .get.mstyle("crayon" %in% .packages())
.chkclass(class(model), must="rma.uni", notav=c("robust.rma", "rma.uni.selmodel"))
na.act <- getOption("na.action")
on.exit(options(na.action=na.act), add=TRUE)
if (!is.element(na.act, c("na.omit", "na.exclude", "na.fail", "na.pass")))
stop(mstyle$stop("Unknown 'na.action' specified under options()."))
type <- match.arg(type, c("marginal", "conditional"))
x <- model
if (type == "conditional" && (!is.null(x$weights) || !x$weighted))
stop(mstyle$stop("Extraction of conditional residuals not available for models with non-standard weights."))
if (missing(digits)) {
digits <- .get.digits(xdigits=x$digits, dmiss=TRUE)
} else {
digits <- .get.digits(digits=digits, xdigits=x$digits, dmiss=FALSE)
}
options(na.action="na.omit")
H <- hatvalues(x, type="matrix")
options(na.action = na.act)
ImH <- diag(x$k) - H
if (type == "marginal") {
ei <- c(x$yi - x$X %*% x$beta)
ei[abs(ei) < 100 * .Machine$double.eps] <- 0
ve <- ImH %*% tcrossprod(x$M,ImH)
sei <- sqrt(diag(ve))
}
if (type == "conditional") {
li <- x$tau2 / (x$tau2 + x$vi)
pred <- rep(NA_real_, x$k)
for (i in seq_len(x$k)) {
Xi <- matrix(x$X[i,], nrow=1)
pred[i] <- li[i] * x$yi[i] + (1 - li[i]) * Xi %*% x$beta
}
ei <- x$yi - pred
sei <- sqrt(x$vi^2 * 1/(x$vi + x$tau2) * (1 - diag(H)))
}
resid <- rep(NA_real_, x$k.f)
seresid <- rep(NA_real_, x$k.f)
stresid <- rep(NA_real_, x$k.f)
resid[x$not.na] <- ei
seresid[x$not.na] <- sei
stresid[x$not.na] <- ei / sei
if (na.act == "na.omit") {
out <- list(resid=resid[x$not.na], se=seresid[x$not.na], z=stresid[x$not.na])
out$slab <- x$slab[x$not.na]
}
if (na.act == "na.exclude" || na.act == "na.pass") {
out <- list(resid=resid, se=seresid, z=stresid)
out$slab <- x$slab
}
if (na.act == "na.fail" && any(!x$not.na))
stop(mstyle$stop("Missing values in results."))
out$digits <- digits
class(out) <- "list.rma"
return(out)
} |
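# Illustrative usage (sketch, assuming the 'metafor' package; dat.bcg and
# escalc() are from metafor's standard example):
# dat <- metafor::escalc(measure = "RR", ai = tpos, bi = tneg,
#                        ci = cpos, di = cneg, data = metafor::dat.bcg)
# res <- metafor::rma(yi, vi, data = dat)
# rstandard.rma.uni(res, type = "conditional")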
is_linear_sequence <- function(x, by=NULL,...) UseMethod("is_linear_sequence")
is_lin_num_seq <- function(x, begin=NULL, end=NULL, sort=TRUE, tol=1e-8,...){
if ( length(x) <= 2 && all(is.na(x)) )
return(is.null(begin) && is.null(end))
if (anyNA(x)) return(NA)
!anyNA(x) &&
(is.null(begin) || abs(begin - min(x)) <= tol) &&
(is.null(end) || abs(end - max(x)) <= tol) &&
( length(x) <= 1 || { if(sort) x <- sort(x)
d <- diff(x)
all(abs(d - d[1]) <= tol)
})
}
as_int <- function(x){
if( is.null(x)) NULL else as.integer(x)
}
as_num <- function(x){
if (is.null(x)) NULL else as.numeric(x)
}
all_lin_num_seq <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, tol=1e-8){
if (length(by) == 0){
is_lin_num_seq(x, begin=begin, end=end, sort=sort, tol=tol)
} else {
all(tapply(x, INDEX=by, FUN=is_lin_num_seq, begin=begin, end=end, sort=sort, tol=tol))
}
}
is_linear_sequence.numeric <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, tol = 1e-8,...){
  all_lin_num_seq(x, by=by, begin=begin, end=end, sort=sort, tol=tol)
}
is_linear_sequence.Date <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE,...){
all_lin_num_seq(as.integer(x), by=by, begin=as_int(begin), end=as_int(end), sort=sort, tol=0)
}
is_linear_sequence.POSIXct <- function(x, by=NULL , begin=NULL, end=NULL, sort = TRUE, tol=1e-6,...){
all_lin_num_seq(as.numeric(x), by=by, begin=as_num(begin), end=as_num(end), sort=sort, tol=tol)
}
is_linear_sequence.character <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, format="auto",...){
if ( format == "auto" ){
y <- period_to_int(x, by=by)
begin <- period_to_int(begin)
end <- period_to_int(end)
is_linear_sequence.numeric(y, by=by, begin=begin, end=end, sort=sort, tol=0,...)
} else {
y <- strptime(x, format=format)
begin <- strptime(begin, format=format)
end <- strptime(end, format=format)
is_linear_sequence.POSIXct(y, by=by, begin=begin, end=end, sort=sort, tol=1e-6,...)
}
}
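# Illustrative usage:
# is_linear_sequence(c(2010, 2012, 2014))              # TRUE, constant step of 2
# is_linear_sequence(c(2010, 2011, 2013))              # FALSE
# is_linear_sequence(c("2020Q1", "2020Q2", "2020Q3"))  # TRUE, auto-detected quarters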
in_linear_sequence <- function(x, ...) UseMethod("in_linear_sequence")
in_lin_num_seq <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, tol=1e-8,...){
rep(is_lin_num_seq(x, begin=begin, end=end, sort=sort, tol=tol), length(x))
}
in_linear_sequence.character <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, format="auto",...){
if ( format == "auto" ){
y <- period_to_int(x,by=by)
begin <- period_to_int(begin)
end <- period_to_int(end)
in_linear_sequence.numeric(y, by=by, begin=begin, end=end, sort=sort, tol=0,...)
} else {
y <- strptime(x, format=format)
begin <- strptime(begin, format=format)
end <- strptime(end, format=format)
in_linear_sequence.POSIXct(y, by=by, begin=begin, end=end, sort=sort, tol=1e-6,...)
}
}
in_linear_sequence.numeric <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, tol=1e-8,...){
if (is.null(by)){
in_lin_num_seq(as.integer(x), begin=as_int(begin), end=as_int(end), sort=sort, tol=tol)
} else {
result <- tapply(as.integer(x), by, in_lin_num_seq, begin=as_int(begin), end=as_int(end), sort=sort, tol=tol)
unsplit(result, by)
}
}
in_linear_sequence.Date <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE,...){
  in_linear_sequence.numeric(as.integer(x), by=by, begin=as_int(begin), end=as_int(end), sort=sort, tol=0)
}
in_linear_sequence.POSIXct <- function(x, by=NULL, begin=NULL, end=NULL, sort=TRUE, tol=1e-6,...){
  in_linear_sequence.numeric(as.numeric(x), by=by, begin=as_num(begin), end=as_num(end), sort=sort, tol=tol)
}
period_type <- function(x, undefined=NA_character_){
if ( all( grepl("^[12][0-9]{3}$",x) ) ) return("annual")
if ( all( grepl("^[12][0-9]{3}-?Q[1-4]$",x) ) ) return("quarterly")
if ( all( grepl("^[12][0-9]{3}M[01][0-9]$",x) ) ) return("monthly")
warning("Cannot detect period notation: undefined period type or different period types in single column.", call.=FALSE)
undefined
}
period_to_int <- function(x, by=NULL){
if (is.null(x)) return(NULL)
f <- function(xx){
from <- period_type(xx)
if (is.na(from)) return(rep(NA, length(xx)))
if (from == "annual"){
res <- as.numeric(xx)
}
if (from == "quarterly" ){
L <- strsplit(xx,"-?Q")
year <- as.numeric(sapply(L, `[[`,1))
quarter <- as.numeric(sapply(L, `[[`, 2))
res <- 4*year + quarter-1
}
if ( from == "monthly" ){
L <- strsplit(xx, "M")
year <- as.numeric( sapply(L,`[[`,1) )
month <- as.numeric( sapply(L, `[[`, 2) )
      res <- 12*year + month - 1
}
res
}
if (is.null(by)) by <- character(length(x))
unsplit(lapply(split(x, f=by), f), f=by)
}
in_range <- function(x, min, max,...) UseMethod("in_range")
in_range.default <- function(x, min, max, strict=FALSE, ...){
if (strict) x > min & x < max
else x >= min & x <= max
}
in_range.character <- function(x, min, max, strict=FALSE, format = "auto",...){
if (is.null(format))
in_range.default(x=x, min=min, max=max, strict=strict, ...)
else if ( format == "auto" ){
y <- period_to_int(x, by=NULL)
ymin <- period_to_int(min)
ymax <- period_to_int(max)
in_range.default(y, min=ymin, max=ymax, strict=strict, ...)
} else {
y <- strptime(x, format=format)
ymin <- strptime(min, format=format)
ymax <- strptime(max, format=format)
in_range(y, min=ymin, max=ymax, strict=strict, ...)
}
}
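# Illustrative usage:
# in_range(3, min = 1, max = 5)                       # TRUE
# in_range(5, min = 1, max = 5, strict = TRUE)        # FALSE
# in_range("2020Q2", min = "2020Q1", max = "2020Q4")  # TRUE, period notation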
part_whole_relation <- function(values, labels, whole, part = NULL
, aggregator = sum, tol=1e-8, by = NULL, ...){
df <- data.frame(values=values, labels=labels)
f <- function(d, ...){
i_aggregate <- igrepl(whole, d$labels)
aggregate <- d$values[i_aggregate]
if (length(aggregate)>1){
stop(
sprintf("Multiple labels matching aggregate: %s. Expecting one"
, paste(aggregate,collapse=", "))
, call.=FALSE
)
}
i_details <- if (is.null(part)) !i_aggregate
else igrepl(part, d$labels)
details <- d$values[i_details]
out <- if (length(aggregate)==0){
FALSE
} else {
abs(aggregator(details, ...) - aggregate) < tol
}
values <- !logical(length(d$labels))
values[i_details | i_aggregate] <- out
values
}
if (is.null(by)){
return( f(df, ...) )
} else {
unsplit(lapply(split(df, by), f, ...),by)
}
}
rx <- function(x){
structure(x, class=c("regex",class(x)))
}
glob <- function(x){
structure(x, class=c("glob",class(x)))
}
igrepl <- function(pattern, x,...){
if (inherits(pattern, "glob")){
Reduce(`|`, lapply(utils::glob2rx(pattern), grepl,x,...))
} else if (inherits(pattern, "regex",...)){
Reduce(`|`, lapply(pattern, grepl, x, ...))
} else {
x %in% pattern
}
}
do_by <- function(x, by, fun, ...){
unsplit( lapply(split(x,by), function(d) rep(fun(d,...), length(d))),by)
}
sum_by <- function(x, by, na.rm=FALSE) do_by(x,by,sum, na.rm=na.rm)
mean_by <- function(x, by, na.rm=FALSE) do_by(x,by,mean, na.rm=na.rm)
min_by <- function(x, by, na.rm=FALSE) do_by(x,by,min, na.rm=na.rm)
max_by <- function(x, by, na.rm=FALSE) do_by(x,by,max, na.rm=na.rm)
hb <- function(x, ref=stats::median,...){
refval <- if(is.numeric(ref)) ref else ref(x,...)
pmax(x/refval, refval/x) -1
}
field_length <- function(x, n=NULL, min=NULL, max=NULL,...){
len <- nchar(as.character(x),...)
if (!is.null(n) & is.null(min) & is.null(max)){
len == n
} else if (!is.null(min) & !is.null(max) & is.null(n) ){
len >= min & len <= max
} else {
stop("Ill-specified check: either n, or min and max must be not-NULL")
}
}
field_format <- function(x, pattern, type=c("glob","regex"), ...){
type <- match.arg(type)
if (type == "glob") pattern <- utils::glob2rx(pattern)
grepl(pattern, x=as.character(x),...)
}
number_format <- function(x, format=NULL, min_dig=NULL, max_dig=NULL, dec="."){
if ( !is.null(format) ){
rx <- utils::glob2rx(format, trim.tail=FALSE)
rx <- gsub("d", "\\d", rx, fixed=TRUE)
rx <- gsub(".*", "\\d*", rx, fixed=TRUE)
return( grepl(rx, as.character(x)) )
}
rx <- if (dec == ".") "^.*\\." else sprintf("^.*\\%s",dec)
decimal_digits <- sub(rx, "", x)
min_dig <- if (is.null(min_dig)) "0" else as.character(min_dig)
max_dig <- if (is.null(max_dig)) "" else as.character(max_dig)
rx <- sprintf("^\\d{%s,%s}$",min_dig,max_dig)
grepl(rx,decimal_digits)
}
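# Illustrative usage:
# field_length(c("abc", "abcd"), n = 3)             # TRUE FALSE
# field_length(c("abc", "abcd"), min = 2, max = 3)  # TRUE FALSE
# field_format(c("X001", "Y12"), pattern = "X*")    # TRUE FALSE
# number_format(c("12.34", "12.3"), min_dig = 2)    # TRUE FALSE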
contains_exactly <- function(keys, by=NULL, allow_duplicates=FALSE){
given_keys <- do.call(paste, keys)
L <- list()
for ( keyname in names(keys) ) L[[keyname]] <- dynGet(keyname)
found_keys <- do.call(paste, L)
if (is.null(by)) by <- character(length(found_keys))
unsplit(lapply(split(found_keys, f=by), function(fk){
out <- all(fk %in% given_keys) && all(given_keys %in% fk)
if (!allow_duplicates) out <- out && !any(duplicated(fk))
rep(out, length(fk))
}), by)
}
contains_at_least <- function(keys, by=NULL){
L <- list()
for ( keyname in names(keys) ) L[[keyname]] <- dynGet(keyname)
given_keys <- do.call(paste, keys)
found_keys <- do.call(paste, L)
if (is.null(by)) by <- character(length(found_keys))
unsplit(lapply(split(found_keys, f=by), function(fk){
rep(all(given_keys %in% fk), length(fk))
}), by)
}
contains_at_most <- function(keys, by=NULL){
L <- list()
for ( keyname in names(keys) ) L[[keyname]] <- dynGet(keyname)
contains(L, keys, by=by)
}
does_not_contain <- function(keys){
L <- list()
for ( keyname in names(keys) ) L[[keyname]] <- dynGet(keyname)
!contains(L, keys, by=NULL)
}
rxin <- function(x, pattern){
A <- sapply(pattern, grepl, x=x)
if (!is.array(A)) A <- matrix(A,ncol=length(pattern))
apply(A, 1, any)
}
glin <- function(x, pattern){
pattern <- utils::glob2rx(pattern)
rxin(x, pattern)
}
get_keytype <- function(keys){
out <- grep("^(regex)|(glob)$", class(keys), value=TRUE)
if (length(out) < 1) out <- "fixed"
out
}
contains <- function(dat, keys, by){
keytype <- get_keytype(keys)
if (isTRUE(keytype=="regex") && length(keys) > 1){
for (keyname in names(keys)[-1]){
key <- keys[[keyname]]
keys[[keyname]] <- ifelse( substr(key,1,1) == "^"
, sub("^\\^", "", keys[[keyname]])
, paste0(".*", key) )
}
for (keyname in names(keys)[-length(keys)]){
key <- keys[[keyname]]
keys[[keyname]] <- ifelse( substr(key, nchar(key), nchar(key)) == "$"
, sub("\\$$", "", key)
, paste0(key, ".*"))
}
}
given_keys <- do.call(paste, keys)
found_keys <- do.call(paste, dat)
if (is.null(by)) by <- character(length(found_keys))
unsplit(lapply(split(found_keys, f=by), function(fk){
switch(keytype
, "fixed" = fk %in% given_keys
, "glob" = glin(fk, given_keys)
, "regex" = rxin(fk, given_keys)
)
}), by)
}
hierarchy <- function(values, labels, hierarchy, by=NULL, tol=1e-8, na_value=TRUE, aggregator = sum, ...){
if (is.null(by)) by <- character(length(values))
dat <- cbind(data.frame(values=values, labels=labels), by)
unsplit(lapply(split(dat, f=by)
, check_hagg, h=hierarchy, na_value = na_value, tol=tol, fun=aggregator,...)
, f=by)
}
check_hagg <- function(dat, h, na_value, tol, fun,...){
parents <- unique(h[,2])
keytype <- get_keytype(h)
out <- rep(na_value, nrow(dat))
for (parent in parents){
J <- dat$labels %in% parent
children <- h[,1][h[,2] == parent]
I <- switch(keytype
, "glob" = glin(dat$labels, children)
, "regex" = rxin(dat$labels, children)
, dat$labels %in% children)
if (sum(J) > 1){
grp <- paste0("(",paste(t(dat[1,-(1:2)]), collapse=", "),")")
msg <- "Parent '%s' occurs more than once (%d times) in group %s"
warning(sprintf(msg, parent, sum(J), grp), call.=FALSE)
out[I|J] <- FALSE
next
}
if (!any(J) && !any(I)) next
if (!any(J) && any(I)) out[I] <- FALSE
if ( any(J) && !any(I)) out[J] <- FALSE
ii <- I|J
    test <- abs(dat$values[J] - fun(dat$values[I], ...)) <= tol
if (any(J) && any(I)){
out[ii] <- (is.na(out[ii]) & test) | (!is.na(out[ii]) & out[ii] & test)
}
}
out
} |
solarZenithAngle <-
function(lat,hr,i,tr=0.2618,hr0=12) {
y <- acos(sin(lat)*sin(solarDecl(i))+cos(lat)*cos(solarDecl(i))*cos(tr*(hr-hr0)))
y
} |
f_month <- function(x, ...) {
UseMethod('f_month')
}
f_month.default <- function(x, ...) {
toupper(gsub("(^.)(.+)", "\\1", as.character(x)))
}
f_month.numeric <- function(x, ...) {
toupper(gsub("(^.)(.+)", "\\1", month.abb[x]))
}
f_month.Date <- function(x, ...) {
toupper(gsub("(^.)(.+)", "\\1", as.character(format(x, "%b"))))
}
f_month.POSIXt <- function(x, ...) {
toupper(gsub("(^.)(.+)", "\\1", as.character(format(x, "%b"))))
}
f_month.hms <- function(x, ...) {
f_month.POSIXt(as.POSIXct(x))
}
ff_month <- function(...) {
function(x) {f_month(x)}
}
f_month_name <- function(x, ...) {
UseMethod('f_month_name')
}
f_month_name.default <- function(x, ...) {
gsub("(^.)(.+)", "\\U\\1\\L\\2", as.character(x), perl = TRUE)
}
f_month_name.numeric <- function(x, ...) {
month.name[x]
}
f_month_name.Date <- function(x, ...) {
format(x, "%B")
}
f_month_name.POSIXt <- function(x, ...) {
format(x, "%B")
}
f_month_name.hms <- function(x, ...) {
f_month_name.POSIXt(as.POSIXct(x))
}
ff_month_name <- function(...) {
function(x) {f_month_name(x)}
}
f_month_abbreviation <- function(x, ...) {
UseMethod('f_month_abbreviation')
}
f_month_abbreviation.default <- function(x, ...) {
gsub("(^.)(.{2})()", "\\U\\1\\L\\2", as.character(x), perl = TRUE)
}
f_month_abbreviation.numeric <- function(x, ...) {
month.abb[x]
}
f_month_abbreviation.Date <- function(x, ...) {
format(x, "%b")
}
f_month_abbreviation.POSIXt <- function(x, ...) {
format(x, "%b")
}
f_month_abbreviation.hms <- function(x, ...) {
f_month_abbreviation.POSIXt(as.POSIXct(x))
}
ff_month_abbreviation <- function(...) {
function(x) {f_month_abbreviation(x)}
} |
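# Illustrative usage (month labels assume an English locale):
# f_month(as.Date(c("2024-01-15", "2024-10-01")))  # "J" "O"
# f_month_name(c(1, 12))                           # "January" "December"
# f_month_abbreviation(as.Date("2024-01-15"))      # "Jan"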
mainMGHD<-function(data=NULL, gpar0, G, n, label , eps, method ,nr=NULL) {
pcol=ncol(data)
if(!is.null(label)){
lc=apply(data[label==1,],2,mean)
for(i in 2:G){
lc=rbind(lc,apply(data[label==i,],2,mean))
}
z = combinewk(weights=matrix(1/G,nrow=nrow(data),ncol=G), label=label)
if (is.null(gpar0)) gpar = rgparGH(data=data, g=G, w=z,l=lc)
else gpar = gpar0
}
else{
if (is.null(gpar0)) gpar = try(igpar(data=data, g=G, method=method,nr=nr))
else gpar = gpar0}
loglik = numeric(n)
for (i in 1:3) {
gpar = try(EMgrstepGH(data=data, gpar=gpar, v=1, label = label))
loglik[i] = llikGH(data, gpar)}
while ( ( getall(loglik[1:i]) > eps) & (i < (n) ) ) {
i = i+1
gpar = try(EMgrstepGH(data=data, gpar=gpar, v=1, label = label))
loglik[i] = llikGH(data, gpar)
}
  if(i<n){loglik=loglik[-((i+1):n)]}
BIC=2*loglik[i]-log(nrow(data))*((G-1)+G*(2*pcol+2+pcol*(pcol-1)/2))
z=weightsGH(data=data, gpar= gpar)
ICL=BIC+2*sum(log(apply(z,1,max)))
AIC=2*loglik[i]-2*((G-1)+G*(2*pcol+2+pcol*(pcol-1)/2))
AIC3=2*loglik[i]-3*((G-1)+G*(2*pcol+2+pcol*(pcol-1)/2))
val = list(loglik= loglik, gpar=gpar, z=z, map=MAPGH(data=data, gpar= gpar, label=label),BIC=BIC,ICL=ICL,AIC=AIC,AIC3=AIC3 )
return(val)
}
MGHD <- function(data=NULL, gpar0=NULL, G=2, max.iter=100, label =NULL , eps=1e-2, method="kmeans" ,scale=TRUE ,nr=10, modelSel="AIC") {
data=as.matrix(data)
if( scale==TRUE){
data=scale(as.matrix(data))}
pcol=ncol(data)
if (is.null(data)) stop('data is null')
if (nrow(data) == 1) stop('nrow(data) is equal to 1')
if (any(is.na(data))) stop('No NAs allowed.')
if (is.null(G)) stop('G is NULL')
if ( max.iter < 1) stop('max.iter is not a positive integer')
if(modelSel=="BIC"){
bico=-Inf
t=length(G)
BIC=matrix(NA,t,1)
cont=0
for(b in 1:t){
mo=try(mainMGHD(data=data, gpar0=gpar0, G=G[b], n=max.iter, eps=eps, label=label,method= method,nr=nr),silent = TRUE)
cont=cont+1
if(is.list(mo)){
bicn=mo$BIC
BIC[cont]=bicn}
else{bicn=-Inf
BIC[cont]=NA}
if(bicn>bico){
bico=bicn
sg=G[b]
model=mo
}
}
val=MixGHD(Index=BIC,AIC=model$AIC,AIC3=model$AIC3,BIC=model$BIC,ICL=model$ICL, map=model$map, gpar=model$gpar, loglik=model$loglik, z=model$z,method="MGHD",data=as.data.frame(data),scale=scale)
cat("The best model (BIC) for the range of components used is G = ", sg,".\nThe BIC for this model is ", bico,".",sep="")
return(val)}
else if(modelSel=="ICL"){
bico=-Inf
t=length(G)
ICL=matrix(NA,t,1)
cont=0
for(b in 1:t){
mo=try(mainMGHD(data=data, gpar0=gpar0, G=G[b], n=max.iter, eps=eps, label=label,method= method,nr=nr),silent = TRUE)
cont=cont+1
if(is.list(mo)){
bicn=mo$ICL
ICL[cont]=bicn}
else{bicn=-Inf
ICL[cont]=NA}
if(bicn>bico){
bico=bicn
sg=G[b]
model=mo
}
}
val=MixGHD(Index=ICL,AIC=model$AIC,AIC3=model$AIC3,BIC=model$BIC,ICL=model$ICL, map=model$map, gpar=model$gpar, loglik=model$loglik, z=model$z,method="MGHD",data=as.data.frame(data),scale=scale)
cat("The best model (ICL) for the range of components used is G = ", sg,".\nThe ICL for this model is ", bico,".",sep="")
return(val)}
else if(modelSel=="AIC3"){
bico=-Inf
t=length(G)
AIC3=matrix(NA,t,1)
cont=0
for(b in 1:t){
mo=try(mainMGHD(data=data, gpar0=gpar0, G=G[b], n=max.iter, eps=eps, label=label,method= method,nr=nr),silent = TRUE)
cont=cont+1
if(is.list(mo)){
bicn=mo$AIC3
AIC3[cont]=bicn}
else{bicn=-Inf
AIC3[cont]=NA}
if(bicn>bico){
bico=bicn
sg=G[b]
model=mo
}
}
val=MixGHD(Index=AIC3,AIC=model$AIC,AIC3=model$AIC3,BIC=model$BIC,ICL=model$ICL, map=model$map, gpar=model$gpar, loglik=model$loglik, z=model$z,method="MGHD",data=as.data.frame(data),scale=scale)
cat("The best model (AIC3) for the range of components used is G = ", sg,".\nThe AIC3 for this model is ", bico,".",sep="")
return(val)}
else {
bico=-Inf
t=length(G)
AIC=matrix(NA,t,1)
cont=0
for(b in 1:t){
mo=try(mainMGHD(data=data, gpar0=gpar0, G=G[b], n=max.iter, eps=eps, label=label,method= method,nr=nr),silent = TRUE)
cont=cont+1
if(is.list(mo)){
bicn=mo$AIC
AIC[cont]=bicn}
else{bicn=-Inf
AIC[cont]=NA}
if(bicn>bico){
bico=bicn
sg=G[b]
model=mo
}
}
val=MixGHD(Index=AIC,AIC=model$AIC,AIC3=model$AIC3,BIC=model$BIC,ICL=model$ICL, map=model$map, gpar=model$gpar, loglik=model$loglik, z=model$z,method="MGHD",data=as.data.frame(data),scale=scale)
cat("The best model (AIC) for the range of components used is G = ", sg,".\nThe AIC for this model is ", bico,".",sep="")
return(val)}
} |
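# Illustrative usage (sketch; assumes the remaining MixGHD internals called
# above, e.g. igpar(), EMgrstepGH(), llikGH() and the MixGHD() constructor,
# are available):
# fit <- MGHD(data = iris[, 1:4], G = 2:3, modelSel = "BIC")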
stringToStringList <- function(string) {
limit = nchar(string)
num=limit/16
final=c()
for (i in 1:num) {
start=(i-1)*16+1
substring = substr(string,start,start+15)
substring2 = gsub(" *$","", substring, perl=T)
final=c(final,substring2)
}
return(final)
} |
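# Illustrative usage: split a fixed-width string into 16-character fields,
# dropping trailing blanks (the input length is assumed to be a multiple of 16).
# stringToStringList(paste(sprintf("%-16s", c("alpha", "beta")), collapse = ""))
# returns c("alpha", "beta")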
addDocument = function(doc, filename, ...){
checkHasSlide(doc)
if( missing( filename ) )
stop("filename cannot be missing")
if( !inherits( filename, "character" ) )
stop("filename must be a single character value")
if( length( filename ) != 1 )
stop("filename must be a single character value")
if( !file.exists( filename ) )
stop( filename, " does not exist")
UseMethod("addDocument")
}
addDocument.docx = function(doc, filename, ... ) {
pos <- regexpr("\\.([[:alnum:]]+)$", filename)
ext = ifelse(pos > -1L, substring(filename, pos + 1L), "")
if( ext == "docx" ){
.jcall( doc$obj, "V", "insertExternalDocx", filename )
} else stop("filename must be a docx document.")
doc
} |
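# Hypothetical usage (file names are illustrative; assumes a ReporteRs-style docx() constructor):
#   doc <- docx()
#   doc <- addDocument(doc, filename = "appendix.docx")   # inserts the content of the external docx file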
charInSetParser <- function(fun,
action = function(s) list(type="charInSet",value=s),
error = function(p) list(type="charInSet",pos =p))
function(stream) {
cstream <- streamParserNextChar(stream)
if ( cstream$status == "eof" ) return(list(status="fail",node=error(streamParserPosition(stream)),stream=stream))
if ( fun(cstream$char) )
return(list(status="ok", node=action(cstream$char), stream=cstream$stream))
return(list(status="fail",node=error(streamParserPosition(stream)),stream=stream))
} |
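# charInSetParser() builds a single-character parser: given a predicate 'fun', the returned parser
# consumes one character from the stream when the predicate holds and fails otherwise.
# Hypothetical sketch (the stream constructor follows the qmrparser naming convention and is an assumption here):
#   digit <- charInSetParser(function(ch) ch %in% as.character(0:9))
#   digit(streamParserFromString("7abc"))$status   # expected: "ok"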
ReadSortFile<-function(filename,terms=FALSE,septerms=",",sep=";",dec="."){
  ## Read a free-sorting data file: products in rows, one column per subject,
  ## each cell containing the label(s) of the group the subject put the product in
  mat<-read.csv2(filename,sep=sep,dec=dec,row.names=1,header=TRUE)
  nprod<-nrow(mat)
  nsubjects<-ncol(mat)
  ## all distinct terms used over all subjects (cells may hold several terms separated by 'septerms')
  lev<-unique(unlist(strsplit(as.character(unlist(mat)),septerms)))
  MatTermSubject<-vector("list",length=nsubjects)
  MatSort<-matrix(0,nprod,nsubjects)
  colnames(MatSort)<-colnames(mat)
  rownames(MatSort)<-rownames(mat)
  for (h in 1:nsubjects){
    ## recode the groups of subject h as integers 1..nlevels
    S<-as.factor(mat[,h])
    levels(S)<-1:nlevels(S)
    MatSort[,h]<-S
    ## products x terms indicator matrix for subject h
    S<-strsplit(as.character(mat[,h]),septerms)
    Mats<-matrix(0,nprod,length(lev))
    colnames(Mats)<-lev
    rownames(Mats)<-rownames(mat)
    for (p in 1:nprod){
      for (l in 1:length(S[[p]])){
        Mats[p,which(S[[p]][l]==lev)]<-1
      }
    }
    MatTermSubject[[h]]<-Mats
  }
  ## number of subjects using each term for each product
  MatTerms<-apply(simplify2array(MatTermSubject),c(1,2),FUN=sum)
  if (terms==TRUE){
    return(list(MatSort=MatSort,MatTerms=MatTerms,MatTermSubject=MatTermSubject))
  } else {
    return(MatSort)
  }
}
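# Hypothetical input (file name and labels are illustrative): a CSV with products in rows and one
# column per subject, each cell holding the group label(s) the subject gave the product, e.g. "fruity,sweet".
#   sorting <- ReadSortFile("sorting_task.csv", terms = TRUE)
#   sorting$MatSort[1:3, 1:4]   # group numbers per subject for the first products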
MA_effectsTable <- function(model,
weights,
data,
effects_model = "random") {
theCall <- match.call()
weights <- eval(theCall[[match("weights", names(theCall))]],
data,
enclos = sys.frame(sys.parent()))
effects_model <- switch(effects_model,
random = "DL",
fixed = "FE")
  # rma() from the metafor package fits the meta-analytic model with method "DL" (DerSimonian-Laird,
  # random effects) or "FE" (fixed effect) as chosen above, and supplies the tau^2 estimate used below
  resultsRMA <- rma(model,
                    vi = weights,
                    data = data,
                    method = effects_model)
  # weighted least-squares ANOVA; the positional arguments to lm() are
  # formula, data, subset (NULL) and weights = 1/(vi + tau^2)
  effect.pooled <- anova(do.call("lm",
                                 list(model, data, NULL, (1.0/(weights + resultsRMA$tau2)))))
printModelSummary(effect.pooled)
printEffectTestsSummary(effect.pooled)
return (effect.pooled)
}
printModelSummary <- function(ANOVA) {
cat("Overall Model Summary:\n\n")
effectsRange <- nrow(ANOVA) - 1
model.summary <- data.frame(
SOURCE = c("model", "residual error", "total"),
Q = c(
sum(ANOVA[1:effectsRange, 2]),
ANOVA[effectsRange + 1, 2],
sum(ANOVA[1:effectsRange, 2]) + ANOVA[effectsRange + 1, 2]
),
DF = c(
sum(ANOVA[1:effectsRange, 1]),
ANOVA[effectsRange + 1, 1],
sum(ANOVA[1:effectsRange, 1]) + ANOVA[effectsRange + 1, 1]
),
P = c(
1.0 - pchisq(sum(ANOVA[1:effectsRange, 2]),
df = sum(ANOVA[1:effectsRange, 1])),
1.0 - pchisq(ANOVA[effectsRange + 1,2],
df = ANOVA[effectsRange + 1,1]),
1.0 - pchisq(sum(ANOVA[1:effectsRange, 2]) + ANOVA[effectsRange + 1, 2],
df = sum(ANOVA[1:effectsRange, 1]) + ANOVA[effectsRange + 1, 1])
)
)
print(model.summary, row.names = FALSE, digits = 5)
}
printEffectTestsSummary <- function(ANOVA) {
cat("\n\nEffect Test Summary:\n\n")
effectsRange <- nrow(ANOVA) - 1
mainEffects <- rownames(ANOVA[1:effectsRange, ])
model.summary <- data.frame(
SOURCE = mainEffects,
Q = ANOVA[1:effectsRange, 2],
DF = ANOVA[1:effectsRange, 1],
P = 1.0 - pchisq(ANOVA[1:effectsRange, 2], ANOVA[1:effectsRange, 1])
)
print(model.summary, row.names = FALSE, digits = 5)
} |
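# Hypothetical call (data frame and column names are illustrative only): 'studies' holds one row per
# study with an effect size, its sampling variance 'vi' and a moderator.
#   MA_effectsTable(effect_size ~ moderator, weights = vi, data = studies, effects_model = "random")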
testRobustToNAimputation <- function(dat, gr, annot=NULL, retnNA=TRUE, avSdH=c(0.15,0.5), avSdL=NULL, plotHist=FALSE, xLab=NULL, tit=NULL, imputMethod="mode2",
seedNo=NULL, multCorMeth=NULL, nLoop=100, lfdrInclude=NULL, ROTSn=NULL, silent=FALSE, debug=FALSE, callFrom=NULL) {
fxNa <- wrMisc::.composeCallName(callFrom, newNa="testRobustToNAimputation")
if(!isTRUE(silent)) silent <- FALSE
if(isTRUE(debug)) silent <- FALSE else debug <- FALSE
if(!isTRUE(plotHist)) plotHist <- FALSE
datOK <- TRUE
msg <- NULL
  ## 'dat' may be a list containing $quant and $annot; data.frames are lists too,
  ## so they are excluded here and coerced to a matrix below
  if(is.list(dat) && !is.data.frame(dat)) { if(all(c("quant","annot") %in% names(dat))) {
    if(length(dim(dat$annot)) ==2 & length(annot) <1) annot <- dat$annot
    dat <- dat$quant } else {datOK <- FALSE; msg <- "Invalid 'dat' : does NOT contain both '$quant' and '$annot' !"} }
  if(datOK) if(length(dim(dat)) !=2) { datOK <- FALSE
    msg <- "'dat' must be a matrix or data.frame with more than one column"}
  if(datOK) {
    if(is.data.frame(dat)) dat <- as.matrix(dat)
    if(length(gr) != ncol(dat)) { datOK <- FALSE
      msg <- "Number of columns in 'dat' and number of (group-)elements in 'gr' do not match !"} }
if(datOK) {
if(!is.factor(gr)) gr <- as.factor(gr)
if(is.null(xLab)) xLab <- "values"
if(length(annot) <1) annot <- matrix(NA, nrow=nrow(dat), ncol=1, dimnames=list(rownames(dat),"rowNa"))
    if(length(ROTSn) >0) message(fxNa," argument 'ROTSn' is deprecated, please use argument 'multCorMeth' instead (like multCorMeth=c(ROTSn='10'))")
    if(length(lfdrInclude) >0) message(fxNa," argument 'lfdrInclude' is deprecated, please use argument 'multCorMeth' instead (like multCorMeth='lfdrInclude')")
ROTSn <- NULL
multCorMeth <- if(length(multCorMeth) <1) c("lfdr","FDR","means") else unique(c(multCorMeth, "means"))
if(length(multCorMeth) ==1 & is.numeric(multCorMeth)) {
multCorMeth <- if(multCorMeth >1) c("lfdr", ROTSn=as.integer(multCorMeth), "means") else "lfdr"}
if("ROTSn" %in% names(multCorMeth)) { ROTSn <- try(as.integer(multCorMeth["ROTSn"]), silent=TRUE)
if("try-error" %in% class(ROTSn)) {ROTSn <- NULL; comp<- NULL }} else {ROTSn <- NULL; comp<- NULL }
if("lfdr" %in% multCorMeth) { lfdrInclude <- TRUE
} else if("lfdr" %in% names(multCorMeth)) { lfdrInclude <- try(as.logical(multCorMeth["lfdr"]), silent=TRUE)
if("try-error" %in% class(lfdrInclude)) { lfdrInclude <- FALSE; multCorMeth <- multCorMeth[-which(names(multCorMeth)== "lfdr")] } }
if(length(lfdrInclude) <1) lfdrInclude <- FALSE
isNA <- is.na(dat)
chNA <- any(isNA)
nNAmat <- matrix(0, nrow=nrow(dat), ncol=length(levels(gr)), dimnames=list(NULL,levels(gr)))
seedNo <- as.integer(seedNo)[1]
gr <- as.factor(gr)
    callFro <- try(as.factor(gr), silent=TRUE)
    if("try-error" %in% class(callFro)) message("+++++\n",fxNa," MAJOR PROBLEM with argument 'gr' !! (possibly not sufficient level-names ?) \n+++++")
if(debug) message(fxNa,"start 1st pass, no of NA: ",sum(isNA))
datI <- matrixNAneighbourImpute(dat, gr, imputMethod=imputMethod, retnNA=retnNA ,avSdH=avSdH, plotHist=plotHist, xLab=xLab, tit=tit, seedNo=seedNo, silent=silent, callFrom=fxNa,debug=debug)
if(debug) message(fxNa,"start combineMultFilterNAimput ")
datFi <- combineMultFilterNAimput(dat=dat, imputed=datI, grp=gr, annDat=annot, abundThr=stats::quantile(if(is.list(dat)) dat$quant else dat, 0.02,na.rm=TRUE), silent=silent, callFrom=fxNa)
if(debug) message(fxNa,"done combineMultFilterNAimput")
if(lfdrInclude) {
chLfdr <- try(find.package("fdrtool"), silent=TRUE)
if("try-error" %in% class(chLfdr)) {
        message(fxNa,"Package 'fdrtool' not found ! Please install it first from CRAN for calculating lfdr-values. Omitting (default) 'lfdr' option from argument 'multCorMeth' ..")
lfdrInclude <- FALSE } }
pwComb <- wrMisc::triCoord(length(levels(gr)))
if(debug) message(fxNa,"start 1st moderTestXgrp()")
out <- wrMisc::moderTestXgrp(datFi$data, grp=gr, limmaOutput=TRUE, addResults=multCorMeth, silent=silent, callFrom=fxNa)
chFDR <- names(out) =="FDR"
if(any(chFDR)) names(out)[which(chFDR)] <- "BH"
rownames(pwComb) <- colnames(out$t)
if(length(ROTSn)==1) if(ROTSn >0 & !is.na(ROTSn)) {
chPa <- requireNamespace("ROTS", quietly=TRUE)
      if(!chPa) { message(fxNa,": package 'ROTS' not found/installed, omitting argument 'ROTSn'")
ROTSn <- 0 }
} else ROTSn <- NULL
if(length(ROTSn)==1) if(ROTSn >0) {
if(debug) message(fxNa,"start ROTS n=",ROTSn)
comp <- wrMisc::triCoord(length(levels(gr)))
rownames(comp) <- paste(levels(gr)[comp[,1]], levels(gr)[comp[,2]],sep="-")
tmRO <- matrix(nrow=nrow(datFi$data), ncol=nrow(comp))
comPair <- matrix(unlist(strsplit(rownames(comp),"-")), ncol=nrow(comp))
useCol <- apply(comPair, 2, function(x) gr %in% x)
for(i in 1:nrow(comp)) tmRO[which(datFi$filt[,i]),i] <- ROTS::ROTS(datFi$data[which(datFi$filt[,i]),
which(useCol[,i])], groups=as.numeric(as.factor(gr[which(useCol[,i])]))-1, B=ROTSn)$pvalue
out$ROTS.p <- tmRO
out$ROTS.BH <- apply(tmRO, 2, stats::p.adjust, method="BH")
if(lfdrInclude) out$ROTS.lfdr <- apply(tmRO, 2, wrMisc::pVal2lfdr)
}
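    ## Stability loop: when 'dat' contained NAs, imputation and moderated testing are repeated 'nLoop'
    ## times below, and the median p- and t-values over all imputation rounds are reported.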
if(chNA & nLoop >1) {
if(debug) message(fxNa,"subsequent rounds of NA-imputation nLoop=",nLoop)
pValTab <- tValTab <- array(NA, dim=c(nrow(dat), nrow(pwComb), nLoop))
datIm <- array(NA, dim=c(nrow(dat), ncol(dat), nLoop))
datIm[,,1] <- datFi$data
pValTab[,,1] <- out$p.value
tValTab[,,1] <- out$t
if(length(ROTSn)==1) if(ROTSn >0) {
pVaRotsTab <- array(NA, dim=c(nrow(dat), nrow(pwComb), min(10,nLoop)))
pVaRotsTab[,,1] <- out$ROTS.p }
for(i in 2:nLoop) {
if(length(seedNo)==1) seedNo <- seedNo +i
datX <- matrixNAneighbourImpute(dat, gr, imputMethod=imputMethod, seedNo=seedNo, retnNA=retnNA, avSdH=avSdH, NAneigLst=datI$NAneigLst, plotHist=FALSE, silent=TRUE, callFrom=fxNa)$data
if(debug) message(fxNa,"passed matrixNAneighbourImpute() in loop no ",i)
fitX <- limma::eBayes(limma::contrasts.fit(limma::lmFit(datX[,], out$design), contrasts=out$contrasts))
datIm[,,i] <- datX
pValTab[,,i] <- fitX$p.value
tValTab[,,i] <- fitX$t
      if(length(ROTSn)==1) if(ROTSn >0 & i < min(10, nLoop)) {
        ## use a separate index 'j' for the pair-wise comparisons, so the outer imputation-loop index 'i' is not overwritten
        for(j in 1:nrow(comp)) tmRO[which(datFi$filt[,j]),j] <- ROTS::ROTS(datFi$data[which(datFi$filt[,j]), which(useCol[,j])], groups=as.numeric(as.factor(gr[which(useCol[,j])])) -1, B=ROTSn)$pvalue   # groups coded 0/1 as in the first pass above
        pVaRotsTab[,,i] <- tmRO } }
if(any(!datFi$filt)) {
fiAr <- rep(datFi$filt,nLoop)
pValTab[which(!fiAr)] <- NA }
out$datImp <- as.matrix(apply(datIm, 1:2, mean, na.rm=TRUE))
rownames(out$datImp) <- if(is.null(rownames(dat))) rownames(annot) else rownames(dat)
if("tValTab" %in% multCorMeth) { out$tValArr <- tValTab
out$pValArr <- pValTab }
if("noLimma" %in% multCorMeth) out$simple.p.value <- out$p.value
out$p.value <- as.matrix(apply(pValTab, 1:2, stats::median, na.rm=FALSE))
out$t <- as.matrix(apply(tValTab, 1:2, stats::median, na.rm=FALSE))
colnames(out$p.value) <- colnames(out$t) <- rownames(pwComb)
} else out$datImp <- datFi$data
out$annot <- annot
out$filter <- datFi$filt
dimnames(out$datImp) <- list(if(is.null(rownames(out$lods))) rownames(out$annot) else rownames(out$lods), colnames(dat))
rownames(out$t) <- rownames(out$p.value) <- rownames(out$datImp)
if(any(!datFi$filt)) out$p.value[which(!datFi$filt)] <- NA
if(lfdrInclude) {out$lfdr <- as.matrix(apply(out$p.value, 2, wrMisc::pVal2lfdr, callFrom=fxNa))
dimnames(out$lfdr) <- list(rownames(out$lods), colnames(out$contrasts))
if("noLimma" %in% multCorMeth) out$simple.lfdr <- if(ncol(out$simple.p.value) >1) apply(out$simple.p.value, 2, wrMisc::pVal2lfdr) else wrMisc::pVal2lfdr(out$simple.p.value)
}
if(any(c("FDR","BH") %in% multCorMeth)) { out$BH <- as.matrix(apply(out$p.value, 2, stats::p.adjust, method="BH"))
dimnames(out$BH) <- list(rownames(out$lods), colnames(out$contrasts))
if("noLimma" %in% multCorMeth) out$simple.BH <- if(ncol(out$simple.p.value) >1) apply(out$simple.p.value, 2, stats::p.adjust, method="BH") else stats::p.adjust(out$simple.p.value, method="BH")
}
if("BY" %in% multCorMeth) {out$BY <- as.matrix(apply(out$p.value, 2, stats::p.adjust, method="BY"))
dimnames(out$BY) <- list(rownames(out$lods), colnames(out$contrasts))}
if(length(ROTSn)==1) if(ROTSn >0 & chNA & nLoop >1) {
out$ROTS.p <- apply(pVaRotsTab, 1:2, stats::median, na.rm=TRUE)
if(any(!datFi$filt)) out$ROTS.p[which(!datFi$filt)] <- NA
out$ROTS.BH <- as.matrix(as.matrix(apply(out$ROTS.p, 2, stats::p.adjust, method="BH")))
dimnames(out$ROTS.BH) <- list(rownames(out$lods), colnames(out$contrasts) )
if(lfdrInclude) {out$ROTS.lfdr <- as.matrix(as.matrix(apply(out$ROTS.p, 2, wrMisc::pVal2lfdr)))
dimnames(out$ROTS.lfdr) <- list(rownames(out$lods), colnames(out$contrasts))} }
  out } else { warning(fxNa,msg)
    return(NULL) }
}
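# Hypothetical usage sketch (object names are illustrative only): 'abund' is a numeric matrix of
# log-abundances containing some NAs, 'grp' gives the group of each column of 'abund'.
#   res <- testRobustToNAimputation(abund, gr=grp, nLoop=50)
#   head(res$p.value)   # median p-values over the imputation rounds
#   head(res$BH)        # Benjamini-Hochberg adjusted p-values (with the default 'multCorMeth')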
|
expected <- eval(parse(text="structure(c(-0.0948897811608504, -0.188187614862163, -0.338089494458728, -0.542262420718711, -0.758125996567989, -0.917432086773337, -0.98559832731166, -0.999107364086017), .Names = c(\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\"))"));
test(id=0, code={
argv <- eval(parse(text="list(structure(c(-0.0996985539253204, -0.208486018303182, -0.412624920187971, -0.781459230080118, -1.41933833538431, -2.49413413365086, -4.24041092023363, -7.0213317713299), .Names = c(\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\")))"));
do.call(`expm1`, argv);
}, o=expected); |