# Single stacked (segmented) bar of the relative frequencies of x: each segment is
# labelled with its level inside the bar and with its proportion on the right axis.
segmented_barchart <-
function(x) {
xlabel <- deparse(substitute(x))
par(mfrow=c(1,1))
par(mar=c(5,2,4,4))
offset <- table(x)/length(x)/2
fs <- c(0,cumsum(table(x)/length(x)) )[1:length(offset)]
par(mgp = c(0.5, 1, 0))
plot(factor(x)~factor(rep(" ",length(x))),xlab=xlabel,ylab="",axes=FALSE,col=grey(seq(.3,.9,length=length(offset))))
axis(2)
text(0.5,fs+offset,labels=levels(factor(x)))
axis(4,at=fs+offset,labels=prettyNum(table(x)/length(x),drop0trailing=FALSE,digits=2,format="e"),las=1)
par(mgp=c(3, 1, 0))
}
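# Usage sketch (not from the original source): any vector that can be tabulated works.
if (FALSE) {
x <- sample(c("low", "mid", "high"), 200, replace = TRUE)
segmented_barchart(x)
}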
|
"mix2_data"
|
NULL
axe_call.mda <- function(x, verbose = FALSE, ...) {
old <- x
x <- exchange(x, "call", call("dummy_call"))
add_butcher_attributes(
x,
old,
disabled = c("print()", "summary()", "update()"),
verbose = verbose
)
}
axe_env.mda <- function(x, verbose = FALSE, ...) {
old <- x
x$terms <- axe_env(x$terms, ...)
add_butcher_attributes(
x,
old,
verbose = verbose
)
}
axe_fitted.mda <- function(x, verbose = FALSE, ...) {
old <- x
x$fit <- exchange(x$fit, "fitted.values", matrix(NA))
add_butcher_attributes(
x,
old,
verbose = verbose
)
}
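# Usage sketch (hypothetical model; assumes the mda and butcher packages): the axe_*
# methods above are what butcher::butcher() dispatches to for "mda" objects.
if (FALSE) {
fit <- mda::mda(Species ~ ., data = iris)
small_fit <- butcher::butcher(fit, verbose = TRUE)
}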
|
# Average marginal contribution (Shapley-type value) of each player over the supplied
# permutation(s): 'permute' is one permutation (vector) or one permutation per row
# (matrix); 'costs' gives the cost of every coalition, ordered as in coalitions(n)[[1]].
marginal_contribution_mean <-
function(permute, costs){
n<-max(permute)
coa<-coalitions(n)[[1]]
cc<-costs
if (is.vector(permute)){
phi<-c()
for (k in 1:n){
permutek<-permute[1:which(permute==k)]
permute_aux<-permute[1:(which(permute==k)-1)]
ipermutek<-rep(0,n);ipermutek[permutek]<-1
ipermutek_aux<-rep(0,n)
if (length(permutek)!=length(permute_aux)){ipermutek_aux[permute_aux]<-1}
for (i in 1:2^n){
if (sum(coa[i,]==ipermutek)==n){
costek<-cc[i]
}
if (sum(coa[i,]==ipermutek_aux)==n){
costemenosk<-cc[i]
}
}
ck<-costek-costemenosk
phi[k]<-ck
}
}
if (!is.vector(permute)){
cmarginales<-matrix(0,ncol=n,nrow=nrow(permute))
phi<-rep(0,n)
for (l in 1:nrow(permute)){
permutel<-permute[l,]
for (k in 1:n){
permutek<-permutel[1:which(permutel==k)]
permute_aux<-permutel[1:(which(permutel==k)-1)]
ipermutek<-rep(0,n);ipermutek[permutek]<-1
ipermutek_aux<-rep(0,n)
if (length(permutek)!=length(permute_aux)){ipermutek_aux[permute_aux]<-1}
costek<-0;costemenosk<-0
for (i in 1:2^n){
if (sum(coa[i,]==ipermutek)==n){
costek<-cc[i]
}
if (sum(coa[i,]==ipermutek_aux)==n){
costemenosk<-cc[i]
}
}
ck<-costek-costemenosk
cmarginales[l,k]<-ck
}
}
phi<-apply(cmarginales,2,mean)}
return(phi)}
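# Usage sketch (hypothetical), assuming coalitions() from the same source returns the
# 2^n 0/1 coalition rows in coalitions(n)[[1]] and that 'costs' follows the same order.
if (FALSE) {
n <- 3
costs <- c(0, 10, 20, 30, 25, 35, 45, 50)           # one cost per coalition, same order
perms <- rbind(c(1, 2, 3), c(2, 1, 3), c(3, 2, 1))  # one permutation per row
marginal_contribution_mean(perms, costs)
}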
|
flds <- c('doi', 'container_issnl', 'container_name', 'publisher')
dois1 <- c('10.7554/eLife.030326', '10.7554/eLife.327636')
dois2 <- c('10.7717/peerj.228','10.7717/peerj.234')
test_that("fat_cat_search_one", {
skip_on_cran()
vcr::use_cassette("fat_cat_search_one", {
one <- fat_cat_search_one(dois1, fields = flds, size = length(dois1))
two <- fat_cat_search_one(dois2, fields = flds, size = length(dois2))
})
expect_is(one, "data.frame")
expect_equal(NROW(one), 2)
expect_is(one$doi, "character")
expect_equal(one$message[1], "not found")
expect_is(two, "data.frame")
expect_equal(NROW(two), 2)
expect_is(two$doi, "character")
expect_true(is.na(two$message[1]))
})
test_that("fat_cat_search", {
skip_on_cran()
vcr::use_cassette("fat_cat_search", {
one <- fat_cat_search(dois1)
two <- fat_cat_search(dois2)
})
expect_is(one, "list")
expect_equal(length(one), 2)
expect_named(one, NULL)
expect_is(one[[1]]$doi, "character")
expect_equal(one[[1]]$message, "not found")
expect_is(two, "list")
expect_equal(length(two), 2)
expect_named(two, NULL)
expect_is(two[[1]]$doi, "character")
expect_true(is.na(two[[1]]$message))
})
test_that("get_publisher2", {
skip_on_cran()
vcr::use_cassette("get_publisher2", {
one <- get_publisher2(dois1)
two <- get_publisher2(dois2)
})
expect_is(one, "list")
expect_equal(length(one), 2)
expect_named(one, dois1)
expect_is(one[[1]], "character")
expect_is(attr(one[[1]], "publisher"), "character")
expect_match(attr(one[[1]], "publisher"), "elife")
expect_equal(attr(one[[1]], "issn"), "")
expect_equal(attr(one[[1]], "error"), "not found")
expect_is(two, "list")
expect_equal(length(two), 2)
expect_named(two, dois2)
expect_is(two[[1]], "character")
expect_is(attr(two[[1]], "publisher"), "character")
expect_match(attr(two[[1]], "publisher"), "peerj")
expect_equal(attr(two[[1]], "issn"), "2167-8359")
expect_true(is.na(attr(two[[1]], "error")))
})
test_that("make_doi_str", {
aa <- make_doi_str(dois1)
expect_is(aa, "character")
expect_equal(length(aa), 1)
expect_match(aa, "doi:\\(")
expect_match(aa, dois1[1])
expect_match(aa, dois1[2])
})
test_that("unknown_id", {
aa <- unknown_id("foo bar")
expect_is(aa, "character")
expect_equal(length(aa), 1)
expect_match(aa, "unknown")
expect_match(attr(aa, "error"), "foo bar")
})
test_that("check_type", {
expect_error(check_type(5), "'type' parameter must be character")
expect_error(check_type('foo'), "'type' parameter must be")
expect_null(check_type('xml'))
expect_null(check_type('pdf'))
expect_null(check_type('plain'))
})
|
show_models <- function(models, model_names = NULL, covariates = NULL,
merge_models = FALSE, drop_controls = FALSE,
headings = list(variable = "Variable", n = "N", measure = "Hazard ratio", ci = NULL, p = "p"),
...) {
stopifnot(inherits(models, "ezcox_models") | all(sapply(models, function(x) inherits(x, "coxph"))), is.list(headings))
if (is.null(headings$variable)) {
headings$variable <- "Variable"
}
if (is.null(headings$n)) {
headings$n <- "N"
}
if (is.null(headings$measure)) {
headings$measure <- "Hazard ratio"
}
if (is.null(headings$p)) {
headings$p <- "p"
}
pkg_version <- packageVersion("forestmodel")
if (pkg_version$major == 0 && pkg_version$minor < 6) {
message("Please install the latest version of forestmodel first.")
message("Run one of the following commands:")
message(" remotes::install_github(\"ShixiangWang/forestmodel\")")
message("Or")
message(" remotes::install_git(\"https://gitee.com/ShixiangWang/forestmodel\")")
return(invisible(NULL))
}
if (!is.null(model_names)) {
names(models) <- model_names
} else if (is.null(names(models))) {
names(models) <- paste0("Model ", seq_along(models))
}
if (drop_controls) {
if (is.null(covariates)) {
message("covariates=NULL but drop_controls=TRUE, detecting controls...")
if (isTRUE(attr(models, "has_control"))) {
message("Yes. Setting variables to keep...")
covariates <- sapply(models, function(x) attr(x, "Variable"))
} else {
message("No. Skipping...")
}
}
message("Done.")
}
if (!is.null(covariates)) {
covariates <- ifelse(isValidAndUnreserved(covariates) | startsWith(covariates, "`"),
covariates, paste0("`", covariates, "`")
)
}
forestmodel::forest_model(
model_list = models,
panels = cox_panel(headings = headings),
covariates = covariates,
merge_models = merge_models, ...
)
}
|
# Maximum value (and its row/column indices) of a file-backed big.matrix, identified by
# its descriptor file m.desc in folder m.wd; the matrix is scanned in column blocks of
# at most size.limit cells, optionally in parallel on nworker processes.
maxbigm <- function(m.desc, m.wd, nworker = 1, rm.na = TRUE, size.limit = 10000 * 10000)
{
requireNamespace("bigmemory")
mx=bigmemory::attach.big.matrix(dget(paste0(m.wd,"/",m.desc)))
coln=ncol(mx)
rown=nrow(mx)
inv=max(floor(size.limit/rown),1)
num=ceiling(coln/inv)
if(num==1){ser=matrix(c(1,coln),nrow=1)}else{
ser=cbind((0:(num-1))*inv+1,c((1:(num-1))*inv,coln))
}
findmax<-function(i,m.desc,ser,rm.na,m.wd)
{
requireNamespace("bigmemory")
mx=bigmemory::attach.big.matrix(dget(paste0(m.wd,"/",m.desc)))
mi=mx[,ser[i,1]:ser[i,2]]
gc()
maxi=max(mi,na.rm = rm.na)
id=which(mi==maxi,arr.ind = TRUE)
id[,2]=id[,2]+ser[i,1]-1
gc()
list(maxi,id)
}
if(nworker==1)
{
maxs=lapply(1:num, findmax,m.desc=m.desc,ser=ser,rm.na=rm.na,m.wd=m.wd)
}else{
requireNamespace("parallel")
c1<-parallel::makeCluster(nworker,type="PSOCK")
maxs<-parallel::parLapply(c1,1:num,findmax,m.desc=m.desc,ser=ser,rm.na=rm.na,m.wd=m.wd)
parallel::stopCluster(c1)
gc()
}
max.v=sapply(1:length(maxs), function(j){maxs[[j]][[1]]})
maxvalue=max(max.v)
maxid=which(max.v==maxvalue)
maxrc=lapply(maxid, function(u){maxs[[u]][[2]]})
maxrc=Reduce(rbind,maxrc)
list(max.value=maxvalue,row.col=maxrc)
}
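# Usage sketch (hypothetical file names), assuming a file-backed big.matrix whose
# descriptor file was written by bigmemory in its default dget()-readable text format.
if (FALSE) {
library(bigmemory)
wd <- tempdir()
bm <- filebacked.big.matrix(200, 300, backingfile = "demo.bin",
descriptorfile = "demo.desc", backingpath = wd)
bm[, ] <- matrix(rnorm(200 * 300), 200, 300)
maxbigm(m.desc = "demo.desc", m.wd = wd, nworker = 1)
}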
|
removeGroup.Spectra2D <- function(spectra, rem.group) {
.chkArgs(mode = 21L)
if (missing(rem.group)) stop("Nothing to remove")
chkSpectra(spectra)
spectra <- .remGrpSam(spectra, rem.group, TRUE)
return(spectra)
}
|
"lh" <-
stats::ts(c(2.4, 2.4, 2.4, 2.2, 2.1, 1.5, 2.3, 2.3, 2.5, 2, 1.9,
1.7, 2.2, 1.8, 3.2, 3.2, 2.7, 2.2, 2.2, 1.9, 1.9, 1.8, 2.7, 3,
2.3, 2, 2, 2.9, 2.9, 2.7, 2.7, 2.3, 2.6, 2.4, 1.8, 1.7, 1.5,
1.4, 2.1, 3.3, 3.5, 3.5, 3.1, 2.6, 2.1, 3.4, 3, 2.9))
|
n <- 50
group <- rep(0:4,5:1)
p <- length(group)
X <- matrix(rnorm(n*p),ncol=p)
y <- rnorm(n)
yy <- runif(n) > .5
fit.mle <- lm(y~X)
fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0)
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(y, predict(fit, X=X, type='response'), family='gaussian'), 2, sum), fit$loss)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit <- grpreg(X, y, group, penalty="gel", lambda.min=0)
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(y, predict(fit, X=X, type='response'), family='gaussian'), 2, sum), fit$loss, tol=0.0001)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit.mle <- glm(yy~X, family="binomial")
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="binomial")
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(yy, predict(fit, X=X, type='response'), family='binomial'), 2, sum), fit$loss, tol=0.0001)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="binomial")
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(yy, predict(fit, X=X, type='response'), family='binomial'), 2, sum), fit$loss, tol=0.0001)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit.mle <- glm(yy~X, family="poisson")
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="poisson")
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(yy, predict(fit, X=X, type='response'), family='poisson'), 2, sum), fit$loss, tol=0.0001)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="poisson")
expect_equivalent(logLik(fit)[100], logLik(fit.mle)[1], tol=.001)
expect_equivalent(apply(grpreg:::loss.grpreg(yy, predict(fit, X=X, type='response'), family='poisson'), 2, sum), fit$loss, tol=0.0001)
expect_equivalent(AIC(fit)[100], AIC(fit.mle), tol=.001)
fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0)
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit <- grpreg(X, y, group, penalty="gel", lambda.min=0)
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="binomial")
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="binomial")
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="poisson")
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="poisson")
expect_equivalent(fit$linear.predictor, predict(fit, X))
fit.mle <- lm(y ~ X)
fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0, eps=1e-12)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle))
fit <- grpreg(X, y, group, penalty="gel", lambda.min=0, eps=1e-12)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle))
fit.mle <- glm(yy ~ X, family="binomial")
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="binomial", eps=1e-12, max.iter=1e6)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle), tolerance=1e-5)
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="binomial", eps=1e-12, max.iter=1e6)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle), tolerance=1e-5)
fit.mle <- glm(yy ~ X, family="poisson")
fit <- grpreg(X, yy, group, penalty="grLasso", lambda.min=0, family="poisson", eps=1e-12, max.iter=1e6)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle), tolerance=1e-5)
fit <- grpreg(X, yy, group, penalty="gel", lambda.min=0, family="poisson", eps=1e-12, max.iter=1e6)
expect_equivalent(residuals(fit, lambda=0), residuals(fit.mle), tolerance=1e-5)
n <- 50
group <- rep(0:3,4:1)
p <- length(group)
X <- matrix(rnorm(n*p),ncol=p)
y <- rnorm(n)
yy <- y > 0
fit1 <- grpreg(X, y, group, penalty="grLasso")
fit2 <- grpreg(X, y, group, penalty="grLasso", lambda=fit1$lambda)
expect_equivalent(fit1$beta, fit2$beta)
fit1 <- grpreg(X, y, group, penalty="gel")
fit2 <- grpreg(X, y, group, penalty="gel", lambda=fit1$lambda)
expect_equivalent(fit1$beta, fit2$beta)
fit1 <- grpreg(X, yy, group, penalty="grLasso", family="binomial")
fit2 <- grpreg(X, yy, group, penalty="grLasso", family="binomial", lambda=fit1$lambda)
expect_equivalent(fit1$beta, fit2$beta)
fit1 <- grpreg(X, yy, group, penalty="gel", family="binomial")
fit2 <- grpreg(X, yy, group, penalty="gel", family="binomial", lambda=fit1$lambda)
expect_equivalent(fit1$beta, fit2$beta)
n <- 50
group1 <- rep(0:3,4:1)
group2 <- rep(c("0", "A", "B", "C"), 4:1)
p <- length(group1)
X <- matrix(rnorm(n*p), ncol=p)
X[, group1==2] <- 0
y <- rnorm(n)
yy <- y > 0
fit1 <- grpreg(X, y, group1, penalty="grLasso")
fit2 <- grpreg(X, y, group2, penalty="grLasso")
expect_equivalent(coef(fit1), coef(fit2), tol=0.001)
cvfit <- cv.grpreg(X, y, group, penalty="grLasso")
n <- 50
p <- 11
X <- matrix(rnorm(n*p),ncol=p)
y <- rnorm(n)
group <- rep(0:3, c(1, 2, 3, 5))
gm <- 1:3
plot(fit <- grpreg(X, y, group, penalty="cMCP", lambda.min=0, group.multiplier=gm), main=fit$penalty)
plot(fit <- gBridge(X, y, group, lambda.min=0, group.multiplier=gm), main=fit$penalty)
plot(fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0, group.multiplier=gm), main=fit$penalty)
plot(fit <- grpreg(X, y, group, penalty="grMCP", lambda.min=0, group.multiplier=gm), main=fit$penalty)
plot(fit <- grpreg(X, y, group, penalty="grSCAD", lambda.min=0, group.multiplier=gm), main=fit$penalty)
cvfit <- cv.grpreg(X, y, group, penalty="grLasso", group.multiplier=gm)
n <- 100
group <- rep(1:10, rep(3,10))
p <- length(group)
X <- matrix(rnorm(n*p),ncol=p)
y <- rnorm(n)
yy <- runif(n) > .5
dfmax <- 21
fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0, dfmax=dfmax)
nv <- sapply(predict(fit, type="vars"), length)
expect_true(max(head(nv, length(nv)-1)) <= dfmax)
expect_true(max(nv) > 3)
fit <- grpreg(X, y, group, penalty="gel", lambda.min=0, dfmax=dfmax)
nv <- sapply(predict(fit, type="vars"), length)
expect_true(max(head(nv, length(nv)-1)) <= dfmax)
expect_true(max(nv) > 3)
fit <- grpreg(X, yy, group, penalty="grLasso", family="binomial", lambda.min=0, dfmax=dfmax)
nv <- sapply(predict(fit, type="vars"), length)
expect_true(max(head(nv, length(nv)-1)) <= dfmax)
expect_true(max(nv) > 3)
fit <- grpreg(X, yy, group, penalty="gel", family="binomial", lambda.min=0, dfmax=dfmax)
nv <- sapply(predict(fit, type="vars"), length)
expect_true(max(head(nv, length(nv)-1)) <= dfmax)
expect_true(max(nv) > 3)
gmax <- 7
fit <- grpreg(X, y, group, penalty="grLasso", lambda.min=0, gmax=gmax)
ng <- sapply(predict(fit, type="groups"), length)
expect_true(max(head(ng, length(ng)-1)) <= gmax)
expect_true(max(ng) > 2)
fit <- grpreg(X, y, group, penalty="gel", lambda.min=0, gmax=gmax)
ng <- sapply(predict(fit, type="groups"), length)
expect_true(max(head(ng, length(ng)-1)) <= gmax)
expect_true(max(ng) > 2)
fit <- grpreg(X, yy, group, penalty="grLasso", family="binomial", lambda.min=0, gmax=gmax)
ng <- sapply(predict(fit, type="groups"), length)
expect_true(max(head(ng, length(ng)-1)) <= gmax)
expect_true(max(ng) > 2)
fit <- grpreg(X, yy, group, penalty="gel", family="binomial", lambda.min=0, gmax=gmax)
ng <- sapply(predict(fit, type="groups"), length)
expect_true(max(head(ng, length(ng)-1)) <= gmax)
expect_true(max(ng) > 2)
|
# Positions (offset by 0.5) at which consecutive elements of x change value.
get.line.breaks <- function(x) {
v <- rep(NA, length(x));
for (i in 2:length(v)) {
v[i] <- x[i] != x[i - 1];
}
breaks <- which(v) - 0.5;
return(breaks);
}
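# Usage sketch: breaks fall halfway between consecutive runs of equal values.
if (FALSE) {
get.line.breaks(c("a", "a", "b", "b", "b", "c"))  # 2.5 5.5
}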
|
ridgeLFMM <- function(K, lambda) {
m <- list( K = K,
lambda = lambda,
algorithm = "analytical")
class(m) <- "ridgeLFMM"
m
}
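# Usage sketch: the constructor only records the hyper-parameters; fitting (see
# lfmm_fit.ridgeLFMM below) additionally needs a 'dat' object exposing productY(),
# productYt() and err2_lfmm(), as used by ridgeLFMM_main().
if (FALSE) {
m <- ridgeLFMM(K = 3, lambda = 1e-5)
str(m)
}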
ridgeLFMM_init <- function(m, dat) {
if (is.null(m$B)) {
m$B <- matrix(0.0, ncol(dat$Y), ncol(dat$X))
}
if (is.null(m$U)) {
m$U <- matrix(0.0, nrow(dat$Y), m$K)
}
if (is.null(m$V)) {
m$V <- matrix(0.0, ncol(dat$Y), m$K)
}
m
}
ridgeLFMM_noNA<- function(m, dat) {
P.list <- compute_P(X = dat$X, lambda = m$lambda)
m <- ridgeLFMM_main(m, dat, P.list)
m
}
ridgeLFMM_noNA_alternated<- function(m, dat, relative.err.min = 1e-6, it.max = 100) {
m <- ridgeLFMM_init(m, dat)
err2 <- .Machine$double.xmax
it <- 1
repeat {
Af1 <- function(x, args) {
dat$productY(x)- dat$X %*% crossprod(m$B, x)
}
Atransf <- function(x, args) {
dat$productYt(x) - m$B %*% crossprod(dat$X, x)
}
res.rspectra <- compute_svd(Af1, Atransf, k = m$K, nu = m$K, nv = m$K,
dim = c(nrow(dat$Y), ncol(dat$Y)))
m$U <- res.rspectra$u %*% diag(res.rspectra$d, length(res.rspectra$d), length(res.rspectra$d))
m$V <- res.rspectra$v
Af2 <- function(x) {
t(dat$productYt(x)) - tcrossprod(crossprod(x, m$U), m$V)
}
m$B <- compute_B_ridge(Af2, dat$X, m$lambda)
err2.new <- dat$err2_lfmm(m$U, m$V, m$B)
message("It = ", it, "/", it.max, ", err2 = ", err2.new)
if(it > it.max || (abs(err2 - err2.new) / err2) < relative.err.min) {
break
}
err2 <- err2.new
it <- it + 1
}
m
}
ridgeLFMM_main <- function(m, dat, P.list) {
d <- ncol(dat$X)
n <- nrow(dat$Y)
p <- ncol(dat$Y)
Af1 <- function(x, args) {
args$P %*% args$dat$productY(x)
}
Atransf <- function(x, args) {
args$dat$productYt(t(args$P) %*% x)
}
res.rspectra <- RSpectra::svds(A = Af1,
Atrans = Atransf,
k = m$K,
nu = m$K, nv = m$K,
opts = list(tol = 10e-10),
dim = c(n, p),
args = list(P = P.list$sqrt.P, dat = dat))
m$U <- res.rspectra$u %*% diag(res.rspectra$d[1:m$K], m$K, m$K)
m$U <- P.list$sqrt.P.inv %*% m$U
m$V <- res.rspectra$v
Af2 <- function(x) {
t(dat$productYt(x)) - tcrossprod(crossprod(x, m$U), m$V)
}
m$B <- compute_B_ridge(Af2, dat$X, m$lambda)
m
}
ridgeLFMM_withNA <- function(m, dat, relative.err.min = 1e-6, it.max = 100) {
dat$missing.ind <- which(is.na(dat$Y))
dat$Y <- impute_median(dat$Y)
P.list <- compute_P(X = dat$X, lambda = m$lambda)
err2 <- .Machine$double.xmax
it <- 1
repeat {
m <- ridgeLFMM_main(m, dat, P.list)
dat$impute_lfmm(m$U, m$V, m$B)
err2.new <- dat$err2_lfmm(m$U, m$V, m$B)
if(it > it.max || (abs(err2 - err2.new) / err2) < relative.err.min) {
break
}
err2 <- err2.new
message("It = ", it, "/", it.max, ", err2 = ", err2)
it <- it + 1
}
dat$Y[dat$missing.ind] <- NA
m
}
lfmm_fit.ridgeLFMM <- function(m, dat, it.max = 100, relative.err.min = 1e-6){
if (!(m$algorithm %in% c("analytical", "alternated"))){
stop("algorithm must be analytical or alternated")}
if (anyNA(dat$Y)) {
if (m$algorithm == "analytical"){
stop("Exact method doesn't allow missing data.
Use an imputation method before running lfmm.")
} else {
res <- ridgeLFMM_withNA(m, dat,
relative.err.min = relative.err.min,
it.max = it.max)
}
}
if (!anyNA(dat$Y)) {
if (m$algorithm == "analytical") {
res <- ridgeLFMM_noNA(m, dat)
} else {
res <- ridgeLFMM_noNA_alternated(m, dat,
relative.err.min = relative.err.min,
it.max = it.max)
}
}
res # return the fitted model from either branch
}
lfmm_fit_knowing_loadings.ridgeLFMM <- function(m, dat) {
m$U <- (dat$Y - tcrossprod(dat$X, m$B)) %*% m$V
m
}
|
context("Tilde")
test_that("can read folder and store as multiple data frame", {
folder_1 <- importTilde("Tilde")
folder_2 <- importTilde("Tilde2")
expect_equal(folder_1, folder_2)
})
|
options(width=77)
load("hyper_results.RData")
summary(out2, thresholds=c(6.0, 4.7, 4.4, 4.7, 2.6))
summary(out2, thresholds=c(6.0, 4.7, 4.4, 4.7, 2.6), what="full")
summary(out2, thresholds=c(6.0, 4.7, 4.4, 4.7, 2.6), what="add")
summary(out2, thresholds=c(6.0, 4.7, 4.4, 4.7, 2.6), what="int")
summary(out2, allpairs=FALSE)
summary(out2, thresholds=c(6.0, 4.7, 4.4, 4.7, 2.6), df=TRUE)
summaryScantwoOld(out2, thresholds=c(6, 4, 4))
summary(operm2, alpha=c(0.05,0.20))
summary(out2, perms=operm2, alphas=rep(0.05, 5))
summary(out2, perms=operm2, alphas=c(0.05, 0.05, 0, 0.05, 0.05))
summary(out2, perms=operm2, alphas=c(0.05, 0.05, 0, 0.05, 0.05),
pvalues=TRUE)
plot(out2, chr=c(1,4,6,15),layout=list(cbind(1,2),c(5,1)),
mar1=c(4,4,0,0)+0.1, mar2=c(4,2,0,2)+0.1)
plot(out2, chr=c(1,4,6,15), upper="cond-int",
layout=list(cbind(1,2),c(5,1)),
mar1=c(4,4,0,0)+0.1, mar2=c(4,2,0,2)+0.1)
|
NULL
RawGen <- function(x,
Trait = 1,
Pop = 2,
R.res = NULL,
dist = c("truncated", "log"),
lower = -Inf,
upper = Inf,
format = c("wide", "long"),
complete_cases = FALSE) {
if (!identical(Sys.getenv("TESTTHAT"), "true")) {
.Deprecated("raw_gen")
}
if (!(is.list(x) || is.data.frame(x))) {
stop("x should be a list or a dataframe")
}
dist <- match.arg(dist, choices = c("truncated", "log"))
format <- match.arg(format, choices = c("wide", "long"))
if (!is.logical(complete_cases)) {
stop("complete_cases should be either TRUE or FALSE")
}
if (is.data.frame(x)) {
if (!all(c("M.mu", "F.mu", "M.sdev", "F.sdev", "m", "f") %in% names(x))) {
stop(
"colnames should contain:
M.mu= Male mean
F.mu=Female mean
M.sdev=Male sd
F.sdev=Female sd
m= Male sample size
f=Female sample size
N.B: colnames are case sensitive"
)
}
if (!(Trait %in% seq_along(x))) {
stop("Trait should be number from 1 to ncol(x)")
}
if (!(Pop %in% seq_along(x))) {
stop("Pop should be number from 1 to ncol(x)")
}
if (is.null(R.res)) {
x <- x %>%
drop_na() %>%
as.data.frame()
x$Pop <- x[, Pop]
x$Pop <- factor(x$Pop, levels = unique(x$Pop))
x$Trait <- x[, Trait]
x$Trait <- factor(x$Trait, levels = unique(x$Trait))
if (dist == "log") {
message("Data generation was done using univariate log distribution")
gen_m <- function(x) {
rlnorm(
n = x$m[1],
meanlog = log(x$M.mu^2 / sqrt(x$M.sdev^2 + x$M.mu^2)),
sdlog = sqrt(log(1 + (
x$M.sdev^2 / x$M.mu^2
)))
)
}
gen_f <- function(x) {
rlnorm(
n = x$f[1],
meanlog = log(x$F.mu^2 / sqrt(x$F.sdev^2 + x$F.mu^2)),
sdlog = sqrt(log(1 + (
x$F.sdev^2 / x$F.mu^2
)))
)
}
} else {
message("Data generation was done using univariate truncated distribution")
gen_m <- function(x) {
truncnorm::rtruncnorm(
n = x$m[1],
a = lower,
b = upper,
mean = x$M.mu[1],
sd = x$M.sdev[1]
)
}
gen_f <- function(x) {
truncnorm::rtruncnorm(
n = x$f[1],
a = lower,
b = upper,
mean = x$F.mu[1],
sd = x$F.sdev[1]
)
}
}
m_function <- function(x) {
df <- by(x, list(x$Trait), list)
df <- lapply(df, gen_m)
df <- lapply(df, as.data.frame)
df <- do.call(cbind_fill2, df)
colnames(df) <- levels(x$Trait)
df
}
f_function <- function(x) {
df <- by(x, list(x$Trait), list)
df <- lapply(df, gen_f)
df <- lapply(df, as.data.frame)
df <- do.call(cbind_fill2, df)
colnames(df) <- levels(x$Trait)
df
}
pops <- split.data.frame(x, x$Pop)
male <- lapply(pops, m_function)
male <- do.call(rbind.data.frame, male)
female <- lapply(pops, f_function)
female <- do.call(rbind.data.frame, female)
males <- strsplit(rownames(male), split = "\\.")
females <- strsplit(rownames(female), split = "\\.")
male$Pop <- as.factor(do.call(rbind.data.frame, males)[, 1])
male$Sex <- as.factor(rep("M", nrow(male)))
female$Pop <-
as.factor(do.call(rbind.data.frame, females)[, 1])
female$Sex <- as.factor(rep("F", nrow(female)))
male <-
male[, c(ncol(male), ncol(male) - 1, seq(nlevels(x$Trait)))]
female <-
female[, c(ncol(female), ncol(female) - 1, seq(nlevels(x$Trait)))]
wide <- rbind.data.frame(male, female)
rownames(wide) <- NULL
if (format == "wide") {
if (isTRUE(complete_cases)) {
return(tidyr::drop_na(wide))
} else {
return(wide)
}
}
if (format == "long") {
long <-
pivot_longer(
data = wide,
cols = -c("Sex", "Pop"),
names_to = "Trait",
values_drop_na = complete_cases
)
return(long)
}
}
if (!is.null(R.res)) {
if (!is.matrix(R.res)) {
stop("R.res should be a matrix")
}
x <- dataframe2list(
x = x,
R.res = R.res,
Trait = Trait,
Pop = Pop
)
}
}
if (!(is.data.frame(x))) {
if (!all(c("M.mu", "F.mu", "M.sdev", "F.sdev", "m", "f", "R.res") %in% names(x))) {
stop(
"List should have the following named matricies:
M.mu= Male mean
F.mu=Female mean
M.sdev=Male sd
F.sdev=Female sd
m= Male sample size
f=Female sample size
R.res=Pooled within correlational matrix
N.B: names are case sensitive"
)
}
message("Data generation was done using multivariate truncated distribution")
multi_raw(
x = x,
format = format,
complete_cases = complete_cases,
lower = lower,
upper = upper
)
}
}
|
# Observation weights for each quantile level tau, computed from the EM fit of the
# quantile-regression model under the asymmetric Laplace distribution; each column of
# the returned n x length(tau) matrix is normalised to sum to one.
frame_ald_weight <- function(y, x, tau, error, iter){
ntau <- length(tau)
n <- length(y)
p <- ncol(x)
vchpN <- matrix(0, nrow = n, ncol = ntau)
for(i in 1:ntau){
qr <- EM.qr(y,x,tau[i],error,iter)
beta_qr <- qr$theta[1:p,]
sigma_qr <- qr$theta[p+1]
taup2 <- (2/(tau[i] * (1 - tau[i])))
thep <- (1 - 2 * tau[i]) / (tau[i] * (1 - tau[i]))
delta2 <- (y - x %*% beta_qr)^2/(taup2 * sigma_qr)
gamma2 <- (2 + thep^2/taup2)/sigma_qr
vchpN[, i] <- (besselK(sqrt(delta2 * gamma2), 0.5 - 1)/
(besselK(sqrt(delta2 * gamma2), 0.5))) *
(sqrt(delta2 / gamma2))^(-1)
vchpN[, i] <- vchpN[,i]/sum(vchpN[,i])
}
colnames(vchpN) <- paste("tau", tau, sep = "")
return(vchpN)
}
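# Usage sketch (hypothetical data), assuming EM.qr() from the same source is available.
if (FALSE) {
set.seed(1)
x <- cbind(1, rnorm(100))
y <- x %*% c(1, 2) + rnorm(100)
w <- frame_ald_weight(y, x, tau = c(0.25, 0.5, 0.75), error = 1e-6, iter = 200)
colSums(w)  # each column sums to 1
}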
|
hi <- function (from, to, by = 1L, maxindex = NA, vw=NULL, pack = TRUE, NAs = NULL)
{
minindex <- 1L
maxindex <- as.integer(maxindex)
if (is.null(vw)){
vw.convert <- FALSE
}else{
if (is.matrix(vw))
stop("matrix vw not allowed in hi, use as.hi")
storage.mode(vw) <- "integer"
vw.convert <- TRUE
}
nspec <- length(from)
if (nspec > 0) {
from <- as.integer(from)
to <- rep(as.integer(to), length.out = nspec)
by <- rep(as.integer(by), length.out = nspec)
d <- to - from
N <- d%/%by
if (any(d != 0 & sign(d) != sign(by)) || any(N * by != d))
stop("illegal input to hi")
l <- as.vector(rbind(rep(1L, nspec), N))[-1]
v <- as.vector(rbind(c(0L, from[-1] - to[-nspec]), by))[-1]
v <- v[l > 0]
l <- l[l > 0]
from <- from[1]
to <- to[nspec]
nl <- length(l)
r <- list(lengths = l, values = v)
n <- sum(r$lengths) + 1L
tab <- tabulate(sign(r$values) + 2, 3)
s <- !tab[1] || !tab[3]
if (s) {
class(r) <- "rle"
x <- list(first = from, dat = r, last = to)
class(x) <- "rlepack"
ix <- NULL
re <- tab[1] > 0
if (re)
x <- rev(x)
}else{
re <- FALSE
x <- as.integer(cumsum(c(from, rep(r$values, r$lengths))))
x <- sort.int(x, index.return = TRUE, method = "quick")
ix <- x$ix
x <- rlepack(x$x, pack = pack)
}
x <- unique(x)
if (x$last < 0) {
if (is.na(maxindex))
stop("maxindex is required with negative subscripts")
if ( -x$first > maxindex )
stop("negative subscripts out of range")
re <- FALSE
ix <- NULL
if (vw.convert){
x$first <- x$first - vw[1]
x$last <- x$last - vw[1]
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
x$dat <- x$dat - vw[1]
n <- length(x$dat)
}
}else{
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
n <- length(x$dat)
}
}
}else if (x$first > 0){
if (!is.na(maxindex) && x$last > maxindex )
stop("positive subscripts out of range")
if (vw.convert){
x$first <- vw[1] + x$first
x$last <- vw[1] + x$last
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
x$dat <- vw[1] + x$dat
n <- length(x$dat)
}
}else{
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
n <- length(x$dat)
}
}
}else{
stop("0s and mixed positive/negative subscripts not allowed")
}
}else{
x <- list(first = NA_integer_, dat = integer(), last = NA_integer_)
re <- FALSE
ix <- NULL
n <- 0L
minindex <- 1L
maxindex <- as.integer(maxindex)
}
if (!is.null(NAs))
NAs <- rlepack(as.integer(NAs), pack = pack)
if (!is.null(vw)){
minindex <- vw[1] + 1L
maxindex <- vw[1] + vw[2]
}
ret <- list(
x = x
, ix = ix
, re = re
, minindex = minindex
, maxindex = maxindex
, length = n
, dim = NULL
, dimorder = NULL
, symmetric = FALSE
, fixdiag = NULL
, vw = vw
, NAs = NAs
)
class(ret) <- "hi"
ret
}
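# Usage sketch, mirroring the demo calls near the end of this file: pack several
# integer ranges into one hybrid index.
if (FALSE) {
h <- hi(c(1, 4, 100), c(5, 9, 200), by = c(1, 1, 20))
as.integer(h)
}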
print.hi <- function(x, ...){
cat("hybrid index (hi) from ", x$x$first, " to ", x$x$last, " over ", if (inherits(x$x$dat, "rle")) "<rle position diffs>" else "<plain positions>", " re=", x$re, " ix=", if(is.null(x$ix)) "NULL" else "<reverse sort info>", "\n", sep="")
cat("minindex=", x$minindex, " maxindex=", x$maxindex, " length=", x$length, " poslength=", poslength(x), "\n", sep="")
if (!is.null(x$dim)){
cat("dim=c(", paste(x$dim, collapse=","), "), dimorder=c(", paste(x$dimorder, collapse=","), ")\n", sep="")
}
if (!is.null(x$vw)){
cat("vw=")
print(x$vw, ...)
}
invisible()
}
str.hi <- function(object, nest.lev=0, ...){
nest.str <- paste(rep(" ..", nest.lev), collapse="")
str(unclass(object), nest.lev=nest.lev, ...)
cat(nest.str, ' - attr(*, "class") = ', sep="")
str(class(object), nest.lev=nest.lev, ...)
}
hiparse <- function(x, envir, first=NA_integer_, last=NA_integer_){
if (length(x)>1){
if (x[[1]]=='c'){
values <- integer()
lengths <- integer()
n <- length(x)
i <- 1
while(i<n){
i <- i + 1
r <- Recall(x[[i]], envir, first=first, last=last)
first <- r$first
last <- r$last
values <- c(values, r$values)
lengths <- c(lengths, r$lengths)
}
return(list(first=first, lengths=lengths, values=values, last=last))
}else if (x[[1]]==':'){
from <- eval(x[[2]], envir=envir)
to <- eval(x[[3]], envir=envir)
if (is.logical(from) || is.logical(to))
stop("as.hi.default:hiparse logicals encountered")
if (length(from)!=1 || length(to)!=1)
stop("as.hi.default:hiparse: arguments of : have length!=1")
from <- as.integer(from)
to <- as.integer(to)
if ( is.na(from) || is.na(to) || from==0 || to==0 )
stop("as.hi.default:hiparse NAs or 0s encountered")
if (is.na(first))
first <- from
if (is.na(last)){
if (from>to)
return(list(first=first, lengths=from-to, values=as.integer(-1), last=to))
else
return(list(first=first, lengths=to-from, values=as.integer(1), last=to))
}else{
if (from>to)
return(list(first=first, lengths=c(as.integer(1), from-to), values=c(from-last, as.integer(-1)), last=to))
else
return(list(first=first, lengths=c(as.integer(1), to-from), values=c(from-last, as.integer(1)), last=to))
}
}
}
x <- eval(x, envir=envir)
if (inherits(x,"hi"))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found hi")
if (inherits(x,"ri"))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found ri")
if (inherits(x,"bit"))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found bit")
if (inherits(x,"bitwhich"))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found bitwhich")
if (is.logical(x))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found logical")
if (is.character(x))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found character")
if (is.matrix(x))
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found matrix")
n <- length(x)
if (n>16)
stop("DEBUGINFO visible when try(..., silent=FALSE) in as.hi.call: as.hi.default:hiparse found length>16")
if (n){
x <- as.integer(x)
if (is.na(first))
first <- x[1]
if (is.na(last)){
r <- rle(diff(x))
}else{
r <- rle(diff(c(last, x)))
}
if (is.na(intisasc(r$values)))
stop("as.hi.default:hiparse found NAs")
last <- x[n]
return(list(first=first, lengths=r$lengths, values=r$values, last=last))
}else{
return(list(first=first, lengths=integer(), values=integer(), last=last))
}
}
as.hi.NULL <- function(x, ...){
structure(list(x = structure(list(first = NA_integer_, dat = integer(0),
last = NA_integer_), .Names = c("first", "dat", "last"), class = "rlepack"),
ix = NULL, re = FALSE, minindex = 1L, maxindex = 0L,
length = 0L, dim = NULL, dimorder = NULL, symmetric = FALSE,
fixdiag = NULL, vw = NULL, NAs = NULL), .Names = c("x", "ix",
"re", "minindex", "maxindex", "length", "dim", "dimorder", "symmetric",
"fixdiag", "vw", "NAs"), class = "hi")
}
as.hi.hi <- function(x, ...){
if (!inherits(x$x, "rlepack"))
class(x$x) <- "rlepack"
x
}
as.hi.name <- function(x, envir=parent.frame(), ...){
as.hi(eval(x, envir=envir), ...)
}
"as.hi.(" <- function(x, envir=parent.frame(), ...){
as.hi.call(x[[2]], envir=envir, ...)
}
as.hi.call <- function(
x
, maxindex = NA_integer_
, dim = NULL
, dimorder = NULL
, vw = NULL
, vw.convert = TRUE
, pack = TRUE
, envir = parent.frame()
, ...
){
if ((!is.null(dim) && !dimorderStandard(dimorder)) || !is.null(dim(vw)))
return(as.hi(eval(x, envir=envir), maxindex=maxindex, dim=dim, dimorder=dimorder, vw=vw, vw.convert=vw.convert, pack=pack, ...))
r <- try(hiparse(x, envir=envir), silent=TRUE)
if (inherits(r,"try-error")){
return(as.hi(eval(x, envir=envir), maxindex=maxindex, dim=dim, dimorder=dimorder, vw=vw, vw.convert=vw.convert, pack=pack, ...))
}
if (is.null(vw))
vw.convert <- FALSE
else{
storage.mode(vw) <- "integer"
}
minindex <- 1L
if (is.na(maxindex)){
if(is.null(dim))
maxindex <- maxindex(x)
else
maxindex <- as.integer(prod(dim))
}else{
maxindex <- as.integer(maxindex)
}
if (is.na(r$first)){
x <- rlepack(integer())
ix <- NULL
re <- FALSE
n <- 0L
}else{
nl <- length(r$lengths)
n <- sum(r$lengths) + 1L
tab <- tabulate(sign(r$values)+2, 3)
if (tab[1] && tab[3]){
re <- FALSE
x <- as.integer(cumsum(c(r$first, rep(r$values, r$lengths))))
x <- sort.int(x, index.return=TRUE, method="quick")
ix <- x$ix
x <- rlepack(x$x, pack=pack)
}else{
if (nl){
pack <- 2*length(r$lengths)<n
}else
pack <- FALSE
if (pack){
dat <- list(lengths=r$lengths, values=r$values)
class(dat) <- "rle"
}else{
dat <- as.integer(cumsum(c(r$first, rep(r$values, r$lengths))))
}
x <- list(first=r$first, dat=dat, last=r$last)
class(x) <- "rlepack"
ix <- NULL
if (tab[1]){
re <- TRUE
x <- rev(x)
}else{
re <- FALSE
}
}
if (x$last < 0) {
if (is.na(maxindex))
stop("maxindex is required with negative subscripts")
if ( -x$first > maxindex )
stop("negative subscripts out of range")
re <- FALSE
ix <- NULL
if (vw.convert){
x$first <- x$first - vw[1]
x$last <- x$last - vw[1]
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
x$dat <- x$dat - vw[1]
n <- length(x$dat)
}
}else{
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
n <- length(x$dat)
}
}
}else if (x$first > 0){
if (!is.na(maxindex) && x$last > maxindex )
stop("positive subscripts out of range")
if (vw.convert){
x$first <- vw[1] + x$first
x$last <- vw[1] + x$last
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
x$dat <- vw[1] + x$dat
n <- length(x$dat)
}
}else{
if (inherits(x$dat, "rle")){
n <- sum(x$dat$lengths) + 1L
}else{
n <- length(x$dat)
}
}
}else{
stop("0s and mixed positive/negative subscripts not allowed")
}
}
if (!is.null(vw)){
if (is.null(dim)){
minindex <- vw[1] + 1L
maxindex <- vw[1] + vw[2]
}else{
minindex <- 1L
maxindex <- as.integer(prod(colSums(vw)))
}
}
ret <- list(
x = x
, ix = ix
, re = re
, minindex = minindex
, maxindex = maxindex
, length = n
, dim = NULL
, dimorder = NULL
, symmetric = FALSE
, fixdiag = NULL
, vw = vw
, NAs = NULL
)
class(ret) <- "hi"
return(ret)
}
as.hi.integer <- function(
x
, maxindex = NA_integer_
, dim = NULL
, dimorder = NULL
, symmetric = FALSE
, fixdiag = NULL
, vw = NULL
, vw.convert = TRUE
, dimorder.convert = TRUE
, pack = TRUE
, NAs = NULL
, ...
){
n <- length(x)
if (is.null(vw))
vw.convert <- FALSE
else{
storage.mode(vw) <- "integer"
if (is.null(dim) && !is.null(dim(vw)))
dim <- vw[2,]
}
minindex <- 1L
if (is.na(maxindex)){
if(is.null(dim))
maxindex <- maxindex(x)
else
maxindex <- as.integer(prod(dim))
}else{
maxindex <- as.integer(maxindex)
}
if (n){
if (is.null(dim) || dimorderStandard(dimorder))
dimorder.convert <- FALSE
prechecked <- dimorder.convert || (vw.convert && !( is.null(dim) || dimorderStandard(dimorder) ))
if (prechecked){
if (all(x<0, na.rm=TRUE)){
if (any(x < -maxindex, na.rm=TRUE))
stop("negative subscripts out of range")
x <- seq_len(maxindex)[x]
}else if (all(x>0, na.rm=TRUE)){
if (any(x > maxindex, na.rm=TRUE))
stop("positive subscripts out of range")
}else
stop("0s and mixed positive/negative subscripts not allowed")
x <- arrayIndex2vectorIndex(vectorIndex2arrayIndex(x, dim=dim), dim=dim, dimorder=dimorder, vw=vw)
vw.convert <- FALSE
if (is.null(vw))
maxindex <- prod(dim)
else
maxindex <- prod(colSums(vw))
}
isasc <- intisasc(x)
if (is.na(isasc))
stop("NAs in as.hi.integer")
if (isasc){
ix <- NULL
re <- FALSE
}else{
if (intisdesc(x)){
x <- rev(x)
ix <- NULL
re <- TRUE
}else{
x <- sort.int(x, index.return=TRUE, method="quick")
ix <- x$ix
x <- x$x
re <- FALSE
}
}
if (x[n]<0){
if (is.na(maxindex)){
if (vw.convert && is.null(dim))
maxindex <- vw[[2]]
else
stop("maxindex is required with negative subscripts")
}
if ( -x[1] > maxindex )
stop("negative subscripts out of range")
ix <- NULL
re <- FALSE
x <- unique(x)
n <- length(x)
if (vw.convert){
if (is.null(dim)){
x <- x - vw[1]
}else{
x <- seq_len(maxindex)[x]
n <- length(x)
if (n)
x <- arrayIndex2vectorIndex(vectorIndex2arrayIndex(x, dim=dim, dimorder=dimorder), dimorder=dimorder, vw=vw)
}
}
}else if (x[1]>0){
if ( !is.na(maxindex) && x[n] > maxindex )
stop("positive subscripts out of range")
if (vw.convert){
if (is.null(dim)){
x <- vw[1] + x
}else{
x <- arrayIndex2vectorIndex(vectorIndex2arrayIndex(x, dim=dim, dimorder=dimorder), dimorder=dimorder, vw=vw)
}
}
}else{
stop("0s and mixed positive/negative subscripts not allowed")
}
x <- rlepack(x, pack=pack)
}else{
x <- rlepack(integer())
ix <- NULL
re <- FALSE
}
if (!is.null(vw)){
if (is.null(dim)){
minindex <- vw[1] + 1L
maxindex <- vw[1] + vw[2]
}else{
maxindex <- as.integer(prod(colSums(vw)))
}
}
r <- list(
x = x
, ix = ix
, re = re
, minindex = minindex
, maxindex = maxindex
, length = n
, dim = dim
, dimorder = dimorder
, symmetric = symmetric
, fixdiag = fixdiag
, vw = vw
, NAs = NAs
)
class(r) <- "hi"
r
}
as.hi.which <- function(x, ...){
ret <- as.hi.integer(unclass(x), ...)
ret$maxindex <- maxindex(x)
ret
}
if (FALSE){
dim <- 3:4
dimorder <- 1:2
vw <- rbind(c(1,1), dim, c(1,1))
i <- seq_len(prod(dim))
m <- vectorIndex2arrayIndex(i, dim=dim)
p <- arrayIndex2vectorIndex(m, dim=dim, dimorder=dimorder, vw=vw)
m
vectorIndex2arrayIndex(p, dim=dim, dimorder=dimorder, vw=vw)
h <- as.hi(m, dim=dim, dimorder=dimorder, vw=vw)
str(h)
p
as.integer(h)
i
}
as.hi.matrix <- function(x, dim, dimorder=NULL, symmetric=FALSE, fixdiag=NULL, vw=NULL, pack=TRUE
, ...
){
if (is.null(vw)){
maxindex <- as.integer(prod(dim))
}else{
maxindex <- as.integer(prod(colSums(vw)))
}
if (nrow(x)){
if (x[1]<0)
stop("matrix subscripts must be positive")
if (symmetric){
i <- symmIndex2vectorIndex(x, dim=dim, fixdiag=fixdiag)
if (is.null(fixdiag)){
ret <- as.hi.integer(i, maxindex=maxindex, dim=dim, symmetric=symmetric, fixdiag=fixdiag, vw=vw, pack=pack)
}else{
isna <- is.na(i)
NAs <- (seq_along(i))[isna]
if (length(NAs))
ret <- as.hi.integer(i[!isna], maxindex=maxindex, dim=dim, symmetric=symmetric, fixdiag=fixdiag, vw=vw, pack=pack, NAs=rlepack(NAs))
else
ret <- as.hi.integer(i, maxindex=maxindex, dim=dim, symmetric=symmetric, fixdiag=fixdiag, vw=vw, pack=pack)
}
}else{
ret <- as.hi.integer(
arrayIndex2vectorIndex(x, dim=dim, dimorder=dimorder, vw=vw)
, maxindex=maxindex
, dim=dim
, dimorder=dimorder
, symmetric=symmetric
, fixdiag=fixdiag
, vw=vw
, vw.convert=FALSE
, dimorder.convert=FALSE
, pack=pack
)
}
}else{
ret <- as.hi.integer(integer(), maxindex=maxindex, dim=dim, dimorder=dimorder, symmetric=symmetric, fixdiag=fixdiag, vw=vw, pack=pack)
}
ret
}
as.hi.logical <- function(
x
, maxindex = NA
, dim = NULL
, vw = NULL
, pack = TRUE
, ...
){
if(is.null(dim)){
if (is.na(maxindex))
maxindex <- length(x)
else
maxindex <- as.integer(maxindex)
}else{
maxindex <- as.integer(prod(dim))
}
if (length(x)>maxindex)
stop("as.hi.logical longer than maxindex")
if (maxindex>0){
x <- seq_len(maxindex)[rep(x, length=maxindex)]
}else{
x <- integer()
}
return(as.hi.integer(
x
, maxindex = maxindex
, dim = dim
, vw = vw
, pack = pack
))
}
as.hi.double <- function(x, ...){
as.hi.integer(as.integer(x), ...)
}
as.hi.character <- function(x
, names
, vw = NULL
, vw.convert=TRUE
, ...
){
if (is.atomic(names) && is.character(names))
as.hi.integer(match(x, names), vw=vw, vw.convert=vw.convert, ...)
else
as.hi.integer(names[x], vw=vw, vw.convert=vw.convert, ...)
}
as.integer.hi <- function(
x
, vw.convert=TRUE
, ...
){
if (x$length){
ret <- unsort.hi(rleunpack(x$x), x)
if (is.null(x$dim)){
if (!is.null(x$vw) && vw.convert){
if (ret[1]<0){
ret <- ret + x$vw[1]
}else{
ret <- ret - x$vw[1]
}
}
}else{
if (!is.null(x$vw) && vw.convert){
ret <- arrayIndex2vectorIndex(vectorIndex2arrayIndex(ret, dimorder=x$dimorder, vw=x$vw), dim=x$vw[2,])
}else{
if (!dimorderStandard(x$dimorder))
ret <- arrayIndex2vectorIndex(vectorIndex2arrayIndex(ret, dim=x$dim, dimorder=x$dimorder), dim=x$dim)
}
}
}else{
ret <- integer()
}
ret
}
as.which.hi <- function(x, ...){
i <- as.integer(x, ...)
if (length(i) && i[[1]]<0){
i <- seq_len(maxindex(x))[i]
setattributes(i, list(maxindex = maxindex(x), class = c("booltype", "which")))
}else{
attributes(i) <- list(maxindex = maxindex(x), class = c("booltype", "which"))
}
i
}
as.matrix.hi <- function(
x
, dim = x$dim
, dimorder = x$dimorder
, vw = x$vw
, symmetric = x$symmetric
, fixdiag = x$fixdiag
, ...
){
if (x$length){
if (is.null(dim))
stop("need dim to return matrix subscripts")
if (x$x$first<0)
stop("matrix subscripts must be positive")
if (symmetric){
if (is.null(fixdiag)){
stop("not yet implemented for symmetric matices with fixdiag")
}else{
stop("not yet implemented for symmetric matices without fixdiag (redundant diagonal)")
}
}else{
ret <- unsort.hi(rleunpack(x$x), x)
ret <- vectorIndex2arrayIndex(ret, dim=dim, dimorder=dimorder, vw=vw)
}
ret
}else{
matrix(integer(), 0, length(x$dim))
}
}
as.logical.hi <- function(
x
, maxindex=NULL
, ...
){
if (is.null(maxindex))
maxindex <- maxindex(x)
if (is.na(maxindex))
stop("can't make logical without knowing vector length")
ret <- rep(FALSE, maxindex)
ret[seq_len(maxindex)[as.integer.hi(x)]] <- TRUE
ret
}
as.character.hi <- function(
x
, names
, vw.convert=TRUE
, ...
){
names[as.integer.hi(x, vw.convert=vw.convert)]
}
length.hi <- function(x){
x$length
}
maxindex.hi <- function(
x
, ...
)
{
if (is.null(x$vw))
x$maxindex
else{
if (is.null(x$dim))
x$vw[2]
else
as.integer(prod(x$vw[2,]))
}
}
poslength.hi <- function(
x
, ...
){
if (is.na(x$x$first))
0L
else if (x$x$first<0){
if (is.na(x$maxindex))
stop("poslength.hi requires maxindex")
maxindex.hi(x) - x$length
}else
x$length
}
unsort <- function(
x
, ix
){
orig <- vector(mode=storage.mode(x), length=length(x))
orig[ix] <- x
orig
}
unsort.hi <- function(
x
, index
){
if (is.null(index$ix)){
if (index$re)
orig <- rev(x)
else
orig <- x
}else{
orig <- vector(mode=storage.mode(x), length=length(x))
orig[index$ix] <- x
}
orig
}
unsort.ahi <- function(
x
, index
, ixre = any(sapply(index, function(i){
if (is.null(i$ix)){
if (i$re)
TRUE
else
FALSE
}else{
TRUE
}
}))
, ix = lapply(index, function(i){
if (is.null(i$ix)){
if (i$re)
orig <- rev(seq_len(poslength(i)))
else
orig <- seq_len(poslength(i))
}else{
orig <- i$ix
}
orig
})
){
if (ixre){
x <- do.call("[<-", c(list(x=x), ix, list(value=x)))
}
x
}
subscript2integer <- function(
x
, maxindex=NULL
, names=NULL
){
if(any(is.na(x)))
stop("NAs not allowed in ff subscripting")
if (is.character(x)){
if (is.null(names))
stop("need names")
match(x, names)
}else if(is.logical(x)){
if (is.null(maxindex))
stop("need maxindex with logical subscripts")
seq_len(maxindex)[x]
}else{
if (is.double(x))
x <- as.integer(x)
tab <- tabulate(sign(x)+2, 3)
if (tab[[2]])
stop("no zeros allowed in ff subscripts")
if (tab[[1]] && tab[[3]])
stop("mixing negative and positive subscripts is not allowed")
if (tab[[1]]){
if (is.null(maxindex))
stop("need maxindex with negative subscripts")
seq_len(maxindex)[x]
}else{
x
}
}
}
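# Usage sketch: character, logical and negative subscripts are normalised to
# positive integer positions.
if (FALSE) {
subscript2integer(c(-1, -3), maxindex = 5)                # 2 4 5
subscript2integer(c(TRUE, FALSE, TRUE), maxindex = 3)     # 1 3
subscript2integer(c("b", "c"), names = c("a", "b", "c"))  # 2 3
}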
if (FALSE){
a <- seq(100, 200, 20)
as.hi(substitute(c(1:5, 4:9, a)))
hi(c(1,4, 100),c(5,9, 200), by=c(1,1,20))
as.hi(c(1:5, 4:9, a))
x <- c(1:5, 4:9, a)
as.hi(x)
as.hi(substitute(x))
as.integer(as.hi(x))
as.logical(as.hi(x))
as.logical(as.hi(x, maxindex=200))
length(as.hi.integer(x))
maxindex(as.hi(x))
poslength(as.hi(x, maxindex=200))
library(regtest)
timefactor(as.hi(substitute(c(1:4, 5:9, a))), hi(c(1,5,100),c(4,9, 200), by=c(1,1,20)), 1000, 1000)
timefactor(as.hi(substitute(c(1:4, a, 500:999999))), as.hi(c(1:4, a, 500:999999)), 100, 1)
s1 <- hi(c(1,4, 200),c(5,9, 100), by=c(1,1,-20))
s2 <- as.hi(substitute(c(1:5, 4:9, a)))
s3 <- as.hi(c(1:5, 4:9, a))
identical(s1, s2)
identical(s3, s2)
identical(as.integer(c(1:5, 4:9, a)), as.integer(s1))
identical(as.integer(c(1:5, 4:9, a)), as.integer(s2))
identical(as.integer(c(1:5, 4:9, a)), as.integer(s3))
library(ff)
n <- 10000000
a <- ff(0L, length=n)
load(file="c:/tmp/i.RData")
memory.size(max=T)
j <- rlepack(i)
memory.size(max=T)
debug(as.hi.integer)
j <- as.hi.integer(i)
memory.size(max=T)
system.time(j <- as.hi(quote(i)))
x <- 20:29
as.hi(quote((c(1, 3:10, x))))
load(file="c:/tmp/i.RData")
memory.size(max=T)
gc()
system.time(j <- intrle(i))
memory.size(max=T)
load(file="c:/tmp/i.RData")
rle <-
function (x)
{
if (!is.vector(x) && !is.list(x))
stop("'x' must be an atomic vector")
n <- length(x)
if (n == 0)
return(list(lengths = integer(0), values = x))
y <- x[-1] != x[-n]
i <- c(which(y | is.na(y)), n)
ret <- list(lengths = diff(c(0L, i)), values = x[i])
class(ret) = "rle"
ret
}
gc()
j <- rle(i)
memory.size(max=T)
load(file="c:/tmp/i.RData")
gc()
j <- rle(i)
memory.size(max=T)
}
|
testthat::setup({
if (!dir.exists(normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest"),
winslash = "/",
mustWork = FALSE))) {
dir.create(path = normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"model"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE)
file.copy(from = file.path("resourceFiles",
"lda.rds"),
to = normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"model",
"lda.rds"),
winslash = "/",
mustWork = FALSE))
dir.create(path = normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"wrongModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE)
saveRDS("", normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"wrongModel",
"lda.rds"),
winslash = "/",
mustWork = FALSE))
}
})
testthat::test_that("Model: initialize function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- testthat::expect_message(Model$new(dir.path = dir.path,
model = model),
"[Model][INFO] Save directory not exist. Creating...",
fixed = TRUE)
testthat::expect_is(modelClass,
"Model")
testthat::expect_true(file.exists(normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)))
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"model"),
winslash = "/",
mustWork = FALSE)
testthat::expect_message(Model$new(dir.path = dir.path, model = model),
"[Model][INFO] Model 'lda' already exists. Loading...",
fixed = TRUE)
testthat::expect_message(Model$new(dir.path = dir.path, model = model),
"[Model][INFO] 'lda', Linear Discriminant Analysis', Discriminant Analysis' has been succesfully loaded!",
fixed = TRUE)
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"initializeTest",
"wrongModel"),
winslash = "/",
mustWork = FALSE)
testthat::expect_message(Model$new(dir.path = dir.path, model = model),
"[Model][ERROR] Unable to load trained model. Task not performed",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: initialize function checks parameter", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
testthat::expect_error(Model$new(dir.path = dir.path,
model = NULL),
"[Model][FATAL] Model was not defined. Aborting...",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: isTrained function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_false(modelClass$isTrained())
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getDir function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_equal(modelClass$getDir(), dir.path)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getName function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_equal(modelClass$getName(), model$name)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getFamily function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_equal(modelClass$getFamily(), model$family)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getDescription function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_equal(modelClass$getDescription(), model$description)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: train function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_true(modelClass$isTrained())
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"[Model][INFO][lda] Model has already been trained",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
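# Each scenario below rebuilds the same 50-observation training set, the same
# 'lda' model definition and the same TwoClass/UseProbability training
# configuration; only the assertions under test change between blocks.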
testthat::test_that("Model: train function checks parameter", {
dir.path <- normalizePath(path = file.path(tempdir(),
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_error(modelClass$train(train.set = NULL,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"[Model][FATAL][lda] Cannot perform trainning stage. Train set must be defined as 'data.frame' type. Aborting...",
fixed = TRUE)
testthat::expect_error(modelClass$train(train.set = data.frame(),
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"[Model][FATAL][lda] Cannot perform trainning stage. Train set is empty. Aborting...",
fixed = TRUE)
testthat::expect_error(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = NULL,
metric = metric,
logs = logs),
"[Model][FATAL][lda] TrainFunction must be inherits from 'TrainFunction' class. Aborting...",
fixed = TRUE)
testthat::expect_error(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = "WRONG",
logs = logs),
"[Model][FATAL][lda] Metric is not defined or unavailable. Must be a [ROC, Sens, Spec, Kappa, Accuracy, TCR_9, MCC, PPV] type. Aborting...",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getTrainedModel function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_message(modelClass$getTrainedModel(),
"[Model][WARNING] Model 'lda' is not trained. Task not performed",
fixed = TRUE)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_type(modelClass$getTrainedModel(), "list")
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
    unlink(x = normalizePath(path = file.path(tempdir(),
                                              "testModel"),
                             winslash = "/",
                             mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getExecutionTime function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_message(modelClass$getExecutionTime(),
"[Model][WARNING] Model 'lda' is not trained. Task not performed",
fixed = TRUE)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_type(modelClass$getExecutionTime(), "double")
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getPerformance function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_is(modelClass$getPerformance(), "numeric")
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getPerformance function checks parameter", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_error(modelClass$getPerformance(metric = "WRONG"),
"[Model][FATAL] Metric is not defined or unavailable. Must be a [ROC, Sens, Spec, Kappa, Accuracy, TCR_9, MCC, PPV] type. Aborting...",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: getConfiguration function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpath"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_message(modelClass$getConfiguration(),
"[Model][WARNING] Model 'lda' is not trained. Task not performed",
fixed = TRUE)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_type(modelClass$getConfiguration(), "list")
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: save function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpathSave"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_message(modelClass$save(),
"[Model][ERROR] Cannot save untrained model. Task not performed",
fixed = TRUE)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_message(modelClass$save(replace = FALSE),
"[Model][INFO][lda] Model succesfully saved at: ",
fixed = TRUE)
testthat::expect_true(file.exists(file.path(dir.path,
"lda.rds")))
testthat::expect_message(modelClass$save(replace = FALSE),
"[Model][INFO][lda] Model already exists. Model not saved",
fixed = TRUE)
testthat::expect_message(modelClass$save(replace = TRUE),
"[Model][WARNING][lda] Model already exists. Replacing previous model",
fixed = TRUE)
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
testthat::test_that("Model: remove function works", {
dir.path <- normalizePath(path = file.path(tempdir(),
"testModel",
"dirpathRemove"),
winslash = "/",
mustWork = FALSE)
model <- data.frame(name = c("lda"),
description = c("Linear Discriminant Analysis"),
family = c("Discriminant Analysis"),
library = c("MASS"),
prob = c(TRUE),
row.names = c(63))
modelClass <- Model$new(dir.path = dir.path,
model = model)
testthat::expect_message(modelClass$save(),
"[Model][ERROR] Cannot save untrained model. Task not performed",
fixed = TRUE)
train.set <- data.frame(Gender = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1),
Hemochro = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
HIV = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
Hallmark = c(1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1),
Grams_day = c(50, 100, 100, 60, 0, 500, 200, 80, 60, 100, 100, 100, 100, 100, 100, 100, 0, 100, 80, 100, 100, 100, 100, 0, 0, 0, 75, 180, 75, 0, 0, 0, 100, 100, 250, 75, 200, 30, 4, 99, 87, 35, 90, 100, 12, 24, 100, 107, 86, 124),
Ascites = c(2, 2, 2, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 3, 3, 1, 2, 2, 1, 1, 1, 1, 1, 1, 3, 3, 1, 1, 1, 1, 3, 1, 1, 2, 1, 1, 3, 2, 1, 1),
INR = c(0.96, 1.58, 3.14, 1.53, 1.2, 1.44, 1.29, 1.06, 1.3, 1.32, 1.24, 1.09, 1.18, 1.2, 1.35, 1.24, 1.11, 1.92, 1.34, 2.08, 1.63, 1.23, 1.09, 1.04, 1.93, 1.17, 1.48, 1.46, 1.56, 1.13, 1.24, 1.19, 1.26, 1, 1.39, 1.63, 2.14, 1.27, 1.39, 1.26, 1.46, 1.36, 1.3, 1.62, 1.12, 1.2, 1.57, 1.35, 1.55, 1.33),
MCV = c(79.8, 91.5, 107.5, 90.1, 93.8, 103.4, 101, 90.7, 89.5, 96.1, 97.7, 105, 93.8, 91.4, 95.1, 83, 92.4, 119, 81.8, 91.1, 99, 90, 97.6, 98, 93.3, 93.8, 101.5, 103.8, 85.6, 95.1, 96.4, 97.3, 106.3, 94, 93.9, 101.6, 117.3, 95.6, 88, 103.8, 109.5, 88.9, 87.3, 101.8, 92.3, 86.5, 112.2, 95.2, 96.3, 86),
Platelets = c(472, 85, 70, 207000, 91000, 101000, 109000, 187, 108, 268000, 170000, 230000, 167000, 275000, 216, 1.71, 270000, 80000, 561, 91000, 75000, 38000, 169000, 77000, 406000, 144000, 120, 53000, 132000, 254000, 280000, 133000, 122, 157000, 88000, 172000, 118000, 272105.73, 174381.93, 175896.57, 68274.47, 114.42, 130783.68, 85246.57, 270585.81, 273354.81, 68596.59, 332033.67, 195.76, 101884.41),
Albumin = c(3.3, 3.4, 1.9, 4.4, 4.5, 3.4, 3.6, 4.5, 3, 3.4, 4.2, 4.2, 4.9, 3.11, 2.7, 3.9, 4, 3.1, 2.6, 2.4, 3.5, 2.2, 4.2, 3.5, 2.9, 3.8, 2.2, 3.2, 2.6, 3.68, 4.1, 4.5, 3, 3.88, 2.7, 3.44, 4.8, 3.73, 3.21, 2.43, 2.57, 3.47, 3.79, 3.62, 3.9, 2.89, 2.51, 3.26, 2.93, 3.31),
AST = c(68, 122, 59, 36, 96, 87, 35, 47, 85, 29, 85, 26, 29, 94, 523, 28, 73, 357, 43, 145, 85, 51, 31, 192, 266, 74, 71, 87, 219, 38, 52, 63, 401, 51, 73, 95, 60, 124, 20, 114, 86, 80, 60, 184, 75, 67, 106, 56, 68, 48),
ALP = c(109, 396, 63, 74, 70, 147, 141, 97, 293, 135, 227, 92, 68, 350, 397, 120, 103, 174, 88, 190, 165, 474, 91, 262, 670, 312, 97, 239, 363, 127, 123, 89, 93, 141, 44, 139, 170, 251, 913, 162, 97, 335, 181, 176, 132, 131, 85, 231, 304, 197),
Creatinine = c(2.1, 0.9, 0.59, 0.73, 0.88, 0.9, 0.68, 0.75, 0.67, 0.9, 1.72, 0.8, 0.72, 1.7, 0.82, 0.58, 1.24, 0.99, 0.9, 0.9, 0.7, 2.69, 1.9, 1.2, 4.82, 1.01, 2.82, 0.72, 0.55, 1.11, 0.82, 0.78, 1, 1.1, 0.96, 0.9, 0.74, 1.1, 1.3, 0.68, 0.82, 0.7, 1.17, 0.82, 1.29, 0.61, 0.8, 0.78, 1.07, 1.08),
Dir_Bil = c(0.1, 1.4, 1.2, 0.8, 0.2, 1.6, 0.7, 0.2, 0.4, 0.3, 0.3, 0.3, 0.3, 0.8, 5.5, 0.85, 0.2, 4.6, 0.5, 9.6, 1.7, 1, 0.85, 19.5, 29.3, 0.5, 0.3, 1, 1.5, 0.2, 0.5, 0.8, 0.4, 0.33, 1.2, 2.9, 1.8, 1.37, 0.15, 0.56, 3.1, 0.62, 0.75, 2.5, 0.27, 0.25, 1.18, 1.04, 1.57, 0.63),
Iron = c(28, 53, 85, 94, 82, 67, 152.6, 87, 94, 59, 104, 52.5, 52.5, 84, 56, 32, 45, 178, 19, 224, 200, 224, 53, 121, 106, 87, 92, 152.6, 40, 28, 131, 78, 124, 94, 37, 111, 161, 50.4, 15.5, 130.5, 50.3, 77.8, 99.1, 95.8, 49.6, 56, 56.9, 69.3, 71.2, 94.4),
Sat = c(6, 22, 73, 27, 24, 34, 39, 26, 27, 15, 37, 37, 37, 37, 27, 10, 21, 90, 8, 95, 87, 95, 21, 27, 67, 25, 56, 27, 12, 10, 78, 30, 51, 39, 17, 94, 96, 25, 5, 78, 19, 28, 38, 44, 23, 25, 27, 23, 29, 83),
Ferritin = c(16, 111, 982, 70, 80, 774, 76.9, 84, 70, 22, 635, 856, 856, 497, 742, 18, 802, 960, 141, 363, 316, 363, 278, 749, 2165, 81, 48.9, 76.9, 57, 308, 1316, 220, 642, 344, 419, 1600, 297, 828, 147, 1323, 342, 173, 620, 501, 766, 307, 366, 70, 106, 859),
Class = c("X1", "X0", "X1", "X1", "X1", "X0", "X1", "X1", "X0", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X0", "X0", "X1", "X1", "X1", "X1", "X1", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0", "X0"),
row.names = c(1, 4, 7, 11, 12, 18, 19, 20, 27, 31, 32, 37, 44, 50, 53, 56, 58, 74, 77, 78, 80, 88, 93, 100, 102, 103, 104, 118, 121, 127, 134, 137, 143, 144, 151, 154, 162, 165, 166, 171, 176, 178, 181, 182, 186, 191, 193, 199, 200, 201))
fitting <- readRDS(file.path("resourceFiles",
"testModel",
"fitting.rds"))
trFunction <- TwoClass$new(method = "cv",
number = 10,
savePredictions = "final",
classProbs = TRUE,
allowParallel = TRUE,
verboseIter = FALSE,
seed = 1844523989)
trFunction$create(summaryFunction = UseProbability$new(),
search.method = "random")
metric <- "PPV"
logs <- normalizePath(path = file.path(tempdir(),
"testModel"),
winslash = "/",
mustWork = FALSE)
file.create(file.path(logs, "error.log"))
testthat::expect_message(modelClass$train(train.set = train.set,
fitting = fitting,
trFunction = trFunction,
metric = metric,
logs = logs),
"\\[Model\\]\\[INFO\\]\\[lda\\] Finished in \\[[0-9.]+ segs\\]",
perl = TRUE)
testthat::expect_message(modelClass$remove(),
"[Model][ERROR] Cannot remove unsaved model. Task not performed",
fixed = TRUE)
testthat::expect_message(modelClass$save(replace = FALSE),
"[Model][INFO][lda] Model succesfully saved at: ",
fixed = TRUE)
testthat::expect_true(file.exists(file.path(dir.path,
"lda.rds")))
modelClass$remove()
testthat::expect_false(file.exists(file.path(dir.path,
"lda.rds")))
})
testthat::teardown({
if (file.exists(normalizePath(path = file.path(tempdir(),
"testModel",
"dirpathRemove"),
winslash = "/",
mustWork = FALSE))) {
unlink(x = normalizePath(path = file.path(tempdir(),
"testModel",
"dirpathRemove"),
winslash = "/",
mustWork = FALSE),
recursive = TRUE,
force = TRUE)
}
})
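# S3 generic: regplot() dispatches to class-specific plotting methods via UseMethod().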
regplot <- function(x, ...)
UseMethod("regplot")
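# File-type helpers: each predicate reports whether 'path' looks like a plain R
# script, an .Rprofile, an R Markdown/Rmarkdown file, a Sweave (.Rnw) file or an
# unsaved buffer. dir_without_.() recursively lists files (including hidden ones)
# and prefixes the results with 'path' unless the directory is ".".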
is_plain_r_file <- function(path) {
grepl("\\.R$", path, ignore.case = TRUE)
}
is_rprofile_file <- function(path) {
grepl(".rprofile", path, ignore.case = TRUE)
}
is_rmd_file <- function(path) {
grepl("\\.(Rmd|Rmarkdown)$", path, ignore.case = TRUE)
}
is_rnw_file <- function(path) {
grepl("\\.Rnw$", path, ignore.case = TRUE)
}
is_unsaved_file <- function(path) {
path == ""
}
map_filetype_to_pattern <- function(filetype) {
paste0("(", paste(set_and_assert_arg_filetype(filetype), collapse = "|"), ")$")
}
dir_without_. <- function(path, recursive = TRUE, ...) {
purrr::map(path, dir_without_._one, recursive = recursive, ...) %>%
unlist()
}
dir_without_._one <- function(path, recursive, ...) {
relative <- dir(
path = path,
full.names = FALSE,
ignore.case = TRUE,
recursive = recursive,
all.files = TRUE,
...
)
if (path == ".") {
return(relative)
}
file.path(path, relative)
}
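# hiersimu.default(): aggregates the statistic FUN over nested sampling levels
# (columns of 'x', from finest to coarsest), optionally rescales each level by the
# highest one ('relative'), and compares the observed values against null matrices
# generated through oecosimu() with the chosen null-model 'method'.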
hiersimu.default <-
function(y, x, FUN, location = c("mean", "median"),
relative = FALSE, drop.highest = FALSE, nsimul=99,
method = "r2dtable", ...)
{
lhs <- as.matrix(y)
if (missing(x))
        x <- cbind(level_1=seq_len(nrow(lhs)),
                   level_2=rep(1, nrow(lhs)))
rhs <- data.frame(x)
rhs[] <- lapply(rhs, as.factor)
rhs[] <- lapply(rhs, droplevels, exclude = NA)
nlevs <- ncol(rhs)
if (is.null(colnames(rhs)))
colnames(rhs) <- paste("level", 1:nlevs, sep="_")
tlab <- colnames(rhs)
l1 <- sapply(rhs, function(z) length(unique(z)))
if (!any(sapply(2:nlevs, function(z) l1[z] <= l1[z-1])))
stop("number of levels are inappropriate, check sequence")
rval <- list()
rval[[1]] <- rhs[,nlevs]
nCol <- nlevs - 1
if (nlevs > 1) {
nCol <- nlevs - 1
for (i in 2:nlevs) {
rval[[i]] <- interaction(rhs[,nCol], rval[[(i-1)]], drop=TRUE)
nCol <- nCol - 1
}
}
rval <- as.data.frame(rval[rev(1:length(rval))])
l2 <- sapply(rval, function(z) length(unique(z)))
if (any(l1 != l2))
stop("levels are not perfectly nested")
    fullgamma <- nlevels(rhs[,nlevs]) == 1
if (fullgamma && drop.highest)
nlevs <- nlevs - 1
    if (nlevs == 1 && relative)
        stop("'relative=TRUE' makes no sense with one level")
ftmp <- vector("list", nlevs)
for (i in 1:nlevs) {
ftmp[[i]] <- as.formula(paste("~", tlab[i], "- 1"))
}
burnin <- if (is.null(list(...)$burnin))
0 else list(...)$burnin
thin <- if (is.null(list(...)$thin))
1 else list(...)$thin
if (!is.function(FUN))
stop("'FUN' must be a function")
location <- match.arg(location)
aggrFUN <- switch(location,
"mean" = mean,
"median" = median)
evalFUN <- function(x) {
if (fullgamma && !drop.highest) {
tmp <- lapply(1:(nlevs-1), function(i) t(model.matrix(ftmp[[i]], rhs)) %*% x)
tmp[[nlevs]] <- matrix(colSums(x), nrow = 1, ncol = ncol(x))
} else {
tmp <- lapply(1:nlevs, function(i) t(model.matrix(ftmp[[i]], rhs)) %*% x)
}
a <- sapply(1:nlevs, function(i) aggrFUN(FUN(tmp[[i]])))
if (relative)
a <- a / a[length(a)]
a
}
sim <- oecosimu(lhs, evalFUN, method = method, nsimul=nsimul,
burnin=burnin, thin=thin)
names(sim$statistic) <- attr(sim$oecosimu$statistic, "names") <- tlab[1:nlevs]
call <- match.call()
call[[1]] <- as.name("hiersimu")
attr(sim, "call") <- call
attr(sim, "FUN") <- FUN
attr(sim, "location") <- location
attr(sim, "n.levels") <- nlevs
attr(sim, "terms") <- tlab
attr(sim, "model") <- rhs
class(sim) <- c("hiersimu", class(sim))
sim
}
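# Boolean model with deterministic disc grains of radius 'discr' driven by a
# Poisson germ process of intensity 'lambda': rbdd() simulates one realisation
# inside 'window' (germs are generated in a dilated window to avoid edge effects),
# and the helpers below give the closed-form coverage probability, intensity,
# disc radius, disc set covariance and coverage covariance.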
rbdd <- function(lambda, discr, window, seed = NULL){
grainlib <- solist(disc(radius = discr))
bufferdist <- 1.1 * discr
if (!missing(seed)){set.seed(seed)}
pp <- rpoispp(lambda = lambda, win = dilation(window, bufferdist), nsim = 1, drop = TRUE)
if (pp$n == 0 ){return(complement.owin(window))}
xibuffer <- placegrainsfromlib(pp, grainlib)
xi <- intersect.owin(xibuffer, window)
return(xi)
}
bddcoverageprob <- function(lambda, discr){
return (1 - exp(-pi * discr ^ 2 * lambda))
}
bddlambda <- function(coverp, discr){
return(log(1 - coverp)/ (-pi * discr ^2))
}
bdddiscr <- function(coverp, lambda){
return(sqrt(log(1 - coverp)/ (-pi * lambda)))
}
setcovdisc <- function(r, discr){
setcovariance <- r*0
rsubset <- r[r < 2 * discr]
setcovariance[r < 2 * discr] <- 2 * discr ^ 2 * acos(rsubset / (2 * discr)) - (rsubset / 2) * sqrt(4 * discr ^ 2 - rsubset ^ 2)
return(setcovariance)
}
bddcovar.iso <- function(r, lambda, discr){
expectedsetcovariance <- setcovdisc(r, discr)
p <- 1 - exp(-pi * discr ^ 2 * lambda)
covariance <- 2 * p - 1 + (1 - p ) ^ 2 * exp(lambda * expectedsetcovariance)
return(covariance)
}
bddcovar.vec <- function(X, Y, lambda, discr){
rlist <- sqrt(X ^ 2 + Y ^ 2)
  covar <- numeric(length(rlist))
for (i in 1:length(rlist)){
covar[i] <- bddcovar.iso(rlist[i], lambda = lambda, discr = discr)
}
return(covar)
}
bddcovar <- function(xrange, yrange, eps, lambda, discr){
if (length(eps) == 1){ eps <- c(eps, eps) }
xpts <- seq(from = xrange[1], to = xrange[2], by = eps[1])
ypts <- seq(from = yrange[1], to = yrange[2], by = eps[2])
mat <- outer(xpts, ypts, FUN = "bddcovar.vec", lambda = lambda, discr = discr)
mat <- t(mat)
return(im(mat, xcol = xpts, yrow = ypts))
}
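## Illustration of how the helpers fit together: rbdd() simulates the Boolean
## disc model by placing discs of radius discr at Poisson points in a dilated
## window and clipping to `window`, while bddcoverageprob(), bddlambda() and
## bdddiscr() are mutual inverses for that model, e.g.
##   p <- bddcoverageprob(lambda = 4, discr = 0.1)  # 1 - exp(-pi * 0.1^2 * 4)
##   bddlambda(coverp = p, discr = 0.1)             # recovers ~4
##   bdddiscr(coverp = p, lambda = 4)               # recovers ~0.1
## bddcovar() tabulates the two-point coverage covariance on a grid (returned
## as a spatstat im object) using the disc set covariance from setcovdisc().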
|
setGeneric("removePane",
function(object, sheet) standardGeneric("removePane"))
setMethod("removePane",
signature(object = "workbook", sheet = "numeric"),
function(object, sheet) {
xlcCall(object, "removePane", as.integer(sheet - 1))
invisible()
}
)
setMethod("removePane",
signature(object = "workbook", sheet = "character"),
function(object, sheet) {
xlcCall(object, "removePane", sheet)
invisible()
}
)
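## A minimal usage sketch, assuming an XLConnect-style workbook object loaded
## elsewhere (loadWorkbook() is an assumption, not shown above):
##   removePane(wb, sheet = "Sheet1")  # character method: sheet passed by name
##   removePane(wb, sheet = 1)         # numeric method: converted to a 0-based index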
|
library(hamcrest)
expected <- c(0x1.4bb02e9fe3622p+3 + 0x0p+0i, 0x1.39ff092cd5b83p+2 + 0x0p+0i,
0x1.c12e2781c3ba8p+1 + 0x0p+0i, 0x1.87b305864dbc8p+1 + 0x0p+0i,
0x1.581a71f25d7c3p+1 + 0x0p+0i, 0x1.3dd023c523d6cp+1 + 0x0p+0i,
0x1.256a24d3a0dcbp+1 + 0x0p+0i, 0x1.1562265964a27p+1 + 0x0p+0i,
0x1.05dcd693611fdp+1 + 0x0p+0i, 0x1.f56ff316f56dep+0 + 0x0p+0i,
0x1.df6710c5b99f9p+0 + 0x0p+0i, 0x1.ceb1c78b12145p+0 + 0x0p+0i,
0x1.bdfa2312d9fccp+0 + 0x0p+0i, 0x1.b0d214c3536d6p+0 + 0x0p+0i,
0x1.a38eca971100cp+0 + 0x0p+0i, 0x1.98d560e3ebbdcp+0 + 0x0p+0i,
0x1.8df7e6caae9e3p+0 + 0x0p+0i, 0x1.84ffeaf6e28f1p+0 + 0x0p+0i,
0x1.7be1a850068a4p+0 + 0x0p+0i, 0x1.743b3ba299357p+0 + 0x0p+0i,
0x1.6c6f24e5baa4bp+0 + 0x0p+0i, 0x1.65ce45437e345p+0 + 0x0p+0i,
0x1.5f09861bef288p+0 + 0x0p+0i, 0x1.593872cb1c13bp+0 + 0x0p+0i,
0x1.5345b87a1e3eap+0 + 0x0p+0i, 0x1.4e1d2ccff7ac4p+0 + 0x0p+0i,
0x1.48d547873930ap+0 + 0x0p+0i, 0x1.4437cbc8d48c8p+0 + 0x0p+0i,
0x1.3f7d2fbc01013p+0 + 0x0p+0i, 0x1.3b5425902d6b8p+0 + 0x0p+0i,
0x1.37100ec02207dp+0 + 0x0p+0i, 0x1.3349c3234d575p+0 + 0x0p+0i,
0x1.2f6a52cdfa62dp+0 + 0x0p+0i, 0x1.2bf8b0af69181p+0 + 0x0p+0i,
0x1.286fa58b53ba6p+0 + 0x0p+0i, 0x1.25474ea6d64b2p+0 + 0x0p+0i,
0x1.22092062dbc14p+0 + 0x0p+0i, 0x1.1f20c92a81fcfp+0 + 0x0p+0i,
0x1.1c240577fddbap+0 + 0x0p+0i, 0x1.1973fe296e778p+0 + 0x0p+0i,
0x1.16b0d22aac01ep+0 + 0x0p+0i, 0x1.1432af090055bp+0 + 0x0p+0i,
0x1.11a28f805d3b3p+0 + 0x0p+0i, 0x1.0f50e6f9873fap+0 + 0x0p+0i,
0x1.0cee4e34ec015p+0 + 0x0p+0i, 0x1.0ac486be8f8e2p+0 + 0x0p+0i,
0x1.088ac226fd751p+0 + 0x0p+0i, 0x1.0684eb907197ap+0 + 0x0p+0i,
0x1.046ff4a9e62d6p+0 + 0x0p+0i, 0x1.028aa9e8e976cp+0 + 0x0p+0i,
0x1.009707cf79606p+0 + 0x0p+0i, 0x1.fd9eae52dae35p-1 + 0x0p+0i,
0x1.f9f40ce1b101ap-1 + 0x0p+0i, 0x1.f69abcf22dd41p-1 + 0x0p+0i,
0x1.f3277bd1727dep-1 + 0x0p+0i, 0x1.efffbc6aea254p-1 + 0x0p+0i,
0x1.ecbf3e847b90dp-1 + 0x0p+0i, 0x1.e9c51806f81bfp-1 + 0x0p+0i,
0x1.e6b34c66328ap-1 + 0x0p+0i, 0x1.e3e33c18ba9d6p-1 + 0x0p+0i,
0x1.e0fc89433e527p-1 + 0x0p+0i, 0x1.de53709ad626ap-1 + 0x0p+0i,
0x1.db94a38897518p-1 + 0x0p+0i, 0x1.d90fba49ba58cp-1 + 0x0p+0i,
0x1.d675f841c675ap-1 + 0x0p+0i, 0x1.d412c0ed295fep-1 + 0x0p+0i,
0x1.d19b7bb6a2c95p-1 + 0x0p+0i, 0x1.cf57b9d18eef1p-1 + 0x0p+0i,
0x1.cd00a5c80086ep-1 + 0x0p+0i, 0x1.cada55a8767fp-1 + 0x0p+0i,
0x1.c8a1615aa0a14p-1 + 0x0p+0i, 0x1.c696b131ac547p-1 + 0x0p+0i,
0x1.c479fe444a9c6p-1 + 0x0p+0i, 0x1.c289482f01142p-1 + 0x0p+0i,
0x1.c087254b35f8fp-1 + 0x0p+0i, 0x1.beaeea3dbff3cp-1 + 0x0p+0i,
0x1.bcc5cdddda0e7p-1 + 0x0p+0i, 0x1.bb04b14479ba8p-1 + 0x0p+0i,
0x1.b933353a6aa91p-1 + 0x0p+0i, 0x1.b787f9332680ap-1 + 0x0p+0i,
0x1.b5ccd6cad464p-1 + 0x0p+0i, 0x1.b43658dfc8b03p-1 + 0x0p+0i,
0x1.b2906584cd622p-1 + 0x0p+0i, 0x1.b10d9bd3697c8p-1 + 0x0p+0i,
0x1.af7bc62625eaep-1 + 0x0p+0i, 0x1.ae0bbce307124p-1 + 0x0p+0i,
0x1.ac8d0a2c6b7p-1 + 0x0p+0i, 0x1.ab2ee1764acfbp-1 + 0x0p+0i,
0x1.a9c26b6c83c28p-1 + 0x0p+0i, 0x1.a8755562ec538p-1 + 0x0p+0i,
0x1.a71a48336dd4ep-1 + 0x0p+0i, 0x1.a5dd8747c395ep-1 + 0x0p+0i,
0x1.a4931fdd07af2p-1 + 0x0p+0i, 0x1.a3660555ea819p-1 + 0x0p+0i,
0x1.a22b8fd0c1d0ep-1 + 0x0p+0i, 0x1.a10d7a7913263p-1 + 0x0p+0i,
0x1.9fe250d6a8085p-1 + 0x0p+0i, 0x1.9ed2abd280ad5p-1 + 0x0p+0i,
0x1.9db634b9394ffp-1 + 0x0p+0i, 0x1.9cb4767bf6c7fp-1 + 0x0p+0i,
0x1.9ba6242a40bddp-1 + 0x0p+0i, 0x1.9ab1cd89873b5p-1 + 0x0p+0i,
0x1.99b11ce250cbfp-1 + 0x0p+0i, 0x1.98c9b842775f2p-1 + 0x0p+0i,
0x1.97d62ff3b6636p-1 + 0x0p+0i, 0x1.96fb508a92ef9p-1 + 0x0p+0i,
0x1.9614804abbe98p-1 + 0x0p+0i, 0x1.9545c17630aabp-1 + 0x0p+0i,
0x1.946b4155ef408p-1 + 0x0p+0i, 0x1.93a84603f7325p-1 + 0x0p+0i,
0x1.92d9b5d1d6eb4p-1 + 0x0p+0i, 0x1.922227f81c39dp-1 + 0x0p+0i,
0x1.915f2eb41fe65p-1 + 0x0p+0i, 0x1.90b2bed56a097p-1 + 0x0p+0i,
0x1.8ffb0a32d431ep-1 + 0x0p+0i, 0x1.8f596ef0d3fe9p-1 + 0x0p+0i,
0x1.8eacb2e49cd88p-1 + 0x0p+0i, 0x1.8e15a89ccdcccp-1 + 0x0p+0i,
0x1.8d739ef77525fp-1 + 0x0p+0i, 0x1.8ce6e769f5093p-1 + 0x0p+0i,
0x1.8c4f4f7b898b9p-1 + 0x0p+0i, 0x1.8bccb17aebf74p-1 + 0x0p+0i,
0x1.8b3f4fc0466ffp-1 + 0x0p+0i, 0x1.8ac696e9893a3p-1 + 0x0p+0i,
0x1.8a4334c1dabc6p-1 + 0x0p+0i, 0x1.89d4313bbbb69p-1 + 0x0p+0i,
0x1.895a9ca5a98fdp-1 + 0x0p+0i, 0x1.88f522e6b6009p-1 + 0x0p+0i,
0x1.88852e4456e4cp-1 + 0x0p+0i, 0x1.882916df2226p-1 + 0x0p+0i,
0x1.87c298c045a01p-1 + 0x0p+0i, 0x1.876fc03546b6ap-1 + 0x0p+0i,
0x1.8712932782b63p-1 + 0x0p+0i, 0x1.86c8d9bc293d4p-1 + 0x0p+0i,
0x1.8674dc203a568p-1 + 0x0p+0i, 0x1.863425bad984ap-1 + 0x0p+0i,
0x1.85e9399ef17dcp-1 + 0x0p+0i, 0x1.85b16da72d15bp-1 + 0x0p+0i,
0x1.856f78a5d8725p-1 + 0x0p+0i, 0x1.854081e94b322p-1 + 0x0p+0i,
0x1.85076d0ca2885p-1 + 0x0p+0i, 0x1.84e139a780226p-1 + 0x0p+0i,
0x1.84b0f15064b7bp-1 + 0x0p+0i, 0x1.84937299e339bp-1 + 0x0p+0i,
0x1.846be66b104c1p-1 + 0x0p+0i, 0x1.845710e56d8f7p-1 + 0x0p+0i,
0x1.843833b230b05p-1 + 0x0p+0i, 0x1.842bfefe30d9cp-1 + 0x0p+0i,
0x1.8415c6bca4321p-1 + 0x0p+0i, 0x1.84122d906df08p-1 + 0x0p+0i,
0x1.8404934f170d3p-1 + 0x0p+0i, 0x1.8409937059bd8p-1 + 0x0p+0i,
0x1.8404934f170d3p-1 + 0x0p+0i, 0x1.84122d906df0cp-1 + 0x0p+0i,
0x1.8415c6bca4321p-1 + 0x0p+0i, 0x1.842bfefe30d9cp-1 + 0x0p+0i,
0x1.843833b230b05p-1 + 0x0p+0i, 0x1.845710e56d8f4p-1 + 0x0p+0i,
0x1.846be66b104c1p-1 + 0x0p+0i, 0x1.84937299e339dp-1 + 0x0p+0i,
0x1.84b0f15064b7bp-1 + 0x0p+0i, 0x1.84e139a780225p-1 + 0x0p+0i,
0x1.85076d0ca2885p-1 + 0x0p+0i, 0x1.854081e94b324p-1 + 0x0p+0i,
0x1.856f78a5d8725p-1 + 0x0p+0i, 0x1.85b16da72d16p-1 + 0x0p+0i,
0x1.85e9399ef17dcp-1 + 0x0p+0i, 0x1.863425bad9847p-1 + 0x0p+0i,
0x1.8674dc203a568p-1 + 0x0p+0i, 0x1.86c8d9bc293d4p-1 + 0x0p+0i,
0x1.8712932782b63p-1 + 0x0p+0i, 0x1.876fc03546b6ep-1 + 0x0p+0i,
0x1.87c298c045a01p-1 + 0x0p+0i, 0x1.882916df2225ap-1 + 0x0p+0i,
0x1.88852e4456e4cp-1 + 0x0p+0i, 0x1.88f522e6b600ap-1 + 0x0p+0i,
0x1.895a9ca5a98fdp-1 + 0x0p+0i, 0x1.89d4313bbbb67p-1 + 0x0p+0i,
0x1.8a4334c1dabc6p-1 + 0x0p+0i, 0x1.8ac696e9893a5p-1 + 0x0p+0i,
0x1.8b3f4fc0466ffp-1 + 0x0p+0i, 0x1.8bccb17aebf73p-1 + 0x0p+0i,
0x1.8c4f4f7b898b9p-1 + 0x0p+0i, 0x1.8ce6e769f5094p-1 + 0x0p+0i,
0x1.8d739ef77525fp-1 + 0x0p+0i, 0x1.8e15a89ccdccbp-1 + 0x0p+0i,
0x1.8eacb2e49cd88p-1 + 0x0p+0i, 0x1.8f596ef0d3fe8p-1 + 0x0p+0i,
0x1.8ffb0a32d431ep-1 + 0x0p+0i, 0x1.90b2bed56a09bp-1 + 0x0p+0i,
0x1.915f2eb41fe65p-1 + 0x0p+0i, 0x1.922227f81c39dp-1 + 0x0p+0i,
0x1.92d9b5d1d6eb4p-1 + 0x0p+0i, 0x1.93a84603f7323p-1 + 0x0p+0i,
0x1.946b4155ef408p-1 + 0x0p+0i, 0x1.9545c17630aa9p-1 + 0x0p+0i,
0x1.9614804abbe98p-1 + 0x0p+0i, 0x1.96fb508a92efdp-1 + 0x0p+0i,
0x1.97d62ff3b6636p-1 + 0x0p+0i, 0x1.98c9b842775ebp-1 + 0x0p+0i,
0x1.99b11ce250cbfp-1 + 0x0p+0i, 0x1.9ab1cd89873bap-1 + 0x0p+0i,
0x1.9ba6242a40bddp-1 + 0x0p+0i, 0x1.9cb4767bf6c81p-1 + 0x0p+0i,
0x1.9db634b9394ffp-1 + 0x0p+0i, 0x1.9ed2abd280acfp-1 + 0x0p+0i,
0x1.9fe250d6a8085p-1 + 0x0p+0i, 0x1.a10d7a7913267p-1 + 0x0p+0i,
0x1.a22b8fd0c1d0ep-1 + 0x0p+0i, 0x1.a3660555ea81fp-1 + 0x0p+0i,
0x1.a4931fdd07af2p-1 + 0x0p+0i, 0x1.a5dd8747c395bp-1 + 0x0p+0i,
0x1.a71a48336dd4ep-1 + 0x0p+0i, 0x1.a8755562ec536p-1 + 0x0p+0i,
0x1.a9c26b6c83c28p-1 + 0x0p+0i, 0x1.ab2ee1764acf8p-1 + 0x0p+0i,
0x1.ac8d0a2c6b7p-1 + 0x0p+0i, 0x1.ae0bbce307129p-1 + 0x0p+0i,
0x1.af7bc62625eaep-1 + 0x0p+0i, 0x1.b10d9bd3697c8p-1 + 0x0p+0i,
0x1.b2906584cd622p-1 + 0x0p+0i, 0x1.b43658dfc8bp-1 + 0x0p+0i,
0x1.b5ccd6cad464p-1 + 0x0p+0i, 0x1.b787f9332680dp-1 + 0x0p+0i,
0x1.b933353a6aa91p-1 + 0x0p+0i, 0x1.bb04b14479baap-1 + 0x0p+0i,
0x1.bcc5cdddda0e7p-1 + 0x0p+0i, 0x1.beaeea3dbff3ap-1 + 0x0p+0i,
0x1.c087254b35f8fp-1 + 0x0p+0i, 0x1.c289482f01142p-1 + 0x0p+0i,
0x1.c479fe444a9c6p-1 + 0x0p+0i, 0x1.c696b131ac544p-1 + 0x0p+0i,
0x1.c8a1615aa0a14p-1 + 0x0p+0i, 0x1.cada55a8767f5p-1 + 0x0p+0i,
0x1.cd00a5c80086ep-1 + 0x0p+0i, 0x1.cf57b9d18eeeap-1 + 0x0p+0i,
0x1.d19b7bb6a2c95p-1 + 0x0p+0i, 0x1.d412c0ed29603p-1 + 0x0p+0i,
0x1.d675f841c675ap-1 + 0x0p+0i, 0x1.d90fba49ba58ep-1 + 0x0p+0i,
0x1.db94a38897518p-1 + 0x0p+0i, 0x1.de53709ad6264p-1 + 0x0p+0i,
0x1.e0fc89433e527p-1 + 0x0p+0i, 0x1.e3e33c18ba9dap-1 + 0x0p+0i,
0x1.e6b34c66328ap-1 + 0x0p+0i, 0x1.e9c51806f81bep-1 + 0x0p+0i,
0x1.ecbf3e847b90dp-1 + 0x0p+0i, 0x1.efffbc6aea251p-1 + 0x0p+0i,
0x1.f3277bd1727dep-1 + 0x0p+0i, 0x1.f69abcf22dd4p-1 + 0x0p+0i,
0x1.f9f40ce1b101ap-1 + 0x0p+0i, 0x1.fd9eae52dae35p-1 + 0x0p+0i,
0x1.009707cf79606p+0 + 0x0p+0i, 0x1.028aa9e8e9769p+0 + 0x0p+0i,
0x1.046ff4a9e62d6p+0 + 0x0p+0i, 0x1.0684eb907197cp+0 + 0x0p+0i,
0x1.088ac226fd751p+0 + 0x0p+0i, 0x1.0ac486be8f8e4p+0 + 0x0p+0i,
0x1.0cee4e34ec015p+0 + 0x0p+0i, 0x1.0f50e6f9873fcp+0 + 0x0p+0i,
0x1.11a28f805d3b3p+0 + 0x0p+0i, 0x1.1432af0900556p+0 + 0x0p+0i,
0x1.16b0d22aac01ep+0 + 0x0p+0i, 0x1.1973fe296e77dp+0 + 0x0p+0i,
0x1.1c240577fddbap+0 + 0x0p+0i, 0x1.1f20c92a81fccp+0 + 0x0p+0i,
0x1.22092062dbc14p+0 + 0x0p+0i, 0x1.25474ea6d64b7p+0 + 0x0p+0i,
0x1.286fa58b53ba6p+0 + 0x0p+0i, 0x1.2bf8b0af6917ep+0 + 0x0p+0i,
0x1.2f6a52cdfa62dp+0 + 0x0p+0i, 0x1.3349c3234d571p+0 + 0x0p+0i,
0x1.37100ec02207dp+0 + 0x0p+0i, 0x1.3b5425902d6b9p+0 + 0x0p+0i,
0x1.3f7d2fbc01013p+0 + 0x0p+0i, 0x1.4437cbc8d48cp+0 + 0x0p+0i,
0x1.48d547873930ap+0 + 0x0p+0i, 0x1.4e1d2ccff7acep+0 + 0x0p+0i,
0x1.5345b87a1e3eap+0 + 0x0p+0i, 0x1.593872cb1c13ap+0 + 0x0p+0i,
0x1.5f09861bef288p+0 + 0x0p+0i, 0x1.65ce45437e341p+0 + 0x0p+0i,
0x1.6c6f24e5baa4bp+0 + 0x0p+0i, 0x1.743b3ba299359p+0 + 0x0p+0i,
0x1.7be1a850068a4p+0 + 0x0p+0i, 0x1.84ffeaf6e28efp+0 + 0x0p+0i,
0x1.8df7e6caae9e3p+0 + 0x0p+0i, 0x1.98d560e3ebbe3p+0 + 0x0p+0i,
0x1.a38eca971100cp+0 + 0x0p+0i, 0x1.b0d214c3536d4p+0 + 0x0p+0i,
0x1.bdfa2312d9fccp+0 + 0x0p+0i, 0x1.ceb1c78b1213cp+0 + 0x0p+0i,
0x1.df6710c5b99f9p+0 + 0x0p+0i, 0x1.f56ff316f56e1p+0 + 0x0p+0i,
0x1.05dcd693611fdp+1 + 0x0p+0i, 0x1.1562265964a27p+1 + 0x0p+0i,
0x1.256a24d3a0dcbp+1 + 0x0p+0i, 0x1.3dd023c523d6dp+1 + 0x0p+0i,
0x1.581a71f25d7c3p+1 + 0x0p+0i, 0x1.87b305864dbccp+1 + 0x0p+0i,
0x1.c12e2781c3ba8p+1 + 0x0p+0i, 0x1.39ff092cd5b7dp+2 + 0x0p+0i
)
assertThat(stats:::fft(inverse=FALSE,z=c(1.0986855396044, 0.2746713849011, 0.183114256600733, 0.143875487329148,
0.121158305119282, 0.106013516979372, 0.0950466014297817, 0.0866601365977422,
0.0799939722440697, 0.0745398377728831, 0.0699761742357678, 0.0660886090004474,
0.0627281712546619, 0.0597877882270996, 0.0571883191737474, 0.0548698738018388,
0.052786207708098, 0.0509009860042374, 0.0491852224310608, 0.047615481289644,
0.0461725879172305, 0.0448406863426951, 0.0436065390121622, 0.0424589985118421,
0.0413886035913755, 0.0403872664077132, 0.0394480276540454, 0.0385648628558205,
0.0377325276862704, 0.0369464333594731, 0.0362025454394837, 0.0354973010478055,
0.0348275406506771, 0.034190451492433, 0.0335835204008514, 0.0330044941870436,
0.0324513462397747, 0.0319222482032566, 0.031415545850824, 0.0309297384407597,
0.0304634609768287, 0.03001546890364, 0.0295846248523916, 0.029169887120816,
0.0287702996260103, 0.028384983113162, 0.0280131274391904, 0.0276539847797136,
0.0273068636318511, 0.02697112350523, 0.0266461702099863, 0.026331451664199,
0.0260264541545751, 0.0257306989937277, 0.0254437395253961, 0.025165158435702,
0.0248945653342429, 0.0246315945736699, 0.0243759032805176, 0.0241271695735735,
0.0238850909490895, 0.0236493828147235, 0.0234197771563281, 0.0231960213236244,
0.0229778769224618, 0.0227651188028093, 0.0225575341328749, 0.0223549215508431,
0.0221570903866763, 0.0219638599472577, 0.0217750588588858, 0.0215905244617766,
0.0214101022517896, 0.0212336453650991, 0.0210610141019682, 0.020892075486177,
0.0207267028569988, 0.0205647754909285, 0.0204061782506386, 0.0202508012588824,
0.0200985395952818, 0.0199492930141287, 0.0198029656815068, 0.0196594659301915,
0.01951870603093, 0.0193806019788244, 0.0192450732936578, 0.0191120428331025,
0.0189814366178421, 0.0188531836677216, 0.0187272158481154, 0.0186034677257711,
0.0184818764334458, 0.0183623815427123, 0.018244924944358, 0.0181294507358494,
0.0180159051153744, 0.0179042362820146, 0.0177943943416342, 0.017686331218102,
0.0175800005694962, 0.0174753577089635, 0.0173723595299322, 0.0172709644353996,
0.0171711322710331, 0.0170728242618478, 0.0169760029522343, 0.0168806321491318,
0.0167866768681534, 0.0166941032824834, 0.0166028786743824, 0.0165129713891421,
0.016424350791347, 0.016336987223308, 0.0162508519655401, 0.0161659171991697,
0.0160821559701584, 0.0159995421552432, 0.015918050429495, 0.0158376562354066,
0.0157583357534263, 0.0156800658738563, 0.0156028241700442, 0.0155265888727964,
0.0154513388459492, 0.015377053563036, 0.0153037130849929, 0.0152312980388494,
0.0151597895973525, 0.0150891694594766, 0.0150194198317749, 0.0149505234105282,
0.0148824633646532, 0.014815223319331, 0.0147487873403206, 0.0146831399189245,
0.0146182659575743, 0.014554150756006, 0.0144907799979972, 0.0144281397386399,
0.0143662163921221, 0.0143049967199966, 0.014244467819912, 0.0141846171147863,
0.0141254323424019, 0.014066901545403, 0.0140090130616771, 0.0139517555151035,
0.0138951178066525, 0.0138390891058192, 0.013783658842378, 0.0137288166984428,
0.013783658842378, 0.0138390891058192, 0.0138951178066525, 0.0139517555151035,
0.0140090130616771, 0.014066901545403, 0.0141254323424019, 0.0141846171147863,
0.014244467819912, 0.0143049967199966, 0.0143662163921221, 0.0144281397386399,
0.0144907799979972, 0.014554150756006, 0.0146182659575743, 0.0146831399189245,
0.0147487873403206, 0.014815223319331, 0.0148824633646532, 0.0149505234105282,
0.0150194198317749, 0.0150891694594766, 0.0151597895973525, 0.0152312980388494,
0.0153037130849929, 0.015377053563036, 0.0154513388459492, 0.0155265888727964,
0.0156028241700442, 0.0156800658738563, 0.0157583357534263, 0.0158376562354066,
0.015918050429495, 0.0159995421552432, 0.0160821559701584, 0.0161659171991697,
0.0162508519655401, 0.016336987223308, 0.016424350791347, 0.0165129713891421,
0.0166028786743824, 0.0166941032824834, 0.0167866768681534, 0.0168806321491318,
0.0169760029522343, 0.0170728242618478, 0.0171711322710331, 0.0172709644353996,
0.0173723595299322, 0.0174753577089635, 0.0175800005694962, 0.017686331218102,
0.0177943943416342, 0.0179042362820146, 0.0180159051153744, 0.0181294507358494,
0.018244924944358, 0.0183623815427123, 0.0184818764334458, 0.0186034677257711,
0.0187272158481154, 0.0188531836677216, 0.0189814366178421, 0.0191120428331025,
0.0192450732936578, 0.0193806019788244, 0.01951870603093, 0.0196594659301915,
0.0198029656815068, 0.0199492930141287, 0.0200985395952818, 0.0202508012588824,
0.0204061782506386, 0.0205647754909285, 0.0207267028569988, 0.020892075486177,
0.0210610141019682, 0.0212336453650991, 0.0214101022517896, 0.0215905244617766,
0.0217750588588858, 0.0219638599472577, 0.0221570903866763, 0.0223549215508431,
0.0225575341328749, 0.0227651188028093, 0.0229778769224618, 0.0231960213236244,
0.0234197771563281, 0.0236493828147235, 0.0238850909490895, 0.0241271695735735,
0.0243759032805176, 0.0246315945736699, 0.0248945653342429, 0.025165158435702,
0.0254437395253961, 0.0257306989937277, 0.0260264541545751, 0.026331451664199,
0.0266461702099863, 0.02697112350523, 0.0273068636318511, 0.0276539847797136,
0.0280131274391904, 0.028384983113162, 0.0287702996260103, 0.029169887120816,
0.0295846248523916, 0.03001546890364, 0.0304634609768287, 0.0309297384407597,
0.031415545850824, 0.0319222482032566, 0.0324513462397747, 0.0330044941870436,
0.0335835204008514, 0.034190451492433, 0.0348275406506771, 0.0354973010478055,
0.0362025454394837, 0.0369464333594731, 0.0377325276862704, 0.0385648628558205,
0.0394480276540454, 0.0403872664077132, 0.0413886035913755, 0.0424589985118421,
0.0436065390121622, 0.0448406863426951, 0.0461725879172305, 0.047615481289644,
0.0491852224310608, 0.0509009860042374, 0.052786207708098, 0.0548698738018388,
0.0571883191737474, 0.0597877882270996, 0.0627281712546619, 0.0660886090004474,
0.0699761742357678, 0.0745398377728831, 0.0799939722440697, 0.0866601365977422,
0.0950466014297817, 0.106013516979372, 0.121158305119282, 0.143875487329148,
0.183114256600733, 0.2746713849011))
, identicalTo( expected, tol = 1e-6 ) )
|
DropMissing <- function(cross, pheno.names)
{
as.vector(attr(stats::na.omit(cross$pheno[, pheno.names]), "na.action"))
}
normal.trans <- function (x)
{
x <- rank(x, na.last = "keep")
stats::qnorm(x/(1 + sum(!is.na(x))))
}
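## Worked illustration: normal.trans() is a rank-based inverse-normal transform
## (ties share ranks, NAs are kept), e.g.
##   normal.trans(c(3, 1, 2, NA))
##   # qnorm(c(3, 1, 2) / 4) = 0.674, -0.674, 0.000, with NA preserved last
## DropMissing() returns the row indices that would be dropped because of
## missing values in the selected columns of cross$pheno.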
|
make_nth_mday_of_the_quarter <- function(n) {
rr_nth_of_q1 <- yearly() %>%
recur_on_ymonth(1:3) %>%
recur_on_mday(1:31) %>%
recur_on_position(n)
rr_nth_of_q2 <- yearly() %>%
recur_on_ymonth(4:6) %>%
recur_on_mday(1:31) %>%
recur_on_position(n)
rr_nth_of_q3 <- yearly() %>%
recur_on_ymonth(7:9) %>%
recur_on_mday(1:31) %>%
recur_on_position(n)
rr_nth_of_q4 <- yearly() %>%
recur_on_ymonth(10:12) %>%
recur_on_mday(1:31) %>%
recur_on_position(n)
rb_nth_day_of_quarter <- runion() %>%
add_rschedule(rr_nth_of_q1) %>%
add_rschedule(rr_nth_of_q2) %>%
add_rschedule(rr_nth_of_q3) %>%
add_rschedule(rr_nth_of_q4)
rb_nth_day_of_quarter
}
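# The helper above unions four yearly rules, one per quarter, each restricted to
# that quarter's months and all month days, then keeps the n-th date within the
# rule via recur_on_position(); a negative n counts back from the quarter's end.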
test_that("can construct a runion to select n-th mday of the quarter", {
n <- 60L
start <- as.Date("2000-01-01")
stop <- as.Date("2001-12-31")
rb_60th_day_of_quarter <- make_nth_mday_of_the_quarter(n)
expect <- seq(start, stop, "1 day")
expect <- expect[lubridate::qday(expect) == n]
x <- alma_search(start, stop, rb_60th_day_of_quarter)
expect_equal(x, expect)
})
test_that("can select n-th mday of the quarter from the back", {
n <- -1
rb_neg_1th_day_of_quarter <- make_nth_mday_of_the_quarter(n)
x <- alma_search("2000-01-01", "2001-12-31", rb_neg_1th_day_of_quarter)
expect <- as.Date(c(
"2000-03-31", "2000-06-30", "2000-09-30", "2000-12-31",
"2001-03-31", "2001-06-30", "2001-09-30", "2001-12-31"
))
expect_equal(x, expect)
})
make_nth_wday_of_the_quarter <- function(wday, n) {
rr_nth_wday_of_q1 <- yearly() %>%
recur_on_ymonth(1:3) %>%
recur_on_wday(wday) %>%
recur_on_position(n)
rr_nth_wday_of_q2 <- yearly() %>%
recur_on_ymonth(4:6) %>%
recur_on_wday(wday) %>%
recur_on_position(n)
rr_nth_wday_of_q3 <- yearly() %>%
recur_on_ymonth(7:9) %>%
recur_on_wday(wday) %>%
recur_on_position(n)
rr_nth_wday_of_q4 <- yearly() %>%
recur_on_ymonth(10:12) %>%
recur_on_wday(wday) %>%
recur_on_position(n)
rb_nth_wday_of_quarter <- runion() %>%
add_rschedule(rr_nth_wday_of_q1) %>%
add_rschedule(rr_nth_wday_of_q2) %>%
add_rschedule(rr_nth_wday_of_q3) %>%
add_rschedule(rr_nth_wday_of_q4)
rb_nth_wday_of_quarter
}
test_that("can construct a runion to select n-th wday of the quarter", {
n <- 6L
wday <- "Monday"
start <- as.Date("2000-01-01")
stop <- as.Date("2001-12-31")
rb_6th_monday_of_quarter <- make_nth_wday_of_the_quarter(wday, n)
x <- alma_search(start, stop, rb_6th_monday_of_quarter)
expect <- as.Date(c(
"2000-02-07", "2000-05-08", "2000-08-07", "2000-11-06",
"2001-02-05", "2001-05-07", "2001-08-06", "2001-11-05"
))
expect_equal(x, expect)
})
test_that("not all quarters might have the requested position", {
n <- 14
wday <- "Monday"
rb_14th_monday_of_quarter <- make_nth_wday_of_the_quarter(wday, n)
x <- alma_search("2000-01-01", "2001-12-31", rb_14th_monday_of_quarter)
expect <- as.Date("2001-12-31")
expect_equal(x, expect)
})
test_that("can select n-th wday in the quarter from the back", {
n <- -2
wday <- c("Monday", "Tuesday")
rb_neg_2nd_monday_or_tuesday_of_quarter <- make_nth_wday_of_the_quarter(wday, n)
x <- alma_search("2000-01-01", "2001-12-31", rb_neg_2nd_monday_or_tuesday_of_quarter)
expect <- as.Date(c(
"2000-03-27", "2000-06-26", "2000-09-25", "2000-12-25",
"2001-03-26", "2001-06-25", "2001-09-24", "2001-12-25"
))
expect_equal(x, expect)
})
|
phinR <- function(t,x) mean(cos(t*x))
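## phinR(t, x) is the real part of the empirical characteristic function,
## Re(phi_n(t)) = mean(cos(t * x)); the helpers below locate its first root by a
## Welsh-style fixed-point iteration, a grid scan, or numerical minimisation of
## |phinR| with nlminb().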
ComputeFirstRootRealeCF <- function(x,...,tol=1e-3,maxIter=100,
lowerBand=1e-4,upperBand=30){
WelshSol <- WelshFirstRootRealeCF(x,tol,maxIter)
  if (abs(WelshSol$phinR) < tol) return(WelshSol$t)
else return(numFirstRootRealeCF(x,tol,lowerBand,upperBand,...)$t)
}
WelshFirstRootRealeCF <- function(x,tol=1e-3,maxIter=100){
A=0;iter=0
m=mean(abs(x))
val=phinR(A,x)
while ((abs(val) > tol) && (iter< maxIter)){
A=A+val/m
val=phinR(A,x)
iter=iter+1
}
list(t=A,phinR=val)
}
graphFirstRootRealeCF <- function(x,tol=1e-3,lowerBand=1e-4,upperBand=30){
t_seq<- seq(lowerBand,upperBand,tol)
phiVal <- sapply(t_seq,phinR,x=x)
t <- t_seq[abs(phiVal)< tol][1]
list(t=t, phinR=phinR(t,x))
}
numFirstRootRealeCF <- function(x,tol=1e-3,lowerBand=1e-4,upperBand=30,...){
t_init<-graphFirstRootRealeCF(x,tol=tol,
lowerBand=lowerBand,
upperBand=upperBand)$t
if (is.na(t_init)) t_init <- upperBand
objectiveFct <- function(t) abs(phinR(t,x))
optInfo <- nlminb(start=t_init,objective=objectiveFct,
lower=lowerBand,
upper=upperBand)
list(t=as.numeric(optInfo$par),phinR=optInfo$objective)
}
test.ComputeFirstRootRealeCF <- function(){
test.WelshFirstRootRealeCF()
test.graphFirstRootRealeCF()
test.numFirstRootRealeCF()
}
test.numFirstRootRealeCF <- function(){
set.seed(345); x <- rstable(500,1.5,0.5)
tEstim <- numFirstRootRealeCF(x)$t
tRef <- 2.305364
expect_almost_equal(tEstim,tRef)
}
test.graphFirstRootRealeCF <- function(){
set.seed(345); x <- rstable(500,1.5,0.5)
tEstim <- graphFirstRootRealeCF(x)$t
tRef <- 2.3031
expect_almost_equal(tEstim,tRef)
}
test.WelshFirstRootRealeCF <- function(){
set.seed(345); x <- rstable(500,1.5,0.5)
tEstim <- WelshFirstRootRealeCF(x)$t
tRef <- 2.302698
expect_almost_equal(tEstim,tRef)
}
|
library(ClusterR)
data(dietary_survey_IBS)
dim(dietary_survey_IBS)
X = dietary_survey_IBS[, -ncol(dietary_survey_IBS)]
y = dietary_survey_IBS[, ncol(dietary_survey_IBS)]
dat = center_scale(X, mean_center = T, sd_scale = T)
library(OpenImageR)
im = readImage('elephant.jpg')
im = resizeImage(im, 75, 75, method = 'bilinear')
imageShow(im)
im2 = apply(im, 3, as.vector)
km_rc = KMeans_rcpp(im2, clusters = 5, num_init = 5, max_iters = 100,
initializer = 'optimal_init', verbose = F)
km_rc$between.SS_DIV_total.SS
pr = predict(km_rc, newdata = im2)
getcent = km_rc$centroids
getclust = km_rc$clusters
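# Vector-quantise the image: replace each pixel's RGB value with the centroid of
# its assigned cluster, then restore the original height x width x 3 layout.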
new_im = getcent[getclust, ]
dim(new_im) = c(nrow(im), ncol(im), 3)
imageShow(new_im)
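# Rough guide for the next call: under the 'distortion_fK' criterion, cluster
# counts whose f(K) value falls below fK_threshold (0.85 here) are treated as
# plausible choices, following the Pham-Dimov-Nguyen f(K) heuristic.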
opt = Optimal_Clusters_KMeans(im2, max_clusters = 10, plot_clusters = T,
criterion = 'distortion_fK', fK_threshold = 0.85,
initializer = 'optimal_init', tol_optimal_init = 0.2)
im_d = readImage('dog.jpg')
im_d = resizeImage(im_d, 350, 350, method = 'bilinear')
imageShow(im_d)
im3 = apply(im_d, 3, as.vector)
dim(im3)
start = Sys.time()
km_init = KMeans_rcpp(im3, clusters = 5, num_init = 5, max_iters = 100,
initializer = 'kmeans++', verbose = F)
end = Sys.time()
t = end - start
cat('time to complete :', t, attributes(t)$units, '\n')
getcent_init = km_init$centroids
getclust_init = km_init$clusters
new_im_init = getcent_init[getclust_init, ]
dim(new_im_init) = c(nrow(im_d), ncol(im_d), 3)
imageShow(new_im_init)
start = Sys.time()
km_mb = MiniBatchKmeans(im3, clusters = 5, batch_size = 20, num_init = 5, max_iters = 100,
init_fraction = 0.2, initializer = 'kmeans++', early_stop_iter = 10,
verbose = F)
pr_mb = predict(km_mb, newdata = im3)
end = Sys.time()
t = end - start
cat('time to complete :', t, attributes(t)$units, '\n')
getcent_mb = km_mb$centroids
new_im_mb = getcent_mb[pr_mb, ]
dim(new_im_mb) = c(nrow(im_d), ncol(im_d), 3)
imageShow(new_im_mb)
data(mushroom)
X = mushroom[, -1]
y = as.numeric(mushroom[, 1])
gwd = FD::gowdis(X)
gwd_mat = as.matrix(gwd)
cm = Cluster_Medoids(gwd_mat, clusters = 2, swap_phase = TRUE, verbose = F)
knitr::kable(data.frame(adjusted_rand_index = external_validation(y, cm$clusters, method = "adjusted_rand_index", summary_stats = F), avg_silhouette_width = mean(cm$silhouette_matrix[, 'silhouette_widths'])), caption = "Non-Weighted-K-medoids", align = 'l')
knitr::kable(data.frame(predictors = c("cap_shape", "cap_surface", "cap_color", "bruises", "odor",
"gill_attachment", "gill_spacing", "gill_size", "gill_color",
"stalk_shape", "stalk_root", "stalk_surface_above_ring", "stalk_surface_below_ring",
"stalk_color_above_ring", "stalk_color_below_ring", "veil_type",
"veil_color", "ring_number", "ring_type", "spore_print_color",
"population", "habitat"), weights = c(4.626, 38.323, 55.899, 34.028, 169.608, 6.643, 42.08, 57.366,
37.938, 33.081, 65.105, 18.718, 76.165, 27.596, 26.238, 0.0, 1.507,
37.314, 32.685, 127.87, 64.019, 44.519)), align = 'l')
weights = c(4.626, 38.323, 55.899, 34.028, 169.608, 6.643, 42.08, 57.366, 37.938,
33.081, 65.105, 18.718, 76.165, 27.596, 26.238, 0.0, 1.507, 37.314,
32.685, 127.87, 64.019, 44.519)
gwd_w = FD::gowdis(X, w = weights)
gwd_mat_w = as.matrix(gwd_w)
cm_w = Cluster_Medoids(gwd_mat_w, clusters = 2, swap_phase = TRUE, verbose = F)
knitr::kable(data.frame(adjusted_rand_index = external_validation(y, cm_w$clusters, method = "adjusted_rand_index", summary_stats = F), avg_silhouette_width = mean(cm_w$silhouette_matrix[, 'silhouette_widths'])), caption = "Weighted-K-medoids", align = 'l')
cl_X = X
for (i in 1:ncol(cl_X)) { cl_X[, i] = as.numeric(cl_X[, i]) }
start = Sys.time()
cl_f = Clara_Medoids(cl_X, clusters = 2, distance_metric = 'hamming', samples = 5,
sample_size = 0.2, swap_phase = TRUE, verbose = F, threads = 1)
end = Sys.time()
t = end - start
cat('time to complete :', t, attributes(t)$units, '\n')
knitr::kable(data.frame(adjusted_rand_index = external_validation(y, cl_f$clusters, method = "adjusted_rand_index", summary_stats = F), avg_silhouette_width = mean(cl_f$silhouette_matrix[, 'silhouette_widths'])), caption = "hamming-Clara-Medoids", align = 'l')
start = Sys.time()
cl_e = Cluster_Medoids(cl_X, clusters = 2, distance_metric = 'hamming', swap_phase = TRUE,
verbose = F, threads = 1)
end = Sys.time()
t = end - start
cat('time to complete :', t, attributes(t)$units, '\n')
knitr::kable(data.frame(adjusted_rand_index = external_validation(y, cl_e$clusters, method = "adjusted_rand_index", summary_stats = F), avg_silhouette_width = mean(cl_e$silhouette_matrix[, 'silhouette_widths'])), caption = "hamming-Cluster-Medoids", align = 'l')
Silhouette_Dissimilarity_Plot(cl_f, silhouette = TRUE)
Silhouette_Dissimilarity_Plot(cl_e, silhouette = TRUE)
|
SearchGeneticAlgoritm <- function(pResidual) {
if (requireNamespace("GA", quietly = T)) {
res.ga <-
GA::ga(
type = "binary",
ComputeLambda,
pResidual = pResidual,
nBits = ncol(pResidual),
monitor = F
)
uMax <- matrix((-1) ^ res.ga@solution[1,],
ncol = ncol(pResidual),
nrow = 1)
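    # Resolve the sign ambiguity of the +/-1 solution vector: if its first entry
    # is -1, flip the whole vector so that uMax[1] is always +1.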
uMax <- uMax * (-1) ^ (uMax[1] == -1)
return(list(L1Max = max(res.ga@fitness), uMax = uMax))
} else {
return(NULL)
}
}
|
summary.dea_fuzzy <- function(object, ..., exportExcel = TRUE, filename = NULL, returnList = FALSE){
if (!is.dea_fuzzy(object)) {
stop("Input should be of class dea_fuzzy!")
}
modelname <- object$modelname
DMU <- NULL
if (modelname == "fuzzy_guotanaka") {
eff <- efficiencies(object)
if (!returnList) {
effmat <-
do.call(rbind, lapply(seq(dim(eff)[3]), function(x)
eff[, , x]))
effdf <- cbind(data.frame(
DMU = dimnames(effmat)[[1]],
hlevel = rep(object$h, each = dim(eff)[1])
),
data.frame(effmat, row.names = NULL))
if (exportExcel) {
if (is.null(filename)) {
filename <- paste("ResultsDEA", Sys.time(), ".xlsx", sep = "")
filename <- gsub(" ", "_", filename)
filename <- gsub(":", ".", filename)
}
write_xlsx(effdf, path = filename)
}
return(effdf)
} else {
efflist <- lapply(seq(dim(eff)[3]), function(x)
eff[, , x])
names(efflist) <- paste("h =", dimnames(eff)[[3]])
if (exportExcel) {
if (is.null(filename)) {
filename <- paste("ResultsDEA", Sys.time(), ".xlsx", sep = "")
filename <- gsub(" ", "_", filename)
filename <- gsub(":", ".", filename)
}
write_xlsx(efflist, path = filename)
}
return(efflist)
}
} else if (modelname == "fuzzy_possibilistic_basic") {
eff <- efficiencies(object)
eff <- cbind(data.frame(DMU = dimnames(eff)[[1]]),
data.frame(eff, row.names = NULL))
eff %>% gather(key = "hlevel", value = "efficiency", -DMU) -> eff
eff$hlevel <- rep(object$h, each = length(object$data$dmunames))
eff <- eff[,c(2,1,3)]
lamb <- lambdas(object)
lamblist <- lapply(seq(dim(lamb)[3]), function(x)
lamb[, , x])
lambmat <- do.call(rbind, lamblist)
if (!returnList) {
df <- cbind(eff, data.frame(lambmat, row.names = NULL))
if (exportExcel) {
if (is.null(filename)) {
filename <- paste("ResultsDEA", Sys.time(), ".xlsx", sep = "")
filename <- gsub(" ", "_", filename)
filename <- gsub(":", ".", filename)
}
write_xlsx(df, path = filename)
}
return(df)
} else {
lambdas = data.frame(hlevel = eff$hlevel, DMU =eff$DMU, data.frame(lambmat, row.names = NULL))
reslist <- list(efficiencies = eff, lambdas = lambdas)
if (exportExcel) {
if (is.null(filename)) {
filename <- paste("ResultsDEA", Sys.time(), ".xlsx", sep = "")
filename <- gsub(" ", "_", filename)
filename <- gsub(":", ".", filename)
}
write_xlsx(reslist, path = filename)
}
return(reslist)
}
} else {
modelkl <- strsplit(object$modelname, "_")[[1]][3]
if (!modelkl %in% c("addsupereff")) {
eff <- efficiencies(object)
if (!modelkl %in% c("nonradial", "deaps")) {
eff.Worst <- data.frame(eff$Worst, stringsAsFactors = FALSE)
eff.Worst <-
data.frame(cbind(data.frame(DMU = rownames(eff.Worst)),
eff.Worst),
row.names = NULL)
eff.Worst %>% gather(key = "alphacut",
value = "efficiency.Worst", -DMU) -> eff.Worst
eff.Worst$alphacut <- rep(object$alpha,
each = length(object$data$dmunames))
eff.Best <- data.frame(eff$Best, stringsAsFactors = FALSE)
eff.Best <-
data.frame(cbind(data.frame(DMU = rownames(eff.Best)), eff.Best), row.names = NULL)
eff.Best %>% gather(key = "alphacut", value = "efficiency.Best", -DMU) -> eff.Best
eff.Best$alphacut <-
rep(object$alpha, each = length(object$data$dmunames))
eff.df <- merge(eff.Worst, eff.Best, by = c("DMU", "alphacut"))
} else {
neff <- length(object$alphacut[[1]]$DMU$Worst[[1]]$efficiency)
if(neff > 1){
effmat.Worst <-
do.call(rbind, lapply(seq(dim(eff$Worst)[3]), function(x)
eff$Worst[, , x]))
effdf.Worst <-
cbind(
data.frame(
DMU = dimnames(effmat.Worst)[[1]],
alphacut = rep(object$alpha, each = dim(eff$Worst)[1])
),
data.frame(effmat.Worst, row.names = NULL)
)
colnames(effdf.Worst)[3:(ncol(effdf.Worst))] <-
paste("eff", colnames(effdf.Worst)[3:(ncol(effdf.Worst))], "Worst", sep = ".")
effmat.Best <-
do.call(rbind, lapply(seq(dim(eff$Best)[3]), function(x)
eff$Best[, , x]))
effdf.Best <-
cbind(
data.frame(
DMU = dimnames(effmat.Best)[[1]],
alphacut = rep(object$alpha, each = dim(eff$Best)[1])
),
data.frame(effmat.Best, row.names = NULL)
)
colnames(effdf.Best)[3:(ncol(effdf.Best))] <-
paste("eff", colnames(effdf.Best)[3:(ncol(effdf.Best))], "Best", sep = ".")
eff.df <-
merge(effdf.Worst, effdf.Best, by = c("alphacut", "DMU"))
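        # Interleave the merged Worst/Best columns pairwise (x1.Worst, x1.Best,
        # x2.Worst, x2.Best, ...) and put DMU and alphacut first; the same
        # reordering is reused for the slack, lambda and target tables below.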
srtidx <- (3:ncol(eff.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
eff.df <- eff.df[, c(2, 1, srtidx)]
} else {
eff.Worst <- data.frame(eff$Worst, stringsAsFactors = FALSE)
eff.Worst <-
data.frame(cbind(data.frame(DMU = rownames(eff.Worst)),
eff.Worst),
row.names = NULL)
eff.Worst %>% gather(key = "alphacut",
value = "efficiency.Worst", -DMU) -> eff.Worst
eff.Worst$alphacut <- rep(object$alpha,
each = length(object$data$dmunames))
eff.Best <- data.frame(eff$Best, stringsAsFactors = FALSE)
eff.Best <-
data.frame(cbind(data.frame(DMU = rownames(eff.Best)), eff.Best), row.names = NULL)
eff.Best %>% gather(key = "alphacut", value = "efficiency.Best", -DMU) -> eff.Best
eff.Best$alphacut <-
rep(object$alpha, each = length(object$data$dmunames))
eff.df <- merge(eff.Worst, eff.Best, by = c("DMU", "alphacut"))
}
}
} else {
eff.df <- NULL
}
s <- slacks(object)
s[sapply(s, is.null)] <-
NULL
dmunames <- object$data$dmunames
if (!modelkl %in% c("nonradial", "deaps")) {
s.i.Worst <- do.call(rbind, lapply(seq(dim(s$slack_input.W)[3]),
function(x)
matrix(
s$slack_input.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_input.W)[[2]])
)))
s.i.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_input.W)[1])
),
data.frame(s.i.Worst, row.names = NULL)
)
colnames(s.i.Worst)[3:(ncol(s.i.Worst))] <- paste("slack",
colnames(s.i.Worst)[3:(ncol(s.i.Worst))],
"Worst", sep = ".")
s.o.Worst <-
do.call(rbind, lapply(seq(dim(s$slack_output.W)[3]),
function(x)
matrix(
s$slack_output.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_output.W)[[2]])
)))
s.o.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_output.W)[1])
),
data.frame(s.o.Worst, row.names = NULL)
)
colnames(s.o.Worst)[3:(ncol(s.o.Worst))] <- paste("slack",
colnames(s.o.Worst)[3:(ncol(s.o.Worst))],
"Worst", sep = ".")
s.i.Best <- do.call(rbind, lapply(seq(dim(s$slack_input.B)[3]),
function(x)
matrix(
s$slack_input.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_input.B)[[2]])
)))
s.i.Best <- cbind(
data.frame(
DMU = dimnames(s.i.Best)[[1]],
alphacut = rep(object$alpha, each = dim(s$slack_input.B)[1])
),
data.frame(s.i.Best, row.names = NULL)
)
colnames(s.i.Best)[3:(ncol(s.i.Best))] <- paste("slack",
colnames(s.i.Best)[3:(ncol(s.i.Best))],
"Best", sep = ".")
s.o.Best <-
do.call(rbind, lapply(seq(dim(s$slack_output.B)[3]),
function(x)
matrix(
s$slack_output.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_output.B)[[2]])
)))
s.o.Best <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_output.B)[1])
),
data.frame(s.o.Best, row.names = NULL)
)
colnames(s.o.Best)[3:(ncol(s.o.Best))] <- paste("slack",
colnames(s.o.Best)[3:(ncol(s.o.Best))],
"Best", sep = ".")
s.i.df <- merge(s.i.Worst, s.i.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(s.i.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
s.i.df <- s.i.df[, c(2, 1, srtidx)]
s.o.df <- merge(s.o.Worst, s.o.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(s.o.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
s.o.df <- s.o.df[, c(2, 1, srtidx)]
s.df <- cbind(s.i.df, s.o.df[, 3:ncol(s.o.df)])
} else {
if (object$orientation == "io") {
s.o.Worst <- do.call(rbind, lapply(seq(dim(s$slack_output.W)[3]),
function(x)
matrix(
s$slack_output.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_output.W)[[2]])
)))
s.o.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_output.W)[1])
),
data.frame(s.o.Worst, row.names = NULL)
)
colnames(s.o.Worst)[3:(ncol(s.o.Worst))] <- paste("slack",
colnames(s.o.Worst)[3:(ncol(s.o.Worst))],
"Worst", sep = ".")
s.o.Best <-
do.call(rbind, lapply(seq(dim(s$slack_output.B)[3]),
function(x)
matrix(
s$slack_output.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_output.B)[[2]])
)))
s.o.Best <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_output.B)[1])
),
data.frame(s.o.Best, row.names = NULL)
)
colnames(s.o.Best)[3:(ncol(s.o.Best))] <- paste("slack",
colnames(s.o.Best)[3:(ncol(s.o.Best))],
"Best", sep = ".")
s.o.df <-
merge(s.o.Worst, s.o.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(s.o.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
s.df <- s.o.df[, c(2, 1, srtidx)]
} else {
s.i.Worst <-
do.call(rbind, lapply(seq(dim(s$slack_input.W)[3]),
function(x)
matrix(
s$slack_input.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_input.W)[[2]])
)))
s.i.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$slack_input.W)[1])
),
data.frame(s.i.Worst, row.names = NULL)
)
colnames(s.i.Worst)[3:(ncol(s.i.Worst))] <- paste("slack",
colnames(s.i.Worst)[3:(ncol(s.i.Worst))],
"Worst", sep = ".")
s.i.Best <-
do.call(rbind, lapply(seq(dim(s$slack_input.B)[3]),
function(x)
matrix(
s$slack_input.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$slack_input.B)[[2]])
)))
s.i.Best <- cbind(
data.frame(
DMU = dimnames(s.i.Best)[[1]],
alphacut = rep(object$alpha, each = dim(s$slack_input.B)[1])
),
data.frame(s.i.Best, row.names = NULL)
)
colnames(s.i.Best)[3:(ncol(s.i.Best))] <- paste("slack",
colnames(s.i.Best)[3:(ncol(s.i.Best))],
"Best", sep = ".")
s.i.df <-
merge(s.i.Worst, s.i.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(s.i.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
s.df <- s.i.df[, c(2, 1, srtidx)]
}
}
if (modelkl %in% c("addsupereff", "sbmsupereff")) {
supers.i.Worst <-
do.call(rbind, lapply(seq(dim(s$superslack_input.W)[3]),
function(x)
matrix(
s$superslack_input.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$superslack_input.W)[[2]])
)))
supers.i.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$superslack_input.W)[1])
),
data.frame(supers.i.Worst, row.names = NULL)
)
colnames(supers.i.Worst)[3:(ncol(supers.i.Worst))] <-
paste("superslack",
colnames(supers.i.Worst)[3:(ncol(supers.i.Worst))],
"Worst", sep = ".")
supers.o.Worst <-
do.call(rbind, lapply(seq(dim(s$superslack_output.W)[3]),
function(x)
matrix(
s$superslack_output.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$superslack_output.W)[[2]])
)))
supers.o.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$superslack_output.W)[1])
),
data.frame(supers.o.Worst, row.names = NULL)
)
colnames(supers.o.Worst)[3:(ncol(supers.o.Worst))] <-
paste("superslack",
colnames(supers.o.Worst)[3:(ncol(supers.o.Worst))],
"Worst", sep = ".")
supers.i.Best <-
do.call(rbind, lapply(seq(dim(s$superslack_input.B)[3]),
function(x)
matrix(
s$superslack_input.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$superslack_input.B)[[2]])
)))
supers.i.Best <-
cbind(
data.frame(
DMU = dimnames(supers.i.Best)[[1]],
alphacut = rep(object$alpha, each = dim(s$superslack_input.B)[1])
),
data.frame(supers.i.Best, row.names = NULL)
)
colnames(supers.i.Best)[3:(ncol(supers.i.Best))] <-
paste("superslack",
colnames(supers.i.Best)[3:(ncol(supers.i.Best))],
"Best", sep = ".")
supers.o.Best <-
do.call(rbind, lapply(seq(dim(s$superslack_output.B)[3]),
function(x)
matrix(
s$superslack_output.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(s$superslack_output.B)[[2]])
)))
supers.o.Best <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(s$superslack_output.B)[1])
),
data.frame(supers.o.Best, row.names = NULL)
)
colnames(supers.o.Best)[3:(ncol(supers.o.Best))] <-
paste("superslack",
colnames(supers.o.Best)[3:(ncol(supers.o.Best))],
"Best", sep = ".")
supers.i.df <-
merge(supers.i.Worst, supers.i.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(supers.i.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
supers.i.df <- supers.i.df[, c(2, 1, srtidx)]
supers.o.df <-
merge(supers.o.Worst, supers.o.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(supers.o.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
supers.o.df <- supers.o.df[, c(2, 1, srtidx)]
supers.df <-
cbind(supers.i.df, supers.o.df[, 3:ncol(supers.o.df)])
} else {
supers.df <- NULL
}
lmb <- lambdas(object)
lmbmat.Worst <-
do.call(rbind, lapply(seq(dim(lmb$Worst)[3]), function(x)
lmb$Worst[, , x]))
lmbdf.Worst <-
cbind(
data.frame(
DMU = dimnames(lmbmat.Worst)[[1]],
alphacut = rep(object$alpha, each = dim(lmb$Worst)[1])
),
data.frame(lmbmat.Worst, row.names = NULL)
)
colnames(lmbdf.Worst)[3:(ncol(lmbdf.Worst))] <-
paste("lambda", colnames(lmbdf.Worst)[3:(ncol(lmbdf.Worst))], "Worst", sep = ".")
lmbmat.Best <-
do.call(rbind, lapply(seq(dim(lmb$Best)[3]), function(x)
lmb$Best[, , x]))
lmbdf.Best <-
cbind(
data.frame(
DMU = dimnames(lmbmat.Best)[[1]],
alphacut = rep(object$alpha, each = dim(lmb$Best)[1])
),
data.frame(lmbmat.Best, row.names = NULL)
)
colnames(lmbdf.Best)[3:(ncol(lmbdf.Best))] <-
paste("lambda", colnames(lmbdf.Best)[3:(ncol(lmbdf.Best))], "Best", sep = ".")
lmb.df <-
merge(lmbdf.Worst, lmbdf.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(lmb.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
lmb.df <- lmb.df[, c(2, 1, srtidx)]
tar <- targets(object)
tar.i.Worst <-
do.call(rbind, lapply(seq(dim(tar$target_input.W)[3]),
function(x)
matrix(
tar$target_input.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(tar$target_input.W)[[2]])
)))
tar.i.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(tar$target_input.W)[1])
),
data.frame(tar.i.Worst, row.names = NULL)
)
colnames(tar.i.Worst)[3:(ncol(tar.i.Worst))] <- paste("target",
colnames(tar.i.Worst)[3:(ncol(tar.i.Worst))],
"Worst", sep = ".")
tar.o.Worst <-
do.call(rbind, lapply(seq(dim(tar$target_output.W)[3]),
function(x)
matrix(
tar$target_output.W[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(tar$target_output.W)[[2]])
)))
tar.o.Worst <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(tar$target_output.W)[1])
),
data.frame(tar.o.Worst, row.names = NULL)
)
colnames(tar.o.Worst)[3:(ncol(tar.o.Worst))] <- paste("target",
colnames(tar.o.Worst)[3:(ncol(tar.o.Worst))],
"Worst", sep = ".")
tar.i.Best <-
do.call(rbind, lapply(seq(dim(tar$target_input.B)[3]),
function(x)
matrix(
tar$target_input.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(tar$target_input.B)[[2]])
)))
tar.i.Best <-
cbind(
data.frame(
DMU = dimnames(tar.i.Best)[[1]],
alphacut = rep(object$alpha, each = dim(tar$target_input.B)[1])
),
data.frame(tar.i.Best, row.names = NULL)
)
colnames(tar.i.Best)[3:(ncol(tar.i.Best))] <- paste("target",
colnames(tar.i.Best)[3:(ncol(tar.i.Best))],
"Best", sep = ".")
tar.o.Best <-
do.call(rbind, lapply(seq(dim(tar$target_output.B)[3]),
function(x)
matrix(
tar$target_output.B[, , x],
nrow = length(dmunames),
dimnames = list(dmunames, dimnames(tar$target_output.B)[[2]])
)))
tar.o.Best <-
cbind(
data.frame(
DMU = object$data$dmunames,
alphacut = rep(object$alpha, each = dim(tar$target_output.B)[1])
),
data.frame(tar.o.Best, row.names = NULL)
)
colnames(tar.o.Best)[3:(ncol(tar.o.Best))] <- paste("target",
colnames(tar.o.Best)[3:(ncol(tar.o.Best))],
"Best", sep = ".")
tar.i.df <-
merge(tar.i.Worst, tar.i.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(tar.i.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
tar.i.df <- tar.i.df[, c(2, 1, srtidx)]
tar.o.df <-
merge(tar.o.Worst, tar.o.Best, by = c("alphacut", "DMU"))
srtidx <- (3:ncol(tar.o.df))
srtidx <- t(matrix(srtidx, ncol = 2))
dim(srtidx) <- c(1, length(srtidx))
tar.o.df <- tar.o.df[, c(2, 1, srtidx)]
tar.df <- cbind(tar.i.df, tar.o.df[, 3:ncol(tar.o.df)])
if (!modelkl %in% c("additive", "addsupereff")) {
df <-
cbind(eff.df, s.df[, 3:ncol(s.df)], lmb.df[, 3:ncol(lmb.df)], tar.df[, 3:ncol(tar.df)])
} else {
df <-
cbind(s.df[, 3:ncol(s.df)], lmb.df[, 3:ncol(lmb.df)], tar.df[, 3:ncol(tar.df)])
}
if (modelkl %in% c("addsupereff", "sbmsupereff")) {
df <- cbind(df, supers.df[3:ncol(supers.df)])
}
if (exportExcel) {
df.list <- list(
efficiencies = eff.df,
slacks = s.df,
superslacks = supers.df,
lambdas = lmb.df,
targets = tar.df
)
df.list[sapply(df.list, is.null)] <- NULL
if (is.null(filename)) {
filename <- paste("ResultsDEA", Sys.time(), ".xlsx", sep = "")
filename <- gsub(" ", "_", filename)
filename <- gsub(":", ".", filename)
}
write_xlsx(df.list, path = filename)
}
if (!returnList) {
return(df)
} else {
df.list <- list(
efficiencies = eff.df,
slacks = s.df,
superslacks = supers.df,
lambdas = lmb.df,
targets = tar.df
)
return(df.list)
}
}
}
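## A minimal usage sketch: for `mod`, an object of class "dea_fuzzy" built by the
## package's fuzzy DEA constructors (assumed, not shown here),
##   summary(mod, exportExcel = FALSE, returnList = TRUE)
## returns a named list (efficiencies, slacks, lambdas, targets, and superslacks
## where applicable) instead of writing a timestamped ResultsDEA_*.xlsx file.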
|
NOT_CRAN <- identical(tolower(Sys.getenv("NOT_CRAN")), "true")
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
purl = NOT_CRAN,
eval = NOT_CRAN
)
library(googleLanguageR)
test_audio <- system.file("woman1_wb.wav", package = "googleLanguageR")
gl_speech(test_audio)$transcript
gl_speech(test_audio, maxAlternatives = 2L)$transcript
gl_speech(test_audio, languageCode = "en-GB")$transcript
gl_speech(test_audio,
languageCode = "en-GB",
speechContexts = list(phrases = list("is frequently a very difficult")))$transcript
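# speechContexts supplies phrase hints that bias the recogniser towards the
# listed words; languageCode and maxAlternatives are passed through to the
# Cloud Speech API request.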
|
normal_cre <-
function(Z_mat=Z_mat, first.order=first.order,home.field=home.field, control=control){
if(home.field&!control$OT.flag){
y_fixed_effects <- formula(~Location+0)
}else if(home.field&control$OT.flag){
y_fixed_effects <- formula(~Location+(OT)+0)
}else if(!home.field&control$OT.flag){
y_fixed_effects <- formula(~(OT)+0)
}else{
y_fixed_effects <- formula(~1)
}
home_field<-home.field
X<-NULL
H.eta <- function(sigmas, cross_Z_j, Sig.mat, G.inv, nyear,
n_eta,sigmas2, cross_R_Z_j, Sig.mat2,R_R.inv) {
h.eta <- G.inv
h.r<-crossprod(R_Z, R_R.inv) %*% R_Z
symmpart(h.eta + h.r)
}
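  # ltriangle() works in both directions: given a matrix it returns the lower
  # triangle (including the diagonal) as a vector, and given such a vector it
  # rebuilds the corresponding symmetric sparse matrix.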
ltriangle <- function(x) {
if (!is.null(dim(x)[2])) {
resA <- as.vector(x[lower.tri(x, diag = TRUE)])
resA
}
else {
nx <- length(x)
d <- 0.5 * (-1 + sqrt(1 + 8 * nx))
resB <- .symDiagonal(d)
resB[lower.tri(resB, diag = TRUE)] <- x
if (nx > 1) {
resB <- resB + t(resB) - diag(diag(resB))
}
as(resB, "sparseMatrix")
}
}
reduce.G<-function(G) ltriangle(as.matrix(G[1:2,1:2]))
update.eta <- function(X, Y, Z, cross_Z_j, Sig.mat,
ybetas, sigmas, G, nyear, n_eta, cons.logLik,R_X, R_Y, R_Z,
cross_R_Z_j, Sig.mat2, ybetas2,
sigmas2,R_R.inv) {
G.chol <- chol(G)
G.inv <- symmpart(chol2inv(G.chol))
H <- H.eta(sigmas = sigmas, cross_Z_j = cross_Z_j, Sig.mat = Sig.mat,
G.inv = G.inv, nyear = nyear, n_eta = n_eta, sigmas2=sigmas2,cross_R_Z_j=cross_R_Z_j,Sig.mat2=Sig.mat2,R_R.inv)
chol.H <- chol(H)
H.inv <- symmpart(chol2inv(chol.H))
c.temp <- crossprod(R_X, R_R.inv ) %*% R_Z
c.1 <- rbind(crossprod(R_X, R_R.inv ) %*% R_X, t(c.temp))
c.2 <- rbind(c.temp, H)
C_inv <- cbind(c.1, c.2)
chol.C_inv <- chol(forceSymmetric(symmpart(C_inv)))
cs <- symmpart(chol2inv(chol.C_inv))
C12<-as.matrix(cs[1:length(ybetas2),(length(ybetas2)+1):ncol(cs)])
C.mat <- cs[-c(1:length(ybetas2)),-c(1:length(ybetas2))]
betacov<-as.matrix(cs[c(1:length(ybetas2)),c(1:length(ybetas2))])
if (control$REML.N) {
var.eta <- C.mat
} else {
var.eta <- H.inv
}
rm(H)
eta<-H.inv%*% as.vector(crossprod(R_Z, R_R.inv) %*%(R_Y-R_X%*%ybetas2))
log.p.eta <- -(length(eta)/2) * log(2 * pi) - sum(log(diag(G.chol))) -
0.5 * crossprod(eta, as(G.inv,"generalMatrix")) %*% eta
log.p.r <- -(Nr/2) * log(2 * pi) + sum(log(diag(chol(R_R.inv)))) -
0.5 * crossprod(R_Y - R_X %*% ybetas2 - R_Z %*% eta, R_R.inv) %*%
(R_Y - R_X %*% ybetas2 - R_Z %*% eta)
res <- var.eta
if (control$REML.N) {
attr(res, "likelihood") <- as.vector(cons.logLik + log.p.eta +
log.p.r - 0.5 * (2 * sum(log(diag(chol.C_inv)))))
} else {
attr(res, "likelihood") <- as.vector(cons.logLik + log.p.eta +
log.p.r - 0.5 * (2 * sum(log(diag(chol.H)))))
}
attr(res, "eta") <- eta
attr(res, "betacov") <- betacov
attr(res, "C12") <- C12
res
}
update.ybeta <- function(X, Y, Z, R_inv, eta.hat) {
A.ybeta <- crossprod(X, R_inv) %*% X
B.ybeta <- crossprod(X, R_inv) %*% (Y - Z %*% eta.hat)
as.vector(solve(A.ybeta, B.ybeta))
}
Z_mat$home<-as.character(Z_mat$home)
Z_mat$away<-as.character(Z_mat$away)
Z_mat$year<-rep(1,dim(Z_mat)[1])
teams <- sort(unique(c(Z_mat$home,Z_mat$away)))
nteams<-length(teams)
teamsfbs<-teams
nfbs<-length(teamsfbs)
J_Y<-c(t(cbind(Z_mat$Score.For,Z_mat$Score.Against)))
Nr <- length(Z_mat$home_win)
R_RE_mat <- Matrix(0,Nr,length(teams))
J_RE_mat <- Matrix(0,2*Nr,2*length(teams))
colnames(R_RE_mat)<-teams
colnames(J_RE_mat)<-rep(teams,each=2)
for(i in 1:length(teams)){
R_RE_mat[Z_mat$home==teams[i],i]<-rep(1,length(R_RE_mat[Z_mat$home==teams[i],i]))
R_RE_mat[Z_mat$away==teams[i],i]<-rep(-1,length(R_RE_mat[Z_mat$away==teams[i],i]))
}
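# Build the bivariate response: every game contributes two rows, the home
# team's score (its offense against the away defense) followed by the away
# team's score, plus a Location factor (Home / Away / Neutral Site) and,
# optionally, an overtime indicator.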
joffense<-c(t(cbind(Z_mat$home,Z_mat$away)))
jdefense<-c(t(cbind(Z_mat$away,Z_mat$home)))
J_mat<-cbind(as.numeric(J_Y),joffense,jdefense)
J_mat<-as.data.frame(J_mat)
templ<-rep(Z_mat$neutral.site,each=2)
templ2<-rep("Neutral Site",length(templ))
for(i in 1:(2*Nr)){
if(templ[i]==0&i%%2==1)
templ2[i]<-"Home"
if(templ[i]==0&i%%2==0)
templ2[i]<-"Away"
}
J_mat<-cbind(J_mat,templ2)
colnames(J_mat)<-c("J_Y","offense","defense","Location")
if(control$OT.flag){
J_mat<-cbind(J_mat,rep(Z_mat$OT,each=2))
colnames(J_mat)<-c("J_Y","offense","defense","Location","OT")
}
J_mat<-as.data.frame(J_mat)
J_mat$J_Y<-as.numeric(J_Y)
jreo<-sparse.model.matrix(as.formula(~offense+0),data=J_mat)
jred<--1*sparse.model.matrix(as.formula(~defense+0),data=J_mat)
J_RE_mat[,seq(1,2*length(teams),by=2)]<-jreo
J_RE_mat[,seq(2,2*length(teams),by=2)]<-jred
J_X_mat <- sparse.model.matrix(y_fixed_effects, J_mat, drop.unused.levels = TRUE)
n_eta <- 2*length(teams)
n_jbeta <- dim(J_X_mat)[2]
Sig.mat <- as.matrix(rep(1,Nr))
Sig.mat2 <- as.matrix(rep(1,2*Nr))
nyear<-1
FE.count<-0
R_X <- Matrix(J_X_mat)
R_Y <- as.numeric(as.vector(J_mat$J_Y))
Z<-c(NULL)
R_Z <- Matrix(J_RE_mat)
t_R_Z <- t(R_Z)
Y <- as.vector(Z_mat$Score.For-Z_mat$Score.Against)
cross_R_Z <- crossprod(R_Z)
cross_Z_j <- list()
cross_R_Z_j <- list()
X_j <- list(NULL)
R_X_j <- list(NULL)
cross_X_j <- list(NULL)
cross_R_X_j <- list(NULL)
Y_j <- list(NULL)
R_Y_j <- list(NULL)
Z_j <- list(NULL)
R_Z_j <- list(NULL)
for (j in 1:nyear) {
cross_R_Z_j[[j]] <- crossprod(Matrix(J_RE_mat[Z_mat$year ==
j, ]))
R_X_j[[j]] <- J_X_mat
R_Y_j[[j]] <- as.vector(J_Y[Z_mat$year == j ])
R_Z_j[[j]] <- J_RE_mat[Z_mat$year == j, ]
cross_R_X_j[[j]] <- crossprod(R_X_j[[j]])
}
eta.hat <- numeric(n_eta)
var.eta.hat <- Matrix(0, n_eta, n_eta)
G <- 100*Diagonal(n_eta)
R_R<-R_R.inv<-Diagonal(nrow(J_mat))
if (control$REML.N) {
cons.logLik <- 0.5 * (n_eta + ncol(R_X)) * log(2 * pi)
} else {
cons.logLik <- 0.5 * (n_eta) * log(2 * pi)
}
sigmas <- c(rep(0, nyear))
sigmas2 <- c(rep(0, nyear))
ybetas<-0
ybetas2 <- update.ybeta(X=R_X, Y=R_Y, Z=R_Z, R_inv=R_R.inv, eta.hat=eta.hat)
names(ybetas2)<-colnames(J_X_mat)
year.count<-Nr
iter <- control$iter.EM
r.mat <- Matrix(0, iter, length(ybetas2))
time.mat <- Matrix(0, iter, 1)
G.mat <- Matrix(0, iter, 3)
lgLik <- numeric(iter)
L1.conv <- FALSE
L2.conv <- FALSE
L1.conv.it <- 0
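# Pseudo-EM iteration: alternate between updating the offense/defense BLUPs eta
# (and their covariance) given the current G and R, and re-estimating the 2x2
# blocks of G (team random-effect covariance) and R (home/away residual
# covariance) from the posterior moments, stopping when the relative change in
# the log-likelihood drops below control$tol1.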
for (it in 1:iter) {
ptm <- proc.time()
rm(var.eta.hat)
new.eta <- update.eta(X = X, Y = Y, Z = Z,
cross_Z_j = cross_Z_j, Sig.mat = Sig.mat, ybetas = ybetas,
sigmas = sigmas, G = G, nyear = nyear, n_eta = n_eta,
cons.logLik = cons.logLik,R_X = R_X, R_Y = R_Y, R_Z = R_Z,
cross_R_Z_j = cross_R_Z_j, Sig.mat2 = Sig.mat2, ybetas2 = ybetas2,
sigmas2 = sigmas2,R_R.inv=R_R.inv)
r.mat[it, ] <- c(ybetas2)
lgLik[it] <- attr(new.eta, "likelihood")
trc.y1 <- numeric(n_eta)
trc.y2 <- Matrix(0, n_eta, n_eta)
G.mat[it, ] <- reduce.G(G)
eta <- as.vector(attr(new.eta, "eta"))
C12 <- attr(new.eta, "C12")
betacov <- matrix(attr(new.eta, "betacov"),nrow=length(ybetas2))
var.eta <- new.eta
eta.hat <- as.vector(eta)
var.eta.hat <- var.eta
rm(new.eta)
thets1 <- c(r.mat[it - 1, ], G.mat[it - 1, ])
thets2 <- c(r.mat[it, ], G.mat[it, ])
if ((control$verbose) & (it > 1)) {
cat("\n\niter:", it, "\n")
cat("log-likelihood:", lgLik[it], "\n")
cat("change in loglik:", sprintf("%.7f", lgLik[it] -
lgLik[it - 1]), "\n")
cat("n.mean:", round(ybetas2, 4), "\n")
cat("G:", reduce.G(G),"\n")
cat("R:", ltriangle(suppressWarnings(suppressMessages(as.matrix(R_R[1:2,1:2])))),"\n")
}
if (it > 5) {
check.lik <- abs(lgLik[it] - lgLik[it - 1])/abs(lgLik[it] +
control$tol1) < control$tol1
if (check.lik) {
conv <- TRUE
if (control$verbose) {
cat("\n\n Algorithm converged.\n")
cat("\n\niter:", it, "\n")
cat("log-likelihood:", sprintf("%.7f", lgLik[it]),
"\n")
cat("change in loglik:", sprintf("%.7f", lgLik[it] -
lgLik[it - 1]), "\n")
cat("n.mean:", round(ybetas2, 4), "\n")
cat("G:", reduce.G(G),"\n")
cat("R:", ltriangle(suppressWarnings(suppressMessages(as.matrix(R_R[1:2,1:2])))),"\n")
flush.console()
}
rm(j)
break
}
}
flush.console()
ptm.rbet <- proc.time()[3]
rm(eta)
eblup <- as.matrix(cbind(eta.hat, sqrt(diag(var.eta.hat))))
colnames(eblup) <- c("eblup", "std. error")
rownames(eblup) <- rep(teams,each=2)
rm(var.eta)
temp_mat <- symmpart(var.eta.hat + tcrossprod(eta.hat, eta.hat))
gt1<-gt2<-matrix(0,2,2)
for(i in 1:length(teams)){
gt1<-gt1+temp_mat[(2*(i-1)+1):(2*i),(2*(i-1)+1):(2*i)]
}
gt1<-gt1/nfbs
Gn<-kronecker(Diagonal(nfbs),symmpart(gt1))
sigup<-matrix(0,2,2)
for(i in 1:(nrow(J_mat)/2)){
yb<-R_Y[(2*i-1):(2*i)]
xb<-R_X[(2*i-1):(2*i),,drop=FALSE]
zb<-R_Z[(2*i-1):(2*i),]
if(home.field){
yxb<-yb-xb%*%ybetas2
}else{
yxb<-as.matrix(yb-rep(ybetas2,2))
}
if(control$REML.N){
sigup<- suppressWarnings(sigup+suppressMessages(tcrossprod(yxb))-yxb%*%t(zb%*%eta.hat)- (zb%*%eta.hat)%*%t(yxb)+zb%*%temp_mat%*%t(zb)+xb%*%betacov%*%t(xb)+2*xb%*%C12%*%t(zb))
}else{
sigup<- suppressWarnings(sigup+suppressMessages(tcrossprod(yxb))-yxb%*%t(zb%*%eta.hat)- (zb%*%eta.hat)%*%t(yxb)+zb%*%temp_mat%*%t(zb))
}
}
sigup<-symmpart(sigup/(nrow(J_mat)/2))
if(!control$REML.N){
ybetas2 <- update.ybeta(X=R_X, Y=R_Y, Z=R_Z, R_inv=R_R.inv, eta.hat=eta.hat)
}
R_R<-suppressMessages(kronecker(Diagonal(nrow(J_mat)/2),sigup))
R_R.inv<-suppressMessages(kronecker(Diagonal(nrow(J_mat)/2),solve(sigup)))
G <- Gn
if(control$REML.N){
G.chol <- chol(G)
G.inv <- chol2inv(G.chol)
R.inv.Z <- R_R.inv %*% R_Z
V.1 <- symmpart(chol2inv(chol(G.inv + t(R_Z) %*% R.inv.Z)))
tX.Rinv.Z <- t(R_X) %*% R.inv.Z
tX.Rinv.X <- t(R_X) %*% R_R.inv %*% R_X
ybetas2 <-
as.vector(chol2inv(chol(forceSymmetric(
symmpart(tX.Rinv.X -
tX.Rinv.Z %*% V.1 %*% t(tX.Rinv.Z))
))) %*% (t(R_X) %*% R_R.inv -
tX.Rinv.Z %*% V.1 %*% t(R.inv.Z)) %*% R_Y)
}
rm(Gn)
it.time <- (proc.time() - ptm)[3]
time.mat[it, ] <- c(it.time)
cat("Iteration", it, "took", it.time, "\n")
eblup <- cbind(eta.hat, sqrt(diag(var.eta.hat)))
colnames(eblup) <- c("eblup", "std. error")
rownames(eblup) <- rep(teams,each=2)
}
pattern.f.score <- function(R.i.parm,ybetas,X,Y,Z,Ny) {
R_i <- ltriangle(as.vector(R.i.parm))
pattern.Rtemplate <- ltriangle(1:(2/2 * (2 + 1)))
pattern.diag <- diag(pattern.Rtemplate)
pattern.score <- numeric(2/2 * (2 + 1))
pattern.sum <- matrix(0, 2, 2)
for (i in 1:(Ny/2)) {
X.t <- X[(1 + (i - 1) * 2):(i *
2), , drop = FALSE]
Y.t <- Y[(1 + (i - 1) * 2):(i *
2)]
Z.t <- Z[(1 + (i - 1) * 2):(i *
2), , drop = FALSE]
temp.t <- Y.t - X.t %*% ybetas
if(control$REML.N){
pattern.sum <- pattern.sum + tcrossprod(temp.t) -
tcrossprod(temp.t, Z.t %*% eta.hat) - tcrossprod(Z.t %*%
eta.hat, temp.t) + as.matrix(Z.t%*%temp_mat%*%t(Z.t)) + as.matrix(X.t%*%betacov%*%t(X.t))+2*as.matrix(X.t%*%C12%*%t(Z.t))
}else{
pattern.sum <- pattern.sum + tcrossprod(temp.t) -
tcrossprod(temp.t, Z.t %*% eta.hat) - tcrossprod(Z.t %*%
eta.hat, temp.t) + as.matrix(Z.t%*%temp_mat%*%t(Z.t))
}
}
pattern.y <- solve(R_i)
pattern.score<- -ltriangle(((Ny/2) * pattern.y) -(pattern.y %*% pattern.sum %*% pattern.y))
pattern.score<-pattern.score*c(1,.5,1)
-pattern.score
}
Score <- function(thetas) {
n_ybeta<-length(ybetas2)
Ny<-length(R_Y)
ybetas <- thetas[1:n_ybeta]
R.tri<-R.i.parm <- thetas[(n_ybeta+1):(n_ybeta+3)]
LRI <- length(R.i.parm)
R_i<-ltriangle(R.tri)
R <- symmpart(suppressMessages(kronecker(suppressMessages(Diagonal(Ny/2)),
R_i)))
R_inv <- symmpart(suppressMessages(kronecker(suppressMessages(Diagonal(Ny/2)),
chol2inv(chol(R_i)))))
G <- thetas[(n_ybeta+4):length(thetas)]
G<-kronecker(Diagonal(length(teams)),ltriangle(G))
new.eta <- update.eta(X = X, Y = Y, Z = Z,
cross_Z_j = cross_Z_j, Sig.mat = Sig.mat, ybetas = ybetas,
sigmas = sigmas, G = G, nyear = nyear, n_eta = n_eta,
cons.logLik = cons.logLik,R_X = R_X, R_Y = R_Y, R_Z = R_Z,
cross_R_Z_j = cross_R_Z_j, Sig.mat2 = Sig.mat2, ybetas2 = ybetas,
sigmas2 = sigmas2,R_R.inv=R_R.inv)
eta <- attr(new.eta, "eta")
C12 <- attr(new.eta, "C12")
eta.hat<-eta
betacov <- matrix(attr(new.eta, "betacov"),nrow=length(ybetas))
var.eta <- var.eta.hat <- new.eta
eta.hat <- as.vector(eta)
temp_mat <- var.eta.hat + tcrossprod(eta.hat, eta.hat)
rm(new.eta)
score.R <- -pattern.f.score(R.i.parm,ybetas2,R_X,R_Y,R_Z,Ny)
A.ybeta <- crossprod(R_X, R_inv) %*%R_X
B.ybeta <- crossprod(R_X, R_inv) %*% (R_Y - R_Z %*% eta.hat)
score.y <- as.vector(B.ybeta - A.ybeta %*% ybetas)
gam_t_sc <- list()
index1 <- 0
score.G <- Matrix(0, 0, 0)
gam_t_sc <- matrix(0, 2,2)
index2 <- c(1)
for (k in 1:nteams) {
gam_t_sc <- gam_t_sc + temp_mat[(index2):(index2 +
1), (index2):(index2 + 1)]
index2 <- index2 + 2
}
gam_t <- G[1:2, 1:2]
sv_gam_t <- chol2inv(chol(gam_t))
der <- -0.5 * (nteams * sv_gam_t - sv_gam_t %*%
gam_t_sc %*% sv_gam_t)
if (is.numeric(drop(sv_gam_t))) {
score.eta.t <- der
}
else {
score.eta.t <- 2 * der - diag(diag(der))
}
score.G<-ltriangle(score.eta.t)
if (home.field) {
-c(score.y, score.R, score.G)
}
else {
-c(score.R, score.G)
}
}
Hessian<-NULL
thetas <- c(ybetas2, ltriangle(sigup), reduce.G(G))
gradient<-Score(thetas)
if(control$Hessian){
cat("\nCalculating Hessian with a central difference approximation...\n")
flush.console()
Hessian <- symmpart(jacobian(Score, thetas, method="simple"))
if(!all(eigen(Hessian)$values>0)) cat("\nWarning: Hessian not positive-definite\n")
}
c.temp <- crossprod(R_X, R_R.inv) %*% R_Z
c.1 <- rbind(crossprod(R_X, R_R.inv) %*% R_X, t(c.temp))
G.inv <- chol2inv(chol(G))
c.2 <- rbind(c.temp, H.eta(sigmas = sigmas, cross_Z_j = cross_Z_j, Sig.mat = Sig.mat,
G.inv = G.inv, nyear = nyear, n_eta = n_eta, sigmas2=sigmas2,cross_R_Z_j=cross_R_Z_j,Sig.mat2=Sig.mat2,R_R.inv))
C_inv <- cbind(c.1, c.2)
C <- solve(C_inv)
eblup_stderror <- sqrt(diag(C)[-c(1:ncol(R_X))])
ybetas_stderror <- sqrt(diag(C)[1:ncol(R_X)])
ybetas_asycov<-C[1:ncol(R_X),1:ncol(R_X)]
ybetas_eblup_asycov<-C
rm(C, C_inv, c.2, c.1, c.temp)
eblup <- as.matrix(cbind(eta.hat, eblup_stderror))
rownames(eblup) <- colnames(R_Z)
G.res<-as.matrix(G[1:2,1:2])
colnames(G.res)<-c("Offense","Defense")
G.res.cor<-cov2cor(G.res)
R.res<-as.matrix(R_R[1:2,1:2])
colnames(R.res)<-c("Home","Away")
R.res.cor<-cov2cor(R.res)
if(!home.field) ybetas2<-ybetas2[1]
names(ybetas2)<-colnames(J_X_mat)
N.output <- list(Z = R_Z, Y = R_Y, X = R_X, G = G, R = R_R,
                 eta = eta.hat, var.eta = var.eta.hat,
                 ybetas_eblup_asycov = ybetas_eblup_asycov,
                 ybetas_asycov = ybetas_asycov,
                 ybetas_stderror = ybetas_stderror, gradient = gradient)
sresid=NULL
cresid=NULL
mresid <- try(as.numeric(R_Y - R_X %*% ybetas2))
cresid <- try(as.numeric(mresid - R_Z %*% eta.hat))
yhat <- try(as.numeric(R_X %*% ybetas2 + R_Z %*% eta.hat))
rchol <- try(chol(R_R.inv))
yhat.s <- try(as.vector(rchol %*% (yhat)))
sresid <- try(as.vector(rchol %*% R_Y - yhat.s))
res <- list(n.ratings.mov = NULL,
            n.ratings.offense = eblup[seq(1, 2 * nteams, by = 2), 1],
            n.ratings.defense = eblup[seq(2, 2 * nteams, by = 2), 1],
            p.ratings.offense = NULL, p.ratings.defense = NULL, b.ratings = NULL,
            n.mean = ybetas2, p.mean = NULL, b.mean = NULL,
            G = G.res, G.cor = G.res.cor, R = R.res, R.cor = R.res.cor,
            home.field = home.field, actual = R_Y,
            pred = R_X %*% ybetas2 + R_Z %*% eta.hat,
            Hessian = Hessian, parameters = thetas, sresid = sresid,
            N.output = N.output)
}
|
methStatusEval <- function(x, error=0.05, uninformative=TRUE){
threshold= ((error + error) - (2*error*error))
if(uninformative) fMet <- 1-(length(x[which(x==11|x==0)])/length(x))
else fMet <- 1-(length(x[which(x==11)])/length(x))
return(fMet>threshold)
}
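# Illustrative sketch (not part of the original source; x_demo is made up).
# With the default error = 0.05 the threshold is (0.05 + 0.05) - 2 * 0.05 * 0.05 = 0.095,
# and fMet is the fraction of entries not equal to 11 (nor 0 when uninformative = TRUE).
if (FALSE) {
  x_demo <- c(0, 0, 11, 11, 1, 10, 11, 0, 1, 11)
  methStatusEval(x_demo, error = 0.05, uninformative = TRUE)   # TRUE: fMet = 0.3 > 0.095
  methStatusEval(x_demo, error = 0.05, uninformative = FALSE)  # TRUE: fMet = 0.6 > 0.095
}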
|
grob_matrix_object = R6::R6Class(
classname = "grob_matrix_object",
public = list(
initial = tibble::tibble(),
current = matrix(),
test = tibble::tibble(),
type = character(),
current_group = NA_character_,
current_aesthetic = NA_character_,
current_structure = NA_character_,
last_edit = NA_character_,
structure_list = list(),
aesthetic_list = list(),
column_names_to_row = 0,
column_headings_added = 0,
height = NULL,
width = NULL,
units = "mm",
theme = 'default',
initialize = function(initial,
type){
self$initial = initial
self$type = type
}),
active = list(
finish_ga_list = function(height = self$height,
width = self$width,
units = self$units,
type = self$type,
test = self$test,
current = self$current,
theme = self$theme,
aesthetic_list = self$aesthetic_list,
structure_list = self$structure_list) {
structure_lookup_df = get_structure_lookup_df(
type = type,
current = current,
height = height,
width = width
)
for (structure in unique(structure_lookup_df[['structure']])) {
default = structure_lookup_df %>% dplyr::filter(structure %in% !!structure)
default_mat = default[['value']][[1]]
accepted_classes = default[['accepted_classes']][[1]]
input_mat = structure_list[[structure]]
if (is.null(input_mat)) {
input_mat = matrix(NA, nrow = nrow(default_mat), ncol = ncol(default_mat))
} else {
if (!any(methods::is(input_mat[1,1]) %in% accepted_classes)) {
error_msg = glue::glue("
The class of the {structure} structure input must be one of: \\
{paste(accepted_classes, collapse = ', ')}
")
stop(error_msg, call. = FALSE)
}
if (!all(dim(structure_list[[structure]]) == dim(default_mat))) {
nr_default = nrow(default_mat)
nc_default = ncol(default_mat)
nr_input = nrow(input_mat)
nc_input = ncol(input_mat)
error_msg = glue::glue("
The dimensions of {structure} must be 1x1 or {nr_default}x{nc_default}, \\
not {nr_input}x{nc_input}.
")
stop(error_msg, call. = FALSE)
}
}
boolean_matrix = is.na(input_mat)
input_mat[boolean_matrix] = default_mat[boolean_matrix]
structure_list[[structure]] = input_mat
}
aesthetic_lookup_list = get_matrix_aesthetic_lookup_df(
test = test,
current = current,
type = type,
width = width,
height = height,
units = units,
structure_list = structure_list
)
current = aesthetic_lookup_list[['current']]
self$current = current
aesthetic_lookup_df = aesthetic_lookup_list[['lookup_df']] %>%
dplyr::filter(theme == !!theme)
test_body_groups = unique(test[['grobblR_group']])
for (aesthetic in unique(aesthetic_lookup_df[['aesthetic']])) {
default_list = aesthetic_lookup_df %>%
dplyr::filter(
aesthetic %in% !!aesthetic,
group %in% test_body_groups
) %>%
dplyr::arrange(
match(
x = group,
table = c('column_headings', 'column_names', 'cells')
)
)
default_mat = do.call(rbind, default_list[['value']])
accepted_classes = default_list[['accepted_classes']][[1]]
input_mat = aesthetic_list[[aesthetic]]
if (is.null(input_mat)) {
input_mat = aes_matrix(df = current, value = NA)
} else {
if (type %in% 'text') {
input_mat = matrix(input_mat[1,1], ncol = 1, nrow = nrow(current))
}
if (!any(methods::is(input_mat[1,1]) %in% accepted_classes)) {
error_msg = glue::glue("
The class of the {aesthetic} aesthetic input must be one of: \\
{paste(accepted_classes, collapse = ', ')}
")
stop(error_msg, call. = FALSE)
}
if (!all(dim(input_mat) == dim(default_mat))) {
nr_default = nrow(default_mat)
nc_default = ncol(default_mat)
nr_input = nrow(input_mat)
nc_input = ncol(input_mat)
error_msg = glue::glue("
The dimensions of {aesthetic} must be {nr_default}x{nc_default}, \\
not {nr_input}x{nc_input}.
")
stop(error_msg, call. = FALSE)
}
}
boolean_matrix = is.na(input_mat)
input_mat[boolean_matrix] = default_mat[boolean_matrix]
if (methods::is(input_mat[1,1], 'character')) {
input_mat[input_mat %in% get_empty_placeholder()] = NA_character_
}
aesthetic_list[[aesthetic]] = input_mat
}
al_text_just = aesthetic_list[['text_just']]
al_text_align = aesthetic_list[['text_align']]
aesthetic_list[['text_just']][al_text_just %in% 'center' | al_text_align %in% 'center'] = 0.5
aesthetic_list[['text_align']][al_text_just %in% 'center' | al_text_align %in% 'center'] = 0.5
aesthetic_list[['text_just']][al_text_just %in% 'left' | al_text_align %in% 'left'] = 0
aesthetic_list[['text_align']][al_text_just %in% 'left' | al_text_align %in% 'left'] = 0
aesthetic_list[['text_just']][al_text_just %in% 'right' | al_text_align %in% 'right'] = 1
aesthetic_list[['text_align']][al_text_just %in% 'right' | al_text_align %in% 'right'] = 1
aesthetic_list[['text_just']] = matrix(
data = as_numeric_without_warnings(aesthetic_list[['text_just']]),
nrow = nrow(aesthetic_list[['text_just']])
)
aesthetic_list[['text_align']] = matrix(
data = as_numeric_without_warnings(aesthetic_list[['text_align']]),
nrow = nrow(aesthetic_list[['text_align']])
)
if (any(is.na(aesthetic_list[['text_align']])) | any(is.na(aesthetic_list[['text_just']]))) {
error_msg = glue::glue("
If a character string is inputted into text_align or text_just, the \\
character value must be in ('left', 'right', 'center').
")
stop(error_msg, call. = FALSE)
}
al_text_v_just = aesthetic_list[['text_v_just']]
al_text_v_align = aesthetic_list[['text_v_align']]
aesthetic_list[['text_v_just']][al_text_v_just %in% 'center' | al_text_v_align %in% 'center'] = 0.5
aesthetic_list[['text_v_align']][al_text_v_just %in% 'center' | al_text_v_align %in% 'center'] = 0.5
aesthetic_list[['text_v_just']][al_text_v_just %in% 'bottom' | al_text_v_align %in% 'bottom'] = 0
aesthetic_list[['text_v_align']][al_text_v_just %in% 'bottom' | al_text_v_align %in% 'bottom'] = 0
aesthetic_list[['text_v_just']][al_text_v_just %in% 'top' | al_text_v_align %in% 'top'] = 1
aesthetic_list[['text_v_align']][al_text_v_just %in% 'top' | al_text_v_align %in% 'top'] = 1
aesthetic_list[['text_v_just']] = matrix(
data = as_numeric_without_warnings(aesthetic_list[['text_v_just']]),
nrow = nrow(aesthetic_list[['text_v_just']])
)
aesthetic_list[['text_v_align']] = matrix(
data = as_numeric_without_warnings(aesthetic_list[['text_v_align']]),
nrow = nrow(aesthetic_list[['text_v_align']])
)
if (any(is.na(aesthetic_list[['text_v_align']])) | any(is.na(aesthetic_list[['text_v_just']]))) {
error_msg = glue::glue("
If a character string is inputted into text_v_align or text_v_just, the \\
character value must be in ('top', 'bottom', 'center').
")
stop(error_msg, call. = FALSE)
}
return(c(aesthetic_list, structure_list))
})
)
grob_image_object = R6::R6Class(
classname = "grob_image_object",
public = list(
initial = character(),
structure_list = list(),
initialize = function(initial){
self$initial = initial
}),
active = list(
finish_ga_list = function(structure_list = self$structure_list) {
structure_lookup_df = get_structure_lookup_df(type = 'image')
for (structure in unique(structure_lookup_df[['structure']])) {
default = structure_lookup_df %>% dplyr::filter(structure %in% !!structure)
default_value = default[['value']][[1]]
accepted_classes = default[['accepted_classes']][[1]]
input = structure_list[[structure]]
if (is.null(input)) {
input = default_value
} else {
if (!any(methods::is(input) %in% accepted_classes)) {
error_msg = glue::glue("
The class of the {structure} structure input must be one of: \\
{paste(accepted_classes, collapse = ', ')}
")
stop(error_msg, call. = FALSE)
}
}
structure_list[[structure]] = input
}
return(c(structure_list))
})
)
|
makeClusterPSOCK <- function(workers, makeNode = makeNodePSOCK,
                             port = c("auto", "random"), ...,
                             autoStop = FALSE,
                             tries = getOption2("parallelly.makeNodePSOCK.tries", 3L),
                             delay = getOption2("parallelly.makeNodePSOCK.tries.delay", 15.0),
                             validate = getOption2("parallelly.makeNodePSOCK.validate", TRUE),
                             verbose = getOption2("parallelly.debug", FALSE)) {
localhostHostname <- getOption2("parallelly.localhost.hostname", "localhost")
if (is.numeric(workers)) {
if (length(workers) != 1L) {
stopf("When numeric, argument 'workers' must be a single value: %s", length(workers))
}
workers <- as.integer(workers)
if (is.na(workers) || workers < 1L) {
stopf("Number of 'workers' must be one or greater: %s", workers)
}
workers <- rep(localhostHostname, times = workers)
}
tries <- as.integer(tries)
stop_if_not(length(tries) == 1L, is.integer(tries), !is.na(tries), tries >= 1L)
delay <- as.numeric(delay)
stop_if_not(length(delay) == 1L, is.numeric(delay), !is.na(delay), delay >= 0)
validate <- as.logical(validate)
stop_if_not(length(validate) == 1L, is.logical(validate), !is.na(validate))
if (identical(makeNode, makeNodePSOCK)) {
free <- freeConnections()
if (validate) free <- free - 1L
if (length(workers) > free) {
stopf("Cannot create %d parallel PSOCK nodes. Each node needs one connection but there are only %d connections left out of the maximum %d available on this R installation", length(workers), free, availableConnections())
}
}
verbose_prefix <- "[local output] "
if (verbose) {
mdebugf("%sWorkers: [n = %d] %s", verbose_prefix,
length(workers), hpaste(sQuote(workers)))
}
if (length(port) == 0L) {
stop("Argument 'port' must be of length one or more: 0")
}
port <- freePort(port)
if (verbose) mdebugf("%sBase port: %d", verbose_prefix, port)
n <- length(workers)
nodeOptions <- vector("list", length = n)
if (verbose) mdebugf("%sGetting setup options for %d cluster nodes ...", verbose_prefix, n)
for (ii in seq_len(n)) {
if (verbose) mdebugf("%s - Node %d of %d ...", verbose_prefix, ii, n)
options <- makeNode(workers[[ii]], port = port, ..., rank = ii, action = "options", verbose = verbose)
stop_if_not(inherits(options, "makeNodePSOCKOptions"))
nodeOptions[[ii]] <- options
}
if (verbose) mdebugf("%sGetting setup options for %d cluster nodes ... done", verbose_prefix, n)
setup_strategy <- lapply(nodeOptions, FUN = function(options) {
value <- options$setup_strategy
if (is.null(value)) value <- "sequential"
stop_if_not(is.character(value), length(value) == 1L)
value
})
setup_strategy <- unlist(setup_strategy, use.names = FALSE)
is_parallel <- (setup_strategy == "parallel")
force_sequential <- FALSE
if (any(is_parallel)) {
if (verbose) mdebugf("%s - Parallel setup requested for some PSOCK nodes", verbose_prefix)
if (!all(is_parallel)) {
if (verbose) mdebugf("%s - Parallel setup requested only for some PSOCK nodes; will revert to a sequential setup for all", verbose_prefix)
force_sequential <- TRUE
} else {
affected <- affected_by_bug18119()
if (!is.na(affected) && affected) {
if (verbose) mdebugf("%s - Parallel setup requested but not supported on this version of R: %s", verbose_prefix, getRversion())
force_sequential <- TRUE
}
}
}
if (force_sequential) {
setup_strategy <- "sequential"
for (ii in which(is_parallel)) {
if (verbose) mdebugf("%s - Node %d of %d ...", verbose_prefix, ii, n)
args <- list(workers[[ii]], port = port, ..., rank = ii, action = "options", verbose = verbose)
args$setup_strategy <- "sequential"
options <- do.call(makeNode, args = args)
stop_if_not(inherits(options, "makeNodePSOCKOptions"))
nodeOptions[[ii]] <- options
}
}
setup_strategy <- lapply(nodeOptions, FUN = function(options) {
value <- options$setup_strategy
if (is.null(value)) value <- "sequential"
stop_if_not(is.character(value), length(value) == 1L)
value
})
setup_strategy <- unlist(setup_strategy, use.names = FALSE)
setup_strategy <- unique(setup_strategy)
stop_if_not(length(setup_strategy) == 1L)
cl <- vector("list", length = length(nodeOptions))
class(cl) <- c("RichSOCKcluster", "SOCKcluster", "cluster")
on.exit({
nodes <- vapply(cl, FUN = inherits, c("SOCKnode", "SOCK0node"),
FUN.VALUE = FALSE)
stopCluster(cl[nodes])
cl <- NULL
})
if (setup_strategy == "parallel") {
if (getRversion() < "4.0.0") {
stopf("Parallel setup of PSOCK cluster nodes is not supported in R %s", getRversion())
socketAccept <- serverSocket <- function(...) NULL
}
sendCall <- importParallel("sendCall")
recvResult <- importParallel("recvResult")
options <- nodeOptions[[1]]
if (verbose) {
mdebugf("%sSetting up PSOCK nodes in parallel", verbose_prefix)
mstr(options)
}
port <- options[["port"]]
connectTimeout <- options[["connectTimeout"]]
timeout <- options[["timeout"]]
useXDR <- options[["useXDR"]]
nodeClass <- c("RichSOCKnode", if(useXDR) "SOCKnode" else "SOCK0node")
cmd <- options[["cmd"]]
if (verbose) {
mdebugf("%sSystem call to launch all workers:", verbose_prefix)
mdebugf("%s%s", verbose_prefix, cmd)
}
if (verbose) mdebugf("%sStarting PSOCK main server", verbose_prefix)
socket <- serverSocket(port = port)
on.exit(if (!is.null(socket)) close(socket), add = TRUE)
if (.Platform$OS.type == "windows") {
for (ii in seq_along(cl)) {
system(cmd, wait = FALSE, input = "")
}
} else {
cmd <- paste(rep(cmd, times = length(cl)), collapse = " & ")
system(cmd, wait = FALSE)
}
if (verbose) mdebugf("%sWorkers launched", verbose_prefix)
ready <- 0L
pending <- list()
on.exit({
lapply(pending, FUN = function(x) close(x$con))
cl <- NULL
}, add = TRUE)
if (verbose) mdebugf("%sWaiting for workers to connect back", verbose_prefix)
t0 <- Sys.time()
while (ready < length(cl)) {
if (verbose) mdebugf("%s%d workers out of %d ready", verbose_prefix, ready, length(cl))
cons <- lapply(pending, FUN = function(x) x$con)
if (difftime(Sys.time(), t0, units="secs") > connectTimeout + 5) {
failed <- length(cl) - ready
stop(sprintf(ngettext(failed,
"Cluster setup failed. %d worker of %d failed to connect.",
"Cluster setup failed. %d of %d workers failed to connect."),
failed, length(cl)))
}
a <- socketSelect(append(list(socket), cons), write = FALSE, timeout = connectTimeout)
canAccept <- a[1]
canReceive <- seq_along(pending)[a[-1]]
if (canAccept) {
con <- socketAccept(socket = socket, blocking = TRUE, open = "a+b", timeout = timeout)
scon <- structure(list(con = con, host = localhostHostname, rank = ready), class = nodeClass)
res <- tryCatch({
sendCall(scon, eval, list(quote(Sys.getpid())))
}, error = identity)
pending <- append(pending, list(scon))
}
for (scon in pending[canReceive]) {
pid <- tryCatch({
recvResult(scon)
}, error = identity)
if (is.integer(pid)) {
ready <- ready + 1L
cl[[ready]] <- scon
} else {
close(scon$con)
}
}
if (length(canReceive) > 0L) pending <- pending[-canReceive]
}
} else if (setup_strategy == "sequential") {
retryPort <- getOption2("parallelly.makeNodePSOCK.tries.port", "same")
for (ii in seq_along(cl)) {
if (verbose) {
mdebugf("%sCreating node %d of %d ...", verbose_prefix, ii, n)
mdebugf("%s- setting up node", verbose_prefix)
}
options <- nodeOptions[[ii]]
for (kk in 1:tries) {
if (verbose) {
mdebugf("%s- attempt
}
node <- tryCatch({
makeNode(options, verbose = verbose)
}, error = identity)
if (!inherits(node, "PSOCKConnectionError")) break
if (kk < tries) {
if (verbose) {
message(conditionMessage(node))
if (retryPort == "next") {
options$port <- min(options$port + 1L, 65535L)
} else if (retryPort == "available") {
options$port <- freePort()
}
mdebugf("%s- waiting %g seconds before trying again",
verbose_prefix, delay)
}
Sys.sleep(delay)
}
}
if (inherits(node, "error")) {
ex <- node
if (inherits(node, "PSOCKConnectionError")) {
if (verbose) {
mdebugf("%s Failed %d attempts with %g seconds delay",
verbose_prefix, tries, delay)
}
ex$message <- sprintf("%s\n * Number of attempts: %d (%gs delay)",
conditionMessage(ex), tries, delay)
} else {
ex$call <- sys.call()
}
stop(ex)
}
cl[[ii]] <- node
if (verbose) {
mdebugf("%sCreating node %d of %d ... done", verbose_prefix, ii, n)
}
}
}
try(close(socket), silent = TRUE)
socket <- NULL
if (validate) {
if (verbose) {
mdebugf("%s- collecting session information", verbose_prefix)
}
for (ii in seq_along(cl)) {
cl[ii] <- add_cluster_session_info(cl[ii])
}
}
if (autoStop) cl <- autoStopCluster(cl)
on.exit()
cl
}
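# Illustrative usage sketch (not executed when sourced); it assumes the rest of
# the parallelly helpers referenced above (makeNodePSOCK(), freePort(), ...) are
# available. A minimal local cluster, not a definitive recipe:
if (FALSE) {
  cl <- makeClusterPSOCK(2, verbose = TRUE)  # two workers on localhost
  print(cl)                                  # dispatched to print.RichSOCKcluster()
  parallel::parLapply(cl, 1:4, sqrt)
  parallel::stopCluster(cl)
}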
makeNodePSOCK <- function(worker = getOption2("parallelly.localhost.hostname", "localhost"),
                          master = NULL, port,
                          connectTimeout = getOption2("parallelly.makeNodePSOCK.connectTimeout", 2 * 60),
                          timeout = getOption2("parallelly.makeNodePSOCK.timeout", 30 * 24 * 60 * 60),
                          rscript = NULL, homogeneous = NULL, rscript_args = NULL,
                          rscript_envs = NULL, rscript_libs = NULL, rscript_startup = NULL,
                          rscript_sh = c("auto", "cmd", "sh"),
                          default_packages = c("datasets", "utils", "grDevices", "graphics", "stats", if (methods) "methods"),
                          methods = TRUE,
                          socketOptions = getOption2("parallelly.makeNodePSOCK.socketOptions", "no-delay"),
                          useXDR = getOption2("parallelly.makeNodePSOCK.useXDR", FALSE),
                          outfile = "/dev/null", renice = NA_integer_,
                          rshcmd = getOption2("parallelly.makeNodePSOCK.rshcmd", NULL),
                          user = NULL, revtunnel = TRUE, rshlogfile = NULL,
                          rshopts = getOption2("parallelly.makeNodePSOCK.rshopts", NULL),
                          rank = 1L, manual = FALSE, dryrun = FALSE, quiet = FALSE,
                          setup_strategy = getOption2("parallelly.makeNodePSOCK.setup_strategy", "parallel"),
                          action = c("launch", "options"), verbose = FALSE) {
verbose <- as.logical(verbose)
stop_if_not(length(verbose) == 1L, !is.na(verbose))
if (inherits(worker, "makeNodePSOCKOptions")) {
return(launchNodePSOCK(options = worker, verbose = verbose))
}
localhostHostname <- getOption2("parallelly.localhost.hostname", "localhost")
localMachine <- is.element(worker, c(localhostHostname, "localhost", "127.0.0.1"))
if (!localMachine) {
localMachine <- is_localhost(worker)
if (localMachine) worker <- getOption2("parallelly.localhost.hostname", "localhost")
}
attr(worker, "localhost") <- localMachine
stop_if_not(is.character(rscript_sh), length(rscript_sh) >= 1L, !anyNA(rscript_sh))
rscript_sh <- rscript_sh[1]
if (rscript_sh == "auto") {
if (localMachine) {
rscript_sh <- if (.Platform$OS.type == "windows") "cmd" else "sh"
} else {
rscript_sh <- "sh"
}
}
manual <- as.logical(manual)
stop_if_not(length(manual) == 1L, !is.na(manual))
dryrun <- as.logical(dryrun)
stop_if_not(length(dryrun) == 1L, !is.na(dryrun))
setup_strategy <- match.arg(setup_strategy, choices = c("sequential", "parallel"))
quiet <- as.logical(quiet)
stop_if_not(length(quiet) == 1L, !is.na(quiet))
if (identical(rshcmd, "")) rshcmd <- NULL
if (!is.null(rshcmd)) {
rshcmd <- as.character(rshcmd)
stop_if_not(length(rshcmd) >= 1L)
}
if (identical(rshopts, "")) rshopts <- NULL
rshopts <- as.character(rshopts)
user <- as.character(user)
stop_if_not(length(user) <= 1L)
port <- as.integer(port)
assertPort(port)
revtunnel <- as.logical(revtunnel)
stop_if_not(length(revtunnel) == 1L, !is.na(revtunnel))
if (!is.null(rshlogfile)) {
if (is.logical(rshlogfile)) {
stop_if_not(!is.na(rshlogfile))
if (rshlogfile) {
rshlogfile <- tempfile(pattern = "parallelly_makeClusterPSOCK_", fileext = ".log")
} else {
rshlogfile <- NULL
}
} else {
rshlogfile <- as.character(rshlogfile)
rshlogfile <- normalizePath(rshlogfile, mustWork = FALSE)
}
}
if (is.null(master)) {
if (localMachine || revtunnel) {
master <- localhostHostname
} else {
master <- Sys.info()[["nodename"]]
}
}
stop_if_not(!is.null(master))
timeout <- as.numeric(timeout)
stop_if_not(length(timeout) == 1L, !is.na(timeout), is.finite(timeout), timeout >= 0)
methods <- as.logical(methods)
stop_if_not(length(methods) == 1L, !is.na(methods))
if (!is.null(default_packages)) {
default_packages <- as.character(default_packages)
stop_if_not(!anyNA(default_packages))
is_asterisk <- (default_packages == "*")
if (any(is_asterisk)) {
pkgs <- getOption("defaultPackages")
if (length(pkgs) == 0) {
default_packages <- default_packages[!is_asterisk]
} else {
pkgs <- paste(pkgs, collapse=",")
default_packages[is_asterisk] <- pkgs
default_packages <- unlist(strsplit(default_packages, split = ",", fixed = TRUE))
}
}
default_packages <- unique(default_packages)
pattern <- sprintf("^%s$", .standard_regexps()$valid_package_name)
invalid <- grep(pattern, default_packages, invert = TRUE, value = TRUE)
if (length(invalid) > 0) {
stop(sprintf("Argument %s specifies invalid package names: %s", sQuote("default_packages"), paste(sQuote(invalid), collapse = ", ")))
}
}
if (is.null(homogeneous)) {
homogeneous <- {
localMachine ||
(!revtunnel && is_localhost(master)) ||
(!is_ip_number(worker) && !is_fqdn(worker))
}
}
homogeneous <- as.logical(homogeneous)
stop_if_not(length(homogeneous) == 1L, !is.na(homogeneous))
if (setup_strategy == "parallel") {
if (getRversion() < "4.0.0" ||
manual || dryrun || !homogeneous || !localMachine) {
setup_strategy <- "sequential"
}
}
bin <- "Rscript"
if (homogeneous) bin <- file.path(R.home("bin"), bin)
if (is.null(rscript)) {
rscript <- bin
} else {
if (!is.character(rscript)) rscript <- as.character(rscript)
stop_if_not(length(rscript) >= 1L)
rscript[rscript == "*"] <- bin
bin <- rscript[1]
if (homogeneous && !inherits(bin, "AsIs")) {
bin <- Sys.which(bin)
if (bin == "") bin <- normalizePath(rscript[1], mustWork = FALSE)
rscript[1] <- bin
}
}
name <- sub("[.]exe$", "", basename(bin))
is_Rscript <- (tolower(name) == "rscript")
rscript_args <- as.character(rscript_args)
if (length(rscript_startup) > 0L) {
if (!is.list(rscript_startup)) rscript_startup <- list(rscript_startup)
rscript_startup <- lapply(rscript_startup, FUN = function(init) {
if (is.language(init)) {
init <- deparse(init, width.cutoff = 500L)
init <- paste(init, collapse = ";")
}
init <- as.character(init)
if (length(init) == 0L) return(NULL)
tryCatch({
parse(text = init)
}, error = function(ex) {
stopf("Syntax error in argument 'rscript_startup': %s", conditionMessage(ex))
})
init
})
rscript_startup <- unlist(rscript_startup, use.names = FALSE)
}
if (!is.null(rscript_libs)) {
rscript_libs <- as.character(rscript_libs)
stop_if_not(!anyNA(rscript_libs))
}
useXDR <- as.logical(useXDR)
stop_if_not(length(useXDR) == 1L, !is.na(useXDR))
if (!is.null(socketOptions)) {
stop_if_not(is.character(socketOptions),length(socketOptions) == 1L,
!is.na(socketOptions), nzchar(socketOptions))
if (socketOptions == "NULL") socketOptions <- NULL
}
stop_if_not(is.null(outfile) || is.character(outfile))
renice <- as.integer(renice)
stop_if_not(length(renice) == 1L)
rank <- as.integer(rank)
stop_if_not(length(rank) == 1L, !is.na(rank))
action <- match.arg(action, choices = c("launch", "options"))
verbose_prefix <- "[local output] "
if (!inherits(rscript, "AsIs")) {
idxs <- grep("^[[:alpha:]_][[:alnum:]_]*=.*", rscript, invert = TRUE)
rscript[idxs] <- shQuote(rscript[idxs], type = rscript_sh)
}
rscript_args_internal <- character(0L)
if (localMachine && !dryrun) {
res <- useWorkerPID(rscript, rank = rank, rscript_sh = rscript_sh, verbose = verbose)
pidfile <- res$pidfile
rscript_args_internal <- c(res$rscript_pid_args, rscript_args_internal)
} else {
pidfile <- NULL
}
rscript_label <- getOption2("parallelly.makeNodePSOCK.rscript_label", NULL)
if (!is.null(rscript_label) && nzchar(rscript_label) && !isFALSE(as.logical(rscript_label))) {
if (isTRUE(as.logical(rscript_label))) {
script <- grep("[.]R$", commandArgs(), value = TRUE)[1]
if (is.na(script)) script <- "UNKNOWN"
rscript_label <- sprintf("%s:%s:%s:%s", script, Sys.getpid(), Sys.info()[["nodename"]], Sys.info()[["user"]])
}
rscript_args_internal <- c("-e", shQuote(paste0("
}
if (!is.null(default_packages)) {
pkgs <- paste(unique(default_packages), collapse = ",")
if (is_Rscript) {
arg <- sprintf("--default-packages=%s", pkgs)
rscript_args_internal <- c(arg, rscript_args_internal)
} else {
arg <- sprintf("R_DEFAULT_PACKAGES=%s", pkgs)
on_MSWindows <- (rscript_sh %in% c("cmd", "cmd2"))
if (on_MSWindows) {
rscript_args <- c(arg, rscript_args)
} else {
rscript <- c(arg, rscript)
}
}
}
if (!localMachine && revtunnel && getOption2("parallelly.makeNodePSOCK.port.increment", TRUE)) {
rscript_port <- assertPort(port + (rank - 1L))
if (verbose) {
mdebugf("%sRscript port: %d + %d = %d\n", verbose_prefix, port, rank-1L, rscript_port)
}
} else {
rscript_port <- port
if (verbose) {
mdebugf("%sRscript port: %d\n", verbose_prefix, rscript_port)
}
}
if (length(socketOptions) == 1L) {
code <- sprintf("options(socketOptions = \"%s\")", socketOptions)
rscript_expr <- c("-e", shQuote(code, type = rscript_sh))
rscript_args_internal <- c(rscript_args_internal, rscript_expr)
}
if (length(rscript_startup) > 0L) {
rscript_startup <- paste("invisible({", rscript_startup, "})", sep = "")
rscript_startup <- shQuote(rscript_startup, type = rscript_sh)
rscript_startup <- lapply(rscript_startup, FUN = function(value) c("-e", value))
rscript_startup <- unlist(rscript_startup, use.names = FALSE)
rscript_args_internal <- c(rscript_args_internal, rscript_startup)
}
if (length(rscript_envs) > 0L) {
names <- names(rscript_envs)
if (is.null(names)) {
copy <- seq_along(rscript_envs)
} else {
copy <- which(nchar(names) == 0L)
}
if (length(copy) > 0L) {
missing <- NULL
for (idx in copy) {
name <- rscript_envs[idx]
if (!nzchar(name)) {
stop("Argument 'rscript_envs' contains an empty non-named environment variable")
}
value <- Sys.getenv(name, NA_character_)
if (!is.na(value)) {
rscript_envs[idx] <- value
names(rscript_envs)[idx] <- name
} else {
missing <- c(missing, name)
}
}
if (length(missing) > 0L) {
warnf("Did not pass down missing environment variables to cluster node: %s", paste(sQuote(missing), collapse = ", "))
}
names <- names(rscript_envs)
rscript_envs <- rscript_envs[nzchar(names)]
names <- names(rscript_envs)
}
if (length(unset <- which(is.na(rscript_envs))) > 0L) {
names <- names(rscript_envs[unset])
code <- sprintf("\"%s\"", names)
code <- paste(code, collapse = ", ")
code <- paste0("Sys.unsetenv(c(", code, "))")
tryCatch({
parse(text = code)
}, error = function(ex) {
stopf("Argument 'rscript_envs' appears to contain invalid values: %s", paste(sprintf("%s", sQuote(names)), collapse = ", "))
})
rscript_args_internal <- c(rscript_args_internal, "-e", shQuote(code, type = rscript_sh))
rscript_envs <- rscript_envs[-unset]
names <- names(rscript_envs)
}
if (length(names) > 0L) {
code <- sprintf('"%s"="%s"', names, rscript_envs)
code <- paste(code, collapse = ", ")
code <- paste0("Sys.setenv(", code, ")")
tryCatch({
parse(text = code)
}, error = function(ex) {
stopf("Argument 'rscript_envs' appears to contain invalid values: %s", paste(sprintf("%s=%s", sQuote(names), sQuote(rscript_envs)), collapse = ", "))
})
rscript_args_internal <- c(rscript_args_internal, "-e", shQuote(code, type = rscript_sh))
}
}
if (length(rscript_libs) > 0L) {
rscript_libs <- gsub("\\\\", "\\\\\\\\", rscript_libs, fixed = TRUE)
code <- paste0('"', rscript_libs, '"')
code[rscript_libs == "*"] <- ".libPaths()"
code <- paste(code, collapse = ",")
code <- paste0('.libPaths(c(', code, '))')
tryCatch({
parse(text = code)
}, error = function(ex) {
stopf("Argument 'rscript_libs' appears to contain invalid values: %s", paste(sQuote(rscript_libs), collapse = ", "))
})
rscript_args_internal <- c(rscript_args_internal, "-e", shQuote(code, type = rscript_sh))
}
if (!any(grepl("parallel:::[.](slave|work)RSOCK[(][)]", rscript_args))) {
cmd <- "workRSOCK <- tryCatch(parallel:::.workRSOCK, error=function(e) parallel:::.slaveRSOCK); workRSOCK()"
rscript_args_internal <- c(rscript_args_internal, "-e", shQuote(cmd, type = rscript_sh))
}
idx <- which(rscript_args == "*")
if (length(idx) == 0L) {
rscript_args <- c(rscript_args, rscript_args_internal)
} else if (length(idx) == 1L) {
n <- length(rscript_args)
if (idx == 1L) {
rscript_args <- c(rscript_args_internal, rscript_args[-1])
} else if (idx == n) {
rscript_args <- c(rscript_args[-n], rscript_args_internal)
} else {
rscript_args <- c(rscript_args[1:(idx-1)], rscript_args_internal,
rscript_args[(idx+1):n])
}
} else {
stop(sprintf("Argument 'rscript_args' may contain at most one asterisk ('*'): %s", paste(sQuote(rscript_args), collapse = " ")))
}
rscript <- paste(rscript, collapse = " ")
rscript_args <- paste(rscript_args, collapse = " ")
envvars <- paste0("MASTER=", master, " PORT=", rscript_port, " OUT=", outfile, " TIMEOUT=", timeout, " XDR=", useXDR,
" SETUPTIMEOUT=", connectTimeout, " SETUPSTRATEGY=", setup_strategy)
cmd <- paste(rscript, rscript_args, envvars)
if (!is.na(renice) && renice > 0L) {
cmd <- sprintf("nice --adjustment=%d %s", renice, cmd)
}
if (!localMachine) {
find <- is.null(rshcmd)
if (find) {
which <- NULL
if (verbose) {
mdebugf("%sWill search for all 'rshcmd' available\n",
verbose_prefix)
}
} else if (all(grepl("^<[a-zA-Z-]+>$", rshcmd))) {
find <- TRUE
if (verbose) {
mdebugf("%sWill search for specified 'rshcmd' types: %s\n",
verbose_prefix, paste(sQuote(rshcmd), collapse = ", "))
}
which <- gsub("^<([a-zA-Z-]+)>$", "\\1", rshcmd)
}
if (find) {
rshcmd <- find_rshcmd(which = which,
must_work = !localMachine && !manual && !dryrun)
if (verbose) {
s <- unlist(lapply(rshcmd, FUN = function(r) {
sprintf("%s [type=%s, version=%s]", paste(sQuote(r), collapse = ", "), sQuote(attr(r, "type")), sQuote(attr(r, "version")))
}))
s <- paste(sprintf("%s %d. %s", verbose_prefix, seq_along(s), s), collapse = "\n")
mdebugf("%sFound the following available 'rshcmd':\n%s", verbose_prefix, s)
}
rshcmd <- rshcmd[[1]]
} else {
if (is.null(attr(rshcmd, "type"))) attr(rshcmd, "type") <- "<unknown>"
if (is.null(attr(rshcmd, "version"))) attr(rshcmd, "version") <- "<unknown>"
}
stop_if_not(is.character(rshcmd), length(rshcmd) >= 1L)
s <- sprintf("type=%s, version=%s", sQuote(attr(rshcmd, "type")), sQuote(attr(rshcmd, "version")))
rshcmd_label <- sprintf("%s [%s]", paste(sQuote(rshcmd), collapse = ", "), s)
if (verbose) mdebugf("%sUsing 'rshcmd': %s", verbose_prefix, rshcmd_label)
if (length(user) == 1L) rshopts <- c("-l", user, rshopts)
if (revtunnel) {
if (is_localhost(master) && .Platform$OS.type == "windows" && (
isTRUE(attr(rshcmd, "OpenSSH_for_Windows")) ||
basename(rshcmd[1]) == "ssh"
)) {
master <- "127.0.0.1"
}
rshopts <- c(sprintf("-R %d:%s:%d", rscript_port, master, port), rshopts)
}
if (is.character(rshlogfile)) {
rshopts <- c(sprintf("-E %s", shQuote(rshlogfile)), rshopts)
}
rshopts <- paste(rshopts, collapse = " ")
rsh_call <- paste(paste(shQuote(rshcmd), collapse = " "), rshopts, worker)
local_cmd <- paste(rsh_call, shQuote(cmd, type = rscript_sh))
} else {
rshcmd_label <- NULL
rsh_call <- NULL
local_cmd <- cmd
}
stop_if_not(length(local_cmd) == 1L)
options <- structure(list(
local_cmd = local_cmd,
worker = worker,
rank = rank,
rshlogfile = rshlogfile,
port = port,
connectTimeout = connectTimeout,
timeout = timeout,
useXDR = useXDR,
pidfile = pidfile,
setup_strategy = setup_strategy,
outfile = outfile,
rshcmd_label = rshcmd_label,
rsh_call = rsh_call,
cmd = cmd,
localMachine = localMachine,
manual = manual,
dryrun = dryrun,
quiet = quiet,
rshcmd = rshcmd,
revtunnel = revtunnel
), class = c("makeNodePSOCKOptions", "makeNodeOptions"))
if (action == "options") return(options)
launchNodePSOCK(options, verbose = verbose)
}
launchNodePSOCK <- function(options, verbose = FALSE) {
stop_if_not(inherits(options, "makeNodePSOCKOptions"))
local_cmd <- options[["local_cmd"]]
worker <- options[["worker"]]
rank <- options[["rank"]]
rshlogfile <- options[["rshlogfile"]]
port <- options[["port"]]
connectTimeout <- options[["connectTimeout"]]
timeout <- options[["timeout"]]
pidfile <- options[["pidfile"]]
useXDR <- options[["useXDR"]]
outfile <- options[["outfile"]]
rshcmd_label <- options[["rshcmd_label"]]
rsh_call <- options[["rsh_call"]]
cmd <- options[["cmd"]]
localMachine <- options[["localMachine"]]
manual <- options[["manual"]]
dryrun <- options[["dryrun"]]
quiet <- options[["quiet"]]
rshcmd <- options[["rshcmd"]]
revtunnel <- options[["revtunnel"]]
setup_strategy <- options[["setup_strategy"]]
if (setup_strategy == "parallel") {
stop("INTERNAL ERROR: launchNodePSOCK() called with setup_strategy='parallel', which should never occur")
}
verbose <- as.logical(verbose)
stop_if_not(length(verbose) == 1L, !is.na(verbose))
verbose_prefix <- "[local output] "
is_worker_output_visible <- is.null(outfile)
if (manual || dryrun) {
if (!quiet) {
msg <- c("----------------------------------------------------------------------")
if (localMachine) {
msg <- c(msg, sprintf("Manually, start worker
} else {
msg <- c(msg, sprintf("Manually, (i) login into external machine %s:", sQuote(worker)),
sprintf("\n %s\n", rsh_call))
msg <- c(msg, sprintf("and (ii) start worker
sprintf("\n %s\n", cmd))
msg <- c(msg, sprintf("Alternatively, start worker
sprintf("\n %s\n", local_cmd))
}
msg <- paste(c(msg, ""), collapse = "\n")
cat(msg)
flush.console()
}
if (dryrun) return(NULL)
} else {
if (verbose) {
mdebugf("%sStarting worker
}
input <- if (.Platform$OS.type == "windows") "" else NULL
res <- system(local_cmd, wait = FALSE, input = input)
if (verbose) {
mdebugf("%s- Exit code of system() call: %s", verbose_prefix, res)
}
if (res != 0) {
warnf("system(%s) had a non-zero exit code: %d", local_cmd, res)
}
}
if (verbose) {
mdebugf("%sWaiting for worker
if (is_worker_output_visible) {
if (.Platform$OS.type == "windows") {
mdebugf("%s- Detected 'outfile=NULL' on Windows: this will make the output from the background worker visible when running R from a terminal, but it will most likely not be visible when using a GUI.", verbose_prefix)
} else {
mdebugf("%s- Detected 'outfile=NULL': this will make the output from the background worker visible", verbose_prefix)
}
}
}
con <- local({
setTimeLimit(elapsed = connectTimeout)
on.exit(setTimeLimit(elapsed = Inf))
localhostHostname <- getOption2("parallelly.localhost.hostname", "localhost")
warnings <- list()
tryCatch({
withCallingHandlers({
socketConnection(localhostHostname, port = port, server = TRUE,
blocking = TRUE, open = "a+b", timeout = timeout)
}, warning = function(w) {
if (verbose) {
mdebugf("%sDetected a warning from socketConnection(): %s", verbose_prefix, sQuote(conditionMessage(w)))
}
warnings <<- c(warnings, list(w))
})
}, error = function(ex) {
setTimeLimit(elapsed = Inf)
machineType <- if (localMachine) "local" else "remote"
msg <- sprintf("Failed to launch and connect to R worker on %s machine %s from local machine %s.\n", machineType, sQuote(worker), sQuote(Sys.info()[["nodename"]]))
cmsg <- conditionMessage(ex)
if (grepl(gettext("reached elapsed time limit"), cmsg)) {
msg <- c(msg, sprintf(" * The error produced by socketConnection() was: %s (which suggests that the connection timeout of %.0f seconds (argument 'connectTimeout') kicked in)\n", sQuote(cmsg), connectTimeout))
} else {
msg <- c(msg, sprintf(" * The error produced by socketConnection() was: %s\n", sQuote(cmsg)))
}
if (length(warnings) > 0) {
msg <- c(msg, sprintf(" * In addition, socketConnection() produced %d warning(s):\n", length(warnings)))
for (kk in seq_along(warnings)) {
cmsg <- conditionMessage(warnings[[kk]])
if (grepl("port [0-9]+ cannot be opened", cmsg)) {
msg <- c(msg, sprintf(" - Warning
} else {
msg <- c(msg, sprintf(" - Warning
}
}
}
msg <- c(msg, sprintf(" * The localhost socket connection that failed to connect to the R worker used port %d using a communication timeout of %.0f seconds and a connection timeout of %.0f seconds.\n", port, timeout, connectTimeout))
msg <- c(msg, sprintf(" * Worker launch call: %s.\n", local_cmd))
pid <- readWorkerPID(pidfile)
if (!is.null(pid)) {
if (verbose) mdebugf("Killing worker process (PID %d) if still alive", pid)
success <- pid_kill(pid)
if (verbose) mdebugf("Worker (PID %d) was successfully killed: %s", pid, success)
msg <- c(msg, sprintf(" * Worker (PID %d) was successfully killed: %s\n", pid, success))
} else if (localMachine) {
msg <- c(msg, sprintf(" * Failed to kill local worker because it's PID is could not be identified.\n"))
}
suggestions <- NULL
if (!verbose) {
suggestions <- c(suggestions, "Set 'verbose=TRUE' to see more details.")
}
if (.Platform$OS.type == "windows") {
if (is_worker_output_visible) {
suggestions <- c(suggestions, "On Windows, to see output from worker, set 'outfile=NULL' and run R from a terminal (not a GUI).")
} else {
suggestions <- c(suggestions, "On Windows, output from worker when using 'outfile=NULL' is only visible when running R from a terminal (not a GUI).")
}
} else {
if (!is_worker_output_visible) {
suggestions <- c(suggestions, "Set 'outfile=NULL' to see output from worker.")
}
}
if (is.character(rshlogfile)) {
smsg <- sprintf("Inspect the content of log file %s for %s.", sQuote(rshlogfile), paste(sQuote(rshcmd), collapse = " "))
lmsg <- tryCatch(readLines(rshlogfile, n = 15L, warn = FALSE), error = function(ex) NULL)
if (length(lmsg) > 0) {
lmsg <- sprintf(" %2d: %s", seq_along(lmsg), lmsg)
smsg <- sprintf("%s The first %d lines are:\n%s", smsg, length(lmsg), paste(lmsg, collapse = "\n"))
}
suggestions <- c(suggestions, smsg)
} else {
suggestions <- c(suggestions, sprintf("Set 'rshlogfile=TRUE' to enable logging for %s.", paste(sQuote(rshcmd), collapse = " ")))
}
if (!localMachine && revtunnel && isTRUE(attr(rshcmd, "OpenSSH_for_Windows"))) {
suggestions <- c(suggestions, sprintf("The 'rshcmd' (%s) used may not support reverse tunneling (revtunnel = TRUE). See ?parallelly::makeClusterPSOCK for alternatives.\n", rshcmd_label))
}
if (length(suggestions) > 0) {
suggestions <- sprintf(" - Suggestion
msg <- c(msg, " * Troubleshooting suggestions:\n", suggestions)
}
msg <- paste(msg, collapse = "")
ex$message <- msg
class(ex) <- c("PSOCKConnectionError", class(ex))
local({
oopts <- options(warning.length = 2000L)
on.exit(options(oopts))
stop(ex)
})
})
})
setTimeLimit(elapsed = Inf)
if (verbose) {
mdebugf("%sConnection with worker
}
structure(list(con = con, host = worker, rank = rank, rshlogfile = rshlogfile),
class = c("RichSOCKnode", if (useXDR) "SOCKnode" else "SOCK0node"))
}
is_localhost <- local({
localhosts <- c("localhost", "127.0.0.1")
non_localhosts <- character(0L)
function(worker, hostname = Sys.info()[["nodename"]], pathnames = "/etc/hosts") {
if (is.null(worker) && is.null(hostname)) {
localhosts <<- c("localhost", "127.0.0.1")
non_localhosts <<- character(0L)
return(NA)
}
stop_if_not(length(worker) == 1, length(hostname) == 1)
if (worker %in% localhosts) return(TRUE)
if (worker %in% non_localhosts) return(FALSE)
if (worker == hostname) {
localhosts <<- unique(c(localhosts, worker))
return(TRUE)
}
alias <- getOption2("parallelly.localhost.hostname")
if (is.character(alias) && worker == alias) {
localhosts <<- unique(c(localhosts, worker))
return(TRUE)
}
pathnames <- pathnames[file_test("-f", pathnames)]
if (length(pathnames) == 0L) return(FALSE)
pattern <- sprintf("^((|.*[[:space:]])%s[[:space:]]+%s([[:space:]]+|)|(|.*[[:space:]])%s[[:space:]]+%s([[:space:]]+|))$", hostname, worker, worker, hostname)
for (pathname in pathnames) {
bfr <- readLines(pathname, warn = FALSE)
if (any(grepl(pattern, bfr, ignore.case = TRUE))) {
localhosts <<- unique(c(localhosts, worker))
return(TRUE)
}
}
non_localhosts <<- unique(c(non_localhosts, worker))
FALSE
}
})
is_ip_number <- function(worker) {
ip <- strsplit(worker, split = ".", fixed = TRUE)[[1]]
if (length(ip) != 4) return(FALSE)
ip <- as.integer(ip)
if (anyNA(ip)) return(FALSE)
all(0 <= ip & ip <= 255)
}
is_fqdn <- function(worker) {
grepl(".", worker, fixed = TRUE)
}
find_rshcmd <- function(which = NULL, first = FALSE, must_work = TRUE) {
query_version <- function(bin, args = "-V") {
v <- suppressWarnings(system2(bin, args = args, stdout = TRUE, stderr = TRUE))
v <- paste(v, collapse = "; ")
stop_if_not(length(v) == 1L)
v
}
find_rstudio_ssh <- function() {
path <- Sys.getenv("RSTUDIO_MSYS_SSH")
if (!file_test("-d", path)) return(NULL)
path <- normalizePath(path)
path_org <- Sys.getenv("PATH")
on.exit(Sys.setenv(PATH = path_org))
Sys.setenv(PATH = path)
bin <- Sys.which("ssh")
if (!nzchar(bin)) return(NULL)
attr(bin, "type") <- "rstudio-ssh"
attr(bin, "version") <- query_version(bin, args = "-V")
bin
}
find_putty_plink <- function() {
bin <- Sys.which("plink")
if (!nzchar(bin)) return(NULL)
res <- c(bin, "-ssh")
attr(res, "type") <- "putty-plink"
attr(res, "version") <- query_version(bin, args = "-V")
res
}
find_ssh <- function() {
bin <- Sys.which("ssh")
if (!nzchar(bin)) return(NULL)
attr(bin, "type") <- "ssh"
v <- query_version(bin, args = "-V")
attr(bin, "version") <- v
if (any(grepl("OpenSSH_for_Windows", v)))
attr(bin, "OpenSSH_for_Windows") <- TRUE
bin
}
if (!is.null(which)) stop_if_not(is.character(which), length(which) >= 1L, !anyNA(which))
stop_if_not(is.logical(first), length(first) == 1L, !is.na(first))
stop_if_not(is.logical(must_work), length(must_work) == 1L, !is.na(must_work))
if (is.null(which)) {
if (.Platform$OS.type == "windows") {
which <- c("ssh", "putty-plink", "rstudio-ssh")
} else {
which <- c("ssh")
}
}
res <- list()
for (name in which) {
pathname <- switch(name,
"ssh" = find_ssh(),
"putty-plink" = find_putty_plink(),
"rstudio-ssh" = find_rstudio_ssh(),
stopf("Unknown 'rshcmd' type: %s", sQuote(name))
)
if (!is.null(pathname)) {
if (first) return(pathname)
res[[name]] <- pathname
}
}
if (length(res) > 0) return(res)
msg <- sprintf("Failed to locate a default SSH client (checked: %s). Please specify one via argument 'rshcmd'.", paste(sQuote(which), collapse = ", "))
if (must_work) stop(msg)
pathname <- "ssh"
msg <- sprintf("%s Will still try with %s.", msg, sQuote(paste(pathname, collapse = " ")))
warning(msg)
pathname
}
session_info <- function(pkgs = getOption2("parallelly.makeNodePSOCK.sessionInfo.pkgs", FALSE)) {
libs <- .libPaths()
info <- list(
r = c(R.version, os.type = .Platform$OS.type),
system = as.list(Sys.info()),
libs = libs,
pkgs = if (isTRUE(pkgs)) {
structure(lapply(libs, FUN = function(lib.loc) {
pkgs <- installed.packages(lib.loc = lib.loc)
if (length(pkgs) == 0) return(NULL)
paste0(pkgs[, "Package"], "_", pkgs[, "Version"])
}), names = libs)
},
pwd = getwd(),
process = list(pid = Sys.getpid())
)
info
}
add_cluster_session_info <- local({
get_session_info <- session_info
formals(get_session_info)$pkgs <- FALSE
environment(get_session_info) <- getNamespace("utils")
function(cl) {
stop_if_not(inherits(cl, "cluster"))
for (ii in seq_along(cl)) {
node <- cl[[ii]]
if (is.null(node)) next
if (!is.null(node$session_info)) next
pkgs <- getOption2("parallelly.makeNodePSOCK.sessionInfo.pkgs", FALSE)
node$session_info <- clusterCall(cl[ii], fun = get_session_info, pkgs = pkgs)[[1]]
if (inherits(node, "SOCK0node") || inherits(node, "SOCKnode")) {
pid <- capture.output(print(node))
pid <- as.integer(gsub(".* ", "", pid))
stop_if_not(node$session_info$process$pid == pid)
}
cl[[ii]] <- node
}
cl
}
})
windows_build_version <- local({
if (.Platform$OS.type != "windows") return(function() NULL)
function() {
res <- shell("ver", intern = TRUE)
if (length(res) == 0) return(NULL)
res <- grep("Microsoft", res, value = TRUE)
if (length(res) == 0) return(NULL)
res <- gsub(".*Version ([0-9.]+).*", "\\1", res)
tryCatch({
numeric_version(res)
}, error = function(ex) NULL)
}
})
useWorkerPID <- local({
parent_pid <- NULL
.cache <- list()
makeResult <- function(rank, rscript_sh) {
if (is.null(parent_pid)) parent_pid <<- Sys.getpid()
pidfile <- tempfile(pattern = sprintf("worker.rank=%d.parallelly.parent=%d.",
rank, parent_pid), fileext = ".pid")
pidfile <- normalizePath(pidfile, winslash = "/", mustWork = FALSE)
pidcode <- sprintf('try(suppressWarnings(cat(Sys.getpid(),file="%s")), silent = TRUE)', pidfile)
rscript_pid_args <- c("-e", shQuote(pidcode, type = rscript_sh))
list(pidfile = pidfile, rscript_pid_args = rscript_pid_args)
}
function(rscript, rank, rscript_sh, force = FALSE, verbose = FALSE) {
autoKill <- getOption2("parallelly.makeNodePSOCK.autoKill", TRUE)
if (!isTRUE(as.logical(autoKill))) return(list())
result <- makeResult(rank, rscript_sh = rscript_sh)
key <- paste(rscript, collapse = "\t")
if (!force && isTRUE(.cache[[key]])) return(result)
test_cmd <- paste(c(
rscript,
result$rscript_pid_args,
"-e", shQuote(sprintf('file.exists("%s")', result$pidfile), type = rscript_sh)
), collapse = " ")
if (verbose) {
mdebugf("Testing if worker's PID can be inferred: %s", sQuote(test_cmd))
}
input <- NULL
if (any(grepl("singularity", rscript, ignore.case = TRUE))) input <- ""
res <- system(test_cmd, intern = TRUE, input = input)
status <- attr(res, "status")
suppressWarnings(file.remove(result$pidfile))
.cache[[key]] <<- (is.null(status) || status == 0L) && any(grepl("TRUE", res))
if (verbose) mdebugf("- Possible to infer worker's PID: %s", .cache[[key]])
result
}
})
readWorkerPID <- function(pidfile, wait = 0.5, maxTries = 8L, verbose = FALSE) {
if (is.null(pidfile)) return(NULL)
if (verbose) mdebug("Attempting to infer PID for worker process ...")
pid <- NULL
tries <- 0L
while (!file.exists(pidfile) && tries <= maxTries) {
Sys.sleep(wait)
tries <- tries + 1L
}
if (file.exists(pidfile)) {
pid0 <- NULL
for (tries in 1:maxTries) {
pid0 <- tryCatch(readLines(pidfile, warn = FALSE), error = identity)
if (!inherits(pid0, "error")) break
pid0 <- NULL
Sys.sleep(wait)
}
file.remove(pidfile)
if (length(pid0) > 0L) {
pid <- as.integer(pid0[length(pid0)])
if (verbose) mdebugf(" - pid: %s", pid)
if (is.na(pid)) {
warnf("Worker PID is a non-integer: %s", pid0)
pid <- NULL
} else if (pid == Sys.getpid()) {
warnf("Hmm... worker PID and parent PID are the same: %s", pid)
pid <- NULL
}
}
}
if (verbose) mdebug("Attempting to infer PID for worker process ... done")
pid
}
summary.RichSOCKnode <- function(object, ...) {
res <- list(
host = NA_character_,
r_version = NA_character_,
platform = NA_character_,
pwd = NA_character_,
pid = NA_integer_
)
host <- object[["host"]]
if (!is.null(host)) res$host <- host
session_info <- object[["session_info"]]
if (!is.null(session_info)) {
res$r_version <- session_info[["r"]][["version.string"]]
res$platform <- session_info[["r"]][["platform"]]
res$pwd <- session_info[["pwd"]]
res$pid <- session_info[["process"]][["pid"]]
}
as.data.frame(res, stringsAsFactors = FALSE)
}
summary.RichSOCKcluster <- function(object, ...) {
res <- lapply(object, FUN = function(node) {
if (is.null(node)) return(summary.RichSOCKnode(node))
summary(node)
})
res <- do.call(rbind, res)
rownames(res) <- NULL
res
}
print.RichSOCKcluster <- function (x, ...) {
info <- summary(x)
txt <- sprintf("host %s", sQuote(info[["host"]]))
specs <- sprintf("(%s, platform %s)", info[["r_version"]], info[["platform"]])
specs[is.na(info[["r_version"]])] <- "(R version and platform not queried)"
txt <- paste(txt, specs, sep = " ")
t <- table(txt)
t <- t[order(t, decreasing = TRUE)]
w <- ifelse(t == 1L, "node is", "nodes are")
txt <- sprintf("%d %s on %s", t, w, names(t))
txt <- paste(txt, collapse = ", ")
txt <- sprintf("Socket cluster with %d nodes where %s", length(x), txt)
if (!is.null(attr(x, "gcMe"))) {
txt <- sprintf("%s. This cluster is registered to be automatically stopped by the garbage collector", txt)
}
cat(txt, "\n", sep = "")
invisible(x)
}
|
FisherGTest <-
function(z){
n <- length(z)
m <- ifelse(n%%2==0,(n-2)/2,(n-1)/2)
Ip <- pgram(z)[,2]
if( n%%2 ==0 )
Ip<-Ip[-(m+1)]
maxL <- which.max(Ip)
g <- Ip[maxL]/sum(Ip)
p <- floor(1/g)
i <- 1:p
pvalue <- sum(choose(m,i)*(-1)^(i-1) *(1-i*g)^(m-1))
ans <- c(gstat=g,pvalue=pvalue,freq=maxL/n)
ans
}
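# Illustrative sketch (assumes pgram() -- the periodogram helper used above --
# is available from the same package; data are simulated). Not executed when sourced.
if (FALSE) {
  set.seed(1)
  z <- sin(2 * pi * (1:200) / 20) + rnorm(200)  # period-20 signal plus noise
  FisherGTest(z)  # expect a small p-value and freq near 1/20 = 0.05
}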
|
semdrw <- function() {
shiny::runApp(appDir = system.file("shiny-examples", "myapp", package = "semdrw"))
Sys.setenv("R_TESTS" = "")
}
|
"QRISK3_2019_test"
|
content_language <- function(language, content) {
if (is.na(language)) language <- FALSE
if (is.logical(language)) {
if (language) {
if (requireNamespace("cld3", quietly = TRUE)) {
detect_language <- cld3::detect_language
} else if (requireNamespace("cld2", quietly = TRUE)) {
detect_language <- cld2::detect_language
} else {
stop("Unable to auto-detect language. Install {cld3} or {cld2}.")
}
language <- detect_language(content)
} else {
return(NULL)
}
} else {
if (!is.character(language)) {
stop("Language must either be a string or TRUE/FALSE.")
}
}
header("Content-Language", paste(language, collapse = ", "))
}
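# Illustrative sketch (assumes header() is this package's own header constructor,
# as used above). Not executed when sourced.
if (FALSE) {
  content_language("en", content = NULL)          # fixed language tag
  content_language(TRUE, content = "Ein Text")    # auto-detect via {cld3}/{cld2}
  content_language(FALSE, content = "ignored")    # NULL: no header added
}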
|
yadirStartCampaigns <- function(Login = getOption("ryandexdirect.user"),
Ids = NULL,
Token = NULL,
AgencyAccount = getOption("ryandexdirect.agency_account"),
TokenPath = yadirTokenPath()){
Token <- tech_auth(login = Login, token = Token, AgencyAccount = AgencyAccount, TokenPath = TokenPath)
if(length(Ids) > 1000){
stop(paste0("In the parameter Ids transferred numbers of ",length(Ids), " campaigns, maximum number of campaigns in one request is 1000."))
}
if(is.null(Ids)){
stop("In the Ids argument, you must pass the vector containing the Id campaigns for which you want to resume displaying ads. You have not transferred any Id.")
}
CounErr <- 0
errors_id <- vector()
start_time <- Sys.time()
packageStartupMessage("Processing", appendLF = T)
IdsPast <- paste0(Ids, collapse = ",")
queryBody <- paste0("{
\"method\": \"resume\",
\"params\": {
\"SelectionCriteria\": {
\"Ids\": [",IdsPast,"]}
}
}")
answer <- POST("https://api.direct.yandex.com/json/v5/campaigns", body = queryBody, add_headers(Authorization = paste0("Bearer ",Token), 'Accept-Language' = "ru","Client-Login" = Login))
ans_pars <- content(answer)
if(!is.null(ans_pars$error)){
stop(paste0("Error: ", ans_pars$error$error_string,". Message: ",ans_pars$error$error_detail, ". Request ID: ",ans_pars$error$request_id))
}
for(error_search in 1:length(ans_pars$result$ResumeResults)){
if(!is.null(ans_pars$result$ResumeResults[[error_search]]$Errors)){
CounErr <- CounErr + 1
errors_id <- c(errors_id, Ids[error_search])
packageStartupMessage(paste0(" CampId: ",Ids[error_search]," - ", ans_pars$result$ResumeResults[[error_search]]$Errors[[1]]$Details))
}
}
out_message <- ""
TotalCampStoped <- length(Ids) - CounErr
if(TotalCampStoped %in% c(2,3,4) & !(TotalCampStoped %% 100 %in% c(12,13,14))){
out_message <- "campaings start"
} else if(TotalCampStoped %% 10 == 1 & TotalCampStoped %% 100 != 11){
out_message <- "campaings start"
} else {
out_message <- "campaings start"
}
packageStartupMessage(paste0(TotalCampStoped, " ", out_message))
packageStartupMessage(paste0("Total time: ", as.integer(round(difftime(Sys.time(), start_time , units ="secs"),0)), " sec."))
return(errors_id)}
|
jomo.smc <-
function(formula, data, level=rep(1,ncol(data)), beta.start=NULL, l2.beta.start=NULL, u.start=NULL, l1cov.start=NULL, l2cov.start=NULL, l1cov.prior=NULL, l2cov.prior=NULL, a.start=NULL, a.prior=NULL, nburn=1000, nbetween=1000, nimp=5, meth="common", family="binomial",output=1, out.iter=10, model) {
if (model=="lm") {
imp<-jomo.lm(formula=formula, data=data, beta.start=beta.start, l1cov.start=l1cov.start, l1cov.prior=l1cov.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, output=output, out.iter=out.iter)
} else if (model=="glm") {
imp<-jomo.glm(formula=formula, data=data, beta.start=beta.start, l1cov.start=l1cov.start, l1cov.prior=l1cov.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, output=output, out.iter=out.iter, family=family)
} else if (model=="polr") {
imp<-jomo.polr(formula=formula, data=data, beta.start=beta.start, l1cov.start=l1cov.start, l1cov.prior=l1cov.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, output=output, out.iter=out.iter)
}else if (model=="coxph") {
imp<-jomo.coxph(formula=formula, data=data, beta.start=beta.start, l1cov.start=l1cov.start, l1cov.prior=l1cov.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, output=output, out.iter=out.iter)
} else if (model=="lmer") {
imp<-jomo.lmer(formula=formula, data=data, level=level, beta.start=beta.start, l2.beta.start=l2.beta.start, u.start=u.start, l1cov.start=l1cov.start, l2cov.start=l2cov.start, l1cov.prior=l1cov.prior, l2cov.prior=l2cov.prior, a.start=a.start, a.prior=a.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, meth=meth, output=output, out.iter=out.iter)
} else if (model=="glmer") {
imp<-jomo.glmer(formula=formula, data=data, level=level, beta.start=beta.start, l2.beta.start=l2.beta.start, u.start=u.start, l1cov.start=l1cov.start, l2cov.start=l2cov.start, l1cov.prior=l1cov.prior, l2cov.prior=l2cov.prior, a.start=a.start, a.prior=a.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, meth=meth, output=output, out.iter=out.iter, family=family)
} else if (model=="clmm") {
imp<-jomo.clmm(formula=formula, data=data, level=level, beta.start=beta.start, l2.beta.start=l2.beta.start, u.start=u.start, l1cov.start=l1cov.start, l2cov.start=l2cov.start, l1cov.prior=l1cov.prior, l2cov.prior=l2cov.prior, a.start=a.start, a.prior=a.prior, nburn=nburn, nbetween=nbetween, nimp=nimp, meth=meth, output=output, out.iter=out.iter)
}else {
cat("Invalid model specification. Models currently available: lm, glm (binomial), polr, coxph, lmer,clmm, glmer (binomial).\n")
}
return(imp)
}
|
NULL
make_getter_setters("col_width", "col", check_fun = is_numeric_or_character)
NULL
make_getter_setters("row_height", "row", check_fun = is_numeric_or_character)
NULL
make_getter_setters("header_cols", "col", check_fun = is.logical)
NULL
make_getter_setters("header_rows", "row", check_fun = is.logical)
|
util_as_integer <- function(x) UseMethod("util_as_integer")
util_as_integer.RasterLayer <- function(x){
raster::values(x) <- as.integer(raster::values(x))
x
}
|
drop_extra_covariates = function( M0, data ) {
cfs = stats::coef( M0 )
nas = names( cfs )[ is.na( cfs ) ]
if ( length( nas ) > 0 ) {
nas = paste0( nas, collapse = " - " )
warning( paste0( "Dropped covariates due to colinearity with update of: ~ . - ", nas ) )
stats::update( M0, formula. = stats::as.formula( paste( "~ . ", nas, sep= "-" ) ), data=data )
} else {
M0
}
}
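# Illustrative sketch: a perfectly collinear regressor yields an NA coefficient,
# which drop_extra_covariates() removes via update(). Data below are made up.
if (FALSE) {
  demo_dat <- data.frame(y = rnorm(50), x1 = rnorm(50))
  demo_dat$x2 <- 2 * demo_dat$x1                  # collinear with x1
  M0 <- stats::lm(y ~ x1 + x2, data = demo_dat)   # coef(M0)["x2"] is NA
  drop_extra_covariates(M0, demo_dat)             # warns, refits with "~ . - x2"
}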
process_outcome_model = function( outcomename, dat, t0, R=400, summarize=FALSE,
smooth=FALSE, smoother = NULL,
fit_model = fit_model_default,
covariates = NULL,
plug_in = FALSE, ... ) {
if ( is.null( covariates ) ) {
covariates = attr( fit_model, "lags" )
}
dat = add_lagged_covariates( dat, outcomename, covariates = covariates )
dat.pre = dplyr::filter( dat, month <= t0 )
M0 = fit_model( dat.pre, outcomename )
if ( any( is.na( stats::coef( M0 ) ) ) ) {
M0 = drop_extra_covariates( M0, dat.pre[-c(1),] )
}
if ( smooth && !is.null( covariates ) && is.null( smoother ) ) {
M0full = stats::model.frame( M0, data=dat, na.action=NULL )
smoother = make_model_smoother( covariates=M0full, fit_model = fit_model )
} else {
smoother = smooth_series
}
res = extrapolate_model( M0, outcomename, dat, t0, R, summarize=summarize,
smooth=smooth, smoother=smoother,
fix_parameters = plug_in,
... )
if ( summarize ) {
res$Ybar = generate_Ybars( fit_model, outcomename, t0, dat )
}
res
}
|
eigenLaplace <- function(mm, nn) {
In <- seq(0, mm - 1, 1)
Im <- seq(0, nn - 1, 1)
In <- t(In)
Im <- t(Im)
mu <- 2 - 2 * cos(pi * Im / nn)
lambda <- 2 - 2 * cos(pi * In / mm)
return(.Call(`_mrbsizeR_for_eigenLaplace`, mu, lambda, mm, nn))
}
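# Note (sketch, not from the original source): mu and lambda above are the
# eigenvalues of the 1-D discrete Laplacian in each grid direction, and the
# compiled routine `_mrbsizeR_for_eigenLaplace` is assumed to combine them for
# the mm x nn grid. The 1-D closed form can be checked directly:
if (FALSE) {
  mm <- 4; nn <- 3
  lambda <- 2 - 2 * cos(pi * (0:(mm - 1)) / mm)  # matches lambda inside eigenLaplace()
  mu     <- 2 - 2 * cos(pi * (0:(nn - 1)) / nn)  # matches mu inside eigenLaplace()
  outer(lambda, mu, FUN = "+")  # Kronecker-sum eigenvalues of the 2-D Laplacian
}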
|
if (!curl::has_internet()) {
exit_file("Skipping tests for lack of internet.")
}
if (Sys.getenv("RunAllGtrendsRTests", unset="") == "") {
exit_file("Skipping tests not opted into.")
}
kw <- "news"
res <- gtrends(kw)
expect_true(nrow(res$interest_over_time) > 0)
expect_true(nrow(res$interest_by_country) > 0)
expect_true(nrow(res$interest_by_dma) > 0)
expect_true(nrow(res$interest_by_city) > 0)
expect_true(nrow(res$related_topics) > 0)
expect_true(nrow(res$related_queries) > 0)
expect_true(all(Vectorize(identical, "x")(
list(
unique(res$interest_over_time$keyword),
unique(res$interest_by_country$keyword),
unique(res$interest_by_dma$keyword),
unique(res$interest_by_city$keyword),
unique(res$related_topics$keyword),
unique(res$related_queries$keyword)
), kw
)))
res <- gtrends("NHL", geo = "US")
expect_true(nrow(res$interest_by_region) > 0)
kw <- c("NHL", "NFL")
res <- gtrends(kw)
expect_true(nrow(res$interest_over_time) > 0)
expect_true(nrow(res$interest_by_country) > 0)
expect_true(nrow(res$interest_by_dma) > 0)
expect_true(nrow(res$interest_by_city) > 0)
expect_true(nrow(res$related_queries) > 0)
expect_true(all(Vectorize(identical, "x")(
list(
unique(res$interest_over_time$keyword),
unique(res$interest_by_country$keyword),
unique(res$interest_by_dma$keyword),
unique(res$interest_by_city$keyword),
unique(res$related_queries$keyword)
), kw
)))
|
s3_register <- function(generic, class, method = NULL) {
stopifnot(is.character(generic), length(generic) == 1)
stopifnot(is.character(class), length(class) == 1)
pieces <- strsplit(generic, "::")[[1]]
stopifnot(length(pieces) == 2)
package <- pieces[[1]]
generic <- pieces[[2]]
caller <- parent.frame()
get_method_env <- function() {
top <- topenv(caller)
if (isNamespace(top)) {
asNamespace(environmentName(top))
} else {
caller
}
}
get_method <- function(method, env) {
if (is.null(method)) {
get(paste0(generic, ".", class), envir = get_method_env())
} else {
method
}
}
method_fn <- get_method(method)
stopifnot(is.function(method_fn))
setHook(
packageEvent(package, "onLoad"),
function(...) {
ns <- asNamespace(package)
method_fn <- get_method(method)
registerS3method(generic, class, method_fn, envir = ns)
}
)
if (!isNamespaceLoaded(package)) {
return(invisible())
}
envir <- asNamespace(package)
if (exists(generic, envir)) {
registerS3method(generic, class, method_fn, envir = envir)
}
invisible()
}
|
partial_dep.obs_all <- function(model, predictor, data, observation, column = colnames(data), accuracy = min(length(data), 10), exact_only = TRUE, label_name = "Target", comparator_name = "Evolution") {
temp_data <- observation
temp_percentile <- (1/accuracy) * (1:accuracy)
temp_percentile <- (temp_percentile - 1/accuracy) / (1 - 1/accuracy)
initial_observation <- predictor(model = model, data = observation)
grid_search <- list()
best_grid <- list()
if (exact_only == FALSE) {
for (i in 1:length(column)) {
temp_values <- data[[column[i]]]
unique_values <- unique(temp_values)
if (length(unique_values) < accuracy) {
grid_search[[i]] <- sort(unique_values)
} else {
grid_search[[i]] <- sort(unique(quantile(data[[column[i]]], temp_percentile, na.rm = TRUE, names = FALSE, type = 7)))
}
names(grid_search)[i] <- column[i]
temp_values <- temp_data[[column[i]]]
best_grid[[i]] <- rbindlist(sapply(grid_search[[i]], function(x, temp_data, predictor, label_name, initial_observation) {
temp_data[[column[i]]] <- rep(x, nrow(temp_data))
best_grid <- data.table(Feature = rep(column[i], nrow(temp_data)),
Value = rep(x, nrow(temp_data)))
best_grid[[label_name]] <- predictor(model = model, data = temp_data)
is_unchanged <- as.character(best_grid[[label_name]] == initial_observation)
is_unchanged[is_unchanged == TRUE] <- "Fixed"
is_unchanged[best_grid[[label_name]] < initial_observation] <- "Decreasing"
is_unchanged[best_grid[[label_name]] > initial_observation] <- "Increasing"
best_grid[[comparator_name]] <- is_unchanged
return(best_grid)
}, temp_data = temp_data, predictor = predictor, label_name = label_name, initial_observation = initial_observation, simplify = FALSE, USE.NAMES = FALSE))
temp_data[[column[i]]] <- temp_values
}
} else {
for (i in 1:length(column)) {
temp_values <- data[[column[i]]]
unique_values <- unique(temp_values)
if (length(unique_values) < accuracy) {
grid_search[[i]] <- sort(unique_values)
} else {
grid_search[[i]] <- sort(unique(quantile(data[[column[i]]], temp_percentile, na.rm = TRUE, names = FALSE, type = 3)))
}
names(grid_search)[i] <- column[i]
temp_values <- temp_data[[column[i]]]
best_grid[[i]] <- rbindlist(sapply(grid_search[[i]], function(x, temp_data, predictor, label_name, initial_observation) {
temp_data[[column[i]]] <- rep(x, nrow(temp_data))
best_grid <- data.table(Feature = rep(column[i], nrow(temp_data)),
Value = rep(x, nrow(temp_data)))
best_grid[[label_name]] <- predictor(model = model, data = temp_data)
is_unchanged <- as.character(best_grid[[label_name]] == initial_observation)
is_unchanged[is_unchanged == TRUE] <- "Fixed"
is_unchanged[best_grid[[label_name]] < initial_observation] <- "Decreasing"
is_unchanged[best_grid[[label_name]] > initial_observation] <- "Increasing"
best_grid[[comparator_name]] <- is_unchanged
return(best_grid)
}, temp_data = temp_data, predictor = predictor, label_name = label_name, initial_observation = initial_observation, simplify = FALSE, USE.NAMES = FALSE))
temp_data[[column[i]]] <- temp_values
}
}
best_grid <- rbindlist(best_grid)
best_grid$Feature <- factor(best_grid$Feature, levels = column)
best_grid[[comparator_name]] <- factor(best_grid[[comparator_name]], levels = if ("Fixed" %in% best_grid[[comparator_name]]) {c("Decreasing", "Fixed", "Increasing")} else {c("Decreasing", "Increasing")})
return(list(grid_init = grid_search,
grid_exp = best_grid,
preds = best_grid[[label_name]],
obs = initial_observation))
}
|
test_that("unite pastes columns together & removes old col", {
df <- tibble(x = "a", y = "b")
out <- unite(df, z, x:y)
expect_equal(names(out), "z")
expect_equal(out$z, "a_b")
})
test_that("unite does not remove new col in case of name clash", {
df <- tibble(x = "a", y = "b")
out <- unite(df, x, x:y)
expect_equal(names(out), "x")
expect_equal(out$x, "a_b")
})
test_that("unite preserves grouping", {
df <- tibble(g = 1, x = "a") %>% dplyr::group_by(g)
rs <- df %>% unite(x, x)
expect_equal(df, rs)
expect_equal(class(df), class(rs))
expect_equal(dplyr::group_vars(df), dplyr::group_vars(rs))
})
test_that("drops grouping when needed", {
df <- tibble(g = 1, x = "a") %>% dplyr::group_by(g)
rs <- df %>% unite(gx, g, x)
expect_equal(rs$gx, "1_a")
expect_equal(dplyr::group_vars(rs), character())
})
test_that("empty var spec uses all vars", {
df <- tibble(x = "a", y = "b")
expect_equal(unite(df, "z"), tibble(z = "a_b"))
})
test_that("can remove missing vars on request", {
df <- expand_grid(x = c("a", NA), y = c("b", NA))
out <- unite(df, "z", x:y, na.rm = TRUE)
expect_equal(out$z, c("a_b", "a", "b", ""))
})
test_that("regardless of the type of the NA", {
vec_unite <- function(df, vars) {
unite(df, "out", any_of(vars), na.rm = TRUE)$out
}
df <- tibble(
x = c("x", "y", "z"),
lgl = NA,
dbl = NA_real_,
chr = NA_character_
)
expect_equal(vec_unite(df, c("x", "lgl")), c("x", "y", "z"))
expect_equal(vec_unite(df, c("x", "dbl")), c("x", "y", "z"))
expect_equal(vec_unite(df, c("x", "chr")), c("x", "y", "z"))
})
|
require(rbacon)
Bacon("MSB2K", ask=FALSE, coredir=tempdir(), suggest=FALSE)
agedepth()
Bacon.hist(20)
a.d20 <- Bacon.Age.d(20)
summary(a.d20)
hist(a.d20)
a.d30 <- Bacon.Age.d(30)
a.d20 <- Bacon.Age.d(20)
summary(a.d30-a.d20)
hist(a.d30-a.d20)
acc.d20 <- accrate.depth(20)
summary(acc.d20)
acc.a4500 <- accrate.age(4500)
summary(acc.a4500)
|
pull. <- function(.df, var = -1, name = NULL) {
UseMethod("pull.")
}
pull..data.frame <- function(.df, var = -1, name = NULL) {
vec <- .pull(.df, {{ var }})
name <- enquo(name)
if (!quo_is_null(name)) {
names(vec) <- .pull(.df, !!name)
}
vec
}
.pull <- function(.df, var) {
var_list <- as.list(seq_along(.df))
names(var_list) <- names(.df)
.var <- eval_tidy(enquo(var), var_list)
if (.var < 0) .var <- length(var_list) + .var + 1
.df[[.var]]
}
|
run <- function(script, ..., job = NULL, name = NULL, project = NULL) {
renv_scope_error_handler()
renv_dots_check(...)
script <- renv_path_normalize(script, winslash = "/", mustWork = TRUE)
project <- project %||% renv_file_find(script, function(path) {
paths <- file.path(path, c("renv", "renv.lock"))
if (any(file.exists(paths)))
return(path)
})
if (is.null(project)) {
fmt <- "could not determine project root for script '%s'"
stopf(fmt, aliased_path(script))
}
activate <- renv_paths_activate(project = project)
if (!file.exists(activate)) {
fmt <- "project '%s' does not have an renv activate script"
stopf(fmt, aliased_path(project))
}
jobbable <-
!identical(job, FALSE) &&
renv_rstudio_available() &&
renv_package_installed("rstudioapi") &&
renv_package_version("rstudioapi") >= "0.10" &&
rstudioapi::verifyAvailable("1.2.1335")
if (identical(job, TRUE) && identical(jobbable, FALSE))
stopf("cannot run script as job: required versions of RStudio + rstudioapi not available")
if (jobbable)
renv_run_job(script = script, name = name, project = project)
else
renv_run_impl(script = script, name = name, project = project)
}
renv_run_job <- function(script, name, project) {
activate <- renv_paths_activate(project = project)
jobscript <- tempfile("renv-job-", fileext = ".R")
exprs <- substitute(local({
on.exit(unlink(jobscript), add = TRUE)
source(activate)
source(script)
}), list(activate = activate, script = script, jobscript = jobscript))
code <- deparse(exprs)
writeLines(code, con = jobscript)
rstudioapi::jobRunScript(
path = jobscript,
workingDir = project,
name = name
)
}
renv_run_impl <- function(script, name, project) {
owd <- setwd(project)
on.exit(setwd(owd), add = TRUE)
system2(R(), c("-s", "-f", shQuote(script)))
}
|
print.fsn <- function(x, digits=x$digits, ...) {
mstyle <- .get.mstyle("crayon" %in% .packages())
.chkclass(class(x), must="fsn")
digits <- .get.digits(digits=digits, xdigits=x$digits, dmiss=FALSE)
.space()
cat(mstyle$section(paste("Fail-safe N Calculation Using the", x$type, "Approach")))
cat("\n\n")
if (x$type == "Rosenthal") {
cat(mstyle$text("Observed Significance Level: "))
cat(mstyle$result(.pval(x$pval, digits[["pval"]])))
cat("\n")
cat(mstyle$text("Target Significance Level: "))
cat(mstyle$result(x$alpha))
}
if (x$type == "Orwin") {
cat(mstyle$text("Average Effect Size: "))
cat(mstyle$result(.fcf(x$meanes, digits[["est"]])))
cat("\n")
cat(mstyle$text("Target Effect Size: "))
cat(mstyle$result(.fcf(x$target, digits[["est"]])))
}
if (x$type == "Rosenberg") {
cat(mstyle$text("Average Effect Size: "))
cat(mstyle$result(.fcf(x$meanes, digits[["est"]])))
cat("\n")
cat(mstyle$text("Observed Significance Level: "))
cat(mstyle$result(.pval(x$pval, digits[["pval"]])))
cat("\n")
cat(mstyle$text("Target Significance Level: "))
cat(mstyle$result(x$alpha))
}
if (x$type == "REM") {
cat(mstyle$text("Average Effect Size: "))
cat(mstyle$result(.fcf(x$meanes, digits[["est"]])))
cat("\n")
cat(mstyle$text("Target Effect Size: "))
cat(mstyle$result(.fcf(x$target, digits[["est"]])))
}
cat("\n\n")
cat(mstyle$text("Fail-safe N: "))
cat(mstyle$result(x$fsnum))
cat("\n")
.space()
invisible()
}
|
PiS <- function(M){
tmp = svd(M);
tmp$u%*%diag(PiW(tmp$d))%*%t(tmp$v)
}
|
.add_labels_to_groupvariable <- function(mydf, original_model_frame, terms) {
grp.lbl <- sjlabelled::get_labels(
original_model_frame[[terms[2]]],
non.labelled = TRUE,
values = "n",
drop.unused = TRUE
)
if (is.factor(mydf$group) && !.is_numeric_factor(mydf$group))
grp.lbl <- NULL
if (is.factor(mydf$group) && .n_distinct(mydf$group) < nlevels(mydf$group))
mydf$group <- droplevels(mydf$group)
if (!is.null(grp.lbl) && !is.null(names(grp.lbl))) {
values <- as.numeric(as.vector(unique(stats::na.omit(mydf$group))))
if (min(values) < 1) values <- round(.recode_to(values, lowest = 1))
grp.lbl <- grp.lbl[values]
mydf$group <- sjlabelled::set_labels(mydf$group, labels = grp.lbl)
if (!all(mydf$group %in% sjlabelled::get_values(mydf$group)))
attr(mydf$group, "labels") <- NULL
}
if (.obj_has_name(mydf, "facet")) {
facet.lbl <- sjlabelled::get_labels(
original_model_frame[[terms[3]]],
non.labelled = TRUE,
values = "n",
drop.unused = TRUE
)
if (is.factor(mydf$facet) && !.is_numeric_factor(mydf$facet))
facet.lbl <- NULL
if (is.factor(mydf$facet) && .n_distinct(mydf$facet) < nlevels(mydf$facet))
mydf$facet <- droplevels(mydf$facet)
if (!is.null(facet.lbl) && !is.null(names(facet.lbl))) {
values <- as.numeric(as.vector(unique(stats::na.omit(mydf$facet))))
if (min(values) < 1) values <- .recode_to(values, lowest = 1)
facet.lbl <- facet.lbl[values]
mydf$facet <- sjlabelled::set_labels(mydf$facet, labels = facet.lbl)
if (!all(mydf$facet %in% sjlabelled::get_values(mydf$facet)))
attr(mydf$facet, "labels") <- NULL
}
}
mydf
}
.groupvariable_to_labelled_factor <- function(mydf) {
mydf$group <-
sjlabelled::as_label(
mydf$group,
prefix = FALSE,
drop.na = TRUE,
drop.levels = !is.numeric(mydf$group)
)
if (.obj_has_name(mydf, "facet")) {
mydf$facet <-
sjlabelled::as_label(
mydf$facet,
prefix = TRUE,
drop.na = TRUE,
drop.levels = !is.numeric(mydf$facet)
)
}
mydf
}
.get_axis_titles_and_labels <- function(model, original_model_frame, terms, fun, model_info, no.transform, type) {
resp.col <- insight::find_response(model)
ysc <- .get_title_labels(fun, model_info, no.transform, type)
t.title <-
paste(sprintf("Predicted %s of", ysc),
sjlabelled::get_label(original_model_frame[[1]], def.value = resp.col))
x.title <- sjlabelled::get_label(original_model_frame[[terms[1]]], def.value = terms[1])
y.title <- sjlabelled::get_label(original_model_frame[[1]], def.value = resp.col)
if (fun == "coxph") {
if (!is.null(type) && type == "surv") {
t.title <- y.title <- "Probability of Survival"
} else if (!is.null(type) && type == "cumhaz") {
t.title <- y.title <- "Cumulative Hazard"
} else {
t.title <- "Predicted risk scores"
y.title <- "Risk Score"
}
}
l.title <- sjlabelled::get_label(original_model_frame[[terms[2]]], def.value = terms[2])
axis.labels <- sjlabelled::get_labels(
original_model_frame[[terms[1]]],
non.labelled = TRUE,
drop.unused = TRUE
)
list(
t.title = t.title,
x.title = x.title,
y.title = y.title,
l.title = l.title,
axis.labels = axis.labels
)
}
.get_title_labels <- function(fun, model_info, no.transform, type) {
ysc <- "values"
if (!is.null(type) && type == "zi.prob") {
ysc <- "zero-inflation probabilities"
} else if (fun == "glm") {
if (model_info$is_brms_trial)
ysc <- "successes"
else if (model_info$is_binomial || model_info$is_ordinal || model_info$is_multinomial)
ysc <- ifelse(isTRUE(no.transform), "log-odds", "probabilities")
else if (model_info$is_count)
ysc <- ifelse(isTRUE(no.transform), "log-mean", "counts")
} else if (model_info$is_beta) {
ysc <- "proportion"
} else if (fun == "coxph") {
if (!is.null(type) && type == "surv")
ysc <- "survival probabilities"
else if (!is.null(type) && type == "cumhaz")
ysc <- "cumulative hazard"
else
ysc <- "risk scores"
}
ysc
}
.recode_to <- function(x, lowest, highest = -1) {
if (is.factor(x)) {
x <- as.numeric(as.character(x))
}
minval <- min(x, na.rm = TRUE)
downsize <- minval - lowest
x <- sapply(x, function(y) y - downsize)
if (highest > lowest) x[x > highest] <- NA
x
}
|
serve_site = function(..., .site_dir = NULL) {
serve = switch(
generator(), hugo = serve_it(),
jekyll = serve_it(
baseurl = get_config2('baseurl', ''),
pdir = get_config2('destination', '_site')
),
hexo = serve_it(
baseurl = get_config2('root', ''),
pdir = get_config2('public_dir', 'public')
),
stop("Cannot recognize the site (only Hugo, Jekyll, and Hexo are supported)")
)
serve(..., .site_dir = .site_dir)
}
server_ready = function(url) {
url = sub('^http://localhost:', 'http://127.0.0.1:', url)
!inherits(
xfun::try_silent(suppressWarnings(readLines(url))), 'try-error'
)
}
preview_site = function(..., startup = FALSE) {
if (startup) {
opts$set(preview = TRUE)
on.exit(opts$set(preview = NULL), add = TRUE)
init_files = get_option('blogdown.initial_files')
if (is.function(init_files)) init_files = init_files()
for (f in init_files) if (file_exists(f)) open_file(f)
} else {
opts$set(knitting = TRUE)
on.exit(refresh_viewer(), add = TRUE)
}
invisible(serve_site(...))
}
preview_mode = function() {
isTRUE(opts$get('preview')) || isTRUE(opts$get('knitting'))
}
serve_it = function(pdir = publish_dir(), baseurl = site_base_dir()) {
g = generator(); config = config_files(g)
function(..., .site_dir = NULL) {
root = site_root(config, .site_dir)
if (root %in% opts$get('served_dirs')) {
if (preview_mode()) return()
servr::browse_last()
return(message(
'The site has been served under the directory "', root, '". I have tried ',
'to reopen it for you with servr::browse_last(). If you do want to ',
'start a new server, you may stop existing servers with ',
'blogdown::stop_server(), or restart R. Normally you should not need to ',
'serve the same site multiple times in the same R session',
if (is_rstudio()) c(
', otherwise you may run into issues like ',
'https://github.com/rstudio/blogdown/issues/404'
), '.'
))
}
owd = setwd(root); on.exit(setwd(owd), add = TRUE)
server = servr::server_config(..., baseurl = baseurl, hosturl = function(host) {
if (g == 'hugo' && host == '127.0.0.1') 'localhost' else host
})
cmd = if (g == 'hugo') find_hugo() else g
host = server$host; port = server$port; intv = server$interval
if (!servr:::port_available(port, host)) stop(
'The port ', port, ' at ', host, ' is unavailable', call. = FALSE
)
args_fun = match.fun(paste0(g, '_server_args'))
cmd_args = args_fun(host, port)
if (g == 'hugo') {
tweak_hugo_env(server = TRUE, relativeURLs = if (is_rstudio_server()) TRUE)
if (length(list_rmds(pattern = bundle_regex('.R(md|markdown)$'))))
create_shortcode('postref.html', 'blogdown/postref')
}
if (is.function(serve_first <- getOption('blogdown.server.first'))) serve_first()
if (!server$daemon) return(system2(cmd, cmd_args))
pid = if (server_processx()) {
proc = processx::process$new(cmd, cmd_args, stderr = '|', cleanup_tree = TRUE)
I(proc$get_pid())
} else {
xfun::bg_process(cmd, cmd_args)
}
opts$append(pids = list(pid))
message(
'Launching the server via the command:\n ',
paste(c(cmd, cmd_args), collapse = ' ')
)
i = 0
repeat {
Sys.sleep(1)
if (inherits(pid, 'AsIs') && !proc$is_alive()) {
err = paste(gsub('^Error: ', '', proc$read_error()), collapse = '\n')
stop(if (err == '') {
'Failed to serve the site; see if blogdown::build_site() gives more info.'
} else err, call. = FALSE)
}
if (server_ready(server$url)) break
if (i >= get_option('blogdown.server.timeout', 30)) {
s = proc_kill(pid)
stop(if (s == 0) c(
'Failed to launch the site preview in ', i, ' seconds. Try to give ',
'it more time via the global option "blogdown.server.timeout", e.g., ',
'options(blogdown.server.timeout = 600).'
) else c(
'It took more than ', i, ' seconds to launch the server. An error might ',
'have occurred with ', g, '. You may run blogdown::build_site() and see ',
'if it gives more info.'
), call. = FALSE)
}
i = i + 1
}
server$browse()
opts$append(served_dirs = root)
Sys.setenv(BLOGDOWN_SERVING_DIR = root)
message(
'Launched the ', g, ' server in the background (process ID: ', pid, '). ',
'To stop it, call blogdown::stop_server() or restart the R session.'
)
if (g == 'hugo') del_empty_dir('resources')
if (!get_option('blogdown.knit.on_save', TRUE)) return(invisible())
rebuild = function(files) {
if (is.null(b <- get_option('blogdown.knit.on_save'))) {
b = !isTRUE(opts$get('knitting'))
if (!b) {
options(blogdown.knit.on_save = b)
message(
'It seems you have clicked the Knit button in RStudio. If you prefer ',
'knitting a document manually over letting blogdown automatically ',
'knit it on save, you may set options(blogdown.knit.on_save = FALSE) ',
'in your .Rprofile so blogdown will not knit documents automatically ',
'again (I have just set this option for you for this R session). If ',
'you prefer knitting on save, set this option to TRUE instead.'
)
files = b
}
}
xfun::in_dir(root, build_site(TRUE, run_hugo = FALSE, build_rmd = files))
}
rebuild(rmd_files <- filter_newfile(list_rmds()))
watch = servr:::watch_dir('.', rmd_pattern, handler = function(files) {
files = list_rmds(files = files)
i = if (g == 'hugo') !xfun::is_sub_path(files, rel_path(publish_dir())) else TRUE
rmd_files <<- files[i]
})
watch_build = function() {
if (is.null(opts$get('served_dirs'))) return(invisible())
if (watch()) try({rebuild(rmd_files); refresh_viewer()})
if (get_option('blogdown.knit.on_save', TRUE)) later::later(watch_build, intv)
}
watch_build()
return(invisible())
}
}
server_processx = function() {
v = get_option('blogdown.server.verbose', FALSE)
if (v) {
options(xfun.bg_process.verbose = TRUE)
return(FALSE)
}
getOption('blogdown.use.processx', xfun::loadable('processx'))
}
jekyll_server_args = function(host, port) {
c('serve', '--port', port, '--host', host, get_option(
'blogdown.jekyll.server', c('--watch', '--incremental', '--livereload')
))
}
hexo_server_args = function(host, port) {
c('server', '-p', port, '-i', host, get_option('blogdown.hexo.server'))
}
stop_server = function() {
ids = NULL
quitting = isTRUE(opts$get('quitting'))
for (i in opts$get('pids')) {
if (quitting && inherits(i, 'AsIs')) next
if (proc_kill(i, stdout = FALSE, stderr = FALSE) != 0) ids = c(ids, i)
}
if (length(ids)) warning(
'Failed to kill the process(es): ', paste(i, collapse = ' '),
'. You may need to kill them manually.'
) else if (!quitting) message('The web server has been stopped.')
set_envvar(c('BLOGDOWN_SERVING_DIR' = NA))
opts$set(pids = NULL, served_dirs = NULL)
}
get_config2 = function(key, default) {
res = yaml_load_file('_config.yml')
res[[key]] %n% default
}
refresh_viewer = function() {
if (!is_rstudio_server()) return()
server_wait()
rstudioapi::executeCommand('viewerRefresh')
}
server_wait = function() {
Sys.sleep(get_option('blogdown.server.wait', 2))
}
|
lstats <- function(object,...)
UseMethod("lstats")
lstats.liu <- function(object,...) {
y <- object$y
resid <- resid(object)
n <- nrow(resid)
d <- object$d
x <- object$xs
coef<-object$coef
p<-ncol(x)
Eval <- eigen(t(x) %*% x)$values
Evec <- eigen(t(x) %*% x)$vector
SSER <- apply(resid, 2, function(x) {
sum(x ^ 2)
})
SSRR <- apply(object$lfit, 2, function(x) {
sum(x ^ 2)
})
SSTR <- t(y) %*% y
ledf <- lapply(hatl(object), function(x) {
n - sum(diag(2 * x - x %*% t(x)))
})
ledf <- do.call(rbind,ledf)
rownames(ledf) <- paste("d=", d, sep = "")
colnames(ledf) <- c("EDF")
lsigma2 <- mapply(function(x,y) {
x / y }, SSER, ledf, SIMPLIFY = FALSE)
lsigma2 <- do.call(rbind, lsigma2)
rownames(lsigma2) <- paste("d=", d, sep = "")
colnames(lsigma2) <- c("Sigma2")
diaghat <- lapply(hatl(object), function(x) {
diag(x)
})
diaghat <- do.call(cbind, diaghat)
Cl <- lapply(1:length(d), function(i, SSRl, lsigma2, hatL) {
SSRl[i] / lsigma2[i] - n + 2 + 2 * sum(diaghat[,i])
}, SSRl = SSER, hatL = hatl(object), lsigma2 = lsigma2)
Cl <- do.call(rbind, Cl)
rownames(Cl) <- paste("d=", d, sep = " ")
colnames(Cl) <- c("CL")
bols <- lm.fit(x,y)$coef
abeta<-bols%*%Evec
var <- lapply(vcov(object), function(x) {
sum(diag(x))
})
var <- do.call(rbind, var)
rownames(var) <- paste("d=", d, sep = "")
colnames(var) <- c("VAR")
bias2 <-
lapply(d, function(d) {
(d - 1) ^ 2 * sum( (abeta^2) / (Eval + 1) ^ 2)
})
bias2 <- do.call(rbind, bias2)
rownames(bias2) <- paste("d=", d, sep = "")
colnames(bias2) <- c("Bias^2")
msel <- mapply(function(x,y) {
x + y
}, var, bias2, SIMPLIFY = FALSE)
msel <- do.call(rbind, msel)
rownames(msel) <- paste("d=", d, sep = "")
colnames(msel) <- c("MSE")
Fv<- lapply(1:length(d), function(i, b, v){1/p*t(b[,i])%*%solve(v[[i]])%*%b[,i]},
b=coef, v=vcov(object))
Fv<-do.call(rbind, Fv)
rownames(Fv) <-paste("d=", d , sep="")
colnames(Fv) <-c("F")
R2l<-lapply(SSER, function(x){1-x/SSTR})
R2l<-do.call(rbind,R2l)
rownames(R2l) <-paste("d=", d, sep="")
colnames(R2l)<-c("R2")
adjR2l<-1-(n-1)/(n-p-1)*(1-R2l)
rownames(adjR2l) <-paste("d=", d, sep="")
colnames(adjR2l) <-c("adj-R2")
minmse<-d[which.min(msel)]
lstat <-list(
lEDF = ledf,
lsigma2 = lsigma2,
Cl = Cl,
var = var,
bias2 = bias2,
mse = msel,
Fv=Fv,
R2=R2l,
adjR2=adjR2l,
minmse=minmse,
SSER=SSER
)
class(lstat) <- "lstats"
lstat
}
print.lstats <- function(x, ...) {
cat("\nLiu Regression Statistics:\n\n")
res <-cbind(
DEDF = x$lEDF,
lsigma2 = x$lsigma2,
Cl = x$Cl,
var = x$var,
bias2 = x$bias2,
mse = x$mse,
Fv=x$Fv,
R2=x$R2,
adjR2=x$adjR2
)
print(round(res,4), ...)
cat("\nminimum MSE occurred at d =", x$minmse, "\n")
}
|
emis_wear <- function (veh,
lkm,
ef,
what = "tyre",
speed,
agemax = ncol(veh),
profile,
hour = nrow(profile),
day = ncol(profile)) {
if(units(lkm)$numerator == "m" ){
stop("Units of lkm is 'm'")
}
veh <- as.data.frame(veh)
lkm <- as.numeric(lkm)
for (i in 1:ncol(veh) ) {
veh[,i] <- as.numeric(veh[,i])
}
for (i in 1:ncol(speed) ) {
speed[,i] <- as.numeric(speed[, i])
}
if(is.vector(profile)){
profile <- matrix(as.numeric(profile), ncol = 1)
}
if(ncol(ef)/24 != day){
stop("Number of days of ef and profile must be the same")
}
lef <- lapply(1:day, function(i){
as.list(ef[, (24*(i-1) + 1):(24*i)])
})
if (what == "tyre"){
d <- simplify2array(
lapply(1:day,function(j){
simplify2array(
lapply(1:hour,function(i){
simplify2array(
lapply(1:agemax, function(k){
ifelse(
speed[,i] < 40,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*1.67,
ifelse(
speed[,i] >= 40 & speed[,i] <= 95,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*(-0.0270*speed[, i] + 2.75),
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*0.185
)) })) })) }))
} else if(what == "break"){
d <- simplify2array(
lapply(1:day,function(j){
simplify2array(
lapply(1:hour,function(i){
simplify2array(
lapply(1:agemax, function(k){
ifelse(
speed[,i] < 40,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*1.39,
ifelse(
speed[,i] >= 40 & speed[,i] < 80,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*(-0.00974*speed[, i] + 1.78),
ifelse(
speed[,i] == 80,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]],
ifelse(
speed[,i] > 80 & speed[,i] <= 90,
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*(-0.00974*speed[, i] + 1.78),
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]*0.902
)))) })) })) }))
} else if (what == "road"){
d <- simplify2array(
lapply(1:day,function(j){
simplify2array(
lapply(1:hour,function(i){
simplify2array(
lapply(1:agemax, function(k){
veh[, k]*profile[i,j]*lkm*lef[[j]][[i]]
})) })) }))
}
return(EmissionsArray(d))
}
|
AutoH2oGBMSizeFreqDist <- function(CountData = NULL,
SizeData = NULL,
CountQuantiles = seq(0.10,0.90,0.10),
SizeQuantiles = seq(0.10,0.90,0.10),
AutoTransform = TRUE,
DataPartitionRatios = c(0.75,0.20,0.05),
StratifyColumnName = NULL,
StratifyTargets = FALSE,
NTrees = 1500,
MaxMem = {gc();paste0(as.character(floor(as.numeric(system("awk '/MemFree/ {print $2}' /proc/meminfo", intern=TRUE)) / 1000000)),"G")},
NThreads = max(1, parallel::detectCores()-2),
EvalMetric = "Quantile",
GridTune = FALSE,
CountTargetColumnName = NULL,
SizeTargetColumnName = NULL,
CountFeatureColNames = NULL,
SizeFeatureColNames = NULL,
ModelIDs = c("CountModel","SizeModel"),
MaxModelsGrid = 5,
ModelPath = NULL,
MetaDataPath = NULL,
NumOfParDepPlots = 0) {
if(parallel::detectCores() > 10) data.table::setDTthreads(threads = max(1L, parallel::detectCores() - 2L)) else data.table::setDTthreads(threads = max(1L, parallel::detectCores()))
if(is.null(ModelPath)) return("Need to supply a path in ModelPath for saving models")
if(AutoTransform) TransFormCols <- CountTargetColumnName else TransFormCols <- NULL
gc()
if(StratifyTargets) {
StratTargetColumns <- "Counts"
StratTargetPrecision <- 0.001
} else {
StratTargetColumns <- NULL
StratTargetPrecision <- NULL
}
CountDataSets <- AutoDataPartition(
data = CountData,
NumDataSets = 3,
Ratios = DataPartitionRatios,
PartitionType = "random",
StratifyColumnNames = StratifyColumnName,
TimeColumnName = NULL)
CountDataTrain <- CountDataSets$TrainData
CountDataValidate <- CountDataSets$ValidationData
CountDataTest <- CountDataSets$TestData
for(quan in CountQuantiles) {
CountDataTrainCopy <- data.table::copy(CountDataTrain)
CountDataValidateCopy <- data.table::copy(CountDataValidate)
CountDataTestCopy <- data.table::copy(CountDataTest)
TestModel <- AutoH2oGBMRegression(
data = CountDataTrainCopy,
ValidationData = CountDataValidateCopy,
TestData = CountDataTestCopy,
TargetColumnName = CountTargetColumnName,
FeatureColNames = CountFeatureColNames,
TransformNumericColumns = TransFormCols,
Alpha = quan,
Distribution = "quantile",
eval_metric = EvalMetric,
Trees = NTrees,
GridTune = GridTune,
MaxMem = MaxMem,
NThreads = NThreads,
MaxModelsInGrid = MaxModelsGrid,
model_path = ModelPath,
metadata_path = MetaDataPath,
ModelID = paste0(ModelIDs[1],"_",quan),
NumOfParDepPlots = NumOfParDepPlots,
ReturnModelObjects = FALSE,
SaveModelObjects = TRUE,
IfSaveModel = "standard",
H2OShutdown = TRUE,
Methods = c("BoxCox", "Asinh", "Asin", "Log", "LogPlus1", "Logit", "YeoJohnson"))
Sys.sleep(10)
}
rm(CountDataSets,CountData,CountDataTrain,CountDataValidate,CountDataTest)
if(AutoTransform) {
TransFormCols <- SizeTargetColumnName
} else {
TransFormCols <- NULL
}
if(StratifyTargets) {
StratTargetColumns <- "Size"
StratTargetPrecision <- 0.001
} else {
StratTargetColumns <- NULL
StratTargetPrecision <- NULL
}
SizeDataSets <- AutoDataPartition(
data = SizeData,
NumDataSets = 3,
Ratios = DataPartitionRatios,
PartitionType = "random",
StratifyColumnNames = NULL,
TimeColumnName = NULL)
SizeDataTrain <- SizeDataSets$TrainData
SizeDataValidate <- SizeDataSets$ValidationData
SizeDataTest <- SizeDataSets$TestData
gc()
for(quan in SizeQuantiles) {
SizeDataTrainCopy <- data.table::copy(SizeDataTrain)
SizeDataValidateCopy <- data.table::copy(SizeDataValidate)
SizeDataTestCopy <- data.table::copy(SizeDataTest)
TestModel <- AutoH2oGBMRegression(
data = SizeDataTrainCopy,
ValidationData = SizeDataValidateCopy,
TestData = SizeDataTestCopy,
TargetColumnName = SizeTargetColumnName,
FeatureColNames = SizeFeatureColNames,
TransformNumericColumns = TransFormCols,
Alpha = quan,
Distribution = "quantile",
eval_metric = EvalMetric,
Trees = NTrees,
GridTune = GridTune,
MaxMem = MaxMem,
NThreads = NThreads,
MaxModelsInGrid = MaxModelsGrid,
model_path = ModelPath,
metadata_path = MetaDataPath,
ModelID = paste0(ModelIDs[2],"_",quan),
NumOfParDepPlots = NumOfParDepPlots,
ReturnModelObjects = FALSE,
SaveModelObjects = TRUE,
IfSaveModel = "standard",
H2OShutdown = TRUE,
Methods = c("BoxCox", "Asinh", "Asin", "Log", "LogPlus1", "Logit", "YeoJohnson"))
Sys.sleep(10)
}
}
|
context("Checking rm_endmark")
test_that("rm_endmark is removing/replacing emoticon strings",{
x <- c("I like the dog.", "I want it *|", "I;",
"Who is| that?", "Hello world", "You...")
x2 <- c("I like the dog", "I want it", "I;", "Who is| that", "Hello world",
"You")
expect_equivalent(rm_endmark(x), x2)
})
test_that("rm_endmark is extracting emoticon strings",{
x <- c("I like the dog.", "I want it *|", "I;",
"Who is| that?", "Hello world", "You...")
x3 <- list(".", "*|", NA_character_, "?", NA_character_, "...")
expect_equivalent(rm_endmark(x, extract=TRUE), x3)
})
|
tot_plot <- function(dataframe, text.var, grouping.var = NULL, facet.vars = NULL,
tot = TRUE, transform = FALSE, ncol = NULL, ylab=NULL, xlab=NULL, bar.space=0,
scale = NULL, space = NULL, plot = TRUE) {
word.count <- group <- caps <- NULL
DF <- dataframe
if (is.logical(tot)) {
if (isTRUE(tot)) {
if (!"tot" %in% colnames(dataframe)) {
stop("supply valid tot argument")
}
tot <- TOT(dataframe[["tot"]])
} else {
if (!is.null(facet.vars)) {
DF[, "qdapIDqdap"] <- seq_len(nrow(DF))
rmout <- lapply(split(DF, DF[[facet.vars]]), function(x) {
x <- x[order(x[, "qdapIDqdap"]), ]
x[, "tot"] <- seq_len(nrow(x))
x
})
rmout <- do.call(rbind, rmout)
rmout <- rmout[order(rmout[, "qdapIDqdap"]), ]
tot <- rmout[, "tot"]
DF[, "qdapIDqdap"] <- NULL
} else {
tot <- seq_len(nrow(DF))
}
}
} else {
if (is.character(tot)) {
lentot <- length(tot)
if (lentot != 1 && lentot != nrow(DF)) {
stop("tot not = to nrow of dataframe")
}
if (lentot == 1) {
tot <- dataframe[, tot]
}
a <- rle(as.character(tot))
tot <- rep(seq_along(a$lengths), a$lengths)
}
}
dataframe <- data.frame(tot = tot, text.var = dataframe[, text.var])
if (!is.null(grouping.var)) {
G <- paste(grouping.var, collapse="&")
if (ncol(DF[, grouping.var, drop=FALSE]) > 1) {
dataframe[, "group"] <- paste2(DF[, grouping.var])
} else {
dataframe[, "group"] <- DF[, grouping.var]
}
}
if (!is.null(facet.vars)) {
G2 <- paste(facet.vars, collapse="&")
if (ncol(DF[, facet.vars, drop=FALSE]) > 1) {
dataframe[, "new2"] <- DF[, facet.vars[1]]
dataframe[, "new3"] <- DF[, facet.vars[2]]
} else {
dataframe[, "new2"] <- DF[, facet.vars[1]]
}
}
dataframe[, "word.count"] <- wc(dataframe[, "text.var"])
if (is.null(xlab)) {
Xlab <- "Turn of Talk"
}
if (is.null(ylab)) {
Ylab <- "Word Count"
}
dataframe <- stats::na.omit(dataframe)
dataframe <- droplevels(dataframe)
dataframe[, "bar.space"] <- rep(bar.space, nrow(dataframe))
dataframe[, "tot"] <- factor(dataframe[, "tot"],
levels= sort(unique(dataframe[, "tot"])))
theplot <- ggplot(dataframe, aes(x = tot))
if (!is.null(grouping.var)) {
theplot <- theplot + geom_bar(aes(weight = word.count, fill = group),
width= 1-bar.space, data=dataframe) +
labs(fill = Caps(gsub("&", " & ", G, fixed=TRUE), all=TRUE))
} else {
theplot <- theplot +
geom_bar(aes(weight = word.count), width= 1-bar.space, data=dataframe)
}
theplot <- theplot + ylab(Ylab) + xlab(Xlab) +
scale_y_continuous(expand = c(0,0)) +
theme(axis.text.x=element_blank(), axis.ticks.x=element_blank())
if (!is.null(facet.vars)) {
if(!is.null(ncol)){
theplot <- theplot + facet_wrap(~new2, scales = scale, ncol=ncol)
} else {
if (length(facet.vars) == 1) {
if (transform) {
theplot <- theplot + facet_grid(.~new2, scales = scale, space = space)
} else {
theplot <- theplot + facet_grid(new2~., scales = scale, space = space)
}
} else {
theplot <- theplot + facet_grid(new2~new3, scales = scale, space = space)
}
}
}
if (plot) {
print(theplot)
}
invisible(theplot)
}
|
num_default <- function(dt, ...){
dots <- list(...)
rowlabel <- dots$rowlabel
missing <- dots$missing
digits <- dots$digits
rnd <- paste0("%.", digits, "f")
nocols <- FALSE
if (is.null(ncol(dt))){
nocols <- TRUE
dt <- data.frame(x = dt) %>%
mutate(y= 1:n() %% 2)
}
if (missing == TRUE){
miss <- dt %>% filter(is.na(dt[,1]))
miss <- miss[,2] %>% table() %>% as.data.frame() %>% t()
miss <- if (dim(miss)[1] >= 2) as.numeric(miss[2,]) else 0
}
dt <- dt[complete.cases(dt),]
out <- aggregate(dt[,1],list(dt[,2]),mean)
out[,2] <- sprintf(rnd, out[,2])
out <- out %>% t() %>% as.data.frame()
SD <- aggregate(dt[,1],list(dt[,2]),sd)
SD[,2] <- sprintf(rnd, SD[,2])
SD <- SD %>% t() %>% as.data.frame()
med <- aggregate(dt[,1],list(dt[,2]),median)
med[,2] <- sprintf(rnd, med[,2])
med <- med %>% t() %>% as.data.frame()
Q1 <- aggregate(dt[,1],list(dt[,2]),quantile, probs=.25)
Q1[,2] <- sprintf(rnd, Q1[,2])
Q1 <- Q1 %>% t() %>% as.data.frame()
Q3 <- aggregate(dt[,1],list(dt[,2]),quantile, probs=.75)
Q3[,2] <- sprintf(rnd, Q3[,2])
Q3 <- Q3 %>% t() %>% as.data.frame()
MIN <- aggregate(dt[,1],list(dt[,2]),min)
MIN[,2] <- sprintf(rnd, MIN[,2])
MIN <- MIN %>% t() %>% as.data.frame()
MAX <- aggregate(dt[,1],list(dt[,2]),max)
MAX[,2] <- sprintf(rnd, MAX[,2])
MAX <- MAX %>% t() %>% as.data.frame()
out["min",] <- MIN[2,]
out["Q1",] <- Q1[2,]
out["median",] <- med[2,]
out["Q3",] <- Q3[2,]
out["max",] <- MAX[2,]
out["mean",] <- out[2,]
out["SD",] <- SD[2,]
colnames(out) <- out[1,]
out$Overall <- ""
out$Overall[3] <- sprintf(rnd, min(dt[,1]))
out$Overall[4] <- sprintf(rnd, as.numeric(quantile(dt[,1],.25)))
out$Overall[5] <- sprintf(rnd, median(dt[,1]))
out$Overall[6] <- sprintf(rnd, as.numeric(quantile(dt[,1],.75)))
out$Overall[7] <- sprintf(rnd, max(dt[,1]))
out$Overall[8] <- sprintf(rnd, mean(dt[,1]))
out$Overall[9] <- sprintf(rnd, sd(dt[,1]))
out <- out[(3:nrow(out)),]
out <- data.frame(Measure=rownames(out), out)
rownames(out) <- NULL
if (missing == TRUE){
out <- cbind(Variable="",out)
out[8,] <- ""
out$Variable[1] <- rowlabel
out$Measure[8] <- "Missing"
for (i in 1:length(miss)){
out[8,(2+i)] <- miss[i]
}
out$Overall[8] <- sum(miss)
} else {
out <- cbind(Variable="",out)
out$Variable[1] <- rowlabel
}
if (nocols == TRUE){
out <- out[,-c(3,4)]
}
out
}
|
cat("\n
cat("
LongData3d_validity <- function(object){
if(length(object@idAll)==0&length(object@time)==0&length(object@varNames)==0&length(object@traj)==0){
}else{
if(any(c(length(object@idAll)==0,length(object@time)==0,length(object@varNames)==0,length(object@traj)==0))){
stop("[LongData3d:validity]: at least one slot is empty")}else{}
if(length(object@idFewNA)!=dim(object@traj)[1]){
stop("[LongData3d:validity]: The number of id does not fit with the number of trajectories
[LongData3d:validity]: length(idFewNA) =",length(object@idFewNA)," ; dim(traj)[1] =",dim(object@traj)[1])}else{}
if(length(object@time)!=dim(object@traj)[2]){
stop("[LongData3d:validity]: The number of time does not fit with the length of trajectories
[LongData3d:validity]: length(time) =",length(object@time)," ; dim(traj)[2]=",dim(object@traj)[2])}else{}
if(length(object@varNames)!=dim(object@traj)[3]){
stop("[LongData3d:validity]: The number of variable does not fit with the width ot trajectories
[LongData3d:validity]: length(varNames) =",length(object@varNames)," ; dim(traj)[3]=",dim(object@traj)[3])}else{}
if(any(is.na(object@time))){
stop("[LongData3d:validity]: There is some unknow times
[LongData3d:validity]: is.na(time) =",is.na(object@time))}else{}
if(!identical(object@time,sort(object@time))){
stop("[LongData3d:validity]: time is not in increasing order
[LongData3d:validity]: time =",object@time)}else{}
if(any(duplicated(object@time))){
stop("[LongData3d:validity]: Some time are duplicate
[LongData3d:validity]: duplicated(time) =",duplicated(object@time))}else{}
if(any(is.na(object@idAll))){
stop("[LongData3d:validity]: Some idAll are NA
[LongData3d:validity]: is.na(idAll) =",is.na(object@idAll))}else{}
if(any(duplicated(object@idAll))){
stop("[LongData3d:validity]: Some idAll are duplicate
[LongData3d:validity]: duplicated(idAll) =",duplicated(object@idAll))}else{}
if(any(dimnames(object@traj)[[1]]!=object@idFewNA,
dimnames(object@traj)[[2]]!=paste("t",object@time,sep=""),
dimnames(object@traj)[[3]]!=object@varNames)){
stop("[LongData3d:validity]: dimnames of traj is not correct
[LongData3d:validity]: dimnames(traj) =",dimnames(object@traj),"
[LongData3d:validity]: idFewNA =",object@idFewNA,"
[LongData3d:validity]: paste('t',time) =",paste("t",object@time,sep=""),"
[LongData3d:validity]: varNames=",object@varNames)}else{}
if(max(object@maxNA)>=length(object@time)){
stop("[LongData3d:validity]: some maxNA are too high (trajectories with only NA are not trajectories)
[LongData3d:validity]: maxNA =",object@maxNA," ; length(time) =",length(object@time))}else{}
}
}
setClass(
Class="LongData3d",
representation=representation(
idAll="character",
idFewNA="character",
time="numeric",
varNames="character",
traj="array",
dimTraj="numeric",
maxNA="numeric",
reverse="matrix"
),
prototype=prototype(
idAll=character(),
idFewNA=character(),
time=numeric(),
varNames=character(),
traj=array(dim=c(0,0,0)),
dimTraj=numeric(),
maxNA=numeric(),
reverse=matrix(NA,2)
),
validity=LongData3d_validity
)
cat("\n
longData3d <- function(traj,idAll,time,timeInData,varNames,maxNA){
if(missing(traj)){
return(new("LongData3d"))
}else{}
if(is.data.frame(traj)){
if(missing(idAll)){
idAll <- traj[,1]
}else{}
matr <- as.matrix(traj[,sort(na.omit(unlist(timeInData)))])
lengthTime <- length(timeInData[[1]])
nbVar <- length(timeInData)
traj <- array(matr[,rank(unlist(timeInData),na.last="keep")],c(nrow(traj),lengthTime,nbVar))
}else{
if(is.array(traj)){
if(missing(idAll)){
idAll <- paste("i",1:nrow(traj),sep="")
}else{}
if(!missing(timeInData)){
traj <- traj[,timeInData,,drop=FALSE]
}else{}
lengthTime <- dim(traj)[2]
nbVar <- dim(traj)[3]
}else{
stop("[LongData3d:constructor]: 'traj' should be either a data.frame or an array")
}
}
if(missing(maxNA)){maxNA <- lengthTime-2}else{}
if(length(maxNA)==1){maxNA <- rep(maxNA,nbVar)}else{}
if(missing(varNames)){
if(!missing(timeInData)){
if(!is.null(names(timeInData))){
varNames <- names(timeInData)
}else{
varNames <- paste("V",1:nbVar,sep="")
}
}else{
varNames <- paste("V",1:nbVar,sep="")
}
}else{}
if(missing(time)){time <- 1:lengthTime}else{}
keepId <- apply(t(apply(traj,c(1,3),function(x){sum(is.na(x))}))<=maxNA,2,all)
traj <- traj[keepId,,,drop=FALSE]
idFewNA <- idAll[keepId]
dimnames(traj) <- list(idFewNA,paste("t",time,sep=""),varNames)
reverse <- matrix(c(0,1),2,length(varNames),dimnames=list(c("mean","sd"),varNames))
return(new("LongData3d",
idAll=as.character(idAll),
idFewNA=as.character(idFewNA),
time=time,
varNames=varNames,
traj=traj,
dimTraj=dim(traj),
maxNA=maxNA,
reverse=reverse)
)
}
cat("\n
cat("
LongData3d_get <- function(x,i,j,drop){
switch(EXPR=i,
"idAll"={return(x@idAll)},
"idFewNA"={return(x@idFewNA)},
"varNames"={return(x@varNames)},
"time"={return(x@time)},
"traj"={return(x@traj)},
"dimTraj"={return(x@dimTraj)},
"nbIdFewNA"={return(x@dimTraj[1])},
"nbTime"={return(x@dimTraj[2])},
"nbVar"={return(x@dimTraj[3])},
"maxNA"={return(x@maxNA)},
"reverse"={return(x@reverse)},
stop("[LongData3d:get]:",i," is not a 'LongData' slot")
)
}
setMethod("[","LongData3d",LongData3d_get)
cat("\n
cat("
LongData3d_show <- function(object){
cat("\n~ idAll = [",length(object@idAll),"] ",sep="");catShort(object@idAll)
cat("\n~ idFewNA = [",object['nbIdFewNA'],"] ",sep="");catShort(object@idFewNA)
cat("\n~ varNames = [",object['nbVar'],"] ",sep="");catShort(object@varNames)
cat("\n~ time = [",object['nbTime'],"] ",sep="");catShort(object@time)
cat("\n~ maxNA = [",object['nbVar'],"] ",sep="");catShort(object@maxNA)
cat("\n~ reverse = [2x",object['nbVar'],"]",sep="");
cat("\n - mean =",object['reverse'][1,])
cat("\n - SD =",object['reverse'][2,])
cat("\n\n~ traj = [",object['nbIdFewNA'],"x",object['nbTime'],"x",object['nbVar'],"] (limited to 5x10x3) :\n",sep="")
if(length(object@idFewNA)!=0){
for(iVar in 1:min(3,length(object@varNames))){
cat("\n",object@varNames[iVar],":\n")
if(ncol(object@traj)>10){
trajToShow <- as.data.frame(object@traj[,1:10,iVar])
trajToShow$more <- "..."
}else{
trajToShow <- as.data.frame(object@traj[,,iVar])
}
if(nrow(object@traj)>5){
print(trajToShow[1:5,])
cat("... ...\n")
}else{
print(trajToShow)
}
}
}else{cat(" <no trajectories>\n")}
return(invisible(object))
}
setMethod("show","LongData3d",
definition=function(object){
cat("\n ~~~ Class: LongData3d ~~~")
LongData3d_show(object)
}
)
cat("
LongData3d_print <- function(x){
object <- x
cat("\n ~~~ Class: LongData3d ~~~")
cat("\n~ Class :",class(object))
cat("\n\n~ traj = [",object['nbIdFewNA'],"x",object['nbTime'],"x",object['nbVar'],"] (limited to 5x10x3) :\n",sep="")
print(object['traj'])
cat("\n\n~ idAll = [",length(object@idAll),"]\n",sep="");print(object@idAll)
cat("\n~ idFewNA = [",object['nbIdFewNA'],"]\n",sep="");print(object@idFewNA)
cat("\n~ varNames = [",object['nbVar'],"]\n",sep="");print(object@varNames)
cat("\n~ time = [",object['nbTime'],"]\n",sep="");print(object@time)
cat("\n~ maxNA = [",object['nbVar'],"]\n",sep="");print(object@maxNA)
cat("\n~ reverse mean =\n");print(object['reverse'][1,])
cat("\n~ reverse SD =\n");print(object['reverse'][2,])
return(invisible(object))
}
setMethod("print","LongData3d",LongData3d_print)
setMethod("is.na", "LongData3d", function(x) FALSE)
cat("\n
LongData3d_scale <- function(x,center=TRUE,scale=TRUE){
nameObject<-deparse(substitute(x))
traj <- x@traj
if(identical(center,TRUE)){center <- apply(traj,3,meanNA)}else{}
if(identical(scale,TRUE)){scale <- apply(traj,3,function(x){sdNA(as.numeric(x))})}else{}
for (i in 1:x@dimTraj[3]){
traj[,,i] <- (traj[,,i]-center[i])/scale[i]
}
x@reverse[1,] <- x@reverse[1,] + center*x@reverse[2,]
x@reverse[2,] <- x@reverse[2,] * scale
x@traj <- traj
assign(nameObject,x,envir=parent.frame())
return(invisible())
}
setMethod(f="scale",
signature=c(x="LongData3d"),
definition=LongData3d_scale
)
LongData3d_restoreRealData <- function(object){
nameObject<-deparse(substitute(object))
traj <- object@traj
for (i in 1:object@dimTraj[3]){
traj[,,i] <- traj[,,i]*object@reverse[2,i] + object@reverse[1,i]
}
object@reverse[1,] <- 0
object@reverse[2,] <- 1
object@traj <- traj
assign(nameObject,object,envir=parent.frame())
return(invisible())
}
setMethod(f="restoreRealData",
signature=c(object="LongData3d"),
definition=LongData3d_restoreRealData
)
varNumAndName <- function(variable,allVarNames){
if(class(variable)=="character"){
varName <- variable
varNum <- c(1:length(allVarNames))[allVarNames %in% varName]
if(length(varNum)==0){stop("[LongData3d:varNumAndName]: 'variable' is not a correct variable name
[LongData3d:plod3d]: variable=",varName," is not in allVarNames=",allVarNames)}else{}
}else{
varNum <- variable
varName <- allVarNames[varNum]
}
return(list(num=varNum,name=varName))
}
longDataFrom3d <- function(xLongData3d,variable){
variable <- varNumAndName(variable,xLongData3d["varNames"])[[2]]
selectVar <- xLongData3d["varNames"] %in% variable
if(all(!selectVar)){stop("[LongData3d:longDataFrom3d] invalide variable names")}else{}
idAll <- xLongData3d["idAll"]
time <- xLongData3d["time"]
traj <- xLongData3d["traj"][,,selectVar]
traj <- rbind(traj,matrix(NA,nrow=length(idAll)-nrow(traj),ncol=ncol(traj),dimnames=list(idAll[!idAll %in% xLongData3d["idFewNA"]])))[idAll,]
return(longData(traj=traj,
idAll=idAll,
time=time,
varNames=xLongData3d["varNames"][selectVar],
maxNA=xLongData3d["maxNA"][selectVar])
)
}
longDataTo3d <- function(xLongData){
idAll <- xLongData["idAll"]
traj <- xLongData["traj"]
time <- xLongData["time"]
traj <- rbind(traj,matrix(NA,nrow=length(idAll)-nrow(traj),ncol=ncol(traj),dimnames=list(idAll[!idAll %in% xLongData["idFewNA"]])))[idAll,]
dim(traj) <- c(dim(traj),1)
return(longData3d(traj=traj,
idAll=idAll,
time=time,
varNames=xLongData["varNames"],
maxNA=xLongData["maxNA"])
)
}
cat("\n-------------------------------------------------------------------
-------------------------- Class LongData -------------------------
------------------------------- Fin -------------------------------
-------------------------------------------------------------------\n")
|
require(MASS)
data(Insurance)
glmmod <- glm(Claims ~ District + Group + Age + offset(log(Holders)),
data = Insurance, family = poisson)
head(model.frame(glmmod))
require(glarma)
data(DriverDeaths)
y <- DriverDeaths[, "Deaths"]
X <- as.matrix(DriverDeaths[, 2:5])
Population <- DriverDeaths[, "Population"]
glarmamodNoARMA <- glarma(y, X, offset = log(Population/100000),
type = "Poi", method = "FS",
residuals = "Pearson", maxit = 100, grad = 1e-6)
head(model.frame(glarmamodNoARMA))
glmmod <- glm(y ~ X - 1, offset = log(Population/100000),
family = poisson)
head(model.frame(glmmod))
summary(glarmamodNoARMA)
summary(glmmod)
print(glarmamodNoARMA)
print(glmmod)
glarmamod <- glarma(y, X, phiLags = c(12),
type = "Poi", method = "FS",
residuals = "Pearson", maxit = 100, grad = 1e-6)
head(model.frame(glarmamod))
glarmamodOffset <- glarma(y, X, offset = log(Population/100000),
phiLags = c(12),
type = "Poi", method = "FS",
residuals = "Pearson", maxit = 100, grad = 1e-6)
head(model.frame(glarmamodOffset))
summary(glmmod)
summary(glarmamodOffset)
summary(glarmamod)
print(glmmod)
print(glarmamodOffset)
print(glarmamod)
coef(glmmod)
coef(glarmamodOffset)
coef(glarmamod)
|
setwd(Sys.getenv("ICD_HOME"))
rhub_env <- read.delim(
comment.char = "
sep = "=",
file = "tools/env/rhub",
header = FALSE,
strip.white = TRUE,
blank.lines.skip = TRUE,
quote = '"',
col.names = c(
"name",
"value"
),
row.names = 1
)
rhe <- c()
for (n in rownames(rhub_env)) {
rhe[n] <- rhub_env[n, "value"]
}
sanitize <- FALSE
if (sanitize) {
rhub::check_with_sanitizers(
env_vars = c(MAKEFLAGS = "CXX11FLAGS+=-w CXXFLAGS+=-w")
)
rhub::check_on_windows(env_vars = rhe)
}
rhub_res <- list()
plats <- c(
"macos-highsierra-release-cran",
"linux-x86_64-rocker-gcc-san",
"fedora-clang-devel",
"debian-gcc-patched",
"windows-x86_64-patched",
"ubuntu-gcc-devel",
"solaris-x86-patched"
)
rhub_res <- rhub::check(env_vars = rhe, platform = plats)
|
varband_path <- function(S, w = FALSE, lasso = FALSE, lamlist = NULL, nlam = 60, flmin = 0.01){
p <- ncol(S)
stopifnot(p == nrow(S))
if (is.null(lamlist)) {
lam_max <- lammax(S = S)
lamlist <- pathGen(nlam = nlam, lam_max = lam_max,
flmin = flmin, S = S)
} else {
nlam <- length(lamlist)
}
result<- array(NA, c(p, p, nlam))
for (i in seq(nlam)) {
if(i==1){
result[, , i] <- diag(1/sqrt(diag(S)))
}
else
{
result[, , i] <- varband(S = S, lambda = lamlist[i],
init = result[, , i-1], w = w, lasso = lasso)
}
}
return(list(path = result, lamlist = lamlist))
}
lammax <- function(S){
p <- ncol(S)
sighat <- rep(NA, p-1)
for (r in seq(2, p)){
sighat[r-1] <- max(abs(S[(1:(r-1)), r]))/sqrt(S[r, r])
}
2 * max(sighat)
}
pathGen <- function(nlam, lam_max, flmin, S){
lamlist_lin <- lam_max * exp(seq(0, log(flmin), length = nlam/2))
lamlist_exp <- seq(lam_max - 1e-8, lam_max*flmin - 1e-8, length.out = nlam/2)
return(sort(unique(c(lamlist_lin, lamlist_exp)), decreasing = T))
}
|
generate_r_sexp <- function(x, data, meta) {
if (is.recursive(x)) {
fn <- x[[1L]]
args <- x[-1L]
if (fn == "length") {
generate_r_sexp(data$elements[[args[[1L]]]]$dimnames$length,
data, meta)
} else if (fn == "dim") {
nm <- data$elements[[args[[1L]]]]$dimnames$dim[[args[[2L]]]]
generate_r_sexp(nm, data, meta)
} else if (fn == "odin_sum") {
generate_r_sexp_sum(lapply(args, generate_r_sexp,
data, meta))
} else if (fn == "norm_rand") {
quote(rnorm(1L))
} else if (fn == "unif_rand") {
quote(runif(1L))
} else if (fn == "exp_rand") {
quote(rexp(1L))
} else {
args <- lapply(args, generate_r_sexp, data, meta)
if (fn %in% names(FUNCTIONS_STOCHASTIC) && fn != "rmhyper") {
args <- c(list(1L), args)
}
if (fn == "rbinom") {
args[[2L]] <- call("round", args[[2L]])
}
as.call(c(list(as.name(fn)), args))
}
} else if (is.character(x)) {
location <- data$elements[[x]]$location
if (!is.null(location) && location == "internal") {
call("[[", as.name(meta$internal), x)
} else {
as.name(x)
}
} else if (is.integer(x)) {
as.numeric(x)
} else {
x
}
}
generate_r_sexp_sum <- function(args) {
f <- function(a, b) {
if (identical(a, b)) a else call("seq.int", a, b, by = 1L)
}
i <- seq(2L, by = 2L, to = length(args))
idx <- Map(f, args[i], args[i + 1L])
call("sum", as.call(c(list(as.name("["), args[[1L]]), idx)))
}
|
ggraph <- function(graph, layout = 'auto', ...) {
envir <- parent.frame()
p <- ggplot(data = create_layout(graph, layout, ...), environment = envir) +
th_no_axes()
class(p) <- c('ggraph', class(p))
p
}
ggplot_build.ggraph <- function(plot) {
.register_graph_context(attr(plot$data, 'graph'), free = TRUE)
NextMethod()
}
|
redist.plot.cores <- function(shp, plan = NULL, core = NULL, lwd = 2) {
if (missing(shp)) {
stop('Please provide an argument to shp.')
}
plan <- eval_tidy(enquo(plan), shp)
if (is.null(plan)) {
if(inherits(shp, 'redist_map')){
plan <- get_existing(shp)
} else {
stop('Please provide an argument to plan.')
}
}
core <- eval_tidy(enquo(core), shp)
if (missing(core)) {
stop('Please provide an argument to core.')
}
shp$plan <- plan
shp$core <- core
shp_un <- shp %>%
group_by(plan) %>%
summarize(geometry = st_union(geometry),
.groups = 'drop') %>%
suppressMessages()
shp_cores <- shp %>%
group_by(plan, core) %>%
summarize(ct = n(),
geometry = st_union(geometry),
.groups = 'drop') %>%
mutate(ct = if_else(.data$ct == 1, NA_integer_, .data$ct)) %>%
suppressMessages()
shp_cores %>%
ggplot() +
geom_sf(aes(fill = .data$ct)) +
ggplot2::scale_fill_distiller(direction = 1, na.value = 'white') +
geom_sf(fill = NA, data = shp_un, color = 'black', lwd = lwd) +
labs(fill = 'Number of Units in Core') +
theme_void() +
theme(legend.position = 'bottom')
}
|
model.average<- function(x,...)
{
UseMethod("model.average")
}
|
topological.approx.ess <- function(chains, burnin = 0, max.sampling.interval = 100, treedist = 'PD', use.all.samples = FALSE){
chains = check.chains(chains)
if(inherits(chains, "list")){
N = length(chains[[1]]$trees)
} else {
N = length(chains$trees)
}
if(N-burnin < max.sampling.interval){
warning("Not enough trees to use your chosen max.sampling.interval")
warning("Setting it to 90% of the length of your post-burnin chain instead")
max.sampling.interval = floor((N - burnin) * 0.9)
}
autocorr.intervals = max.sampling.interval
print(sprintf("Calculating approximate ESS with sampling intervals from 1 to %d", max.sampling.interval))
autocorr.df = topological.autocorr(chains, burnin, max.sampling.interval, autocorr.intervals, squared = TRUE, treedist = treedist, use.all.samples = use.all.samples)
autocorr.m = estimate.autocorr.m(autocorr.df)
approx.ess.df = approx.ess.multi(autocorr.df, autocorr.m, (N-burnin))
return(approx.ess.df)
}
approx.ess.multi <- function(autocorr.df, autocorr.m, N){
r = length(unique(autocorr.df$chain))
approx.ess.df = data.frame(operator = rep(NA, r), approx.ess = rep(NA, r), chain = unique(autocorr.df$chain))
for(i in 1:nrow(approx.ess.df)){
thischain = approx.ess.df$chain[i]
thism = autocorr.m$autocorr.time[autocorr.m$chain == thischain]
thisdata = autocorr.df[autocorr.df$chain == thischain,]
ess.info = approx.ess.single(thisdata, thism, N)
ess = ess.info$ess
operator = ess.info$operator
approx.ess.df$approx.ess[approx.ess.df$chain == thischain] = ess
approx.ess.df$operator[approx.ess.df$chain == thischain] = operator
}
return(approx.ess.df)
}
approx.ess.single <- function(df, autocorr.time, N){
if(autocorr.time < 0){
m = nrow(df) + 1
}else{
m = autocorr.time
}
D = max(df$topo.distance)
S = 0
if(m>1){
for(k in 1:(m - 1)){
f = df$topo.distance[k]
S = S + ((N - k) * f)
}
}
S = S + (N - m + 1) * (N - m) * D / 2
S = S / 2 / N^2
ESS = 1 / (1 - 4 * S / D)
if(autocorr.time<0){
operator = "<"
}else{
operator = "="
}
return(list("ess" = ESS, "operator" = operator))
}
|
`sim.mar1s` <-
function(object, n.ahead = 1, n.sim = 1, start.time = 0,
xreg.absdata = NULL, init.absdata = NULL)
{
arcoef <- head(coef(object$logstoch.ar1), 1)
xregcoef <- tail(coef(object$logstoch.ar1), -1)
loginnov <- matrix(rnorm(n.ahead*n.sim, sd = object$logresid.sd),
n.ahead, n.sim)
d <- .decomp(object, start.time, xreg.absdata, init.absdata)
y1 <- compose.ar1(arcoef, loginnov, head(d$init.logstoch, 1),
xregcoef, d$xreg.logstoch, tail(d$init.logstoch, -1))
cycl <- cycle(ts(y1, start = start.time,
frequency = frequency(object$logseasonal)))
result <- exp(tail(y1, 1) +
as.matrix(object$logseasonal)[tail(cycl, 1), 1])
return(as.vector(result))
}
|
`print.segRatio` <-
function(x, digits=3, ..., index=c(1:min(10,length(x$r))) ) {
cat("Summary statistics for segregation ratios:\n")
print(summary(x$seg.ratio),...)
cat("Observed numbers and segregation proportions for\n",
length(index),"of the markers for",x$n.individuals,
"individuals:\n")
miss <- x$n.individuals*length(x$n) - sum(x$n)
if( miss>0 ) {
cat("Percentage of missing markers:",100*miss/sum(x$n),"\n")
}
print(x$seg.ratio[index],digits=digits, ...)
}
|
"region_isos_demo"
|
mixed.sdf <- function(formula,
data,
weightVars=NULL,
weightTransformation=TRUE,
recode=NULL,
defaultConditions=TRUE,
tolerance=0.01,
nQuad=NULL,
verbose=0,
family=NULL,
centerGroup=NULL,
centerGrand=NULL,
fast=FALSE,
...) {
call <- match.call()
call0 <- call
if(!missing(nQuad) & is.null(family)) {
warning(paste0("The ", sQuote("nQuad"), " argument is depreciated for linear models."))
}
if(!missing(tolerance) & is.null(family)) {
warning(paste0("The ", sQuote("tolerance"), " argument is depreciated."))
}
if(!missing(fast)) {
warning(paste0("The ", sQuote("fast"), " argument is depreciated."))
}
if(!missing(family)) {
stop(paste0("The ", dQuote("family") ," argument is depreciated; plase use the ", dQuote("WeMix"), " package's ", dQuote("mix"), " function direclty for binomial models."))
}
formula0 <- formula
checkDataClass(data, c("edsurvey.data.frame", "light.edsurvey.data.frame"))
survey <- getAttributes(data, "survey")
if (is.null(weightVars)) {
if (survey == "PISA") {
weightVars <- c("w_fstuwt", "w_fschwt")
} else if (survey %in% c("TIMSS", "TIMSS Advanced")) {
weightVars <- c("totwgt", "schwgt")
} else {
stop("mixed.sdf currently only supports automated weights for PISA, TIMSS, and TIMSS Advanced. If you use another survey, please specify your own weights. ")
}
call$weightVars <- weightVars
}
if (!inherits(formula, "formula")){
stop(paste0(sQuote("formula"), " argument must be of class formula."))
}
zeroLengthLHS <- attr(terms(formula), "response") == 0
if(zeroLengthLHS) {
yvar <- attributes(getAttributes(data, "pvvars"))$default
formula <- update(formula, new=substitute( yvar ~ ., list(yvar=as.name(yvar))))
} else{
yvar <- all.vars(formula[[2]])
}
pv <- hasPlausibleValue(yvar,data)
yvars <- yvar
linkingError <- "NAEP" %in% getAttributes(data, "survey") & any(grepl("_linking", yvars, fixed=TRUE))
if(linkingError) {
stop("mixed.sdf does not support estimation with linking error.")
}
if(pv){
yvars <- getPlausibleValue(yvar,data)
}
getDataArgs <- list(data=data,
varnames=unique(c(all.vars(formula), weightVars, yvars)),
returnJKreplicates=FALSE,
drop=FALSE,
omittedLevels=FALSE,
recode=recode,
includeNaLabel=TRUE,
dropUnusedLevels=TRUE)
if(!missing(defaultConditions)) {
getDataArgs <- c(getDataArgs, list(defaultConditions=defaultConditions))
}
edf <- do.call(getData, getDataArgs)
rawN <- nrow(edf)
for(wgt in weightVars) {
if(any(!(!is.na(edf[,wgt]) & edf[,wgt] > 0))) {
warning("Removing ", sum(!(!is.na(edf[ , wgt]) & edf[ , wgt] > 0))," rows with 0 or NA weight on ", dQuote(wgt), " from analysis.")
edf <- edf[!is.na(edf[ , wgt]) & edf[ , wgt] > 0, ]
}
}
pvy <- hasPlausibleValue(yvar, data)
yvars <- yvar
lyv <- length(yvars)
if(any(pvy)) {
yvars <- getPlausibleValue(yvar, data)
} else {
edf[,"yvar"] <- as.numeric(eval(formula[[2]],edf))
formula <- update(formula, new=substitute( yvar ~ ., list(yvar=as.name(yvar))))
yvars <- "yvar"
}
yvar0 <- yvars[1]
if(!is.null(family) && family$family %in% c("binomial")) {
if(any(pvy)) {
for(i in 1:length(yvars)) {
for(yvi in 1:length(pvy)) {
if(pvy[yvi]) {
edf[,yvar[yvi]] <- edf[,getPlausibleValue(yvar[yvi], data)[i]]
}
}
edf[,yvars[i]] <- as.numeric(eval(formula[[2]],edf))
}
oneDef <- max(edf[,yvars], na.rm=TRUE)
for(i in yvars) {
edf[,i] <- ifelse(edf[,i] %in% oneDef, 1, 0)
}
} else {
oneDef <- max(edf[,yvars], na.rm=TRUE)
edf[,yvar0] <- ifelse(edf$yvar %in% oneDef, 1, 0)
}
}
formula <- update(formula, as.formula(paste0(yvar0," ~ .")))
lformula <- lFormula(formula=formula, data=edf)
unparsedGroupNames <- names(lformula$reTrms$cnms)
groupParser <- function(groupi) {
all.vars(formula(paste0("~",groupi)))
}
groupNames <- rev(unique(unlist(lapply(unparsedGroupNames, groupParser))))
if (length(groupNames) == 0) {
stop("The formula only indicates one level. Use lm.sdf instead.")
}
if (length(weightVars) != length(groupNames) + 1) {
stop(paste0("The model requires ", length(groupNames) + 1, " weights."))
}
level <- length(groupNames) + 1
if (!weightTransformation) {
for(wi in 1:length(weightVars)) {
edf[[paste0("pwt",wi)]] <- edf[ , weightVars[wi]]
}
} else {
if (survey == "PISA") {
edf$sqw <- edf[ , weightVars[1]]^2
sumsqw <- aggregate(as.formula(paste0("sqw ~ ", groupNames)), data = edf, sum)
sumw <- aggregate(as.formula(paste0(weightVars[1], "~", groupNames)), data = edf, sum)
edf$sumsqw <- sapply(edf[,groupNames], function(s) sumsqw$sqw[sumsqw[ , groupNames] == s])
edf$sumw <- sapply(edf[,groupNames], function(s) sumw[sumw[ , groupNames] == s, weightVars[1]])
edf$pwt1 <- edf[ , weightVars[1]] * (edf$sumw / edf$sumsqw)
edf$pwt2 <- edf[ , weightVars[2]]
edf$sqw <- NULL
edf$sumsqw <- NULL
edf$sumw <- NULL
} else if (survey %in% c("TIMSS", "TIMSS Advanced")) {
edf$pwt1 <- edf[ , weightVars[1]] / edf[ , weightVars[2]]
edf$pwt2 <- edf[ , weightVars[2]]
} else {
warning(paste0("EdSurvey currently does not specify weight transformation rules for ",survey,". Raw weights were used for the analysis."))
edf$pwt1 <- edf[ , weightVars[1]]
edf$pwt2 <- edf[ , weightVars[2]]
}
}
lev <- unlist(getAttributes(data, "omittedLevels"))
keep <- rep(0, nrow(edf))
for (i in 1:ncol(edf)) {
vari <- names(edf)[i]
keep <- keep + (tolower(edf[ , vari]) %in% tolower(lev))
}
if(sum(keep>0) > 0) {
edf <- edf[keep==0, , drop=FALSE]
}
formula_pv <- formula
summary.WeMixResults <- getFromNamespace("summary.WeMixResults","WeMix")
if(!pv){
res <- run_mix(nQuad=nQuad, call=call, formula=formula, edf=edf,
verbose=verbose, family=family,center_group=centerGroup,center_grand=centerGrand,
tolerance=tolerance, fast=fast, ...)
env <- environment(res$lnlf)
model_sum <- summary.WeMixResults(res)
res$se <- c(model_sum$coef[,2] , model_sum$vars[,2])
names(res$se) <- c(row.names(model_sum$coef), row.names(model_sum$vars))
res$vars <- model_sum$vars[,1]
names(res$vars) <- row.names(model_sum$vars)
res$CMODE <- NULL
res$CMEAN <- NULL
res$hessian <- NULL
res$call <- call0
res$formula <- call0$formula
varsmat0 <- model_sum$varsmat
groupSum <- varsmat0[!duplicated(varsmat0$Level), c("Level", "Group")]
groupSum$Group[groupSum$Level == 1] <- "Obs"
groupSum$"n size" <- rev(res$ngroups)
for (i in 1:length(res$wgtStats)) {
groupSum$"mean wgt"[groupSum$Level == i] <- res$wgtStats[[i]]$mean
groupSum$"sum wgt"[groupSum$Level == i] <- res$wgtStats[[i]]$sum
}
res$groupSum <- groupSum
varsmat0 <- res$varDF
m <- length(yvars)
varsmat <- varsmat0[is.na(varsmat0$var2), c("level", "grp", "var1", "vcov", "SEvcov")]
varsmat$st <- sqrt(varsmat$vcov)
colnames(varsmat) <- c("Level", "Group", "Name", "Variance", "Std. Error", "Std.Dev.")
res$varsmatSum <- varsmat
res$VC <- model_sum$cov_mat
} else {
results <- list()
variances <- list()
pvi <- 0
for (value in yvars){
if (verbose>0) {
eout(paste0("Estimating mixed model with ", value, " as the outcome."))
}
pvi <- pvi+1
formula_pv <- update(formula_pv, as.formula(paste(value,"~.")))
model <- withCallingHandlers(run_mix(nQuad=nQuad, call=call, formula=formula_pv,
edf=edf, verbose=verbose, family=family, center_group=centerGroup, center_grand=centerGrand,
tolerance=tolerance, ...),
warning = function(w) {
if (pvi != 1) {
invokeRestart("muffleWarning")
} else {
message(conditionMessage(w))
}
},
message = function(c) {
if (pvi != 1) {
invokeRestart("muffleMessage")
}
})
results[[value]] <- model
model_sum <- summary.WeMixResults(model)
variances[[value]] <- c(model_sum$coef[,"Std. Error"]^2 , model_sum$varDF$SEvcov^2)
if(verbose > 1) {
print(model_sum)
}
}
res <- results[[1]]
varsmat0 <- model_sum$varsmat
groupSum <- varsmat0[!duplicated(varsmat0$Level), c("Level", "Group")]
groupSum$Group[groupSum$Level == 1] <- "Obs"
groupSum$"n size" <- rev(res$ngroups)
for (i in 1:length(res$wgtStats)) {
groupSum$"mean wgt"[groupSum$Level == i] <- res$wgtStats[[i]]$mean
groupSum$"sum wgt"[groupSum$Level == i] <- res$wgtStats[[i]]$sum
}
res$groupSum <- groupSum
M <- length(yvars)
co0 <- (1/M) * Reduce("+",
lapply(results, function(r){
coef(r)
}))
res$B <- (1/(M-1))* Reduce("+",
lapply(results, function(r) {
co <- coef(r) - co0
outer(co,co)
}))
res$Ubar <- (1/M) * Reduce("+",
lapply(results, function(r) {
r$cov_mat
}))
res$VC <- res$Ubar + ((M+1)/M) * res$B
res$lnl <- NULL
res$lnlf <- NULL
res$CMODE <- NULL
res$CMEAN <- NULL
res$hessian <- NULL
res$SE <- NULL
res$call <- call
res$PVresults <- results
env <- environment(results[[1]]$lnlf)
avg_coef <- rowSums(matrix(sapply(results,function(x){x$coef}),nrow=length(results[[1]]$coef)))/length(yvars)
names(avg_coef) <- names(results[[1]]$coef)
M <- length(yvars)
imputation_var <- ((M+1)/((M-1)*M)) * rowSums(matrix(sapply(results, function(x){x$coef - avg_coef})^2,nrow=length(avg_coef)))
res$coef <- avg_coef
sampling_var <- colSums(Reduce(rbind, variances))/length(yvars)
names(sampling_var) <- c(names(avg_coef), names(results[[1]]$vars))
res$se <- sqrt(sampling_var[1:length(imputation_var)] + imputation_var)
res$ICC <- tryCatch(sum(sapply(results,function(x){x$ICC}))/length(yvars),
error=function(cond) {
return(NA)
})
varsmat0 <- results[[1]]$varDF
m <- length(yvars)
varsmat0$vcov <- rowSums(matrix(sapply(results,function(x){x$varDF$vcov}),
nrow = nrow(results[[1]]$varDF)))/m
varsmat <- varsmat0[is.na(varsmat0$var2), c("level", "grp", "var1", "vcov", "SEvcov")]
varsmat$st <- sqrt(varsmat$vcov)
colnames(varsmat) <- c("Level", "Group", "Name", "Variance", "Std. Error", "Std.Dev.")
for(li in 2:max(varsmat0$level)) {
varVC <- lapply(results, function(x) {
vc <- as.matrix(x$varVC[[li]])
cr <- atanh(cov2cor(vc))
diag(cr) <- diag(vc)
return(cr)
})
varVC <- Reduce("+", varVC) / length(varVC)
cr <- tanh(varVC)
if(ncol(cr)>1) {
for(i in 2:ncol(cr)) {
for(j in 1:(i-1)){
varsmat[varsmat$Level==li & varsmat$Name==rownames(cr)[i],paste0("Corr",j)] <- cr[i,j]
}
}
}
}
res$varsmatSum <- varsmat
res$vars <- varsmat[,4:6]
rownames(res$vars) <- names(results[[1]]$vars)
colnames(res$vars) <- colnames(results[[1]]$varDF)[4:6]
res$varDF$vcov <- varsmat0$vcov
imputation_var_for_vars <- ((M+1)/((M-1)*M)) *
apply(sapply(results,function(x){x$varDF$vcov}), 1, function(x) { sum((x - mean(x))^2)})
sampling_var_for_vars <- apply(sapply(results, function(x) {x$varDF$SEvcov^2}), 1, mean)
res$varDF$SEvcov <- sqrt(sampling_var_for_vars + imputation_var_for_vars)
res$se <- c(res$se, sqrt(sampling_var_for_vars + imputation_var_for_vars))
res$Vimp <- c(imputation_var, imputation_var_for_vars)
res$Vjrr <- sampling_var
varn <- unlist(lapply(1:nrow(results[[1]]$varDF), function(ii) { paste(na.omit(unlist(results[[1]]$varDF[ii,1:3])), collapse=".") } ))
names(res$Vjrr) <- c(names(res$Vjrr)[1:(length(res$Vjrr)-length(varn))], varn)
names(res$Vimp) <- names(res$Vjrr)
names(res$se) <- names(res$Vjrr)
res$formula <- res$call$formula
}
res$npv <- length(yvars)
res$n0 <- rawN
res$nUsed <- nrow(edf)
ngrp <- res$varDF
ngrp <- ngrp[, c("grp", "ngrp", "level")]
ngrp <- ngrp[!duplicated(ngrp$level), ]
names(ngrp) <- c("Group Var","Observations","Level")
res$ngroups <- ngrp
nullOut <- c("ranefs", "theta", "invHessian", "is_adaptive", "sigma", "cov_mat",
"varDF", "varVC", "var_theta", "PVresults", "SE")
for(ni in 1:length(nullOut)) {
res[[nullOut[ni]]] <- NULL
}
class(res) <- "mixedSdfResults"
return(res)
}
run_mix <- function(nQuad, call, formula, edf, verbose, tolerance, family, center_group, center_grand, fast, ...){
verboseAll <- ifelse(verbose==2,TRUE,FALSE)
if(is.null(family)) {
res <- mix(formula, data=edf, weights=c("pwt1", "pwt2"), verbose = verboseAll, center_group=center_group, center_grand=center_grand, ...)
return(res)
}
if (!is.null(nQuad)) {
if(verbose > 0) {
message(sQuote("nQuad"), " argument is specified so ", sQuote("tolerance"), " argument will not be used. It's recommended that users try incrementing ", sQuote("nQuad"), " to check whether the estimates are stable. ")
}
res <- mix(formula, data=edf, weights=c("pwt1", "pwt2"), verbose = verboseAll, nQuad = nQuad, family=family, center_group=center_group, center_grand=center_grand, fast=fast, ...)
call$tolerance <- NULL
res$call <- call
return(res)
} else {
nQuad <- Inf
diff <- Inf
if (verbose>0) {
eout("Trying nQuad = ",nQuad,".")
}
res0 <- mix(formula, data=edf, weights=c("pwt1","pwt2"), verbose = verboseAll, nQuad = nQuad, family=family,
center_group=center_group, center_grand=center_grand, fast=fast, ...)
while(diff > tolerance) {
nQuad <- nQuad + 2
if (verbose>0) {
eout("Trying nQuad = ",nQuad,".")
}
res <- mix(formula, data=edf, weights=c("pwt1","pwt2"), verbose = verboseAll, nQuad = nQuad, family=family, center_group=center_group, center_grand=center_grand, fast=fast, ...)
diff <- abs(res$lnl - res0$lnl)/abs(res0$lnl)
res0 <- res
}
call$nQuad <- nQuad
res$call <- call
class(res) <- "mixedSdfResults"
return(res)
}
}
summary.mixedSdfResults <- function(object, ...) {
object$coef <- cbind(Estimate=object$coef, "Std. Error"=object$se[1:length(object$coef)], "t value"=object$coef/object$se[1:length(object$coef)])
object$vars <- object$varsmatSum
class(object) <- "summary.mixedSdfResults"
return(object)
}
print.summary.mixedSdfResults <- function(x, digits = max(3, getOption("digits") - 3), nsmall=2, ...) {
eout("Call:")
print(x$call)
cat("\n")
eout(paste0("Formula: ", paste(deparse(x$call$formula), collapse=""),"\n"))
if(x$npv>1){
cat("\n")
eout(paste0("Plausible Values: ", x$npv))
}
eout("Number of Groups:")
print(x$groupSum, digits=digits, nsmall=nsmall, row.names=FALSE, ...)
cat("\n")
eout("Variance terms:")
vars <- x$vars
cori <- 1
corvi <- paste0("Corr",cori)
while(corvi %in% colnames(vars)) {
vars[[corvi]] <- as.character(round(vars[[corvi]], 2))
cori <- cori + 1
corvi <- paste0("Corr",cori)
}
print(vars, na.print="", row.names=FALSE, digits=digits, nsmall=nsmall, ...)
cat("\n")
eout("Fixed Effects:")
printCoefmat(x$coef, digits=digits, nsmall=nsmall, ...)
if(x$npv==1){
cat("\n")
eout(paste0("lnl=", format(x$lnl, nsmall=2)))
}
if (!is.na(x$ICC)) {
if(x$npv!=1) {
cat("\n")
}
eout(paste0("Intraclass Correlation= ", format(x$ICC, nsmall=3, digits=3)))
}
}
vcov.mixedSdfResults <- function(object, ...) {
return(object$VC)
}
coef.mixedSdfResults <- function(object, ...) {
return(object$coef)
}
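## Hedged usage sketch for mixed.sdf(); the path, country, and variable names below
## are placeholders for illustration only and are not taken from the code above.
## sdf <- readTIMSS("~/TIMSS/2015", countries = "usa", gradeLvl = 4)
## m   <- mixed.sdf(mmat ~ itsex + (1 | idschool), data = sdf,
##                  weightVars = c("totwgt", "schwgt"))
## summary(m)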
|
expand_phenocam = function(data,
truncate = NULL,
internal = TRUE,
out_dir = tempdir()) {
if(class(data) != "phenocamr"){
if(file.exists(data)){
data = read_phenocam(data)
on_disk = TRUE
} else {
stop("not a valid PhenoCam data frame or file")
}
} else {
on_disk = FALSE
}
phenocam_data = contract_phenocam(data,
internal = TRUE,
no_padding = TRUE)$data
phenocam_dates = as.Date(phenocam_data$date)
max_date = max(phenocam_dates)
min_range = min(as.Date(phenocam_data$date)) - 90
max_range = max(as.Date(phenocam_data$date)) + 90
truncate_date = as.Date(ifelse(is.null(truncate),
max_date,
as.Date(sprintf("%s-12-31",truncate))),"1970-01-01")
if ( max_date > truncate_date ) {
phenocam_data = phenocam_data[which(as.Date(phenocam_data$date) <= truncate_date),]
phenocam_dates = as.Date(phenocam_data$date)
max_range = truncate_date + 90
}
all_dates = seq(as.Date(min_range), as.Date(max_range), "days")
all_years = as.integer(format(all_dates, "%Y"))
all_doy = as.integer(format(all_dates, "%j"))
all_dates = as.data.frame(as.character(all_dates))
colnames(all_dates) = "date"
output = merge(all_dates, phenocam_data, by = "date", all.x = TRUE)
output$date = as.character(output$date)
output$year = all_years
output$doy = all_doy
data$data = output
if(on_disk | !internal ){
write_phenocam(data, out_dir = out_dir)
} else {
class(data) = "phenocamr"
return(data)
}
}
|
logRegDeriv <- function(beta, Y, Z){
n <- dim(Z)[1]
p <- dim(Z)[2]
pro <- Z %*% beta
p <- exp(pro) / (1 + exp(pro))
G <- t(Z) %*% (p - Y)
return(list("dL" = -G))
}
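## Minimal numeric check of logRegDeriv() on a tiny simulated logistic design.
## All object names below (Z_demo, Y_demo, beta_demo) are made up for this sketch.
set.seed(1)
Z_demo    <- cbind(1, rnorm(10))                      # intercept + one covariate
beta_demo <- c(0, 0.5)
Y_demo    <- rbinom(10, 1, 1 / (1 + exp(-Z_demo %*% beta_demo)))
logRegDeriv(beta_demo, Y_demo, Z_demo)$dL             # length-2 gradient of the log-likelihood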
|
getLetters <- function(k) {
reps <- rep(LETTERS, round(k / length(LETTERS) + 1))[1:k]
prefix <- rep(c("", LETTERS), each = length(LETTERS))[1:k]
return(paste0(prefix, reps))
}
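## Quick illustration of getLetters(): plain letters first, prefixed pairs once k > 26.
getLetters(3)              # "A" "B" "C"
tail(getLetters(28), 2)    # "AA" "AB"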
generateSample <- function(N, k, distr = "gaussian") {
numericVec <- switch(distr,
"gaussian" = rnorm(N),
"exp" = rexp(N, 1),
"beta" = rbeta(N, 1, 1),
"binomial" = rep(0, N),
stop("Unknown distribution."))
kLetters <- getLetters(k)
factorVec <- as.factor(sample(kLetters,
size = N,
replace = TRUE))
for (i in 1:k) {
let <- kLetters[i]
if (distr == "binomial") {
numericVec[factorVec == let] <-
rbinom(length(numericVec[factorVec == let]), 1, runif(1))
} else {
randomShift <- sample(seq(0, 1, 0.1), size = 1)
numericVec[factorVec == let] <-
numericVec[factorVec == let] + randomShift * 0.1
randomShift <- sample(seq(0, 1, 0.1), size = 1)
}
}
generatedSample <- list(
factor = setIncreasingOrder(numericVec, factorVec),
response = numericVec)
class(generatedSample) <- append("generatedSample", class(generatedSample))
return(generatedSample)
}
generateMultivariateSample <- function(N, k, d = 2) {
tmp <- generateSample(N, k, "gaussian")
if (d > 1) {
res <- matrix(, nrow = N, ncol = d)
res[, 1] <- tmp$response
    for (j in 2:d) {
      # draw the base values once per column so every group keeps its own shift
      normal <- rnorm(N)
      for (i in 1:k) {
        randomShift <- sample(seq(0, 1, 0.1), size = 1)
        normal[tmp$factor == LETTERS[i]] <-
          normal[tmp$factor == LETTERS[i]] + randomShift
      }
      res[, j] <- normal
}
return(list(factor = tmp$factor, response = res))
} else {
return(tmp)
}
}
|
misclassificationPenalties <-
function(data=NULL, model=NULL, addCosts=NULL) {
if(is.null(data) || is.null(model)) {
stop("Need both data and model to calculate misclassification penalties!")
}
if(attr(model, "hasPredictions")) {
predictions = model$predictions
} else {
if(length(data$test) > 0) {
predictions = rbind.fill(lapply(data$test, function(x) {
data$data = data$data[x,]
data$best = data$best[x]
model(data)
}))
} else {
predictions = model(data)
}
}
optfun = if(data$minimize) { min } else { max }
if(is.null(data$algorithmFeatures)) {
perfs = data$data[data$performance]
} else {
d = data$data[c(data$ids, data$algos, data$performance)]
perfs = convertLongToWide(data=d, timevar=data$algos, idvar=data$ids, prefix=paste(data$performance,".",sep=""))
perfs = perfs[data$algorithmNames]
}
opts = apply(perfs, 1, optfun)
if(is.null(data$algorithmFeatures)) {
predictions$iid = match(do.call(paste, predictions[data$ids]), do.call(paste, data$data[data$ids]))
predictions$pid = match(predictions$algorithm, data$performance)
} else {
d = data$data[c(data$ids, data$algos, data$performance)]
d = convertLongToWide(data=d, timevar=data$algos, idvar=data$ids, prefix=paste(data$performance,".",sep=""), remove.id=FALSE)
predictions$iid = match(do.call(paste, predictions[data$ids]), do.call(paste, d[data$ids]))
predictions$pid = match(predictions$algorithm, data$algorithmNames)
}
predictions$score = apply(predictions, 1, function(x) {
pid = as.numeric(x[["pid"]])
if(is.na(pid)) {
0
} else {
iid = as.numeric(x[["iid"]])
as.numeric(abs(as.numeric(perfs[iid,pid]) - opts[iid]))
}
})
agg = aggregate(as.formula(paste("score~", paste(c(data$ids, "iteration"), sep="+", collapse="+"))), predictions, function(ss) { ss[1] })
agg$score
}
class(misclassificationPenalties) = "llama.metric"
attr(misclassificationPenalties, "minimize") = TRUE
|
fbDeleteAdAccountUsers <- function(user_ids = NULL,
accounts_id = getOption("rfacebookstat.accounts_id"),
api_version = getOption("rfacebookstat.api_version"),
username = getOption("rfacebookstat.username"),
token_path = fbTokenPath(),
access_token = getOption("rfacebookstat.access_token")){
if ( is.null(access_token) ) {
if ( Sys.getenv("RFB_API_TOKEN") != "" ) {
access_token <- Sys.getenv("RFB_API_TOKEN")
} else {
access_token <- fbAuth(username = username,
token_path = token_path)$access_token
}
}
if ( class(access_token) == "fb_access_token" ) {
access_token <- access_token$access_token
}
if(is.null(accounts_id)|is.null(access_token)){
stop("Arguments accounts_id and access_token is require.")
}
for(account_id in accounts_id){
for(uid in user_ids){
print(paste0("Account ",account_id))
QueryString <- paste0("https://graph.facebook.com/",api_version,"/",account_id,"/users/",uid,"?access_token=",access_token)
ans <- httr::DELETE(QueryString)
ans <- content(ans)
print(ans)
Sys.sleep(3)
}
}
}
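## Illustrative call with placeholder IDs and token (no real values):
## fbDeleteAdAccountUsers(user_ids     = c("111", "222"),
##                        accounts_id  = "act_000",
##                        access_token = "MY_TOKEN")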
|
expected <- eval(parse(text="TRUE"));
test(id=0, code={
argv <- eval(parse(text="list(c(TRUE, TRUE, NA), c(TRUE, TRUE, NA), TRUE, TRUE, TRUE, TRUE, FALSE)"));
.Internal(`identical`(argv[[1]], argv[[2]], argv[[3]], argv[[4]], argv[[5]], argv[[6]], argv[[7]]));
}, o=expected);
|
swSinglyCensoredGeneralGofTest <-
function (x, censored, censoring.side = "left", distribution,
est.arg.list)
{
if (!is.vector(x, mode = "numeric") || is.factor(x))
stop("'x' must be a numeric vector")
if (!((is.vector(censored, mode = "numeric") && !is.factor(censored)) ||
is.vector(censored, mode = "logical")))
stop("'censored' must be a logical or numeric vector")
if (length(censored) != length(x))
stop("'censored' must be the same length as 'x'")
data.name <- deparse(substitute(x))
censoring.name <- deparse(substitute(censored))
if ((bad.obs <- sum(!(ok <- is.finite(x) & is.finite(as.numeric(censored))))) >
0) {
is.not.finite.warning(x)
is.not.finite.warning(as.numeric(censored))
x <- x[ok]
censored <- censored[ok]
warning(paste(bad.obs, "observations with NA/NaN/Inf in 'x' and 'censored' removed."))
}
if (is.numeric(censored)) {
if (!all(censored == 0 | censored == 1))
stop(paste("When 'censored' is a numeric vector, all values of",
"'censored' must be 0 (not censored) or 1 (censored)."))
censored <- as.logical(censored)
}
est.fcn <- paste("e", distribution, "Censored", sep = "")
est.list <- do.call(est.fcn, c(list(x = x, censored = censored,
censoring.side = censoring.side), est.arg.list))
params <- est.list$parameters
Z <- do.call(paste("p", distribution, sep = ""), c(list(q = x),
as.list(params)))
Y <- qnorm(Z)
ret.list <- swSinglyCensoredGofTest(Y, censored)
ret.list$data <- x
ret.list$data.name <- data.name
ret.list$censored <- censored
ret.list$censoring.name <- censoring.name
ret.list$censoring.levels <- est.list$censoring.levels
ret.list$bad.obs <- bad.obs
ret.list$dist.abb <- distribution
ret.list$distribution <- EnvStats::Distribution.df[distribution,
"Name"]
ret.list$distribution.parameters <- params
ret.list$n.param.est <- length(params)
ret.list$estimation.method <- est.list$method
sep.string <- paste("\n", space(33), sep = "")
ret.list$alternative <- paste("True cdf does not equal the",
paste(ret.list$distribution, "Distribution."), sep = sep.string)
ret.list$method <- paste("Shapiro-Wilk GOF", "(Singly Censored Data)",
"Based on Chen & Balakrisnan (1995)", sep = sep.string)
ret.list
}
|
model_parameters.rma <- function(model,
ci = .95,
bootstrap = FALSE,
iterations = 1000,
standardize = NULL,
exponentiate = FALSE,
include_studies = TRUE,
verbose = TRUE,
...) {
ci_level <- parse(text = .safe_deparse(model$call))[[1]]$level
if (!is.null(ci_level) && missing(ci)) {
ci <- ci_level / 100
}
meta_analysis_overall <- .model_parameters_generic(
model = model,
ci = ci,
bootstrap = bootstrap,
iterations = iterations,
merge_by = "Parameter",
standardize = standardize,
exponentiate = exponentiate,
...
)
subgroups <- NULL
group_variable <- NULL
if (!is.null(model$formula.mods)) {
group_variable <- deparse(model$formula.mods[[2]])[1]
model_data <- insight::get_data(model)
if (group_variable %in% colnames(model_data)) {
subgroups <- sort(unique(model_data[[group_variable]]))
}
}
if (nrow(meta_analysis_overall) > 1 && !is.null(subgroups)) {
meta_analysis_overall$Subgroup <- subgroups
meta_analysis_overall$Parameter <- "(Intercept)"
}
alpha <- (1 + ci) / 2
rma_parameters <- if (!is.null(model$slab) && !is.numeric(model$slab)) {
sprintf("%s", model$slab)
} else {
sprintf("Study %i", 1:model[["k"]])
}
if (!is.null(model$yi.f) && anyNA(model$yi.f)) {
rma_parameters <- rma_parameters[match(model$yi, model$yi.f)]
}
rma_coeffients <- as.vector(model$yi)
rma_se <- as.vector(sqrt(model$vi))
rma_ci_low <- rma_coeffients - rma_se * stats::qt(alpha, df = Inf)
rma_ci_high <- rma_coeffients + rma_se * stats::qt(alpha, df = Inf)
rma_statistic <- rma_coeffients / rma_se
rma_ci_p <- 2 * stats::pt(abs(rma_statistic), df = Inf, lower.tail = FALSE)
meta_analysis_studies <- data.frame(
Parameter = rma_parameters,
Coefficient = rma_coeffients,
SE = rma_se,
CI = ci,
CI_low = rma_ci_low,
CI_high = rma_ci_high,
z = rma_statistic,
df_error = NA,
p = rma_ci_p,
Weight = 1 / as.vector(model$vi),
stringsAsFactors = FALSE
)
if (!is.null(subgroups)) {
meta_analysis_studies$Subgroup <- insight::get_data(model, verbose = FALSE)[[group_variable]]
}
original_attributes <- attributes(meta_analysis_overall)
out <- merge(meta_analysis_studies, meta_analysis_overall, all = TRUE, sort = FALSE)
out$Parameter[out$Parameter == "(Intercept)"] <- "Overall"
if (isFALSE(include_studies)) {
out <- out[out$Parameter == "Overall", ]
}
original_attributes$names <- names(out)
original_attributes$row.names <- 1:nrow(out)
original_attributes$pretty_names <- stats::setNames(out$Parameter, out$Parameter)
attributes(out) <- original_attributes
out$df_error <- NULL
attr(out, "object_name") <- .safe_deparse(substitute(model))
attr(out, "measure") <- model$measure
if (!"Method" %in% names(out)) {
out$Method <- "Meta-analysis using 'metafor'"
}
attr(out, "title") <- unique(out$Method)
out
}
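## Hedged usage sketch: these methods dispatch on 'rma' objects from metafor,
## e.g. a toy fit on made-up effect sizes and sampling variances:
## m <- metafor::rma(yi = c(0.21, 0.45, 0.30), vi = c(0.02, 0.04, 0.03))
## model_parameters(m)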
p_value.rma <- function(model, ...) {
params <- insight::get_parameters(model)
.data_frame(
Parameter = .remove_backticks_from_string(params$Parameter),
p = model$pval
)
}
ci.rma <- function(x, ci = .95, ...) {
params <- insight::get_parameters(x)
out <- tryCatch(
{
tmp <- lapply(ci, function(i) {
model <- stats::update(x, level = i)
.data_frame(
Parameter = params$Parameter,
CI = i,
CI_low = as.vector(model$ci.lb),
CI_high = as.vector(model$ci.ub)
)
})
.remove_backticks_from_parameter_names(do.call(rbind, tmp))
},
error = function(e) {
NULL
}
)
if (is.null(out)) {
se <- standard_error(x)
out <- lapply(ci, function(i) {
alpha <- (1 + i) / 2
fac <- stats::qnorm(alpha)
.data_frame(
Parameter = params$Parameter,
CI = i,
CI_low = params$Estimate - as.vector(se$SE) * fac,
CI_high = params$Estimate + as.vector(se$SE) * fac
)
})
out <- .remove_backticks_from_parameter_names(do.call(rbind, out))
}
out
}
standard_error.rma <- function(model, ...) {
params <- insight::get_parameters(model)
.data_frame(
Parameter = .remove_backticks_from_string(params$Parameter),
SE = model[["se"]]
)
}
format_parameters.rma <- function(model, ...) {
params <- insight::find_parameters(model, flatten = TRUE)
names(params) <- params
params
}
|
clust.cond.info <- function (x = NULL,
plot.type = "pie",
my.out.put = "data",
normalize.ncell = TRUE,
normalize.by = "percentage") {
if ("iCellR" != class(x)[1]) {
stop("x should be an object of class iCellR")
}
Cells <- colnames([email protected])
MYConds <- as.character((unique(data.frame(do.call('rbind', strsplit(as.character(Cells),'_',fixed=TRUE)))[1]))$X1)
if (length(MYConds) == 1) {
stop("You need more then one condition/sample to run this function")
}
if (length(MYConds) == 0) {
stop("You need more then one condition/sample to run this function")
}
DATA <- ([email protected])
Conds <- (as.data.frame(do.call("rbind", strsplit(row.names(DATA), "_")))[1])
ForNorm1 <- as.data.frame(table(Conds))
ForNorm <- min(ForNorm1$Freq)
SizeFactors <- round(ForNorm1$Freq/ForNorm,3)
ForNorm1$SF <- SizeFactors
clusts <- (as.data.frame(DATA$clusters))
cond.clust <- cbind(Conds, clusts)
colnames(cond.clust) <- c("conditions","clusters")
Conds <- as.character(ForNorm1$Conds)
My.Conds.data <- cond.clust
DATA <- as.data.frame(table(cond.clust))
Freq <- DATA$Freq
colnames(ForNorm1) <- c("conditions","TC","SF")
DATA <- merge(ForNorm1,DATA,by="conditions")
DATA$Norm.Freq <- round(DATA$Freq/DATA$SF,3)
DATA$percentage <- round((DATA$Freq/DATA$TC)*100,2)
myBP <- ggplot(DATA,aes(y=Freq, x=conditions, fill = conditions)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ clusters, scales = "free")
myBP2 <- ggplot(DATA,aes(y=Freq, x=conditions, fill = clusters)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ conditions, scales = "free")
myPIE <- ggplot(DATA,aes(y=Freq, x="", fill = conditions)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ clusters) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
myPIE2 <- ggplot(DATA,aes(y=Freq, x="", fill = clusters)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ conditions) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
if (normalize.ncell == TRUE) {
if(normalize.by == "sf") {
myBP <- ggplot(DATA,aes(y=Norm.Freq, x=conditions, fill = conditions)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ clusters, scales = "free")
myBP2 <- ggplot(DATA,aes(y=Norm.Freq, x=conditions, fill = clusters)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ conditions, scales = "free")
myPIE <- ggplot(DATA,aes(y=Norm.Freq, x="", fill = conditions)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ clusters) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
myPIE2 <- ggplot(DATA,aes(y=Norm.Freq, x="", fill = clusters)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ conditions) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
}
if (normalize.by == "percentage") {
myBP <- ggplot(DATA,aes(y=percentage, x=conditions, fill = conditions)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ clusters, scales = "free")
myBP2 <- ggplot(DATA,aes(y=percentage, x=conditions, fill = clusters)) +
geom_bar(stat = "identity") + theme_bw() + theme(axis.text.x=element_text(angle=90)) + facet_wrap(~ conditions, scales = "free")
myPIE <- ggplot(DATA,aes(y=percentage, x="", fill = conditions)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ clusters) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
myPIE2 <- ggplot(DATA,aes(y=percentage, x="", fill = clusters)) +
geom_bar(stat = "identity", position = "fill") + theme_bw() + facet_wrap(~ conditions) +
theme(axis.title.y=element_blank(),
axis.text.y=element_blank(),
axis.ticks.y=element_blank()) + coord_polar(theta="y")
}
}
if (my.out.put == "plot") {
if (plot.type == "bar") {
return(myBP)
}
if (plot.type == "bar.cond") {
return(myBP2)
}
if (plot.type == "pie") {
return(myPIE)
}
if (plot.type == "pie.cond") {
return(myPIE2)
}
}
if (my.out.put == "data") {
attributes(x)$my.freq <- DATA
return(x)
}
}
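## Hedged usage sketch ('my.obj' is a placeholder iCellR object with clusters assigned):
## my.obj <- clust.cond.info(my.obj, plot.type = "pie", my.out.put = "data")
## head(attributes(my.obj)$my.freq)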
|
"unbal.diet.data"
|
context("Testing performance of ei_propreprocessing functions")
test_that("dedupe_precincts handles cases correctly", {
input <- data.frame("p" = c(1, 2), "e" = c(1, 1))
expected <- input
output <- suppressMessages(dedupe_precincts(input, "p"))
expect_equal(output, expected)
expect_message(dedupe_precincts(input, "p"))
input$p[2] <- 1
expected <- input[1, ]
output <- suppressMessages(dedupe_precincts(input, "p"))
expect_equal(output, expected)
input[1, 2] <- 2
expected <- cbind(input, data.frame("duplicate" = c(TRUE, TRUE)))
output <- suppressWarnings(dedupe_precincts(input, "p"))
expect_equal(output, expected)
})
test_that("resolve_missing_vals() handles cases correctly", {
input <- data.frame(
"x" = rep(1, 3),
"y" = rep(1, 3),
"t" = rep(1, 3)
)
cand_cols <- c("x")
race_cols <- c("y")
totals_col <- "t"
expected <- input
output <- resolve_missing_vals(
data = input,
cand_cols = cand_cols,
race_cols = race_cols,
totals_col = totals_col,
verbose = FALSE
)
expect_equal(output, expected)
input$y[1] <- NA
output <- resolve_missing_vals(
data = input,
cand_cols = cand_cols,
race_cols = race_cols,
totals_col = totals_col,
na_action = "mean",
verbose = FALSE
)
expect_equal(output, expected)
input$x[1] <- NA
expected <- expected[-1, ]
output <- resolve_missing_vals(
data = input,
cand_cols = cand_cols,
race_cols = race_cols,
totals_col = totals_col,
verbose = FALSE
)
expect_equal(output, expected)
})
test_that("standardize_votes() returns correct results", {
votes <- empty_ei_df(2, 0, 2)
votes$c1 <- c(1, 1)
votes$c2 <- c(1, 1)
totals <- c(2, 2)
expected <- data.frame(
"c1_prop" = c(0.5, 0.5),
"c2_prop" = c(0.5, 0.5),
"total" = c(2, 2)
)
expect_equal(standardize_votes(votes, new_names = TRUE), expected)
})
test_that("check_diffs() gets conditions right", {
vote_sums <- rep(1, 5)
provided_totals <- rep(1, 5)
max_dev <- 0.1
avg_dev <- 0.025
res <- check_diffs(
vote_sums,
provided_totals,
max_dev,
avg_dev
)
expect_equal(res$closeness, 2)
expect_equal(res$deviates, rep(FALSE, 5))
vote_sums[1] <- 1.05
res <- check_diffs(
vote_sums,
provided_totals,
max_dev,
avg_dev
)
expect_equal(res$closeness, 1)
expect_equal(res$deviates, c(TRUE, rep(FALSE, 4)))
vote_sums[1] <- 1.11
res <- check_diffs(
vote_sums,
provided_totals,
max_dev,
avg_dev
)
expect_equal(res$closeness, 0)
expect_equal(res$deviates, c(TRUE, rep(FALSE, 4)))
vote_sums <- rep(1.03, 5)
res <- check_diffs(
vote_sums,
provided_totals,
max_dev,
avg_dev
)
expect_equal(res$closeness, 0)
expect_equal(res$deviates, rep(TRUE, 5))
max_dev <- -0.3
expect_error(check_diffs(
vote_sums, provided_totals, max_dev, avg_dev
))
max_dev <- 0.1
avg_dev <- -0.3
expect_error(check_diffs(
vote_sums, provided_totals, max_dev, avg_dev
))
max_dev <- 0
avg_dev <- 0
res <- check_diffs(
vote_sums,
provided_totals,
max_dev,
avg_dev
)
expect_equal(res$closeness, 0)
})
test_that("stdize_votes() handles all cases", {
df <- empty_ei_df()
df$r1 <- 1
df$r2 <- 1
df$t <- 2
res <- stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = FALSE,
diagnostic = FALSE
)
expected <- data.frame(
"r1_prop" = c(0.5, 0.5),
"r2_prop" = c(0.5, 0.5),
"total" = c(2, 2)
)
expect_equal(res, expected)
expect_message(
stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = TRUE,
diagnostic = FALSE
)
)
res <- stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = F,
diagnostic = T
)
expected <- data.frame(
"r1_prop" = c(0.5, 0.5),
"r2_prop" = c(0.5, 0.5),
"total" = c(2, 2),
"deviates" = c(FALSE, FALSE)
)
expect_equal(res, expected)
df$r1[1] <- 0.99
df$r1[2] <- 1.01
df$r2[1] <- 1.01
expect_message(
stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = TRUE,
diagnostic = FALSE
)
)
res <- stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = TRUE,
diagnostic = FALSE
)
r11 <- 0.99 / (0.99 + 1.01)
r12 <- 1.01 / (1 + 1.01)
r21 <- 1.01 / (0.99 + 1.01)
r22 <- 1 / (1 + 1.01)
expected <- data.frame(
"r1_prop" = c(r11, r12),
"r2_prop" = c(r21, r22),
"total" = c(2.00, 2.01)
)
expect_equal(expected, res)
df$r2 <- 1
df$r1 <- c(10, 1)
expect_warning(
stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = TRUE,
diagnostic = FALSE
)
)
res <- suppressWarnings({
stdize_votes(
data = df,
cols = c("r1", "r2"),
totals_col = "t",
new_names = TRUE,
verbose = FALSE,
diagnostic = FALSE
)
})
expected <- data.frame("deviates" = c(TRUE, FALSE))
expect_equal(res, expected)
})
test_that("stdize_votes_all() handles all cases", {
df <- empty_ei_df()
df[1, ] <- 1
df[2, ] <- 1
res <- stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
new_names = TRUE
)
expected <- data.frame(
"c1_prop" = rep(0.5, 2),
"c2_prop" = rep(0.5, 2),
"r1_prop" = rep(0.5, 2),
"r2_prop" = rep(0.5, 2),
"total" = c(2, 2)
)
expect_equal(res, expected)
df$c1[1] <- 9
res <- suppressWarnings({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
new_names = TRUE
)
})
expected <- data.frame(
"c1_prop" = c(0.9, 0.5),
"c2_prop" = c(0.1, 0.5),
"total" = c(10, 2),
"race_deviates" = c(TRUE, FALSE)
)
expect_equal(res, expected)
res <- suppressWarnings({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
totals_from = "race",
new_names = TRUE
)
})
expected <- data.frame(
"r1_prop" = c(0.5, 0.5),
"r2_prop" = c(0.5, 0.5),
"total" = c(2, 2),
"cand_deviates" = c(TRUE, FALSE)
)
expect_equal(res, expected)
df$c1[1] <- 1
df$r1[1] <- 9
res <- suppressWarnings({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
totals_from = "race",
new_names = TRUE
)
})
expected <- data.frame(
"r1_prop" = c(0.9, 0.5),
"r2_prop" = c(0.1, 0.5),
"total" = c(10, 2),
"cand_deviates" = c(TRUE, FALSE)
)
expect_equal(res, expected)
res <- suppressWarnings({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
totals_from = "cand",
new_names = TRUE
)
})
expected <- data.frame(
"c1_prop" = c(0.5, 0.5),
"c2_prop" = c(0.5, 0.5),
"total" = c(2, 2),
"race_deviates" = c(TRUE, FALSE)
)
expect_equal(res, expected)
df$r1 <- 1
df$t <- 2
res <- suppressMessages({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
totals_col = "t",
new_names = TRUE
)
})
expected <- data.frame(
"c1_prop" = rep(0.5, 2),
"c2_prop" = rep(0.5, 2),
"r1_prop" = rep(0.5, 2),
"r2_prop" = rep(0.5, 2),
"total" = c(2, 2)
)
expect_equal(res, expected)
df$t[1] <- 10
res <- suppressWarnings({
stdize_votes_all(
data = df,
race_cols = c("r1", "r2"),
cand_cols = c("c1", "c2"),
totals_col = "t",
new_names = TRUE
)
})
expected <- data.frame(
"cand_deviates" = c(TRUE, FALSE),
"race_deviates" = c(TRUE, FALSE)
  )
  expect_equal(res, expected)
})
|
xx_mod_ui <- function(id) {
ns <- NS(id)
tagList(
sidebarPanel(
selectInput(ns("xcol"), "X Variable", names(iris)),
selectInput(ns("ycol"), "Y Variable", names(iris),
selected = names(iris)[[2]]
),
numericInput(ns("clusters"), "Cluster count", 3,
min = 1, max = 9
)
),
mainPanel(
plotOutput(ns("plot1"))
)
)
}
xx_mod_server <- function(input, output, session) {
selectedData <- reactive({
iris[, c(input$xcol, input$ycol)]
})
clusters <- reactive({
kmeans(selectedData(), input$clusters)
})
output$plot1 <- renderPlot({
palette(c(
"
"
))
par(mar = c(5.1, 4.1, 0, 1))
plot(selectedData(),
col = clusters()$cluster,
pch = 20, cex = 3
)
points(clusters()$centers, pch = 4, cex = 4, lwd = 4)
})
}
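## Hedged wiring sketch for this module (app-level code, shown commented out;
## the module id "iris_kmeans" is illustrative):
## shiny::shinyApp(
##   ui = shiny::fluidPage(xx_mod_ui("iris_kmeans")),
##   server = function(input, output, session) {
##     shiny::callModule(xx_mod_server, "iris_kmeans")
##   }
## )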
|
library(gemma2)
context("Testing calc_qi")
as.matrix(readr::read_tsv(system.file("extdata", "mouse100.cXX.txt", package = "gemma2"), col_names = FALSE)[, 1:100]) -> kinship
eigen2(kinship) -> e2_out
e2_out$values -> eval
e2_out$vectors -> U
eigen_proc(V_g = diag(c(1.91352, 0.530827)), V_e = diag(c(0.320028, 0.561589))) -> ep_out
calc_qi(eval = eval, D_l = ep_out[[4]], X = t(rep(1, 100)) %*% U) -> cq_out
test_that("logdetVe and Qi match that from GEMMAv0.97 for intercept-only model",{
expect_equal(cq_out[[1]], diag(rep(0.01, 2)), tolerance = 0.0001)
expect_equal(cq_out[[2]], 9.21034, tolerance = 0.00001)
})
|
workerCommand <- function(machine, options, setup_strategy = "sequential")
{
outfile <- getClusterOption("outfile", options)
master <- if (machine == "localhost") "localhost"
else getClusterOption("master", options)
port <- getClusterOption("port", options)
setup_timeout <- getClusterOption("setup_timeout", options)
manual <- getClusterOption("manual", options)
timeout <- getClusterOption("timeout", options)
methods <- getClusterOption("methods", options)
useXDR <- getClusterOption("useXDR", options)
homogeneous <- getClusterOption("homogeneous", options)
env <- paste0("MASTER=", master,
" PORT=", port,
" OUT=", shQuote(outfile),
" SETUPTIMEOUT=", setup_timeout,
" TIMEOUT=", timeout,
" XDR=", useXDR,
" SETUPSTRATEGY=", setup_strategy)
arg <- "tryCatch(parallel:::.workRSOCK,error=function(e)parallel:::.slaveRSOCK)()"
rscript <-
if (homogeneous) shQuote(getClusterOption("rscript", options)) else "Rscript"
rscript_args <- getClusterOption("rscript_args", options)
if(methods)
rscript_args <-c("--default-packages=datasets,utils,grDevices,graphics,stats,methods",
rscript_args)
cmd <- paste(rscript,
if(length(rscript_args)) paste(rscript_args, collapse = " "),
"-e", shQuote(arg), env)
renice <- getClusterOption("renice", options)
if(!is.na(renice) && renice)
cmd <- sprintf("nice +%d %s", as.integer(renice), cmd)
if (!manual && machine != "localhost") {
rshcmd <- getClusterOption("rshcmd", options)
user <- getClusterOption("user", options)
cmd <- paste(rshcmd,
if(length(user) == 1L) paste("-l", user),
machine, shQuote(cmd))
}
cmd
}
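## Illustrative shape of the command this helper assembles (paths, port and values
## below are examples only, not output captured from a real run):
## 'Rscript' --default-packages=datasets,utils,grDevices,graphics,stats,methods \
##   -e 'tryCatch(parallel:::.workRSOCK,error=function(e)parallel:::.slaveRSOCK)()' \
##   MASTER=localhost PORT=11234 OUT=/dev/null SETUPTIMEOUT=120 TIMEOUT=2592000 XDR=TRUE SETUPSTRATEGY=sequential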
newPSOCKnode <- function(machine = "localhost", ...,
options = defaultClusterOptions, rank)
{
options <- addClusterOptions(options, list(...))
if (is.list(machine)) {
options <- addClusterOptions(options, machine)
machine <- machine$host
}
port <- getClusterOption("port", options)
manual <- getClusterOption("manual", options)
timeout <- getClusterOption("timeout", options)
useXDR <- getClusterOption("useXDR", options)
cmd <- workerCommand(machine, options)
if (manual) {
cat("Manually start worker on", machine, "with\n ", cmd, "\n")
utils::flush.console()
} else {
if (.Platform$OS.type == "windows") {
system(cmd, wait = FALSE, input = "")
}
else {
cmd <- paste("R_HOME=", cmd)
system(cmd, wait = FALSE)
}
}
con <- socketConnection("localhost", port = port, server = TRUE,
blocking = TRUE, open = "a+b", timeout = timeout)
structure(list(con = con, host = machine, rank = rank),
class = if(useXDR) "SOCKnode" else "SOCK0node")
}
closeNode.SOCKnode <- closeNode.SOCK0node <- function(node) close(node$con)
sendData.SOCKnode <- function(node, data) serialize(data, node$con)
sendData.SOCK0node <- function(node, data) serialize(data, node$con, xdr = FALSE)
recvData.SOCKnode <- recvData.SOCK0node <- function(node) unserialize(node$con)
recvOneData.SOCKcluster <- function(cl)
{
socklist <- lapply(cl, function(x) x$con)
repeat {
ready <- socketSelect(socklist)
if (length(ready) > 0) break;
}
n <- which.max(ready)
list(node = n, value = unserialize(socklist[[n]]))
}
makePSOCKcluster <- function(names, ...)
{
options <- addClusterOptions(defaultClusterOptions, list(...))
manual <- getClusterOption("manual", options)
homogeneous <- getClusterOption("homogeneous", options)
setup_strategy <- match.arg(getClusterOption("setup_strategy",
options),
c("sequential", "parallel"))
setup_timeout <- getClusterOption("setup_timeout", options)
local <- is.numeric(names) || (is.character(names) &&
identical(names, rep('localhost', length(names))))
if (is.numeric(names)) {
names <- as.integer(names[1L])
if(is.na(names) || names < 1L) stop("numeric 'names' must be >= 1")
names <- rep('localhost', names)
}
.check_ncores(length(names))
cl <- vector("list", length(names))
if (!manual && homogeneous && local && setup_strategy == "parallel") {
port <- getClusterOption("port", options)
timeout <- getClusterOption("timeout", options)
useXDR <- getClusterOption("useXDR", options)
cmd <- workerCommand("localhost", options,
setup_strategy = "parallel" )
socket <- serverSocket(port = port)
on.exit(close(socket), add = TRUE)
if (.Platform$OS.type == "windows") {
for(i in seq_along(cl))
system(cmd, wait = FALSE, input = "")
} else {
cmd <- paste(rep(cmd, length(cl)), collapse = " & ")
system(cmd, wait = FALSE)
}
cls <- if(useXDR) "SOCKnode" else "SOCK0node"
ready <- 0
pending <- list()
on.exit(lapply(pending, function(x) close(x$con)), add = TRUE)
t0 <- Sys.time()
while (ready < length(cl)) {
cons <- lapply(pending, function(x) x$con)
if (difftime(Sys.time(), t0, units="secs") > setup_timeout + 5) {
failed <- length(cl) - ready
msg <- sprintf(ngettext(failed,
"Cluster setup failed. %d worker of %d failed to connect.",
"Cluster setup failed. %d of %d workers failed to connect."),
failed, length(cl))
stop(msg)
}
a <- socketSelect(append(list(socket), cons), FALSE,
timeout = setup_timeout)
canAccept <- a[1]
canReceive <- seq_along(pending)[a[-1]]
if (canAccept) {
con <- socketAccept(socket = socket, blocking = TRUE,
open = "a+b", timeout = timeout)
scon <- structure(list(con = con, host = "localhost",
rank = ready), class = cls)
tryCatch({ sendCall(scon, eval, list(quote(Sys.getpid()))) },
error = identity)
pending <- append(pending, list(scon))
}
for (scon in pending[canReceive]) {
pid <- tryCatch({ recvResult(scon) }, error = identity)
if (is.integer(pid)) {
ready <- ready + 1
cl[[ready]] <- scon
} else
close(scon$con)
}
if (length(canReceive) > 0)
pending <- pending[-canReceive]
}
} else {
for (i in seq_along(cl))
cl[[i]] <- newPSOCKnode(names[[i]], options = options, rank = i)
}
class(cl) <- c("SOCKcluster", "cluster")
cl
}
print.SOCKcluster <- function(x, ...)
{
nc <- length(x)
hosts <- unique(sapply(x, `[[`, "host"))
msg <- sprintf(ngettext(length(hosts),
"socket cluster with %d nodes on host %s",
"socket cluster with %d nodes on hosts %s"),
nc, paste(sQuote(hosts), collapse = ", "))
cat(msg, "\n", sep = "")
invisible(x)
}
print.SOCKnode <- print.SOCK0node <- function(x, ...)
{
sendCall(x, eval, list(quote(Sys.getpid())))
pid <- recvResult(x)
msg <- gettextf("node of a socket cluster on host %s with pid %d",
sQuote(x[["host"]]), pid)
cat(msg, "\n", sep = "")
invisible(x)
}
.workRSOCK <- function()
{
makeSOCKmaster <- function(master, port, setup_timeout, timeout, useXDR,
setup_strategy)
{
port <- as.integer(port)
timeout <- as.integer(timeout)
stopifnot(setup_timeout >= 0)
cls <- if(useXDR) "SOCKnode" else "SOCK0node"
retryDelay <- 0.1
retryScale <- 1.5
t0 <- Sys.time()
scon_timeout <- 1
repeat {
if (setup_strategy == "parallel")
scon_timeout <- scon_timeout + 0.2
else
scon_timeout <- timeout
con <- tryCatch({
socketConnection(master, port = port, blocking = TRUE,
open = "a+b",
timeout = as.integer(scon_timeout))
}, error = identity)
hres <- NULL
if (inherits(con, "connection")) {
scon <- structure(list(con = con), class = cls)
if (setup_strategy == "sequential")
return(scon)
hres <- tryCatch({ workCommand(scon) }, error = identity)
if (identical(hres, TRUE)) {
if (setup_strategy == "parallel")
socketTimeout(socket = con, timeout = timeout)
return(scon)
} else if (identical(hres, FALSE)) {
return(NULL)
} else
close(con)
}
if (difftime(Sys.time(), t0, units="secs") > setup_timeout) {
if (inherits(hres, "error"))
stop(hres)
if (inherits(con, "error"))
stop(con)
stop("Connection setup failed or timed out.")
}
Sys.sleep(retryDelay)
retryDelay <- retryScale * retryDelay
}
}
master <- "localhost"
port <- NA_integer_
outfile <- Sys.getenv("R_SNOW_OUTFILE")
setup_timeout <- 120
timeout <- 2592000L
useXDR <- TRUE
setup_strategy <- "sequential"
for (a in commandArgs(TRUE)) {
pos <- regexpr("=", a)
name <- substr(a, 1L, pos - 1L)
value <- substr(a, pos + 1L, nchar(a))
switch(name,
MASTER = {master <- value},
PORT = {port <- value},
OUT = {outfile <- value},
SETUPTIMEOUT = {setup_timeout <- as.numeric(value)},
TIMEOUT = {timeout <- value},
XDR = {useXDR <- as.logical(value)},
SETUPSTRATEGY = {
setup_strategy <- match.arg(value,
c("sequential", "parallel"))
})
}
if (is.na(port)) stop("PORT must be specified")
sinkWorkerOutput(outfile)
msg <- sprintf("starting worker pid=%d on %s at %s\n",
Sys.getpid(), paste(master, port, sep = ":"),
format(Sys.time(), "%H:%M:%OS3"))
cat(msg)
workLoop(makeSOCKmaster(master, port, setup_timeout, timeout, useXDR,
setup_strategy))
}
|
reconstructLinearEffects <-
function (noia.multilinear)
{
if (class(noia.multilinear) != "noia.multilinear") {
stop("Object of class \"multilinear\" expected\n")
}
a <- noia::effectsNames[2]
d <- noia::effectsNames[3]
e <- noia::effectsNames[4]
meff <- noia.multilinear$E
mstd <- noia.multilinear$std.err
nloc <- noia.multilinear$nloc
ans.effects <- rep(0, 3^nloc)
ans.stderr <- rep(0, 3^nloc)
names(ans.effects) <- effectsNamesGeneral(noia.multilinear$nloc)
names(ans.stderr) <- effectsNamesGeneral(noia.multilinear$nloc)
ans.effects[effNames(nloc = nloc)] <- meff[effNames(nloc = nloc)]
ans.stderr[effNames(nloc = nloc)] <- mstd[effNames(nloc = nloc)]
for (l1 in 1:nloc) {
add <- meff[effNames(c(a), c(l1), nloc)]
dom <- meff[effNames(c(d), c(l1), nloc)]
std.add <- mstd[effNames(c(a), c(l1), nloc)]
std.dom <- mstd[effNames(c(d), c(l1), nloc)]
ans.effects[effNames(c(a), c(l1), nloc)] <- add
ans.effects[effNames(c(d), c(l1), nloc)] <- dom
ans.stderr[effNames(c(a), c(l1), nloc)] <- std.add
ans.stderr[effNames(c(d), c(l1), nloc)] <- std.dom
}
if (nloc > 1) {
for (l1 in 1:(nloc - 1)) {
for (l2 in (l1 + 1):nloc) {
a1 <- meff[effNames(c(a), c(l1), nloc)]
a2 <- meff[effNames(c(a), c(l2), nloc)]
d1 <- meff[effNames(c(d), c(l1), nloc)]
d2 <- meff[effNames(c(d), c(l2), nloc)]
ee <- meff[effNames(c(e, e), c(l1, l2), nloc)]
cv2.a1 <- ((mstd[effNames(c(a), c(l1), nloc)])/(meff[effNames(c(a),
c(l1), nloc)]))^2
cv2.a2 <- ((mstd[effNames(c(a), c(l2), nloc)])/(meff[effNames(c(a),
c(l2), nloc)]))^2
cv2.d1 <- ((mstd[effNames(c(d), c(l1), nloc)])/(meff[effNames(c(d),
c(l1), nloc)]))^2
cv2.d2 <- ((mstd[effNames(c(d), c(l2), nloc)])/(meff[effNames(c(d),
c(l2), nloc)]))^2
cv2.ee <- ((mstd[effNames(c(e, e), c(l1, l2),
nloc)])/(meff[effNames(c(e, e), c(l1, l2),
nloc)]))^2
ans.effects[effNames(c(a, a), c(l1, l2), nloc)] <- a1 *
a2 * ee
ans.effects[effNames(c(a, d), c(l1, l2), nloc)] <- a1 *
d2 * ee
ans.effects[effNames(c(d, a), c(l1, l2), nloc)] <- d1 *
a2 * ee
ans.effects[effNames(c(d, d), c(l1, l2), nloc)] <- d1 *
d2 * ee
ans.stderr[effNames(c(a, a), c(l1, l2), nloc)] <- sqrt(((a1 *
a2 * ee)^2) * (cv2.a1 + cv2.a2 + cv2.ee + cv2.a1 *
cv2.a2 + cv2.a1 * cv2.ee + cv2.a2 * cv2.ee +
cv2.a1 * cv2.a2 + cv2.ee))
ans.stderr[effNames(c(a, d), c(l1, l2), nloc)] <- sqrt(((a1 *
d2 * ee)^2) * (cv2.a1 + cv2.d2 + cv2.ee + cv2.a1 *
cv2.d2 + cv2.a1 * cv2.ee + cv2.d2 * cv2.ee +
cv2.a1 * cv2.d2 + cv2.ee))
ans.stderr[effNames(c(d, a), c(l1, l2), nloc)] <- sqrt(((d1 *
a2 * ee)^2) * (cv2.d1 + cv2.a2 + cv2.ee + cv2.d1 *
cv2.a2 + cv2.d1 * cv2.ee + cv2.a2 * cv2.ee +
cv2.d1 * cv2.a2 + cv2.ee))
ans.stderr[effNames(c(d, d), c(l1, l2), nloc)] <- sqrt(((d1 *
d2 * ee)^2) * (cv2.d1 + cv2.d2 + cv2.ee + cv2.d1 *
cv2.d2 + cv2.d1 * cv2.ee + cv2.d2 * cv2.ee +
cv2.d1 * cv2.d2 + cv2.ee))
}
}
}
ans.effects <- ans.effects[colnames(noia.multilinear$smat)]
ans.stderr <- ans.stderr[colnames(noia.multilinear$smat)]
return(cbind(ans.effects, ans.stderr))
}
|
generateRandomStartStates <- function(network, n)
{
mat <- matrix(nrow=n,ncol=length(network$genes))
fixedPositions <- which(network$fixed != -1)
nonFixedPositions <- which(network$fixed == -1)
if (n > (2 ^ length(nonFixedPositions)))
stop("The number of states to generate exceeds the total number of possible states!")
if (length(fixedPositions) != 0)
mat[,fixedPositions] <- sapply(fixedPositions,function(x)
rep(network$fixed[x],n))
if (n != 2 ^ length(nonFixedPositions))
{
mat[,nonFixedPositions] <- round(runif(n=n*length(nonFixedPositions)))
}
else
{
mat[,nonFixedPositions] <- allcombn(2,length(nonFixedPositions)) - 1
}
mat <- unique(mat)
while (nrow(mat) != n)
{
vec <- rep(0,length(network$genes))
if (length(fixedPositions) != 0)
vec[fixedPositions] <- sapply(fixedPositions,
function(x)network$fixed[x])
vec[nonFixedPositions] <- round(runif(n=length(nonFixedPositions)))
mat <- unique(rbind(mat,vec))
}
res <- lapply(1:nrow(mat),function(i)
{
mat[i,]
})
return(res);
}
|
print.ebpLMMne <-
function(x, ...){
cat(paste(c('Value/s of the predictor of the defined function/s of the dependent variable =',
round(x$thetaP, 4)), collapse=" "))
cat('\nto see the details, please use str()', '\n', '\n')
cat(paste('Sample size = ', length(x$YS), '\n'))
cat(paste('Dataset size = ', nrow(x$reg), '\n', '\n'))
}
|
parse_remote_standard <- function(specs, config, ...) {
parsed_specs <- re_match(specs, standard_rx())
parsed_specs$ref <- parsed_specs$.text
cn <- setdiff(colnames(parsed_specs), c(".match", ".text"))
parsed_specs <- parsed_specs[, cn]
parsed_specs$type <- "standard"
lapply(
seq_len(nrow(parsed_specs)),
function(i) as.list(parsed_specs[i,])
)
}
resolve_remote_standard <- function(remote, direct, config,
cache, dependencies, ...) {
force(remote); force(direct); force(dependencies)
versions <- if ("type" %in% names(remote)) {
remote$version
} else {
vcapply(remote, "[[", "version")
}
if (all(versions %in% c("", "current"))) {
resolve_from_metadata(remote, direct, config, cache, dependencies)
} else {
type_cran_resolve_version(remote, direct, config, cache, dependencies)
}
}
download_remote_standard <- function(resolution, target, target_tree,
config, cache, which, on_progress) {
rptp <- resolution$repotype
if (identical(rptp, "cran")) {
download_remote_cran(resolution, target, target_tree, config, cache,
which, on_progress)
} else if (identical(rptp, "bioc")) {
download_remote_bioc(resolution, target, target_tree, config, cache,
which, on_progress)
} else {
download_ping_if_no_sha(resolution, target, config, cache,
on_progress)
}
}
satisfy_remote_standard <- function(resolution, candidate, config, ...) {
if (resolution$package != candidate$package) {
return(structure(FALSE, reason = "Package names differ"))
}
if (resolution$direct) {
if (candidate$type == "installed") {
type <- candidate$extra[[1]][["repotype"]] %||% "unknown"
if (is.na(type)) type <- "unknown"
remotetype <- candidate$extra[[1]][["remotetype"]] %||% "unknown"
if (is.na(remotetype)) remotetype <- "unknown"
} else {
type <- candidate$type
remotetype <- "unknown"
}
if (!type %in% c("cran", "bioc", "standard") && remotetype != "standard") {
return(structure(FALSE, reason = "User requested CRAN package"))
}
if (candidate$type == "installed" &&
package_version(resolution$version) > candidate$version) {
return(structure(FALSE, reason = "Direct ref needs update"))
}
}
version <- tryCatch(resolution$remote[[1]]$version, error = function(e) "")
if (version == "") return(TRUE)
if (!version_satisfies(
candidate$version,
resolution$remote[[1]]$atleast,
version)) {
return(structure(FALSE, reason = "Insufficient version"))
}
TRUE
}
installedok_remote_standard <- function(installed, solution, config, ...) {
if (solution$repotype == "cran") {
installedok_remote_cran(installed, solution, config, ...)
} else if (solution$repotype == "bioc") {
installedok_remote_bioc(installed, solution, config, ...)
} else if (solution$platform != "source") {
identical(installed$package, solution$package) &&
identical(installed$version, solution$version) &&
(identical(installed[["platform"]], solution[["platform"]]) ||
identical(installed[["platform"]], "*")) &&
identical(installed$remoterepos, solution$metadata[[1]][["RemoteRepos"]])
} else {
identical(installed$package, solution$package) &&
identical(installed$version, solution$version) &&
identical(installed$remoterepos, solution$metadata[[1]][["RemoteRepos"]])
}
}
|
context("mock writing to disk")
enable()
test_that("Write to a file before mocked request: crul", {
skip_on_cran()
library(crul)
f <- tempfile(fileext = ".json")
cat("{\"hello\":\"world\"}\n", file = f)
expect_is(readLines(f), "character")
expect_match(readLines(f), "world")
stub_request("get", "https://httpbin.org/get") %>%
to_return(body = file(f))
out <- HttpClient$new("https://httpbin.org/get")$get(disk = f)
expect_is(out$content, "character")
expect_equal(attr(out$content, "type"), "file")
expect_is(readLines(out$content), "character")
expect_match(readLines(out$content), "hello")
unlink(f)
stub_registry_clear()
})
test_that("Write to a file before mocked request: httr", {
skip_on_cran()
library(httr)
f <- tempfile(fileext = ".json")
cat("{\"hello\":\"world\"}\n", file = f)
expect_is(readLines(f), "character")
expect_match(readLines(f), "world")
stub_request("get", "https://httpbin.org/get") %>%
to_return(body = file(f),
headers = list('content-type' = "application/json"))
out <- GET("https://httpbin.org/get", write_disk(f, overwrite=TRUE))
content(out)
expect_is(out$content, "path")
expect_equal(attr(out$content, "class"), "path")
expect_is(readLines(out$content), "character")
expect_match(readLines(out$content), "hello")
unlink(f)
stub_registry_clear()
})
test_that("Use mock_file to have webmockr handle file and contents: crul", {
skip_on_cran()
library(crul)
f <- tempfile(fileext = ".json")
stub_request("get", "https://httpbin.org/get") %>%
to_return(body = mock_file(f, "{\"hello\":\"mars\"}\n"))
out <- crul::HttpClient$new("https://httpbin.org/get")$get(disk = f)
out$content
expect_is(out$content, "character")
expect_match(out$content, "json")
expect_is(readLines(out$content), "character")
expect_true(any(grepl("hello", readLines(out$content))))
unlink(f)
stub_registry_clear()
})
test_that("Use mock_file to have webmockr handle file and contents: httr", {
skip_on_cran()
library(httr)
f <- tempfile(fileext = ".json")
stub_request("get", "https://httpbin.org/get") %>%
to_return(
body = mock_file(path = f, payload = "{\"foo\": \"bar\"}"),
headers = list('content-type' = "application/json")
)
out <- GET("https://httpbin.org/get", write_disk(f))
expect_is(out$content, "path")
expect_match(out$content, "json")
expect_is(readLines(out$content), "character")
expect_true(any(grepl("foo", readLines(out$content))))
unlink(f)
stub_registry_clear()
})
|
rotate_resid <- function(semPaths_plot, rotate_resid_list = NULL) {
if (is.null(rotate_resid_list)) {
stop("rotate_resid_list not specified.")
}
if (is.null(semPaths_plot)) {
stop("semPaths_plot not specified.")
} else {
if (!inherits(semPaths_plot, "qgraph")) {
stop("semPaths_plot is not a qgraph object.")
}
}
if (!is.list(rotate_resid_list) && is.numeric(rotate_resid_list)) {
rotate_resid_list_org <- rotate_resid_list
rotate_resid_list <- to_list_of_lists(rotate_resid_list,
name1 = "node",
name2 = "rotate")
}
Nodes_in <- sapply(rotate_resid_list, function(x) x$node)
Nodes_names <- semPaths_plot$graphAttributes$Nodes$names
if (!is.null(names(Nodes_names))) {
Nodes_names <- names(Nodes_names)
}
if (!all(Nodes_in %in% Nodes_names)) {
stop("One or more nodes in rotate_resid_list not in semPaths_plot.")
}
Nodes_id <- seq_len(length(Nodes_names))
names(Nodes_id) <- Nodes_names
loopRotation_old <- semPaths_plot$graphAttributes$Nodes$loopRotation
loopRotation_new <- loopRotation_old
loopRotation_new[Nodes_id[Nodes_in]] <- sapply(rotate_resid_list,
function(x) x$rotate*pi/180)
semPaths_plot$graphAttributes$Nodes$loopRotation <- loopRotation_new
semPaths_plot
}
|
.likelihood_coal_exp_mod <- function(Vtimes,ntips,tau0,gamma,N0)
{
Ttimes <- diff(Vtimes)
Vtimes <- Vtimes[2:length(Vtimes)]
nbint <- length(Ttimes)
samp <- seq((ntips-2),(ntips-nbint-1),by=-1)
indLikelihood <- samp*(samp+1)/2*2*tau0/N0*exp(gamma*Vtimes)*exp(-samp*(samp+1)/2*2*tau0/N0*1/gamma*exp(gamma*Vtimes)*(1-exp(-gamma*Ttimes)))
res <- sum(log(indLikelihood))
return(list("res"=res,"all"=indLikelihood))
}
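## Minimal numeric sketch with made-up coalescent times (values are arbitrary):
.likelihood_coal_exp_mod(Vtimes = c(0, 0.10, 0.25, 0.45),
                         ntips = 5, tau0 = 1, gamma = 0.5, N0 = 10)$res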
|
TOKENS <- c("IDENTIFIER", "POINTER", "STRING", "SYMBOL",
"DATE", "TIME",
"REAL", "BINT", "DINT",
"UNIT",
"END", "END_GROUP", "END_OBJECT",
"BEGIN_GROUP", "BEGIN_OBJECT",
"COMMENT")
LITERALS <- c("(", ")", ",", "=", "{", "}")
odl_lexer <- R6::R6Class("Lexer",
public = list(
tokens = TOKENS,
literals = LITERALS,
t_POINTER = function(re="\\^[A-Z][A-Z0-9_]+", t) {
return(t)
},
t_STRING = function(re="\"[^\"]+\"", t) {
t$value <- substring(t$value, 2, nchar(t$value) - 1)
return(t)
},
t_SYMBOL = function(re="'[^']+'", t) {
t$value <- substring(t$value, 2, nchar(t$value) - 1)
return(t)
},
t_DATE = function(re=
"\\d{4}\\-(\\d{2}\\-\\d{2}|\\d{3})(T\\d{2}:\\d{2}(:\\d{1,2}(.\\d+)?)?)?(\\+\\d+|\\-\\d+|Z)?",
t) {
if (grepl(":\\d{1,2}(.\\d+)?[\\+\\-][\\d:]+$", t$value, perl = T)) {
m <- regexec("[\\d:]+$", t$value, perl = T)
tz_offset <- regmatches(t$value, m)
if (grepl(":", tz_offset, fixed = TRUE)) {
tz_offset <- paste0(
lapply(strsplit(tz_offset, ":"), function(comp) {
sprintf("%02d", strtoi(comp))
}
)[[1]], collapse = "")
} else if (nchar(tz_offset) == 4) {
# offset is already in HHMM form; keep it as-is
} else {
# bare hour offset (e.g. "5"): pad to HHMM
tz_offset <- sprintf("%02d00", strtoi(tz_offset))
}
t$value <- paste0(substr(t$value, 1, m[[1]] - 1), tz_offset)
}
t$value <- sub("[zZ]$", "", t$value)
date_formats <- c("%Y-%j", "%Y-%m-%d")
time_formats <- c("%H:%M",
"%H:%M:%OS")
zone_formats <- c("", "%z")
time_zone_formats <- paste(rep(time_formats,
each = length(zone_formats)),
zone_formats, sep = "")
time_zone_formats <- c("", paste0("T", time_zone_formats))
try_formats <- paste(rep(date_formats,
each = length(time_zone_formats)),
time_zone_formats, sep = "")
try_formats <- try_formats[order(nchar(try_formats),
try_formats,
decreasing = TRUE)]
t$value <- as.POSIXlt(t$value,
tz = "UTC",
tryFormats = try_formats)
return(t)
},
t_TIME =
function(re="\\d{2}:\\d{2}(:\\d{1,2}(\\.\\d*)?)?(\\+\\d+|\\-\\d+|Z)?", t) {
return(t)
},
t_REAL = function(re=
"[+-]?(\\d+[Ee][+-]?[0-9]+|((\\d+\\.\\d+|\\d+\\.|\\.\\d+)([Ee][+-]?[0-9]+)?))",
t) {
t$value <- as.numeric(t$value)
return(t)
},
t_BINT = function(re="[0-9]+
components <- strsplit(t$value, "
t$value <- strtoi(components[2], components[1])
return(t)
},
t_DINT = function(re="[+-]?[0-9]+", t) {
t$value <- strtoi(t$value)
return(t)
},
t_UNIT = function(re="<[^>]+>", t) {
t$value <- substring(t$value, 2, nchar(t$value) - 1)
return(t)
},
t_IDENTIFIER = function(re="[A-Z][A-Z0-9_:]+", t) {
if (t$value == "END") t$type <- "END"
else if (t$value == "END_GROUP") t$type <- "END_GROUP"
else if (t$value == "END_OBJECT") t$type <- "END_OBJECT"
else if (t$value == "GROUP") t$type <- "BEGIN_GROUP"
else if (t$value == "OBJECT") t$type <- "BEGIN_OBJECT"
else if (t$value == "BEGIN_OBJECT") t$type <- "BEGIN_OBJECT"
return(t)
},
t_COMMENT = function(re="/\\*.+?\\*/", t) {
return()
},
t_ignore = " \t\r\n",
t_error = function(t) {
cat(sprintf("Illegal character '%s'", t$value[1]))
t$lexer$skip(1)
return(t)
}
)
)
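# --- Hedged usage sketch (not part of the original source) -----------------
# A minimal tokenisation loop, assuming the generator above is consumed through
# the rly package's lex()/input()/token() interface. The ODL snippet (including
# the based integer 16#FF#) is made up.
if (FALSE) {
lexer <- rly::lex(odl_lexer)
lexer$input("PDS_VERSION_ID = PDS3\nRECORD_BYTES = 16#FF#\nEND")
while (!is.null(tok <- lexer$token())) {
print(tok)
}
}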
|
ssh_tunnel <- function(session, port = 5555, target = "rainmaker.wunderground.com:23") {
assert_session(session)
stopifnot(is.numeric(port))
target <- parse_host(target, NA)
if(is.na(target$port))
stop("No port specified in 'target'")
.Call(C_blocking_tunnel, session, as.integer(port), target$host, target$port)
invisible()
}
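# --- Hedged usage sketch (not part of the original source) -----------------
# ssh_tunnel() blocks the current R process while it forwards connections made
# to the local `port` towards `target` through the SSH session. The host,
# credentials, and target below are hypothetical.
if (FALSE) {
session <- ssh::ssh_connect("user@dev.example.com")
ssh_tunnel(session, port = 5555, target = "internal.example.com:80")
ssh::ssh_disconnect(session)
}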
|
lilikoi.featuresSelection <- function(PDSmatrix,threshold= 0.5,method="info"){
pds_matrix=(as.data.frame(cbind(t(PDSmatrix),Label=Metadata$Label)))
set.seed(2000)
training_ID <- createDataPartition(pds_matrix$Label, p = .8,list = FALSE,times = 1)
training_diagnosis<-pds_matrix[training_ID,]
if (method=="info"){
InfoGainAttributeEval(as.logical(training_diagnosis$Label-1) ~ . , data = training_diagnosis)->infogainfeatures
selected_pathways<-names(infogainfeatures[infogainfeatures>threshold])}
else{
GainRatioAttributeEval(as.logical(training_diagnosis$Label-1) ~ . , data = training_diagnosis)->infogainfeatures
selected_pathways<-names(infogainfeatures[infogainfeatures>threshold])}
info.paireddiagnosis.R<-discretize(training_diagnosis[,selected_pathways])
info.paireddiagnosis.R<-cbind(info.paireddiagnosis.R,as.numeric(as.matrix(training_diagnosis[,ncol(training_diagnosis)])))
I.R <- mutinformation(info.paireddiagnosis.R,method= "emp")
I.R.paireddiagnosis<-I.R[,ncol(I.R)]
theTable <- within(as.data.frame(I.R.paireddiagnosis),
I.R.paireddiagnosis <- as.numeric(I.R.paireddiagnosis))
theTable<-cbind(row.names(theTable),theTable)
theTable<-theTable[-ncol(I.R),]
colnames(theTable)[1]<-c("name")
theTable <- transform(theTable,
name = reorder(name,order(I.R.paireddiagnosis, decreasing = TRUE)))
p <- ggplot(theTable, aes(name, I.R.paireddiagnosis)) + geom_col() + xlab(NULL) +
ylab(NULL)
# NOTE: inside a function these two expressions build modified plots but
# discard them (ggplot objects only render when printed); the rendered plot
# is `q + coord_flip()` below.
p + theme(axis.text.x = element_text(angle = 90))
p + coord_flip()
q <- p + aes(stringr::str_wrap(name, 20), I.R.paireddiagnosis) + ylab("Mutual information") +
xlab("Pathways")
plot(q + coord_flip())
return(selected_pathways)
}
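# --- Hedged usage sketch (not part of the original source) -----------------
# lilikoi.featuresSelection() assumes a data frame `Metadata` with a `Label`
# column is available in the calling environment, and needs caret
# (createDataPartition), the attribute evaluators InfoGainAttributeEval /
# GainRatioAttributeEval, infotheo (discretize, mutinformation) and ggplot2.
# `PDSmatrix` (pathways x samples) is a hypothetical input.
if (FALSE) {
selected_pathways <- lilikoi.featuresSelection(PDSmatrix,
threshold = 0.5,
method = "gain")   # any value other than "info" uses the gain-ratio branch
head(selected_pathways)
}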
|
test.gdi13 <- function() {
dataPath <- file.path(path.package(package="clusterCrit"),"unitTests","data","testsInternal_400_4.Rdata")
load(file=dataPath, envir=.GlobalEnv)
idx <- intCriteria(traj_400_4, part_400_4[[4]], c("GDI13"))
cat(paste("\nFound idx =",idx))
val <- 1.86222124332669
cat(paste("\nShould be =",val,"\n"))
checkEqualsNumeric(idx[[1]],val)
}
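# --- Hedged usage sketch (not part of the original test data) --------------
# intCriteria() computes internal validity indices from a numeric matrix and an
# integer partition vector; a minimal call on made-up data:
if (FALSE) {
library(clusterCrit)
x <- as.matrix(iris[, 1:4])
part <- as.integer(kmeans(x, centers = 3)$cluster)
intCriteria(x, part, c("GDI13"))
}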
|