# Hartmann 3-dimensional test function; x is a length-3 vector in [0,1]^3.
hartman3 <- function(x){
a <- matrix(c(3.0,0.1,3.0,0.1,10.0,10.0,10.0,10.0,30.0,35.0,30.0,35.0),3,4,byrow=TRUE)
p <- matrix(c(0.3689,0.4699,0.1091,0.03815,0.117,0.4387,0.8732,0.5743,0.2673,0.747,0.5547,0.8828),3,4,byrow=TRUE)
c <- c(1.0,1.2,3.0,3.2)
d <- numeric(4)
for (i in 1:4){
d[i] <- sum(a[,i]*(x - p[,i])^2)
}
f <- -sum(c*exp(-d))
return(f)
}
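# Hedged usage sketch: the literature value for the Hartmann-3 global
# minimum (an assumption, not stated above) is f ~ -3.86278 at
# x* = (0.114614, 0.555649, 0.852547).
hartman3(c(0.114614, 0.555649, 0.852547))  # approximately -3.86278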
|
starts_uni_cov <- function(W, var_trait, var_ar, var_state, a, time_index=NULL,
add_meas_error=NULL )
{
if ( is.null(time_index) ){
time_index <- 1:W
}
covmat <- starts_rcpp_starts_uni_cov( var_trait=var_trait, var_ar=var_ar,
var_state=var_state, a=a, time_index=time_index )
if ( ! is.null(add_meas_error) ){
if (is.vector(add_meas_error)){
diag(covmat) <- diag(covmat) + add_meas_error
} else {
covmat <- covmat + add_meas_error
}
}
return(covmat)
}
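# Hedged usage sketch (assumes the compiled STARTS backend
# starts_rcpp_starts_uni_cov is available): a 4-wave model-implied
# covariance matrix with constant measurement-error variance added to the
# diagonal. Parameter values are illustrative only.
# covmat <- starts_uni_cov(W = 4, var_trait = 0.5, var_ar = 0.3,
#                          var_state = 0.2, a = 0.7,
#                          add_meas_error = rep(0.1, 4))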
|
context("test-merge.disk.frame")
setup({
b = data.frame(a = 51:150, b = 1:100)
d = data.frame(a = 151:250, b = 1:100)
as.disk.frame(b, file.path(tempdir(), "tmp_merge.df"), nchunks = 5, overwrite = TRUE)
as.disk.frame(d, file.path(tempdir(), "tmp_merge2.df"), nchunks = 5, overwrite = TRUE)
})
test_that("testing merge of disk.frame", {
b.df = disk.frame(file.path(tempdir(), "tmp_merge.df"))
d.df = disk.frame(file.path(tempdir(), "tmp_merge2.df"))
bd.df = merge(b.df, d.df, by = "b", outdir = file.path(tempdir(), "tmp_bd_merge.df"), overwrite = TRUE, merge_by_chunk_id = TRUE)
expect_s3_class(bd.df, "disk.frame")
expect_equal(nrow(bd.df), 100)
})
test_that("testing merge of data.frame", {
b.df = disk.frame(file.path(tempdir(), "tmp_merge.df"))
d = data.frame(a = 151:250, b = 1:100)
bd.df = merge(b.df, d, by = "b", outdir = file.path(tempdir(), "tmp_bd_merge2.df"), overwrite = TRUE)
expect_s3_class(bd.df, "disk.frame")
expect_equal(nrow(bd.df), 100)
tmp = collect(bd.df)
expect_s3_class(tmp, "data.frame")
expect_equal(nrow(tmp), 100)
})
test_that("testing error when merge_by_chunk = FALSE", {
b.df = disk.frame(file.path(tempdir(), "tmp_merge.df"))
d.df = disk.frame(file.path(tempdir(), "tmp_merge2.df"))
testthat::expect_error()
expect_error(
merge(
b.df,
d.df,
by = "b",
outdir = file.path(tempdir(), "tmp_bd_merge.df"),
overwrite = TRUE,
merge_by_chunkd_id = FALSE
)
)
})
teardown({
fs::dir_delete(file.path(tempdir(), "tmp_merge.df"))
fs::dir_delete(file.path(tempdir(), "tmp_merge2.df"))
fs::dir_delete(file.path(tempdir(), "tmp_bd_merge.df"))
fs::dir_delete(file.path(tempdir(), "tmp_bd_merge2.df"))
})
|
posterior_samples <- function(object, ...){
if(is(object, "estimate") | is(object, "explore")) {
if(!is(object, "default")){
stop("object most be from 'estimate' or 'explore'")
}
p <- object$p
pcors_total <- p * (p - 1) * 0.5
I_p <- diag(p)
iter <- object$iter
pcor_samples <-
matrix(
object$post_samp$pcors[, , 51:(iter + 50)][upper.tri(I_p)],
nrow = iter,
ncol = pcors_total,
byrow = TRUE
)
cn <- colnames(object$Y)
if(is.null(cn)){
col_names <- sapply(1:p, function(x) paste(1:p, x, sep = "--"))[upper.tri(I_p)]
} else {
col_names <- sapply(cn, function(x) paste(cn, x, sep = "--"))[upper.tri(I_p)]
}
colnames(pcor_samples) <- col_names
posterior_samples <- pcor_samples
if(!is.null(object$formula)){
if(ncol(object$X) == 1){
beta_terms <- "(Intercept)"
} else {
beta_terms <- colnames(object$X)
}
n_beta_terms <- length(beta_terms)
beta_samples <- object$post_samp$beta
if(is.null(cn)){
col_names <- 1:p
} else {
col_names <- cn
}
beta_start <- matrix(beta_samples[1:n_beta_terms,1, 51:(iter+50)],
nrow = iter, n_beta_terms, byrow = TRUE)
colnames(beta_start) <- paste0(col_names[1], "_", beta_terms)
for(i in 2:p){
beta_i <- matrix(beta_samples[1:n_beta_terms, i, 51:(iter+50)],
nrow = iter,
n_beta_terms,
byrow = TRUE)
colnames(beta_i) <- paste0(col_names[i], "_", beta_terms)
beta_start <- cbind(beta_start, beta_i)
}
posterior_samples <- cbind(posterior_samples, beta_start)
}
} else if (is(object, "var_estimate")) {
if(!is(object, "default")){
stop("object most be from 'var_estimate'")
}
p <- object$p
pcors_total <- p * (p - 1) * 0.5
I_p <- diag(p)
iter <- object$iter
pcor_samples <-
matrix(
object$fit$pcors[, , 51:(iter + 50)][upper.tri(I_p)],
nrow = iter,
ncol = pcors_total,
byrow = TRUE
)
cn <- colnames(object$Y)
if(is.null(cn)){
col_names <- sapply(1:p, function(x) paste(1:p, x, sep = "--"))[upper.tri(I_p)]
} else {
col_names <- sapply(cn, function(x) paste(cn, x, sep = "--"))[upper.tri(I_p)]
}
colnames(pcor_samples) <- col_names
posterior_samples <- pcor_samples
n_beta_terms <- nrow(object$beta_mu)
beta_samples <- object$fit$beta
col_names <- colnames(object$Y)
beta_terms <- colnames(object$X)
beta_start <- matrix(beta_samples[1:n_beta_terms,1, 51:(iter+50)],
nrow = iter, n_beta_terms, byrow = TRUE)
colnames(beta_start) <- paste0(col_names[1], "_", beta_terms)
for(i in 2:p){
beta_i <- matrix(beta_samples[1:n_beta_terms, i, 51:(iter+50)],
nrow = iter,
n_beta_terms,
byrow = TRUE)
colnames(beta_i) <- paste0(col_names[i], "_", beta_terms)
beta_start <- cbind(beta_start, beta_i)
}
posterior_samples <- cbind(posterior_samples, beta_start)
} else {
stop("object class not currently supported")
}
return(posterior_samples)
}
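# Hedged usage sketch (assumes a BGGM-style fitted object carrying the
# classes checked above, e.g. from a hypothetical estimate() call):
# fit <- estimate(Y)
# samps <- posterior_samples(fit)
# head(colnames(samps))  # partial-correlation names such as "1--2", "1--3"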
|
summary.MBPCA<-function (object, nvar=NULL, ncompprint = NULL, digits=2,...){
Res<-object
if (!inherits(Res, "MBPCA"))
stop("non convenient data")
appel <- as.list(Res$call)
group <- eval.parent(appel$group)
ntab <- length(group)
if (is.null(nvar)) nvar=nrow(Res$globalcor)
if (is.null(ncompprint)) ncompprint=Res$components[2]
if (ncompprint > Res$components[2]) stop(paste("ncompprint should be less than or equal to", Res$components[2]))
eig=cbind("|",round(Res$cumexplained[1:ncompprint,1], digits),"|",round(Res$cumexplained[1:ncompprint,2], digits),"|")
colnames(eig) <-c("|", "% of inertia","|","Cumul % of inertia","|")
cat("\n Percentages of inertia explained in X blocks for the first", ncompprint,"dimensions and their cumulative values. \n\n")
print(eig,quote=FALSE)
variables=cbind("|",round(Res$globalcor[1:nvar,1:ncompprint], digits),"|")
colnames(variables) <-c("|", paste("Dim.", 1:ncompprint, sep=""),"|")
cat("\n Correlation of the first", nvar, "variables of X blocks with the first", ncompprint,"dimensions. \n\n")
print(variables,quote=FALSE)
}
|
knitr::opts_chunk$set(collapse = TRUE, comment = "#>")  # comment string truncated in the source; the conventional "#>" is assumed
library(cmocean)
plot_cm <- function(name, n=256) {
z <- matrix(seq(0, 1, length.out=n))
image(z, col=cmocean(name)(n), axes=FALSE)
mtext(name, 3, adj=0)
}
pal <- c('algae', 'amp', 'balance', 'diff',
'gray', 'curl', 'deep', 'delta',
'dense', 'haline', 'ice', 'matter',
'oxy', 'phase', 'rain', 'solar',
'speed', 'tarn', 'tempo', 'thermal',
'topo', 'turbid')
opar <- par(no.readonly=TRUE)
par(mfrow=c(6, 1), mar=c(0.5, 0.5, 1.5, 0.5))
for (i in seq_along(pal)) {
plot_cm(pal[i])
}
par(mfrow=c(1, 1))
par(opar)
par(mar=c(2, 2, 1, 1), cex=0.5)
image(volcano, col=cmocean('thermal')(256))
par(mar=c(2, 2, 1, 1), cex=0.5)
x <- y <- seq(-4*pi, 4*pi, len = 27)
r <- sqrt(outer(x^2, y^2, "+"))
image(z = z <- cos(r^2)*exp(-r/6), col = cmocean('haline')(256))
|
te_renyi <- function(x,
lx,
y,
ly,
q,
shuffles,
type,
quantiles,
bins,
limits,
nboot,
burn,
quiet) {
x <- code_sample(x, type, quantiles, bins, limits)
y <- code_sample(y, type, quantiles, bins, limits)
if (!quiet) cat(" [calculate] X->Y transfer entropy\n")
texy <- calc_te_renyi(x = y, lx = ly, y = x, ly = lx, q = q)
consty <- shuffle_renyi(
x = y,
lx = ly,
y = x,
ly = lx,
q = q,
shuffles = shuffles
)
stexy <- texy - consty
if (!quiet) cat(" [calculate] Y->X transfer entropy\n")
teyx <- calc_te_renyi(x = x, lx = lx, y = y, ly = ly, q = q)
constx <- shuffle_renyi(
x = x,
lx = lx,
y = y,
ly = ly,
q = q,
shuffles = shuffles
)
steyx <- teyx - constx
if (nboot > 1) {
if (!quiet) {
cat(sprintf(
" [bootstrap] %s time%s\n",
nboot, mult_s(nboot)
))
}
boot <- future.apply::future_sapply(seq_len(nboot), function(i) {
bootstrap_renyi(
x = x,
lx = lx,
y = y,
ly = ly,
q = q,
burn = burn
)
}, future.seed = TRUE)
} else {
boot <- NA
}
return(list(
teyx = teyx,
texy = texy,
steyx = steyx,
stexy = stexy,
boot = boot
))
}
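# Hedged note: code_sample, calc_te_renyi, shuffle_renyi, bootstrap_renyi
# and mult_s are assumed internals of the surrounding package. Because the
# bootstrap runs through future.apply, callers can parallelise it by
# setting a plan first, e.g.:
# future::plan(future::multisession)
# res <- te_renyi(x, lx = 1, y, ly = 1, q = 0.5, shuffles = 100,
#                 type = "quantiles", quantiles = c(5, 95), bins = NULL,
#                 limits = NULL, nboot = 300, burn = 50, quiet = FALSE)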
|
setClass('mcsContainer',
representation(mcsObjs = 'list',
stats = 'matrix',
description = 'character'
),
prototype = list(mcsObjs = list(),
description = ''
),
validity = function(object) {
numObjs = length(object@mcsObjs)
if(numObjs < 1)
return('no "InclusionZone" objects found in mcsObjs slot!')
for(i in seq_len(numObjs))
validObject(object@mcsObjs[[i]])
class = class(object@mcsObjs[[1]])
for(i in seq_len(numObjs))
if(class(object@mcsObjs[[i]]) != class)
return('Please do not mix Monte Carlo classes in the collection!')
return(TRUE)
}
)
if(!isGeneric("mcsContainer"))
setGeneric('mcsContainer',
function(object, ...) standardGeneric('mcsContainer'),
signature = c('object')
)
setMethod('mcsContainer',
signature(object = 'list'),
function(object,
description = 'Monte Carlo Sampling container object',
...
)
{
numObjs = length(object)
if(numObjs < 1)
stop('Error: there must be at least one object in the list for a collection!')
volEst = sapply(object, function(x) x@volEst)
volVar = sapply(object, function(x) x@volVar)
ci.lo = sapply(object, function(x) [email protected])
ci.up = sapply(object, function(x) [email protected])
trueVol = sapply(object, function(x) x@trueVol)
relErrPct = sapply(object, function(x) x@relErrPct)
stats = rbind(trueVol, volEst, relErrPct, volVar, ci.lo, ci.up)
mcobj = new('mcsContainer',
mcsObjs = object,
stats = stats,
description = description
)
return(mcobj)
}
)
setMethod('show',
signature(object = 'mcsContainer'),
function(object)
{
return(summary(object))
}
)
setMethod('summary',
signature(object = 'mcsContainer'),
function(object,
...
)
{
.StemEnv$underLine(60)
cat(object@description, fill=60)
.StemEnv$underLine(60, prologue='')
numObjs = length(object@mcsObjs)
cat('There are ', numObjs, ' ', class(object@mcsObjs[[1]]), ' objects in the collection',sep='')
if(is(object, 'antitheticContainer'))
cat('\n--Antithetic sampling from original', class(object@mcsObjs[[1]]@mcsObj),'objects.')
cat('\n\nSummary stats over all objects...\n')
statSum = apply(object@stats, 1, summary)
print(statSum)
cat('\nProxy tabulation...')
if(is(object, 'antitheticContainer'))
print(table(sapply(object@mcsObjs, function(x)x@mcsObj@proxy)))
else
print(table(sapply(object@mcsObjs,function(x)x@proxy)))
cat('\n')
return(invisible(statSum))
}
)
setMethod('hist',
signature(x = 'mcsContainer'),
function(x,
stat = c('relErrPct', 'volVar'),
xlab = stat,
main = NA,
col = 'gray90',
...
)
{
stat = match.arg(stat)
if(stat == 'relErrPct')
vals = sapply(x@mcsObjs,function(z) z@relErrPct)
else
vals = sapply(x@mcsObjs,function(z) z@volVar)
hg = hist(vals, main=main, xlab=xlab, col=col, ...)
return(invisible(hg))
}
)
setMethod('plot',
signature(x = 'mcsContainer', y='missing'),
function(x,
xlab = 'Estimated volume',
ylab = 'True volume',
showDiagonal = TRUE,
...
)
{
xvals = sapply(x@mcsObjs, function(z) z@volEst)
yvals = sapply(x@mcsObjs, function(z) z@trueVol)
plot(xvals, yvals, xlab=xlab, ylab=ylab, ...)
if(showDiagonal)
abline(0,1, lty='dashed', col='gray50')
return(invisible())
}
)
setClass('antitheticContainer',
contains = 'mcsContainer',
validity = function(object) {
class = class(object@mcsObjs[[1]])
if(class != 'antitheticSampling')
return('antitheticContainer can only have objects of class \"antitheticSampling\" !')
return(TRUE)
}
)
if(!isGeneric("antitheticContainer"))
setGeneric('antitheticContainer',
function(object, ...) standardGeneric('antitheticContainer'),
signature = c('object')
)
setMethod('antitheticContainer',
signature(object = 'list'),
function(object,
description = 'Antithetic Sampling container object',
...
)
{
mcobj = mcsContainer(object, description, ...)
anti = as(mcobj, 'antitheticContainer')
return(anti)
}
)
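# Hedged usage sketch (the constructor for the individual Monte Carlo
# sampling objects is hypothetical here): build a container from a list,
# then use the show/summary/hist/plot methods defined above.
# mcs <- lapply(1:20, function(i) makeMCSObject(i))  # hypothetical helper
# cont <- mcsContainer(mcs)
# summary(cont)
# hist(cont, stat = 'relErrPct')
# plot(cont)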
|
Cloglin_mult <-
function (table){
fit.glm<-glm(count~.^3, data=table, family=poisson)
lmu_Y<-summary(fit.glm)$coef[4,]
lmu_XY<-summary(fit.glm)$coef[6,]
lmu_ZY<-summary(fit.glm)$coef[7,]
lmu_XZY<-summary(fit.glm)$coef[8,]
a<-table$count[1]+table$count[5]
b<-table$count[2]+table$count[6]
c<-table$count[3]+table$count[7]
d<-table$count[4]+table$count[8]
tableXZ<-data.frame(expand.grid(
X=factor(c("0","1"),levels=c("0","1")),
Z=factor(c("0","1"),levels=c("0","1"))),
count=c(a,b,c,d))
fit.glmXZ<-glm(count~.^2, data=tableXZ, family=poisson)
lmu_c_Z<-summary(fit.glmXZ)$coef[3,]
mu_c_Z<-exp(summary(fit.glmXZ)$coef[3])
lmu_c_XZ<-summary(fit.glmXZ)$coef[4,]
mu_c_XZ<-exp(summary(fit.glmXZ)$coef[4] )
e<-tableXZ$count[1]+tableXZ$count[3]
f<-tableXZ$count[2]+tableXZ$count[4]
tableX<-data.frame(expand.grid(
interest=factor(c("0","1"),levels=c("0","1"))),
count=c(e,f))
fit.glmX<-glm(count~.^2, data=tableX, family=poisson)
lmu_c_X<-summary(fit.glmX)$coef[2,]
mu_c_X<-exp(summary(fit.glmX)$coef[2] )
return(rbind(lmu_Y,lmu_XY,lmu_ZY, lmu_XZY, lmu_c_Z, lmu_c_XZ, lmu_c_X))}
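# Hedged usage sketch: the indexing above implies a 2x2x2 contingency
# table in data-frame form with factors ordered X, Z, Y and a count column
# of length 8. The counts below are made up for illustration.
tab <- data.frame(expand.grid(X = factor(0:1), Z = factor(0:1), Y = factor(0:1)),
count = c(20, 15, 12, 18, 25, 10, 8, 30))
Cloglin_mult(tab)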
|
sombreroGUI <- function() {
if (all(requireNamespace("shinycssloaders", quietly = TRUE),
requireNamespace("shinyBS", quietly = TRUE),
requireNamespace("shinyjs", quietly = TRUE),
requireNamespace("shinyjqui", quietly = TRUE))) {
shiny::runApp(system.file('shiny', package = 'SOMbrero'))
} else {
stop("The packages 'shinycssloaders', 'shinyBS', 'shinyjs' and 'shinyjqui' are required to launch the graphical interface.",
call. = TRUE)
}
}
|
"pupil_data"
|
context("Verify dark_mode operates as expected.")
library(ggplot2)
# NOTE: every colour string in this test file was truncated in extraction
# (text from the "#" of each hex code onward was lost); the values below
# are self-consistent reconstructions, not the original colours.
light_theme <- theme_void() +
theme(plot.background = element_rect(fill = "#FFFFFF", colour = "#FFFFFF"),
panel.grid.major = element_line(color = "#CCCCCC"),
axis.text.x = element_text(color = "#000000"))
dark_theme <- dark_mode(light_theme)
test_that("dark_mode inverts fill and colour aesthetics of all theme elements", {
expect_equal(dark_theme$plot.background$fill, "#000000")
expect_equal(dark_theme$plot.background$colour, "#000000")
expect_equal(dark_theme$panel.grid.major$colour, "#333333")
expect_equal(dark_theme$axis.text.x$colour, "#FFFFFF")
})
light_theme_alt <- dark_mode(dark_theme)
test_that("dark_mode applied twice returns the original fill and colour aesthetics", {
expect_equal(light_theme_alt$plot.background$fill, light_theme$plot.background$fill)
expect_equal(light_theme_alt$plot.background$colour, light_theme$plot.background$colour)
expect_equal(light_theme_alt$panel.grid.major$colour, light_theme$panel.grid.major$colour)
expect_equal(light_theme_alt$axis.text.x$colour, light_theme$axis.text.x$colour)
})
light_theme_blank <- light_theme + theme(plot.background = element_blank())
light_theme_null <- light_theme + theme(plot.background = NULL)
dark_theme_blank <- dark_mode(light_theme_blank)
dark_theme_null <- dark_mode(light_theme_null)
test_that("dark_mode adds a black plot background if missing", {
expect_equal(dark_theme_blank$plot.background$fill, "
expect_equal(dark_theme_null$plot.background$fill, "
})
invert_geom_defaults()
p <- ggplot(iris, aes(Sepal.Width, Sepal.Length)) + geom_point()
test_that("invert_geom_defaults changes fill and colour to 'black'", {
expect_equal(p$layers[[1]]$geom$default_aes$colour, "black")  # truncated; "black" (the ggplot2 default) assumed from the test name
})
p + dark_mode()
test_that("Activating dark mode updates the geom fill and color defaults", {
expect_equal(p$layers[[1]]$geom$default_aes$colour, "white")  # truncated; "white" (inverse of black) assumed
})
|
fit.CLMM.2 <- function(data.y1, data.x1, data.z1, data.y2, data.x2, data.z2, n.clst, n.run=1){
# Flag missing observations (logical arrays with the same shape as the data).
na.flag1 <- is.na(data.y1)
na.flag2 <- is.na(data.y2)
if (any(na.flag1) | any(na.flag2)) {
# A value of 1 marks a margin in which every observation is NA.
time.NA1 <- apply(data.y1, c(1,2), function(x) as.integer(all(is.na(x))))
sample.NA1 <- apply(data.y1, c(1,3), function(x) as.integer(all(is.na(x))))
gene.NA1 <- apply(data.y1, c(2,3), function(x) as.integer(all(is.na(x))))
time.NA2 <- apply(data.y2, c(1,2), function(x) as.integer(all(is.na(x))))
sample.NA2 <- apply(data.y2, c(1,3), function(x) as.integer(all(is.na(x))))
gene.NA2 <- apply(data.y2, c(2,3), function(x) as.integer(all(is.na(x))))
if((1%in%time.NA1)|(1%in%sample.NA1)|(1%in%gene.NA1)|(1%in%time.NA2)|(1%in%sample.NA2)|(1%in%gene.NA2)){
stop("All NAs are found in a pair of observations. Please recheck your input.");
}else{
if(TRUE%in%na.flag1){
index.NA1=which(is.na(data.y1),arr.ind=TRUE);
for(i in 1:nrow(index.NA1)){
temp=data.y1[index.NA1[i,1],,index.NA1[i,3]];
data.y1[index.NA1[i,1],index.NA1[i,2],index.NA1[i,3]]=mean(temp[!is.na(temp)]);
};
};
if(TRUE%in%na.flag2){
index.NA2=which(is.na(data.y2),arr.ind=TRUE);
for(i in 1:nrow(index.NA2)){
temp=data.y2[index.NA2[i,1],,index.NA2[i,3]];
data.y2[index.NA2[i,1],index.NA2[i,2],index.NA2[i,3]]=mean(temp[!is.na(temp)]);
};
};
fit.CLMM.simple.2data.NA(data.y1=data.y1,data.x1=data.x1,data.z1=data.z1,
data.y2=data.y2,data.x2=data.x2,data.z2=data.z2,
na.flag1=na.flag1,na.flag2=na.flag2,n.clst=n.clst,n.run=n.run);
}
}else{
fit.CLMM.simple.2data(data.y1=data.y1,data.x1=data.x1,data.z1=data.z1,
data.y2=data.y2,data.x2=data.x2,data.z2=data.z2,
n.clst=n.clst,n.run=n.run);
}
}
fit.CLMM.simple.2data <- function(data.y1, data.x1, data.z1, data.y2, data.x2, data.z2, n.clst, n.run=1){
data.x.x.sum <- compute.x.z.sum.CLMM.simple.2(data.x1, data.x1) + compute.x.z.sum.CLMM.simple.2(data.x2, data.x2)
llh <- -9999999999
for(s in 1:n.run){
theta.hat <- fit.CLMM.simple.start.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, n.clst, start=s)
est.hat.new <- fit.CLMM.simple.EM.2(data.x1, data.y1, data.z1,data.x2, data.y2, data.z2, data.x.x.sum, theta.hat)
if(est.hat.new$theta.hat$llh > llh){
est.hat <- est.hat.new
llh <- est.hat.new$theta.hat$llh
}
}
return(est.hat)
}
library(cluster)
fit.CLMM.simple.start.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, n.clst, start=1){
J <- dim(data.y1)[1]
m1 <- dim(data.x1)[1]
m2 <- dim(data.x2)[1]
P <- dim(data.x1)[3]
Q <- dim(data.z1)[3]
beta.hat <- matrix(0, nrow=J, ncol=P)
temp.x <- data.x1[1,,]
if(m1>1){for(i in 2:m1){
temp.x <- rbind(temp.x, data.x1[i,,])
}}
if(m2>0){for(i in 1:m2){
temp.x <- rbind(temp.x, data.x2[i,,])
}}
temp <- solve(t(temp.x) %*% temp.x) %*% t(temp.x)
for(j in 1:J){
beta.hat[j,] <- temp %*% c(as.vector(t(data.y1[j,,])),as.vector(t(data.y2[j,,])))
}
if(start>1) {
temp <- sample(J, n.clst)
zeta.hat <- beta.hat[temp,]
}
if(start<=1) {
temp <- pam(beta.hat, n.clst)
zeta.hat <- temp$medoids
}
D.hat <- rep(1, n.clst)
sigma2.hat <- rep(1, n.clst)
pi.hat <- rep(1/n.clst, n.clst)
return(list(zeta.hat=zeta.hat, D.hat=D.hat, sigma2.hat=sigma2.hat, pi.hat=pi.hat))
}
fit.CLMM.simple.EM.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, data.x.x.sum, theta.hat){
J <- dim(data.y1)[1]
m1 <- dim(data.y1)[2]
L1 <- dim(data.y1)[3]
m2 <- dim(data.y2)[2]
L2 <- dim(data.y2)[3]
P <- dim(data.x1)[3]
Q <- dim(data.z1)[3]
K <- length(theta.hat$pi.hat)
llh.old <- -9999999999
llh <- -9999999990
while(llh-llh.old>0.01){
hats <- compute.Ehats.CLMM.simple.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2,
theta.hat, J, m1, L1, m2, L2, K, Q)
temp <- compute.theta.hat.CLMM.simple.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2,
data.x.x.sum, hats, J, m1, L1, m2, L2, K, P, Q)
llh.old <- llh
if(!is.na(temp$llh) & temp$llh > llh){
theta.hat <- temp
llh <- temp$llh
print(llh)
}
}
return(list(u.hat=hats$u.hat, b.hat.1=hats$b.hat.1, b.hat.2=hats$b.hat.2, theta.hat=theta.hat))
}
compute.Ehats.CLMM.simple.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2,
theta.hat, J, m1, L1, m2, L2, K, Q){
zeta.hat <- theta.hat$zeta.hat
D.hat <- theta.hat$D.hat
sigma2.hat <- theta.hat$sigma2.hat
pi.hat <- theta.hat$pi.hat
V.inv.1 <- compute.V.inv.simple.2(data.z1, D.hat, sigma2.hat, J, m1, L1)
V.inv.2 <- compute.V.inv.simple.2(data.z2, D.hat, sigma2.hat, J, m2, L2)
pResid.1 <- compute.pResid.CLMM.simple.2(data.x1, data.y1, zeta.hat, J, m1, L1, K)
pResid.2 <- compute.pResid.CLMM.simple.2(data.x2, data.y2, zeta.hat, J, m2, L2, K)
u.hat <- compute.u.hat.CLMM.simple.2(pResid.1, V.inv.1, pResid.2, V.inv.2, pi.hat, J, m1, m2, K)
b.hat.1 <- compute.b.hat.CLMM.simple.2(data.z1, pResid.1, D.hat, V.inv.1, J, m1, Q, K)
b.hat.2 <- compute.b.hat.CLMM.simple.2(data.z2, pResid.2, D.hat, V.inv.2, J, m2, Q, K)
b2.hat <- compute.b2.hat.CLMM.simple.2(data.z1, data.z2, u.hat, b.hat.1, b.hat.2, D.hat,
V.inv.1, V.inv.2, J, m1, m2, Q, K)
e.hat.1 <- compute.e.hat.CLMM.simple.2(data.z1, b.hat.1, pResid.1, J, m1, L1, K)
e.hat.2 <- compute.e.hat.CLMM.simple.2(data.z2, b.hat.2, pResid.2, J, m2, L2, K)
e2.hat <- compute.e2.hat.CLMM.simple.2(data.z1, e.hat.1, data.z2, e.hat.2,
D.hat, V.inv.1, V.inv.2, sigma2.hat, J, m1, L1, m2, L2, K)
return(list(u.hat=u.hat, b.hat.1=b.hat.1, b.hat.2=b.hat.2, b2.hat=b2.hat, e2.hat=e2.hat))
}
compute.theta.hat.CLMM.simple.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2,
data.x.x.sum, hats, J, m1, L1, m2, L2, K, P, Q){
u.hat <- hats$u.hat
b.hat.1 <- hats$b.hat.1
b.hat.2 <- hats$b.hat.2
b2.hat <- hats$b2.hat
e2.hat <- hats$e2.hat
pi.hat <- apply(u.hat, FUN=sum, MARGIN=2)/J
zeta.hat <- matrix(0, nrow=K, ncol=P)
D.hat <- rep(0, K)
sigma2.hat <- rep(0, K)
for(k in 1:K){
zeta.num <- 0
for(j in 1:J){
for(i in 1:m1){
zeta.num<- zeta.num + u.hat[j,k]*t(data.x1[i,,])%*%(data.y1[j,i,]-as.matrix(data.z1[i,,])%*%b.hat.1[j,i,,k])
}
for(i in 1:m2){
zeta.num<- zeta.num + u.hat[j,k]*t(data.x2[i,,])%*%(data.y2[j,i,]-as.matrix(data.z2[i,,])%*%b.hat.2[j,i,,k])
}
}
zeta.den <- sum(u.hat[,k])*data.x.x.sum
zeta.hat[k,] <- solve(zeta.den) %*% zeta.num
D.hat[k] <- sum(u.hat[,k]*b2.hat[,k])/((m1+m2)*Q*sum(u.hat[,k]))
sigma2.hat[k] <- sum(u.hat[,k]*e2.hat[,k])/((m1*L1+m2*L2)*sum(u.hat[,k]))
}
V.inv.1 <- compute.V.inv.simple.2(data.z1, D.hat, sigma2.hat, J, m1, L1)
V.inv.2 <- compute.V.inv.simple.2(data.z2, D.hat, sigma2.hat, J, m2, L2)
pResid.1 <- compute.pResid.CLMM.simple.2(data.x1, data.y1, zeta.hat, J, m1, L1, K)
pResid.2 <- compute.pResid.CLMM.simple.2(data.x2, data.y2, zeta.hat, J, m2, L2, K)
llh <- compute.llh.CLMM.2(pResid.1, V.inv.1, pResid.2, V.inv.2, pi.hat, J, m1, m2, K)
return(list(zeta.hat=zeta.hat, pi.hat=pi.hat, D.hat=D.hat, sigma2.hat=sigma2.hat, llh=llh))
}
compute.x.z.sum.CLMM.simple.2 <- function(data.x, data.z){
m <- dim(data.x)[1]
P <- dim(data.x)[3]
Q <- dim(data.z)[3]
data.x.z.sum <- matrix(0, nrow=P, ncol=Q)
for(i in 1:m){
data.x.z.sum <- data.x.z.sum + t(data.x[i,,]) %*% data.z[i,,]
}
return(data.x.z.sum)
}
compute.V.inv.simple.2 <- function(data.z, D.hat, sigma2.hat, J, m, L){
K <- length(sigma2.hat)
V.inv <- array(0, dim=c(J, m, L, L, K))
for(k in 1:K){
temp1 <- diag(sigma2.hat[k], L)
for(i in 1:m){
temp2 <- data.z[i,,]
for(j in 1:J){
temp <- temp1 + D.hat[k]*(temp2%*%t(temp2))
V.inv[j,i,,,k] <- solve(temp, tol=1e-50)
}
}
}
return(V.inv)
}
compute.pResid.CLMM.simple.2 <- function(data.x, data.y, zeta.hat, J, m, L, K){
pResid <- array(0, dim=c(J, m, L, K))
for(k in 1:K){
for(i in 1:m){
y.hat <- data.x[i,,] %*% zeta.hat[k,]
pResid[,i,,k] <- t(t(data.y[,i,]) - as.vector(y.hat))
}
}
return(pResid)
}
compute.b.hat.CLMM.simple.2 <- function(data.z, pResid, D.hat, V.inv, J, m, Q, K){
b.hat <- array(0, dim=c(J, m, Q, K))
for(j in 1:J)
for(i in 1:m)
for(k in 1:K)
b.hat[j,i,,k] <- D.hat[k]*t(data.z[i,,])%*%V.inv[j,i,,,k]%*%pResid[j,i,,k]
return(b.hat)
}
compute.b2.hat.CLMM.simple.2 <- function(data.z1, data.z2, u.hat, b.hat.1, b.hat.2, D.hat, V.inv.1, V.inv.2,
J, m1, m2, Q, K){
b2.hat <- array(0, dim=c(J, K))
for(j in 1:J){
for(k in 1:K){
temp1 <- 0
temp2 <- 0
tau2 <- D.hat[k]
for(i in 1:m1){
temp1 <- temp1 + sum(b.hat.1[j,i,,k]^2)
temp2 <- temp2 + sum(diag(t(data.z1[i,,])%*%V.inv.1[j,i,,,k]%*%data.z1[i,,]))
}
for(i in 1:m2){
temp1 <- temp1 + sum(b.hat.2[j,i,,k]^2)
temp2 <- temp2 + sum(diag(t(data.z2[i,,])%*%V.inv.2[j,i,,,k]%*%data.z2[i,,]))
}
b2.hat[j,k] <- temp1 + tau2*Q*(m1+m2) - tau2^2*temp2
}
}
return(b2.hat)
}
compute.e.hat.CLMM.simple.2 <- function(data.z, b.hat, pResid, J, m, L, K){
e.hat <- array(0, dim=c(J,m,L,K))
for(i in 1:m)
for(j in 1:J){
e.hat[j,i,,] <- pResid[j,i,,] - as.matrix(data.z[i,,])%*%b.hat[j,i,,]
}
return(e.hat)
}
compute.e2.hat.CLMM.simple.2 <- function(data.z1, e.hat.1, data.z2, e.hat.2,
D.hat, V.inv.1, V.inv.2, sigma2.hat, J, m1, L1, m2, L2, K){
e2.hat <- array(0, dim=c(J, K))
for(j in 1:J){
for(k in 1:K){
temp1 <- 0
temp2 <- 0
for(i in 1:m1){
temp1 <- temp1 + sum(e.hat.1[j,i,,k]^2)
temp2 <- temp2 + sum(diag(V.inv.1[j,i,,,k]))
}
for(i in 1:m2){
temp1 <- temp1 + sum(e.hat.2[j,i,,k]^2)
temp2 <- temp2 + sum(diag(V.inv.2[j,i,,,k]))
}
e2.hat[j,k] <- temp1 + sigma2.hat[k]*(L1*m1+L2*m2) - (sigma2.hat[k])^2*temp2
}
}
return(e2.hat)
}
compute.u.hat.CLMM.simple.2 <- function(pResid.1, V.inv.1, pResid.2, V.inv.2, pi.hat, J, m1, m2, K){
log.u.hat.num <- matrix(0, nrow=J, ncol=K)
for(k in 1:K){
for(j in 1:J){
temp <- 0
for(i in 1:m1){
temp <- temp + mvn.dnorm.log.2(pResid.1[j,i,,k], V.inv.1[j,i,,,k])
}
for(i in 1:m2){
temp <- temp + mvn.dnorm.log.2(pResid.2[j,i,,k], V.inv.2[j,i,,,k])
}
log.u.hat.num[j,k] <- log(pi.hat[k]) + temp
}
}
u.hat.num <- exp(t(apply(log.u.hat.num, MARGIN=1, FUN=all.ceiling.2)))
u.hat.den <- apply(u.hat.num, FUN=sum, MARGIN=1)
u.hat <- u.hat.num/u.hat.den
return(u.hat)
}
all.ceiling.2 <- function(aVector, cutoff=600){
xx <- max(aVector)
aVector <- aVector - xx + cutoff
return(aVector)
}
mvn.dnorm.log.2 <- function(aVector, var.inv){
L <- length(aVector)
y <- matrix(aVector, nrow=L)
log.dens <- log(abs(det(var.inv)))/2 + (log(2*pi)*(-L/2)) + ((-1/2) * t(y) %*% var.inv %*% y)
return(log.dens)
}
compute.llh.CLMM.2 <- function(pResid.1, V.inv.1, pResid.2, V.inv.2, pi.hat, J, m1, m2, K){
llh <- 0
for(j in 1:J){
temp.log <- rep(0, K)
for(k in 1:K){
temp.ind <- 0
for(i in 1:m1){
temp.ind <- temp.ind + mvn.dnorm.log.2(pResid.1[j,i,,k], V.inv.1[j,i,,,k])
}
for(i in 1:m2){
temp.ind <- temp.ind + mvn.dnorm.log.2(pResid.2[j,i,,k], V.inv.2[j,i,,,k])
}
temp.log[k] <- temp.ind
}
temp.log.max <- max(temp.log)
temp <- exp(temp.log - temp.log.max) %*% pi.hat
llh <- llh + log(temp) + temp.log.max
}
return(llh)
}
fit.CLMM.simple.2data.NA <- function(data.y1,data.x1,data.z1,data.y2,data.x2,data.z2,na.flag1,na.flag2,n.clst,n.run=1){
data.x.x.sum <- compute.x.z.sum.CLMM.simple.NA.2(data.x=data.x1, data.z=data.x1, na.flag=na.flag1)+compute.x.z.sum.CLMM.simple.NA.2(data.x=data.x2,data.z=data.x2, na.flag=na.flag2)
llh <- -9999999999
for(s in 1:n.run){
theta.hat <- fit.CLMM.simple.start.NA.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, n.clst, start=s)
est.hat.new <- fit.CLMM.simple.EM.NA.2(data.x1, data.y1, data.z1,data.x2, data.y2, data.z2, na.flag1, na.flag2, data.x.x.sum, theta.hat)
if(est.hat.new$theta.hat$llh > llh){
est.hat <- est.hat.new
llh <- est.hat.new$theta.hat$llh
}
}
return(est.hat)
}
library(cluster)
fit.CLMM.simple.start.NA.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, n.clst, start=1){
J <- dim(data.y1)[1]
m1 <- dim(data.x1)[1]
m2 <- dim(data.x2)[1]
P <- dim(data.x1)[3]
Q <- dim(data.z1)[3]
beta.hat <- matrix(0, nrow=J, ncol=P)
temp.x <- data.x1[1,,]
if(m1>1){for(i in 2:m1){
temp.x <- rbind(temp.x, data.x1[i,,])
}}
if(m2>0){for(i in 1:m2){
temp.x <- rbind(temp.x, data.x2[i,,])
}}
temp <- solve(t(temp.x) %*% temp.x) %*% t(temp.x)
for(j in 1:J){
beta.hat[j,] <- temp %*% c(as.vector(t(data.y1[j,,])),as.vector(t(data.y2[j,,])))
}
if(start>1) {
temp <- sample(J, n.clst)
zeta.hat <- beta.hat[temp,]
}
if(start<=1) {
temp <- pam(beta.hat, n.clst)
zeta.hat <- temp$medoids
}
D.hat <- rep(1, n.clst)
sigma2.hat <- rep(1, n.clst)
pi.hat <- rep(1/n.clst, n.clst)
return(list(zeta.hat=zeta.hat, D.hat=D.hat, sigma2.hat=sigma2.hat, pi.hat=pi.hat))
}
fit.CLMM.simple.EM.NA.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2,
na.flag1, na.flag2, data.x.x.sum, theta.hat){
J <- dim(data.y1)[1]
m1 <- dim(data.y1)[2]
L1 <- dim(data.y1)[3]
m2 <- dim(data.y2)[2]
L2 <- dim(data.y2)[3]
P <- dim(data.x1)[3]
Q <- dim(data.z1)[3]
K <- length(theta.hat$pi.hat)
llh.old <- -9999999999
llh <- -9999999990
while(llh-llh.old>0.1){
hats <- compute.Ehats.CLMM.simple.NA.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, na.flag1, na.flag2,
theta.hat, J, m1, L1, m2, L2, K, Q)
temp <- compute.theta.hat.CLMM.simple.NA.2(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, na.flag1, na.flag2,
data.x.x.sum, hats, J, m1, L1, m2, L2, K, P, Q)
llh.old <- llh
if(!is.na(temp$llh) & temp$llh > llh){
theta.hat <- temp
llh <- temp$llh
print(llh)
}
}
return(list(u.hat=hats$u.hat, b.hat.1=hats$b.hat.1, b.hat.2=hats$b.hat.2, theta.hat=theta.hat))
}
compute.Ehats.CLMM.simple.NA.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, na.flag1, na.flag2,
theta.hat, J, m1, L1, m2, L2, K, Q){
zeta.hat <- theta.hat$zeta.hat
D.hat <- theta.hat$D.hat
sigma2.hat <- theta.hat$sigma2.hat
pi.hat <- theta.hat$pi.hat
V.1 <- compute.V.simple.NA.2(data.z1, D.hat, sigma2.hat, J, m1, L1)
V.2 <- compute.V.simple.NA.2(data.z2, D.hat, sigma2.hat, J, m2, L2)
pResid.1 <- compute.pResid.CLMM.simple.NA.2(data.x1, data.y1, zeta.hat, J, m1, L1, K)
pResid.2 <- compute.pResid.CLMM.simple.NA.2(data.x2, data.y2, zeta.hat, J, m2, L2, K)
u.hat <- compute.u.hat.CLMM.simple.NA.2(pResid.1, V.1, pResid.2, V.2, pi.hat, J, m1, m2, K, na.flag1, na.flag2)
b.hat.1 <- compute.b.hat.CLMM.simple.NA.2(data.z1, pResid.1, D.hat, V.1, J, m1, Q, K, na.flag1)
b.hat.2 <- compute.b.hat.CLMM.simple.NA.2(data.z2, pResid.2, D.hat, V.2, J, m2, Q, K, na.flag2)
b2.hat <- compute.b2.hat.CLMM.simple.NA.2(data.z1, data.z2, u.hat, b.hat.1, b.hat.2, D.hat, V.1, V.2,
J, m1, m2, Q, K, na.flag1, na.flag2)
e.hat.1 <- compute.e.hat.CLMM.simple.NA.2(data.z1, b.hat.1, pResid.1, J, m1, L1, K)
e.hat.2 <- compute.e.hat.CLMM.simple.NA.2(data.z2, b.hat.2, pResid.2, J, m2, L2, K)
e2.hat <- compute.e2.hat.CLMM.simple.NA.2(data.z1, e.hat.1, data.z2, e.hat.2,
D.hat, V.1, V.2, sigma2.hat, J, m1, L1, m2, L2, K, na.flag1, na.flag2)
return(list(u.hat=u.hat, b.hat.1=b.hat.1, b.hat.2=b.hat.2, b2.hat=b2.hat, e2.hat=e2.hat))
}
compute.theta.hat.CLMM.simple.NA.2 <- function(data.x1, data.y1, data.z1, data.x2, data.y2, data.z2, na.flag1, na.flag2,
data.x.x.sum, hats, J, m1, L1, m2, L2, K, P, Q){
u.hat <- hats$u.hat
b.hat.1 <- hats$b.hat.1
b.hat.2 <- hats$b.hat.2
b2.hat <- hats$b2.hat
e2.hat <- hats$e2.hat
pi.hat <- apply(u.hat, FUN=sum, MARGIN=2)/J
zeta.hat <- matrix(0, nrow=K, ncol=P)
D.hat <- rep(0, K)
sigma2.hat <- rep(0, K)
for(k in 1:K){
zeta.num <- 0
zeta.den <- 0
for(j in 1:J){
for(i in 1:m1){
temp1 <- !(na.flag1[j,i,])
temp2 <- t(data.x1[i,temp1,])%*%(data.y1[j,i,temp1]-as.matrix(data.z1[i,temp1,])%*%b.hat.1[j,i,,k])
zeta.num <- zeta.num + u.hat[j,k]*temp2
}
for(i in 1:m2){
temp1 <- !(na.flag2[j,i,])
temp2 <- t(data.x2[i,temp1,])%*%(data.y2[j,i,temp1]-as.matrix(data.z2[i,temp1,])%*%b.hat.2[j,i,,k])
zeta.num <- zeta.num + u.hat[j,k]*temp2
}
zeta.den <- zeta.den + u.hat[j,k]*data.x.x.sum[j,,]
}
zeta.hat[k,] <- solve(zeta.den, tol=1e-50) %*% zeta.num
D.hat[k] <- sum(u.hat[,k]*b2.hat[,k])/((m1+m2)*Q*sum(u.hat[,k]))
sigma2.hat[k] <- sum(u.hat[,k]*e2.hat[,k])/((m1*L1+m2*L2)*sum(u.hat[,k]))
}
V.1 <- compute.V.simple.NA.2(data.z1, D.hat, sigma2.hat, J, m1, L1)
V.2 <- compute.V.simple.NA.2(data.z2, D.hat, sigma2.hat, J, m2, L2)
pResid.1 <- compute.pResid.CLMM.simple.NA.2(data.x1, data.y1, zeta.hat, J, m1, L1, K)
pResid.2 <- compute.pResid.CLMM.simple.NA.2(data.x2, data.y2, zeta.hat, J, m2, L2, K)
llh <- compute.llh.CLMM.NA.2(pResid.1, V.1, pResid.2, V.2, pi.hat, J, m1, m2, K, na.flag1, na.flag2)
return(list(zeta.hat=zeta.hat, pi.hat=pi.hat, D.hat=D.hat, sigma2.hat=sigma2.hat, llh=llh))
}
compute.x.z.sum.CLMM.simple.NA.2 <- function(data.x, data.z, na.flag){
m <- dim(data.x)[1]
P <- dim(data.x)[3]
Q <- dim(data.z)[3]
J <- dim(na.flag)[1]
data.x.z.sum <- array(0, dim=c(J,P,Q))
for(j in 1:J){
for(i in 1:m){
temp <- !(na.flag[j,i,])
data.x.z.sum[j,,] <- data.x.z.sum[j,,] + t(data.x[i,temp,]) %*% data.z[i,temp,]
}
}
return(data.x.z.sum)
}
compute.V.simple.NA.2 <- function(data.z, D.hat, sigma2.hat, J, m, L){
K <- length(sigma2.hat)
V <- array(0, dim=c(J, m, L, L, K))
for(k in 1:K){
temp1 <- diag(sigma2.hat[k], L)
for(i in 1:m){
temp2 <- data.z[i,,]
for(j in 1:J){
V[j,i,,,k] <- temp1 + D.hat[k]*(temp2%*%t(temp2))
}
}
}
return(V)
}
compute.pResid.CLMM.simple.NA.2 <- function(data.x, data.y, zeta.hat, J, m, L, K){
pResid <- array(0, dim=c(J, m, L, K))
for(k in 1:K){
for(i in 1:m){
y.hat <- data.x[i,,] %*% zeta.hat[k,]
pResid[,i,,k] <- t(t(data.y[,i,]) - as.vector(y.hat))
}
}
return(pResid)
}
compute.b.hat.CLMM.simple.NA.2 <- function(data.z, pResid, D.hat, V, J, m, Q, K, na.flag){
b.hat <- array(0, dim=c(J, m, Q, K))
for(j in 1:J){
for(i in 1:m){
temp <- !(na.flag[j,i,])
for(k in 1:K){
b.hat[j,i,,k] <- D.hat[k]*t(data.z[i,temp,])%*%solve(V[j,i,temp,temp,k],tol=1e-50)%*%pResid[j,i,temp,k]
}
}
}
return(b.hat)
}
compute.b2.hat.CLMM.simple.NA.2 <- function(data.z1, data.z2, u.hat, b.hat.1, b.hat.2, D.hat, V.1, V.2,
J, m1, m2, Q, K, na.flag1, na.flag2){
b2.hat <- array(0, dim=c(J, K))
for(j in 1:J){
for(k in 1:K){
temp1 <- 0
temp2 <- 0
tau2 <- D.hat[k]
for(i in 1:m1){
temp <- !(na.flag1[j,i,])
temp1 <- temp1 + sum(b.hat.1[j,i,,k]^2)
temp2 <- temp2 + sum(diag(t(data.z1[i,temp,])%*%solve(V.1[j,i,temp,temp,k],tol=1e-50)%*%data.z1[i,temp,]))
}
for(i in 1:m2){
temp <- !(na.flag2[j,i,])
temp1 <- temp1 + sum(b.hat.2[j,i,,k]^2)
temp2 <- temp2 + sum(diag(t(data.z2[i,temp,])%*%solve(V.2[j,i,temp,temp,k],tol=1e-50)%*%data.z2[i,temp,]))
}
b2.hat[j,k] <- temp1 + tau2*Q*(m1+m2) - tau2^2*temp2
}
}
return(b2.hat)
}
compute.e.hat.CLMM.simple.NA.2 <- function(data.z, b.hat, pResid, J, m, L, K){
e.hat <- array(0, dim=c(J,m,L,K))
for(i in 1:m)
for(j in 1:J){
e.hat[j,i,,] <- pResid[j,i,,] - as.matrix(data.z[i,,])%*%b.hat[j,i,,]
}
return(e.hat)
}
compute.e2.hat.CLMM.simple.NA.2 <- function(data.z1, e.hat.1, data.z2, e.hat.2, D.hat, V.1, V.2, sigma2.hat,
J, m1, L1, m2, L2, K, na.flag1, na.flag2){
e2.hat <- array(0, dim=c(J, K))
for(j in 1:J){
for(k in 1:K){
temp1 <- 0
temp2 <- 0
for(i in 1:m1){
temp <- !(na.flag1[j,i,])
temp1 <- temp1 + sum(e.hat.1[j,i,temp,k]^2)
temp2 <- temp2 + sum(diag(solve(V.1[j,i,temp,temp,k],tol=1e-50)))
}
for(i in 1:m2){
temp <- !(na.flag2[j,i,])
temp1 <- temp1 + sum(e.hat.2[j,i,temp,k]^2)
temp2 <- temp2 + sum(diag(solve(V.2[j,i,temp,temp,k],tol=1e-50)))
}
e2.hat[j,k] <- temp1 + sigma2.hat[k]*(L1*m1+L2*m2) - (sigma2.hat[k])^2*temp2
}
}
return(e2.hat)
}
compute.u.hat.CLMM.simple.NA.2 <- function(pResid.1, V.1, pResid.2, V.2, pi.hat, J, m1, m2, K, na.flag1, na.flag2){
log.u.hat.num <- matrix(0, nrow=J, ncol=K)
for(k in 1:K){
for(j in 1:J){
temp1 <- 0
for(i in 1:m1){
temp <- !(na.flag1[j,i,])
temp1 <- temp1 + mvn.dnorm.log.NA.2(pResid.1[j,i,temp,k], solve(V.1[j,i,temp,temp,k],tol=1e-50))
}
for(i in 1:m2){
temp <- !(na.flag2[j,i,])
temp1 <- temp1 + mvn.dnorm.log.NA.2(pResid.2[j,i,temp,k], solve(V.2[j,i,temp,temp,k],tol=1e-50))
}
log.u.hat.num[j,k] <- log(pi.hat[k]) + temp1
}
}
u.hat.num <- exp(t(apply(log.u.hat.num, MARGIN=1, FUN=all.ceiling.NA.2)))
u.hat.den <- apply(u.hat.num, FUN=sum, MARGIN=1)
u.hat <- u.hat.num/u.hat.den
return(u.hat)
}
all.ceiling.NA.2 <- function(aVector, cutoff=600){
xx <- max(aVector)
aVector <- aVector - xx + cutoff
return(aVector)
}
mvn.dnorm.log.NA.2 <- function(aVector, var.inv){
L <- length(aVector)
y <- matrix(aVector, nrow=L)
log.dens <- log(abs(det(var.inv)))/2 + (log(2*pi)*(-L/2)) + ((-1/2) * t(y) %*% var.inv %*% y)
return(log.dens)
}
compute.llh.CLMM.NA.2 <- function(pResid.1, V.1, pResid.2, V.2, pi.hat, J, m1, m2, K, na.flag1, na.flag2){
llh <- 0
for(j in 1:J){
temp.log <- rep(0, K)
for(k in 1:K){
temp.ind <- 0
for(i in 1:m1){
temp <- !(na.flag1[j,i,])
temp.ind <- temp.ind + mvn.dnorm.log.NA.2(pResid.1[j,i,temp,k], solve(V.1[j,i,temp,temp,k],tol=1e-50))
}
for(i in 1:m2){
temp <- !(na.flag2[j,i,])
temp.ind <- temp.ind + mvn.dnorm.log.NA.2(pResid.2[j,i,temp,k], solve(V.2[j,i,temp,temp,k],tol=1e-50))
}
temp.log[k] <- temp.ind
}
temp.log.max <- max(temp.log)
temp <- exp(temp.log - temp.log.max) %*% pi.hat
llh <- llh + log(temp) + temp.log.max
}
return(llh)
}
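# Hedged usage sketch with simulated arrays; the dimension conventions
# (inferred from the indexing above) are data.y: J x m x L
# (unit x sample x time), data.x: m x L x P, data.z: m x L x Q.
set.seed(1)
J <- 10; m <- 2; L <- 5
data.x <- array(c(rep(1, m * L), rep(1:L, each = m)), dim = c(m, L, 2))  # intercept + time
data.z <- array(1, dim = c(m, L, 1))                                     # random intercept
data.y1 <- array(rnorm(J * m * L), dim = c(J, m, L))
data.y2 <- array(rnorm(J * m * L), dim = c(J, m, L))
# fit <- fit.CLMM.2(data.y1, data.x, data.z, data.y2, data.x, data.z, n.clst = 2)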
|
sa_pairing_generalized <- function(block, total_items, Temperature, eta_Temperature = 0.01,
r = 0.999, end_criteria = 10^(-6),
item_chars, weights, FUN, n_exchange = 2, prob_newitem = 0.25,
use_IIA = FALSE, rater_chars,
iia_weights = c(BPlin = 1, BPquad = 1, AClin = 1, ACquad = 1)) {
if (missing(block)) {
block <- make_random_block(nrow(item_chars), item_per_block = 2)
}
if (missing(FUN)) {
types <- sapply(item_chars, class)
types <- replace(types, types == "factor" | types == "character", "facfun")
types <- replace(types, types == "numeric", "var")
FUN <- types
}
if (missing(weights)) {
weights <- rep(1, ncol(item_chars))
}
if (missing(total_items)) {
total_items <- length(unique(block))
}
if (r >= 1 | r < 0) {
stop("Invalid value for r: Should be a value between 0 and 1.")
}
if (end_criteria >= 1 | end_criteria < 0) {
stop("Invalid value for end_criteria: Should be a value between 0 and 1.")
}
if (use_IIA & missing(rater_chars)) {
stop("Item responses required if use_IIA = TRUE.")
}
if (prob_newitem > 1 | prob_newitem < 0) {
stop("Invalid value for prob_newitem: Should be a value between 0 and 1.")
}
if (eta_Temperature < 0){
stop("Invalid value for eta_Temperature: Should be a value larger than 0.")
}
if (!(n_exchange %% 1 == 0) | n_exchange > nrow(block) | n_exchange < 2) {
stop("Invalid value for n_exchange: Should be an integer between 2 and nrow(block)")
}
block0 <- block
energy0 <- ifelse(!use_IIA, cal_block_energy(block, item_chars, weights, FUN),
cal_block_energy_with_iia(block, item_chars, weights, FUN,
rater_chars = rater_chars, iia_weights = iia_weights))
energy <- energy0
if (missing(Temperature)) {
Temperature <- eta_Temperature * abs(energy0)
}
T0 <- Temperature
all_item_used <- length(unique(c(block))) == total_items
if (all_item_used) {
while (Temperature > end_criteria * T0) {
sample_index <- sample(1:nrow(block), n_exchange)
sample_block <- block[sample_index,]
sample_energy <- ifelse(!use_IIA, cal_block_energy(sample_block, item_chars, weights, FUN),
cal_block_energy_with_iia(sample_block, item_chars, weights, FUN, rater_chars, iia_weights))
l <- length(sample_block)
exchanged_items <- sample(sample_block,l)
exchanged_block <- matrix(exchanged_items, nrow = n_exchange, byrow = TRUE)
exchanged_energy <- ifelse(!use_IIA, cal_block_energy(exchanged_block, item_chars, weights, FUN),
cal_block_energy_with_iia(exchanged_block, item_chars, weights, FUN, rater_chars, iia_weights))
# Accept if the exchange does not decrease the energy, or with the usual
# annealing probability exp((new - old) / Temperature) otherwise.
if (exchanged_energy >= sample_energy ||
exp((exchanged_energy - sample_energy)/Temperature) > runif(1)) {
for (i in seq_len(n_exchange)) {
block[sample_index[i],] <- exchanged_block[i,]
}
energy <- energy + exchanged_energy - sample_energy
}
Temperature <- Temperature * r
}
}
else {
while (Temperature > end_criteria * T0) {
if (prob_newitem > runif(1)) {
unused_items <- setdiff(seq(1:total_items), block)
sample_index <- sample(nrow(block),1)
sample_block <- block[sample_index,]
sample_energy <- ifelse(!use_IIA, cal_block_energy(sample_block, item_chars, weights, FUN),
cal_block_energy_with_iia(sample_block, item_chars, weights, FUN, rater_chars, iia_weights))
picked_item <- sample(unused_items, 1)
exchanged_block <- sample_block
exchanged_block[1] <- picked_item
exchanged_energy <- ifelse(!use_IIA, cal_block_energy(exchanged_block, item_chars, weights, FUN),
cal_block_energy_with_iia(exchanged_block, item_chars, weights, FUN, rater_chars, iia_weights))
if (exchanged_energy >= sample_energy ||
exp((exchanged_energy - sample_energy)/Temperature) > runif(1)) {
block[sample_index[1],] <- exchanged_block
energy <- energy + exchanged_energy - sample_energy
}
Temperature <- Temperature * r
}
else {
sample_index <- sample(1:nrow(block), n_exchange)
sample_block <- block[sample_index,]
sample_energy <- ifelse(!use_IIA, cal_block_energy(sample_block, item_chars, weights, FUN),
cal_block_energy_with_iia(sample_block, item_chars, weights, FUN, rater_chars, iia_weights))
l <- length(sample_block)
exchanged_items <- sample(sample_block,l)
exchanged_block <- matrix(exchanged_items, nrow = n_exchange, byrow = TRUE)
exchanged_energy <- ifelse(!use_IIA, cal_block_energy(exchanged_block, item_chars, weights, FUN),
cal_block_energy_with_iia(exchanged_block, item_chars, weights, FUN, rater_chars, iia_weights))
if (exchanged_energy >= sample_energy ||
exp((exchanged_energy - sample_energy)/Temperature) > runif(1)) {
for (i in seq_len(n_exchange)) {
block[sample_index[i],] <- exchanged_block[i,]
}
energy <- energy + exchanged_energy - sample_energy
}
Temperature <- Temperature * r
}
}
}
return(list(block_initial = block0, energy_initial = energy0, block_final = block, energy_final = energy))
}
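# Hedged usage sketch (make_random_block and the cal_block_energy* helpers
# are assumed to be defined elsewhere in this codebase): optimise a random
# pairing of 60 items into blocks of two.
# item_chars <- data.frame(difficulty = rnorm(60),
#                          dimension = factor(sample(1:5, 60, replace = TRUE)))
# sol <- sa_pairing_generalized(total_items = 60, item_chars = item_chars)
# sol$energy_final - sol$energy_initial  # gain achieved by the annealing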
|
vlfFun <-
function(x, p=0.001, seqlength=648, own = NULL){
species.names <- x[,2]
specimen.Number <- nrow(x)
rownames(x) <- species.names
Nuc.count <- count.function(x, specimen.Number,seqlength)
frequency.matrix <- ffrequency.matrix.function(Nuc.count,seqlength)
spec.freq <- specimen.frequencies(frequency.matrix, x, specimen.Number, species.names,seqlength)
nucleotide.modalSequence <- MODE(frequency.matrix,seqlength)
first.modal.frequencies <- MODE.freq(frequency.matrix,seqlength)
second.modal.frequencies <- MODE.second.freq(frequency.matrix,seqlength)
First_conserved_100 <- conservation_first(first.modal.frequencies, 1,seqlength)
First_conserved_99.9 <- conservation_first(first.modal.frequencies, (1-p),seqlength)
FirstAndSecond_conserved_99.9 <- conservation_two(first.modal.frequencies, second.modal.frequencies, (1-p),seqlength)
specimen_VLFcount <- VLF.count.spec(spec.freq, p,seqlength)
position_VLFcount <- VLF.count.pos(spec.freq, p,seqlength)
VLFconvert <- VLF.convert.matrix(x, spec.freq, p,seqlength)
VLFnuc <- VLF.nucleotides(VLFconvert, x,seqlength)
VLFreduced <- VLF.reduced(VLFnuc, specimen_VLFcount, seqlength)
species <- separate(VLFreduced)
singleAndShared <- find.singles(species,seqlength)
if(is.null(own)){
foo <- list(modal = nucleotide.modalSequence, con100 = First_conserved_100,
conp = First_conserved_99.9, combine = FirstAndSecond_conserved_99.9,
specimen = specimen_VLFcount, position = position_VLFcount,
sas = singleAndShared, VLFmatrix = VLFreduced)
class(foo)<-"vlf"
foo
}
else{
ownspec.freq <- specimen.frequencies(frequency.matrix, own, nrow(own), own[,2], seqlength)
ownspec.VLFcount <- VLF.count.spec(ownspec.freq, p, seqlength)
ownpos.VLFcount <- VLF.count.pos(ownspec.freq, p, seqlength)
own.VLFconvert <- VLF.convert.matrix(own, ownspec.freq, p, seqlength)
own.VLFnuc <- VLF.nucleotides(own.VLFconvert, own, seqlength)
own.VLFreduced <- VLF.reduced(own.VLFnuc, ownspec.VLFcount, seqlength)
foo <- list(modal = nucleotide.modalSequence, con100 = First_conserved_100,
conp = First_conserved_99.9, combine = FirstAndSecond_conserved_99.9,
specimen = specimen_VLFcount, position = position_VLFcount,
sas = singleAndShared, VLFmatrix = VLFreduced,
ownSpecCount = ownspec.VLFcount, ownPosCount = ownpos.VLFcount,
ownVLFMatrix = own.VLFnuc, ownVLFreduced = own.VLFreduced)
class(foo)<-"vlf"
foo
}
}
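# Hedged usage sketch: x is expected to be an aligned-sequence matrix with
# species names in column 2 (see x[,2] above); the 'birds' alignment is
# assumed to ship with the surrounding VLF-style package.
# data(birds)
# out <- vlfFun(birds, p = 0.001)
# out$specimen  # VLF counts per specimen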
|
test_that("Finds test files", {
expect_equal(findTests("example_test_dirs/simple/tests"), c("testa.r", "testb.R"))
expect_equal(findTests("example_test_dirs/"), character(0))
})
test_that("Filters out based on given test names", {
expect_equal(findTests("example_test_dirs/simple/tests", "testa"), c("testa.r"))
expect_equal(findTests("example_test_dirs/simple/tests", "testb.r"), c("testb.R"))
expect_error(findTests("example_test_dirs/simple/tests", "i don't exist"))
})
test_that("findTestsDir works", {
expect_match(suppressMessages(findTestsDir(test_path("example_test_dirs/simple/"))), "/tests$")
expect_message(findTestsDir(test_path("example_test_dirs/simple/"), quiet=FALSE), "shinytests should be placed in")
expect_match(findTestsDir(test_path("example_test_dirs/nested/")), "/shinytest$")
endir <- expect_warning(findTestsDir(test_path("example_test_dirs/empty-nested/"), quiet=FALSE), "there are some shinytests in")
expect_match(endir, "/shinytest$")
expect_match(suppressMessages(findTestsDir(test_path("example_test_dirs/empty-toplevel/"), mustExist=FALSE)), "/tests/shinytest$")
expect_error(findTestsDir(test_path("example_test_dirs/empty-toplevel/"), mustExist=TRUE), "should be placed in tests/shinytest")
expect_error(findTestsDir(test_path("example_test_dirs/mixed-toplevel/")))
expect_match(findTestsDir(test_path("example_test_dirs/"), mustExist=FALSE), "/shinytest$")
expect_match(findTestsDir(test_path("example_test_dirs/nested/tests"), mustExist=FALSE), "/nested/tests/shinytest$")
})
test_that("isShinyTest works", {
expect_false(isShinyTest("blah"))
expect_true(isShinyTest("app<-ShinyDriver$new()"))
expect_true(isShinyTest(c("blah", "app<-ShinyDriver$new()")))
expect_true(isShinyTest("app\t<- ShinyDriver$new("))
})
|
ccdplot <-
function (x, remove.absolute = NA, remove.ratio = NA, drawcomposite = TRUE,
jump = NA, xlab = "Observations", ylab = "Cumulatives", ...)
{
check.remove.params(remove.absolute, remove.ratio)
if (!is.list(x)) {
x.removed <- remove.vector(x, remove.absolute, remove.ratio)
x.sortsum <- calculate.sortsum(x.removed)
plot(1:length(x.sortsum), x.sortsum, type = "l", xlab = xlab,
ylab = ylab, ...)
abline(h = 0, lty = "dotted")
}
else {
x.removed <- remove.list(x, remove.absolute, remove.ratio)
characteristics.number <- length(x.removed)
x.composite <- c()
x.sortsum <- list()
absoluteMin <- Inf
absoluteMax <- -Inf
for (i in 1:characteristics.number) {
x.sortsum[[i]] <- calculate.sortsum(x.removed[[i]])
x.composite <- c(x.composite, x.removed[[i]])
actualMin <- min(x.sortsum[[i]])
if (!is.na(actualMin) && actualMin < absoluteMin) {
absoluteMin <- actualMin
}
actualMax <- max(x.sortsum[[i]])
if (!is.na(actualMax) && actualMax > absoluteMax) {
absoluteMax <- actualMax
}
}
x.composite.sortsum <- calculate.sortsum(x.composite)
actualMin <- min(x.composite.sortsum)
if (actualMin < absoluteMin) {
absoluteMin <- actualMin
}
actualMax <- max(x.composite.sortsum)
if (actualMax > absoluteMax) {
absoluteMax <- actualMax
}
if (is.na(jump)) {
jump <- length(x.composite.sortsum)/50
}
xmax <- length(x.composite.sortsum)
if (!drawcomposite) {
xmax <- xmax + (characteristics.number - 1) * jump
}
plot(c(1, xmax), c(absoluteMin, absoluteMax), xlab = xlab,
ylab = ylab, type = "n", ...)
abline(h = 0, lty = "dotted")
cumulativeLength <- 0
loopend <- characteristics.number
if (drawcomposite) {
points(1:length(x.composite.sortsum), x.composite.sortsum,
type = "l", ...)
loopend <- loopend - 1
}
for (i in 1:loopend) {
x.from <- i * jump + 1 + cumulativeLength
x.to <- i * jump + cumulativeLength + length(x.sortsum[[i]])
if (!drawcomposite) {
x.from <- x.from - jump
x.to <- x.to - jump
}
points(x.from:x.to, x.sortsum[[i]], type = "l", ...)
cumulativeLength <- cumulativeLength + length(x.sortsum[[i]])
}
if (drawcomposite) {
points((-jump + 1 + cumulativeLength):(-jump + cumulativeLength +
length(x.sortsum[[characteristics.number]])),
x.sortsum[[characteristics.number]], type = "l",
...)
}
}
}
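# Hedged usage sketch (check.remove.params, remove.vector, remove.list and
# calculate.sortsum are assumed package internals): a numeric vector draws
# a single cumulative curve; a list draws the composite curve plus one
# curve per component.
# ccdplot(rnorm(200))
# ccdplot(list(g1 = rnorm(200), g2 = rnorm(200, mean = 0.5)))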
|
# Maximum-likelihood estimation of an NB1 (linear) negative binomial
# regression; alpha is the NB1 heterogeneity parameter.
ml.nb1 <- function(formula, data, offset = 0, start = NULL, verbose = FALSE) {
mf <- model.frame(formula, data)
mt <- attr(mf, "terms")
y <- model.response(mf, "numeric")
nb1X <- model.matrix(formula, data = data)
nb1.reg.ml <- function(b.hat, X, y) {
a.hat <- b.hat[1]
xb.hat <- X %*% b.hat[-1] + offset
mu.hat <- exp(xb.hat)
r.hat <- (1/a.hat) * mu.hat
sum(dnbinom(y,
size = r.hat,
mu = mu.hat,
log = TRUE))
}
if (is.null(start))
start <- c(0.5, -1, rep(0, ncol(nb1X) - 1))
fit <- optim(start,
nb1.reg.ml,
X = nb1X,
y = y,
control = list(
fnscale = -1,
maxit = 10000),
hessian = TRUE
)
if (verbose | fit$convergence > 0) print(fit)
beta.hat <- fit$par
se.beta.hat <- sqrt(diag(solve(-fit$hessian)))
results <- data.frame(Estimate = beta.hat,
SE = se.beta.hat,
Z = beta.hat / se.beta.hat,
LCL = beta.hat - 1.96 * se.beta.hat,
UCL = beta.hat + 1.96 * se.beta.hat)
rownames(results) <- c("alpha", colnames(nb1X))
results <- results[c(2:nrow(results), 1),]
return(results)
}
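# Hedged usage sketch (the medpar hospital-stay data are assumed to come
# from the COUNT package):
# library(COUNT); data(medpar)
# ml.nb1(los ~ hmo + white + type2 + type3, data = medpar)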
|
print.glm.i <-
function(x, ...)
{
atrass <- attr(x,"assign")
if (!is.null(atrass)) attr(x,"assign") <- NULL
atrsng <- attr(x,"singular")
if (!is.null(atrsng)) attr(x,"singular") <- NULL
class(x) <- NULL
NextMethod(...)
if (!is.null(atrass)) cat(atrass,"\n")
if (!is.null(atrsng)) cat(atrsng,"\n")
invisible(x)
}
|
plot_lev <- function(model, type, smoother, theme, axis.text.size, title.text.size, title.opt){
Leverage = hatvalues(model)
cutoff = 0.999999999
one_lev = sum(Leverage >= cutoff) > 0
range_lev <- range(subset(Leverage, Leverage < cutoff), na.rm = TRUE)
const_lev <- all(range_lev == 0) || diff(range_lev) < 1e-10 * mean(subset(Leverage, Leverage < cutoff), na.rm = TRUE)
if(const_lev){
plot_constlev(model = model,
type = type,
theme = theme,
axis.text.size = axis.text.size,
title.text.size = title.text.size,
title.opt = title.opt)
} else {
if(one_lev){
warning("Observations with a leverage value of 1 are not included
in the residuals versus leverage plot.")
}
model_values <- data.frame(Leverage = hatvalues(model))
if (class(model)[1] == "lm") {
if (one_lev){
model_values$Std_Res = suppressWarnings(helper_resid(model, type = "standardized"))
} else {
model_values$Std_Res = helper_resid(model, type = "standardized")
}
} else if (class(model)[1] == "glm"){
if(is.na(type) | type == "response" | type == "deviance" | type == "stand.deviance"){
if (one_lev){
model_values$Std_Res = suppressWarnings(helper_resid(model, type = "stand.deviance"))
} else {
model_values$Std_Res = helper_resid(model, type = "stand.deviance")
}
} else if (type == "pearson" | type == "stand.pearson"){
if (one_lev){
model_values$Std_Res = suppressWarnings(helper_resid(model, type = "stand.pearson"))
} else {
model_values$Std_Res = helper_resid(model, type = "stand.pearson")
}
}
}
p <- model$rank
hat_seq <- seq.int(min(range_lev[1], range_lev[2]/100),
range_lev[2],
length.out = 100)
xlimits <- c(0, max(subset(model_values$Leverage, model_values$Leverage < cutoff), na.rm = TRUE))
ylimits <- extendrange(range(subset(model_values$Std_Res, model_values$Leverage < cutoff), na.rm = TRUE), f = 0.1)
cooksd_contours <- data.frame(case = rep(c("pos_0.5", "neg_0.5", "pos_1", "neg_1"), each = length(hat_seq)),
hat_seq = rep(hat_seq, 4),
stdres = c(sqrt(0.5 * p * (1 - hat_seq) / hat_seq),
-sqrt(0.5 * p * (1 - hat_seq) / hat_seq),
sqrt(1 * p * (1 - hat_seq) / hat_seq),
-sqrt(1 * p * (1 - hat_seq) / hat_seq)))
cooksd_contours <- subset(cooksd_contours, cooksd_contours$hat_seq <= xlimits[2] &
cooksd_contours$stdres <= ylimits[2] &
cooksd_contours$stdres >= ylimits[1])
if(class(model)[1] == "lm"){
r_label <- helper_label(type = "standardized", model)
} else if (class(model)[1] == "glm"){
if(is.na(type) | type == "response" | type == "deviance" | type == "stand.deviance"){
r_label <- helper_label(type = "stand.deviance", model)
} else if (type == "pearson" | type == "stand.pearson"){
r_label <- helper_label(type = "stand.pearson", model)
}
}
Data <- helper_plotly_label(model)
model_values$Data <- Data
model_values <- subset(model_values, model_values$Leverage < cutoff)
plot <- ggplot() +
labs(x = "Leverage", y = r_label) +
expand_limits(x = 0) +
geom_point(data = model_values,
mapping = aes_string(x = "Leverage", y = "Std_Res", group = "Data"),
na.rm = TRUE) +
geom_hline(yintercept = 0, linetype = "dashed") +
geom_vline(xintercept = 0, linetype = "dashed") +
scale_x_continuous(limits = xlimits) +
scale_y_continuous(limits = ylimits) +
geom_text(aes(x = 2.25 * min(model_values$Leverage, na.rm = TRUE),
y = 1.1 * min(model_values$Std_Res, na.rm = TRUE)),
label = "- - - Cook's distance contours", color = "red", size = 3)
if (smoother == TRUE){
plot <- plot +
geom_smooth(data = model_values,
aes_string(x = "Leverage", y = "Std_Res"),
na.rm = TRUE,
method = "loess",
se = FALSE,
color = "red",
size = 0.5)
}
if (dim(cooksd_contours)[1] > 0) {
plot <- plot +
geom_line(data = cooksd_contours,
mapping = aes_string(x = "hat_seq", y = "stdres", group = "case"),
color = "red", linetype = "dashed", na.rm = TRUE)
}
xlable <- max(model_values$Leverage, na.rm = TRUE)
ylable_pos_0.5 <- 1.05 * sqrt(0.5 * p * (1 - max(model_values$Leverage, na.rm = TRUE)) /
max(model_values$Leverage, na.rm = TRUE))
ylable_neg_0.5 <- 1.05 * -sqrt(0.5 * p * (1 - max(model_values$Leverage, na.rm = TRUE)) /
max(model_values$Leverage, na.rm = TRUE))
ylable_pos_1 <- 1.05 * sqrt(1 * p * (1 - max(model_values$Leverage, na.rm = TRUE)) /
max(model_values$Leverage, na.rm = TRUE))
ylable_neg_1 <- 1.05 * -sqrt(1 * p * (1 - max(model_values$Leverage, na.rm = TRUE)) /
max(model_values$Leverage, na.rm = TRUE))
if (ylable_pos_0.5 <= ylimits[2]){
plot <- plot + geom_text(aes(x = xlable, y = ylable_pos_0.5), label = "0.5", color = "red", size = 3)
}
if (ylable_neg_0.5 >= ylimits[1]){
plot <- plot + geom_text(aes(x = xlable, y = ylable_neg_0.5), label = "0.5", color = "red", size = 3)
}
if (ylable_pos_1 <= ylimits[2]){
plot <- plot + geom_text(aes(x = xlable, y = ylable_pos_1), label = "1", color = "red", size = 3)
}
if (ylable_neg_1 >= ylimits[1]){
plot <- plot + geom_text(aes(x = xlable, y = ylable_neg_1), label = "1", color = "red", size = 3)
}
if (theme == "bw"){
plot <- plot + theme_bw()
} else if (theme == "classic"){
plot <- plot + theme_classic()
} else if (theme == "gray" | theme == "grey"){
plot <- plot + theme_grey()
}
if(title.opt == TRUE){
plot +
labs(title = "Residual-Leverage Plot") +
theme(plot.title = element_text(size = title.text.size, face = "bold"),
axis.title = element_text(size = axis.text.size))
} else if (title.opt == FALSE){
plot + theme(axis.title = element_text(size = axis.text.size))
}
}
}
|
library(gtools)  # provides mixedsort(), used below (assumed dependency)
format_holos <- function(path.data) {
options(warn = -1)
nbsubj <- length(list.files(path.data))
exemple <- read.table(paste(path.data, list.files(path.data)[1], "001_data.txt", sep = "/"), sep = ",", header = FALSE)
exemple[, 5] <- as.factor(exemple[, 5])
nbstim <- nlevels(exemple[, 5])
name.subjects <- as.data.frame(matrix(NA, 1, nbsubj))
rownames(name.subjects) <- c("name")
raw.datadigit <- as.data.frame(matrix(NA, 1, 6))
colnames(raw.datadigit) <- c("subject", "step", "stimulus", "time", "coordX", "coordY")
datadigit <- list()
steps.by.subject <- list()
datafinal_coord <- as.data.frame(matrix(NA, nbstim, 2 * nbsubj))
rownames(datafinal_coord) <- levels(exemple[, 5])
colnames(datafinal_coord) <- paste(paste(rep("S", 2 * nbsubj), rep(1 : nbsubj, each = 2), sep = ""), rep(c("coordX", "coordY"), nbsubj), sep = "_")
datafinal_verb <- as.data.frame(matrix(NA, nbstim, nbsubj))
rownames(datafinal_verb) <- levels(exemple[, 5])
colnames(datafinal_verb) <- paste(rep("S", nbsubj), 1 : nbsubj, sep = "")
for (i in 1 : nbsubj) {
name_subj <- mixedsort(sort(list.files(path.data)))[i]
name.subjects[1, i] <- name_subj
colnames(name.subjects)[i] <- paste("S", i, sep = "")
file_subj <- list.files(paste(path.data, name_subj, sep = "/"))
digit_subj <- read.table(paste(path.data, name_subj, "001_data.txt", sep = "/"), sep = ",", header = FALSE)
digit_subj <- digit_subj[, -c(1, 4)]
colnames(digit_subj) <- c("coordX", "coordY", "stimulus", "time")
digit_subj$stimulus <- as.factor(digit_subj$stimulus)
step0 <- as.data.frame(matrix(NA, nlevels(digit_subj$stimulus), ncol(digit_subj)))
for (j in 1 : nlevels(digit_subj$stimulus)) {
stim <- levels(digit_subj$stimulus)[j]
steps.stim <- digit_subj[which(digit_subj$stimulus == stim), ]
step0[j, 1 : 2] <- steps.stim[1, 1 : 2]
step0[j, 3] <- as.character(steps.stim[1, 3])
step0[j, 4] <- as.character(steps.stim[1, 4])
}
datastep <- cbind.data.frame(rep(0, nrow(step0)), step0)
colnames(datastep) <- c("step", colnames(digit_subj))
change <- vector()
change[1] <- 0
stimulus_int <- as.integer(digit_subj$stimulus)
for(j in 2 : length(stimulus_int)) {
change[j] <- stimulus_int[j] - stimulus_int[j-1]
}
change[length(stimulus_int)] <- 1
stimchange <- which(change != 0)
step.inf <- datastep
for(j in 1 : length(stimchange)) {
step <- as.data.frame(matrix(NA, 1, ncol(datastep)))
colnames(step) <- colnames(datastep)
position <- stimchange[j]
step$step <- j
if(position == nrow(digit_subj)) {
stim.step <- as.character(digit_subj[position, 3])
step[1, 2 : 3] <- digit_subj[position, 1 : 2]
step[1, 4] <- stim.step
step$time <- as.character(digit_subj[position, 4])
} else {
stim.step <- as.character(digit_subj[position - 1, 3])
step[1, 2 : 3] <- digit_subj[position - 1, 1 : 2]
step[1,4] <- stim.step
step$time <- as.character(digit_subj[position - 1, 4])
}
step.inf <- rbind.data.frame(step.inf, step)
}
datadigit_subj <- cbind.data.frame(rep(i, nrow(step.inf)), step.inf)
colnames(datadigit_subj)[1] <- "subject"
datadigit_subj <- datadigit_subj[, c(1, 2, 5, 6, 3, 4)]
raw.datadigit <- rbind.data.frame(raw.datadigit, datadigit_subj)
datadigit_subj$step <- as.factor(datadigit_subj$step)
datadigit_subj$stimulus <- as.factor(datadigit_subj$stimulus)
ini_step <- datadigit_subj[which(datadigit_subj$step == "0"), ]
n_step <- datadigit_subj[which(datadigit_subj$step != "0"), ]
steps.by.s <- as.data.frame(matrix(0, nrow(ini_step), (nrow(n_step)*2+2), list(as.character(ini_step[, "stimulus"])), byrow = TRUE))
steps.by.s[, 1:2] <- ini_step[, c("coordX", "coordY")]
for (j in 1 : nrow(n_step)) {
steps.by.s[, ( 2 * j + 1) : (2 * j + 2)] <- steps.by.s[, (2 * j - 1) : (2 * j)]
temp <- n_step[j, c("coordX", "coordY")]
steps.by.s[as.character(n_step[j, "stimulus"]), (2 * j + 1) : (2 * j + 2)] <- temp
}
colnames(steps.by.s) <- paste(paste(rep("S", ncol(steps.by.s)), rep(i,ncol(steps.by.s)), sep = ""), paste(rep("step", ncol(steps.by.s) / 2 - 1), rep(0 : (ncol(steps.by.s) / 2 - 1), each = 2), rep(c("_X", "_Y"), ncol(steps.by.s) / 2 - 1), sep = ""), sep = ".")
steps.by.subject[[i]] <- steps.by.s
names(steps.by.subject)[i] <- paste("S", i, sep = "")
datadigit_subj$stimulus <- as.factor(datadigit_subj$stimulus)
for (j in 1 : nbstim) {
steps.stim <- datadigit_subj[which(datadigit_subj$stimulus == rownames(datafinal_coord)[j]), ]
datafinal_coord[j, (2 * i - 1) : (2 * i)] <- steps.stim[nrow(steps.stim), 5 : 6]
}
verb_subj <- read.table(paste(path.data, name_subj, "001_comment.txt", sep = "/"), sep = ",", header = FALSE)
verb_subj <- as.matrix(verb_subj)
verb_subj <- verb_subj[- (1 : 6), ]
verb_subj <- strsplit(verb_subj, "::")
verb_subj <- unlist(verb_subj)
list.stim <- as.character(verb_subj)
pos.stim <- which(list.stim%in%rownames(datafinal_verb))
    change.group <- vector()
    change.group[1] <- 0
    # start at j = 2: pos.stim[0] is empty, so the first difference is undefined
    for (j in seq_len(length(pos.stim))[-1]) {
      change.group[j] <- pos.stim[j] - pos.stim[j - 1]
    }
change.group <- pos.stim[which(change.group != 1)]
for (j in 1 : length(change.group)) {
list.stim.group <- vector()
list.verb.group <- ""
if (j != length(change.group)) {
pos.group <- change.group[j] : (change.group[ j + 1] - 1)
} else {
pos.group <- change.group[j] : length(list.stim)
}
for (l in 1 : length(pos.group)) {
if (list.stim[pos.group[l]]%in%rownames(datafinal_verb)) {
list.stim.group[length(list.stim.group) + 1] <- list.stim[pos.group[l]]
} else {
list.verb.group <- paste(list.verb.group, verb_subj[pos.group[l]], sep = "")
}
}
datafinal_verb[which(rownames(datafinal_verb)%in%list.stim.group),i] <- rep(list.verb.group, length(list.stim.group))
}
}
colnames(datafinal_verb) <- paste(colnames(datafinal_verb), rep("_verb", nbsubj), sep = "")
raw.datadigit <- raw.datadigit[-1, ]
datadigit[[1]] <- raw.datadigit
datadigit[[2]] <- steps.by.subject
names(datadigit) <- c("raw.datadigit", "steps.by.subject")
options(warn = 0)
results <- list(name.subjects, datadigit, datafinal_coord, datafinal_verb)
names(results) <- c("IDsubjects", "datadigit", "datafinal_coord", "datafinal_verb")
return(results)
}
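## Hedged usage sketch for format_holos(): assumes a Holos export directory
## with one sub-folder per subject, each holding the raw "001_data.txt" and
## "001_comment.txt" files (the path below is hypothetical).
# res <- format_holos(path.data = "path/to/holos_export")
# res$IDsubjects        # mapping of S1, S2, ... to subject folder names
# res$datafinal_coord   # final X/Y coordinates, two columns per subject
# res$datafinal_verb    # verbalisation attached to each stimulus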
|
coxpls.formula <- function(Xplan,time,time2,event,type,origin,typeres="deviance", collapse, weighted, scaleX=TRUE, scaleY=TRUE, ncomp=min(7,ncol(Xplan)), modepls="regression", plot=FALSE, allres=FALSE,dataXplan=NULL,subset,weights,model_frame=FALSE,...) {
if (missing(dataXplan))
dataXplan <- environment(Xplan)
mf0 <- match.call(expand.dots = FALSE)
m0 <- match(c("subset", "weights"), names(mf0), 0L)
mf0 <- mf0[c(1L, m0)]
mf0$data <- dataXplan
mf0$formula <- Xplan
mf0$drop.unused.levels <- TRUE
mf0[[1L]] <- as.name("model.frame")
mf0 <- eval(mf0, parent.frame())
if (model_frame)
return(mf0)
mt0 <- attr(mf0, "terms")
Y <- model.response(mf0, "any")
if (length(dim(Y)) == 1L) {
nm <- rownames(Y)
dim(Y) <- NULL
if (!is.null(nm))
names(Y) <- nm
}
Xplan <- if (!is.empty.model(mt0)) model.matrix(mt0, mf0, contrasts)[,-1]
else matrix(, NROW(Y), 0L)
weights <- as.vector(model.weights(mf0))
if (!is.null(weights) && !is.numeric(weights))
stop("'weights' must be a numeric vector")
if (!is.null(weights) && any(weights < 0))
stop("negative weights not allowed")
NextMethod("coxpls")
}
|
get_eigen_spline_matrix <- function(inputData,ind,time,ncores=0) {
if(ncores!=0) {
cl <- parallel::makeCluster( ncores )
doParallel::registerDoParallel( cl )
splineMatrix <- foreach::foreach(x=iterators::iter(inputData, by='col'), .combine='rbind', .export=c("get_ind_time_matrix")) %dopar% get_ind_time_matrix(x, ind, time)
parallel::stopCluster( cl )
} else {
splineMatrix <- plyr::ldply( apply(inputData, 2, function(x) get_ind_time_matrix(x, ind, time)), .id=NULL)
}
return( splineMatrix )
}
get_eigen_spline <- function(inputData,ind,time,nPC=NA,scaling="scaling_UV",method="nipals",verbose=TRUE,centering=TRUE,ncores=0) {
tot.time <- Sys.time()
uniqueTime <- sort(unique(time))
if (length(uniqueTime) < 4){
message("Error: Check input, a minimum of 4 unique time-points are required to fit a smooth.splines")
stop("Check input, a minimum of 4 unique time-points are required to fit a smooth.splines")
}
if ( scaling=="scaling_UV" ) {
dataMatrix <- data.frame(scaling_UV(inputData))
} else if (scaling=="scaling_mean") {
dataMatrix <- data.frame(scaling_mean(inputData))
} else {
dataMatrix <- inputData
}
  # build the subject x time spline matrix from the (possibly scaled) data;
  # reuses get_eigen_spline_matrix() defined above instead of duplicating its body
  splineMatrix <- get_eigen_spline_matrix(dataMatrix, ind, time, ncores = ncores)
splineMatrix <- splineMatrix[ apply(!is.na(splineMatrix),1,sum) > 0, apply(!is.na(splineMatrix),2,sum) > 0]
countTP <- apply(splineMatrix , 1, function(x) sum(!is.na(x)) )
countTP <- as.matrix(table(countTP))
if (is.na(nPC)){
nPC = dim(splineMatrix)[2] - 1
}
modelPCA <- pcaMethods::pca( t(splineMatrix), method=method, center=centering, nPcs=nPC )
if (verbose) {
summary(modelPCA)
}
get.eigen.splineMatrix <- data.frame( t( modelPCA@scores ) )
colnames(get.eigen.splineMatrix) <- rownames(modelPCA@scores)
eigen <- list()
eigen$matrix <- get.eigen.splineMatrix
eigen$variance <- modelPCA@R2
eigen$model <- modelPCA
eigen$countTP <- countTP
tot.time2 <- Sys.time()
message('total time: ',round(as.double(difftime(tot.time2,tot.time)),2),' ',units( difftime(tot.time2,tot.time)))
return(eigen)
}
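## Hypothetical usage sketch: assumes santaR-style inputs where columns of
## inputData are variables, with matching 'ind' (subject) and 'time' vectors,
## plus the helpers used above (get_ind_time_matrix, scaling_UV) and pcaMethods.
# eig <- get_eigen_spline(inputData, ind, time, nPC = 5, ncores = 0)
# eig$variance   # variance explained by each eigen-spline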
get_eigen_DF <- function(eigen) {
OptimAIC <- function(df,eigen,i) { AIC_smooth_spline( stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], df=df)) }
OptimBIC <- function(df,eigen,i) { BIC_smooth_spline( stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], df=df)) }
OptimAICc <- function(df,eigen,i) { AICc_smooth_spline(stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], df=df)) }
df <- as.vector(matrix(0,ncol=1,nrow=5))
wdf <- as.vector(matrix(0,ncol=1,nrow=5))
names(df) <- c("CV","GCV","AIC","BIC", "AICc")
names(wdf) <- c("CV","GCV","AIC","BIC", "AICc")
nPC <- dim(eigen$matrix)[1]
nTP <- dim(eigen$matrix)[2]
for (i in 1: nPC) {
fitCV <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], cv=TRUE)
df[1] <- df[1] + fitCV$df
wdf[1] <- wdf[1] + (fitCV$df * eigen$variance[i])
fitGCV <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], cv=FALSE)
df[2] <- df[2] + fitGCV$df
wdf[2] <- wdf[2] + (fitGCV$df * eigen$variance[i])
AICDf <- stats::optim( par=1.00000000001, OptimAIC, eigen=eigen, i=i, method="Brent", lower=1.00000000001, upper=nTP )$par
df[3] <- df[3] + AICDf
wdf[3] <- wdf[3] + (AICDf * eigen$variance[i])
BICDf <- stats::optim( par=1.00000000001, OptimBIC, eigen=eigen, i=i, method="Brent", lower=1.00000000001, upper=nTP )$par
df[4] <- df[4] + BICDf
wdf[4] <- wdf[4] + (BICDf * eigen$variance[i])
AICcDf <- stats::optim( par=1.00000000001, OptimAICc, eigen=eigen, i=i, method="Brent", lower=1.00000000001, upper=nTP )$par
df[5] <- df[5] + AICcDf
wdf[5] <- wdf[5] + (AICcDf * eigen$variance[i])
}
  df <- df / nPC  # average the optimal df across PCs for each criterion
answer <- list()
answer$df <- df
answer$wdf <- wdf
return(answer)
}
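## Follow-up sketch: given 'eig' from get_eigen_spline() above, summarise the
## optimal degrees of freedom per model-selection criterion.
# dfRes <- get_eigen_DF(eig)
# dfRes$df    # mean df over PCs (CV, GCV, AIC, BIC, AICc)
# dfRes$wdf   # df weighted by the variance explained per PC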
get_param_evolution <- function(eigen,step=0.1) {
nPC <- dim(eigen$matrix)[1]
nTP <- dim(eigen$matrix)[2]
answer <- vector( "list", nPC )
dfList <- seq( 1+step, nTP, step )
for( PC in 1:nPC ) {
tmpMat <- matrix( NA, ncol=length(dfList), nrow=5 )
rownames(tmpMat) <- c("Penalised_residuals(CV)","Penalised_residuals(GCV)","AIC","BIC", "AICc")
colnames(tmpMat) <- round( dfList, 2)
for( i in 1:length(dfList) ) {
tmpMat[1,i] <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[PC,], df=dfList[i], cv=T )$cv.crit
tmpMat[2,i] <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[PC,], df=dfList[i], cv=F )$cv.crit
tmpMat[3,i] <- AIC_smooth_spline( stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[PC,], df=dfList[i] ))
tmpMat[4,i] <- BIC_smooth_spline( stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[PC,], df=dfList[i] ))
tmpMat[5,i] <- AICc_smooth_spline(stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[PC,], df=dfList[i] ))
}
answer[[PC]] <- tmpMat
}
return(answer)
}
plot_param_evolution <- function(paramSpace,scaled=FALSE) {
tmp <- data.frame()
for( i in 1:length(paramSpace) ) {
tmp <- rbind(tmp, data.frame( x=as.numeric(colnames(paramSpace[[i]])), PC=rep(i,dim(paramSpace[[i]])[2]), t(paramSpace[[i]]) ))
}
tmp <- reshape2::melt(tmp, id=c("PC","x"))
pList <- list()
crit <- unique(tmp$variable)
for (j in 1:length(crit)) {
critValue <- tmp[tmp$variable==crit[j],]
if(scaled){
tmpScaled <- data.frame()
for (k in 1:length(unique(critValue$PC))) {
subCrit <- critValue[critValue$PC==k,]
rng <- range(subCrit$value)
subCrit$value <- (subCrit$value - rng[1]) / (rng[2] - rng[1])
tmpScaled <- rbind(tmpScaled, subCrit)
}
p <- ggplot2::ggplot( data=tmpScaled, ggplot2::aes(x=x, y=value, colour=as.factor(PC))) + ggplot2::geom_line() + ggplot2::ggtitle(crit[j]) + ggplot2::xlab("df") + ggplot2::ylab("scaled criteria") + ggplot2::scale_colour_discrete(name="PC") + ggplot2::theme(plot.title = ggplot2::element_text(hjust = 0.5))
} else {
p <- ggplot2::ggplot( data=critValue, ggplot2::aes(x=x, y=value, colour=as.factor(PC))) + ggplot2::geom_line() + ggplot2::ggtitle(crit[j]) + ggplot2::xlab("df") + ggplot2::ylab("criteria") + ggplot2::scale_colour_discrete(name="PC") + ggplot2::theme(plot.title = ggplot2::element_text(hjust = 0.5))
}
tmpMin <- data.frame()
for (l in 1:length(unique(critValue$PC))) {
if(scaled) {
tmpMin <- rbind( tmpMin, tmpScaled[tmpScaled$value==min(tmpScaled[tmpScaled$PC==l,]$value),] )
} else {
tmpMin <- rbind( tmpMin, critValue[critValue$value==min(critValue[critValue$PC==l,]$value),] )
}
}
p <- p + ggplot2::geom_point(data=tmpMin, ggplot2::aes(x=x, y=value, colour=as.factor(PC)), shape=1, size=4)
pList <- c(pList, list(p))
}
return(pList)
}
plot_nbTP_histogram <- function(eigen,dfCutOff=NA) {
countTP <- eigen$countTP
if(length(countTP)>1) {
    inferior <- matrix(0,nrow=length(countTP),ncol=1)
    for(i in 2:length(countTP)) {
      inferior[i,] <- sum(countTP[1:(i-1)])
    }
numTP <- cbind(countTP,as.matrix(apply(inferior, 1, function(x) sum(countTP)-x)))
numTP <- cbind(numTP, apply(numTP, 1, function(x) round((x/sum(numTP[,1]))*100,digits=0))[2,])
numTP[,1] <- as.numeric(rownames(countTP))
} else {
numTP <- matrix(data=c(as.numeric(rownames(countTP)[1]),as.numeric(countTP[,1]),100),ncol=3,nrow=1)
}
rownames(numTP) <- rownames(countTP)
colnames(numTP) <- c("numTP","count","percent")
numTP <- data.frame(numTP)
if (!is.na(dfCutOff)){
numTP <- cbind(numTP,ifelse(numTP$numTP>=dfCutOff,1,0))
colnames(numTP) <- c("numTP","count","percent", "colFill")
rejectWindow <- c(min(numTP$numTP),dfCutOff-1)
p <- ggplot2::ggplot(data=numTP, ggplot2::aes(x=numTP,y=count,label=paste(percent,"%",sep=""), fill=factor(colFill)), parse=TRUE) + ggplot2::geom_bar(ggplot2::aes(y=count),stat='identity',colour='black',show.legend=FALSE) + ggplot2::geom_text(vjust=-0.5) + ggplot2::scale_x_continuous(breaks=seq(min(numTP$numTP),max(numTP$numTP),1), labels=seq(min(numTP$numTP),max(numTP$numTP),1)) + ggplot2::theme_bw()
p <- p + ggplot2::scale_fill_manual(values=c("1"="blue","0"="white"))
p <- p + ggplot2::geom_vline(xintercept=seq(min(rejectWindow), max(rejectWindow), 1), linetype="dotted")
p <- p + ggplot2::geom_vline(xintercept=dfCutOff-0.5, color="red", size=1.5)
    p <- p + ggplot2::xlab("Number of time-points / df cut-off") + ggplot2::ylab("Number of trajectories with corresponding TP") + ggplot2::ggtitle(paste("Trajectories with #TP above the df cut-off: ", sum(numTP[numTP$colFill == 1, "count"]), " of ", sum(numTP$count), sep = ""))
} else {
p <- ggplot2::ggplot(data=numTP, ggplot2::aes(x=numTP,y=count,label=paste(percent,"%",sep="")), parse=TRUE) + ggplot2::geom_bar(stat="identity") + ggplot2::geom_text(vjust=-0.5) + ggplot2::scale_x_continuous(breaks=seq(min(numTP$numTP),max(numTP$numTP),1), labels=seq(min(numTP$numTP),max(numTP$numTP),1)) + ggplot2::theme_bw()
    p <- p + ggplot2::xlab("Number of time-points") + ggplot2::ylab("Number of trajectories with corresponding TP") + ggplot2::ggtitle("Trajectories with a given number of time-points")
}
return(p)
}
get_eigen_DFoverlay_list <- function(eigen,manualDf=5,nPC=NA,step=NA,showPt=TRUE,autofit=TRUE) {
if (is.na(nPC)){
nPC=dim(eigen$matrix)[1]
}
if (is.na(step)){
step=0.2
}
pList <- list()
for (i in 1: nPC) {
p <- ggplot2::ggplot(NULL,ggplot2::aes(x), environment = environment())
for (j in seq(1+step,dim(eigen$matrix)[2],step)) {
fit <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], df=j)
fitProj <- data.frame(stats::predict(fit, seq( min(fit$x),max(fit$x), ((max(fit$x)-min(fit$x))/250) ) ))
p <- p + ggplot2::geom_line(data=fitProj, ggplot2::aes_string(x="x", y="y"), linetype=2, col="grey" )
}
fit <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,], df=manualDf)
fitProj <- data.frame(stats::predict(fit, seq( min(fit$x),max(fit$x), ((max(fit$x)-min(fit$x))/250) ) ))
p <- p + ggplot2::geom_line(data=fitProj, ggplot2::aes(x=x, y=y), linetype=4, col="blue", size=1.5 )
if(showPt) {
p <- p + ggplot2::geom_point(data=data.frame(x=fit$data$x, y=fit$data$y), ggplot2::aes(x=x, y=y), shape=16, size=5, col="green")
}
if(autofit) {
fit <- stats::smooth.spline( x=as.numeric(colnames(eigen$matrix)), y=eigen$matrix[i,])
fitProj <- data.frame(stats::predict(fit, seq( min(fit$x),max(fit$x), ((max(fit$x)-min(fit$x))/250) ) ))
p <- p + ggplot2::geom_line(data=fitProj, ggplot2::aes(x=x, y=y), linetype=4, col="red", size=1.5 )
p <- p + ggplot2::theme_bw() + ggplot2::ggtitle(paste("Fit PC ",i," varExp=",round(100*eigen$variance[i],1),"% - df=",manualDf," - Auto-df=",round(fit$df,2), sep="")) + ggplot2::theme(plot.title = ggplot2::element_text(hjust = 0.5))
} else {
p <- p + ggplot2::theme_bw() + ggplot2::ggtitle(paste("Fit PC ",i," varExp=",round(100*eigen$variance[i],1),"% - df=",manualDf, sep="")) + ggplot2::theme(plot.title = ggplot2::element_text(hjust = 0.5))
}
p <- p + ggplot2::xlab("Time") + ggplot2::ylab("Projection")
pList <- c(pList, list(p))
}
return(pList)
}
loglik_smooth_spline <- function(fittedSmoothSpline) {
s <- fittedSmoothSpline
x <- fittedSmoothSpline$x
y <- fittedSmoothSpline$y
w <- fittedSmoothSpline$w
yinput <- fittedSmoothSpline$yin
wrss <- sum(w * (yinput-y)^2)
lambda <- s$lambda
sDeriv <- stats::smooth.spline(stats::predict(s, x, deriv=2))
ab <- range(x, na.rm=TRUE)
Js <- stats::integrate(function(x) stats::predict(sDeriv, x=x)$y,lower=ab[1], upper=ab[2], rel.tol=.Machine$double.eps^(1/8), stop.on.error=FALSE)$value
penalty <- -lambda * Js
l <- (wrss + penalty)
return(l)
}
AIC_smooth_spline <- function(fittedSmoothSpline) {
  df = fittedSmoothSpline$df
  # loglik_smooth_spline() returns a penalised RSS, so -2 * -loglik simplifies to 2 * loglik
  AIC = 2 * loglik_smooth_spline(fittedSmoothSpline) + 2 * df
  return(AIC)
}
AICc_smooth_spline <- function(fittedSmoothSpline) {
df = fittedSmoothSpline$df
nobs = length(fittedSmoothSpline$yin)
AICc = AIC_smooth_spline(fittedSmoothSpline) + 2 * df * (df + 1) / (nobs - df - 1)
return(AICc)
}
BIC_smooth_spline <- function(fittedSmoothSpline) {
  df = fittedSmoothSpline$df
  nobs = length(fittedSmoothSpline$yin)
  # as in AIC_smooth_spline(), -2 * -loglik simplifies to 2 * loglik
  BIC = 2 * loglik_smooth_spline(fittedSmoothSpline) + log(nobs) * df
  return(BIC)
}
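## Minimal self-contained check of the criteria above on simulated data
## (uses only base R plus the functions defined in this file).
set.seed(1)
xx <- seq(0, 10, length.out = 50)
yy <- sin(xx) + rnorm(50, sd = 0.2)
fit <- stats::smooth.spline(xx, yy, df = 6)
c(AIC = AIC_smooth_spline(fit),
  AICc = AICc_smooth_spline(fit),
  BIC = BIC_smooth_spline(fit))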
|
lsm_c_dcore_sd <- function(landscape, directions = 8, consider_boundary = FALSE, edge_depth = 1) {
landscape <- landscape_as_list(landscape)
result <- lapply(X = landscape,
FUN = lsm_c_dcore_sd_calc,
directions = directions,
consider_boundary = consider_boundary,
edge_depth = edge_depth)
layer <- rep(seq_along(result),
vapply(result, nrow, FUN.VALUE = integer(1)))
result <- do.call(rbind, result)
tibble::add_column(result, layer, .before = TRUE)
}
lsm_c_dcore_sd_calc <- function(landscape, directions, consider_boundary, edge_depth,
points = NULL){
dcore <- lsm_p_ncore_calc(landscape,
directions = directions,
consider_boundary = consider_boundary,
edge_depth = edge_depth,
points = points)
if (all(is.na(dcore$value))) {
return(tibble::tibble(level = "class",
class = as.integer(NA),
id = as.integer(NA),
metric = "dcore_sd",
value = as.double(NA)))
}
dcore_sd <- stats::aggregate(x = dcore[, 5], by = dcore[, 2], FUN = stats::sd)
return(tibble::tibble(level = "class",
class = as.integer(dcore_sd$class),
id = as.integer(NA),
metric = "dcore_sd",
value = as.double(dcore_sd$value)))
}
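## Hypothetical usage (assumes the landscapemetrics package and its bundled
## example raster 'landscape'):
# library(landscapemetrics)
# lsm_c_dcore_sd(landscape, directions = 8, consider_boundary = FALSE)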
|
rstandard.manylm <- function(model,
sd = sqrt(deviance(model)/df.residual(model)), ...) {
wt.res <- as.matrix(weighted.residuals(model))
hat <- as.vector(diag(model$hat.X))
n <- NROW(wt.res)
n.vars <- NCOL(wt.res)
sD <- matrix(rep(sd, each=n), nrow=n, ncol=n.vars)
hatX <- matrix(rep(hat,times=n.vars), nrow=n, ncol=n.vars)
res <- wt.res /(sD * sqrt(1 - hatX))
res[is.infinite(res)] <- NaN
res
}
|
qHdiv <-
function(comm, C, q=2){
if (!inherits(comm, "data.frame") & !inherits(comm, "matrix"))
stop("comm must be a data frame or a matrix")
if (any(comm < 0))
stop("non-negative values expected in comm")
    comms <- apply(comm, 1, sum)
    if (any(comms == 0))
        stop("row (community) in comm with zero values only")
fun <- function(x){
x <- x/sum(x)
d <- svd(diag(sqrt(x))%*%C%*%diag(sqrt(x)))$d
d <- d/sum(d)
        divx <- (sum(d^q))^(1/(1-q))  # Hill-number exponent is 1/(1-q); q = 1 is not defined here
return(divx)
}
div <- apply(comm, 1, fun)
return(div)
}
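## Minimal sketch: three communities over four species. With C = identity the
## eigenvalues reduce to relative abundances, so q = 2 gives the inverse
## Simpson index per community.
comm <- matrix(c(10, 0, 3, 7,
                  2, 8, 8, 2,
                  5, 5, 5, 5), nrow = 3, byrow = TRUE)
qHdiv(comm, C = diag(4), q = 2)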
|
mdp_example_forest <- function(S, r1, r2, p) {
  if ( nargs() >= 1 & ifelse(!missing(S), S <= 1, F) ) {
    print('----------------------------------------------------------')
    print('MDP Toolbox ERROR: Number of states S must be greater than 1')
    print('----------------------------------------------------------')
  } else if ( nargs() >= 2 & ifelse(!missing(r1), r1 <= 0, F) ) {
    print('-----------------------------------------------------------')
    print('MDP Toolbox ERROR: The reward value r1 must be greater than 0')
    print('-----------------------------------------------------------')
  } else if ( nargs() >= 3 & ifelse(!missing(r2), r2 <= 0, F) ) {
    print('-----------------------------------------------------------')
    print('MDP Toolbox ERROR: The reward value r2 must be greater than 0')
    print('-----------------------------------------------------------')
  } else if ( nargs() >= 4 & ifelse(!missing(p), p < 0 | p > 1, F) ) {
    print('--------------------------------------------------------')
    print('MDP Toolbox ERROR: Probability p must be in [0; 1]')
    print('--------------------------------------------------------')
} else {
if (nargs() < 4) p <- 0.1
if (nargs() < 3) r2 <- 2
if (nargs() < 2) r1 <- 4
if (nargs() < 1) S <- 3
P1 <- matrix(0,S,S)
if (S > 2) diag(P1[-nrow(P1),-1]) <- (1-p) else P1[1,2] <- 1-p
P1[,1] <- p
P1[S,S] <- 1-p
P2 <- matrix(0,S,S)
P2[,1] <- 1
P <- array(0, c(S,S,2))
P[,,1] <- P1
P[,,2] <- P2
R1 <- numeric(S)
R1[S] <- r1
R2 <- rep(1,S)
R2[1] <- 0
R2[S] <- r2
R <- cbind(R1,R2)
return(list("P"=P, "R"=R))
}
}
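## Quick check with the defaults (S = 3, r1 = 4, r2 = 2, p = 0.1):
fm <- mdp_example_forest()
fm$P[, , 1]   # transitions under action 1 ("wait")
fm$R          # one reward column per action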
|
library(quantreg)  # rqss() and qss() come from quantreg
set.seed(1)        # make the simulated design reproducible
n <- 100
p <- 9
q <- 3
beta <- c( rep(1,q), rep(0,p-q))
w <- matrix(rnorm(n*p),n,p)
x <- runif(n,0,10)
z <- runif(n,0,10)
y <- w %*% beta + sin(x) + (z^2)/50 + rnorm(n)/5
d <- data.frame(w,x,y,z)
f <- rqss(y ~ w + qss(x,lambda = 3) + qss(z,lambda = 2),
method = "lasso", lambda = 3, data = d)
plot(f, bands = "both", bcol = c("lightsteelblue", "lightsteelblue4"))
|
simple_ci <- function(ci){
  # flag conditional-independence triples whose B and C sets are both non-empty
  b.temp <- c()
  for(i in 1:(length(ci[[4]]))){
    if(identical(ci[[4]][[i]]$C, character(0)) ||
       identical(ci[[4]][[i]]$B, character(0))){
      b.temp <- c(b.temp, FALSE)
    } else{
      b.temp <- c(b.temp, TRUE)
    }
  }
  temp <- vector(mode = "list", length = sum(b.temp))
  count <- 1
  for(i in 1:length(ci[[4]])){
    if (b.temp[i] == TRUE){
      temp[[count]] <- ci$cond_ind[[i]]
      count <- count + 1
    }
  }
  ci$cond_ind <- temp
  return(ci)
}
ci_submatrix <- function(ci){
simpleci <- simple_ci(ci)
A <- c()
B <- c()
C <- c()
for(i in 1:length(simpleci[[4]])){
A <- c(A,simpleci[[4]][[i]]$A)
B <- c(B,simpleci[[4]][[i]]$B)
C <- c(C,simpleci[[4]][[i]]$C)
}
return(list(A=unique(A),B=unique(B),C=unique(C)))
}
variation_mat <- function(x,entry,delta){
mat <- matrix(1,nrow = nrow(x$covariance),ncol= ncol(x$covariance))
mat[entry[1],entry[2]] <- delta
mat[entry[2],entry[1]] <- delta
return(mat)
}
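## Hedged sketch: 'ci' is assumed to be a list whose 4th element ($cond_ind)
## stores conditional-independence triples with character fields A, B and C
## (the variable names below are made up for illustration).
ci <- list(NULL, NULL, NULL,
           cond_ind = list(
             list(A = "X1", B = "X2", C = "X3"),
             list(A = "X1", B = character(0), C = "X3")))
ci_submatrix(ci)   # drops the triple with empty B, returns unique A/B/C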
|
Particle <- R6::R6Class('Particle',
private = list(
.values_ranges = NA,
.values = NA,
.fitness = NA,
.fitness_function = NA,
.personal_best_values = NA,
.personal_best_fitness = 0,
.velocity = 0,
.acceleration_coefficient = NA,
.inertia = NA
),
active = list(
values_ranges = function(value){
if (missing(value)) {
private$.values_ranges
} else {
stop("`$values_range can't be changed after the creation of the particle", call. = FALSE)
}
},
values = function(value){
if (missing(value)) {
private$.values
} else {
stop("`$values can't be changed after the creation of the particle (changed by the update method", call. = FALSE)
}
},
fitness = function(value){
if (missing(value)) {
private$.fitness
} else {
stop("`$fitness can't be changed", call. = FALSE)
}
},
fitness_function = function(value){
if (missing(value)) {
private$.fitness_function
} else {
stop("`$fitness_function can't be changed after the creation of the particle", call. = FALSE)
}
},
personal_best_values = function(value){
if (missing(value)) {
private$.personal_best_values
} else {
stop("`$personal_best_values can't be changed by the user", call. = FALSE)
}
},
personal_best_fitness = function(value){
if (missing(value)) {
private$.personal_best_fitness
} else {
stop("`$personal_best_fitness can't be changed by the user", call. = FALSE)
}
},
velocity = function(value){
if (missing(value)) {
private$.velocity
} else {
stop("`$velocity can't be changed by the user", call. = FALSE)
}
},
acceleration_coefficient = function(value){
if (missing(value)) {
private$.acceleration_coefficient
} else {
stop("`$acceleration_coefficient can't be changed after the creation of the particle", call. = FALSE)
}
},
inertia = function(value){
if (missing(value)) {
private$.inertia
} else {
stop("`$inertia can't be changed after the creation of the particle", call. = FALSE)
}
}
),
public = list(
initialize = function(values_ranges,
values,
fitness_function,
acceleration_coefficient,
inertia){
      if (is.list(values_ranges)){
        if (length(values_ranges) != length(values)){
          stop('ERROR The lengths of values_ranges and values need to be the same.')
        }
        private$.values_ranges <- values_ranges
      } else {stop("ERROR values_ranges needs to be a list.")}
      if (is.numeric(values)){
        private$.values <- values
      } else{stop('ERROR values need to be numeric.')}
      if(is.function(fitness_function)){
        private$.fitness_function <- fitness_function
      } else{stop('ERROR fitness_function needs to be a function.')}
      private$.personal_best_values <- values
      if (length(acceleration_coefficient) != 2){
        stop('ERROR acceleration_coefficient needs to be two numeric values c(c1,c2).')
      }
      private$.acceleration_coefficient <- acceleration_coefficient
      if (is.numeric(inertia)){
        private$.inertia <- inertia
      } else {stop("ERROR inertia needs to be a numeric value.")}
      private$.velocity <- rep(0,length(values))
},
get_fitness=function(){
private$.fitness <- self$fitness_function(private$.values)
invisible(self)
},
update=function(swarm_best){
c1 <- private$.acceleration_coefficient[1]
c2 <- private$.acceleration_coefficient[2]
r1 <- runif(n = 1,min = 0,max = 1)
r2 <- runif(n = 1,min = 0,max = 1)
for (index in 1:length(private$.values)){
        # pull towards the personal best and the swarm best positions
        perso_gap <- (private$.personal_best_values[index] - private$.values[index])
        global_gap <- (swarm_best[index] - private$.values[index])
        global_attract <- c2*r2*global_gap
        perso_attract <- c1*r1*perso_gap
        private$.velocity[index] <- (private$.inertia * private$.velocity[index] + perso_attract + global_attract)
private$.values[index] <- private$.values[index] + private$.velocity[index]
if (private$.values[index] > max(unlist(private$.values_ranges[index]))){
private$.values[index] <- max(unlist(private$.values_ranges[index]))
}
else if (private$.values[index] < min(unlist(private$.values_ranges[index]))){
private$.values[index] <- min(unlist(private$.values_ranges[index]))
}
}
self$get_fitness()
if (private$.fitness > private$.personal_best_fitness){
self$update_personal_best_fitness()
}
invisible(self)
},
update_personal_best_fitness=function(){
private$.personal_best_fitness <- private$.fitness
private$.personal_best_values <- private$.values
invisible(self)
},
print = function(){
cat('Particle: \n')
for (index in 1:length(private$.values)){
cat('Values ', index, ': ', private$.values[index],'\n',sep = '')
}
cat('fitness : ',private$.fitness, '\n',sep = '')
}
)
)
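## Minimal sketch: a single particle on a 2-D quadratic fitness surface.
## update() keeps the personal best whenever fitness increases, so the fitness
## function is written to be maximised (requires the R6 package, as above).
p <- Particle$new(values_ranges = list(c(-5, 5), c(-5, 5)),
                  values = c(1, 2),
                  fitness_function = function(v) -sum(v^2),
                  acceleration_coefficient = c(0.5, 0.5),
                  inertia = 0.3)
p$get_fitness()
p$update(swarm_best = c(0, 0))   # pulls the particle towards the origin
p$print()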
|
simulate.mmglm0 <- function (object, nsim=1, seed=NULL, ...){
if (!is.null(seed)) set.seed(seed)
obj1 <- simulate.mchain(object, nsim=nsim, seed=seed)
if (is.null(obj1$x$x1)) obj1$x$x1 <- runif(nsim, min=0, max=1)
    else if (length(obj1$x$x1)!=nsim) stop("length(x1) must equal nsim")
if (obj1$family=="binomial"){
if (is.null(obj1$x$size)) obj1$x$size <- 100+rpois(nsim, lambda=5)
        else if (length(obj1$x$size)!=nsim) stop("length(size) must equal nsim")
}
obj1$x$y <- rep(NA,nsim)
for (i in 1:nsim){
eta <- obj1$beta[1,obj1$mc[i]] + obj1$beta[2,obj1$mc[i]]*obj1$x$x1[i]
if (obj1$link=="inverse") mu <- 1/eta
else if (obj1$link=="identity") mu <- eta
else if (obj1$link=="log") mu <- exp(eta)
else if (obj1$link=="logit") prob <- exp(eta)/(1+exp(eta))
else if (obj1$link=="probit") prob <- pnorm(eta)
else if (obj1$link=="cloglog") prob <- 1-exp(-exp(eta))
if (obj1$family=="gaussian")
obj1$x$y[i] <- rnorm(1, mean=mu, sd=obj1$sigma[obj1$mc[i]])
else if (obj1$family=="poisson")
obj1$x$y[i] <- rpois(1, lambda=mu)
else if (obj1$family=="Gamma")
obj1$x$y[i] <- rgamma(1, scale=mu*obj1$sigma[obj1$mc[i]]^2,
shape=1/obj1$sigma[obj1$mc[i]]^2)
else if (obj1$family=="binomial")
obj1$x$y[i] <- rbinom(1, size=obj1$x$size[i], prob=prob)
}
obj1$x <- as.data.frame(obj1$x)
return(obj1)
}
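## Hypothetical usage (assumes a constructed 'mmglm0' object from the
## HiddenMarkov package, which also provides simulate.mchain):
# library(HiddenMarkov)
# sim <- simulate(my_mmglm0, nsim = 200, seed = 5)
# head(sim$x)   # simulated covariate x1, size (binomial case) and response y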
|
deSolve.lsoda.wrapper=function(
  t,
  ydot,
  startValues
){
  parms=NULL
  # adapt ydot(y, t) to the (t, y, parms) signature expected by lsoda
  lsexamp <- function(t, y, parms){
    yd=as.vector(ydot(y,t))
    list(yd)
  }
  out <- deSolve::lsoda(startValues,t,lsexamp,parms)
  n=length(startValues)
  # drop the time column; keep a plain matrix with one column per state variable
  if (n==1) { Yt=matrix(ncol=n,out[,-1])}
  else {Yt=out[,-1]}
  Y=matrix(Yt,ncol=n,nrow=length(t))
  return(Y)
}
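## Minimal sketch (requires the deSolve package): exponential decay
## dy/dt = -0.5 * y; note the wrapper expects ydot(y, t), state first.
decay <- function(y, t) -0.5 * y
times <- seq(0, 10, by = 0.1)
Y <- deSolve.lsoda.wrapper(times, decay, startValues = c(1))
head(Y)   # one row per time point, one column per state variable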
|
PropertyValue <-function(id = NULL,
valueReference = NULL,
value = NULL,
unitText = NULL,
unitCode = NULL,
propertyID = NULL,
minValue = NULL,
maxValue = NULL,
url = NULL,
sameAs = NULL,
potentialAction = NULL,
name = NULL,
mainEntityOfPage = NULL,
image = NULL,
identifier = NULL,
disambiguatingDescription = NULL,
description = NULL,
alternateName = NULL,
additionalType = NULL) {
Filter(Negate(is.null),
list(
type = "PropertyValue",
id = id,
valueReference = valueReference,
value = value,
unitText = unitText,
unitCode = unitCode,
propertyID = propertyID,
minValue = minValue,
maxValue = maxValue,
url = url,
sameAs = sameAs,
potentialAction = potentialAction,
name = name,
mainEntityOfPage = mainEntityOfPage,
image = image,
identifier = identifier,
disambiguatingDescription = disambiguatingDescription,
description = description,
alternateName = alternateName,
additionalType = additionalType))}
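## Example: a schema.org PropertyValue for a measured height; NULL arguments
## are filtered out, so only the supplied slots remain in the output list.
pv <- PropertyValue(value = 1.76, unitText = "m", propertyID = "height")
str(pv)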
|
rm(list=ls())
setwd("C:/Users/Tom/Documents/Kaggle/Santander")
library(data.table)
library(bit64)
library(xgboost)
library(stringr)
library(matrixStats)
submissionDate <- "18-12-2016"
loadFile <- "xgboost weighted trainAll 16 10 folds 200 rounds, linear increase jun15 times6 back 13-0 no zeroing, exponential normalisation joint"
submissionFile <- "xgboost weighted trainAll 20, reca lin mult 0.5"
targetDate <- "12-11-2016"
trainModelsFolder <- "trainTrainAll Top 100 monthProduct 200 rounds 10 Folds"
trainAll <- grepl("TrainAll", trainModelsFolder)
testFeaturesFolder <- "testNoStagnantRemoval"
loadPredictions <- FALSE
loadBaseModelPredictions <- TRUE
savePredictions <- TRUE
saveBaseModelPredictions <- TRUE
savePredictionsBeforeNormalisation <- TRUE
mapBoosting <- FALSE
maxDiffNominaNomPensMapBoosting <- Inf
maxRelDiffNominaNomPensMapBoosting <- 0.5
averageNominaNomPensProbsMAPBoosting <- TRUE
averageNominaNomPensProbsMAPBoostingMethod <- c("Average", "Min")[1]
consideredMapTopPredictions <- 1:6
swapRelativeCutoff <- 1
swapWithCno <- FALSE
nbMapTopPredictions <- length(consideredMapTopPredictions)
nomPensAboveNominaBothNotOwned <- TRUE
if(mapBoosting && !nomPensAboveNominaBothNotOwned){
stop("MAP boosting requires nomPensAboveNominaBothNotOwned to be TRUE")
}
linearMultipliers <- rep(1, 24)
linearMultipliers[18] <- 1/2
generalizedMeanPowers <- rep(1, 24)
dropFoldModels <- TRUE
medianModelPrediction <- FALSE
dropBootModels <- FALSE
onlyBootModels <- FALSE
normalizeProdProbs <- TRUE
normalizeMode <- c("additive", "linear", "exponential")[3]
additiveNormalizeProds <- NULL
fractionPosFlankUsers <- 0.035114
expectedCountPerPosFlank <- 1.25
marginalNormalisation <- c("linear", "exponential")[2]
weightSum <- 1
predictSubset <- FALSE
predictionsFolder <- "Predictions"
zeroTargets <- NULL
source("Common/exponentialNormaliser.R")
source("Common/getModelWeights.R")
source("Common/apk.R")
dateTargetWeights <- readRDS(file.path(getwd(), "Model weights", targetDate,
"model weights first.rds"))
predictionsPath <- file.path(getwd(), "Submission", submissionDate,
predictionsFolder)
dir.create(predictionsPath, showWarnings = FALSE)
if(saveBaseModelPredictions){
baseModelPredictionsPath <- file.path(predictionsPath, submissionFile)
dir.create(baseModelPredictionsPath, showWarnings = FALSE)
}
if(loadBaseModelPredictions){
baseModelPredictionsPath <- file.path(predictionsPath, loadFile)
}
if(loadPredictions){
rawPredictionsPath <- file.path(predictionsPath,
paste0("prevNorm", loadFile, ".rds"))
} else{
rawPredictionsPath <- file.path(predictionsPath,
paste0("prevNorm", submissionFile, ".rds"))
}
posFlankClientsFn <- file.path(getwd(), "Feature engineering", targetDate,
"positive flank clients.rds")
posFlankClients <- readRDS(posFlankClientsFn)
modelsBasePath <- file.path(getwd(), "First level learners", targetDate,
trainModelsFolder)
modelGroups <- list.dirs(modelsBasePath)[-1]
modelGroups <- modelGroups[!grepl("Manual tuning", modelGroups)]
modelGroups <- modelGroups[!grepl("no fold BU", modelGroups)]
nbModelGroups <- length(modelGroups)
baseModelInfo <- NULL
baseModels <- list()
for(i in 1:nbModelGroups){
modelGroup <- modelGroups[i]
slashPositions <- gregexpr("\\/", modelGroup)[[1]]
modelGroupExtension <- substring(modelGroup,
1 + slashPositions[length(slashPositions)])
modelGroupFiles <- list.files(modelGroup)
modelGroupFiles <- modelGroupFiles[!grepl("no fold BU", modelGroupFiles)]
if(dropFoldModels){
modelGroupFiles <- modelGroupFiles[!grepl("Fold", modelGroupFiles)]
}
if(dropBootModels){
modelGroupFiles <- modelGroupFiles[!grepl("Boot", modelGroupFiles)]
} else{
if(onlyBootModels){
modelGroupFiles <- modelGroupFiles[grepl("Boot", modelGroupFiles)]
}
}
nbModels <- length(modelGroupFiles)
monthsBack <- suppressWarnings(
as.numeric(substring(gsub("Lag.*$", "", modelGroupExtension), 5)))
lag <- suppressWarnings(as.numeric(gsub("^.*Lag", "", modelGroupExtension)))
if(nbModels>0){
for(j in 1:nbModels){
modelGroupFile <- modelGroupFiles[j]
shortFn <- gsub(".rds$", "", modelGroupFile)
modelInfo <- readRDS(file.path(modelGroup, modelGroupFile))
targetProduct <- modelInfo$targetVar
relativeWeight <- getModelWeights(monthsBack, targetProduct,
dateTargetWeights)
isFold <- grepl("Fold", modelGroupFile)
      prodMonthFiles <- modelGroupFiles[grepl(targetProduct, modelGroupFiles)]
nbFoldsProd <- sum(grepl("Fold", prodMonthFiles))
modelGroupFilesTarget <-
modelGroupFiles[grepl(targetProduct, modelGroupFiles) &
grepl("Fold", modelGroupFiles)]
nbFoldsModTarget <- as.numeric(gsub("^.* of | -.*$", "",
modelGroupFilesTarget))
totalModelFoldWeight <- 1 + sum(1 - 1/nbFoldsModTarget)
if(isFold){
nbFoldsMod <- as.numeric(gsub("^.* of | -.*$", "", modelGroupFile))
foldModelWeight <- (1 - 1/nbFoldsMod)/totalModelFoldWeight
} else{
foldModelWeight <- 1/totalModelFoldWeight
}
baseModelInfo <- rbind(baseModelInfo,
data.table(
modelGroupExtension = modelGroupExtension,
shortFn = shortFn,
targetProduct = targetProduct,
monthsBack = monthsBack,
modelLag = lag,
relativeWeight = relativeWeight * foldModelWeight)
)
baseModels <- c(baseModels, list(modelInfo))
}
}
}
baseModelInfo[, modelId := 1:nrow(baseModelInfo)]
if(all(is.na(baseModelInfo$modelLag))){
nbGroups <- length(unique(baseModelInfo$modelGroupExtension))
baseModelInfo <- baseModelInfo[order(targetProduct), ]
baseModelInfo$modelLag <- 5
baseModelInfo$relativeWeight <- 1
    monthsBackLags <- rep(defaultTestLag, nbGroups)  # defaultTestLag is assumed to be defined upstream
nbMarginalLags <- length(monthsBackLags)
nbConditionalLags <- 1
} else{
monthsBackLags <- rev(sort(unique(baseModelInfo$modelLag)))
nbMarginalLags <- length(monthsBackLags)
nbConditionalLags <- length(monthsBackLags)
}
uniqueBaseModels <- sort(unique(baseModelInfo$targetProduct))
for(i in 1:length(uniqueBaseModels)){
productIds <- baseModelInfo$targetProduct==uniqueBaseModels[i]
productWeightSum <- baseModelInfo[productIds, sum(relativeWeight)]
normalizeWeightRatio <- weightSum/productWeightSum
baseModelInfo[productIds, relativeWeight := relativeWeight*
normalizeWeightRatio]
}
baseModelInfo <- baseModelInfo[order(monthsBack), ]
baseModelNames <- unique(baseModelInfo[monthsBack==0, targetProduct])
testDataLag <- readRDS(file.path(getwd(), "Feature engineering", targetDate,
testFeaturesFolder, "Lag1 features.rds"))
if(predictSubset){
predictSubsetIds <- sort(sample(1:nrow(testDataLag), predictSubsetCount))
testDataLag <- testDataLag[predictSubsetIds]
}
testDataPosFlank <- testDataLag$ncodpers %in% posFlankClients
trainFn <- "train/Back15Lag1 features.rds"
colOrderData <- readRDS(file.path(getwd(), "Feature engineering",
targetDate, trainFn))
targetCols <- grep("^ind_.*_ult1$", names(colOrderData), value=TRUE)
rm(colOrderData)
gc()
nbBaseModels <- length(targetCols)
countContributions <- readRDS(file.path(getwd(), "Feature engineering",
targetDate,
"monthlyRelativeProductCounts.rds"))
if(!trainAll){
posFlankModelInfo <- baseModelInfo[targetProduct=="hasNewProduct"]
newProdPredictions <- rep(0, nrow(testDataLag))
if(nrow(posFlankModelInfo) != nbMarginalLags) browser()
for(i in 1:nbMarginalLags){
cat("Generating new product predictions for lag", i, "of", nbMarginalLags,
"\n")
lag <- posFlankModelInfo[i, modelLag]
weight <- posFlankModelInfo[i, relativeWeight]
newProdModel <- baseModels[[posFlankModelInfo[i, modelId]]]
testDataLag <- readRDS(file.path(getwd(), "Feature engineering", targetDate,
testFeaturesFolder,
paste0("Lag", lag, " features.rds")))
if(predictSubset){
testDataLag <- testDataLag[predictSubsetIds]
}
predictorData <- testDataLag[, newProdModel$predictors, with=FALSE]
predictorDataM <- data.matrix(predictorData)
rm(predictorData)
gc()
newProdPredictionsLag <- predict(newProdModel$model, predictorDataM)
newProdPredictions <- newProdPredictions + newProdPredictionsLag*weight
}
newProdPredictions <- newProdPredictions/weightSum
meanGroupPredsMayFlag <-
c(mean(newProdPredictions[testDataLag$hasMay15Data==0]),
mean(newProdPredictions[testDataLag$hasMay15Data==1]))
meanGroupPredsPosFlank <- c(mean(newProdPredictions[!testDataPosFlank]),
mean(newProdPredictions[testDataPosFlank]))
expectedPosFlanks <- sum(newProdPredictions)
leaderboardPosFlanks <- fractionPosFlankUsers*nrow(testDataLag)
normalisedProbRatio <- leaderboardPosFlanks/expectedPosFlanks
cat("Expected/leaderboard positive flank ratio",
round(1/normalisedProbRatio, 2), "\n")
if(marginalNormalisation == "linear"){
newProdPredictions <- newProdPredictions * normalisedProbRatio
} else{
newProdPredictions <- probExponentNormaliser(newProdPredictions,
normalisedProbRatio)
}
} else{
newProdPredictions <- rep(1, nrow(testDataLag))
}
if(loadPredictions && file.exists(rawPredictionsPath)){
allPredictions <- readRDS(rawPredictionsPath)
} else{
allPredictions <- NULL
for(lagId in 1:nbConditionalLags){
cat("\nGenerating positive flank predictions for lag", lagId, "of",
nbConditionalLags, "@", as.character(Sys.time()), "\n\n")
lag <- monthsBackLags[lagId]
loadTestDataLag <- TRUE
baseModelPredPaths <- file.path(baseModelPredictionsPath,
paste0(targetCols, " Lag ", lag, ".rds"))
if(loadBaseModelPredictions && exists("baseModelPredictionsPath")){
loadTestDataLag <- !all(sapply(baseModelPredPaths, file.exists))
}
if(loadTestDataLag){
testDataLag <- readRDS(file.path(getwd(), "Feature engineering", targetDate,
testFeaturesFolder,
paste0("Lag", lag, " features.rds")))
if(predictSubset){
testDataLag <- testDataLag[predictSubsetIds]
}
}
for(i in 1:nbBaseModels){
targetVar <- targetCols[i]
targetModelIds <- baseModelInfo[targetProduct==targetVar &
modelLag==lag, modelId]
cat("Generating test predictions for model", i, "of", nbBaseModels, "\n")
if(exists("baseModelPredictionsPath")){
baseModelPredPath <- file.path(baseModelPredictionsPath,
paste0(targetVar, " Lag ", lag, ".rds"))
} else{
baseModelPredPath <- ""
}
foldWeights <- baseModelInfo[modelId %in% targetModelIds,
relativeWeight]
weight <- sum(foldWeights)
loadFileExists <- file.exists(baseModelPredPath)
if(loadBaseModelPredictions && loadFileExists){
predictionsDT <- readRDS(baseModelPredPath)
} else{
if(targetVar %in% zeroTargets || weight <= 0){
predictions <- rep(0, nrow(testDataLag))
} else{
nbTargetModelFolds <- length(targetModelIds)
foldPredictions <- rep(0, nrow(testDataLag))
alreadyOwned <- is.na(testDataLag[[paste0(targetVar, "Lag1")]]) |
testDataLag[[paste0(targetVar, "Lag1")]] == 1
predictorData <-
testDataLag[!alreadyOwned,
baseModels[[targetModelIds[1]]]$predictors, with=FALSE]
predictorDataM <- data.matrix(predictorData)
rm(predictorData)
gc()
if(medianModelPrediction){
baseFoldPredictions <- matrix(NA, nrow=nrow(predictorDataM),
ncol = nbTargetModelFolds)
}
for(fold in 1:nbTargetModelFolds){
targetModelId <- targetModelIds[fold]
targetModel <- baseModels[[targetModelId]]
weightFold <- foldWeights[fold]
if(targetModel$targetVar != targetVar) browser()
predictionsPrevNotOwnedFold <- predict(targetModel$model,
predictorDataM)
if(medianModelPrediction){
baseFoldPredictions[, fold] <- predictionsPrevNotOwnedFold
} else{
foldPredictions[!alreadyOwned] <- foldPredictions[!alreadyOwned] +
predictionsPrevNotOwnedFold*weightFold
}
}
if(medianModelPrediction){
predictions <- rep(0, nrow(testDataLag))
predictions[!alreadyOwned] <- rowMedians(baseFoldPredictions)
} else{
predictions <- foldPredictions/weight
}
predictions[alreadyOwned] <- 0
}
if(any(is.na(predictions))) browser()
predictionsDT <- data.table(ncodpers = testDataLag$ncodpers,
predictions = predictions,
product = targetVar)
}
predictionsDT[, weightedPrediction :=
predictionsDT$predictions*weight]
if(targetVar %in% allPredictions$product){
allPredictions[product==targetVar, weightedPrediction:=
weightedPrediction +
(predictionsDT$weightedPrediction *
linearMultipliers[i]) ^ generalizedMeanPowers[i]]
} else{
allPredictions <- rbind(allPredictions, predictionsDT)
allPredictions[product == targetVar, predictions :=
(predictions * linearMultipliers[i]) ^
generalizedMeanPowers[i]]
allPredictions[product == targetVar, weightedPrediction :=
(weightedPrediction * linearMultipliers[i]) ^
generalizedMeanPowers[i]]
}
if(saveBaseModelPredictions && (!loadBaseModelPredictions ||
(loadBaseModelPredictions &&
!loadFileExists))){
predictionsDT[, weightedPrediction:=NULL]
saveRDS(predictionsDT, baseModelPredPath)
}
}
}
allPredictions[, prediction := (weightedPrediction / weightSum) ^
(1/rep(generalizedMeanPowers, each=nrow(testDataLag)))]
allPredictions[, weightedPrediction := NULL]
allPredictions[, predictions := NULL]
if(savePredictionsBeforeNormalisation){
saveRDS(allPredictions, file=rawPredictionsPath)
}
}
probMultipliers <- rep(NA, nbBaseModels)
if(normalizeProdProbs){
for(i in 1:nbBaseModels){
cat("Normalizing product predictions", i, "of", nbBaseModels, "\n")
targetVar <- targetCols[i]
alreadyOwned <- is.na(testDataLag[[paste0(targetVar, "Lag1")]]) |
testDataLag[[paste0(targetVar, "Lag1")]] == 1
predictions <- allPredictions[product==targetVar, prediction]
predictionsPrevNotOwned <- predictions[!alreadyOwned]
if(suppressWarnings(max(predictions[alreadyOwned]))>0) browser()
predictedPosFlankCount <- sum(predictionsPrevNotOwned *
newProdPredictions[!alreadyOwned])
probMultiplier <- nrow(testDataLag) * fractionPosFlankUsers *
expectedCountPerPosFlank * countContributions[17, i] /
predictedPosFlankCount
probMultipliers[i] <- probMultiplier
    # debugging hook for inspecting selected products; disabled so the script runs through:
    # if(i %in% c(3, 5, 7, 13, 18, 19, 22, 23, 24)) browser()
if(is.finite(probMultiplier)){
if(normalizeMode == "additive" || targetVar %in% additiveNormalizeProds){
predictions[!alreadyOwned] <- predictions[!alreadyOwned] +
(probMultiplier-1)*mean(predictions[!alreadyOwned])
} else{
if(normalizeMode == "linear"){
predictions[!alreadyOwned] <- predictions[!alreadyOwned] *
probMultiplier
} else{
predictions[!alreadyOwned] <- probExponentNormaliser(
predictions[!alreadyOwned], probMultiplier,
weights=newProdPredictions[!alreadyOwned])
}
}
allPredictions[product==targetVar, prediction:=predictions]
}
}
}
setkey(allPredictions, ncodpers)
allPredictions[,order_predict := match(1:length(prediction),
order(-prediction)), by=ncodpers]
allPredictions <- allPredictions[order(ncodpers, -prediction), ]
if(nomPensAboveNominaBothNotOwned){
ncodpers <- unique(allPredictions$ncodpers)
nominaProb <- allPredictions[product == "ind_nomina_ult1", prediction]
nominaProbRank <- allPredictions[product == "ind_nomina_ult1", order_predict]
nomPensProb <- allPredictions[product == "ind_nom_pens_ult1", prediction]
nomPensProbRank <- allPredictions[product == "ind_nom_pens_ult1", order_predict]
swapIds <- nominaProb>0 & nomPensProb>0 & nominaProb>nomPensProb
swapNcodpers <- ncodpers[swapIds]
allPredictions[ncodpers %in% swapNcodpers & product == "ind_nomina_ult1",
order_predict := nomPensProbRank[swapIds]]
allPredictions[ncodpers %in% swapNcodpers & product == "ind_nom_pens_ult1",
order_predict := nominaProbRank[swapIds]]
allPredictions[ncodpers %in% swapNcodpers & product == "ind_nomina_ult1",
prediction := nomPensProb[swapIds]]
allPredictions[ncodpers %in% swapNcodpers & product == "ind_nom_pens_ult1",
prediction := nominaProb[swapIds]]
}
if(mapBoosting){
nominaPreds <- allPredictions[product == "ind_nomina_ult1", prediction]
nomPensPreds <- allPredictions[product == "ind_nom_pens_ult1", prediction]
averagedIds <- nominaPreds>0 & nomPensPreds>0 &
abs(nominaPreds - nomPensPreds) < maxDiffNominaNomPensMapBoosting &
(nominaPreds/nomPensPreds) > maxRelDiffNominaNomPensMapBoosting &
(nominaPreds/nomPensPreds) < (1/maxRelDiffNominaNomPensMapBoosting)
avNcodPers <- ncodpers[averagedIds]
if(averageNominaNomPensProbsMAPBoosting){
if(averageNominaNomPensProbsMAPBoostingMethod == "Average"){
averageProbs <- (nominaPreds[averagedIds] +
nomPensPreds[averagedIds])/2
} else{
if(averageNominaNomPensProbsMAPBoostingMethod == "Min"){
averageProbs <- pmin(nominaPreds[averagedIds],
nomPensPreds[averagedIds])
} else{
stop("averageNominaNomPensProbsMAPBoostingMethod does not exist")
}
}
allPredictions[ncodpers %in% avNcodPers & product == "ind_nomina_ult1",
prediction := averageProbs]
allPredictions[ncodpers %in% avNcodPers & product == "ind_nom_pens_ult1",
prediction := averageProbs * (1 + 1e-10)]
allPredictions[, order_predict := match(1:length(prediction),
order(-prediction)), by=ncodpers]
allPredictions <- allPredictions[order(ncodpers, -prediction), ]
}
meanNomNomPensProb <-
allPredictions[product %in% c("ind_nomina_ult1", "ind_nom_pens_ult1"),
.(meanPred = mean(prediction)), ncodpers]
allSwapped <- c()
for(i in 1:nbMapTopPredictions){
nomPensRank <- consideredMapTopPredictions[i]
nomRank <- nomPensRank + 1
consideredNcodpers <-
intersect(
avNcodPers,
intersect(allPredictions[product == "ind_nomina_ult1" &
order_predict == nomRank, ncodpers],
allPredictions[product == "ind_nom_pens_ult1" &
order_predict == nomPensRank, ncodpers])
)
if(!swapWithCno){
consideredNcodpers <-
setdiff(consideredNcodpers,
allPredictions[ncodpers %in% consideredNcodpers &
product == "ind_cno_fin_ult1" &
order_predict == nomRank + 1, ncodpers])
}
pairRightMap <- map7(matrix(c(c(rep(0, nomPensRank-1), rep(1, 2),
rep(0, 7-nomPensRank))[1:7], 2), nrow=1))
pairWrongMap <- map7(matrix(c(c(rep(0, nomPensRank), rep(1, 2),
rep(0, 6-nomPensRank))[1:7], 2), nrow=1))
singleRightMap <- map7(matrix(c(c(rep(0, nomPensRank-1), 1,
rep(0, 7-nomPensRank))[1:7], 1),
nrow=1))
singleWrongMap <- map7(matrix(c(c(rep(0, 1 + nomPensRank), 1,
rep(0, 6-nomPensRank))[1:7], 1),
nrow=1))
meanNomNomPensProbCons <-
meanNomNomPensProb[ncodpers %in% consideredNcodpers, meanPred]
meanSingleProbCons <-
allPredictions[ncodpers %in% consideredNcodpers &
order_predict == nomPensRank + 2,
prediction]
swapNcodpersIds <-
(meanNomNomPensProbCons * (1-meanSingleProbCons) * pairRightMap +
meanSingleProbCons * (1-meanNomNomPensProbCons) * singleWrongMap)/
(meanSingleProbCons * (1-meanNomNomPensProbCons) * singleRightMap +
meanNomNomPensProbCons * (1-meanSingleProbCons) * pairWrongMap) <
swapRelativeCutoff
swapNcodpers <- consideredNcodpers[swapNcodpersIds]
allSwapped <- unique(c(allSwapped, swapNcodpers))
allPredictions[order_predict == nomPensRank + 2 &
ncodpers %in% swapNcodpers,
order_predict := as.integer(order_predict - 2)]
allPredictions[product %in% c("ind_nomina_ult1", "ind_nom_pens_ult1") &
ncodpers %in% swapNcodpers,
order_predict := as.integer(order_predict + 1)]
}
}
orderCount <- allPredictions[, .N, .(ncodpers, order_predict)]
if(max(orderCount$N)>1) browser()
hist(allPredictions[order_predict==1, prediction])
topPredictions <- allPredictions[order_predict==1, .N, product]
topPredictions <- topPredictions[order(-N)]
topPredictionsPosFlanks <- allPredictions[order_predict==1 &
ncodpers %in% posFlankClients,
.N, product]
topPredictionsPosFlanks <- topPredictionsPosFlanks[order(-N)]
productRankDelaFin <- allPredictions[product=="ind_dela_fin_ult1", .N,
order_predict]
productRankDelaFin <- productRankDelaFin[order(order_predict),]
productRankDecoFin <- allPredictions[product=="ind_deco_fin_ult1", .N,
order_predict]
productRankDecoFin <- productRankDecoFin[order(order_predict),]
productRankTjcrFin <- allPredictions[product=="ind_tjcr_fin_ult1", .N,
order_predict]
productRankTjcrFin <- productRankTjcrFin[order(order_predict),]
productRankRecaFin <- allPredictions[product=="ind_reca_fin_ult1", .N,
order_predict]
productRankRecaFin <- productRankRecaFin[order(order_predict),]
allPredictions[, totalProb := prediction * rep(newProdPredictions,
each = nbBaseModels)]
meanProductProbs <- allPredictions[, .(meanCondProb = mean(prediction),
meanProb = mean(totalProb),
totalProb = sum(totalProb)), product]
meanProductProbs <- meanProductProbs[order(-meanProb), ]
productString <- paste(allPredictions[order_predict==1, product],
allPredictions[order_predict==2, product],
allPredictions[order_predict==3, product],
allPredictions[order_predict==4, product],
allPredictions[order_predict==5, product],
allPredictions[order_predict==6, product],
allPredictions[order_predict==7, product])
if(length(productString) != nrow(testDataLag)) browser()
submission <- data.frame(ncodpers = testDataLag$ncodpers,
added_products = productString)
paddedSubmission <- fread("Data/sample_submission.csv")
paddedSubmission[, added_products := ""]
matchIds <- match(submission$ncodpers, paddedSubmission$ncodpers)
paddedSubmission[matchIds, added_products := submission$added_products]
write.csv(paddedSubmission, file.path(getwd(), "Submission", submissionDate,
paste0(submissionFile, ".csv")),
row.names = FALSE)
if(savePredictions){
saveRDS(allPredictions, file=file.path(predictionsPath,
paste0(submissionFile, ".rds")))
}
cat("Submission file created successfully!\n",
nrow(submission)," records were predicted (",
round(nrow(submission)/nrow(paddedSubmission)*100,2), "%)\n", sep="")
|
testInvariance <-
function(original_model, sub_model1, sub_model2, sub1_mxdata, sub2_mxdata, verbose=FALSE, alpha.level=0.05, invariance=NULL)
{
if (verbose) {
print(paste("Performing additional invariance test with " ));
}
original_model$data <- sub1_mxdata;
try (
run_sub1_original <- OpenMx::mxRun(original_model, suppressWarnings=T, silent=T)
)
original_model$data <- sub2_mxdata;
try( run_sub2_original <- OpenMx::mxRun(original_model, suppressWarnings=T, silent=T) )
sum_sub1_original <- summary(run_sub1_original);
sum_sub2_original <- summary(run_sub2_original);
sum_sub1 <- summary(sub_model1)
sum_sub2 <- summary(sub_model2)
df1 <- sum_sub1_original$estimatedParameters + sum_sub2_original$estimatedParameters
df2 <- sum_sub1$estimatedParameters + sum_sub2$estimatedParameters
df_invariance <- df1-df2
lr_invariance <- +sum_sub1$Minus2LogLikelihood + sum_sub2$Minus2LogLikelihood -sum_sub1_original$Minus2LogLikelihood - sum_sub2_original$Minus2LogLikelihood
p_invariance <- pchisq(lr_invariance, df_invariance, lower.tail=F)
if (verbose) {
cat(sum_sub1_original$estimatedParameters ,":", sum_sub2_original$estimatedParameters, ":",sum_sub1$estimatedParameters ,":",sum_sub2$estimatedParameters);
cat(paste(" |-- Invariance result: LR ",lr_invariance,df1,df2,p_invariance,"\n") );
}
if (is.null(alpha.level)) {
alpha.level = 0.05
}
result <- list();
result$passed <- (p_invariance > alpha.level);
result$p.value <- p_invariance;
if (is.na(result$passed)) {
result$passed <- F
warning("Invariance Test produced a NaN value!")
}
return(result);
}
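## Hypothetical usage (assumes fitted OpenMx models for the full sample and
## the two subgroups, plus mxData objects for each subgroup, as in a
## semtree-style invariance check):
# res <- testInvariance(original_model, sub_model1, sub_model2,
#                       sub1_mxdata, sub2_mxdata, alpha.level = 0.05)
# res$passed   # TRUE if the likelihood-ratio test does not reject invariance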
|
library(shiny)
library(shiny.router)
options(shiny.router.debug = T)
menu <- (
tags$ul(
tags$li(a(class = "item", href = route_link("/"), "Page")),
tags$li(a(class = "item", href = route_link("other"), "Other page"))
)
)
page <- function(title, content) {
div(
menu,
titlePanel(title),
p(content)
)
}
root_page <- page("Home page", "Welcome on sample routing page!")
other_page <- page("Some other page", "Lorem ipsum dolor sit amet.")
router <- make_router(
route("/", root_page),
route("other", other_page)
)
ui <- fluidPage(
router$ui,
actionButton("change", "Change query path"),
verbatimTextOutput("url")
)
server <- function(input, output, session) {
router$server(input, output, session)
output$url <- renderPrint(
get_query_param()
)
observeEvent(input$change, {
change_page("other?a=2")
})
}
shinyApp(ui, server)
|
essHistogram <- function(x, alpha = 0.5, q = NULL, intv = NULL,
plot = TRUE, mode = ifelse(anyDuplicated(x),"Gen","Con"),
xname = deparse(substitute(x)), ...) {
if (!is.numeric(x))
stop("'x' must be numeric")
y = sort(x[is.finite(x)])
n = as.integer(length(y))
if (is.na(n))
stop("invalid length(x)")
if (length(unique(y)) < 2) {
message(sprintf('Only %d distinct finite observations, call "graphics::hist()" instead!',length(unique(y))))
return (hist(x,freq=FALSE,plot=plot,...))
}
if (is.null(intv))
intv = genIntv(n)
if (!('left' %in% names(intv) && 'right' %in% names(intv)))
stop("'intv' must have two fields 'left' and 'right'!")
intv = intv[(intv$left<intv$right)&(intv$left>=1)&(intv$right<=n),]
if (nrow(intv) < 1)
stop("No valid intervals in 'intv'!")
if (is.null(q))
q = msQuantile(n, alpha, intv = intv, mode = mode)
else if (!missing(alpha) || !missing(mode) || !missing(intv))
warning("Use input 'q' and ignore 'alpha', 'intv' or 'mode'!")
if (length(q) > 1) {
q = q[1]
warning("Length of 'q' or 'alpha' > 1: use only the first value, and ignore the others!")
}
if (!is.finite(q))
stop("'q' must be a finite value!")
minQ = .minThreshold(n,intv)
if (q < minQ)
stop(sprintf("Empty constraint set: threshold 'q' should be >= %g!", minQ))
message(sprintf('The threshold is %g', q))
message("Dynamic programming ...", appendLF=FALSE)
intv = .validInterval(intv, y)
cumcnt = c(0,which(diff(y)!=0),n)
y = unique(y)
y = c(y[1],(y[1]+y[2])/2,y[-1])
nintv = nrow(intv)
intv$left = sapply(1:nintv, function (x) { if (intv$left[x] == 1) 1 else which(intv$left[x] == cumcnt)})
intv$right = sapply(1:nintv, function (x) which(intv$right[x] == cumcnt))
if (length(intv$left)!=nintv || length(intv$right)!=nintv)
stop('There are some invalid intervals')
bnd = .intv2Bounds(intv, y, cumcnt, q)
eh = .boundedHistogram(y, cumcnt, as.integer(bnd$start), as.integer(bnd$bounds$ri-1),
bnd$bounds$lower, bnd$bounds$upper)
message(" ... end!")
nSeg = length(eh$value)
breakPoint = c(y[1],y[eh$rightIndex])
ret = list("breaks" = breakPoint,
"counts" = round(eh$value*diff(breakPoint)*n),
"density" = eh$value,
"mids" = (breakPoint[1:nSeg]+breakPoint[2:(nSeg+1)])/2,
"xname" = xname,
"equidist" = FALSE)
class(ret) = "histogram"
if (plot)
plot(ret, ...)
ret
}
.intv2Bounds <- function (intv, y, cumcnt, q) {
if (is.unsorted(y))
stop("'y' must be sorted!")
n = cumcnt[length(cumcnt)]
nintv = nrow(intv)
Left = intv$left
Right = intv$right
od = order(Left, Right)
Left = Left[od]
Right = Right[od]
Len = cumcnt[Right]-cumcnt[Left]
maxIt = 10
len = unique(Len)
pen = sqrt(2*(1+log(n^2/len/(n-len))))
a = len * log(len/n) + (n-len) * log(1 - len/n) - 1/2 * (q + pen)^2
lower = (len/n) * 0.99
z = atanh( 2 * lower - 1 )
for(i in 1:maxIt) {
a2 = -0.5 * len * ( 1 - tanh(z)^2 )
a1 = len * (1 - tanh(z)) - (n-len) * (1 + tanh(z))
a0 = a - len * log(lower) - (n-len) * log(1 - lower)
p2 = a1 / a2 / 2
root = pmax(p2^2 + a0 / a2, 0)
z = z + ifelse(root > 0 & len != 0, -p2 - sqrt(root), a0 / a1)
lower = (tanh(z) + 1) / 2
}
upper = 1 - ( 1 - (len/n) ) * 0.9
z = atanh( 2 * upper - 1 )
for (i in 1:maxIt) {
a2 = -0.5 * len * ( 1 - tanh(z)^2 )
a1 = len * (1 - tanh(z)) - (n-len) * (1 + tanh(z))
a0 = a - len * log(upper) - (n-len) * log(1 - upper)
p2 = a1 / a2 / 2
root = pmax(p2^2 + a0 / a2, 0)
z = z + ifelse(root > 0 & len != 0, -p2 + sqrt(root), a0 / a1)
upper = (tanh(z) + 1) / 2
}
if (any(is.na(lower)))
lower[is.na(lower)] = -Inf
if (any(is.na(upper)))
upper[is.na(upper)] = Inf
Lower = rep(NA, nintv)
Upper = rep(NA, nintv)
for (i in 1:length(len)) {
Lower[Len==len[i]] = lower[i]
Upper[Len==len[i]] = upper[i]
}
eLen = y[Right] - y[Left]
vInd = (eLen != 0)
Lower = Lower[vInd]
Upper = Upper[vInd]
Left = Left[vInd]
Right = Right[vInd]
eLen = eLen[vInd]
bnd = data.frame(li = Left, ri = Right, lower = Lower/eLen, upper = Upper/eLen)
remove('Lower', 'Upper', 'Left', 'Right', 'eLen', 'Len', 'lower', 'upper', 'len', 'vInd')
bnd = bnd[order(bnd$li, bnd$ri),]
st = rep(NA,length(y))
st[bnd$li[1]] = 0
aux = which(diff(bnd$li) != 0)
st[bnd$li[aux+1]] = aux
si = c(st[!is.na(st)], nrow(bnd)) + 1
feas = sapply(1:(length(si)-1),
function(i) with(bnd[si[i]:(si[i+1]-1),],
{wi <- ri == li[1]; if(any(wi)) max(lower[wi]) <= min(upper[wi]) else TRUE}))
list(bounds = bnd, start = as.integer(st), feasible = all(feas))
}
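## Hedged usage sketch for essHistogram() (assumes the accompanying helpers
## such as msQuantile, genIntv and .boundedHistogram are available):
# set.seed(42)
# x <- c(rnorm(300), rnorm(300, mean = 5))
# h <- essHistogram(x, alpha = 0.5, plot = TRUE)   # data-driven breakpoints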
|
make_vis <- function(x) {
row.names(x) <- 1:nrow(x)
x <- x[!x[[2]] %in% "Total", ]
x[["Missing"]] <- FALSE
ind <- which(x[[1]] %in% "Missing"):nrow(x)
x[["Missing"]][ind] <- TRUE
x <- x[, -1]
x[["label"]][x[[1]] %in% "<NA>"] <- "<NA>"
x[["label"]][x[[1]] %in% "<blank>"] <- "<blank>"
x[["label"]][x[["label"]] %in% ""] <- x[[1]][x[["label"]] %in% ""]
x[["Freq"]][x[["Freq"]] %in% "..."] <- 0
x$Freq <- as.numeric(x$Freq)
x$Freq[is.na(x$Freq)] <- 0
x$fact <- factor(x$label, levels=x$label)
labels.wrap <- rev(lapply(strwrap(x$fact, 30, simplify=F),paste,collapse="\n"))
p <- ggplot() +
geom_bar(aes_string(x="fact", y = "Freq"), data = x, stat = "identity") +
coord_flip() + scale_x_discrete(limits = rev(levels(x$fact)),
labels = labels.wrap) +
theme(axis.title.x = element_blank(),
axis.title.y = element_blank(),
aspect.ratio = 1/1)
p
}
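## Hedged sketch of the expected input: the first column flags a "Missing"
## section, the second holds raw values (any "Total" row is dropped), with
## 'label' and 'Freq' columns (this toy table is made up for illustration).
# tab <- data.frame(section = c("Valid", "Valid", "Missing"),
#                   value = c("a", "b", "<NA>"),
#                   label = c("Alpha", "Beta", ""),
#                   Freq = c("10", "5", "2"))
# make_vis(tab)   # horizontal bar chart with wrapped, NA-aware labels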
|
context("Unit tests of the isSymmetricPD function")
isSymmetricPDAlt <- function(M) {
nm <- deparse(substitute(M))
if (!is.matrix(M) || !is.numeric(M)) {
stop(nm, " is not a numeric matrix")
}
if (!isSymmetric(M)) {
stop(nm, " is not a symmetric matrix")
}
if (!all(eigen(M, symmetric = TRUE)$values > .Machine$double.eps)) {
return(FALSE)
} else {
return(TRUE)
}
}
test_that("isSymmetricPD works as intended", {
pdS <- createS(n = 15, p = 10)
notpdS <- createS(n = 5, p = 10)
  expect_true(isSymmetricPD(pdS))
  expect_false(isSymmetricPD(notpdS))
})
test_that("isSymmetricPD works for degenerate input", {
expect_false(isSymmetricPD(matrix(1,0,0)))
expect_true(isSymmetricPD(matrix(1,1,1)))
expect_false(isSymmetricPD(matrix(0,1,1)))
})
test_that("isSymmetricPD throws errors when appropriate", {
S1 <- createS(n = 15, p = 10)
S2 <- createS(n = 5, p = 10)
S1[1,2] <- 1
expect_that(isSymmetricPD(S1), throws_error("symmetric"))
S2 <- as.character(S2)
expect_that(isSymmetricPD(S2), throws_error("numeric"))
S2 <- as.numeric(S2)
expect_that(isSymmetricPD(S2), throws_error("matrix"))
})
|
surv_dist <- function(values = values_surv_dist) {
new_qual_param(
type = "character",
values = values,
label = c(surv_dist = "Distribution"),
finalize = NULL
)
}
values_surv_dist <- c("weibull", "exponential", "gaussian",
"logistic", "lognormal", "loglogistic")
|
context("h5-DataSet-scalar")
fname <- tempfile(fileext=".h5")
test_that("datatypes-Array-BugWithScalar",{
fname <- system.file("test-scalar.h5", package = "hdf5r", mustWork = TRUE)
f <- h5file(fname, "r")
dset <- f[["/Analyses/Basecall_2D_000/BaseCalled_2D/Fastq"]]
dat <- dset[]
expect_identical(length(dat), 1L)
expect_is(dat, "character")
h5close(f)
})
|
kmeansClustering <-function(DataOrDistances,ClusterNo=2,Type='LBG',RandomNo=5000,PlotIt=FALSE,Verbose=FALSE,...){
if (!isSymmetric(unname(DataOrDistances))) {
if (ClusterNo < 2) {
warning("ClusterNo should be an integer > 2. Now, all of your data is in one cluster.")
if (is.null(nrow(DataOrDistances))) {
return(cls <- rep(1, length(DataOrDistances)))
} else{
return(cls <- rep(1, nrow(DataOrDistances)))
}
}
switch(
Type,
'Hartigan' = {
res = kmeans(DataOrDistances, centers = ClusterNo,algorithm = "Hartigan-Wong", ...)
Cls = as.vector(res$cluster)
if (Verbose == TRUE) {
print(res)
}
if (PlotIt) {
ClusterPlotMDS(DataOrDistances, Cls)
}
Cls = ClusterRename(Cls, DataOrDistances)
return(list(
Cls = Cls,
Object = list(
res
),
Centroids = res$centers
))
},
'kcentroids' = {
if (!requireNamespace('flexclust',quietly = TRUE)) {
message(
'Subordinate clustering (flexclust) package is missing. No computations are performed.
Please install the package which is defined in "Suggests".'
)
return(
list(
Cls = rep(1, nrow(DataOrDistances)),
Object = "Subordinate clustering (flexclust) package is missing.
Please install the package which is defined in 'Suggests'."
)
)
}
res = flexclust::kcca(x = DataOrDistances, k = ClusterNo, ...)
Cls = as.vector(res@cluster)
Centroids=res@centers
if (PlotIt) {
ClusterPlotMDS(DataOrDistances, Cls)
}
Cls = ClusterRename(Cls, DataOrDistances)
return(list(
Cls = Cls,
Object = res,
Centroids = Centroids
))
},
'LBG' = {
if (!requireNamespace('cclust',quietly = TRUE)) {
message(
'Subordinate clustering package (cclust) is missing. No computations are performed.
Please install the package which is defined in "Suggests".'
)
return(
list(
Cls = rep(1, nrow(DataOrDistances)),
Object = "Subordinate clustering package (cclust) is missing.
Please install the package which is defined in 'Suggests'."
)
)
}
res = cclust::cclust(
x = DataOrDistances,
centers = ClusterNo,
method = 'kmeans',
verbose = Verbose,
...
)
Cls = res$cluster
SSE = res$withinss
if (PlotIt) {
ClusterPlotMDS(DataOrDistances, Cls)
}
Cls = ClusterRename(Cls, DataOrDistances)
return(list(
Cls = Cls,
Object = res,
Centroids = res$centers
))
},
"Sparse" = {
out=SparseClustering(DataOrDistances = DataOrDistances,
ClusterNo = ClusterNo,Strategy = 'kmeans',...)
return(list(
Cls = out$Cls,
Object = out$Object
))
},
"Steinley" = {
Liste = lapply(1:RandomNo, function(i, DataOrDistances, centers, ...) {
c = kmeans(DataOrDistances, centers = ClusterNo, ...)
return(list(sum(c$withinss), kmeansOut = c))
}, DataOrDistances, ClusterNo, ...)
SSEs = unlist(lapply(Liste, "[[", 1))
res = Liste[[which.min(SSEs)]]$kmeansOut
Cls = as.vector(res$cluster)
if (PlotIt) {
requireNamespace('DataVisualizations',quietly = TRUE)
ClusterPlotMDS(DataOrDistances, Cls)
}
Cls = ClusterRename(Cls, DataOrDistances)
return(list(
Cls = Cls,
Object = list(
res
),
Centroids = res$centers
))
},
{
res = kmeans(DataOrDistances, centers = ClusterNo, algorithm = Type, ...)
Cls = as.vector(res$cluster)
if (Verbose == TRUE) {
print(res)
}
if (PlotIt) {
ClusterPlotMDS(DataOrDistances, Cls)
}
Cls = ClusterRename(Cls, DataOrDistances)
return(list(
Cls = Cls,
Object = res,
Centroids = res$centers
))
}
)
} else{
message(
'Currently, the "Type" parameter of k-means cannot be set in the case of using a distance matrix.'
)
return(
kmeansDist(
Distance = DataOrDistances,
ClusterNo = ClusterNo,
RandomNo = RandomNo,
PlotIt = PlotIt,
verbose = Verbose,
...
)
)
}
}
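## Usage sketch (hedged): the call below assumes the package-internal
## helpers ClusterRename/ClusterPlotMDS are available at run time, so it
## is shown as a comment only; the data and ClusterNo are illustrative.
# res <- kmeansClustering(as.matrix(iris[, 1:4]), ClusterNo = 3, Type = "Hartigan")
# table(res$Cls)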
|
pred.int.norm.Modified.CA.on.r.K <-
function (n, df = n - 1, n.mean = 1, r = 1, delta.over.sigma = 0,
conf.level = 0.95, K.tol = .Machine$double.eps^(1/2),
integrate.args.list = NULL)
{
if (!is.vector(n, mode = "numeric") || length(n) != 1 ||
!is.vector(df, mode = "numeric") || length(df) != 1 ||
!is.vector(n.mean, mode = "numeric") || length(n.mean) !=
1 || !is.vector(r, mode = "numeric") || length(r) !=
1 || !is.vector(delta.over.sigma, mode = "numeric") ||
length(delta.over.sigma) != 1 || !is.vector(conf.level,
mode = "numeric") || length(conf.level) != 1)
stop(paste("'n', 'df', 'r', 'delta.over.sigma',", "and 'conf.level' must be numeric scalars"))
if (n < 2 || n != trunc(n))
stop("'n' must be an integer greater than or equal to 2")
if (df < 1 || df != trunc(df))
stop("'df' must be an integer greater than or equal to 1")
if (n.mean < 1 || n.mean != trunc(n.mean))
stop("'n.mean' must be an integer greater than or equal to 1")
if (r < 1)
stop("'r' must be greater than or equal to 1")
if (!is.finite(delta.over.sigma))
stop("'delta.over.sigma' must be finite")
if (conf.level <= 0 || conf.level >= 1)
stop("'conf.level' must be between 0 and 1")
fcn.to.min <- function(K, n.weird, df.weird, n.mean, r.weird,
delta.over.sigma, conf.level, integrate.args.list) {
(conf.level - pred.int.norm.Modified.CA.on.r.prob(n = n.weird,
df = df.weird, n.mean = n.mean, K = K, delta.over.sigma = delta.over.sigma,
r = r.weird, integrate.args.list = integrate.args.list))^2
}
K <- nlminb(start = 1, objective = fcn.to.min, lower = 0,
control = list(x.tol = K.tol), n.weird = n, df.weird = df,
n.mean = n.mean, r.weird = r, delta.over.sigma = delta.over.sigma,
conf.level = conf.level, integrate.args.list = integrate.args.list)$par
K
}
|
claiminfo <- function(...) {
arglist <- list(...)
if (length(arglist) == 1L && is.riskproc(arglist[[1L]])) {
return(arglist[[1L]][['claims']])
} else {
if ('hypoexp' %in% names(arglist) && is.numeric(arglist[[c('hypoexp', 'rates')]])) {
arglist <- within(arglist, {
mu <- sum(1.0 / arglist[[c('hypoexp', 'rates')]])
hypoexp$coef <- ratetoalpha(arglist[[c('hypoexp', 'rates')]])
mgf <- function(x) {
mgfhypoexp(x = x,
rate = arglist[[c('hypoexp', 'rates')]],
difforder = 0L)
}
mgf.d1 <- function(x) {
mgfhypoexp(x = x,
rate = arglist[[c('hypoexp', 'rates')]],
difforder = 1L)
}
mgf.d2 <- function(x) {
mgfhypoexp(x = x,
rate = arglist[[c('hypoexp', 'rates')]],
difforder = 2L)
}
cdf <- function(x) {
phypoexp(q = x,
rate = arglist[[c('hypoexp', 'rates')]])
}
cdf.tailarea <- function(x) {
phypoexp(q = x,
rate = arglist[[c('hypoexp', 'rates')]],
tailarea = TRUE)
}
pdf <- function(x) {
dhypoexp(x = x,
rate = arglist[[c('hypoexp', 'rates')]])
}
})
}
return(structure(.Data = arglist,
class = c('claiminfo', 'list')))
}
}
|
NULL
countCells <- function(x, samples, meta.data=NULL){
if(!is.data.frame(meta.data) & !is.null(meta.data)){
meta.data <- as.data.frame(meta.data)
}
if(length(samples) > 1 & !is.null(meta.data)){
stop("Multiple sample columns provided, please specify a unique column name")
} else if(is.null(meta.data) & length(samples) != ncol(x)){
stop("Length of vector does not match dimensions of object. Length:",
length(samples), " Dimensions: ", ncol(x))
}
if(ncol(nhoods(x)) == 1 & nrow(nhoods(x)) == 1){
stop("No neighbourhoods found. Please run makeNhoods() first.")
}
message("Checking meta.data validity")
if(!is.null(meta.data)){
if (is.factor(meta.data[, samples])){
samp.ids <- levels(meta.data[, samples])
} else {
samp.ids <- unique(as.character(meta.data[, samples]))
}
} else {
if (is.factor(samples)){
samp.ids <- levels(samples)
} else {
samp.ids <- unique(as.character(samples))
}
    }
    if (is.null(meta.data)) {
        # when only a samples vector is given, wrap it in a data.frame so
        # the counting loop below can treat both input forms the same way
        meta.data <- data.frame(samples = samples, row.names = colnames(x))
        samples <- "samples"
    }
    num.hoods <- ncol(nhoods(x))
    dummy.meta.data <- Matrix(data=0, nrow=nrow(meta.data), ncol = length(samp.ids), sparse = TRUE)
colnames(dummy.meta.data) <- samp.ids
rownames(dummy.meta.data) <- rownames(meta.data)
for (s in seq_along(samp.ids)){
i.s <- samp.ids[s]
s.ixs <- which(meta.data[samples]==i.s)
dummy.meta.data[s.ixs, as.character(i.s)] <- 1
}
message("Counting cells in neighbourhoods")
count.matrix <- Matrix::t(nhoods(x)) %*% dummy.meta.data
rownames(count.matrix) <- seq_len(num.hoods)
nhoodCounts(x) <- count.matrix
return(x)
}
|
findformants = function (sound, fs = 10000, coeffs = NULL, maxbw = 600,
minformant = 200, verify = TRUE, showbws = FALSE, showrejected = TRUE){
if (missing (sound)) sound = 1
if (class(sound) == "ts") fs = frequency(sound)
if (class(sound) == "sound") {
fs = sound$fs
sound = sound$sound
}
if (is.null(coeffs)) coeffs = lpc (sound, fs = fs)
if (length(coeffs) == 1) coeffs = lpc (sound, fs = fs, order = coeffs)
roots = polyroot (rev(coeffs))
angs = atan2 (Im(roots), Re(roots))
formants = round (angs * (fs/(2*pi)), 2)
nums = order (formants)
formants = formants[nums]
bws = -(fs/pi) * log (abs(roots[nums]))
touse = (bws < maxbw & formants > minformant & formants < fs/2)
out = data.frame (formant = formants[touse], bandwidth = bws[touse])
if (verify == TRUE){
multiplot (sizes = c(.7,.3), type = 'c', show = FALSE)
cols = rep (2:6, 10)
freqresponse (1, coeffs, fs = fs)
if (length(sound) > 1) spectralslice (preemphasis(sound,fs=fs), fs = fs, add = TRUE, padding = 0, col = 1, lty = 'dotted')
for (i in 1:nrow(out)){
abline (v = out[i,1], lwd = 2, col = cols[i])
if (showbws == TRUE) abline (v = out[i,1] + out[i,2], lty = 'dotted', col = cols[i])
if (showbws == TRUE) abline (v = out[i,1] - out[i,2], lty = 'dotted', col = cols[i])
}
if (showrejected == TRUE) abline (v = formants[!touse], lty = 'dotted', lwd = 2)
plot (roots[nums], xlim = range (-1.1,1.1), ylim = range (-1.1,1.1), pch = 4, lwd = 2,
xlab = 'Real', ylab = 'Imaginary', col = !touse)
sdellipse (means = c(0,0), points = matrix (c(1,0,0,1),2,2), stdev = 1, density = .01)
abline (h = 0, v = 0, lty = 'dotted')
tmp = 0
for (i in 1:length(touse))
if (touse[i]){
tmp = tmp + 1
points (roots[nums][i], pch = 4, lwd = 2, col = cols[tmp])
}
}
invisible (out)
}
|
plis <-
function (lis, fdr = 0.001, adjust = TRUE)
{
n = length(lis)
s.lis = sort(lis)
for (i in 1:n) {
if (mean(s.lis[1:i]) > fdr)
break
}
nNonNull = i - 1
States = rep(0, n)
if (nNonNull > 0)
States[lis <= s.lis[nNonNull]] = 1
if (adjust) {
aLIS = sapply(lis, function(cut) mean(lis[which(lis <=
cut)]))
return(list(States = States, aLIS = aLIS))
}
else {
return(list(States = States))
}
}
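## Minimal usage sketch for plis() with simulated local index of
## significance values (illustrative data, not from the package):
## States flags the cases declared non-null at the requested FDR.
set.seed(1)
ex.lis <- c(runif(10, 0, 0.001), runif(90, 0.2, 1))
sum(plis(ex.lis, fdr = 0.05)$States)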
|
test_that("node_analysis",{
set.seed(1337)
res <- Node_analysis(coquettes, 50, "rdtable")
  expect_lt(abs(sum(SOS(res, 28), na.rm = TRUE) - 24.38952), 0.0001)
expect_equal(sum(is.na(SOS(res, 31))), 49)
expect_lt(abs(GND(res, 28) - 0.622806), 0.0001)
})
|
ve <- 2                                    # verbosity level for the helpers below
fn <- NULL
rf <- c("def-model.RDS")                   # model file required by the test
em <- EnvManager$new(ve = ve, rp = "./")
ed <- em$setup_env(rf, fn)                 # set up the environment and fetch the file
mfn <- paste0(ed, "/def-model.RDS")
mp <- ModelPredictor$new(mf = mfn, ve = ve)
l <- "last year at this time i was preparing for a trip to rome"
w <- strsplit(l, " ")[[1]]
p <- mp$calc_perplexity(w)                 # perplexity of the sentence under the model
print(p)
em$td_env()                                # tear down the environment
|
source("setup.R")
dir <- system.file("rda", package = "TH.data")
load(file.path(dir, "india.rda"))
xvars <- c("cage", "breastfeeding", "mbmi", "mage", "medu",
"edupartner", "csex", "ctwin", "cbirthorder",
"munemployed", "mreligion", "mresidence", "deadchildren",
"electricity", "radio", "television", "refrigerator",
"bicycle", "motorcycle", "car")
fvars <- xvars[sapply(xvars, function(x) length(unique(kids[[x]]))) < 6]
nvars <- xvars[!xvars %in% fvars]
kids[fvars] <- lapply(fvars, function(f) factor(kids[[f]]))
kids[nvars] <- lapply(nvars, function(n) scale(kids[[n]]))
fm_gam <- c(
"ctm" = as.formula(paste("stunting ~ ",
paste("bols(", xvars, ", df = 2)", collapse = "+"), "+",
paste("bbs(", nvars, ", center = TRUE, df = 2)", collapse = "+"))),
"tram" = as.formula(paste("stunting ~",
paste("bols(", xvars, ", intercept = FALSE, df = 1)", collapse = "+"), "+",
paste("bbs(", nvars, ", center = TRUE, df = 1)", collapse = "+"))))
fm_glm <- c(
"ctm" = as.formula(paste("stunting ~ ",
paste("bols(", xvars, ", df = 2)", collapse = "+"))),
"tram" = as.formula(paste("stunting ~",
paste("bols(", xvars, ", intercept = FALSE, df = 1)", collapse = "+"))))
fm_tree <- as.formula(paste("stunting ~ ", paste(xvars, collapse = "+")))
kids$stunting <- as.double(kids$stunting)
ldata <- kids
m_mlt <- BoxCox(stunting ~ 1, data = ldata, prob = c(.05, .975), extrapolate = TRUE)
ll0 <- logLik(m_mlt) / nrow(ldata)
fd <- cv(weights(m_mlt), type = "subsampling", B = B, prob = .75)
bctrl <- boost_control(mstop = M, trace = TRUE)
(m_glm <- FUN(m_mlt, fm_glm, ldata, control = bctrl, folds = fd))
(m_gam <- FUN(m_mlt, fm_gam, ldata, control = bctrl, folds = fd))
(m_tree <- FUN(m_mlt, fm_tree, ldata, control = bctrl, method =
quote(mboost::blackboost), folds = fd))
tctrl <- ctree_control(saveinfo = FALSE, alpha = .01,
minbucket = length(coef(as.mlt(m_mlt))) * 2)
fctrl <- ctree_control(saveinfo = FALSE, alpha = 1,
minsplit = 50, minbucket = 25, nmax = c("yx" = Inf, "z" = 100))
r_trtf <- FUN2(m_mlt, fm_tree, ldata, tcontrol = tctrl, fcontrol = fctrl, fd)
r_glm <- m_glm$risk
r_gam <- m_gam$risk
r_tree <- m_tree$risk
colnames(r_glm) <- paste("glm", colnames(r_glm), sep = "_")
colnames(r_gam) <- paste("gam", colnames(r_gam), sep = "_")
colnames(r_tree) <- paste("tree", colnames(r_tree), sep = "_")
colnames(r_trtf) <- paste("trtf", colnames(r_trtf), sep = "_")
risk <- cbind(r_glm, r_gam, r_tree, r_trtf)
ll0 <- numeric(ncol(fd))
for (i in 1:ncol(fd)) {
w <- fd[,i]
ll0[i] <- logLik(update(m_mlt, theta = coef(as.mlt(m_mlt)), weights = w), w = 1 - w) / sum(1 - w)
}
save(risk, ll0, file = "ex_india.rda")
warnings()
sessionInfo()
|
data(ttrc)
rownames(ttrc) <- ttrc$Date
ttrc$Date <- NULL
input <- list( all=ttrc[1:250,], top=ttrc[1:250,], mid=ttrc[1:250,] )
input$top[1:10,] <- NA
input$mid[9:20,] <- NA
load(system.file("unitTests/output.MA.rda", package="TTR"))
test.ALMA.output.length.eq.input.length <- function() {
v <- 1:10
x <- xts::.xts(v, seq_along(v))
av <- ALMA(v)
ax <- ALMA(x)
checkEquals(NROW(av), NROW(ax))
}
test.SMA <- function() {
checkEqualsNumeric( SMA(input$all$Close), output$allSMA )
checkEquals( attributes(SMA(input$all$Close)), attributes(output$allSMA) )
checkEqualsNumeric( SMA(input$top$Close), output$topSMA )
checkEquals( attributes(SMA(input$top$Close)), attributes(output$topSMA) )
checkException( SMA(input$mid$Close) )
checkException( SMA(input$all[,1:2]) )
}
test.EMA <- function() {
checkEqualsNumeric( EMA(input$all$Close), output$allEMA )
checkEquals( attributes(EMA(input$all$Close)), attributes(output$allEMA) )
checkEqualsNumeric( EMA(input$top$Close), output$topEMA )
checkEquals( attributes(EMA(input$top$Close)), attributes(output$topEMA) )
checkException( EMA(input$mid$Close) )
checkException( EMA(input$all[,1:2]) )
checkException( EMA(input$all$Close, n = -1) )
checkException( EMA(input$all$Close, n = NROW(input$all) + 1) )
}
test.EMA.n.ratio <- function() {
out <- 0:9 * 1.0
is.na(out) <- 1:2
checkEqualsNumeric(EMA(1:10, ratio = 0.5), out)
checkEqualsNumeric(EMA(1:10, n = 3), out)
checkEqualsNumeric(EMA(1:10, n = 3, ratio = 0.5), out)
}
test.EMA.ratio.eq.0 <- function() {
checkException(EMA(1:10, ratio = 0.0))
}
test.EMA.wilder <- function() {
checkEqualsNumeric( EMA(input$all$Close, wilder=TRUE), output$allEMAwilder )
checkEquals( attributes(EMA(input$all$Close, wilder=TRUE)), attributes(output$allEMAwilder) )
checkEqualsNumeric( EMA(input$top$Close, wilder=TRUE), output$topEMAwilder )
checkEquals( attributes(EMA(input$top$Close, wilder=TRUE)), attributes(output$topEMAwilder) )
checkException( EMA(input$mid$Close, wilder=TRUE) )
}
test.DEMA <- function() {
checkEqualsNumeric( DEMA(input$all$Close), output$allDEMA )
checkEquals( attributes(DEMA(input$all$Close)), attributes(output$allDEMA) )
checkEqualsNumeric( DEMA(input$top$Close), output$topDEMA )
checkEquals( attributes(DEMA(input$top$Close)), attributes(output$topDEMA) )
checkException( DEMA(input$mid$Close) )
checkException( DEMA(input$all[,1:2]) )
}
test.HMA <- function() {
hma <- HMA(1:10, 2)
checkEqualsNumeric(hma, c(NA, 2:10 + 1/3))
}
test.HMA.odd.n <- function() {
hma <- HMA(1:10, 3)
checkEqualsNumeric(hma, c(rep(NA, 2), 3:10 + 2/3))
}
test.WMA <- function() {
checkEqualsNumeric( WMA(input$all$Close), output$allWMA )
checkEquals( attributes(WMA(input$all$Close)), attributes(output$allWMA) )
checkEqualsNumeric( WMA(input$top$Close), output$topWMA )
checkEquals( attributes(WMA(input$top$Close)), attributes(output$topWMA) )
checkException( WMA(input$mid$Close) )
checkException( WMA(input$all$Close, wts=1) )
checkException( WMA(input$all[,1:2]) )
checkException( WMA(input$all$Close, n = -1) )
checkException( WMA(input$all$Close, n = NROW(input$all) + 1) )
}
test.WMAvol <- function() {
checkEqualsNumeric( WMA(input$all$Close, wts=input$all$Volume), output$allWMAvol )
checkEquals( attributes(WMA(input$all$Close, wts=input$all$Volume)), attributes(output$allWMAvol) )
checkEqualsNumeric( WMA(input$top$Close, wts=input$top$Volume), output$topWMAvol )
checkEquals( attributes(WMA(input$top$Close, wts=input$top$Volume)), attributes(output$topWMAvol) )
checkException( WMA(input$all$Close, wts=input$mid$Volume) )
checkException( WMA(input$all[,1:2], wts=input$all$Volume) )
checkException( WMA(input$all$Close, wts=input$all[,1:2]) )
}
test.WMA_returns_xts <- function() {
x <- xts::.xts(x = c(NA, 1:3), 1:4)
wma <- WMA(x, 2)
checkTrue(inherits(wma, "xts"))
}
test.EVWMA <- function() {
checkEqualsNumeric( EVWMA(input$all$Close, input$all$Volume), output$allEVWMA )
checkEquals( attributes(EVWMA(input$all$Close, input$all$Volume)), attributes(output$allEVWMA) )
checkEqualsNumeric( EVWMA(input$top$Close, input$top$Volume), output$topEVWMA )
checkEquals( attributes(EVWMA(input$top$Close, input$top$Volume)), attributes(output$topEVWMA) )
checkException( EVWMA(input$mid$Close, input$mid$Volume) )
checkException( EVWMA(input$all$Close) )
checkException( EVWMA(input$all[,1:2], input$all$Volume) )
checkException( EVWMA(input$all$Close, input$all[,1:2]) )
checkException( EVWMA(input$all$Close, n = -1) )
checkException( EVWMA(input$all$Close, n = NROW(input$all) + 1) )
}
test.ZLEMA <- function() {
checkEqualsNumeric( ZLEMA(input$all$Close), output$allZLEMA )
checkEquals( attributes(ZLEMA(input$all$Close)), attributes(output$allZLEMA) )
checkEqualsNumeric( ZLEMA(input$top$Close), output$topZLEMA )
checkEquals( attributes(ZLEMA(input$top$Close)), attributes(output$topZLEMA) )
checkException( ZLEMA(input$mid$Close) )
checkException( ZLEMA(input$all[,1:2]) )
}
test.ZLEMA.n.ratio <- function() {
out <- c(rep(NA, 6), 4.0, 6.0, 7.75, 9.3125)
checkEqualsNumeric(ZLEMA(1:10, ratio = 0.25), out)
checkEqualsNumeric(ZLEMA(1:10, n = 7), out)
checkEqualsNumeric(ZLEMA(1:10, n = 7, ratio = 0.25), out)
}
test.ZLEMA.ratio.eq.0 <- function() {
checkException(ZLEMA(1:10, ratio = 0.0))
}
test.EMA.non.na.eq.n.does.not.error <- function() {
x <- c(NA, rnorm(10))
e <- EMA(x, 10)
z <- ZLEMA(x, 10)
return(TRUE)
}
|
popfit_init_tuning <- function(x,
y,
proximity=TRUE,
verbose=FALSE,
log=FALSE) {
log_info("MSG", paste0("Start tuning of our randomForest population density regression."), verbose=verbose, log=log)
start_time <- Sys.time()
init_popfit = tuneRF(x=x,
y=y,
plot=TRUE,
mtryStart=length(x)/3,
ntreeTry=length(y)/20,
improve=0.0001,
stepFactor=1.20,
trace=verbose,
doBest=TRUE,
nodesize=length(y)/1000,
na.action=na.omit,
importance=TRUE,
proximity=proximity,
sampsize=min(c(length(y), 1000)),
replace=TRUE)
end_time <- Sys.time()
log_info("MSG", paste("End tuning RF. Elapsed Fitting Time:", tmDiff(start_time,end_time)), verbose=verbose, log=log)
return(init_popfit)
}
|
library(BMRSr)
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
get_parameters("FUELINST")
generation_data <- generation_dataset_example
library(ggplot2, quietly = TRUE, warn.conflicts = FALSE)
library(tidyr, quietly = TRUE, warn.conflicts = FALSE)
library(dplyr, quietly = TRUE, warn.conflicts = FALSE)
generation_data <- generation_data %>%
dplyr::mutate(settlement_period = as.factor(settlement_period)) %>%
tidyr::gather(key = "fuel_type", value = "generation_mw", ccgt:intnem)
ggplot2::ggplot(data = generation_data, aes(x = spot_time, y = generation_mw, colour = fuel_type)) +
ggplot2::geom_line()
|
context("cleaning strings")
test_that("clean_strings is working", {
expect_is(clean_strings(Easplist@taxonNames$TaxonName), "character")
expect_is(clean_strings(Easplist@taxonRelations$Level), "factor")
expect_is(clean_strings(Easplist@taxonNames), "data.frame")
expect_equal(clean_strings(" Daucus carota "), "Daucus carota")
}
)
|
asym.v.e <- function(X,w,h){
M_theta.e <- function(X,w,h){
out <- matrix(0, nrow=nrow(X), ncol=ncol(X)-1)
for(i in 1:nrow(X)){
for(k in 1:(ncol(X)-1)){
int.m <- function(x){
w.sum <- 0
for(j in 1:ncol(X)){
w.sum <- w.sum + w[j]*dnorm(x, mean = X[i,j])
}
(dnorm(x, mean = X[i,k]) - dnorm(x, mean = X[i,ncol(X)]))*(1 + log(w.sum))
}
out[i,k] <- -integrate(int.m, range(X[i,])[1] - 3*h, range(X[i,])[2] + 3*h)$value
}
}
t(out)%*%out/nrow(X)
}
V_theta.e <- function(X,w,h){
temp <- array(0, c(ncol(X)-1, ncol(X)-1, nrow(X)))
for(i in 1:nrow(X)){
for(j in 1:(ncol(X)-1)){
for(l in 1:(ncol(X)-1)){
int.v <- function(x){
w.sum <- 0
for(k in 1:ncol(X)){
w.sum <- w.sum + w[k]*dnorm(x, mean = X[i,k])
}
(dnorm(x, mean = X[i,j]) - dnorm(x, mean = X[i,ncol(X)]))*(dnorm(x, mean = X[i,l]) - dnorm(x, mean = X[i,ncol(X)]))/w.sum
}
temp[j,l,i] <- -integrate(int.v, range(X[i,])[1] - 3*h, range(X[i,])[2] + 3*h)$value
}
}
}
apply(temp, c(1,2), sum)/nrow(X)
}
solve(V_theta.e(X,w,h))%*%M_theta.e(X,w,h)%*%solve(V_theta.e(X,w,h))
}
|
Ln <-
function(x,y){
  s <- order(y[order(x)])
  rr <- lis(s)
  rr
}
|
combine <- function(...) {
lifecycle::deprecate_warn("1.0.0", "combine()", "vctrs::vec_c()")
args <- list2(...)
if (length(args) == 1 && is.list(args[[1]])) {
args <- args[[1]]
}
args <- keep(args, function(.x) !is.null(.x))
names(args) <- NULL
if (length(args) == 0) {
logical()
} else {
vec_c(!!!args)
}
}
|
keyfct.hn <- function(distance, key.scale){
exp( - (( distance/ (sqrt(2) * key.scale) )^2) )
}
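## Quick check of the half-normal key function above (illustrative values):
## detection probability is 1 at distance 0 and decays with distance at a
## rate set by key.scale.
keyfct.hn(distance = c(0, 25, 50, 100), key.scale = 50)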
|
predict.scalreg <-
function(object, newX=NULL,...) {
if(is.null(newX))
y = fitted(object)
else{
y=as.vector(newX%*%object$coefficients)
}
y
}
|
output.coxphout <-
function(coxout) {
nodetree <- coxout$nodetree[-1,]
Lkl <- coxout$lkl
Depth <- nodetree[,1]
Block <- nodetree[,2]
Node <- nodetree[,3]
Left <- nodetree[,4]
Right <- nodetree[,5]
Score <- nodetree[,6]
Start <- nodetree[,7]
End <- nodetree[,8]
ncases <- nodetree[,9]
nevents <- nodetree[,10]
nnodetree <- as.data.frame(cbind(Depth,Block,Node,Left,
Right,Score,Lkl,Start,End,ncases,nevents))
  dimnames(nnodetree) <- list(nodetree[,3],c('Depth','Block','Node','Left',
                              'Right','Score','lkl','Start','End','ncases','nevents'))
  nnodetree
}
|
P.hA <- function(x){
rho=1000; g=9.8
v <- sqrt(2*g*x$h)
Q <- v*x$A
P <- (1/2)*rho*x$A*v^3*10^-6; Punits <- "(MW)"
if(P < 10) {P <- P*10^3; Punits <- "(kW)"}
X <- t(c(x$h,v,x$A,Q,P)); nX <- length(X)
X <- data.frame(X)
rdig <- rep(2,nX)
for (i in 1:nX) X[i] <- round(X[i],rdig[i])
names(X) <-c("Head(m)","Vel(m/s)","Area(m2)", "Flow(m3/s)",paste("Power",Punits,sep=""))
return(X)
}
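## Usage sketch (illustrative head and area): P.hA() expects a list with
## head h in m and cross-sectional area A in m2, and reports velocity,
## flow, and hydraulic power.
P.hA(list(h = 10, A = 2))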
P.Qh <- function(x){
rho=1000; g=9.8
Pw <- rho*g*x$Q*x$h*10^-6; Punits <- "(MW)"
if(Pw < 10) {Pw <- Pw*10^3; Punits <- "(kW)"}
X <- t(c(x$h,x$Q,Pw)); nX <- length(X)
X <- data.frame(X)
rdig <- rep(2,nX)
for (i in 1:nX) X[i] <- round(X[i],rdig[i])
names(X) <-c("Head(m)","Flow(m3/s)",paste("Power",Punits,sep=""))
return(X)
}
Pmax.Qh <- function(x){
rho=1000; g=9.8
net.h <- x$h - x$h/3
Pw <- rho*g*x$Q*net.h*10^-6; Punits <- "(MW)"
if(Pw < 10) {Pw <- Pw*10^3; Punits <- "(kW)"}
X <- t(c(x$h,net.h,x$Q,Pw)); nX <- length(X)
X <- data.frame(X)
rdig <- rep(2,nX)
for (i in 1:nX) X[i] <- round(X[i],rdig[i])
names(X) <-c("Gross head (m)","Net head (m)","Flow (m3/s)",paste("Power",Punits,sep=""))
return(X)
}
Pe.Pw <- function(x){
rho=1000; g=9.8
net.h <- x$h - x$h/3
Pw <- rho*g*x$Q*net.h*10^-6; Punits <- "(MW)"
if(Pw < 10) {Pw <- Pw*10^3; Punits <- "(kW)"}
Pe <- x$nu*Pw
press <- rho*g*net.h/1000
X <- t(c(x$h,net.h,x$Q,press,x$nu,Pw,Pe)); nX <- length(X)
X <- data.frame(X)
rdig <- rep(2,nX)
for (i in 1:nX) X[i] <- round(X[i],rdig[i])
names(X) <-c("GrossHead(m)","NetHead(m)","Flow(m3/s)","Press(kPa)","Eff", paste("PowWater",Punits,sep=""),paste("PowGen",Punits,sep=""))
return(X)
}
Pmax.Qh.plot <- function(x){
rho=1000; g=9.8
Q <- c(0.1,0.5,1,5,10,50,100,500,1000,2000);nQ <- length(Q)
P <- c(0.01,0.1,1,10,100,1000); nP <- length(P)
h <- matrix(nrow=nQ,ncol=nP)
for(i in 1:nP){
h[,i] <- (P[i]*10^6)/(rho*g*Q)
}
par(pty='s')
matplot(Q,h,type="l", col=1, lty=1, log='xy',ylim=c(1,2000),xlim=c(0.1,2000),
xaxs='i',yaxs='i',xaxt='n',yaxt='n',ylab="Net Head (m)",xlab="Flow (m3/s)",
lwd=1.5,cex.axis=0.8)
tck <- c(0.1,0.2,0.5); xtck <- c(tck,10*tck,100*tck,1000*tck,10000*tck)
ytck <- c(10*tck,100*tck,1000*tck,10000*tck)
axis(1,at=xtck,labels=format(xtck,scientific=FALSE),cex.axis=0.6)
axis(2,at=ytck,labels=format(ytck,scientific=FALSE),cex.axis=0.6)
points(x$Q,x$h-x$h/3,type="p", pch=x$plab,cex=1,font=2)
slab <- c(0.5,1.6,5,16,50,600)
for(i in 1:nP){
xt <- slab[i]; yt <- slab[i]*8
text(xt,yt,paste(P[i],"MW",sep=""),srt=-55,cex=0.8)
}
grid(col='gray')
}
turbine.regions <- function(type){
if(type=='kaplan'){
polygon(x=c(1,1,10,200,1000,80,1),y=c(1,20,80,80,15,1,1),lwd=2,border='gray')
text(50,70,"Kaplan")
}
if(type=='francis'){
polygon(x=c(1,5,100,1000,1000,5,1),y=c(80,800,800,80,10,10,80),lwd=2,border='gray')
text(200,200,"Francis",srt=-45)
}
if(type=='pelton'){
polygon(x=c(1,1,20,50,40,1),y=c(80,1000,1000,800,600,80),lwd=2,border='gray')
text(2,900,"Pelton")
}
if(type=='crossflow'){
polygon(x=c(0.5,0.8,20,15),y=c(3,5,5,3),lwd=2,border='gray')
text(5,4,"Crossflow")
}
if(type=='slh'){
polygon(x=c(0.5,0.8,20,15),y=c(3,5,5,3),lwd=2,border='gray')
text(5,4,"SLH")
}
}
turbine.regions.all <- function(){
polygon(x=c(1,1,10,200,1000,80,1),y=c(1,20,80,80,15,1,1),lwd=2,border='gray',lty=1)
text(50,70,"Kaplan")
polygon(x=c(1,5,100,1000,1000,5,1),y=c(80,800,800,80,10,10,80),lwd=2,border='gray',lty=2)
text(200,200,"Francis",srt=-45)
polygon(x=c(1,1,20,50,40,1),y=c(80,1000,1000,800,600,80),lwd=2,border='gray',lty=3)
text(2,900,"Pelton")
polygon(x=c(0.5,0.8,20,15),y=c(3,5,5,3),lwd=2,border='gray',lty=4)
text(5,4,"SLH")
}
pipe.loss <- function(pipe){
x <- pipe
k1=10.67; k2=1.852; k3=4.87
if (x$mat=='pvc') C <-c(150,150)
if (x$mat=='concrete') C <-c(100,140)
if (x$mat=='steel') C <-c(90,110)
if (x$mat=='galvanized') C <-c(120,120)
if (x$mat=='poly') C <-c(140,140)
h.loss <- k1*(x$L/x$d^k3)*(x$Q/mean(C))^k2
X <- t(c(h.loss,mean(C))); nX <- length(X)
X <- data.frame(X)
rdig <- rep(2,nX)
for (i in 1:nX) X[i] <- round(X[i],rdig[i])
names(X) <-c("Head loss(m)","Roughness")
return(X)
}
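## Usage sketch (illustrative pipe): Hazen-Williams head loss for 500 m of
## 0.3 m diameter PVC pipe carrying 0.05 m3/s.
pipe.loss(list(mat = "pvc", L = 500, d = 0.3, Q = 0.05))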
exceed <- function(flow){
y <- flow; z <- sort(y)
proby <- sort(rank(y),decreasing=T)/length(y)
quant <- c(0.5,0.95)
yp <- list(); yp.ea <- array()
for(i in 1:length(quant)){
yp[[i]] <- z[which(proby<=quant[i])]
yp.ea[i] <- yp[[i]][1]
}
ym <- mean(y)
pym <- proby[which(z>=ym)][1]
prob <- round(quant,2)
Q <- round(yp.ea,2)
Prob.Qmean <- round(c(pym,ym),2)
X <- rbind(c(quant,pym),c(yp.ea,ym))
X <- data.frame(X)
X <- round(X,2)
names(X) <-c(c("Q50","Q95","Qmean"))
row.names(X) <- c("Prob","Q")
return(list(y=z,proby=proby, prob=prob, Q=Q, Prob.Qmean=Prob.Qmean, prob.Q=X))
}
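## Usage sketch with synthetic daily flows (illustrative distribution):
## prob.Q holds the flows exceeded 50% and 95% of the time together with
## the exceedance probability of the mean flow.
set.seed(7)
exceed(rgamma(365, shape = 2, scale = 40))$prob.Q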
model.flow <- function(mf){
x <- mf
days <- seq(1,365)
seasonal <- exp(-((days-x$day.peak)/x$length.season)^2)
delta <- x$peak.flow - x$base.flow
noise <- rnorm(365,delta*x$variab[1],x$variab[2])
flow <- array()
flow[1:3] <- seasonal[1:3]
for (i in 4:365){
flow[i] <-0
for (j in 1:3) flow[i] <- flow[i]+seasonal[i]*x$coef[j]*flow[i-j]
flow[i] <- flow[i]+seasonal[i]*x$coef[4]*noise[i]
}
flow <- flow*x$peak.flow + x$base.flow
for (i in 1:365)if(flow[i]<=0) flow[i] <-0
return(flow=flow)
}
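## Usage sketch: the parameter list is an illustrative guess at a plausible
## flow regime (peak near day 100, season length about 60 days); only the
## field names used inside model.flow() are assumed.
set.seed(1)
q.sim <- model.flow(list(day.peak = 100, length.season = 60, peak.flow = 20,
                         base.flow = 2, variab = c(0.05, 1),
                         coef = c(0.3, 0.2, 0.1, 0.2)))
range(q.sim)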
flow.plot <- function(flow,label){
Qmean <- mean(flow)
days <- seq(1,365)
plot(days,flow,type="l",lty=1, ylim=c(0,max(flow)),
ylab=label, xlab="Days", cex.lab=0.8)
abline(h=Qmean,lty=2,lwd=0.7)
text(20,Qmean,"Qmean",cex=0.7)
}
annual.avg <- function(mf,nyrs){
x <- mf
Xt <- matrix(nrow=365,ncol=nyrs)
for(i in 1:nyrs)Xt[,i] <- model.flow(x)
Xtm <- array()
for (j in 1:365) Xtm[j] <- mean(Xt[j,])
return(Xtm)
}
flow.exc.plot <- function(flow,exc,label){
levels <- list(lev=exc$Q,lev.lab=names(exc$prob.Q)[-length(names(exc$prob.Q))])
Qmean <- exc$Prob.Qmean[2]; prob.Qm <-exc$Prob.Qmean[1]
days <- seq(1,365)
plot(days,flow,type="l",lty=1, ylim=c(0,max(flow)),
ylab=label, xlab="Days", cex.lab=0.8)
for(k in 1:length(levels$lev)){
abline(h=levels$lev[k],lty=2,lwd=0.7)
text(2,levels$lev[k],levels$lev.lab[k],cex=0.7)
}
abline(h=Qmean,lty=2,lwd=0.7)
text(20,Qmean,"Qmean",cex=0.7)
plot(exc$proby,exc$y,type="l",ylim=c(0,max(exc$y)),xlim=c(0,1),lty=1,
ylab=label,xlab="Exceedance probability", cex.lab=0.8)
for(k in 1:length(levels$lev)){
abline(h=levels$lev[k],lty=2,lwd=0.7)
text(0.01,levels$lev[k],levels$lev.lab[k],cex=0.7)
}
abline(h=Qmean,lty=2,lwd=0.7)
text(0.1,Qmean,"Qmean",cex=0.7)
abline(v=prob.Qm,lty=2,lwd=0.7)
text(prob.Qm,0,paste("ProbQmean=",prob.Qm),cex=0.7)
}
area.vol <- function(xav){
x <- xav
h <- seq(0,1,0.01)*(x$H-x$B)
tail <- (x$L*1000/x$H)*h
area <- (x$W*1000/x$H)*h*tail*10^-4
vol <- (area/2)*h
vmax <- max(vol)
amax <- max(area)
par(mar = c(5,5,5,2))
plot(vol,h+x$B,type="l",xlim=c(0,vmax),lty=1,col=1,
xlab="Volume (ha.m)", ylab="Lake elevation asl (m)")
abline(h=x$H,col='gray')
text(vmax/2,x$H,"Cons. pool elevation (m)",cex=0.8)
par(new=T)
plot((area),h,type="l",xlim=c(amax,0),axes=F,xlab=NA,
ylab=NA,lty=2,col=1)
axis(side=3); mtext(side=3, line=3, "Area (ha)")
legend('bottom',legend=c("Volume","Area"),lty=1:2,col=1,bg='white',cex=0.7)
}
|
plotpdf <- function(pdf, qdf, cdf, lq = 0.01, uq = 0.99, ...){
if(missing(qdf) && !missing(cdf))
qdf <- function(q){
cdf2quantile(q, cdf)
}
from <- qdf(lq)
to <- qdf(uq)
plot(pdf, from = from, to = to, ...)
}
cdf2quantile <- function(p, cdf, interval = c(-3, 3),
lower = min(interval), upper = max(interval), ...){
f <- function(x, ...){
cdf(x, ...) - p
}
wrk <- uniroot(f, lower = lower, upper = upper, extendInt = "upX", ...)
res <- wrk$root
res
}
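## Minimal sketch using base R's normal distribution: cdf2quantile() should
## land close to qnorm(0.975), and plotpdf() can build its quantile function
## from the supplied cdf.
cdf2quantile(0.975, pnorm)
plotpdf(dnorm, cdf = pnorm, lq = 0.005, uq = 0.995)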
|
is.vector(myData1)
is.vector(myData2)
|
test_that("simple calls generate expected results", {
dt <- lazy_dt(data.table(x = 1), "DT")
expect_equal(
dt %>% head() %>% show_query(),
expr(head(DT, n = 6L))
)
expect_equal(
dt %>% tail() %>% show_query(),
expr(tail(DT, n = 6L))
)
})
test_that("vars set correctly", {
dt <- lazy_dt(data.frame(x = 1:3, y = 1:3))
expect_equal(dt %>% head() %>% .$vars, c("x", "y"))
})
test_that("simple calls generate expected translations", {
dt <- lazy_dt(data.table(x = 1, y = 1, z = 1), "DT")
expect_equal(
dt %>% rename(b = y) %>% show_query(),
expr(setnames(copy(DT), "y", "b"))
)
})
test_that("vars set correctly", {
dt <- lazy_dt(data.frame(x = 1:3, y = 1:3))
expect_equal(dt %>% rename(a = x) %>% .$vars, c("a", "y"))
})
test_that("empty rename returns original", {
dt <- data.table(x = 1, y = 1, z = 1)
lz <- lazy_dt(dt, "DT")
expect_equal(lz %>% rename() %>% show_query(), expr(DT))
})
test_that("renames grouping vars", {
dt <- lazy_dt(data.table(x = 1, y = 1, z = 1))
gt <- group_by(dt, x)
expect_equal(rename(gt, a = x)$groups, "a")
})
test_that("can rename with a function or formula", {
dt <- lazy_dt(data.table(x = 1, y = 1))
expect_equal(dt %>% rename_with(toupper) %>% .$vars, c("X", "Y"))
expect_equal(dt %>% rename_with(toupper, 1) %>% .$vars, c("X", "y"))
expect_equal(dt %>% rename_with("toupper") %>% .$vars, c("X", "Y"))
expect_equal(dt %>% rename_with(~ toupper(.x)) %>% .$vars, c("X", "Y"))
})
test_that("but not with anything else", {
dt <- lazy_dt(data.table(x = 1, y = 1))
expect_snapshot(error = TRUE, {
dt %>% rename_with(1)
})
})
test_that("rename_with generates minimal spec", {
dt <- lazy_dt(matrix(ncol = 26, dimnames = list(NULL, letters)), "DT")
expect_snapshot({
dt %>% rename_with(toupper) %>% show_query()
dt %>% rename_with(toupper, 1:3) %>% show_query()
})
})
test_that("can rename_with() a data.table", {
dt <- data.table(x = 1:5, y = 1:5)
out <- rename_with(dt, toupper, x)
expect_s3_class(out, "dtplyr_step")
expect_named(as_tibble(out), c("X", "y"))
})
test_that("no input uses all variables", {
dt <- lazy_dt(data.table(x = c(1, 1), y = c(1, 2)), "dt")
expect_equal(
dt %>% distinct() %>% show_query(),
expr(unique(dt))
)
expect_equal(dt %>% distinct() %>% .$vars, c("x", "y"))
})
test_that("uses supplied variables", {
dt <- lazy_dt(data.table(x = c(1, 1), y = c(1, 2)), "dt")
expect_equal(
dt %>% distinct(y) %>% show_query(),
expr(unique(dt[, .(y)]))
)
expect_equal(dt %>% distinct(y) %>% .$vars, "y")
expect_equal(
dt %>% group_by(x) %>% distinct(x, y) %>% show_query(),
expr(unique(dt))
)
})
test_that("doesn't duplicate variables", {
dt <- lazy_dt(data.table(x = c(1, 1), y = c(1, 2)), "dt")
expect_equal(
dt %>% distinct(x, x) %>% show_query(),
expr(unique(dt[, .(x)]))
)
expect_equal(dt %>% distinct(x, x) %>% .$vars, "x")
expect_equal(
dt %>% group_by(x) %>% distinct(x) %>% show_query(),
expr(unique(dt[, .(x)]))
)
})
test_that("keeps all variables if requested", {
dt <- lazy_dt(data.table(x = 1, y = 1, z = 1), "dt")
expect_equal(
dt %>% distinct(y, .keep_all = TRUE) %>% show_query(),
expr(unique(dt, by = "y"))
)
expect_equal(dt %>% distinct(y, .keep_all = TRUE) %>% .$vars, c("x", "y", "z"))
expect_equal(
dt %>% group_by(x) %>% distinct(y, .keep_all = TRUE) %>% show_query(),
expr(unique(dt, by = !!c("x", "y")))
)
})
test_that("can compute distinct computed variables", {
dt <- lazy_dt(data.table(x = c(1, 1), y = c(1, 2)), "dt")
expect_equal(
dt %>% distinct(z = x + y) %>% show_query(),
expr(unique(dt[, .(z = x + y)]))
)
expect_equal(
dt %>% distinct(z = x + y, .keep_all = TRUE) %>% show_query(),
expr(unique(copy(dt)[, `:=`(z = x + y)], by = "z"))
)
})
test_that("unique is an alias for distinct", {
dt <- lazy_dt(data.table(x = c(1, 1)))
expect_equal(unique(dt), distinct(dt))
})
test_that("empty call drops every row", {
tb <- tibble(x = c(1, 2, NA), y = c("a", NA, "b"))
step <- drop_na(lazy_dt(tb, "DT"))
expect_equal(show_query(step), expr(na.omit(DT)))
expect_equal(as_tibble(step), tb[1, ])
})
test_that("uses specified variables", {
df <- tibble(x = c(1, 2, NA), y = c("a", NA, "b"))
dt <- lazy_dt(df, "DT")
step <- drop_na(dt, x)
expect_equal(show_query(step), expr(na.omit(DT, cols = "x")))
expect_equal(collect(step), df[1:2, ])
step <- drop_na(dt, x:y)
expect_equal(show_query(step), expr(na.omit(DT, cols = !!c("x", "y"))))
expect_equal(collect(step), df[1, ])
})
test_that("errors are raised", {
tb <- tibble(x = c(1, 2, NA), y = c("a", NA, "b"))
dt <- lazy_dt(tb, "DT")
expect_snapshot(collect(drop_na(dt, "z")), error = TRUE)
})
test_that("converts data.table to dtplyr_step", {
df <- data.table(x = c(1, 2, NA), y = c("a", NA, "b"))
expect_s3_class(drop_na(df), "dtplyr_step_call")
})
|
sqrtm <- function(x) {
d <- dim(x)
if(length(d) != 2 || d[1] != d[2]) stop("'x' must be a quadratic matrix")
n <- d[1]
Sch.x <- Schur(Matrix(x))
ev <- Sch.x@EValues
if(getOption("verbose") && any(abs(Arg(ev) - pi) < 1e-7))
message(sprintf("'x' has negative real eigenvalues; maybe ok for %s", "sqrtm()"))
S <- as.matrix(Sch.x@T)
Q <- as.matrix(Sch.x@Q)
if(n > 1L) {
J.has.2 <- S[cbind(2:n, 1:(n-1))] != 0
k <- sum(J.has.2)
} else k <- 0L
R.index <- vector("list",n-k)
l <- 1L
i <- 1L
while(i < n) {
if (S[i+1L,i] == 0) {
R.index[[l]] <- i
}
else {
i1 <- i+1L
R.index[[l]] <- c(i,i1)
i <- i1
}
i <- i+1L
l <- l+1L
}
if (is.null(R.index[[n-k]])) {
R.index[[n-k]] <- n
}
I <- diag(2)
X <- matrix(0,n,n)
for (j in seq_len(n-k)) {
ij <- R.index[[j]]
if (length(ij) == 1L) {
X[ij,ij] <- if((.s <- S[ij,ij]) < 0) sqrt(.s + 0i) else sqrt(.s)
}
else {
ev1 <- ev[ij[1]]
r1 <- Re(sqrt(ev1))
X[ij,ij] <- r1*I + 1/(2*r1)*(S[ij,ij] - Re(ev1)*I)
}
}
if (n-k > 1L) for (j in 2L:(n-k)) {
ij <- R.index[[j]]
for (i in (j-1L):1L) {
ii <- R.index[[i]]
sumU <- 0
if (length(ij) == 1L & length(ii) == 1L) {
if (j-i > 1L) for (l in (i+1L):(j-1L)) {
il <- R.index[[l]]
sumU <- sumU + {
if (length(il) == 2 ) X[ii,il]%*%X[il,ij]
else X[ii,il] * X[il,ij]
}
}
X[ii,ij] <- solve(X[ii,ii]+X[ij,ij],S[ii,ij]-sumU)
}
else if (length(ij) == 2 & length(ii) == 1L ) {
if (j-i > 1L) for (l in(i+1L):(j-1L)) {
il <- R.index[[l]]
sumU <- sumU + {
if (length(il) == 2 ) X[ii,il]%*%X[il,ij]
else X[ii,il] * X[il,ij]
}
}
X[ii,ij] <- solve(t(X[ii,ii]*I + X[ij,ij]),
as.vector(S[ii,ij] - sumU))
}
else if (length(ij) == 1L & length(ii) == 2 ) {
if (j-i > 1L) for (l in(i+1L):(j-1L)) {
il <- R.index[[l]]
sumU <- sumU + {
if (length(il) == 2 ) X[ii,il]%*%X[il,ij]
else X[ii,il] * X[il,ij]
}
}
X[ii,ij] <- solve(X[ii,ii]+X[ij,ij]*I, S[ii,ij]-sumU)
}
else if (length(ij) == 2 & length(ii) == 2 ) {
if (j-i > 1L) for (l in(i+1L):(j-1L)) {
il <- R.index[[l]]
sumU <- sumU + {
if (length(il) == 2 ) X[ii,il] %*% X[il,ij]
else X[ii,il] %*% t(X[il,ij])
}
}
tUii <- matrix(0,4,4)
tUii[1:2,1:2] <- X[ii,ii]
tUii[3:4,3:4] <- X[ii,ii]
tUjj <- matrix(0,4,4)
tUjj[1:2,1:2] <- t(X[ij,ij])[1L,1L]*I
tUjj[3:4,3:4] <- t(X[ij,ij])[2L,2L]*I
tUjj[1:2,3:4] <- t(X[ij,ij])[1L,2L]*I
tUjj[3:4,1:2] <- t(X[ij,ij])[2L,1L]*I
X[ii,ij] <- solve(tUii+tUjj, as.vector(S[ii,ij]-sumU))
}
}
}
Q %*% X %*% solve(Q)
}
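## Usage sketch, commented out because sqrtm() above needs the Matrix
## package attached for Matrix() and Schur(); the values are illustrative.
# library(Matrix)
# m <- matrix(c(4, 1, 1, 9), 2, 2)
# x <- sqrtm(m)
# x %*% x   # recovers m up to rounding error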
|
frame_vars <- function(animation = last_animation()) {
attr(animation, 'frame_vars')
}
split_animation <- function(animation = last_animation(), by) {
by <- enquo(by)
fv <- frame_vars(animation)
by <- eval_tidy(by, fv)
if (length(by) != nrow(fv)) {
stop('`by` must have the same length as the number of frames', call. = FALSE)
}
tryCatch(
split(animation, by),
error = function(e) {
stop('The renderer output doesn\'t support splitting')
}
)
}
|
reorder.mpoly <- function(x, varorder = vars(x), order = "lex", ...){
vars <- vars(x)
p <- length(vars)
n <- length(x)
if(!missing(varorder)){
if(!all(vars %in% varorder)){
error <- stri_c(
"If specified, varorder must contain all computed vars - ",
paste(vars, collapse = ", ")
)
stop(error, call. = FALSE)
}
varorder <- varorder[varorder %in% vars]
x <- lapply(x, function(v){
if(length(v) == 1) return(v)
v <- v[intersect(c(varorder, "coef"), names(v))]
})
class(x) <- "mpoly"
}
  if(missing(varorder) && !missing(order)){
    msg <- stri_c(
      "Using variable ordering - ",
      paste(vars, collapse = ", ")
    )
    message(msg)
  }
match.arg(order, c("lex","glex","grlex"))
if(order == "lex"){
if(n == 1) return(x)
l <- lapply(x, function(v){
z <- rep(0, p + 1)
names(z) <- c(varorder, "coef")
z[names(v)] <- v
z
})
m <- matrix(unname(unlist(l)), nrow = length(l), ncol = p + 1, byrow = TRUE)
dimnames(m) <- list(1:nrow(m), c(varorder, "coef"))
for(k in p:1) m <- m[order(m[,k], decreasing = TRUE),]
list4mpoly <- unname(lapply(split(m, 1:n), function(v){
names(v) <- c(varorder, "coef")
v
}))
return( mpoly(list4mpoly, varorder = varorder) )
}
if(order == "glex"){
if(n == 1) return(x)
l <- lapply(x, function(v){
z <- rep(0, p + 1)
names(z) <- c(varorder, "coef")
z[names(v)] <- v
z
})
m <- matrix(unname(unlist(l)), nrow = length(l), ncol = p + 1, byrow = TRUE)
dimnames(m) <- list(1:nrow(m), c(varorder, "coef"))
for(k in p:1) m <- m[order(m[,k], decreasing = TRUE),]
m <- m[order(apply(m[, 1:p, drop = FALSE],1,sum), decreasing = TRUE),]
list4mpoly <- unname(lapply(split(m, 1:n), function(v){
names(v) <- c(varorder, "coef")
v
}))
return( mpoly(list4mpoly, varorder = varorder) )
}
if(order == "grlex"){
if(n == 1) return(x)
l <- lapply(x, function(v){
z <- rep(0, p + 1)
names(z) <- c(varorder, "coef")
z[names(v)] <- v
z
})
m <- matrix(unname(unlist(l)), nrow = length(l), ncol = p + 1, byrow = TRUE)
dimnames(m) <- list(1:nrow(m), c(varorder, "coef"))
for(k in 1:p) m <- m[order(m[,k]),]
m <- m[order(apply(m[, 1:p, drop = FALSE],1,sum), decreasing = TRUE),]
list4mpoly <- unname(lapply(split(m, 1:n), function(v){
names(v) <- c(varorder, "coef")
v
}))
return( mpoly(list4mpoly, varorder = varorder) )
}
}
|
setClass("multiprocessing", contains = "jobjRef")
setClass("sa_item", contains = "jobjRef")
setClass("workspace", contains = "jobjRef")
is.multiprocessing <- function(x){
inherits(x, "multiprocessing")
}
is.sa_item <- function(x){
inherits(x, "sa_item")
}
is.workspace <- function(x){
inherits(x, "workspace")
}
load_workspace <- function(file){
if (missing(file) || is.null(file)) {
if (Sys.info()[['sysname']] == "Windows") {
file <- utils::choose.files(caption = "Select a workspace",
filters = c("JDemetra+ workspace (.xml)", "*.xml"))
}else{
file <- base::file.choose()
}
    if (length(file) == 0)
      stop("You have to choose a file!")
  }
  if (!file.exists(file) || length(grep("\\.xml$", file)) == 0)
    stop("The file doesn't exist or isn't a .xml file!")
workspace <- .jcall("ec/tstoolkit/jdr/ws/Workspace", "Lec/tstoolkit/jdr/ws/Workspace;", "open", file)
workspace <- new("workspace", workspace)
return(workspace)
}
get_object <- function(x, pos = 1){
UseMethod("get_object", x)
}
get_object.workspace <- function(x, pos = 1){
multiproc <- .jcall(x, "Lec/tstoolkit/jdr/ws/MultiProcessing;", "getMultiProcessing", as.integer(pos - 1))
multiproc <- new("multiprocessing", multiproc)
return(multiproc)
}
get_object.multiprocessing <- function(x, pos = 1){
sa_item_obj <- .jcall(x, "Lec/tstoolkit/jdr/ws/SaItem;", "get", as.integer(pos - 1))
sa_item_obj <- new("sa_item", sa_item_obj)
return(sa_item_obj)
}
get_all_objects <- function(x){
UseMethod("get_all_objects", x)
}
get_all_objects.multiprocessing <- function(x){
nb_sa_objects <- count(x)
all_sa_object <- lapply(seq_len(nb_sa_objects),
function(i) {
get_object(x, i)
})
names(all_sa_object) <- sapply(all_sa_object, get_name)
all_sa_object
}
get_all_objects.workspace <- function(x){
nb_multiprocessing <- count(x)
all_multiprocessings <- lapply(seq_len(nb_multiprocessing),
function(i) {
get_object(x, i)
})
names(all_multiprocessings) <- sapply(all_multiprocessings, get_name)
all_multiprocessings
}
get_name <- function(x){
UseMethod("get_name", x)
}
get_name.multiprocessing <- function(x){
return(.jcall(x, "S", "getName"))
}
get_name.sa_item <- function(x){
jt <- .jcall(x, "Ldemetra/datatypes/sa/SaItemType;", "getSaDefinition")
jts <- .jcall(jt, "Ldemetra/datatypes/Ts;", "getTs")
name <- .jcall(jts, "S", "getName")
name <- gsub("^.*\\n", "", name)
return(name)
}
count <- function(x){
UseMethod("count", x)
}
count.multiprocessing <- function(x){
return(.jcall(x, "I", "size"))
}
count.workspace <- function(x){
return(.jcall(x, "I", "getMultiProcessingCount"))
}
get_ts <- function(x){
UseMethod("get_ts", x)
}
get_ts.workspace <- function(x){
multiprocessings <- get_all_objects(x)
lapply(multiprocessings, get_ts)
}
get_ts.multiprocessing <- function(x){
all_sa_objects <- get_all_objects(x)
lapply(all_sa_objects, get_ts)
}
get_ts.sa_item <- function(x){
jt <- .jcall(x, "Ldemetra/datatypes/sa/SaItemType;", "getSaDefinition")
jts <- .jcall(jt, "Ldemetra/datatypes/Ts;", "getTs")
j_ts_series <- .jcall(jts, "Lec/tstoolkit/timeseries/simplets/TsData;", "getData")
return(ts_jd2r(j_ts_series))
}
get_ts.SA <- function(x){
return(x$final$series[,"y"])
}
get_ts.jSA <- function(x){
return(get_indicators(x, "y")[[1]])
}
get_ts.regarima <- function(x){
mts <- x[["model"]][["effects"]]
y <- mts[,"y_lin"] + mts[,"tde"] + mts[,"ee"] + mts[,"omhe"] + mts[,"out"]
if (x$model$spec_rslt[1, "Log transformation"]) {
y <- exp(y)
}
return(y)
}
compute <- function(workspace, i) {
if (missing(i)) {
return(.jcall(workspace, "V", "computeAll"))
}
if (is.numeric(i)) {
nb_mp_objects <- count(workspace)
mp_names <- sapply(seq_len(nb_mp_objects),
function(i) {
get_name(get_object(workspace, i))
})
if (i < 1 || i > nb_mp_objects)
stop("The index ",i," is incorrect !\n",
"It must be beetween 1 and ", nb_mp_objects)
i <- mp_names[i]
}
if (!is.character(i))
stop("The parameter i must be a character or a numeric")
.jcall(workspace, "V", "compute", i)
}
get_model <- function(x, workspace,
userdefined = NULL,
progress_bar = TRUE){
UseMethod("get_model", x)
}
get_model.workspace <- function(x, workspace,
userdefined = NULL,
progress_bar = TRUE){
multiprocessings <- get_all_objects(x)
nb_mp <- length(multiprocessings)
result <- lapply(seq_len(nb_mp), function(i){
if (progress_bar)
cat(sprintf("Multiprocessing %i on %i:\n", i, nb_mp))
get_model(multiprocessings[[i]],
workspace = x, userdefined = userdefined,
progress_bar = progress_bar)
})
names(result) <- names(multiprocessings)
result
}
get_model.multiprocessing <- function(x, workspace,
userdefined = NULL,
progress_bar = TRUE){
all_sa_objects <- get_all_objects(x)
nb_sa_objs <- length(all_sa_objects)
if (progress_bar)
pb <- txtProgressBar(min = 0, max = nb_sa_objs, style = 3)
result <- lapply(seq_len(nb_sa_objs), function(i){
res <- get_model(all_sa_objects[[i]],
workspace = workspace, userdefined = userdefined)
if (progress_bar)
setTxtProgressBar(pb, i)
res
})
names(result) <- names(all_sa_objects)
if (progress_bar)
close(pb)
result
}
get_model.sa_item <- function(x, workspace,
userdefined = NULL,
progress_bar = TRUE){
jsa_result <- get_jmodel.sa_item(x, workspace)
if(is.null(jsa_result))
return(NULL)
jspec <- jsa_result[["spec"]]
jresult <- jsa_result[["result"]]@internal
y_ts <- get_ts(x)
context_dictionary <- .jcall(workspace,
"Lec/tstoolkit/algorithm/ProcessingContext;",
"getContext")
result <- tryCatch({
sa_jd2r(jrslt = jresult, spec = jspec, userdefined = userdefined,
context_dictionary = context_dictionary,
extra_info = TRUE, freq = frequency(y_ts))
},error = function(e){
warning(e, "Error while importing a model: NULL object will be returned",
call. = FALSE)
NULL
})
result
}
sa_results <- function(jsa) {
jresult <- .jcall(jsa, "Ldemetra/algorithm/IProcResults;", "getResults")
if (is.null(jresult))
warning("The result of the object is NULL: have you computed the workspace after importing it?\n",
"See ?compute for more information.")
return(jresult)
}
sa_spec <- function(jsa, type = "Domain") {
jt <- .jcall(jsa, "Ldemetra/datatypes/sa/SaItemType;", "getSaDefinition")
if (type == "Domain") {
return(.jcall(jt, "Lec/satoolkit/ISaSpecification;", "getDomainSpec"))
}
if (type == "Estimation") {
return(.jcall(jt, "Lec/satoolkit/ISaSpecification;", "getEstimationSpec"))
}
if (type == "Point") {
return(.jcall(jt, "Lec/satoolkit/ISaSpecification;", "getPointSpec"))
}
return(NULL)
}
|
library(testthat)
library(mrgsolve)
library(dplyr)
Sys.setenv(R_TESTS="")
options("mrgsolve_mread_quiet"=TRUE)
context("test-mrgmod")
mod <- mrgsolve::house()
test_that("methods", {
expect_equal(mod$CL,1)
expect_equal(mod$VC,20)
expect_equal(mod$RESP,50)
expect_equal(mod[["RESP"]],50)
expect_equal(mod$end,120)
expect_equal(mod[["end"]],mod@end)
expect_error(mod$kylebaron)
expect_error(mod[["kylebaron"]],regexp = "not found or not extractable")
expect_is(as.list(mod), "list")
expect_output(summary(mod), "Model: housemodel")
expect_true(mrgsolve:::valid.mrgmod(mod))
expect_true(all.equal(mod, mrgsolve::house()))
l <- mod[c("CL", "VC", "CENT", "end")]
expect_identical(l, list(CL = mod$CL, VC = mod$VC, CENT = mod$CENT, end = mod$end))
expect_error(mod[c("CL", "Kyle")],regexp = "not found or not extractable")
x <- capture.output(see(mod))
expect_true(grepl("Model file", x[2]))
expect_true(grepl("housemodel\\.cpp", x[2]))
})
test_that("defaults issue-540", {
mod <- modlib("pk1", compile = FALSE)
expect_equal(mod@mxhnil,2)
expect_equal(mod@maxsteps,20000)
expect_equal(mod@rtol,1e-8)
})
|
test_that("devRateInfo returns NULL",{
res <- devRateInfo(eq = taylor_81)
expect_equal(
object = res,
expected = NULL
)
})
test_that("devRatePlotInfo returns NULL",{
eqOpt <- devRateEqList
trash <- lapply(eqOpt, function(j){
sortOpt <- c("ordersp", "familysp", "genussp", "species", "genSp")
res <- lapply(sortOpt, function(i){
devRatePlotInfo(
eq = j,
sortBy = i,
xlim = c(0, 40),
ylim = c(0, 0.05)
)
})
sapply(res, function(i){
expect_equal(
object = i,
expected = NULL
)
})
})
})
|
sheet = function(...){
data.frame(..., check.names = FALSE, stringsAsFactors = FALSE)
}
as.sheet = function(x, ...) {
as.data.frame(x, optional = FALSE, check.names = FALSE, ...,
cut.names = FALSE, col.names = names(x),
stringsAsFactors = FALSE)
}
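## Usage sketch: unlike data.frame(), sheet() keeps non-syntactic column
## names and never converts strings to factors.
s <- sheet("first name" = c("Ada", "Grace"), score = c(1, 2))
names(s)                       # "first name" "score"
is.factor(s[["first name"]])   # FALSE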
|
walk_regions <- function(
.wb,
.pattern = ".*",
.fun,
...
){
assert_that(inherits(.wb, "Workbook"))
.formals <- names(formals(.fun))
assert_that(
all(c("wb", "sheet") %in% .formals) &&
any(c("rows", "cols") %in% .formals),
msg = paste(
".fun must be a function with formal arguments 'wb', 'sheet', and either",
"'rows', 'cols' or both. For example: 'openxlsx::addStyle()',",
"'openxlsx::addFilter()', 'openxlsx::setRowHeights()'"
))
rgs <- regions(.wb)
rgs <- rgs[grep(.pattern, rgs$region)]
for (s in rgs$sheet){
rsub <- rgs[sheet == s]
if (all(c("cols", "rows") %in% .formals)) {
.fun(wb = .wb, sheet = s, rows = rsub$rows, cols = rsub$cols, ...)
} else if ("cols" %in% .formals){
.fun(wb = .wb, sheet = s, cols = rsub$cols, ...)
} else if ("rows" %in% .formals){
.fun(wb = .wb, sheet = s, rows = rsub$rows, ...)
}
}
return(.wb)
}
map_regions<- function(
.wb,
.pattern = ".*",
.fun,
...
){
wb <- openxlsx::copyWorkbook(.wb)
walk_regions(.wb = wb, .pattern = .pattern, .fun = .fun, ...)
}
|
uev <- uniset_env_name <- "XXX_unisetEnv"
|
plot(RMEquity, type = "l", ylim = range(y), ylab = "Equity Index",
xlab = "Out-of-Sample Periods")
lines(LBEquity, lty = 2)
lines(TDEquity, lty = 3)
legend("topleft",
legend = c("S&P 500", "Low Beta", "Lower Tail Dep."),
lty = 1:3)
RelOut <- rbind((LBEquity / RMEquity - 1) * 100,
(TDEquity / RMEquity - 1) * 100)
RelOut <- RelOut[, -1]
barplot(RelOut, beside = TRUE, ylim = c(-5, 17),
names.arg = 1:ncol(RelOut),
legend.text = c("Low Beta", "Lower Tail Dep."),
args.legend = list(x = "topleft"))
abline(h = 0)
box()
|
PR2.plot <- function(fasta.df) {
freq.nt.all <- data.frame()
  seq_idx <- seq_along(fasta.df$seq_name)
  for (i_seq in seq_idx) {
sequence <- tolower(as.character(fasta.df$sequence[[i_seq]]))
seq_name <- as.character(fasta.df$seq_name[[i_seq]])
freq.nt <- count(s2c(sequence), wordsize = 1, by = 3, start = 2)
freq.nt <- as.data.frame(freq.nt)
col.name <- freq.nt$Var1
freq.nt <- as.data.frame(t(as.data.frame(freq.nt)))
colnames(freq.nt) <- col.name
freq.nt <- freq.nt[-c(1), ]
rownames(freq.nt) <- seq_name
freq.nt.all <- rbind(freq.nt.all, freq.nt)
}
freq.nt.all$a <- as.numeric(freq.nt.all$a)
freq.nt.all$t <- as.numeric(freq.nt.all$t)
freq.nt.all$g <- as.numeric(freq.nt.all$g)
freq.nt.all$c <- as.numeric(freq.nt.all$c)
A3T3 <- NULL
G3C3 <- NULL
freq.nt.all$A3T3 <- freq.nt.all$a / (freq.nt.all$a + freq.nt.all$t)
freq.nt.all$G3C3 <- freq.nt.all$g / (freq.nt.all$g + freq.nt.all$c)
  plot <- ggplot(freq.nt.all, aes(x = G3C3, y = A3T3)) + geom_point(size = 4) +
ylab("A3/(A3 + T3)") + xlab("G3/(G3 + C3)") + ylim(0, 1) + xlim(0, 1) + theme_classic(base_size = 20) +
geom_hline(yintercept = 0.5, color = "red", size = 1.2) + geom_vline(xintercept = 0.5, color = "red", size = 1.2)
return(plot)
}
|
gene.remove=function(data,namecolumn=1,toremove=NULL, extractpattern=expression("^(.+?)_.+")){
  todelete = which(sub(extractpattern,"\\1",data[,namecolumn],perl=TRUE) %in% toremove)
  if (length(todelete) > 0) data = data[-todelete,]
  return(data)
}
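## Usage sketch with toy data: the default extractpattern takes the gene
## identifier as everything before the first underscore, so both "geneA"
## rows are dropped.
d <- data.frame(design = c("geneA_s1", "geneA_s2", "geneB_s1"), count = 1:3)
gene.remove(d, toremove = "geneA")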
|
check_data <- function(X){
if (length(as.vector(X))==1){
stop("* filling : input data should not be a single number.")
}
if (is.vector(X)){
X = matrix(X,ncol=length(X))
}
if (any(is.infinite(X))){
stop("* filling : Inf or -Inf values are not allowed. Missing entries should be NAs.")
}
return(X)
}
check_bycol <- function(X){
fun <- function(x){(sum(is.na(x))==length(x))||(sum(is.nan(x))==length(x))}
res <- apply(X, 2, fun)
if ((sum(res))>0){
return(FALSE)
} else {
return(TRUE)
}
}
check_na <- function(X){
if ((is.na(X))||(is.nan(X))){
return(TRUE)
} else {
return(FALSE)
}
}
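## Illustrative calls to the validators above: a matrix with a stray NA
## passes check_data(), and check_bycol() flags columns that are all NA.
X.demo <- check_data(matrix(c(1, NA, 3, 4), 2, 2))
check_bycol(X.demo)                   # TRUE: no column is entirely NA
check_bycol(matrix(NA_real_, 2, 2))   # FALSE: every column is all NA
check_na(NA)                          # TRUE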
|
library(modelsummary)
test_that("dvnames adds names", {
d <- data.frame(x = 1:10, y = 2:11)
m1 <- lm(y~x, data = d)
m2 <- lm(x~y, data = d)
dvnout <- dvnames(list(m1,m2))
nondvn <- list('y' = m1, 'x' = m2)
expect_identical(dvnout, nondvn)
})
test_that("dvnames with single input", {
d <- data.frame(x = 1:10, y = 2:11)
m1 <- lm(y~x, data = d)
dvnout <- dvnames(m1)
nondvn <- list('y' = m1)
expect_identical(dvnout, nondvn)
})
test_that("dvnames numbering", {
d <- data.frame(x = 1:10, y = 2:11)
m1 <- lm(y~x, data = d)
m2 <- lm(x~y, data = d)
dvnout <- dvnames(list(m1,m2), number = TRUE)
nondvn <- list('y (1)' = m1, 'x (2)' = m2)
expect_identical(dvnout, nondvn)
})
test_that("dvnames fill", {
d <- data.frame(x = 1:10, y = 2:11)
m1 <- lm(y~x, data = d)
m2 <- lm(x~y, data = d)
dvnout <- dvnames(list(m1,m2,1))
nondvn <- list('y' = m1, 'x' = m2, 'Model' = 1)
expect_identical(dvnout, nondvn)
})
|
compare_AIC <- function(..., factor.value=1, silent=FALSE, FUN=function(x) specify_decimal(x, decimals=2)) {
result <- list(...)
if (is.list(result) & length(result)==1) result <- unlist(result, recursive=FALSE)
if (!is.null(result)) {
if (!is.list(result) || (is.null(names(result))) || (any(names(result)==""))) {
stop("The results must be included within a list with names; see example.")
} else {
out<-NULL
l<-length(result)
if (l<2) {
stop("A least two results must be provided.")
} else {
aic<-NULL
name<-names(result)
for (i in 1:l) {
encours <- result[i]
        t <- inherits(try(AIC(encours[[1]]), silent=TRUE), "try-error")
if (t & is.null(encours[[1]]$aic) & is.null(encours[[1]]$AIC) & is.null(encours[[1]]$value))
encours <- encours[[1]]
sumAIC <- 0
AICencours <- NULL
for (j in 1:length(encours)) {
encours2 <- encours[[j]]
          t <- inherits(try(AIC(encours2), silent=TRUE), "try-error")
if (!t) AICencours <- AIC(encours2)
if (!is.null(encours2$AIC)) AICencours <- encours2$AIC
if (!is.null(encours2$aic)) AICencours <- encours2$aic
if (!is.null(encours2$value) & !is.null(encours2$par))
AICencours <- 2*factor.value*encours2$value+2*(length(encours2$par))
if (is.null(AICencours))
{
stop(paste("Object", name[i], "has not the required format"))
}
sumAIC <- sumAIC + AICencours
}
aic <- c(aic, sumAIC)
}
bestaic<-min(aic)
ser<-which.min(aic)
deltaaic<-aic-bestaic
aw<-exp(-0.5*deltaaic)
saw=sum(aw)
aw<-aw/saw
out<-data.frame(cbind(AIC=FUN(aic), DeltaAIC=FUN(deltaaic), Akaike_weight=FUN(aw)), row.names=name)
if (!silent) print(paste("The lowest AIC (",sprintf("%.3f", bestaic) ,") is for series ", name[ser], " with Akaike weight=", sprintf("%.3f", aw[ser]), sep=""))
return(out)
}
}
}
}
|
SS_writestarter <- function(mylist, dir=NULL, file="starter.ss",
overwrite=FALSE, verbose=TRUE, warn=TRUE){
if(verbose) cat("running SS_writestarter\n")
if(mylist$type!="Stock_Synthesis_starter_file"){
stop("input 'mylist' should be a list with $type=='Stock_Synthesis_starter_file'\n")
}
on.exit({if(sink.number()>0) sink()})
if(is.null(dir)) dir <- getwd()
if(grepl("/$", dir)) {
outfile <- paste0(dir, file)
} else {
outfile <- paste(dir,file,sep="/")
}
if(file.exists(outfile)){
if(!overwrite){
stop(paste("file exists:",outfile,"\n set overwrite=TRUE to replace\n"))
}else{
if(warn) {cat("overwriting file:",outfile,"\n")}
file.remove(outfile)
}
}else{
if(verbose)cat("writing new file:",outfile,"\n")
}
oldwidth <- options()$width
options(width=1000)
if(verbose) cat("opening connection to",outfile,"\n")
zz <- file(outfile, open="at")
sink(zz)
  wl <- function(name){
    # write a value from the list followed by a comment giving its name
    value = mylist[names(mylist)==name]
    writeLines(paste0(value," #_",name))
  }
writeLines("
writeLines("
writeLines(paste("
writeLines(paste("
writeLines("
wl("datfile")
wl("ctlfile")
wl("init_values_src")
wl("run_display_detail")
wl("detailed_age_structure")
wl("checkup")
wl("parmtrace")
wl("cumreport")
wl("prior_like")
wl("soft_bounds")
wl("N_bootstraps")
wl("last_estimation_phase")
wl("MCMCburn")
wl("MCMCthin")
wl("jitter_fraction")
wl("minyr_sdreport")
wl("maxyr_sdreport")
wl("N_STD_yrs")
if(mylist$N_STD_yrs>0){
wl("STD_yr_vec")
}
wl("converge_criterion")
wl("retro_yr")
wl("min_age_summary_bio")
wl("depl_basis")
wl("depl_denom_frac")
wl("SPR_basis")
wl("F_report_units")
if(mylist$F_report_units %in% 4:5){
cat(mylist[["F_age_range"]],"
}
wl("F_report_basis")
if(mylist$final==3.3){
wl("MCMC_output_detail")
wl("ALK_tolerance")
}
writeLines("
wl("final")
options(width=oldwidth)
sink()
close(zz)
if(verbose) cat("file written to",outfile,"\n")
}
|
JTree_Basis <-
function(Zpos, T, PCidx, maxlev, all_nodes,whichsave){
J = dim(Zpos)[1]
m = dim(all_nodes)[2]
nodes = all_nodes[maxlev, ]
nodes = nodes[which(nodes > 0)]
tmpfilts = diag(rep(1, m))
ind = list()
sums = matrix(rep(0, m * maxlev), ncol = m)
difs = matrix(rep(0, m * maxlev), ncol = m)
basis = list()
for (lev in 1:maxlev) {
s = tmpfilts[Zpos[lev, ], ]
R = T[[lev]]
y = t(R)%*%s
tmpfilts[Zpos[lev, ], ] = y
y = y[PCidx[lev, ], ]
sums[lev, ] = y[1, ]
difs[lev, ] = y[2, ]
if (lev %in% whichsave){
basis[[lev]]=t(tmpfilts)}
else basis[[lev]]=NULL
}
return(list(basis=basis))
}
|
get.bounds <- function(lprec, columns = 1:n)
{
  ## lazy evaluation: the default for 'columns' refers to n, which is
  ## computed below before the default is first used
  n <- dim(lprec)[2]
if(n < 1)
columns <- integer(0)
lower <- .Call(RlpSolve_get_lowbo, lprec, as.integer(columns))
upper <- .Call(RlpSolve_get_upbo, lprec, as.integer(columns))
list(lower = lower, upper = upper)
}
|
1 + 1
if (TRUE) {
  x = 1
} else {
  x = 2
  print('Oh no... ask the right bracket to go away!')
}
1 * 3
2 + 2 + 2
df = data.frame(y = rnorm(100), x1 = rnorm(100), x2 = rnorm(100))
lm(y ~ x1 + x2, data = df)
1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1+1
|
sim_geno2 <-
function(cross, map=NULL, n_draws=1, error_prob=1e-4,
map_function=c("haldane", "kosambi", "c-f", "morgan"),
quiet=TRUE, cores=1)
{
if(!is.cross2(cross))
stop('Input cross must have class "cross2"')
if(error_prob < 0)
stop("error_prob must be > 0")
map_function <- match.arg(map_function)
cores <- setup_cluster(cores)
if(!quiet && n_cores(cores) > 1) {
message(" - Using ", n_cores(cores), " cores")
quiet <- TRUE
}
if(is.null(map)) {
if(is.null(cross$gmap)) stop("If cross does not contain a genetic map, map must be provided.")
map <- insert_pseudomarkers(cross$gmap)
}
if(length(map) != length(cross$geno) || !all(names(map) == names(cross$geno))) {
chr <- names(cross$geno)
if(!all(chr %in% names(map)))
stop("map doesn't contain all of the necessary chromosomes")
map <- map[chr]
}
index <- create_marker_index(lapply(cross$geno, colnames), map)
probs <- vector("list", length(map))
rf <- map2rf(map, map_function)
ind <- rownames(cross$geno[[1]])
chrnames <- names(cross$geno)
is_x_chr <- handle_null_isxchr(cross$is_x_chr, chrnames)
cross$is_female <- handle_null_isfemale(cross$is_female, ind)
    cross$cross_info <- handle_null_crossinfo(cross$cross_info, ind)
founder_geno <- cross$founder_geno
if(is.null(founder_geno))
founder_geno <- create_empty_founder_geno(cross$geno)
by_group_func <- function(i) {
dr <- .sim_geno2(cross$crosstype, t(cross$geno[[chr]][group[[i]],,drop=FALSE]),
founder_geno[[chr]], cross$is_x_chr[chr], cross$is_female[group[[i]][1]],
cross$cross_info[group[[i]][1],], rf[[chr]], index[[chr]],
error_prob, n_draws)
aperm(dr, c(3,1,2))
}
sex_crossinfo <- paste(cross$is_female, apply(cross$cross_info, 1, paste, collapse=":"), sep=":")
group <- split(seq(along=sex_crossinfo), sex_crossinfo)
names(group) <- NULL
nc <- n_cores(cores)
while(nc > length(group) && max(sapply(group, length)) > 1) {
mx <- which.max(sapply(group, length))
g <- group[[mx]]
group <- c(group, list(g[seq(1, length(g), by=2)]))
group[[mx]] <- g[seq(2, length(g), by=2)]
}
groupindex <- seq(along=group)
draws <- vector("list", length(cross$geno))
names(draws) <- names(cross$geno)
for(chr in seq(along=cross$geno)) {
if(!quiet) message("Chr ", names(cross$geno)[chr])
temp <- cluster_lapply(cores, groupindex, by_group_func)
d <- vapply(temp, dim, rep(0,3))
nr <- sum(d[1,])
draws[[chr]] <- array(dim=c(nr, d[2,1], d[3,1]))
for(i in groupindex)
draws[[chr]][group[[i]],,] <- temp[[i]]
dimnames(draws[[chr]]) <- list(rownames(cross$geno[[chr]]),
names(map[[chr]]),
NULL)
}
names(draws) <- names(cross$geno)
attr(draws, "crosstype") <- cross$crosstype
attr(draws, "is_x_chr") <- cross$is_x_chr
attr(draws, "alleles") <- cross$alleles
class(draws) <- c("sim_geno", "list")
draws
}
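
# Usage sketch (assumes the qtl2 package, which ships the "iron" example
# cross and the read_cross2()/insert_pseudomarkers() helpers used above):
# library(qtl2)
# iron <- read_cross2(system.file("extdata", "iron.zip", package = "qtl2"))
# map <- insert_pseudomarkers(iron$gmap, step = 1)
# dr <- sim_geno2(iron, map, n_draws = 4, error_prob = 0.002)
# dim(dr[["1"]])  # individuals x positions x draws for chromosome 1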
|
do.kmvp <- function(X, label, ndim=2,
preprocess=c("center","scale","cscale","decorrelate","whiten"),
bandwidth=1.0){
aux.typecheck(X)
n = nrow(X)
p = ncol(X)
label = check_label(label, n)
ulabel = unique(label)
for (i in 1:length(ulabel)){
if (sum(label==ulabel[i])==1){
stop("* do.kmvp : no degerate class of size 1 is allowed.")
}
}
N = length(ulabel)
if (any(is.na(label))||(any(is.infinite(label)))){
stop("* Supervised Learning : any element of 'label' as NA or Inf will simply be considered as a class, not missing entries.")
}
ndim = as.integer(ndim)
  if (!check_ndim(ndim,p)){stop("* do.kmvp : 'ndim' is a positive integer in [1,#(covariates)).")}
if (ndim>=N){
stop("* do.kmvp : the method requires {ndim <= N-1}, where N is the number of classes.")
}
if (missing(preprocess)){
algpreprocess = "center"
} else {
algpreprocess = match.arg(preprocess)
}
bandwidth = as.double(bandwidth)
if (!check_NumMM(bandwidth,0,1e+10,compact=TRUE)){stop("* do.kmvp : 'bandwidth' should be a nonnegative real number.")}
tmplist = aux.preprocess.hidden(X,type=algpreprocess,algtype="linear")
trfinfo = tmplist$info
pX = tmplist$pX
outPCA = do.pca(pX,ndim=(N-1))
projection_first = aux.adjprojection(outPCA$projection)
ppX = outPCA$Y
S = kmvp_S(ppX, label, bandwidth)
D = diag(rowSums(S))
L = D-S
W = array(0,c(n,n))
for (i in 1:n){
dataw = ppX[i,]
tgtidx = setdiff(which(label==label[i]),i)
tgtdata = ppX[tgtidx,]
W[i,tgtidx] = kmvp_Gvec(dataw, tgtdata)
}
Mhalf = diag(n)-W
M = (t(Mhalf)%*%Mhalf)
LHS = t(ppX)%*%M%*%ppX
RHS = t(ppX)%*%L%*%ppX
projection_second = aux.geigen(LHS,RHS,ndim,maximal=FALSE)
projection_all = projection_first%*%projection_second
result = list()
result$Y = pX%*%projection_all
result$trfinfo = trfinfo
result$projection = projection_all
return(result)
}
kmvp_S <- function(X, label, bd){
n = nrow(X)
S = array(0,c(n,n))
for (i in 1:(n-1)){
for (j in (i+1):n){
if (label[i]!=label[j]){
diffvec = as.vector(X[i,]-X[j,])
thevalue = exp(-sum(diffvec*diffvec)/bd)
S[i,j] = thevalue
S[j,i] = thevalue
}
}
}
return(S)
}
kmvp_Gvec <- function(vec, mat){
n = nrow(mat)
G = array(0,c(n,n))
for (i in 1:n){
veci = mat[i,]
for (j in i:n){
vecj = mat[j,]
diffi = vec-veci
diffj = vec-vecj
if (i==j){
G[i,i] = sum(diffi*diffj)
} else {
valueout = sum(diffi*diffj)
G[i,j] = valueout
G[j,i] = valueout
}
}
}
Ginv = 1/G
if (any(is.infinite(Ginv))){
Ginv[which(is.infinite(Ginv))] = 0
}
  denominator = sum(Ginv)
  woutput = rowSums(Ginv)/denominator
return(woutput)
}
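
# Usage sketch (follows the Rdimtools conventions assumed by the helpers
# above; iris is used purely as a convenient labeled data set):
# X <- as.matrix(iris[, 1:4])
# label <- as.factor(iris[, 5])
# out <- do.kmvp(X, label, ndim = 2, bandwidth = 2.0)
# plot(out$Y, col = label, pch = 19)  # 2-d embedding colored by class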
|
sprs <- function(df,Yi, plot_area, total_area, age=NA, .groups=NA, alpha = 0.05, error = 10, dec_places=4, pop="inf",tidy=TRUE){
  n<-VC<-N<-t_rec<-Sy<-Abserror<-Y<-Yhat<-Total_Error<-NULL
if( missing(df) ){
stop("df not set", call. = F)
}else if(!is.data.frame(df)){
stop("df must be a dataframe", call.=F)
}else if(length(df)<=1 | nrow(df)<=1){
stop( "length and number of rows 'df' must be greater than 1", call.=F)
}
if( missing(Yi) || Yi == ""){
stop("Yi not set", call. = F)
}else if( !is.character(Yi) ){
stop("'Yi' must be a character containing a variable name", call.=F)
}else if(length(Yi)!=1){
stop("length of 'Yi' must be 1", call.=F)
}else if(forestmangr::check_names(df, Yi)==F){
stop(forestmangr::check_names(df, Yi, boolean = F), call.=F)
}
if( missing(plot_area) || plot_area == "" ){
stop("plot_area not set", call. = F)
}else if(is.numeric(plot_area) & length(plot_area)==1){
df$plot_area <- plot_area
plot_area <- "plot_area"
}else if(!is.character(plot_area)){
stop("'plot_area' must be a character containing a variable name or a numeric value", call.=F)
}else if(length(plot_area)!=1){
stop("length of 'plot_area' must be 1", call.=F)
}else if(forestmangr::check_names(df, plot_area)==F){
stop(forestmangr::check_names(df, plot_area, boolean = F), call.=F)
}
if( missing(total_area) || total_area == "" ){
stop("total_area not set", call. = F)
}else if(is.numeric(total_area) & length(total_area)==1){
df$total_area <- total_area
total_area <- "total_area"
}else if(!is.character(total_area)){
stop("'total_area' must be a character containing a variable name or a numeric value", call.=F)
}else if(length(total_area)!=1){
stop("length of 'total_area' must be 1", call.=F)
}else if(forestmangr::check_names(df, total_area)==F){
stop(forestmangr::check_names(df, total_area, boolean = F), call.=F)
}
if(missing(age)||is.null(age)||is.na(age)||age==""){
df$age <- NA
age <- "age"
}else if(!is.character(age)){
stop("'age' must be a character containing a variable name", call.=F)
}else if(length(age)!=1){
stop("length of 'age' must be 1", call.=F)
}else if(forestmangr::check_names(df, age)==F){
stop(forestmangr::check_names(df, age, boolean = F), call.=F)
}
if(missing(.groups)||any(is.null(.groups))||any(is.na(.groups))||any(.groups==F)||any(.groups=="") ){
.groups_syms <- character()
}else if(!is.character(.groups)){
stop(".groups must be a character", call.=F)
}else if(!length(.groups) %in% 1:10){
stop("length of '.groups' must be between 1 and 10", call.=F)
}else if(forestmangr::check_names(df,.groups)==F ){
stop(forestmangr::check_names(df,.groups, boolean=F), call.=F)
}else{
.groups_syms <- rlang::syms(.groups)
}
if(!is.numeric( alpha )){
stop( "'alpha' must be numeric",call.=F)
}else if(length(alpha)!=1){
stop("length of 'alpha' must be 1",call.=F)
}else if(! alpha > 0 | ! alpha <= 0.30){
stop("'alpha' must be a number between 0 and 0.30", call.=F)
}
if(!is.numeric( error )){
stop( "'error' must be numeric", call.=F )
}else if(length(error)!=1){
stop("length of 'error' must be 1",call.=F)
}else if(!error > 0 | !error <= 20){
stop("'error' must be a number between 0 and 20", call.=F)
}
if(!is.numeric( dec_places )){
stop( "'dec_places' must be numeric", call.=F)
}else if(length(dec_places)!=1){
stop("length of 'dec_places' must be 1",call.=F)
}else if(! dec_places %in% seq(from=0,to=9,by=1) ){
stop("'dec_places' must be a integer between 0 and 9", call.=F)
}
if(!is.character( pop )){
stop( "'pop' must be character", call.=F)
}else if(length(pop)!=1){
stop( "length of 'pop' must be 1", call.=F)
}else if( ! pop %in% c("fin", "inf" ) ){
stop("'pop' must be equal to 'fin' or 'inf' ", call. = F)
}
if( is.null(tidy) || ! tidy %in% c(TRUE, FALSE) ){
stop("tidy must be equal to TRUE or FALSE", call. = F)
}else if(length(tidy)!=1){
stop( "length of 'tidy' must be 1", call.=F)
}
Yi_sym <- rlang::sym(Yi)
plot_area_sym <- rlang::sym(plot_area)
total_area_sym <- rlang::sym(total_area)
age_sym <- rlang::sym(age)
x_ <-df %>%
dplyr::na_if(0) %>%
dplyr::group_by(!!!.groups_syms,.add=T) %>%
dplyr::summarise(
age = mean(!!age_sym,na.rm=T),
n = dplyr::n() ,
N = mean(!!total_area_sym,na.rm=T) / ( mean(!!plot_area_sym,na.rm=T)/10000 ),
VC = stats::sd(!!Yi_sym,na.rm=T) / mean(!!Yi_sym,na.rm=T) * 100,
t = stats::qt(alpha/2, df = n-1, lower.tail = FALSE) ,
t_rec = ifelse(pop=="inf",
stats::qt(alpha/2, df = ceiling( t^2 * VC^2 / error^2) - 1, lower.tail = FALSE) ,
stats::qt(alpha/2, df = ceiling( t^2 * VC^2 / ( error^2 +(t^2 * VC^2 / N) ) ) - 1, lower.tail = FALSE) ) ,
n_recalc = ifelse(pop=="inf",
ceiling( t_rec ^2 * VC^2 / error^2 ) ,
ceiling( t_rec ^2 * VC^2 / ( error^2 +(t_rec^2 * VC^2 / N) ) ) ),
S2 = stats::var(!!Yi_sym,na.rm=T),
sd = stats::sd(!!Yi_sym,na.rm=T),
Y = mean(!!Yi_sym, na.rm=T),
Sy = ifelse(pop=="inf",
sqrt( stats::var(!!Yi_sym,na.rm=T)/n ),
sqrt( stats::var(!!Yi_sym,na.rm=T)/n * (1 - (n/N)) ) ),
Abserror = Sy * t ,
Percerror = Abserror / Y * 100 ,
Yhat = Y * N,
Total_Error = Abserror * N,
CI_Inf = Y - Abserror,
CI_Sup = Y + Abserror,
CI_ha_Inf = (Y - Abserror)*10000/mean(!!plot_area_sym,na.rm=T),
CI_ha_Sup = (Y + Abserror)*10000/mean(!!plot_area_sym,na.rm=T),
CI_Total_inf = Yhat - Total_Error,
CI_Total_Sup = Yhat + Total_Error) %>%
dplyr::na_if(0) %>%
rm_empty_col %>%
forestmangr::round_df(dec_places)
x <- x_ %>%
plyr::rename(c( "age" = "Age" ,
"n" = "Total number of sampled plots (n)",
"N" = "Number of maximum plots (N)",
"t" = "t-student" ,
"t_rec" = "recalculated t-student",
"n_recalc" = "Number of samples regarding the admited error",
"S2" = "Variance (S2)",
"sd" = "Standard deviation (s)",
"VC" = "Variance Quoeficient (VC)",
"Y" = "Mean (Y)" ,
"Sy" = "Standard error of the mean (Sy)",
"Abserror" = "Absolute Error" ,
"Percerror" = "Relative Error (%)",
"Yhat" = "Estimated Total Value (Yhat)",
"Total_Error" = "Total Error",
"CI_Inf" = "Inferior Confidence Interval (m3)" ,
"CI_Sup" = "Superior Confidence Interval (m3)",
"CI_ha_Inf" = "Inferior Confidence Interval (m3/ha)" ,
"CI_ha_Sup" = "Superior Confidence Interval (m3/ha)",
"CI_Total_inf" = "inferior Total Confidence Interval (m3)",
"CI_Total_Sup" = "Superior Total Confidence Interval (m3)"),
warn_missing = F)
if(tidy==F)
{
return(x_)
}
else if(tidy==T & length(.groups_syms)==0 )
{
x <- tibble::rownames_to_column(data.frame("Values"=t(x)) , "Variables" )
return(as.data.frame(x))
}
else
{
all_but_group_vars <- rlang::syms(names(x)[! names(x) %in% .groups ])
last_group_var <- rlang::sym(.groups[length(.groups)])
y <- x %>%
tidyr::gather("Variables","value", !!!all_but_group_vars, factor_key=T ) %>%
dplyr::arrange(!!! .groups_syms ) %>%
tidyr::spread(!!last_group_var,"value",sep="") %>%
dplyr::ungroup()
return(as.data.frame(y))
}
}
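
# Usage sketch on simulated plot volumes (the column name and areas are made
# up; plot_area in m2 and total_area in ha may be numeric scalars, as
# validated above):
# inv <- data.frame(volume = rnorm(12, mean = 150, sd = 15))
# sprs(inv, Yi = "volume", plot_area = 400, total_area = 40)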
|
NUMBER_OF_SIDES <- 2
NUMBER_OF_CYLINDERS <- 80
NUMBER_OF_SECTORS_DD <- 11
NUMBER_OF_SECTORS_HD <- 22
BLOCK_SIZE <- 512
TYPES <- data.frame(type = c("T_HEADER", "T_DATA", "T_LIST", "DIRCACHE"),
value = c (2, 8, 16, 33))
SEC_TYPES <- data.frame(type = c("ST_ROOT", "ST_FILE", "ST_USERDIR", "ST_LINKFILE", "ST_LINKDIR", "ST_SOFTLINK"),
value = c(1, 0x100000000-3, 2, 0x100000000-4, 4, 3))
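# Sanity check of the geometry constants: a double-density disk holds
# 2 sides * 80 cylinders * 11 sectors * 512 bytes = 901120 bytes (880 KiB).
NUMBER_OF_SIDES * NUMBER_OF_CYLINDERS * NUMBER_OF_SECTORS_DD * BLOCK_SIZE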
|
num_to_schoice <- num2schoice <- function(
correct,
wrong = NULL,
range = c(0.5, 1.5) * correct,
delta = 1,
digits = 2,
method = c("runif", "delta"),
sign = FALSE,
verbose = getOption("num_to_choice_warnings")
)
{
verbose <- if(is.null(verbose)) TRUE else isTRUE(verbose)
range <- range(c(correct, range))
if(!is.null(wrong)) {
wrong <- as.numeric(na.omit(wrong))
range <- range(c(wrong, range))
if(length(wrong) > 2) wrong <- sample(wrong, 2)
}
if((length(wrong) > 0 && any(abs(round2(wrong, digits = digits) - round2(correct, digits = digits)) < delta)) |
(length(wrong) > 1 && min(abs(dist(round2(wrong, digits = digits)))) < delta)) {
if(verbose) warning("specified 'wrong' is too small for 'delta'")
return(NULL)
}
if(diff(range) < delta | max(abs(range - correct)) < delta) {
if(verbose) warning("specified 'range' is too small for 'delta'")
return(NULL)
}
if(isTRUE(sign)) sign <- sample(0:(4 - length(wrong)), 1)
sign <- as.integer(sign)
ok <- FALSE
nle <- sample(0:4, 1)
if(abs(round2(correct, digits = digits) - range[1]) < (nle * delta) | abs(round2(correct, digits = digits) - range[2]) < ((4 - nle) * delta)){
if(verbose) warning("specified 'range' is too small for 'delta'")
return(NULL)
}
while(!ok) {
rand <- switch(match.arg(method),
"runif" = c(runif(nle, range[1], round2(correct, digits = digits)), runif(4 - nle, round2(correct, digits = digits), range[2])),
"delta" = c(sample(seq(round2(correct, digits = digits) - delta, range[1], by = -delta), nle),
sample(seq(round2(correct, digits = digits) + delta, range[2], by = delta), 4 - nle))
)
if(sign == 0L) {
solution <- c(correct, wrong, sample(rand, 4 - length(wrong)))
} else {
solution <- c(correct, wrong, sample(rand, 4 - length(wrong) - sign), sample(-c(correct, wrong, rand), sign))
}
solution <- round2(solution, digits = digits)
ok <- length(unique(solution)) == 5 & min(abs(dist(solution))) >= delta
}
o <- sample(1:5)
list(
solutions = c(TRUE, rep(FALSE, 4))[o],
questions = paste("$", format(solution, nsmall = digits, trim = TRUE)[o], "$", sep = "")
)
}
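# Usage sketch (round2() is an internal rounding helper assumed to be
# available, as in the function body above):
# set.seed(1)
# num_to_schoice(correct = 123.45, range = c(100, 150), delta = 1, digits = 2)
# # $solutions: one TRUE among five; $questions: five "$...$"-formatted values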
matrix_to_mchoice <- matrix2mchoice <- function(
x,
y = NULL,
lower = FALSE,
name = "a",
comparisons = c("==", "<", ">", "<=", ">=")
)
{
x <- as.matrix(x)
d <- dim(x)
if(is.null(y)) y <- sample(-round(max(abs(x))):round(max(abs(x))), 5, replace = TRUE)
stopifnot(length(y) == 5)
ix <- as.matrix(expand.grid(row = 1:d[1], col = 1:d[2]))
if(lower) ix <- ix[ix[,1] >= ix[,2], , drop = FALSE]
ix <- ix[rep(sample(1:nrow(ix)), length.out = 5), , drop = FALSE]
prob <- runif(1, 0.3, 0.8)
y <- ifelse(sample(c(TRUE, FALSE), 5, replace = TRUE, prob = c(prob, 1 - prob)), x[ix], y)
comp <- comp_latex <- sample(comparisons, 5, replace = TRUE)
comp_latex[comp == "=="] <- "="
comp_latex[comp == "<="] <- "\\le"
comp_latex[comp == ">="] <- "\\ge"
questions <- character(5)
solutions <- logical(5)
explanations <- character(5)
for(i in 1:5) {
solutions[i] <- eval(parse(text = paste(x[ix][i], comp[i], y[i])))
questions[i] <- paste("$", name, "_{", ix[i,1], ix[i,2], "} ", comp_latex[i], " ", y[i], "$", sep = "")
explanations[i] <- paste("$", name, "_{", ix[i,1], ix[i,2], "} = ", x[ix][i],
if(solutions[i]) "$" else paste(" \\not", comp_latex[i], " ", y[i], "$", sep = ""), sep = "")
}
return(list(
questions = questions,
solutions = solutions,
explanations = explanations))
}
matrix_to_schoice <- matrix2schoice <- function(
x,
y = NULL,
lower = FALSE,
name = "a",
delta = 0.5,
digits = 0
)
{
x <- as.matrix(x)
d <- dim(x)
if(is.null(y)) y <- -round(max(abs(x))):round(max(abs(x)))
stopifnot(length(y) >= 5)
ix <- as.matrix(expand.grid(row = 1:d[1], col = 1:d[2]))
if(lower) ix <- ix[ix[,1] >= ix[,2], , drop = FALSE]
ix <- ix[sample(1:nrow(ix)), , drop = FALSE]
ix0 <- ix[1, ]
ix <- ix[-1, , drop = FALSE]
correct <- x[ix0[1], ix0[2]]
wrong <- x[ix]
wrong <- unique(wrong[wrong != correct])
random <- unique(y[!(y %in% c(correct, wrong))])
if(length(c(wrong, random)) < 4) {
warning("'y' contains too few potentially wrong comparisons")
return(NULL)
}
solution <- correct
while(length(unique(solution)) != 5 || min(abs(dist(solution))) < delta) {
if(length(random) < 1) {
solution <- c(correct, sample(wrong, 4))
} else if(length(random) == 1) {
solution <- c(correct, sample(wrong, 3), random)
} else {
nw <- sample(max(0, 4 - length(random)):min(3, length(wrong)), 1)
solution <- c(correct, sample(wrong, nw), sample(random, 4 - nw))
}
solution <- round2(solution, digits = digits)
}
o <- sample(1:5)
list(
index = ix0,
name = paste("$", name, "_{", ix0[1], ix0[2], "}", c("", paste("=", correct)), "$", sep = ""),
solutions = c(TRUE, rep(FALSE, 4))[o],
questions = paste("$", format(solution, nsmall = digits)[o], "$", sep = "")
)
}
det_to_schoice <- det2schoice <- function(
x,
y = NULL,
range = NULL,
delta = 0.5,
digits = 0
)
{
x <- as.matrix(x)
d <- dim(x)
stopifnot(d[1] == 2 & d[2] == 2)
if(is.null(range)) {
m <- max(2, max(abs(x)))
range <- c(-1.2 * m^2, 1.2 * m^2)
} else {
range <- range(range)
}
correct <- x[1,1] * x[2,2] - x[2,1] * x[1,2]
wrong <- x[1,1] * x[2,2] + x[2,1] * x[1,2]
if(!is.null(y)) wrong <- c(wrong, y[1,1] * y[2,2] - y[2,1] * y[1,2])
wrong <- wrong[wrong != correct]
solution <- c(correct, wrong)
if(isTRUE(all.equal(as.vector(x), round(as.vector(x))))) {
range <- round(range[1]):round(range[2])
range <- range[!(range %in% solution)]
while(length(unique(solution)) != 5 || min(abs(dist(solution))) < delta) {
solution <- c(correct, wrong, sample(range, 4 - length(wrong)))
solution <- round(solution, digits = digits)
}
} else {
while(length(unique(solution)) != 5 || min(abs(dist(solution))) < delta) {
solution <- c(correct, wrong, runif(4 - length(wrong), range[1], range[2]))
solution <- round(solution, digits = digits)
}
}
o <- sample(1:5)
list(
solutions = c(TRUE, rep(FALSE, 4))[o],
questions = paste("$", format(solution, nsmall = digits, trim = TRUE)[o], "$", sep = "")
)
}
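# Usage sketch for the exercise generators above; the determinant of
# matrix(c(2, 1, 3, 4), 2, 2) is 2*4 - 1*3 = 5, so that option is the TRUE one:
# set.seed(1)
# det_to_schoice(matrix(c(2, 1, 3, 4), 2, 2))
# matrix_to_mchoice(matrix(1:9, 3, 3), name = "a")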
|
library(ibd)
psfreq=function(design)
{
v=max(design)
k=ncol(design)
b=nrow(design)
trtpos.freq=matrix(0,v,k)
for(pos in 1:k)
{
for (blk in 1:b)
{
trtpos.freq[design[blk,pos],pos]=trtpos.freq[design[blk,pos],pos]+1
}
}
return(trtpos.freq)
}
cycle = function(x)
{
x = c(x[2:length(x)],x[1])
return(x)
}
dpf = function(v,b,r,k)
{
m = floor(b/v)
k1 = r-k*m
k2 = k - k1
temp = c(rep(m + 1, k1),rep(m, k2))
dpm = matrix(0,v,k)
dpm[1,] = temp
for(i in 2:v) dpm[i,] = cycle(dpm[i-1,])
return(dpm)
}
allocate = function(j,v,b,k,mvec,x1,x2)
{
Bs = NULL
for(i in 1:v)
{
m = mvec[i]
Bi = which(apply(x1[,1:k], 1, function(x) any(x == i)))
Bia = setdiff(Bi, Bs)
if(length(Bia) < m) return(0)
if(length(Bia) == 1) Bs.t = Bia else Bs.t = sample(Bia, size = m)
x2[Bs.t, j] = i
x1[Bs.t,][which(x1[Bs.t,] == i)] = 0
Bs = union(Bs,Bs.t)
}
return(out = list(x1 = x1, x2 = x2))
}
balancify = function(d1)
{
v = max(d1)
b = nrow(d1)
k = ncol(d1)
r = b*k/v
if(r - floor(r) != 0) stop("Design should be equireplicate")
M = dpf(v,b,r,k)
d2 = matrix(0,b,k)
j = 0
trial = 0
while(j < k & trial <= 10000)
{
trial = trial + 1
j = j + 1
mvec = M[,j]
out = allocate(j,v,b,k,mvec, x1 = d1, x2 = d2)
if(is.list(out))
{
d1 = out$x1
d2 = out$x2
} else j = j - 1
}
if(j == k & trial <= 10000)
{
P = psfreq(d2)
result = list(design = d2, P = P)
} else result = "Try again"
return(result)
}
pbbd = function(v,b,k)
{
if(v > 30 & k > 3) stop("Better to use balancify() function with an input design")
r = b*k/v
if(r - floor(r) != 0) stop("Equireplicate design not possible")
d = ibd(v,b,k)
if(any(diag(d$conc.mat) != r)) stop("Try again or use balancify() function with an equireplicate design")
d1 = d$design
d2 = balancify(d1)
if(is.list(d2))
{
parameters = c(v = v, b = b, r = r, k = k)
efficiencies = c(Aeff = d$A.Efficiency, Deff = d$D.Efficiency)
out = list(parameters = parameters, efficiencies = efficiencies, design = d2$design, P = d2$P)
} else out = d2
return(out)
}
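
# Usage sketch: a balanced design for v = 7 treatments in b = 7 blocks of
# size k = 3 (so r = 3); pbbd() relies on ibd() from the ibd package loaded
# above, and P gives the treatment-by-position frequencies from psfreq():
# out <- pbbd(7, 7, 3)
# out$P  # each treatment should appear about equally often in each position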
|
BSstack_predict <- function(BSmodel, Xi){
if(length(BSmodel) == 7){
BSmodel = BSmodel[[1]]
}
N <- nrow(Xi)
w <- BSmodel[[1]]
RFn <- BSmodel[[2]]
fNames <- BSmodel[[3]]
Nstack <- length(RFn)
P <- matrix(data = 0, nrow = N, ncol = (Nstack+1))
for(l in 1:Nstack){
Xp <- Xi[,fNames[[l]]]
if(anyNA(Xp)){
stop("ERROR: Input samples missing features.")
}
P[,l] <- predict(RFn[[l]], Xp)
}
if(N==1){
P[,Nstack+1] <- c(1, P[1:Nstack]) %*% w
}
else{
P[,Nstack+1] <- cbind(matrix(1,N,1),P[1:N,1:Nstack]) %*% w
}
return(P)
}
|
GNB <- function(level){
x <- NULL
if(level==1){
x1 <- github.cssegisanddata.covid19(country = "Guinea-Bissau")
x2 <- ourworldindata.org(id = "GNB")
x <- full_join(x1, x2, by = "date")
}
return(x)
}
|
is.diag <- function (c, EPS=1e-12) {
  if (!is.matrix(c)) return(FALSE)
  cd <- dim(c)
  if (cd[1] != cd[2]) return(FALSE)
  mindg <- min(abs(diag(c)))
  maxodg <- max(abs(c - diag(diag(c))))
  # guard: a zero diagonal entry would make the ratio below 0/0 = NaN
  if (mindg == 0) return(maxodg < EPS)
  maxodg/mindg < EPS
}
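
# Quick checks:
# is.diag(diag(c(2, 3, 4)))   # TRUE
# is.diag(matrix(1, 2, 2))    # FALSE (off-diagonal entries dominate)
# is.diag(1:3)                # FALSE (not a matrix)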
|
ATilde <- function(A) {
n <- dim(A)[1]
total.mean <- sum(A)/n^2
m1 <- Rfast::rowmeans(A)
m2 <- Rfast::colmeans(A)
  A2 <- t( t(A - m1) - m2 ) + total.mean
A2
}
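
# Double-centering removes row and column means, so both should be
# (numerically) zero afterwards (requires Rfast, used above):
# A <- matrix(rnorm(16), 4, 4)
# max(abs(rowMeans(ATilde(A))), abs(colMeans(ATilde(A))))  # ~ 1e-16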
|
tile_coords <- function(data, id){
if(!all(c("row", "col") %in% names(data))) stop("`data` must contain columns named `col` and `row`", call. = FALSE)
if(any(c("x", "y") %in% names(data))) stop("`data` cannot contain columns named `x` or `y`", call. = FALSE)
id0 <- id
x <- dplyr::filter(rtrek::stTiles, .data[["id"]] == id0)
w <- x$width
h <- x$height
if(id == "galaxy2"){
w <- 8000
h <- 6445
}
r <- h / w
dplyr::mutate(data, x = 250 * col / w, y = -250 * r * row / h)
}
st_tiles <- function(id){
x <- rtrek::stTiles
x$url[x$id == id]
}
st_tiles_data <- function(id){
dplyr::filter(rtrek::stGeo, .data[["id"]] == !! id) %>%
dplyr::mutate(body = "Planet", category = "Homeworld", zone = .st_zone, species = .st_species)
}
|
assess_has_website <- function(x, ...) {
pkg_metric_eval(class = "pkg_metric_has_website", {
x$website_urls
})
}
attributes(assess_has_website)$column_name <- "has_website"
attributes(assess_has_website)$label <- "a vector of associated website urls"
metric_score.pkg_metric_has_website <- function(x, ...) {
as.numeric(length(x) > 0)
}
attributes(metric_score.pkg_metric_has_website)$label <-
"A binary indicator of whether the package has an acompanying website."
|
print.summary.catpredi <-
function(x, ...) {
print.catpredi(x, digits = x$digits)
cat("\n\n---------------------------------------------------\n")
cat("Fitted model for the categorized predictor variable\n")
cat("---------------------------------------------------\n\n")
tpm <- summary(x$fit.gam)
digits = max(3, getOption("digits") - 3)
signif.stars = getOption("show.signif.stars")
print(tpm$family)
cat("Formula:\n")
if (is.list(tpm$formula))
for (i in 1:length(tpm$formula)) print(tpm$formula[[i]])
else print(tpm$formula)
if (length(tpm$p.coeff) > 0) {
cat("\nParametric coefficients:\n")
printCoefmat(tpm$p.table, digits = digits, signif.stars = signif.stars,
na.print = "NA", ...)
}
cat("\n")
if (tpm$m > 0) {
cat("Approximate significance of smooth terms:\n")
printCoefmat(tpm$s.table, digits = digits, signif.stars = signif.stars,
has.Pvalue = TRUE, na.print = "NA", cs.ind = 1, ...)
}
cat("\n")
if (!is.null(tpm$rank) && tpm$rank < tpm$np)
cat("Rank: ", tpm$rank, "/", tpm$np, "\n", sep = "")
invisible(tpm)
}
|
setSolverPath <- function(path) {
if (missing(path)) {
path <- file.choose()
}
if (!grepl(pattern = "solver\\.exe$", x = path)) {
path_ <- gsub(pattern = ".*/", replacement = "", x = path)
cat(paste0("You have selected:\n-> ", path_, "\n"))
ans <- readline("Are you sure that's the Antares solver? (y/n) ")
if (ans != "y" & interactive()) {
setSolverPath()
} else {
warning("Unrecognized Antares solver name.")
options(antares.solver = path)
}
} else {
options(antares.solver = path)
}
return(invisible(path))
}
|
as.mcmc.bugs <- function(x, ...){
n.chains <- x$n.chains
sims <- x$sims.array
n.thin <- x$n.thin
if (n.chains==1) return(coda::mcmc(sims[, 1, ], thin=n.thin))
out <- vector("list", length=n.chains)
  for (i in seq(n.chains)) out[[i]] <- coda::mcmc(sims[, i, ], thin=n.thin)
  out <- coda::mcmc.list(out)
  coda::varnames(out) <- dimnames(sims)[[3]]
return(out)
}
|
rp.patterns <- function(data,
max.length=NULL,
min.length=2,
id.var=NULL,
na.rm=FALSE,
std.patterns=TRUE,
na.top=FALSE,
store.data=TRUE
) {
data <- as.data.frame(data)
if(!is.null(id.var)) {
if(!id.var %in% names(data))
stop("id.var not found in the data")
id <- data[,paste0(id.var)]
data <- data[,-which(names(data)==id.var)]
} else
id <- rep(NA,nrow(data))
if(!typeof(data[[1]]) %in% c("integer","double"))
stop("Data set contains other than numeric values")
if(nrow(data)==0 | ncol(data)==0)
stop("Data set is empty")
if(ncol(data) < 4)
stop("The analysis cannot proceed with a data set of less than four items")
n.vars <- ncol(data)
n.obs <- nrow(data)
if(is.null(max.length) | !is.numeric(max.length))
max.length <- floor(ncol(data)/2)
if(max.length > floor(ncol(data)/2))
max.length <- floor(ncol(data)/2)
if(!is.numeric(min.length) | min.length > max.length)
min.length <- max.length
patterns.df <- as.data.frame(matrix(nrow=nrow(data),ncol=max.length-min.length+1))
indices.df <- as.data.frame(matrix(nrow=nrow(data),ncol=2))
rownames(patterns.df) <- rownames(indices.df) <- rownames(data)
colnames(patterns.df) <- paste0("L",c(min.length:max.length))
patterns.df[,] <- 0
colnames(indices.df) <- c("score","percentile")
  patterns.df[,] <- t(apply(data, 1, function(row) {
row <- sapply(c(min.length:max.length),function(length) {
count <- 0
for(start in 1:(n.vars-length)) {
pattern <- row[c(start:(start+length-1))]
if(std.patterns==TRUE)
pattern <- pattern - min(pattern, na.rm=TRUE)
for(position in (start+1):(n.vars-length+1)) {
sequence <- row[c(position:(position+length-1))]
if(std.patterns==TRUE)
sequence <- sequence - min(sequence, na.rm=TRUE)
is.equal <- all(sequence==pattern, na.rm=na.rm)
if(!is.na(is.equal) & is.equal==TRUE)
count <- count + 1
}
}
max.rep <- n.vars - length
count <- count/max.rep
return(count)
})
return(row)
}))
indices.df$score <- rowSums(patterns.df)
indices.df$score <- indices.df$score/max(indices.df$score)
indices.df$percentile <- floor(rank(indices.df$score,na.last=na.top) / nrow(indices.df) * 100)
if(store.data==TRUE)
store <- data
else
store <- data.frame()
rp <- methods::new("ResponsePatterns",
options=list(
method="patterns",
max.length=max.length,
min.length=min.length,
id.var=ifelse(!is.null(id.var),id.var,""),
na.rm=na.rm,
std.patterns=std.patterns,
cor.method="none"
),
id=id,
percentile=0,
n.obs=n.obs,
n.vars=n.vars,
data=store,
coefficients=as.data.frame(patterns.df),
indices=indices.df
)
return(rp)
}
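
# Usage sketch on simulated Likert-type responses; @indices holds a
# repetition score and percentile per respondent (slots as defined above):
# set.seed(42)
# resp <- as.data.frame(matrix(sample(1:5, 200, replace = TRUE), nrow = 20))
# rp <- rp.patterns(resp)
# head(rp@indices)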
|
"bgtPower" <-
function(n, s, delta, p.hyp, conf.level=0.95, method="CP", alternative="two.sided")
{
if( any(n<=3) )
{stop("the number of groups n allowed in calculations must be integers greater than 1")}
if( any(s<1) ){stop("group size s must be specified as integers > 0")}
if( length(conf.level)!=1 || conf.level<0 || conf.level>1)
{stop("conf.level must be a positive number between 0 and 1")}
if( length(p.hyp)!=1 || p.hyp>1 || p.hyp<0)
{stop("true proportion p.hyp must be specified as a single number between 0 and 1")}
method<-match.arg(method, choices=c("CP","Blaker","AC","Score","Wald","SOC"))
alternative<-match.arg(alternative, choices=c("two.sided","less","greater"))
if(alternative=="less")
{if( any( p.hyp-delta < 0) || any(p.hyp-delta > 1) )
{stop("alternative=less: specify delta as a number between 0 and the threshold p.hyp")}
}
if(alternative=="greater")
{if( any( p.hyp+delta < 0) || any(p.hyp+delta > 1) )
{stop("alternative=greater: specify delta as a number between the threshold p.hyp and 1")}
}
if(alternative=="two.sided")
{if( any(p.hyp+delta < 0) || any(p.hyp+delta > 1) || any(p.hyp-delta < 0) || any(p.hyp-delta > 1))
{stop("alternative=two.sided: specify delta as a number between the threshold p.hyp and 1")}
}
matnsp <- cbind(n,s,delta)
matnsp <- cbind("ns"=matnsp[,1]*matnsp[,2], matnsp)
power <- numeric(length=nrow(matnsp))
bias <- numeric(length=nrow(matnsp))
for( i in 1:length(power))
{
temp <- bgtPowerI(n=matnsp[i,2], s=matnsp[i,3], delta=matnsp[i,4], p.hyp=p.hyp, conf.level=conf.level, method=method, alternative=alternative)
power[i] <- temp$power
bias[i] <- temp$bias
}
return(cbind(matnsp, power, bias))
}
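
# Usage sketch: power and bias for a group-testing design with n = 30 groups
# of size s = 5, testing p.hyp = 0.05 against a one-sided shift of delta
# (relies on the interval functions bgtCP() etc. assumed above):
# bgtPower(n = 30, s = 5, delta = 0.02, p.hyp = 0.05,
#          conf.level = 0.95, method = "CP", alternative = "less")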
"bgtPowerI" <-
function(n, s, delta, p.hyp, conf.level, method, alternative){
 P.Ind <- function(n,y,s,p.hyp,conf.level,method,alternative){
  # Blaker intervals take no alternative argument, so handle them separately
  if(method=="Blaker"){
   KI.Bl <- bgtBlaker(n=n,y=y,s=s,conf.level=conf.level)
   dec <- switch(alternative,
    "two.sided" = (KI.Bl[[1]]>=p.hyp || KI.Bl[[2]]<=p.hyp),
    "less"      = (KI.Bl[[2]]<=p.hyp),
    "greater"   = (KI.Bl[[1]]>=p.hyp))
   return(dec)
  }
  KI <- switch(method,
   "Score" = bgtWilson(n=n,y=y,s=s,conf.level=conf.level, alternative=alternative),
   "AC"    = bgtAC(n=n,y=y,s=s,conf.level=conf.level, alternative=alternative),
   "Wald"  = bgtWald(n=n,y=y,s=s,conf.level=conf.level, alternative=alternative),
   "CP"    = bgtCP(n=n,y=y,s=s,conf.level=conf.level, alternative=alternative),
   "SOC"   = bgtSOC(n=n,y=y,s=s,conf.level=conf.level, alternative=alternative),
   stop("argument method misspecified"))
  # reject if p.hyp lies outside the confidence interval
  (KI[[1]]>=p.hyp || KI[[2]]<=p.hyp)
 }
bgt.prob<-function(n,y,s,p.tr)
{
theta<-1-(1-p.tr)^s
dbinom(x=y,size=n, prob=theta)
}
if(alternative=="less" || alternative=="greater")
{
if(alternative=="less"){p.tr = p.hyp - delta}
if(alternative=="greater"){p.tr = p.hyp + delta}
yvec<-0:n
probvec<-numeric(length=length(yvec))
powvec<-numeric(length=length(yvec))
expvec<-numeric(length=length(yvec))
for(i in 1:length(yvec))
{
probvec[i] <- bgt.prob(n=n,y=yvec[i],s=s,p.tr=p.tr)
powvec[i] <- P.Ind(n=n,y=yvec[i],s=s,p.hyp=p.hyp,conf.level=conf.level,method=method, alternative = alternative)
expvec[i] <- (1-(1-yvec[i]/n)^(1/s))
}
powex<-sum(powvec * probvec)
expex<-sum(expvec * probvec)
bias<-expex-p.tr
out<-list(power=powex, bias=bias, p.tr=p.tr)
}
if(alternative=="two.sided")
{
p.trl = p.hyp - delta
p.trg = p.hyp + delta
yvec<-0:n
powvec<-numeric(length=length(yvec))
expvec<-numeric(length=length(yvec))
probvecl<-numeric(length=length(yvec))
probvecg<-numeric(length=length(yvec))
for(i in 1:length(yvec))
{
probvecl[i] <- bgt.prob(n=n,y=yvec[i],s=s,p.tr=p.trl)
probvecg[i] <- bgt.prob(n=n,y=yvec[i],s=s,p.tr=p.trg)
powvec[i] <- P.Ind(n=n,y=yvec[i],s=s,p.hyp=p.hyp,conf.level=conf.level,method=method, alternative = alternative)
expvec[i] <- (1-(1-yvec[i]/n)^(1/s))
}
powexl<-sum(powvec * probvecl)
expexl<-sum(expvec * probvecl)
biasl<-expexl-p.trl
powexg<-sum(powvec * probvecg)
expexg<-sum(expvec * probvecg)
biasg<-expexg-p.trg
out<-list(power=min(powexl,powexg),bias=max(biasl,biasg), p.tr=c(p.trl,p.trg))
}
class(out)<-"bgtpower"
out
}
|