sbn_strahler <- function(g) {
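# Strahler stream order for a directed stream-network graph (edges point
# downstream): source vertices (in-degree 0) get order 1; a vertex whose two
# upstream neighbours share the same order gets that order + 1, otherwise it
# takes the maximum upstream order.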
if (max(igraph::degree(g, igraph::V(g), mode = "in")) > 2) stop("network must not have vertices with more than two connected upstream vertices")
res <- rep(0, igraph::gorder(g))
names(res) <- 1:igraph::gorder(g)
dg <- igraph::degree(g, mode = "in")
res[dg == 0] <- 1
while(any(res == 0)) {
xx <- unique(unlist(igraph::adjacent_vertices(g,
v = names(res[res > 0]),
mode = "out")))
xx <- res[xx]
xx <- as.numeric(names(xx[xx == 0]))
for (i in xx) {
zz <- igraph::adjacent_vertices(g, v = i, mode = "in")
vs <- res[unlist(zz)]
if (all(vs > 0)) {
if (length(unique(vs)) == 1 & length(vs) == 2) {
res[i] <- max(vs) + 1
} else {res[i] <- max(vs)}
}
}
}
return(res)
} |
expected <- eval(parse(text="c(\"/\", \" not meaningful for factors\")"));
test(id=0, code={
argv <- eval(parse(text="list(NULL, c(\"/\", \" not meaningful for factors\"))"));
.Internal(gettext(argv[[1]], argv[[2]]));
}, o=expected); |
context("apa_print.htest()")
test_that(
"t-Test for means"
, {
t_test <- t.test(extra ~ group, data = sleep)
t_test_output <- apa_print(t_test)
expect_is(t_test_output, "list")
expect_equal(names(t_test_output), container_names)
expect_is(t_test_output$stat, "character")
expect_equal(t_test_output$stat, "$t(17.78) = -1.86$, $p = .079$")
expect_is(t_test_output$est, "character")
expect_equal(t_test_output$est, "$\\Delta M = -1.58$, 95\\% CI $[-3.37$, $0.21]$")
expect_is(t_test_output$full, "character")
expect_equal(t_test_output$full, "$\\Delta M = -1.58$, 95\\% CI $[-3.37$, $0.21]$, $t(17.78) = -1.86$, $p = .079$")
t_test <- t.test(extra ~ group, data = sleep, paired = TRUE)
t_test_output <- apa_print(t_test)
expect_equal(t_test_output$full, "$M_d = -1.58$, 95\\% CI $[-2.46$, $-0.70]$, $t(9) = -4.06$, $p = .003$")
t_test <- t.test(sleep$extra, mu = 0)
t_test_output <- apa_print(t_test)
expect_equal(t_test_output$full, "$M = 1.54$, 95\\% CI $[0.60$, $2.48]$, $t(19) = 3.41$, $p = .003$")
t_test_output <- apa_print(t_test, ci = matrix(c(1, 2), ncol = 2, dimnames = list(NULL, c("2.5 \\%", "97.5 \\%"))))
expect_equal(t_test_output$est, "$M = 1.54$, 95\\% CI $[1.00$, $2.00]$")
t_test_output <- apa_print(t_test, stat_name = "foobar")
expect_equal(t_test_output$stat, "$foobar(19) = 3.41$, $p = .003$")
t_test_output <- apa_print(t_test, est_name = "foobar")
expect_equal(t_test_output$est, "$foobar = 1.54$, 95\\% CI $[0.60$, $2.48]$")
t_test_output <- apa_print(t_test, digits = 3)
expect_equal(t_test_output$est, "$M = 1.540$, 95\\% CI $[0.596$, $2.484]$")
}
)
test_that(
"Wilcoxon tests"
, {
wilcox_test <- wilcox.test(extra ~ group, data = sleep, exact = FALSE)
wilcox_test_output <- apa_print(wilcox_test)
expect_equal(names(wilcox_test_output), container_names)
expect_is(wilcox_test_output$stat, "character")
expect_equal(wilcox_test_output$stat, "$W = 25.50$, $p = .069$")
wilcox_test <- wilcox.test(extra ~ group, data = sleep, conf.int = TRUE, exact = FALSE)
wilcox_test_output <- apa_print(wilcox_test)
expect_is(wilcox_test_output$est, "character")
expect_equal(wilcox_test_output$est, "$Mdn_d = -1.35$, 95\\% CI $[-3.60$, $0.10]$")
expect_is(wilcox_test_output$full, "character")
expect_equal(wilcox_test_output$full, "$Mdn_d = -1.35$, 95\\% CI $[-3.60$, $0.10]$, $W = 25.50$, $p = .069$")
wilcox_test <- wilcox.test(extra ~ group, data = sleep, paired = TRUE, exact = FALSE)
wilcox_test_output <- apa_print(wilcox_test)
expect_equal(wilcox_test_output$stat, "$V = 0.00$, $p = .009$")
wilcox_test <- wilcox.test(sleep$extra, mu = 0, conf.int = TRUE, exact = FALSE)
wilcox_test_output <- apa_print(wilcox_test)
expect_equal(wilcox_test_output$full, "$Mdn^* = 1.60$, 95\\% CI $[0.45$, $2.65]$, $V = 162.50$, $p = .007$")
}
)
test_that(
"Tests for correlations"
, {
x <- c(44.4, 45.9, 41.9, 53.3, 44.7, 44.1, 50.7, 45.2, 60.1)
y <- c( 2.6, 3.1, 2.5, 5.0, 3.6, 4.0, 5.2, 2.8, 3.8)
cor_test <- cor.test(x, y)
cor_test_output <- apa_print(cor_test)
expect_is(cor_test_output, "list")
expect_equal(names(cor_test_output), container_names)
expect_is(cor_test_output$stat, "character")
expect_equal(cor_test_output$stat, "$t(7) = 1.84$, $p = .108$")
expect_is(cor_test_output$est, "character")
expect_equal(cor_test_output$est, "$r = .57$, 95\\% CI $[-.15$, $.90]$")
expect_is(cor_test_output$full, "character")
expect_equal(cor_test_output$full, "$r = .57$, 95\\% CI $[-.15$, $.90]$, $t(7) = 1.84$, $p = .108$")
cor_test <- cor.test(x, y, method = "spearman")
cor_test_output <- apa_print(cor_test)
expect_equal(cor_test_output$full, "$r_{\\mathrm{s}} = .60$, $S = 48.00$, $p = .097$")
cor_test <- cor.test(x, y, method = "kendall")
cor_test_output <- apa_print(cor_test)
expect_equal(cor_test_output$full, "$\\uptau = .44$, $T = 26.00$, $p = .119$")
cor_test <- cor.test(x, y, method = "kendall", exact = FALSE)
cor_test_output <- apa_print(cor_test)
expect_equal(cor_test_output$full, "$\\uptau = .44$, $z = 1.67$, $p = .095$")
}
)
test_that(
"Chi-squared for contingency tables"
, {
smokers <- c(83, 90, 129, 70)
patients <- c(86, 93, 136, 82)
prop_test <- prop.test(smokers, patients)
expect_error(apa_print(prop_test), "Please provide the sample size to report.")
prop_test_output <- suppressWarnings(apa_print(prop_test, n = sum(patients)))
expect_is(prop_test_output, "list")
expect_equal(names(prop_test_output), container_names)
expect_is(prop_test_output$stat, "character")
expect_equal(prop_test_output$stat, "$\\chi^2(3, n = 397) = 12.60$, $p = .006$")
}
)
test_that(
"Bartlett test"
, {
bartlett_test <- bartlett.test(count ~ spray, data = InsectSprays)
bartlett_test_output <- apa_print(bartlett_test)
expect_is(bartlett_test_output, "list")
expect_equal(names(bartlett_test_output), container_names)
expect_is(bartlett_test_output$stat, "character")
expect_equal(bartlett_test_output$stat, "$K^2(5) = 25.96$, $p < .001$")
}
)
test_that(
"Mauchly test"
, {
tmp <- capture.output(utils::example(SSD))
mauchly_data <- data.frame(
deg = gl(3, 1, 6, labels = c(0, 4, 8))
, noise = gl(2, 3, 6, labels = c("A", "P"))
)
mauchly_test <- mauchly.test(mlmfit, X = ~ deg + noise, idata = mauchly_data)
mauchly_output <- apa_print(mauchly_test)
expect_is(mauchly_output, "list")
expect_equal(names(mauchly_output), container_names)
expect_is(mauchly_output$stat, "character")
expect_equal(mauchly_output$stat, "$W = 0.89$, $p = .638$")
mauchly_test <- mauchly.test(mlmfit, M = ~ deg + noise, X = ~ noise, idata = mauchly_data)
mauchly_output <- apa_print(mauchly_test)
expect_is(mauchly_output$stat, "character")
expect_equal(mauchly_output$stat, "$W = 0.96$, $p = .850$")
}
)
test_that(
"One-way ANOVA"
, {
oneway_test <- oneway.test(extra ~ group, data = sleep)
oneway_output <- apa_print(oneway_test)
expect_is(oneway_output, "list")
expect_equal(names(oneway_output), container_names)
expect_is(oneway_output$stat, "character")
expect_equal(oneway_output$stat, "$F(1, 17.78) = 3.46$, $p = .079$")
}
)
test_that(
"One-sided t test (with infty in CI)"
, {
t_out <- t.test(formula = yield ~ N, data = npk, alternative = "greater")
apa_out <- apa_print(t_out)
t2 <- t.test(formula = yield ~ N, data = npk, alternative = "less")
apa2 <- apa_print(t2)
expect_identical(
object = apa_out$full_result
, expected = "$\\Delta M = -5.62$, 95\\% CI $[-9.54$, $\\infty]$, $t(21.88) = -2.46$, $p = .989$"
)
expect_identical(
object = apa_out$estimate
, expected = "$\\Delta M = -5.62$, 95\\% CI $[-9.54$, $\\infty]$"
)
expect_identical(
object = apa2$full_result
, expected = "$\\Delta M = -5.62$, 95\\% CI $[-\\infty$, $-1.70]$, $t(21.88) = -2.46$, $p = .011$"
)
expect_identical(
object = apa2$estimate
, expected = "$\\Delta M = -5.62$, 95\\% CI $[-\\infty$, $-1.70]$"
)
}
) |
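# Baby-name popularity by state: normalise name counts by births, keep
# frequently recorded names, inspect per-state trends, and score each name by
# how well a common year effect explains it (plyr split-apply-combine workflow).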
library(ggplot2)
library(plyr)
bnames <- read.csv("baby-names-by-state.csv", stringsAsFactors = F)
bnames <- subset(bnames, !is.na(number))
bnames$state <- factor(bnames$state)
births <- read.csv("births.csv")
bnames <- merge(bnames, births, by = c("state", "year", "sex"))
bnames$prop <- bnames$number / bnames$births
bnames$namesex <- paste(bnames$name, bnames$sex, sep = "-")
counts <- ddply(bnames, c("namesex"), summarise,
n = length(namesex),
number = sum(number))
counts <- counts[order(-counts$number), ]
counts <- subset(counts, n > 1250 * 0.25 & number > 1e5)
top <- subset(bnames, namesex %in% counts$namesex)
show_name <- function(name) {
one <- top[top$namesex == name, ]
qplot(year, prop, data = one, geom = "line", group = state)
}
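# Note: cast() below comes from the reshape package (assumed to be loaded);
# reshape2 users would call dcast() instead.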
bystate <- cast(top, namesex + year ~ state, value = "prop")
cors <- dlply(bystate, "namesex", function(df)
cor(as.matrix(df[, -(1:2)]), use = "pairwise.complete.obs"))
arrange(ldply(cors, min, na.rm = T), V1)
patterns <- dlply(top, c("namesex"), function(df) {
lm(prop ~ factor(year), data = df, weight = sqrt(births))
}, .progress = "text")
rsq <- function(mod) {
summarise(summary(mod), rsq = r.squared, sigma = sigma)
}
qual <- arrange(merge(ldply(patterns, rsq), counts), -rsq)
sub <- c(as.character(qual$namesex[seq(1, nrow(qual), by = 5)]), "Juan-boy")
interesting <- subset(bnames, namesex %in% sub)
write.table(interesting, "interesting-names.csv", sep = ",", row = F) |
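# Same-size clustering: dispatches to one of three heuristics, namely iterative
# nearest-neighbour grouping (nnit), k-means/PAM assignment with capped cluster
# sizes (kmvar), or bottom-up hierarchical clustering (hcbottom).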
same_size_clustering <- function(mat, diss = FALSE, clsize = NULL,
algo = c("nnit", "hcbottom", "kmvar"),
method = c(
"maxd", "random", "mind", "elki",
"ward.D", "average", "complete", "single"
)) {
stopifnot(is.numeric(clsize))
algo <- match.arg(algo)
method <- match.arg(method)
do.call(algo, args = list(mat = mat, diss = diss, clsize = clsize, method = method))
}
nnit <- function(mat,
clsize = NULL,
diss = FALSE,
method = "maxd") {
stopifnot(is.logical(diss))
clsize.rle <- rle(as.numeric(cut(1:nrow(mat), ceiling(nrow(mat) / clsize))))
clsize <- clsize.rle$lengths
lab <- rep(NA, nrow(mat))
if (isFALSE(diss)) {
dmat <- as.matrix(dist(mat))
} else {
dmat <- mat
}
cpt <- 1
while (sum(is.na(lab)) > 0) {
lab.ii <- which(is.na(lab))
dmat.m <- dmat[lab.ii, lab.ii]
ii <- switch(method,
maxd = which.max(rowSums(dmat.m)),
mind = which.min(rowSums(dmat.m)),
random = sample.int(nrow(dmat.m), 1),
stop("unsupported method in 'nnit'!")
)
lab.m <- rep(NA, length(lab.ii))
lab.m[head(order(dmat.m[ii, ]), clsize[cpt])] <- cpt
lab[lab.ii] <- lab.m
cpt <- cpt + 1
}
if (any(is.na(lab))) {
lab[which(is.na(lab))] <- cpt
}
lab
}
kmvar <- function(mat,
clsize = NULL,
diss = FALSE,
method = "maxd") {
stopifnot(is.logical(diss))
k <- ceiling(nrow(mat) / clsize)
if (isFALSE(diss)) {
km.o <- kmeans(mat, k)
centd <- lapply(1:k, function(kk) {
euc <- t(mat) - km.o$centers[kk, ]
sqrt(apply(euc, 2, function(x) sum(x^2)))
})
centd <- matrix(unlist(centd), ncol = k)
} else {
message("PAM algorithm is applied when input distance matrix.")
pam.o <- cluster::pam(mat, k, diss = TRUE)
centd <- mat[, pam.o$id.med, drop = FALSE]
}
labs <- rep(NA, nrow(mat))
clsizes <- rep(0, k)
ptord <- switch(method,
maxd = order(-apply(centd, 1, max)),
mind = order(apply(centd, 1, min)),
random = sample.int(nrow(mat)),
elki = order(apply(centd, 1, min) - apply(centd, 1, max)),
stop("unsupported method in 'kmvar'!")
)
for (ii in ptord) {
bestcl <- which.max(centd[ii, ])
labs[ii] <- bestcl
clsizes[bestcl] <- clsizes[bestcl] + 1
if (clsizes[bestcl] >= clsize) {
centd[, bestcl] <- NA
}
}
return(labs)
}
hcbottom <- function(mat,
clsize = NULL,
diss = FALSE,
method = "ward.D") {
stopifnot(is.logical(diss))
method <- match.arg(method, choices = c("ward.D", "average", "complete", "single"))
if (isFALSE(diss)) {
dmat <- as.matrix(dist(mat))
} else {
dmat <- mat
}
clsize.rle <- rle(as.numeric(cut(1:nrow(mat), ceiling(nrow(mat) / clsize))))
clsizes <- clsize.rle$lengths
cpt <- 1
lab <- rep(NA, nrow(mat))
for (clss in clsizes[-1]) {
lab.ii <- which(is.na(lab))
hc.o <- hclust(as.dist(dmat[lab.ii, lab.ii]), method = method)
clt <- 0
ct <- length(lab.ii) - clss
while (max(clt) < clss) {
cls <- cutree(hc.o, ct)
clt <- table(cls)
ct <- ct - 1
}
cl.sel <- which(cls == as.numeric(names(clt)[which.max(clt)]))
lab[lab.ii[head(cl.sel, clss)]] <- cpt
cpt <- cpt + 1
}
lab[is.na(lab)] <- cpt
lab
} |
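# batchtools ClusterFunctions that run each job in a detached Docker container:
# submitJob assembles the 'docker run' command (CPU/memory limits, labels,
# optional walltime via 'timeout'), listJobs/killJob wrap 'docker ps' and
# 'docker kill', and housekeeping removes exited containers after submit/sync.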
makeClusterFunctionsDocker = function(image, docker.args = character(0L), image.args = character(0L), scheduler.latency = 1, fs.latency = 65) {
assertString(image)
assertCharacter(docker.args, any.missing = FALSE)
assertCharacter(image.args, any.missing = FALSE)
user = Sys.info()["user"]
submitJob = function(reg, jc) {
assertRegistry(reg, writeable = TRUE)
assertClass(jc, "JobCollection")
assertIntegerish(jc$resources$ncpus, lower = 1L, any.missing = FALSE, .var.name = "resources$ncpus")
assertIntegerish(jc$resources$memory, lower = 1L, any.missing = FALSE, .var.name = "resources$memory")
timeout = if (is.null(jc$resources$walltime)) character(0L) else sprintf("timeout %i", asInt(jc$resources$walltime, lower = 0L))
cmd = c("docker", docker.args, "run", "--detach=true", image.args,
sprintf("-e DEBUGME='%s'", Sys.getenv("DEBUGME")),
sprintf("-e OMP_NUM_THREADS=%i", jc$resources$omp.threads %??% jc$resources$threads),
sprintf("-e OPENBLAS_NUM_THREADS=%i", jc$resources$blas.threads %??% jc$resources$threads),
sprintf("-e MKL_NUM_THREADS=%i", jc$resources$blas.threads %??% jc$resources$threads),
sprintf("-c %i", jc$resources$ncpus),
sprintf("-m %im", jc$resources$memory),
sprintf("--memory-swap %im", jc$resources$memory),
sprintf("--label batchtools=%s", jc$job.hash),
sprintf("--label user=%s", user),
sprintf("--name=%s_bt_%s", user, jc$job.hash),
image, timeout, "Rscript", stri_join("-e", shQuote(sprintf("batchtools::doJobCollection('%s', '%s')", jc$uri, jc$log.file)), sep = " "))
res = runOSCommand(cmd[1L], cmd[-1L])
if (res$exit.code > 0L) {
housekeeping(reg)
no.res.msg = "no resources available"
if (res$exit.code == 1L && any(stri_detect_fixed(res$output, no.res.msg)))
return(makeSubmitJobResult(status = 1L, batch.id = NA_character_, msg = no.res.msg))
return(cfHandleUnknownSubmitError(stri_flatten(cmd, " "), res$exit.code, res$output))
} else {
if (length(res$output) != 1L) {
matches = which(stri_detect_regex(res$output, "^[[:alnum:]]{64}$"))
if (length(matches) != 1L)
stopf("Command '%s' did not return a long UUID identifier", stri_flatten(cmd, " "))
res$output = res$output[matches]
}
return(makeSubmitJobResult(status = 0L, batch.id = stri_sub(res$output, 1L, 12L)))
}
}
listJobs = function(reg, filter = character(0L)) {
assertRegistry(reg, writeable = FALSE)
args = c(docker.args, "ps", "--format={{.ID}}", "--filter 'label=batchtools'", filter)
res = runOSCommand("docker", args)
if (res$exit.code > 0L)
OSError("Listing of jobs failed", res)
if (length(res$output) == 0L || !nzchar(res$output))
return(character(0L))
res$output
}
housekeeping = function(reg, ...) {
batch.ids = chintersect(listJobs(reg, "--filter 'status=exited'"), reg$status$batch.id)
if (length(batch.ids) > 0L)
runOSCommand("docker", c(docker.args, "rm", batch.ids))
invisible(TRUE)
}
killJob = function(reg, batch.id) {
assertRegistry(reg, writeable = TRUE)
assertString(batch.id)
cfKillJob(reg, "docker", c(docker.args, "kill", batch.id))
}
listJobsRunning = function(reg) {
assertRegistry(reg, writeable = FALSE)
listJobs(reg, sprintf("--filter 'user=%s'", user))
}
makeClusterFunctions(name = "Docker", submitJob = submitJob, killJob = killJob, listJobsRunning = listJobsRunning,
store.job.collection = TRUE, scheduler.latency = scheduler.latency, fs.latency = fs.latency,
hooks = list(post.submit = housekeeping, post.sync = housekeeping))
} |
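# Two-component normal mixture fit to geyser eruption durations (MASS::geyser)
# via maxLik, compared against a restricted model with a likelihood-ratio test.
# mean(~ duration, data = geyser) uses a formula interface such as mosaic's.
# The log-likelihood functions are defined elsewhere; below is a minimal sketch
# of what they might look like. The parameterisation (theta = c(p, mu1, mu2,
# sigma1, sigma2), with the restricted model fixing p = 0.5) is an assumption
# inferred from the start values and df = 1, not the original definitions.
loglik.faithful <- function(theta, x) {  # hypothetical full-model log-likelihood
p <- theta[1]
sum(log(p * dnorm(x, theta[2], theta[4]) + (1 - p) * dnorm(x, theta[3], theta[5])))
}
loglik0.faithful <- function(theta, x) {  # hypothetical restricted model (p fixed at 0.5)
sum(log(0.5 * dnorm(x, theta[1], theta[3]) + 0.5 * dnorm(x, theta[2], theta[4])))
}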
m <- mean( ~ duration, data = geyser)
s <- sd( ~ duration, data = geyser)
ml <- maxLik(loglik.faithful, x = geyser$duration,
start = c(0.5, m - 1, m + 1, s, s))
mle <- coef(ml); mle
loglik.faithful(mle, x = geyser$duration)
logLik(ml)
ml0 <- maxLik(loglik0.faithful, x = geyser$duration,
start = c(m - 1, m + 1, s, s))
mle0 <- coef(ml0); mle0
logLik(ml0)
lrt.stat <- 2 * (logLik(ml) - logLik(ml0)); lrt.stat
1 - pchisq(lrt.stat, df = 1) |
context("mle")
test_that("mle works correctly for default Gaussian models", {
AbaIdtmle <-mle(AbaloneIdt)
BestM <- 1
names(BestM) <- "NModCovC1"
expect_equal(BestModel(AbaIdtmle),BestM)
AllAbnames <- paste(names(AbaloneIdt),rep(c("MidP","LogR"),each=ncol(AbaloneIdt)),sep=".")
Abmeans <- c( 0.5071875, 0.396145833, 0.166666667, 1.078052083, 0.45571875, 0.224302083, 0.3373125,
-1.296114861, -1.534997069, -2.235509665, 0.095119706, -0.699579885, -1.43648703, -1.176694026 )
Abstddevs <- c( 0.126984892, 0.106306307, 0.099868838, 0.463170117, 0.204648509, 0.09817756, 0.159985436,
0.599929088, 0.646096586, 0.872978407, 1.098762777, 1.13577332, 1.108493279, 1.229327926 )
Abcor <- matrix(
c( 1, 0.993982764, 0.46722427, 0.943204961, 0.806954256, 0.877049526, 0.8988425, 0.197447713, 0.215699101, 0.254071722, 0.791917195, 0.767629178, 0.776267869, 0.717014941,
0.993982764, 1, 0.460916165, 0.949745049, 0.801663444, 0.883022118, 0.914306533, 0.156517814, 0.176048065, 0.214254783, 0.75904035, 0.737417731, 0.74382086, 0.677761255,
0.46722427, 0.460916165, 1, 0.507651246, 0.534390369, 0.492541963, 0.377370092, 0.206446863, 0.211721929, 0.621096109, 0.451146829, 0.457607674, 0.438228753, 0.374981242,
0.943204961, 0.949745049, 0.507651246, 1, 0.911504486, 0.949135574, 0.923149776, 0.220628059, 0.237353382, 0.279750403, 0.76107623, 0.751149887, 0.746209619, 0.679318984,
0.806954256, 0.801663444, 0.534390369, 0.911504486, 1, 0.914368882, 0.747872639, 0.444048709, 0.461436449, 0.494848721, 0.810320958, 0.832231663, 0.803650431, 0.723120244,
0.877049526, 0.883022118, 0.492541963, 0.949135574, 0.914368882, 1, 0.825795256, 0.358369732, 0.37524005, 0.398271111, 0.794065249, 0.787335437, 0.807205016, 0.715729116,
0.8988425, 0.914306533, 0.377370092, 0.923149776, 0.747872639, 0.825795256, 1, 0.051592822, 0.068467193, 0.096948743, 0.639028743, 0.609158995, 0.608926341, 0.627478887,
0.197447713, 0.156517814, 0.206446863, 0.220628059, 0.444048709, 0.358369732, 0.051592822, 1, 0.992566651, 0.84448339, 0.696977234, 0.739845117, 0.717739093, 0.667198309,
0.215699101, 0.176048065, 0.211721929, 0.237353382, 0.461436449, 0.37524005, 0.068467193, 0.992566651, 1, 0.835603923, 0.694992692, 0.741356203, 0.717499093, 0.665089517,
0.254071722, 0.214254783, 0.621096109, 0.279750403, 0.494848721, 0.398271111, 0.096948743, 0.84448339, 0.835603923, 1, 0.692460681, 0.721881721, 0.702482351, 0.666151817,
0.791917195, 0.75904035, 0.451146829, 0.76107623, 0.810320958, 0.794065249, 0.639028743, 0.696977234, 0.694992692, 0.692460681, 1, 0.990235553, 0.994727084, 0.960669307,
0.767629178, 0.737417731, 0.457607674, 0.751149887, 0.832231663, 0.787335437, 0.609158995, 0.739845117, 0.741356203, 0.721881721, 0.990235553, 1, 0.986396398, 0.930830052,
0.776267869, 0.74382086, 0.438228753, 0.746209619, 0.803650431, 0.807205016, 0.608926341, 0.717739093, 0.717499093, 0.702482351, 0.994727084, 0.986396398, 1, 0.952600258,
0.717014941, 0.677761255, 0.374981242, 0.679318984, 0.723120244, 0.715729116, 0.627478887, 0.667198309, 0.665089517, 0.666151817, 0.960669307, 0.930830052, 0.952600258, 1),
nrow=2*ncol(AbaloneIdt),ncol=2*ncol(AbaloneIdt)
)
names(Abmeans) <- names(Abstddevs) <- rownames(Abcor) <- colnames(Abcor) <- AllAbnames
expect_equal(mean(AbaIdtmle),Abmeans)
expect_equal(sd(AbaIdtmle),Abstddevs)
expect_equal(cor(AbaIdtmle),Abcor)
} )
test_that("mle computes correct standar errors for default Gaussian models", {
for (Cv in 1:4) {
AbaIdtmle <-mle(AbaloneIdt, CovCase = Cv)
n <- nrow(AbaloneIdt)
q <- 2*ncol(AbaloneIdt)
AbmeanStder <- sd(AbaIdtmle) / sqrt(n)
expect_equal(stdEr(AbaIdtmle)$mu,AbmeanStder)
vcovb_AbmeanStder <- sqrt(diag(vcov(AbaIdtmle)[1:q,1:q]))
names(vcovb_AbmeanStder) <- names(AbmeanStder) <- NULL
expect_equal(vcovb_AbmeanStder,AbmeanStder)
mlecov <- var(AbaIdtmle)
mlecov[mlecov==0.] <- NA
mlevar <- diag(mlecov)
AbcovStder <- sqrt( (mlecov^2 + outer(mlevar,mlevar)) / (n-1) )
expect_equal(stdEr(AbaIdtmle)$Sigma,AbcovStder)
if (Cv==1) {
vcovb_AbcovStder <- matrix(nrow=q,ncol=q)
vcovb_AbcovStder[lower.tri(vcovb_AbcovStder,diag=TRUE)] <- sqrt(diag(vcov(AbaIdtmle)[-(1:q),-(1:q)]))
vcovb_AbcovStder[upper.tri(vcovb_AbcovStder)] <- t(vcovb_AbcovStder)[upper.tri(t(vcovb_AbcovStder))]
dimnames(vcovb_AbcovStder) <- dimnames(AbcovStder)
expect_equal(vcovb_AbcovStder,AbcovStder)
}
}
} ) |
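# Ordinary Generalized Stochastic Restricted Ridge Estimator: for a single ridge
# parameter k it returns coefficient estimates, standard errors, t statistics,
# p values and the scalar MSE; for a vector of k values it instead returns the
# MSE profile over k (for plotting).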
ogsrre<-function (formula, r, R, dpn, delt, k, data = NULL, na.action,
...)
{
k <- as.matrix(k)
k1 <- k[1L]
ogsrres <- function(formula, r, R, dpn, delt, k1, data = NULL,
na.action, ...) {
cal <- match.call(expand.dots = FALSE)
mat <- match(c("formula", "data", "na.action"), names(cal))
cal <- cal[c(1L, mat)]
cal[[1L]] <- as.name("model.frame")
cal <- eval(cal)
y <- model.response(cal)
md <- attr(cal, "terms")
x <- model.matrix(md, cal, contrasts)
s <- t(x) %*% x
xin <- solve(s)
r <- as.matrix(r)
RC <- matrix(R, NCOL(x))
RR <- t(RC)
if (is.matrix(R))
RR <- R
else RR <- RR
if (length(dpn) == 1L)
shi <- dpn
else if (is.matrix(dpn))
shi <- dpn
else shi <- diag(dpn)
de1 <- as.matrix(delt)
I <- diag(NCOL(x))
bb <- xin %*% t(x) %*% y
ev <- (t(y) %*% y - t(bb) %*% t(x) %*% y)/(NROW(x) -
NCOL(x))
ev <- diag(ev)
w1 <- solve(s/ev + t(RR) %*% solve(shi) %*% RR)
w2 <- (t(x) %*% y)/ev + t(RR) %*% solve(shi) %*% r
bm <- w1 %*% w2
tk <- solve(s + k1 * I) %*% s
bsrr <- tk %*% bm
bsrrve<-as.vector(bsrr)
j<-0
sumsq<-0
for (j in 1:NROW(bsrrve))
{
sumsq=(bsrrve[j])^2+sumsq
}
cval<-sumsq
ahat<-bsrr%*%t(bsrr)%*%solve(ev*xin+bsrr%*%t(bsrr))
bogsrre<-ahat%*%bb
colnames(bogsrre) <- c("Estimate")
dbd <- ev*(ahat%*%xin%*%t(ahat))
Standard_error <- sqrt(diag(abs(dbd)))
rdel <- matrix(delt, NROW(RR))
lenr <- length(RR)
dlpt <- diag(RR %*% xin %*% t(RR))
if (lenr == ncol(RR))
ilpt <- sqrt(solve(abs(dlpt)))
else ilpt <- sqrt(solve(diag(abs(dlpt))))
upt <- RR %*% bsrr
tb <- t(upt)
t_statistic <- ((tb - t(rdel)) %*% ilpt)/sqrt(ev)
tst <- t(2L * pt(-abs(t_statistic), df <- (NROW(x) -
NCOL(x))))
pvalue <- c(tst, rep(NA, (NCOL(x) - NROW(RR))))
dbd <- ev*(ahat%*%xin%*%t(ahat))
rval<-(1/cval)*bsrr%*%t(bsrr)
mse1 <-cval^2*ev*tr(ev*rval*solve(ev*xin+cval*rval)%*%xin%*%solve(ev*xin+cval*rval)%*%rval)+ev^2*t(bsrr)%*%solve(ev*I+cval*rval%*%s)%*%solve(ev*I+cval*rval%*%s)%*%bsrr
mse1<-as.vector(mse1)
mse1 <- round(mse1, digits <- 4L)
names(mse1) <- c("MSE")
t_statistic <- c(t_statistic, rep(NA, (NCOL(x) - NROW(RR))))
ans1 <- cbind(bogsrre, Standard_error, t_statistic, pvalue)
ans <- round(ans1, digits <- 4L)
anw <- list(`*****Ordinary Generalized Stochastic Restricted Ridge Estimator*****` = ans,
`*****Mean square error value*****` = mse1)
return(anw)
}
npt <- ogsrres(formula, r, R, dpn, delt, k1, data, na.action)
plotogsrre <- function(formula, r, R, dpn, delt, k, data = NULL,
na.action, ...) {
j <- 0
arr <- 0
for (j in 1:nrow(k)) {
ogsrrem <- function(formula, r, R, dpn, delt, k, data,
na.action, ...) {
cal <- match.call(expand.dots = FALSE)
mat <- match(c("formula", "data", "na.action"),
names(cal))
cal <- cal[c(1L, mat)]
cal[[1L]] <- as.name("model.frame")
cal <- eval(cal)
y <- model.response(cal)
md <- attr(cal, "terms")
x <- model.matrix(md, cal, contrasts)
s <- t(x) %*% x
xin <- solve(s)
r <- as.matrix(r)
RC <- matrix(R, NCOL(x))
RR <- t(RC)
if (is.matrix(R))
RR <- R
else RR <- RR
if (length(dpn) == 1L)
shi <- dpn
else if (is.matrix(dpn))
shi <- dpn
else shi <- diag(dpn)
de1 <- as.matrix(delt)
I <- diag(NCOL(x))
bb <- xin %*% t(x) %*% y
ev <- (t(y) %*% y - t(bb) %*% t(x) %*% y)/(NROW(x) -
NCOL(x))
ev <- diag(ev)
w1 <- solve(s/ev + t(RR) %*% solve(shi) %*% RR)
w2 <- (t(x) %*% y)/ev + t(RR) %*% solve(shi) %*% r
bm <- w1 %*% w2
tk <- solve(s + k * I) %*% s
bsrr <- tk %*% bm
bsrrve<-as.vector(bsrr)
j<-0
sumsq<-0
for (j in 1:NROW(bsrrve))
{
sumsq=(bsrrve[j])^2+sumsq
}
cval<-sumsq
ahat<-bsrr%*%t(bsrr)%*%solve(ev*xin+bsrr%*%t(bsrr))
dbd <-ev*(ahat%*%xin%*%t(ahat))
rval<-(1/cval)*bsrr%*%t(bsrr)
mse1 <-cval^2*ev*tr(ev*rval*solve(ev*xin+cval*rval)%*%xin%*%solve(ev*xin+cval*rval)%*%rval)+ev^2*t(bsrr)%*%solve(ev*I+cval*rval%*%s)%*%solve(ev*I+cval*rval%*%s)%*%bsrr
mse1<-as.vector(mse1)
return(mse1)
}
arr[j] <- ogsrrem(formula, r, R, dpn, delt, k[j], data,
na.action)
}
MSE <- arr
Parameter <- k
pvl <- cbind(Parameter, MSE)
colnames(pvl) <- c("Parameter", "MSE")
sval <- pvl
return(sval)
}
psrre <- plotogsrre(formula, r, R, dpn, delt, k, data, na.action)
if (nrow(k) > 1L)
val <- psrre
else val <- npt
val
} |
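# Rename taxa in a phyloseq object as "<taxon id><sep><best available
# classification>", filling unclassified ranks with the closest classified
# higher rank (and combining Genus.Species when both ranks are present).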
add_besthit <- function(x, sep=":"){
Class<-Domain<- Family<- Genus<- Genus.Species<- NULL
Order<- Phylum<- Species<-NULL
x.nw <- x
if(length(rank_names(x.nw))== 6){
colnames(tax_table(x.nw)) <- c("Domain", "Phylum", "Class", "Order", "Family", "Genus")
}
if(length(rank_names(x.nw))==7){
colnames(tax_table(x.nw)) <- c("Domain", "Phylum", "Class", "Order", "Family", "Genus", "Species")
}
tax.tib <- .get_taxa_tib_unite(x)
tax.tib <- tax.tib %>%
dplyr::mutate(Domain =ifelse(is.na(Domain), "Unclassified", Domain),
Phylum =ifelse(is.na(Phylum), Domain, Phylum),
Class =ifelse(is.na(Class), Phylum, Class),
Order =ifelse(is.na(Order), Class, Order),
Family =ifelse(is.na(Family), Order, Family),
Genus =ifelse(is.na(Genus), Family, Genus))
if(length(rank_names(x))==7){
tax.tib <- tax.tib %>%
dplyr::mutate(Species =ifelse(is.na(Species), Genus, Species))
}
best_hit <- paste0(taxa_names(x), sep,tax.tib[,ncol(tax.tib)])
taxa_names(x) <- best_hit
return(x)
}
.get_taxa_tib_unite <- function(x){
Genus<- Species <- Genus.Species<- NULL
tax.tib <- tax_table(x) %>%
as.matrix() %>%
as.data.frame()
if(any(rank_names(x) == "Species") && any(rank_names(x) == "Genus")){
tax.tib <- tax.tib %>%
dplyr::mutate(Genus.Species = ifelse(!is.na(Species),
paste0(Genus, ".", Species), Species)) %>%
dplyr::select(-Species) %>%
dplyr::rename(Species = Genus.Species)
}
return(tax.tib)
} |
alfaridge.plot <- function(y, x, a, lambda = seq(0, 5, by = 0.1) ){
z <- alfa(x, a, h = TRUE)$aff
Compositional::ridge.plot(y, z, lambda = lambda )
} |
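# usdarnass vignette setup and examples: query USDA NASS Quick Stats for data,
# parameter values and record counts (chunks are only evaluated when the
# NASS_KEY environment variable is set).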
library("knitr")
opts_chunk$set(
collapse = TRUE,
eval = !(Sys.getenv("NASS_KEY") == ""),
comment = "
)
library("usdarnass")
nass_data(year = 2012,
short_desc = "AG LAND, INCL BUILDINGS - ASSET VALUE, MEASURED IN $",
county_name = "WAKE",
state_name = "NORTH CAROLINA")
nass_param("source_desc")
nass_param("group_desc",
state_name = "OHIO",
agg_level_desc = "COUNTY",
year = 2000)
nass_param("commodity_desc",
group_desc = "dairy",
state_name = "OHIO",
agg_level_desc = "COUNTY",
year = ">2000")
nass_count()
nass_count(commodity_desc = "AG LAND",
agg_level_desc = "COUNTY")
nass_data(commodity_desc = "AG LAND",
agg_level_desc = "COUNTY")
years <- 2000:2017
sapply(years, function(x) nass_count(year = x,
commodity_desc = "AG LAND",
agg_level_desc = "COUNTY"))
agland_params <- nass_param("short_desc",
commodity_desc = "AG LAND",
agg_level_desc = "COUNTY")
agland_params
sapply(agland_params, function(x) nass_count(short_desc = x,
commodity_desc = "AG LAND",
agg_level_desc = "COUNTY"))
agland_domain <- nass_param("domain_desc",
short_desc = "AG LAND - TREATED, MEASURED IN ACRES",
commodity_desc = "AG LAND",
agg_level_desc = "COUNTY")
sapply(agland_domain, function(x) nass_count(domain_desc = x,
short_desc = "AG LAND - TREATED, MEASURED IN ACRES",
commodity_desc = "AG LAND",
agg_level_desc = "COUNTY")) |
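# print method for single-visit site-occupancy model fits ("svocc" objects):
# shows the call, occurrence and detection coefficients, and warns when the
# model did not converge.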
print.svocc <-
function (x, digits, ...)
{
if (missing(digits))
digits <- max(3, getOption("digits") - 3)
cat("\nCall:", deparse(x$call,
width.cutoff = floor(getOption("width") * 0.85)), "", sep = "\n")
cat(paste("Single visit site-occupancy model", sep = ""))
pen <- if (x$penalized)
"Penalized " else ""
cat(paste("\n", pen, "Maximum Likelihood estimates (", x$method, " method)\n\n", sep = ""))
cat(paste("Coefficients for occurrence (", x$link$sta, " link):\n", sep = ""))
print.default(format(x$coefficients$sta, digits = digits),
print.gap = 2, quote = FALSE)
cat(paste("Coefficients for detection (", x$link$det, " link):\n", sep = ""))
print.default(format(x$coefficients$det, digits = digits),
print.gap = 2, quote = FALSE)
Conv <- if (!x$penalized)
x$converged[1] else x$converged[2]
if (!Conv)
cat("Warning:\n Model did not converge\n\n")
cat("\n")
invisible(x)
} |
as.transactions <- function(x){
as(as.matrix(x), "transactions")
} |
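# Data import and management examples: tibbles and dplyr pipelines, readr/rio,
# SQL databases (RSQLite, odbc, RODBC, dplyr database backends), web scraping
# and JSON APIs, graphs, out-of-core and parallel processing, and faster random
# forests (ranger, Spark MLlib, H2O).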
library(tibble)
credit <- read.csv("credit.csv")
credit_tbl <- as_tibble(credit)
credit_tbl
library(dplyr)
credit <- as_tibble(read.csv("credit.csv"))
credit %>%
filter(age >= 21) %>%
mutate(years_loan_duration =
months_loan_duration / 12) %>%
select(default, years_loan_duration) %>%
group_by(default) %>%
summarize(mean_duration = mean(years_loan_duration))
library(readr)
credit <- read_csv("credit.csv")
library(rio)
credit <- import("credit.csv")
export(credit, "credit.xlsx")
convert("credit.csv", "credit.dta")
library(DBI)
library(RSQLite)
con <- dbConnect(RSQLite::SQLite(), "credit.sqlite3")
dbListTables(con)
res <- dbSendQuery(con, "SELECT * FROM credit WHERE age >= 45")
credit_age45 <- dbFetch(res)
summary(credit_age45$age)
dbClearResult(res)
dbDisconnect(con)
library(DBI)
con <- dbConnect(odbc::odbc(), "my_data_source_name")
library(DBI)
con <- dbConnect(odbc::odbc(),
database = "my_database",
uid = "my_username",
pwd = "my_password",
host = "my.server.address",
port = 1234)
library(DBI)
con <- dbConnect(RSQLite::SQLite(), "credit.sqlite3")
credit_tbl <- con %>% tbl("credit")
library(dplyr)
credit_tbl %>%
filter(age >= 45) %>%
select(age) %>%
collect() %>%
summary()
library(RODBC)
my_db <- odbcConnect("my_dsn")
my_db <- odbcConnect("my_dsn", uid = "my_username", pwd = "my_password")
my_query <- "select * from my_table where my_value = 1"
results_df <- sqlQuery(channel = my_db, query = my_query, stringsAsFactors = FALSE)
odbcClose(my_db)
library(dplyr)
credit <- read_csv("credit.csv")
credit_db_conn <- src_sqlite("credit.sqlite3", create = TRUE)
copy_to(credit_db_conn, credit, temporary = FALSE)
credit_db_conn <- src_sqlite("credit.sqlite3")
credit_tbl <- tbl(credit_db_conn, "credit")
select(credit_tbl, amount)
mydata <- read.csv("http://www.mysite.com/mydata.csv")
mytext <- readLines("http://www.mysite.com/myfile.txt")
download.file("http://www.mysite.com/myfile.zip", "myfile.zip")
library(RCurl)
packt_page <- getURL("https://www.packtpub.com")
str(packt_page, nchar.max = 200)
library(httr)
packt_page <- GET("https://www.packtpub.com")
str(packt_page, max.level = 1)
str(content(packt_page, type = "text"), nchar.max = 200)
library(rvest)
packt_page <- read_html("https://www.packtpub.com")
html_node(packt_page, "title") %>% html_text()
library(rvest)
cran_ml <- read_html("http://cran.r-project.org/web/views/MachineLearning.html")
cran_ml
ml_packages <- html_nodes(cran_ml, "li a")
head(ml_packages, n = 5)
ml_packages %>% html_text() %>% head()
library(XML)
library(xml2)
library(jsonlite)
ml_book <- list(book_title = "Machine Learning with R", author = "Brett Lantz")
toJSON(ml_book)
ml_book_json <- "{
\"title\": \"Machine Learning with R\",
\"author\": \"Brett Lantz\",
\"publisher\": {
\"name\": \"Packt Publishing\",
\"url\": \"https://www.packtpub.com\"
},
\"topics\": [\"R\", \"machine learning\", \"data mining\"],
\"MSRP\": 54.99
}"
ml_book_r <- fromJSON(ml_book_json)
str(ml_book_r)
library(httr)
music_search <- GET("https://itunes.apple.com/search",
query = list(term = "Beatles",
media = "music",
entity = "album",
limit = 10))
music_search
library(jsonlite)
music_results <- fromJSON(content(music_search))
str(music_results)
music_results$results$collectionName
library(igraph)
karate <- read.graph("karate.txt", "edgelist", directed = FALSE)
plot(karate)
degree(karate)
betweenness(karate)
library(data.table)
credit <- fread("credit.csv")
credit[credit_history == "good", mean(amount)]
credit[, mean(amount), by=.(credit_history)]
library(ff)
credit <- read.csv.ffdf(file = "credit.csv", header = TRUE)
mean(credit$amount)
library(ffbase)
mean(credit$amount)
system.time(rnorm(1000000))
library(parallel)
detectCores()
system.time(l1 <- unlist(mclapply(1:10, function(x) {
rnorm(1000000)}, mc.cores = 1)))
system.time(l2 <- unlist(mclapply(1:10, function(x) {
rnorm(1000000)}, mc.cores = 2)))
system.time(l4 <- unlist(mclapply(1:10, function(x) {
rnorm(1000000) }, mc.cores = 4)))
system.time(l8 <- unlist(mclapply(1:10, function(x) {
rnorm(1000000) }, mc.cores = 8)))
cl1 <- makeCluster(4)
clusterCall(cl1, function() { Sys.info()["nodename"] })
clusterCall(cl1, function() { print("ready!") })
clusterApply(cl1, c('A', 'B', 'C', 'D'),
function(x) { paste("Cluster", x, "ready!") })
stopCluster(cl1)
library(foreach)
system.time(l1 <- rnorm(100000000))
system.time(l4 <- foreach(i = 1:4, .combine = 'c')
%do% rnorm(25000000))
library(doParallel)
detectCores()
registerDoParallel(cores = 4)
system.time(l4p <- foreach(i = 1:4, .combine = 'c')
%dopar% rnorm(25000000))
stopImplicitCluster()
library(caret)
credit <- read.csv("credit.csv",, stringsAsFactors = TRUE)
system.time(train(default ~ ., data = credit, method = "rf",
trControl = trainControl(allowParallel = FALSE)))
library(doParallel)
registerDoParallel(cores = 8)
system.time(train(default ~ ., data = credit, method = "rf"))
library(sparklyr)
spark_install(version = "2.1.0")
spark_cluster <- spark_connect(master = "local")
credit_spark <- spark_read_csv(spark_cluster, "credit.csv")
splits <- sdf_partition(credit_spark,
train = 0.75, test = 0.25,
seed = 123)
credit_rf <- splits$train %>%
ml_random_forest(default ~ .)
pred <- ml_predict(credit_rf, splits$test)
ml_binary_classification_evaluator(pred, metric_name = "areaUnderROC")
library(ranger)
credit <- read.csv("credit.csv", stringsAsFactors = TRUE)
m <- ranger(default ~ ., data = credit,
num.trees = 500,
mtry = 4)
p <- predict(m, credit)
head(p$predictions)
library(h2o)
h2o_instance <- h2o.init()
credit.hex <- h2o.uploadFile("credit.csv")
h2o.randomForest(y = "default",
training_frame = credit.hex,
ntrees = 500,
seed = 123) |
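# Modular arithmetic for very large integers ("vli" objects): sum, subtraction,
# multiplication, exponentiation, inverse and division modulo 'mod', with S3
# methods accepting either vli objects or 32-bit integers.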
summod <- function(x, y, mod) UseMethod("summod")
summod.default <- function(x, y, mod) stop ("x, y and mod have to be specified as vli objects or 32 bits integers")
summod.numeric <- function(x, y, mod){
if ( abs(x) < 2147483648 ) x = vliC(toString(x))
else stop("The x object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return( divbaseC( sumC(x,y), mod)[[2]] )
}
summod.vli <- function(x, y, mod){
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return( divbaseC( sumC(x,y), mod)[[2]] )
}
submod <- function(x, y, mod) UseMethod("submod")
submod.default <- function(x, y, mod) stop ("x, y and mod have to be specified as vli objects or 32 bits integers")
submod.numeric <- function(x, y, mod){
if ( abs(x) < 2147483648 ) x = vliC(toString(x))
else stop("The x object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return( divbaseC( subC(x,y), mod)[[2]] )
}
submod.vli <- function(x, y, mod){
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return( divbaseC( subC(x,y), mod)[[2]] )
}
mulmodbase <- function(x, y, mod){
if ( max(c(x$length, y$length) ) < 40 ){
return(divbaseC( ( mulC( divbaseC(x,mod)[[2]], divbaseC(y,mod)[[2]] ) ),mod)[[2]] )
}
else{
return(divbaseC( ( karC( divbaseC(x,mod)[[2]], divbaseC(y,mod)[[2]] ) ),mod)[[2]] )
}
}
mulmod <- function(x, y, mod) UseMethod("mulmod")
mulmod.default <- function(x, y, mod) stop("x, y and mod have to be specified as vli objects or 32 bits integers")
mulmod.numeric <- function(x, y, mod){
if ( abs(x) < 2147483648 ){
x = vliC(toString(x))
}
else stop("The x object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(mulmodbase(x, y, mod))
}
mulmod.vli <- function(x, y, mod){
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(mulmodbase(x, y, mod))
}
is.even <- function(x){
((tail(x$value, 1)) %% 2) == 0
}
powmodbase <- function(x, n, mod){
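# binary exponentiation (square-and-multiply) modulo 'mod'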
res = .pkgenv$one
x = divbaseC(x, mod)[[2]]
while( gtC(n, .pkgenv$zero) ){
if ( !is.even(n) ){
res = mulmodbase(res, x, mod)
}
n = divbaseC(n, .pkgenv$two)[[1]]
x = mulmod(x, x, mod)
}
res
}
powmod <- function(x, n, mod) UseMethod("powmod")
powmod.default <- function(x, n, mod) stop("x, y and mod have to be specified as vli objects or 32 bits integers")
powmod.numeric <- function(x, n, mod){
if ( abs(x) < 2147483648 ){
x = vliC(toString(x))
}
else stop("The x object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(n) ){
if ( is.numeric(n) & (abs(n) < 2147483648) ){
n = vliC(toString(n))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(powmodbase(x, n, mod))
}
powmod.vli <- function(x, n, mod){
if ( !is.vli(n) ){
if ( is.numeric(n) & (abs(n) < 2147483648) ){
n = vliC(toString(n))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(powmodbase(x, n, mod))
}
invmodbase <- function(x, n){
if ( neqC( gcdbase(x, n), .pkgenv$one) ){
stop("x and n are not coprimes so it does not exists a multiplicative inverse of x in the ring of integer modulo n")
}
else return(exteuclidbase(x, n)[[2]])
}
invmod <- function(x, n) UseMethod("invmod")
invmod.default <- function(x, n) stop("x and n have to be specified as vli objects or 32 bits integers")
invmod.numeric <- function(x, n){
if ( abs(x) < 2147483648 ){
if ( x >= 0 ){
x = vliC(toString(x))
}
else stop("invmod is only defined for positive integer numbers")
}
else stop("The first object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(n) ){
if ( is.numeric(n) & (abs(n) < 2147483648) ){
if ( n >= 0 ){
n = vliC(toString(n))
}
else stop("invmod is only defined for positive integer numbers")
}
else stop("The second object passed as argument is neither a vli object nor a 32 bits integer")
}
else if ( n$sign == -1 ) stop("invmod is only defined for positive integer numbers")
if ( eqC(n, .pkgenv$zero) ) stop("n can not be equal to zero")
return(invmodbase(x, n))
}
invmod.vli <- function(x, n){
if ( x$sign == -1 ) stop("invmod is only defined for positive integer numbers")
if ( !is.vli(n) ){
if ( is.numeric(n) & (abs(n) < 2147483648) ){
if ( n >= 0 ){
n = vliC(toString(n))
}
else stop("invmod is only defined for positive integer numbers")
}
else stop("The second object passed as argument is neither a vli object nor a 32 bits integer")
}
else if ( n$sign == -1 ) stop("invmod is only defined for positive integer numbers")
if ( eqC(n, .pkgenv$zero) ) stop("n can not be equal to zero")
return(invmodbase(x, n))
}
divmodbase <- function(x, y, mod){
if ( neqC( gcdbase(y, mod), .pkgenv$one) ){
stop("y and mod are not coprimes so it does not exists a multiplicative inverse of y in the ring of integer modulo mod and modular division is not defined")
}
mulbaseC(invmodbase(y, mod), x) %% mod
}
divmod <- function(x, y, mod) UseMethod("divmod")
divmod.default <- function(x, y, mod) stop("x, y and mod have to be specified as vli objects or 32 bits integers")
divmod.numeric <- function(x, y, mod){
if ( abs(x) < 2147483648 ){
x = vliC(toString(x))
}
else stop("The x object passed as argument is neither a vli object nor a 32 bits integer")
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(divmodbase(x, y, mod))
}
divmod.vli <- function(x, y, mod){
if ( !is.vli(y) ){
if ( is.numeric(y) & (abs(y) < 2147483648) ){
y = vliC(toString(y))
}
else stop("The y object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( !is.vli(mod) ){
if ( is.numeric(mod) & (abs(mod) < 2147483648) ){
mod = vliC(toString(mod))
}
else stop("The mod object passed as argument is neither a vli object nor a 32 bits integer")
}
if ( eqC(mod, .pkgenv$zero) ) stop("mod argument can not be equal to zero")
return(divmodbase(x, y, mod))
} |
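# Set options("repos") from named repository sets, name patterns or URLs:
# parseRepos() expands bracketed set names and super sets, resolves "@...@"
# placeholders, optionally falls back to a default CRAN mirror, and
# de-duplicates the result.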
useRepos <- function(repos=NULL, where=c("before", "after", "replace"), unique=TRUE, fallback=TRUE, ...) {
if (is.null(repos)) {
return(options("repos"))
}
if (is.list(repos)) {
old <- options(repos)
return(old)
}
repos <- parseRepos(sets=repos, where=where, fallback=fallback, ...)
if (unique) {
names <- names(repos)
if (length(names) > 0L) {
dups <- (nzchar(names) & duplicated(names))
repos <- repos[!dups]
}
}
reposT <- grep("^@[^@]+@$", repos, value=TRUE, invert=TRUE)
isUrl <- isUrl(reposT)
bad <- repos[!isUrl]
if (length(bad) > 0L) {
stop("Detected reposities that are not specified as URLs: ", bad)
}
old <- options(repos=repos)
invisible(old)
}
parseRepos <- function(sets=NULL, where=c("before", "after", "replace"), fallback=TRUE, ...) {
reposKnownToR <- function() {
p <- file.path(Sys.getenv("HOME"), ".R", "repositories")
if (!file.exists(p)) p <- file.path(R.home("etc"), "repositories")
ns <- getNamespace("tools")
.read_repositories <- get(".read_repositories", envir=ns)
a <- .read_repositories(p)
repos <- a$URL
names <- rownames(a)
names(repos) <- names
repos
}
reposCustom <- function() {
c("braju.com"="https://braju.com/R")
}
reposFallback <- function() {
c("CRAN"="https://cran.r-project.org")
}
reposAll <- function() {
c(reposKnownToR(), reposCustom())
}
superPattern <- function(name="all") {
known <- list(
CRAN = "^(CRAN.*)$",
BioC = "^(BioC.*)$",
all = "",
current = "<current>"
)
known$`mainstream` <- c(known$CRAN, known$BioC)
known$`braju.com` <- c("^braju[.]com$", known$mainstream)
known$`R-Forge` <- c("^R-Forge$", known$mainstream)
known$`rforge.net` <- c("^rforge[.]net$", known$mainstream)
if (!is.element(name, names(known)))
return(NULL)
known[[name]]
}
reposSubst <- function(repos, known=repos) {
pattern <- "^@[^@]+@$"
subs <- grep(pattern, repos)
if (length(subs) > 0L) {
known <- grep(pattern, known, value=TRUE, invert=TRUE)
names <- names(repos)[subs]
reposT <- known[names]
.stop_if_not(length(reposT) == length(subs))
ok <- !is.na(reposT)
reposT <- reposT[ok]
if (length(reposT) > 0L) {
idxs <- match(names(reposT), names)
subs <- subs[idxs]
repos[subs] <- reposT
}
}
repos
}
if (is.null(sets)) return(getOption("repos"))
.stop_if_not(is.character(sets))
where <- match.arg(where)
repos00 <- c(getOption("repos"), reposAll())
if (where == "after") {
repos0 <- repos00
} else if (where == "before") {
repos0 <- c(reposAll(), getOption("repos"))
} else {
repos0 <- reposAll()
}
sets <- unlist(strsplit(sets, split=",", fixed=TRUE), use.names=FALSE)
names <- names(sets)
sets <- sapply(sets, FUN=trim)
names(sets) <- names
if (is.character(sets)) {
repos <- c()
patternS <- "^\\[(.*)\\]$"
for (kk in seq_along(sets)) {
set <- sets[kk]
if (regexpr(patternS, set) != -1L) {
pattern <- gsub(patternS, "\\1", set)
if (regexpr(patternS, pattern) != -1L) {
name <- gsub(patternS, "\\1", pattern)
pattern <- superPattern(name)
if (length(pattern) == 0L) {
stop("Unknown repository super set: ", name)
}
}
if (identical(pattern, "<current>")) {
repos <- getOption("repos")
} else {
keep <- lapply(pattern, FUN=grep, names(repos0))
keep <- unique(unlist(keep))
repos <- c(repos, repos0[keep])
}
} else if (isUrl(set)) {
repos <- c(repos, set)
} else {
repos <- c(repos, repos0[set])
}
}
}
repos <- reposSubst(repos)
repos <- reposSubst(repos, known=repos00)
if (fallback) {
repos0 <- repos
repos <- reposSubst(repos, known=reposFallback())
if (!identical(repos, repos0)) {
idxs <- which(repos0 != repos)
diff <- sprintf("%s -> %s", sQuote(repos0[idxs]), sQuote(repos[idxs]))
keys <- names(repos[idxs])
if (!is.null(keys)) diff <- sprintf("%s: %s", keys, diff)
diff <- paste(diff, collapse=", ")
warning("Had to fall back to a set of predefined repositories (please make sure to set your package repositories properly, cf. ?setRepositories): ", diff)
}
}
keys <- paste(names(repos), repos, sep=":")
repos <- repos[!duplicated(keys)]
.stop_if_not(is.character(repos))
repos
} |
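# crimelinkage vignette: build linked/unlinked crime pairs, fit a naive Bayes
# model for the log Bayes factor of linkage, then use it for hierarchical
# clustering of unsolved crimes, crime-series identification, and inspection of
# Bayesian clustering output from a previous MCMC run.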
knitr::opts_chunk$set(collapse = TRUE, echo = TRUE, comment = "#>",
cache = FALSE)
library(crimelinkage)
library(crimelinkage)
data(crimes)
data(offenders)
seriesData = makeSeriesData(crimedata=crimes,offenderTable=offenders)
set.seed(1)
allPairs = makePairs(seriesData,thres=365,m=40)
varlist = list( spatial = c("X", "Y"),
temporal = c("DT.FROM","DT.TO"),
categorical = c("MO1", "MO2", "MO3"))
X = compareCrimes(allPairs,crimedata=crimes,varlist=varlist,binary=TRUE)
Y = ifelse(allPairs$type=='linked',1,0)
set.seed(3)
train = sample(c(TRUE,FALSE),nrow(X),replace=TRUE,prob=c(.7,.3))
test = !train
D.train = data.frame(X[train,],Y=Y[train])
vars = c("spatial","temporal","tod","dow","MO1","MO2","MO3")
fmla.all = as.formula(paste("Y ~ ", paste(vars, collapse= "+")))
NB = naiveBayes(fmla.all,data=D.train,weights=weight,df=10,nbins=15,partition='quantile')
estimateBF <- function(X){
predict(NB,newdata=X)
}
unsolved = subset(crimes, !crimeID %in% seriesData$crimeID)
tree = crimeClust_hier(unsolved,varlist,estimateBF,linkage='average', binary=TRUE)
plot_hcc(tree,yticks=seq(-2,6,by=2),type="triangle",hang=.05,main="Average Linkage")
subset(crimes,crimeID %in% c('C:431','C:460'))
cp = clusterPath('C:429',tree)
cp[cp$logBF>0,]
solved = subset(crimes, crimeID %in% seriesData$crimeID)
unsolved = subset(crimes, !crimeID %in% seriesData$crimeID)
crime = unsolved[2,]
crime
results = seriesID(crime,solved,seriesData,varlist,estimateBF)
head(results$score)
subset(results$groups,group=='12')
subset(results$groups,group=='154')
subset(results$groups,group=='9')
crime4 = unsolved[4,]
results4 = seriesID(crime4,solved,seriesData,varlist,estimateBF)
head(results4$score)
pairs = data.frame(i1=unsolved$crimeID[4],i2=unique(unsolved$crimeID[-4]))
X = compareCrimes(pairs,unsolved,varlist,binary=TRUE)
score = data.frame(pairs,logBF=estimateBF(X))
head(score[order(-score$logBF),])
C429 = which(unsolved$crimeID %in% 'C:429')
pairs = data.frame(i1=unsolved$crimeID[C429],i2=unique(unsolved$crimeID[-C429]))
X = compareCrimes(pairs,unsolved,varlist,binary=TRUE)
score = data.frame(pairs,logBF=estimateBF(X))
head(score[order(-score$logBF),])
cp = clusterPath('C:429',tree)
cp[cp$logBF>0,]
load("MCMC-results.RData")
library(fields)
ind.unsolved = which(is.na(A$CG))
n = nrow(A)
fields::image.plot(1:n,ind.unsolved,pp[1:n,ind.unsolved],
xlab="Crime",ylab="Unsolved Crime",
main="Probability crimes are linked")
unsolved.probs = apply(pp[ind.unsolved,],1,max,na.rm=TRUE)
plot(ind.unsolved,unsolved.probs,xlab="unsolved crime",ylab='maximum probability of linkage')
abline(h=0.25)
ind = ind.unsolved[unsolved.probs > 0.25]
investigate = as.character(A$crimeID[ind])
investigate
bp = bayesProb(pp[A$crimeID %in% "C:417"])
bp$crimeID = A$crimeID[bp$index]
bp$CG = A$CG[bp$index]
head(bp) |
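# Validate a pedigree data frame: required columns (FamID, ID, dadID, momID,
# sex, affected), non-missing IDs, affection coding, parental sex codes
# (dads: sex = 0, moms: sex = 1), and founder/non-founder parent rules.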
check_ped <- function(ped_file){
if (class(ped_file) != "data.frame") {
stop("ped_file must be a data.frame with the following variables:\n FamID, ID, dadID, momID, sex, affected")
}
if (!"FamID" %in% colnames(ped_file) |
!"ID" %in% colnames(ped_file) |
!"dadID" %in% colnames(ped_file) |
!"momID" %in% colnames(ped_file) |
!"sex" %in% colnames(ped_file) |
!"affected" %in% colnames(ped_file)) {
stop('please provide a data.frame with the following variables:\n FamID, ID, dadID, momID, sex, affected')
}
if(any(is.na(ped_file$ID))) {
stop('ID contains missing values.\n Please ensure all individuals have a valid ID.')
}
if (any(!ped_file$affected %in% c(TRUE, FALSE, NA))) {
stop('For the variable "affected" please use the following convention
TRUE = affected by disease
FALSE = unaffected
NA = unknown disease-affection status.\n')
}
moms <- unique(ped_file$momID[!is.na(ped_file$momID)])
dads <- unique(ped_file$dadID[!is.na(ped_file$dadID)])
if (any(ped_file$sex[which(ped_file$ID %in% moms)] != 1) |
any(ped_file$sex[which(ped_file$ID %in% dads)] != 0)){
dad_rows <- which(ped_file$ID %in% dads)
mom_rows <- which(ped_file$ID %in% moms)
wrong_sex <- c(ped_file$ID[dad_rows[ped_file$sex[dad_rows] != 0]],
ped_file$ID[mom_rows[ped_file$sex[mom_rows] != 1]])
stop(paste0('Sex improperly specified for ID: ', sep = '', wrong_sex, '. Please ensure that for males: sex = 0; and for females: sex = 1.'))
}
if (any(!moms %in% ped_file$ID) | any(!dads %in% ped_file$ID)) {
wrong_par <- c(ped_file$ID[which(ped_file$momID %in% moms[!moms %in% ped_file$ID])],
ped_file$ID[which(ped_file$dadID %in% dads[!dads %in% ped_file$ID])])
stop(paste0('ID: ', sep = '', wrong_par, '. Non-founders must have a mother and a father. Founders have neither.'))
}
if (any(!is.na(ped_file$momID[is.na(ped_file$dadID)])) |
any(!is.na(ped_file$dadID[is.na(ped_file$momID)]))) {
stop("Non-founders must have both a mother and a father, while founders have missing momID and dadID.")
}
} |
"_PACKAGE"
.onAttach <- function(libname, pkgname){
if (!has_cxx17())
packageStartupMessage("The kdtools package was compiled without c++17 and will have reduced functionality\n")
} |
context("test-hierarchy.R")
test_that("test of hierarchy method", {
expect_equal(flag_hierarchy(c("p","b","s","b","u","e","b"), flag_list = c("e","s","t")),"e")
expect_equal(flag_hierarchy(c("p","b","s","b","u","b"), flag_list = c("e","s","t")),"s")
expect_equal(flag_hierarchy(c("p","b","b","u","b"), flag_list = c("e","s","t")),NA)
expect_equal(flag_hierarchy(c(NA,NA,NA,NA), flag_list = c("e","s","t")),NA)
}) |
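# fbRanks demo: rank teams from a scores file, attach team attributes, include
# surface and home-advantage effects, and simulate or predict league and
# tournament results.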
options(prompt=" ", continue=" ", width=100)
library(fbRanks)
temp=create.fbRanks.dataframes(scores.file="scores-web.csv")
scores=temp$scores
head(scores[,1:5])
ranks1=rank.teams(scores=scores)
temp=create.fbRanks.dataframes(scores.file="scores-web.csv", teams.file="teams-web.csv")
scores=temp$scores
teams=temp$teams
head(teams[,c("name","age","region","fall.league")])
ranks2=rank.teams(scores=scores, teams=teams)
print(ranks2, fall.league="RCL D1 U12")
names(scores)
ranks4=rank.teams(scores=scores,teams=teams,add=c("surface","adv"))
coef(ranks4$fit$cluster.1)["surface.fTurf"]
coef(ranks4$fit$cluster.1)["adv.fhome"]
ranks.summer=rank.teams(scores=scores,teams=teams,add=c("surface"), max.date="2012-9-5")
simulate(ranks.summer, venue="RCL D1")
predict(ranks.summer, venue="RCL D1", date=as.Date("2012-09-16"))
fantasy.teams=c("Seattle United Copa B00","Seattle United Tango B00",
"Seattle United Samba B00","Seattle United S Black B00")
home.team=combn(fantasy.teams,2)[1,]
away.team=combn(fantasy.teams,2)[2,]
fantasy.games=data.frame(
date="2013-1-1",
home.team=home.team,
home.score=NaN,
away.team=away.team,
away.score=NaN, surface="Grass",
home.adv="neutral", away.adv="neutral")
simulate(ranks4, newdata=fantasy.games, points.rule="tournament10pt")
options(prompt="> ", continue="+ ") |
lncDIFF<-function(edata,group,covariate=NULL,link.function='log',CompareGroups=NULL,simulated.pvalue=FALSE,permutation=100){
group.labels<-names(table(group))
if(is.null(CompareGroups)) {
cat('Compared groups are not specified, defaulting to all groups','\n')
CompareGroups<-group.labels
}
if(length(CompareGroups)>length(group.labels)) stop('Duplicates or unspecified groups in CompareGroups')
if(sum(!CompareGroups %in% group)>0) stop('Groups to be compared (CompareGroups) are not among the group labels')
if(length(CompareGroups)==1) stop('Specify at least 2 groups to be compared')
if(!is.vector(group)) stop('Treatment groups or phenotypes of interest (group) must be a vector ')
if(!is.null(covariate)){
if(length(group)!=nrow(covariate))
stop('Dimensions of covariate and group do not match')
}
if(length(group)!=ncol(edata)) stop('Dimensions of counts (edata) and group do not match')
if(length(CompareGroups)>2) cat('More than 2 groups are compared, fold changes are not computed','\n')
full.coefficients=which(group.labels %in% CompareGroups)
if(! 1 %in% full.coefficients){
group<-factor(group,levels = c(CompareGroups,group.labels[-full.coefficients]))
group.labels<-names(table(group))
full.coefficients=which(group.labels %in% CompareGroups)
}
pdata=as.data.frame(cbind(group,covariate))
formula='~'
for(i in colnames(pdata)[-ncol(pdata)]){
formula=paste(formula,i,'+',sep = '')
}
formula=paste(formula,colnames(pdata)[ncol(pdata)],sep='')
design.matrix=model.matrix(as.formula(formula),pdata)
colnames(design.matrix)[2:length(group.labels)]=paste(deparse(substitute(group)),group.labels[-1],sep = '')
ZIQML.fit.full=ZIQML.fit(edata,design.matrix,link=link.function)
test.coef=sort(full.coefficients)[-1]
n=nrow(design.matrix)
g=nrow(edata)
test=LRT(ZIQML.fit.full,coef=test.coef)
LRT.stat=test$LRT.stat
LRT.pvalue=test$LRT.pvalue
LRT.fdr=p.adjust(LRT.pvalue,method = 'BH')
DE.Gene=ifelse(LRT.fdr<0.05,'Yes','No')
results=data.frame(test.statistics=LRT.stat,Pvalue=LRT.pvalue,FDR=LRT.fdr,DE.Gene=DE.Gene)
compare.id=group %in% CompareGroups
sub.edata=t(edata[,compare.id])
groupwise.mean=aggregate(sub.edata,by=list(group[compare.id]),FUN=mean)
rownames(groupwise.mean)=groupwise.mean[,1]
groupwise.mean=t(groupwise.mean[,-1])
colnames(groupwise.mean)=paste('Mean',colnames(groupwise.mean),sep ='_' )
results=cbind(results,groupwise.mean)
if(simulated.pvalue){
LRT.STAT=NULL
for(i in 1:permutation){
id=sample(1:n,n)
ZIQML.fit.null=ZIQML.fit(edata,design.matrix[id,],link = ZIQML.fit.full$link)
test=LRT(ZIQML.fit.null,coef=test.coef)
LRT.STAT=cbind(LRT.STAT,test$LRT.stat)
}
LRT.simulated.pvalue=(0.1+rowSums(LRT.STAT>LRT.stat))/permutation
LRT.simulated.pvalue=pmin(LRT.simulated.pvalue,1)
LRT.simulated.fdr=p.adjust(LRT.simulated.pvalue,method = 'BH')
results$Simulated.Pvalue=LRT.simulated.pvalue
results$Simulated.FDR=LRT.simulated.fdr
results$DE.Gene.Simulated.Fdr=ifelse(results$Simulated.FDR<0.05,'Yes','No')
}
if(length(CompareGroups)==2){
results$Fold.Change=groupwise.mean[,1]/groupwise.mean[,2]
results$Log2.Fold.Change=log2(groupwise.mean[,1]/groupwise.mean[,2])
}
rownames(results)=rownames(ZIQML.fit.full$edata)
output=list(DE.results=results,full.model.fit=ZIQML.fit.full)
return(output)
} |
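# Black-Scholes call value wrapper: fast scalar path, otherwise arguments are
# checked, recycled to a common length and passed element-wise to the C++ kernel.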
BS_call <- function(V, D, T., r, vol){
lens <- c(length(V), length(D), length(T.), length(r), length(vol))
if(all(lens == 1))
return(drop(BS_call_cpp(V = V, D = D, T = T., r = r, vol = vol)))
.check_args(V = V, D = D, T. = T., r = r, vol = vol)
args <- .get_eq_length_args(
lens = lens, V = V, D = D, T = T., r = r, vol = vol)
with.default(
args, drop(mapply(BS_call_cpp, V = V, D = D, T = T, r = r, vol = vol)))
} |
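# edgecluster: detects edge points in `data` with edgepoints(), converts them to a
# point list with eplist() (passing `maxval` on), and clusters the edges with
# oregMclust(); returns the clustering object together with the edge-point list.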
edgecluster = function(data, h1n, h2n, maxval, bw = max(h1n, h2n)/qnorm(0.975), asteps = 4,
estimator = "M_median", kernel = "gauss", score = "gauss", sigma = 1, kernelfunc = NULL) {
if (estimator == "test_mean" || estimator == "test_median")
test = TRUE
else
test = FALSE
ep = eplist(edgepoints(data, h1n, h2n, asteps = asteps, estimator = estimator,
kernel = kernel, score = score, sigma = sigma, kernelfunc = kernelfunc), maxval, test = test)
list(oregMclust(ep, bw = bw), ep)
} |
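# Distance helpers in the style of dtwclust: use_distmat() subsets a precomputed
# Distmat object for the requested series/centroids, and get_dots() keeps only the
# `...` arguments that proxy::dist or the registered distance function accepts,
# filling in defaults for the window arguments and for multivariate DTW.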
use_distmat <- function(distmat, x, centroids) {
if (!inherits(distmat, "Distmat"))
stop("Invalid distance matrix in control.")
i <- 1L:length(x)
j <- if (is.null(centroids)) i else distmat$id_cent
distmat[i, j, drop = FALSE]
}
get_dots <- function(dist_entry, x, centroids, ...) {
dots <- list(...)
if (is.null(dots$window.size)) {
dots$window.type <- "none"
}
else if (is.null(dots$window.type)) {
dots$window.type <- "slantedband"
}
dots$error.check <- FALSE
if (tolower(dist_entry$names[1L]) == "dtw" && is.null(dots$dist.method) && is_multivariate(c(x, centroids))) {
dots$dist.method <- "L1"
}
valid_args <- names(dots)
if (is.function(dist_entry$FUN)) {
if (!has_dots(dist_entry$FUN)) {
valid_args <- union(names(formals(proxy::dist)), names(formals(dist_entry$FUN)))
}
}
else {
valid_args <- names(formals(proxy::dist))
}
dots[intersect(names(dots), valid_args)]
}
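# split_parallel_symmetric: recursively partitions the indices 1..n into triangular
# and rectangular chunks (with bookkeeping attributes "trimat" and "rows") so that a
# symmetric distance matrix can be filled by several parallel workers; chunks are
# returned ordered by approximate size.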
split_parallel_symmetric <- function(n, num_workers, adjust = 0L) {
if (num_workers <= 2L || n <= 4L) {
mid_point <- as.integer(n / 2)
ul_trimat <- 1L:mid_point + adjust
ll_trimat <- (mid_point + 1L):n + adjust
trimat <- list(ul = ul_trimat, ll = ll_trimat)
attr(trimat, "trimat") <- TRUE
trimat <- list(trimat)
mid_point <- mid_point + adjust
attr(ul_trimat, "rows") <- ll_trimat
mat <- list(ul_trimat)
ids <- c(trimat, mat)
}
else {
mid_point <- as.integer(n / 2)
rec1 <- split_parallel_symmetric(mid_point, as.integer(num_workers / 4), adjust)
rec2 <- split_parallel_symmetric(n - mid_point, as.integer(num_workers / 4), mid_point + adjust)
endpoints <- parallel::splitIndices(mid_point, max(length(rec1) + length(rec2), num_workers))
endpoints <- endpoints[lengths(endpoints) > 0L]
mat <- lapply(endpoints, function(ids) {
ids <- ids + adjust
attr(ids, "rows") <- (mid_point + 1L):n + adjust
ids
})
ids <- c(rec1, rec2, mat)
}
chunk_sizes <- unlist(lapply(ids, function(x) {
if (is.null(attr(x, "trimat"))) length(x) else median(lengths(x))
}))
ids[sort(chunk_sizes, index.return = TRUE)$ix]
}
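# parallel_symmetric: worker routine that attaches the shared bigmemory matrix
# described by d_desc and fills either two diagonal (triangular) blocks or an
# off-diagonal block and its transpose with proxy::dist results.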
parallel_symmetric <- function(d_desc, ids, x, distance, dots) {
attach.big.matrix <- get("attach.big.matrix", asNamespace("bigmemory"), mode = "function")
dd <- attach.big.matrix(d_desc)
if (isTRUE(attr(ids, "trimat"))) {
ul <- ids$ul
if (length(ul) > 1L) {
dd[ul,ul] <- base::as.matrix(quoted_call(
proxy::dist,
x = x[ul],
y = NULL,
method = distance,
dots = dots
))
}
ll <- ids$ll
if (length(ll) > 1L) {
dd[ll,ll] <- base::as.matrix(quoted_call(
proxy::dist,
x = x[ll],
y = NULL,
method = distance,
dots = dots
))
}
}
else {
rows <- attr(ids, "rows")
mat_chunk <- base::as.matrix(quoted_call(
proxy::dist,
x = x[rows],
y = x[ids],
method = distance,
dots = dots
))
dd[rows,ids] <- mat_chunk
dd[ids,rows] <- t(mat_chunk)
}
}
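# ddist2: returns a distance-function closure for the registered proxy distance
# `distance`. The closure handles precomputed distance matrices, pairwise versus
# cross-distance output, and parallel computation, using a shared bigmemory matrix
# for symmetric whole-matrix calculations when multiple workers are available.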
ddist2 <- function(distance, control) {
dist_entry <- proxy::pr_DB$get_entry(distance)
symmetric <- isTRUE(control$symmetric)
warned <- FALSE
export <- c("check_consistency", "quoted_call", "parallel_symmetric", "distance", "dist_entry")
ret <- function(result, ...) {
ret <- structure(result, method = toupper(distance), ...)
if (!is.null(attr(ret, "call"))) {
attr(ret, "call") <- NULL
}
ret
}
distfun <- function(x, centroids = NULL, ...) {
x <- tslist(x)
if (!is.null(centroids)) centroids <- tslist(centroids)
if (length(x) == 1L && is.null(centroids)) {
return(ret(base::matrix(0, 1L, 1L),
class = "crossdist",
dimnames = list(names(x), names(x))))
}
if (!is.null(control$distmat)) {
return(ret(use_distmat(control$distmat, x, centroids)))
}
dots <- get_dots(dist_entry, x, centroids, ...)
if (!dist_entry$loop) {
dm <- base::as.matrix(quoted_call(
proxy::dist, x = x, y = centroids, method = distance, dots = dots
))
if (isTRUE(dots$pairwise)) {
dim(dm) <- NULL
return(ret(dm, class = "pairdist"))
}
else {
return(ret(dm, class = "crossdist"))
}
}
if (is.null(centroids) && symmetric && !isTRUE(dots$pairwise)) {
multiple_workers <- foreach::getDoParWorkers() > 1L
if (multiple_workers && isNamespaceLoaded("bigmemory")) {
len <- length(x)
seed <- get0(".Random.seed", .GlobalEnv, mode = "integer")
big.matrix <- get("big.matrix", asNamespace("bigmemory"), mode = "function")
bigmemory_describe <- get("describe", asNamespace("bigmemory"), mode = "function")
d <- big.matrix(len, len, "double", 0)
d_desc <- bigmemory_describe(d)
assign(".Random.seed", seed, .GlobalEnv)
ids <- integer()
foreach(
ids = split_parallel_symmetric(len, foreach::getDoParWorkers()),
.combine = c,
.multicombine = TRUE,
.noexport = c("d"),
.packages = c(control$packages, "bigmemory"),
.export = export
) %op% {
if (!check_consistency(dist_entry$names[1L], "dist")) {
do.call(proxy::pr_DB$set_entry, dist_entry, TRUE)
}
parallel_symmetric(d_desc, ids, x, distance, dots)
NULL
}
return(ret(d[,], class = "crossdist", dimnames = list(names(x), names(x))))
}
else if (multiple_workers && !warned && isTRUE(getOption("dtwclust_suggest_bigmemory", TRUE))) {
warned <<- TRUE
warning("Package 'bigmemory' is not available, cannot parallelize computation with '",
distance,
"'. Use options(dtwclust_suggest_bigmemory = FALSE) to avoid this warning.")
}
else {
dm <- base::as.matrix(quoted_call(
proxy::dist, x = x, y = NULL, method = distance, dots = dots
))
return(ret(dm, class = "crossdist"))
}
}
if (is.null(centroids)) centroids <- x
dim_names <- list(names(x), names(centroids))
x <- split_parallel(x)
if (isTRUE(dots$pairwise)) {
centroids <- split_parallel(centroids)
validate_pairwise(x, centroids)
combine <- c
}
else {
centroids <- lapply(1L:foreach::getDoParWorkers(), function(dummy) { centroids })
if (length(centroids) > length(x)) centroids <- centroids[1L:length(x)]
combine <- rbind
}
d <- foreach(
x = x, centroids = centroids,
.combine = combine,
.multicombine = TRUE,
.packages = control$packages,
.export = export
) %op% {
if (!check_consistency(dist_entry$names[1L], "dist")) {
do.call(proxy::pr_DB$set_entry, dist_entry, TRUE)
}
quoted_call(proxy::dist, x = x, y = centroids, method = distance, dots = dots)
}
if (isTRUE(dots$pairwise)) {
attr(d, "class") <- "pairdist"
}
else {
attr(d, "class") <- "crossdist"
attr(d, "dimnames") <- dim_names
}
ret(d)
}
distfun
} |
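# Field: R6 class for a Table Schema field (tableschema.r style). It expands the
# field descriptor, casts values to the declared type/format via type-specific cast
# functions, treats configured missing values as NULL, and checks the declared
# constraints when casting or testing a value.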
Field <- R6Class(
"Field",
public = list(
initialize = function(descriptor,
base_path = NULL,
strict = NULL,
missingValues = as.list(config::get("DEFAULT_MISSING_VALUES", file = system.file("config/config.yml", package = "tableschema.r"))),
...) {
if (missing(base_path)) {
private$base_path <- NULL
}
else {
private$base_path <- base_path
}
if (missing(strict)) {
private$strict <- NULL
}
else {
private$strict <- strict
}
if (missing(descriptor)) {
private$descriptor_ <- NULL
}
else {
private$descriptor_ <- descriptor
}
private$missingValues <- missingValues
private$descriptor_ <- helpers.expandFieldDescriptor(descriptor)
},
cast_value = function(...) {
return(private$castValue(...))
},
testValue = function(value, constraints = TRUE) {
result <- tryCatch({
private$castValue(value, constraints)
TRUE
}, warning = function(w) {
return(FALSE)
}, error = function(e) {
return(FALSE)
}, finally = {
})
return(result)
}
),
active = list(
descriptor = function() {
return(private$descriptor_)
},
required = function(){
if (!is.null(private$descriptor_)) {
return(identical(private$descriptor_$required, TRUE))
}
else{
return(FALSE)
}
},
name = function() {
return(private$descriptor_$name)
},
type = function() {
return(private$descriptor_$type)
},
format = function() {
return(private$descriptor_$format)
},
constraints = function() {
if (is.list(private$descriptor_) && "constraints" %in% names(private$descriptor_))
{
return(private$descriptor_$constraints)
}
else {
return(list())
}
}
),
private = list(
missingValues = NULL,
base_path = NULL,
strict = NULL,
descriptor_ = NULL,
types = Types$new(),
constraints_ = Constraints$new()$constraints,
castFunction = function() {
options <- list()
if (self$type == 'number') {
lapply(list('decimalChar', 'groupChar', 'currency'), function(key) {
value <- private$descriptor_[[key]]
if (!is.null(value)) {
options[[key]] <- value
}
})
}
func <- private$types$casts[[stringr::str_interp("cast${stringr::str_to_title(self$type)}")]]
if (is.null(func))
stop(stringr::str_interp("Not supported field type ${self$type}"))
cast <- purrr::partial(func, format = self$format)
return(cast)
},
castValue = function(value, constraints = TRUE, ...) {
if (value %in% private$missingValues) {
value <- NULL
}
castValue <- value
if (!is.null(value)) {
castFunction <- private$castFunction()
castValue <- castFunction(value)
if (identical(castValue , config::get("ERROR", file = system.file("config/config.yml", package = "tableschema.r")))) {
err_message <-
stringr::str_interp(
"Field ${private$name} can't cast value ${value} for type ${self$type} with format ${self$format}"
)
stop(err_message)
}
}
if (constraints || is.list(constraints)) {
checkFunctions <- private$checkFunctions()
      if (is.list(checkFunctions) &&
          length(checkFunctions) > 0) {
names_ <- Filter(function(n) {
if (!is.list(constraints)) {
return(TRUE)
}
else if (n %in% names(constraints)) {
return(TRUE)
}
else
return(FALSE)
}, names(checkFunctions))
            lapply(names_,
                   function(name) {
                     check <- checkFunctions[[name]]
                     passed <- check(castValue)
                     if (!passed) {
                       err_message <-
                         stringr::str_interp(
                           "Field ${self$name} has constraint ${name} which is not satisfied for value ${value}"
                         )
                       stop(err_message)
                     }
                   })
}
}
return(castValue)
},
checkFunctions = function() {
checks <- list()
cast <-
purrr::partial(private$castValue, constraints = FALSE)
for (name in names(self$constraints)) {
constraint <- self$constraints[[name]]
castConstraint <- constraint
if (name %in% list('enum')) {
castConstraint <- lapply(constraint, cast)
}
if (name %in% list('maximum', 'minimum')) {
castConstraint <- cast(constraint)
}
func <- private$constraints_[[stringr::str_interp("check${paste0(toupper(substr(name, 1, 1)), substr(name, 2, nchar(name)))}")]]
check <- purrr::partial(func, constraint = castConstraint)
checks[[name]] <- check
}
return(checks)
}
)
) |
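# Example OutliersO3 analysis: overview-of-outliers (O3) plots for four variables of
# the Election2005 data, first with a single tolerance for method "HDo", then with
# several tolerance levels, and finally comparing multiple outlier-detection methods.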
library(OutliersO3)
library(ggplot2)
data(Election2005)
data <- Election2005[, c(6, 10, 17, 28)]
O3s <- O3prep(data, method="HDo", tols=0.05, boxplotLimits=6)
O3s1 <- O3plotT(O3s, caseNames=Election2005$Name)
O3s1$gO3 + theme(plot.margin = unit(c(0, 2, 0, 0), "cm"))
O3x <- O3prep(data, method="HDo", tols=c(0.1, 0.05, 0.01), boxplotLimits=c(3, 6, 10))
O3x1 <- O3plotT(O3x)
library(gridExtra)
grid.arrange(O3x1$gO3, O3x1$gpcp, ncol=1)
O3m <- O3prep(data, method=c("HDo", "PCS"))
O3m1 <- O3plotM(O3m)
grid.arrange(O3m1$gO3, O3m1$gpcp, ncol=1)
O3y <- O3prep(data, method=c("HDo", "PCS", "BAC", "adjOut", "DDC", "MCD"))
O3y1 <- O3plotM(O3y)
cx <- data.frame(outlier_method=names(O3y1$nOut), number_of_outliers=O3y1$nOut)
knitr::kable(cx, row.names=FALSE)
grid.arrange(O3y1$gO3, O3y1$gpcp, ncol=1) |
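# SurvivalFromCumhaz: linearly interpolates a cumulative hazard given on K equidistant
# time points over (0, time.max] and returns the survival function exp(-H(t))
# evaluated on a finer grid of surv.factor * K points in [0, time.max).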
SurvivalFromCumhaz <- function(cumhaz, time.max, surv.factor = 10, surv.epsilon = 0.0000000001){
K <- length(cumhaz)
eval.vec <- seq(0, (time.max - surv.epsilon), length = surv.factor*K)
cumhaz.interpolated <- approxfun( (time.max/K)*(0:K), c(0, cumhaz) )
surv <- function(t){exp(-cumhaz.interpolated(t))}
return(surv(eval.vec))
} |
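# rkt_prep: builds a ROC preparation object (an environment of class "rkt_prep")
# holding weighted empirical CDFs of the scores for positives and negatives plus the
# total positive and negative counts; print and plot methods follow.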
rkt_prep <- function(scores, positives, negatives = totals - positives, totals = 1) {
if(missing(scores)) stop("\"scores\" is required")
if(missing(positives)) stop("\"positives\" is required")
if (!missing(negatives) && !missing(totals)) {
stopifnot(all(positives + negatives == totals))
}
stopifnot(all(totals >= 0))
stopifnot(all(positives >= 0))
stopifnot(all(negatives >= 0))
out <- new.env(parent = emptyenv())
out$pos_ecdf <- rkt_ecdf(scores, positives)
out$neg_ecdf <- rkt_ecdf(scores, negatives)
out$pos_n <- sum(positives)
out$neg_n <- sum(negatives)
class(out) <- c("rkt_prep", class(out))
out
}
print.rkt_prep <- function(x, ...) {
cat(".:: ROCket Prep Object \n")
cat("Positives (pos_n):", x$pos_n, "\n")
cat("Negatives (neg_n):", x$neg_n, "\n")
cat("Pos ECDF (pos_ecdf):",class(x$pos_ecdf), "\n")
cat("Neg ECDF (neg_ecdf):",class(x$neg_ecdf), "\n")
}
plot.rkt_prep <- function(x, ...) {
inargs <- list(...)
s <- get_cutoffs(x)
outargs <- list(x = 1 - x$neg_ecdf(s),
y = 1 - x$pos_ecdf(s),
xlim = c(0, 1),
ylim = c(0, 1),
xlab = expression(FPR),
ylab = expression(TPR),
main = 'ROC',
v = c(0, 1),
h = c(0, 1))
outargs[names(inargs)] <- inargs
do.call(plot_points, outargs)
invisible()
} |
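# cubecsi: estimates a CUBE model in which the csi (feeling) parameter depends on the
# covariates W, by maximising the log-likelihood (loglikcubecsi) with L-BFGS-B over
# (pai, gama, phi). Standard errors, Wald statistics and information criteria are
# derived from the numerical Hessian; estimates, log-likelihood, variance matrix and
# BIC are returned.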
cubecsi<-function(m,ordinal,W,starting,maxiter,toler){
tt0<-proc.time()
n<-length(ordinal)
W<-as.matrix(W)
if (ncol(W)==1){
W<-as.numeric(W)
}
q<-length(starting)-3
pai<-starting[1]; gama<-starting[2:(q+2)]; phi<-starting[q+3];
loglikzero<-loglikcubecsi(m,ordinal,W,pai,gama,phi)
param<-c(pai,gama,phi)
optimparam<-optim(param,effecubecsi,ordinal=ordinal,W=W,m=m,method="L-BFGS-B",lower=c(0.01,rep(-Inf,q+1),0.01), upper=c(0.99,rep(Inf,q+1),0.3),gr=NULL,hessian=TRUE)
paramest<-optimparam$par
pai<-paramest[1]
gama<-paramest[2:(q+2)]
phi<-paramest[q+3]
hessian<-optimparam$hessian
loglik<-loglikcubecsi(m,ordinal,W,pai,gama,phi)
vettestim<-c(pai,gama,phi)
nparam<-length(vettestim)
AICCUBEcsi<- -2*loglik+2*nparam
BICCUBEcsi<- -2*loglik+log(n)*nparam
if (det(hessian)<=0){
warning("Variance-Covariance matrix is not positive definite")
varmat<-ddd<-cormat<-matrix(NA,nrow=nparam,ncol=nparam)
errstd<-wald<-pval<-rep(NA,nparam)
ICOMP<-trvarmat<-NA
} else {
varmat<-solve(hessian)
errstd<-sqrt(diag(varmat))
ddd<-diag(sqrt(1/diag(varmat)))
wald<-vettestim/errstd
pval<-2*(1-pnorm(abs(wald)))
cormat<-(ddd%*%varmat)%*%ddd
trvarmat<-sum(diag(varmat))
ICOMP<- -2*loglik + nparam*log(trvarmat/nparam) - log(det(varmat))
}
stime<-vettestim
durata<-proc.time()-tt0;durata<-durata[1];
results<-list('estimates'=stime, 'loglik'=loglik, 'varmat'=varmat,
'BIC'= BICCUBEcsi,'time'=durata,'niter'=1)
} |
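# kap: Cohen's kappa in the yardstick style, with optional "linear" or "quadratic"
# disagreement weighting. Methods are provided for data frames, tables, matrices and
# bare vectors; kap_table_impl() computes 1 - sum(w * observed) / sum(w * expected).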
kap <- function(data, ...) {
UseMethod("kap")
}
kap <- new_class_metric(
kap,
direction = "maximize"
)
kap.data.frame <- function(data,
truth,
estimate,
weighting = "none",
na_rm = TRUE,
...) {
metric_summarizer(
metric_nm = "kap",
metric_fn = kap_vec,
data = data,
truth = !!enquo(truth),
estimate = !!enquo(estimate),
na_rm = na_rm,
metric_fn_options = list(weighting = weighting)
)
}
kap.table <- function(data,
weighting = "none",
...) {
check_table(data)
metric_tibbler(
.metric = "kap",
.estimator = finalize_estimator(data, metric_class = "kap"),
.estimate = kap_table_impl(data, weighting = weighting)
)
}
kap.matrix <- function(data,
weighting = "none",
...) {
data <- as.table(data)
kap.table(data, weighting = weighting)
}
kap_vec <- function(truth,
estimate,
weighting = "none",
na_rm = TRUE,
...) {
estimator <- finalize_estimator(truth, metric_class = "kap")
kap_impl <- function(truth, estimate, weighting) {
xtab <- vec2table(
truth = truth,
estimate = estimate
)
kap_table_impl(xtab, weighting = weighting)
}
metric_vec_template(
metric_impl = kap_impl,
truth = truth,
estimate = estimate,
na_rm = na_rm,
estimator = estimator,
cls = "factor",
weighting = weighting
)
}
kap_table_impl <- function(data, weighting) {
full_sum <- sum(data)
row_sum <- rowSums(data)
col_sum <- colSums(data)
expected <- outer(row_sum, col_sum) / full_sum
n_levels <- nrow(data)
w <- make_weighting_matrix(weighting, n_levels)
n_disagree <- sum(w * data)
n_chance <- sum(w * expected)
1 - n_disagree / n_chance
}
make_weighting_matrix <- function(weighting, n_levels) {
validate_weighting(weighting)
if (is_no_weighting(weighting)) {
w <- matrix(1L, nrow = n_levels, ncol = n_levels)
diag(w) <- 0L
return(w)
}
if (is_linear_weighting(weighting)) {
power <- 1L
} else {
power <- 2L
}
w <- rlang::seq2(0L, n_levels - 1L)
w <- matrix(w, nrow = n_levels, ncol = n_levels)
w <- abs(w - t(w)) ^ power
w
}
validate_weighting <- function(x) {
if (!rlang::is_string(x)) {
abort("`weighting` must be a string.")
}
ok <- is_no_weighting(x) ||
is_linear_weighting(x) ||
is_quadratic_weighting(x)
if (!ok) {
abort("`weighting` must be 'none', 'linear', or 'quadratic'.")
}
invisible(x)
}
is_no_weighting <- function(x) {
identical(x, "none")
}
is_linear_weighting <- function(x) {
identical(x, "linear")
}
is_quadratic_weighting <- function(x) {
identical(x, "quadratic")
} |
DATA_PATH <- "50_Data/"
COMPONENT_PATH <- "03_Components/"
PAGE_PATH <- "04_Pages/" |
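# track_progress: queries the Captricity API (shreddr.captricity.com) for the status
# of a job; authentication uses the CaptricityToken environment variable after a
# captr_CHECKAUTH() check, and the JSON response is returned as a parsed list.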
track_progress <- function(job_id = "") {
captr_CHECKAUTH()
if (is.null(job_id) || identical(job_id, "")) stop("Provide a valid job ID.")
h <- new_handle()
handle_setopt(h, customrequest = "GET")
handle_setheaders(h, "Captricity-API-Token" = Sys.getenv("CaptricityToken"))
tag_con <- curl_fetch_memory(paste0("https://shreddr.captricity.com/api/v1/job/", job_id), handle = h)
tag <- fromJSON(rawToChar(tag_con$content))
tag
} |
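# Tests for cmsafops::hoursum: hourly sums, variable and coordinate attributes, and
# error handling (wrong nc version, missing variable or input file, existing output
# file) are checked for both NetCDF v3 and v4 output.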
data_dir <- file.path("..", "testdata")
tempfile_nc <- function() {
tempfile_helper("hoursum_")
}
file_out <- tempfile_nc()
hoursum("SIS", file.path(data_dir, "ex_hourx.nc"), file_out)
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file)
expected_data <- c(18102.0,18162.0,18121.0,18080.0,18140.0,18099.0,18058.0,
18118.0,18077.0,18036.0,18096.0,18055.0,18014.0,18074.0,
18033.0,17992.0,18052.0,18011.0,17970.0,18030.0,17989.0,
17948.0,18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,
17964.0,17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,
17879.0,17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,
17895.0,17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,
18055.0,18014.0,18074.0,18033.0,17992.0,18052.0,18011.0,
17970.0,18030.0,17989.0,17948.0,18008.0,17967.0,17926.0,
17986.0,17945.0,17904.0,17964.0,17923.0,17882.0,17942.0,
17901.0,17860.0,17920.0,17879.0,17838.0,17898.0,17857.0,
17816.0,17876.0,17936.0,17895.0,17955.0,18015.0,17974.0,
18034.0,18094.0,18053.0,18113.0,18173.0,18132.0,18091.0,
18151.0,18110.0,18069.0,18129.0,18088.0,18047.0,18107.0,
18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,17964.0,
17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,17879.0,
17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,17895.0,
17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,18113.0,
18173.0,18132.0,18091.0,18151.0,18110.0,18069.0,18129.0,
18088.0,18047.0,18107.0,18066.0,18025.0,18085.0,18044.0,
18003.0,18063.0,18022.0,17981.0,18041.0,18000.0,17959.0)
expected <- array(expected_data, dim = c(7, 7, 3))
expect_equivalent(actual[1:147], expected)
})
test_that("variable attributes are correct", {
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "missing_value")$value
expect_equal(actual, 0)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "minutes since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "cmsaf_info")$value
expect_equal(actual, "cmsafops::hoursum for variable SIS")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 8, 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 48, 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual[1:3], array(c(8941680, 8941740, 8941800)))
})
nc_close(file)
file_out <- tempfile_nc()
hoursum("SIS", file.path(data_dir, "ex_hourx.nc"), file_out, nc34 = 4)
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file)
expected_data <- c(18102.0,18162.0,18121.0,18080.0,18140.0,18099.0,18058.0,
18118.0,18077.0,18036.0,18096.0,18055.0,18014.0,18074.0,
18033.0,17992.0,18052.0,18011.0,17970.0,18030.0,17989.0,
17948.0,18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,
17964.0,17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,
17879.0,17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,
17895.0,17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,
18055.0,18014.0,18074.0,18033.0,17992.0,18052.0,18011.0,
17970.0,18030.0,17989.0,17948.0,18008.0,17967.0,17926.0,
17986.0,17945.0,17904.0,17964.0,17923.0,17882.0,17942.0,
17901.0,17860.0,17920.0,17879.0,17838.0,17898.0,17857.0,
17816.0,17876.0,17936.0,17895.0,17955.0,18015.0,17974.0,
18034.0,18094.0,18053.0,18113.0,18173.0,18132.0,18091.0,
18151.0,18110.0,18069.0,18129.0,18088.0,18047.0,18107.0,
18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,17964.0,
17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,17879.0,
17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,17895.0,
17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,18113.0,
18173.0,18132.0,18091.0,18151.0,18110.0,18069.0,18129.0,
18088.0,18047.0,18107.0,18066.0,18025.0,18085.0,18044.0,
18003.0,18063.0,18022.0,17981.0,18041.0,18000.0,17959.0)
expected <- array(expected_data, dim = c(7, 7, 3))
expect_equivalent(actual[1:147], expected)
})
test_that("variable attributes are correct", {
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "missing_value")$value
expect_equal(actual, 0)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "minutes since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 8, 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 48, 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual[1:3], array(c(8941680, 8941740, 8941800)))
})
nc_close(file)
file_out <- tempfile_nc()
test_that("error is thrown if ncdf version is wrong", {
expect_error(
hoursum("SIS", file.path(data_dir, "ex_hourx.nc"), file_out, nc34 = 7),
"nc version must be in c(3, 4), but was 7", fixed = TRUE
)
})
file_out <- tempfile_nc()
test_that("ncdf version NULL throws an error", {
expect_error(
hoursum("SIS",
file.path(data_dir, "ex_hourx.nc"),
file_out, nc34 = NULL),
"nc_version must not be NULL"
)
})
file_out <- tempfile_nc()
test_that("warning is shown if var does not exist", {
expect_warning(hoursum("notExist",
file.path(data_dir, "ex_hourx.nc"),
file_out),
"Variable 'notExist' not found. Variable 'SIS' will be used instead.")
})
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file)
expected_data <- c(18102.0,18162.0,18121.0,18080.0,18140.0,18099.0,18058.0,
18118.0,18077.0,18036.0,18096.0,18055.0,18014.0,18074.0,
18033.0,17992.0,18052.0,18011.0,17970.0,18030.0,17989.0,
17948.0,18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,
17964.0,17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,
17879.0,17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,
17895.0,17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,
18055.0,18014.0,18074.0,18033.0,17992.0,18052.0,18011.0,
17970.0,18030.0,17989.0,17948.0,18008.0,17967.0,17926.0,
17986.0,17945.0,17904.0,17964.0,17923.0,17882.0,17942.0,
17901.0,17860.0,17920.0,17879.0,17838.0,17898.0,17857.0,
17816.0,17876.0,17936.0,17895.0,17955.0,18015.0,17974.0,
18034.0,18094.0,18053.0,18113.0,18173.0,18132.0,18091.0,
18151.0,18110.0,18069.0,18129.0,18088.0,18047.0,18107.0,
18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,17964.0,
17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,17879.0,
17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,17895.0,
17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,18113.0,
18173.0,18132.0,18091.0,18151.0,18110.0,18069.0,18129.0,
18088.0,18047.0,18107.0,18066.0,18025.0,18085.0,18044.0,
18003.0,18063.0,18022.0,17981.0,18041.0,18000.0,17959.0)
expected <- array(expected_data, dim = c(7, 7, 3))
expect_equivalent(actual[1:147], expected)
})
test_that("variable attributes are correct", {
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "missing_value")$value
expect_equal(actual, 0)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "minutes since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 8, 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 48, 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual[1:3], array(c(8941680, 8941740, 8941800)))
})
nc_close(file)
file_out <- tempfile_nc()
test_that("error is thrown if variable is NULL", {
expect_error(
hoursum(NULL,
file.path(data_dir, "ex_hourx.nc"),
file_out),
"variable must not be NULL"
)
})
file_out <- tempfile_nc()
test_that("warning is shown if var is empty", {
expect_warning(hoursum("",
file.path(data_dir, "ex_hourx.nc"),
file_out),
"Variable '' not found. Variable 'SIS' will be used instead.")
})
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file)
expected_data <- c(18102.0,18162.0,18121.0,18080.0,18140.0,18099.0,18058.0,
18118.0,18077.0,18036.0,18096.0,18055.0,18014.0,18074.0,
18033.0,17992.0,18052.0,18011.0,17970.0,18030.0,17989.0,
17948.0,18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,
17964.0,17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,
17879.0,17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,
17895.0,17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,
18055.0,18014.0,18074.0,18033.0,17992.0,18052.0,18011.0,
17970.0,18030.0,17989.0,17948.0,18008.0,17967.0,17926.0,
17986.0,17945.0,17904.0,17964.0,17923.0,17882.0,17942.0,
17901.0,17860.0,17920.0,17879.0,17838.0,17898.0,17857.0,
17816.0,17876.0,17936.0,17895.0,17955.0,18015.0,17974.0,
18034.0,18094.0,18053.0,18113.0,18173.0,18132.0,18091.0,
18151.0,18110.0,18069.0,18129.0,18088.0,18047.0,18107.0,
18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,17964.0,
17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,17879.0,
17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,17895.0,
17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,18113.0,
18173.0,18132.0,18091.0,18151.0,18110.0,18069.0,18129.0,
18088.0,18047.0,18107.0,18066.0,18025.0,18085.0,18044.0,
18003.0,18063.0,18022.0,17981.0,18041.0,18000.0,17959.0)
expected <- array(expected_data, dim = c(7, 7, 3))
expect_equivalent(actual[1:147], expected)
})
test_that("variable attributes are correct", {
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "missing_value")$value
expect_equal(actual, 0)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "minutes since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 8, 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 48, 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual[1:3], array(c(8941680, 8941740, 8941800)))
})
nc_close(file)
file_out <- tempfile_nc()
test_that("error is thrown if input file does not exist", {
expect_error(
hoursum("SIS",
file.path(data_dir, "xemaple1.nc"),
file_out),
"Input file does not exist")
})
file_out <- tempfile_nc()
test_that("error is thrown if input filename is empty", {
expect_error(
hoursum("SIS", "", file_out),
"Input file does not exist")
})
file_out <- tempfile_nc()
test_that("error is thrown if input filename is NULL", {
expect_error(
hoursum("SIS", NULL, file_out),
"Input filepath must be of length one and not NULL"
)
})
file_out <- tempfile_nc()
cat("test\n", file = file_out)
test_that("error is thrown if output file already exists", {
expect_error(
hoursum("SIS",
file.path(data_dir, "ex_dayx.nc"),
file_out),
paste0("File '",
file_out,
"' already exists. Specify 'overwrite = TRUE' if you want to overwrite it."),
fixed = TRUE
)
expect_equal(readLines(con = file_out), "test")
})
file_out <- tempfile_nc()
cat("test\n", file = file_out)
test_that("no error is thrown if overwrite = TRUE", {
expect_error(
hoursum("SIS",
file.path(data_dir, "ex_hourx.nc"),
file_out,
overwrite = TRUE
),
NA
)
})
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file)
expected_data <- c(18102.0,18162.0,18121.0,18080.0,18140.0,18099.0,18058.0,
18118.0,18077.0,18036.0,18096.0,18055.0,18014.0,18074.0,
18033.0,17992.0,18052.0,18011.0,17970.0,18030.0,17989.0,
17948.0,18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,
17964.0,17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,
17879.0,17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,
17895.0,17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,
18055.0,18014.0,18074.0,18033.0,17992.0,18052.0,18011.0,
17970.0,18030.0,17989.0,17948.0,18008.0,17967.0,17926.0,
17986.0,17945.0,17904.0,17964.0,17923.0,17882.0,17942.0,
17901.0,17860.0,17920.0,17879.0,17838.0,17898.0,17857.0,
17816.0,17876.0,17936.0,17895.0,17955.0,18015.0,17974.0,
18034.0,18094.0,18053.0,18113.0,18173.0,18132.0,18091.0,
18151.0,18110.0,18069.0,18129.0,18088.0,18047.0,18107.0,
18008.0,17967.0,17926.0,17986.0,17945.0,17904.0,17964.0,
17923.0,17882.0,17942.0,17901.0,17860.0,17920.0,17879.0,
17838.0,17898.0,17857.0,17816.0,17876.0,17936.0,17895.0,
17955.0,18015.0,17974.0,18034.0,18094.0,18053.0,18113.0,
18173.0,18132.0,18091.0,18151.0,18110.0,18069.0,18129.0,
18088.0,18047.0,18107.0,18066.0,18025.0,18085.0,18044.0,
18003.0,18063.0,18022.0,17981.0,18041.0,18000.0,17959.0)
expected <- array(expected_data, dim = c(7, 7, 3))
expect_equivalent(actual[1:147], expected)
})
test_that("variable attributes are correct", {
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "missing_value")$value
expect_equal(actual, 0)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "minutes since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 8, 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 48, 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual[1:3], array(c(8941680, 8941740, 8941800)))
})
nc_close(file) |
library(uGMAR)
context("functions used in the genetic algorithm")
params12 <- c(1.0, 0.9, 0.25, 4.5, 0.7, 3.0, 0.8)
params22t <- c(1.4, 0.8, 0.05, 0.27, 3.5, 0.9, -0.18, 3.1, 0.7, 203, 3)
params23 <- c(2.7, 0.8, -0.06, 0.3, 3.5, 0.8, -0.07, 2.6, 7.2, 0.3, -0.01, 0.1, 0.6, 0.25)
params12tr <- c(0.8, 0.96, 0.9, 0.4, 5.8, 0.9, 4, 272)
params23r <- c(1.7, 1.9, 2.1, 0.8, -0.05, 0.3, 0.7, 4.5, 0.7, 0.2)
params23tr <- c(1.9, 1.6, 2.1, 0.8, -0.02, 0.4, 0.1, 3.9, 0.6, 0.3, 15, 200, 220)
R1 <- matrix(c(1, 0, 0, 0, 0, 1), ncol=2)
R2 <- diag(1, ncol=3, nrow=3)
R3 <- matrix(c(0.5, 0.5), ncol=1)
R4 <- diag(1, ncol=2, nrow=2)
params21c <- c(1, 0.9, 1, 3)
params22c <- c(1, 0.1, -0.1, 1, 2, 0.2, 2, 0.8, 11, 12)
params33c <- c(1, 0.1, 0.1, 0.1, 1, 2, 0.2, 0.2, 0.2, 2, 3, 0.3, -0.3, 3, 0.5, 0.4)
params21cr <- c(1, 1, 1)
params22cr <- c(1, 2, 0.8, 1, 2, 0.7, 11, 12)
params32cr <- c(1, 2, 0.3, -0.3, 1, 2, 0.6)
params22gs <- c(1, 0.1, 0.1, 1, 2, 0.2, 0.2, 2, 0.3, 10)
params23gsr <- c(1, 2, 3, 0.5, 0.05, 1, 2, 3, 0.4, 0.4, 20, 30)
test_that("extract_regime extracts the right regime", {
expect_equal(extract_regime(2, c(1, 2), params23gsr, model="G-StMAR", restricted=TRUE, regime=1), c(1, 1))
expect_equal(extract_regime(2, c(1, 2), params23gsr, model="G-StMAR", restricted=TRUE, regime=3), c(3, 3, 30))
expect_equal(extract_regime(2, c(1, 2), params23gsr, model="G-StMAR", restricted=TRUE, regime=3, with_dfs=FALSE), c(3, 3))
expect_equal(extract_regime(2, c(1, 1), params22gs, model="G-StMAR", regime=2), c(2, 0.2, 0.2, 2, 10))
expect_equal(extract_regime(2, c(1, 1), params22gs, model="G-StMAR", regime=2, with_dfs=FALSE), c(2, 0.2, 0.2, 2))
expect_equal(extract_regime(2, c(1, 1), params22gs, model="G-StMAR", regime=1), c(1, 0.1, 0.1, 1))
expect_equal(extract_regime(1, 2, params12, regime=1), c(1.00, 0.90, 0.25))
expect_equal(extract_regime(2, 2, params22t, model="StMAR", regime=2), c(3.50, 0.90, -0.18, 3.10, 3.00))
expect_equal(extract_regime(2, 2, params22t, model="StMAR", regime=2, with_dfs=FALSE), c(3.50, 0.90, -0.18, 3.10))
expect_equal(extract_regime(2, 3, params23, regime=3), c(7.20, 0.30, -0.01, 0.10))
expect_equal(extract_regime(1, 2, params12tr, model="StMAR", restricted=TRUE, regime=2), c(0.96, 5.80, 272.00))
expect_equal(extract_regime(1, 2, params12tr, model="StMAR", restricted=TRUE, regime=2, with_dfs=FALSE), c(0.96, 5.80))
expect_equal(extract_regime(2, 3, params23r, restricted=TRUE, regime=2), c(1.9, 0.7))
expect_equal(extract_regime(2, 3, params23tr, model="StMAR", restricted=TRUE, regime=3), c(2.1, 3.9, 220.0))
expect_equal(extract_regime(2, 3, params23tr, model="StMAR", restricted=TRUE, regime=3, with_dfs=FALSE), c(2.1, 3.9))
expect_equal(extract_regime(2, 1, params21c, model="StMAR", constraints=list(R3), regime=1), c(1.0, 0.9, 1.0, 3.0))
expect_equal(extract_regime(2, 1, params21c, model="StMAR", constraints=list(R3), regime=1, with_dfs=FALSE), c(1.0, 0.9, 1.0))
expect_equal(extract_regime(2, 2, params22c, model="StMAR", constraints=list(R4, R3), regime=2), c(2.0, 0.2, 2.0, 12))
expect_equal(extract_regime(3, 3, params33c, constraints=list(R2, R2, R1), regime=2), c(2.0, 0.2, 0.2, 0.2, 2.0))
expect_equal(extract_regime(3, 3, params33c, constraints=list(R2, R2, R1), regime=3), c(3.0, 0.3, -0.3, 3.0))
expect_equal(extract_regime(2, 1, params21cr, restricted=TRUE, constraints=R3, regime=1), c(1, 1))
expect_equal(extract_regime(2, 2, params22cr, model="StMAR", restricted=TRUE, constraints=R3, regime=1), c(1, 1, 11))
expect_equal(extract_regime(2, 2, params22cr, model="StMAR", restricted=TRUE, constraints=R3, regime=2), c(2, 2, 12))
expect_equal(extract_regime(2, 2, params22cr, model="StMAR", restricted=TRUE, constraints=R3, regime=2, with_dfs=FALSE), c(2, 2))
expect_equal(extract_regime(3, 2, params32cr, restricted=TRUE, constraints=R1, regime=1), c(1, 1))
expect_equal(extract_regime(3, 2, params32cr, restricted=TRUE, constraints=R1, regime=2), c(2, 2))
})
params12 <- c(1.0, 0.9, 0.25, 4.5, 0.7, 3.0, 0.8)
params22t <- c(1.4, 0.8, 0.05, 0.27, 3.5, 0.9, -0.18, 3.1, 0.7, 203, 3)
params23 <- c(2.7, 0.8, -0.06, 0.3, 3.5, 0.8, -0.07, 2.6, 7.2, 0.3, -0.01, 0.1, 0.6, 0.25)
params12tr <- c(0.8, 0.96, 0.9, 0.4, 5.8, 0.9, 4, 272)
params23r <- c(1.7, 1.9, 2.1, 0.8, -0.05, 0.3, 0.7, 4.5, 0.7, 0.2)
params23tr <- c(1.9, 1.6, 2.1, 0.8, -0.02, 0.4, 0.1, 3.9, 0.6, 0.3, 15, 200, 220)
R1 <- matrix(c(1, 0, 0, 0, 0, 1), ncol=2)
R2 <- diag(1, ncol=3, nrow=3)
R3 <- matrix(c(0.5, 0.5), ncol=1)
R4 <- diag(1, ncol=2, nrow=2)
params21c <- c(1, 0.9, 1, 3)
params22c <- c(1, 0.1, -0.1, 1, 2, 0.2, 2, 0.8, 11, 12)
params33c <- c(1, 0.1, 0.1, 0.1, 1, 2, 0.2, 0.2, 0.2, 2, 3, 0.3, -0.3, 3, 0.5, 0.4)
params21cr <- c(1, 1, 1)
params22cr <- c(1, 2, 0.8, 1, 2, 0.7, 11, 12)
params32cr <- c(1, 2, 0.3, -0.3, 1, 2, 0.6)
params22gs <- c(1, 0.1, 0.1, 1, 2, 0.2, 0.2, 2, 0.3, 10)
params23gsr <- c(1, 2, 3, 0.5, 0.05, 1, 2, 3, 0.4, 0.4, 20, 30)
test_that("change_regime changes the right regime correctly", {
expect_equal(change_regime(2, c(1, 2), params23gsr, model="G-StMAR", restricted=TRUE, regime_params=c(7, 7, 70), regime=3), c(1, 2, 7, 0.5, 0.05, 1, 2, 7, 0.4, 0.4, 20, 70))
expect_equal(change_regime(2, c(1, 2), params23gsr, model="G-StMAR", restricted=TRUE, regime_params=c(7, 7), regime=1), c(7, 2, 3, 0.5, 0.05, 7, 2, 3, 0.4, 0.4, 20, 30))
expect_equal(change_regime(2, c(1, 1), params22gs, model="G-StMAR", regime_params=c(3, 0.3, 0.3, 3, 30), regime=2), c(1, 0.1, 0.1, 1, 3, 0.3, 0.3, 3, 0.3, 30))
expect_equal(change_regime(2, c(1, 1), params22gs, model="G-StMAR", regime_params=c(3, 0.3, 0.3, 3), regime=1), c(3, 0.3, 0.3, 3, 2, 0.2, 0.2, 2, 0.3, 10))
expect_equal(change_regime(1, 2, params12, regime_params=c(99, 98, 97), regime=1), c(99.0, 98.0, 97.0, 4.5, 0.7, 3.0, 0.8))
expect_equal(change_regime(2, 2, params22t, model="StMAR", regime_params=c(9, 8, 7, 6, 5), regime=2), c(1.4, 0.8, 0.05, 0.27, 9, 8, 7, 6, 0.7, 203, 5))
expect_equal(change_regime(2, 3, params23, regime_params=c(9, 8, 7, 6), regime=3), c(2.7, 0.8, -0.06, 0.3, 3.5, 0.8, -0.07, 2.6, 9, 8, 7, 6, 0.6, 0.25))
expect_equal(change_regime(1, 2, params12tr, model="StMAR", restricted=TRUE, regime_params=c(9, 8, 7), regime=2), c(0.8, 9, 0.9, 0.4, 8, 0.9, 4, 7))
expect_equal(change_regime(2, 3, params23r, restricted=TRUE, regime_params=c(11, 12), regime=2), c(1.7, 11, 2.1, 0.8, -0.05, 0.3, 12, 4.5, 0.7, 0.2))
expect_equal(change_regime(2, 3, params23tr, model="StMAR", restricted=TRUE, regime_params=c(99, 88, 77), regime=3), c(1.9, 1.6, 99, 0.8, -0.02, 0.4, 0.1, 88, 0.6, 0.3, 15, 200, 77))
expect_equal(change_regime(2, 1, params21c, model="StMAR", constraints=list(R3), regime_params=c(9, 8, 7, 6), regime=1), c(9, 8, 7, 6))
expect_equal(change_regime(2, 2, params22c, model="StMAR", constraints=list(R4, R3), regime_params=c(9, 8, 7, 6), regime=2), c(1, 0.1, -0.1, 1, 9, 8, 7, 0.8, 11, 6))
expect_equal(change_regime(3, 3, params33c, constraints=list(R2, R2, R1), regime_params=c(9, 8, 7, 6), regime=3), c(1, 0.1, 0.1, 0.1, 1, 2, 0.2, 0.2, 0.2, 2, 9, 8, 7, 6, 0.5, 0.4))
expect_equal(change_regime(2, 1, params21cr, restricted=TRUE, constraints=R3, regime_params=c(9, 8), regime=1), c(9, 1, 8))
expect_equal(change_regime(2, 2, params22cr, model="StMAR", restricted=TRUE, constraints=R3, regime_params=c(9, 8, 7), regime=1), c(9, 2, 0.8, 8, 2, 0.7, 7, 12))
expect_equal(change_regime(3, 2, params32cr, restricted=TRUE, constraints=R1, regime_params=c(9, 8), regime=2), c(1, 9, 0.3, -0.3, 1, 8, 0.6))
})
test_that("GA functions don't throw errors", {
test_length0 <- function(x, length_x)expect_equal(length(x), length_x)
test_length0(regime_distance(1:3, 2:4), 1)
test_length0(random_regime(p=4, mu_scale=1:2, sigma_scale=3, forcestat=TRUE), 4 + 2)
test_length0(random_arcoefs(p=5, forcestat=TRUE), 5)
test_length0(add_dfs(1, how_many=3), 4)
test_length0(random_ind_int(p=1, M=1, model="StMAR", mu_scale=1:2, sigma_scale=1), 4)
test_length0(smart_ind_int(p=1, M=2, params=params12, model="GMAR", mu_scale=1:2, sigma_scale=1, accuracy=1, which_random=2), 7)
}) |
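# rbase.median: intrinsic (geometric) median of a sample of manifold-valued data in a
# 'riemdata' object. A few iterations of the intrinsic mean provide the starting
# value; the median itself is computed by engine_median(), or engine_median_openmp()
# when parallel = TRUE and more than one core is available.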
rbase.median <- function(input, maxiter=496, eps=1e-6, parallel=FALSE){
  if (!inherits(input, "riemdata")){
    stop("* rbase.median : the input must be of 'riemdata' class. Use 'riemfactory' first to manage your data.")
  }
mfdname = tolower(input$name)
newdata = aux_stack3d(input)
if (is.matrix(newdata)){
output = list()
output$x = newdata
output$iteration = 0
return(output)
}
if (dim(newdata)[3]==1){
output = list()
output$x = matrix(newdata,nrow=nrow(newdata))
output$iteration = 0
return(output)
}
tmpinit = engine_mean(newdata, mfdname, 10, as.double(eps))
xinit = tmpinit$x
nCores = parallel::detectCores()
if (parallel==FALSE){
output = engine_median(newdata, mfdname, as.integer(maxiter), as.double(eps), xinit)
} else {
if ((nCores==1)||(is.na(nCores))){
output = engine_median(newdata, mfdname, as.integer(maxiter), as.double(eps), xinit)
} else {
output = engine_median_openmp(newdata, mfdname, as.integer(maxiter), as.double(eps), nCores, xinit)
}
}
return(output)
} |
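# simto: simulates one transition of a multistate process. The exit time from the
# current state is drawn with simexit(); the destination state is then sampled from
# the reachable states with probabilities proportional to their hazard rates at the
# exit time (scaled by pme).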
simto <- function(entry.ij, from.ij, mpl, eta.ij, x.i, max.time, pme){
exit.ij <- simexit(entry.ij, all.bhr = mpl[[from.ij]]$bhr, x.i = x.i,
eta.ij = eta.ij, max.time = max.time, pme = pme)$new.exit
hr.at.exit.ij <- rep(NA, length(eta.ij))
for(hi in mpl[[from.ij]]$all.to){
hr.at.exit.ij[hi] <- hr(bhr = mpl[[from.ij]]$bhr[[hi]],
t = exit.ij,
eta.ij = eta.ij[[hi]],
x.i = x.i) * pme[hi]
}
hr.at.exit.ij <- hr.at.exit.ij[!is.na(hr.at.exit.ij)]
if(length(hr.at.exit.ij) > 1.5){
probs <- hr.at.exit.ij/sum(hr.at.exit.ij)
to.ij <- sample(mpl[[from.ij]]$all.to, size = 1, prob = probs)
}else{
to.ij <- as.numeric(mpl[[from.ij]]$all.to)
}
return(list(entry.ij = entry.ij,
exit.ij = exit.ij,
from.ij = from.ij,
to.ij = to.ij))} |
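# tidy_chisquare: simulates .num_sims series of .n chi-square draws with the given
# degrees of freedom and non-centrality, and returns a tibble containing the draws,
# an empirical density, and the distribution and quantile functions evaluated on
# regular grids, with the call parameters attached as attributes.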
tidy_chisquare <- function(.n = 50, .df = 1, .ncp = 1, .num_sims = 1){
n <- as.integer(.n)
num_sims <- as.integer(.num_sims)
df <- as.numeric(.df)
ncp <- as.numeric(.ncp)
if(!is.integer(n) | n < 0){
rlang::abort(
"The parameters '.n' must be of class integer. Please pass a whole
number like 50 or 100. It must be greater than 0."
)
}
if(!is.integer(num_sims) | num_sims < 0){
rlang::abort(
"The parameter `.num_sims' must be of class integer. Please pass a
whole number like 50 or 100. It must be greater than 0."
)
}
if(!is.numeric(df) | !is.numeric(ncp)){
rlang::abort(
"The parameters of .df and .ncp must be of class numeric."
)
}
if(df < 0 | ncp < 0){
rlang::abort("The parameters of .df and .ncp must be greater than or equal to 0.")
}
x <- seq(1, num_sims, 1)
ps <- seq(-n, n-1, 2)
qs <- seq(0, 1, (1/(n-1)))
df <- dplyr::tibble(sim_number = as.factor(x)) %>%
dplyr::group_by(sim_number) %>%
dplyr::mutate(x = list(1:n)) %>%
dplyr::mutate(y = list(stats::rchisq(n = n, df = df, ncp = ncp))) %>%
dplyr::mutate(d = list(density(unlist(y), n = n)[c("x","y")] %>%
purrr::set_names("dx","dy") %>%
dplyr::as_tibble())) %>%
dplyr::mutate(p = list(stats::pchisq(ps, df = df, ncp = ncp))) %>%
dplyr::mutate(q = list(stats::qchisq(qs, df = df, ncp = ncp))) %>%
tidyr::unnest(cols = c(x, y, d, p, q)) %>%
dplyr::ungroup()
attr(df, ".df") <- .df
attr(df, ".ncp") <- .ncp
attr(df, ".n") <- .n
attr(df, ".num_sims") <- .num_sims
attr(df, "tibble_type") <- "tidy_chisquare"
attr(df, "ps") <- ps
attr(df, "qs") <- qs
return(df)
} |
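# tar_traceback: reads the stored workspace of a target and returns its saved
# traceback, dropping everything up to the internal build_eval frame and truncating
# each entry to at most `characters` characters. The envir/packages/source arguments
# are deprecated and only trigger a warning.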
tar_traceback <- function(
name,
envir = NULL,
packages = NULL,
source = NULL,
characters = getOption("width"),
store = targets::tar_config_get("store")
) {
tar_assert_scalar(characters, "characters must have length 1.")
tar_assert_dbl(characters, "characters must be numeric.")
tar_assert_positive(characters, "characters must be positive.")
if (!is.null(envir) || !is.null(packages) || !is.null(source)) {
tar_warn_deprecate(
"The envir, packages, and source arguments of tar_traceback() ",
"are deprectaed in targets > 0.3.1 (2021-03-28)."
)
}
name <- tar_deparse_language(substitute(name))
tar_assert_chr(name)
tar_assert_scalar(name)
workspace <- workspace_read(name = name, path_store = store)
out <- workspace$target$metrics$traceback
if (is.null(out)) {
return(character(0))
}
min <- max(which(grepl("^build_eval_fce17be7", out))) %||% 1 %||NA% 1
if (is.finite(min) && length(min) == 1L) {
out <- out[seq(min + 1, length(out))]
}
characters <- min(characters, max(nchar(out)))
substr(out, 0, characters)
} |
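# Tests for the y-coordinate assignment of timeline events: set_y_values2() wraps
# set_y_values() after normalising the column names and row order of the event data,
# and the tests compare the optimized (optimize_y = TRUE) and plain layouts.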
set_y_values2 <- function(d, optimize_y){
set_y_values(
set_order(fix_columns(d, col.event = "event",
col.start = "start",
col.end = "end",
col.group = "group",
col.color = "color",
col.fontcolor = "fontcolor",
col.tooltip = "tooltip")),
optimize_y
)
}
test_that("1 group -> do is sophisticated", {
dat <- data.frame(
event = 1:4, start = c("2019-01-01", "2019-01-10"),
end = c("2019-01-01", "2019-01-10"),
subplot = 1,
stringsAsFactors = FALSE
)
expect_equal(set_y_values2(dat, TRUE)$y, rep(2:1, 2))
expect_equal(set_y_values2(dat, FALSE)$y, rev(as.integer(factor(dat$event))))
})
test_that("Events begin top left with first event", {
d = read.csv(stringsAsFactors = FALSE,text = "event,start,duration,group
compile datasets,0,2,descriptive analysis
baseline data,1,2,descriptive analysis
areas,1,1,visualisation
routes,1.5,1,visualisation
route networks,2,2,visualisation")
start_date = as.Date("2018-05-01")
d$start = start_date + d$start * 7
d$end = d$start + d$duration * 7
d$target_y <- c(5,4,2,1,2)
actual <- set_y_values2(d, TRUE)[,c("event", "y")]
expected <- d[,c("event", "target_y")]
result <- merge(actual,expected)
expect_equal(result$y, result$target_y)
d$target_y <- c(6,5,3,2,1)
actual <- set_y_values2(d, FALSE)[,c("event", "y")]
expected <- d[,c("event", "target_y")]
result <- merge(actual,expected)
expect_equal(result$y, result$target_y)
})
test_that("Subsequent Events are on same y level when optimize_y = TRUE and on different otherwise", {
d = read.csv(stringsAsFactors = FALSE,text = "event,start,end
compile datasets,2020-01-01,2020-02-01
route networks,2020-02-01,2020-02-05")
d$target_y <- c(1,1)
expect_equal(set_y_values2(d, TRUE)$y, d$target_y)
expect_equal(set_y_values2(d, FALSE)$y, c(2,1))
})
test_that("Events start from top not from bottom of chart", {
d <- data.frame(
event = 1:3, start = c("2019-01-01", "2019-01-09", "2019-01-11"),
end = c("2019-01-10", "2019-01-12", "2019-01-14"), subplot = 1, stringsAsFactors = F)
expect_equal(set_y_values2(d, TRUE)$y, c(2,1,2))
})
test_that("optimize_y starts on top", {
data <- read.csv(text="event,start,end
Phase 1,2020-12-15,2020-12-24
Phase 2,2020-12-23,2020-12-29
Phase 3,2020-12-28,2021-01-06
Phase 4,2021-01-06,2021-02-02")
with_optimize <- set_y_values2(data, optimize_y = T)
without_optimize <- set_y_values2(data, optimize_y = F)
expect_equal(without_optimize$y, c(4,3,2,1))
expect_equal(with_optimize$y, c(2,1,2,2))
})
test_that("event is inside another event", {
df <- read.csv(text = "event,start,end,
event2,2020-12-16,2020-12-20,
event3,2020-12-18,2020-12-19")
expect_equal(set_y_values2(df, TRUE)$y, c(2,1))
})
test_that("subsequent can be optimized", {
df <- read.csv(text = "event,start,end,
event2,2020-12-16,2020-12-20,
event3,2020-12-20,2020-12-22")
expect_equal(set_y_values2(df, TRUE)$y, c(1,1))
}) |
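# .FormatBstsDataAndOptions: validates the response and predictors for the requested
# bsts model family (gaussian, student, logit or poisson), derives trials or exposure
# counts from two-column responses where needed, adjusts the model options (e.g. no
# ODA with non-Gaussian families), and returns the assembled data list and options.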
.FormatBstsDataAndOptions <- function(family, response, predictors,
model.options, timestamp.info) {
if (family != "gaussian" && model.options$bma.method == "ODA") {
warning("Orthoganal data augmentation is not available with a",
"non-Gaussian model family. Switching to SSVS.")
model.options$bma.method <- "SSVS"
}
if (family %in% c("gaussian", "student")) {
if (is.matrix(response) && ncol(response) != 1) {
stop("Matrix responses only work for logit and Poisson models. ",
"Did you mean to specify a different model family?")
}
data.list <- list(response = as.numeric(response),
predictors = predictors,
response.is.observed = !is.na(response))
} else if (family == "logit") {
if (!is.null(dim(response)) && length(dim(response)) > 1) {
stopifnot(length(dim(response)) == 2, ncol(response) == 2)
trials <- response[, 1] + response[, 2]
response <- response[, 1]
} else {
response <- response > 0
trials <- rep(1, length(response))
}
stopifnot(all(trials > 0, na.rm = TRUE),
all(response >= 0, na.rm = TRUE),
all(trials >= response, na.rm = TRUE))
stopifnot(all(abs(response - as.integer(response)) < 1e-8, na.rm = TRUE))
stopifnot(all(abs(trials - as.integer(trials)) < 1e-8, na.rm = TRUE))
data.list <- list(response = as.numeric(response),
trials = trials,
predictors = predictors,
response.is.observed = !is.na(response))
model.options$clt.threshold <- as.integer(3)
} else if (family == "poisson") {
if (!is.null(dim(response)) && length(dim(response)) > 1) {
stopifnot(length(dim(response)) == 2, ncol(response) == 2)
exposure <- response[, 2]
response <- response[, 1]
} else {
exposure <- rep(1, length(response))
}
stopifnot(is.numeric(response))
stopifnot(all(exposure > 0, na.rm = TRUE),
all(response >= 0, na.rm = TRUE))
stopifnot(all(abs(response - as.integer(response)) < 1e-8, na.rm = TRUE))
data.list <- list(response = as.numeric(response),
exposure = exposure,
predictors = predictors,
response.is.observed = !is.na(response))
} else {
stop("Unrecognized value for 'family' argument in bsts.")
}
data.list$timestamp.info <- timestamp.info
return(list(data.list = data.list, model.options = model.options))
} |
library(checkargs)
context("isStrictlyNegativeIntegerVectorOrNull")
test_that("isStrictlyNegativeIntegerVectorOrNull works for all arguments", {
expect_identical(isStrictlyNegativeIntegerVectorOrNull(NULL, stopIfNot = FALSE, message = NULL, argumentName = NULL), TRUE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(TRUE, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(FALSE, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(NA, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(0, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(-1, stopIfNot = FALSE, message = NULL, argumentName = NULL), TRUE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(-0.1, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(0.1, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(1, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(NaN, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(-Inf, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(Inf, stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull("", stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull("X", stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(TRUE, FALSE), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(FALSE, TRUE), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(NA, NA), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(0, 0), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(-1, -2), stopIfNot = FALSE, message = NULL, argumentName = NULL), TRUE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(-0.1, -0.2), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(0.1, 0.2), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(1, 2), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(NaN, NaN), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(-Inf, -Inf), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(Inf, Inf), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c("", "X"), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c("X", "Y"), stopIfNot = FALSE, message = NULL, argumentName = NULL), FALSE)
expect_identical(isStrictlyNegativeIntegerVectorOrNull(NULL, stopIfNot = TRUE, message = NULL, argumentName = NULL), TRUE)
expect_error(isStrictlyNegativeIntegerVectorOrNull(TRUE, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(FALSE, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(NA, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(0, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_identical(isStrictlyNegativeIntegerVectorOrNull(-1, stopIfNot = TRUE, message = NULL, argumentName = NULL), TRUE)
expect_error(isStrictlyNegativeIntegerVectorOrNull(-0.1, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(0.1, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(1, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(NaN, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(-Inf, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(Inf, stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull("", stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull("X", stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(TRUE, FALSE), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(FALSE, TRUE), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(NA, NA), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(0, 0), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_identical(isStrictlyNegativeIntegerVectorOrNull(c(-1, -2), stopIfNot = TRUE, message = NULL, argumentName = NULL), TRUE)
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(-0.1, -0.2), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(0.1, 0.2), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(1, 2), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(NaN, NaN), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(-Inf, -Inf), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c(Inf, Inf), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c("", "X"), stopIfNot = TRUE, message = NULL, argumentName = NULL))
expect_error(isStrictlyNegativeIntegerVectorOrNull(c("X", "Y"), stopIfNot = TRUE, message = NULL, argumentName = NULL))
}) |
NULL
setGeneric("mpm_has_prop",
function(object) {
standardGeneric("mpm_has_prop")
}
)
setMethod("mpm_has_prop", signature = "CompadreMat",
function(object) {
"prop" %in% object@matrixClass$MatrixClassOrganized
}
)
setMethod("mpm_has_prop", signature = "CompadreDB",
function(object) {
vapply(object@data$mat,
function(m) "prop" %in% m@matrixClass$MatrixClassOrganized,
logical(1))
}
)
setGeneric("mpm_has_active",
function(object) {
standardGeneric("mpm_has_active")
}
)
setMethod("mpm_has_active", signature = "CompadreMat",
function(object) {
"active" %in% object@matrixClass$MatrixClassOrganized
}
)
setMethod("mpm_has_active", signature = "CompadreDB",
function(object) {
vapply(object@data$mat,
function(m) "active" %in% m@matrixClass$MatrixClassOrganized,
logical(1))
}
)
setGeneric("mpm_has_dorm",
function(object) {
standardGeneric("mpm_has_dorm")
}
)
setMethod("mpm_has_dorm", signature = "CompadreMat",
function(object) {
"dorm" %in% object@matrixClass$MatrixClassOrganized
}
)
setMethod("mpm_has_dorm", signature = "CompadreDB",
function(object) {
vapply(object@data$mat,
function(m) "dorm" %in% m@matrixClass$MatrixClassOrganized,
logical(1))
}
)
setGeneric("mpm_first_active",
function(object) {
standardGeneric("mpm_first_active")
}
)
setMethod("mpm_first_active", signature = "CompadreMat",
function(object) {
mclass <- object@matrixClass$MatrixClassOrganized
ifelse(!"active" %in% mclass,
NA_integer_,
min(which(mclass == "active")))
}
)
setMethod("mpm_first_active", signature = "CompadreDB",
function(object) {
vapply(object@data$mat,
function(m) {
mclass <- m@matrixClass$MatrixClassOrganized
ifelse(!"active" %in% mclass,
NA_integer_,
min(which(mclass == "active")))
},
integer(1))
}
) |
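# Hedged illustration (added, not part of the original source): with the
# example database shipped with the Rcompadre package loaded, each accessor
# returns one value per matrix. The object name 'Compadre' is assumed here,
# so the calls are left commented out.
# library(Rcompadre)
# data(Compadre)
# table(mpm_has_prop(Compadre))
# head(mpm_first_active(Compadre))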
singular <- function() {
structure("", class = "singular")
}
rep.singular <- function(x, ...) {
structure(NextMethod(), class = "singular")
}
print.singular <- function(x, ...) cat("<singular>\n")
as.data.frame.singular <- function(x, ...) {
df <- list(x)
attr(df, "row.names") <- .set_row_names(length(x))
class(df) <- "data.frame"
df
}
vector_type.singular <- function(x) "singular"
scale_singular <- function(vis, property, name = property, label = name,
points = TRUE, domain = NULL, override = NULL) {
vis <- scale_nominal(vis, domain = "", property = property, name = name,
label = "", points = points, override = override)
vis <- add_axis(vis, property, tick_size_major = 0)
vis
} |
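# Small illustrative check (added) of the "singular" vector type defined above;
# scale_singular() itself relies on the surrounding ggvis-style helpers
# (scale_nominal, add_axis) and is not exercised here.
s <- rep(singular(), 3)
print(s)                 # <singular>
vector_type.singular(s)  # "singular"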
"slp75_81"
|
"aovFbyrow" <-
function(x=matrix(rnorm(1000),ncol=20), cl = factor(rep(1:3, c(7,9,4)))){
y <- t(x)
qr.obj <- qr(model.matrix(~cl))
qty.obj <- qr.qty(qr.obj,y)
tab <- table(factor(cl))
dfb <- length(tab)-1
dfw <- sum(tab)-dfb-1
ms.between <- apply(qty.obj[2:(dfb+1), , drop=FALSE]^2, 2, sum)/dfb
ms.within <- apply(qty.obj[-(1:(dfb+1)), , drop=FALSE]^2, 2, sum)/dfw
  Fstat <- ms.between/ms.within
  Fstat
} |
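# Illustrative call (added): row-wise F statistics for a 50 x 20 matrix with
# the same three group sizes as the function's defaults.
set.seed(42)
mat <- matrix(rnorm(50 * 20), ncol = 20)
Fvals <- aovFbyrow(mat, cl = factor(rep(1:3, c(7, 9, 4))))
head(Fvals)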
'fn.isPD' <- function(A)
{
as.logical(all(eigen(A)$values>0))
}
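# Quick checks (added): the identity matrix is positive definite, while a
# rank-deficient matrix of ones is not (its eigenvalues are 2 and 0).
fn.isPD(diag(3))          # TRUE
fn.isPD(matrix(1, 2, 2))  # FALSE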
|
setValidity("named.list",function(object){
if(all(is.na(object@names))) "list is unnamed"
  else if(length(object@.Data)!=length(object@names[nzchar(object@names)])){
znames <- which(!nzchar(object@names))
paste(
if(length(znames) == 1) "element" else "elements",
paste(znames,collapse=", "),
if(length(znames) == 1) "is" else "are",
"unnamed"
)
}
else if(length(unique(object@names)) != length(object@names)) paste(
"list has duplicate names:",
paste(dQuote(object@names[duplicated(object@names)]),collapse=", ")
)
else TRUE
})
setValidity("data.set",function(object){
isItemVector <- sapply(object,is,"item.vector")
if(!all(isItemVector)) {
wrong.els <- object[!isItemVector]
wrong.classes <- sapply(wrong.els,class)
wrong.names <- object@names[!isItemVector]
paste(
"object has elements of wrong class:",
paste(
paste("class(",wrong.names,") = ",wrong.classes,sep=""),
collapse=", "
)
)
}
  else if(any(length(object@row_names) != sapply(object,length))){
    el.lengths <- sapply(object,length)
    wrong.length <- el.lengths != length(object@row_names)
    wrong.names <- object@names[wrong.length]
    paste(
      if(sum(wrong.length) > 1) "elements have" else "element has",
      "wrong length: ",
      paste(
        paste("length(",wrong.names,") = ",el.lengths[wrong.length],sep=""),
        collapse=", "
      ),
      "where",
      length(object@row_names),
      "is required"
    )
  }
else TRUE
})
setMethod("initialize","named.list",function(.Object,...){
args <- list(...)
if(is.list(args[[1]])) args <- unclass(args[[1]])
  .Object@.Data <- unname(args)
.Object@names <- as.character(names(args))
if(validObject(.Object)) .Object
})
setMethod("initialize","item.list",function(.Object,...){
args <- list(...)
if(is.list(args[[1]])) args <- unclass(args[[1]])
  .Object@.Data <- unname(lapply(args,as.item))
.Object@names <- as.character(names(args))
if(validObject(.Object)) .Object
})
setMethod("show","named.list",function(object)
print.default(unclass(object))
)
setLength <- function(x,n){
tmp <- unname(x)
length(x) <- n
x[] <- tmp
attributes(x) <- attributes(tmp)
x
}
setMethod("initialize","data.set",function(.Object,...,row.names=NULL,document=character()){
args <- list(...)
if(is.list(args[[1]])) args <- unclass(args[[1]])
nr <- max(sapply(args,length))
args <- lapply(args,setLength,n=nr)
args <- lapply(args,as.item)
  .Object@.Data <- unname(args)
.Object@names <- as.character(names(args))
if (is.null(row.names))
row.names <- seq_len(nr)
else {
if (is.object(row.names) || !is.integer(row.names))
row.names <- as.character(row.names)
if (any(is.na(row.names)))
stop("row names contain missing values")
if (any(duplicated(row.names)))
stop("duplicate row.names: ", paste(unique(row.names[duplicated(row.names)]),
collapse = ", "))
}
.Object@row_names <- row.names
.Object@document <- document
if(validObject(.Object)) .Object
})
setAs("data.set","named.list",function(from,to){
  new(to,structure(from@.Data,names=from@names))
})
setMethod("dim","data.set",function(x)
c( length(x@row_names),
       length(x@.Data)
)
)
setMethod("row.names","data.set",function(x){
x@row_names
})
setReplaceMethod("row.names","data.set",function(x,value){
  nr <- length(x@.Data[[1]])
if(is.null(value)){
value <- seq_len(nr)
}
else if(length(value) != nr)
stop("invalid 'row.names' given for data set")
x@row_names <- value
x
})
setMethod("dimnames","data.set",function(x)
list(x@row_names,x@names))
setReplaceMethod("dimnames","data.set",function(x,value) {
d <- dim(x)
if (!is.list(value) || length(value) != 2L)
stop("invalid 'dimnames' given for data set")
value[[1L]] <- as.character(value[[1L]])
value[[2L]] <- as.character(value[[2L]])
if (d[[1L]] != length(value[[1L]]) || d[[2L]] != length(value[[2L]]))
stop("invalid 'dimnames' given for data set")
row.names(x) <- value[[1L]]
names(x) <- value[[2L]]
x
})
setMethod("[",signature(x="data.set",i="atomic",j="atomic",drop="ANY"),
function(x,i,j,...,drop=FALSE){
    frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
frame <- frame[i,j,drop=drop]
if(is.data.frame(frame))
new("data.set",
unclass(frame),
document=x@document
)
else
frame
})
setMethod("[",signature(x="data.set",i="atomic",j="missing",drop="ANY"),
function(x,i,j,...,drop=FALSE){
Narg <- nargs()-!missing(drop)
    frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
if(Narg > 2){
frame <- frame[i,,drop=drop]
if(!is.data.frame(frame))
frame
else
new("data.set",
unclass(frame),
document=x@document
)
}
else {
frame <- frame[i]
if(!is.data.frame(frame))
frame
else
new("data.set",
unclass(frame),
document=x@document
)
}
})
setMethod("[",signature(x="data.set",i="missing",j="atomic",drop="ANY"),
function(x,i,j,...,drop=FALSE){
    frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
frame <- frame[,j,drop=drop]
if(is.data.frame(frame))
new("data.set",
unclass(frame),
document=x@document
)
else
frame
})
setMethod("[",signature(x="data.set",i="missing",j="missing",drop="ANY"),
function(x,i,j,...,drop=FALSE){
    frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
frame <- frame[,,drop=drop]
if(is.data.frame(frame))
new("data.set",
unclass(frame),
document=x@document
)
else
frame
})
setReplaceMethod("[",signature(x="data.set",i="ANY",j="ANY",value="ANY"),
function(x,i,j,value){
    frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
frame[i,j] <- value
new("data.set",
unclass(frame),
document=x@document
)
})
"[[<-.data.set" <- function(x,...,value){
  frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
frame[[...]] <- value
new("data.set",
unclass(frame),
document=x@document
)
}
as.list.data.set <- function(x,...)structure(x@.Data,names=x@names)
as.data.frame.data.set <- function(x, row.names = NULL, optional = FALSE, ...){
  as.data.frame(as.list(x),
        row.names=if(length(row.names)) row.names
        else x@row_names,
        optional=optional)
}
data.set <- function(..., row.names = NULL, check.rows = FALSE, check.names = TRUE,
stringsAsFactors = FALSE,
document = NULL){
args <- list(...)
if(!length(names(args))){
subst <- substitute(list(...))
names(args) <- as.character(subst[-1])
}
argn <- names(args)
args <- lapply(seq_along(args),function(i){
x <- args[[i]]
n <- names(args)[[i]]
if(is(x,"item.vector"))
structure(list(x),class="data.frame",row.names=seq_len(length(x)),names=n)
else if(is(x,"data.set"))
structure(as.list(x),class="data.frame",row.names=x@row_names)
else x
})
names(args) <- argn
frame <- do.call(data.frame,
c(args,
row.names=row.names,
check.rows=check.rows,
check.names=check.names,
stringsAsFactors=stringsAsFactors
))
new("data.set",
frame,
document=as.character(document)
)
}
setMethod("annotation","data.set",function(x){
d <- lapply(x,annotation)
if(length(d))
structure(d,names=x@names,class="annotation.list")
else NULL
})
print.data.set <- function(x,max.obs=Inf,width=Inf,...){
  frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
print_frame_internal(frame,max.obs=max.obs,width=width,...)
}
print_frame_internal <- function(x,max.obs=Inf,width=Inf,...){
if(is.finite(max.obs)){
if(nrow(x)<=max.obs)
{
max.obs <- Inf
res <- x
}
else
res <- x[seq_len(max.obs),,drop=FALSE]
}
else
res <- x
varn <- names(res)
rown <- rownames(res)
res <- lapply(res,format)
res <- mapply(c,varn,res)
res <- apply(res,2,format,justify="right")
res <- apply(cbind(c("",rown),res),2,format,justify="right")
if(is.finite(width) && ncol(res)){
ww <- cumsum(nchar(res[1,])+1)-1
if(any(ww > width)){
keep <- which(ww < width - 3)
res <- cbind(res[,keep],"...")
}
}
if(is.finite(max.obs) && nrow(res)){
mkdots <- function(n) paste(rep(".",n),collapse="")
ww <- nchar(res[1,])
res <- rbind(res,sapply(ww,mkdots))
res <- apply(res,1,paste,collapse=" ")
res <- c(res,paste("(",length(res)-2," of ",nrow(x)," observations shown)",sep=""))
}
else
res <- apply(res,1,paste,collapse=" ")
writeLines(res)
}
setMethod("show","data.set",function(object){
cat("\nData set with",nrow(object),"observations and",ncol(object),"variables\n\n")
print.data.set(object,max.obs=getOption("show.max.obs"),width=getOption("width"))
})
setMethod("print","data.set",function(x,...)print.data.set(x,...))
is.data.set <- function(x) is(x,"data.set")
str.data.set <- function (object, ...)
{
cat("Data set ","with ", nrow(object), " obs. of ", (p <- ncol(object)),
" variable", if (p != 1)
"s", if (p > 0)
":", "\n", sep = "")
object <- structure(as.list(object),class="data.frame")
if (length(l <- list(...)) && any("give.length" == names(l)))
invisible(NextMethod("str", ...))
else invisible(NextMethod("str", give.length = FALSE, ...))
}
subset.data.set <- function (x, subset, select, drop = FALSE, ...)
{
r <- if (missing(subset))
rep_len(TRUE, nrow(x))
else {
e <- substitute(subset)
r <- eval(e, x, parent.frame())
if (!is.logical(r))
stop("'subset' must be logical")
r & !is.na(r)
}
vars <- if (missing(select))
rep_len(TRUE, ncol(x))
else {
nl <- as.list(seq_along(x))
names(nl) <- names(x)
eval(substitute(select), nl, parent.frame())
}
x[r, vars, drop = drop]
}
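# Hedged sketch (added): constructing and subsetting a data.set. as.item() and
# the item classes are defined elsewhere in the package, so this is indicative
# only and therefore left commented out.
# ds <- data.set(x = rnorm(10), g = rep(1:2, 5))
# subset(ds, g == 1, select = x)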
setMethod("within","data.set",function (data, expr, ...)
{
parent <- parent.frame()
encl <- new.env(parent=parent)
  frame <- structure(data@.Data,row.names=data@row_names,names=data@names,class="data.frame")
nr <- nrow(frame)
rn <- row.names(frame)
assign("N_",nr,envir=encl)
e <- evalq(environment(), frame, encl)
ret <- eval(substitute(expr), e)
l <- rev(as.list(e))
length1 <- sapply(l,length) == 1
if(any(length1)){
ii <- which(length1)
for(i in ii){
l[[i]] <- rep(l[[i]],nr)
}
}
wrong.length <- sapply(l,length) != nr
if(any(wrong.length)){
warning("Variables ",paste(sQuote(names(l)[wrong.length]),collapse=","),
" have wrong length, removing them.")
l[wrong.length] <- NULL
}
coercable <- sapply(l,is.atomic) | sapply(l,is.factor)
items <- sapply(l,is,"item")
if(any(!items & coercable))
l[!items & coercable] <- lapply(l[!items & coercable],as.item)
if(any(!items & !coercable)){
warning("Cannot change variables ",paste(sQuote(names(l)[!items & !coercable]),collapse=","),
" into items, removing them.")
l[!items & !coercable] <- NULL
}
frame[names(l)] <- l
use <- names(frame) %in% names(l)
frame <- frame[use]
row.names(frame) <- rn
new("data.set",
frame,
document=data@document)
})
cbind.data.set <- function (..., deparse.level = 1)
data.set(..., check.names = FALSE)
setMethod("description","data.set",function(x){
res <- lapply(x,description)
structure(res,class="descriptions")
})
print.descriptions <- function(x,quote=FALSE,...){
Write.descriptions(x,file=stdout())
}
Write.descriptions <- function(x,file=stdout(),...){
x <- sapply(x,function(des){
if(length(des)) sQuote(des)
else " (none) "
})
out <- c(
"",
paste("",format(names(x),justify="left"),format(x,justify="left")),
""
)
writeLines(out,con=file)
}
as.data.frame.descriptions <- function(x,...){
data.frame(variable=names(x),
description=as.character(x))
}
setMethod("unique","data.set",function(x, incomparables = FALSE, ...){
  frame <- structure(x@.Data,row.names=x@row_names,names=x@names,class="data.frame")
new("data.set",
unique(frame,incomparables=incomparables,...),
document=x@document
)
})
fapply.data.set <- function(formula,data,...)
fapply.default(formula,data=as.data.frame(data,optional=TRUE),...)
setMethod("as.data.set","list",function(x,row.names=NULL,...){
class(x) <- "data.frame"
if(length(row.names)){
if(length(row.names)!=nrow(x)) stop("row.names argument has wrong length")
attr(x,"row.names") <- row.names
}
else
attr(x,"row.names") <- seq_len(nrow(x))
new("data.set",x)
})
setMethod("merge",signature(x="data.set","data.set"),function(x,y,...){
x <- new("data.frame",as.list(x),row.names=x@row_names)
y <- new("data.frame",as.list(y),row.names=y@row_names)
z <- merge(x,y,...)
new("data.set",z)
})
setMethod("merge",signature(x="data.set","data.frame"),function(x,y,...){
x <- new("data.frame",as.list(x),row.names=x@row_names)
z <- merge(x,y,...)
new("data.set",z)
})
setMethod("merge",signature(x="data.frame","data.set"),function(x,y,...){
y <- new("data.frame",as.list(y),row.names=y@row_names)
z <- merge(x,y,...)
new("data.set",z)
})
setMethod("rbind2",signature(x="data.set",y="data.set"),function(x,y){
x <- asS4(new("data.frame",as.list(x),row.names=x@row_names),FALSE)
y <- asS4(new("data.frame",as.list(y),row.names=y@row_names),FALSE)
z <- rbind(x,y)
new("data.set",z)
})
setMethod("rbind2",signature(x="data.set",y="data.frame"),function(x,y){
x <- asS4(new("data.frame",as.list(x),row.names=x@row_names),FALSE)
z <- cbind(x,y)
new("data.set",z)
})
setMethod("cbind2",signature(x="data.set",y="data.set"),function(x,y){
x <- asS4(new("data.frame",as.list(x),row.names=x@row_names),FALSE)
y <- asS4(new("data.frame",as.list(y),row.names=y@row_names),FALSE)
z <- cbind(x,y)
new("data.set",z)
})
setMethod("cbind2",signature(x="data.frame",y="data.set"),function(x,y){
y <- asS4(new("data.frame",as.list(y),row.names=y@row_names),FALSE)
z <- cbind(x,y)
new("data.set",z)
})
setMethod("cbind2",signature(x="data.set",y="data.frame"),function(x,y){
x <- asS4(new("data.frame",as.list(x),row.names=x@row_names),FALSE)
z <- cbind(x,y)
new("data.set",z)
})
rbind.data.set <- function(...,deparse.level=1){
args <- list(...)
to.data.frame <- function(x){
if(inherits(x,"data.set"))
structure(
        x@.Data,
names=x@names,
row.names=x@row_names,
class="data.frame"
)
else as.data.frame(x)
}
args <- lapply(args,to.data.frame)
res <- do.call("rbind",c(args,list(deparse.level=deparse.level)))
new("data.set",res,row.names=row.names(res))
}
dsView <- function(x){
title <- paste("Data set:", deparse(substitute(x))[1])
  Data <- lapply(x@.Data,format,justify="left")
document <- x@document
row.names <- x@row_names
.names <- x@names
frame <- structure(Data,row.names=row.names,names=x@names,
class="data.frame")
for(n in names(frame)){
d <- description(x[[n]])
if(length(d))
attr(frame[[n]],"label") <- d
}
View.call <- call("View",x=frame,title=title)
eval(View.call,globalenv())
}
collect.data.set <- function(...,
names=NULL,inclusive=TRUE,fussy=FALSE,warn=TRUE,
sourcename=".origin"){
args <- list(...)
subst <- substitute(list(...))
if(length(names)) {
if(length(names)!=length(args)) stop("names argument has wrong length")
}
else {
if(length(names(args))) names <- names(args)
else {
names <- sapply(lapply(subst[-1],deparse),paste,collapse=" ")
}
}
all.vars <- lapply(args,names)
common.vars <- reduce(all.vars,intersect)
all.vars <- reduce(all.vars,union)
other.vars <- setdiff(all.vars,common.vars)
source <- rep(seq_along(args),sapply(args,nrow))
nrow.items <- sapply(args,nrow)
nrow.total <- sum(nrow.items)
ix <- split(seq_len(nrow.total),source)
res <- lapply(common.vars,function(var){
vecs <- lapply(args,function(x)x[[var]])
collOne(vecs,source=source,nrow.items=nrow.items,varname=var,fussy=fussy)
})
names(res) <- common.vars
if(inclusive){
res1 <- lapply(other.vars,function(var){
vecs <- lapply(args,function(x)x[[var]])
collOne(vecs,source=source,nrow.items=nrow.items,varname=var,fussy=fussy)
})
names(res1) <- other.vars
res <- c(res,res1)
}
res[[sourcename]] <- factor(source,labels=names)
as.data.set(res)
}
setMethod("summary","data.set",
function(object, maxsum = 7, digits = max(3, getOption("digits") -3), ...){
z <- lapply(as.list(object), summary, maxsum = maxsum, digits = 12,
...)
nv <- length(object)
nm <- names(object)
lw <- numeric(nv)
nr <- max(unlist(lapply(z, NROW)))
for (i in 1:nv) {
sms <- z[[i]]
if (is.matrix(sms)) {
cn <- paste(nm[i], gsub("^ +", "", colnames(sms)),
sep = ".")
tmp <- format(sms)
if (nrow(sms) < nr)
tmp <- rbind(tmp, matrix("", nr - nrow(sms),
ncol(sms)))
sms <- apply(tmp, 1, function(x) paste(x, collapse = " "))
wid <- sapply(tmp[1, ], nchar, type = "w")
blanks <- paste(character(max(wid)), collapse = " ")
pad0 <- floor((wid - nchar(cn, type = "w"))/2)
pad1 <- wid - nchar(cn, type = "w") - pad0
cn <- paste(substring(blanks, 1, pad0), cn, substring(blanks,
1, pad1), sep = "")
nm[i] <- paste(cn, collapse = " ")
z[[i]] <- sms
}
else {
lbs <- format(names(sms))
sms <- paste(lbs, ":", format(sms, digits = digits),
" ", sep = "")
lw[i] <- nchar(lbs[1], type = "w")
length(sms) <- nr
z[[i]] <- sms
}
}
z <- unlist(z, use.names = TRUE)
dim(z) <- c(nr, nv)
blanks <- paste(character(max(lw) + 2), collapse = " ")
pad <- floor(lw - nchar(nm, type = "w")/2)
nm <- paste(substring(blanks, 1, pad), nm, sep = "")
dimnames(z) <- list(rep.int("", nr), nm)
attr(z, "class") <- c("table")
z
})
as.list.item.list <- function(x,...)structure(x@.Data,names=x@names)
setMethod("head",signature(x="data.set"),
function(x,n=20,...){
y <- utils::head.matrix(x,n=n,...)
rownames(y) <- rownames(x)[1:n]
return(y)
})
setMethod("tail",signature(x="data.set"),
function(x,n=20,...){
y <- utils::tail.matrix(x,n=n,...)
rownames(y) <- rownames(x)[seq.int(to=nrow(x),length.out=n)]
return(y)
})
as.data.table.data.set <- function(x, ...){
  dataf <- as.data.frame(as.list(x),
                         row.names=x@row_names)
  as.data.table(dataf,...)
}
plot.MinED <- function(x, name, ...){
Dose_Level <- Lower_Efficacy <- Lower_Toxicity <- Posterior_Efficacy_Est <- NULL
Posterior_Toxicity_Est <- Sec.. <- Upper_Efficacy <- Upper_Toxicity <- NULL
X..Pts.response.to.eff <- X..Pts.response.to.tox <- X..Pts.treated <- NULL
  if (class(x)[1] != "MinED"){
    stop("'x' must be an object of class 'MinED'")
  }
else {
if (is.matrix(x)){
df <- data.frame(t(x[, -6]))
df$Dose_Level <- rownames(df)
rownames(df) <- c()
if (tolower(name) == "sel%"){
p <- ggplot(data = df, aes(x = Dose_Level, y = Sec..)) + geom_bar(stat = "identity") + xlab("Dose Level") + ylab("MinED Selection %")
}
    else if (tolower(name) == "#pts treated"){
p <- ggplot(data = df, aes(x = Dose_Level, y = X..Pts.treated)) + geom_bar(stat = "identity") + xlab("Dose Level") +
ylab("Number of Patients Treated")
}
    else if (tolower(name) == "#tox"){
p <- ggplot(data = df, aes(x = Dose_Level, y = X..Pts.response.to.tox)) + geom_bar(stat = "identity") + xlab("Dose Level") +
ylab("Number of Toxicities")
}
    else if (tolower(name) == "#eff"){
p <- ggplot(data = df, aes(x = Dose_Level, y = X..Pts.response.to.eff)) + geom_bar(stat = "identity") + xlab("Dose Level") +
ylab("Number of Efficacy Responses")
}
}
else if (is.list(x)){
df <- x[[3]]
df[, 1] <- factor(df[, 1], levels = 1:length(df[, 1]))
df <- df[rowSums(is.na(df)) == 0, ]
eff_plot <- ggplot() +
geom_errorbar(data = df, aes(x = Dose_Level, ymin = Lower_Efficacy, ymax = Upper_Efficacy), width = 0.2, size = 1, color = "blue") +
geom_point(data = df, aes(x = Dose_Level, y = Posterior_Efficacy_Est), size = 4, shape = 21, fill = "white") +
ylim(c(0, 1)) + geom_hline(yintercept = x[[2]][, 1], linetype="dashed", color = "red", size=1) +
ylab("Posterior Efficacy Est") + xlab("Dose Level") + scale_x_discrete(drop = F)
tox_plot <- ggplot() +
geom_errorbar(data = df, aes(x = Dose_Level, ymin = Lower_Toxicity, ymax = Upper_Toxicity), width = 0.2, size = 1, color = "blue") +
geom_point(data = df, aes(x = Dose_Level, y = Posterior_Toxicity_Est), size = 4, shape = 21, fill = "white") +
ylim(c(0, 1)) + geom_hline(yintercept = x[[2]][, 2], linetype = "dashed", color = "red", size = 1) +
ylab("Posterior Toxicity Est") + xlab("Dose Level") + scale_x_discrete(drop = F)
p <- grid.arrange(eff_plot, tox_plot, nrow = 1)
}
}
p
} |
auth0_app <- function(app_url, app_name, key, secret) {
function(app_url) {
httr::oauth_app(appname = app_name, key = key, secret = secret, redirect_uri = app_url)
}
}
auth0_api <- function(auth0_url, request, access) {
httr::oauth_endpoint(base_url = auth0_url, request = request,
authorize = "authorize", access = access)
}
has_auth_code <- function(params, state) {
is.null(params$error) && !is.null(params$code) && params$state == state
}
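# Sanity check (added, illustrative values only): TRUE when no error is
# reported, a code is present, and the state round-trips; FALSE otherwise.
has_auth_code(list(code = "abc", state = "xyz"), state = "xyz")  # TRUE
has_auth_code(list(error = "access_denied"), state = "xyz")      # FALSE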
auth0_server_verify <- function(session, app, api, state) {
u_search <- session[["clientData"]]$url_search
params <- shiny::parseQueryString(u_search)
if (has_auth_code(params, state)) {
cred <- httr::oauth2.0_access_token(api, app(redirect_uri), params$code)
token <- httr::oauth2.0_token(
app = app(redirect_uri), endpoint = api, cache = FALSE, credentials = cred,
user_params = list(grant_type = "authorization_code"))
userinfo_url <- sub("authorize", "userinfo", api$authorize)
resp <- httr::RETRY(
verb = "GET"
, url = userinfo_url
, httr::config(token = token)
, times = 5
)
assign("auth0_credentials", token$credentials, envir = session$userData)
assign("auth0_info", httr::content(resp, "parsed"), envir = session$userData)
}
}
auth0_state <- function(server) {
paste(sample(c(letters, LETTERS, 0:9), 10, replace = TRUE), collapse = "")
}
auth0_info <- function(config) {
if (missing(config)) config <- auth0_config()
if (!is.list(config) && is.character(config)) config <- auth0_config(config)
scope <- config$auth0_config$scope
state <- auth0_state()
conf <- config$auth0_config
app <- auth0_app(app_name = config$name, key = conf$credentials$key, secret = conf$credentials$secret)
api <- auth0_api(conf$api_url, conf$request, conf$access)
audience <- conf$audience
rurl <- config$remote_url
if (is.null(rurl)) rurl <- config$shiny_config$remote_url
list(scope = scope, state = state, app = app, api = api, audience=audience,
remote_url = rurl)
}
auth0_config <- function(config_file) {
if (missing(config_file)) config_file <- auth0_find_config_file()
config <- yaml::read_yaml(config_file, eval.expr = TRUE)
if (is.null(config$auth0_config)) {
stop("Missing 'auth0_config' tag in YAML file.")
}
config_names <- names(unlist(config$auth0_config))
required_names <- c("api_url", "credentials.key", "credentials.secret")
missing_args <- setdiff(required_names, config_names)
  if (length(missing_args) > 0) {
    s <- if (length(missing_args) > 1) "s" else ""
    msg <- sprintf("Missing '%s' tag%s in YAML file", paste(missing_args, collapse = "','"), s)
    stop(msg)
  }
scp <- config$auth0_config$scope
if (is.null(scp)) scp <- "openid profile"
defaults <- list(scope = scp, request = "oauth/token", access = "oauth/token")
for (nm in names(defaults)) {
if (!nm %in% config_names) {
config$auth0_config[[nm]] <- defaults[[nm]]
}
}
config
}
use_auth0 <- function(path = ".", file = "_auth0.yml", overwrite = FALSE) {
f <- paste0(normalizePath(path), "/", file)
if (file.exists(f) && !overwrite) {
stop("File exists and overwrite is FALSE.")
}
ks <- list(key = 'Sys.getenv("AUTH0_KEY")', secret = 'Sys.getenv("AUTH0_SECRET")')
api_url <- "paste0('https://', Sys.getenv('AUTH0_USER'), '.auth0.com')"
attr(ks[[1]], "tag") <- "!expr"
attr(ks[[2]], "tag") <- "!expr"
attr(api_url, "tag") <- "!expr"
yaml_list <- list(
name = "myApp",
remote_url = "",
auth0_config = list(api_url = api_url, credentials = ks))
yaml::write_yaml(yaml_list, f)
} |
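# Illustrative call (added): write the configuration template to a temporary
# directory so nothing in the working directory is touched.
use_auth0(path = tempdir(), file = "_auth0.yml", overwrite = TRUE)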
library(httpuv)
app <- list(
call = function(req) {
wsUrl = paste(sep='',
'"',
"ws://",
ifelse(is.null(req$HTTP_HOST), req$SERVER_NAME, req$HTTP_HOST),
'"')
list(
status = 200L,
headers = list(
'Content-Type' = 'text/html'
),
body = paste(
sep = "\r\n",
"<!DOCTYPE html>",
"<html>",
"<head>",
'<style type="text/css">',
'body { font-family: Helvetica; }',
'pre { margin: 0 }',
'</style>',
"<script>",
sprintf("var ws = new WebSocket(%s);", wsUrl),
"ws.onmessage = function(msg) {",
' var msgDiv = document.createElement("pre");',
        ' msgDiv.innerHTML = msg.data.replace(/&/g, "&amp;").replace(/\\</g, "&lt;");',
' document.getElementById("output").appendChild(msgDiv);',
"}",
"function sendInput() {",
" var input = document.getElementById('input');",
" ws.send(input.value);",
" input.value = '';",
"}",
"</script>",
"</head>",
"<body>",
'<h3>Send Message</h3>',
'<form action="" onsubmit="sendInput(); return false">',
'<input type="text" id="input"/>',
'<h3>Received</h3>',
'<div id="output"/>',
'</form>',
"</body>",
"</html>"
)
)
},
onWSOpen = function(ws) {
ws$onMessage(function(binary, message) {
ws$send(message)
})
}
)
browseURL("http://localhost:9454/")
runServer("0.0.0.0", 9454, app, 250) |
permuANOVA <- function(y,x,z, perm.type="unrestricted", reps=5000){
if(!missing(z)){
if(perm.type=="restricted"){
l <- summary(aov(y ~ x+z))[[1]]$F[1]
x = as.factor(x)
z = as.factor(z)
results1<-numeric(reps)
for (i in 1:reps) {
for (n in 1:nlevels(z)){
assign(paste("z", n, sep = ""), sample(y[z==levels(z)[n]]))
}
temp1 <- c(get0("z1"),get0("z2"), get0("z3"), get0("z4"),
get0("z5"), get0("z6"), get0("z7"), get0("z8"),
get0("z9"), get0("z10"))
results1[i] <- summary(aov(temp1 ~ x[order(z)]+z[order(z)]))[[1]]$F[1]
}
p.value1 <- (sum(results1 >= l)) / reps
k <- summary(aov(y ~ x+z))[[1]]$F[2]
results<- numeric(reps)
for (i in 1:reps) {
for (n in 1:nlevels(x)){
assign(paste("x", n, sep = ""), sample(y[x==levels(x)[n]]))
}
temp <- c(get0("x1"),get0("x2"), get0("x3"), get0("x4"),
get0("x5"), get0("x6"), get0("x7"), get0("x8"),
get0("x9"), get0("x10"))
results[i] <- summary(aov(temp ~ x[order(x)]+z[order(x)]))[[1]]$F[2]
}
p.value2 <- (sum(results >= k)) / reps
}
else{
if(perm.type=="unrestricted"){
h <- summary(aov(y ~ x*z))[[1]]$F[1]
l <- summary(aov(y ~ x*z))[[1]]$F[2]
results <- numeric(reps)
results1 <- numeric(reps)
for (i in 1:reps) {
temp <- sample(y)
results[i] <- summary(aov(temp ~ x*z))[[1]]$F[1]
results1[i] <- summary(aov(temp ~ x*z))[[1]]$F[2]
}
p.value1 <- (sum(results >= h)) / reps
p.value2 <- (sum(results1 >= l)) / reps}
      else {stop("perm.type must be 'restricted' or 'unrestricted'")}
}
j <- summary(aov(y ~ x*z))[[1]]$F[3]
results2 <- numeric(reps)
for (i in 1:reps) {
temp2 <- sample(y)
results2[i] <- summary(aov(temp2 ~ x*z))[[1]]$F[3]
}
p.value3 <- (sum(results2 >= j)) / reps
p.values <- c(p.value1, p.value2, p.value3)
as.data.frame(p.values, row.names=c("Variable_x", "Variable_z", "x:z"))
}
else{
z <- summary(aov(y ~ x))[[1]]$F[1]
results <- numeric(reps)
for (i in 1:reps) {
temp <- sample(y)
results[i] <- summary(aov(temp ~ x))[[1]]$F[1]
}
p.value4 <- sum(results >= z) / reps
p.value <- c(p.value4)
as.data.frame(x = p.value, row.names="Variable_x")
}
} |
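# Hedged example (added; small 'reps' to keep it fast): one-way and two-way
# permutation p-values on simulated, balanced data.
set.seed(1)
y <- rnorm(30)
x <- gl(3, 10)
z <- rep(gl(2, 5), 3)
permuANOVA(y, x, reps = 200)                                 # main effect of x
permuANOVA(y, x, z, perm.type = "unrestricted", reps = 200)  # x, z and x:z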
.uncertaintyOpticut1 <-
function (object, which=NULL,
type=c("asymp", "boot", "multi"), B=99, pb=FALSE, ...)
{
dots <- setdiff(names(object$call)[-1L],
c("X", "Y", "formula", "data", "strata", "dist", "comb", "sset", "cl"))
if (length(dots) > 0)
stop("Extra arguments detected in opticut call (...)")
type <- match.arg(type)
if (missing(which))
stop("specify which argument")
if (!length(which))
stop("which argument must have length 1")
linkinv <- .get_linkinv(object, ...)
scale <- object$scale
obj <- object$species[[which]]
n <- nobs(object)
k <- which.max(obj$logLR)
if (type == "asymp") {
if (length(B) > 1)
stop("Provide single integer for B.")
niter <- B
bm <- rownames(obj)[k]
mle <- getMLE(object, which, vcov=TRUE, ...)
if (!is.function(object$dist) &&
.opticut_dist(object$dist, make_dist=TRUE) == "rsf") {
cf <- MASS::mvrnorm(niter, mle$coef[-1L],
mle$vcov[-1L,-1L,drop=FALSE])
cf <- rbind(mle$coef[c(1L, 2L)], cbind(0, cf)[,c(1L, 2L)])
} else {
cf <- MASS::mvrnorm(niter, mle$coef, mle$vcov)
cf <- rbind(mle$coef[c(1L, 2L)], cf[,c(1L, 2L)])
}
cf0 <- linkinv(cf[,1L])
cf1 <- linkinv(cf[,1L] + cf[,2L])
I <- abs(tanh(cf[,2L] * scale))
out <- data.frame(best=bm, I=I, mu0=cf0, mu1=cf1)
} else {
if (length(B) == 1) {
niter <- B
if (!is.function(object$dist) &&
.opticut_dist(object$dist, make_dist=TRUE) %in% c("rsf", "rspf")) {
avail <- which(object$Y[,1]==0)
used <- which(object$Y[,1]==1)
nused <- length(used)
BB <- replicate(niter, c(sample(used, nused, replace=TRUE), avail))
} else {
BB <- replicate(niter, sample.int(n, replace=TRUE))
}
} else {
BB <- B
niter <- ncol(B)
}
nstr <- check_strata(object, BB)
if (!all(nstr))
stop("Not all strata represented in resampling")
}
if (type == "boot") {
bm <- rownames(obj)[k]
m1 <- .extractOpticut(object, which,
boot=FALSE,
internal=TRUE,
full_model=FALSE,
best=TRUE, ...)[[1L]]
cf <- if (pb) {
t(pbapply::pbapply(BB, 2, function(z, ...) {
.extractOpticut(object, which,
boot=z,
internal=TRUE,
full_model=FALSE,
best=TRUE, ...)[[1L]]$coef[c(1L, 2L)]
}))
} else {
t(apply(BB, 2, function(z, ...) {
.extractOpticut(object, which,
boot=z,
internal=TRUE,
full_model=FALSE,
best=TRUE, ...)[[1L]]$coef[c(1L, 2L)]
}))
}
cf <- rbind(m1$coef[c(1L, 2L)], cf)
cf0 <- linkinv(cf[,1L])
cf1 <- linkinv(cf[,1L] + cf[,2L])
I <- abs(tanh(cf[,2L] * scale))
out <- data.frame(best=bm, I=I, mu0=cf0, mu1=cf1)
}
if (type == "multi") {
bm <- character(niter + 1L)
bm[1L] <- rownames(obj)[k]
mat <- matrix(NA, niter + 1L, 3)
colnames(mat) <- c("I", "mu0", "mu1")
tmp <- as.numeric(obj[k, -1L])
names(tmp) <- colnames(obj)[-1L]
mat[1L, ] <- tmp[c("I", "mu0", "mu1")]
if (pb) {
pbar <- pbapply::startpb(0, niter)
on.exit(pbapply::closepb(pbar), add=TRUE)
}
for (j in seq_len(niter)) {
mod <- .extractOpticut(object, which,
boot=BB[,j],
internal=FALSE,
best=FALSE, ...)[[1L]]
k <- which.max(mod$logLR)
bm[j + 1L] <- rownames(mod)[k]
tmp <- as.numeric(mod[k, -1L])
names(tmp) <- colnames(mod)[-1L]
mat[j + 1L, ] <- tmp[c("I", "mu0", "mu1")]
if (pb)
pbapply::setpb(pbar, j)
}
out <- data.frame(best=bm, mat)
attr(out, "est") <- attr(obj, "est")
}
class(out) <- c("uncertainty1_opti", "uncertainty1", "data.frame")
attr(out, "B") <- niter
attr(out, "type") <- type
attr(out, "scale") <- scale
attr(out, "collapse") <- object$collapse
out
} |
cpgram <-
function(ts, taper = 0.1,
main = paste("Series: ", deparse1(substitute(ts))),
ci.col = "blue")
{
main
if(NCOL(ts) > 1)
stop("only implemented for univariate time series")
x <- as.vector(ts)
x <- x[!is.na(x)]
x <- spec.taper(scale(x, TRUE, FALSE), p=taper)
y <- Mod(fft(x))^2/length(x)
y[1L] <- 0
n <- length(x)
x <- (0:(n/2))*frequency(ts)/n
if(length(x)%%2==0) {
n <- length(x)-1
y <- y[1L:n]
x <- x[1L:n]
} else y <- y[seq_along(x)]
xm <- frequency(ts)/2
mp <- length(x)-1
crit <- 1.358/(sqrt(mp)+0.12+0.11/sqrt(mp))
oldpty <- par(pty ="s")
on.exit(par(oldpty))
plot(x, cumsum(y)/sum(y), type="s", xlim=c(0, xm),
ylim=c(0, 1), xaxs="i", yaxs="i", xlab="frequency",
ylab="")
lines(c(0, xm*(1-crit)), c(crit, 1), col = ci.col, lty = 2)
lines(c(xm*crit, xm), c(0, 1-crit), col = ci.col, lty = 2)
title(main = main)
invisible()
} |
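# Standard usage (added for illustration), in the spirit of the MASS examples:
# cumulative periodogram of a series together with the 95% confidence band.
cpgram(lh)
cpgram(ldeaths, taper = 0.2)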
accessSlotsByName <- function(x,i,j,drop=FALSE) {
names <- slotNames(x)
if (!(i %in% names))
stop(paste(i, "is not a valid slot specification"))
return(slot(x, i))
}
accessReplaceSlotsByName <- function(x,i,j,value) {
names <- slotNames(x)
if (!(i %in% names))
    stop(paste(i, "is not a valid slot specification"))
else
slot(x, i) <- value
validObject(x)
return(x)
}
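# Minimal sketch (added; the 'DemoSlots' class is hypothetical and only for
# illustration): the two helpers above make x["slot"] and x["slot"] <- value
# work for any S4 class they are registered on.
setClass("DemoSlots", representation(a = "numeric", b = "character"))
setMethod("[", "DemoSlots", accessSlotsByName)
setReplaceMethod("[", "DemoSlots", accessReplaceSlotsByName)
d <- new("DemoSlots", a = 1, b = "x")
d["a"]       # 1
d["a"] <- 2  # replaces the slot and re-validates the object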
setMethod(f="[", signature = CLASS_CLIST, def = accessSlotsByName)
setMethod(f="[", signature = CLASS_FIT, def = accessSlotsByName)
setMethod(f="[", signature = CLASS_RM, def = accessSlotsByName)
setReplaceMethod(f="[", signature = CLASS_CLIST, accessReplaceSlotsByName)
setReplaceMethod(f="[", signature = CLASS_FIT, accessReplaceSlotsByName)
setReplaceMethod(f="[", signature = CLASS_RM,accessReplaceSlotsByName)
setMethod('c', signature=c(CLASS_CLIST),
function(x, ..., recursive = FALSE) R.c(x, ...) )
resolve <- function(e1, e2, sign) {
d <- list()
if (e1@name==sign && (len.e1 <- length(e1@submodels)) < MAXSUB) {
for (i in 1:len.e1) d[[i]] <- e1@submodels[[i]]
} else {
len.e1 <- 1
d[[1]] <- if (is.character(e1))
stop("characters cannot be combined with 'RMmodels'")
else e1
}
d[[len.e1 + 1]] <-
(if (is.character(e2))
stop("characters cannot be combined with 'RMmodels'")
else e2)
  model <- do.call(sign, d)
return(model)
}
warn.resolve.txt <- "A large vector consists fully of NAs -- the model is probably not correct.\nNote that it is always better to define the covariance model in the first\nsummands and then the trend. It is also better to use 'R.c', 'RMcovariate'\nand 'R.const' explicitly if the model is more complicated"
resolveRight<- function(e1, e2, sign) {
d <- list()
if (e1@name==sign && (len.e1 <- length(e1@submodels)) < MAXSUB) {
for (i in 1:len.e1) d[[i]] <- e1@submodels[[i]]
} else {
len.e1 <- 1
d[[1]] <- (if (is.character(e1))
stop("characters cannot be combined with 'RMmodels'")
else e1)
}
d[[len.e1 + 1]] <-
if (is.list(e2)) do.call(R_C, e2)
else if (length(e2)==1) do.call(R_CONST, list(e2))
else if (!all(is.finite(e2))) {
if (all(is.na(e2)) && length(e2)>5) warning(warn.resolve.txt)
do.call(R_C, list(e2))
} else if (sign == RM_PLUS[1]) {
tmpList <- list(RM_COVARIATE)
tmpList[[COVARIATE_C_NAME]] <- e2
tmpList[[COVARIATE_X_NAME]] <- NULL
tmpList[[COVARIATE_ADDNA_NAME]] <- TRUE
do.call(RM_COVARIATE, tmpList)
} else do.call(R_C, list(e2))
model <- do.call(sign, d)
return(model)
}
resolveLeft<- function(e1, e2, sign) {
d <- list()
len.e1 <- 1
d[[1]] <- if (is.list(e1)) do.call(R_C, e1)
else if (length(e1)==1) do.call(R_CONST, list(e1))
else if (!all(is.finite(e1))) {
if (all(is.na(e1)) && length(e1)>5) warning(warn.resolve.txt)
do.call(R_C, list(e1))
} else if (sign == RM_PLUS[1]) {
tmpList <- list(RM_COVARIATE)
tmpList[[COVARIATE_C_NAME]] <- e1
tmpList[[COVARIATE_X_NAME]] <- NULL
tmpList[[COVARIATE_ADDNA_NAME]] <- TRUE
do.call(RM_COVARIATE, tmpList)
} else do.call(R_C, list(e1))
d[[len.e1 + 1]] <- (if (is.character(e2))
stop("characters cannot be combined with 'RMmodels'")
else e2)
model <- do.call(sign, d)
return(model)
}
setMethod('+', signature=c(CLASS_CLIST, CLASS_CLIST),
function(e1, e2) resolve(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c(CLASS_CLIST, 'numeric'),
function(e1, e2) resolveRight(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c(CLASS_CLIST, 'logical'),
function(e1, e2) resolveRight(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c(CLASS_CLIST, 'factor'),
function(e1, e2) resolveRight(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c(CLASS_CLIST, 'list'),
function(e1, e2) resolveRight(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c('numeric', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c('logical', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c('data.frame', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c('factor', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c(CLASS_CLIST, 'character'),
function(e1, e2) resolve(e1, e2, RM_PLUS[1]))
setMethod('+', signature=c('character', CLASS_CLIST),
function(e1, e2) resolve(e1, e2, RM_PLUS[1]))
setMethod('*', signature=c(CLASS_CLIST, CLASS_CLIST),
function(e1, e2) resolve(e1, e2, RM_MULT[1]))
setMethod('*', signature=c('numeric', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_MULT[1]))
setMethod('*', signature=c('logical', CLASS_CLIST),
function(e1, e2) resolveLeft(e1, e2, RM_MULT[1]))
setMethod('*', signature=c(CLASS_CLIST, 'logical'),
function(e1, e2) resolveRight(e1, e2, RM_MULT[1]))
setMethod('*', signature=c(CLASS_CLIST, 'numeric'),
function(e1, e2) resolveRight(e1, e2, RM_MULT[1]))
setMethod('*', signature=c(CLASS_CLIST, 'character'),
function(e1, e2) resolve(e1, e2, RM_MULT[1]))
setMethod('*', signature=c('character', CLASS_CLIST),
function(e1, e2) resolve(e1, e2, RM_MULT[1]))
Xresolve <- function(e1, e2, model) {
d <- list()
len.e1 <- 1
d[[1]] <-
(if (is.character(e1))
stop("characters cannot be combined with 'RMmodels'")
else e1)
d[[len.e1 + 1]] <-
( if (is.character(e2))
stop("characters cannot be combined with 'RMmodels'")
else e2)
model <- do.call(model, d)
return(model)
}
XresolveLeft <- function(e1, e2, model) {
d <- list()
len.e1 <- 1
if (length(e1)==1) d[[1]] <- do.call(R_CONST, list(e1))
else {
e <- list(e1)
d[[1]] <- do.call(R_CONST, e)
}
d[[len.e1 + 1]] <- (if (is.character(e2))
stop("characters cannot be combined with 'RMmodels'")
else e2)
model <- do.call(model, d)
return(model)
}
XresolveRight <- function(e1, e2, model) {
d <- list()
len.e1 <- 1
d[[1]] <- (if (is.character(e1))
stop("characters cannot be combined with 'RMmodels'")
else e1)
if (length(e2)==1) d[[len.e1 + 1]] <- do.call(R_CONST, list(e2))
else {
e <- list(e2)
d[[len.e1 + 1]] <- do.call(R_CONST, e)
}
model <- do.call(model, d)
return(model)
}
setMethod('-', signature=c(CLASS_CLIST, CLASS_CLIST),
function(e1, e2) Xresolve(e1, e2, "R.minus"))
setMethod('-', signature=c('numeric', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.minus"))
setMethod('-', signature=c('logical', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.minus"))
setMethod('-', signature=c(CLASS_CLIST, 'numeric'),
function(e1, e2) XresolveRight(e1, e2, "R.minus"))
setMethod('-', signature=c(CLASS_CLIST, 'logical'),
function(e1, e2) XresolveRight(e1, e2, "R.minus"))
setMethod('-', signature=c(CLASS_CLIST, 'character'),
function(e1, e2) Xresolve(e1, e2, "R.minus"))
setMethod('-', signature=c('character', CLASS_CLIST),
function(e1, e2) Xresolve(e1, e2, "R.minus"))
setMethod('/', signature=c(CLASS_CLIST, CLASS_CLIST),
function(e1, e2) Xresolve(e1, e2, "R.div"))
setMethod('/', signature=c('numeric', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.div"))
setMethod('/', signature=c('logical', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.div"))
setMethod('/', signature=c(CLASS_CLIST, 'numeric'),
function(e1, e2) XresolveRight(e1, e2, "R.div"))
setMethod('/', signature=c(CLASS_CLIST, 'logical'),
function(e1, e2) XresolveRight(e1, e2, "R.div"))
setMethod('/', signature=c('RMmodel', 'character'),
function(e1, e2) Xresolve(e1, e2, "R.div"))
setMethod('/', signature=c('character', 'RMmodel'),
function(e1, e2) Xresolve(e1, e2, "R.div"))
setMethod('^', signature=c(CLASS_CLIST, CLASS_CLIST),
function(e1, e2) Xresolve(e1, e2, "R.pow"))
setMethod('^', signature=c('numeric', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.pow"))
setMethod('^', signature=c('logical', CLASS_CLIST),
function(e1, e2) XresolveLeft(e1, e2, "R.pow"))
setMethod('^', signature=c(CLASS_CLIST, 'numeric'),
function(e1, e2) XresolveRight(e1, e2, "R.pow"))
setMethod('^', signature=c(CLASS_CLIST, 'logical'),
function(e1, e2) XresolveRight(e1, e2, "R.pow"))
setMethod('^', signature=c('RMmodel', 'character'),
function(e1, e2) Xresolve(e1, e2, "R.pow"))
setMethod('^', signature=c('character', 'RMmodel'),
function(e1, e2) Xresolve(e1, e2, "R.pow"))
str.RMmodel <-
function(object, max.level = NA, vec.len = strO$vec.len,
digits.d = strO$digits.d,
nchar.max = 128, give.attr = TRUE, give.head = TRUE,
give.length = give.head,
width = getOption("width"), nest.lev = 0,
indent.str = paste(rep.int(" ",
max(0, nest.lev + 1)), collapse = ".."),
comp.str = "$ ", no.list = FALSE, envir = baseenv(),
strict.width = strO$strict.width,
drop.deparse.attr = strO$drop.deparse.attr,
formatNum = strO$formatNum, list.len = 99, ...)
{
oDefs <- c("strict.width", "vec.len", "digits.d", "drop.deparse.attr",
"formatNum")
strO <- getOption("str")
if (!is.list(strO)) {
warning("invalid options('str') -- using defaults instead")
strO <- strOptions()
}
else {
if (!all(names(strO) %in% oDefs))
warning("invalid components in options('str'): ",
paste(setdiff(names(strO), oDefs), collapse = ", "))
strO <- modifyList(strOptions(), strO)
}
strict.width <- match.arg(strict.width, choices = c("no", "cut", "wrap"))
if (strict.width != "no") {
ss <- capture.output(str(object, max.level = max.level,
vec.len = vec.len, digits.d = digits.d,
drop.deparse.attr = drop.deparse.attr,
nchar.max = nchar.max,
give.attr = give.attr,
give.head = give.head,
give.length = give.length,
width = width, nest.lev = nest.lev,
indent.str = indent.str,
comp.str = comp.str,
no.list = no.list || is.data.frame(object),
envir = envir, strict.width = "no", ...))
if (strict.width == "wrap") {
nind <- nchar(indent.str) + 2
ss <- strwrap(ss, width = width, exdent = nind)
}
if (any(iLong <- nchar(ss) > width))
ss[iLong] <- sub(sprintf("^(.{1,%d}).*", width - 2), "\\1..", ss[iLong])
cat(ss, sep = "\n")
return(invisible())
}
oo <- options(digits = digits.d)
on.exit(options(oo))
le <- length(object)
P0 <- function(...) paste(..., sep = "")
`%w/o%` <- function(x, y) x[is.na(match(x, y))]
nfS <- names(fStr <- formals())
strSub <- function(obj, ...) {
nf <- nfS %w/o% c("object", "give.length", "comp.str",
"no.list", names(match.call())[-(1:2)], "...")
aList <- as.list(fStr)[nf]
aList[] <- lapply(nf, function(n) eval(as.name(n)))
if ("par.general" %in% names(obj)){
is.RFdefault <-
unlist(lapply(obj$par.general,
FUN=function(x){
!is(x, CLASS_CLIST) && !is.na(x) && x==RM_DEFAULT
}))
obj$par.general[is.RFdefault] <- NULL
if (all(is.RFdefault)) obj$par.general <- list()
}
do.call(utils::str, c(list(object = obj), aList, list(...)),
quote = TRUE)
}
v.len <- vec.len
std.attr <- "names"
cl <- if ((S4 <- isS4(object)))
class(object)
else oldClass(object)
has.class <- S4 || !is.null(cl)
mod <- ""
char.like <- FALSE
if (give.attr)
a <- attributes(object)
if (is.null(object))
cat(" NULL\n")
else if (S4) {
a <- sapply(methods::.slotNames(object), methods::slot,
object = object, simplify = FALSE)
cat("Formal class", " '", paste(cl, collapse = "', '"),
"' [package \"", attr(cl, "package"), "\"] with ",
length(a), " slots\n", sep = "")
strSub(a, comp.str = "@ ", no.list = TRUE,
give.length = give.length,
indent.str = paste(indent.str, ".."),
nest.lev = nest.lev + 1)
return(invisible())
}
}
summary.RMmodel <- function(object, max.level=5, ...)
summary(PrepareModel2(object, ...), max.level=max.level)
summary.RM_model <- function(object, ...) {
class(object) <- "summary.RMmodel"
object
}
print.summary.RMmodel <- function(x, max.level=5, ...) {
str(x, no.list=TRUE, max.level = max.level, give.attr=FALSE)
invisible(x)
}
print.RM_model <- function(x, max.level=5,...) {
print.summary.RMmodel(summary.RM_model(x, max.level=max.level,...),
max.level=max.level)
}
print.RMmodel <- function(x, max.level=5,...) {
print.summary.RMmodel(summary.RMmodel(x, max.level=max.level, ...),
max.level=max.level)
}
setMethod(f="show", signature=CLASS_CLIST,
definition=function(object) print.RMmodel(object))
print.RMmodelgenerator <- function(x, ...) {
cat("*** object of Class '", CLASS_RM, "' ***\n", sep="")
  str(args(x@.Data))
cat(" type : \t", paste(x@type, collapse=", "), "\n")
cat(" domain : \t", paste(x@domain, collapse=", "), "\n")
cat(" isotropy : \t", paste(x@isotropy, collapse=", "), "\n")
cat(" monotoniciy :\t", paste(x@monotone, collapse=", "), "\n")
cat(" multivariate:\t",
if (x@vdim >= 0) x@vdim
else if (x@vdim == PARAM_DEP) "parameter dependent"
else if (x@vdim == PREVMODEL_DEP) "depends on calling model"
else if (x@vdim == SUBMODEL_DEP) "submodel dependent"
else "specification unclear -- please contact maintainer",
"\n")
cat(" max. dimen. :\t",
if (x@maxdim >= 0) x@maxdim
else if (x@maxdim == PARAM_DEP) "parameter dependent"
else if (x@maxdim == PREVMODEL_DEP) "depends on calling model"
else if (x@maxdim == SUBMODEL_DEP) "submodel dependent"
else "specification unclear -- please contact maintainer",
"\n")
cat(" finite range:\t", x@finiterange, "\n")
cat(" operator : \t", x@operator, "\n")
cat(" simple fctn :\t", x@simpleArguments, "\n")
}
setMethod("show", signature=CLASS_RM,
definition=function(object) print.RMmodelgenerator(object))
rfConvertRMmodel2string <- function(model){
if (!is(model, class2=CLASS_CLIST))
stop("model must be of class '", CLASS_CLIST, "'")
  par <- c(model@par.model, model@par.general)
idx.random <- unlist(lapply(par, FUN=isRMmodel))
if (is.null(idx.random)){
param.string <- ""
param.random.string <- ""
} else {
idx.default <- par[!idx.random] == RM_DEFAULT
param.string <- paste(names(par[!idx.random][!idx.default]),
par[!idx.random][!idx.default],
sep="=", collapse=", ")
string.vector <- lapply(par[idx.random], FUN=rfConvertRMmodel2string)
param.random.string <- paste(names(par[idx.random]), string.vector,
sep="=", collapse=", ")
}
if (length(model@submodels) > 0){
string.vector <- lapply(model@submodels, FUN=rfConvertRMmodel2string)
submodel.string <- paste(names(model@submodels), string.vector,
sep="=", collapse=", ")
if (!(nchar(param.string)==0))
submodel.string <- paste(submodel.string, ", ", sep="")
}
else submodel.string <- ""
string <- paste(model@name, "(", submodel.string, param.random.string,
param.string, ")", sep="")
return(string)
}
preparePlotRMmodel <- function(x, xlim, ylim, n.points, dim, fct.type,
MARGIN, fixed.MARGIN, ...){
types <- c("Cov", "Variogram", "Fctn")
verballist <- paste("'", types, "'", sep="", collapse="")
if (!missing(fct.type) && length(fct.type) > 0) {
if (!(fct.type %in% types))
stop("fct.type must be NULL or of the types ", verballist)
types <- fct.type
}
all.fct.types <- character(length(x))
all.vdim <- numeric(length(x))
for (i in 1:length(x)) {
fct.type <- types
m <- list("", PrepareModel2(x[[i]]), ...)
while (length(fct.type) > 0 &&
{ m[[1]] <- fct.type[1];
!is.numeric(vdim <- try( InitModel(MODEL_AUX, m, dim),
silent=TRUE))
})
fct.type <- fct.type[-1]
if (!is.numeric(vdim)) {
stop(attr(vdim, "condition")$message)
}
if (vdim[1] != vdim[2]) stop("only simple models can be plotted")
all.vdim[i] <- vdim[1]
all.fct.types[i] <- fct.type[1]
}
if (!all(all.vdim == all.vdim[1]))
stop("models have different multivariability")
if (is.null(xlim)) {
xlim <- if (dim > 1 || all.vdim[1] > 1) c(-1, 1) * 1.75 else c(0, 1.75)
}
if (is.null(ylim) && dim > 1) ylim <- xlim
distance <- seq(xlim[1], xlim[2], length=n.points)
if (prod(xlim) <= 0)
distance <- sort(c(if (!any(distance==0)) 0, 1e-5, distance))
if (dim > 1) {
distanceY <- seq(ylim[1], ylim[2], length=n.points)
if (prod(ylim) < 0 & !(any(distanceY==0)))
distanceY <- sort(c(0, distanceY))
}
if (all(all.fct.types[1] == all.fct.types)) {
switch(all.fct.types[1],
"Cov" = {
main <-"Covariance function"
ylab <- "C(distance)"
},
"Variogram" = {
main <- "Variogram"
ylab <- expression(gamma(distance))
},
"Fctn" = {
main<- ""
ylab <- "f(distance)"
},
stop("method only implemented for ", verballist)
)
} else {
main <- ""
ylab <- "f(distance)"
}
if (length(x) == 1) {
for.what <- rfConvertRMmodel2string(x[[1]])
if (nchar(for.what) > 20) for.what <- strsplit(for.what,"\\(")[[1]][1]
} else {
for.what <- "various models"
}
main <- paste("plot for ", for.what, "\n", sep="")
if (dim >= 3)
main <- paste(sep="", main, "; component", if (dim>3) "s", " ",
paste((1:dim)[-MARGIN], collapse=", "),
" fixed to the value", if (dim>3) "s", " ",
paste(format(fixed.MARGIN, digits=4), collapse=", "))
return(list(main=main, fctcall=all.fct.types, ylab=ylab, distance=distance,
distanceY = if (dim > 1) distanceY, xlim=xlim, ylim=ylim))
}
singleplot <- function(cov, dim, li, plotmethod, dots, dotnames) {
if (dim==1) {
D <- li$distance
iszero <- D == 0
if (plotmethod == "matplot") {
plotpoint <- any(iszero) && diff(range(dots$ylim)) * 1e-3 <
diff(range(cov)) - diff(range(cov[!iszero]))
if (plotpoint) D[iszero] <- NA
} else plotpoint <- FALSE
liXY <-
if (plotmethod=="plot.xy") list(xy = xy.coords(x=D, y=cov))
else list(x=D, y=cov)
do.call(plotmethod, args=c(dots, liXY))
if (plotpoint) {
for (i in 1:ncol(cov))
points(0, cov[iszero, i], pch=19 + i, col = dots$col[i])
}
} else {
if (!("zlim" %in% dotnames)) dots$zlim <- range(unlist(cov), finite=TRUE)
addgiven <- "add" %in% dotnames
local.dots <- dots
local.dots$col <- NULL
local.dots$lty <- NULL
is.contour <- is.character(plotmethod) && plotmethod == "contour"
if (!is.contour) {
if (ncol(cov) > 1)
stop("several models can be plotted at once only with 'contour'")
col <- default.image.par(dots$zlim, NULL)$data$default.col
}
for (i in 1:ncol(cov)) {
if (!addgiven && is.contour) local.dots$add <- i > 1
do.call(plotmethod,
args=c(list(x=li$distance, y=li$distanceY,
col=if (is.contour) dots$col[i] else col,
lty = dots$lty[i],
z=matrix(cov[, i], nrow=length(li$distance))), local.dots))
}
}
}
RFplotModel <- function(x, y, dim=1,
n.points=
if (dim==1 || is.contour) 200 else 100,
fct.type=NULL,
MARGIN, fixed.MARGIN,
maxchar=15, ...,
plotmethod=if (dim==1) "matplot" else "contour") {
is.contour <- is.character(plotmethod) && plotmethod == "contour"
RFopt <- RFoptions()
if (ex.red <- RFopt$internal$examples_reduced)
n.points <- as.integer(min(n.points, ex.red - 2))
stopifnot(length(dim)==1)
if (!(dim %in% 1:10))
stop("only 'dim==1', 'dim==2' and 'dim==3' are allowed")
if (dim==3)
if (missing(MARGIN) || missing(fixed.MARGIN))
stop("'MARGIN' and 'fixed.MARGIN' must be given if dim >=3")
if ((!missing(MARGIN)) || (!missing(fixed.MARGIN))) {
stopifnot((!missing(MARGIN)) && (!missing(fixed.MARGIN)))
if (dim < 3)
stop("'MARGIN' and 'fixed.MARGIN' should only be given for dim>=3")
stopifnot(is.numeric(MARGIN) && length(MARGIN)==2)
stopifnot(is.numeric(fixed.MARGIN) && (length(fixed.MARGIN) == dim - 2))
}
dots <- list(...)
dotnames <- names(dots)
models <- substr(dotnames, 1, 5) == "model"
x <- c(list(x), dots[models])
mnames <- c("", substr(dotnames[models], 6, 100))
idx <- substr(mnames, 1, 1) == "."
mnames[idx] <- substr(mnames[idx], 2, 1000)
for (i in which(!idx)) {
mnames[i] <- rfConvertRMmodel2string(x[[i]])
nmn <- nchar(mnames[i]) - 1
if (substr(mnames[i], nmn, nmn) == "(") {
mnames[i] <- substr(mnames[i], 1, nmn - 1)
}
msplit <- strsplit(mnames[i], "RM")[[1]]
if (length(msplit) == 2 && msplit[1]=="") mnames[i] <- msplit[2]
}
mnames <- substr(mnames, 1, maxchar)
dots <- mergeWithGlobal(dots[!models])
dotnames <- names(dots)
if (!("type" %in% dotnames)) dots$type <- "l"
li <- preparePlotRMmodel(x=x, xlim=dots$xlim, ylim=dots$ylim,
n.points=n.points, dim=dim,
fct.type=fct.type,
MARGIN=MARGIN, fixed.MARGIN=fixed.MARGIN)
dots$xlim <- li$xlim
if (!is.null(li$ylim)) dots$ylim <- li$ylim
if (!("main" %in% dotnames)) dots$main <- li$main
if (!("cex" %in% dotnames)) dots$cex <- 1
if (!("cex.main" %in% dotnames)) dots$cex.main <- 1.3 * dots$cex
if (!("cex.axis" %in% dotnames)) dots$cex.axis <- 1.0 * dots$cex
if (!("cex.lab" %in% dotnames)) dots$cex.lab <- 1.0 * dots$cex
if (!("col" %in% dotnames)) dots$col <- rep(1:7, length.out=length(x))
cov <- list()
if (dim==1) {
for (i in 1:length(x))
cov[[i]] <- rfeval(x=li$distance, model=x[[i]], fctcall=li$fctcall[i])
lab <- xylabs("distance", li$ylab)
if (!("ylim" %in% dotnames)) dots$ylim <- range(0, unlist(cov), finite=TRUE)
if (!("xlab" %in% dotnames)) dots$xlab <- lab$x
if (!("ylab" %in% dotnames)) dots$ylab <- lab$y
} else {
lab <- xylabs("", "")
if (!("xlab" %in% dotnames)) dots$xlab <- lab$x
if (!("ylab" %in% dotnames)) dots$ylab <- lab$y
dots$type <- NULL
if (dim==2) {
di <- as.matrix(expand.grid(li$distance, li$distanceY))
for (i in 1:length(x)) {
cov[[i]] <- rfeval(x=di, model=x[[i]], fctcall=li$fctcall[i])
}
} else if (dim>=3) {
m1 <- expand.grid(li$distance, li$distanceY)
m2 <- matrix(NA, ncol=dim, nrow=nrow(m1))
m2[,MARGIN] <- as.matrix(m1)
m2[,-MARGIN] <- rep(fixed.MARGIN, each=nrow(m1))
for (i in 1:length(x))
cov[[i]] <- rfeval(x=m2, model=x[[i]], fctcall=li$fctcall[i])
} else stop("this error should never appear")
}
if ((is.null(dots$xlab) || dots$xlab=="") &&
(is.null(dots$ylab) || dots$ylab=="")) {
margins <- c(3, 3, if (dots$main=="") 0 else 2, 0) + 0.2
} else {
margins <- c(5, 5, if (dots$main=="") 0 else 2, 0) + 0.2
}
dimcov <- dim(cov[[1]])
graphics <- RFopt$graphics
if (plotmethod != "plot.xy") {
if (is.null(dimcov)) {
ArrangeDevice(graphics, c(1,1))
} else {
figs <- dimcov[2:3]
ArrangeDevice(graphics, figs)
}
}
scr <- NULL
if (is.null(dimcov)) {
cov <- sapply(cov, function(x) x)
if (!("lty" %in% dotnames)) dots$lty <- 1:5
singleplot(cov=cov, dim=dim, li=li, plotmethod, dots=dots,
dotnames=dotnames)
if (length(x) > 1) legend(x="topright", legend=mnames,
col=dots$col, lty=dots$lty)
} else {
scr <- matrix(split.screen(figs=figs), ncol=dimcov[2], byrow=TRUE)
par(oma=margins)
title(main=dots$main, xlab=dots$xlab, ylab=dots$ylab,
outer=TRUE, cex.main=dots$cex.main)
dots.axis = dots[names(dots) != "type"]
dots.axis$col = dots.axis$col.axis
if (!("axes" %in% dotnames)) dots$axes <- FALSE
for (i in 1:ncol(scr)) {
for (j in 1:nrow(scr)) {
dots$main <-
eval(parse(text=paste("expression(C[", i, j,"])", sep="")))
screen(scr[i,j])
par(mar=c(0,0,2,1))
singleplot(cov = sapply(cov, function(x) x[, i,j]),
dim = dim, li=li, plotmethod=plotmethod, dots=dots,
dotnames = dotnames)
box()
if (i==1) do.call(graphics::axis,
args=c(dots.axis, list(side=1, outer = TRUE, line=1)))
if (j==1) do.call(graphics::axis,
args=c(dots.axis, list(side=2, outer = TRUE, line=1)))
}
}
}
if (graphics$split_screen && graphics$close_screen) {
close.screen(scr)
scr <- NULL
}
return(scr)
}
points.RMmodel <- function(x, ..., type="p")
RFplotModel(x, ...,type=type, plotmethod="plot.xy")
lines.RMmodel <- function(x, ..., type="l")
RFplotModel(x, ...,type=type, plotmethod="plot.xy")
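# Recursively converts a nested list representation of a model into the corresponding
# RM* model object, mapping the '+' and '*' symbols to their RM equivalents and
# validating model names along the way.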
list2RMmodel <- function(x) {
if (!is.list(x)) return(x)
name <- x[[1]]
if (!is.character(name)) return(x)
if (name == RM_DECLARE) return(NULL)
len <- length(x)
if (name %in% DOLLAR) return(list2RMmodel(c(x[[len]], x[-c(1, len)])))
if (name==SYMBOL_PLUS) name <- RM_PLUS[1] else
if (name==SYMBOL_MULT) name <- RM_MULT[1] else
if (!(name %in% list2RMmodel_Names)) {
if (!(name %in% list2RMmodel_oldNames))
stop(paste("'", name, "' is not the name of a valid model", sep=""))
}
if (len==1) return(eval(parse(text=paste(name, "()", sep=""))))
else {
x <- lapply(x, FUN=list2RMmodel)
if (length(idx <- which("anisoT" == names(x))) == 1){
names(x)[idx] <- "Aniso"
x[[idx]] <- t(x[[idx]])
}
return(do.call(name, args=x[-1]))
}
}
setMethod(f="plot", signature(x=CLASS_CLIST, y="missing"),
function(x, y, ...) RFplotModel(x, ...))
setMethod(f="lines", signature(x=CLASS_CLIST),
function(x, ..., type="l")
RFplotModel(x, ..., type=type, plotmethod="plot.xy"))
setMethod(f="points", signature(x=CLASS_CLIST),
function(x, ..., type="p")
RFplotModel(x, ..., type=type, plotmethod="plot.xy"))
setMethod(f="persp", signature(x=CLASS_CLIST),
function(x, ..., dim=2, zlab="")
RFplotModel(x,...,dim=dim,zlab=zlab,plotmethod="persp"))
setMethod(f="image", signature(x=CLASS_CLIST),
function(x, ..., dim=2) RFplotModel(x,...,dim=dim,plotmethod="image")) |
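# testthat specs: confront() applied to database-backed tables (dbplyr::src_memdb())
# should return 'tbl_validation' objects and evaluate linear, categorical and
# conditional rules, warning on rules that cannot be translated to SQL.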
describe("Confront", {
it("returns a validation object", {
rules <- validator(x > 1, y < x, x == 0)
con <- dbplyr::src_memdb()
d <- data.frame(x = 1, y = 2)
tbl_d <- dplyr::copy_to(con, d, overwrite=TRUE)
cf <- confront(tbl_d, rules)
expect_true(is(cf, "tbl_validation"))
})
it("handles linear constraints", {
rules <- validator(x > 1, y < x, y == 2)
con <- dbplyr::src_memdb()
d <- data.frame(x = c(2, NA), y = 2:1)
tbl_d <- dplyr::copy_to(con, d, overwrite=TRUE)
cf <- confront(tbl_d, rules)
res <- values(cf, type = "list", simplify=FALSE)
expect_equal(res, list( V1 = c(TRUE, NA)
, V2 = c(FALSE, NA)
, V3=c(TRUE, FALSE))
)
})
it("handles categorical constraints", {
rules <- validator( a %in% c("A1", "A2")
, b %in% c("B1", "B2")
)
con <- dbplyr::src_memdb()
d <- data.frame(a = c("A1", "A3", NA), b = c("B3", NA, "B2"))
tbl_d <- dplyr::copy_to(con, d, overwrite=TRUE)
cf <- confront(tbl_d, rules)
res <- values(cf, type = "list", simplify=FALSE)
expect_equal(res, list( V1 = c(TRUE, FALSE,NA)
, V2 = c(FALSE, NA, TRUE)
)
)
})
it("handles conditional constraints", {
rules <- validator( a %in% c("A1", "A2")
, b %in% c("B1", "B2")
, if (a == "A1") b == "B1"
, if (b == "B2") x > 0
)
con <- dbplyr::src_memdb()
d <- data.frame(a = c("A1", "A3", NA), b = c("B3", NA, "B2"), x = c(NA, 1,-1))
tbl_d <- dplyr::copy_to(con, d, overwrite=TRUE)
cf <- confront(tbl_d, rules)
res <- values(cf, type = "list", simplify=FALSE)
expect_equal(res, list( V1 = c(TRUE, FALSE,NA)
, V2 = c(FALSE, NA, TRUE)
, V3 = c(FALSE, NA, NA)
, V4 = c(NA, NA, FALSE)
)
)
})
it ("warns on not working rules",{
f <- function(x){x}
rules <- validator(f(x) > 0, x > 0, y < 0)
con <- dbplyr::src_memdb()
d <- data.frame(x=c(NA, 1, -1))
tbl_d <- dplyr::copy_to(con, d, overwrite=TRUE)
expect_warning(cf <- confront(tbl_d, rules))
res <- values(cf, type = "list", simplify=FALSE)
expect_equal(res, list(
V1 = NULL,
V2 = c(NA, TRUE, FALSE),
V3 = NULL
))
expect_equal(length(cf$errors), 2)
})
}) |
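# Computes per-cell allele frequencies from a strand-separated base count matrix
# (rows named "<base>-<position>-<fwd|rev>"): for each requested variant, forward and
# reverse counts of the alternate base are summed and divided by the total coverage
# at that position.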
AlleleFreq.default <- function(object, variants, ...) {
variants <- unique(x = variants)
meta_row_mat <- as.data.frame(
x = stri_split_fixed(
str = rownames(x = object),
pattern = "-",
simplify = TRUE
), stringsAsFactors = TRUE
)
colnames(meta_row_mat) = c("letter", "position", "strand")
variant_df <- data.frame(
variant = variants,
position = factor(
x = substr(
x = variants,
start = 1,
stop = nchar(x = variants) - 3),
levels = levels(x = meta_row_mat$position)
),
ref = factor(
x = substr(
x = variants,
start = nchar(x = variants) - 2,
stop = nchar(x = variants) - 2),
levels = levels(x = meta_row_mat$letter)
),
alt = factor(
x = substr(
x = variants,
start = nchar(x = variants),
stop = nchar(x = variants)),
levels = levels(x = meta_row_mat$letter)
)
)
ref_letter <- paste0(meta_row_mat$position, meta_row_mat$letter)
alt_letter <- paste0(variant_df$position, variant_df$alt)
idx_numerator <- lapply(
X = alt_letter, FUN = function(x) {
which(ref_letter == x)
}
)
fwd_half_idx <- sapply(X = idx_numerator, FUN = `[[`, 1)
rev_half_idx <- sapply(X = idx_numerator, FUN = `[[`, 2)
  if (!isTRUE(all.equal(
    target = meta_row_mat[fwd_half_idx, 2],
    current = meta_row_mat[rev_half_idx, 2]
  ))) {
stop("Variant count matrix does not have the required structure")
}
numerator_counts <- object[fwd_half_idx, ] + object[rev_half_idx, ]
rownames(x = numerator_counts) <- variants
denom_counts <- sapply(X = variant_df$position, FUN = function(x) {
idx <- which(meta_row_mat$position == x)
total_coverage <- colSums(x = object[idx, ])
return(total_coverage)
})
denom_counts <- t(x = denom_counts)
rownames(x = denom_counts) <- variant_df$variant
allele_freq_matrix <- numerator_counts / denom_counts
colnames(x = allele_freq_matrix) <- colnames(x = object)
allele_freq_matrix@x[is.nan(x = allele_freq_matrix@x)] <- 0
return(allele_freq_matrix[variants, ])
}
AlleleFreq.Assay <- function(object, variants, ...) {
mat <- GetAssayData(object = object, slot = "counts")
allele.freq <- AlleleFreq(object = mat, variants = variants, ...)
allele.assay <- CreateAssayObject(counts = allele.freq)
return(allele.assay)
}
AlleleFreq.Seurat <- function(
object,
variants,
assay = NULL,
new.assay.name = "alleles",
...
) {
assay <- SetIfNull(x = assay, y = DefaultAssay(object = object))
allele.assay <- AlleleFreq(
object = object[[assay]],
variants = variants,
...
)
object[[new.assay.name]] <- allele.assay
return(object)
}
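# Hierarchically clusters group-averaged (square-root transformed) allele frequencies,
# returning 'hclust' objects for both the groups ("cells") and the features, based on
# cosine similarity.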
ClusterClonotypes <- function(object, assay = NULL, group.by = NULL) {
if (is.null(x = group.by)) {
object$allele_ident_stash_clon <- Idents(object = object)
} else {
object$allele_ident_stash_clon <- object[[]][[group.by]]
}
md <- object[[]]
assay <- SetIfNull(x = assay, y = DefaultAssay(object = object))
mat <- GetAssayData(object = object, assay = assay, slot = "data")
matty <- sapply(
X = unique(x = object$allele_ident_stash_clon),
FUN = function(x) {
cells <- rownames(x = md[md$allele_ident_stash_clon == x, ])
return(rowMeans(x = sqrt(x = mat[, cells])))
})
object$allele_ident_stash_clon <- NULL
cos_matty <- cosine(x = matty)
cos_matty_t <- cosine(x = t(x = matty))
cos_matty[is.nan(x = cos_matty)] <- 0
cos_matty_t[is.nan(x = cos_matty_t)] <- 0
hc <- hclust(d = dist(x = cos_matty))
hf <- hclust(d = dist(x = cos_matty_t))
return(list("cells" = hc, "features" = hf))
}
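# Identifies clonotypes by building a nearest-neighbor graph on square-root transformed
# allele frequencies and running graph-based clustering; cluster levels and variable
# features are then reordered according to ClusterClonotypes().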
FindClonotypes <- function(
object,
assay = NULL,
features = NULL,
metric = "cosine",
resolution = 1,
k = 10,
algorithm = 3
) {
assay <- SetIfNull(x = assay, y = DefaultAssay(object = object))
features <- SetIfNull(x = features, y = rownames(x = object[[assay]]))
mat <- GetAssayData(object = object, assay = assay, slot = "data")[features, ]
mat <- sqrt(x = t(x = mat))
graph <- FindNeighbors(object = mat, k.param = k, annoy.metric = metric)
object[[paste0(assay, "_nn")]] <- graph$nn
object[[paste0(assay, "_snn")]] <- graph$snn
object <- FindClusters(
object = object,
graph.name = paste0(assay, "_snn"),
resolution = resolution,
algorithm = algorithm
)
hc <- ClusterClonotypes(object = object, assay = assay, group.by = NULL)
features <- as.character(rownames(x = object[[assay]])[hc$features$order])
VariableFeatures(object = object, assay = assay) <- features
levels(x = object) <- hc$cells$order - 1
return(object)
}
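# Reads mgatk output from a directory: per-base, strand-specific count tables (A/C/T/G),
# the reference allele table and the per-cell depth table, and assembles them into a
# single sparse count matrix plus metadata.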
ReadMGATK <- function(dir, verbose = TRUE) {
if (!dir.exists(paths = dir)) {
stop("Directory not found")
}
a.path <- list.files(path = dir, pattern = "*.A.txt.gz", full.names = TRUE)
c.path <- list.files(path = dir, pattern = "*.C.txt.gz", full.names = TRUE)
t.path <- list.files(path = dir, pattern = "*.T.txt.gz", full.names = TRUE)
g.path <- list.files(path = dir, pattern = "*.G.txt.gz", full.names = TRUE)
refallele.path <- list.files(
path = dir,
pattern = "*_refAllele.txt*",
full.names = TRUE
)
depthfile.path <- list.files(
path = dir,
pattern = "*.depthTable.txt",
full.names = TRUE
)
if (verbose) {
message("Reading allele counts")
}
column.names <- c("pos", "cellbarcode", "plus", "minus")
a.counts <- read.table(
file = a.path,
sep = ",",
header = FALSE,
stringsAsFactors = FALSE,
col.names = column.names
)
c.counts <- read.table(
file = c.path,
sep = ",",
header = FALSE,
stringsAsFactors = FALSE,
col.names = column.names
)
t.counts <- read.table(
file = t.path,
sep = ",",
header = FALSE,
stringsAsFactors = FALSE,
col.names = column.names
)
g.counts <- read.table(
file = g.path,
sep = ",",
header = FALSE,
stringsAsFactors = FALSE,
col.names = column.names
)
if (verbose) {
message("Reading metadata")
}
refallele <- read.table(
file = refallele.path,
header = FALSE,
stringsAsFactors = FALSE,
col.names = c("pos", "ref")
)
refallele$ref <- toupper(x = refallele$ref)
depth <- read.table(
file = depthfile.path,
header = FALSE,
stringsAsFactors = FALSE,
col.names = c("cellbarcode", "mito.depth"),
row.names = 1
)
cellbarcodes <- unique(x = rownames(depth))
cb.lookup <- seq_along(along.with = cellbarcodes)
names(cb.lookup) <- cellbarcodes
if (verbose) {
message("Building matrices")
}
maxpos <- dim(refallele)[1]
a.mat <- SparseMatrixFromBaseCounts(
basecounts = a.counts, cells = cb.lookup, dna.base = "A", maxpos = maxpos
)
c.mat <- SparseMatrixFromBaseCounts(
basecounts = c.counts, cells = cb.lookup, dna.base = "C", maxpos = maxpos
)
t.mat <- SparseMatrixFromBaseCounts(
basecounts = t.counts, cells = cb.lookup, dna.base = "T", maxpos = maxpos
)
g.mat <- SparseMatrixFromBaseCounts(
basecounts = g.counts, cells = cb.lookup, dna.base = "G", maxpos = maxpos
)
counts <- rbind(a.mat[[1]], c.mat[[1]], t.mat[[1]], g.mat[[1]],
a.mat[[2]], c.mat[[2]], t.mat[[2]], g.mat[[2]])
return(list("counts" = counts, "depth" = depth, "refallele" = refallele))
}
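# Identifies candidate variants by processing each alternate base (A, T, C, G) against
# the reference alleles and row-binding the per-letter summary statistics.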
IdentifyVariants.default <- function(
object,
refallele,
stabilize_variance = TRUE,
low_coverage_threshold = 10,
verbose = TRUE,
...
) {
coverages <- ComputeTotalCoverage(object = object, verbose = verbose)
a.df <- ProcessLetter(
object = object,
letter = "A",
coverage = coverages,
ref_alleles = refallele,
stabilize_variance = stabilize_variance,
low_coverage_threshold = low_coverage_threshold,
verbose = verbose
)
t.df <- ProcessLetter(
object = object,
letter = "T",
coverage = coverages,
ref_alleles = refallele,
stabilize_variance = stabilize_variance,
low_coverage_threshold = low_coverage_threshold,
verbose = verbose
)
c.df <- ProcessLetter(
object = object,
letter = "C",
coverage = coverages,
ref_alleles = refallele,
stabilize_variance = stabilize_variance,
low_coverage_threshold = low_coverage_threshold,
verbose = verbose
)
g.df <- ProcessLetter(
object = object,
letter = "G",
coverage = coverages,
ref_alleles = refallele,
stabilize_variance = stabilize_variance,
low_coverage_threshold = low_coverage_threshold,
verbose = verbose
)
return(rbind(a.df, t.df, c.df, g.df))
}
IdentifyVariants.Assay <- function(
object,
refallele,
...
) {
counts <- GetAssayData(object = object, slot = 'counts')
df <- IdentifyVariants(object = counts, refallele = refallele, ...)
return(df)
}
IdentifyVariants.Seurat <- function(
object,
refallele,
assay = NULL,
...
) {
assay <- SetIfNull(x = assay, y = DefaultAssay(object = object))
assay.obj <- GetAssay(object = object, assay = assay)
df <- IdentifyVariants(object = assay.obj, refallele = refallele, ...)
return(df)
}
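# Builds forward- and reverse-strand sparse matrices (positions x cells) from a
# long-format base count table; a zero entry at (maxpos, 1) is added so both matrices
# have the full dimensions even when the last position or first cell has no counts.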
SparseMatrixFromBaseCounts <- function(basecounts, cells, dna.base, maxpos) {
fwd.mat <- sparseMatrix(
i = c(basecounts$pos,maxpos),
j = c(cells[basecounts$cellbarcode],1),
x = c(basecounts$plus,0)
)
colnames(x = fwd.mat) <- names(x = cells)
rownames(x = fwd.mat) <- paste(
dna.base,
seq_len(length.out = nrow(fwd.mat)),
"fwd",
sep = "-"
)
rev.mat <- sparseMatrix(
i = c(basecounts$pos,maxpos),
j = c(cells[basecounts$cellbarcode],1),
x = c(basecounts$minus,0)
)
colnames(x = rev.mat) <- names(x = cells)
rownames(x = rev.mat) <- paste(
dna.base,
seq_len(length.out = nrow(rev.mat)),
"rev",
sep = "-"
)
return(list(fwd.mat, rev.mat))
}
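# Sums the eight strand/base sub-matrices stacked in 'object' (A/C/T/G, forward and
# reverse) to obtain the total coverage per position and cell.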
ComputeTotalCoverage <- function(object, verbose = TRUE) {
if (verbose) {
message("Computing total coverage per base")
}
rowstep <- nrow(x = object) / 8
mat.list <- list()
for (i in seq_len(length.out = 8)) {
mat.list[[i]] <- object[(rowstep * (i - 1) + 1):(rowstep * i), ]
}
coverage <- Reduce(f = `+`, x = mat.list)
coverage <- as.matrix(x = coverage)
rownames(x = coverage) <- seq_along(along.with = rownames(x = coverage))
return(coverage)
}
globalVariables(
names = c("forward", "reverse", ".", "variant"), package = "Signac"
)
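# Summarises all positions where 'letter' is a possible alternate allele: per-variant
# mean, variance (optionally variance-stabilised at low coverage), strand correlation
# and cell-level detection counts.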
ProcessLetter <- function(
object,
letter,
ref_alleles,
coverage,
stabilize_variance = TRUE,
low_coverage_threshold = 10,
verbose = TRUE
) {
if (verbose) {
message("Processing ", letter)
}
boo <- ref_alleles$ref != letter & ref_alleles$ref != "N"
cov <- coverage[boo, ]
variant_name <- paste0(
as.character(ref_alleles$pos),
ref_alleles$ref,
">",
letter
)[boo]
nucleotide <- paste0(
ref_alleles$ref,
">",
letter
)[boo]
position_filt <- ref_alleles$pos[boo]
fwd.counts <- GetMutationMatrix(
object = object,
letter = letter,
strand = "fwd"
)[boo, ]
rev.counts <- GetMutationMatrix(
object = object,
letter = letter,
strand = "rev"
)[boo, ]
fwd.ijx <- summary(fwd.counts)
rev.ijx <- summary(rev.counts)
bulk <- (rowSums(fwd.counts + rev.counts) / rowSums(cov))
bulk[is.na(bulk)] <- 0
bulk[is.nan(bulk)] <- 0
both.strand <- data.table(cbind(fwd.ijx, rev.ijx$x))
both.strand$i <- variant_name[both.strand$i]
colnames(both.strand) <- c("variant", "cell_idx", "forward", "reverse")
cor_dt <- suppressWarnings(expr = both.strand[, .(cor = cor(
x = forward, y = reverse, method = "pearson", use = "pairwise.complete")
), by = list(variant)])
cor_vec_val <- cor_dt$cor
names(cor_vec_val) <- as.character(cor_dt$variant)
mat <- (fwd.counts + rev.counts) / cov
rownames(mat) <- variant_name
mat@x[!is.finite(mat@x)] <- 0
if (stabilize_variance) {
idx_mat <- which(cov < low_coverage_threshold, arr.ind = TRUE)
idx_mat_mean <- bulk[idx_mat[, 1]]
ones <- 1 - sparseMatrix(
i = c(idx_mat[, 1], dim(x = mat)[1]),
j = c(idx_mat[, 2], dim(x = mat)[2]),
x = 1
)
means_mat <- sparseMatrix(
i = c(idx_mat[, 1], dim(x = mat)[1]),
j = c(idx_mat[, 2], dim(x = mat)[2]),
x = c(idx_mat_mean, 0)
)
mmat2 <- mat * ones + means_mat
variance <- SparseRowVar(x = mmat2)
} else {
variance <- SparseRowVar(x = mat)
}
detected <- (fwd.counts >= 2) + (rev.counts >= 2)
var_summary_df <- data.frame(
position = position_filt,
nucleotide = nucleotide,
variant = variant_name,
vmr = variance / (bulk + 0.00000000001),
mean = round(x = bulk, digits = 7),
variance = round(x = variance, digits = 7),
n_cells_conf_detected = rowSums(x = detected == 2),
n_cells_over_5 = rowSums(x = mat >= 0.05),
n_cells_over_10 = rowSums(x = mat >= 0.10),
n_cells_over_20 = rowSums(x = mat >= 0.20),
strand_correlation = cor_vec_val[variant_name],
mean_coverage = rowMeans(x = cov),
stringsAsFactors = FALSE,
row.names = variant_name
)
return(var_summary_df)
}
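# Extracts the sub-matrix of counts for one base and one strand, relying on the
# "<base>-<position>-<strand>" row-naming convention.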
GetMutationMatrix <- function(object, letter, strand) {
keep.rows <- paste(
letter,
seq_len(length.out = nrow(x = object) / 8),
strand,
sep = "-"
)
return(object[keep.rows, ])
} |
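# Aggregates a SpatialPixelsDataFrame to a coarser resolution by grouping np x np blocks
# of cells; numeric layers are averaged and factor layers (column indices listed in
# 'which.fac') are regrouped via the compiled adehabitatMA routines.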
lowres <- function(x, np=2, which.fac=NULL, ...)
{
if (is(x, "SpatialGrid"))
fullgrid(x) = FALSE
if (!inherits(x, "SpatialPixelsDataFrame"))
stop("x should be of class \"SpatialPixelsDataFrame\"")
pfs <- proj4string(x)
gr <- gridparameters(x)
if (nrow(gr) > 2)
stop("x should be defined in two dimensions")
    if (abs(gr[1, 2] - gr[2, 2]) > get(".adeoptions", envir=.adehabitatMAEnv)$epsilon)
stop("the cellsize should be the same in x and y directions")
res <- list()
for (i in 1:(ncol(slot(x,"data")))) {
nc <- gr[2, 3]
nr <- gr[1, 3]
cs <- gr[2, 2]
maa<-as.image.SpatialGridDataFrame(x[,i])
y <- maa$z
typ <- "numeric"
if (i%in%which.fac)
typ="factor"
y<-y[1:(nr-(((nr/np)-floor(nr/np)))*np),
1:(nc-(((nc/np)-floor(nc/np)))*np)]
nr<-nrow(y)
nc<-ncol(y)
if (typ=="factor") {
repr<- as.numeric(levels(factor(as.vector(y))))
y <- as.numeric(as.character(factor(y)))
y <- matrix(y, nrow=nr, ncol=nc)
}
y[is.na(y)]<--9999
xs<-matrix(0, nrow=nr/np, ncol=nc/np)
if (typ == "numeric") {
mat<-.C("regrouascnumr", as.double(t(y)), as.double(t(xs)),
as.double(nrow(y)), as.double(ncol(y)),
as.double(nrow(xs)), as.double(ncol(xs)),
PACKAGE = "adehabitatMA")[[2]]
} else {
mat<-.C("regroufacascr", as.double(t(y)),
as.double(t(xs)), as.integer(np),
as.integer(length(repr)), as.integer(nrow(y)),
as.integer(ncol(y)),
as.integer(nrow(xs)), as.integer(ncol(xs)),
PACKAGE = "adehabitatMA")[[2]]
}
mat<-matrix(mat,ncol=ncol(xs), byrow=TRUE)
mat[mat==-9999]<-NA
maa$z <- mat
maa$x <- mean(maa$x[1:np]) + c(0:(nr/np - 1)) * cs*np
maa$y <- mean(maa$y[1:np]) + c(0:(nc/np - 1)) * cs*np
maa <- image2Grid(maa)
maa <- as(maa, "SpatialPixelsDataFrame")
gridded(maa) <- TRUE
res[[i]] <- maa
}
names(res) <- names(slot(x, "data"))
re <- do.call("data.frame",lapply(res, function(x) x[[1]]))
coordinates(re) <- coordinates(res[[1]])
gridded(re) <- TRUE
if (!is.na(pfs))
proj4string(re) <- CRS(pfs)
return(re)
} |
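# Overlap measure of order q derived from a beta-diversity value and the number of
# assemblages N; q == 1 is approximated by q = 0.99999 to avoid the undefined
# exponent 1 - q = 0.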
UqN <- function(beta,qvalue,N){
if(qvalue==1){qvalue=0.99999}
value = ((1/beta)^(1-qvalue) - (1/N)^(1-qvalue)) / (1 - (1/N)^(1-qvalue))
return(value)
} |
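# Checks homogeneity of variances across groups using the Bartlett, Fligner-Killeen or
# Levene test; method = "auto" runs a normality check first and picks Fligner-Killeen
# when it fails (p < .05), Bartlett otherwise.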
check_homogeneity <- function(x, method = c("bartlett", "fligner", "levene", "auto"), ...) {
UseMethod("check_homogeneity")
}
check_homogeneity.default <- function(x, method = c("bartlett", "fligner", "levene", "auto"), ...) {
method <- match.arg(method)
resp <- insight::find_response(x)
pred <- insight::find_predictors(x, component = "conditional", flatten = TRUE)
ws_pred <- pred != make.names(pred)
if (any(ws_pred)) {
pred[ws_pred] <- paste0("`", pred[ws_pred], "`")
}
if (length(pred) > 1) {
pred <- paste0("interaction(", paste0(pred, collapse = ", "), ")", collapse = "")
}
f <- stats::as.formula(sprintf("%s ~ %s", resp, pred))
if (method == "auto") {
check <- tryCatch(
{
utils::capture.output(p <- check_normality(x))
p
},
error = function(e) {
NULL
}
)
if (is.null(check)) {
insight::print_color("'check_homogeneity()' cannot perform check for normality. Please specify the 'method'-argument for the test of equal variances.\n", "red")
return(NULL)
}
method <- ifelse(check < 0.05, "fligner", "bartlett")
}
if (method == "fligner") {
r <- stats::fligner.test(f, data = insight::get_data(x))
p.val <- r$p.value
} else if (method == "bartlett") {
r <- stats::bartlett.test(f, data = insight::get_data(x))
p.val <- r$p.value
} else if (method == "levene") {
insight::check_if_installed("car")
r <- car::leveneTest(x, ...)
p.val <- r$`Pr(>F)`
}
method.string <- switch(method,
"bartlett" = "Bartlett Test",
"fligner" = "Fligner-Killeen Test",
"levene" = "Levene's Test"
)
if (is.na(p.val)) {
warning(paste0("Could not perform ", method.string, "."), call. = FALSE)
invisible(NULL)
} else if (p.val < 0.05) {
insight::print_color(sprintf("Warning: Variances differ between groups (%s, p = %.3f).\n", method.string, p.val), "red")
} else {
insight::print_color(sprintf("OK: There is not clear evidence for different variances across groups (%s, p = %.3f).\n", method.string, p.val), "green")
}
attr(p.val, "object_name") <- deparse(substitute(x), width.cutoff = 500)
attr(p.val, "method") <- method.string
class(p.val) <- unique(c("check_homogeneity", "see_check_homogeneity", class(p.val)))
invisible(p.val)
}
check_homogeneity.afex_aov <- function(x, method = "levene", ...) {
  if (!requireNamespace("car", quietly = TRUE)) {
    stop("Package 'car' is required for this function to work.")
  }
if (tolower(method) != "levene") {
message("Only Levene's test for homogeneity supported for afex_aov")
}
if (length(attr(x, "between")) == 0) {
stop("Levene test is only aplicable to ANOVAs with between-subjects factors.")
}
data <- x$data$long
dv <- attr(x, "dv")
id <- attr(x, "id")
between <- names(attr(x, "between"))
is_covar <- sapply(attr(x, "between"), is.null)
ag_data <- stats::aggregate(data[, dv], data[, c(between, id)], mean)
colnames(ag_data)[length(c(between, id)) + 1] <- dv
if (any(is_covar)) {
warning(insight::format_message("Levene's test is not appropriate with quantitative explanatory variables. Testing assumption of homogeneity among factor groups only."), call. = FALSE)
between <- between[!is_covar]
}
form <- stats::formula(paste0(dv, "~", paste0(between, collapse = "*")))
test <- car::leveneTest(form, ag_data, center = mean, ...)
p.val <- test[1, "Pr(>F)"]
method.string <- "Levene's Test"
if (is.na(p.val)) {
warning(paste0("Could not perform ", method.string, "."), call. = FALSE)
invisible(NULL)
} else if (p.val < 0.05) {
insight::print_color(sprintf("Warning: Variances differ between groups (%s, p = %.3f).\n", method.string, p.val), "red")
} else {
insight::print_color(sprintf("OK: There is not clear evidence for different variances across groups (%s, p = %.3f).\n", method.string, p.val), "green")
}
attr(p.val, "object_name") <- deparse(substitute(x), width.cutoff = 500)
attr(p.val, "method") <- method.string
class(p.val) <- unique(c("check_homogeneity", "see_check_homogeneity", class(p.val)))
invisible(p.val)
} |
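# Returns 2 when a Paley type-I construction is available for the given order, i.e.
# when order - 1 is a prime congruent to 3 modulo 4; otherwise returns NULL.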
cdn_PaleyI<-function(order){
q <- (order-1)
if(numbers::mod(q,4)==3 & is.prime(q)==TRUE){
return(ret_value=2)
}else
return(NULL)
} |
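# Mann-Kendall style trend test: computes the S statistic and its variance from daily
# or monthly precipitation series (or a plain numeric vector), using the normal
# approximation of the p-value for n > 10.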
trend.test <- function(object, significance.level = 0.05) {
data <- NULL
if (is.element("precintcon.daily", class(object)) ||
is.element("precintcon.monthly", class(object))) {
if (is.element("precintcon.monthly", class(object)))
data <- object[[3]]
else
data <- as.vector((as.matrix(object[,3:33])))
  } else if (is.vector(object) && is.numeric(object))
data <- object
else
stop("Invalid data. Please, check your input object.")
n <- length(data)
data[is.na(data)] <- 0.0
S <- 0.0
for (i in 2:n) {
r <- data[(i:n)] - data[i-1]
S <- S + length(r[r>0]) + (-1 * length(r[r<0]))
}
S.var <- ((n * (n - 1) * (2 * n + 5))) / 18
Z <- 0.0
p.value <- 0.0
if (n > 10) {
Z <- if (S > 0) (S - 1) / sqrt(S.var) else if (S == 0) 0 else (S + 1) /sqrt(S.var)
    # one-sided p-value from the normal approximation; doubled below for the two-sided value
    p.value <- pnorm(-abs(Z))
}
return(data.frame(S=S, var.S=S.var, Z=Z,p.value=p.value, p.value.two.tailed=2*p.value))
} |
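# gWidgets2 GUI for plotting pull-up ratios (strvalidator): lets the user pick a
# dataset and kit, set plot options, and draw ratio-versus-height or ratio-versus-allele
# plots, optionally saving the result as an object or an image.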
plotPullup_gui <- function(env = parent.frame(), savegui = NULL, debug = FALSE, parent = NULL) {
.gData <- NULL
.gDataName <- NULL
.gPlot <- NULL
.theme <- c(
"theme_grey()", "theme_bw()", "theme_linedraw()",
"theme_light()", "theme_dark()", "theme_minimal()",
"theme_classic()", "theme_void()"
)
.scales <- c("fixed", "free_x", "free_y", "free")
val_obj <- NULL
fnc <- as.character(match.call()[[1]])
if (debug) {
print(paste("IN:", fnc))
}
strWinTitle <- "Plot pull-up"
strChkGui <- "Save GUI settings"
strBtnHelp <- "Help"
strFrmDataset <- "Dataset and kit"
strLblDataset <- "Pull-up dataset:"
strDrpDataset <- "<Select dataset>"
strLblSamples <- "samples"
strLblKit <- "and the kit used:"
strFrmOptions <- "Options"
strChkOverride <- "Override automatic titles"
strLblTitlePlot <- "Plot title:"
strLblTitleX <- "X title:"
strLblTitleY <- "Y title:"
strLblTheme <- "Plot theme:"
strChkSex <- "Exclude sex markers"
strExpPoints <- "Data points"
strLblShape <- "Shape:"
strLblAlpha <- "Alpha:"
strLblJitter <- "Jitter (width):"
strExpAxes <- "Axes"
strLblLimitY <- "Limit Y axis (min-max)"
strLblLimitX <- "Limit X axis (min-max)"
strLblScales <- "Scales:"
strExpLabels <- "X labels"
strLblSize <- "Text size (pts):"
strLblAngle <- "Angle:"
strLblJustification <- "Justification (v/h):"
strFrmPlot <- "Plot pull-up data"
strBtnRatioVsHeight <- "Ratio vs. Height"
strBtnRatioVsAllele <- "Ratio vs. Allele"
strBtnProcessing <- "Processing..."
strFrmSave <- "Save as"
strLblSave <- "Name for result:"
strBtnSaveObject <- "Save as object"
strBtnSaveImage <- "Save as image"
strBtnObjectSaved <- "Object saved"
strLblMainTitle <- "Pull-up ratio"
strLblYTitle <- "Ratio"
strLblXTitleHeight <- "Allele peak height (RFU)"
strLblXTitleAllele <- "Allele designation"
strMsgNull <- "Data frame is NULL or NA!"
strMsgTitleError <- "Error"
dtStrings <- getStrings(gui = fnc)
if (!is.null(dtStrings)) {
strtmp <- dtStrings["strWinTitle"]$value
strWinTitle <- ifelse(is.na(strtmp), strWinTitle, strtmp)
strtmp <- dtStrings["strChkGui"]$value
strChkGui <- ifelse(is.na(strtmp), strChkGui, strtmp)
strtmp <- dtStrings["strBtnHelp"]$value
strBtnHelp <- ifelse(is.na(strtmp), strBtnHelp, strtmp)
strtmp <- dtStrings["strFrmDataset"]$value
strFrmDataset <- ifelse(is.na(strtmp), strFrmDataset, strtmp)
strtmp <- dtStrings["strLblDataset"]$value
strLblDataset <- ifelse(is.na(strtmp), strLblDataset, strtmp)
strtmp <- dtStrings["strDrpDataset"]$value
strDrpDataset <- ifelse(is.na(strtmp), strDrpDataset, strtmp)
strtmp <- dtStrings["strLblSamples"]$value
strLblSamples <- ifelse(is.na(strtmp), strLblSamples, strtmp)
strtmp <- dtStrings["strLblKit"]$value
strLblKit <- ifelse(is.na(strtmp), strLblKit, strtmp)
strtmp <- dtStrings["strFrmOptions"]$value
strFrmOptions <- ifelse(is.na(strtmp), strFrmOptions, strtmp)
strtmp <- dtStrings["strChkOverride"]$value
strChkOverride <- ifelse(is.na(strtmp), strChkOverride, strtmp)
strtmp <- dtStrings["strLblTitlePlot"]$value
strLblTitlePlot <- ifelse(is.na(strtmp), strLblTitlePlot, strtmp)
strtmp <- dtStrings["strLblTitleX"]$value
strLblTitleX <- ifelse(is.na(strtmp), strLblTitleX, strtmp)
strtmp <- dtStrings["strLblTitleY"]$value
strLblTitleY <- ifelse(is.na(strtmp), strLblTitleY, strtmp)
strtmp <- dtStrings["strLblTheme"]$value
strLblTheme <- ifelse(is.na(strtmp), strLblTheme, strtmp)
strtmp <- dtStrings["strChkSex"]$value
strChkSex <- ifelse(is.na(strtmp), strChkSex, strtmp)
strtmp <- dtStrings["strExpPoints"]$value
strExpPoints <- ifelse(is.na(strtmp), strExpPoints, strtmp)
strtmp <- dtStrings["strLblShape"]$value
strLblShape <- ifelse(is.na(strtmp), strLblShape, strtmp)
strtmp <- dtStrings["strLblAlpha"]$value
strLblAlpha <- ifelse(is.na(strtmp), strLblAlpha, strtmp)
strtmp <- dtStrings["strLblJitter"]$value
strLblJitter <- ifelse(is.na(strtmp), strLblJitter, strtmp)
strtmp <- dtStrings["strExpAxes"]$value
strExpAxes <- ifelse(is.na(strtmp), strExpAxes, strtmp)
strtmp <- dtStrings["strLblLimitY"]$value
strLblLimitY <- ifelse(is.na(strtmp), strLblLimitY, strtmp)
strtmp <- dtStrings["strLblLimitX"]$value
strLblLimitX <- ifelse(is.na(strtmp), strLblLimitX, strtmp)
strtmp <- dtStrings["strLblScales"]$value
strLblScales <- ifelse(is.na(strtmp), strLblScales, strtmp)
strtmp <- dtStrings["strExpLabels"]$value
strExpLabels <- ifelse(is.na(strtmp), strExpLabels, strtmp)
strtmp <- dtStrings["strLblSize"]$value
strLblSize <- ifelse(is.na(strtmp), strLblSize, strtmp)
strtmp <- dtStrings["strLblAngle"]$value
strLblAngle <- ifelse(is.na(strtmp), strLblAngle, strtmp)
strtmp <- dtStrings["strLblJustification"]$value
strLblJustification <- ifelse(is.na(strtmp), strLblJustification, strtmp)
strtmp <- dtStrings["strFrmPlot"]$value
strFrmPlot <- ifelse(is.na(strtmp), strFrmPlot, strtmp)
strtmp <- dtStrings["strBtnRatioVsHeight"]$value
strBtnRatioVsHeight <- ifelse(is.na(strtmp), strBtnRatioVsHeight, strtmp)
strtmp <- dtStrings["strBtnRatioVsAllele"]$value
strBtnRatioVsAllele <- ifelse(is.na(strtmp), strBtnRatioVsAllele, strtmp)
strtmp <- dtStrings["strBtnProcessing"]$value
strBtnProcessing <- ifelse(is.na(strtmp), strBtnProcessing, strtmp)
strtmp <- dtStrings["strFrmSave"]$value
strFrmSave <- ifelse(is.na(strtmp), strFrmSave, strtmp)
strtmp <- dtStrings["strLblSave"]$value
strLblSave <- ifelse(is.na(strtmp), strLblSave, strtmp)
strtmp <- dtStrings["strBtnSaveObject"]$value
strBtnSaveObject <- ifelse(is.na(strtmp), strBtnSaveObject, strtmp)
strtmp <- dtStrings["strBtnSaveImage"]$value
strBtnSaveImage <- ifelse(is.na(strtmp), strBtnSaveImage, strtmp)
strtmp <- dtStrings["strBtnObjectSaved"]$value
strBtnObjectSaved <- ifelse(is.na(strtmp), strBtnObjectSaved, strtmp)
strtmp <- dtStrings["strLblMainTitle"]$value
strLblMainTitle <- ifelse(is.na(strtmp), strLblMainTitle, strtmp)
strtmp <- dtStrings["strLblYTitle"]$value
strLblYTitle <- ifelse(is.na(strtmp), strLblYTitle, strtmp)
strtmp <- dtStrings["strLblXTitleHeight"]$value
strLblXTitleHeight <- ifelse(is.na(strtmp), strLblXTitleHeight, strtmp)
strtmp <- dtStrings["strLblXTitleAllele"]$value
strLblXTitleAllele <- ifelse(is.na(strtmp), strLblXTitleAllele, strtmp)
strtmp <- dtStrings["strMsgNull"]$value
strMsgNull <- ifelse(is.na(strtmp), strMsgNull, strtmp)
strtmp <- dtStrings["strMsgTitleError"]$value
strMsgTitleError <- ifelse(is.na(strtmp), strMsgTitleError, strtmp)
}
w <- gwindow(title = strWinTitle, visible = FALSE)
addHandlerUnrealize(w, handler = function(h, ...) {
.saveSettings()
if (!is.null(parent)) {
focus(parent)
}
if (gtoolkit() == "tcltk") {
if (as.numeric(gsub("[^0-9]", "", packageVersion("gWidgets2tcltk"))) <= 106) {
message("tcltk version <= 1.0.6, returned TRUE!")
return(TRUE)
} else {
message("tcltk version >1.0.6, returned FALSE!")
return(FALSE)
}
} else {
message("RGtk2, returned FALSE!")
return(FALSE)
}
})
gv <- ggroup(
horizontal = FALSE,
spacing = 5,
use.scrollwindow = FALSE,
container = w,
expand = TRUE
)
gh <- ggroup(container = gv, expand = FALSE, fill = "both")
savegui_chk <- gcheckbox(text = strChkGui, checked = FALSE, container = gh)
addSpring(gh)
help_btn <- gbutton(text = strBtnHelp, container = gh)
addHandlerChanged(help_btn, handler = function(h, ...) {
print(help(fnc, help_type = "html"))
})
f0 <- gframe(
text = strFrmDataset,
horizontal = TRUE,
spacing = 2,
container = gv
)
glabel(text = strLblDataset, container = f0)
dataset_drp <- gcombobox(
items = c(
strDrpDataset,
listObjects(
env = env,
obj.class = "data.frame"
)
),
selected = 1,
editable = FALSE,
container = f0,
ellipsize = "none"
)
f0_samples_lbl <- glabel(
text = paste(" (0 ", strLblSamples, ") ", sep = ""),
container = f0
)
glabel(text = strLblKit, container = f0)
kit_drp <- gcombobox(
items = getKit(),
selected = 1,
editable = FALSE,
container = f0,
ellipsize = "none"
)
addHandlerChanged(dataset_drp, handler = function(h, ...) {
    val_obj <<- svalue(dataset_drp)
requiredCol <- c(
"Sample.Name", "Marker", "Dye", "Allele", "Height",
"Size", "Data.Point", "P.Marker", "P.Dye", "P.Allele",
"P.Height", "P.Size", "P.Data.Point", "Delta", "Ratio"
)
ok <- checkDataset(
name = val_obj, reqcol = requiredCol,
env = env, parent = w, debug = debug
)
if (ok) {
.gData <<- get(val_obj, envir = env)
.gDataName <<- val_obj
svalue(f5_save_edt) <- paste(val_obj, "_ggplot", sep = "")
svalue(f0_samples_lbl) <- paste(" (",
length(unique(.gData$Sample.Name)),
" ", strLblSamples, ")",
sep = ""
)
kitIndex <- detectKit(.gData, index = TRUE)
svalue(kit_drp, index = TRUE) <- kitIndex
.enablePlotButtons()
} else {
.gData <<- NULL
svalue(f5_save_edt) <- ""
svalue(dataset_drp, index = TRUE) <- 1
svalue(f0_samples_lbl) <- paste(" (0 ", strLblSamples, ") ", sep = "")
}
})
f1 <- gframe(
text = strFrmOptions,
horizontal = FALSE,
spacing = 2,
container = gv
)
titles_chk <- gcheckbox(
text = strChkOverride,
checked = FALSE, container = f1
)
addHandlerChanged(titles_chk, handler = function(h, ...) {
.updateGui()
})
titles_group <- ggroup(
container = f1, spacing = 1, horizontal = FALSE,
expand = TRUE, fill = TRUE
)
glabel(text = strLblTitlePlot, container = titles_group, anchor = c(-1, 0))
title_edt <- gedit(expand = TRUE, fill = TRUE, container = titles_group)
glabel(text = strLblTitleX, container = titles_group, anchor = c(-1, 0))
x_title_edt <- gedit(expand = TRUE, fill = TRUE, container = titles_group)
glabel(text = strLblTitleY, container = titles_group, anchor = c(-1, 0))
y_title_edt <- gedit(expand = TRUE, fill = TRUE, container = titles_group)
f1g2 <- glayout(container = f1)
f1g2[1, 1] <- glabel(text = strLblTheme, anchor = c(-1, 0), container = f1g2)
f1g2[1, 2] <- f1_theme_drp <- gcombobox(
items = .theme,
selected = 1,
container = f1g2,
ellipsize = "none"
)
f1_drop_chk <- gcheckbox(
text = strChkSex,
checked = TRUE,
container = f1
)
addHandlerChanged(f1_drop_chk, handler = function(h, ...) {
.enablePlotButtons()
})
f7 <- gframe(
text = strFrmPlot,
horizontal = TRUE,
container = gv
)
plot_height_btn <- gbutton(text = strBtnRatioVsHeight, container = f7)
plot_allele_btn <- gbutton(text = strBtnRatioVsAllele, container = f7)
addHandlerChanged(plot_height_btn, handler = function(h, ...) {
requiredCol <- c(
"Sample.Name", "Marker", "Dye", "Allele", "Height",
"Size", "Data.Point", "P.Marker", "P.Dye", "P.Allele",
"P.Height", "P.Size", "P.Data.Point", "Delta", "Ratio"
)
ok <- checkDataset(
name = val_obj, reqcol = requiredCol,
env = env, parent = w, debug = debug
)
if (ok) {
enabled(plot_height_btn) <- FALSE
.plotPullup(what = "Height")
enabled(plot_height_btn) <- TRUE
}
})
addHandlerChanged(plot_allele_btn, handler = function(h, ...) {
requiredCol <- c(
"Sample.Name", "Marker", "Dye", "Allele", "Height",
"Size", "Data.Point", "P.Marker", "P.Dye", "P.Allele",
"P.Height", "P.Size", "P.Data.Point", "Delta", "Ratio"
)
ok <- checkDataset(
name = val_obj, reqcol = requiredCol,
env = env, parent = w, debug = debug
)
if (ok) {
enabled(plot_allele_btn) <- FALSE
.plotPullup(what = "Allele")
enabled(plot_allele_btn) <- TRUE
}
})
f5 <- gframe(
text = strFrmSave,
horizontal = TRUE,
spacing = 2,
container = gv
)
glabel(text = strLblSave, container = f5)
f5_save_edt <- gedit(container = f5, expand = TRUE, fill = TRUE)
f5_save_btn <- gbutton(text = strBtnSaveObject, container = f5)
f5_ggsave_btn <- gbutton(text = strBtnSaveImage, container = f5)
addHandlerClicked(f5_save_btn, handler = function(h, ...) {
val_name <- svalue(f5_save_edt)
blockHandlers(f5_save_btn)
svalue(f5_save_btn) <- strBtnProcessing
unblockHandlers(f5_save_btn)
enabled(f5_save_btn) <- FALSE
saveObject(
name = val_name, object = .gPlot,
parent = w, env = env, debug = debug
)
blockHandlers(f5_save_btn)
svalue(f5_save_btn) <- strBtnObjectSaved
unblockHandlers(f5_save_btn)
})
addHandlerChanged(f5_ggsave_btn, handler = function(h, ...) {
val_name <- svalue(f5_save_edt)
ggsave_gui(
ggplot = .gPlot, name = val_name,
parent = w, env = env, savegui = savegui, debug = debug
)
})
e2 <- gexpandgroup(
text = strExpPoints,
horizontal = FALSE,
container = f1
)
visible(e2) <- FALSE
grid2 <- glayout(container = e2)
grid2[1, 1] <- glabel(text = strLblShape, container = grid2)
grid2[1, 2] <- e2_shape_spb <- gspinbutton(
from = 0, to = 25,
by = 1, value = 18,
container = grid2
)
grid2[1, 3] <- glabel(text = strLblAlpha, container = grid2)
grid2[1, 4] <- e2_alpha_spb <- gspinbutton(
from = 0, to = 1,
by = 0.01, value = 0.60,
container = grid2
)
grid2[1, 5] <- glabel(text = strLblJitter, container = grid2)
grid2[1, 6] <- e2_jitter_edt <- gedit(text = "0", width = 4, container = grid2)
e3 <- gexpandgroup(
text = strExpAxes,
horizontal = FALSE,
container = f1
)
visible(e3) <- FALSE
grid3 <- glayout(container = e3, spacing = 1)
grid3[1, 1:2] <- glabel(text = strLblLimitY, container = grid3)
grid3[2, 1] <- e3_y_min_edt <- gedit(text = "", width = 5, container = grid3)
grid3[2, 2] <- e3_y_max_edt <- gedit(text = "", width = 5, container = grid3)
grid3[3, 1:2] <- glabel(text = strLblLimitX, container = grid3)
grid3[4, 1] <- e3_x_min_edt <- gedit(text = "", width = 5, container = grid3)
grid3[4, 2] <- e3_x_max_edt <- gedit(text = "", width = 5, container = grid3)
grid3[1, 3] <- glabel(text = " ", container = grid3)
grid3[1, 4] <- glabel(text = strLblScales, container = grid3)
grid3[2:4, 4] <- e3_scales_opt <- gradio(
items = .scales,
selected = 2,
horizontal = FALSE,
container = grid3
)
addHandlerChanged(e3_scales_opt, handler = function(h, ...) {
.enablePlotButtons()
})
e4 <- gexpandgroup(
text = strExpLabels,
horizontal = FALSE,
container = f1
)
visible(e4) <- FALSE
grid4 <- glayout(container = e4)
grid4[1, 1] <- glabel(text = strLblSize, container = grid4)
grid4[1, 2] <- e4_size_edt <- gedit(text = "8", width = 4, container = grid4)
grid4[1, 3] <- glabel(text = strLblAngle, container = grid4)
grid4[1, 4] <- e4_angle_spb <- gspinbutton(
from = 0, to = 360, by = 1,
value = 270,
container = grid4
)
grid4[2, 1] <- glabel(text = strLblJustification, container = grid4)
grid4[2, 2] <- e4_vjust_spb <- gspinbutton(
from = 0, to = 1, by = 0.1,
value = 0.5,
container = grid4
)
grid4[2, 3] <- e4_hjust_spb <- gspinbutton(
from = 0, to = 1, by = 0.1,
value = 0,
container = grid4
)
.plotPullup <- function(what) {
val_titles <- svalue(titles_chk)
val_title <- svalue(title_edt)
val_xtitle <- svalue(x_title_edt)
val_ytitle <- svalue(y_title_edt)
val_shape <- as.numeric(svalue(e2_shape_spb))
val_alpha <- as.numeric(svalue(e2_alpha_spb))
val_jitter <- as.numeric(svalue(e2_jitter_edt))
val_ymin <- as.numeric(svalue(e3_y_min_edt))
val_ymax <- as.numeric(svalue(e3_y_max_edt))
val_xmin <- as.numeric(svalue(e3_x_min_edt))
val_xmax <- as.numeric(svalue(e3_x_max_edt))
val_angle <- as.numeric(svalue(e4_angle_spb))
val_vjust <- as.numeric(svalue(e4_vjust_spb))
val_hjust <- as.numeric(svalue(e4_hjust_spb))
val_size <- as.numeric(svalue(e4_size_edt))
val_scales <- svalue(e3_scales_opt)
val_kit <- svalue(kit_drp)
val_drop <- svalue(f1_drop_chk)
val_theme <- svalue(f1_theme_drp)
if (debug) {
print("val_title")
print(val_title)
print("val_xtitle")
print(val_xtitle)
print("val_ytitle")
print(val_ytitle)
print("val_shape")
print(val_shape)
print("val_alpha")
print(val_alpha)
print("val_jitter")
print(val_jitter)
print("val_ymin")
print(val_ymin)
print("val_ymax")
print(val_ymax)
print("val_angle")
print(val_angle)
print("val_vjust")
print(val_vjust)
print("val_hjust")
print(val_hjust)
print("val_size")
print(val_size)
print("str(.gData)")
print(str(.gData))
print("val_drop")
print(val_drop)
print("val_kit")
print(val_kit)
print("val_theme")
print(val_theme)
}
ymax <- NULL
ymin <- NULL
    if (!is.null(.gData) && !all(is.na(.gData))) {
.gData <- sortMarker(
data = .gData,
kit = val_kit,
add.missing.levels = TRUE
)
dyes <- unique(getKit(kit = val_kit, what = "Color")$Color)
dyes <- addColor(data = dyes, have = "Color", need = "Dye")
.gData$Dye <- factor(.gData$Dye, levels = dyes)
.gData$P.Dye <- factor(.gData$P.Dye, levels = dyes)
if (val_drop) {
sexMarkers <- getKit(kit = val_kit, what = "Sex.Marker")
if (length(sexMarkers) > 0) {
n0 <- nrow(.gData)
for (m in seq(along = sexMarkers)) {
.gData <- .gData[.gData$Marker != sexMarkers[m], ]
}
n1 <- nrow(.gData)
message(paste(n1, " rows after removing ", n0 - n1, " sex marker rows.", sep = ""))
.gData$Marker <- factor(.gData$Marker,
levels = levels(.gData$Marker)[!levels(.gData$Marker) %in% sexMarkers]
)
}
}
if (!is.numeric(.gData$Height)) {
.gData$Height <- as.numeric(as.character(.gData$Height))
message("'Height' not numeric, converting to numeric.")
}
if (!is.numeric(.gData$Ratio)) {
.gData$Ratio <- as.numeric(as.character(.gData$Ratio))
message("'Ratio' not numeric, converting to numeric.")
}
markerDye <- data.frame(Marker = levels(.gData$Marker))
markerDye <- addColor(data = markerDye, kit = val_kit)
markerDye <- markerDye[c("Marker", "Dye")]
uniqueMarkerDye <- markerDye[!duplicated(markerDye), ]
val_ncol <- unique(table(uniqueMarkerDye$Dye))
val_palette <- unique(getKit(kit = val_kit, what = "Color")$Color)
val_palette <- addColor(data = val_palette, have = "Color", need = "R.Color")
if (debug) {
print("Before plot: str(.gData)")
print(str(.gData))
print("Number of columns")
print(val_ncol)
print("val_palette")
print(val_palette)
}
if (val_titles) {
mainTitle <- val_title
xTitle <- val_xtitle
yTitle <- val_ytitle
}
if (!val_titles) {
if (debug) {
print("Using default titles.")
}
if (what == "Height") {
mainTitle <- strLblMainTitle
xTitle <- strLblXTitleHeight
yTitle <- strLblYTitle
} else if (what == "Allele") {
mainTitle <- strLblMainTitle
xTitle <- strLblXTitleAllele
yTitle <- strLblYTitle
} else {
stop(paste("what=", what, " not handled!"))
}
}
if (debug) {
print("Titles:")
print(mainTitle)
print(xTitle)
print(yTitle)
}
dt <- data.table::data.table(.gData)
tmp <- dt[, list(Sum = sum(Ratio)), by = Marker]
      if (any(tmp$Sum == 0, na.rm = TRUE) || !all(levels(dt$Marker) %in% unique(dt$Marker))) {
message("Empty facets detected! If this leads to plot error try another scale for axes.")
}
if (length(val_ncol) == 1) {
if (debug) {
print("Simple plot.")
}
if (what == "Height") {
gp <- ggplot(.gData, aes_string(x = "Height", y = "Ratio", colour = "P.Dye"))
} else if (what == "Allele") {
gp <- ggplot(.gData, aes_string(x = "Allele", y = "Ratio", colour = "P.Dye"))
}
if (debug) {
print("Plot created.")
}
gp <- gp + eval(parse(text = val_theme))
gp <- gp + geom_point(
shape = val_shape, alpha = val_alpha,
position = position_jitter(height = 0, width = val_jitter)
)
gp <- gp + facet_grid("Dye ~ Marker")
gp <- gp + facet_wrap(as.formula(paste("~", "Marker")),
ncol = val_ncol,
drop = FALSE, scales = val_scales
)
gp <- gp + scale_colour_manual(guide = FALSE, values = val_palette, drop = FALSE)
if (!is.na(val_ymin) && !is.na(val_ymax)) {
val_y <- c(val_ymin, val_ymax)
} else {
val_y <- NULL
}
if (!is.na(val_xmin) && !is.na(val_xmax)) {
val_x <- c(val_xmin, val_xmax)
} else {
val_x <- NULL
}
gp <- gp + coord_cartesian(xlim = val_x, ylim = val_y)
if (debug) {
print(paste(
"Plot zoomed to xlim:", paste(val_x, collapse = ","),
"ylim:", paste(val_y, collapse = ",")
))
}
gp <- gp + guides(fill = guide_legend(reverse = TRUE))
gp <- gp + theme(axis.text.x = element_text(
angle = val_angle,
hjust = val_hjust,
vjust = val_vjust,
size = val_size
))
gp <- gp + labs(title = mainTitle)
gp <- gp + xlab(xTitle)
gp <- gp + ylab(yTitle)
print(gp)
svalue(f5_save_btn) <- strBtnSaveObject
enabled(f5_save_btn) <- TRUE
} else if (length(val_ncol) > 1) {
if (debug) {
print("Complex plot.")
}
if (val_scales %in% c("fixed", "free_x")) {
ymax <- max(.gData$Ratio, na.rm = TRUE) * 1.05
ymin <- min(.gData$Ratio, na.rm = TRUE) * 0.95
}
noDyes <- length(dyes)
noRows <- length(dyes) + 2
g <- gtable::gtable(
widths = grid::unit(c(1.5, 1), c("lines", "null")),
heights = grid::unit(c(1.5, rep(1, noDyes), 1.5), c("line", rep("null", noDyes), "line"))
)
g <- gtable::gtable_add_grob(g, grid::textGrob(mainTitle), t = 1, b = 1, l = 2, r = 2)
g <- gtable::gtable_add_grob(g, grid::textGrob(xTitle), t = noRows, b = noRows, l = 2, r = 2)
g <- gtable::gtable_add_grob(g, grid::textGrob(yTitle, rot = 90), t = 1, b = noRows, l = 1, r = 1)
gLevel <- data.frame(Marker = levels(.gData$Marker))
gLevel <- addColor(data = gLevel, kit = val_kit)
for (d in seq(along = dyes)) {
gDataSub <- .gData[.gData$Dye == dyes[d], ]
gDyeLevel <- as.character(gLevel$Marker[gLevel$Dye == dyes[d]])
if (nrow(gDataSub) == 0) {
tmp <- data.frame(
Sample.Name = "", Marker = gDyeLevel, Dye = dyes[d], Allele = "", Height = 0,
Size = 0, Data.Point = 0, P.Marker = NA, P.Dye = dyes[d], P.Allele = NA,
P.Height = 0, P.Size = 0, P.Data.Point = 0, Delta = 0, Ratio = 0
)
gDataSub <- plyr::rbind.fill(gDataSub, tmp)
}
gDataSub$Marker <- factor(gDataSub$Marker, levels = gDyeLevel)
gDataSub$Dye <- factor(dyes[d])
gDataSub$P.Dye <- factor(gDataSub$P.Dye, levels = dyes)
if (what == "Height") {
gp <- ggplot(gDataSub, aes_string(x = "Height", y = "Ratio", colour = "P.Dye"))
} else if (what == "Allele") {
gp <- ggplot(gDataSub, aes_string(x = "Allele", y = "Ratio", colour = "P.Dye"))
}
gp <- gp + eval(parse(text = val_theme))
gp <- gp + geom_point(
shape = val_shape, alpha = val_alpha,
position = position_jitter(height = 0, width = val_jitter)
)
gp <- gp + scale_colour_manual(guide = FALSE, values = val_palette, drop = FALSE)
gp <- gp + facet_grid("Dye ~ Marker", scales = val_scales, drop = FALSE)
gp <- gp + theme(plot.margin = grid::unit(c(0.25, 1.25, 0, 0), "lines"))
        if (!is.na(val_ymin) && !is.na(val_ymax)) {
          val_y <- c(val_ymin, val_ymax)
        } else if (val_scales %in% c("fixed", "free_x")) {
          val_y <- c(ymin, ymax)
        } else {
          val_y <- NULL
        }
        if (!is.na(val_xmin) && !is.na(val_xmax)) {
          val_x <- c(val_xmin, val_xmax)
        } else {
          val_x <- NULL
        }
gp <- gp + coord_cartesian(xlim = val_x, ylim = val_y)
if (debug) {
print(paste(
"Plot zoomed to xlim:", paste(val_x, collapse = ","),
"ylim:", paste(val_y, collapse = ",")
))
}
gp <- gp + labs(title = element_blank())
gp <- gp + theme(axis.title.x = element_blank())
gp <- gp + theme(axis.text.x = element_text(
angle = val_angle,
hjust = val_hjust,
vjust = val_vjust,
size = val_size
))
gp <- gp + theme(axis.title.y = element_blank())
gp <- gp + theme(legend.position = "none")
g <- gtable::gtable_add_grob(g, ggplotGrob(gp), t = (d + 1), b = (d + 1), l = 2, r = 2)
}
grid::grid.newpage()
grid::grid.draw(g)
gp <- gridExtra::arrangeGrob(g)
svalue(f5_save_btn) <- strBtnSaveObject
enabled(f5_save_btn) <- FALSE
} else {
stop(paste("Unsupported number of columns:", val_ncol))
}
.gPlot <<- gp
} else {
gmessage(
msg = strMsgNull,
title = strMsgTitleError,
icon = "error"
)
}
}
.updateGui <- function() {
val <- svalue(titles_chk)
if (val) {
enabled(titles_group) <- TRUE
} else {
enabled(titles_group) <- FALSE
}
}
.enablePlotButtons <- function() {
enabled(plot_allele_btn) <- TRUE
enabled(plot_height_btn) <- TRUE
}
.loadSavedSettings <- function() {
if (!is.null(savegui)) {
svalue(savegui_chk) <- savegui
enabled(savegui_chk) <- FALSE
if (debug) {
print("Save GUI status set!")
}
} else {
if (exists(".strvalidator_plotPullup_gui_savegui", envir = env, inherits = FALSE)) {
svalue(savegui_chk) <- get(".strvalidator_plotPullup_gui_savegui", envir = env)
}
if (debug) {
print("Save GUI status loaded!")
}
}
if (debug) {
print(svalue(savegui_chk))
}
if (svalue(savegui_chk)) {
if (exists(".strvalidator_plotPullup_gui_title", envir = env, inherits = FALSE)) {
svalue(title_edt) <- get(".strvalidator_plotPullup_gui_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_title_chk", envir = env, inherits = FALSE)) {
svalue(titles_chk) <- get(".strvalidator_plotPullup_gui_title_chk", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_x_title", envir = env, inherits = FALSE)) {
svalue(x_title_edt) <- get(".strvalidator_plotPullup_gui_x_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_y_title", envir = env, inherits = FALSE)) {
svalue(y_title_edt) <- get(".strvalidator_plotPullup_gui_y_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_sex", envir = env, inherits = FALSE)) {
svalue(f1_drop_chk) <- get(".strvalidator_plotPullup_gui_sex", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_shape", envir = env, inherits = FALSE)) {
svalue(e2_shape_spb) <- get(".strvalidator_plotPullup_gui_points_shape", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_alpha", envir = env, inherits = FALSE)) {
svalue(e2_alpha_spb) <- get(".strvalidator_plotPullup_gui_points_alpha", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_jitter", envir = env, inherits = FALSE)) {
svalue(e2_jitter_edt) <- get(".strvalidator_plotPullup_gui_points_jitter", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_y_min", envir = env, inherits = FALSE)) {
svalue(e3_y_min_edt) <- get(".strvalidator_plotPullup_gui_axes_y_min", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_y_max", envir = env, inherits = FALSE)) {
svalue(e3_y_max_edt) <- get(".strvalidator_plotPullup_gui_axes_y_max", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_x_min", envir = env, inherits = FALSE)) {
svalue(e3_x_min_edt) <- get(".strvalidator_plotPullup_gui_axes_x_min", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_x_max", envir = env, inherits = FALSE)) {
svalue(e3_x_max_edt) <- get(".strvalidator_plotPullup_gui_axes_x_max", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_scales", envir = env, inherits = FALSE)) {
svalue(e3_scales_opt) <- get(".strvalidator_plotPullup_gui_axes_scales", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_size", envir = env, inherits = FALSE)) {
svalue(e4_size_edt) <- get(".strvalidator_plotPullup_gui_xlabel_size", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_angle", envir = env, inherits = FALSE)) {
svalue(e4_angle_spb) <- get(".strvalidator_plotPullup_gui_xlabel_angle", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_justh", envir = env, inherits = FALSE)) {
svalue(e4_hjust_spb) <- get(".strvalidator_plotPullup_gui_xlabel_justh", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_justv", envir = env, inherits = FALSE)) {
svalue(e4_vjust_spb) <- get(".strvalidator_plotPullup_gui_xlabel_justv", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_theme", envir = env, inherits = FALSE)) {
svalue(f1_theme_drp) <- get(".strvalidator_plotPullup_gui_theme", envir = env)
}
if (debug) {
print("Saved settings loaded!")
}
}
}
.saveSettings <- function() {
if (svalue(savegui_chk)) {
assign(x = ".strvalidator_plotPullup_gui_savegui", value = svalue(savegui_chk), envir = env)
assign(x = ".strvalidator_plotPullup_gui_sex", value = svalue(f1_drop_chk), envir = env)
assign(x = ".strvalidator_plotPullup_gui_title", value = svalue(title_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_title_chk", value = svalue(titles_chk), envir = env)
assign(x = ".strvalidator_plotPullup_gui_x_title", value = svalue(x_title_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_y_title", value = svalue(y_title_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_points_shape", value = svalue(e2_shape_spb), envir = env)
assign(x = ".strvalidator_plotPullup_gui_points_alpha", value = svalue(e2_alpha_spb), envir = env)
assign(x = ".strvalidator_plotPullup_gui_points_jitter", value = svalue(e2_jitter_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_axes_y_min", value = svalue(e3_y_min_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_axes_y_max", value = svalue(e3_y_max_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_axes_x_min", value = svalue(e3_x_min_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_axes_x_max", value = svalue(e3_x_max_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_axes_scales", value = svalue(e3_scales_opt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_xlabel_size", value = svalue(e4_size_edt), envir = env)
assign(x = ".strvalidator_plotPullup_gui_xlabel_angle", value = svalue(e4_angle_spb), envir = env)
assign(x = ".strvalidator_plotPullup_gui_xlabel_justh", value = svalue(e4_hjust_spb), envir = env)
assign(x = ".strvalidator_plotPullup_gui_xlabel_justv", value = svalue(e4_vjust_spb), envir = env)
assign(x = ".strvalidator_plotPullup_gui_theme", value = svalue(f1_theme_drp), envir = env)
} else {
if (exists(".strvalidator_plotPullup_gui_savegui", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_savegui", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_title", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_title_chk", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_title_chk", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_x_title", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_x_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_y_title", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_y_title", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_sex", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_sex", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_shape", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_points_shape", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_alpha", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_points_alpha", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_points_jitter", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_points_jitter", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_y_min", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_axes_y_min", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_y_max", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_axes_y_max", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_x_min", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_axes_x_min", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_x_max", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_axes_x_max", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_axes_scales", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_axes_scales", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_size", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_xlabel_size", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_angle", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_xlabel_angle", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_justh", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_xlabel_justh", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_xlabel_justv", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_xlabel_justv", envir = env)
}
if (exists(".strvalidator_plotPullup_gui_theme", envir = env, inherits = FALSE)) {
remove(".strvalidator_plotPullup_gui_theme", envir = env)
}
if (debug) {
print("Settings cleared!")
}
}
if (debug) {
print("Settings saved!")
}
}
.loadSavedSettings()
.updateGui()
visible(w) <- TRUE
focus(w)
} |
iirlp2mb <- function(b, ...) UseMethod("iirlp2mb")
iirlp2mb.Arma <- function(b, Wo, Wt, type, ...) {
iirlp2mb(b$b, b$a, Wo, Wt, type, ...)
}
iirlp2mb.Zpg <- function(b, Wo, Wt, type, ...) {
ba <- as.Arma(b)
iirlp2mb(ba$b, ba$a, Wo, Wt, type, ...)
}
iirlp2mb.Sos <- function(b, Wo, Wt, type, ...) {
ba <- as.Arma(b)
iirlp2mb(ba$b, ba$a, Wo, Wt, type, ...)
}
iirlp2mb.default <- function(b, a, Wo, Wt, type = c("pass", "stop"), ...) {
type <- match.arg(type)
if (type == "pass") {
pass_stop <- -1
} else if (type == "stop") {
pass_stop <- 1
}
if (!isPosscal(Wo) || Wo > 1) {
stop(paste("Frequency value Wo of prototype filter",
"must be a scalar between 0 and 1"))
}
if (any(Wt < 0) || any(Wt > 1)) {
stop("Frequency values Wt of target filter must be between 0 and 1")
}
Wt <- unique(sort(Wt))
K <- apd(pi * Wo)
phi <- pi * Wt
P <- apd(phi)
PP <- rev(P)
AllpassDen <- P - (K[2] * PP)
AllpassDen <- AllpassDen / AllpassDen[1]
AllpassNum <- pass_stop * rev(AllpassDen)
ba <- transform(b, a, AllpassNum, AllpassDen, pass_stop)
ba
}
apd <- function(phi) {
Pkm1 <- 1
for (k in seq_along(phi)) {
P <- pk(Pkm1, k, phi[k])
Pkm1 <- P
}
P
}
pk <- function(Pkm1, k, phik) {
Pk <- rep(0L, k + 1)
sin_k <- sin(phik / 2)
cos_k <- cos(phik / 2)
for (i in 1:k) {
Pk[i] <- Pk[i] + sin_k * Pkm1[i] - ((-1)^k * cos_k * Pkm1[k + 1 - i])
Pk[i + 1] <- Pk[i + 1] +
sin_k * Pkm1[i] +
((-1)^k * cos_k * Pkm1[k + 1 - i])
}
Pk <- Pk / Pk[1]
Pk
}
ppower <- function(Ppower, i, powcols) {
if (i == 0) {
p <- 1
} else {
p <- NULL
for (j in 1:powcols) {
if (is.na(Ppower[i, j])) break
p <- cbind(p, Ppower[i, j])
}
}
p
}
polysum <- function(p1, p2) {
n1 <- length(p1)
n2 <- length(p2)
if (n1 > n2) {
p2 <- c(p2, rep(0L, n1 - n2))
} else if (n2 > n1) {
p1 <- c(p1, rep(0L, n2 - n1))
}
poly <- p1 + p2
poly
}
transform <- function(B, A, PP, P, pass_stop) {
na <- length(A)
nb <- length(B)
n <- max(na, nb)
np <- length(P)
powcols <- np + (np - 1) * (n - 2)
Ppower <- matrix(NA, nrow = n - 1, ncol = powcols)
Ptemp <- P
for (i in 1:(n - 1)) {
for (j in seq_along(Ptemp)) {
Ppower[i, j] <- Ptemp[j]
}
Ptemp <- conv(Ptemp, P)
}
Num <- Den <- NULL
for (i in 1:n) {
if ((n - i) == 0) {
p_pownmi <- 1
} else {
p_pownmi <- ppower(Ppower, n - i, powcols)
}
if (i == 1) {
pp_powim1 <- 1
} else {
pp_powim1 <- rev(ppower(Ppower, i - 1, powcols))
}
if (i <= nb) {
Bterm <- (pass_stop ^ (i - 1)) * B[i] * conv(pp_powim1, p_pownmi)
Num <- polysum(Num, Bterm)
}
if (i <= na) {
Aterm <- (pass_stop ^ (i - 1)) * A[i] * conv(pp_powim1, p_pownmi)
Den <- polysum(Den, Aterm)
}
}
  # normalize by the leading denominator coefficient; save it first so the
  # numerator is scaled by the original value rather than by 1
  den1 <- Den[1]
  Den <- Den / den1
  Num <- Num / den1
Arma(Num, Den)
} |
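# Hedged usage sketch for iirlp2mb(): transform a lowpass prototype into a
# multiband filter. butter() and freqz() are assumed to come from the same
# signal-processing package and are not defined in this file.
# lp <- butter(4, 0.5)                                    # lowpass prototype, Wo = 0.5
# mb <- iirlp2mb(lp$b, lp$a, 0.5, c(0.2, 0.4, 0.6, 0.8))  # four passbands
# freqz(mb$b, mb$a)                                       # inspect the multiband response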
isStrictlyNegativeNumberOrNaOrNanOrInfScalarOrNull <- function(argument, default = NULL, stopIfNot = FALSE, message = NULL, argumentName = NULL) {
checkarg(argument, "N", default = default, stopIfNot = stopIfNot, nullAllowed = TRUE, n = 1, zeroAllowed = FALSE, negativeAllowed = TRUE, positiveAllowed = FALSE, nonIntegerAllowed = TRUE, naAllowed = TRUE, nanAllowed = TRUE, infAllowed = TRUE, message = message, argumentName = argumentName)
} |
pat_dygraph <- function(
pat = NULL,
parameter = "pm25",
sampleSize = 5000,
title = NULL,
xlab = NULL,
ylab = NULL,
tlim = NULL,
rollPeriod = 1,
showLegend = TRUE,
colors = NULL,
timezone = NULL
) {
MazamaCoreUtils::stopIfNull(pat)
if ( !pat_isPat(pat) )
stop("Parameter 'pat' is not a valid 'pa_timeseries' object.")
if ( pat_isEmpty(pat) )
stop("Parameter 'pat' has no data.")
pat <- pat_distinct(pat)
if ( is.null(timezone) )
timezone <- pat$meta$timezone
if ( !is.null(sampleSize) ) {
if ( sampleSize > 1 ) {
pat <-
pat %>%
pat_sample(sampleSize = sampleSize)
} else {
pat <-
pat %>%
pat_sample(sampleFraction = sampleSize)
}
}
if ( !is.null(tlim) ) {
dateWindow <- MazamaCoreUtils::parseDatetime(tlim, timezone = timezone)
} else {
dateWindow <- NULL
}
tzCount <- length(unique(pat$meta$timezone))
if (tzCount > 1) {
warning(paste0(tzCount, " timezones found. Using UTC time."))
tzone <- "UTC"
} else {
tzone <- unique(pat$meta$timezone)
}
datetime <- pat$data$datetime
pm25_A <- pat$data$pm25_A
pm25_B <- pat$data$pm25_B
temperature <- pat$data$temperature
humidity <- pat$data$humidity
pressure <- pat$data$pressure
label <- pat$meta$label
if ( is.null(parameter) || tolower(parameter) == "pm25" ) {
channelA <- xts::xts(x = pm25_A, order.by = datetime, tzone = tzone)
channelB <- xts::xts(x = pm25_B, order.by = datetime, tzone = tzone)
timeseriesMatrix <- cbind(channelA, channelB)
names(timeseriesMatrix) <- c("Channel A", "Channel B")
if ( is.null(ylab) )( ylab <- "\u03bcg / m\u00b3" )
if ( is.null(colors) )( colors <- c("red", "blue") )
} else if ( tolower(parameter) == "humidity" ) {
humidityData <- xts::xts(x = humidity, order.by = datetime, tzone = tzone)
timeseriesMatrix <- cbind(humidityData)
names(timeseriesMatrix) <- c(paste0(label, "-Humidity"))
if ( is.null(ylab) )( ylab <- "RH%")
} else if ( tolower(parameter) == "temperature" || tolower(parameter) == "temp" ) {
temperatureData <- xts::xts(x = temperature, order.by = datetime, tzone = tzone)
timeseriesMatrix <- cbind(temperatureData)
names(timeseriesMatrix) <- c(paste0(label, "-Temperature"))
if ( is.null(ylab) )( ylab <- "\u00b0F" )
} else if ( tolower(parameter) == "pressure" || tolower(parameter) == "hpa" ) {
pressureData <- xts::xts(x = pressure, order.by = datetime, tzone = tzone)
timeseriesMatrix <- cbind(pressureData)
names(timeseriesMatrix) <- c(paste0(label, "-Pressure"))
if ( is.null(ylab) )( ylab <- "hPa" )
} else {
stop("Required parameter 'parameter' is not recognized")
}
if ( is.null(title) ) title <- label
show <- ifelse(showLegend, "always", "never")
graph <-
dygraphs::dygraph(timeseriesMatrix, main = title, xlab = xlab, ylab = ylab) %>%
dygraphs::dyOptions(useDataTimezone = TRUE) %>%
dygraphs::dyLegend(show = show, width = 250, labelsSeparateLines = TRUE) %>%
dygraphs::dyRangeSelector(dateWindow = dateWindow) %>%
dygraphs::dyRoller(rollPeriod = rollPeriod) %>%
dygraphs::dyOptions(colors = colors)
return( graph )
} |
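# Hedged usage sketch for pat_dygraph(): 'example_pat' stands in for a demo
# 'pa_timeseries' object shipped with the same package (an assumption).
# pat_dygraph(example_pat, parameter = "pm25", rollPeriod = 3, showLegend = TRUE)
# pat_dygraph(example_pat, parameter = "temperature", tlim = c("2019-06-01", "2019-06-15"))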
teams <- read.csv("lahman/teams.csv")
tail(teams)
myteams <- subset(teams, yearID > 2000)[ , c("teamID", "yearID",
"lgID", "G", "W", "L", "R", "RA")]
tail(myteams)
myteams$RD <- with(myteams, R - RA)
myteams$Wpct <- with(myteams, W / (W + L))
plot(myteams$RD, myteams$Wpct,
xlab="run differential",
ylab="winning percentage")
linfit <- lm(Wpct ~ RD, data=myteams)
abline(a=coef(linfit)[1], b=coef(linfit)[2], lwd=2)
myteams$linWpct <- predict(linfit)
myteams$linResiduals <- residuals(linfit)
plot(myteams$RD, myteams$linResiduals,
xlab="run differential",
ylab="residual")
abline(h=0, lty=3)
points(c(68, 88), c(.0749, -.0733), pch=19)
text(68, .0749, "LAA '08", pos=4, cex=.8)
text(88, -.0733, "CLE '06", pos=4, cex=.8)
mean(myteams$linResiduals)
linRMSE <- sqrt(mean(myteams$linResiduals ^ 2))
linRMSE
nrow(subset(myteams, abs(linResiduals) < linRMSE)) /
nrow(myteams)
nrow(subset(myteams, abs(linResiduals) < 2 * linRMSE)) /
nrow(myteams)
myteams$pytWpct <- with(myteams, R ^ 2 / (R ^ 2 + RA ^ 2))
myteams$pytResiduals <- myteams$Wpct - myteams$pytWpct
sqrt(mean(myteams$pytResiduals ^ 2))
myteams$logWratio <- log(myteams$W / myteams$L)
myteams$logRratio <- log(myteams$R / myteams$RA)
pytFit <- lm(logWratio ~ 0 + logRratio, data=myteams)
pytFit
gl2011 <- read.table("gl2011.txt", sep=",")
glheaders <- read.csv("game_log_header.csv")
names(gl2011) <- names(glheaders)
BOS2011 <- subset(gl2011, HomeTeam=="BOS" | VisitingTeam=="BOS")[
, c("VisitingTeam", "HomeTeam", "VisitorRunsScored",
"HomeRunsScore")]
head(BOS2011)
BOS2011$ScoreDiff <- with(BOS2011, ifelse(HomeTeam == "BOS",
HomeRunsScore - VisitorRunsScored,
VisitorRunsScored - HomeRunsScore))
BOS2011$W <- BOS2011$ScoreDiff > 0
aggregate(abs(BOS2011$ScoreDiff), list(W=BOS2011$W), summary)
results <- gl2011[,c("VisitingTeam", "HomeTeam",
"VisitorRunsScored", "HomeRunsScore")]
results$winner <- ifelse(results$HomeRunsScore >
results$VisitorRunsScored, as.character(results$HomeTeam),
as.character(results$VisitingTeam))
results$diff <- abs(results$VisitorRunsScored -
results$HomeRunsScore)
onerungames <- subset(results, diff == 1)
onerunwins <- as.data.frame(table(onerungames$winner))
names(onerunwins) <- c("teamID", "onerunW")
teams2011 <- subset(myteams, yearID == 2011)
teams2011[teams2011$teamID == "LAA", "teamID"] <- "ANA"
teams2011 <- merge(teams2011, onerunwins)
plot(teams2011$onerunW, teams2011$pytResiduals,
xlab="one run wins",
ylab="Pythagorean residuals")
identify(teams2011$onerunW, teams2011$pytResiduals,
labels=teams2011$teamID)
pit <- read.csv("lahman/pitching.csv")
top_closers <- subset(pit, GF > 50 & ERA < 2.5)[ ,c("playerID",
"yearID", "teamID")]
teams_top_closers <- merge(myteams, top_closers)
summary(teams_top_closers$pytResiduals)
D(expression(G * R ^ 2 / (R ^ 2 + RA ^ 2)), "R")
IR <- function(RS=5, RA=5){
round((RS ^ 2 + RA ^ 2)^2 / (2 * RS * RA ^ 2), 1)
}
IRtable <- expand.grid(RS=seq(3, 6, .5), RA=seq(3, 6, .5))
rbind(head(IRtable), tail(IRtable))
IRtable$IRW <- IR(IRtable$RS, IRtable$RA)
xtabs(IRW ~ RS + RA, data=IRtable) |
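# Hedged follow-up sketch: reuse the exponent fitted by pytFit above to recompute
# the Pythagorean expectation with a data-driven exponent instead of the fixed 2.
# k <- coef(pytFit)
# myteams$pytWpct_k <- with(myteams, R ^ k / (R ^ k + RA ^ k))
# sqrt(mean((myteams$Wpct - myteams$pytWpct_k) ^ 2))   # RMSE with the fitted exponent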
TLS <- function(level){
x <- NULL
if(level==1){
x1 <- github.cssegisanddata.covid19(country = "Timor-Leste")
x2 <- ourworldindata.org(id = "TLS")
x <- full_join(x1, x2, by = "date")
}
return(x)
} |
IsoTestBH <- function (rp, FDR, type = c("BH", "BY"),
stat = c("E2", "Williams", "Marcus", "M", "ModifM")){
type <- match.arg(type)
stat <- match.arg(stat)
Probe.ID <- rp[,1]
rpraw <- switch(stat,
E2 = rp[,2],
Williams = rp[,3],
Marcus = rp[,4],
M = rp[,5],
ModifM = rp[,6])
adjp <- cbind(rpraw, p.adjust(rpraw, "BH"),p.adjust(rpraw,"BY"))
place.keep33 <- if (type == "BH"){
which(adjp[,2] <= FDR)
} else {
which(adjp[,3] <= FDR)
}
sign.Probe.ID <- Probe.ID[place.keep33]
if (type == "BH") {
sign.genes <- data.frame(sign.Probe.ID,
place.keep33,
adjp[adjp[,2] <= FDR,1],
adjp[adjp[,2] <= FDR,2])
} else {
sign.genes <- data.frame(sign.Probe.ID,
place.keep33,
adjp[adjp[,3] <= FDR,1],
adjp[adjp[,3] <= FDR,3])
}
names(sign.genes) <- c("Probe.ID", "row.name", "raw p-values",
paste(type, "adjusted p values", sep = " "))
return(sign.genes)
} |
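# Hedged usage sketch for IsoTestBH(): 'rp' is assumed to hold probe IDs in
# column 1 and raw p-values for the five statistics (E2, Williams, Marcus, M,
# ModifM) in columns 2-6, matching the column indexing above.
# sig <- IsoTestBH(rp, FDR = 0.05, type = "BH", stat = "E2")
# head(sig)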
knitr::opts_chunk$set(comment = NA)
library(trelliscopejs)
library(ggplot2)
library(gapminder)
str(gapminder)
qplot(year, lifeExp, data = subset(gapminder, continent == "Europe")) +
facet_wrap(~ country + continent) +
theme_bw()
qplot(year, lifeExp, data = gapminder) +
xlim(1948, 2011) + ylim(10, 95) + theme_bw() +
facet_wrap(~ country + continent)
qplot(class, cty, data = mpg, geom = c("boxplot", "jitter")) +
ylim(7, 37) + theme_bw()
library(dplyr)
library(tidyr)
library(purrr)
library(gapminder)
by_country <- nest(gapminder, data = !one_of(c("country", "continent")))
by_country
country_model <- function(df)
lm(lifeExp ~ year, data = df)
by_country <- by_country %>%
mutate(model = map(data, country_model))
by_country
library(plotly)
library(trelliscopejs)
country_plot <- function(data, model) {
plot_ly(data = data, x = ~year, y = ~lifeExp,
type = "scatter", mode = "markers", name = "data") %>%
add_trace(data = data, x = ~year, y = ~predict(model),
mode = "lines", name = "lm") %>%
layout(
xaxis = list(range = c(1948, 2011)),
yaxis = list(range = c(10, 95)),
showlegend = FALSE)
}
by_country <- by_country %>%
mutate(data_plot = map2_plot(data, model, country_plot))
by_country
by_country <- by_country %>%
mutate(resid_mad = cog(
map_dbl(model, ~ mad(resid(.x))),
desc = "median absolute deviation of residuals")) |
pkg_ref_cache.bug_reports_host <- function(x, ...) {
UseMethod("pkg_ref_cache.bug_reports_host")
}
pkg_ref_cache.bug_reports_host.default <- function(x, ...) {
if (is.null(x$bug_reports_url)) return(NULL)
sapply(strsplit(domain(x$bug_reports_url), "\\."), function(dm) dm[length(dm)-1])
} |
setOldClass(c("behavr", "data.table"))
NULL
behavr <- function(x, metadata){
check_conform(x, metadata)
out <- data.table::copy(x)
setbehavr(out, metadata)
return(out)
}
setbehavr <- function(x, metadata){
check_conform(x, metadata)
m <- data.table::copy(metadata)
data.table::setattr(x,"metadata",m)
data.table::setattr(x,"class",c("behavr","data.table","data.frame"))
}
"[.behavr" <- function(x, ..., meta=FALSE,verbose=FALSE){
m <- data.table::copy(meta(x))
old_key <- data.table::key(m)
  if(!identical(old_key, data.table::key(x)))
stop("Something is wrong with this table.
Keys in metadata and data are different!")
if(meta==TRUE){
out <- m[...]
inline <- ifelse(data.table::address(out) == data.table::address(m), TRUE, FALSE)
if(inline){
if(!identical(old_key, data.table::key(out)))
stop("You are trying to modify metadata in a way that removes its key. This is not allowed!")
data.table::setattr(x,"metadata",m)
}
return(out)
}
out <- NextMethod()
if(!data.table::is.data.table(out))
return(out)
inline <- ifelse(data.table::address(out) == data.table::address(x), TRUE, FALSE)
if(!identical(data.table::key(out), old_key)){
data.table::setattr(out,"metadata",NULL)
data.table::setattr(out,"class",c("data.table","data.frame"))
}
else{
md <- meta(x)
if(!inline){
unique_ids <- unique(utils::getS3method("[","data.table")(out, j=data.table::key(out), with=FALSE))
mismatches <- md[!unique_ids]
if(nrow(mismatches) > 0){
if(verbose ==TRUE){
        message(sprintf("Implicitly removing %i individuals from metadata (as they are absent from the data)", nrow(mismatches)))
}
md <- md[unique_ids]
}
}
data.table::setattr(out,"metadata",md)
data.table::setattr(out,"class",c("behavr","data.table","data.frame"))
}
if(inline)
invisible(out)
return(out)
}
is.behavr <- function(x){
data.table::is.data.table(x) & "behavr" %in% class(x)
}
print.behavr <- function(x,...){
cat("\n ==== METADATA ====\n\n")
print(x[meta=TRUE],class=TRUE,...)
cat("\n ====== DATA ======\n\n")
NextMethod(x, class=TRUE,...)
}
summary.behavr <- function(object, detailed = FALSE, ...){
. = .SD = .N = NULL
met <- object[meta=TRUE]
n_key <- length(data.table::key(met))
n_mvar <- ncol(met) - n_key
n_var <- ncol(object) - n_key
n_reads <- nrow(object)
if(!detailed){
cat("behavr table with:\n")
cat(sprintf(" %i\tindividuals\n", nrow(met)))
cat(sprintf(" %i\tmetavariables\n", n_mvar))
cat(sprintf(" %i\tvariables\n", n_var))
cat(sprintf(" %s\tmeasurements\n", format( as.double(n_reads), scientific=TRUE)))
cat(sprintf(" %i\tkey (%s)\n", n_key, paste(data.table::key(met),collapse=", ")))
}
else{
cat("\n Summary of each individual (one per row):\n")
if(!"t" %in% colnames(object))
sum_dt <- object[,
.(data_points =.N),
by = c(data.table::key(object))]
else
sum_dt <- object[,
.(data_points =.N,
time_range = sprintf("[%s -> %s (%s)]",min(t), max(t), max(t) -min(t))),
by = c(data.table::key(object))]
print(rejoin(sum_dt))
}
} |
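# Hedged usage sketch for behavr(): a toy data/metadata pair, both keyed on 'id'
# (column names are illustrative).
# library(data.table)
# met <- data.table(id = c("a", "b"), condition = c("ctrl", "test"), key = "id")
# dat <- data.table(id = rep(c("a", "b"), each = 5), t = rep(1:5, 2), x = rnorm(10), key = "id")
# bdt <- behavr(dat, met)
# summary(bdt, detailed = TRUE)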
"fa.rgraph" <-
function(fa.results,out.file=NULL,labels=NULL,cut=.3,simple=TRUE,
size=c(8,6), node.font=c("Helvetica", 14),
edge.font=c("Helvetica", 10), rank.direction=c("RL","TB","LR","BT"), digits=1,main="Factor Analysis",graphviz=TRUE, ...){
if (!requireNamespace('Rgraphviz')) {stop("I am sorry, you need to have loaded the Rgraphviz package")
nodes <- function() {}
addEdge <- function() {}
subGraph <- function(){} }
Phi <- NULL
if((!is.matrix(fa.results)) && (!is.data.frame(fa.results))) {factors <- as.matrix(fa.results$loadings)
if(!is.null(fa.results$Phi)) Phi <- fa.results$Phi} else {factors <- fa.results}
rank.direction <- match.arg(rank.direction)
num.var <- dim(factors)[1]
if (is.null(num.var) ){num.var <- length(factors)
num.factors <- 1} else {
num.factors <- dim(factors)[2]}
if (simple) {k=1} else {k <- num.factors}
vars <- paste("V",1:num.var,sep="")
fact <- paste("F",1:num.factors,sep="")
clust.graph <- new("graphNEL",nodes=c(vars,fact),edgemode="directed")
graph.shape <- c(rep("box",num.var),rep("ellipse",num.factors))
graph.rank <- c(rep("sink",num.var),rep("min",num.factors))
names(graph.shape) <- nodes(clust.graph)
names(graph.rank) <- nodes(clust.graph)
edge.label <- rep("",num.var*k)
edge.name <- rep("",num.var*k)
  names(edge.label) <- seq_len(num.var * k)
edge.dir <- rep("forward",num.var*k)
l <- factors
if (num.factors ==1) {
for (i in 1:num.var) { clust.graph <- addEdge(fact[1], vars[i], clust.graph,1)
edge.label[i] <- round(factors[i],digits)
edge.name[i] <- paste(fact[1],"~",vars[i],sep="")
}
} else {
if(simple){
m1 <- matrix(apply(t(apply(l, 1, abs)), 1, which.max),
ncol = 1)
for (i in 1:num.var) {clust.graph <- addEdge(fact[m1[i]], vars[i], clust.graph,1)
edge.label[i] <- round(factors[i,m1[i]],digits)
edge.name[i] <- paste(fact[m1[i]],"~",vars[i],sep="")
}
} else {
k <- 1
for (i in 1:num.var) {
for (f in 1:num.factors) { if (abs(factors[i,f]) > cut) {clust.graph <- addEdge(fact[f], vars[i], clust.graph,1)
edge.label[k] <- round(factors[i,f],digits)
edge.name[k] <- paste(fact[f],"~",vars[i],sep="")
k <- k+1 }
}
}
}
}
if(!is.null(Phi)) {
k <- num.var +1
for (f in 2:num.factors) {
for (f1 in 1:(f-1)) { if(Phi[f,f1] > cut) {
clust.graph <- addEdge(fact[f1], fact[f], clust.graph,1)
edge.label[k] <- round(Phi[f,f1],digits)
edge.name[k] <- paste(fact[f1],"~",fact[f],sep="")
edge.dir[k] <- paste("both")
k <- k+1}
}
}
}
nAttrs <- list()
eAttrs <- list()
if (!is.null(labels)) {var.labels <- c(labels,fact)
names(var.labels) <- nodes(clust.graph)
nAttrs$label <- var.labels
names(edge.label) <- edge.name
}
names(edge.label) <- edge.name
names(edge.dir) <- edge.name
nAttrs$shape <- graph.shape
nAttrs$rank <- graph.rank
eAttrs$label <- edge.label
eAttrs$dir <- edge.dir
attrs <- list(node = list(shape = "ellipse", fixedsize = FALSE),graph=list(rankdir=rank.direction, fontsize=edge.font[2],bgcolor="white" ))
obs.var <- subGraph(vars,clust.graph)
cluster.vars <- subGraph(fact,clust.graph)
observed <- list(list(graph=obs.var,cluster=TRUE,attrs=c(rank="sink")),list(graph=cluster.vars,cluster=FALSE ,attrs=c(rank = "source")))
observed <- list(list(graph=obs.var,cluster=TRUE,attrs=c(rank="sink")))
if(!is.null(out.file) ){toDotty(clust.graph,out.file,nodeAttrs = nAttrs, edgeAttrs = eAttrs, attrs = attrs) }
plot(clust.graph, nodeAttrs = nAttrs, edgeAttrs = eAttrs, attrs = attrs,subGList=observed,main=main)
return(clust.graph)
}
|
library(glmnet)
gregElasticNett <- function(data, xpopd, indices, alpha, lambda){
d <- data[indices,]
y <- d[,1]
pis <- d[,2]
p <- dim(d)[2] - 2
xsample_d <- d[, 3:(p + 2)]
pred.mod <- glmnet(x = as.matrix(xsample_d[,-1]), y = y, alpha = alpha, family = "gaussian", standardize = FALSE, weights = pis^{-1})
beta_hat <- predict(pred.mod, s = lambda, type = "coefficients")[1:dim(xsample_d)[2],]
return(beta_hat %*% (xpopd) + t(y - xsample_d %*% beta_hat) %*% pis^(-1))
} |
plot_spdsplits <- function(act_data, ...) UseMethod('plot_spdsplits')
plot_spdsplits.list <- function(act_data, stoken, acts = 1, id = NULL, units = 'metric', fill = 'darkblue', ...){
act_data <- compile_activities(act_data, acts = acts, id = id, units = units)
  plot_spdsplits.default(act_data, stoken, units = units, fill = fill, ...)
}
plot_spdsplits.default <- function(act_data, stoken, units = 'metric', fill = 'darkblue', ...){
act <- get_activity(act_data$id[1], stoken)
sptyp <- paste0('splits_', units)
sptyp <- gsub('imperial$', 'standard', sptyp)
splt <- lapply(act[[sptyp]], function(x) x[['average_speed']]) %>%
do.call('rbind', .) %>%
data.frame(spd = ., split = 1:length(.))
splt$spd <- 3.6 * splt$spd
ave <- 3.6 * act$average_speed
ylab <- 'Average Speed (km/hr)'
xlab <- 'Split (km)'
if(units == 'imperial'){
splt$spd <- splt$spd * 0.621371
ave <- 0.621371 * ave
ylab <- gsub('km', 'mi', ylab)
xlab <- gsub('km', 'mi', xlab)
}
p <- ggplot2::ggplot(splt, ggplot2::aes(x = factor(split), y = spd)) +
ggplot2::geom_bar(stat = 'identity', fill = fill) +
ggplot2::theme_bw() +
ggplot2::scale_x_discrete(xlab) +
ggplot2::scale_y_continuous(ylab) +
ggplot2::geom_hline(ggplot2::aes(yintercept = ave), linetype = 'dashed')
return(p)
} |
ped = function(id, fid, mid, sex, famid = "", reorder = TRUE, validate = TRUE, isConnected = FALSE, verbose = FALSE) {
n = length(id)
if(n == 0)
stop2("`id` vector has length 0")
if(length(fid) != n)
stop2(sprintf("Incompatible input: length(id) = %d, but length(fid) = %d", n, length(fid)))
if(length(mid) != n)
stop2(sprintf("Incompatible input: length(id) = %d, but length(mid) = %d", n, length(mid)))
if(length(sex) != n)
stop2(sprintf("Incompatible input: length(id) = %d, but length(sex) = %d", n, length(sex)))
id = as.character(id)
fid = as.character(fid)
mid = as.character(mid)
sex = as.integer(sex)
famid = as.character(famid)
if(anyDuplicated.default(id) > 0)
stop2("Duplicated entry in `id` vector: ", id[duplicated(id)])
missing = c("", "0", NA)
FIDX = match(fid, id)
FIDX[fid %in% missing] = 0L
MIDX = match(mid, id)
MIDX[mid %in% missing] = 0L
if(any(is.na(FIDX)))
stop2("`fid` entry does not appear in `id` vector: ", fid[is.na(FIDX)])
if(any(is.na(MIDX)))
stop2("`mid` entry does not appear in `id` vector: ", mid[is.na(MIDX)])
if(all(FIDX + MIDX > 0))
stop2("Pedigree has no founders")
if(length(famid) != 1)
stop2("`famid` must be a character string: ", famid)
if(!isConnected) {
comps = connectedComponents(id, fidx = FIDX, midx = MIDX)
if(length(comps) > 1) {
famids = paste0(famid, "_comp", seq_along(comps))
pedlist = lapply(seq_along(comps), function(i) {
idx = match(comps[[i]], id)
ped(id = id[idx], fid = fid[idx], mid = mid[idx],
sex = sex[idx], famid = famids[i], reorder = reorder,
validate = validate, isConnected = TRUE, verbose = verbose)
})
return(structure(pedlist, names = famids, class = c("pedList", "list")))
}
}
x = newPed(id, FIDX, MIDX, sex, famid)
if(validate)
validatePed(x)
if(reorder)
x = parentsBeforeChildren(x)
x
}
singleton = function(id = 1, sex = 1, famid = "") {
if (length(id) != 1)
stop2("`id` must have length 1")
sex = validate_sex(sex, nInd = 1)
ped(id = id, fid = 0, mid = 0, sex = sex, famid = famid)
}
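# Hedged usage sketch for ped() and singleton(): a founder couple with one son,
# plus an unrelated single female (labels are illustrative).
# trio <- ped(id = c("fa", "mo", "boy"), fid = c("", "", "fa"), mid = c("", "", "mo"), sex = c(1, 2, 1))
# extra <- singleton("NN", sex = 2)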
newPed = function(ID, FIDX, MIDX, SEX, FAMID) {
if(!all(is.character(ID), is.integer(FIDX), is.integer(MIDX),
is.integer(SEX), is.character(FAMID)))
stop2("Type error in the creation of `ped` object")
x = list(ID = ID,
FIDX = FIDX,
MIDX = MIDX,
SEX = SEX,
FAMID = FAMID,
UNBROKEN_LOOPS = FALSE,
LOOP_BREAKERS = NULL,
FOUNDER_INBREEDING = NULL,
MARKERS = NULL)
if(length(ID) == 1) {
class(x) = c("singleton", "ped")
return(x)
}
class(x) = "ped"
nucs = peelingOrder(x)
lastnuc_link = nucs[[length(nucs)]]$link
x$UNBROKEN_LOOPS = is.null(lastnuc_link)
x
}
validatePed = function(x) {
ID = x$ID; FIDX = x$FIDX; MIDX = x$MIDX; SEX = x$SEX; FAMID = x$FAMID
n = length(ID)
stopifnot2(is.character(ID), is.integer(FIDX), is.integer(MIDX), is.integer(SEX),
is.character(FAMID), is.singleton(x) == (n == 1))
stopifnot2(n > 0, length(FIDX) == n, length(MIDX) == n, length(SEX) == n,
all(FIDX >= 0), all(MIDX >= 0), all(FIDX <= n), all(MIDX <= n),
length(FAMID) == 1)
errs = character(0)
has1parent = (FIDX > 0) != (MIDX > 0)
if (any(has1parent))
errs = c(errs, paste("Individual", ID[has1parent], "has exactly 1 parent; this is not allowed"))
if (!all(SEX %in% 0:2))
errs = c(errs, paste("Illegal sex:", unique(setdiff(SEX, 0:2))))
self_anc = any_self_ancestry(x)
if(length(self_anc) > 0)
errs = c(errs, paste("Individual", self_anc, "is their own ancestor"))
if(anyDuplicated.default(ID) > 0)
errs = c(errs, paste("Duplicated ID label:", ID[duplicated(ID)]))
if(any(SEX[FIDX] == 2)) {
female_fathers_int = intersect(which(SEX == 2), FIDX)
first_child = ID[match(female_fathers_int, FIDX)]
    errs = c(errs, paste("Individual", ID[female_fathers_int],
                         "is female, but appears as the father of", first_child))
}
if(any(SEX[MIDX] == 1)) {
male_mothers_int = intersect(which(SEX == 1), MIDX)
first_child = ID[match(male_mothers_int, MIDX)]
    errs = c(errs, paste("Individual", ID[male_mothers_int],
                         "is male, but appears as the mother of", first_child))
}
if(length(errs) > 0) {
errs = c("Malformed pedigree.", errs)
stop2(paste0(errs, collapse = "\n "))
}
invisible(NULL)
}
any_self_ancestry = function(x) {
n = pedsize(x)
nseq = 1:n
FIDX = x$FIDX
MIDX = x$MIDX
self_parent = (nseq == FIDX) | (nseq == MIDX)
if(any(self_parent))
return(labels(x)[self_parent])
fou_int = founders(x, internal = TRUE)
OK = rep(FALSE, n)
OK[fou_int] = TRUE
for(i in 1:n) {
parents = which(OK)
children = which(FIDX %in% parents | MIDX %in% parents)
fatherOK = OK[FIDX[children]]
motherOK = OK[MIDX[children]]
childrenOK = children[fatherOK & motherOK]
if(all(OK[childrenOK]))
break
OK[childrenOK] = TRUE
}
labels(x)[!OK]
} |
context("code quality")
library(lintr)
test_that("Package Style", {
skip_on_cran()
major_lintr_version <- strsplit(as.character(packageVersion("lintr")), ".", fixed = TRUE)[[1]]
if (as.integer(major_lintr_version[1]) >= 2) {
lints <- with_defaults(
line_length_linter = line_length_linter(120),
cyclocomp_linter = cyclocomp_linter(37))
} else {
lints <- with_defaults(
line_length_linter = line_length_linter(120))
}
lints <- lints[!(names(lints) %in%
c("object_usage_linter", "camel_case_linter", "commas_linter", "multiple_dots_linter"))]
code_files <- list.files(
c("../../R", "../../tests"), "R$", full.names = TRUE, recursive = TRUE)
code_files <- code_files[!(code_files %in%
c("../../R/RcppExports.R"))]
lint_results <- lintr:::flatten_lints(lapply(code_files, function(file) {
if (interactive()) {
message(".", appendLF = FALSE)
}
lint(file, linters = lints, parse_settings = FALSE)
}))
if (interactive()) {
message()
}
lint_output <- NULL
if (length(lint_results) > 0) {
lint_results <- sapply(lint_results,
function(lint_res) {
paste(lint_res$filename, " (", lint_res$line_number, "): ", lint_res$message)
})
print(lint_results)
}
expect_true(length(lint_results) == 0, paste(lint_results, sep = "\n", collapse = "\n"))
}) |
knitr::opts_chunk$set(
echo = TRUE,
fig.width = 6,
fig.asp = 0.7
)
library("sftrack")
data('raccoon', package = 'sftrack')
coords = raccoon[,c('longitude','latitude')]
crs = '+init=epsg:4326'
group = list(id = raccoon$animal_id,month = as.POSIXlt(raccoon$timestamp)$mon+1)
active_group = c('id','month')
time = as.POSIXct(raccoon$timestamp, tz='EST')
error = raccoon$fix
my_sftrack <- as_sftrack(data = raccoon, coords = coords, group = group,
active_group = active_group, time = time,
crs = crs, error = error)
head(my_sftrack)
raccoon$time <- as.POSIXct(raccoon$timestamp, tz='EST')
raccoon$month <- as.POSIXlt(raccoon$timestamp)$mon+1
coords = c('longitude','latitude')
group = c(id = 'animal_id', month = 'month')
time = 'time'
error = 'fix'
my_sftraj <- as_sftraj(data = raccoon, coords = coords, group = group, time = time, error = error)
head(my_sftraj)
library("adehabitatLT")
ltraj_df <- as.ltraj(xy=raccoon[,c('longitude','latitude')], date = as.POSIXct(raccoon$timestamp),
id = raccoon$animal_id, typeII = TRUE,
infolocs = raccoon[,1:6] )
my_sf <- as_sftrack(ltraj_df)
head(my_sf)
library("sf")
df1 <- raccoon[!is.na(raccoon$latitude),]
sf_df <- st_as_sf(df1, coords=c('longitude','latitude'), crs = crs)
group = c(id = 'animal_id')
time_col = 'time'
new_sftraj <- as_sftraj(sf_df, group = group, time = time_col)
head(new_sftraj)
new_sftrack <- as_sftrack(sf_df, group = group, time= time_col)
head(new_sftrack)
coords = c('longitude','latitude')
group = c(id = 'animal_id', month = 'month')
time = 'time'
error = 'fix'
my_sftraj <- as_sftraj(data = raccoon, coords = coords, group = group, time = time, error = error)
my_sftrack <- as_sftrack(data = raccoon, coords = coords, group = group, time = time, error = error)
new_sftrack <- as_sftrack(my_sftraj)
new_sftraj <- as_sftraj(my_sftrack)
identical(my_sftraj,new_sftraj)
identical(my_sftrack,new_sftrack)
raccoon$time[1] <- raccoon$time[2]
try(as_sftrack(data = raccoon, coords = coords, group = group, time = time, error = error))
which_duplicated(data = raccoon , group = group, time = time)
raccoon <- raccoon[-2,]
my_sftrack <- as_sftrack(data = raccoon, coords = coords, group = group, time = time, error = error) |
odds.hk2malay <- function (x) {
malay <- x
malay[] <- NA_real_
malay[which(x > 0)] <- -100 / odds.hk2us(x[which(x > 0)])
malay
} |
normSpectra2D <- function(spectra, method = "zero2one") {
.chkArgs(mode = 21L)
chkSpectra(spectra)
ok <- c("zero2one", "TotInt", "minusPlus")
if (!method %in% ok) stop("Invalid method specified")
ns <- length(spectra$names)
if (method == "zero2one") {
for (i in 1:ns) {
rMin <- min(spectra$data[[i]], na.rm = TRUE)
rMax <- max(spectra$data[[i]], na.rm = TRUE)
spectra$data[[i]] <- .rescale(spectra$data[[i]], 0.0, 1.0, rMin, rMax)
}
}
if (method == "minusPlus") {
for (i in 1:ns) {
rMin <- min(spectra$data[[i]], na.rm = TRUE)
rMax <- max(spectra$data[[i]], na.rm = TRUE)
spectra$data[[i]] <- .rescale(spectra$data[[i]], -1.0, 1.0, rMin, rMax)
}
}
if (method == "TotInt") {
for (i in 1:ns) {
spectra$data[[i]] <- spectra$data[[i]] / sum(spectra$data[[i]], na.rm = TRUE)
}
}
chkSpectra(spectra)
return(spectra)
} |
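# Hedged usage sketch for normSpectra2D(): 'demo_spectra2D' stands in for a valid
# Spectra2D object (the name is an assumption, not defined here).
# demo_norm <- normSpectra2D(demo_spectra2D, method = "TotInt")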
NULL
tidy.stanreg <- function(x,
effects = "fixed",
conf.int = FALSE,
conf.level = 0.9,
conf.method=c("quantile","HPDinterval"),
...) {
conf.method <- match.arg(conf.method)
effects <-
match.arg(effects,
several.ok = TRUE,
choices = c(
"fixed", "ran_vals",
"ran_pars", "auxiliary"
)
)
if (any(effects %in% c("ran_vals", "ran_pars"))) {
if (!inherits(x, "lmerMod")) {
stop("Model does not have varying ('ran_vals') or hierarchical ('ran_pars') effects.")
}
}
nn <- c("estimate", "std.error")
ret_list <- list()
if ("fixed" %in% effects) {
nv_pars <- names(rstanarm::fixef(x))
ret <- cbind(
rstanarm::fixef(x),
rstanarm::se(x)[nv_pars]
)
if (inherits(x, "polr")) {
cp <- x$zeta
se_cp <- apply(as.matrix(x, pars = names(cp)), 2, stats::mad)
ret <- rbind(ret, cbind(cp, se_cp))
nv_pars <- c(nv_pars, names(cp))
}
if (conf.int) {
cifix <- switch(conf.method,
HPDinterval= {
m <- as.matrix(x$stanfit)
m <- m[,colnames(m) %in% nv_pars]
coda::HPDinterval(coda::as.mcmc(m),
prob=conf.level)
},
quantile=rstanarm::posterior_interval(
object = x,
pars = nv_pars,
prob = conf.level
)
)
ret <- data.frame(ret, cifix)
nn <- c(nn, "conf.low", "conf.high")
}
ret_list$non_ran_vals <- fix_data_frame(ret, newnames = nn, newcol="term")
}
if ("auxiliary" %in% effects) {
nn <- c("estimate", "std.error")
parnames <- rownames(x$stan_summary)
auxpars <- c(
"sigma", "shape", "overdispersion", "R2", "log-fit_ratio",
grep("mean_PPD", parnames, value = TRUE)
)
auxpars <- auxpars[which(auxpars %in% parnames)]
ret <- summary(x, pars = auxpars)[, c("50%", "sd"), drop = FALSE]
if (conf.int) {
ints <- rstanarm::posterior_interval(x, pars = auxpars, prob = conf.level)
ret <- data.frame(ret, ints)
nn <- c(nn, "conf.low", "conf.high")
}
ret_list$auxiliary <-
fix_data_frame(ret, newnames = nn, newcol="term")
}
if ("ran_pars" %in% effects) {
ret <- (rstanarm::VarCorr(x)
%>% as.data.frame()
%>% mutate_if(is.factor,as.character)
)
rscale <- "sdcor"
ran_prefix <- c("sd", "cor")
pfun <- function(x) {
v <- na.omit(unlist(x))
if (length(v) == 0) v <- "Observation"
p <- paste(v, collapse = ".")
if (!identical(ran_prefix, NA)) {
p <- paste(ran_prefix[length(v)], p, sep = "_")
}
return(p)
}
rownames(ret) <- paste(apply(ret[c("var1", "var2")], 1, pfun),
ret[, "grp"],
sep = "."
)
ret_list$hierarchical <- fix_data_frame(ret[c("grp", rscale)],
newcol="term",
newnames = c("group", "estimate"))
}
if ("ran_vals" %in% effects) {
nn <- c("estimate", "std.error")
s <- summary(x, pars = "varying")
ret <- cbind(s[, "50%"], rstanarm::se(x)[rownames(s)])
if (conf.int) {
ciran <- rstanarm::posterior_interval(x,
regex_pars = "^b\\[",
prob = conf.level
)
ret <- data.frame(ret, ciran)
nn <- c(nn, "conf.low", "conf.high")
}
double_splitter <- function(x, split1, sel1, split2, sel2) {
y <- unlist(lapply(strsplit(x, split = split1, fixed = TRUE), "[[", sel1))
unlist(lapply(strsplit(y, split = split2, fixed = TRUE), "[[", sel2))
}
vv <- fix_data_frame(ret, newnames = nn, newcol="term")
nn <- c("level", "group", "term", nn)
nms <- vv$term
vv$term <- NULL
lev <- double_splitter(nms, ":", 2, "]", 1)
grp <- double_splitter(nms, " ", 2, ":", 1)
trm <- double_splitter(nms, " ", 1, "[", 2)
vv <- data.frame(lev, grp, trm, vv)
ret_list$ran_vals <- fix_data_frame(vv, newnames = nn, newcol="term")
}
return(dplyr::bind_rows(ret_list))
}
glance.stanreg <- function(x, looic = FALSE, ...) {
glance_stan(x, looic = looic, type = "stanreg", ...)
}
glance_stan <- function(x, looic = FALSE, ..., type) {
sigma <- if (getRversion() >= "3.3.0") {
get("sigma", asNamespace("stats"))
} else {
get("sigma", asNamespace("rstanarm"))
}
if (type == "stanreg") {
algo <- x$algorithm
sim <- x$stanfit@sim
} else {
algo <- x$fit@stan_args[[1]][["method"]]
sim <- x$fit@sim
}
ret <- dplyr::tibble(algorithm = algo)
if (algo != "optimizing") {
pss <- sim$n_save
if (algo == "sampling") {
pss <- sum(pss - sim$warmup2)
}
ret <- dplyr::mutate(ret, pss = pss)
}
ret <- mutate(ret, nobs = stats::nobs(x))
if (length(sx <- sigma(x)) > 0) {
ret <- dplyr::mutate(ret, sigma = sx)
}
if (looic) {
if (algo == "sampling") {
if (type == "stanreg") {
loo1 <- rstanarm::loo(x, ...)
} else {
loo1 <- brms::loo(x, ...)
}
loo1_est <- loo1[["estimates"]]
ret <- data.frame(
ret,
rbind(loo1_est[
c("looic", "elpd_loo", "p_loo"),
"Estimate"
])
)
} else {
message("looic only available for models fit using MCMC")
}
}
dplyr::as_tibble(unrowname(ret))
} |
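# Hedged usage sketch for tidy.stanreg()/glance.stanreg(): assumes rstanarm is
# installed; the model below is purely illustrative.
# fit <- rstanarm::stan_glm(mpg ~ wt, data = mtcars, refresh = 0)
# tidy.stanreg(fit, conf.int = TRUE, conf.method = "quantile")
# glance.stanreg(fit, looic = FALSE)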
plotIntervals <- function(intervals)
{
errorCheck(intervals, FALSE)
intervals[, 'idx'] <- 1:nrow(intervals)
idx <- intervals[, 'idx']
left <- intervals[, 'left']
right <- intervals[, 'right']
p <- ggplot2::ggplot()
p <- p + ggplot2::theme(panel.border = ggplot2::element_blank())
p <- p + ggplot2::theme(panel.grid.major = ggplot2::element_blank(),
panel.grid.minor = ggplot2::element_blank())
p <- p + ggplot2::geom_segment(data = intervals, ggplot2::aes(x = left,
y = idx, xend = right, yend = idx))
p <- p + ggplot2::theme(axis.text.x = ggplot2::element_blank(),
axis.ticks.x = ggplot2::element_blank())
p <- p + ggplot2::theme(axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank())
p <- p + ggplot2::theme(axis.title.x = ggplot2::element_blank(),
axis.title.y = ggplot2::element_blank())
p
} |
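# Hedged usage sketch for plotIntervals(): a toy set of intervals with the
# 'left'/'right' columns indexed above (errorCheck() is assumed to accept such a
# data frame).
# iv <- data.frame(left = c(1, 2, 5), right = c(3, 4, 9))
# plotIntervals(iv)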
read.txt.Horiba <- function (file, cols = c (spc = "I / a.u.",
.wavelength = expression (Delta*tilde(nu) / cm^-1)),
header = TRUE, sep = "\t", row.names = NULL,
check.names = FALSE, ...){
spc <- read.txt.wide (file, cols = cols,
header = header, sep = sep, row.names = row.names,
check.names = check.names, ...)
spc
}
read.txt.Horiba.xy <- function (file, ...){
read.txt.Horiba (file = file,
cols = c (x = expression (x / mu*m),
y = expression (y / mu*m),
spc = "I / a.u.",
.wavelength = expression (Delta*tilde(nu) / cm^-1)),
...)
}
read.txt.Horiba.t <- function (file, header = TRUE, sep = "\t", row.names = NULL,
check.names = FALSE, ...){
read.txt.Horiba (file,
cols = c (t = "t / s",
spc = "I / a.u.",
.wavelength = expression (Delta*tilde(nu) / cm^-1)),
...)
} |
runARGES <- function(X, parentsOf, variableSelMat, setOptions, directed, verbose,
...){
package <- setOptions$package
if(is.null(package)) package <- "huge"
if(package == "huge"){
method <- setOptions$method
if(is.null(method)) method <- "mb"
criterion <- setOptions$criterion
if(is.null(criterion)) criterion <- "ric"
}else if(package == "flare"){
method <- setOptions$method
if(is.null(method)) method <- "tiger"
criterion <- setOptions$criterion
if(is.null(criterion)) criterion <- "cv"
}else{
stop(paste("Package", package, "not supported for CIG estimation. Valid
options are 'huge' or 'flare'."))
}
if(is.null(variableSelMat)){
if(package == "huge"){
hugeObj <- huge::huge(X, method = method, verbose = FALSE)
hugeSel <- huge::huge.select(hugeObj, criterion = criterion, verbose = FALSE)
variableSelMat <- hugeSel$refit
}else{
flareObj <- flare::sugm(X, method = method, verbose = FALSE)
flareSel <- flare::sugm.select(flareObj, criterion = criterion, verbose = FALSE)
variableSelMat <- flareSel$refit
}
variableSelMat <- as.matrix(variableSelMat)
    # coerce the 0/1 adjacency matrix to logical (assigning TRUE/FALSE into a
    # numeric matrix would silently be converted back to 1/0)
    variableSelMat <- variableSelMat == 1
}
if(is.null(setOptions$adaptive)){
setOptions$adaptive <- "vstructures"
}else{
if(setOptions$adaptive == "none")
setOptions$adaptive <- "vstructures"
}
runGES(X, parentsOf, variableSelMat, setOptions, directed, verbose,
...)
} |
na_range_to_values <- function(x){
if( ! inherits(x, "haven_labelled_spss")) return(x)
if ( is.null(attr(x, "na_range")) ) return(x)
na_values <- vector (mode = "double", length = 0 )
  if ( ! is.null(attr(x, "na_values"))) {
    na_values <- attr(x, "na_values")
  }
na_min <- labelled::na_range(x)[1]
na_max <- labelled::na_range(x)[2]
if ( length(na_values) >0 ) {
if (min(na_values) < na_min) {
warning("Inconsistent missing ranges: min(na_values) < min(na_range)")
na_min <- min(na_values)
}
if (max(na_values) > na_max) {
warning("Inconsistent missing ranges: max(na_values) > max(na_range)")
na_max <- max(na_values)
}
}
above_min <- unclass(x)[(unclass(x) >= na_min)]
na_values <- above_min[above_min <= na_max]
if (length(na_values)==0) {
na_values <- na_min:na_max
}
labelled::na_values(x) <- na_values
labelled::na_range(x) <- c(na_min, na_max)
x
}
is.na_range_to_values <- function(x) {
!is.null(attr(x, "na_values")) && !is.null(attr(x, "na_range"))
} |
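# Hedged usage sketch for na_range_to_values(): builds a labelled_spss vector with
# only an na_range and checks that explicit na_values are filled in (assumes the
# haven and labelled packages are available).
# x <- haven::labelled_spss(c(1, 2, 9), labels = c(yes = 1, no = 2), na_range = c(9, 99))
# labelled::na_values(na_range_to_values(x))   # should report 9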
get_theta_ig <- function(alpha = 0.01, method = "integrate", Z, c = 3, eps = .Machine$double.eps, Kinv, equals = FALSE, a = 1,type="marginalt")
{
warning("implementation currently does not work for equals=TRUE (a=b), when both values are small")
if(method != "integrate")
stop("method not existing")
if(!(type %in% c("integrate","marginalt")))
    stop("type not existing")
ztKz <- diag(Z%*%Kinv%*%t(Z))
if(NROW(Z) == 1 | NCOL(Z) == 1)
{
nknots <- 1
} else {
nknots <- NROW(Z)
}
weights <- rep(1, nknots)
alphafx <- alpha * weights / nknots
eps2 <- eps3 <- eps4 <- eps
marginal_df <- function(f, lambda, ztz, a)
{
if(equals == TRUE)
a <- lambda
if(type == "marginalt")
{
df <- 2 * a
mu <- 0
sigma <- sqrt(ztz*(lambda/a))
res <- pt((f-mu) / sigma, df = df) - pt((-f-mu) / sigma, df = df)
} else {
integrand <- function(tau2, a=a)
{
dnorm(f, mean = 0, sd = sqrt(tau2 * ztz)) * ((lambda^a)/gamma(a) * tau2^(-a-1) * exp(-lambda/tau2))
}
res <- try(integrate(integrand, 0, Inf,a=a)$value, TRUE)
while(inherits(res, "try-error"))
{
eps2 <- eps2 * 10
res <- try(integrate(integrand, eps2, Inf,a=a)$value, TRUE)
}
}
return(res)
}
marginal_Pf <- function(lambda, Cov, alpha)
{
if(method == "integrate")
{
tempvar <- 0
for(countnknots in 1:NROW(Z))
{
if(type=="integrate")
{
contri <- try(2*integrate(Vectorize(marginal_df), -c, 0, lambda = lambda, ztz = Cov[countnknots], a=a)$value, TRUE)
while(inherits(contri, "try-error"))
{
eps3 <- eps3 * 10
contri <- try(2*integrate(Vectorize(marginal_df), -c, eps3, lambda = lambda, ztz = Cov[countnknots],a=a)$value, TRUE)
}
} else {
contri <- marginal_df(f=c, lambda = lambda, ztz = Cov[countnknots],a=a)
}
tempvar <- tempvar + contri
}
NROW(Z) - alpha - tempvar
} else {
stop("selected method not implemented.")
}
}
result <- try(uniroot(marginal_Pf, interval = c(1000000000000*.Machine$double.eps, 1000), Cov = ztKz, alpha = alpha), TRUE)
while(inherits(result, "try-error"))
{
eps4 <- eps4 * 10
result <- try(uniroot(marginal_Pf, interval = c(eps4, 1000), Cov = ztKz, alpha = alpha), TRUE)
}
return(result)
} |
context("Tests list-as-an-outcome ictreg.joint")
rm(list=ls())
set.seed(1)
data(mexico)
test_that("ictreg.joint works", {
skip_on_cran()
loyal <- mexico[mexico$mex.loyal == 1,]
notloyal <- mexico[mexico$mex.loyal == 0,]
loyalreg <- ictreg.joint(formula = mex.y.all ~ mex.male + mex.age + mex.age2 + mex.education +
mex.interest + mex.married +
mex.wealth + mex.urban + mex.havepropoganda + mex.concurrent, data = loyal,
treat = "mex.t", outcome = "mex.votecard", J = 3, constrained = TRUE,
outcome.reg = "logistic", maxIter = 1000)
summary(loyalreg)
approvalreg <- ictreg.joint(formula = mex.y.all ~ mex.male + mex.age + mex.age2 +
mex.education +
mex.interest + mex.married +
mex.urban +
mex.cleanelections + mex.cleanelectionsmiss +
mex.havepropoganda +
mex.wealth + mex.northregion +
mex.centralregion + mex.metro + mex.pidpriw2 +
mex.pidpanw2 + mex.pidprdw2,
data = mexico, treat = "mex.t", outcome = "mex.epnapprove",
J = 3, constrained = TRUE,
outcome.reg = "linear", maxIter = 1000)
summary(approvalreg)
loyalpred <- predict.ictreg.joint(loyalreg, se.fit = TRUE, interval = "confidence",
level = 0.95, avg = TRUE,
sensitive.value = "both",
sensitive.diff = TRUE, return.draws = TRUE,
predict.sensitive = TRUE)
loyalpred$fit
loyalpred$fitsens
}) |
"data_cleaned_mlpca_c" |
FPCA.FEM<-function(locations = NULL, datamatrix, FEMbasis,lambda, nPC = 1, validation = NULL, NFolds = 5,
GCVmethod = "Stochastic", nrealizations = 100)
{
incidence_matrix=NULL
if(class(FEMbasis$mesh) == "mesh.2D"){
ndim = 2
mydim = 2
}else if(class(FEMbasis$mesh) == "mesh.2.5D"){
ndim = 3
mydim = 2
}else if(class(FEMbasis$mesh) == "mesh.3D"){
ndim = 3
mydim = 3
}else{
stop('Unknown mesh class')
}
if(GCVmethod=="Stochastic")
GCVmethod=2
else if(GCVmethod=="Exact")
GCVmethod=1
else{
stop("GCVmethod must be either Stochastic or Exact")
}
checkSmoothingParametersFPCA(locations, datamatrix, FEMbasis, incidence_matrix, lambda, nPC, validation, NFolds, GCVmethod ,nrealizations)
if(!is.null(locations))
locations = as.matrix(locations)
datamatrix = as.matrix(datamatrix)
if(!is.null(incidence_matrix))
incidence_matrix = as.matrix(incidence_matrix)
lambda = as.matrix(lambda)
checkSmoothingParametersSizeFPCA(locations, datamatrix, FEMbasis, incidence_matrix, lambda, ndim, mydim, validation, NFolds)
bigsol = NULL
if(class(FEMbasis$mesh) == 'mesh.2D'){
print('C++ Code Execution')
bigsol = CPP_smooth.FEM.FPCA(locations, datamatrix, FEMbasis, incidence_matrix,
lambda, ndim, mydim, nPC, validation, NFolds,
GCVmethod, nrealizations)
numnodes = nrow(FEMbasis$mesh$nodes)
} else if(class(FEMbasis$mesh) == 'mesh.2.5D'){
print('C++ Code Execution')
bigsol = CPP_smooth.manifold.FEM.FPCA(locations, datamatrix, FEMbasis,
incidence_matrix, lambda, ndim, mydim,
nPC, validation, NFolds, GCVmethod, nrealizations)
numnodes = FEMbasis$mesh$nnodes
} else if(class(FEMbasis$mesh) == 'mesh.3D'){
print('C++ Code Execution')
bigsol = CPP_smooth.volume.FEM.FPCA(locations, datamatrix, FEMbasis,
incidence_matrix, lambda, ndim, mydim,
nPC, validation, NFolds, GCVmethod, nrealizations)
numnodes = FEMbasis$mesh$nnodes
}
loadings=bigsol[[1]]
loadings.FEM=FEM(loadings,FEMbasis)
scores=bigsol[[2]]
lambda=bigsol[[3]]
variance_explained=bigsol[[4]]
cumsum_percentage=bigsol[[5]]
var=bigsol[[6]]
reslist=list(loadings.FEM=loadings.FEM, scores=scores, lambda=lambda, variance_explained=variance_explained, cumsum_percentage=cumsum_percentage)
return(reslist)
} |
apa_table <- function(x, ...) {
UseMethod("apa_table", x)
}
apa_table.default <- function(x, ...) no_method(x)
apa_table.apa_results_table <- function(x, escape = FALSE, ...) {
NextMethod(x, escape = FALSE, ...)
}
apa_table.matrix <- function(
x
, caption = NULL
, note = NULL
, stub_indents = NULL
, added_stub_head = NULL
, col_spanners = NULL
, midrules = NULL
, placement = "tbp"
, landscape = FALSE
, font_size = NULL
, escape = TRUE
, span_text_columns = TRUE
, ...
, format.args = NULL
) {
x <- data.frame(
x
, check.names = FALSE
, fix.empty.names = FALSE
, stringsAsFactors = FALSE
)
apa_table(
x
, caption = caption
, note = note
, stub_indents = stub_indents
, added_stub_head = added_stub_head
, col_spanners = col_spanners
, midrules = midrules
, placement = placement
, landscape = landscape
, font_size = font_size
, escape = escape
, span_text_columns = span_text_columns
, ...
, format.args = format.args
)
}
apa_table.list <- function(
x
, caption = NULL
, note = NULL
, stub_indents = NULL
, added_stub_head = NULL
, col_spanners = NULL
, midrules = NULL
, placement = "tbp"
, landscape = FALSE
, font_size = NULL
, escape = TRUE
, merge_method = "indent"
, span_text_columns = TRUE
, ...
, format.args = NULL
) {
ellipsis <- list(...)
row_names <- if(is.null(ellipsis$row.names)) TRUE else ellipsis$row.names
validate(row_names, "row.names", check_class = "logical", check_length = 1)
validate(merge_method, "merge_method", check_class = "character", check_length = 1)
force_row_names <- !(
(
all(
sapply(x, function(y) all(as.character(rownames(y)) == as.character(1:nrow(y))))
)
) ||
all(
as.character(unlist(lapply(x, rownames))) == as.character(1:nrow(do.call(rbind.data.frame, x)))
)
)
if(row_names & force_row_names) {
x <- lapply(
x
, add_row_names
, added_stub_head = added_stub_head
, force = force_row_names
)
} else {
x <- lapply(
x
, data.frame
, check.names = FALSE
, fix.empty.names = FALSE
, stringsAsFactors = FALSE
)
}
if(!merge_method %in% c("indent", "table_spanner")) {
warning("merge_method '", merge_method, "' not supported. Defaulting to 'indent'.")
merge_method <- "indent"
}
if(!is.null(ellipsis$format)) {
output_format <- ellipsis$format
} else {
output_format <- knitr::opts_knit$get("rmarkdown.pandoc.to")
if(length(output_format) == 0 || output_format == "markdown") output_format <- "latex"
}
if(merge_method == "table_spanner") {
if(output_format %in% c("docx", "word")) {
warning("merge_method '", merge_method, "' not supported for Word documents. Defaulting to 'indent'.")
merge_method <- "indent"
} else {
if(!is.null(format.args)) validate(format.args, check_class = "list")
ellipsis <- list(...)
if(is.null(ellipsis$digits) & is.null(format.args$digits)) {
format.args$digits <- 2
} else if(!is.null(ellipsis$digits)) {
format.args$digits <- ellipsis$digits
}
x <- lapply(x, format_cells, format.args)
if(!is.null(names(x))) {
x <- mapply(
add_table_spanner
, x = x
, name = names(x)
, SIMPLIFY = FALSE
)
}
merged_table <- do.call(rbind.data.frame, x)
rownames(merged_table) <- NULL
}
}
list_indents <- list()
if(merge_method == "indent") {
merged_table <- do.call(rbind.data.frame, x)
rownames(merged_table) <- NULL
if(!is.null(names(x))) {
list_indents <- lapply(x, function(x) 1:nrow(x))
for(i in seq_along(list_indents)[-1]) {
list_indents[[i]] <- list_indents[[i]] + max(list_indents[[i - 1]])
}
}
}
apa_table(
merged_table
, caption = caption
, note = note
, stub_indents = c(list_indents, stub_indents)
, added_stub_head = added_stub_head
, col_spanners = col_spanners
, midrules = midrules
, placement = placement
, landscape = landscape
, font_size = font_size
, escape = escape
, span_text_columns = span_text_columns
, format.args = format.args
, ...
)
}
apa_table.data.frame <- function(
x
, caption = NULL
, note = NULL
, stub_indents = NULL
, added_stub_head = NULL
, col_spanners = NULL
, midrules = NULL
, placement = "tbp"
, landscape = FALSE
, font_size = NULL
, escape = TRUE
, span_text_columns = TRUE
, ...
, format.args = NULL
) {
if(!is.null(caption)) validate(caption, check_class = "character", check_length = 1)
if(!is.null(note)) validate(note, check_class = "character", check_length = 1)
if(!is.null(added_stub_head)) validate(added_stub_head, check_class = "character", check_length = 1)
if(!is.null(stub_indents)) validate(stub_indents, check_class = "list")
if(!is.null(format.args)) validate(format.args, check_class = "list")
validate(escape, check_class = "logical", check_length = 1)
validate(placement, check_class = "character", check_length = 1)
validate(landscape, check_class = "logical", check_length = 1)
ellipsis <- list(...)
row_names <- if(is.null(ellipsis$row.names)) TRUE else ellipsis$row.names
validate(row_names, "row.names", check_class = "logical", check_length = 1)
if(is.null(ellipsis$digits) & is.null(format.args$digits)) {
format.args$digits <- 2
} else if(!is.null(ellipsis$digits)) {
format.args$digits <- ellipsis$digits
}
  prep_table <- default_label(x)
  if(row_names) {
    prep_table <- add_row_names(prep_table, added_stub_head = added_stub_head)
  }
prep_table <- format_cells(prep_table, format.args)
if(escape) {
prep_table <- as.data.frame(lapply(prep_table, escape_latex, spaces = TRUE), check.names = FALSE, fix.empty.names = FALSE, stringsAsFactors = FALSE)
colnames(prep_table) <- escape_latex(colnames(prep_table))
caption <- escape_latex(caption)
note <- escape_latex(note)
} else {
prep_table <- as.data.frame(lapply(prep_table, function(x) gsub("([^\\\\]+)(%)", "\\1\\\\%", x)), check.names = FALSE, fix.empty.names = FALSE, stringsAsFactors = FALSE)
prep_table <- as.data.frame(lapply(prep_table, function(x) gsub("([^\\\\])(&)", "\\1\\\\&", x)), check.names = FALSE, fix.empty.names = FALSE, stringsAsFactors = FALSE)
}
if(!is.null(stub_indents)) prep_table <- indent_stubs(prep_table, stub_indents, "\\ \\ \\ ")
ellipsis$escape <- FALSE
ellipsis$row.names <- FALSE
if(!is.null(ellipsis$format)) {
output_format <- ellipsis$format
ellipsis$format <- NULL
} else {
output_format <- knitr::opts_knit$get("rmarkdown.pandoc.to")
if(length(output_format) == 0 || output_format == "markdown") output_format <- "latex"
}
if(output_format == "latex") {
if(!is.null(col_spanners)) {
validate(col_spanners, check_class = "list")
validate(unlist(col_spanners), "col_spanners", check_range = c(1, ncol(prep_table)))
}
do.call(
function(...) apa_table.latex(
x = prep_table
, caption = caption
, note = note
, col_spanners = col_spanners
, midrules = midrules
, placement = placement
, landscape = landscape
, font_size = font_size
, span_text_columns = span_text_columns
, ...
)
, ellipsis
)
} else {
do.call(
function(...) apa_table.markdown(
x = prep_table
, caption = caption
, note = note
, ...
)
, ellipsis
)
}
}
apa_table.latex <- function(
x
, caption = NULL
, note = NULL
, col_spanners = NULL
, midrules = NULL
, placement = "tbp"
, landscape = FALSE
, font_size = NULL
, span_text_columns = TRUE
, ...
) {
if(!is.null(font_size)) validate(font_size, check_class = "character", check_length = 1)
apa_terms <- options()$papaja.terms
ellipsis <- list(...)
if(!is.null(ellipsis$small)) {
validate(ellipsis$small, check_class = "logical", check_length = 1)
if(ellipsis$small) {
font_size <- "small"
ellipsis$small <- NULL
}
}
ellipsis$booktabs <- TRUE
longtable <- if(!is.null(ellipsis$longtable)) ellipsis$longtable else FALSE
if(longtable || landscape) {
tabular_env <- "ThreePartTable"
table_note_env <- "TableNotes"
} else {
tabular_env <- "threeparttable"
table_note_env <- "tablenotes"
}
n_cols <- ncol(x)
n_rows <- nrow(x)
current_chunk <- knitr::opts_current$get("label")
if(!is.null(current_chunk)) caption <- paste0("\\label{tab:", current_chunk, "}", caption)
x <- default_label(x)
colnames(x) <- paste0("\\multicolumn{1}{c}{", unlist(variable_label(x)), "}")
colnames(x)[1] <- if(!is.na(variable_label(x)[[1]])) variable_label(x)[[1]] else ""
res_table <- do.call(function(...) knitr::kable(x, format = "latex", ...), ellipsis)
table_lines <- unlist(strsplit(res_table, "\n"))
table_lines <- table_lines[!grepl("\\\\addlinespace", table_lines)]
table_lines <- remove_excess_table_spanner_columns(table_lines)
if(!is.null(col_spanners)) table_lines <- add_col_spanners(table_lines, col_spanners, n_cols)
table_content_start <- grep("\\\\midrule", table_lines)
if((longtable || landscape) & !is.null(caption)) {
table_lines <- c(
table_lines[1:2]
, paste0("\\caption{", caption, "}\\\\")
, table_lines[3:table_content_start]
, "\\endfirsthead"
, paste0("\\caption*{\\normalfont{Table \\ref{tab:", current_chunk, "} continued}}\\\\")
, table_lines[3:table_content_start]
, "\\endhead"
, table_lines[-c(1:table_content_start)]
)
table_content_start <- grep("\\\\endhead", table_lines)
}
table_content_end <- grep("\\\\bottomrule", table_lines)
if(!is.null(note)) table_lines[table_content_end] <- paste(table_lines[table_content_end], "\\addlinespace", sep = "\n")
if(!is.null(midrules)) {
validate(midrules, check_class = "numeric", check_range = c(1, n_rows))
table_lines[table_content_start + midrules] <- paste(
table_lines[table_content_start + midrules]
, "\\midrule"
)
}
if(!is.null(note) & (longtable || landscape)) table_lines <- c(table_lines[-length(table_lines)], "\\insertTableNotes", table_lines[length(table_lines)])
if(longtable || landscape) {
table_lines <- gsub("\\{tabular\\}", "{longtable}", table_lines)
table_lines[grep("\\\\begin\\{longtable\\}", table_lines)] <- paste0(
table_lines[grep("\\\\begin\\{longtable\\}", table_lines)]
, "\\noalign{\\getlongtablewidth\\global\\LTcapwidth=\\longtablewidth}"
)
}
place_opt <- paste0("[", placement, "]")
table_output <- "\n\n"
if(landscape) {
table_output <- c(table_output, "\\begin{lltable}")
place_opt <- NULL
}
if(any(grepl("jou", c(rmarkdown::metadata$classoption, rmarkdown::metadata$class))) && span_text_columns) {
table_env <- "table*"
} else {
table_env <- "table"
}
if(!landscape && !longtable) table_output <- c(table_output, paste0("\\begin{", table_env, "}", place_opt))
if(!landscape) table_output <- c(table_output, paste0("\n\\begin{center}\n\\begin{", tabular_env, "}"))
if(!is.null(caption) && !(longtable || landscape)) table_output <- c(table_output, paste0("\n\\caption{", caption, "}"))
if(!is.null(note) && (longtable || landscape)) table_output <- c(table_output, paste0("\n\\begin{", table_note_env, "}[para]\n\\normalsize{\\textit{", apa_terms$note, ".} ", note, "}\n\\end{", table_note_env, "}"))
if(!is.null(font_size)) table_output <- c(table_output, paste0("\n\\", font_size, "{"))
table_output <- c(table_output, table_lines)
if(!is.null(font_size)) table_output <- c(table_output, "\n}")
if(!is.null(note) & !(longtable || landscape)) table_output <- c(table_output, paste0("\n\\begin{", table_note_env, "}[para]\n\\normalsize{\\textit{", apa_terms$note, ".} ", note, "}\n\\end{", table_note_env, "}"))
if(!landscape) table_output <- c(table_output, paste0("\n\\end{", tabular_env, "}\n\\end{center}"))
if(!landscape && !longtable) table_output <- c(table_output, paste0("\n\\end{", table_env, "}"))
if(landscape) {
table_output <- c(table_output, "\n\\end{lltable}")
}
table_output <- c(table_output, "\n\n")
knitr::asis_output(paste(table_output, collapse = "\n"))
}
apa_table.markdown <- function(
x
, caption = NULL
, note = NULL
, ...
) {
ellipsis <- list(...)
x <- default_label(x)
colnames(x) <- unlist(variable_label(x))
colnames(x)[1] <- if(!is.na(variable_label(x)[[1]])) variable_label(x)[[1]] else ""
table_output <- do.call(function(...) knitr::kable(x, format = "pandoc", ...), ellipsis)
apa_terms <- options()$papaja.terms
caption <- paste0("*", caption, "*")
current_chunk <- knitr::opts_current$get("label")
  if(!is.null(current_chunk)) caption <- paste0("<caption>(\\#tab:", current_chunk, ")</caption>", caption)
table_output <- c(caption, table_output)
if(!is.null(note)) {
table_output <- c(
table_output
, "\n<div custom-style='Compact'>"
, paste0("*", apa_terms$note, ".* ", note)
, "</div>\n\n \n\n"
)
}
knitr::asis_output(paste(table_output, collapse = "\n"))
}
format_cells <- function(x, format.args = NULL) {
format.args$x <- x
do.call("printnum.data.frame", format.args)
}
add_row_names <- function(x, added_stub_head, force = FALSE) {
if(!is.null(rownames(x)) && (all(rownames(x) != 1:nrow(x))) || force) {
row_names <- rownames(x)
rownames(x) <- NULL
mod_table <- data.frame(row_names, x, check.names = FALSE, fix.empty.names = FALSE, stringsAsFactors = FALSE)
if(!is.null(added_stub_head)) {
colnames(mod_table) <- c(added_stub_head, colnames(x))
if(is(mod_table, "apa_results_table")) variable_label(mod_table[, 1]) <- added_stub_head
} else {
colnames(mod_table) <- c("", colnames(x))
if(is(mod_table, "apa_results_table")) variable_label(mod_table[, 1]) <- ""
}
} else mod_table <- data.frame(x, check.names = FALSE, fix.empty.names = FALSE, stringsAsFactors = FALSE)
rownames(mod_table) <- NULL
mod_table
}
indent_stubs <- function(x, lines, filler = "\\ \\ \\ ") {
for(i in seq_along(lines)) {
x[lines[[i]], 1] <- paste0(filler, x[lines[[i]], 1])
}
section_titles <- lines[which(names(lines) != "")]
section_titles <- sapply(section_titles, min)
if(length(section_titles) > 0) {
for(i in seq_along(section_titles)) {
top <- if(section_titles[i] != 1) x[1:(section_titles[i] - 1 + (i-1)), ] else NULL
bottom <- if(section_titles[i] != nrow(x)) x[(section_titles[i] + (i-1)):nrow(x), ] else x[nrow(x), ]
x <- rbind.data.frame(top, c(names(section_titles[i]), rep("", ncol(x) - 1)), bottom)
}
}
x
}
add_col_spanners <- function(table_lines, col_spanners, n_cols) {
multicols <- sapply(
seq_along(col_spanners)
, function(i, names) {
spanner_length <- diff(col_spanners[[i]])
if(length(spanner_length) == 0) spanner_length <- 0
paste0("\\multicolumn{", spanner_length + 1, "}{c}{", names[i], "}")
}
, names(col_spanners)
)
multicol_spanners <- vapply(col_spanners, length, 1) > 1
n_ampersands <- c()
if(sum(multicol_spanners) > 1) {
for(i in 2:length(col_spanners)) {
n_ampersands <- c(n_ampersands, min(col_spanners[[i]]) - max(col_spanners[[i - 1]]))
}
}
n_ampersands <- c(n_ampersands, 0)
leading_amps <- paste(rep(" &", min(unlist(col_spanners)) - 1), collapse = " ")
trailing_amps <- if(n_cols - max(unlist(col_spanners)) > 0) {
paste(rep(" &", n_cols - max(unlist(col_spanners))), collapse = " ")
} else ""
group_headings <- c()
for(i in 1:(length(col_spanners))) {
group_headings <- paste(group_headings, multicols[i], paste(rep("&", n_ampersands[i]), collapse = " "))
}
group_headings <- paste(leading_amps, group_headings, trailing_amps, "\\\\", sep = "")
group_midrules <- sapply(
seq_along(col_spanners)
, function(i) {
paste0("\\cmidrule(r){", min(col_spanners[[i]]), "-", max(col_spanners[[i]]), "}")
}
)
group_midrules <- paste(group_midrules, collapse = " ")
table_environment <- which(grepl("\\\\toprule", table_lines))
table_lines <- c(
table_lines[1:table_environment]
, group_headings
, group_midrules
, table_lines[(table_environment + 1):length(table_lines)]
)
table_lines
}
add_table_spanner <- function(x, name, ...) {
name <- paste0("!!bs!!multicolumn!!ob!!", ncol(x), "!!cb!!!!ob!!c!!cb!!!!ob!!", name, "!!cb!!!!bs!!!!bs!!REMOVE!!REST")
table_spanner <- c(name, rep("", ncol(x)-1))
rbind(table_spanner, x)
}
remove_excess_table_spanner_columns <- function(x) {
table_spanner_rows <- which(grepl("REMOVE!!REST", x))
x[table_spanner_rows] <- gsub("REMOVE!!REST.*", "", x[table_spanner_rows])
x[table_spanner_rows] <- gsub("!!bs!!", "\\\\", x[table_spanner_rows])
x[table_spanner_rows] <- gsub("!!ob!!", "{", x[table_spanner_rows])
x[table_spanner_rows] <- gsub("!!cb!!", "}", x[table_spanner_rows])
x
}
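# Stack a named list of tables into a single table: each sub-table gets its name
# in an added leading column, and an empty column is padded in where needed so
# all sub-tables have the same width.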
merge_tables <- function(x, empty_cells, row_names, added_stub_head) {
table_names <- names(x)
prep_table <- lapply(seq_along(x), function(i) {
if(row_names[i]) {
i_table <- add_row_names(x[[i]], added_stub_head = added_stub_head[(length(added_stub_head) == 2) + 1])
} else if(any(row_names)) { # pad with an empty column so dimensions match tables that carry row names
i_table <- cbind("", x[[i]])
colnames(i_table) <- c("", colnames(x[[i]]))
} else i_table <- x[[i]]
prep_table <- cbind(
c(table_names[i], rep("", nrow(x[[i]])-1))
, i_table
)
if(row_names[i] && !is.null(rownames(x[[i]])) && length(added_stub_head) < 2) {
second_col <- ""
} else second_col <- NULL
if(is.null(added_stub_head)) {
colnames(prep_table) <- c("", second_col, colnames(x[[i]]))
} else {
colnames(prep_table) <- c(added_stub_head, second_col, colnames(x[[i]]))
}
as.data.frame(prep_table, stringsAsFactors = FALSE)
})
prep_table
} |
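## Illustrative sketch of the helpers above (not part of the original code;
## 'mtcars' is used only as stand-in data and the call is guarded so it only
## runs in an interactive session):
if (interactive()) {
  example_tab <- add_row_names(head(mtcars, 3), added_stub_head = "Car")
  example_tab <- indent_stubs(example_tab, lines = list(2:3))
  print(example_tab)
}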
source("ESEUR_config.r")
library(MASS)
library(numDeriv)
pal_col=rainbow(3)
data=read.csv(paste0(ESEUR_dir, "economics/Givon_et_al_Software_piracy_data.csv"), as.is=TRUE)
yrmth=data$Year+data$Month/12
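# Bass-style diffusion set-up: p (innovation coefficient), q (imitation
# coefficient) and m (market potential) appear to follow Givon et al.; the loop
# below simulates cumulative adoption N in sub-periods of length dt.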
p<-0.00037
q<-0.0316
m<-15386100
stdev_generate<-0
stdev_observe<-0
timeperiods<-400
steps<-10
dt<-1/steps
N<-c(0,rep(NA,timeperiods*steps))
n<-rep(NA,timeperiods*steps+1)
for (i in 1:(timeperiods*steps)) {
n[i+1]<-((p+q*N[i]/m)*(m-N[i]))*dt
N[i+1]<-N[i]+n[i+1]
}
NObs<-N[seq(1,length(N),steps)]
NObs<-NObs[13:length(NObs)]
N<-N[(12*steps+1):length(N)]
gen_pirate_diffusion<-function(xx) {
set.seed(1)
a<-xx[1]
b1<-xx[2]
b2<-xx[3]
alpha<-xx[4]
q11<-xx[5]
q22<-xx[6]
q12<-xx[7]
r11<-xx[8]
epsilon<-xx[9]
X<-vector()
Y<-vector()
x<-vector()
y<-vector()
X[1]<-0
Y[1]<-0
for (i in 2:120) {
x[i]<-(a+alpha*(b1*X[i-1]+b2*Y[i-1])/NObs[i])*(NObs[i]-X[i-1]-Y[i-1])
y[i]<-(1-alpha)*(max(Y[i-1],1)^epsilon)*((b1*X[i-1]+b2*Y[i-1])/NObs[i])*(NObs[i]-X[i-1]-Y[i-1])
X[i]<-X[i-1]+x[i]
Y[i]<-Y[i-1]+y[i]
}
plot(2:120,c(max(y[2:120]),rep(0,length(y[2:120])-1)),col="White",type="l",xlab="Time",ylab="Sales")
lines(2:120,x[2:120],col="Black",type="l")
lines(2:120,y[2:120],col="Red",type="l")
x<<-x
y<<-y
X<<-X+rnorm(1,0,sqrt(r11))
Y<<-Y
x_stored_generated<<-x
y_stored_generated<<-y
x_sim<<-x[53:120]
y_sim<<-y[53:120]
}
a<-0.000200000
b1<-0.135310
b2<-0.1351100
alpha<-0.1438000
q11<-0
q22<-0
q12<-0
r11<-0
epsilon<-0
gen_pirate_diffusion(c(a,b1,b2,alpha,q11,q22,q12,r11,epsilon))
X[52]
Y[52]
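# NOTE: 'WordProcessors' is not defined above; the original script presumably
# takes monthly legal word-processor sales from the Givon et al. data
# (e.g. something like data$WordProcessors).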
X<-X[52]+cumsum(WordProcessors)
Y[1]<-Y[52]
Y[2:length(Y)]<-rep(NA,length(Y)-1)
NObs<-NObs[53:length(NObs)]
N<-N[(52*steps+1):length(N)]
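# Extended Kalman filter for the two-state diffusion model (legal users X,
# pirate users Y). Parameters arrive on an unconstrained scale and are squashed
# into their admissible ranges; the filter linearizes the transition (matrix F),
# updates on observed legal sales, and returns the log-likelihood (maximised by
# optim() below via fnscale = -1).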
projectkalman<-function(xx) {
a<-0.02*xx[1]^2/(1+xx[1]^2)
b1<-xx[2]^2/(1+xx[2]^2)
b2<-xx[3]^2/(1+xx[3]^2)
alpha<-xx[4]^2/(1+xx[4]^2)
q11<-10^9*xx[5]^2/(1+xx[5]^2)
epsilon<-0.2*xx[6]/(1+abs(xx[6]))
q22<-q11
q12<--q11/2
r11<-0
Q<-matrix(c(q11,q12,q12,q22),nrow=2)
R<-matrix(r11,nrow=1)
LL<-0
timeperiods<-length(X)-1
Xtt<-array(,c(timeperiods+1,2))
Xtt[1,1]<-X[1]
Xtt[1,2]<-Y[1]
Ptt<-array(,c(timeperiods+1,2,2))
Ptt[1,1,1]<-0
Ptt[1,1,2]<-0
Ptt[1,2,1]<-0
Ptt[1,2,2]<-0
ll<-vector()
mse_comp<-vector()
predict_observe<-vector()
for (j in 1:timeperiods) {
X_pred<-Xtt[j,]
P_pred<-Ptt[j,,]
for (i in 1:steps) {
X1<-X_pred[1]
X2<-X_pred[2]
X_pred[1]<-X1+((a+alpha*(b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2))*dt
X_pred[2]<-X2+((1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2))*dt
F11<-alpha*b1-a-2*alpha*(b1/N[j*steps+i])*X1-alpha*((b1+b2)/N[j*steps+i])*X2
F12<-alpha*b2-a-alpha*((b1+b2)/N[j*steps+i])*X1-2*alpha*(b2/N[j*steps+i])*X2
F21<-(1-alpha)*(max(X2,1)^epsilon)*(b1/N[j*steps+i])*(N[j*steps+i]-X1-X2)-(1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])
F22<-(1-alpha)*epsilon*(max(X2,1)^(epsilon-1))*((b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2)+(1-alpha)*(max(X2,1)^epsilon)*(b2/N[j*steps+i])*(N[j*steps+i]-X1-X2)-(1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])
F<-matrix(c(F11,F12,F21,F22),nrow=2,byrow=TRUE)
P_pred<-P_pred+(F%*%P_pred+P_pred%*%t(F)+Q)*dt
}
Xttminus<-X_pred
Pttminus<-P_pred
Ht<-c(1,0)
predict_observe[j+1]<-t(Ht)%*%Xttminus
Kt<-Pttminus%*%Ht%*%(t(Ht)%*%Pttminus%*%Ht+R)^(-1)
Xtt[j+1,]<-Xttminus+Kt%*%(X[j+1]-t(Ht)%*%Xttminus)
Ptt[j+1,,]<-(diag(2)-Kt%*%t(Ht))%*%Pttminus
LL<-LL+log((2*pi)^(-1/2)) + log((t(Ht)%*%Pttminus%*%Ht+R)^(-1/2)) + (-1/2)*(X[j+1]-t(Ht)%*%Xttminus)*(t(Ht)%*%Pttminus%*%Ht+R)^(-1)*(X[j+1]-t(Ht)%*%Xttminus)
ll[j]<-log((2*pi)^(-1/2)) + log((t(Ht)%*%Pttminus%*%Ht+R)^(-1/2)) + (-1/2)*(X[j+1]-t(Ht)%*%Xttminus)*(t(Ht)%*%Pttminus%*%Ht+R)^(-1)*(X[j+1]-t(Ht)%*%Xttminus)
mse_comp[j]<-(X[j+1]-t(Ht)%*%Xttminus)^2
}
actual_sales<<-X[3:(timeperiods+1)]-X[2:(timeperiods)]
predicted_sales<<-predict_observe[3:(timeperiods+1)]-X[2:(timeperiods)]
plot(1:(timeperiods-1),actual_sales,col="Black",type="l",xlab="Time",ylab="Sales",main="Black=actual, red=predicted")
lines(1:(timeperiods-1),predicted_sales,col="Red",type="l")
ll<<-ll
mse<<-sum(mse_comp)/timeperiods
LL<<-LL
LL
}
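# Identical filter to projectkalman(), but taking parameters on their original
# (untransformed) scale; used below to compute the Hessian and score-based
# standard errors at the optimum.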
projectkalman_original_params<-function(xx) {
a<-xx[1]
b1<-xx[2]
b2<-xx[3]
alpha<-xx[4]
q11<-xx[5]*10^9
epsilon<-xx[6]
q22<-q11
q12<--q11/2
r11<-0
Q<-matrix(c(q11,q12,q12,q22),nrow=2)
R<-matrix(r11,nrow=1)
LL<-0
timeperiods<-length(X)-1
Xtt<-array(,c(timeperiods+1,2))
Xtt[1,1]<-X[1]
Xtt[1,2]<-Y[1]
Ptt<-array(,c(timeperiods+1,2,2))
Ptt[1,1,1]<-0
Ptt[1,1,2]<-0
Ptt[1,2,1]<-0
Ptt[1,2,2]<-0
ll<-vector()
mse_comp<-vector()
predict_observe<-vector()
for (j in 1:timeperiods) {
X_pred<-Xtt[j,]
P_pred<-Ptt[j,,]
for (i in 1:steps) {
X1<-X_pred[1]
X2<-X_pred[2]
X_pred[1]<-X1+((a+alpha*(b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2))*dt
X_pred[2]<-X2+((1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2))*dt
F11<-alpha*b1-a-2*alpha*(b1/N[j*steps+i])*X1-alpha*((b1+b2)/N[j*steps+i])*X2
F12<-alpha*b2-a-alpha*((b1+b2)/N[j*steps+i])*X1-2*alpha*(b2/N[j*steps+i])*X2
F21<-(1-alpha)*(max(X2,1)^epsilon)*(b1/N[j*steps+i])*(N[j*steps+i]-X1-X2)-(1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])
F22<-(1-alpha)*epsilon*(max(X2,1)^(epsilon-1))*((b1*X1+b2*X2)/N[j*steps+i])*(N[j*steps+i]-X1-X2)+(1-alpha)*(max(X2,1)^epsilon)*(b2/N[j*steps+i])*(N[j*steps+i]-X1-X2)-(1-alpha)*(max(X2,1)^epsilon)*((b1*X1+b2*X2)/N[j*steps+i])
F<-matrix(c(F11,F12,F21,F22),nrow=2,byrow=TRUE)
P_pred<-P_pred+(F%*%P_pred+P_pred%*%t(F)+Q)*dt
}
Xttminus<-X_pred
Pttminus<-P_pred
Ht<-c(1,0)
predict_observe[j+1]<-t(Ht)%*%Xttminus
Kt<-Pttminus%*%Ht%*%(t(Ht)%*%Pttminus%*%Ht+R)^(-1)
Xtt[j+1,]<-Xttminus+Kt%*%(X[j+1]-t(Ht)%*%Xttminus)
Ptt[j+1,,]<-(diag(2)-Kt%*%t(Ht))%*%Pttminus
LL<-LL+log((2*pi)^(-1/2)) + log((t(Ht)%*%Pttminus%*%Ht+R)^(-1/2)) + (-1/2)*(X[j+1]-t(Ht)%*%Xttminus)*(t(Ht)%*%Pttminus%*%Ht+R)^(-1)*(X[j+1]-t(Ht)%*%Xttminus)
ll[j]<-log((2*pi)^(-1/2)) + log((t(Ht)%*%Pttminus%*%Ht+R)^(-1/2)) + (-1/2)*(X[j+1]-t(Ht)%*%Xttminus)*(t(Ht)%*%Pttminus%*%Ht+R)^(-1)*(X[j+1]-t(Ht)%*%Xttminus)
mse_comp[j]<-(X[j+1]-t(Ht)%*%Xttminus)^2
}
actual_sales<<-X[3:(timeperiods+1)]-X[2:(timeperiods)]
predicted_sales<<-predict_observe[3:(timeperiods+1)]-X[2:(timeperiods)]
plot(1:(timeperiods-1),actual_sales,col="Black",type="l",xlab="Time",ylab="Sales",main="Black=actual, red=predicted")
lines(1:(timeperiods-1),predicted_sales,col="Red",type="l")
ll<<-ll
mse<<-sum(mse_comp)/timeperiods
predict_observe<<-predict_observe
mse_comp<<-mse_comp
LL<<-LL
LL
}
a<-0.000200000
b1<-0.135310
b2<-0.1351100
alpha<-0.1438000
q11<-1000
epsilon<-0.00
start_params<-c(a,b1,b2,alpha,q11,epsilon)
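# Map the chosen starting values onto the unconstrained scale that
# projectkalman() squashes back into its admissible ranges, so optim() can
# search over the whole real line.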
trial_params<-c(((start_params[1]/0.02)/(1-(start_params[1]/0.02)))^0.5,(start_params[2]/(1-start_params[2]))^0.5,(start_params[3]/(1-start_params[3]))^0.5,(start_params[4]/(1-start_params[4]))^0.5,0.5,0)
estvals<-optim(trial_params,projectkalman,hessian=TRUE,control=list(trace=3,maxit=2000,fnscale=-1))
pr<-estvals$par
estpars<-c(0.02*pr[1]^2/(1+pr[1]^2),pr[2]^2/(1+pr[2]^2),pr[3]^2/(1+pr[3]^2),pr[4]^2/(1+pr[4]^2),pr[5]^2/(1+pr[5]^2),0.2*pr[6]/(1+abs(pr[6])))
esthess<-hessian(projectkalman_original_params,estpars)
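# Per-period log-likelihood contributions and the outer-product-of-gradients
# (OPG) estimate of the information matrix, from which the standard errors and
# p-values reported below are derived.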
ll_projectkalman_original_params<-function(xx) {
projectkalman_original_params(xx)
ll
}
Iop_projectkalman_original_params<-function(xx) {
jac<-jacobian(ll_projectkalman_original_params,xx)
Iop<-matrix(0,nrow=ncol(jac),ncol=ncol(jac))
for (iopi in 1:nrow(jac)) {
Iop<-Iop+jac[iopi,]%*%t(jac[iopi,])
}
Iop<-Iop/nrow(jac)
Iop
}
Iop<-Iop_projectkalman_original_params(estpars)
varop<-(1/length(X))*solve(Iop)
stdevs<-sqrt(diag(varop))
pvalues<-sapply(1:length(estpars),function(x) 2*(1-pnorm(abs(estpars[x]),0,stdevs[x])))
print(rbind(estpars,stdevs,pvalues,mse,LL[1,1])) |
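# Tests for add_license(): argument checking, use of stored credentials,
# generated license files, and the License field written to DESCRIPTION.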
test_that("Check Inputs", {
withr::local_options(list("given" = NULL, "family" = NULL, "email" = NULL,
"orcid" = NULL, "github" = NULL))
expect_error(add_license(quiet = TRUE), "No 'DESCRIPTION' file found.")
create_temp_compendium()
add_description("John", "Doe", "[email protected]", "9999-9999-9999-9999",
organisation = "society", open = FALSE, overwrite = FALSE,
quiet = TRUE)
expect_error(add_license(quiet = TRUE))
expect_error(add_license(license = NA, quiet = TRUE))
expect_error(add_license(license = numeric(0), quiet = TRUE))
expect_error(add_license(license = c("MIT", "GPL-2"), quiet = TRUE))
expect_error(add_license(license = "GPL2", quiet = TRUE))
expect_error(add_license(license = "GPL 2", quiet = TRUE))
expect_error(add_license(quiet = 0))
expect_error(add_license(quiet = NULL))
expect_error(add_license(quiet = "false"))
expect_error(add_license(license = "MIT", quiet = TRUE))
expect_error(add_license(license = "MIT", "John", quiet = TRUE))
expect_error(add_license(license = "MIT", "John Doe", quiet = TRUE))
expect_error(add_license(license = "MIT", c("John", "Doe"), quiet = TRUE))
expect_error(add_license(license = "MIT", family = "Doe", quiet = TRUE))
expect_invisible(add_license(license = "MIT", "John", "Doe", quiet = TRUE))
})
test_that("Check Credentials", {
withr::local_options(list("given" = "john", "family" = "doe",
"email" = "[email protected]",
"orcid" = "9999-9999-9999-9999"))
create_temp_compendium()
add_description(organisation = "society", open = FALSE, overwrite = FALSE,
quiet = TRUE)
expect_invisible(add_license(license = "MIT", quiet = TRUE))
})
test_that("Check Files and Overwrite", {
withr::local_options(list("given" = "john", "family" = "doe",
"email" = "[email protected]",
"orcid" = "9999-9999-9999-9999"))
create_temp_compendium()
add_description(organisation = "society", open = FALSE, overwrite = FALSE,
quiet = TRUE)
add_license(license = "MIT", quiet = TRUE)
expect_true("LICENSE" %in% list.files(getwd()))
expect_true("LICENSE.md" %in% list.files(getwd()))
content <- readLines("LICENSE.md")
expect_length(grep("MIT License", content[1]), n = 1)
add_license(license = "GPL-2", quiet = TRUE)
expect_false("LICENSE" %in% list.files(getwd()))
content <- readLines("LICENSE.md")
expect_length(grep("GNU General Public License", content[1]), n = 1)
})
test_that("Check DESCRIPTION Fields", {
withr::local_options(list("given" = "john", "family" = "doe",
"email" = "[email protected]",
"orcid" = "9999-9999-9999-9999"))
create_temp_compendium()
add_description(organisation = "society", open = FALSE, overwrite = FALSE,
quiet = TRUE)
add_license(license = "MIT", quiet = TRUE)
expect_equal(read_descr()$"License", "MIT + file LICENSE")
add_license(license = "LGPL (>= 3)", quiet = TRUE)
expect_equal(read_descr()$"License", "LGPL (>= 3)")
}) |
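# Recompute per-vertex normals for a triangular mesh (a ply object with $vb
# vertices, $it faces and $normals, e.g. as used by molaR): each vertex normal
# is the normalized sum of the unit normals of all faces incident on that
# vertex, with every incident face weighted equally.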
Equal_Vertex_Normals <- function(plyFile) {
VertFace <- vertex_to_face_list(plyFile)
FaceVert <- plyFile$it
v <- plyFile$vb
rawNorms <- plyFile$normals * 0
rownames(rawNorms) <- c('x', 'y', 'z', 'length')
for (i in 1:length(VertFace)) {
Faces <- VertFace[[i]]
VertFaceMatrix <- matrix(0, nrow=length(Faces), ncol=3)
colnames(VertFaceMatrix) <- c('xpts', 'ypts', 'zpts')
for (j in 1:length(Faces)) {
Face <- Faces[j]
pts <- FaceVert[,Face]
pt1 <- v[,pts[1]]
pt2 <- v[,pts[2]]
pt3 <- v[,pts[3]]
if (which(pts==i) == 1) {
Vec1 <- pt2 - pt1
Vec2 <- pt3 - pt1
}
if (which(pts==i) == 2) {
Vec1 <- pt3 - pt2
Vec2 <- pt1 - pt2
}
if (which(pts==i) == 3) {
Vec1 <- pt1 - pt3
Vec2 <- pt2 - pt3
}
pfNorm <- as.vector(c(Vec1[2]*Vec2[3]-Vec1[3]*Vec2[2], Vec1[3]*Vec2[1]- Vec1[1]*Vec2[3],Vec1[1]*Vec2[2]-Vec1[2]*Vec2[1]))
Length <- sqrt(sum(pfNorm^2))
VertFaceMatrix[j,] <- pfNorm/Length
}
vNormRaw <- as.vector(colSums(VertFaceMatrix))
vNormRaw <- as.vector(c(vNormRaw, sqrt(sum(vNormRaw^2))))
rawNorms[,i] <- vNormRaw
}
Norms <- rawNorms %*% diag(1/rawNorms[4,])
plyFile$normals <- Norms
return(plyFile)
} |
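# Extract the estimated clusters from a fitted multipartite blockmodel: for the
# model selected by 'whichModel', return, for each functional group, the node
# names assigned to each of its blocks.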
extractClustersMBM = function(resMBM,whichModel = 1){
v_distrib <- resMBM$fittedModel[[whichModel]]$paramEstim$v_distrib
dataR6 <- formattingData(resMBM$list_Net,v_distrib)
param <- resMBM$fittedModel[[whichModel]]$paramEstim
vK_estim <- param$v_K
clusters <- lapply(1:length(vK_estim),function(q){lapply(1:vK_estim[q],function(l){
namesq <- names(param$Z[[q]])
if (is.null(namesq)){namesq <- 1:length(param$Z[[q]])}
clustql <- namesq[param$Z[[q]] == l]
return(clustql)})})
names(clusters) <- dataR6$namesFG
return(clusters)
} |
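# Multinomial-logit helper (requires the 'mlogit' package: mlogit.data(),
# mlogit()): classifies pairwise relatedness scores (Mxy) of known non-sib
# (NON), half-sib (HS) and full-sib (FS) pairs, and returns the Mxy value at
# which the fitted log-odds of sibship versus NON cross zero (the smaller of
# the HS and FS solutions), together with the model summary.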
glm.prep <- function(offfull.list, offhalf.list, offnon.list)
{
non <- data.frame(rep("NON",length(offnon.list[!is.na(offnon.list)])),offnon.list[!is.na(offnon.list)])
hs <- data.frame(rep("HS",length(offhalf.list[!is.na(offhalf.list)])),offhalf.list[!is.na(offhalf.list)])
fs <- data.frame(rep("FS",length(offfull.list[!is.na(offfull.list)])),offfull.list[!is.na(offfull.list)])
rel.Y <- c(as.character(non[,1]),as.character(hs[,1]),as.character(fs[,1]))
rel.X <- c(as.numeric(non[,2]),as.numeric(hs[,2]),as.numeric(fs[,2]))
relate.data <- data.frame(as.factor(rel.Y),as.numeric(rel.X))
names(relate.data) <- c("Sib","Mxy")
redata <- mlogit.data(relate.data,varying=NULL,choice="Sib",shape="wide")
mlogit.model <- mlogit(Sib~1|Mxy, data=redata, reflevel="NON")
sumlrm <- summary(mlogit.model)
nonlog2 <- (log(1)-sumlrm[[1]][1])/sumlrm[[1]][3]
nonlog3 <- (log(1)-sumlrm[[1]][2])/sumlrm[[1]][4]
half <- min(nonlog2,nonlog3)
return(list(half, sumlrm))
}
|
"boa.getiter" <-
function(link, iter)
{
result <- NULL
idx <- is.element(dimnames(link)[[1]], iter)
if(any(idx)) result <- link[idx, , drop = FALSE]
return(result)
} |
expected <- eval(parse(text="structure(c(TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE), .Tsp = c(1, 101, 1), class = \"ts\")"));
test(id=0, code={
argv <- eval(parse(text="list(structure(0:100, .Tsp = c(1, 101, 1), class = \"ts\"), 0)"));
do.call(`<=`, argv);
}, o=expected); |
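# Tabulate observed versus model-expected counts for one covariate of a fitted
# count model: optionally bins a continuous covariate with 'cut', then returns
# a 2 x k matrix with rows "Observed" and "Expected" (uses plyr::ddply).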
obs_exp <- function(model, covar, cut=NULL){
oe <- model$data
oe$N <- predict(model)
if(!is.null(cut)){
oe[[covar]] <- cut(oe[[covar]], breaks=cut)
}
oe <- plyr::ddply(oe, covar, function(x){
data.frame(Observed = sum(x$count),
Expected = sum(x$N))
})
cn <- oe[,1]
oe <- t(oe[,2:3])
colnames(oe) <- cn
return(oe)
} |
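# Input-validation tests for nsinc.z(): two overlaid uniform point patterns
# ("red" and "green") are simulated, then malformed arguments are checked to
# raise the documented error messages.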
set.seed(1234)
x <- runif(300, min = -1, max = 1)
y <- runif(300, min = -1, max = 1)
red <- data.frame(x,y, color = "red")
x <- runif(50, min = -1, max = 1)
y <- runif(50, min = -1, max = 1)
green <- data.frame(x,y, color = "green")
mydata <- rbind(red,green)
distance.matrix <- as.matrix(dist(mydata[,c("x","y")]))
r.min.post.data <- min(distance.matrix[distance.matrix!=0])
r.max.post.data <- max(distance.matrix[distance.matrix!=0])
expect_that(nsinc.z(data = mydata, membership = "member", dim = 2),
throws_error("There is no column names in the data called member!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 4),
throws_error("dim must be either 2 or 3!"))
colnames(mydata) <- c("a","y","color")
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2),
throws_error("Data must contain a 'x' column!"))
colnames(mydata) <- c("x","b","color")
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2),
throws_error("Data must contain a 'y' column!"))
colnames(mydata) <- c("x", "y", "color")
box <- data.frame(min=-1, xmax=1, ymin=-1, ymax=1)
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2, box = box),
throws_error("'box' must be a dataframe containing columns 'xmin','xmax','ymin' and 'ymax'!"))
box <- data.frame(xmin=2, xmax=1, ymin=-1, ymax=1)
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2, box = box),
throws_error("'xmax' or 'ymax' must be larger than 'xmin' or 'ymin' in 'box'!"))
expect_that(nsinc.z(data = red, membership = "color", dim = 2),
throws_error("There must be at least two memberships of signals in the input data!"))
box <- data.frame(xmin=2, xmax=3, ymin=2, ymax=3)
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2, box = box),
throws_error("There must be at least two memberships of signals enclosed in the study region!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.max = 0.5),
throws_error("If choose the 'other' for r.model, then r.min must be specified by the user!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.01),
throws_error("If choose the 'other' for r.model, then r.max must be specified by the user!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = r.min.post.data-0.1, r.max = 0.5),
throws_error("r.min must be between the smallest and half of the largest interpoint distances"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.5*r.max.post.data+0.1, r.max = 0.5),
throws_error("r.min must be between the smallest and half of the largest interpoint distances"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.01, r.max = r.min.post.data-0.1),
throws_error("r.max must be between the smallest and half of the largest interpoint distances"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.01, r.max = 0.5*r.max.post.data+0.1),
throws_error("r.max must be between the smallest and half of the largest interpoint distances"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.5, r.max = 0.01),
throws_error("The r.min must be smaller than r.max!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.01, r.max = 0.5, r.adjust = -1),
throws_error("The r.adjust must be a nonnegative number smaller than half of the difference between r.max and r.min!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
r.model = "other", r.min = 0.01, r.max = 0.5, r.adjust = 0.25),
throws_error("The r.adjust must be a nonnegative number smaller than half of the difference between r.max and r.min!"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2, r.model = "Bayesian"),
throws_error("r.model must be one of 'full'"))
expect_that(nsinc.z(data = mydata, membership = "color", dim = 2,
strata = TRUE, base.member = "blue"),
throws_error("The specified base membership 'blue' is not found in the provided data!")) |