as.data.frame.simple_sews_single <- function(x, ..., wide = FALSE) {
as.data.frame.simple_sews_list( list(x), wide = wide )
}
as.data.frame.simple_sews_list <- function(x, ..., wide = FALSE) {
indicnames <- ifNULLthen(names(x[[1]][['value']]),
paste0("indic_", seq_along(x[[1]][['value']])))
if ( wide ) {
output <- Map(function(n, o) {
a <- as.data.frame(matrix(o[['value']], nrow = 1))
names(a) <- indicnames
data.frame(matrixn = n, a)
}, seq_along(x), x)
} else {
output <- Map(function(n, o) {
data.frame(matrixn = n, indic = indicnames, value = o[['value']])
}, seq_along(x), x)
}
output <- do.call(rbind, output)
row.names(output) <- NULL
output
}
print.simple_sews_single <- function(x, ...) {
summary.simple_sews_single(x, ...)
}
print.simple_sews_list <- function(x, ...) {
summary.simple_sews_list(x, ...)
}
summary.simple_sews_single <- function(object,
indicname = object[["taskname"]],
...) {
cat('Spatial Early-Warning:', indicname, '\n')
cat('\n')
display_size_info(object)
cat('\n')
output <- as.data.frame(object, wide = TRUE)
names(output)[1] <- c('Mat. #')
print.data.frame(output, row.names = FALSE, digits = DIGITS)
cat('\n')
cat("The following methods are available: \n")
cat(list_methods(class(object)), "\n")
}
summary.simple_sews_list <- function(object,
indicname = object[[1]][["taskname"]],
...) {
cat('Spatial Early-Warning:', indicname, '\n')
cat('\n')
display_size_info(object)
cat('\n')
output <- as.data.frame.simple_sews_list(object, wide = TRUE)
names(output)[1] <- c('Mat. #')
print.data.frame(output, row.names = FALSE, digits = DIGITS)
cat('\n')
cat("The following methods are available: \n")
cat(list_methods(class(object)), "\n")
invisible(output)
}
plot.simple_sews_list <- function(x, along = NULL, ...) {
plot.simple_sews_test_list(x, along = along, display_null = FALSE)
}
|
"pairdat.heavy" <-
function(n=100, VAR=4, M=0)
{
x <- (exp(rnorm(n, 0, sqrt(2))) - exp(1)) * sqrt(VAR/2/(exp(4)-exp(2)))
y <- (exp(rnorm(n, 0, sqrt(2))) - exp(1)) * sqrt(VAR/2/(exp(4)-exp(2))) + M
list(x=x, y=y)
}
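# Usage sketch (illustrative values, not from the original file): pairdat.heavy()
# draws two independent heavy-tailed (shifted lognormal) samples, each scaled to
# variance VAR/2, with the second sample shifted by M.
set.seed(1)
heavy <- pairdat.heavy(n = 200, VAR = 4, M = 1)
round(c(mean_diff = mean(heavy$y) - mean(heavy$x), var_x = var(heavy$x)), 2)
# mean_diff should be roughly M = 1 and var_x roughly VAR/2 = 2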
|
vcovCR.rma.uni <- function(obj, cluster, type, target, inverse_var, form = "sandwich", ...) {
if (missing(cluster)) stop("You must specify a clustering variable.")
if (missing(target)) {
target <- NULL
if (missing(inverse_var)) inverse_var <- is.null(obj$weights) & obj$weighted
} else {
if (missing(inverse_var)) inverse_var <- FALSE
}
vcov_CR(obj, cluster = cluster, type = type,
target = target, inverse_var = inverse_var, form = form)
}
residuals_CS.rma <- function(obj) {
res <- residuals(obj)
not_na <- obj$not.na
if (length(res) == length(not_na)) res <- res[not_na]
if (!is.null(wts <- weights(obj)) && !all(pos_wts <- wts > 0)) {
res <- res[pos_wts]
}
return(res)
}
na.action.rma <- function(object, ...) {
if (all(object$not.na)) return(NULL)
res <- which(!object$not.na)
class(res) <- "omit"
res
}
targetVariance.rma.uni <- function(obj, cluster) {
vi <- obj$vi
if (obj$weighted && !is.null(wts <- obj$weights) && !all(pos_wts <- wts > 0)) {
vi <- vi[pos_wts]
}
matrix_list(vi + obj$tau2, cluster, "both")
}
weightMatrix.rma.uni <- function(obj, cluster) {
if (obj$weighted) {
if (is.null(obj$weights)) {
wi <- 1 / (obj$vi + obj$tau2)
} else {
wi <- obj$weights
wi <- wi[wi > 0]
}
} else {
wi <- rep(1, obj$k)
}
w_scale <- mean(wi)
wi <- wi / w_scale
W_list <- matrix_list(wi, cluster, "both")
attr(W_list, "w_scale") <- w_scale
W_list
}
bread.rma.uni <- function(x, ...) {
X_mat <- model_matrix(x)
if (x$weighted) {
if (is.null(x$weights)) {
wi <- 1 / (x$vi + x$tau2)
} else {
wi <- x$weights
wi <- wi[wi > 0]
}
XWX <- crossprod(X_mat, wi * X_mat)
} else {
XWX <- crossprod(X_mat)
}
B <- chol2inv(chol(XWX)) * nobs(x)
rownames(B) <- colnames(B) <- colnames(X_mat)
B
}
v_scale.robu <- function(obj) {
nobs(obj)
}
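# Hedged usage sketch (an assumption, not part of this file): these S3 methods are
# the hooks that let clubSandwich::vcovCR() compute cluster-robust variance
# estimates for metafor::rma.uni fits. The BCG data and the 'trial' cluster below
# are the standard metafor example.
if (requireNamespace("metafor", quietly = TRUE) &&
    requireNamespace("clubSandwich", quietly = TRUE)) {
  dat <- metafor::escalc(measure = "RR", ai = tpos, bi = tneg, ci = cpos, di = cneg,
                         data = metafor::dat.bcg)
  fit <- metafor::rma(yi, vi, mods = ~ ablat, data = dat)
  clubSandwich::vcovCR(fit, cluster = dat$trial, type = "CR2")  # CR2-adjusted vcov of the coefficients
}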
|
context("dtstm.R unit tests")
library("data.table")
library("nnet")
library("msm")
rm(list = ls())
sim_markov_chain <- function(p, x0, times, time_stop){
n_states <- ncol(p[, , 1])
n_times <- length(times)
x <- matrix(NA, nrow = n_times, ncol = n_states)
time_interval <- 1
p_t <- p[, , 1]
x[1, ] <- x0
for (t in 2:n_times){
if (times[t] > time_stop[time_interval]){
time_interval <- time_interval + 1
p_t <- p[, , time_interval]
}
x[t, ] <- x[t - 1, ] %*% p_t
}
rownames(x) <- times
return(x)
}
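# Quick sanity check of sim_markov_chain() with a single 2-state transition matrix
# (illustrative values only): a cohort starting in state 1 gradually absorbs into state 2.
p1 <- matrix(c(0.9, 0.1,
               0.0, 1.0), nrow = 2, byrow = TRUE)  # rows sum to 1
sim_markov_chain(p = array(p1, dim = c(2, 2, 1)), x0 = c(1, 0),
                 times = 0:3, time_stop = Inf)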
test_sim_stateprobs <- function(x, sample_val = 1, strategy_id_val = 1,
patient_id_val = 1){
stprobs1 <- x$stateprobs_[sample == sample_val &
strategy_id == strategy_id_val &
patient_id == patient_id_val]
stprobs1 <- matrix(stprobs1$prob, nrow = length(unique(stprobs1$t)))
tm <- x$trans_model
index <- which(tm$params$sample == sample_val &
tm$params$strategy_id == strategy_id_val &
tm$params$patient_id == patient_id_val)
p <- tm$params$value[,, index, drop = FALSE]
n_states <- ncol(p[,, 1])
time_stop <- tm$params$time_intervals$time_stop
if (is.null(time_stop)) time_stop <- Inf
stprobs2 <- sim_markov_chain(p = p,
x0 = c(1, rep(0, n_states - 1)),
times = unique(x$stateprobs_$t),
time_stop = time_stop)
expect_equal(c(stprobs1), c(stprobs2))
}
apply_rr <- function(x, rr){
x[upper.tri(x)] <- x[upper.tri(x)] * rr
for (i in 1:(nrow(x) - 1)){
x[i, i] <- 1 - sum(x[i, (i + 1):ncol(x)])
}
return(x)
}
make_alpha <- function(x, rr = 1, n = c(500, 800, 700)){
return(apply_rr(x, rr) * n)
}
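# Worked example of apply_rr() with an illustrative matrix: the upper-triangular
# transition probabilities are multiplied by the relative risk and the diagonal is
# recomputed so every row still sums to 1; make_alpha() then scales the rows by the
# counts n to obtain Dirichlet parameters.
p_demo <- matrix(c(0.2, 0.3, 0.5,
                   0.0, 0.8, 0.2,
                   0.0, 0.0, 1.0), nrow = 3, byrow = TRUE)
apply_rr(p_demo, rr = 0.5)             # rows still sum to 1
rowSums(make_alpha(p_demo, rr = 0.5))  # equals the default counts c(500, 800, 700)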
n_samples <- 3
strategies <- data.frame(strategy_id = c(1, 2))
n_strategies <- nrow(strategies)
patients <- data.frame(patient_id = 1:2)
n_patients <- nrow(patients)
hesim_dat <- hesim_data(strategies = strategies,
patients = patients)
time_start <- c(0, 5)
n_times <- length(time_start)
n_states <- 3
tprob <- matrix(c(.2, .3, .5,
0, .8, .2,
0, 0, 1),
ncol = 3, nrow = 3, byrow = TRUE)
tprob_array <- array(NA, dim = c(n_states, n_states, 2, n_patients,
n_strategies, n_samples))
tprob_array[, , 1, 1, 1, ] <- rdirichlet_mat(n_samples, make_alpha(tprob))
tprob_array[, , 1, 1, 2, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .7))
tprob_array[, , 1, 2, 1, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .9))
tprob_array[, , 1, 2, 2, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .9))
tprob_array[, , 2, 1, 1, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .6))
tprob_array[, , 2, 1, 2, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .4))
tprob_array[, , 2, 2, 1, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .5))
tprob_array[, , 2, 2, 2, ] <- rdirichlet_mat(n_samples, make_alpha(tprob, .5))
tprob_array <- aperm(tprob_array, perm = c(6:3, 1, 2))
tprob <- tparams_transprobs(tprob_array, times = time_start)
tprob_t1 <- tparams_transprobs(tprob_array[,,,1,,, drop = FALSE])
test_that("Extra arguments work with tparams_transprobs.array()" , {
expect_equal(tparams_transprobs(tprob_array, times = time_start, grp_id = 1)$grp_id,
rep(1, prod(dim(tprob_array)[1:4])))
expect_equal(tparams_transprobs(tprob_array, times = time_start, patient_wt = 1)$patient_wt,
rep(1, prod(dim(tprob_array)[1:4])))
expect_error(tparams_transprobs(tprob_array),
paste0("'times' cannot be NULL if the number of time intervals ",
"is greater than 1"))
expect_error(tparams_transprobs(tprob_array, times = time_start,
grp_id = rep(1, 3)),
paste0("The length of 'grp_id' must be equal to the 3rd dimension of the ",
"array (i.e., the number of patients)."),
fixed = TRUE)
})
test_that("tparams_transprobs.array() returns array of matrices" , {
expect_true(inherits(tprob$value, "array"))
expect_equal(length(dim(tprob$value)), 3)
expect_equal(dim(tprob$value)[1], dim(tprob$value)[2])
})
test_that("tparams_transprobs.array() works with only 1 time interval", {
expect_true(inherits(tprob_t1, "tparams_transprobs"))
expect_equal(tprob_t1$n_times, 1)
expect_equal(nrow(tprob_t1$time_intervals), 1)
expect_true(all(tprob_t1$time_id == 1))
})
test_that("Summary method for tparams_transprobs.array() works as expected with defaults", {
ts <- summary(tprob)
expect_equal(
colnames(ts),
c("strategy_id", "patient_id", "time_id", "time_start", "time_stop",
"from", "to",
"mean", "sd")
)
i <- which(tprob$patient_id == 1 & tprob$strategy_id == 1 &
tprob$time_id == 2)
m <- apply(tprob$value[,, i], c(1,2), mean)
expect_equal(
c(t(m)),
ts[patient_id == 1 & strategy_id == 1 & time_id == 2]$mean
)
})
test_that("Summary method for tparams_transprobs.array() works with quantiles", {
ts <- summary(tprob, probs = c(.025, .975))
expect_equal(
colnames(ts),
c("strategy_id", "patient_id", "time_id", "time_start", "time_stop",
"from", "to",
"mean", "sd", "2.5%", "97.5%")
)
})
test_that("Summary method for tparams_transprobs uses correct state names", {
tprob2 <- tprob
n_states <- dim(tprob2$value)[1]
state_names <- paste0("State", 1:n_states)
dimnames(tprob2$value) <- list(state_names, state_names, NULL)
ts <- summary(tprob2)
expect_equal(
unique(ts$from),
state_names
)
})
test_that("Print method for tparams_transprobs works as expected", {
expect_output(print(tprob), "A \"tparams_transprobs\" object")
expect_output(
print(tprob),
"Column binding the ID variables with the transition probabilities:"
)
})
tprob_dt <- as.data.table(tprob)
test_that("as.data.table.tparams_transprobs() returns correct output" , {
x1 <- as.data.table(tprob)
x2 <- as.data.table(tprob, long = TRUE)
expect_true(inherits(x1, "data.table"))
expect_true(inherits(x2, "data.table"))
expect_equal(
colnames(x1),
c("sample", "strategy_id", "patient_id", "time_id", "time_start",
"time_stop",
tpmatrix_names(states = paste0("s", 1:3), prefix = "prob_", sep = "_"))
)
expect_equal(
colnames(x2),
c("sample", "strategy_id", "patient_id", "time_id", "time_start",
"time_stop", "from", "to", "prob")
)
})
test_that("as.data.table.tparams_transprobs() returns the same output with wide and long formats" , {
x1 <- as.data.table(tprob)
x2 <- as.data.table(tprob, long = TRUE)
expect_equal(
c(t(x1[, grepl("prob_", colnames(x1)), with = FALSE])),
x2$prob
)
})
tprob2 <- tparams_transprobs(tprob_dt)
test_that(paste0("tparams_transprobs() returns the same values with ",
".array and .data.table "), {
expect_equal(tprob, tprob2)
expect_equal(tprob_t1,
tparams_transprobs(tprob_dt[time_id == 1]))
})
test_that("tparams_transprobs.data.table() checks ID attributes", {
expect_error(
tparams_transprobs(tprob_dt[1:5]),
paste0("The length of the ID variables is not consistent with the ",
"number of unique values of each ID variable.")
)
})
test_that("tparams_transprobs.data.table() throws error if there are no 'prob_' columns", {
expect_error(
tparams_transprobs(tprob_dt[, .(sample, strategy_id)]),
"No columns with names starting with 'prob_'."
)
})
p <- c(.7, .6)
tpmat <- tpmatrix(
C, p,
0, 1
)
input_dat <- expand(hesim_dat)
test_that("tparams_transprobs() returns error if 'tpmatrix_id' has wrong class", {
expect_error(
tparams_transprobs(tpmat, 2),
"'tpmatrix_id' must be of class 'data.frame'."
)
})
test_that("tparams_transprobs() returns error if 'tpmatrix_id' has wrong number of rows", {
expect_error(
tparams_transprobs(tpmat, data.frame(2)),
"'object' and 'tpmatrix_id' must have the same number of rows."
)
})
test_that("tparams_transprobs() returns the correct class", {
tpmat_id <- tpmatrix_id(input_dat, n_samples = 1)
tp <- tpmatrix(
C, c(.6, .7, .5, .4),
0, 1
)
expect_true(
inherits(tparams_transprobs(tp, tpmat_id),
"tparams_transprobs")
)
})
p <- c(.7, .6, .55, .58)
tpmat <- tpmatrix(
C, p,
0, 1
)
tparray <- as_array3(tpmat)
tpmat_id <- tpmatrix_id(input_dat, n_samples = 1)
test_that("tparams_transprobs.array() works as expected with 3D array", {
tprob1 <- tparams_transprobs(tparray, tpmat_id)
tprob2 <- tparams_transprobs(tpmat, tpmat_id)
expect_equal(tprob1, tprob2)
})
test_that("tparams_transprobs.array() returns an error with the incorrect number of slices", {
expect_error(tparams_transprobs(tparray[, , -1], tpmat_id),
paste0("The third dimension of the array 'object' must equal ",
"the number or rows in 'tpmatrix_id'"))
})
transmod <- CohortDtstmTrans$new(params = tprob)
test_that("CohortDtstmTrans$new() automatically sets 'start_stateprobs' ",{
expect_equal(transmod$start_stateprobs, c(1, 0, 0))
})
test_that("CohortDtstmTrans$new() 'start_stateprobs' normalizes to 1 ",{
v <- c(5, 5, 10, 10)
tmp <- CohortDtstmTrans$new(params = tprob,
start_stateprobs = v)
expect_equal(tmp$start_stateprobs, v/sum(v))
tmp$start_stateprobs <- c(0, 0)
expect_equal(tmp$start_stateprobs, c(1/2, 1/2))
tmp <- CohortDtstmTrans$new(params = tprob,
start_stateprobs = c(0, 0))
expect_equal(tmp$start_stateprobs, c(1/2, 1/2))
})
test_that("CohortDtstmTrans$new() 'start_stateprobs' exceptions ",{
expect_error(CohortDtstmTrans$new(params = tprob,
start_stateprobs = c(Inf, 1)),
"Elements of 'state_stateprobs' cannot be infinite.")
expect_error(CohortDtstmTrans$new(params = tprob,
start_stateprobs = c(0, -1)),
"All elements of 'state_stateprobs' must be non-negative.")
})
test_that("CohortDtstmTrans 'trans_mat' must be a matrix of the correct form ",{
msg_matrix <- "'trans_mat' must be a matrix"
msg_form <- paste0("'trans_mat' is not of the correct form. Each row should ",
"contain integers from 0 to K - 1 where K is the number ",
"of possible transitions (i.e., non-NA elements)")
expect_error(CohortDtstmTrans$new(params = tprob, trans_mat = 1),
msg_matrix)
tmat_bad <- rbind(c(0, 0),
c(0, 0))
expect_error(CohortDtstmTrans$new(params = tprob, trans_mat = tmat_bad),
msg_form, fixed = TRUE)
tmat_good <- rbind(c(0, 1, 2),
c(NA, 0, 1),
c(NA, NA, NA))
tmp <- CohortDtstmTrans$new(params = tprob, trans_mat = tmat_good)
expect_equal(tmp$trans_mat, tmat_good)
expect_error(tmp$trans_mat <- 1, msg_matrix)
expect_error(tmp$trans_mat <- tmat_bad, msg_form, fixed = TRUE)
})
econmod <- CohortDtstm$new(trans_model = transmod)
econmod$sim_stateprobs(n_cycles = 3)
test_that("CohortDtstmTrans$sim_stateprobs() has correct grp_id ",{
expect_true(all(econmod$stateprobs_$grp_id == 1))
})
test_that("CohortDtstmTrans$sim_stateprobs() is correct ",{
test_sim_stateprobs(econmod)
})
transitions_data <- data.table(multinom3_exdata$transitions)
data_healthy <- transitions_data[state_from == "Healthy"]
fit_healthy <- multinom(state_to ~ strategy_name + female + age + year_cat,
data = data_healthy, trace = FALSE)
data_sick <- droplevels(transitions_data[state_from == "Sick"])
fit_sick <- multinom(state_to ~ strategy_name + female + age + year_cat,
data = data_sick, trace = FALSE)
n_patients <- 100
patients <- transitions_data[year == 1, .(patient_id, age, female)][
sample.int(nrow(transitions_data[year == 1]), n_patients)][
, grp_id := 1:n_patients]
hesim_dat <- hesim_data(
patients = patients,
strategies = data.table(strategy_id = 1:2,
strategy_name = c("Reference", "Intervention")),
states = data.table(state_id = c(1, 2),
state_name = c("Healthy", "Sick"))
)
n_samples <- 10
tmat <- rbind(c(0, 1, 2),
c(NA, 0, 1),
c(NA, NA, NA))
transfits <- multinom_list(healthy = fit_healthy, sick = fit_sick)
tintervals <- time_intervals(unique(transitions_data[, .(year_cat)])
[, time_start := c(0, 2, 6)])
transmod_data <- expand(hesim_dat, times = tintervals)
transmod <- create_CohortDtstmTrans(transfits,
input_data = transmod_data,
trans_mat = tmat,
n = n_samples,
uncertainty = "none")
test_that(paste0("create_CohortDtstmTrans$sim_stateprobs() is consistent with ",
"predict.multinom()"), {
hesim_probs <- transmod$sim_stateprobs(n_cycles = 1)[t == 1]
hesim_probs[, state_id := factor(state_id, labels = c("Healthy", "Sick", "Dead"))]
hesim_probs <- dcast(hesim_probs,
strategy_id + patient_id ~ state_id,
value.var = "prob")
multinom_probs <- predict(fit_healthy, newdata = transmod_data[time_id == 1],
type = "prob")
rownames(multinom_probs) <- NULL
expect_equal(multinom_probs,
as.matrix(hesim_probs[, c("Healthy", "Sick", "Dead")]))
})
test_that(paste0("create_CohortDtstmTrans$sim_stateprobs() with multinom() objects"), {
tpmatrix_multinom <- function(fits, data, patid){
newdata <- data[patient_id == patid]
n_times <- length(unique(transmod_data$time_id))
tpmatrix1 <- tpmatrix2 <- array(NA, dim = c(3, 3, n_times))
for (j in 1:n_times){
probs_healthy <- predict(fits$healthy, newdata[time_id == j], type = "probs")
probs_sick <- predict(fits$sick, newdata[time_id == j], type = "probs")
tpmatrix1[, , j] <- rbind(probs_healthy[1, ],
c(0, 1 - probs_sick[1], probs_sick[1]),
c(0, 0, 1))
tpmatrix2[, , j] <- rbind(probs_healthy[2, ],
c(0, 1 - probs_sick[2], probs_sick[2]),
c(0, 0, 1))
}
return(list(p_ref = tpmatrix1,
p_int = tpmatrix2))
}
times <- 0:10
patid <- sample(unique(transmod_data$patient_id), 1)
p <- tpmatrix_multinom(transfits, transmod_data, patid = patid)
stprobs_ref <- sim_markov_chain(p = p$p_ref,
x0 = c(1, 0, 0),
times = times,
time_stop = unique(transmod_data$time_stop))
stprobs_int <- sim_markov_chain(p = p$p_int,
x0 = c(1, 0, 0),
times = times,
time_stop = unique(transmod_data$time_stop))
hesim_stprobs <- transmod$sim_stateprobs(n_cycles = max(times))[patient_id == patid]
hesim_stprobs <- dcast(hesim_stprobs,
strategy_id + patient_id + t~ state_id,
value.var = "prob")
test_equal <- function(R_stprobs, hesim_stprobs, strat_id){
hesim_stprobs <- as.matrix(hesim_stprobs[strategy_id == strat_id][,
c("1", "2", "3"), with = FALSE])
colnames(hesim_stprobs) <- NULL
rownames(hesim_stprobs) <- times
expect_equal(hesim_stprobs, R_stprobs)
}
test_equal(stprobs_ref, hesim_stprobs, strat_id = 1)
test_equal(stprobs_int, hesim_stprobs, strat_id = 2)
})
test_that(paste0("create_CohortDtstmTrans does not support offset term ",
"with mulinom() objects"), {
m <- matrix(c(1, 100, 1), nrow = nrow(data_healthy), ncol = 3, byrow = TRUE)
fit_healthy2 <- multinom(state_to ~ offset(m) + strategy_name + female + age +
year_cat,
data = data_healthy, trace = FALSE)
transfits2 <- multinom_list(healthy = fit_healthy2, sick = fit_sick)
expect_error(create_CohortDtstmTrans(transfits2,
input_data = transmod_data,
trans_mat = tmat),
"An offset is not supported")
})
input_dat <- expand(hesim_dat)
input_dat[, intercept := 1L]
input_dat[, intervention := ifelse(strategy_name == "Intervention", 1L, 0L)]
tmat <- rbind(
c(0, 1, 2),
c(NA, 0, 1),
c(NA, NA, NA)
)
p <- params_mlogit_list(
sick = params_mlogit(
coefs = list(
sicker = data.frame(
intercept = c(-0.33, -.2),
intervention = c(log(.75), log(.8))
),
death = data.frame(
intercept = c(-1, -1.2),
strategy_name = c(log(.6), log(.65))
)
)
),
sicker = params_mlogit(
coefs = list(
death = data.frame(
intercept = c(-1.5, -1.4),
intervention = c(log(.5), log(.55))
)
)
)
)
test_that("create_CohortDtstmTrans.params_mlogit_list works as expcted", {
transmod <- create_CohortDtstmTrans(p, input_data = input_dat, trans_mat = tmat)
expect_true(inherits(transmod, "CohortDtstmTrans"))
expect_equal(
unique(c(sapply(transmod$input_data$X, colnames))),
c("intercept", "intervention")
)
})
test_that("create_CohortDtstmTrans requires numeric input data when built from params objects", {
p2 <- p
dimnames(p2$sick$coefs)[[2]][2] <- "strategy_name"
dimnames(p2$sicker$coefs)[[2]][2] <- "strategy_name"
expect_error(
create_CohortDtstmTrans(p2, input_data = input_dat, trans_mat = tmat),
"'input_data' must only include numeric variables."
)
})
set.seed(101)
strategies <- data.table(strategy_id = c(1, 2, 3),
strategy_name = factor(c("SOC", "New 1", "New 2")))
patients <- data.table(patient_id = 1:2)
hesim_dat <- hesim_data(strategies = strategies,
patients = patients)
transmod_data <- expand(hesim_dat)
qinit <- rbind(
c(0, 0.28163, 0.01239),
c(0, 0, 0.10204),
c(0, 0, 0)
)
fit <- msm(state_id ~ time, subject = patient_id,
data = onc3p[patient_id %in% sample(patient_id, 100)],
covariates = list("1-2" =~ strategy_name),
qmatrix = qinit)
test_that("create_CohortDtstmTrans.msm() returns correct transition probability matrices with no uncertainty", {
transmod <- create_CohortDtstmTrans(fit,
input_data = transmod_data,
cycle_length = 1/2,
fixedpars = 2,
uncertainty = "none")
expect_equal(transmod$params$n_samples, 1)
expect_equal(
expmat(qmatrix(fit, transmod_data, uncertainty = "none"), t = 1/2),
transmod$params$value
)
})
test_that(paste0("create_CohortDtstmTrans.msm() returns transition probability matrices",
"with correct dimensions when there is uncertainty"), {
transmod <- create_CohortDtstmTrans(fit,
input_data = transmod_data,
cycle_length = 1/2,
fixedpars = 2,
n = 2)
expect_equal(transmod$params$n_samples, 2)
expect_equal(dim(transmod$params$value)[3], 2 * nrow(transmod_data))
})
test_that(paste0("define_model() works to create CohortDtstmTrans object"), {
hesim_dat <- hesim_data(
strategies = data.table(strategy_id = 1:2,
strategy_name = c("Monotherapy", "Combination therapy")),
patients = data.table(patient_id = 1)
)
data <- expand(hesim_dat)
rng_def <- define_rng({
alpha <- matrix(c(1251, 350, 116, 17,
0, 731, 512, 15,
0, 0, 1312, 437,
0, 0, 0, 469),
nrow = 4, byrow = TRUE)
rownames(alpha) <- colnames(alpha) <- c("A", "B", "C", "D")
lrr_mean <- log(.509)
lrr_se <- (log(.710) - log(.365))/(2 * qnorm(.975))
list(
p_mono = dirichlet_rng(alpha),
rr_comb = lognormal_rng(lrr_mean, lrr_se),
u = 1,
c_zido = 2278,
c_lam = 2086.50,
c_med = gamma_rng(mean = c(A = 2756, B = 3052, C = 9007),
sd = c(A = 2756, B = 3052, C = 9007))
)
}, n = 2)
tparams_def <- define_tparams({
rr = ifelse(strategy_name == "Monotherapy", 1, rr_comb)
list(
tpmatrix = tpmatrix(
C, p_mono$A_B * rr, p_mono$A_C * rr, p_mono$A_D * rr,
0, C, p_mono$B_C * rr, p_mono$B_D * rr,
0, 0, C, p_mono$C_D * rr,
0, 0, 0, 1),
utility = u,
costs = list(
drug = ifelse(strategy_name == "Monotherapy",
c_zido, c_zido + c_lam),
medical = c_med
)
)
})
model_def <- define_model(
tparams_def = tparams_def,
rng_def = rng_def)
econmod <- create_CohortDtstm(model_def, data)
expect_true(inherits(econmod, "CohortDtstm"))
})
|
freqdist.augmentedRCBD <- function(aug, xlab, highlight.check = TRUE,
check.col = "red") {
if (!is(aug, "augmentedRCBD")) {
stop('"aug" is not of class "augmentedRCBD"')
}
if (!all(iscolour(check.col))) {
stop('"check.col" specifies invalid colour(s)')
}
checks <- aug$Details$`Check treatments`
dat <- aug$Means$`Adjusted Means`
if (length(check.col) != 1) {
if (length(check.col) != length(checks)) {
stop('"checks" and "check.col" are of unequal lengths')
}
}
NN <- length(dat)
bw <- binw(dat, "sturges")
dat <- data.frame(dat)
G1 <- ggplot(dat, aes(x = dat)) +
geom_histogram(colour = "black", fill = "grey",
binwidth = bw) +
scale_x_continuous(limits = c( (min(dat$dat, na.rm = TRUE)),
(max(dat$dat, na.rm = TRUE)))) +
stat_function(geom = "line", fun = function(x, mean, sd, n, bw){
dnorm(x = x, mean = mean, sd = sd) * n * bw},
args = list(mean = mean(dat$dat, na.rm = TRUE),
sd = sd(dat$dat, na.rm = TRUE),
n = NN, bw = bw), colour = "blue") +
labs(x = xlab, y = "Frequency") +
theme_bw() +
theme(axis.text = element_text(colour = "black"),
plot.margin = unit(c(0, 1, 1, 1), "lines"))
if (highlight.check) {
G1 <- G1 +
geom_vline(xintercept = aug$Means[aug$Means$Treatment %in% checks, ]$`Adjusted Means`,
size = 1, colour = check.col)
dat2 <- aug$Means[aug$Means$Treatment %in% checks, ]
dat2$lower <- dat2$`Adjusted Means` - dat2$SE
dat2$upper <- dat2$`Adjusted Means` + dat2$SE
G2 <- ggplot(dat2, aes(x = Treatment, y = `Adjusted Means`)) +
geom_errorbar(aes(ymin = lower, ymax = upper), colour = check.col,
width = 0.25) +
geom_point(colour = check.col) +
labs(x = NULL, y = NULL) +
scale_y_continuous(limits = c( (min(dat$dat, na.rm = TRUE)),
(max(dat$dat, na.rm = TRUE)))) +
coord_flip() +
theme_bw() +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
theme(legend.position = "none") +
theme(plot.margin = unit(c(0.25, 0.1, 0, 0.25), "cm"),
axis.text = element_text(colour = "black"))
G <- rbind(ggplotGrob(G2), ggplotGrob(G1), size = "max")
G <- resize_heights(G, c(1, 3))
} else {
G <- ggplotGrob(G1 + theme(plot.margin = unit(c(1, 1, 1, 1), "lines")))
}
return(G)
}
binw <- function(x, method = c("fd", "scott", "sturges")) {
method <- match.arg(method)
if (method == "fd") {
bw <- pretty(range(x, na.rm = TRUE), n = nclass.FD(na.omit(x)),
min.n = 1, right = TRUE)[2] -
pretty(range(x, na.rm = TRUE), n = nclass.FD(na.omit(x)),
min.n = 1, right = TRUE)[1]
}
if (method == "scott") {
bw <- pretty(range(x, na.rm = TRUE), n = nclass.scott(na.omit(x)),
min.n = 1, right = TRUE)[2] -
pretty(range(x, na.rm = TRUE), n = nclass.scott(na.omit(x)),
min.n = 1, right = TRUE)[1]
}
if (method == "sturges") {
bw <- pretty(range(x, na.rm = TRUE), n = nclass.Sturges(na.omit(x)),
min.n = 1, right = TRUE)[2] -
pretty(range(x, na.rm = TRUE), n = nclass.Sturges(na.omit(x)),
min.n = 1, right = TRUE)[1]
}
return(bw)
}
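# binw() returns the bin width that pretty() picks for the chosen binning rule;
# a quick check on simulated data (illustrative only):
set.seed(42)
binw(rnorm(100), method = "sturges")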
if (getRversion() >= "4.0.0") {
resize_heights <- function(g, heights = rep(1, length(idpanels))){
idpanels <- unique(g$layout[grepl("panel",g$layout$name), "t"])
g$heights <- grid::unit(g$heights, "null")
g$heights[idpanels] <- grid::unit(do.call(grid::unit,
list(heights, 'null')), "null")
g
}
} else {
unit.list <- getFromNamespace("unit.list", "grid")
resize_heights <- function(g, heights = rep(1, length(idpanels))){
idpanels <- unique(g$layout[grepl("panel", g$layout$name), "t"])
g$heights <- unit.list(g$heights)
hunits <- lapply(heights, unit, "null")
class(hunits) <- class(g$heights[idpanels])
g$heights[idpanels] <- hunits
g
}
}
iscolour <- function(x) {
sapply(x, function(X) {
tryCatch(is.matrix(col2rgb(X)),
error = function(e) FALSE)
})
}
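# iscolour() vectorises over colour specifications and returns FALSE instead of
# raising an error for invalid ones:
iscolour(c("red", "#00FF00", "notacolour"))  # TRUE TRUE FALSE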
|
MI <- function(X) {
if (!is.numeric((X)) && !is.logical((X))) {
warning("Argument is not numeric or logical: returning NA")
return(NA)
}
MI_Cpp(X)
}
|
rct3 <- function(formula, data, predictions = NULL, shrink = FALSE,
power = 3, range = 20, min.se = 0.2,
old = TRUE)
{
form <- formula[[3]]
bits <- list()
while(length(form)>1) {
bits <- c(bits, form[[3]])
form <- form[[2]]
}
bits <- rev(c(bits, form))
formulas <- lapply(bits, function(x) {tmp <- formula; tmp[[3]] <- tmp[[2]]; tmp[[2]] <- x; tmp})
formulas2 <- lapply(bits, function(x) {tmp <- formula; tmp[[3]] <- x; tmp})
weight <- function(y, y0, D, p) pmax(0, (1 - ((y0 - y)/D)^p)^p)
log.data <- data
if (old) {
log.data[names(data) != "yearclass"] <- log(data[names(data) != "yearclass"] + 1)
} else
{
log.data[names(data) != "yearclass"] <- log(data[names(data) != "yearclass"])
}
do.one.prediction <- function(i, predict.yr) {
wk.data <- log.data[log.data$yearclass < predict.yr,]
yr_diff <- max(wk.data$yearclass) - wk.data$yearclass
wk.data$wts <- (1 - (pmin(range, yr_diff)/range)^power)^power
if (nrow(wk.data) < 3) stop("too few data points in one survey!")
m <- lm(formulas[[i]], data = wk.data, weights = wts)
b <- {function(x) c(-x[1], 1)/x[2] }(unname(coef(m)))
wts <- wk.data[names(m$residuals),"wts"]
rss <- sum( wts * m$residuals^2 )
mss <- sum(wts * (m$fitted.values - mean(m$fitted.values))^2)
sigma <- b[2] * sqrt(rss / (sum(wts) - 2))
rsqr <- mss / (rss + mss)
n <- m$df.residual + 2
Xp <- unname(model.matrix(formulas2[[i]][c(1,3)], log.data[log.data$yearclass == predict.yr,]))
if (nrow(Xp)) {
X <- unname(model.matrix(formulas2[[i]], wk.data))
XXinv <- solve(t(X) %*% diag(wts) %*% X)
pred <- drop(Xp %*% b)
se.pred <- sqrt(sum(wts) / (sum(wts)-2)) * sigma * sqrt(1 + drop(Xp %*% XXinv %*% t(Xp)))
index <- Xp[,2]
} else {
index <- pred <- se.pred <- NA
}
data.frame(index = as.character(formulas[[i]][[2]]),
slope = b[2], intercept = b[1],
se = sigma, rsquare = rsqr, n = n,
indices = index, prediction = pred,
se.pred = se.pred)
}
if (is.null(predictions)) {
y <- eval(formula[[2]], log.data)
predictions <- log.data $ yearclass[is.na(y)]
}
out <-
lapply(predictions,
function(yr)
{
out <- do.call( rbind, lapply(1:length(formulas), do.one.prediction, predict.yr = yr))
out$slope[is.na(out$indices)] <- NA
out$intercept[is.na(out$indices)] <- NA
out$se[is.na(out$indices)] <- NA
out$rsquare[is.na(out$indices)] <- NA
out$n[is.na(out$indices)] <- NA
vpa <- eval(formula[[2]], log.data)[log.data $ yearclass < yr]
yrs <- log.data$yearclass[log.data$yearclass < yr]
notNA <- !is.na(vpa)
wts <- (1 - (pmin(range, max(yrs) - yrs)/range)^power)^power
vpa <- vpa[notNA]
yrs <- yrs[notNA]
wts <- wts[notNA]
out <- rbind(out, data.frame(index = "VPA Mean",
slope = NA, intercept = NA,
se = NA, rsquare = NA, n = length(vpa),
indices = NA,
prediction = sum(wts * vpa) / sum(wts),
se.pred = sqrt(sum(wts * (vpa - mean(vpa))^2) / (sum(wts)-1))
))
if (shrink)
{
se.pred <- pmax(out$se.pred, min.se)
out$WAP.weights <- (1/se.pred^2) / sum(1/se.pred^2, na.rm = TRUE)
}
else
{
se.pred <- pmax(out[1:(nrow(out)-1),]$se.pred, min.se)
out $ WAP.weights <- c((1/se.pred^2) / sum(1/se.pred^2, na.rm = TRUE), 0)
}
out
})
names(out) <- paste("yearclass", predictions, sep=":")
summarise.rct3 <- function(tmp)
{
pred <- with(tmp, sum(prediction * WAP.weights, na.rm = TRUE))
int.se <- 1/sqrt(sum(1/tmp $ se.pred^2, na.rm = TRUE))
data.frame(WAP = exp(pred), logWAP = pred, int.se = int.se)
}
out <- list(stock = attr(data, "stock"),
info = c(length(bits), nrow(data), range(log.data $ yearclass)),
rct3 = out,
rct3.summary = do.call(rbind, lapply(out, summarise.rct3)),
shrink = shrink,
power = power,
range = range,
min.se = min.se)
class(out) <- "rct3"
out
}
|
fld.depth <- function(level, elevation, percentile = 0.5) {
if (length(percentile) == 1) {
A <- as.numeric(NA)
for (i in 1:length(elevation)) {
flooded <- level[level >= elevation[i] & !is.na(level)]
ifelse(percentile == 0.5,
percentileDepth <- stats::median(flooded) - elevation[i],
percentileDepth <- stats::quantile(flooded, percentile) - elevation[i]
)
A[i] <- percentileDepth
}
} else if (length(percentile) > 1) {
A <- data.frame(elev = elevation)
for(i in 1:length(percentile)) {
A[, (i + 1)] <- as.numeric(NA)
}
for (i in 1:length(elevation)) {
flooded <- level[level >= elevation[i] & !is.na(level)]
percentileDepth <- quantile(flooded, percentile) - elevation[i]
A[i, 2:(length(percentile) + 1)] <- as.numeric(percentileDepth)
if (names(A)[2] == "V2") {
names(A)[2:(length(percentile) + 1)] <- names(percentileDepth)
}
}
} else if (length(percentile) < 1) {
stop("error: 'percentile' cannot have length zero")
}
A
}
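# Illustrative example with made-up data: median and 90th-percentile flooding
# depths at two marsh elevations, using a simulated water-level record.
set.seed(7)
wl <- rnorm(1000, mean = 1, sd = 0.5)  # hypothetical water levels
fld.depth(level = wl, elevation = c(0.5, 1.0), percentile = c(0.5, 0.9))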
|
predict.rma <- function(object, newmods, intercept, tau2.levels, gamma2.levels, addx=FALSE,
level, digits, transf, targs, vcov=FALSE, ...) {
mstyle <- .get.mstyle("crayon" %in% .packages())
.chkclass(class(object), must="rma", notav="rma.ls")
na.act <- getOption("na.action")
if (!is.element(na.act, c("na.omit", "na.exclude", "na.fail", "na.pass")))
stop(mstyle$stop("Unknown 'na.action' specified under options()."))
x <- object
if (missing(newmods))
newmods <- NULL
if (missing(intercept))
intercept <- x$intercept
if (missing(tau2.levels))
tau2.levels <- NULL
if (missing(gamma2.levels))
gamma2.levels <- NULL
if (missing(level))
level <- x$level
if (missing(digits)) {
digits <- .get.digits(xdigits=x$digits, dmiss=TRUE)
} else {
digits <- .get.digits(digits=digits, xdigits=x$digits, dmiss=FALSE)
}
if (missing(transf))
transf <- FALSE
if (missing(targs))
targs <- NULL
level <- ifelse(level == 0, 1, ifelse(level >= 1, (100-level)/100, ifelse(level > .5, 1-level, level)))
ddd <- list(...)
.chkdots(ddd, c("pi.type", "newvi"))
if (is.null(ddd$pi.type)) {
pi.type <- "default"
} else {
pi.type <- ddd$pi.type
}
if (x$int.only && !is.null(newmods))
stop(mstyle$stop("Cannot specify new moderator values for models without moderators."))
if (is.null(newmods)) {
if (!inherits(object, "rma.mv") || (inherits(object, "rma.mv") && any(is.element(object$struct, c("GEN","GDIAG"))))) {
if (x$int.only) {
k.new <- 1
X.new <- cbind(1)
} else {
k.new <- x$k.f
X.new <- x$X.f
}
} else {
if (x$int.only) {
if (!x$withG) {
k.new <- 1
X.new <- cbind(1)
}
if (x$withG && x$withH) {
if (is.null(tau2.levels) && is.null(gamma2.levels)) {
k.new <- x$tau2s * x$gamma2s
X.new <- cbind(rep(1,k.new))
if (x$tau2s == 1) {
tau2.levels <- rep(1,k.new)
} else {
tau2.levels <- rep(levels(x$mf.g.f$inner), each=x$gamma2s)
}
if (x$gamma2s == 1) {
gamma2.levels <- rep(1,k.new)
} else {
gamma2.levels <- rep(levels(x$mf.h.f$inner), times=x$tau2s)
}
}
if ((!is.null(tau2.levels) && is.null(gamma2.levels)) ||
(is.null(tau2.levels) && !is.null(gamma2.levels)))
stop(mstyle$stop("Either specify both of 'tau2.levels' and 'gamma2.levels' or neither."))
if (!is.null(tau2.levels) && !is.null(gamma2.levels)) {
if (length(tau2.levels) != length(gamma2.levels))
stop(mstyle$stop("Length of 'tau2.levels' and 'gamma2.levels' is not the same."))
k.new <- length(tau2.levels)
X.new <- cbind(rep(1,k.new))
}
}
if (x$withG && !x$withH) {
if (is.null(tau2.levels)) {
k.new <- x$tau2s
X.new <- cbind(rep(1,k.new))
if (x$tau2s == 1) {
tau2.levels <- rep(1, k.new)
} else {
tau2.levels <- levels(x$mf.g.f$inner)
}
} else {
k.new <- length(tau2.levels)
X.new <- cbind(rep(1,k.new))
}
gamma2.levels <- rep(1, k.new)
}
} else {
k.new <- x$k.f
X.new <- x$X.f
if (!is.null(tau2.levels) || !is.null(gamma2.levels))
warning(mstyle$warning("Arguments 'tau2.levels' and 'gamma2.levels' ignored when obtaining fitted values."), call.=FALSE)
tau2.levels <- as.character(x$mf.g.f$inner)
gamma2.levels <- as.character(x$mf.h.f$inner)
}
}
} else {
if (!(.is.vector(newmods) || inherits(newmods, "matrix")))
stop(mstyle$stop(paste0("Argument 'newmods' should be a vector or matrix, but is of class '", class(newmods), "'.")))
if ((!x$int.incl && x$p == 1L) || (x$int.incl && x$p == 2L)) {
k.new <- length(newmods)
X.new <- cbind(c(newmods))
} else {
if (.is.vector(newmods) || nrow(newmods) == 1L) {
k.new <- 1
X.new <- rbind(newmods)
} else {
k.new <- nrow(newmods)
X.new <- cbind(newmods)
}
if (!is.null(colnames(X.new)) && all(colnames(X.new) != "") && !is.null(colnames(x$X)) && all(colnames(x$X) != "")) {
colnames.mod <- colnames(x$X)
if (x$int.incl)
colnames.mod <- colnames.mod[-1]
pos <- sapply(colnames(X.new), function(colname) {
d <- c(adist(colname, colnames.mod, costs=c(ins=1, sub=Inf, del=Inf)))
if (all(is.infinite(d)))
stop(mstyle$stop(paste0("Could not find variable '", colname, "' in the model.")), call. = FALSE)
d <- which(d == min(d))
if (length(d) > 1L)
stop(mstyle$stop(paste0("Could not match up variable '", colname, "' uniquely to a variable in the model.")), call. = FALSE)
return(d)
})
if (anyDuplicated(pos)) {
dups <- paste(unique(colnames(X.new)[duplicated(pos)]), collapse=", ")
stop(mstyle$stop(paste0("Found multiple matches for the same variable name (", dups, ").")))
}
if (length(pos) != length(colnames.mod)) {
no.match <- colnames.mod[seq_along(colnames.mod)[-pos]]
if (length(no.match) > 3L)
stop(mstyle$stop(paste0("Argument 'newmods' does not specify values for these variables: ", paste0(no.match[1:3], collapse=", "), ", ...")))
if (length(no.match) > 1L)
stop(mstyle$stop(paste0("Argument 'newmods' does not specify values for these variables: ", paste0(no.match, collapse=", "))))
if (length(no.match) == 1L)
stop(mstyle$stop(paste0("Argument 'newmods' does not specify values for this variable: ", no.match)))
}
X.new <- X.new[,order(pos),drop=FALSE]
colnames(X.new) <- colnames.mod
}
}
if (inherits(X.new[1,1], "character"))
stop(mstyle$stop(paste0("Argument 'newmods' should only contain numeric variables.")))
if (x$int.incl) {
if (intercept) {
X.new <- cbind(intrcpt=1, X.new)
} else {
X.new <- cbind(intrcpt=0, X.new)
}
}
if (ncol(X.new) != x$p)
stop(mstyle$stop(paste0("Dimensions of 'newmods' (", ncol(X.new), ") do not the match dimensions of the model (", x$p, ").")))
}
if (inherits(object, "rma.mv") && x$withG) {
if (x$tau2s > 1) {
if (is.null(tau2.levels)) {
} else {
if (!is.numeric(tau2.levels) && anyNA(pmatch(tau2.levels, x$g.levels.f[[1]], duplicates.ok=TRUE)))
stop(mstyle$stop("Non-existing levels specified via 'tau2.levels' argument."))
if (is.numeric(tau2.levels)) {
tau2.levels <- round(tau2.levels)
if (any(tau2.levels < 1) || any(tau2.levels > x$g.nlevels.f[1]))
stop(mstyle$stop("Non-existing tau^2 values specified via 'tau2.levels' argument."))
}
if (length(tau2.levels) == 1L)
tau2.levels <- rep(tau2.levels, k.new)
if (length(tau2.levels) != k.new)
stop(mstyle$stop(paste0("Length of 'tau2.levels' argument (", length(tau2.levels), ") does not match the number of predicted values (", k.new, ").")))
}
} else {
tau2.levels <- rep(1, k.new)
}
}
if (inherits(object, "rma.mv") && x$withH) {
if (x$gamma2s > 1) {
if (is.null(gamma2.levels)) {
} else {
if (!is.numeric(gamma2.levels) && anyNA(pmatch(gamma2.levels, x$h.levels.f[[1]], duplicates.ok=TRUE)))
stop(mstyle$stop("Non-existing levels specified via 'gamma2.levels' argument."))
if (is.numeric(gamma2.levels)) {
gamma2.levels <- round(gamma2.levels)
if (any(gamma2.levels < 1) || any(gamma2.levels > x$h.nlevels.f[1]))
stop(mstyle$stop("Non-existing gamma^2 values specified via 'gamma2.levels' argument."))
}
if (length(gamma2.levels) == 1L)
gamma2.levels <- rep(gamma2.levels, k.new)
if (length(gamma2.levels) != k.new)
stop(mstyle$stop(paste0("Length of 'gamma2.levels' argument (", length(gamma2.levels), ") does not match the number of predicted values (", k.new, ").")))
}
} else {
gamma2.levels <- rep(1, k.new)
}
}
if (length(x$ddf) == 1L) {
ddf <- rep(x$ddf, k.new)
} else {
ddf <- rep(NA, k.new)
for (j in seq_len(k.new)) {
bn0 <- X.new[j,] != 0
ddf[j] <- min(x$ddf[bn0])
}
}
ddf[is.na(ddf)] <- x$k - x$p
pred <- rep(NA_real_, k.new)
vpred <- rep(NA_real_, k.new)
for (i in seq_len(k.new)) {
Xi.new <- X.new[i,,drop=FALSE]
pred[i] <- Xi.new %*% x$beta
vpred[i] <- Xi.new %*% tcrossprod(x$vb, Xi.new)
}
if (is.element(x$test, c("knha","adhoc","t"))) {
crit <- sapply(seq_along(ddf), function(j) if (ddf[j] > 0) qt(level/2, df=ddf[j], lower.tail=FALSE) else NA)
} else {
crit <- qnorm(level/2, lower.tail=FALSE)
}
se <- sqrt(vpred)
ci.lb <- pred - crit * se
ci.ub <- pred + crit * se
if (vcov)
vcovpred <- X.new %*% x$vb %*% t(X.new)
if (pi.type == "simple") {
crit <- qnorm(level/2, lower.tail=FALSE)
vpred <- 0
}
pi.ddf <- ddf
if (is.element(pi.type, c("riley","t"))) {
if (pi.type == "riley")
pi.ddf <- ddf - x$parms + x$p
if (pi.type == "t")
pi.ddf <- ddf
pi.ddf[pi.ddf < 1] <- 1
crit <- sapply(seq_along(pi.ddf), function(j) if (pi.ddf[j] > 0) qt(level/2, df=pi.ddf[j], lower.tail=FALSE) else NA)
}
if (is.null(ddd$newvi)) {
newvi <- 0
} else {
newvi <- ddd$newvi
if (length(newvi) == 1L)
newvi <- rep(newvi, k.new)
if (length(newvi) != k.new)
stop(mstyle$stop(paste0("Length of 'newvi' argument (", length(newvi), ") does not match the number of predicted values (", k.new, ").")))
}
if (!inherits(object, "rma.mv")) {
pi.lb <- pred - crit * sqrt(vpred + x$tau2 + newvi)
pi.ub <- pred + crit * sqrt(vpred + x$tau2 + newvi)
} else {
if (!x$withG) {
pi.lb <- pred - crit * sqrt(vpred + sum(x$sigma2) + newvi)
pi.ub <- pred + crit * sqrt(vpred + sum(x$sigma2) + newvi)
}
if (x$withG && !x$withH) {
if (x$tau2s == 1) {
pi.lb <- pred - crit * sqrt(vpred + sum(x$sigma2) + x$tau2 + newvi)
pi.ub <- pred + crit * sqrt(vpred + sum(x$sigma2) + x$tau2 + newvi)
} else {
if (is.null(tau2.levels)) {
pi.lb <- rep(NA, k.new)
pi.ub <- rep(NA, k.new)
tau2.levels <- rep(NA, k.new)
} else {
if (!is.numeric(tau2.levels))
tau2.levels <- pmatch(tau2.levels, x$g.levels.f[[1]], duplicates.ok=TRUE)
pi.lb <- pred - crit * sqrt(vpred + sum(x$sigma2) + x$tau2[tau2.levels] + newvi)
pi.ub <- pred + crit * sqrt(vpred + sum(x$sigma2) + x$tau2[tau2.levels] + newvi)
tau2.levels <- x$g.levels.f[[1]][tau2.levels]
}
}
}
if (x$withG && x$withH) {
if (x$tau2s == 1 && x$gamma2s == 1) {
pi.lb <- pred - crit * sqrt(vpred + sum(x$sigma2) + x$tau2 + x$gamma2 + newvi)
pi.ub <- pred + crit * sqrt(vpred + sum(x$sigma2) + x$tau2 + x$gamma2 + newvi)
} else {
if (is.null(tau2.levels) || is.null(gamma2.levels)) {
pi.lb <- rep(NA, k.new)
pi.ub <- rep(NA, k.new)
tau2.levels <- rep(NA, k.new)
gamma2.levels <- rep(NA, k.new)
} else {
if (!is.numeric(tau2.levels))
tau2.levels <- pmatch(tau2.levels, x$g.levels.f[[1]], duplicates.ok=TRUE)
if (!is.numeric(gamma2.levels))
gamma2.levels <- pmatch(gamma2.levels, x$h.levels.f[[1]], duplicates.ok=TRUE)
pi.lb <- pred - crit * sqrt(vpred + sum(x$sigma2) + x$tau2[tau2.levels] + x$gamma2[gamma2.levels] + newvi)
pi.ub <- pred + crit * sqrt(vpred + sum(x$sigma2) + x$tau2[tau2.levels] + x$gamma2[gamma2.levels] + newvi)
tau2.levels <- x$g.levels.f[[1]][tau2.levels]
gamma2.levels <- x$h.levels.f[[1]][gamma2.levels]
}
}
}
}
if (is.function(transf)) {
if (is.null(targs)) {
pred <- sapply(pred, transf)
se <- rep(NA,k.new)
ci.lb <- sapply(ci.lb, transf)
ci.ub <- sapply(ci.ub, transf)
pi.lb <- sapply(pi.lb, transf)
pi.ub <- sapply(pi.ub, transf)
} else {
pred <- sapply(pred, transf, targs)
se <- rep(NA,k.new)
ci.lb <- sapply(ci.lb, transf, targs)
ci.ub <- sapply(ci.ub, transf, targs)
pi.lb <- sapply(pi.lb, transf, targs)
pi.ub <- sapply(pi.ub, transf, targs)
}
do.transf <- TRUE
} else {
do.transf <- FALSE
}
tmp <- .psort(ci.lb, ci.ub)
ci.lb <- tmp[,1]
ci.ub <- tmp[,2]
tmp <- .psort(pi.lb, pi.ub)
pi.lb <- tmp[,1]
pi.ub <- tmp[,2]
if (is.null(newmods) && !x$int.only) {
slab <- x$slab
} else {
slab <- seq_len(k.new)
}
if (vcov)
rownames(vcovpred) <- colnames(vcovpred) <- slab
if (k.new == 1L)
slab <- ""
not.na <- rep(TRUE, k.new)
if (na.act == "na.omit") {
if (is.null(newmods) && !x$int.only) {
not.na <- x$not.na
} else {
not.na <- !is.na(pred)
}
}
if (na.act == "na.fail" && any(!x$not.na))
stop(mstyle$stop("Missing values in results."))
out <- list(pred=pred[not.na], se=se[not.na], ci.lb=ci.lb[not.na], ci.ub=ci.ub[not.na], pi.lb=pi.lb[not.na], pi.ub=pi.ub[not.na], cr.lb=pi.lb[not.na], cr.ub=pi.ub[not.na])
if (vcov)
vcovpred <- vcovpred[not.na,not.na,drop=FALSE]
if (na.act == "na.exclude" && is.null(newmods) && !x$int.only) {
out <- lapply(out, function(val) ifelse(x$not.na, val, NA))
if (vcov) {
vcovpred[!x$not.na,] <- NA
vcovpred[,!x$not.na] <- NA
}
}
if (inherits(object, "rma.mv") && x$withG && x$tau2s > 1)
out$tau2.level <- tau2.levels
if (inherits(object, "rma.mv") && x$withH && x$gamma2s > 1)
out$gamma2.level <- gamma2.levels
if (inherits(object, "rma.mv") && any(is.element(object$struct, c("GEN","GDIAG")))) {
out$cr.lb <- NULL
out$cr.ub <- NULL
out$pi.lb <- NULL
out$pi.ub <- NULL
out$tau2.level <- NULL
out$gamma2.level <- NULL
}
if (addx) {
out$X <- matrix(X.new[not.na,], ncol=x$p)
colnames(out$X) <- colnames(x$X)
}
out$slab <- slab[not.na]
if (is.element(x$method, c("FE","EE","CE"))) {
out$cr.lb <- NULL
out$cr.ub <- NULL
out$pi.lb <- NULL
out$pi.ub <- NULL
}
out$digits <- digits
out$method <- x$method
out$transf <- do.transf
if (x$test != "z")
out$ddf <- ddf
if ((x$test != "z" || is.element(pi.type, c("riley","t"))) && pi.type != "simple")
out$pi.ddf <- pi.ddf
class(out) <- "list.rma"
if (vcov & !do.transf) {
out <- list(pred=out)
out$vcov <- vcovpred
}
return(out)
}
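# Hedged usage sketch (standard metafor example data, not part of this file):
# predicted average risk ratios at three values of absolute latitude, with
# confidence and prediction intervals back-transformed via exp().
if (requireNamespace("metafor", quietly = TRUE)) {
  dat <- metafor::escalc(measure = "RR", ai = tpos, bi = tneg, ci = cpos, di = cneg,
                         data = metafor::dat.bcg)
  res <- metafor::rma(yi, vi, mods = ~ ablat, data = dat)
  predict(res, newmods = c(10, 30, 50), transf = exp)
}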
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
df <- cbind(all = c("$\\textbf{all}$","$\\text{AvgRelA}^{[m]}_j$","$\\vdots$","$\\text{AvgRelA}^{[k]}_j$","$\\vdots$",
"$\\text{AvgRelA}^{[1]}_j$","$\\vdots$","$\\text{AvgRelA}_j$"),
uts = c("$\\textbf{uts}$","$\\text{AvgRelA}^{[m]}_{a,j}$","$\\vdots$","$\\text{AvgRelA}^{[k]}_{a,j}$","$\\vdots$",
"$\\text{AvgRelA}^{[1]}_{a,j}$","$\\vdots$","$\\text{AvgRelA}_{a,j}$"),
bts = c("$\\textbf{bts}$","$\\text{AvgRelA}^{[m]}_{b,j}$","$\\vdots$","$\\text{AvgRelA}^{[k]}_{b,j}$","$\\vdots$",
"$\\text{AvgRelA}^{[1]}_{b,j}$","$\\vdots$","$\\text{AvgRelA}_{b,j}$"))
rownames(df) <- c("","$\\textbf{m}$","$\\vdots$","$\\textbf{k}$","$\\vdots$","$\\textbf{1}$","$\\vdots$","$\\textbf{all}$")
knitr::kable(df,align='cccc',escape = F, col.names = rep("",3))
df <- cbind(col1 = c("$\\textbf{m}$","$\\textbf{1}$","$\\text{RelA}^{[m],1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[m],1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[m],1}_{n,j}$"),
col2 = c("$\\dots$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col3 = c("", "$\\textbf{1}$","$\\text{RelA}^{[k],1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[k],1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[k],1}_{n,j}$"),
col4 = c("$\\textbf{k}$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col5 = c("", "$\\mathbf{h_k}$","$\\text{RelA}^{[k],h_k}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[k],h_k}_{i,j}$","$\\vdots$","$\\text{RelA}^{[k],h_k}_{n,j}$"),
col6 = c("$\\dots$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col7 = c("", "$\\textbf{1}$","$\\text{RelA}^{[1],1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[1],1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[1],1}_{n,j}$"),
col8 = c("$\\textbf{1}$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col9 = c("", "$\\mathbf{m}$","$\\text{RelA}^{[1],m}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[1],m}_{i,j}$","$\\vdots$","$\\text{RelA}^{[1],m}_{n,j}$"))
rownames(df) <- c("${\\cal K}$","$\\textbf{h}$","$\\textbf{1}$","$\\vdots$","$\\textbf{i}$","$\\vdots$","$\\textbf{n}$")
knitr::kable(df,align='ccccccccc',escape = F, col.names = rep("",9))
df <- cbind(col1 = c("$\\textbf{m}$","$\\textbf{1:1}$","$\\text{RelA}^{[m],1:1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[m],1:1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[m],1:1}_{n,j}$"),
col2 = c("$\\dots$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col3 = c("", "$\\textbf{1:1}$","$\\text{RelA}^{[k],1:1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[k],1:1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[k],1:1}_{n,j}$"),
col4 = c("$\\textbf{k}$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col5 = c("", "$\\mathbf{1:h_k}$","$\\text{RelA}^{[k],1:h_k}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[k],1:h_k}_{i,j}$","$\\vdots$","$\\text{RelA}^{[k],1:h_k}_{n,j}$"),
col6 = c("$\\dots$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col7 = c("", "$\\textbf{1:1}$","$\\text{RelA}^{[1],1:1}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[1],1:1}_{i,j}$","$\\vdots$","$\\text{RelA}^{[1],1:1}_{n,j}$"),
col8 = c("$\\textbf{1}$","$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col9 = c("", "$\\mathbf{1:m}$","$\\text{RelA}^{[1],1:m}_{1,j}$","$\\vdots$",
"$\\text{RelA}^{[1],1:m}_{i,j}$","$\\vdots$","$\\text{RelA}^{[1],1:m}_{n,j}$"))
rownames(df) <- c("${\\cal K}$","$\\textbf{h}$","$\\textbf{1}$","$\\vdots$","$\\textbf{i}$","$\\vdots$","$\\textbf{n}$")
knitr::kable(df,align='ccccccccc',escape = F, col.names = rep("",9))
df <- cbind(col1 = c("$\\textbf{m}$","$\\text{AvgRelA}^{[m]}_{1,j}$","$\\vdots$",
"$\\text{AvgRelA}^{[m]}_{i,j}$","$\\vdots$","$\\text{AvgRelA}^{[m]}_{n,j}$"),
col2 = c("$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col3 = c("$\\textbf{k}$", "$\\text{AvgRelA}^{[k]}_{1,j}$","$\\vdots$",
"$\\text{AvgRelA}^{[k]}_{i,j}$","$\\vdots$","$\\text{AvgRelA}^{[k]}_{n,j}$"),
col4 = c("$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col5 = c("$\\textbf{1}$","$\\text{AvgRelA}^{[1]}_{1,j}$","$\\vdots$",
"$\\text{AvgRelA}^{[1]}_{i,j}$","$\\vdots$","$\\text{AvgRelA}^{[1]}_{n,j}$"),
col6 = c("$\\dots$","$\\dots$","","$\\dots$","","$\\dots$"),
col7 = c("$\\mathbf{all}$","$\\text{AvgRelA}_{1,j}$","$\\vdots$",
"$\\text{AvgRelA}_{i,j}$","$\\vdots$","$\\text{AvgRelA}_{n,j}$"))
rownames(df) <- c("","$\\textbf{1}$","$\\vdots$","$\\textbf{i}$","$\\vdots$","$\\textbf{n}$")
knitr::kable(df,align='ccccccc',escape = F, col.names = rep("",7))
df <- cbind(col1 = c("$\\textbf{m}$","$\\textbf{1}$","$\\text{AvgRelA}^{[m],1}_{j}$",
"$\\text{AvgRelA}^{[m],1}_{a,j}$","$\\text{AvgRelA}^{[m],1}_{b,j}$"),
col2 = c("$\\dots$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col3 = c("", "$\\textbf{1}$","$\\text{AvgRelA}^{[k],1}_{j}$",
"$\\text{AvgRelA}^{[k],1}_{a,j}$","$\\text{AvgRelA}^{[k],1}_{b,j}$"),
col4 = c("$\\textbf{k}$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col5 = c("", "$\\mathbf{h_k}$","$\\text{AvgRelA}^{[k],h_k}_{j}$",
"$\\text{AvgRelA}^{[k],h_k}_{a,j}$","$\\text{AvgRelA}^{[k],h_k}_{b,j}$"),
col6 = c("$\\dots$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col7 = c("", "$\\textbf{1}$","$\\text{AvgRelA}^{[1],1}_{j}$",
"$\\text{AvgRelA}^{[1],1}_{a,j}$","$\\text{AvgRelA}^{[1],1}_{b,j}$"),
col8 = c("$\\textbf{1}$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col9 = c("", "$\\mathbf{m}$","$\\text{AvgRelA}^{[1],m}_{j}$",
"$\\text{AvgRelA}^{[1],m}_{a,j}$","$\\text{AvgRelA}^{[1],m}_{b,j}$"))
rownames(df) <- c("${\\cal K}$","$\\textbf{h}$","$\\textbf{all}$","$\\textbf{a}$","$\\textbf{b}$")
knitr::kable(df,align='ccccccccc',escape = F, col.names = rep("",9))
df <- cbind(col1 = c("$\\textbf{m}$","$\\textbf{1:1}$","$\\text{AvgRelA}^{[m],1:1}_{j}$",
"$\\text{AvgRelA}^{[m],1:1}_{a,j}$","$\\text{AvgRelA}^{[m],1:1}_{b,j}$"),
col2 = c("$\\dots$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col3 = c("", "$\\textbf{1:1}$","$\\text{AvgRelA}^{[k],1:1}_{j}$",
"$\\text{AvgRelA}^{[k],1:1}_{a,j}$","$\\text{AvgRelA}^{[k],1:1}_{b,j}$"),
col4 = c("$\\textbf{k}$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col5 = c("", "$\\mathbf{1:h_k}$","$\\text{AvgRelA}^{[k],1:h_k}_{j}$",
"$\\text{AvgRelA}^{[k],1:h_k}_{a,j}$","$\\text{AvgRelA}^{[k],1:h_k}_{b,j}$"),
col6 = c("$\\dots$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col7 = c("", "$\\textbf{1:1}$","$\\text{AvgRelA}^{[1],1:1}_{j}$",
"$\\text{AvgRelA}^{[1],1:1}_{a,j}$","$\\text{AvgRelA}^{[1],1:1}_{b,j}$"),
col8 = c("$\\textbf{1}$","$\\dots$","$\\dots$","$\\dots$","$\\dots$"),
col9 = c("", "$\\mathbf{1:m}$","$\\text{AvgRelA}^{[1],1:m}_{j}$",
"$\\text{AvgRelA}^{[1],1:m}_{a,j}$","$\\text{AvgRelA}^{[1],1:m}_{b,j}$"))
rownames(df) <- c("${\\cal K}$","$\\textbf{h}$","$\\textbf{all}$","$\\textbf{a}$","$\\textbf{b}$")
knitr::kable(df,align='ccccccccc',escape = F, col.names = rep("",9))
|
rossberg.exp.test<-function(x)
{
DNAME <- deparse(substitute(x))
n<-length(x)
s<-0
sh<-0
sg<-0
for (m in 1:n)
{
h<-0
for (i in 1:(n-2))
for (j in (i+1):(n-1))
for (k in (j+1):n)
{
if ((x[i]+x[j]+x[k]-2*min(x[i],x[j],x[k])-max(x[i],x[j],x[k]))<x[m])
{
h=h+1
}
}
h=((6*factorial(n-3))/factorial(n))*h
sh=sh+h
}
for (m in 1:n)
{
g<-0
for (i in 1:(n-1))
for (j in (i+1):n)
{
if (min(x[i],x[j])<x[m])
{
g=g+1
}
}
g=((2*factorial(n-2))/factorial(n))*g
sg=sg+g
}
s=sh-sg
s<-s/n
v<-sqrt(n)*abs(s)/sqrt(52/1125)
p.value<-2*(1-pnorm(v))
RVAL<-list(statistic=c(Sn=s), p.value=p.value, method="Test for exponentiality based on Rossberg's characterization",data.name = DNAME)
class(RVAL)<-"htest"
return(RVAL)
}
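# The statistic involves a triple nested loop over all observations, so the test
# is only practical for small samples; an illustrative run on a small exponential
# sample:
set.seed(123)
rossberg.exp.test(rexp(15))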
|
are_equal_treelogs <- function(
treelog_1, treelog_2
) {
beautier::check_treelog(treelog_1)
beautier::check_treelog(treelog_2)
if (is.na(treelog_1$filename)) {
if (!is.na(treelog_2$filename)) return(FALSE)
} else {
if (treelog_1$filename != treelog_2$filename) return(FALSE)
}
treelog_1$log_every == treelog_2$log_every &&
treelog_1$mode == treelog_2$mode &&
treelog_1$sanitise_headers == treelog_2$sanitise_headers &&
treelog_1$sort == treelog_2$sort
}
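# Hedged usage sketch: assuming beautier::create_treelog() is the usual
# constructor for treelog objects, two default treelogs compare as equal.
if (requireNamespace("beautier", quietly = TRUE)) {
  are_equal_treelogs(beautier::create_treelog(), beautier::create_treelog())  # TRUE
}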
|
test_that("class testing on output",{
library(ape)
tree<-ape::rtree(20)
tree$tip.label<-sample(tree$tip.label[1:10],size=20,replace = TRUE)
sppVector<-tree$tip.label
expect_equal(length(monophyly.prop(tree,sppVector,singletonsMono=F)),4)
})
|
opwf <- function(fun_f_1,
parameterNames_s,
functionName_s_1 = NA_character_) {
qfa <- qualifyFunctionArguments(fun_f_1)
l <- length(parameterNames_s)
lfa <- length(qfa$arguments)
if (lfa != l)
abort('function owns', lfa, 'arguments, you provided', l, 'arguments')
if (l == 0) return(fun_f_1)
if (l > 0) {
rv <- sapply(seq_len(l), function(k) {
x <- wyz.code.offensiveProgramming::FunctionParameterName(parameterNames_s[k])
x$isSemanticName()
})
if (!all(rv)) abort('provided parameter names are not all semantic names',
strBracket(paste(parameterNames_s[!rv], collapse = ', ')))
}
ff <- qfa$arguments
names(ff) <- parameterNames_s
audit <- wyz.code.offensiveProgramming::isAuditable()
if (qfa$owns_ellipsis) {
substitution_names <- removeEllipsisName(parameterNames_s)
sfa <- removeEllipsisName(qfa$argument_names)
args <- qfa$arguments[-qfa$ellipsis_index]
} else {
substitution_names <- parameterNames_s
sfa <- qfa$argument_names
args <- qfa$arguments
}
fg <- codePatcher(args, sfa, substitution_names)
callParameters <- function() {
sapply(seq_len(l), function(k) {
if (is.symbol(qfa$arguments[[k]])) parameterNames_s[k] else {
paste(qfa$argument_names[k], '=', parameterNames_s[k])
}
})
}
f <- function() {}
formals(f) <- if (qfa$owns_ellipsis) append(fg, ff[getEllipsisName()], qfa$ellipsis_index - 1) else fg
dsf <- ifelse(is.na(functionName_s_1), deparse(substitute(fun_f_1)), functionName_s_1)
bd <- paste0('`', dsf, '`', '(', paste(callParameters(), collapse = ', '), ')')
if (audit) cat('>>> patching body with', bd, '\n')
e <- str2lang(bd)
if (audit) { cat('>>>result\n'); print(e) }
body(f) <- as.call(c(as.name('{'), e))
f
}
offensiveProgrammingWrapFunction <- opwf
|
chi3 <-
function(f3,digits=3){
nn <- dim(f3)
ni <- nn[1]
nj <- nn[2]
nk <- nn[3]
n <- sum(f3)
p3 <- f3/n
if(length(dim(f3)) != 3){
stop("f3 is not a 3 way table\n")
}
pi <- apply(p3, 1, sum)
pj <- apply(p3, 2, sum)
pk <- apply(p3, 3, sum)
pijk <- pi %o% pj %o% pk
khi3 <- n * (sum(p3^2/pijk) - 1)
pij <- apply(p3, c(1, 2), sum)
pik <- apply(p3, c(1, 3), sum)
pjk <- apply(p3, c(2, 3), sum)
khij <- n * (sum(pij^2/(pi %o% pj)) - 1)
khik <- n * (sum(pik^2/(pi %o% pk)) - 1)
khjk <- n * (sum(pjk^2/(pj %o% pk)) - 1)
khin3 <- khi3 - khij - khik - khjk
nom <- c("X2IJ", "X2IK", "X2JK", "X2INTER", "X2IJK")
x <- c(khij, khik, khjk, khin3, khi3)
y <- (100 * x)/khi3
dijk <- (ni - 1) * (nj - 1) * (nk - 1)
dij <- (ni - 1) * (nj - 1)
dik <- (ni - 1) * (nk - 1)
djk <- (nj - 1) * (nk - 1)
dtot<-dij+dik+djk+dijk
df <- c(dij, dik, djk, dijk, dtot)
pvalue= 1 - pchisq(x, df)
x2<-x/df
z <- rbind(x, y, df,pvalue,x2)
nomr <- c("Index", "% of Inertia", "df","p-value", "X2/df")
dimnames(z) <- list(nomr, nom)
z <- round(z, digits = digits)
list(z = z,pij=pij,pik=pik,pjk=pjk)
}
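# Worked example on a built-in 3-way contingency table (Hair x Eye x Sex): the
# total chi-square is decomposed into the three two-way associations and a
# three-way interaction term.
chi3(HairEyeColor)$z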
|
test.cumulated =
function()
{
tS = dummySeries(format = "counts")
cumulated(tS)
tS = dummySeries()
cumulated(tS)
}
|
context("Unit tests for datasets functionalities")
test_cases = expand.grid(
return_type = c(
"data.frame", "data.table",
"matrix", "DoubleMLData"),
polynomial_features = c(TRUE, FALSE),
instrument = c(TRUE, FALSE),
stringsAsFactors = FALSE)
test_cases[".test_name"] = apply(test_cases, 1, paste, collapse = "_")
testthat::skip_on_cran()
patrick::with_parameters_test_that("Unit tests for datasets functionalities:",
.cases = test_cases, {
n_obs = 100
if (return_type != "matrix") {
df = make_plr_CCDDHNR2018(return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_plr_CCDDHNR2018(return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
}
if (return_type != "matrix") {
df = make_pliv_CHS2015(n_obs, return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_pliv_CHS2015(n_obs, return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
expect_is(df$z, "matrix")
}
N = 10
M = 10
if (return_type == "DoubleMLData") {
df = make_pliv_multiway_cluster_CKMS2021(N, M,
return_type = "DoubleMLClusterData")
expect_is(df, "DoubleMLClusterData")
} else if (return_type != "matrix") {
df = make_pliv_multiway_cluster_CKMS2021(N, M, return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_pliv_multiway_cluster_CKMS2021(N, M, return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
expect_is(df$z, "matrix")
}
if (return_type != "matrix") {
df = make_irm_data(return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_irm_data(return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
}
if (return_type != "matrix") {
df = make_iivm_data(return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_iivm_data(return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
expect_is(df$z, "matrix")
}
if (return_type != "matrix") {
df = make_plr_turrell2018(return_type = return_type)
expect_is(df, paste0(return_type))
} else {
df = make_plr_turrell2018(return_type = return_type)
expect_is(df, "list")
expect_is(df$X, "matrix")
expect_is(df$y, "matrix")
expect_is(df$d, "matrix")
}
if (return_type != "matrix") {
df = fetch_401k(
return_type = return_type, polynomial_features = polynomial_features,
instrument = instrument)
expect_is(df, paste0(return_type))
}
if (return_type != "matrix") {
df = fetch_bonus(return_type = return_type, polynomial_features = polynomial_features)
expect_is(df, paste0(return_type))
}
}
)
|
st_make_bboxes = function(xmin, xmax, ymin, ymax, crs = sf::NA_crs_)
{
bboxes <- mapply(c, xmin, xmax, ymin, ymax, SIMPLIFY = FALSE)
bboxes <- lapply(bboxes, function(x) { names(x) = c("xmin", "xmax", "ymin", "ymax") ; x})
bboxes <- lapply(bboxes, sf::st_bbox)
return(bboxes)
}
st_expand_bbox <- function(bbox, size)
{
return(bbox + c(-size, -size, size, size))
}
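# Quick illustration (requires sf): build two bounding boxes from coordinate
# vectors, then pad the first one by 10 units on every side.
if (requireNamespace("sf", quietly = TRUE)) {
  bbs <- st_make_bboxes(xmin = c(0, 100), xmax = c(50, 150),
                        ymin = c(0, 100), ymax = c(50, 150))
  st_expand_bbox(bbs[[1]], 10)  # xmin/ymin decrease by 10, xmax/ymax increase by 10
}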
st_adjust_bbox <- function(x, res, start = c(0,0), buffer = 0)
{
bbox <- sf::st_bbox(x)
if (length(res) == 1) res <- c(res, res)
bbox[1] <- round_any(bbox$xmin - buffer - 0.5 * res[1] - start[1], res[1]) + start[1]
bbox[3] <- round_any(bbox$xmax + buffer - 0.5 * res[1] - start[1], res[1]) + res[1] + start[1]
bbox[2] <- round_any(bbox$ymin - buffer - 0.5 * res[2] - start[2], res[2]) + start[2]
bbox[4] <- round_any(bbox$ymax + buffer - 0.5 * res[2] - start[2], res[2]) + res[2] + start[2]
return(bbox)
}
st_crop_if_not_similar_bbox = function(source, las)
{
bbox1 <- st_bbox(las)
bbox2 <- sf::st_bbox(source)
width <- (bbox1[3] - bbox1[1])
height <- (bbox1[4] - bbox1[2])
bbox <- bbox1 + c(-width, -height, width, height)
  if (bbox2$xmin > bbox$xmin && bbox2$ymin > bbox$ymin && bbox2$xmax < bbox$xmax && bbox2$ymax < bbox$ymax)
{
return(source)
}
bbox <- st_bbox(las)
width <- (bbox[3] - bbox[1])*0.01 + las[["X scale factor"]]
height <- (bbox[4] - bbox[2])*0.01 + las[["Y scale factor"]]
bbox <- bbox + c(-width, -height, width, height)
sf::st_crs(bbox) <- sf::st_crs(source)
if (is(source, "sf")) sf::st_agr(source) <- "constant"
source <- sf::st_crop(source, bbox)
return(source)
}
st_proj_is_meters <- function(obj)
{
!is.na(sf::st_crs(obj)) & !sf::st_is_longlat(obj) & is.null(sf::st_crs(obj)$to_meter)
}
|
split_knockoffs.statistics.pathorder.W_fixed <-function(X, D, y, nu, option)
{
m <- nrow(D)
option$copy = 'true'
result <- split_knockoffs.create(X, y, D, nu, option)
A_beta <- result$A_beta
A_gamma <- result$A_gamma
tilde_y <- result$tilde_y
tilde_A_gamma <- result$tilde_A_gamma
beta_hat <- option$beta_choice
y_new <- tilde_y - A_beta %*% beta_hat
lambda_vec <- option$lambda
  if(!is.null(lambda_vec)){
    option$lambda <- lambda_vec
  }
fit_step1 <- glmnet(A_gamma, y_new, standardize = FALSE)
lambda_vec <- fit_step1$lambda
coef1 <- fit_step1$beta
r <- matrix(0, m, 1)
Z <- matrix(0, m, 1)
  for (i in 1: m) {
    hit <- private.hittingpoint(coef1[i, ], lambda_vec)
    Z[i] <- hit$Z
    r[i] <- hit$r
  }
fit_step2 = glmnet(tilde_A_gamma, y_new, lambda = lambda_vec)
coef2 <- fit_step2$beta
t_Z <- matrix(0,m, 1)
t_r <- matrix(0,m, 1)
  for (i in 1: m) {
    result <- private.hittingpoint(coef2[i, ], lambda_vec)
    t_r[i] <- result$r
    if(t_r[i] == r[i]){
      t_Z[i] = result$Z
    }
  }
  W <- pmax(Z, t_Z) * sign(Z - t_Z)
structure(list(call = match.call(),
W = W,
Z = Z,
t_Z = t_Z),
class = 'fixed_result')
}
split_knockoffs.statistics.pathorder.W_path <-function(X, D, y, nu, option)
{
m <- nrow(D)
p <- ncol(D)
creat.result <- split_knockoffs.create(X, y, D, nu, option)
A_beta <- creat.result$A_beta
A_gamma <- creat.result$A_gamma
tilde_y <- creat.result$tilde_y
tilde_A_gamma <- creat.result$tilde_A_gamma
lambda_vec <- option$lambda
nlambda <- length(lambda_vec)
penalty <- matrix(1,m+p,1)
for (i in 1: p) {
penalty[i, 1] = 0
}
fit_step0 = glmnet(cbind(A_beta,A_gamma) , tilde_y, lambda =lambda_vec, penalty.factor = penalty)
coefs <- fit_step0$beta
betas = coefs[1: p, ]
coef1 = matrix(0,m, 1)
for (i in 1: nlambda) {
y_new <- tilde_y - A_beta %*% betas[, i]
lambda = lambda_vec[i]
fit_step1 = glmnet(A_gamma, y_new, lambda =lambda)
coef <- fit_step1$beta
coef1 <- cbind(coef1,coef)
}
coef1 <- coef1[,-1]
r <- matrix(0,m,1)
Z <- matrix(0,m,1)
for (i in 1: m) {
hit <- private.hittingpoint(coef1[i, ], lambda_vec)
Z[i] <- hit$Z
r[i] <- hit$r
}
coef2 <- matrix(0,m,1)
for (i in 1: nlambda) {
y_new <- tilde_y - A_beta %*% betas[, i]
lambda = lambda_vec[i]
fit_step2 <- glmnet(tilde_A_gamma, y_new, lambda=lambda)
coef <- fit_step2$beta
coef2 <- cbind(coef2,coef)
}
coef2 <- coef2[,-1]
t_Z <- matrix(0,m, 1)
t_r <- matrix(0,m, 1)
for (i in 1: m) {
t_hit <- private.hittingpoint(coef2[i, ], lambda_vec)
t_r[i] <- t_hit$r
if(t_r[i] == r[i]){
t_Z[i] = t_hit$Z
}
}
W <- matrix(0,m, 1)
for (i in 1: m) {
W[i] <- max(Z[i], t_Z[i]) * sign(Z[i] - t_Z[i])
}
structure(list(call = match.call(),
W = W,
Z = Z,
t_Z = t_Z),
class = 'path_result')
}
split_knockoffs.statistics.sign.W_path <-function(X, D, y, nu, option)
{
m <- nrow(D)
p <- ncol(D)
creat.result <- split_knockoffs.create(X, y, D, nu, option)
A_beta <- creat.result$A_beta
A_gamma <- creat.result$A_gamma
tilde_y <- creat.result$tilde_y
tilde_A_gamma <- creat.result$tilde_A_gamma
lambda_vec <- option$lambda
nlambda <- length(lambda_vec)
penalty <- matrix(1,m+p,1)
for (i in 1: p) {
penalty[i, 1] = 0
}
fit_step0 = glmnet(cbind(A_beta,A_gamma) , tilde_y, lambda =lambda_vec, penalty.factor = penalty)
coefs <- fit_step0$beta
betas = coefs[1: p, ]
coef1 = coefs[(p+1):(p+m),]
r <- matrix(0,m,1)
Z <- matrix(0,m,1)
for (i in 1: m) {
hit <- private.hittingpoint(coef1[i, ], lambda_vec)
Z[i] <- hit$Z
r[i] <- hit$r
}
coef2 <- matrix(0,m,1)
for (i in 1: nlambda) {
y_new <- tilde_y - A_beta %*% betas[, i]
lambda = lambda_vec[i]
fit_step2 <- glmnet(tilde_A_gamma, y_new, lambda=lambda)
coef <- fit_step2$beta
coef2 <- cbind(coef2,coef)
}
coef2 <- coef2[,-1]
t_Z <- matrix(0,m, 1)
for (i in 1: m) {
t_hit <- private.hittingpoint(coef2[i, ], lambda_vec)
t_Z[i] <- t_hit$Z
}
W <- matrix(0,m, 1)
for (i in 1: m) {
W[i] <- Z[i] * sign(Z[i] - t_Z[i])
}
structure(list(call = match.call(),
W = W,
r = r,
Z = Z,
t_Z = t_Z),
class = 'path_result')
}
|
newformula <- function(old, change, eold, enew,
expandpoly=spatstat.options("expand.polynom")) {
old <- if(is.null(old)) ~1 else eval(old, eold)
change <- if(is.null(change)) ~1 else eval(change, enew)
old <- as.formula(old, env=eold)
change <- as.formula(change, env=enew)
if(expandpoly) {
old <- expand.polynom(old)
change <- expand.polynom(change)
}
answer <- update.formula(old, change)
return(answer)
}
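# Illustrative usage (hypothetical formulas, commented out; with expandpoly = FALSE
# only update.formula() is involved):
# newformula(y ~ x, ~ . + z, eold = globalenv(), enew = globalenv(), expandpoly = FALSE)
# # => y ~ x + z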
|
setMethod("getInfRobIC", signature(L2deriv = "UnivariateDistribution",
risk = "asAnscombe",
neighbor = "UncondNeighborhood"),
function(L2deriv, risk, neighbor, symm, Finfo, trafo,
upper = NULL, lower = NULL, maxiter, tol, warn, noLow = FALSE,
verbose = NULL, checkBounds = TRUE, ...){
if(missing(warn)|| is.null(warn)) warn <- FALSE
if(missing(verbose)|| is.null(verbose))
verbose <- getRobAStBaseOption("all.verbose")
eff <- eff(risk)
i <- 0
maxi <- min(5,maxiter%/%4)
toli <- min(tol*100,1e-3)
FI0 <- trafo%*%matrix(1/Finfo)%*%t(trafo)
normtype <- normtype(risk)
std <- if(is(normtype(risk),"QFNorm"))
QuadForm(normtype(risk)) else diag(nrow(trafo))
FI <- sum(diag(std%*%FI0))
lowBerg <- minmaxBias(L2deriv = L2deriv, neighbor = neighbor,
biastype = biastype(risk), symm = symm,
trafo = trafo, maxiter = maxi,
tol = toli, warn = warn, Finfo = Finfo)
V <- lowBerg$risk$asCov
trV <- sum(diag(std%*%V))
if(FI/trV >eff){
lowBerg$eff <- FI/trV
return(lowBerg)
}
it.erg <- 0
erg <- 0
if(is.null(lower) || lower < lowBerg$risk$asBias$value)
{ lower <- lowBerg$risk$asBias$value
f.low <- FI/trV-eff
} else f.low <- NULL
if(is.null(upper))
upper <- max(4*lower,q.l(L2deriv)(eff^.5)*3)
e.up <- 0
while(e.up < eff){
risk.b <- asHampel(bound = upper, biastype = biastype(risk),
normtype = normtype(risk))
upBerg <- getInfRobIC(L2deriv, risk.b, neighbor, symm, Finfo, trafo,
upper = 3*upper, lower = lower, maxiter = maxi,
tol = toli, warn = warn, noLow = noLow,
verbose = FALSE, checkBounds = FALSE)
trV <- upBerg$risk$trAsCov$value
if(!is.na(trV)) e.up <- FI/trV
upper <- upper * 3
}
upper <- upper / 3
funb <- function(b0){
risk.b <- asHampel(bound = b0, biastype = biastype(risk),
normtype = normtype(risk))
it.erg <<- it.erg + 1
maxi <- min(5,maxiter%/%4^(1/it.erg))
toli <- min(tol*100^(1/it.erg),1e-3)
checkBounds <- checkBounds & it.erg>10
erg <<- getInfRobIC(L2deriv, risk.b, neighbor, symm, Finfo, trafo,
upper = upper, lower = lower, maxiter = maxi, tol = toli,
warn = warn, noLow = noLow,
verbose = verbose, checkBounds = checkBounds)
trV <- erg$risk$trAsCov$value
if(verbose) cat("Outer iteration:", it.erg," b_0=", round(b0,3),
" eff=", round(FI/trV,3), "\n")
return(FI/trV-eff)
}
if(is.null(f.low)) f.low <- funb(lower)
if(verbose) print(c(lower,upper, f.lower=f.low, f.upper=e.up-eff))
b <- uniroot(funb, interval=c(lower,upper), f.lower=f.low,
f.upper=e.up-eff,tol=tol,maxiter=maxiter)
erg$info <- c(erg$info,
paste("optimally bias-robust IC for ARE", eff, " in the ideal model" ,collapse="", sep=" "))
erg$risk$eff <- b$f.root+eff
return(erg)
})
setMethod("getInfRobIC", signature(L2deriv = "RealRandVariable",
risk = "asAnscombe",
neighbor = "UncondNeighborhood"),
function(L2deriv, risk, neighbor, Distr, DistrSymm, L2derivSymm,
L2derivDistrSymm, Finfo, trafo, onesetLM = FALSE,
z.start, A.start, upper = NULL, lower = NULL,
OptOrIter = "iterate", maxiter, tol, warn,
verbose = NULL, checkBounds = TRUE, ...){
dotsI <- .filterEargsWEargList(list(...))
if(is.null(dotsI$useApply)) dotsI$useApply <- FALSE
if(missing(verbose)|| is.null(verbose))
verbose <- getRobAStBaseOption("all.verbose")
mc <- match.call()
eff <- eff(risk)
biastype <- biastype(risk)
normtype <- normtype(risk)
p <- nrow(trafo)
k <- ncol(trafo)
maxi <- min(5,maxiter%/%4)
toli <- min(tol*100,1e-3)
std <- if(is(normtype,"QFNorm")) QuadForm(normtype) else diag(p)
if(! is(neighbor,"ContNeighborhood") && p>1)
stop("Not yet implemented")
FI1 <- trafo%*%distr::solve(Finfo)
FI0 <- FI1%*%t(trafo)
FI <- distr::solve(FI0)
if(is(normtype,"InfoNorm") || is(normtype,"SelfNorm") ){
QuadForm(normtype) <- PosSemDefSymmMatrix(FI)
normtype(risk) <- normtype
}
std <- if(is(normtype,"QFNorm"))
QuadForm(normtype) else diag(p)
trV.ML <- sum(diag(std%*%FI0))
if(is.null(upper))
upper <- sqrt(eff*max(diag(std%*%FI0)))*3
lowBerg <- .getLowerSol(L2deriv = L2deriv, risk = risk,
neighbor = neighbor, Distr = Distr,
DistrSymm = DistrSymm,
L2derivSymm = L2derivSymm,
L2derivDistrSymm = L2derivDistrSymm,
z.start = z.start, A.start = A.start,
trafo = trafo, maxiter = maxiter,
tol = tol,
warn = FALSE, Finfo = Finfo,
QuadForm = std, verbose = verbose,...)
if(is.null(lower)||(lower< lowBerg$b))
{lower <- lowBerg$b
f.low <- lowBerg$risk$asAnscombe - eff
} else {
risk.b <- asHampel(bound = lower, biastype = biastype,
normtype = normtype)
lowBerg <- getInfRobIC(L2deriv, risk.b, neighbor,
Distr, DistrSymm, L2derivSymm,
L2derivDistrSymm, Finfo, trafo, onesetLM = onesetLM,
z.start, A.start, upper = upper, lower = lower,
OptOrIter = OptOrIter, maxiter=maxi,
tol=toli, warn = warn,
verbose = FALSE, checkBounds = FALSE, ...)
trV <- lowBerg$risk$trAsCov$value
f.low <- trV.ML/trV -eff
}
if(f.low > 0){
lowBerg$call <- mc
lowBerg$eff <- f.low + eff
return(lowBerg)
}
e.up <- 0
if(lower>=upper) upper <- lower*3
while(e.up < eff){
risk.b <- asHampel(bound = upper, biastype = biastype,
normtype = normtype)
upBerg <- getInfRobIC(L2deriv, risk.b, neighbor,
Distr, DistrSymm, L2derivSymm,
L2derivDistrSymm, Finfo, trafo, onesetLM = onesetLM,
z.start, A.start, upper = upper, lower = lower,
OptOrIter = OptOrIter, maxiter=maxi,
tol=toli, warn = warn,
verbose = FALSE, checkBounds = FALSE, ...)
trV <- upBerg$risk$trAsCov$value
e.up <- trV.ML/trV
upper <- upper * 3
}
upper <- upper / 3
erg <- 0
it.erg <- 0
funb <- function(b0){
risk.b <- asHampel(bound = b0, biastype = biastype(risk),
normtype = normtype(risk))
it.erg <<- it.erg + 1
maxi <- min(5,maxiter%/%4^(1/it.erg))
toli <- min(tol*100^(1/it.erg),1e-3)
chkbd <- if(it.erg<25) FALSE else checkBounds
verbL <- if(it.erg<25) FALSE else verbose
erg <<- do.call(getInfRobIC, c(list(L2deriv, risk.b, neighbor,
Distr, DistrSymm, L2derivSymm,
L2derivDistrSymm, Finfo, trafo, onesetLM = onesetLM,
z.start, A.start, upper = upper, lower = lower,
OptOrIter = OptOrIter, maxiter = maxi, tol = toli , warn = warn,
verbose = verbL, checkBounds = chkbd), dotsI))
trV <- erg$risk$trAsCov$value
if(verbose) cat("Outer iteration:", it.erg," b_0=", round(b0,3),
" eff=", round(trV.ML/trV,3), "\n")
return(trV.ML/trV-eff)
}
if(verbose) print(c(lower,upper, f.lower=f.low, f.upper=e.up-eff))
b <- uniroot(funb, interval=c(lower,upper), f.lower=f.low,
f.upper=e.up-eff,tol=tol,maxiter=maxiter)
erg$info <- c(erg$info,
paste("optimally bias-robust IC for ARE", eff, " in the ideal model",
collapse="", sep=" "))
erg$risk$eff <- b$f.root+eff
erg$call <- mc
return(erg)
}
)
|
date_check <- function(start_date, end_date) {
if (missing(start_date) || missing(end_date)) {
stop("Values for `start_date` and `end_date` parameters must be included",
call. = FALSE
)
}
if (as.numeric(as.Date(end_date) - as.Date(start_date)) > 365) {
stop("Only 365 days of data can be returned at one time",
call. = FALSE
)
}
date_query <- paste0("&start_date=", as.Date(start_date),
"&end_date=", as.Date(end_date))
date_query
}
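# Illustrative usage (hypothetical dates, commented out):
# date_check("2020-01-01", "2020-06-30")
# # => "&start_date=2020-01-01&end_date=2020-06-30"
# date_check("2019-01-01", "2020-06-30")
# # => error, more than 365 days requested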
|
points2class <- function(points, img.class, x.microns, y.microns, z.microns,
                         mask = array(TRUE, dim(img.class)))
{
  X <- dim(img.class)[1]
  Y <- dim(img.class)[2]
  Z <- dim(img.class)[3]
  color <- data.frame(x = 1 + floor(X * points$X / x.microns),
                      y = 1 + floor(Y * points$Y / y.microns),
                      z = 1 + floor(Z * points$Z / z.microns))
  class <- rep(NA, nrow(color))
  for (i in 1:nrow(color)) {
    if (mask[color[i, 1], color[i, 2], color[i, 3]] == 1) {
      class[i] <- img.class[color[i, 1], color[i, 2], color[i, 3]]
    }
  }
  class <- class[!is.na(class)]
  return(table(class))
}
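# Illustrative usage on a small synthetic stack (hypothetical data, commented out):
# set.seed(1)
# img <- array(sample(1:3, 20 * 20 * 5, replace = TRUE), dim = c(20, 20, 5))
# pts <- data.frame(X = runif(10, 0, 10), Y = runif(10, 0, 10), Z = runif(10, 0, 2))
# points2class(pts, img, x.microns = 10, y.microns = 10, z.microns = 2)
# # => table of how many points fall in each class label of img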
|
suppressMessages(library(rENA, quietly = T, verbose = F))
context("Test making sets");
data(RS.data)
codenames <- c("Data", "Technical.Constraints", "Performance.Parameters",
"Client.and.Consultant.Requests", "Design.Reasoning", "Collaboration");
test_that("Simple data.frame to accumulate and make set", {
accum <- ena.accumulate.data.file(
RS.data, units.by = c("UserName", "Condition"),
conversations.by = c("ActivityNumber", "GroupName"),
codes = codenames
);
set <- ena.make.set(accum)
testthat::expect_equal(
label = "Used 6 codes",
object = length(set$rotation$codes),
expected = 6
);
testthat::expect_equal(
label = "48 units with all dimensions",
object = dim(as.matrix(set$points)),
expected = c(48,choose(length(codenames),2))
);
testthat::expect_equal(
label = "Has all 48 units",
object = length(set$model$unit.labels),
expected = 48
);
})
test_that("Disable sphere norm", {
accum <- ena.accumulate.data.file(
RS.data, units.by = c("UserName", "Condition"),
conversations.by = c("ActivityNumber", "GroupName"),
codes = codenames
);
set <- ena.make.set(accum, norm.by = fun_skip_sphere_norm)
set_normed <- ena.make.set(accum, norm.by = fun_sphere_norm)
proj <- as.vector(as.matrix(set$model$points.for.projection)[1,])
proj_normed <- as.vector(as.matrix(set_normed$model$points.for.projection)[1,])
testthat::expect_false(all(proj == proj_normed))
})
test_that("Test custom rotation.set", {
df.file <- RS.data
conversations.by <- c("Condition", "ActivityNumber", "GroupName")
df_accum_grps <- rENA:::ena.accumulate.data.file(
df.file, units.by = c("GroupName", "Condition"),
conversations.by = conversations.by, codes = codenames);
df_accum_usrs <- rENA:::ena.accumulate.data.file(
df.file, units.by = c("UserName", "Condition"),
conversations.by = conversations.by, codes = codenames);
df_set_grps <- ena.make.set(df_accum_grps)
df_set_usrs <- ena.make.set(df_accum_usrs)
df_set_grps_usrs <- ena.make.set(
df_accum_grps, rotation.set = df_set_usrs$rotation)
expect_true(all(
df_set_grps_usrs$rotation.matrix == df_set_usrs$rotation.matrix
))
expect_false(all(
df_set_grps_usrs$rotation.matrix == df_set_grps$rotation.matrix
))
expect_equal(df_set_usrs$rotation$nodes, df_set_grps_usrs$rotation$nodes)
expect_equal(df_set_grps$line.weights, df_set_grps_usrs$line.weights)
expect_equal(df_set_usrs$rotation$center.vec, df_set_grps_usrs$rotation$center.vec)
testthat::expect_error(
df_set_bogus <- ena.make.set(df_accum_grps, rotation.set = list()),
regexp = "rotation.set is not an instance"
)
})
test_that("Test rotate by mean", {
codenames <- c("Data", "Technical.Constraints", "Performance.Parameters",
"Client.and.Consultant.Requests", "Design.Reasoning", "Collaboration");
data(RS.data)
df.file <- RS.data
conversations.by <- c("Condition", "ActivityNumber", "GroupName")
df_accum_usrs <- rENA:::ena.accumulate.data.file(
df.file, units.by = c("UserName", "Condition"),
conversations.by = conversations.by, codes = codenames);
set.svd <- ena.make.set(df_accum_usrs)
set.mr <- ena.make.set(df_accum_usrs,
rotation.by = ena.rotate.by.mean,
rotation.params = list(
df_accum_usrs$meta.data$Condition == "FirstGame",
df_accum_usrs$meta.data$Condition == "SecondGame"
)
);
expect_equal(ncol(set.svd$rotation.matrix), ncol(set.mr$rotation.matrix))
expect_equal(
colnames(as.matrix(set.svd$rotation.matrix)),
colnames(as.matrix(set.svd$points))
)
expect_equal(
colnames(as.matrix(set.mr$rotation.matrix)), colnames(as.matrix(set.mr$points))
)
expect_equal("MR1", colnames(set.mr$rotation.matrix)[2])
expect_equal("SVD1", colnames(set.svd$rotation.matrix)[2])
})
test_that("Test rotation with table for weights", {
codenames <- c("Data", "Technical.Constraints", "Performance.Parameters",
"Client.and.Consultant.Requests", "Design.Reasoning", "Collaboration");
data(RS.data)
df.file <- RS.data
conversations.by <- c("Condition", "ActivityNumber", "GroupName")
df_accum_usrs <- rENA:::ena.accumulate.data.file(
df.file, units.by = c("UserName", "Condition"),
conversations.by = conversations.by, codes = codenames);
testthat::expect_error(set.mr <- ena.make.set(df_accum_usrs,
rotation.by = ena.rotate.by.mean,
rotation.params = list()
))
rotate.grps <- list(
df_accum_usrs$meta.data$Condition == "FirstGame",
df_accum_usrs$meta.data$Condition == "SecondGame"
)
set.svd <- ena.make.set(df_accum_usrs)
set.svd$line.weights <- remove_meta_data(set.svd$line.weights)
testthat::expect_message(
rENA:::orthogonal_svd(
set.svd$line.weights,
matrix(0, nrow = ncol(set.svd$line.weights), ncol = 2)
),
regexp = "converting data to matrix"
)
})
test_that("Test bad position method", {
codenames <- c("Data", "Technical.Constraints", "Performance.Parameters",
"Client.and.Consultant.Requests", "Design.Reasoning", "Collaboration");
data(RS.data)
df.file <- RS.data
conversations.by <- c("Condition", "ActivityNumber", "GroupName")
acc <- rENA:::ena.accumulate.data.file(
df.file, units.by = c("UserName", "Condition"),
conversations.by = conversations.by, codes = codenames);
bad_positions <- function(enaset, groups) {
return(list(bad = 1))
}
testthat::expect_error(
ena.make.set(acc, node.position.method = bad_positions),
regexp = "position method didn't return back the expected objects"
)
custom_rotation <- structure(list(), class = "ena.rotation.set")
testthat::expect_error(
ena.make.set(acc, rotation.by = NULL, rotation.set = custom_rotation),
regexp = "does not have a center vector"
)
custom_rotation$center.vec <- runif(choose(length(codenames), 2))
testthat::expect_error(
ena.make.set(acc, rotation.by = NULL, rotation.set = custom_rotation),
regexp = "no rotation matrix"
)
custom_rotation$rotation.matrix <- matrix(1,
ncol = ncol(acc$rotation$adjacency.key),
nrow = ncol(acc$rotation$adjacency.key)
)
testthat::expect_error(
ena.make.set(acc, rotation.by = NULL, rotation.set = custom_rotation),
regexp = "Unable to determine the node positions"
)
testthat::expect_error(
ena.make.set(acc, rotation.by = NULL, rotation.set = NULL),
regexp = "Unable to find or create a rotation set"
)
})
|
library(readxl)
suppressPackageStartupMessages(library(dplyr))
library(ggplot2)
library(readr)
le_xls <-
read_excel("xls/life-expectancy-reference-spreadsheet-20090204-xls-format.xls",
sheet = "Data and metadata")
le_xls %>% str()
le_raw <- le_xls %>%
select(country = contains("country"), continent = contains("continent"),
year = contains("year"), lifeExp = contains("expectancy"))
le_raw %>% str()
le_raw %>% head()
le_raw %>% tail()
n_distinct(le_raw$year)
unique(le_raw$year)
all(le_raw$year %in% 1800:2007)
le_raw <- le_raw %>%
mutate(year = year %>% as.integer())
le_raw$year %>% summary()
le_raw$lifeExp %>% head(100)
sum(is.na(le_raw$lifeExp))
le_raw <- le_raw %>%
filter(!is.na(lifeExp))
str(le_raw)
le_raw$lifeExp %>% summary()
n_distinct(le_raw$continent)
unique(le_raw$continent)
(empty_continent <- le_raw %>%
filter(is.na(continent)) %>%
select(country) %>%
unique())
str(empty_continent)
(fsu_continent <- le_raw %>%
filter(continent == "FSU") %>%
select(country) %>%
unique())
n_distinct(le_raw$country)
unique(le_raw$country)
n_distinct(le_raw$year)
(p <- ggplot(le_raw, aes(x = year)) + geom_histogram(binwidth = 1))
p + xlim(c(1945, 2010))
p + xlim(c(1950, 1960))
p + xlim(c(2000, 2010))
year_min <- 1950
year_max <- 2007
le_raw <- le_raw %>%
filter(year %>% between(year_min, year_max))
le_raw %>% str()
le_raw <- le_raw %>%
select(country, continent, year, lifeExp)
write_tsv(le_raw, "02_lifeExp.tsv")
devtools::session_info()
|
context("normalise_node_positions")
pc <- matrix(1, 10, 148, dimnames = list(c(paste0("r",1:5),paste0("c",1:5)),paste0("np",1:148)))
test_that("'none' doesn't change anything",{
nnp <- normalise_node_positions(pc = pc, type = "none", six_node = TRUE)
expect_identical(pc, nnp)
})
test_that("'sum' works correctly when pc is all 1s",{
nnp <- normalise_node_positions(pc = pc, type = "sum", six_node = TRUE)
rsnpp <- sapply(rowSums(nnp), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
})
test_that("'position' works correctly when pc is all 1s",{
nnp <- normalise_node_positions(pc = pc, type = "position", six_node = TRUE)
csnpp <- sapply(colSums(nnp), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(csnpp), TRUE)
})
test_that("'sizeclass' works correctly when pc is all 1s",{
nnp <- normalise_node_positions(pc = pc, type = "sizeclass", six_node = TRUE)
rsnpp <- sapply(rowSums(nnp[,1:2]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,3:6]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,7:16]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,17:46]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,47:148]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
})
test_that("'levelsize' works correctly when pc is all 1s",{
nnp <- normalise_node_positions(pc = pc, type = "levelsize", six_node = TRUE)
rsnpp <- sapply(rowSums(nnp[,1:2]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,3:4]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,5:6]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,7:8]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,9:14]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,15:16]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,17:18]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,19:31]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,32:44]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,45:46]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,47:48]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,49:70]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,71:124]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,125:146]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
rsnpp <- sapply(rowSums(nnp[,147:148]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
})
test_that("'motif' works correctly when pc is all 1s",{
nnp <- normalise_node_positions(pc = pc, type = "motif", six_node = TRUE)
mps <- lapply(1:44, function(x) motif_info(x, link = FALSE))
for(i in mps){
rsnpp <- sapply(rowSums(nnp[,i]), function(x) all.equal(x, 1, tolerance = sqrt(.Machine$double.eps)))
expect_identical(all(rsnpp), TRUE)
}
})
test_that("'sizeclass_NAzero' replaces all NAs/NaNs with zero",{
for(i in 1:10){
m <- motifs[[sample(17,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "sizeclass", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "sizeclass_NAzero", six_node = TRUE)
expect_identical(all(np_NAzero[is.na(np_normal)] == 0), TRUE)
}
})
test_that("'levelsize_NAzero' replaces all NAs/NaNs with zero",{
for(i in 1:10){
m <- motifs[[sample(44,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "levelsize", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "levelsize_NAzero", six_node = TRUE)
expect_identical(all(np_NAzero[is.na(np_normal)] == 0), TRUE)
}
})
test_that("'motif_NAzero' replaces all NAs/NaNs with zero",{
for(i in 1:10){
m <- motifs[[sample(44,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "motif", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "motif_NAzero", six_node = TRUE)
expect_identical(all(np_NAzero[is.na(np_normal)] == 0), TRUE)
}
})
test_that("'sizeclass_plus1' removes all NAs/NaNs",{
for(i in 1:10){
m <- motifs[[sample(17,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "sizeclass", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "sizeclass_plus1", six_node = TRUE)
expect_identical(all(!is.na(np_NAzero[is.na(np_normal)])), TRUE)
}
})
test_that("'levelsize_plus1' removes all NAs/NaNs",{
for(i in 1:10){
m <- motifs[[sample(44,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "levelsize", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "levelsize_plus1", six_node = TRUE)
expect_identical(all(!is.na(np_NAzero[is.na(np_normal)])), TRUE)
}
})
test_that("'motif_plus1' removes all NAs/NaNs",{
for(i in 1:10){
m <- motifs[[sample(44,size = 1)]]
np <- node_positions(M = m, six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none", normalisation = "none")
np_normal <- normalise_node_positions(np, type = "motif", six_node = TRUE)
np_NAzero <- normalise_node_positions(np, type = "motif_plus1", six_node = TRUE)
expect_identical(all(!is.na(np_NAzero[is.na(np_normal)])), TRUE)
}
})
test_that("Check that we never have NaNs, sum-normalisation", {
M <- matrix(1,1,1)
npc <- node_positions(M, normalisation = "none", six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none")
np <- normalise_node_positions(npc, type = "sum", six_node = TRUE)
l <- lapply(np, is.nan)
for (item in l) {
expect_true(!any(item))
}
})
test_that("Check that we never have NaNs, sizeclass-normalisation", {
M <- matrix(1,1,1)
npc <- node_positions(M, normalisation = "none", six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none")
np <- normalise_node_positions(npc, type = "sizeclass", six_node = TRUE)
l <- lapply(np, is.nan)
for (item in l) {
expect_true(!any(item))
}
})
test_that("Check that we never have NaNs, levelsize-normalisation", {
M <- matrix(1,1,1)
npc <- node_positions(M, normalisation = "none", six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none")
np <- normalise_node_positions(npc, type = "levelsize", six_node = TRUE)
l <- lapply(np, is.nan)
for (item in l) {
expect_true(!any(item))
}
})
test_that("Check that we never have NaNs, motif-normalisation", {
M <- matrix(1,1,1)
npc <- node_positions(M, normalisation = "none", six_node = TRUE, level = "all", weights_method = "none", weights_combine = "none")
np <- normalise_node_positions(npc, type = "motif", six_node = TRUE)
l <- lapply(np, is.nan)
for (item in l) {
expect_true(!any(item))
}
})
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
library(GGMncv)
set.seed(1)
main <- gen_net(p = 10)
y1 <- MASS::mvrnorm(n = 500,
mu = rep(0, 10),
Sigma = main$cors)
y2 <- MASS::mvrnorm(n = 500,
mu = rep(0, 10),
Sigma = main$cors)
Correlation <- function(x, y){
cor(x[upper.tri(x)], y[upper.tri(x)])
}
compare_ggms <- nct(y1, y2,
FUN = Correlation,
progress = FALSE)
compare_ggms
hist(compare_ggms$Correlation_perm,
main = "null dist: correlation")
abline(v = compare_ggms$Correlation_obs)
1 - mean(compare_ggms$Correlation_perm > compare_ggms$Correlation_obs)
r2 <- function(x, y){
diag(x) <- 1
diag(y) <- 1
inv1 <- solve(corpcor::pcor2cor(x))
beta1 <- -(inv1[1,-1] / inv1[1,1])
r21 <- cor(y1[,1], y1[,-1] %*% beta1)^2
inv2 <- solve(corpcor::pcor2cor(y))
beta2 <- -(inv2[1,-1] / inv2[1,1])
r22 <- cor(y2[,1], y2[,-1] %*% beta2)^2
return(as.numeric(r21 - r22))
}
compare_ggms <- nct(y1, y2,
progress = FALSE,
FUN = r2)
hist(compare_ggms$r2_perm,
main = "null dist: R2 Difference")
abline(v = compare_ggms$r2_obs)
|
mfreconstruct <- function(U, groups = as.list(1L:10L)) {
N <- U$N
Y <- U$Y
p <- length(U$Y@C)
L <- U$L
K <- N - L + 1
basis <- list()
m <- length(groups)
for (j in 1:p) {
basis[[j]] <- U$Y@B[[j]]
}
recon_out <- list()
new_grid <- list()
for (i in 1L:m) {
recon_out_j <- list()
C <- list()
S <- list()
for (j in 1:p) {
d <- ncol(U$Y@B[[j]])
C[[j]] <- matrix(NA, nrow = d, ncol = N)
S[[j]] <- 0L
}
g <- groups[[i]]
for (k in 1L:length(g)) {
projection <- mfproj(U, g[k])
for (j in 1:p) {
S[[j]] <- S[[j]] + projection[[j]]
}
}
for (j in 1:p) {
d <- ncol(Y@B[[j]])
S[[j]] <- fH(S[[j]], d)
C_jx <- C[[j]]
S_jx <- S[[j]]
C_jx[, 1L:L] <- S_jx[, 1L, ]
C_jx[, L:N] <- S_jx[, , L]
recon_out_j[[j]] <- basis[[j]] %*% C_jx
if (ncol(Y@grid[[j]]) == 2) {
x <- unique(Y@grid[[j]][, 1])
y <- unique(Y@grid[[j]][, 2])
recon_two_d <- array(data = NA, dim = c(length(x), length(y), N))
for (n in 1:N) {
count <- 1
for (i_1 in 1:length(x)) {
for (i_2 in 1:length(y)) {
recon_two_d[i_1, i_2, n] <- recon_out_j[[j]][count, n]
count <- count + 1
}
}
}
recon_out_j[[j]] <- recon_two_d
new_grid[[j]] <- list(x, y)
} else {
new_grid[[j]] <- Y@grid[[j]]
}
}
recon_out[[i]] <- Rfssa::fts(X = recon_out_j, B = basis, grid = new_grid)
}
recon_out$values <- U$values
return(recon_out)
}
|
effective_sample <- function(model, ...) {
UseMethod("effective_sample")
}
effective_sample.brmsfit <- function(model, effects = c("fixed", "random", "all"), component = c("conditional", "zi", "zero_inflated", "all"), parameters = NULL, ...) {
effects <- match.arg(effects)
component <- match.arg(component)
pars <- insight::get_parameters(
model,
effects = effects,
component = component,
parameters = parameters
)
insight::check_if_installed("rstan")
s <- rstan::summary(model$fit)$summary
s <- subset(s, subset = rownames(s) %in% colnames(pars))
data.frame(
Parameter = rownames(s),
ESS = round(s[, "n_eff"]),
stringsAsFactors = FALSE,
row.names = NULL
)
}
effective_sample.stanreg <- function(model, effects = c("fixed", "random", "all"), component = c("location", "all", "conditional", "smooth_terms", "sigma", "distributional", "auxiliary"), parameters = NULL, ...) {
effects <- match.arg(effects)
component <- match.arg(component)
pars <-
insight::get_parameters(
model,
effects = effects,
component = component,
parameters = parameters
)
s <- as.data.frame(summary(model))
s <- s[rownames(s) %in% colnames(pars), ]
data.frame(
Parameter = rownames(s),
ESS = s[["n_eff"]],
stringsAsFactors = FALSE,
row.names = NULL
)
}
effective_sample.stanfit <- function(model, effects = c("fixed", "random", "all"), parameters = NULL, ...) {
effects <- match.arg(effects)
pars <-
insight::get_parameters(
model,
effects = effects,
parameters = parameters
)
insight::check_if_installed("rstan")
s <- as.data.frame(rstan::summary(model)$summary)
s <- s[rownames(s) %in% colnames(pars), ]
data.frame(
Parameter = rownames(s),
ESS = s[["n_eff"]],
stringsAsFactors = FALSE,
row.names = NULL
)
}
effective_sample.blavaan <- function(model, parameters = NULL, ...) {
insight::check_if_installed("blavaan")
ESS <- blavaan::blavInspect(model, what = "neff")
data.frame(
Parameter = colnames(insight::get_parameters(model)),
ESS = ESS,
stringsAsFactors = FALSE,
row.names = NULL
)
}
effective_sample.MCMCglmm <- function(model, effects = c("fixed", "random", "all"), parameters = NULL, ...) {
effects <- match.arg(effects)
pars <-
insight::get_parameters(
model,
effects = effects,
parameters = parameters,
summary = TRUE
)
s.fixed <- as.data.frame(summary(model)$solutions)
s.random <- as.data.frame(summary(model)$Gcovariances)
es <- data.frame(
Parameter = rownames(s.fixed),
ESS = round(s.fixed[["eff.samp"]]),
stringsAsFactors = FALSE,
row.names = NULL
)
if (nrow(s.random) > 0) {
es <- rbind(es, data.frame(
Parameter = rownames(s.random),
ESS = round(s.random[["eff.samp"]]),
stringsAsFactors = FALSE,
row.names = NULL
))
}
es[match(pars[[1]], es$Parameter), ]
}
|
.onAttach <- function(...) {
packageStartupMessage("Package version: ", as.character(utils::packageVersion("quanteda")), "\n",
"Unicode version: ", stringi::stri_info()[["Unicode.version"]], "\n",
"ICU version: ", stringi::stri_info()[["ICU.version"]])
quanteda_options(initialize = TRUE)
if (qatd_cpp_tbb_enabled()) {
packageStartupMessage("Parallel computing: ", quanteda_options("threads"), " of ",
RcppParallel::defaultNumThreads(), " threads used.")
} else {
packageStartupMessage("Parallel computing: disabled")
}
packageStartupMessage("See https://quanteda.io for tutorials and examples.")
}
.onUnload <- function (libpath) {
library.dynam.unload("quanteda", libpath)
}
|
my_params <- function(x) {
params <- list(
xfull = "x object of class \\code{sf} or \\code{sfc}",
x = "x object of class \\code{sf}",
var = "var name(s) of the variable(s) to plot",
vars = "var names of the variables to plot",
bg = "bg background color",
fg = "fg foreground color",
border = "border border color",
lwd = "lwd border width",
inches = paste0(
"inches size of the biggest symbol (radius for circles,",
" half width for squares) in inches."
),
lwd_max = "lwd_max line width of the largest line",
symbol = "symbol type of symbols, 'circle' or 'square'",
col = "col color",
leg_pos = paste0(
"leg_pos position of the legend, one of 'topleft', 'top',",
"'topright', 'right', 'bottomright', 'bottom', ",
"'bottomleft', 'left' or a vector of two coordinates ",
"in map units (c(x, y)). If leg_pos = NA then the ",
"legend is not plotted."
),
leg_pos2 = paste0(
"leg_pos position of the legend, two of 'topleft', 'top','topright', 'right', ",
"'bottomright', 'bottom', 'bottomleft', 'left' or vector of two ",
"coordinates in map units (c(x, y)). leg_pos argument can be ",
"c('position', 'position'), c('position', x2, y2), ",
"c(x1,y1, 'position') or c(x1, y1, x2, y2). ",
"If leg_pos = NA then the legend is not plotted."
),
leg_title = "leg_title legend title",
leg_title_cex = "leg_title_cex size of the legend title",
leg_val_cex = "leg_val_cex size of the values in the legend",
leg_val_rnd = "leg_val_rnd number of decimal places of the values in the legend",
val_order = "val_order values order, a character vector that matches var modalities",
leg_frame = "leg_frame whether to add a frame to the legend (TRUE) or not (FALSE)",
leg_no_data = "leg_no_data label for missing values",
add = "add whether to add the layer to an existing plot (TRUE) or not (FALSE)",
pal = paste0(
"pal a set of colors or a palette name",
" (from \\link{hcl.colors})"
),
alpha = paste0(
"alpha if \\code{pal} is a \\link{hcl.colors} palette name, ",
"the alpha-transparency level in the range [0,1]"
),
col_na = "col_na color for missing values",
cex_na = "cex_na cex for NA values",
pch_na = "pch_na pch for NA values",
val_max = "val_max maximum value used for proportional symbols",
breaks = "breaks either a numeric vector with the actual breaks, or a classification method name (see \\link{mf_get_breaks})",
nbreaks = "nbreaks number of classes",
pos = paste0(
"pos position. It can be one of 'topleft', 'top',",
"'topright', 'right', 'bottomright', 'bottom',",
"'bottomleft', 'left' or a vector of two coordinates ",
"in map units (c(x, y))"
),
title = "title legend title",
title_cex = "title_cex size of the legend title",
val_cex = "val_cex size of the values in the legend",
val_rnd = "val_rnd number of decimal places of the values in the legend",
frame = "frame whether to add a frame to the legend (TRUE) or not (FALSE)",
no_data_txt = "no_data_txt label for missing values",
no_data = "no_data if TRUE a 'missing values' box is plotted",
cex = "cex size of the legend; 2 means two times bigger"
)
for (i in 1:length(params)) {
params[[i]] <- paste0(
"@param ", " ",
params[[i]]
)
}
unname(unlist(params[x]))
}
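# Illustrative usage (commented out): returns the matching "@param ..." strings,
# e.g. for reuse in roxygen documentation templates.
# my_params(c("x", "col", "add"))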
|
"humpfit" <-
function (mass, spno, family = poisson, start)
{
.Defunct("natto::humpfit() from https://github.com/jarioksa/natto/")
hump <- function(p, mass, spno, ...) {
x <- ifelse(mass < p[1], mass/p[1], p[1] * p[1]/mass/mass)
fv <- p[3] * log(1 + p[2] * x/p[3])
n <- wt <- rep(1, length(x))
dev <- sum(dev.resids(spno, fv, wt))
aicfun(spno, n, fv, wt, dev)/2
}
fam <- family(link = "identity")
aicfun <- fam$aic
dev.resids <- fam$dev.resids
if (missing(start))
p <- c(mean(mass), 100, 10)
else
p <- start
fit <- nlm(hump, p = p, mass = mass, spno = spno, hessian = TRUE)
p <- fit$estimate
names(p) <- c("hump", "scale", "alpha")
x <- ifelse(mass < p[1], mass/p[1], p[1] * p[1]/mass/mass)
fv <- p[3] * log(1 + p[2] * x/p[3])
res <- dev.resids(spno, fv, rep(1, length(x)))
dev <- sum(res)
residuals <- spno - fv
aic <- fit$minimum * 2 + 6
rdf <- length(x) - 3
out <- list(nlm = fit, family = fam, y = spno, x = mass,
coefficients = p, fitted.values = fv, aic = aic, rank = 3,
df.residual = rdf, deviance = dev, residuals = residuals,
prior.weights = rep(1, length(x)))
class(out) <- c("humpfit", "glm")
out
}
|
wod_4_p <- function( obs_index, s , covariate.data , B , B.N ) {
actual_data <- cbind( covariate.data , s[,1] , s[,2] )
time_index <- ncol(actual_data) - 1
status_index <- ncol(actual_data)
data_length <- nrow(actual_data)
obs_influences <- rep(x = 0 , data_length)
obs_max_influences <- rep(x= 0 , data_length)
concs_vector <- rep(x = 0 , B)
cox_object <- survival::coxph( survival::Surv(actual_data[,time_index], as.integer(actual_data[,status_index]) ) ~ . , data = data.frame( actual_data[,-c(time_index,status_index)] ) )
baseline_concordance <- cox_object$concordance[1]/(cox_object$concordance[1] + cox_object$concordance[2] )
concordance_sum <- 0
for ( i in 1:B ){
boot_datax <- getBootstrap_K(data = actual_data[-obs_index,], k = B.N )
actual_conc <- coxph.call(boot_data = boot_datax, time_index = time_index , status_index = status_index )
if (is.null(actual_conc)) {
for( ix in 1:10) {
cat('Error: ', ix)
actual_conc <- coxph.call(boot_data = boot_datax, time_index = time_index , status_index = status_index )
if (!is.null(actual_conc)) {
break
}
}
}
if (is.null(actual_conc)) {
next
}
concordance_run <- actual_conc - baseline_concordance
concordance_sum <- concordance_sum + concordance_run
concs_vector[i] <- concordance_run
}
obs_influence <- concordance_sum/B;
sorted_concs <- sort( concs_vector )
obs_max_influence <- sorted_concs[B]
sorted_concs <- sort( concs_vector )
index = 1
while( index<=B && sorted_concs[index] <= 0 ){
index = index +1
}
pvalue <- 1 - (B-index)/(B)
res <- c(obs_index,obs_influence,obs_max_influence,pvalue)
names(res) <- c( "obs_id" , "avg_delta","max_delta", "pvalue" )
out_list <- list(res,concs_vector)
names(out_list) <- c("metrics","histogram")
return( out_list )
}
|
popdynMICE <- function(qsx, qfracx, np, nf, nyears, nareas, maxage, Nx, VFx, FretAx, Effind,
movx, Spat_targ, M_ageArrayx, Mat_agex, Fec_agex,
Asizex, WatAgex, Len_agex,
Karrayx, Linfarrayx, t0arrayx, Marrayx,
R0x, R0ax, SSBpRx, hsx, aRx,
bRx, ax, bx, Perrx, SRrelx, Rel, SexPars, x, plusgroup, maxF, SSB0x, B0x) {
n_age <- maxage + 1
Bx <- SSNx <- SSBx <- VBx <- Zx <- array(NA_real_, c(np, n_age, nyears, nareas))
Fy <- array(NA_real_, c(np, nf, nyears))
Fty <- array(NA_real_, c(np, n_age, nyears, nareas))
FMy <- FMrety <- VBfx <- array(NA_real_, c(np, nf, n_age, nyears, nareas))
hsy <- ay <- by <- array(NA_real_, c(np, nyears+1))
hsy[] <- hsx
ay[] <- ax
by[] <- bx
VBfind <- TEG(c(np, nf, n_age, 1, nareas))
Nind <- TEG(c(np, n_age, 1, nareas))
FMx <- FMretx <- Fdist <- array(NA_real_, c(np, nf, n_age, nareas))
Find <- TEG(dim(Fdist))
VBcur <- array(NA, dim(VBfx)[-4])
Ecur <- array(NA, c(np, nf))
Vcur <- Retcur <- array(NA_real_, c(np, nf, n_age))
SSBx[Nind] <- Nx[Nind] * Fec_agex[Nind[, 1:3]]
SSNx[Nind] <- Nx[Nind] * Mat_agex[Nind[, 1:3]]
Bx[Nind] <- Nx[Nind] * WatAgex[Nind[, 1:3]]
for(y in 1:nyears + 1) {
Nind[, 3] <- VBfind[, 4] <- y-1
VBfx[VBfind] <- Bx[VBfind[, c(1,3,4,5)]] * VFx[VBfind[, 1:4]]
VBcur[] <- VBfx[, , , y-1, ]
Fdist[Find] <- VBcur[Find]^Spat_targ[Find[, 1:2]]
Fdist[Find] <- Fdist[Find]/apply(Fdist,1:3,sum)[Find[,1:3]]
Fdist[is.na(Fdist)] <- 0
Ecur[] <- Effind[, , y-1]
Vcur[] <- VFx[, , , y-1]
FMx[Find] <- qsx[Find[, 1]] * qfracx[Find[, 1:2]] * Ecur[Find[, 1:2]] * Fdist[Find] *
Vcur[Find[, 1:3]]/Asizex[Find[, c(1, 4)]]
FMx[FMx > maxF] <- maxF
Retcur[] <- FretAx[, , , y-1]
FMretx[Find] <- qsx[Find[,1]] * qfracx[Find[,1:2]] * Ecur[Find[,1:2]] * Fdist[Find] *
Retcur[Find[, 1:3]]/Asizex[Find[, c(1, 4)]]
FMretx[FMretx > maxF] <- maxF
out <- popdynOneMICE(np, nf, nareas, maxage,
Ncur = array(Nx[, , y-1, ], dim(Nx)[c(1:2, 4)]),
Bcur = array(Bx[, , y-1, ], dim(Nx)[c(1:2, 4)]),
SSBcur = array(SSBx[, , y-1, ], dim(Nx)[c(1:2, 4)]),
Vcur = Vcur,
FMretx = FMretx,
FMx = FMx,
PerrYrp = Perrx[, y+n_age-1],
hsx = hsy[, y], aRx = aRx, bRx = bRx,
movy = array(movx[, , , , y], c(np, n_age, nareas, nareas)),
Spat_targ = Spat_targ, SRrelx = SRrelx,
M_agecur = array(M_ageArrayx[, , y-1], c(np, n_age)),
Mat_agenext = array(Mat_agex[, , y], c(np, n_age)),
Fec_agenext = array(Fec_agex[, , y], c(np, n_age)),
Asizex = Asizex,
Kx = Karrayx[, y],
Linfx = Linfarrayx[, y],
t0x = t0arrayx[, y],
Mx = Marrayx[, y-1],
R0x = R0x, R0ax = R0ax, SSBpRx = SSBpRx, ax = ay[, y],
bx = by[, y], Rel = Rel, SexPars = SexPars, x = x,
plusgroup = plusgroup, SSB0x = SSB0x, B0x = B0x,
Len_agenext = array(Len_agex[, , y], c(np, n_age)),
Wt_agenext = array(WatAgex[, , y], c(np, n_age)))
if (y <= nyears) {
if (length(Rel) && y <= nyears) {
gc <- FALSE
        if (any(Linfarrayx[, y] != out$Linfx)) { Linfarrayx[, y] <- out$Linfx; gc <- TRUE }
        if (any(Karrayx[, y] != out$Kx)) { Karrayx[, y] <- out$Kx; gc <- TRUE }
        if (any(t0arrayx[, y] != out$t0x)) { t0arrayx[, y] <- out$t0x; gc <- TRUE }
        if (any(ay[, y] != out$ax)) { ay[, y] <- out$ax; gc <- TRUE }
        if (any(by[, y] != out$bx)) { by[, y] <- out$bx; gc <- TRUE }
if (gc) {
Len_agex[, , y] <- out$Len_agenext
WatAgex[, , y] <- out$Wt_agenext
Fec_agex[, , y] <- out$Fec_agenext
}
}
Nx[, , y, ] <- out$Nnext
Bx[, , y, ] <- out$Bnext
SSNx[, , y, ] <- out$SSNnext
SSBx[, , y, ] <- out$SSBnext
}
M_ageArrayx[, , y-1] <- out$M_agecur
Marrayx[, y-1] <- out$Mx
VBx[, , y-1, ] <- out$VBt
VBfx[, , , y-1, ] <- out$VBft
Zx[, , y-1, ] <- out$Zt
Fty[, , y-1, ] <- out$Ft
FMy[, , , y-1, ] <- out$FMx
FMrety[, , , y-1, ] <- out$FMretx
}
list(Nx=Nx,
Bx=Bx,
SSNx=SSNx,
SSBx=SSBx,
VBx=VBx,
FMy=FMy,
FMrety=FMrety,
Linfarrayx=Linfarrayx,
Karrayx=Karrayx,
t0array=t0arrayx,
Len_age=Len_agex,
Wt_age=WatAgex,
My=Marrayx,
hsy=hsy,
ay=ay,
by=by,
VBfx=VBfx,
Zx=Zx,
Fty=Fty,
M_ageArrayx=M_ageArrayx,
Fec_Agex=Fec_agex)
}
popdynOneMICE <- function(np, nf, nareas, maxage, Ncur, Bcur, SSBcur, Vcur, FMretx, FMx, PerrYrp,
hsx, aRx, bRx, movy, Spat_targ,
SRrelx, M_agecur, Mat_agecur, Mat_agenext, Fec_agenext,
Asizex,
Kx, Linfx, t0x, Mx, R0x, R0ax, SSBpRx, ax, bx, Rel, SexPars, x,
plusgroup, SSB0x, B0x,
Len_agenext, Wt_agenext) {
n_age <- maxage + 1
Nind <- TEG(dim(Ncur))
surv <- array(c(rep(1,np), t(exp(-apply(M_agecur, 1, cumsum)))[, 1:(n_age-1)]), c(np, n_age))
surv[plusgroup, n_age] <- surv[plusgroup, n_age]/(1 - exp(-M_agecur[plusgroup, n_age]))
oldMx <- Mx
oldM_agecur <- M_agecur
oldLen_agenext <- Len_agenext
oldWt_agenext <- Wt_agenext
oldFec_agenext <- Fec_agenext
if (length(Rel)) {
Responses <- ResFromRel(Rel, Bcur, SSBcur, Ncur, SSB0x, B0x, seed = 1, x)
DV <- sapply(Responses, function(xx) xx[4])
for (r in 1:length(Responses)) {
eval(parse(text = paste0(DV[r], "[", Responses[[r]][3], "]<-", Responses[[r]][1])))
}
if (any(DV %in% c("Linfx", "Kx", "t0x"))) {
Len_agenext <- matrix(Linfx * (1 - exp(-Kx * (rep(0:maxage, each = np) - t0x))), nrow = np)
Len_agenext[Len_agenext < 0] <- tiny
}
if (any(DV %in% c("Linfx", "Kx", "t0x", "ax", "bx"))) {
Fec_per_weight <- array(NA, dim = dim(Fec_agenext))
Fec_per_weight[Nind[, 1:2]] <- Fec_agenext[Nind[, 1:2]]/Wt_agenext[Nind[ ,1:2]]
Wt_agenext <- ax * Len_agenext ^ bx
Fec_agenext[Nind[, 1:2]] <- Fec_per_weight[Nind[, 1:2]] * Wt_agenext[Nind[, 1:2]]
}
if (any(DV == "Mx")) {
M_agecur <- M_agecur * Mx/oldMx
surv <- array(c(rep(1,np), t(exp(-apply(M_agecur, 1, cumsum)))[, 1:(n_age-1)]),
c(np, n_age))
surv[plusgroup, n_age] <- surv[plusgroup, n_age]/(1 - exp(-M_agecur[plusgroup, n_age]))
}
}
VBft <- Fdist <- array(NA, c(np, nf, n_age, nareas))
VBind <- TEG(dim(VBft))
VBft[VBind] <- Vcur[VBind[, 1:3]] * Bcur[VBind[, c(1,3:4)]]
Ft <- apply(FMx, c(1, 3, 4), sum) %>% array(c(np, n_age, nareas))
Zcur <- Ft + replicate(nareas, M_agecur)
SumF <- apply(FMx, c(1, 3, 4), sum, na.rm = TRUE)
Fapic <- apply(SumF, c(1, 3), max)
Selx <- array(NA, dim(SumF))
Selx[Nind] <- SumF[Nind]/Fapic[Nind[, c(1, 3)]]
VBt <- Bcur * Selx
Nnext <- sapply(1:np, function(p) {
popdynOneTScpp(nareas, maxage, Ncurr = Ncur[p, , ], Zcurr = Zcur[p, , ], plusgroup = plusgroup[p])
}, simplify = "array") %>% aperm(c(3, 1, 2))
Nnext[, 1, ] <- 0
if (length(SexPars$Herm)) {
Nnext[is.na(Nnext)] <- 0
for (i in 1:length(SexPars$Herm)) {
ps <- as.numeric(strsplit(names(SexPars$Herm)[i], "_")[[1]][2:3])
pfrom <- ps[2]
pto <- ps[1]
frac <- rep(1,maxage)
frac[1:length(SexPars$Herm[[i]][x, ])] <- SexPars$Herm[[i]][x, ]
h_rate <- hrate(frac)
Nnext[pto, , ] <- Nnext[pto, , ] * (frac > 0)
Nmov <- Nnext[pfrom, , ] * h_rate
Nnext[pto, , ] <- Nnext[pto,,] + Nmov
Nnext[pfrom, , ] <- Nnext[pfrom, , ] - Nmov
}
}
SSB_SR <- local({
SSBtemp <- array(NA_real_, dim(Nnext))
SSBtemp[Nind] <- Nnext[Nind] * Fec_agenext[Nind[, 1:2]]
if (length(SexPars$SSBfrom)) {
sapply(1:np, function(p) apply(SexPars$SSBfrom[p, ] * SSBtemp, 2:3, sum), simplify = "array") %>%
aperm(c(3, 1, 2))
} else {
SSBtemp
}
})
Nnext[, 1, ] <- sapply(1:np, function(p) {
calcRecruitment_int(SRrel = SRrelx[p], SSBcurr = SSB_SR[p, , ], recdev = PerrYrp[p], hs = hsx[p],
aR = aRx[p, 1], bR = 1/sum(1/bRx[p, ]), R0a = R0ax[p, ], SSBpR = SSBpRx[p, 1])
}) %>% t()
for (p in 1:np) {
Nnext[p,,] <- movestockCPP(nareas, maxage, mov=movy[p,,,], Nnext[p,,])
}
Bnext <- SSBnext <- SSNnext <- array(NA_real_, dim(Nnext))
Bnext[Nind] <- Nnext[Nind] * Wt_agenext[Nind[, 1:2]]
SSBnext[Nind] <- Nnext[Nind] * Fec_agenext[Nind[, 1:2]]
SSNnext[Nind] <- Nnext[Nind] * Mat_agenext[Nind[, 1:2]]
list(Nnext=Nnext,
M_agecur=M_agecur,
R0x=R0x,
R0ax=R0ax,
hsx=hsx,
aRx=aRx,
bRx=bRx,
Linfx=Linfx,
Kx=Kx,
t0x=t0x,
Mx=Mx,
ax=ax,
bx=bx,
Len_agenext=Len_agenext,
Wt_agenext=Wt_agenext,
surv=surv,
FMx=FMx,
FMretx=FMretx,
VBt=VBt,
VBft=VBft,
Zt=Zcur,
Ft=Ft,
Bnext=Bnext,
SSNnext=SSNnext,
SSBnext=SSBnext,
Fec_agenext=Fec_agenext)
}
ResFromRel <- function(Rel, Bcur, SSBcur, Ncur, SSB0, B0, seed, x) {
IVnams <- c("B", "SSB", "N", "SSB0", "B0", "x")
IVcode <- c("Bcur", "SSBcur", "Ncur", "SSB0", "B0", "x")
B <- apply(Bcur, 1, sum)
SSB <- apply(SSBcur, 1, sum)
N <- apply(Ncur, 1, sum)
DVnam <- c("M", "a", "b", "R0", "hs", "K", "Linf", "t0")
modnam <- c("Mx", "ax", "bx", "R0x", "hsx", "Kx", "Linfx", "t0x")
nRel <- length(Rel)
out <- lapply(1:nRel, function(r) {
fnams <- names(Rel[[r]]$model)
DV <- fnams[1]
Dp <- unlist(strsplit(DV, "_"))[2]
Dnam <- unlist(strsplit(DV, "_"))[1]
IV <- fnams[-1]
nIV <- length(IV)
newdata <- sapply(IV, function(iv, B, SSB, N, SSB0, B0, x) {
IVs <- unlist(strsplit(iv, "_"))
p <- ifelse(length(IVs) == 1, 1, as.numeric(IVs[2]))
get(IVs[1], inherits = FALSE)[p]
}, B = B, SSB = SSB, N = N, SSB0 = SSB0, B0 = B0, x = x) %>%
matrix(nrow = 1) %>% as.data.frame() %>% structure(names = IV)
ys <- predict(Rel[[r]], newdata = newdata)
templm <- Rel[[r]]
templm$fitted.values <- ys
ysamp <- stats::simulate(templm, nsim = 1, seed = seed) %>% unlist()
c(ysamp, DV, Dp, modnam[match(Dnam, DVnam)])
})
out
}
hrate<-function(frac){
m1frac<-1-frac
ind1<-(1:(length(frac)-1))
ind2<-ind1+1
hrate<-rep(0,length(frac))
hrate[ind2]<-1-(m1frac[ind2]/m1frac[ind1])
hrate[is.na(hrate)]<-1
hrate[hrate<0]<-0
hrate
}
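# Illustrative check of hrate() (hypothetical sex-change fractions, commented out):
# frac <- c(0, 0.2, 0.5, 0.8, 1)  # cumulative fraction switched at each age
# hrate(frac)
# # => 0.000 0.200 0.375 0.600 1.000, the per-age proportion switching that year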
|
interactiveFX <- function(data, spheres=TRUE, color="pitch_types", avg.by, interval=0.01, alpha=1, show.legend=TRUE, ...){
  if (!requireNamespace('rgl')) warning("This function requires the rgl package. Please try to install.packages('rgl') before using.")
if ("pitch_type" %in% names(data)) {
data$pitch_type <- factor(data$pitch_type)
pitch.type <- c("SI", "FF", "IN", "SL", "CU", "CH", "FT", "FC", "PO", "KN", "FS", "FA", NA, "FO")
pitch.types <- c("Sinker", "Fastball (four-seam)", "Intentional Walk", "Slider", "Curveball", "Changeup",
"Fastball (two-seam)", "Fastball (cutter)", "Pitchout", "Knuckleball", "Fastball (split-finger)",
"Fastball", "Unknown", "Forkball")
types <- data.frame(pitch_type=factor(pitch.type, levels=sort(pitch.type)),
pitch_types=factor(pitch.types, levels=sort(pitch.types)))
data <- join(data, types, by = "pitch_type", type="inner")
}
idx <- c("x0", "y0", "z0", "vx0", "vy0", "vz0", "ax", "ay", "az")
if (!all(idx %in% names(data))) warning("You must have the following variables in your dataset to animate pitch locations: 'x0', 'y0', 'z0', 'vx0', 'vy0', 'vz0', 'ax', 'ay', 'az'")
complete <- data[complete.cases(data[,idx]),]
for (i in idx) complete[,i] <- as.numeric(complete[,i])
if (!missing(avg.by)) complete <- ddply(complete, avg.by, numcolwise(mean))
snaps <- getSnapshots(complete, interval)
nplots <- length(snaps[1,,1])
if (isTRUE(!color %in% names(data))) {
warning(paste(color, "is the variable that defines coloring but it isn't in the dataset!"))
full.pal <- rgb(0, 0, 0, alpha)
} else {
types <- as.character(complete[,color])
ncolors <- length(unique(types))
if (ncolors > 3) pal <- terrain.colors(ncolors)
if (ncolors == 3) pal <- c(rgb(1, 0, 0), rgb(0, 1, 0), rgb(0, 0, 1))
if (ncolors == 2) pal <- c(rgb(1, 0, 0), rgb(0, 0, 1))
if (ncolors == 1) pal <- rgb(0, 0, 0)
if (show.legend) {
legend <- data.frame(unique(types), pal)
names(legend) <- c(color, "colors")
cat("Here is the coloring scheme for your plot. Use http://www.colorhexa.com/ to translate color codes.", "\n")
print(legend)
}
full.pal <- factor(types)
levels(full.pal) <- pal
}
rgl::open3d()
if (spheres){
rgl::spheres3d(x=as.vector(snaps[,,1]), y=as.vector(snaps[,,2]), z=as.vector(snaps[,,3]),
col=as.character(full.pal), radius=.12, alpha=alpha, ...)
rgl::axes3d(c('x', 'y', 'z'))
rgl::title3d(xlab='Horizontal Axis', ylab='Distance from Home Plate', zlab='Height From Ground')
} else {
rgl::plot3d(x=as.vector(snaps[,,1]), y=as.vector(snaps[,,2]), z=as.vector(snaps[,,3]),
xlab="Horizontal Axis", ylab="Distance from Home Plate", zlab="Height From Ground",
col=as.character(full.pal), alpha=alpha, ...)
}
}
|
powerTOSTpaired<-function(alpha, statistical_power, N, low_eqbound_dz, high_eqbound_dz){
if(missing(N)) {
NT1<-(qnorm(1-alpha)+qnorm(1-((1-statistical_power)/2)))^2/(low_eqbound_dz)^2
NT2<-(qnorm(1-alpha)+qnorm(1-((1-statistical_power)/2)))^2/(high_eqbound_dz)^2
N<-max(NT1,NT2)
message(cat("The required sample size to achieve",100*statistical_power,"% power with equivalence bounds of",low_eqbound_dz,"and",high_eqbound_dz,"is",ceiling(N),"pairs"))
return(N)
}
if(missing(statistical_power)) {
statistical_power1<-2*(pnorm((abs(low_eqbound_dz)*sqrt(N))-qnorm(1-alpha))+pnorm(-(abs(low_eqbound_dz)*sqrt(N))-qnorm(1-alpha)))-1
statistical_power2<-2*(pnorm((abs(high_eqbound_dz)*sqrt(N))-qnorm(1-alpha))+pnorm(-(abs(high_eqbound_dz)*sqrt(N))-qnorm(1-alpha)))-1
statistical_power<-min(statistical_power1,statistical_power2)
if(statistical_power<0) {statistical_power<-0}
message(cat("The statistical power is",round(100*statistical_power,2),"% for equivalence bounds of",low_eqbound_dz,"and",high_eqbound_dz,"."))
return(statistical_power)
}
if(missing(low_eqbound_dz) && missing(high_eqbound_dz)) {
low_eqbound_dz<--sqrt((qnorm(1-alpha)+qnorm(1-((1-statistical_power)/2)))^2/N)
high_eqbound_dz<-sqrt((qnorm(1-alpha)+qnorm(1-((1-statistical_power)/2)))^2/N)
message(cat("The equivalence bounds to achieve",100*statistical_power,"% power with N =",N,"are",round(low_eqbound_dz,2),"and",round(high_eqbound_dz,2),"."))
bounds<-c(low_eqbound_dz,high_eqbound_dz)
return(bounds)
}
}
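# Illustrative usage (hypothetical design values, commented out):
# powerTOSTpaired(alpha = 0.05, statistical_power = 0.8,
#                 low_eqbound_dz = -0.3, high_eqbound_dz = 0.3)  # required pairs
# powerTOSTpaired(alpha = 0.05, N = 100,
#                 low_eqbound_dz = -0.3, high_eqbound_dz = 0.3)  # achieved power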
|
NULL
as.list.mts <- function(x, ...) {
tspx <- tsp(x)
listx <- as.list(as.data.frame(x))
listx <- purrr::map(
listx,
function(u) {
u <- as.ts(u)
tsp(u) <- tspx
return(u)
}
)
return(listx)
}
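# Illustrative usage (commented out): split a small multivariate ts into a list of
# univariate series that keep the original time attributes.
# z <- ts(matrix(rnorm(24), ncol = 2), start = c(2020, 1), frequency = 12)
# str(as.list.mts(z))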
|
active_zoom<-function(outmo,nframes=25,att=TRUE){
nc=outmo$nchunk
out=list()
if(att==TRUE){
lout=list()
lout$xr=cbind(seq(from=outmo[[(1)]]$xr[1],to=outmo[[(1)]]$xr[1],length=nframes),
seq(from=outmo[[(1)]]$xr[2],to=outmo[[(1)]]$xr[2],length=nframes))*1.01
lout$yr=cbind(seq(from=outmo[[(1)]]$yr[1],to=outmo[[(1)]]$yr[1],length=nframes),
seq(from=outmo[[(1)]]$yr[2],to=outmo[[(1)]]$yr[2],length=nframes))*1.01
out[[1]]=lout
for(i in 2:nc){
lout=list()
lout$xr=cbind(seq(from=outmo[[(i-1)]]$xr[1],to=outmo[[(i)]]$xr[1],length=nframes),
seq(from=outmo[[(i-1)]]$xr[2],to=outmo[[(i)]]$xr[2],length=nframes))*1.01
lout$yr=cbind(seq(from=outmo[[(i-1)]]$yr[1],to=outmo[[(i)]]$yr[1],length=nframes),
seq(from=outmo[[(i-1)]]$yr[2],to=outmo[[(i)]]$yr[2],length=nframes))*1.01
out[[i]]=lout
}
}else{
lout=list()
lout$xr=cbind(seq(from=outmo[[(1)]]$uxr[1],to=outmo[[(1)]]$uxr[1],length=nframes),
seq(from=outmo[[(1)]]$uxr[2],to=outmo[[(1)]]$uxr[2],length=nframes))*1.01
lout$yr=cbind(seq(from=outmo[[(1)]]$uyr[1],to=outmo[[(1)]]$uyr[1],length=nframes),
seq(from=outmo[[(1)]]$uyr[2],to=outmo[[(1)]]$uyr[2],length=nframes))*1.01
out[[1]]=lout
for(i in 2:nc){
lout=list()
lout$xr=cbind(seq(from=outmo[[(i-1)]]$uxr[1],to=outmo[[(i)]]$uxr[1],length=nframes),
seq(from=outmo[[(i-1)]]$uxr[2],to=outmo[[(i)]]$uxr[2],length=nframes))*1.01
lout$yr=cbind(seq(from=outmo[[(i-1)]]$uyr[1],to=outmo[[(i)]]$uyr[1],length=nframes),
seq(from=outmo[[(i-1)]]$uyr[2],to=outmo[[(i)]]$uyr[2],length=nframes))*1.01
out[[i]]=lout
}
}
return(out)
}
|
data("traits_birds")
simple_site_sp <- matrix(1, nrow = 1, ncol = nrow(traits_birds),
dimnames = list("s1", row.names(traits_birds)))
test_that("Functional Evenness output format", {
feve <- expect_silent(fd_feve(traits_birds, sp_com = simple_site_sp))
expect_s3_class(feve, "data.frame")
expect_length(feve, 2)
expect_equal(nrow(feve), 1)
expect_equal(colnames(feve), c("site", "FEve"))
feve <- expect_silent(fd_feve(traits_birds))
expect_s3_class(feve, "data.frame")
expect_length(feve, 2)
expect_equal(nrow(feve), 1)
expect_equal(colnames(feve), c("site", "FEve"))
})
test_that("Functional Evenness computation are in line with other packages", {
expect_equal(fd_feve(traits_birds, simple_site_sp)$FEve, 0.3743341,
tolerance = 1e-6)
test_dissim <- matrix(c(
0, 1, 2,
1, 0, 1,
2, 1, 0
),
byrow = TRUE, ncol = 3, dimnames = list(letters[1:3], letters[1:3]))
abund_mat <- matrix(1, ncol = 3, dimnames = list("site1", letters[1:3]))
expect_equal(fd_feve(sp_com = abund_mat, dist_matrix = test_dissim)$FEve, 1)
})
test_that("Functional Evenness works in 1D", {
expect_identical(
fd_feve(traits_birds[, 1], sp_com = simple_site_sp),
fd_feve(traits_birds[, 1, drop = FALSE], sp_com = simple_site_sp)
)
})
test_that("Functional Evenness works on subset of site/species", {
site_sp <- matrix(1, ncol = nrow(traits_birds))
colnames(site_sp) <- rownames(traits_birds)
rownames(site_sp) <- "s1"
expect_message(fd_feve(traits_birds, site_sp[, 2:ncol(site_sp),
drop = FALSE]),
paste0("Differing number of species between trait dataset ",
"and site-species matrix\nTaking subset of species"))
expect_message(fd_feve(traits_birds[2:nrow(traits_birds),], site_sp),
paste0("Differing number of species between trait dataset ",
"and site-species matrix\nTaking subset of species"))
})
test_that("Functional Evenness edge cases", {
expect_equal(fd_feve(traits_birds[1:2,],
simple_site_sp[, 1:2, drop = FALSE])[["FEve"]],
NA_real_)
data("traits_plants")
data("site_sp_plants")
feve <- expect_silent(
fd_feve(traits_plants, site_sp_plants[10,, drop = FALSE])
)
expect_equal(feve$FEve[[1]], NA_real_)
})
test_that("Functional Evenness works on sparse matrices", {
skip_if_not_installed("Matrix")
site_sp <- matrix(1, ncol = nrow(traits_birds))
colnames(site_sp) <- rownames(traits_birds)
rownames(site_sp) <- "s1"
  sparse_site_sp <- Matrix::Matrix(site_sp, sparse = TRUE)
  sparse_dist_mat <- Matrix::Matrix(as.matrix(dist(traits_birds)), sparse = TRUE)
expect_silent(fd_feve(traits_birds, sparse_site_sp))
expect_equal(fd_feve(traits_birds, sparse_site_sp)$FEve, 0.3743341,
tolerance = 1e-6)
expect_silent(fd_feve(sp_com = site_sp, dist_matrix = sparse_dist_mat))
expect_equal(fd_feve(sp_com = site_sp, dist_matrix = sparse_dist_mat)$FEve,
0.3743341, tolerance = 1e-6)
expect_silent(fd_feve(sp_com = sparse_site_sp, dist_matrix = sparse_dist_mat))
expect_equal(
fd_feve(sp_com = sparse_site_sp, dist_matrix = sparse_dist_mat)$FEve,
0.3743341, tolerance = 1e-6)
})
test_that("Functional Evenness fails gracefully", {
expect_error(
fd_feve(NULL, matrix(1), NULL),
"Please provide either a trait dataset or a dissimilarity matrix",
fixed = TRUE
)
expect_error(
fd_feve(data.frame(a = 1), matrix(1), matrix(1)),
"Please provide either a trait dataset or a dissimilarity matrix",
fixed = TRUE
)
expect_error(
fd_feve(data.frame(a = 1, row.names = "sp1"), matrix(1)),
paste0("No species in common found between trait dataset ",
"and site-species matrix"),
fixed = TRUE
)
})
|
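# grid_search(): builds a full factorial grid of parameter values with
# expand.grid() and forwards it to run_test() (defined elsewhere in the
# package) together with the iteration, bootstrap, and parallelisation settings.
# random_search(): instead samples n.sample values per parameter -- uniformly
# between 'lower' and 'upper' when those bounds are supplied, otherwise by
# sampling the given values with replacement.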
grid_search <- function(func, params=NULL, n.iter=1,
output=c('list', 'data.frame'), boot=FALSE, bootParams=NULL,
parallel=c('no', 'multicore', 'snow'), ncpus=1, cl=NULL, beep=NULL, ...) {
if (!is.null(params)) {
grid <- expand.grid(params, KEEP.OUT.ATTRS=FALSE)
} else {
grid <- data.frame()
}
output <- run_test(func=func, params=grid, n.iter=n.iter, output=output,
boot=boot, bootParams=bootParams, parallel=parallel, ncpus=ncpus, cl=cl,
beep=beep, ...)
return(output)
}
random_search <- function(func, params=NULL, n.sample=1, n.iter=1,
output=c('list', 'data.frame'), boot=FALSE, bootParams=NULL,
parallel=c('no', 'multicore', 'snow'), ncpus=1, cl=NULL, beep=NULL, ...) {
if (!is.null(params)) {
grid <- list()
for (p in 1:length(params)) {
    if (!is.null(names(params[[p]])) &&
        isTRUE(all.equal(names(params[[p]]), c('lower', 'upper')))) {
grid[[names(params)[p]]] <- stats::runif(n.sample, params[[p]]['lower'],
params[[p]]['upper'])
} else {
grid[[names(params)[p]]] <- sample(params[[p]], n.sample, replace=TRUE)
}
}
grid <- as.data.frame(grid)
} else {
grid <- data.frame()
}
output <- run_test(func=func, params=grid, n.iter=n.iter, output=output,
boot=boot, bootParams=bootParams, parallel=parallel, ncpus=ncpus, cl=cl,
beep=beep, ...)
return(output)
}
|
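# plot_counts(): adds a bar panel of observation counts on top of an existing
# light_effects plot `p`. The counts are joined onto the plotted data (missing
# counts set to 0), optionally labelled, faceted like the main plot, and
# stacked above it with plot_grid().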
plot_counts <- function(p, x, text_size = 3, facet_scales = "free_x",
show_labels = TRUE, big.mark = "'",
scientific = FALSE, digits = 0, ...) {
stopifnot(is.ggplot(p), is.light_effects(x),
!("lab_" %in% colnames(x$response)))
label_name <- getOption("flashlight.label_name")
counts_name <- getOption("flashlight.counts_name")
multi <- is.light_effects_multi(x)
key <- c(x$by, x$v, label_name)
x$response <- right_join(x$response, unique(p$data[, key, drop = FALSE]),
by = key)
if (any((bad <- is.na(x$response[[counts_name]])))) {
x$response[[counts_name]][bad] <- 0
}
if (show_labels) {
x$response[["lab_"]] <- format(
round(x$response[[counts_name]], digits),
big.mark = big.mark, scientific = scientific
)
}
ct <- ggplot(x$response, aes_string(x = x$v, y = counts_name)) +
geom_bar(stat = "identity", ...) +
theme_void() +
theme(strip.text.x = element_blank(), panel.grid = element_blank())
if (show_labels) {
ct <- ct + geom_text(aes_string(y = 0, label = "lab_"),
angle = 90, hjust = -0.1, size = text_size)
}
if (multi || length(x$by)) {
ct <- ct + facet_wrap(reformulate(if (multi) label_name else x$by[1]),
scales = facet_scales, nrow = 1L)
}
plot_grid(ct, p, rel_heights = c(0.2, 1), ncol = 1, nrow = 2, align = "v")
}
|
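# .perform_grouping(): orders the columns of a numeric matrix by decreasing
# variance and its rows lexicographically, groups the rows with
# .euclidean_linker_cpp() using the distance cutoff critDist, then maps the
# group labels back to the original row order as consecutive integers.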
.perform_grouping <- function(
input,
critDist,
use_prog_bar = TRUE
) {
assert_that(class(input)[1] == "matrix")
assert_that(class(input[1]) %in% c("integer", "numeric"))
assert_that(nrow(input) >= 1)
if(nrow(input) == 1){
return(1)
}
column_variances <- apply(input, MARGIN = 2, FUN = var)
new_column_order <- order(column_variances, decreasing = TRUE)
input <- input[, new_column_order, drop = FALSE]
ordered_elements <- do.call(order, as.data.frame(input))
input <- input[ordered_elements,,drop = FALSE]
groups <- .euclidean_linker_cpp(
input,
critDist,
use_prog_bar = use_prog_bar
)
output <- vector(mode = "numeric", length = length(groups))
output[ordered_elements] <- groups
output <- as.numeric(factor(output))
output
}
|
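# Small helpers: is_AsIs() and is_html_*() are class checks;
# split_code_headers() splits a code string into named sections at comment
# header lines (unnamed sections get prefix01, prefix02, ...); str_trim()
# strips a character class (whitespace by default) from one or both ends of a
# string, e.g. str_trim("--x--", character = "-") returns "x".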
is_AsIs <- function(x) {
inherits(x, "AsIs")
}
is_html_any <- function(x) {
is_html_tag(x) || is_html_chr(x)
}
is_html_chr <- function(x) {
is.character(x) && inherits(x, "html")
}
is_html_tag <- function(x) {
inherits(x, c("shiny.tag", "shiny.tag.list"))
}
split_code_headers <- function(code, prefix = "section") {
if (is.null(code)) {
return(NULL)
}
code <- paste(code, collapse = "\n")
code <- str_trim(code, character = "[\r\n]")
code <- strsplit(code, "\n")[[1]]
rgx_header <- "^(
headers <- regmatches(code, regexec(rgx_header, code, perl = TRUE))
lines_headers <- which(vapply(headers, length, integer(1)) > 0)
if (length(lines_headers) > 0 && max(lines_headers) == length(code)) {
lines_headers <- lines_headers[-length(lines_headers)]
}
if (!length(lines_headers)) {
return(list(paste(code, collapse = "\n")))
}
header_names <- vapply(headers[lines_headers], `[[`, character(1), 4)
header_names <- str_trim(header_names)
if (any(!nzchar(header_names))) {
header_names[!nzchar(header_names)] <- sprintf(
paste0(prefix, "%02d"),
which(!nzchar(header_names))
)
}
rgx_header_line <- gsub("[$^]", "(^|\n|$)", rgx_header)
sections <- strsplit(paste(code, collapse = "\n"), rgx_header_line, perl = TRUE)[[1]]
if (length(sections) > length(header_names)) {
header_names <- c(paste0(prefix, "00"), header_names)
}
names(sections) <- header_names
sections <- str_trim(sections, character = "[\r\n]")
sections <- sections[nzchar(str_trim(sections))]
as.list(sections)
}
str_trim <- function(x, side = "both", character = "\\s") {
if (side %in% c("both", "left", "start")) {
rgx <- sprintf("^%s+", character)
x <- sub(rgx, "", x)
}
if (side %in% c("both", "right", "end")) {
rgx <- sprintf("%s+$", character)
x <- sub(rgx, "", x)
}
x
}
|
NULL
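# Pairwise record-linkage evaluation: contingency_table_pairs() builds a 2x2
# Prediction x Truth table from sets of true and predicted pairs (the TN cell
# is NA unless the total number of pairs `num_pairs` is supplied). The
# *_pairs() wrappers and *_pairs_ct() helpers derive precision, recall,
# specificity, F-measure, accuracy, balanced accuracy and Fowlkes-Mallows.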
contingency_table_pairs <- function(true_pairs, pred_pairs, num_pairs=NULL, ordered=FALSE) {
if (!is.null(num_pairs)) {
    if (length(num_pairs) != 1 || num_pairs <= 0)
stop("num_pairs must be a positive scalar or NULL")
}
comb_pairs <- rbind(true_pairs, pred_pairs)
true_pairs <- comb_pairs[seq_len(nrow(true_pairs)),]
pred_pairs <- comb_pairs[nrow(true_pairs) + seq_len(nrow(pred_pairs)),]
pred_pairs <- as.data.frame(canonicalize_pairs(pred_pairs, ordered = ordered))
true_pairs <- as.data.frame(canonicalize_pairs(true_pairs, ordered = ordered))
colnames(pred_pairs) <- c("ID.x", "ID.y")
colnames(true_pairs) <- c("ID.x", "ID.y")
pred_pairs[["PRED_MATCH"]] <- rep(TRUE, times=nrow(pred_pairs))
true_pairs[["MATCH"]] <- rep(TRUE, times=nrow(true_pairs))
merged_pairs <- merge(pred_pairs, true_pairs, by=c("ID.x", "ID.y"), all=TRUE)
merged_pairs$PRED_MATCH[is.na(merged_pairs$PRED_MATCH)] <- FALSE
merged_pairs$MATCH[is.na(merged_pairs$MATCH)] <- FALSE
prediction = factor(merged_pairs$PRED_MATCH, levels = c(TRUE, FALSE))
truth = factor(merged_pairs$MATCH, levels = c(TRUE, FALSE))
ct <- table(prediction, truth, dnn = c("Prediction", "Truth"))
if (is.null(num_pairs)) {
ct[2,2] <- NA
} else {
ct[2,2] <- num_pairs - nrow(merged_pairs)
}
return(ct)
}
eval_report_pairs <- function(true_pairs, pred_pairs, num_pairs = NULL, ordered=FALSE)
{
ct <- contingency_table_pairs(true_pairs, pred_pairs, num_pairs = num_pairs, ordered = ordered)
list("precision" = precision_pairs_ct(ct),
"recall" = recall_pairs_ct(ct),
"specificity" = specificity_pairs_ct(ct),
"sensitivity" = recall_pairs_ct(ct),
"f1score" = f_measure_pairs_ct(ct),
"accuracy" = accuracy_pairs_ct(ct),
"balanced_accuracy" = balanced_accuracy_pairs_ct(ct))
}
precision_pairs <- function(true_pairs, pred_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, ordered = ordered)
precision_pairs_ct(ct)
}
recall_pairs <- function(true_pairs, pred_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, ordered = ordered)
recall_pairs_ct(ct)
}
sensitivity_pairs <- function(true_pairs, pred_pairs, ordered=FALSE) {
  recall_pairs(true_pairs, pred_pairs, ordered = ordered)
}
f_measure_pairs <- function(true_pairs, pred_pairs, beta=1, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, ordered = ordered)
f_measure_pairs_ct(ct, beta)
}
specificity_pairs <- function(true_pairs, pred_pairs, num_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, num_pairs = num_pairs, ordered = ordered)
specificity_pairs_ct(ct)
}
accuracy_pairs <- function(true_pairs, pred_pairs, num_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, num_pairs = num_pairs, ordered = ordered)
accuracy_pairs_ct(ct)
}
balanced_accuracy_pairs <- function(true_pairs, pred_pairs, num_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, num_pairs = num_pairs, ordered = ordered)
balanced_accuracy_pairs_ct(ct)
}
fowlkes_mallows_pairs <- function(true_pairs, pred_pairs, ordered=FALSE) {
ct <- contingency_table_pairs(true_pairs, pred_pairs, ordered = ordered)
fowlkes_mallows_pairs_ct(ct)
}
precision_pairs_ct <- function(ct) {
tp <- ct["TRUE", "TRUE"]
fp <- ct["TRUE", "FALSE"]
pp <- tp + fp
return(tp / pp)
}
recall_pairs_ct <- function(ct) {
tp <- ct["TRUE", "TRUE"]
fn <- ct["FALSE", "TRUE"]
p <- tp + fn
return(tp / p)
}
f_measure_pairs_ct <- function(ct, beta=1.0) {
if (beta < 0)
stop("`beta` must be non-negative")
P <- precision_pairs_ct(ct)
R <- recall_pairs_ct(ct)
alpha <- 1/(1 + beta^2)
1 / (alpha / P + (1 - alpha) / R)
}
specificity_pairs_ct <- function(ct) {
fp <- ct["TRUE", "FALSE"]
tn <- ct["FALSE", "FALSE"]
n <- tn + fp
tn / n
}
accuracy_pairs_ct <- function(ct) {
tp <- ct["TRUE", "TRUE"]
fp <- ct["TRUE", "FALSE"]
fn <- ct["FALSE", "TRUE"]
tn <- ct["FALSE", "FALSE"]
correct <- tp + tn
total <- tp + fp + tn + fn
correct/total
}
balanced_accuracy_pairs_ct <- function(ct) {
sensitivity <- recall_pairs_ct(ct)
specificity <- specificity_pairs_ct(ct)
(sensitivity + specificity) / 2
}
fowlkes_mallows_pairs_ct <- function(ct) {
P <- precision_pairs_ct(ct)
R <- recall_pairs_ct(ct)
sqrt(P) * sqrt(R)
}
|
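# is.positive.semi.definite(): checks that x is a square, symmetric, numeric
# matrix, zeroes eigenvalues within `tol` of zero, and returns FALSE if any
# eigenvalue is still negative.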
is.positive.semi.definite <- function( x, tol=1e-8 )
{
if ( !is.square.matrix( x ) )
stop( "argument x is not a square matrix" )
if ( !is.symmetric.matrix( x ) )
stop( "argument x is not a symmetric matrix" )
if ( !is.numeric( x ) )
stop( "argument x is not a numeric matrix" )
eigenvalues <- eigen(x, only.values = TRUE)$values
n <- nrow( x )
for ( i in 1: n ) {
if ( abs( eigenvalues[i] ) < tol ) {
eigenvalues[i] <- 0
}
}
if ( any( eigenvalues < 0 ) ) {
return( FALSE )
}
return( TRUE )
}
|
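# Tests for get_latest_imeca(): the latest air-quality values should be
# non-empty, typed as expected, and cover every station listed in `stations`.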
context("Get latest IMECA values")
test_that("latest data", {
skip_on_cran()
df <- get_latest_imeca()
expect_gt(nrow(df), 0)
expect_type(df$value, "integer")
expect_type(df$datetime, "character")
expect_false(all(is.na(df$datetime)))
expect_warning(get_latest_data())
})
test_that("All stations in the `stations`` data.frame are up-to-date", {
skip_on_cran()
df <- get_latest_imeca()
expect_equal(sort(intersect(df$station_code, stations$station_code)),
sort(df$station_code))
})
|
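# diag_matlab(): reproduces MATLAB's diag() behaviour for 1 x n or n x 1
# matrices by dropping the singleton dimension first, so diag() builds a
# diagonal matrix from the vector instead of returning its single diagonal
# element.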
diag_matlab <- function(mat){
dim.mat <- dim(mat)
if(!is.null(dim.mat)){
if(any(dim.mat==1)){
if(!all(dim.mat==1)){
mat <- mat[,,drop=T]
}
}
}
return(diag(mat))
}
|
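# modelFormulaDoE: an Rcmdr-style defmacro that assembles the model-formula
# widget -- a variable list box (double-click to insert), operator buttons
# (+ * : / %in% - ^ and parentheses), and scrollable lhs/rhs entry fields.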
modelFormulaDoE <- defmacro(frame = modelFrame, hasLhs = TRUE, rhschr="", expr={
currentModel <- FALSE
checkAddOperator <- function(rhs) {
rhs.chars <- rev(strsplit(rhs, "")[[1]])
if (length(rhs.chars) < 1)
return(FALSE)
check.char <- if ((rhs.chars[1] != " ") || (length(rhs.chars) ==
1))
rhs.chars[1]
else rhs.chars[2]
!is.element(check.char, c("+", "*", ":", "/", "-", "^",
"(", "%"))
}
.variables <- Variables()
word <- paste("\\[", gettextRcmdr("factor"), "\\]", sep = "")
variables <- paste(.variables, ifelse(is.element(.variables,
Factors()), paste("[", gettextRcmdr("factor"), "]", sep = ""),
""))
xBox <- variableListBox(frame, variables, selectmode = "multiple",
title = gettextRcmdr("Variables (double-click to formula)"))
onDoubleClick <- if (!hasLhs) {
function() {
var <- getSelection(xBox)
tkselection.clear(xBox$listbox, "0", "end")
if (length(grep(word, var)) == 1)
var <- sub(word, "", var)
tkfocus(rhsEntry)
rhs <- tclvalue(rhsVariable)
rhs.chars <- rev(strsplit(rhs, "")[[1]])
check.char <- if (length(rhs.chars) > 0) {
if ((rhs.chars[1] != " ") || (length(rhs.chars) ==
1))
rhs.chars[1]
else rhs.chars[2]
}
else ""
tclvalue(rhsVariable) <- if (rhs == "" || is.element(check.char,
c("+", "*", ":", "/", "-", "^", "(", "%")))
paste(rhs, var, sep = "") else paste(rhs, "+", var)
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
}
else {
function() {
var <- getSelection(xBox)
which <- tkcurselection(xBox$listbox)
tkselection.clear(xBox$listbox, "0", "end")
if (length(grep(word, var)) == 1)
var <- sub(word, "", var)
lhs <- tclvalue(lhsVariable)
if (lhs == "" || tclvalue(tkselection.present(lhsEntry)) ==
"1") {
tclvalue(lhsVariable) <- var
tkselection.clear(lhsEntry)
tkfocus(rhsEntry)
}
else {
tkfocus(rhsEntry)
rhs <- tclvalue(rhsVariable)
rhs.chars <- rev(strsplit(rhs, "")[[1]])
check.char <- if (length(rhs.chars) > 0) {
if ((rhs.chars[1] != " ") || (length(rhs.chars) ==
1))
rhs.chars[1]
else rhs.chars[2]
}
else ""
                tclvalue(rhsVariable) <- if (rhs == "" || is.element(check.char,
c("+", "*", ":", "/", "-", "^", "(", "%")))
paste(rhs, var, sep = "")
else paste(rhs, "+", var)
}
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
}
tkbind(xBox$listbox, "<Double-ButtonPress-1>", onDoubleClick)
onPlus <- function() {
rhs <- tclvalue(rhsVariable)
var <- getSelection(xBox)
tkselection.clear(xBox$listbox, "0", "end")
if ((check <- !checkAddOperator(rhs)) && length(var) ==
0)
return()
if (length(var) > 1) {
if (length(grep(word, var)) > 0)
var <- sub(word, "", var)
if (length(var) > 1)
var <- paste(var, collapse = " + ")
}
tclvalue(rhsVariable) <- paste(rhs, if (!check)
" + ", var, sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onTimes <- function() {
rhs <- tclvalue(rhsVariable)
var <- getSelection(xBox)
tkselection.clear(xBox$listbox, "0", "end")
if ((check <- !checkAddOperator(rhs)) && length(var) ==
0)
return()
if (length(var) > 1) {
if (length(grep(word, var)) > 0)
var <- sub(word, "", var)
var <- trim.blanks(var)
if (length(var) > 1)
var <- paste(var, collapse = "*")
tclvalue(rhsVariable) <- paste(rhs, if (!check)
" + ", var, sep = "")
}
else tclvalue(rhsVariable) <- paste(rhs, if (!check)
"*", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onColon <- function() {
rhs <- tclvalue(rhsVariable)
var <- getSelection(xBox)
tkselection.clear(xBox$listbox, "0", "end")
if ((check <- !checkAddOperator(rhs)) && length(var) ==
0)
return()
if (length(var) > 1) {
if (length(grep(word, var)) > 0)
var <- sub(word, "", var)
var <- trim.blanks(var)
if (length(var) > 1)
var <- paste(var, collapse = ":")
tclvalue(rhsVariable) <- paste(rhs, if (!check)
" + ", var, sep = "")
}
else tclvalue(rhsVariable) <- paste(rhs, if (!check)
":", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onSlash <- function() {
rhs <- tclvalue(rhsVariable)
if (!checkAddOperator(rhs))
return()
tclvalue(rhsVariable) <- paste(rhs, "/", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onIn <- function() {
rhs <- tclvalue(rhsVariable)
if (!checkAddOperator(rhs))
return()
tclvalue(rhsVariable) <- paste(rhs, "%in% ")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onMinus <- function() {
rhs <- tclvalue(rhsVariable)
if (!checkAddOperator(rhs))
return()
tclvalue(rhsVariable) <- paste(rhs, "- ")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onPower <- function() {
rhs <- tclvalue(rhsVariable)
if (!checkAddOperator(rhs))
return()
tclvalue(rhsVariable) <- paste(rhs, "^", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onLeftParen <- function() {
tkfocus(rhsEntry)
rhs <- tclvalue(rhsVariable)
tclvalue(rhsVariable) <- paste(rhs, "(", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
onRightParen <- function() {
rhs <- tclvalue(rhsVariable)
if (!checkAddOperator(rhs))
return()
tclvalue(rhsVariable) <- paste(rhs, ")", sep = "")
tkicursor(rhsEntry, "end")
tkxview.moveto(rhsEntry, "1")
}
outerOperatorsFrame <- tkframe(frame)
operatorsFrame <- tkframe(outerOperatorsFrame)
plusButton <- buttonRcmdr(operatorsFrame, text = "+", width = "3",
command = onPlus)
timesButton <- buttonRcmdr(operatorsFrame, text = "*", width = "3",
command = onTimes)
colonButton <- buttonRcmdr(operatorsFrame, text = ":", width = "3",
command = onColon)
slashButton <- buttonRcmdr(operatorsFrame, text = "/", width = "3",
command = onSlash)
inButton <- buttonRcmdr(operatorsFrame, text = "%in%", width = "5",
command = onIn)
minusButton <- buttonRcmdr(operatorsFrame, text = "-", width = "3",
command = onMinus)
powerButton <- buttonRcmdr(operatorsFrame, text = "^", width = "3",
command = onPower)
leftParenButton <- buttonRcmdr(operatorsFrame, text = "(",
width = "3", command = onLeftParen)
rightParenButton <- buttonRcmdr(operatorsFrame, text = ")",
width = "3", command = onRightParen)
tkgrid(plusButton, timesButton, colonButton, slashButton,
inButton, minusButton, powerButton, leftParenButton,
rightParenButton, sticky = "w")
formulaFrame <- tkframe(frame)
if (hasLhs) {
tkgrid(labelRcmdr(outerOperatorsFrame, text = gettextRcmdr("Model Formula: "),
fg = "blue"), operatorsFrame)
lhsVariable <- if (currentModel)
tclVar(currentFields$lhs)
else tclVar("")
rhsVariable <- if (currentModel)
tclVar(currentFields$rhs)
else tclVar("")
rhsEntry <- ttkentry(formulaFrame, width = "50", textvariable = rhsVariable)
rhsXscroll <- ttkscrollbar(formulaFrame, orient = "horizontal",
command = function(...) tkxview(rhsEntry, ...))
tkconfigure(rhsEntry, xscrollcommand = function(...) tkset(rhsXscroll,
...))
lhsEntry <- ttkentry(formulaFrame, width = "10", textvariable = lhsVariable)
lhsScroll <- ttkscrollbar(formulaFrame, orient = "horizontal",
command = function(...) tkxview(lhsEntry, ...))
tkconfigure(lhsEntry, xscrollcommand = function(...) tkset(lhsScroll,
...))
tkgrid(lhsEntry, labelRcmdr(formulaFrame, text = " ~ "),
rhsEntry, sticky = "w")
tkgrid(lhsScroll, labelRcmdr(formulaFrame, text = ""),
rhsXscroll, sticky = "w")
tkgrid.configure(lhsScroll, sticky = "ew")
}
else {
rhsVariable <- if (currentModel)
tclVar(currentFields$rhs)
else tclVar(rhschr)
rhsEntry <- ttkentry(formulaFrame, width = "50", textvariable = rhsVariable)
rhsXscroll <- ttkscrollbar(formulaFrame, orient = "horizontal",
        command = function(...) tkxview(rhsEntry, ...))
tkconfigure(rhsEntry, xscrollcommand = function(...) tkset(rhsXscroll,
...))
tkgrid(labelRcmdr(formulaFrame, text = " ~ "), rhsEntry,
sticky = "w")
tkgrid(labelRcmdr(formulaFrame, text = ""), rhsXscroll,
sticky = "w")
}
tkgrid.configure(rhsXscroll, sticky = "ew")
tkgrid(operatorsFrame, sticky="w")
tkgrid(getFrame(xBox), outerOperatorsFrame, sticky="w")
tkgrid(formulaFrame, sticky="w", columnspan="2")
}
)
|
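# make.tran(): returns a response-transformation object (linkfun, linkinv,
# mu.eta, valideta, name, param) for the types genlog, power, boxcox,
# sympower, asin.sqrt, bcnPower and scale; an optional second element of
# `param` shifts the origin. .make.link() extends stats::make.link() with
# additional links (log10, log2, log1p, exp, asinh.sqrt, ...), and
# .make.scale() turns the transformation into a scales::trans_new() object.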
make.tran = function(type = c("genlog", "power", "boxcox", "sympower",
"asin.sqrt", "bcnPower", "scale"), param = 1, y, ...) {
type = match.arg(type)
origin = 0
mu.lbl = "mu"
if (length(param) > 1) {
origin = param[2]
param = param[1]
mu.lbl = paste0("(mu - ", round(origin, 3), ")")
}
if(type == "scale") {
sy = scale(y, ...)
if(is.null(origin <- attr(sy, "scaled:center")))
origin = 0
if(is.null(param <- attr(sy, "scaled:scale")))
param = 1
remove(list = c("y", "sy"))
}
switch(type,
genlog = {
if((origin < 0) || (origin == 1))
stop('"genlog" transformation must have a positive base != 1')
logbase = ifelse(origin == 0, 1, log(origin))
xlab = ifelse(origin == 0, "", paste0(" (base ", round(origin, 3), ")"))
list(linkfun = function(mu) log(pmax(mu + param, 0)) / logbase,
linkinv = function(eta) pmax(exp(logbase * eta), .Machine$double.eps) - param,
mu.eta = function(eta) logbase * pmax(exp(logbase * eta), .Machine$double.eps),
valideta = function(eta) TRUE,
param = c(param, origin),
name = paste0("log(mu + ", round(param,3), ")", xlab)
)
},
power = {
if (param == 0) {
if(origin == 0) make.link("log")
else make.tran("genlog", -origin)
}
else list(
linkfun = function(mu) pmax(mu - origin, 0)^param,
linkinv = function(eta) origin + pmax(eta, 0)^(1/param),
mu.eta = function(eta) pmax(eta, 0)^(1/param - 1) / param,
valideta = function(eta) all(eta > 0),
param = c(param, origin),
name = ifelse(param > 0,
paste0(mu.lbl, "^", round(param,3)),
paste0(mu.lbl, "^(", round(param,3), ")"))
)
},
boxcox = {
if (param == 0) {
result = if(origin == 0) make.link("log")
else make.tran("genlog", -origin)
return (result)
}
min.eta = ifelse(param > 0, -1 / param, -Inf)
xlab = ifelse(origin == 0, "", paste0(" with origin at ", round(origin, 3)))
list(
linkfun = function(mu) ((mu - origin)^param - 1) / param,
linkinv = function(eta) origin + (1 + param * pmax(eta, min.eta))^(1/param),
mu.eta = function(eta) (1 + param * pmax(eta, min.eta))^(1/param - 1),
valideta = function(eta) all(eta > min.eta),
param = c(param, origin),
name = paste0("Box-Cox (lambda = ", round(param, 3), ")", xlab)
)
},
sympower = {
if (param <= 0)
stop('"sympower" transformation requires positive param')
if (origin == 0)
mu.lbl = paste0("(", mu.lbl, ")")
absmu.lbl = gsub("\\(|\\)", "|", mu.lbl)
list(linkfun = function(mu) sign(mu - origin) * abs(mu - origin)^param,
linkinv = function(eta) origin + sign(eta) * abs(eta)^(1/param),
mu.eta = function(eta) (abs(eta))^(1/param - 1),
valideta = function(eta) all(eta > min.eta),
param = c(param, origin),
name = paste0(absmu.lbl, "^", round(param,3), " * sign", mu.lbl)
)
},
asin.sqrt = {
mu.lbl = ifelse(param == 1, "mu", paste0("mu/", round(param,3)))
list(linkfun = function(mu) asin(sqrt(mu/param)),
linkinv = function(eta) param * sin(pmax(pmin(eta, pi/2), 0))^2,
mu.eta = function(eta) param * sin(2*pmax(pmin(eta, pi/2), 0)),
valideta = function(eta) all(eta <= pi/2) && all(eta >= 0),
name = paste0("asin(sqrt(", mu.lbl, "))")
)
},
bcnPower = {
if(origin <= 0)
stop ("The second parameter for 'bcnPower' must be strictly positive.")
list(
linkfun = function(mu) {
s = sqrt(mu^2 + origin^2)
if (abs(param) < 1e-10) log(.5*(mu + s))
else ((0.5 * (mu + s))^param - 1) / param },
linkinv = function(eta) {
q = if (abs(param) < 1e-10) 2 * exp(eta)
else 2 * (param * eta + 1) ^ (1/param)
(q^2 - origin^2) / (2 * q) },
mu.eta = function(eta) {
if (abs(param) < 1e-10) { q = 2 * exp(eta); dq = q }
else { q = 2 * (param * eta + 1) ^ (1/param)
dq = 2 * (param * eta + 1)^(1/param - 1) }
0.5 * (1 + (origin/q)^2) * dq },
valideta = function(eta) all(eta > 0),
param = c(param, origin),
name = paste0("bcnPower(", signif(param,3), ", ", signif(origin,3), ")")
)
},
scale = list(
linkfun = function(mu) (mu - origin) / param,
linkinv = function(eta) param * eta + origin,
mu.eta = function(eta) rep(param, length(eta)),
valideta = function(eta) TRUE,
name = paste0("scale(", signif(origin, 3), ", ", signif(param, 3), ")"),
param = c(param, origin)
)
)
}
.make.link = function(link) {
if (link %in% c("logit", "probit", "cauchit", "cloglog", "identity", "log"))
result = stats::make.link(link)
else result = switch(link,
sqrt = {
tmp = make.link("sqrt")
tmp$linkinv = function(eta) pmax(0, eta)^2
tmp$mu.eta = function(eta) 2*pmax(0, eta)
tmp },
`1/mu^2` = {
tmp = make.link("1/mu^2")
tmp$linkinv = function(eta) 1/sqrt(pmax(0, eta))
tmp$mu.eta = function(eta) -1/(2*pmax(0, eta)^1.5)
tmp },
inverse = {
tmp = make.link("inverse")
tmp$linkinv = function(eta) 1/pmax(0, eta)
tmp$mu.eta = function(eta) -1/pmax(0, eta)^2
tmp },
`/` = .make.link("inverse"),
reciprocal = .make.link("inverse"),
log10 = list(
linkfun = log10,
linkinv = function(eta) 10^eta,
mu.eta = function(eta) 10^eta * log(10),
name = "log10"
),
log2 = list(
linkfun = log2,
linkinv = function(eta) 2^eta,
mu.eta = function(eta) 2^eta * log(2),
name = "log2"
),
log1p = list(
linkfun = log1p,
linkinv = expm1,
mu.eta = exp,
name = "log1p"
),
asin.sqrt = make.tran("asin.sqrt"),
`asin.sqrt./` = make.tran("asin.sqrt", 100),
asinh.sqrt = list(
linkinv = function(eta) sinh(eta)^2,
mu.eta = function(eta) sinh(2 * eta),
name = "asinh(sqrt(mu))"
),
exp = list(
linkinv = function(eta) log(eta),
mu.eta = function(eta) 1/eta,
name = "exp"
),
`+.sqrt` = {
tmp = .make.link("sqrt")
tmp$mult = 2
tmp
},
log.o.r. = {
tmp = make.link("log")
tmp$name = "log odds ratio"
tmp
},
{
tmp = stats::make.link("identity")
tmp$unknown = TRUE
tmp$name = link
tmp
}
)
result
}
.make.scale = function(misc) {
if (!requireNamespace("scales", quiet = TRUE))
stop("type = \"scale\" requires the 'scales' package to be installed")
tran = misc$tran
if (is.character(tran)) {
if ((length(intersect(names(misc), c("tran.mult", "tran.offset"))) == 0) &&
tran %in% c("log", "log1p", "log2", "log10", "sqrt", "logit", "probit",
"exp", "identity"))
return(get(paste(tran, "trans", sep = "_"), envir = asNamespace("scales"))())
tran = .make.link(tran)
}
tran$mult = ifelse(is.null(misc$tran.mult), 1, misc$tran.mult)
tran$offset = ifelse(is.null(misc$tran.offset), 0, misc$tran.offset)
with(tran,
scales::trans_new(name,
function(x) mult * linkfun(x + offset),
function(z) linkinv(z / mult) - offset))
}
|
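# Demo script: fits a series of linear dynamic panel data GMM models with
# pdynmc() to the DemocracyIncome data from the pder package, varying the lag
# order of the dependent variable, the time-dummy specification (levels and/or
# differences), the moment conditions (difference/level/nonlinear), and the
# optimization settings.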
rm(list = ls())
library(pdynmc)
library(pder)
library(plm)
data("DemocracyIncome", package = "pder")
data("DemocracyIncome25", package = "pder")
dat <- DemocracyIncome
rm(DemocracyIncome, DemocracyIncome25)
head(dat)
tail(dat)
str(dat)
table(dat[, "year"])
data.info(dat, i.name = "country", t.name = "year")
strucUPD.plot(dat, i.name = "country", t.name = "year")
m10 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,opt.meth = "none"
)
summary(m10)
m11 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m11)
m12 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m12)
m13 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m13)
m14 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m14)
m15 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m15)
m16 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m16)
m17 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m17)
m20 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,opt.meth = "BFGS"
)
summary(m20)
m21 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m21)
m22 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m22)
m23 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m23)
m24 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m24)
m25 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m25)
m26 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m26)
m27 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m27)
m28 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m28)
m29 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m29)
m30 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,opt.meth = "none"
)
summary(m30)
m31 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,opt.meth = "none"
)
summary(m31)
m32 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,opt.meth = "BFGS", max.iter = 4
)
summary(m32)
m33 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,opt.meth = "none", estimation = "iterative", max.iter = 50
)
summary(m33)
m34 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m34)
m35 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m35)
m36 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m36)
m37 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m37)
m38 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "none"
)
summary(m38)
m39 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "none"
)
summary(m39)
m40 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = TRUE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 1
,opt.meth = "BFGS"
)
summary(m40)
m41 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = FALSE, use.mc.nonlin = TRUE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,opt.meth = "BFGS"
)
summary(m41)
m42 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,opt.meth = "BFGS", max.iter = 4
)
summary(m42)
m44 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m44)
m45 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m45)
m46 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 2
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m46)
m47 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = FALSE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m47)
m48 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = FALSE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m48)
m49 <- pdynmc(dat = dat, varname.i = "country", varname.t = "year"
,use.mc.diff = TRUE, use.mc.lev = TRUE, use.mc.nonlin = FALSE
,varname.y = "democracy", include.y = TRUE, lagTerms.y = 3
,include.dum = TRUE, dum.lev = TRUE, dum.diff = TRUE, varname.dum = "year"
,opt.meth = "BFGS", max.iter = 4
)
summary(m49)
ls()[grepl(ls(), pattern = "m")]
length(ls()[grepl(ls(), pattern = "m")])
|
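# SEER preprocessing: reads the chemo/radiation sequence file, recodes the
# sequence-number and radiation text fields into numeric codes, merges them
# into the `canc` dataset, and derives a three-level treatment indicator
# (noRad / rad / unk) before saving the result.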
d=read.csv("~/data/SEER/crt.csv")
names(d)=c("casenum","seqnum","RT","CT")
crt=d%>%select(casenum,seqnum,CT,RT)
save(crt,file="~/data/SEER/crt.RData")
head(crt)
load("~/data/SEER/crt.RData")
load("~/data/SEER/mrgd/cancDef.RData")
levels(crt$seqnum)
head(crt$seqnum)
x=strsplit(as.character(crt$seqnum), "[^0-9]+")
crt$seq=sapply(x,function(y) y[[1]])
crt$seq[crt$seqnum=="One primary only"]=0
crt$seq[crt$seqnum=="Only one state registry-defined neoplasm"]=60
crt$seq[crt$seqnum=="Unknown seq num - federally required in situ or malig tumors"]=99
crt$seq[crt$seqnum=="Unknown seq num - state registry-defined neoplasms"]=88
(S=paste0(1:27,"th of ",1:27," or more state registry-defined neoplasms"))
for (i in 1:27) crt$seq[crt$seqnum==S[i]] = 60+i
crt$seq=as.integer(crt$seq)
crt$seq[1:800]
crt$ct=as.integer(crt$CT)-1
levels(crt$RT)
crt$radiatn=9
crt$radiatn[crt$RT=="None/Unknown"]=0
crt$radiatn[crt$RT=="Beam radiation"]=1
crt$radiatn[crt$RT=="Radioactive implants"]=2
crt$radiatn[crt$RT=="Radioisotopes"]=3
crt$radiatn[crt$RT=="Combination of beam with implants or isotopes"]=4
crt$radiatn[crt$RT=="Radiation, NOS method or source not specified"]=5
crt$radiatn[crt$RT=="Other radiation (1973-1987 cases only)"]=6
crt$radiatn[crt$RT=="Refused"]=7
crt$radiatn[crt$RT=="Recommended, unknown if administered"]=8
head(crt,20)
cr=crt%>%select(casenum,seqnum=seq,ct,radiatn)
d=left_join(canc,cr)
canc=d
canc$trt="noRad"
canc$trt[canc$radiatn==8]="unk"
canc$trt[canc$radiatn%in%c(1:6)]="rad"
save(canc,file="~/data/SEER/cancCRT.RData")
|
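# Tests for opt_loop_invariant(): invariant assignments should be hoisted out
# of while/for loops and wrapped in an equivalent `if` guard, loops whose
# bodies depend on loop state or contain next/break are left unchanged, and
# empty loops must not cause errors.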
context("opt_loop_invariant")
test_that("loop invariant empty code", {
code <- paste(
"",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"",
sep = "\n"
))
})
test_that("simple loop invariant", {
code <- paste(
"while (TRUE) {",
" x <- 3",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"if (TRUE) {",
" x <- 3",
"} ",
"while (TRUE) {",
"}",
sep = "\n"
))
})
test_that("simple loop invariant two exprs", {
code <- paste(
"while (TRUE) {",
" x <- 3",
" y <- 4",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"if (TRUE) {",
" x <- 3",
" y <- 4",
"} ",
"while (TRUE) {",
"}",
sep = "\n"
))
})
test_that("double loop invariant", {
code <- paste(
"while (TRUE) {",
" for(i in 1:10) {",
" x <- 3",
" }",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"while (TRUE) {",
" if (length(1:10) > 0) {",
" x <- 3",
" } ",
" for(i in 1:10) {",
" }",
"}",
sep = "\n"
))
})
test_that("double loop invariant dont skip one", {
code <- paste(
"while (TRUE) {",
" while (FALSE) {",
" x <- 3",
" }",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"while (TRUE) {",
" if (FALSE) {",
" x <- 3",
" } ",
" while (FALSE) {",
" }",
"}",
sep = "\n"
))
})
test_that("double loop invariant in outer", {
code <- paste(
"for (i in 1:100) {",
" c <- 1",
" for (j in 1:100) {",
" c <- c + 1",
" }",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, code)
})
test_that("double loop invariant in outer 2", {
code <- paste(
"for (i in 1:100) {",
" c <- 1",
" for (j in 1:100) {",
" c <- c + 1",
" d <- c",
" }",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, code)
})
test_that("simple loop invariant", {
code <- paste(
"for(j in 1:20) {",
" for(i in 1:10) {",
" x <- j + 1",
" }",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"for(j in 1:20) {",
" if (length(1:10) > 0) {",
" x <- j + 1",
" } ",
" for(i in 1:10) {",
" }",
"}",
sep = "\n"
))
env_orig <- new.env()
eval(parse(text = code), envir = env_orig)
env_opt <- new.env()
eval(parse(text = opt_code), envir = env_opt)
expect_equal(env_orig, env_opt)
})
test_that("dont propagate", {
code <- paste(
"for(i in 1:20) {",
" x <- i + 1",
"}",
"",
"i <- 0",
"while (i < 20) {",
" x <- x + i",
" i <- i + 1",
"}",
"",
"i <- 0",
"repeat{",
" x <- x + i",
" i <- i + 1",
" if (i >= 20) break",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, code)
})
test_that("abort loop invariant if next/break/return", {
code <- paste(
"while (i < 10) {",
" break",
" x <- 8818",
"}",
"while (i < 10) {",
" next",
" x <- 8818",
"}",
"for (i in 1:10) {",
" break",
" x <- 8818",
"}",
"for (i in 1:10) {",
" next",
" x <- 8818",
"}",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, code)
})
test_that("no error on empty loops", {
code <- paste(
"while (i < 8818) {}",
"while (i < 8818) NULL",
"for (i in 1:8818) {}",
"for (i in 1:8818) NULL",
sep = "\n"
)
opt_code <- opt_loop_invariant(list(code))$codes[[1]]
expect_equal(opt_code, paste(
"while (i < 8818) {}",
"if (i < 8818) {",
"NULL",
"}",
"for (i in 1:8818) {}",
"if (length(1:8818) > 0) {",
"NULL}",
"",
sep = "\n"
))
})
|
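# prepareSequences(): validates and combines two sequences (or a
# multi-sequence data frame) into a single data frame keyed by a grouping
# column, keeping either the shared columns ("overlap") or all columns
# ("complete"), handling empty cases, and applying an optional percentage /
# proportion / hellinger / scale transformation while preserving the time,
# grouping and excluded columns.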
prepareSequences=function(sequence.A = NULL,
sequence.A.name = "A",
sequence.B = NULL,
sequence.B.name = "B",
merge.mode = "complete",
sequences = NULL,
grouping.column = NULL,
time.column = NULL,
exclude.columns = NULL,
if.empty.cases = "zero",
transformation = "none",
paired.samples = FALSE,
same.time = FALSE){
input.mode <- NULL
if (!(transformation %in% c("none", "percentage", "proportion", "hellinger", "scale", "PERCENTAGE", "percent", "Percent", "PERCENT", "proportion", "Proportion", "PROPORTION", "hellinger", "Hellinger", "HELLINGER", "scale", "Scale", "SCALE", "center", "Center", "CENTER"))){
stop("Argument 'transformation' must be one of: 'none', 'percentage', 'proportion', 'hellinger', 'scale'.")
} else {
if(transformation %in% c("none", "None", "NONE")){transformation <- "none"}
if(transformation %in% c("Percentage", "PERCENTAGE", "percent", "Percent", "PERCENT")){transformation <- "percentage"}
if(transformation %in% c("proportion", "Proportion", "PROPORTION")){transformation <- "proportion"}
if(transformation %in% c("hellinger", "Hellinger", "HELLINGER")){transformation <- "hellinger"}
if(transformation %in% c("scale", "Scale", "SCALE", "center", "Center", "CENTER")){transformation <- "scale"}
}
if(!(merge.mode %in% c("overlap", "Overlap", "OVERLAP", "complete", "Complete", "COMPLETE", "completed", "Completed", "COMPLETED"))){
stop("Argument 'merge.mode' must be one of: 'overlap', 'complete'.")
} else {
if(merge.mode %in% c("overlap", "Overlap", "OVERLAP")){merge.mode <- "overlap"}
if(merge.mode %in% c("complete", "Complete", "COMPLETE", "completed", "Completed", "COMPLETED")){merge.mode <- "complete"}
}
if(!is.null(exclude.columns) & !is.character(exclude.columns)){
stop("Argument 'exclude.columns' must be of type character.")
}
if(!is.null(sequence.A) & !is.null(sequence.B) & is.null(sequences)){
if(is.data.frame(sequence.A) & is.data.frame(sequence.B)){
input.mode <- "two.sequences"
      if(!is.character(sequence.A.name)){warning("Argument 'sequence.A.name' must be character. Setting it to 'A'"); sequence.A.name <- "A"}
      if(!is.character(sequence.B.name)){warning("Argument 'sequence.B.name' must be character. Setting it to 'B'"); sequence.B.name <- "B"}
} else {
stop("sequence.A and sequence.B must be dataframes.")
}
}
if(!is.null(sequences)){
if(is.data.frame(sequences)){
input.mode <- "many.sequences"
} else {
stop("Argument 'sequences' must be a dataframe with multiple sequences identified by different values in a grouping.column.")
}
}
if(input.mode == "two.sequences"){
if(!(is.null(time.column))){
if(!(time.column %in% c(colnames(sequence.A), colnames(sequence.B)))){
warning(paste("The argument 'time.column' has the value ", time.column, " but I couldn't find that column name in the input datasets. I will ignore this column."))
}
if(time.column %in% colnames(sequence.A) & !(time.column %in% colnames(sequence.B))){
sequence.B[, time.column] <- NA
warning("I couldn't find 'time.column' in 'sequenceB'. Added one and filled it with NA.")
}
if(time.column %in% colnames(sequence.B) & !(time.column %in% colnames(sequence.A))){
sequence.A[, time.column] <- NA
warning("I couldn't find 'time.column' in 'sequenceA'. Added one and filled it with NA.")
}
}
sequence.A <- data.frame(id=rep(sequence.A.name, nrow(sequence.A)), sequence.A, stringsAsFactors = FALSE)
sequence.B <- data.frame(id=rep(sequence.B.name, nrow(sequence.B)), sequence.B, stringsAsFactors = FALSE)
if(is.null(grouping.column)){
grouping.column <- "id"
} else {
colnames(sequence.A)[1] <- grouping.column
colnames(sequence.B)[1] <- grouping.column
}
if(merge.mode == "overlap"){
common.column.names <- intersect(colnames(sequence.A), colnames(sequence.B))
sequence.A <- sequence.A[, common.column.names]
sequence.B <- sequence.B[, common.column.names]
}
sequences <- plyr::rbind.fill(sequence.A, sequence.B)
}
if(input.mode == "many.sequences"){
if(!(grouping.column %in% colnames(sequences))){
stop("The argument 'grouping.column' must be a column name of the 'sequences' dataset.")
}
if(length(unique(sequences[, grouping.column])) < 2){
stop("According to 'grouping.column' there is only one sequence in the 'sequences' dataset. At least two sequences are required!")
}
}
sequences <- handleNA(
sequence = sequences,
if.empty.cases = if.empty.cases
)
if(!(transformation %in% c("none", "None", "NONE", "nope", "Nope", "NOPE", "no", "No", "NO", "hell no!"))){
id.column <- sequences[, grouping.column]
sequences <- sequences[, !(colnames(sequences) %in% grouping.column)]
if(!is.null(exclude.columns)){
if(sum(exclude.columns %in% colnames(sequences)) > 0){
excluded.columns <- sequences[, exclude.columns]
sequences <- sequences[,!(colnames(sequences) %in% exclude.columns)]
}
}
if(!is.null(time.column)){
if(sum(time.column %in% colnames(sequences)) > 0){
time.column.data <- sequences[, time.column]
sequences <- sequences[, !(colnames(sequences) %in% time.column)]
}
}
sequences[sequences==0] <- 0.00001
if (transformation == "proportion"){
sequences <- sweep(sequences, 1, rowSums(sequences), FUN = "/")
}
if (transformation == "percentage"){
sequences <- sweep(sequences, 1, rowSums(sequences), FUN = "/")*100
}
if (transformation == "hellinger"){
sequences <- sqrt(sweep(sequences, 1, rowSums(sequences), FUN = "/"))
}
if (transformation == "scale"){
sequences <- scale(x=sequences, center = TRUE, scale = TRUE)
}
sequences <- data.frame(id=id.column, sequences, stringsAsFactors = FALSE)
if(grouping.column != "id"){
colnames(sequences)[1] <- grouping.column
}
if(!(is.null(time.column))){
sequences <- data.frame(time=time.column.data, sequences, stringsAsFactors = FALSE)
colnames(sequences)[1] <- time.column
}
if(!(is.null(exclude.columns))){
if(is.data.frame(excluded.columns) | inherits(excluded.columns, "data.frame") == TRUE){
sequences <- data.frame(excluded.columns, sequences, stringsAsFactors = FALSE)
} else {
names.sequences <- colnames(sequences)
sequences <- data.frame(excluded.columns, sequences, stringsAsFactors = FALSE)
colnames(sequences) <- c(exclude.columns, names.sequences)
}
}
}
if(paired.samples == TRUE & same.time == TRUE){
if(!is.null(time.column)){
temp.table.time <- table(sequences[, time.column])
temp.table.time <- data.frame(value=names(temp.table.time), frequency=as.vector(temp.table.time), stringsAsFactors = FALSE)
valid.time.values <- as.numeric(temp.table.time[temp.table.time$frequency == length(unique(sequences[, grouping.column])), "value"])
sequences <- sequences[sequences[, time.column] %in% valid.time.values, ]
}
}
for(col.name in colnames(sequences)){
if(sum(is.nan(sequences[, col.name])) == nrow(sequences)){
sequences[, col.name] <- NULL
}
}
if(is.character(sequences[, grouping.column]) == FALSE){
sequences[, grouping.column] <- as.character(sequences[, grouping.column])
}
return(sequences)
}
|
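# FMA.historical(): functional mediation analysis with historical influence.
# Fits a historical functional regression of M on Z and of Y on (Z, M) using
# Fourier (or user-supplied) bases, then integrates the estimated coefficient
# surfaces over the lag windows to obtain pointwise indirect (IE) and direct
# (DE) effect curves.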
FMA.historical <-
function(Z,M,Y,delta.grid1=1,delta.grid2=1,delta.grid3=1,intercept=TRUE,basis1=NULL,Ld2.basis1=NULL,basis2=NULL,Ld2.basis2=NULL,basis.type=c("fourier"),
nbasis1=3,nbasis2=3,timeinv=c(0,1),timegrids=NULL,lambda1.m=0.01,lambda2.m=0.01,lambda1.y=0.01,lambda2.y=0.01)
{
N<-dim(Z)[1]
ntp<-dim(Z)[2]
if(is.null(timegrids))
{
timegrids<-seq(timeinv[1],timeinv[2],length.out=ntp)
}
if(is.null(basis1))
{
if(basis.type[1]=="fourier")
{
basis1<-fourier.basis(timeinv=timeinv,ntp=ntp,nbasis=nbasis1)
Ld2.basis1<-Ld2.fourier(timeinv=timeinv,ntp=ntp,nbasis=nbasis1)
}
}else
{
nbasis1<-ncol(basis1)
}
if(is.null(basis2))
{
if(basis.type[1]=="fourier")
{
basis2<-fourier.basis(timeinv=timeinv,ntp=ntp,nbasis=nbasis2)
Ld2.basis2<-Ld2.fourier(timeinv=timeinv,ntp=ntp,nbasis=nbasis2)
}
}else
{
nbasis2<-ncol(basis2)
}
fit.m<-FDA.historical(Z,M,delta.grid=delta.grid1,intercept=intercept,basis1=basis1,Ld2.basis1=Ld2.basis1,basis2=basis2,Ld2.basis2=Ld2.basis2,basis.type=basis.type,
nbasis1=nbasis1,nbasis2=nbasis2,timeinv=timeinv,timegrids=timegrids,lambda1=lambda1.m,lambda2=lambda2.m)
fit.y<-FDA.historical2(X1=Z,X2=M,Y,delta.grid1=delta.grid2,delta.grid2=delta.grid3,intercept=intercept,basis1=basis1,Ld2.basis1=Ld2.basis1,basis2=basis2,Ld2.basis2=Ld2.basis2,basis.type=basis.type,
nbasis1=nbasis1,nbasis2=nbasis2,timeinv=timeinv,timegrids=timegrids,lambda1=lambda1.y,lambda2=lambda2.y)
if(intercept)
{
coef.inter.m<-fit.m$coefficients[1:nbasis1,1:nbasis2]
curve.inter.m<-fit.m$gamma.curve[,,1]
coef.alpha<-fit.m$coefficients[(nbasis1+1):(2*nbasis1),(nbasis2+1):(2*nbasis2)]
curve.alpha<-fit.m$gamma.curve[,,2]
coef.inter.y<-fit.y$coefficients[1:nbasis1,1:nbasis2]
curve.inter.y<-fit.y$gamma.curve[,,1]
coef.gamma<-fit.y$coefficients[(nbasis1+1):(2*nbasis1),(nbasis2+1):(2*nbasis2)]
curve.gamma<-fit.y$gamma.curve[,,2]
coef.beta<-fit.y$coefficients[(2*nbasis1+1):(3*nbasis1),(2*nbasis2+1):(3*nbasis2)]
curve.beta<-fit.y$gamma.curve[,,3]
curve.IE<-rep(NA,ntp)
curve.DE<-rep(NA,ntp)
alpha.int<-rep(NA,ntp)
for(i in 1:ntp)
{
rtmp1<-max(i-delta.grid1,1)
rtmp2<-max(i-delta.grid2,1)
rtmp3<-max(i-delta.grid3,1)
alpha.int[i]<-int.func(curve.alpha[rtmp1:i,i],timeinv=c(timegrids[rtmp1],timegrids[i]),timegrids=timegrids[rtmp1:i])
curve.IE[i]<-int.func(alpha.int[rtmp3:i]*curve.beta[rtmp3:i,i],timeinv=c(timegrids[rtmp3],timegrids[i]),timegrids=timegrids[rtmp3:i])
curve.DE[i]<-int.func(curve.gamma[rtmp2:i,i],timeinv=c(timegrids[rtmp2],timegrids[i]),timegrids=timegrids[rtmp2:i])
}
coef.m<-list(Intercept=coef.inter.m,alpha=coef.alpha)
curve.m<-list(Intercept=curve.inter.m,alpha=curve.alpha)
re.m<-list(coefficients=coef.m,curve=curve.m,fitted=fit.m$fitted,lambda1=lambda1.m,lambda2=lambda2.m)
coef.y<-list(Intercept=coef.inter.y,gamma=coef.gamma,beta=coef.beta)
curve.y<-list(Intercept=curve.inter.y,gamma=curve.gamma,beta=curve.beta)
re.y<-list(coefficients=coef.y,curve=curve.y,fitted=fit.y$fitted,lambda1=lambda1.y,lambda2=lambda2.y)
re.IE<-list(curve=curve.IE)
re.DE<-list(curve=curve.DE)
re<-list(basis1=basis1,basis2=basis2,M=re.m,Y=re.y,IE=re.IE,DE=re.DE)
}else
{
coef.alpha<-fit.m$coefficients[1:nbasis1,1:nbasis2]
curve.alpha<-fit.m$gamma.curve[,,1]
coef.gamma<-fit.y$coefficients[1:nbasis1,1:nbasis2]
curve.gamma<-fit.y$gamma.curve[,,1]
coef.beta<-fit.y$coefficients[(nbasis1+1):(2*nbasis1),(nbasis2+1):(2*nbasis2)]
curve.beta<-fit.y$gamma.curve[,,2]
curve.IE<-rep(NA,ntp)
curve.DE<-rep(NA,ntp)
alpha.int<-rep(NA,ntp)
for(i in 1:ntp)
{
rtmp1<-max(i-delta.grid1,1)
rtmp2<-max(i-delta.grid2,1)
rtmp3<-max(i-delta.grid3,1)
alpha.int[i]<-int.func(curve.alpha[rtmp1:i,i],timeinv=c(timegrids[rtmp1],timegrids[i]),timegrids=timegrids[rtmp1:i])
curve.IE[i]<-int.func(alpha.int[rtmp3:i]*curve.beta[rtmp3:i,i],timeinv=c(timegrids[rtmp3],timegrids[i]),timegrids=timegrids[rtmp3:i])
curve.DE[i]<-int.func(curve.gamma[rtmp2:i,i],timeinv=c(timegrids[rtmp2],timegrids[i]),timegrids=timegrids[rtmp2:i])
}
coef.m<-list(alpha=coef.alpha)
curve.m<-list(alpha=curve.alpha)
re.m<-list(coefficients=coef.m,curve=curve.m,fitted=fit.m$fitted,lambda1=lambda1.m,lambda2=lambda2.m)
coef.y<-list(gamma=coef.gamma,beta=coef.beta)
curve.y<-list(gamma=curve.gamma,beta=curve.beta)
re.y<-list(coefficients=coef.y,curve=curve.y,fitted=fit.y$fitted,lambda1=lambda1.y,lambda2=lambda2.y)
re.IE<-list(curve=curve.IE)
re.DE<-list(curve=curve.DE)
re<-list(basis1=basis1,basis2=basis2,M=re.m,Y=re.y,IE=re.IE,DE=re.DE)
}
return(re)
}
|
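# predict.prcurve(): returns fitted values when `newdata` is missing;
# otherwise reorders the columns of `newdata` to match the training data and
# projects the observations onto the fitted principal curve, attaching the
# projection order as an attribute.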
`predict.prcurve` <- function(object, newdata, ...) {
if(missing(newdata))
return(fitted(object))
nNew <- colnames(newdata)
nData <- colnames(object$data)
if (!isTRUE(all.equal(nNew, nData))) {
if (isTRUE(all.equal(sort(nNew), sort(nData)))) {
newdata <- newdata[, nData]
} else {
stop("Variables in 'newdata' don't match with training datat.")
}
}
p <- project_to_curve(data.matrix(newdata), s = object$s,
stretch = object$stretch)
out <- p$s
attr(out, "tag") <- p$ord
out
}
|
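# Spacing rules: each function takes a flat parse table (columns token,
# spaces, newlines, lag_newlines, ...) and sets or removes horizontal space
# around operators, commas, parentheses, braces, tildes, comments, and
# function declarations; set_space_around_op() additionally respects aligned
# function-call arguments.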
set_space_around_op <- function(pd_flat, strict) {
pd_flat <- add_space_after_comma(pd_flat)
op_after <- pd_flat$token %in% op_token
op_before <- lead(op_after, default = FALSE)
op_after <- op_after | pd_flat$token == "','"
if (!any(op_after)) {
return(pd_flat)
}
if (sum(pd_flat$lag_newlines) > 2 &&
is_function_call(pd_flat) &&
any(pd_flat$token %in% c("EQ_SUB", "','"))
) {
is_on_aligned_line <- token_is_on_aligned_line(pd_flat)
} else {
is_on_aligned_line <- FALSE
}
must_have_space_before <- op_before & (pd_flat$newlines == 0L) & !is_on_aligned_line
pd_flat$spaces[must_have_space_before] <- if (strict) {
1L
} else {
pmax(pd_flat$spaces[must_have_space_before], 1L)
}
must_have_space_after <- op_after & (pd_flat$newlines == 0L) & !is_on_aligned_line
pd_flat$spaces[must_have_space_after] <- if (strict) {
1L
} else {
pmax(pd_flat$spaces[must_have_space_after], 1L)
}
pd_flat
}
style_space_around_math_token <- function(strict, zero, one, pd_flat) {
if (any(pd_flat$token %in% zero)) {
pd_flat <- pd_flat %>%
style_space_around_token(
strict = TRUE, tokens = zero, level_before = 0L, level_after = 0L
)
}
if (any(pd_flat$token %in% one)) {
pd_flat <- pd_flat %>%
style_space_around_token(
strict = strict, tokens = one, level_before = 1L, level_after = 1L
)
}
pd_flat
}
style_space_around_token <- function(pd_flat,
strict,
tokens,
level_before,
level_after = level_before) {
op_after <- pd_flat$token %in% tokens
op_before <- lead(op_after, default = FALSE)
idx_before <- op_before & (pd_flat$newlines == 0L)
idx_after <- op_after & (pd_flat$newlines == 0L)
if (strict) {
pd_flat$spaces[idx_before] <- level_before
pd_flat$spaces[idx_after] <- level_after
} else {
pd_flat$spaces[idx_before] <-
pmax(pd_flat$spaces[idx_before], level_before)
pd_flat$spaces[idx_after] <-
pmax(pd_flat$spaces[idx_after], level_after)
}
pd_flat
}
style_space_around_tilde <- function(pd_flat, strict) {
if (is_symmetric_tilde_expr(pd_flat)) {
pd_flat <- style_space_around_token(pd_flat,
strict, "'~'",
level_before = 1, level_after = 1
)
} else if (is_asymmetric_tilde_expr(pd_flat)) {
pd_flat <- style_space_around_token(pd_flat,
strict = TRUE, "'~'", level_before = 1,
level_after = ifelse(nrow(pd_flat$child[[2]]) > 1, 1, 0)
)
}
pd_flat
}
remove_space_after_unary_pm_nested <- function(pd) {
if (any(pd$token[1] %in% c("'+'", "'-'"))) {
pd$spaces[1] <- 0L
}
pd
}
remove_space_before_opening_paren <- function(pd_flat) {
paren_after <- pd_flat$token %in% c("'('", "'['", "LBB")
if (!any(paren_after)) {
return(pd_flat)
}
paren_before <- lead(paren_after, default = FALSE)
pd_flat$spaces[paren_before & (pd_flat$newlines == 0L)] <- 0L
pd_flat
}
remove_space_after_opening_paren <- function(pd_flat) {
paren_after <- pd_flat$token %in% c("'('", "'['", "LBB")
if (!any(paren_after)) {
return(pd_flat)
}
pd_flat$spaces[paren_after & (pd_flat$newlines == 0L)] <- 0L
pd_flat
}
remove_space_before_closing_paren <- function(pd_flat) {
paren_after <- pd_flat$token %in% c("')'", "']'")
if (!any(paren_after)) {
return(pd_flat)
}
paren_before <- lead(paren_after, default = FALSE)
pd_flat$spaces[paren_before & (pd_flat$newlines == 0L)] <- 0L
pd_flat
}
add_space_after_for_if_while <- function(pd_flat) {
comma_after <- pd_flat$token %in% c("FOR", "IF", "WHILE")
if (!any(comma_after)) {
return(pd_flat)
}
idx <- comma_after & (pd_flat$newlines == 0L)
pd_flat$spaces[idx] <- pmax(pd_flat$spaces[idx], 1L)
pd_flat
}
set_space_in_curly_curly <- function(pd) {
if (is_curly_expr(pd)) {
after_inner_opening <- pd$token == "'{'" & pd$token_before == "'{'"
before_inner_closing <- lead(pd$token == "'}'" & pd$token_after == "'}'")
is_curly_curly_inner <- any(after_inner_opening, na.rm = TRUE) &&
any(before_inner_closing, na.rm = TRUE)
if (is_curly_curly_inner) {
pd$spaces[after_inner_opening] <- 1L
pd$spaces[before_inner_closing] <- 1L
}
after_outer_opening <- pd$token == "'{'" & pd$token_after == "'{'"
before_outer_closing <- lead(pd$token == "'}'" & pd$token_before == "'}'")
is_curly_curly_outer <- any(after_outer_opening, na.rm = TRUE) &&
      any(before_outer_closing, na.rm = TRUE)
if (is_curly_curly_outer) {
pd$spaces[after_outer_opening] <- 0L
pd$spaces[before_outer_closing] <- 0L
}
}
pd
}
add_space_after_comma <- function(pd_flat) {
comma_after <- (pd_flat$token == "','") & (pd_flat$newlines == 0L)
pd_flat$spaces[comma_after] <- pmax(pd_flat$spaces[comma_after], 1L)
pd_flat
}
set_space_after_comma <- function(pd_flat) {
comma_after <- (pd_flat$token == "','") & (pd_flat$newlines == 0L)
pd_flat$spaces[comma_after] <- 1L
pd_flat
}
remove_space_before_comma <- function(pd_flat) {
comma_after <- pd_flat$token == "','"
if (!any(comma_after)) {
return(pd_flat)
}
comma_before <- lead(comma_after, default = FALSE)
idx <- comma_before & (pd_flat$newlines == 0L)
pd_flat$spaces[idx] <- 0L
pd_flat
}
set_space_between_levels <- function(pd_flat) {
if (pd_flat$token[1] %in% c("FUNCTION", "IF", "WHILE")) {
index <- pd_flat$token == "')'" & pd_flat$newlines == 0L
pd_flat$spaces[index] <- 1L
} else if (pd_flat$token[1] == "FOR") {
index <- pd_flat$token == "forcond" & pd_flat$newlines == 0
pd_flat$spaces[index] <- 1L
}
pd_flat
}
start_comments_with_space <- function(pd, force_one = FALSE) {
is_comment <- is_comment(pd)
if (any(is_comment)) {
is_comment <- is_comment &
!is_shebang(pd) &
!is_code_chunk_header_or_xaringan_or_code_output(pd)
if (!any(is_comment)) {
return(pd)
}
} else {
return(pd)
}
comments <- rematch2::re_match(
pd$text[is_comment],
"^(?<prefix>
)
comments$space_after_prefix <- nchar(
comments$space_after_prefix,
type = "width"
)
comments$space_after_prefix <- set_spaces(
spaces_after_prefix = comments$space_after_prefix,
force_one
)
pd$text[is_comment] <-
paste0(
comments$prefix,
map_chr(comments$space_after_prefix, rep_char, char = " "),
comments$text
) %>%
trimws("right")
pd$short[is_comment] <- substr(pd$text[is_comment], 1, 5)
pd
}
set_space_before_comments <- function(pd_flat) {
comment_after <- (pd_flat$token == "COMMENT") & (pd_flat$lag_newlines == 0L)
if (!any(comment_after)) {
return(pd_flat)
}
comment_before <- lead(comment_after, default = FALSE)
pd_flat$spaces[comment_before & (pd_flat$newlines == 0L)] <- 1L
pd_flat
}
add_space_before_comments <- function(pd_flat) {
comment_after <- (pd_flat$token == "COMMENT") & (pd_flat$lag_newlines == 0L)
if (!any(comment_after)) {
return(pd_flat)
}
comment_before <- lead(comment_after, default = FALSE)
pd_flat$spaces[comment_before & (pd_flat$newlines == 0L)] <-
pmax(pd_flat$spaces[comment_before], 1L)
pd_flat
}
remove_space_after_excl <- function(pd_flat) {
excl <- (pd_flat$token == "'!'") &
(pd_flat$token_after != "'!'") &
(pd_flat$newlines == 0L)
pd_flat$spaces[excl] <- 0L
pd_flat
}
set_space_after_bang_bang <- function(pd_flat) {
last_bang <- (pd_flat$token == "'!'") &
(pd_flat$token_after != "'!'") &
(pd_flat$newlines == 0L) &
(pd_flat$token_before == "'!'")
pd_flat$spaces[last_bang] <- 0L
pd_flat
}
remove_space_before_dollar <- function(pd_flat) {
dollar_after <- (pd_flat$token == "'$'") & (pd_flat$lag_newlines == 0L)
dollar_before <- lead(dollar_after, default = FALSE)
pd_flat$spaces[dollar_before] <- 0L
pd_flat
}
remove_space_after_fun_dec <- function(pd_flat) {
fun_after <- (pd_flat$token == "FUNCTION") & (pd_flat$lag_newlines == 0L)
pd_flat$spaces[fun_after] <- 0L
pd_flat
}
remove_space_around_colons <- function(pd_flat) {
one_two_or_three_col_after <-
pd_flat$token %in% c("':'", "NS_GET_INT", "NS_GET")
one_two_or_three_col_before <-
lead(one_two_or_three_col_after, default = FALSE)
col_around <-
one_two_or_three_col_before | one_two_or_three_col_after
pd_flat$spaces[col_around & (pd_flat$newlines == 0L)] <- 0L
pd_flat
}
set_space_between_eq_sub_and_comma <- function(pd) {
op_before <- which(pd$token == "EQ_SUB" & lead(pd$token == "','"))
pd$spaces[op_before] <- 1L
pd
}
|
aspectSum<-function(r,pow = 1) {
m<-dim(r)[1]
list(f=sum(r^pow),g=pow*r^(pow-1))
}
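# Illustrative usage: aspectSum() returns the aspect value f = sum(r^pow)
# together with its elementwise gradient g = pow * r^(pow - 1).
r_demo <- matrix(c(0.2, 0.4, 0.4, 0.2, 0.5, 0.3), nrow = 2, byrow = TRUE)
aspectSum(r_demo)           # pow = 1: f is the plain sum, g is a matrix of ones
aspectSum(r_demo, pow = 2)  # pow = 2: f is the sum of squares, g = 2 * r_demo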
|
IFAA=function(
MicrobData,
CovData,
linkIDname,
testCov=NULL,
ctrlCov=NULL,
testMany=TRUE,
ctrlMany=FALSE,
nRef=40,
nRefMaxForEsti=2,
refTaxa=NULL,
adjust_method="BY",
fdrRate=0.25,
paraJobs=NULL,
bootB=500,
standardize=FALSE,
sequentialRun=FALSE,
refReadsThresh=0.2,
taxkeepThresh=0,
SDThresh=0.05,
SDquantilThresh=0,
balanceCut=0.2,
seed=1
){
allFunc=allUserFunc()
results=list()
start.time = proc.time()[3]
runMeta=metaData(MicrobData=MicrobData,CovData=CovData,
linkIDname=linkIDname,testCov=testCov,
ctrlCov=ctrlCov,testMany=testMany,
ctrlMany=ctrlMany,taxkeepThresh=taxkeepThresh)
data=runMeta$data
results$covariatesData=runMeta$covariatesData
binaryInd=runMeta$binaryInd
covsPrefix=runMeta$covsPrefix
Mprefix=runMeta$Mprefix
testCovInd=runMeta$testCovInd
testCovInOrder=runMeta$testCovInOrder
testCovInNewNam=runMeta$testCovInNewNam
ctrlCov=runMeta$ctrlCov
microbName=runMeta$microbName
newMicrobNames=runMeta$newMicrobNames
results$covriateNames=runMeta$xNames
rm(runMeta)
if(length(refTaxa)>0){
if(sum(refTaxa%in%microbName)!=length(refTaxa)){
stop("Error: One or more of the specified reference taxa in phase 1 have no sequencing reads
or are not in the data set. Double check the names of the reference taxa and their
sparsity levels.")
}
}
if (nRefMaxForEsti<2) {
nRefMaxForEsti<-2
warning("Warning: Needs at least two final reference taxon for estimation.")
}
if(nRef>(length(microbName))){
stop("Error: number of random reference taxa can not be larger than the total number
of taxa in the data. Try lower nRef")
}
refTaxa_newNam=newMicrobNames[microbName%in%refTaxa]
results$analysisResults=Regulariz(data=data,testCovInd=testCovInd,
testCovInOrder=testCovInOrder,
testCovInNewNam=testCovInNewNam,
microbName=microbName,nRef=nRef,
nRefMaxForEsti=nRefMaxForEsti,
binaryInd=binaryInd,
covsPrefix=covsPrefix,Mprefix=Mprefix,
refTaxa=refTaxa_newNam,
paraJobs=paraJobs,
adjust_method=adjust_method,
fwerRate=fdrRate,
bootB=bootB,
standardize=standardize,
sequentialRun=sequentialRun,
allFunc=allFunc,refReadsThresh=refReadsThresh,
SDThresh=SDThresh,
SDquantilThresh=SDquantilThresh,
balanceCut=balanceCut,seed=seed
)
rm(data)
results$testCov=testCovInOrder
results$ctrlCov=ctrlCov
results$microbName=microbName
results$bootB=bootB
results$refReadsThresh=refReadsThresh
results$balanceCut=balanceCut
results$SDThresh=SDThresh
results$paraJobs=paraJobs
results$SDquantilThresh=SDquantilThresh
results$nRef=nRef
if(length(seed)==1){
results$seed=seed
}else{
results$seed="No seed used."
}
rm(testCovInOrder,ctrlCov,microbName)
totalTimeMins = (proc.time()[3] - start.time)/60
message("The entire analysis took ",round(totalTimeMins,2), " minutes")
results$totalTimeMins=totalTimeMins
return(results)
}
|
NULL
with_ <- function(set,
reset = set,
get = NULL,
...,
envir = parent.frame(),
new = TRUE) {
if (!missing(...)) {
stop("`...` must be empty.")
}
fmls <- formals(set)
if (length(fmls) > 0L) {
called_fmls <- stats::setNames(lapply(names(fmls), as.symbol), names(fmls))
if (new) {
called_fmls[[1]] <- as.symbol("new")
fun_args <- c(alist(new =, code =), fmls[-1L])
} else {
fun_args <- c(alist(code =), fmls)
}
} else {
called_fmls <- NULL
fun_args <- alist(code =)
}
set_call <- as.call(c(substitute(set), called_fmls))
reset <- if (missing(reset)) substitute(set) else substitute(reset)
if (is.null(get)) {
fun <- eval(bquote(function(args) {
old <- .(set_call)
on.exit(.(reset)(old))
force(code)
}))
} else {
get_call <- as.call(c(substitute(get), called_fmls))
fun <- eval(bquote(function(args) {
old <- .(get_call)
on.exit(.(reset)(old))
.(set_call)
force(code)
}))
}
formals(fun) <- fun_args
environment(fun) <- envir
fun
}
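# Illustrative usage: with_() turns a setter whose return value is the previous
# state into a withr-style "with_*" helper. base::setwd() has that contract
# (its first argument is the new value and it returns the old directory), so
# the generated function changes the working directory only while `code` runs.
with_dir_demo <- with_(setwd)
with_dir_demo(tempdir(), getwd())  # returns the temp dir path, then restores the old wd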
merge_new <- function(old, new, action, merge_fun = c) {
action <- match.arg(action, c("replace", "prefix", "suffix"))
if (action == "suffix") {
new <- merge_fun(old, new)
} else if (action == "prefix") {
new <- merge_fun(new, old)
}
new
}
is.named <- function(x) {
!is.null(names(x)) && all(names(x) != "")
}
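# Illustrative usage of the two helpers above.
merge_new(c("A"), c("B"), action = "suffix")   # c("A", "B")
merge_new(c("A"), c("B"), action = "prefix")   # c("B", "A")
merge_new(c("A"), c("B"), action = "replace")  # c("B")
is.named(c(a = 1, b = 2))  # TRUE
is.named(c(1, 2))          # FALSE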
|
prop.test <-
function(x, n, p = NULL, alternative = c("two.sided", "less", "greater"),
conf.level = 0.95, correct = TRUE)
{
DNAME <- deparse(substitute(x))
if (is.table(x) && length(dim(x)) == 1L) {
if (dim(x) != 2L)
stop("table 'x' should have 2 entries")
l <- 1
n <- sum(x)
x <- x[1L]
}
else if (is.matrix(x)) {
if (ncol(x) != 2L)
stop("'x' must have 2 columns")
l <- nrow(x)
n <- rowSums(x)
x <- x[, 1L]
}
else {
DNAME <- paste(DNAME, "out of", deparse(substitute(n)))
if ((l <- length(x)) != length(n))
stop("'x' and 'n' must have the same length")
}
OK <- complete.cases(x, n)
x <- x[OK]
n <- n[OK]
if ((k <- length(x)) < 1L)
stop("not enough data")
if (any(n <= 0))
stop("elements of 'n' must be positive")
if (any(x < 0))
stop("elements of 'x' must be nonnegative")
if (any(x > n))
stop("elements of 'x' must not be greater than those of 'n'")
if (is.null(p) && (k == 1))
p <- .5
if (!is.null(p)) {
DNAME <- paste0(DNAME, ", null ",
if(k == 1) "probability " else "probabilities ",
deparse(substitute(p)))
if (length(p) != l)
stop("'p' must have the same length as 'x' and 'n'")
p <- p[OK]
if (any((p <= 0) | (p >= 1)))
stop("elements of 'p' must be in (0,1)")
}
alternative <- match.arg(alternative)
if (k > 2 || (k == 2) && !is.null(p))
alternative <- "two.sided"
if ((length(conf.level) != 1L) || is.na(conf.level) ||
(conf.level <= 0) || (conf.level >= 1))
stop("'conf.level' must be a single number between 0 and 1")
correct <- as.logical(correct)
ESTIMATE <- setNames(x/n,
if (k == 1) "p" else paste("prop", 1L:l)[OK])
NVAL <- p
CINT <- NULL
YATES <- if(correct && (k <= 2)) .5 else 0
if (k == 1) {
z <- qnorm(if(alternative == "two.sided")
(1 + conf.level) / 2 else conf.level)
YATES <- min(YATES, abs(x - n * p))
z22n <- z^2 / (2 * n)
p.c <- ESTIMATE + YATES / n
p.u <- if(p.c >= 1) 1 else (p.c + z22n
+ z * sqrt(p.c * (1 - p.c) / n + z22n / (2 * n))) / (1+2*z22n)
p.c <- ESTIMATE - YATES / n
p.l <- if(p.c <= 0) 0 else (p.c + z22n
- z * sqrt(p.c * (1 - p.c) / n + z22n / (2 * n))) / (1+2*z22n)
CINT <- switch(alternative,
"two.sided" = c(max(p.l, 0), min(p.u, 1)),
"greater" = c(max(p.l, 0), 1),
"less" = c(0, min(p.u, 1)))
}
else if ((k == 2) & is.null(p)) {
DELTA <- ESTIMATE[1L] - ESTIMATE[2L]
YATES <- min(YATES, abs(DELTA) / sum(1/n))
WIDTH <- (switch(alternative,
"two.sided" = qnorm((1 + conf.level) / 2),
qnorm(conf.level))
* sqrt(sum(ESTIMATE * (1 - ESTIMATE) / n))
+ YATES * sum(1/n))
CINT <- switch(alternative,
"two.sided" = c(max(DELTA - WIDTH, -1),
min(DELTA + WIDTH, 1)),
"greater" = c(max(DELTA - WIDTH, -1), 1),
"less" = c(-1, min(DELTA + WIDTH, 1)))
}
if (!is.null(CINT))
attr(CINT, "conf.level") <- conf.level
METHOD <- paste(if(k == 1) "1-sample proportions test" else
paste0(k, "-sample test for ",
if(is.null(p)) "equality of" else "given",
" proportions"),
if(YATES) "with" else "without",
"continuity correction")
if (is.null(p)) {
p <- sum(x)/sum(n)
PARAMETER <- k - 1
}
else {
PARAMETER <- k
names(NVAL) <- names(ESTIMATE)
}
names(PARAMETER) <- "df"
x <- cbind(x, n - x)
E <- cbind(n * p, n * (1 - p))
if (any(E < 5))
warning("Chi-squared approximation may be incorrect")
STATISTIC <- sum((abs(x - E) - YATES)^2 / E)
names(STATISTIC) <- "X-squared"
if (alternative == "two.sided")
PVAL <- pchisq(STATISTIC, PARAMETER, lower.tail = FALSE)
else {
if (k == 1)
z <- sign(ESTIMATE - p) * sqrt(STATISTIC)
else
z <- sign(DELTA) * sqrt(STATISTIC)
PVAL <- pnorm(z, lower.tail = (alternative == "less"))
}
RVAL <- list(statistic = STATISTIC,
parameter = PARAMETER,
p.value = as.numeric(PVAL),
estimate = ESTIMATE,
null.value = NVAL,
conf.int = CINT,
alternative = alternative,
method = METHOD,
data.name = DNAME)
class(RVAL) <- "htest"
return(RVAL)
}
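# Illustrative usage: a k-sample test for equality of proportions and a
# one-sample test against a null probability of 0.5.
smokers  <- c(83, 90, 129, 70)
patients <- c(86, 93, 136, 82)
prop.test(smokers, patients)        # 4-sample test for equality of proportions
prop.test(x = 15, n = 40, p = 0.5)  # 1-sample test with continuity correction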
|
NULL
outsider_init <- function(pkgnm, cmd = NA, arglist = NULL, wd = NULL,
files_to_send = NULL, ignore_errors = FALSE) {
container <- container_init(pkgnm = pkgnm)
parts <- list(pkgnm = pkgnm, cmd = cmd, arglist = arglist, wd = wd,
files_to_send = files_to_send, container = container,
ignore_errors = FALSE)
structure(parts, class = 'outsider')
}
run <- function(x, ...) {
UseMethod('run', x)
}
run.outsider <- function(x, ...) {
if (is.na(x[['cmd']])) {
stop('Command not set')
}
cntnr <- x[['container']]
successes <- list()
successes[['start']] <- start(cntnr)
on.exit(halt(x = cntnr))
if (length(x[['files_to_send']]) > 0) {
successes[['send']] <- copy(x = cntnr, send = x[['files_to_send']])
}
successes[['run']] <- run(x = cntnr, cmd = x[['cmd']], args = x[['arglist']])
if (length(x[['wd']]) > 0) {
successes[['return']] <- copy(x = cntnr, rtrn = x[['wd']])
}
if (x[['ignore_errors']]) {
return(TRUE)
}
are_errors <- vapply(X = successes, FUN = inherits, FUN.VALUE = logical(1),
'error')
success <- all(vapply(X = successes, FUN = is.logical,
FUN.VALUE = logical(1))) && all(unlist(successes))
if (any(are_errors)) {
message('An error occurred in the following container ...')
message(print(x))
stop(successes[are_errors][[1]])
}
if (!success) {
message('Command and arguments failed to run for ...')
message(print(x))
}
invisible(success)
}
print.outsider <- function(x, ...) {
cat_line(cli::rule())
cat_line(crayon::bold('Outsider module:'))
cat_line('Package ', char(x[['pkgnm']]))
cat_line('Command ', char(x[['cmd']]))
arglist <- lapply(X = x[['arglist']], FUN = function(x) {
ifelse(is.numeric(x), stat(x), char(x))
})
cat_line('Args ', paste0(arglist, collapse = ', '))
cat_line('Files to send ', paste0(x[['files_to_send']], collapse = ', '))
cat_line('Working dir ', char(x[['wd']]))
cat_line('Container image ', char(x[['container']][['img']]))
cat_line('Container name ', char(x[['container']][['cntnr']]))
cat_line('Container tag ', char(x[['container']][['tag']]))
cat_line('Container status ', char(status(x[['container']])))
cat_line(cli::rule())
}
|
ops <- list(
"+",
"=",
"==",
"!=",
"<=",
">=",
"<-",
"<<-",
"<",
">",
"->",
"->>",
"%%",
"/",
"^",
"*",
"**",
"|",
"||",
"&",
"&&",
rex("%", except_any_of("%"), "%"))
commented_code_linter <- function() {
Linter(function(source_file) {
if (is.null(source_file$full_xml_parsed_content)) return(list())
all_comment_nodes <- xml2::xml_find_all(source_file$full_xml_parsed_content, "//COMMENT")
all_comments <- xml2::xml_text(all_comment_nodes)
code_candidates <- re_matches(
all_comments,
rex(some_of("
capture(name = "code",
anything,
or(some_of("{}[]"),
or(ops),
group(graphs, "(", anything, ")"),
group("!", alphas)
),
anything
)
),
global = FALSE, locations = TRUE)
lapply(rownames(na.omit(code_candidates)), function(code_candidate) {
is_parsable <- parsable(code_candidates[code_candidate, "code"])
if (is_parsable) {
comment_node <- all_comment_nodes[[as.integer(code_candidate)]]
line_number <- as.integer(xml2::xml_attr(comment_node, "line1"))
column_offset <- as.integer(xml2::xml_attr(comment_node, "col1")) - 1L
Lint(
filename = source_file$filename,
line_number = line_number,
column_number = column_offset + code_candidates[code_candidate, "code.start"],
type = "style",
message = "Commented code should be removed.",
line = source_file$file_lines[line_number],
ranges = list(column_offset + c(code_candidates[code_candidate, "code.start"],
code_candidates[code_candidate, "code.end"]))
)
}
})
})
}
parsable <- function(x) {
res <- try_silently(parse(text = x))
!inherits(res, "try-error")
}
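# Illustrative check: parsable() reports whether a stripped comment body parses
# as R code. It relies on lintr's internal try_silently(); the stand-in below
# is an assumption used only so the example runs outside the package.
if (!exists("try_silently", mode = "function")) {
  try_silently <- function(expr) try(expr, silent = TRUE)  # assumed stand-in
}
parsable("x <- mean(1:10)")          # TRUE: parses, so it looks like commented-out code
parsable("this is prose, not code")  # FALSE: does not parse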
todo_comment_linter <- function(todo = c("todo", "fixme")) {
Linter(function(source_file) {
tokens <- with_id(source_file, ids_with_token(source_file, "COMMENT"))
    are_todo <- re_matches(tokens[["text"]], rex(one_or_more("#"), any_spaces, or(todo)), ignore.case = TRUE)
tokens <- tokens[are_todo, ]
lapply(
split(tokens, seq_len(nrow(tokens))),
function(token) {
Lint(
filename = source_file[["filename"]],
line_number = token[["line1"]],
column_number = token[["col1"]],
type = "style",
message = "TODO comments should be removed.",
line = source_file[["lines"]][[as.character(token[["line1"]])]],
ranges = list(c(token[["col1"]], token[["col2"]]))
)
}
)
})
}
|
GroupOtherTrades = function(group_trades, trade_classes_tree,hedging_set_name)
{
sub_classes <- unique(lapply(group_trades, function(x) x$SubClass))
  if(!missing(hedging_set_name)&&
     (substr(hedging_set_name,1,4)=="Vol_"||substr(hedging_set_name,1,6)=="Basis_"))
  {
    temp_sub_classes = paste0(hedging_set_name,"_",sub_classes)
  }else
  { temp_sub_classes = sub_classes}
  sub_classes_addon <- array(data = 0, dim = length(sub_classes))
sub_classes_tree = list()
if(length(trade_classes_tree[["Other SubClasses"]])==0)
{ sub_classes_tree_name = trade_classes_tree$AddChild("Other SubClasses")
}else
{ sub_classes_tree_name = trade_classes_tree[["Other SubClasses"]]}
for (j in 1:length(sub_classes))
{
sub_classes_trades <- group_trades[sapply(group_trades, function(x) (x$SubClass==temp_sub_classes[j]))]
sub_classes_tree[[j]] = sub_classes_tree_name$AddChild(temp_sub_classes[[j]])
trades_tree_name = sub_classes_tree[[j]]$AddChild("Trades")
for (l in 1:length(sub_classes_trades))
{
tree_trade = trades_tree_name$AddChild(sub_classes_trades[[l]]$external_id)
tree_trade$trade_details = Trading::GetTradeDetails(sub_classes_trades[[l]])
tree_trade$trade = sub_classes_trades[[l]]
}
}
return(trade_classes_tree)
}
|
monobinShinyApp <- function() {
appDir <- system.file("monobinShiny", package = "monobinShiny")
source(system.file("monobinShiny/global.R", package = "monobinShiny"), local = environment())
shiny::runApp(appDir = appDir, launch.browser = TRUE)
}
|
progressor <- local({
progressor_count <- 0L
void_progressor <- function(...) NULL
environment(void_progressor)$enable <- FALSE
class(void_progressor) <- c("progressor", class(void_progressor))
  function(steps = length(along), along = NULL, offset = 0L, scale = 1L,
           transform = function(steps) scale * steps + offset,
           message = character(0L), label = NA_character_, trace = FALSE,
           initiate = TRUE, auto_finish = TRUE,
           on_exit = !identical(envir, globalenv()),
           enable = getOption("progressr.enable", TRUE),
           envir = parent.frame()) {
stop_if_not(is.logical(enable), length(enable) == 1L, !is.na(enable))
if (!enable) return(void_progressor)
stop_if_not(!is.null(steps) || !is.null(along))
stop_if_not(length(steps) == 1L, is.numeric(steps), !is.na(steps),
steps >= 0)
stop_if_not(length(offset) == 1L, is.numeric(offset), !is.na(offset))
stop_if_not(length(scale) == 1L, is.numeric(scale), !is.na(scale))
stop_if_not(is.function(transform))
label <- as.character(label)
stop_if_not(length(label) == 1L)
steps <- transform(steps)
stop_if_not(length(steps) == 1L, is.numeric(steps), !is.na(steps),
steps >= 0)
stop_if_not(is.logical(on_exit), length(on_exit) == 1L, !is.na(on_exit))
if (identical(envir, globalenv())) {
if (!progressr_in_globalenv()) {
stop("A progressor must not be created in the global environment unless wrapped in a with_progress() or without_progress() call. Alternatively, create it inside a function or in a local() environment to make sure there is a finite life span of the progressor")
}
if (on_exit) {
stop("It is not possible to create a progressor in the global environment with on_exit = TRUE")
}
}
owner_session_uuid <- session_uuid(attributes = TRUE)
progressor_count <<- progressor_count + 1L
progressor_uuid <- progressor_uuid(progressor_count)
progression_index <- 0L
fcn <- function(message = character(0L), ..., type = "update") {
progression_index <<- progression_index + 1L
cond <- progression(
type = type,
message = message,
...,
progressor_uuid = progressor_uuid,
progression_index = progression_index,
owner_session_uuid = owner_session_uuid,
call = if (trace) sys.call() else NULL,
calls = if (trace) sys.calls() else NULL
)
withRestarts(
signalCondition(cond),
muffleProgression = function(p) NULL
)
invisible(cond)
}
formals(fcn)$message <- message
class(fcn) <- c("progressor", class(fcn))
progressor_envir <- new.env(parent = getNamespace(.packageName))
for (name in c("progression_index", "progressor_uuid",
"owner_session_uuid", "progressor_count",
"enable", "initiate", "auto_finish", "trace",
"steps", "label", "offset", "scale")) {
progressor_envir[[name]] <- get(name)
}
environment(fcn) <- progressor_envir
if (exists("...progressor", mode = "function", envir = envir)) {
...progressor <- get("...progressor", mode = "function", envir = envir)
...progressor(type = "finish")
do.call(unlockBinding, args = list("...progressor", env = envir))
rm("...progressor", envir = envir)
}
if (initiate) {
fcn(
type = "initiate",
steps = steps,
auto_finish = auto_finish
)
}
if (on_exit) {
assign("...progressor", value = fcn, envir = envir)
lockBinding("...progressor", env = envir)
call <- call("...progressor", type = "finish")
do.call(base::on.exit, args = list(call, add = TRUE), envir = envir)
}
fcn
}
})
print.progressor <- function(x, ...) {
s <- sprintf("%s:", class(x)[1])
e <- environment(x)
pe <- parent.env(e)
s <- c(s, paste("- label:", e$label))
s <- c(s, paste("- steps:", e$steps))
s <- c(s, paste("- initiate:", e$initiate))
s <- c(s, paste("- auto_finish:", e$auto_finish))
if (is.function(e$message)) {
message <- "<a function>"
} else {
message <- hpaste(deparse(e$message))
}
s <- c(s, paste("- default message:", message))
call <- vapply(e$calls, FUN = function(call) deparse(call[1]), FUN.VALUE = "")
stack <- if (e$trace) paste(call, collapse = " -> ") else "<disabled>"
s <- c(s, paste("- call stack:", stack))
s <- c(s, paste("- progressor_uuid:", e$progressor_uuid))
s <- c(s, paste("- progressor_count:", pe$progressor_count))
s <- c(s, paste("- progression_index:", e$progression_index))
owner_session_uuid <- e$owner_session_uuid
s <- c(s, paste("- owner_session_uuid:", owner_session_uuid))
s <- c(s, paste("- enable:", e$enable))
size <- object.size(x)
size2 <- serialization_size(x)
s <- c(s, sprintf("- size: %s [%s serialized]", format(size, units = "auto", standard = "SI"), format(size2, units = "auto", standard = "SI")))
s <- paste(s, collapse = "\n")
cat(s, "\n", sep = "")
invisible(x)
}
progressr_in_globalenv <- local({
state <- FALSE
function(action = c("query", "allow", "disallow")) {
action <- match.arg(action)
if (action == "query") return(state)
old_state <- state
state <<- switch(action, allow = TRUE, disallow = FALSE)
invisible(old_state)
}
})
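# Illustrative usage through the exported progressr API (assumes the progressr
# package is installed): a progressor created inside with_progress() signals
# one progression condition per p() call, which the active handler renders.
if (requireNamespace("progressr", quietly = TRUE)) {
  progressr::with_progress({
    p <- progressr::progressor(steps = 5)
    for (i in 1:5) { Sys.sleep(0.05); p(sprintf("step %d", i)) }
  })
}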
|
TPEA <-
function(DEGs,scores,n,FDR_method){
pkgEnv <- new.env(parent=emptyenv())
if(!exists("all_genes", pkgEnv)) {
data("all_genes", package="TPEA", envir=pkgEnv)
da1<-pkgEnv[["all_genes"]]
}
if(!exists("pathway_names", pkgEnv)) {
data("pathway_names", package="TPEA", envir=pkgEnv)
da2<-pkgEnv[["pathway_names"]]
}
all_genes<-da1;
pathway_names<-da2;
number<-n;
all_rand_area<-matrix(0,109,1);
for(i in 1:number){
DEG1<-intersect(DEGs[,1],all_genes[,1]);
DEG1<-as.data.frame(DEG1);
num<-sample(1:nrow(all_genes),size=nrow(DEG1));
rand_genes<-all_genes[num,1];
rand_genes<-as.data.frame(rand_genes);
rand_area<-AUEC(rand_genes);
rand_area[,2]<-as.matrix(rand_area[,2]);
all_rand_area<-cbind(all_rand_area,rand_area[,2]);
print(i);
}
all_rand_area[,1]<-scores[,2];
p_value<-data.frame();
N_AUEC<-data.frame();
for(j in 1:109){
p<-length(which(all_rand_area[j,-1]>=all_rand_area[j,1]))/number;
p_value<-rbind(p_value,p);
nor_area<-(all_rand_area[j,1]-mean(all_rand_area[j,-1]))/sd(all_rand_area[j,-1]);
N_AUEC<-rbind(N_AUEC,nor_area);
}
result1<-cbind(pathway_names,scores[,1],p_value,N_AUEC);
p_v<-as.matrix(p_value);
FDR<-p.adjust(p_v,method=FDR_method,n=109);
FDR<-as.matrix(FDR);
colnames(FDR)<-c("FDR");
result1<-as.matrix(result1);
result2<-cbind(result1,FDR);
result2<-result2[order(result2[,4]),];
return(result2);
}
|
pmx_cov <-
function(values, labels = NULL) {
assert_that(is_list(values))
assert_that(is_list_or_null(labels))
if (missing(labels) || is.null(labels)) labels <- values
assert_that(length(values) == length(labels))
structure(
list(
values = values,
labels = labels
),
class = c("pmxCOVObject")
)
}
is_pmxcov <- function(x)
inherits(x, "pmxCOVObject") || is.null(x)
eta_cov <- function(
labels,
type = c("cats", "conts"),
dname = NULL,
show.correl = TRUE,
correl = NULL,
facets = NULL,
point = NULL,
covariates = NULL,
...) {
type <- match.arg(type)
assert_that(is_string_or_null(dname))
assert_that(is_pmxcov(covariates))
if (is.null(dname)) dname <- "eta"
if (missing(labels)) {
labels <- list(
title = "EBE vs. covariates",
x = "",
y = ""
)
}
assert_that(is_list(labels))
labels$subtitle <- ""
structure(list(
ptype = "ETA_COV",
strat = FALSE,
dname = dname,
type = type,
show.correl = show.correl,
correl = correl,
facets = facets,
point = point,
covariates = covariates,
gp = pmx_gpar(
labels = labels,
discrete = TRUE,
...
)
), class = c("eta_cov", "pmx_gpar"))
}
plot_pmx.eta_cov <- function(x, dx, ...) {
assert_that(is_pmxcov(x$covariates))
p <- if (x$type == "cats") {
x$gp$is.smooth <- FALSE
cats <- x[["cats"]]
if (all(nzchar(x[["cats"]]))) {
dx.cats <- dx[, c(cats, "VALUE", "EFFECT"), with = FALSE]
dx.cats <- melt(dx.cats, measure.vars = cats)
if (!is.null(x$covariates)) {
dx.cats <-
with(
x$covariates,
dx.cats[variable %in% values][
,
variable := factor(variable, levels = values, labels = labels)
]
)
}
      ggplot(dx.cats) +
geom_boxplot(aes_string(x = "value", y = "VALUE")) +
facet_grid(stats::as.formula("EFFECT~variable"), scales = "free")
}
} else {
value <- variable <- corr <- corr_exp <- NULL
conts <- x[["conts"]]
if (all(nzchar(x[["conts"]]))) {
dx.conts <- unique(dx[, c(conts, "VALUE", "EFFECT"), with = FALSE])
dx.conts <- melt(dx.conts, id = c("VALUE", "EFFECT"))
if (!is.null(x$covariates)) {
dx.conts <-
with(
x$covariates,
dx.conts[variable %in% values][
,
variable := factor(variable, levels = values, labels = labels)
]
)
}
x$facets$facets <- stats::as.formula("EFFECT~variable")
p <- ggplot(dx.conts, aes_string(x = "value", y = "VALUE")) +
do.call(geom_point, x$point) +
do.call(facet_grid, x$facets)
if (x$show.correl) {
df_cor <-
dx.conts[
, list(corr = round(cor(get("value"), get("VALUE"), use = "na.or.complete"), 3)),
"EFFECT,variable"
]
corr_eqn <- function(x) {
eq <- substitute(italic(corr) == a, list(a = x))
as.character(as.expression(eq))
}
df_cor[, corr_exp := corr_eqn(corr), "EFFECT,variable"]
correl_obj <- list(
data = df_cor,
x = -Inf, y = Inf, hjust = -0.2, vjust = 1.2,
mapping = aes(label = corr_exp), parse = TRUE
)
correl_obj <- l_left_join(x$correl, correl_obj)
p <- p + do.call("geom_text", correl_obj)
}
p
}
}
if (!is.null(p)) plot_pmx(x$gp, p)
}
|
setGeneric(name = "capser", def = function(y, min, max){standardGeneric("capser")})
setMethod(f = "capser",
signature = c(y = "data.frame"),
definition = function(y, min, max){
if(!all(apply(y, 2, is.numeric))){
stop("Data frame has non-numeric columns.\n")
}
cs <- data.frame(apply(y, 2, capser, min = min, max = max))
return(cs)
}
)
setMethod(f = "capser",
signature = c(y = "matrix"),
definition = function(y, min, max){
cs <- apply(y, 2, capser, min = min, max = max)
return(cs)
}
)
setMethod(f = "capser",
signature = c(y = "mts"),
definition = function(y, min, max){
cs <- apply(y, 2, capser, min = min, max = max)
attributes(cs) <- attributes(y)
return(cs)
}
)
setMethod(f = "capser",
signature = c(y = "numeric"),
definition = function(y, min, max){
min <- as.numeric(min)[1]
max <- as.numeric(max)[1]
if(min >= max){
stop("\nMinimum value is greater or equal than for maximum.\n")
}
cs <- y
cs[y < min] <- min
cs[y > max] <- max
return(cs)
}
)
setMethod(f = "capser",
signature = c(y = "timeSeries"),
definition = function(y, min, max){
cs <- apply(y, 2, capser, min = min, max = max)
return(cs)
}
)
setMethod(f = "capser",
signature = c(y = "ts"),
definition = function(y, min, max){
cs <- capser(c(y), min = min, max = max)
attributes(cs) <- attributes(y)
return(cs)
}
)
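# Illustrative usage: the numeric method caps values elementwise; the
# data.frame, matrix and ts methods apply it column by column, the ts method
# restoring the original time-series attributes.
capser(c(-2, 0.3, 0.7, 5), min = 0, max = 1)              # 0.0 0.3 0.7 1.0
capser(matrix(rnorm(20), ncol = 2), min = -1, max = 1)
capser(ts(cumsum(rnorm(24)), frequency = 12), min = -2, max = 2)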
|
library(tidyverse)
currencies <-
readr::read_csv(
file = "data-raw/x_currencies.csv",
col_types = "ccccc")
|
ncaa_season_id_lu <- baseballr::ncaa_season_id_lu
readr::write_csv(ncaa_season_id_lu, "data-raw/ncaa_season_id_lu.csv")
ncaa_season_id_lu <- readr::read_csv("data-raw/ncaa_season_id_lu.csv")
usethis::use_data(ncaa_season_id_lu, internal=FALSE, overwrite = TRUE)
ncaa_team_lu <- baseballr::ncaa_team_lu
ncaa_team_most_recent <- ncaa_team_lu %>% dplyr::filter(.data$year == 2021)
ncaa_team_most_recent <- ncaa_team_most_recent %>%
dplyr::mutate(year = 2022)
ncaa_team_lu <- dplyr::bind_rows(ncaa_team_lu, ncaa_team_most_recent)
ncaa_team_lu <- ncaa_team_lu %>% dplyr::arrange(.data$division, .data$school)
readr::write_csv(ncaa_team_lu, "data-raw/ncaa_team_lu.csv")
usethis::use_data(ncaa_team_lu, internal=FALSE, overwrite = TRUE)
ncaa_team_lu <- readr::read_csv("data-raw/ncaa_team_lu.csv")
|
Chi2x2<-function(r1c1, r1c2, r2c1, r2c2, n, alpha=.05)
{
df<-1
po1<-r1c1
po2<-r1c2
po3<-r2c1
po4<-r2c2
sum<-po1+po2+po3+po4
pe1<-(r1c1+r1c2)*(r1c1+r2c1)
pe2<-(r1c1+r1c2)*(r1c2+r2c2)
pe3<-(r2c1+r2c2)*(r1c1+r2c1)
pe4<-(r2c1+r2c2)*(r1c2+r2c2)
lambda<-n*((((po1-pe1)^2)/pe1)+(((po2-pe2)^2)/pe2)+(((po3-pe3)^2)/pe3)+(((po4-pe4)^2)/pe4))
tabled<-stats::qchisq(1-alpha, df=df)
power<-round(1-stats::pchisq(tabled, df=df, lambda),3)
if(sum!=1.0){stop("Cell proportions must sum to 1.0. Check the input proportion values.")
}
else message("Power for n of ", n, " = ", power)
result <- data.frame(matrix(ncol = 2))
colnames(result) <- c( "n","Power")
result[, 1]<-n
result[, 2]<-power
output<-na.omit(result)
rownames(output)<- c()
invisible(output)
}
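# Illustrative usage: power of the 2x2 chi-square test of independence for a
# hypothesized table of cell proportions (which must sum to 1) and sample size n.
pw <- Chi2x2(r1c1 = 0.3, r1c2 = 0.2, r2c1 = 0.2, r2c2 = 0.3, n = 100)
pw  # data frame with columns n and Power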
|
evppi_plot_ggplot <- function(evppi_obj,
pos_legend = c(0, 0.8),
col = c(1,1),
...) {
extra_args <- list(...)
plot_dat <-
evppi_obj[c("evi", "evppi", "k")] %>%
bind_rows() %>%
melt(id.vars = "k") %>%
mutate(variable = as.character(.data$variable),
variable = ifelse(.data$variable == "evppi",
paste("EVPPI for",
evppi_obj$parameters),
"EVPI"))
theme_add <- purrr::keep(extra_args, is.theme)
size <- purrr::pluck(extra_args, "size", .default = c(1, 0.5))
legend_params <- make_legend_ggplot(evppi_obj, pos_legend)
ggplot(plot_dat,
aes(x = .data$k, y = .data$value,
group = .data$variable, size = .data$variable, colour = .data$variable)) +
geom_line() +
theme_default() +
theme_add +
scale_color_manual(values = col) +
scale_size_manual(values = size) +
do.call(theme, legend_params) +
ggtitle("Expected Value of Perfect Partial Information") +
xlab("Willingness to pay") +
ylab("")
}
|
test_that('efftox_dtps fails when cohort_sizes is not a vector of positive integers', {
expect_error(
efftox_dtps(cohort_sizes = c(3, 0),
next_dose = 1,
real_doses = c(1.0, 2.0, 4.0, 6.6, 10.0),
efficacy_hurdle = 0.5, toxicity_hurdle = 0.3,
p_e = 0.1, p_t = 0.1,
eff0 = 0.5, tox1 = 0.65,
eff_star = 0.7, tox_star = 0.25,
alpha_mean = -7.9593, alpha_sd = 3.5487,
beta_mean = 1.5482, beta_sd = 3.5018,
gamma_mean = 0.7367, gamma_sd = 2.5423,
zeta_mean = 3.4181, zeta_sd = 2.4406,
eta_mean = 0, eta_sd = 0.2,
psi_mean = 0, psi_sd = 1,
seed = 123, refresh = 0)
)
expect_error(
efftox_dtps(cohort_sizes = c(3, -1),
next_dose = 1,
real_doses = c(1.0, 2.0, 4.0, 6.6, 10.0),
efficacy_hurdle = 0.5, toxicity_hurdle = 0.3,
p_e = 0.1, p_t = 0.1,
eff0 = 0.5, tox1 = 0.65,
eff_star = 0.7, tox_star = 0.25,
alpha_mean = -7.9593, alpha_sd = 3.5487,
beta_mean = 1.5482, beta_sd = 3.5018,
gamma_mean = 0.7367, gamma_sd = 2.5423,
zeta_mean = 3.4181, zeta_sd = 2.4406,
eta_mean = 0, eta_sd = 0.2,
psi_mean = 0, psi_sd = 1,
seed = 123, refresh = 0)
)
expect_error(
efftox_dtps(cohort_sizes = c(3, 2.3),
next_dose = 1,
real_doses = c(1.0, 2.0, 4.0, 6.6, 10.0),
efficacy_hurdle = 0.5, toxicity_hurdle = 0.3,
p_e = 0.1, p_t = 0.1,
eff0 = 0.5, tox1 = 0.65,
eff_star = 0.7, tox_star = 0.25,
alpha_mean = -7.9593, alpha_sd = 3.5487,
beta_mean = 1.5482, beta_sd = 3.5018,
gamma_mean = 0.7367, gamma_sd = 2.5423,
zeta_mean = 3.4181, zeta_sd = 2.4406,
eta_mean = 0, eta_sd = 0.2,
psi_mean = 0, psi_sd = 1,
seed = 123, refresh = 0)
)
expect_error(
efftox_dtps(cohort_sizes = c(3, NA),
next_dose = 1,
real_doses = c(1.0, 2.0, 4.0, 6.6, 10.0),
efficacy_hurdle = 0.5, toxicity_hurdle = 0.3,
p_e = 0.1, p_t = 0.1,
eff0 = 0.5, tox1 = 0.65,
eff_star = 0.7, tox_star = 0.25,
alpha_mean = -7.9593, alpha_sd = 3.5487,
beta_mean = 1.5482, beta_sd = 3.5018,
gamma_mean = 0.7367, gamma_sd = 2.5423,
zeta_mean = 3.4181, zeta_sd = 2.4406,
eta_mean = 0, eta_sd = 0.2,
psi_mean = 0, psi_sd = 1,
seed = 123, refresh = 0)
)
})
test_that('efftox_dtps and derived tibbles perform as expected.', {
paths <- efftox_dtps(cohort_sizes = c(1, 1),
previous_outcomes = '',
next_dose = 1,
real_doses = c(1.0, 2.0, 4.0, 6.6, 10.0),
efficacy_hurdle = 0.5, toxicity_hurdle = 0.3,
p_e = 0.1, p_t = 0.1,
eff0 = 0.5, tox1 = 0.65,
eff_star = 0.7, tox_star = 0.25,
alpha_mean = -7.9593, alpha_sd = 3.5487,
beta_mean = 1.5482, beta_sd = 3.5018,
gamma_mean = 0.7367, gamma_sd = 2.5423,
zeta_mean = 3.4181, zeta_sd = 2.4406,
eta_mean = 0, eta_sd = 0.2,
psi_mean = 0, psi_sd = 1,
seed = 123, refresh = 0)
expect_equal(length(paths), 21)
expect_true("" %in% names(paths))
expect_true("1B" %in% names(paths))
expect_true("1B 2B" %in% names(paths))
expect_true("1E" %in% names(paths))
expect_true("1E 2B" %in% names(paths))
expect_true("1N" %in% names(paths))
expect_true("1N 2B" %in% names(paths))
expect_true("1T" %in% names(paths))
expect_true("1T 2B" %in% names(paths))
expect_true("1B 2E" %in% names(paths))
expect_true("1E 2E" %in% names(paths))
expect_true("1N 2E" %in% names(paths))
expect_true("1T 2E" %in% names(paths))
expect_true("1B 2N" %in% names(paths))
expect_true("1E 2N" %in% names(paths))
expect_true("1N 2N" %in% names(paths))
expect_true("1T 2N" %in% names(paths))
expect_true("1B 2T" %in% names(paths))
expect_true("1E 2T" %in% names(paths))
expect_true("1N 2T" %in% names(paths))
expect_true("1T 2T" %in% names(paths))
library(tibble)
df <- as_tibble(paths)
expect_equal(nrow(df), 21)
expect_equal(nrow(spread_paths(df)), 16)
expect_equal(nrow(spread_paths(dose_finding_paths = paths)), 16)
})
|
NULL
protate <- function(src, dest) {
YX <- crossprod(dest, src)
svd.YX <- svd(YX)
rot <- svd.YX$v %*% t(svd.YX$u)
src %*% rot
}
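# Illustrative usage: orthogonal Procrustes rotation of a source configuration
# onto a target. Here dest_demo is src_demo rotated by a known angle, so the
# rotated source should match the target up to numerical error.
theta <- pi / 6
R_true <- matrix(c(cos(theta), sin(theta), -sin(theta), cos(theta)), 2, 2)
src_demo  <- matrix(rnorm(20), ncol = 2)
dest_demo <- src_demo %*% R_true
max(abs(protate(src_demo, dest_demo) - dest_demo))  # ~ 0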
|
test_that("cfa_summary: single factor", {
summary <- cfa_summary(
data = lavaan::HolzingerSwineford1939,
x1:x3,
return_result = TRUE,
quite = TRUE,
plot = FALSE
)
expect_equal(class(summary)[1], "lavaan")
})
test_that("cfa_summary: mutiple factor", {
summary <- cfa_summary(
data = lavaan::HolzingerSwineford1939,
x1:x3,
x4:x6,
x7:x9,
return_result = TRUE,
plot = FALSE,
quite = TRUE
)
expect_equal(class(summary)[1], "lavaan")
})
test_that("cfa_summary: mutiple factor with group", {
summary <- cfa_summary(
data = lavaan::HolzingerSwineford1939,
x1:x3,
x4:x6,
x7:x9,
group = school,
return_result = TRUE,
plot = FALSE,
quite = TRUE
)
expect_equal(class(summary)[1], "lavaan")
})
|
test_that("prepInputs works with NULL archive + file without extension, but originally a .zip", {
skip_on_cran()
testInitOut <- testInit("raster", needGoogle = FALSE)
on.exit({
testOnExit(testInitOut)
}, add = TRUE)
noisyOutput <- capture.output(
testthat::expect_message(
ras <- reproducible::prepInputs(url = "https://github.com/tati-micheletti/host/raw/master/data/unknownExtension",
alsoExtract = "similar",
destinationPath = tempdir2(rndstr(1,6))))
)
testthat::expect_is(object = ras, class = "RasterLayer")
})
test_that("prepInputs WORKS if the file is not originally a .zip, but archive is provided (only extension matters)", {
skip_on_cran()
testInitOut <- testInit("raster", needGoogle = FALSE)
on.exit({
testOnExit(testInitOut)
}, add = TRUE)
noisyOutput <- capture.output(
testthat::expect_message(
ras <- reproducible::prepInputs(url = "https://github.com/tati-micheletti/host/raw/master/data/unknownTAR",
alsoExtract = "similar", archive = "unknownTAR.tar",
destinationPath = tempdir2(rndstr(1,6))))
)
testthat::expect_is(object = ras, class = "RasterLayer")
})
test_that("prepInputs WORKS if passing archive .zip", {
skip_on_cran()
testInitOut <- testInit("raster", needGoogle = FALSE)
on.exit({
testOnExit(testInitOut)
}, add = TRUE)
noisyOutput <- capture.output(
testthat::expect_message(
ras <- reproducible::prepInputs(url = "https://github.com/tati-micheletti/host/raw/master/data/unknownExtension",
archive = "unknownExtension.zip",
alsoExtract = "similar", destinationPath = tempdir2(rndstr(1,6))))
)
testthat::expect_is(object = ras, class = "RasterLayer")
})
test_that("prepInputs WORKS passing just targetFile that is NOT an archive", {
skip_on_cran()
testInitOut <- testInit("raster", needGoogle = FALSE)
on.exit({
testOnExit(testInitOut)
}, add = TRUE)
noisyOutput <- capture.output(
testthat::expect_message(
ras <- reproducible::prepInputs(url = "https://github.com/tati-micheletti/host/raw/master/data/unknownTIF",
alsoExtract = "similar", targetFile = "unknownTIF.tif", destinationPath = tempdir2(rndstr(1,6))))
)
testthat::expect_is(object = ras, class = "RasterLayer")
})
test_that("prepInputs WORKS passing archive + targetFile", {
skip_on_cran()
testInitOut <- testInit("raster", needGoogle = FALSE)
on.exit({
testOnExit(testInitOut)
}, add = TRUE)
noisyOutput <- capture.output(
testthat::expect_message(
ras <- reproducible::prepInputs(url = "https://github.com/tati-micheletti/host/raw/master/data/unknownExtension",
archive = "unknownExtension.zip", targetFile = "rasterTest.tif",
alsoExtract = "similar",
destinationPath = tempdir2(rndstr(1,6))))
)
testthat::expect_is(object = ras, class = "RasterLayer")
})
|
kcvmae <-
function(x, Xmat, Ymat, nfolds, foldid, ...){
cverr <- rep(0, nfolds)
for(kk in 1:nfolds){
indx <- which(foldid == kk)
Bhat <- cmls(X = Xmat[-indx,,drop=FALSE], Y = Ymat[-indx,,drop=FALSE],
const = x$const, df = x$df,
degree = x$degree, intercept = x$intercept, ...)
cverr[kk] <- mean(abs(Ymat[indx,] - Xmat[indx,,drop=FALSE] %*% Bhat))
}
mean(cverr)
}
|
nomis_codelist <- function(id, concept, search = NULL) {
if (missing(id)) {
stop("id must be specified", call. = FALSE)
}
id_query <- paste0(gsub("NM", "CL", id), "_")
search_query <- ifelse(is.null(search), "", paste0("?search=", search))
code_query <- paste0(
codelist_url, id_query, concept,
".def.sdmx.xml", search_query
)
df <- tibble::as_tibble(rsdmx::readSDMX(code_query))
df
}
|
c(
Mod5Step1updateVind <- function(input, nb.IS){
m <- matrix(rep(0,nb.IS*nb.IS),nb.IS)
diag(m)[1] <- input$Mod5Step1_Vi
return(m)
},
output$Mod5Step1_hidden <- renderUI({
list(
numericInput("Mod5Step1_Tmax", "", Modules_VAR$Tmax$max),
shinyMatrix::matrixInput("Mod5Step1_Vind", value = Mod5Step1updateVind(input, nb.IS), class = "numeric"),
shinyMatrix::matrixInput("Mod5Step1_B", value = matrix(c(0,input$Mod5Step1_B1,input$Mod5Step1_B2,0),1), class = "numeric"),
checkboxInput("Mod5Step1_X1_state", "", value = TRUE),
checkboxInput("Mod5Step1_X1_sto_state", "", value = TRUE),
checkboxInput("Mod5Step1_X2_state", "", value = TRUE),
checkboxInput("Mod5Step1_X2_sto_state", "", value = TRUE)
)
}),
outputOptions(output, "Mod5Step1_hidden", suspendWhenHidden = FALSE),
Mod5Step1_output <- reactive({
if (input$Mod5Step1_Run == 0)
return(NULL)
isolate({
updateCheckboxInput(session, "isRunning", value = TRUE)
data <- squid::squidR(input, module = "Mod5Step1")
LMR <- lme4::lmer(Phenotype ~ 1 + X1 + (1|Individual), data = data$sampled_data)
FIXEF <- lme4::fixef(LMR)
SE.FIXEF <- arm::se.fixef(LMR)
RANDEF <- as.data.frame(lme4::VarCorr(LMR))$vcov
data$Vi_1 <- round(RANDEF[1],2)
data$Vr_1 <- round(RANDEF[2],2)
data$B1_1 <- round(FIXEF["X1"],2)
data$se.B1_1 <- round(SE.FIXEF["X1"],2)
LMR <- lme4::lmer(Phenotype ~ 1 + X1 + X2 + (1|Individual), data = data$sampled_data)
FIXEF <- lme4::fixef(LMR)
SE.FIXEF <- arm::se.fixef(LMR)
RANDEF <- as.data.frame(lme4::VarCorr(LMR))$vcov
data$Vi_2 <- round(RANDEF[1],2)
data$Vr_2 <- round(RANDEF[2],2)
data$B1_2 <- round(FIXEF["X1"],2)
data$se.B1_2 <- round(SE.FIXEF["X1"],2)
data$B2_2 <- round(FIXEF["X2"],2)
data$se.B2_2 <- round(SE.FIXEF["X2"],2)
updateCheckboxInput(session, "isRunning", value = FALSE)
return(data)
})
}),
output$Mod5Step1_summary_table1 <- renderUI({
data <- Mod5Step1_output()
myTable <- data.frame(
"True" = c(paste0("$",EQ3$mean1,"$ = ",input$Mod5Step1_B[2]),
paste0("Individual variance ($V_",NOT$devI,"$) = ",input$Mod5Step1_Vi),
paste0("Measurement variance ($V_",NOT$mError,"$) = ",input$Mod5Step1_Ve)),
"Estimated" = c(paste0("$",NOT$mean,"'_1$ = ",ifelse(!is.null(data),paste(data$B1_1,"\U00b1", data$se.B1_1),"...")),
paste0("Individual variance ($V'_",NOT$devI,"$) = ", ifelse(!is.null(data),data$Vi_1,"...")),
paste0("Residual variance of sample ($V'_",NOT$residualUpper,"$) = ", ifelse(!is.null(data),data$Vr_1,"..."))))
getTable(myTable)
}),
output$Mod5Step1_summary_table2 <- renderUI({
data <- Mod5Step1_output()
myTable <- data.frame(
"True" = c(paste0("$",EQ3$mean1,"$ = ",input$Mod5Step1_B[2]),
paste0("$",EQ3$mean2,"$ = ",input$Mod5Step1_B[3]),
paste0("Individual variance ($V_",NOT$devI,"$) = ",input$Mod5Step1_Vi),
paste0("Measurement variance ($V_",NOT$mError,"$) = ",input$Mod5Step1_Ve)),
"Estimated" = c(paste0("$",NOT$mean,"'_1$ = ",ifelse(!is.null(data),paste(data$B1_2,"\U00b1", data$se.B1_2),"...")),
paste0("$",NOT$mean,"'_2$ = ",ifelse(!is.null(data),paste(data$B2_2,"\U00b1", data$se.B2_2),"...")),
paste0("Individual variance ($V'_",NOT$devI,"$) = ", ifelse(!is.null(data),data$Vi_2,"...")),
paste0("Residual variance of sample ($V'_",NOT$residualUpper,"$) = ", ifelse(!is.null(data),data$Vr_2,"..."))))
getTable(myTable)
}),
output$Mod5Step1_3D_1 <- renderPlotly({
data <- Mod5Step1_output()$sampled_data
isolate({
if (!is.null(data)) {
X_seq <- seq(from = min(data[ , c("X1", "X2")]), to = max(data[ , c("X1", "X2")]), length.out = 10)
predictors <- cbind("intecept" = 1, expand.grid("X1" = X_seq, "X2" = X_seq))
predictors$X1X2 <- predictors$X1 * predictors$X2
Phenotype_mean <- as.matrix(predictors) %*% as.vector(input$Mod5Step1_B)
Phenotype_mean <- t(matrix(Phenotype_mean, nrow = length(X_seq), ncol = length(X_seq)))
plotly::plot_ly(hoverinfo = "none") %>%
plotly::add_surface(x = X_seq, y = X_seq, z = Phenotype_mean, opacity = 0.7,
colorscale = list(c(0, 1), c("black", "black"))) %>%
plotly::add_markers(data = data, x = ~X1, y = ~X2, z = ~Phenotype, color = ~Individual, size=4) %>%
plotly::layout(showlegend = FALSE) %>%
plotly::hide_colorbar()
}else{defaultPlot()}
})
}),
output$Mod5Step1_3D_2 <- renderPlotly({
data <- Mod5Step1_output()$sampled_data
isolate({
if (!is.null(data)) {
X_seq <- seq(from = min(data[ , c("X1", "X2")]), to = max(data[ , c("X1", "X2")]), length.out = 10)
predictors <- cbind("intecept" = 1, expand.grid("X1" = X_seq, "X2" = X_seq))
predictors$X1X2 <- predictors$X1 * predictors$X2
Phenotype_mean <- as.matrix(predictors) %*% as.vector(input$Mod5Step1_B)
Phenotype_mean <- t(matrix(Phenotype_mean, nrow = length(X_seq), ncol = length(X_seq)))
All.I <- sort(unique(data$I))
I.min <- min(All.I)
I.max <- max(All.I)
I.med <- All.I[round(length(All.I) / 2)]
data <- as.data.table(data)
data <- data[I %in% c(I.min, I.med, I.max)]
plotly::plot_ly(hoverinfo = "none") %>%
plotly::add_surface(x = X_seq, y = X_seq, z = Phenotype_mean, opacity = 0.7,
colorscale = list(c(0, 1), c("black", "black"))) %>%
plotly::add_markers(data = data, x = ~X1, y = ~X2, z = ~Phenotype, color = ~Individual, size=4) %>%
plotly::layout(showlegend = FALSE) %>%
plotly::hide_colorbar()
}else{defaultPlot()}
})
})
)
|
foreccomb_res <- function(method,
modelnames,
fitted,
accuracy_insample,
input_data,
predict = NULL,
intercept = NULL,
weights = NULL,
pred = NULL,
accuracy_outsample = NULL,
trim_factor = NULL,
top_predictors = NULL,
ranking = NULL) {
if(is.ts(input_data$Actual_Train)) {
fitted <- transfer_ts_prop(input_data$Actual_Train, fitted)
}
result <- list(Method = method, Models = modelnames, Fitted = fitted, Accuracy_Train = accuracy_insample,
Input_Data = input_data)
if(is.null(rownames(result$Accuracy_Train))) {
rownames(result$Accuracy_Train) <- "Training Set"
}
if(!is.null(predict)) {
result <- append(result, list(Predict = predict))
}
if(!is.null(intercept)) {
result <- append(result, list(Intercept = intercept))
}
if(!is.null(weights)) {
result <- append(result, list(Weights = weights))
}
if(!is.null(pred)) {
if(is.ts(input_data$Actual_Test)) {
pred <- transfer_ts_prop(input_data$Actual_Test, pred)
}
result <- append(result, list(Forecasts_Test = pred))
}
if(!is.null(accuracy_outsample)) {
result <- append(result, list(Accuracy_Test = accuracy_outsample))
if(is.null(rownames(result$Accuracy_Test))) {
rownames(result$Accuracy_Test) <- "Test Set"
}
}
if(!is.null(trim_factor)) {
result <- append(result, list(Trim_Factor = trim_factor))
}
if(!is.null(top_predictors)) {
result <- append(result, list(Top_Predictors = top_predictors))
}
if(!is.null(ranking)) {
result <- append(result, list(Ranking = ranking))
}
result <- structure(result, class = c("foreccomb_res"))
return(result)
}
transfer_ts_prop <- function(ts, vec) {
vec <- stats::ts(vec)
attributes(vec)$tsp <- attributes(ts)$tsp
return(vec)
}
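# Illustrative usage: transfer_ts_prop() copies the time-series attributes
# (start, end, frequency) of an existing ts onto a plain vector, which
# foreccomb_res() uses to keep fitted values and forecasts on the same time
# index as the actuals.
actual_demo <- ts(rnorm(24), start = c(2020, 1), frequency = 12)
fitted_demo <- transfer_ts_prop(actual_demo, rnorm(24))
tsp(fitted_demo)  # identical to tsp(actual_demo)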
|
setMethodS3("fitCalMaTe", "matrix", function(dataT, references, flavor=c("v2", "v1"), ...) {
if (flavor == "v2") {
res <- fitCalMaTeV2(dataT, references=references, ...);
} else if (flavor == "v1") {
res <- fitCalMaTeV1(dataT, references=references, ...);
} else {
throw("Unknown algorithm flavor: ", flavor);
}
res;
}, protected=TRUE)
|
plotM <-
function (mat, mattext, col = c("grey", "red"), main, las = 1,
xlab = "To", ylab = "From", xnames, ynames, cex = min(1,
nrow(mat)/8), fig = 3, opacity_factor)
{
mat = as.matrix(mat)
if (missing(main)) {
main = ""
}
if (missing(mattext)) {
mattext = round(mat, fig)
}
if (missing(xnames)) {
xnames = dimnames(mat)[[2]]
}
if (missing(ynames)) {
ynames = dimnames(mat)[[1]]
}
nc = ncol(mat)
nr = nrow(mat)
posmat = mat
posmat[which(posmat <= 0)] = NA
negmat = mat
negmat[which(mat >= 0)] = NA
if (missing(opacity_factor)) {
opacity_factor = vector(length = 2)
if (prod(is.na(posmat)) == 0) {
opacity_factor[1] = max(posmat[which(posmat > 0)])/quantile(posmat[which(posmat >
0)], 0.75)[[1]]
}
if (prod(is.na(negmat)) == 0) {
opacity_factor[2] = max(abs(negmat)[which(negmat <
0)])/quantile(abs(negmat)[which(negmat < 0)],
0.75)[[1]]
}
}
specp = rev(1 - ((0:(nr * nc))/(nr * nc))^opacity_factor[1])
specn = rev(1 - ((0:(nr * nc))/(nr * nc))^opacity_factor[2])
if (prod(is.na(posmat)) == 0) {
image(t(apply(posmat, 2, rev)), col = rgb(t(col2rgb(col[1]))/255,
alpha = specp), main = main, axes = F, zlim = c(0,
max(mat)), xlab = xlab, ylab = ylab)
}
if (prod(is.na(negmat)) == 0) {
image(t(apply(abs(negmat), 2, rev)), col = rgb(t(col2rgb(col[2]))/255,
alpha = specn), main = main, axes = F, zlim = c(0,
abs(min(mat))), xlab = xlab, ylab = ylab, add = T)
}
axis(1, (0:(nc - 1))/(nc - 1), xnames, las = las)
axis(2, (0:(nr - 1))/(nr - 1), rev(ynames), las = las)
rvec = (0:(nr - 1)/(nr - 1))
cvec = (0:(nc - 1)/(nc - 1))
for (j in 1:nr) {
text(cvec, 1 - rvec[j], mattext[j, ], cex = cex)
}
}
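# Illustrative usage: plot a small signed matrix; positive entries are shaded
# with the first colour, negative entries with the second, and the rounded
# values are printed in each cell.
m_demo <- matrix(c(0.8, -0.2, 0.1, 0.5, -0.6, 0.3), nrow = 2,
                 dimnames = list(c("From1", "From2"), c("To1", "To2", "To3")))
plotM(m_demo, main = "Example matrix")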
|
df.inv <-
function (d, df, lambda = 1, iterations = 15)
{
df.diriv <- function(d, lambda) -sum(d*lambda /(1 + lambda*d)^2)
current.df <- sum(1/(1 + lambda*d))
if (abs((df - current.df)/df) < 1e-04 | iterations == 1)
return(list(lambda = lambda, df = current.df))
else {
lambda <- exp(logb(lambda) - (current.df - df)/df.diriv(d,
lambda))
Recall(d, df, lambda, iterations - 1)
}
}
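# Illustrative usage: find the smoothing parameter lambda whose effective
# degrees of freedom sum(1 / (1 + lambda * d)) matches a target df, via
# Newton steps on log(lambda).
d_demo <- c(0.1, 0.5, 1, 2, 5, 10)
df.inv(d_demo, df = 3)  # list(lambda, df) with df close to 3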
|
get_statements <- function(ticker){
check_credentials()
rfinanceConnection$get_statements(ticker)
}
|
library("PAFit")
net <- generate_BB(N = 100, multiple_node = 20, m = 1)
plot(net)
plot(net, slice = 3)
u <- as.PAFit_net(coauthor.net, type = "undirected")
plot(u)
plot(u, slice = 10)
plot(net, plot = "PA")
plot(net, plot = "fit")
|
"diff_true"
|
`Qh.times.dhyper` <-
function(h,n1,n0,M,SM,T0,use.ranks=TRUE){
K<-n1+n0-M
k1p<-h
k0p<-K-h
m1p<-n1-k1p
m0p<-n0-k0p
if (m0p/n0 >= m1p/n1){
k0c<- 0
k1c<- k1p-floor(n1*k0p/n0)
}
else {
k0c<- k0p-floor(n0*k1p/n1)
k1c<- 0
}
Kstar<-k0c+k1c
m0c<- m0p
m1star<- m1p
hstar<- k1c
m1star<-n1-h
n1star<-m1star+hstar
n0star<-m0c+k0c
if (use.ranks) S0Kstar<- -(Kstar-1)/2
else S0Kstar<-0
VM<-(M-1)*var(SM)*var(c(rep(1,n1star),rep(0,n0star)))
Nstar<-M+Kstar
SbarNstar<- (Kstar*S0Kstar + sum(SM))/Nstar
VNstar<- (Nstar-1)*var(c(rep(S0Kstar,Kstar),SM))* (Nstar-n1star)*n1star/(Nstar*(Nstar-1))
Zstat<- (T0*sqrt(VNstar) - hstar*S0Kstar+n1star*SbarNstar - (n1star-hstar)*mean(SM) )/sqrt(VM)
Qhhat<- c(pnorm( Zstat ))
return(dhyper(h,K,M,n1)*Qhhat)
}
|
print.summary.spqtest <- function(x, digits = max(3L, getOption("digits") - 3L),
...) {
class(x) <- c("gt_tbl")
print(x)
invisible(x)
}
|
pathwayRMSE <- function(x,maxlen=2, ... ){
S <- x$exogenous[1]
M <- x$exogenous[2:length(x$exogenous)]
R <- x$endogenous
if(maxlen > length(M)){
stop("The value of maxlen must be less than the number of mechanism")
}
result <- data.frame()
relName <- x$bestModels[S,R]
theModel <- x$allModels[[S,R,relName]]
pred <- predict(theModel)
RMSE_dir <- round(sqrt(mean((pred-x$data[,x$endogenous])^2)),4)
result[1,1] <- 0
result[1,2] <- paste0(S,"-->",R)
result[1,3] <- RMSE_dir
colnames(result) <- c("length","path","RMSE")
for(k in 1:maxlen){
nM <- length(M)
Pr <- permutations(n=nM,r=k,v=M)
temp <- result
res <- data.frame()
for(j in 1:nrow(Pr)){
path <- paste0(S,"-->",paste(Pr[j,],collapse="-->"),"-->",R)
suppressWarnings(RMSE <- pathwayPredict(x, path)$RMSE)
res[j,1] <- k
res[j,2] <- paste0(S,"-->",paste(Pr[j,],collapse="-->"),"-->",R)
res[j,3] <- RMSE
colnames(res) <- c("length","path","RMSE")
}
result <- rbind(temp,res)
}
result_final <- result[which(!is.na(result$RMSE)),]
rownames(result_final) <- 1:nrow(result_final)
return(data.frame(result_final))
}
|
predict.rbart = function(
object,
x.test=object$x.train,
tc=1,
fmean=mean(object$y.train),
q.lower=0.025,
q.upper=0.975,...)
{
  if(is.null(object)) stop("No fitted model specified!\n")
  nd=object$ndpost
  m=object$ntree
  mh=object$ntreeh
  xi=object$xicuts
  p = ncol(object$x.train)
  x = t(object$x.train)
  xp = t(x.test)
res=.Call("cpsambrt_predict",
x,
xp,
m,
mh,
nd,
xi,
tc,
object,
PACKAGE="rbart"
)
res$mdraws=res$mdraws+fmean
res$mmean=apply(res$mdraws,2,mean)
res$smean=apply(res$sdraws,2,mean)
res$msd=apply(res$mdraws,2,sd)
res$ssd=apply(res$sdraws,2,sd)
res$m.5=apply(res$mdraws,2,quantile,0.5)
res$m.lower=apply(res$mdraws,2,quantile,q.lower)
res$m.upper=apply(res$mdraws,2,quantile,q.upper)
res$s.5=apply(res$sdraws,2,quantile,0.5)
res$s.lower=apply(res$sdraws,2,quantile,q.lower)
res$s.upper=apply(res$sdraws,2,quantile,q.upper)
res$q.lower=q.lower
res$q.upper=q.upper
return(res)
}
|
mod_ui_choose_custom_element_colors <- function(id, available) {
ns <- NS(id)
tagList(
fluidRow(
column(6,
shinyWidgets::pickerInput(ns("custom_elements"),
"Element(s):",
choices = available,
options = list(`actions-box` = TRUE, size = 6),
multiple = TRUE
), style='padding:0px;'
),
column(6,
colourpicker::colourInput(ns("custom_color"), "Color:"), style='padding-left:0px;'
)
)
)
}
mod_server_choose_custom_element_colors <- function(id)
{
moduleServer(id, function(input, output, session){
reactive({
el <- input$custom_elements
cols <- input$custom_color
final <- rep(cols, length(el))
names(final) <- el
final
})
}
)
}
|
test_that("checkPath: normPath consistency", {
cwd <- getwd()
tmpdir <- tempdir2("test_normPath")
tmpdir <- normalizePath(tmpdir, winslash = "/", mustWork = FALSE)
setwd(tmpdir)
on.exit({
setwd(cwd)
unlink(tmpdir, recursive = TRUE)
}, add = TRUE)
paths <- list("./aaa/zzz",
"./aaa/zzz/",
".//aaa//zzz",
".//aaa//zzz/",
".\\aaa\\zzz",
".\\aaa\\zzz\\",
paste0(tmpdir, "/aaa/zzz"),
paste0(tmpdir, "/aaa/zzz/"),
file.path(tmpdir, "aaa", "zzz"))
checked <- normPath(paths)
expect_equal(length(unique(checked)), 1)
expect_equal(normPath(), character())
expect_true(all(is.na(normPath(list(NA, NA_character_)))))
expect_equal(normPath(NULL), character())
})
test_that("checkPath: checkPath consistency", {
currdir <- getwd()
tmpdir <- tempdir2("test_checkPath")
on.exit({
setwd(currdir)
unlink(tmpdir, recursive = TRUE)
}, add = TRUE)
setwd(tmpdir)
dir.create("aaa/zzz", recursive = TRUE, showWarnings = FALSE)
paths <- list("./aaa/zzz",
"./aaa/zzz/",
".//aaa//zzz",
".//aaa//zzz/",
".\\aaa\\zzz",
".\\aaa\\zzz\\",
paste0(tmpdir, "/aaa/zzz"),
paste0(tmpdir, "/aaa/zzz/"),
file.path(tmpdir, "aaa", "zzz"))
checked <- lapply(paths, checkPath, create = FALSE)
expect_equal(length(unique(checked)), 1)
unlink(tmpdir, recursive = TRUE)
expect_error(checkPath(), "Invalid path: no path specified.")
expect_error(checkPath(NULL), "Invalid path: cannot be NULL.")
expect_error(checkPath(NA_character_), "Invalid path: cannot be NA.")
f1 <- tempfile()
expect_true(file.create(f1))
expect_true(file.exists(f1))
expect_message(a <- checkPath(f1), "is an existing file")
})
|
if(getRversion() >= "2.15.1") utils::globalVariables("myinfo")
getidxs <- function(m) {
splitIndices(m,myinfo$nwrkrs)[[myinfo$id]]
}
mgrinit <- function(cls) {
setmyinfo <- function(i,n) {
assign("myinfo",list(id = i,nwrkrs = n),pos=tmpenv)
}
ncls <- length(cls)
clusterEvalQ(cls,tmpenv <- new.env())
clusterApply(cls,1:ncls,setmyinfo,ncls)
clusterEvalQ(cls,myinfo <- get("myinfo",tmpenv))
clusterEvalQ(cls,gbl <- globalenv())
clusterExport(cls,"getidxs",envir=environment())
}
mgrmakevar <- function(cls,varname,nr,nc,vartype="double") {
tmp <- big.matrix(nrow=nr,ncol=nc,type=vartype)
assign(varname,tmp,pos=parent.frame())
clusterExport(cls,"varname",envir=environment())
desc <- describe(tmp)
clusterExport(cls,"desc",envir=environment())
clusterEvalQ(cls, tmp <- attach.big.matrix(desc))
clusterEvalQ(cls,assign(varname,tmp))
invisible(0)
}
|
install.conda = function (version = 3,
bitNo = "auto",
...)
{
bitNo <- as.character(bitNo)
if(bitNo == "auto"){
if(is.x64()){
bitNo <- "x86_64"
}else{
bitNo <- "x86"
}
}
if(bitNo == "64"){
bitNo <- "x86_64"
}
if(bitNo == "32"){
bitNo <- "x86"
}
URL <- paste0("https://repo.continuum.io/miniconda/Miniconda",version,"-latest-Windows-",bitNo,".exe")
install.URL(URL, ...)
}
|
if (require(qkerntool)) {
data("iris")
testset <- sample(1:150,20)
train <- as.matrix(iris[-testset,-5])
test <- as.matrix(iris[testset,-5])
labeltrain<- as.integer(iris[-testset,5])
kpc1 <- qkpca(train, kernel = "rbfbase", qpar = list(sigma = 30,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "nonlbase", qpar = list(alpha = 0.1,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "laplbase", qpar = list(sigma = 0.4,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "ratibase", qpar = list(c = 150,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "multbase", qpar = list(c = 8,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "invbase", qpar = list(c = 9,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "wavbase", qpar = list(theta = 5,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "powbase", qpar = list(d = 2,q=0.99),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "logbase", qpar = list(d = 2,q=0.9999),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "caubase", qpar = list(sigma = 120,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "chibase", qpar = list(gamma = 0.1,q=0.8),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "studbase", qpar = list(d = 1,q=0.2),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "nonlcnd", qpar = list(alpha=0.02),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "polycnd", qpar = list(d = 2, alpha = 0.5, c = 10),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "rbfcnd", qpar = list(gamma = 1),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "laplcnd", qpar = list(gamma = 1),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "anocnd", qpar = list(d = 2, sigma = 0.02),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "raticnd", qpar = list(c = 100),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "multcnd", qpar = list(c = 6),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "invcnd", qpar = list(c = 10),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "wavcnd", qpar = list(theta = 5),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "powcnd", qpar = list(d = 2),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "logcnd", qpar = list(d = 1),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "caucnd", qpar = list(gamma = 90),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "chicnd", qpar = list(),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "studcnd", qpar = list(d = 1.5),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
kpc1 <- qkpca(train, kernel = "norcnd", qpar = list(),features = 2)
plot(rotated(kpc1),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
predict(kpc1,test)
qkfunc <- rbfbase(sigma = 30,q = 0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- nonlbase(alpha = 0.1,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- laplbase(sigma = 0.4,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- ratibase(c = 150,q = 0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- multbase(c = 8,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- invbase(c = 9,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- wavbase(theta = 5,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- powbase(d = 2,q=0.99)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- logbase(d = 2,q=0.9999)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- caubase(sigma = 120,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- chibase(gamma = 0.1,q=0.8)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
qkfunc <- studbase(d = 1,q=0.2)
Ktrain2 <- qkernmatrix(qkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
predict(kpc3,qkernmatrix(qkfunc, test, train))
cndkfunc <- nonlcnd(alpha=0.02)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- polycnd(d = 2, alpha = 0.5, c = 10)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- rbfcnd(gamma = 1)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- logcnd(d = 2)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- anocnd(d = 2, sigma = 0.02)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- raticnd(c = 100)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- multcnd(c = 6)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- invcnd(c = 10)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- wavcnd(theta = 5)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- powcnd(d = 2)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- logcnd(d = 1)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- caucnd(gamma = 90)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- chicnd()
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- studcnd(d = 1.5)
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
cndkfunc <- studcnd()
Ktrain2 <- cndkernmatrix(cndkfunc, train)
kpc3 <- qkpca(Ktrain2, features = 2)
plot(rotated(kpc3),col=labeltrain,xlab="1st Principal Component",ylab="2nd Principal Component")
predict(kpc3,cndkernmatrix(cndkfunc, test, train))
}
|
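# imptree_control(): assembles and validates the list of control parameters
# used when growing an imptree. Entries in 'controlList' override the matching
# arguments; when 'splitmetric' is truthy, 'tbase' must lie in [-1,2] and
# 'gamma' in [0,1], otherwise they are fixed at 2 and 1 respectively. Invalid
# 'depth' or 'minbucket' values fall back to their defaults with a warning.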
imptree_control <- function(splitmetric, controlList = NULL, tbase = 1, gamma = 1,
depth = NULL, minbucket = 1L, ...) {
sm <- splitmetric
clist <- list(depth = depth, minbucket = minbucket,
tbase = tbase, gamma = gamma)
if(!is.null(controlList)) {
clist[names(controlList)[names(controlList) %in% names(clist)]] <-
controlList[names(controlList) %in% names(clist)]
}
if(any(nal <- is.na(clist))) {
stop(gettextf("no 'NA' permitted in %s",
paste(sQuote(names(clist)[nal]), collapse = ", "),
domain ="R-imptree"))
}
if(sm) {
tbase <- as.double(clist[["tbase"]])
if(tbase > 2 || tbase < -1) {
stop(gettextf("value of 'tbase' (%.3f) must be between [-1,2]",
tbase, domain ="R-imptree"))
}
gamma <- as.double(clist[["gamma"]])
if(gamma > 1 || gamma < 0) {
stop(gettextf("value of 'gamma' (%.3f) must be in [0,1]",
gamma, domain ="R-imptree"))
}
} else {
clist$gamma <- 1
clist$tbase <- 2
}
mydepth <- clist[["depth"]]
if(!is.null(mydepth) && mydepth < 0L) {
warning(gettextf("ignoring supplied 'depth'=%d and use default instead",
mydepth, domain ="R-imptree"))
mydepth <- NULL
}
if(is.null(mydepth)) {
mydepth <- as.integer(.Machine$integer.max)
}
clist$depth <- as.integer(mydepth)
if((minbucket <- clist[["minbucket"]]) < 1L) {
warning(gettextf("ignoring supplied 'minbucket'=%d and use default instead",
minbucket, domain ="R-imptree"))
minbucket <- 1L
}
clist$minbucket <- as.integer(minbucket)
clist
}
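# Minimal usage sketch (assuming 'splitmetric' is supplied as a flag, as the
# body above treats it; no arguments beyond the signature are assumed):
# imptree_control(splitmetric = TRUE, depth = 4, minbucket = 5)
# returns list(depth = 4L, minbucket = 5L, tbase = 1, gamma = 1)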
|
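# Thin wrapper around checkarg(): validates a numeric vector whose elements are
# negative or zero integers (NaN and Inf allowed); positive, non-integer and NA
# values, as well as NULL, are rejected.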
isNegativeIntegerOrNanOrInfVector <- function(argument, default = NULL, stopIfNot = FALSE, n = NA, message = NULL, argumentName = NULL) {
  # forward the length constraint 'n' to checkarg
  checkarg(argument, "N", default = default, stopIfNot = stopIfNot, nullAllowed = FALSE,
           n = n, zeroAllowed = TRUE, negativeAllowed = TRUE, positiveAllowed = FALSE,
           nonIntegerAllowed = FALSE, naAllowed = FALSE, nanAllowed = TRUE, infAllowed = TRUE,
           message = message, argumentName = argumentName)
}
|
context("collect")
test_that("collect returns...", {
skip_on_cran()
dir <- tempdir()
res <- ft_get('10.1371/journal.pone.0086169', from='plos', cache=TRUE, backend="rcache", path=dir)
aa <- res %>% collect()
bb <- res %>% collect() %>% text()
expect_is(res, "ft_data")
expect_is(aa, "ft_data")
expect_is(bb, "list")
expect_is(aa$plos, "list")
expect_is(res$plos$data, "list")
expect_is(aa$plos$data, "list")
expect_null(res$plos$data$data)
expect_is(aa$plos$data$data, "plosft")
expect_is(bb$plos, "list")
expect_is(bb$plos[[1]], "character")
})
test_that("collect fails well", {
skip_on_cran()
res <- ft_get('10.1371/journal.pone.0086169', from='plos', cache=TRUE, backend="rds")
expect_error(collect())
expect_error(collect('adfafsdf'), "no applicable method")
expect_error(collect(5), "no applicable method")
})
|
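# CheckCounts(): coerces a document-by-term count input to a
# simple_triplet_matrix, transposing TermDocumentMatrix input, filling in
# default dimnames, and dropping all-zero (blank) documents.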
CheckCounts <- function(counts){
  if(inherits(counts, "TermDocumentMatrix")){ counts <- t(counts) }
if(is.null(dimnames(counts)[[1]])){ dimnames(counts)[[1]] <- paste("doc",1:nrow(counts)) }
if(is.null(dimnames(counts)[[2]])){ dimnames(counts)[[2]] <- paste("wrd",1:ncol(counts)) }
empty <- row_sums(counts) == 0
if(sum(empty) != 0){
counts <- counts[!empty,]
cat(paste("Removed", sum(empty), "blank documents.\n")) }
return(as.simple_triplet_matrix(counts))
}
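# tpxSelect(): fits a topic model for each value in K. With a single K and
# bf=FALSE the fit is returned directly; otherwise each fit's log Bayes factor
# against the single-topic null is computed via tpxML(), and the model with the
# largest BF is returned, stopping early after 'kill' consecutive decreases.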
tpxSelect <- function(X, K, bf, initheta, alpha, tol, kill, verb,
admix=TRUE, grp=NULL, tmax=10000,
wtol=10^{-4}, qn=100, nonzero=FALSE, dcut=-10){
if(!admix){
if(is.null(grp) || length(grp)!=nrow(X)){ grp <- rep(1,nrow(X)) }
else{ grp <- factor(grp) }
}
if(length(K)==1 && bf==FALSE){
if(verb){ cat(paste("Fitting the",K,"topic model.\n")) }
fit <- tpxfit(X=X, theta=initheta, alpha=alpha, tol=tol, verb=verb,
admix=admix, grp=grp, tmax=tmax, wtol=wtol, qn=qn)
fit$D <- tpxResids(X=X, theta=fit$theta, omega=fit$omega, grp=grp, nonzero=nonzero)$D
return(fit)
}
if(is.matrix(alpha)){ stop("Matrix alpha only works for fixed K") }
if(verb){ cat(paste("Fit and Bayes Factor Estimation for K =",K[1]))
if(length(K)>1){ cat(paste(" ...", max(K))) }
cat("\n") }
n <- nrow(X)
p <- ncol(X)
nK <- length(K)
BF <- D <- NULL
iter <- 0
sx <- sum(X)
qnull <- col_sums(X)/sx
null <- sum( X$v*log(qnull[X$j]) ) - 0.5*(n+p)*(log(sx) - log(2*pi))
best <- -Inf
bestfit <- NULL
for(i in 1:nK){
fit <- tpxfit(X=X, theta=initheta, alpha=alpha, tol=tol, verb=verb,
admix=admix, grp=grp, tmax=tmax, wtol=wtol, qn=qn)
BF <- c(BF, tpxML(X=X, theta=fit$theta, omega=fit$omega, alpha=fit$alpha, L=fit$L, dcut=dcut, admix=admix, grp=grp) - null)
R <- tpxResids(X=X, theta=fit$theta, omega=fit$omega, grp=grp, nonzero=nonzero)
D <- cbind(D, unlist(R$D))
if(verb>0) cat(paste("log BF(", K[i], ") =", round(BF[i],2)))
if(verb>1) cat(paste(" [ ", fit$iter,"steps, disp =",round(D[1,i],2)," ]\n")) else if(verb >0) cat("\n")
    if(is.nan(BF[i])){
      cat("NaN for Bayes factor; returning best fit so far.\n")
      return(bestfit) }
if(BF[i] > best){
best <- BF[i]
bestfit <- fit
} else if(kill>0 && i>kill){
if(prod(BF[i-0:(kill-1)] < BF[i-1:kill])==1) break }
if(i<nK){
if(!admix){ initheta <- tpxinit(X,2,K[i+1], alpha, 0) }
else{ initheta <- tpxThetaStart(X, fit$theta, fit$omega, K[i+1]) }
}
}
names(BF) <- dimnames(D)[[2]] <- paste(K[1:length(BF)])
return(list(theta=bestfit$theta, omega=bestfit$omega, alpha=bestfit$alpha,
BF=BF, D=D, K=K[which.max(BF)])) }
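# tpxinit(): builds the starting topic matrix. A user-supplied theta matrix (or
# a matrix-valued alpha) is column-normalized and returned as-is; otherwise a
# short sequence of coarse EM fits over increasing K grows an initial theta.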
tpxinit <- function(X, initheta, K1, alpha, verb){
if(is.matrix(initheta)){
if(ncol(initheta)!=K1){ stop("mis-match between initheta and K.") }
if(prod(initheta>0) != 1){ stop("use probs > 0 for initheta.") }
return(normalize(initheta, byrow=FALSE)) }
if(is.matrix(alpha)){
if(nrow(alpha)!=ncol(X) || ncol(alpha)!=K1){ stop("bad matrix alpha dimensions; check your K") }
return(normalize(alpha, byrow=FALSE)) }
if(is.null(initheta)){ ilength <- K1-1 }
else{ ilength <- initheta[1] }
if(ilength < 1){ ilength <- 1 }
if(length(initheta)>1){ tmax <- initheta[2] }
else{ tmax <- 3 }
if(length(initheta)>2){ tol <- initheta[3] }
else{ tol <- 0.5 }
if(length(initheta)>3){ verb <- initheta[4] }
else{ verb <- 0 }
if(verb){ cat("Building initial topics")
if(verb > 1){ cat(" for K = ") }
else{ cat("... ") } }
nK <- length( Kseq <- unique(ceiling(seq(2,K1,length=ilength))) )
initheta <- tpxThetaStart(X, matrix(col_sums(X)/sum(X), ncol=1), matrix(rep(1,nrow(X))), 2)
if(verb > 0)
{ cat("\n")
print(list(Kseq=Kseq, tmax=tmax, tol=tol)) }
for(i in 1:nK){
fit <- tpxfit(X=X, theta=initheta, alpha=alpha, tol=tol, verb=verb,
admix=TRUE, grp=NULL, tmax=tmax, wtol=-1, qn=-1)
if(verb>1){ cat(paste(Kseq[i],",", sep="")) }
if(i<nK){ initheta <- tpxThetaStart(X, fit$theta, fit$omega, Kseq[i+1]) }
else{ initheta <- fit$theta }
}
if(verb){ cat("done.\n") }
return(initheta)
}
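# tpxfit(): main fitting loop. Each iteration re-solves the document weights
# (omega), takes one EM step (tpxEM), and optionally applies the quasi-Newton
# acceleration in tpxQN(), reverting to the plain EM move if the accelerated
# step lowers the log posterior. Iteration stops once both the log-posterior
# change and the change in theta fall below 'tol', or after 'tmax' iterations.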
tpxfit <- function(X, theta, alpha, tol, verb,
admix, grp, tmax, wtol, qn)
{
if(!inherits(X,"simple_triplet_matrix")){ stop("X needs to be a simple_triplet_matrix") }
K <- ncol(theta)
n <- nrow(X)
p <- ncol(X)
m <- row_sums(X)
if(is.null(alpha)){ alpha <- 1/(K*p) }
if(is.matrix(alpha)){ if(nrow(alpha)!=p || ncol(alpha)!=K){ stop("bad matrix alpha dimensions") }}
xvo <- X$v[order(X$i)]
wrd <- X$j[order(X$i)]-1
doc <- c(0,cumsum(as.double(table(factor(X$i, levels=c(1:nrow(X)))))))
omega <- tpxweights(n=n, p=p, xvo=xvo, wrd=wrd, doc=doc, start=tpxOmegaStart(X,theta), theta=theta)
if(!admix){ omega <- matrix(apply(omega,2, function(w) tapply(w,grp,mean)), ncol=K) }
iter <- 0
dif <- tol+1+qn
update <- TRUE
if(verb>0){
cat("log posterior increase: " )
digits <- max(1, -floor(log(tol, base=10))) }
Y <- NULL
Q0 <- col_sums(X)/sum(X)
L <- tpxlpost(X=X, theta=theta, omega=omega, alpha=alpha, admix=admix, grp=grp)
while( update && iter < tmax ){
if(admix && wtol > 0){ Wfit <- tpxweights(n=nrow(X), p=ncol(X), xvo=xvo, wrd=wrd, doc=doc,
start=omega, theta=theta, verb=0, nef=TRUE, wtol=wtol, tmax=20) }
else{ Wfit <- omega }
move <- tpxEM(X=X, m=m, theta=theta, omega=Wfit, alpha=alpha, admix=admix, grp=grp)
QNup <- tpxQN(move=move, Y=Y, X=X, alpha=alpha, verb=verb, admix=admix, grp=grp, doqn=qn-dif)
move <- QNup$move
Y <- QNup$Y
if(QNup$L < L){
if(verb > 10){ cat("_reversing a step_") }
move <- tpxEM(X=X, m=m, theta=theta, omega=omega, alpha=alpha, admix=admix, grp=grp)
QNup$L <- tpxlpost(X=X, theta=move$theta, omega=move$omega, alpha=alpha, admix=admix, grp=grp) }
dif <- (QNup$L-L)
L <- QNup$L
if(abs(dif) < tol){
if(sum(abs(theta-move$theta)) < tol){ update = FALSE } }
if(verb>0 && (iter-1)%%ceiling(10/verb)==0 && iter>0){
cat( paste( round(dif,digits),
", ", sep="") ) }
if(((iter+1)%%1000)==0){
cat(sprintf("p %d iter %d diff %g\n",
nrow(theta), iter+1,round(dif))) }
iter <- iter+1
theta <- move$theta
omega <- move$omega
}
L <- tpxlpost(X=X, theta=theta, omega=omega, alpha=alpha, admix=admix, grp=grp)
if(verb>0){
cat("done.")
if(verb>1) { cat(paste(" (L = ", round(L,digits), ")", sep="")) }
cat("\n")
}
out <- list(theta=theta, omega=omega, K=K, alpha=alpha, L=L, iter=iter)
invisible(out) }
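# tpxweights(): wrapper around the C routine "Romega" that solves for the
# per-document topic weights given theta; returns an n x K matrix.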
tpxweights <- function(n, p, xvo, wrd, doc, start, theta, verb=FALSE, nef=TRUE, wtol=10^{-5}, tmax=1000)
{
K <- ncol(theta)
start[start == 0] <- 0.1/K
start <- start/rowSums(start)
omega <- .C("Romega",
n = as.integer(n),
p = as.integer(p),
K = as.integer(K),
doc = as.integer(doc),
wrd = as.integer(wrd),
X = as.double(xvo),
theta = as.double(theta),
W = as.double(t(start)),
nef = as.integer(nef),
tol = as.double(wtol),
tmax = as.integer(tmax),
verb = as.integer(verb),
PACKAGE="maptpx")
return(t(matrix(omega$W, nrow=ncol(theta), ncol=n))) }
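# tpxEM(): a single EM update. In the admixture case the expected topic counts
# are accumulated in C ("Rzhat") and theta/omega are re-normalized after adding
# the prior counts (alpha and 1/K); in the simple mixture case group-level
# weights from tpxMixQ() are used instead.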
tpxEM <- function(X, m, theta, omega, alpha, admix, grp)
{
n <- nrow(X)
p <- ncol(X)
K <- ncol(theta)
if(admix){ Xhat <- (X$v/tpxQ(theta=theta, omega=omega, doc=X$i, wrd=X$j))*(omega[X$i,]*theta[X$j,])
Zhat <- .C("Rzhat", n=as.integer(n), p=as.integer(p), K=as.integer(K), N=as.integer(nrow(Xhat)),
Xhat=as.double(Xhat), doc=as.integer(X$i-1), wrd=as.integer(X$j-1),
zj = as.double(rep(0,K*p)), zi = as.double(rep(0,K*n)), PACKAGE="maptpx")
theta <- normalize(matrix(Zhat$zj+alpha, ncol=K), byrow=FALSE)
omega <- normalize(matrix(Zhat$zi+1/K, ncol=K)) }
else{
qhat <- tpxMixQ(X, omega, theta, grp, qhat=TRUE)$qhat
theta <- normalize(tcrossprod_simple_triplet_matrix( t(X), t(qhat) ) + alpha, byrow=FALSE)
omega <- normalize(matrix(apply(qhat*m,2, function(x) tapply(x,grp,sum)), ncol=K)+1/K ) }
return(list(theta=theta, omega=omega)) }
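# tpxQN(): quasi-Newton acceleration. The last three parameter vectors, mapped
# to natural-exponential-family coordinates by tpxToNEF(), define an
# extrapolated candidate; it replaces the EM move only if its log posterior is
# at least as good, otherwise the stored history is trimmed and EM continues.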
tpxQN <- function(move, Y, X, alpha, verb, admix, grp, doqn)
{
L <- tpxlpost(X=X, theta=move$theta, omega=move$omega,
alpha=alpha, admix=admix, grp=grp)
if(doqn < 0){ return(list(move=move, L=L, Y=Y)) }
Y <- cbind(Y, tpxToNEF(theta=move$theta, omega=move$omega))
if(ncol(Y) < 3){ return(list(Y=Y, move=move, L=L)) }
if(ncol(Y) > 3){ warning("mis-specification in quasi-newton update; please report this bug.") }
U <- as.matrix(Y[,2]-Y[,1])
V <- as.matrix(Y[,3]-Y[,2])
sUU <- sum(U^2)
sVU <- sum(V*U)
Ynew <- Y[,3] + V*(sVU/(sUU-sVU))
qnup <- tpxFromNEF(Ynew, n=nrow(move$omega),
p=nrow(move$theta), K=ncol(move$theta))
Lqnup <- try(tpxlpost(X=X, theta=qnup$theta, omega=qnup$omega,
alpha=alpha, admix=admix, grp=grp), silent=TRUE)
if(inherits(Lqnup, "try-error")){
if(verb>10){ cat("(QN: try error) ") }
return(list(Y=Y[,-1], move=move, L=L)) }
if(verb>10){ cat(paste("(QN diff ", round(Lqnup-L,3), ")\n", sep="")) }
if(Lqnup < L){
return(list(Y=Y[,-1], move=move, L=L)) }
else{
L <- Lqnup
Y <- cbind(Y[,2],Ynew)
return( list(Y=Y, move=qnup, L=L) )
}
}
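# tpxlpost(): log posterior of (theta, omega): the multinomial log likelihood
# plus the alpha*log(theta) prior term (for scalar alpha) and the
# sum(log(omega))/K term for the topic weights.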
tpxlpost <- function(X, theta, omega, alpha, admix=TRUE, grp=NULL)
{
if(!inherits(X,"simple_triplet_matrix")){ stop("X needs to be a simple_triplet_matrix.") }
K <- ncol(theta)
if(admix){ L <- sum( X$v*log(tpxQ(theta=theta, omega=omega, doc=X$i, wrd=X$j)) ) }
else{ L <- sum(tpxMixQ(X, omega, theta, grp)$lqlhd) }
if(is.null(nrow(alpha))){ if(alpha != 0){ L <- L + sum(alpha*log(theta)) } }
L <- L + sum(log(omega))/K
return(L) }
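# tpxML(): Laplace approximation to the log marginal likelihood, used by
# tpxSelect() for Bayes-factor model comparison; 'dcut' floors very small
# log-determinants of the negative-Hessian blocks.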
tpxML <- function(X, theta, omega, alpha, L, dcut, admix=TRUE, grp=NULL){
K <- ncol(theta)
p <- nrow(theta)
n <- nrow(omega)
if(!admix){
qhat <- tpxMixQ(X, omega, theta, grp, qhat=TRUE)$qhat
ML <- sum(X$v*log(row_sums(qhat[X$i,]*theta[X$j,])))
return( ML - 0.5*( K*p + (K-1)*n )*log(sum(X)) ) }
ML <- L + lfactorial(K)
q <- tpxQ(theta=theta, omega=omega, doc=X$i, wrd=X$j)
D <- tpxHnegDet(X=X, q=q, theta=theta, omega=omega, alpha=alpha)
D[D < dcut] <- dcut
ML <- ML - 0.5*sum( D )
ML <- ML + (K*p + sum(omega>0.01))*log(2*pi)/2
if(is.null(nrow(alpha))){
ML <- ML + K*( lgamma(p*(alpha+1)) - p*lgamma(alpha+1) ) }
else{ ML <- ML + sum(lgamma(col_sums(alpha+1)) - col_sums(lgamma(alpha+1))) }
ML <- ML + sum(D[-(1:p)]>dcut)*( lfactorial(K) - K*lgamma( 1+1/K ) )
return(ML) }
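# tpxResids(): standardized residuals plus a dispersion estimate and p-value
# for a fitted model; also used by tpxThetaStart() to seed additional topics
# from poorly fit cells.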
tpxResids <- function(X, theta, omega, grp=NULL, nonzero=TRUE)
{
if(!inherits(X,"simple_triplet_matrix")){ stop("X needs to be a simple_triplet_matrix.") }
m <- row_sums(X)
K <- ncol(theta)
n <- nrow(X)
phat <- sum(col_sums(X)>0)
d <- n*(K-1) + K*( phat-1 )
if(nrow(omega) == nrow(X)){
qhat <- tpxQ(theta=theta, omega=omega, doc=X$i, wrd=X$j)
xhat <- qhat*m[X$i]
} else{
q <- tpxMixQ(X=X, omega=omega, theta=theta, grp=grp, qhat=TRUE)$qhat
qhat <- row_sums(q[X$i,]*theta[X$j,])
xhat <- qhat*m[X$i] }
if(nonzero || nrow(omega) < nrow(X)){
e <- X$v^2 - 2*(X$v*xhat - xhat^2)
s <- qhat*m[X$i]*(1-qhat)^{1-m[X$i]}
r <- sqrt(e/s)
df <- length(r)*(1-d/(n*phat))
R <- sum(r^2)
}
else{
e <- (X$v^2 - 2*X$v*m[X$i]*qhat)
s <- m[X$i]*qhat*(1-qhat)
fulltable <- .C("RcalcTau",
n = as.integer(nrow(omega)),
p = as.integer(nrow(theta)),
K = as.integer(ncol(theta)),
m = as.double(m),
omega = as.double(omega),
theta = as.double(theta),
tau = double(1), size=double(1),
PACKAGE="maptpx" )
tau <- fulltable$tau
R <- sum(e/s) + tau
df <- fulltable$size - phat - d
r <- suppressWarnings(sqrt(e/s + tau))
r[is.nan(r)] <- 0
}
sig2 <- R/df
rho <- suppressWarnings(pchisq(R, df=df, lower.tail=FALSE))
D <- list(dispersion=sig2, pvalue=rho, df=df)
return( list(s=s, e=e, r=r, D=D) ) }
tpxThetaStart <- function(X, theta, omega, K)
{
R <- tpxResids(X, theta=theta, omega=omega, nonzero=TRUE)
X$v <- R$e*(R$r>3) + 1/ncol(X)
Kpast <- ncol(theta)
Kdiff <- K-Kpast
if(Kpast != ncol(omega) || Kpast >= K){ stop("bad K in tpxThetaStart") }
initheta <- normalize(Kpast*theta+rowMeans(theta), byrow=FALSE)
n <- nrow(X)
ki <- matrix(1:(n-n%%Kdiff), ncol=Kdiff)
for(i in 1:Kdiff){ initheta <- cbind(initheta, (col_sums(X[ki[,i],])+1/ncol(X))/(sum(X[ki[,i],])+1)) }
return( initheta )
}
tpxOmegaStart <- function(X, theta)
{
if(!inherits(X,"simple_triplet_matrix")){ stop("X needs to be a simple_triplet_matrix.") }
omega <- try(tcrossprod_simple_triplet_matrix(X, solve(t(theta)%*%theta)%*%t(theta)), silent=TRUE )
if(inherits(omega,"try-error")){ return( matrix( 1/ncol(theta), nrow=nrow(X), ncol=ncol(theta) ) ) }
omega[omega <= 0] <- .5
return( normalize(omega, byrow=TRUE) )
}
tpxQ <- function(theta, omega, doc, wrd){
if(length(wrd)!=length(doc)){stop("index mis-match in tpxQ") }
if(ncol(omega)!=ncol(theta)){stop("theta/omega mis-match in tpxQ") }
out <- .C("RcalcQ",
n = as.integer(nrow(omega)),
p = as.integer(nrow(theta)),
K = as.integer(ncol(theta)),
doc = as.integer(doc-1),
wrd = as.integer(wrd-1),
N = as.integer(length(wrd)),
omega = as.double(omega),
theta = as.double(theta),
q = double(length(wrd)),
PACKAGE="maptpx" )
return( out$q ) }
tpxMixQ <- function(X, omega, theta, grp=NULL, qhat=FALSE){
if(is.null(grp)){ grp <- rep(1, nrow(X)) }
K <- ncol(omega)
n <- nrow(X)
mixhat <- .C("RmixQ",
n = as.integer(nrow(X)),
p = as.integer(ncol(X)),
K = as.integer(K),
N = as.integer(length(X$v)),
B = as.integer(nrow(omega)),
cnt = as.double(X$v),
doc = as.integer(X$i-1),
wrd = as.integer(X$j-1),
grp = as.integer(as.numeric(grp)-1),
omega = as.double(omega),
theta = as.double(theta),
Q = double(K*n),
PACKAGE="maptpx")
lQ <- matrix(mixhat$Q, ncol=K)
lqlhd <- log(row_sums(exp(lQ)))
lqlhd[is.infinite(lqlhd)] <- -600
if(qhat){
qhat <- exp(lQ-lqlhd)
infq <- row_sums(qhat) < .999
if(sum(infq)>0){
qhat[infq,] <- 0
qhat[n*(apply(matrix(lQ[infq,],ncol=K),1,which.max)-1) + (1:n)[infq]] <- 1 }
}
return(list(lQ=lQ, lqlhd=lqlhd, qhat=qhat)) }
tpxHnegDet <- function(X, q, theta, omega, alpha){
K <- ncol(theta)
n <- nrow(omega)
Xq <- X
Xq$v <- Xq$v/q^2
HT <- tcrossprod_simple_triplet_matrix(t(Xq), apply(omega, 1, function(v) v%o%v ) )
HT[,K*(0:(K-1))+1:K] <- HT[,K*(0:(K-1))+1:K] + alpha/theta^2
DT <- apply(HT, 1, tpxlogdet)
HW <- matrix(.C("RnegHW",
n = as.integer(nrow(omega)),
p = as.integer(nrow(theta)),
K = as.integer(K-1),
omeg = as.double(omega[,-1]),
thet = as.double(theta[,-1]),
doc = as.integer(X$i-1),
wrd = as.integer(X$j-1),
cnt = as.double(X$v),
q = as.double(q),
N = as.integer(length(q)),
H = double(n*(K-1)^2),
PACKAGE="maptpx")$H,
nrow=(K-1)^2, ncol=n)
DW <- apply(HW, 2, tpxlogdet)
return( c(DT,DW) ) }
tpxToNEF <- function(theta, omega){
n <- nrow(omega)
p <- nrow(theta)
K <- ncol(omega)
return(.C("RtoNEF",
n=as.integer(n), p=as.integer(p), K=as.integer(K),
Y=double((p-1)*K + n*(K-1)),
theta=as.double(theta), tomega=as.double(t(omega)),
PACKAGE="maptpx")$Y)
}
tpxFromNEF <- function(Y, n, p, K){
bck <- .C("RfromNEF",
n=as.integer(n), p=as.integer(p), K=as.integer(K),
Y=as.double(Y), theta=double(K*p), tomega=double(K*n),
PACKAGE="maptpx")
return(list(omega=t( matrix(bck$tomega, nrow=K) ), theta=matrix(bck$theta, ncol=K)))
}
tpxlogdet <- function(v){
v <- matrix(v, ncol=sqrt(length(v)))
  if( sum(zeros <- colSums(v)==0)!=0 ){
    cat("warning: boundary values in laplace approx\n")
    # drop the all-zero rows/columns (logical indexing; keep matrix structure)
    v <- v[!zeros, !zeros, drop=FALSE] }
return(determinant(v, logarithm=TRUE)$modulus) }
|
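# rlkjcorr(): draws n random K x K correlation matrices from the LKJ
# distribution with shape parameter eta (eta = 1 is uniform over correlation
# matrices), by building an upper-triangular factor R column by column and
# returning crossprod(R).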
rlkjcorr <- function (n, K, eta = 1) {
stopifnot(is.numeric(K), K >= 2, K == as.integer(K))
stopifnot(eta > 0)
f <- function() {
alpha <- eta + (K - 2)/2
r12 <- 2 * rbeta(1, alpha, alpha) - 1
R <- matrix(0, K, K)
R[1,1] <- 1
R[1,2] <- r12
R[2,2] <- sqrt(1 - r12^2)
if(K > 2) for (m in 2:(K - 1)) {
alpha <- alpha - 0.5
y <- rbeta(1, m / 2, alpha)
z <- rnorm(m, 0, 1)
z <- z / sqrt(crossprod(z)[1])
R[1:m,m+1] <- sqrt(y) * z
R[m+1,m+1] <- sqrt(1 - y)
}
return(crossprod(R))
}
R <- replicate( n , f() )
if ( dim(R)[3]==1 ) {
R <- R[,,1]
} else {
R <- aperm(R,c(3,1,2))
}
return(R)
}
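# Usage sketch: a single draw returns a K x K matrix, multiple draws an
# n x K x K array, e.g.
# rlkjcorr(1, K = 3, eta = 2)     # one 3 x 3 correlation matrix
# rlkjcorr(100, K = 3, eta = 2)   # array with dimensions 100 x 3 x 3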
|