code
stringlengths 1
13.8M
|
---|
## Obtain predictions from the fitted spline (smooth trend) of an LMMsolve
## model, either on a regular grid spanning the data range or at the points
## supplied in `newdata`.
##
## Arguments:
##   object           - an object of class LMMsolve fitted with a spline term
##                      (object$splRes must be non-NULL).
##   grid             - numeric vector giving the number of grid points per
##                      spline dimension (used when newdata is NULL).
##   newdata          - data.frame containing the smoothing variables at
##                      which to predict (alternative to grid).
##   deriv            - derivative order; only supported for 1-D splines and
##                      reset to 0 (with a warning) otherwise.
##   includeIntercept - if TRUE, add the fitted intercept to the predictions.
##
## Returns a data.frame with the smoothing variables and a column "ypred".
obtainSmoothTrend <- function(object,
                              grid = NULL,
                              newdata = NULL,
                              deriv = 0,
                              includeIntercept = FALSE) {
  ## -- Input checks --------------------------------------------------------
  if (!inherits(object, "LMMsolve")) {
    stop("object should be an object of class LMMsolve.\n")
  }
  if (is.null(object$splRes)) {
    stop("The model was fitted without a spline component.\n")
  }
  if (is.null(grid) && is.null(newdata)) {
    stop("Specify either grid or newdata.\n")
  }
  ## Spline ingredients stored at fit time.
  splRes <- object$splRes
  x <- splRes$x          # list: observed values of each smoothing variable
  knots <- splRes$knots  # list: knot sequence per spline dimension
  scaleX <- splRes$scaleX
  pord <- splRes$pord    # penalty order used at fit time
  splDim <- length(x)    # number of spline dimensions
  if (splDim == 1 && (!is.numeric(deriv) || length(deriv) > 1 || deriv < 0 ||
                      deriv != round(deriv))) {
    stop("deriv should be an integer greater than or equal to zero.\n")
  }
  if (splDim > 1 && deriv != 0) {
    ## Derivatives are only implemented for 1-D splines.
    deriv <- 0
    warning("deriv is ignored for ", splDim, "-dimensional splines.\n",
            call. = FALSE)
  }
  if (!is.null(newdata)) {
    if (!inherits(newdata, "data.frame")) {
      stop("newdata should be a data.frame.\n")
    }
    ## Every smoothing variable must be present in newdata.
    missX <- names(x)[!sapply(X = names(x), FUN = function(name) {
      hasName(x = newdata, name = name)
    })]
    if (length(missX) > 0) {
      stop("The following smoothing variables are not in newdata:\n",
           paste0(missX, collapse = ", "), "\n")
    }
    ## Evaluation points taken directly from newdata, in fit order.
    xGrid <- lapply(X = seq_along(x), FUN = function(i) {
      newdata[[names(x)[i]]]
    })
    ## NOTE(review): mapply() may simplify its result to an array when the
    ## per-dimension bases have identical dims — confirm whether
    ## SIMPLIFY = FALSE is needed for Bsplines() here.
    Bx <- mapply(FUN = Bsplines, knots, xGrid, deriv)
    ## Row-wise Kronecker: one combined basis row per newdata observation.
    BxTot <- Reduce(RowKronecker, Bx)
  } else {
    if (!is.numeric(grid) || length(grid) != splDim) {
      stop("grid should be a numeric vector with length equal to the dimension ",
           "of the fitted spline: ", splDim,".\n")
    }
    ## Regular grid spanning the observed range of each variable.
    xGrid <- lapply(X = seq_along(x), FUN = function(i) {
      seq(min(x[[i]]), max(x[[i]]), length = grid[i])
    })
    Bx <- mapply(FUN = Bsplines, knots, xGrid, deriv)
    ## Full Kronecker product: one row per grid-point combination.
    BxTot <- Reduce(`%x%`, Bx)
  }
  ## Fixed-effect design built from the basis (via constructX with pord).
  X <- mapply(FUN = function(x, y) {
    constructX(B = x, x = y, scaleX = scaleX, pord = pord)
  }, Bx, xGrid, SIMPLIFY = FALSE)
  if (!is.null(newdata)) {
    XTot <- Reduce(RowKronecker, X)
  } else {
    XTot <- Reduce(`%x%`, X)
  }
  ## The intercept column is handled separately through includeIntercept.
  XTot <- removeIntercept(XTot)
  if (includeIntercept) {
    mu <- coef(object)$'(Intercept)'
  } else {
    mu <- 0
  }
  ## Fixed (bc) and random (sc) spline contributions to the prediction.
  if (is.null(XTot)) {
    bc <- 0
  } else {
    bc <- as.vector(XTot %*% coef(object)$splF)
  }
  sc <- as.vector(BxTot %*% coef(object)$splR)
  fit <- mu + bc + sc
  if (!is.null(newdata)) {
    outDat <- newdata
    outDat[["ypred"]] <- fit
  } else {
    ## expand.grid varies its first factor fastest, hence the rev() calls to
    ## restore the original variable order in the output columns.
    outDat <- data.frame(expand.grid(rev(xGrid)), ypred = fit)
    colnames(outDat)[-ncol(outDat)] <- rev(names(x))
    outDat <- outDat[c(names(x), "ypred")]
  }
  return(outDat)
}
#' Retrieve the IPHub API key from the IPHUB_API_KEY environment variable.
#'
#' If the variable is unset (or `force = TRUE`) and the session is
#' interactive, the user is prompted for the key, which is then stored in
#' the environment variable for the remainder of the session.
#'
#' @param force If TRUE, ignore any existing IPHUB_API_KEY value and prompt
#'   again (interactive sessions only; non-interactive sessions error).
#' @return The API key as a character string.
iphub_api_key <- function(force = FALSE) {
  env <- Sys.getenv("IPHUB_API_KEY")
  if (!identical(env, "") && !force) {
    return(env)
  }
  if (!interactive()) {
    # Cannot prompt the user outside an interactive session.
    stop("Please set env var IPHUB_API_KEY to your IPHub key",
         call. = FALSE)
  }
  # Fixed message: was missing the sentence break before "See".
  message("Couldn't find env var IPHUB_API_KEY. See ?iphub_api_key for more details.")
  message("Please enter your API key:")
  pat <- readline(": ")
  if (identical(pat, "")) {
    stop("IPHub key entry failed", call. = FALSE)
  }
  # Cache the key for the rest of the session.
  message("Updating IPHUB_API_KEY env var")
  Sys.setenv(IPHUB_API_KEY = pat)
  pat
}
## Sapply: a variant of base::sapply() that also preserves the *input's*
## dimensions: when X is a matrix/array, the result keeps X's dim/dimnames
## as trailing dimensions (base sapply only uses length(X)).
##
## Arguments mirror base::sapply(): X (vector/array/list), FUN, ... passed
## on to FUN, simplify, USE.NAMES.
## Returns an array whose trailing dims come from X and whose leading dims
## come from the common shape of the per-element results (when simplify).
Sapply <- function (X, FUN, ..., simplify = TRUE, USE.NAMES = TRUE){
  FUN <- match.fun(FUN)
  ## Record the shape of X: these become the trailing dims of the answer.
  if(length(dim(X))){
    d.ans <- dim(X)
    dn.ans <- if(length(dimnames(X))) dimnames(X) else list(NULL)
  } else {
    d.ans <- length(X)
    dn.ans <- if(USE.NAMES) list(names(X)) else list(NULL)
  }
  if (!is.vector(X) || is.object(X))
    X <- as.list(X)
  answer <- lapply(X,FUN,...)
  ## As in base sapply: character input supplies names when none were set.
  if (USE.NAMES && is.character(X) && length(d.ans) == 1 && is.null(names(answer)))
    dn.ans <- X
  if(simplify){
    dd.ans <- NULL         # leading dims contributed by the results
    ddn.ans <- list(NULL)  # ...and their dimnames
    DIMS <- lapply(answer,dim)
    ulDIMS <- unique(unlist(lapply(DIMS,length)))
    if(length(ulDIMS)==1 && ulDIMS > 0){
      ## All results are arrays with the same number of dimensions: a
      ## dimension is kept only if its extent agrees across all results.
      DIMS <- array(unlist(DIMS),dim=c(ulDIMS,length(X)))
      common.dims <- rep(NA,ulDIMS)
      for(i in seq(nrow(DIMS))){
        uDIMS.i <- unique(DIMS[i,])
        if(length(uDIMS.i) == 1){
          common.dims[i] <- uDIMS.i
        }
      }
      if(!any(is.na(common.dims))){
        dd.ans <- common.dims
        ddn.ans <- dimnames(answer[[1]])
      }
    }
    else {
      ## Results are plain vectors: simplify when they share a length > 1.
      LEN <- unique(unlist(lapply(answer,length)))
      if(length(LEN)==1 && LEN > 1){
        dd.ans <- LEN
        ddn.ans <- list(names(answer[[1]]))
      }
    }
    if(!is.null(dd.ans)){
      if(is.null(ddn.ans)) ddn.ans <- rep(list(NULL),length(dd.ans))
      return(array(unlist(answer,recursive=FALSE),dim=c(dd.ans,d.ans),dimnames=c(ddn.ans,dn.ans)))
    }
    else
      ## No common result shape: results fill an array shaped like X.
      return(array(unlist(answer,recursive=FALSE),dim=c(d.ans),dimnames=c(dn.ans)))
  }
  ## simplify = FALSE: wrap the list of results in an array shaped like X.
  return(array(answer,dim=d.ans,dimnames=dn.ans))
}
## Lapply: list-returning companion of Sapply() — never simplifies the
## results and never attaches names taken from a character input.
Lapply <- function(X, FUN, ...) {
  Sapply(X, FUN, ..., simplify = FALSE, USE.NAMES = FALSE)
}
## Monte-Carlo confidence interval for carcass detection probability based
## on the Etterson (2013) equations.
##
## Arguments:
##   s, s.lwr, s.upr : estimate and CI bounds of the persistence
##                     (survival) probability; length > 1 when time-variant.
##   f, f.lwr, f.upr : estimate and CI bounds of searcher efficiency.
##   J               : search schedule, passed to the ettersonEq14* helpers.
##   s.time.variance : "carcass age" or "date" — how survival varies.
##   f.time.variance : "number of searches" or "date".
##   nsim            : number of Monte-Carlo draws.
##   ci              : nominal coverage of the returned interval.
##
## Returns a list with elements p.lower and p.upper (quantiles of the
## simulated detection probabilities). Relies on shapeparameter() and
## ettersonEq14/ettersonEq14v1/ettersonEq14v2 defined elsewhere.
CIetterson <- function(s, s.lwr, s.upr, f, f.lwr, f.upr, J, s.time.variance="carcass age",
                       f.time.variance="number of searches", nsim=1000, ci=0.95){
  ## Only two combinations of time variance are supported.
  if(s.time.variance!="date") stopifnot(s.time.variance=="carcass age"&f.time.variance=="number of searches")
  if(s.time.variance=="date") stopifnot(f.time.variance=="date")
  ## Beta shape parameters matching the point estimates and CI bounds.
  s.a <- shapeparameter(s, s.lwr, s.upr)$a
  s.b <- shapeparameter(s, s.lwr, s.upr)$b
  f.a <- shapeparameter(f, f.lwr, f.upr)$a
  f.b <- shapeparameter(f, f.lwr, f.upr)$b
  n <- length(f)
  N <- length(s)
  p <- numeric(nsim)
  ## Simulate: draw probabilities from their beta distributions, then
  ## evaluate the Etterson equation matching the time-variance setting.
  for(r in 1:nsim) {
    sr <- rbeta(N, s.a, s.b)
    fr <- rbeta(n, f.a, f.b)
    ## NOTE(review): the scalar conditions below use elementwise &/| on
    ## length-one values; && and || would be the conventional choice.
    if(N==1&n==1) p[r] <- ettersonEq14(s=sr, f=fr, J=J)
    if(N>1|n>1){
      if(s.time.variance!="date") p[r] <- ettersonEq14v2(s=sr, f=fr, J=J)
      if(s.time.variance=="date") p[r] <- ettersonEq14v1(s=sr, f=fr, J=J)
    }
  }
  ## Equal-tailed interval from the simulated distribution.
  estp <- list(p.lower= quantile(p, prob=(1-ci)/2), p.upper=quantile(p, prob=1-(1-ci)/2))
  return(estp)
}
## testthat checks for the spectral resampling functions.
context("Resampling functions")

test_that(
  "Univariate resampling works",
  {
    ## x^2 sampled on even integers, resampled onto all integers, should
    ## reproduce x^2 at every target point.
    from <- seq(2, 20, 2)
    to <- seq(2, 20)
    values <- from ^ 2
    true_values <- to ^ 2
    resample_values <- resample(values, from, to)
    expect_equal(true_values, resample_values)
  }
)

test_that(
  "Multivariate resampling works",
  {
    ## Matrix input: each column is resampled independently onto the
    ## half-integer grid.
    m <- matrix(1:20, ncol = 2)
    true_m <- cbind(seq(1, 10, 0.5), seq(11, 20, 0.5))
    resample_m <- resample(m, 1:10, seq(1, 10, 0.5))
    expect_equal(true_m, resample_m)
  }
)

## Example spectra shipped with the package (used by the spectra tests).
data(testspec)

test_that(
  "Spectra down-sampling works",
  {
    ## Linear interpolation at a coarser wavelength grid must agree with
    ## directly subsetting the original spectra at those wavelengths.
    spec <- spectra(testspec_ACRU[, 1:5], 400:2500)
    new_wl <- seq(400, 1300, 10)
    true_spec <- spec[[new_wl, ]]
    resample_spec <- resample(spec, new_wl, method = "linear")
    expect_equal(true_spec, resample_spec)
  }
)

test_that(
  "Spectra up-sampling works",
  {
    ## Round trip: coarsen to a 10 nm grid, then resample back to 1 nm and
    ## compare with the original (interpolation error allowed via tolerance).
    true_spec <- spectra(testspec_ACRU[, 1:5], 400:2500)
    new_wl <- seq(400, 2500, 10)
    lowres_spec <- true_spec[[new_wl, ]]
    resample_spec <- resample(lowres_spec, 400:2500)
    expect_equal(true_spec, resample_spec, tolerance = 0.005)
  }
)

## NOTE(review): this guard runs the warning test only when PEcAn.logger is
## NOT installed — presumably because the warning is routed through the
## logger when it is available. Confirm the negation is intentional.
if (!requireNamespace("PEcAn.logger")) {
  test_that(
    "Resampling outside range throws warning",
    {
      ## Targets outside [min(from), max(from)] require extrapolation and
      ## should trigger the "unreliable" warning.
      from <- seq(2, 20, 2)
      to <- seq(1, 30)
      values <- from ^ 2
      expect_warning(resample(values, from, to), "Resampled values .* unreliable")
    }
  )
}
## S4 generic: clear the contents of a named region in a workbook.
setGeneric("clearNamedRegion",
           function(object, name) standardGeneric("clearNamedRegion"))

## Method for workbook objects: delegates to the backend via xlcCall();
## called for its side effect and returns invisibly.
setMethod("clearNamedRegion",
          signature(object = "workbook", name = "character"),
          function(object, name) {
            xlcCall(object, "clearNamedRegion", name)
            invisible()
          }
)
#' Random-walk Metropolis-Hastings sampler for a Poisson log-linear model
#' with a flat prior on the regression coefficients.
#'
#' The chain is initialized at the MLE (from glm) and proposes from a
#' multivariate normal with covariance scale * cov.unscaled.
#'
#' @param niter Number of MCMC iterations (>= 1).
#' @param y     Response vector of counts.
#' @param X     Design matrix (no intercept is added; include it in X).
#' @param scale Scale factor for the proposal covariance.
#' @return A niter x ncol(X) matrix of sampled coefficient vectors.
#'
#' Relies on loglinll() (log-likelihood of the log-linear model) and
#' MASS::mvrnorm(), both provided elsewhere.
hmflatloglin <- function(niter, y, X, scale) {
  p <- dim(X)[2]
  # Initialize at the MLE; its unscaled covariance drives the proposals.
  mod <- summary(glm(y ~ -1 + X, family = poisson()))
  beta <- matrix(0, niter, p)
  beta[1, ] <- as.vector(mod$coeff[, 1])
  Sigma2 <- as.matrix(mod$cov.unscaled)
  # Bug fix: the original `for (i in 2:niter)` iterated c(2, 1) when
  # niter == 1, overwriting the initial state; seq_len(niter - 1) + 1
  # yields an empty loop in that case.
  for (i in seq_len(niter - 1) + 1) {
    tildebeta <- mvrnorm(1, beta[i - 1, ], scale * Sigma2)
    # Log acceptance ratio; flat prior so only the likelihood enters.
    llr <- loglinll(tildebeta, y, X) - loglinll(beta[i - 1, ], y, X)
    # Accept with probability min(1, exp(llr)).
    if (runif(1) <= exp(llr)) {
      beta[i, ] <- tildebeta
    } else {
      beta[i, ] <- beta[i - 1, ]
    }
  }
  beta
}
# When TRUE, every check below is skipped (handy while debugging).
# Idiom fixes: use `<-` for assignment and the unabbreviated FALSE
# (T/F are ordinary variables and can be reassigned).
boolSkip <- FALSE

# 3-player game: a length-7 coalition value vector.
test_that("Check 45.1 - isWeaklySuperadditiveGame with 3 players, return TRUE", {
  if (boolSkip) {
    skip("Test was skipped")
  }
  v <- c(1:7)
  result <- isWeaklySuperadditiveGame(v)
  expect_equal(result, TRUE)
})

# 4-player game: a length-15 coalition value vector.
test_that("Check 45.2 - isWeaklySuperadditiveGame with 4 players, return TRUE", {
  if (boolSkip) {
    skip("Test was skipped")
  }
  v <- c(1:15)
  result <- isWeaklySuperadditiveGame(v)
  expect_equal(result, TRUE)
})

# 3-player game violating weak superadditivity.
test_that("Check 45.3 - isWeaklySuperadditiveGame with 3 players, return FALSE", {
  if (boolSkip) {
    skip("Test was skipped")
  }
  v <- c(1:5, 7, 7)
  result <- isWeaklySuperadditiveGame(v)
  expect_equal(result, FALSE)
})
## Initialize a list of site models, assigning unique distribution and
## parameter IDs.
##
## site_models : list of site models (GTR, HKY, JC69 or TN93).
## ids         : one ID per site model; used as the site model ID when it
##               is still NA.
## distr_id    : first distribution ID to hand out.
## param_id    : first parameter ID to hand out.
##
## Returns the list of initialized site models. IDs are threaded through
## the loop: each model consumes as many distribution/parameter IDs as it
## has distributions/parameters, so iteration order matters.
init_site_models <- function(
  site_models,
  ids,
  distr_id = 0,
  param_id = 0
) {
  testit::assert(beautier::are_site_models(site_models))
  testit::assert(length(site_models) == length(ids))
  for (i in seq_along(site_models)) {
    site_model <- site_models[[i]]
    testit::assert(beautier::is_site_model(site_model))
    ## Dispatch on the concrete site model type.
    if (beautier::is_gtr_site_model(site_model)) {
      site_model <- beautier::init_gtr_site_model(
        site_model,
        distr_id = distr_id,
        param_id = param_id
      )
    } else if (beautier::is_hky_site_model(site_model)) {
      site_model <- beautier::init_hky_site_model(
        site_model,
        distr_id = distr_id,
        param_id = param_id
      )
    } else if (beautier::is_jc69_site_model(site_model)) {
      site_model <- beautier::init_jc69_site_model(
        site_model,
        distr_id = distr_id,
        param_id = param_id
      )
    } else {
      ## The only remaining supported type is TN93.
      testit::assert(beautier::is_tn93_site_model(site_model))
      site_model <- beautier::init_tn93_site_model(
        site_model,
        distr_id = distr_id,
        param_id = param_id
      )
    }
    ## Advance the ID counters past everything this model consumed.
    distr_id <- distr_id + beautier::get_site_model_n_distrs(site_model)
    param_id <- param_id + beautier::get_site_model_n_params(site_model)
    ## Assign the supplied ID only when none was set yet.
    if (beautier::is_one_na(site_model$id)) site_model$id <- ids[i]
    testit::assert(beautier::is_init_site_model(site_model))
    site_models[[i]] <- site_model
  }
  site_models
}
## Initialize a GTR site model: give every uninitialized prior distribution
## and rate parameter a unique ID.
##
## gtr_site_model : a GTR site model as created by beautier.
## distr_id       : first distribution ID to hand out.
## param_id       : first parameter ID to hand out.
##
## Returns the fully initialized GTR site model. The ID counters are
## threaded sequentially through gamma shape prior, then the five rate
## prior distributions, then the six rate parameters — the order of the
## blocks below is therefore significant.
init_gtr_site_model <- function(
  gtr_site_model,
  distr_id = 0,
  param_id = 0
) {
  testit::assert(beautier::is_gtr_site_model(gtr_site_model))
  ## Gamma shape prior: initialize only when present (not NA) and not yet
  ## initialized.
  if (
    !beautier::is_one_na(
      gtr_site_model$gamma_site_model$gamma_shape_prior_distr
    )
  ) {
    if (
      !beautier::is_init_distr(
        gtr_site_model$gamma_site_model$gamma_shape_prior_distr
      )
    ) {
      gtr_site_model$gamma_site_model$gamma_shape_prior_distr <-
        beautier::init_distr(
          gtr_site_model$gamma_site_model$gamma_shape_prior_distr,
          distr_id = distr_id,
          param_id = param_id
        )
      distr_id <- distr_id + 1
      param_id <- param_id + beautier::get_distr_n_params(
        gtr_site_model$gamma_site_model$gamma_shape_prior_distr
      )
    }
  }
  ## Rate prior distributions (AC, AG, AT, CG, GT), each consuming one
  ## distribution ID and as many parameter IDs as it has parameters.
  if (!beautier::is_init_distr(gtr_site_model$rate_ac_prior_distr)) {
    gtr_site_model$rate_ac_prior_distr <- beautier::init_distr(
      gtr_site_model$rate_ac_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      gtr_site_model$rate_ac_prior_distr
    )
  }
  if (!beautier::is_init_distr(gtr_site_model$rate_ag_prior_distr)) {
    gtr_site_model$rate_ag_prior_distr <- beautier::init_distr(
      gtr_site_model$rate_ag_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      gtr_site_model$rate_ag_prior_distr
    )
  }
  if (!beautier::is_init_distr(gtr_site_model$rate_at_prior_distr)) {
    gtr_site_model$rate_at_prior_distr <- beautier::init_distr(
      gtr_site_model$rate_at_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      gtr_site_model$rate_at_prior_distr
    )
  }
  if (!beautier::is_init_distr(gtr_site_model$rate_cg_prior_distr)) {
    gtr_site_model$rate_cg_prior_distr <- beautier::init_distr(
      gtr_site_model$rate_cg_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      gtr_site_model$rate_cg_prior_distr
    )
  }
  ## NOTE(review): no rate_ct_prior_distr block here, unlike the six rate
  ## parameters below — confirm the CT rate prior is handled elsewhere.
  if (!beautier::is_init_distr(gtr_site_model$rate_gt_prior_distr)) {
    gtr_site_model$rate_gt_prior_distr <- beautier::init_distr(
      gtr_site_model$rate_gt_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      gtr_site_model$rate_gt_prior_distr
    )
  }
  ## Rate parameters (AC, AG, AT, CG, CT, GT), one parameter ID each.
  if (!beautier::is_init_param(gtr_site_model$rate_ac_param)) {
    gtr_site_model$rate_ac_param <- beautier::init_param(
      gtr_site_model$rate_ac_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(gtr_site_model$rate_ag_param)) {
    gtr_site_model$rate_ag_param <- beautier::init_param(
      gtr_site_model$rate_ag_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(gtr_site_model$rate_at_param)) {
    gtr_site_model$rate_at_param <- beautier::init_param(
      gtr_site_model$rate_at_param, id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(gtr_site_model$rate_cg_param)) {
    gtr_site_model$rate_cg_param <- beautier::init_param(
      gtr_site_model$rate_cg_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(gtr_site_model$rate_ct_param)) {
    gtr_site_model$rate_ct_param <- beautier::init_param(
      gtr_site_model$rate_ct_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(gtr_site_model$rate_gt_param)) {
    gtr_site_model$rate_gt_param <- beautier::init_param(
      gtr_site_model$rate_gt_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  ## Postconditions: the model and all its parts are now initialized.
  testit::assert(beautier::is_gtr_site_model(gtr_site_model))
  testit::assert(
    beautier::is_init_gamma_site_model(gtr_site_model$gamma_site_model)
  )
  testit::assert(beautier::is_init_gtr_site_model(gtr_site_model))
  gtr_site_model
}
## Initialize an HKY site model: give its uninitialized prior distributions
## unique distribution and parameter IDs.
##
## hky_site_model : an HKY site model as created by beautier.
## distr_id       : first distribution ID to hand out.
## param_id       : first parameter ID to hand out.
##
## Returns the fully initialized HKY site model.
init_hky_site_model <- function(
  hky_site_model,
  distr_id = 0,
  param_id = 0
) {
  testit::assert(beautier::is_hky_site_model(hky_site_model))

  ## Gamma shape prior: only touched when present (not NA) and not yet
  ## initialized. && short-circuits, so is_init_distr never sees an NA.
  gamma_prior <- hky_site_model$gamma_site_model$gamma_shape_prior_distr
  if (!beautier::is_one_na(gamma_prior) &&
      !beautier::is_init_distr(gamma_prior)) {
    gamma_prior <- beautier::init_distr(
      gamma_prior,
      distr_id = distr_id,
      param_id = param_id
    )
    hky_site_model$gamma_site_model$gamma_shape_prior_distr <- gamma_prior
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(gamma_prior)
  }

  ## Kappa prior: one distribution ID plus one ID per parameter.
  if (!beautier::is_init_distr(hky_site_model$kappa_prior_distr)) {
    hky_site_model$kappa_prior_distr <- beautier::init_distr(
      hky_site_model$kappa_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id +
      beautier::get_distr_n_params(hky_site_model$kappa_prior_distr)
  }

  ## Postconditions.
  testit::assert(
    beautier::is_init_gamma_site_model(hky_site_model$gamma_site_model)
  )
  testit::assert(beautier::is_init_hky_site_model(hky_site_model))
  hky_site_model
}
## Initialize a JC69 site model: the only thing to initialize is the gamma
## shape prior distribution (when present and not yet initialized).
##
## jc69_site_model : a JC69 site model as created by beautier.
## distr_id        : first distribution ID to hand out.
## param_id        : first parameter ID to hand out.
##
## Returns the fully initialized JC69 site model.
init_jc69_site_model <- function(
  jc69_site_model,
  distr_id = 0,
  param_id = 0
) {
  testit::assert(beautier::is_jc69_site_model(jc69_site_model))

  ## && short-circuits, so is_init_distr is never called on an NA prior.
  gamma_prior <- jc69_site_model$gamma_site_model$gamma_shape_prior_distr
  if (!beautier::is_one_na(gamma_prior) &&
      !beautier::is_init_distr(gamma_prior)) {
    gamma_prior <- beautier::init_distr(
      gamma_prior,
      distr_id = distr_id,
      param_id = param_id
    )
    jc69_site_model$gamma_site_model$gamma_shape_prior_distr <- gamma_prior
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(gamma_prior)
  }

  ## Postconditions.
  testit::assert(
    beautier::is_init_gamma_site_model(jc69_site_model$gamma_site_model)
  )
  testit::assert(beautier::is_init_jc69_site_model(jc69_site_model))
  jc69_site_model
}
## Initialize a TN93 site model: give every uninitialized prior
## distribution (gamma shape, kappa 1, kappa 2) and kappa parameter a
## unique ID.
##
## tn93_site_model : a TN93 site model as created by beautier.
## distr_id        : first distribution ID to hand out.
## param_id        : first parameter ID to hand out.
##
## Returns the fully initialized TN93 site model. The ID counters are
## threaded sequentially, so the order of the blocks below is significant.
init_tn93_site_model <- function(
  tn93_site_model,
  distr_id = 0,
  param_id = 0
) {
  testit::assert(beautier::is_tn93_site_model(tn93_site_model))
  ## Gamma shape prior: initialize only when present (not NA) and not yet
  ## initialized.
  if (
    !beautier::is_one_na(
      tn93_site_model$gamma_site_model$gamma_shape_prior_distr
    )
  ) {
    if (
      !beautier::is_init_distr(
        tn93_site_model$gamma_site_model$gamma_shape_prior_distr
      )
    ) {
      tn93_site_model$gamma_site_model$gamma_shape_prior_distr <-
        beautier::init_distr(
          tn93_site_model$gamma_site_model$gamma_shape_prior_distr,
          distr_id = distr_id,
          param_id = param_id
        )
      distr_id <- distr_id + 1
      param_id <- param_id + beautier::get_distr_n_params(
        tn93_site_model$gamma_site_model$gamma_shape_prior_distr
      )
    }
  }
  ## Kappa 1 and kappa 2 prior distributions.
  if (!beautier::is_init_distr(tn93_site_model$kappa_1_prior_distr)) {
    tn93_site_model$kappa_1_prior_distr <- beautier::init_distr(
      tn93_site_model$kappa_1_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      tn93_site_model$kappa_1_prior_distr
    )
  }
  if (!beautier::is_init_distr(tn93_site_model$kappa_2_prior_distr)) {
    tn93_site_model$kappa_2_prior_distr <- beautier::init_distr(
      tn93_site_model$kappa_2_prior_distr,
      distr_id = distr_id,
      param_id = param_id
    )
    distr_id <- distr_id + 1
    param_id <- param_id + beautier::get_distr_n_params(
      tn93_site_model$kappa_2_prior_distr
    )
  }
  ## Kappa 1 and kappa 2 parameters, one parameter ID each.
  if (!beautier::is_init_param(tn93_site_model$kappa_1_param)) {
    tn93_site_model$kappa_1_param <- beautier::init_param(
      tn93_site_model$kappa_1_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  if (!beautier::is_init_param(tn93_site_model$kappa_2_param)) {
    tn93_site_model$kappa_2_param <- beautier::init_param(
      tn93_site_model$kappa_2_param,
      id = param_id
    )
    param_id <- param_id + 1
  }
  ## Postconditions: the model and all its parts are now initialized.
  testit::assert(
    beautier::is_init_gamma_site_model(tn93_site_model$gamma_site_model)
  )
  testit::assert(beautier::is_init_tn93_site_model(tn93_site_model))
  tn93_site_model
}
#' Coordinate-wise marginal density of a mixture of multivariate normals.
#'
#' @param x     Matrix of evaluation points; column i holds the grid for
#'              marginal dimension i.
#' @param pvec  Vector of mixture component probabilities.
#' @param comps List of components: comps[[i]][[1]] is the mean vector and
#'              comps[[i]][[2]] an upper-triangular matrix whose inverse is
#'              the Cholesky root of the component covariance, so marginal
#'              sds are sqrt(diag(crossprod(backsolve(rooti, I)))).
#' @return Matrix with dim(x): column i holds the marginal mixture density
#'         of dimension i evaluated at x[, i].
mixDen <- function(x, pvec, comps) {
  # Extract marginal means and standard deviations from the component list.
  univariate_moments <- function(comps) {
    nc <- length(comps)
    dim <- length(comps[[1]][[1]])
    mu <- matrix(0, nc, dim)
    sigma <- matrix(0, nc, dim)
    for (i in seq_len(nc)) {
      mu[i, ] <- comps[[i]][[1]]
      # Invert the stored root to recover the Cholesky root of Sigma.
      root <- backsolve(comps[[i]][[2]], diag(rep(1, dim)))
      sigma[i, ] <- sqrt(diag(crossprod(root)))
    }
    list(mu = mu, sigma = sigma)
  }
  nc <- length(comps)
  mars <- univariate_moments(comps)
  den <- matrix(0, nrow(x), ncol(x))
  # seq_len() instead of 1:n: behaves correctly for zero columns/components.
  for (i in seq_len(ncol(x))) {
    for (j in seq_len(nc)) {
      den[, i] <- den[, i] +
        dnorm(x[, i], mean = mars$mu[j, i], sd = mars$sigma[j, i]) * pvec[j]
    }
  }
  den
}
#' Plot the emission spectrum (or spectra) of one or more radionuclides
#' from the ICRP-07 data sets shipped with RadData.
#'
#' @param desired_RN Character vector of radionuclide identifiers.
#' @param rad_type   Radiation type code (one of the rt_allowed codes below)
#'   or NULL when photon = TRUE.
#' @param photon     If TRUE, plot all photon emissions (rad_type must then
#'   be NULL).
#' @param log_plot   0 = linear axes, 1 = log y axis, 2 = log x and y axes.
#' @param prob_cut   Minimum emission probability to include (discrete data
#'   set only).
#' @return A ggplot object, or the string "No matches" when no data match.
RN_plot_spectrum <- function(desired_RN,
                             rad_type = NULL,
                             photon = FALSE,
                             log_plot = 0,
                             prob_cut = 0.01) {
  # Radiation type codes accepted by the ICRP-07 data sets.
  rt_allowed <- c("X", "G", "AE", "IE", "A", "AR", "B-", "AQ", "B+", "PG", "DG", "DB", "FF", "N")
  stop_flag <- FALSE
  if (!is.null(rad_type)) {
    if (!rad_type %in% rt_allowed) {
      cat("Invalid specification for rad_type.\n")
      cat("Please enter one of these: \n")
      cat(rt_allowed)
      cat(" (in quotes) or NULL and select photon = TRUE")
      # Bug fix: previously execution fell through and continued with the
      # invalid rad_type; bail out like the conflicting-arguments case below.
      return(invisible())
    }
  }
  # Scalar condition: use short-circuit &&, not elementwise &.
  if (!is.null(rad_type) && photon == TRUE) {
    cat("Enter either rad_type = 'a rad_type', or photon = TRUE, but not both.")
    return()
  }
  # Beta emitters live in the continuous-spectrum data set (ICRP_07.BET,
  # "B"); everything else in the discrete data set (ICRP_07.RAD, "R").
  dat_set <- "R"
  if (!is.null(rad_type)) {
    if (rad_type %in% c("B-", "B+", "DB")) {
      dat_set <- "B"
    }
  }
  if (dat_set == "B") {
    spec_df <- RadData::ICRP_07.BET[which(RadData::ICRP_07.BET$RN %in% desired_RN), ]
  }
  if (dat_set == "R") {
    spec_df <- RadData::ICRP_07.RAD[which(RadData::ICRP_07.RAD$RN %in%
                                            desired_RN), ]
    if (photon == TRUE) {
      spec_df <- spec_df[which(spec_df$is_photon == TRUE), ]
    }
    if (photon == FALSE) {
      spec_df <- spec_df[which(spec_df$code_AN == rad_type), ]
    }
  }
  if (photon == TRUE) {
    spec_df <- RadData::ICRP_07.RAD[which(RadData::ICRP_07.RAD$RN %in% desired_RN), ]
    spec_df <- spec_df[which(spec_df$is_photon == TRUE), ]
  }
  # No rows matched the requested radionuclides / radiation type.
  if (is.na(spec_df[1, 1])) {
    oops <- "No matches"
    stop_flag <- TRUE
  }
  if (stop_flag) {
    return(oops)
  }
  # Dummy bindings to silence R CMD check notes about aes() variables.
  E_MeV <- prob <- RN <- MeV_per_dk <- A <- NULL
  # Discrete data: drop low-probability emissions and add energy-per-decay.
  if (dat_set != "B") spec_df <- spec_df[which(spec_df$prob > prob_cut), ]
  if (dat_set != "B") spec_df$MeV_per_dk <- spec_df$prob * spec_df$E_MeV
  spec_text <- ifelse(length(desired_RN) > 1, "spectra", "spectrum")
  if (dat_set != "B") {
    # Discrete emissions: lollipop-style plot of probability vs energy.
    if (photon == TRUE) rad_text <- "photon"
    if (photon != TRUE) rad_text <- RadData::rad_codes$description[which(RadData::rad_codes$code_AN == rad_type)]
    p <- ggplot2::ggplot(
      data = spec_df,
      ggplot2::aes(E_MeV, prob, color = RN, shape = RN)
    ) +
      ggplot2::geom_segment(ggplot2::aes(xend = E_MeV, yend = 0)) +
      ggplot2::geom_point(size = 3) +
      ggplot2::scale_colour_hue(l = 80, c = 150) +
      ggplot2::ggtitle(paste0("Emission ", spec_text, ": ", rad_text),
        subtitle = paste0("particle probability > ", prob_cut)
      ) +
      ggthemes::theme_calc() +
      ggplot2::xlab("Energy, MeV") +
      ggplot2::ylab("probability density")
    if (log_plot == 1) p <- p + ggplot2::scale_y_log10()
    if (log_plot == 2) p <- p + ggplot2::scale_x_log10() + ggplot2::scale_y_log10()
  }
  if (dat_set == "B") {
    # Continuous beta spectra: line plot of the density vs energy.
    p <- ggplot2::ggplot(
      data = spec_df,
      ggplot2::aes(E_MeV, A, color = RN, shape = RN)
    ) +
      ggthemes::theme_calc() +
      ggplot2::xlab("Energy, MeV") +
      ggplot2::ylab("probability density") +
      ggplot2::geom_line(size = 1.5) +
      ggplot2::scale_colour_hue(l = 80, c = 150) +
      ggplot2::ggtitle(RadData::rad_codes$description[which(RadData::rad_codes$code_AN == rad_type)])
    if (log_plot == 1) p <- p + ggplot2::scale_y_log10()
    if (log_plot == 2) p <- p + ggplot2::scale_x_log10() + ggplot2::scale_y_log10()
  }
  p
}
## Integration test: ivmte estimation with a single IV-like specification.
## The test replicates the IV-like (OLS) estimates, the gamma moments, and
## the LP bounds by hand and compares them with the ivmte output. Relies on
## ivmte internals and test helpers (gendistCovariates, popmean, symat,
## sOls3, genGammaTT, runLpSolveAPI) available in the test environment.
context("Test estimation when only a single IV-like specification is provided.")
set.seed(10L)

## Simulated data: full sample (dtcf) and its population distribution (dtc).
dtcf <- ivmte:::gendistCovariates()$data.full
dtc <- ivmte:::gendistCovariates()$data.dist

## Fit the model whose internals are checked below.
result <- ivmte(ivlike = ey ~ 1 +d + x1 + x2,
                data = dtcf,
                components = l(d, x1),
                subset = z2 %in% c(2, 3),
                propensity = d ~ x1 + x2 + z1 + z2,
                link = "logit",
                m0 = ~ x1 + x2:u + x2:I(u^2),
                m1 = ~ x1 + x1:x2 + u + x1:u + x2:I(u^2),
                uname = u,
                target = "late",
                late.from = c(z1 = 1, z2 = 2),
                late.to = c(z1 = 0, z2 = 3),
                late.X = c(x1 = 0, x2 = 1),
                criterion.tol = 0.01,
                initgrid.nu = 4,
                initgrid.nx = 2,
                audit.nx = 5,
                audit.nu = 5,
                solver = "lpSolveAPI")

## Population moments needed to replicate the OLS (IV-like) estimates.
dtc$ey <- dtc$ey1 * dtc$p + dtc$ey0 * (1 - dtc$p)
dtc$eyd <- dtc$ey1 * dtc$p
varlist <- ~ eyd + ey + ey0 + ey1 + p + x1 + x2 + z1 + z2 +
  I(ey * p) + I(ey * x1) + I(ey * x2) + I(ey * z1) + I(ey * z2) +
  I(ey0 * p) + I(ey0 * x1) + I(ey0 * x2) + I(ey0 * z1) + I(ey0 * z2) +
  I(ey1 * p) + I(ey1 * x1) + I(ey1 * x2) + I(ey1 * z1) + I(ey1 * z2) +
  I(p * p) + I(p * x1) + I(p * x2) + I(p * z1) + I(p * z2) +
  I(x1 * p) + I(x1 * x1) + I(x1 * x2) + I(x1 * z1) + I(x1 * z2) +
  I(x2 * p) + I(x2 * x1) + I(x2 * x2) + I(x2 * z1) + I(x2 * z2) +
  I(z1 * p) + I(z1 * x1) + I(z1 * x2) + I(z1 * z1) + I(z1 * z2) +
  I(z2 * p) + I(z2 * x1) + I(z2 * x2) + I(z2 * z1) + I(z2 * z2)
mv <- popmean(varlist, subset(dtc, dtc$z2 %in% c(2, 3)))
m <- as.list(mv)
names(m) <- rownames(mv)

## E[XX'] and E[Xy] of the regression of ey on (1, d, x1, x2); the OLS
## coefficients reproduce the IV-like estimates for components d and x1.
exx <- symat(c(1, m[["p"]], m[["x1"]], m[["x2"]],
               m[["p"]], m[["I(p * x1)"]], m[["I(p * x2)"]],
               m[["I(x1 * x1)"]], m[["I(x1 * x2)"]],
               m[["I(x2 * x2)"]]))
exy <- matrix(c(m[["ey"]], m[["eyd"]],
                m[["I(ey * x1)"]],
                m[["I(ey * x2)"]]))
ols <- (solve(exx) %*% exy)
test_that("IV-like estimates", {
  expect_equal(as.numeric(result$s.set$s1$beta), as.numeric(ols[2]))
  expect_equal(as.numeric(result$s.set$s2$beta), as.numeric(ols[3]))
})

## S-weights for the OLS components, evaluated at each covariate value, and
## the implied gamma moments.
dtc.x <- split(as.matrix(dtc[, c("x1", "x2")]), seq(1, nrow(dtc)))
fit <- glm(d ~ x1 + x2 + z1 + z2, family = binomial(link = "logit"),
           data = dtcf)
dtc$p <- predict(fit, dtc, type = "response")
dtc$s.ols.0.d <- unlist(lapply(dtc.x, sOls3, d = 0, j = 2, exx = exx))
dtc$s.ols.1.d <- unlist(lapply(dtc.x, sOls3, d = 1, j = 2, exx = exx))
dtc$s.ols.0.x1 <- unlist(lapply(dtc.x, sOls3, d = 0, j = 3, exx = exx))
dtc$s.ols.1.x1 <- unlist(lapply(dtc.x, sOls3, d = 1, j = 3, exx = exx))
g.ols.d <- genGammaTT(subset(dtc, dtc$z2 %in% c(2, 3)),
                      "s.ols.0.d",
                      "s.ols.1.d")
g.ols.x1 <- genGammaTT(subset(dtc, dtc$z2 %in% c(2, 3)),
                       "s.ols.0.x1",
                       "s.ols.1.x1")

## Target LATE weights: propensity scores at the "to" and "from" points.
late.ub <- subset(dtc,
                  dtc$z1 == 0 &
                    dtc$z2 == 3 &
                    dtc$x1 == 0 &
                    dtc$x2 == 1)$p
late.lb <- subset(dtc,
                  dtc$z1 == 1 &
                    dtc$z2 == 2 &
                    dtc$x1 == 0 &
                    dtc$x2 == 1)$p
dtc$w.late.1 <- 1 / (late.ub - late.lb)
dtc$w.late.0 <- - dtc$w.late.1
g.star.late <- genGammaTT(dtc[dtc$x1 == 0 & dtc$x2 == 1, ],
                          "w.late.0",
                          "w.late.1",
                          lb = late.lb,
                          ub = late.ub)
test_that("Gamma moments", {
  expect_equal(as.numeric(c(result$gstar$g0, result$gstar$g1)),
               as.numeric(unlist(g.star.late)))
  expect_equal(as.numeric(c(result$s.set$s1$g0, result$s.set$s1$g1)),
               as.numeric(unlist(g.ols.d)))
  expect_equal(as.numeric(c(result$s.set$s2$g0, result$s.set$s2$g1)),
               as.numeric(unlist(g.ols.x1)))
})

## Rebuild the LP problem by hand: equality constraints matching the
## IV-like estimates (with slack variables Aextra for the absolute
## deviations) plus boundedness/monotonicity constraints on m0, m1, mte.
estimates <- c(ols[c(2, 3)])
A <- rbind(c(g.ols.d$g0, g.ols.d$g1),
           c(g.ols.x1$g0, g.ols.x1$g1))
Aextra <- matrix(0, nrow = nrow(A), ncol = 2 * nrow(A))
for (i in 1:nrow(A)) {
  Aextra[i, (i * 2 - 1)] <- -1
  Aextra[i, (i * 2)] <- 1
}
## NOTE(review): this assignment is dead — `grid` is rebuilt from the audit
## grid by the cbind() a few lines below.
grid <- result$audit.grid$initial[, 1:3]
xGrid <- result$audit.grid$audit.x
nx <- nrow(xGrid)
uGrid <- result$audit.grid$audit.u
## Cartesian product of the audit covariate grid and the u grid.
xGrid <- xGrid[rep(seq(nrow(xGrid)), each = length(uGrid)), ]
uGrid <- rep(uGrid, times = nx)
grid <- cbind(xGrid, uGrid)
colnames(grid) <- c("x1", "x2", "u")
grid <- data.frame(grid)
mono0 <- model.matrix(~ x1 + x2:u + x2:I(u^2),
                      data = grid)
mono1 <- model.matrix(~ x1 + x1:x2 + u + x1:u + x2:I(u^2),
                      data = grid)
## Outcome bounds taken from the population outcome regressions.
maxy <- max(subset(dtc, dtc$z2 %in% c(2, 3))[, c("ey0", "ey1")])
miny <- min(subset(dtc, dtc$z2 %in% c(2, 3))[, c("ey0", "ey1")])
Bzeroes <- matrix(0, ncol = ncol(Aextra), nrow(grid))
b0zeroes <- matrix(0, ncol = ncol(mono0), nrow = nrow(grid))
b1zeroes <- matrix(0, ncol = ncol(mono1), nrow = nrow(grid))
m0bound <- cbind(Bzeroes, mono0, b1zeroes)
m1bound <- cbind(Bzeroes, b0zeroes, mono1)
mtebound <- cbind(Bzeroes, -mono0, mono1)

## Step 1: minimize the total deviation from the IV-like estimates.
modelO <- list()
modelO$obj <- c(replicate(ncol(Aextra), 1),
                replicate(ncol(A), 0))
modelO$rhs <- c(estimates,
                replicate(nrow(m0bound), miny),
                replicate(nrow(m1bound), miny),
                replicate(nrow(mtebound), miny - maxy),
                replicate(nrow(m0bound), maxy),
                replicate(nrow(m1bound), maxy),
                replicate(nrow(mtebound), maxy - miny))
modelO$sense <- c(replicate(length(estimates), "="),
                  replicate(nrow(m0bound), ">="),
                  replicate(nrow(m1bound), ">="),
                  replicate(nrow(mtebound), ">="),
                  replicate(nrow(m0bound), "<="),
                  replicate(nrow(m1bound), "<="),
                  replicate(nrow(mtebound), "<="))
modelO$A <- rbind(cbind(Aextra, A),
                  m0bound,
                  m1bound,
                  mtebound,
                  m0bound,
                  m1bound,
                  mtebound)
modelO$ub <- c(replicate(ncol(Aextra), Inf),
               replicate(ncol(A), Inf))
modelO$lb <- c(replicate(ncol(Aextra), 0),
               replicate(ncol(A), -Inf))
lpsolver.options <- list(epslevel = "tight")
minobseq <- runLpSolveAPI(modelO, 'min', lpsolver.options)$objval

## Step 2: min/max the target LATE subject to near-minimal deviation
## (criterion.tol = 0.01 corresponds to the 1.01 factor).
tolerance <- 1.01
Atop <- c(replicate(ncol(Aextra), 1),
          replicate(ncol(A), 0))
modelF <- list()
modelF$obj <- c(replicate(ncol(Aextra), 0),
                g.star.late$g0,
                g.star.late$g1)
modelF$rhs <- c(tolerance * minobseq,
                modelO$rhs)
modelF$sense <- c("<=",
                  modelO$sense)
modelF$A <- rbind(Atop,
                  modelO$A)
modelF$ub <- c(replicate(ncol(Aextra), Inf),
               replicate(ncol(mono0) + ncol(mono1), Inf))
modelF$lb <- c(replicate(ncol(Aextra), 0),
               replicate(ncol(mono0) + ncol(mono1), -Inf))
minLate <- runLpSolveAPI(modelF, 'min', lpsolver.options)
maxLate <- runLpSolveAPI(modelF, 'max', lpsolver.options)
bound <- c(lower = minLate$objval, upper = maxLate$objval)
test_that("LP problem", {
  expect_equal(result$bound, bound)
})
## Examples from the forestplot vignette: the classic Cochrane steroid
## meta-analysis plotted with increasingly customized horizontal lines.
##
## Repair note: several string literals in this chunk were truncated at a
## '#' character (hex color codes and the text after them were stripped).
## They have been reconstructed from the forestplot package vignette, which
## uses "#444444" for the default line color and "#000044" for the custom
## row lines — verify against the vignette if exact colors matter.
library(forestplot)
options(forestplot_new_page = TRUE)

## Cochrane meta-analysis data (from the rmeta package): per-study odds
## ratios with confidence limits; rows 1-2 are headers, row 11 the summary.
cochrane_from_rmeta <-
  structure(list(
    mean = c(NA, NA, 0.578, 0.165, 0.246, 0.700, 0.348, 0.139, 1.017, NA, 0.531),
    lower = c(NA, NA, 0.372, 0.018, 0.072, 0.333, 0.083, 0.016, 0.365, NA, 0.386),
    upper = c(NA, NA, 0.898, 1.517, 0.833, 1.474, 1.455, 1.209, 2.831, NA, 0.731)),
    .Names = c("mean", "lower", "upper"),
    row.names = c(NA, -11L),
    class = "data.frame")

## Label columns displayed beside the graph.
tabletext <- cbind(
  c("", "Study", "Auckland", "Block",
    "Doran", "Gamsu", "Morrison", "Papageorgiou",
    "Tauesch", NA, "Summary"),
  c("Deaths", "(steroid)", "36", "1",
    "4", "14", "3", "1",
    "8", NA, NA),
  c("Deaths", "(placebo)", "60", "5",
    "11", "20", "7", "7",
    "10", NA, NA),
  c("", "OR", "0.58", "0.16",
    "0.25", "0.70", "0.35", "0.14",
    "1.02", NA, "0.53"))

## 1. Basic plot.
forestplot(tabletext,
           cochrane_from_rmeta, new_page = TRUE,
           is.summary = c(TRUE, TRUE, rep(FALSE, 8), TRUE),
           clip = c(0.1, 2.5),
           xlog = TRUE,
           col = fpColors(box = "royalblue", line = "darkblue", summary = "royalblue"))

## 2. A single style for all horizontal lines.
forestplot(tabletext,
           hrzl_lines = gpar(col = "#444444"),
           cochrane_from_rmeta, new_page = TRUE,
           is.summary = c(TRUE, TRUE, rep(FALSE, 8), TRUE),
           clip = c(0.1, 2.5),
           xlog = TRUE,
           col = fpColors(box = "royalblue", line = "darkblue", summary = "royalblue"))

## 3. Per-row line styles, selected by row number.
forestplot(tabletext,
           hrzl_lines = list("3" = gpar(lty = 2),
                             "11" = gpar(lwd = 1, columns = 1:4, col = "#000044")),
           cochrane_from_rmeta, new_page = TRUE,
           is.summary = c(TRUE, TRUE, rep(FALSE, 8), TRUE),
           clip = c(0.1, 2.5),
           xlog = TRUE,
           col = fpColors(box = "royalblue", line = "darkblue", summary = "royalblue", hrz_lines = "#444444"))

## 4. Fully custom colors with vertices on the confidence-interval ends.
forestplot(tabletext,
           hrzl_lines = list("3" = gpar(lty = 2),
                             "11" = gpar(lwd = 1, columns = 1:4, col = "blue")),
           cochrane_from_rmeta, new_page = TRUE,
           is.summary = c(TRUE, TRUE, rep(FALSE, 8), TRUE),
           clip = c(0.1, 2.5),
           xlog = TRUE,
           col = fpColors(box = "red", line = "green", summary = "purple", hrz_lines = "orange"),
           vertices = TRUE)

## 5. Graph placed in column 4, lines skipping that column.
forestplot(tabletext,
           graph.pos = 4,
           hrzl_lines = list("3" = gpar(lty = 2),
                             "11" = gpar(lwd = 1, columns = c(1:3, 5), col = "#000044"),
                             "12" = gpar(lwd = 1, lty = 2, columns = c(1:3, 5), col = "#000044")),
           cochrane_from_rmeta, new_page = TRUE,
           is.summary = c(TRUE, TRUE, rep(FALSE, 8), TRUE),
           clip = c(0.1, 2.5),
           xlog = TRUE,
           col = fpColors(box = "royalblue", line = "darkblue", summary = "royalblue", hrz_lines = "#444444"))
data(HRQoL)
clrs <- fpColors(box="royalblue",line="darkblue", summary="royalblue")
tabletext <-
list(c(NA, rownames(HRQoL$Sweden)),
append(list(expression(beta)), sprintf("%.2f", HRQoL$Sweden[,"coef"])))
forestplot(tabletext,
rbind(rep(NA, 3),
HRQoL$Sweden),
col=clrs,
xlab="EQ-5D index")
tabletext <- cbind(rownames(HRQoL$Sweden),
sprintf("%.2f", HRQoL$Sweden[,"coef"]))
forestplot(tabletext,
txt_gp = fpTxtGp(label = gpar(fontfamily = "HersheyScript")),
rbind(HRQoL$Sweden),
col=clrs,
xlab="EQ-5D index")
forestplot(tabletext,
txt_gp = fpTxtGp(label = list(gpar(fontfamily = "HersheyScript"),
gpar(fontfamily = "",
col = "
ticks = gpar(fontfamily = "", cex=1),
xlab = gpar(fontfamily = "HersheySerif", cex = 1.5)),
rbind(HRQoL$Sweden),
col=clrs,
xlab="EQ-5D index")
forestplot(tabletext,
rbind(HRQoL$Sweden),
clip =c(-.1, Inf),
col=clrs,
xlab="EQ-5D index")
tabletext <- tabletext[,1]
forestplot(tabletext,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.1, 0.075),
col=fpColors(box=c("blue", "darkred")),
xlab="EQ-5D index")
forestplot(tabletext,
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
xlab="EQ-5D index")
forestplot(tabletext,
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
lty.ci = c(1, 2),
col=fpColors(box=c("blue", "darkred")),
xlab="EQ-5D index")
forestplot(tabletext,
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
xlab="EQ-5D index")
forestplot(tabletext,
legend_args = fpLegend(pos = list(x=.85, y=0.25),
gp=gpar(col="
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
xlab="EQ-5D index")
forestplot(tabletext,
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
xticks = c(-.1, -0.05, 0, .05),
xlab="EQ-5D index")
xticks <- seq(from = -.1, to = .05, by = 0.025)
xtlab <- rep(c(TRUE, FALSE), length.out = length(xticks))
attr(xticks, "labels") <- xtlab
forestplot(tabletext,
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
xticks = xticks,
xlab="EQ-5D index")
forestplot(tabletext,
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
grid = TRUE,
xticks = c(-.1, -0.05, 0, .05),
xlab="EQ-5D index")
forestplot(tabletext,
legend = c("Sweden", "Denmark"),
fn.ci_norm = c(fpDrawNormalCI, fpDrawCircleCI),
boxsize = .25,
line.margin = .1,
mean = cbind(HRQoL$Sweden[, "coef"], HRQoL$Denmark[, "coef"]),
lower = cbind(HRQoL$Sweden[, "lower"], HRQoL$Denmark[, "lower"]),
upper = cbind(HRQoL$Sweden[, "upper"], HRQoL$Denmark[, "upper"]),
clip =c(-.125, 0.075),
col=fpColors(box=c("blue", "darkred")),
grid = structure(c(-.1, -.05, .05),
gp = gpar(lty = 2, col = "
xlab="EQ-5D index") |
if (interactive()){
  # Demonstration of plot_qq() from the lmvar package; wrapped in
  # interactive() so nothing is plotted during non-interactive checks.
  library(lmvar)

  # Classical lm fit; y = TRUE stores the response, which plot_qq reads.
  # Fix vs. original: `<-` used for assignment instead of `=`.
  fit_lm <- lm(Petal.Length ~ Species, data = iris, y = TRUE)
  plot_qq(fit_lm)

  # lmvar fit using the same model matrix for both the mean and the
  # (log-)variance part of the model.
  X <- model.matrix(~ Species - 1, data = iris)
  fit_lmvar <- lmvar(iris$Petal.Length, X, X)

  # QQ-plots of the lm and lmvar fits in one display.
  plot_qq(fit_lm, fit_lmvar)

  # Compare against an lm fit with an extra covariate.
  fit_lm_width <- lm(Petal.Length ~ Species + Petal.Width, data = iris, y = TRUE)
  plot_qq(fit_lm, fit_lm_width)
}
# Bare character constants naming package data objects.  NOTE(review):
# these look like roxygen documentation targets for datasets (each string
# would normally follow a roxygen block and name the documented object) --
# confirm against the package's data/ directory.
"adult_demo"
"adult_enroll_dur"
"adult_hcc"
"adult_group"
"adult_interaction"
"adult_rxc"
"adult_rxc_hcc_inter" |
immer_osink <- function(file)
{
    # Open an output sink via CDM::osink so that subsequent console output
    # is mirrored to "<file>__SUMMARY.Rout".
    #
    # @param file base file name (path without suffix) for the sink target.
    # Fix: dropped the redundant paste0() wrapper around the constant
    # suffix string -- paste0() on a single string is a no-op.
    CDM::osink( file=file, suffix="__SUMMARY.Rout" )
}
cov.function <-
function(data.matrix){
  # Sample covariance matrix of the columns of `data.matrix`.
  #
  # @param data.matrix numeric matrix with observations in rows and
  #   variables in columns.
  # @return the p x p matrix S = X_c' X_c / (m - 1), where X_c is the
  #   column-centered data and m = nrow(data.matrix).  Numerically
  #   equivalent to stats::cov(data.matrix).
  #
  # Fixes vs. original: `<-` instead of `=`, TRUE instead of T, and
  # centering via sweep()/crossprod() instead of materialising an m x n
  # matrix of repeated column means.
  m <- nrow(data.matrix)
  centered <- sweep(data.matrix, 2, colMeans(data.matrix))
  crossprod(centered) / (m - 1)
}
all_passed <- function(agent,
                       i = NULL) {

  # Report whether every validation step (optionally restricted to the
  # step numbers in `i`) passed all of its test units.  Returns a single
  # logical; NA when the agent holds no validation steps.

  # Pass/fail results only exist after an interrogation.
  if (!has_agent_intel(agent)) {
    stop(
      "The agent hasn't performed an interrogation.",
      call. = FALSE
    )
  }

  passed <- agent$validation_set$all_passed

  # No validation steps at all: `i` is meaningless, overall result is NA.
  if (length(passed) < 1) {
    if (!is.null(i)) {
      stop(
        "You cannot provide a value for `i` when the agent ",
        "contains no validation steps.",
        call. = FALSE
      )
    }
    return(NA)
  }

  # Optionally restrict to the requested step indices.
  if (!is.null(i)) {
    if (!all(i %in% seq_along(passed))) {
      stop(
        "All values provided for `i` must be in the range of ",
        "the validation step numbers present in the agent.",
        call. = FALSE
      )
    }
    passed <- passed[i]
  }

  # A step without a recorded result counts as a failure.
  if (any(is.na(passed))) {
    return(FALSE)
  }

  all(passed)
}
# Interactive console routine: draws histograms of the posterior
# source-proportion samples for one group, either overlaid on a single axis
# or one plot per source.  Prompts the user via scan()/menu()/readline(),
# so it is only usable in an interactive session.
#
# Args:
#   siardata: list holding the SIAR run state; uses $SHOULDRUN,
#     $GRAPHSONLY, $output (MCMC sample matrix), $numgroups, $numsources,
#     $numiso, $sources, $TITLE.
#   siarversion: version number stamped in the plot margin when > 0.
#   legloc: legend placement keyword for the overlaid plot.
#
# Returns NULL (or invisibly nothing) after plotting.
siarhistograms <-
function(siardata,siarversion=0,legloc='topright') {
# Guard: no data loaded yet.
if(siardata$SHOULDRUN==FALSE && siardata$GRAPHSONLY==FALSE) {
cat("You must load in some data first (via option 1) in order to use \n")
cat("this feature of the program. \n")
cat("Press <Enter> to continue")
readline()
invisible()
return(NULL)
}
# Guard: the model has not been run, so there is nothing to plot.
if(length(siardata$output)==0) {
cat("No output found - check that you have run the SIAR model. \n \n")
return(NULL)
}
cat("Plots of single groups proportions. \n")
# Ask which group to plot; loop until a valid group number is entered.
if(siardata$numgroups>1) {
cat("Enter the group number of the proportions you wish to plot \n")
BADGROUP <- TRUE
while(BADGROUP==TRUE) {
groupnum <- as.integer(scan(what="",nlines=1,quiet=TRUE))
if(length(groupnum)>0) {
BADGROUP <- FALSE
if(groupnum>siardata$numgroups) {
BADGROUP <- TRUE
cat("Group number out of range. \n")
}
}
}
} else {
groupnum <- 1
}
# One plot per source, or all sources overlaid?  ("seperate" sic in the
# user-facing strings -- left untouched since it is runtime output.)
title <- "Do you require each plot on a seperate graph or all on the same one?"
choices <- c("Each on a seperate graph","All together on one graph")
choose <- menu(choices,title = title)
cat("Producing plot..... \n \n")
# Source names: from the sources table when present, otherwise recovered
# from the output column names of the chosen group (stripping the "G<n>"
# suffix via strsplit).
if(length(siardata$sources)>0) {
sourcenames <- as.character(siardata$sources[,1])
} else {
sourcenames <- strsplit(colnames(siardata$output[,((groupnum-1)*(siardata$numsources+siardata$numiso)+1):(groupnum*(siardata$numsources+siardata$numiso)-siardata$numiso)]),paste("G",groupnum,sep=""))
}
# Posterior columns belonging to the chosen group (sources + isotopes).
usepars <- siardata$output[,((groupnum-1)*(siardata$numsources+siardata$numiso)+1):(groupnum*(siardata$numsources+siardata$numiso))]
# Common histogram breaks on [0, 1]; `top` is the tallest density across
# all sources so every panel can share the same y-range.
mybreaks <- seq(0,1,length=50)
halfwidth <- diff(mybreaks)[1]/2
top <- 0
for(j in 1:siardata$numsources) {
top <- max(c(top,max(hist(usepars[,j],plot=FALSE,breaks=mybreaks)$density)))
}
# Option 2: all sources overlaid on one axis, each source offset slightly
# inside the bin so the coloured density bars do not overprint.
if(choose==2) {
if(siardata$TITLE!="SIAR data") {
if(siardata$numgroups > 1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste(siardata$TITLE,": proportion densities for group ",groupnum,sep=""),xlab="proportion",ylab="density")
if(siardata$numgroups ==1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste(siardata$TITLE,": proportion densities",sep=""),xlab="proportion",ylab="density")
} else {
if(siardata$numgroups > 1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste("Proportion densities for group ",groupnum,sep=""),xlab="proportion",ylab="density")
if(siardata$numgroups ==1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main="Proportion densities",xlab="proportion",ylab="density")
}
if(siarversion>0) mtext(paste("siar v",siarversion),side=1,line=4,adj=1,cex=0.6)
for(j in 1:siardata$numsources) {
Ans <- hist(usepars[,j],plot=FALSE,breaks=mybreaks)
for(k in 1:length(Ans$mids)) {
# NOTE(review): the identical lines() call is issued twice (here and on
# the next line) -- presumably a redundant duplicate; confirm intent.
lines(c(Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth,Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth),c(0,Ans$density[k]),col=j,lwd=(siardata$numsources+1)/2,lend=1)
lines(c(Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth,Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth),c(0,Ans$density[k]),col=j,lwd=(siardata$numsources+1)/2,lend=1)
}
}
legend(legloc,legend=sourcenames,col=seq(1,siardata$numsources),lty=1,lwd=3,bty="n")
}
# Option 1: one histogram plot per source, prompting between pages.
if(choose==1) {
devAskNewPage(ask=TRUE)
for(j in 1:siardata$numsources) {
if(siardata$TITLE!="SIAR data") {
if(siardata$numgroups > 1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste(siardata$TITLE,": proportion densities for group ",groupnum,": ",sourcenames[j],sep=""),xlab="proportion",ylab="density")
if(siardata$numgroups ==1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste(siardata$TITLE,": proportion densities: ",sourcenames[j],sep=""),xlab="proportion",ylab="density")
} else {
if(siardata$numgroups > 1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste("Proportion densities for group ",groupnum,": ",sourcenames[j],sep=""),xlab="proportion",ylab="density")
if(siardata$numgroups ==1) plot(1,1,xlim=c(0,1),ylim=c(0,top),type="n",main=paste("Proportion densities: ",sourcenames[j],sep=""),xlab="proportion",ylab="density")
}
if(siarversion>0) mtext(paste("siar v",siarversion),side=1,line=4,adj=1,cex=0.6)
Ans <- hist(usepars[,j],plot=FALSE,breaks=mybreaks)
for(k in 1:length(Ans$mids)) {
# NOTE(review): duplicated lines() call, as in the overlaid branch above.
lines(c(Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth,Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth),c(0,Ans$density[k]),col=j,lwd=(siardata$numsources+1)/2,lend=1)
lines(c(Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth,Ans$mids[k]+(j/((siardata$numsources+1)/2)-1)*halfwidth),c(0,Ans$density[k]),col=j,lwd=(siardata$numsources+1)/2,lend=1)
}
}
}
} |
# Generic: create an (empty) change table for modifying the meta data of a
# GADSdat or all_GADSdat object.  `level` selects variable-level vs
# value-level meta data; dispatch happens on the class of `GADSdat`.
getChangeMeta <- function(GADSdat, level = "variable") {
UseMethod("getChangeMeta")
}
getChangeMeta.GADSdat <- function(GADSdat, level = "variable") {
  # Build an empty change table from the meta data ("labels") of a GADSdat
  # object.  level = "variable" yields one row per variable (a varChanges
  # object); level = "value" yields one row per value label (a valChanges
  # object).  Any other level raises an error.
  check_GADSdat(GADSdat)
  meta <- GADSdat[["labels"]]

  if (identical(level, "variable")) {
    base_cols <- c("varName", "varLabel", "format", "display_width")
    added_cols <- paste0(base_cols, "_new")
    # Empty *_new columns are filled in later by the user.
    for (nm in added_cols) {
      meta[, nm] <- NA
    }
    return(new_varChanges(unique(meta[, c(base_cols, added_cols)])))
  }

  if (identical(level, "value")) {
    base_cols <- c("value", "valLabel", "missings")
    added_cols <- paste0(base_cols, "_new")
    for (nm in added_cols) {
      meta[, nm] <- NA
    }
    return(new_valChanges(meta[, c("varName", base_cols, added_cols)]))
  }

  stop("Invalid level argument.")
}
getChangeMeta.all_GADSdat <- function(GADSdat, level = "variable") {
  # Apply getChangeMeta() to every single GADSdat contained in an
  # all_GADSdat object; returns a named list of change tables, one per
  # element of GADSdat$datList.
  check_all_GADSdat(GADSdat)
  dat_names <- names(GADSdat$datList)
  out <- lapply(dat_names, function(nm) {
    getChangeMeta(extractGADSdat(GADSdat, name = nm), level = level)
  })
  names(out) <- dat_names
  out
}
new_varChanges <- function(df) {
  # Low-level constructor: tag a data.frame as a 'varChanges' change table.
  # Errors (via stopifnot) when the input is not a data.frame.
  stopifnot(is.data.frame(df))
  class(df) <- c("varChanges", "data.frame")
  df
}
# Validate a variable-level change table: must be a data.frame whose
# columns all come from the expected old/new column set.  Non-missing
# entries of varName_new are normalized through transf_names() (defined
# elsewhere in the package); NAs pass through unchanged.
# Returns the (possibly modified) changeTable.
check_varChanges <- function(changeTable) {
if(!is.data.frame(changeTable)) stop("changeTable is not a data.frame.")
colNames <- c("varName", "varLabel", "format", "display_width")
colNames <- c(colNames, paste0(colNames, "_new"))
if(any(!names(changeTable) %in% colNames)) stop("Irregular column names in changeTable.")
# NOTE(review): sapply's return type depends on the input -- an all-NA
# varName_new yields a logical vector rather than character; presumably
# downstream code tolerates this, confirm before changing to vapply.
changeTable$varName_new <- sapply(changeTable$varName_new, function(x) {
if(is.na(x)) return(NA)
transf_names(x)
})
changeTable
}
new_valChanges <- function(df) {
  # Low-level constructor: tag a data.frame as a 'valChanges' change table.
  # Errors (via stopifnot) when the input is not a data.frame.
  stopifnot(is.data.frame(df))
  class(df) <- c("valChanges", "data.frame")
  df
}
# Validate a value-level change table.  Checks the column set, restricts
# missings_new to {"miss", "valid", NA}, coerces character value/value_new
# columns to numeric where possible (via eatTools::asNumericIfPossible,
# erroring when the coercion fails), and rejects rows that try to attach a
# label or missing code to an NA value.  Returns the coerced changeTable.
check_valChanges <- function(changeTable) {
if(!is.data.frame(changeTable)) stop("changeTable is not a data.frame.")
oldCols <- c("value", "valLabel", "missings")
newCols <- paste0(oldCols, "_new")
colNames <- c("varName", oldCols, newCols)
if(any(!names(changeTable) %in% colNames)) stop("Irregular column names in changeTable.")
# missings_new may only be "miss", "valid" or NA.
if(!all(changeTable[, "missings_new"] %in% c("miss", "valid") | is.na(changeTable[, "missings_new"]))) {
stop("Irregular values in 'missings_new' column.")
}
# Coerce character value_new to numeric; error if any entry resists.
if(is.character(changeTable[, "value_new"])) {
changeTable[, "value_new"] <- suppressWarnings(eatTools::asNumericIfPossible(changeTable[, "value_new"],
force.string = FALSE))
if(is.character(changeTable[, "value_new"])) stop("Column 'value_new' in 'changeTable' is character and can not be transformed to numeric.")
}
# Same coercion for the existing value column.
if(is.character(changeTable[, "value"])) {
changeTable[, "value"] <- suppressWarnings(eatTools::asNumericIfPossible(changeTable[, "value"],
force.string = FALSE))
if(is.character(changeTable[, "value"])) stop("Column 'value' in 'changeTable' is character and can not be transformed to numeric.")
}
# Rows that set a missing code or label but reference no value at all
# (both value and value_new are NA) are invalid.
wrong_new_miss <- which((changeTable$missings_new == "miss" | !is.na(changeTable$valLabel_new))
& is.na(changeTable$value) & is.na(changeTable$value_new))
if(length(wrong_new_miss) > 0) stop("Value 'NA' can not receive a value label.")
changeTable
} |
shiny_classes_ronds <-
function(data,fondMaille,fondMailleElargi=NULL,fondContour,fondSuppl=NULL,idData,varVolume,varRatio,emprise="FRM",fondEtranger=NULL,fondChx=NULL)
{
options("stringsAsFactors"=FALSE)
msg_error1<-msg_error2<-msg_error3<-msg_error4<-msg_error5<-msg_error6<-msg_error7<-msg_error8<-msg_error9<-msg_error10<-msg_error11<-msg_error12<-msg_error13<-msg_error14<-msg_error15<-msg_error16<-msg_error17<-msg_error18<-msg_error19<-msg_error20<-msg_error21 <- NULL
if(any(class(data)!="data.frame")) msg_error1 <- "Les donnees doivent etre dans un data.frame / "
if(any(!any(class(fondMaille) %in% "sf"),!any(class(fondMaille) %in% "data.frame"))) msg_error2 <- "Le fond de maille doit etre un objet sf / "
if(!is.null(fondMailleElargi)) if(any(!any(class(fondMailleElargi) %in% "sf"),!any(class(fondMailleElargi) %in% "data.frame"))) msg_error3 <- "Le fond de maille elargie doit etre un objet sf / "
if(any(!any(class(fondContour) %in% "sf"),!any(class(fondContour) %in% "data.frame"))) msg_error4 <- "Le fond de contour doit etre un objet sf / "
if(!is.null(fondSuppl)) if(any(!any(class(fondSuppl) %in% "sf"),!any(class(fondSuppl) %in% "data.frame"))) msg_error5 <- "Le fond supplementaire doit etre un objet sf / "
if(any(class(idData)!="character")) msg_error6 <- "Le nom de la variable doit etre de type caractere / "
if(any(class(varVolume)!="character")) msg_error7 <- "Le nom de la variable doit etre de type caractere / "
if(any(class(varRatio)!="character")) msg_error8 <- "Le nom de la variable doit etre de type caractere / "
if(any(class(emprise)!="character")) msg_error9 <- "La valeur doit etre de type caractere ('FRM', '971', '972', '973', '974', '976' ou '999') / "
if(!is.null(fondChx)) if(any(!any(class(fondChx) %in% "sf"),!any(class(fondChx) %in% "data.frame"))) msg_error10 <- "Le fond des chx doit etre un objet sf / "
if(length(names(data))<3) msg_error11 <- "Le tableau des donnees n'est pas conforme. Il doit contenir au minimum une variable identifiant et les 2 variables a representer / "
if(length(names(fondMaille))<3) msg_error12 <- "Le fond de maille n'est pas conforme. La table doit contenir au minimum une variable identifiant, une variable libelle et la geometry / "
if(!is.null(fondMailleElargi)) if(length(names(fondMailleElargi))<3) msg_error13 <- "Le fond de maille elargie n'est pas conforme. La table doit contenir au minimum une variable identifiant, une variable libelle et la geometry / "
if(length(names(fondContour))<3) msg_error14 <- "Le fond de contour n'est pas conforme. La table doit contenir au minimum une variable identifiant, une variable libelle et la geometry / "
if(!is.null(fondSuppl)) if(length(names(fondSuppl))<3) msg_error15 <- "Le fond supplementaire n'est pas conforme. La table doit contenir au minimum une variable identifiant, une variable libelle et la geometry / "
if(!any(names(data) %in% idData)) msg_error16 <- "La variable identifiant les donnees n'existe pas dans la table des donnees / "
if(!any(names(data) %in% varVolume)) msg_error17 <- "La variable a representer n'existe pas dans la table des donnees / "
if(!any(names(data) %in% varRatio)) msg_error18 <- "La variable a representer n'existe pas dans la table des donnees / "
if(!emprise %in% c("FRM","971","972","973","974","976","999")) msg_error19 <- "La variable emprise doit etre 'FRM', '971', '972', '973', '974', '976' ou '999' / "
if(!is.null(fondEtranger)) if(any(!any(class(fondEtranger) %in% "sf"),!any(class(fondEtranger) %in% "data.frame"))) msg_error20 <- "Le fond etranger doit etre un objet sf / "
if(!is.null(fondEtranger)) if(length(names(fondEtranger))<3) msg_error21 <- "Le fond etranger n'est pas conforme. La table doit contenir au minimum une variable identifiant, une variable libelle et la geometry / "
if(any(!is.null(msg_error1),!is.null(msg_error2),!is.null(msg_error3),!is.null(msg_error4),
!is.null(msg_error5),!is.null(msg_error6),!is.null(msg_error7),!is.null(msg_error8),
!is.null(msg_error9),!is.null(msg_error10),!is.null(msg_error11),!is.null(msg_error12),
!is.null(msg_error13),!is.null(msg_error14),!is.null(msg_error15),!is.null(msg_error16),
!is.null(msg_error17),!is.null(msg_error18),!is.null(msg_error19),!is.null(msg_error20),!is.null(msg_error21)))
{
stop(simpleError(paste0(msg_error1,msg_error2,msg_error3,msg_error4,msg_error5,msg_error6,msg_error7,msg_error8,
msg_error9,msg_error10,msg_error11,msg_error12,msg_error13,msg_error14,msg_error15,msg_error16,
msg_error17,msg_error18,msg_error19,msg_error20,msg_error21)))
}
nb_up <- reactiveValues(a=0)
nb_down <- reactiveValues(a=0)
ordre_analyse <- reactiveValues(a=1,b=2)
insert_save <- reactiveValues(a=0)
remove_carte <- reactiveValues(a=0)
liste_fonds <- reactiveValues(a=c("analyse","maille","contour"))
m_save_ac_rp <- reactiveValues(a=0)
erreur_maille <- reactiveValues(a=FALSE)
max_classes <- reactiveValues(a=4)
methode_calcul <- c("fisher","jenks","kmeans","quantile","manuel")
legende <- reactiveValues(a=NULL)
sourc <- "Source : Insee"
names(data)[names(data)==idData] <- "CODE"
names(fondMaille)[1] <- "CODE"
names(fondMaille)[2] <- "LIBELLE"
names(fondContour)[1] <- "CODE"
names(fondContour)[2] <- "LIBELLE"
if(!is.null(fondMailleElargi))
{
names(fondMailleElargi)[1] <- "CODE"
names(fondMailleElargi)[2] <- "LIBELLE"
fondMailleElargi$LIBELLE<-iconv(fondMailleElargi$LIBELLE,"latin1","utf8")
}
epsg_etranger <- NULL
if(!is.null(fondEtranger))
{
names(fondEtranger)[1] <- "CODE"
names(fondEtranger)[2] <- "LIBGEO"
fondEtranger$LIBGEO<-iconv(fondEtranger$LIBGEO,"latin1","utf8")
if(substr(st_crs(fondEtranger)[1]$input,1,5) == "EPSG:")
{
epsg_etranger <- substr(st_crs(fondEtranger)[1]$input,6,9)
}else
{
epsg_etranger <- st_crs(fondEtranger)[1]$input
}
if(is.na(epsg_etranger) | epsg_etranger=="4326")
{
epsg_etranger <- "3395"
}
}
if(!is.null(fondSuppl))
{
names(fondSuppl)[1] <- "CODE"
names(fondSuppl)[2] <- "LIBELLE"
fondSuppl$LIBELLE<-iconv(fondSuppl$LIBELLE,"latin1","utf8")
}
fondMaille$LIBELLE<-iconv(fondMaille$LIBELLE,"latin1","utf8")
fondContour$LIBELLE<-iconv(fondContour$LIBELLE,"latin1","utf8")
ui <- navbarPage("OCEANIS", id="menu",
theme = shinytheme("superhero"),
tabPanel("Carte",value="carte",
sidebarLayout(
sidebarPanel(width = 3,
style = "overflow-y:scroll; min-height: 1000px; max-height: 1000px",
h4(HTML("<b><font color=
uiOutput("variable_classe_ac_rp"),
uiOutput("variable_rond_ac_rp"),
tags$hr(style="border: 5px solid
h4(HTML("<b><font color=
fluidRow(
column(width=9, offset=0.5,
uiOutput("ordre_fonds_ac_rp")
),
column(width=1,
br(),
br(),
htmlOutput("monter_fond_ac_rp", inline=FALSE),
htmlOutput("descendre_fond_ac_rp", inline=FALSE)
)
),
uiOutput("elargi_ac_rp"),
conditionalPanel(condition = 'input.elargi_ac_rp_id',
uiOutput("opacite_elargi_ac_rp")
),
uiOutput("ajout_territoire_ac_rp"),
uiOutput("ajout_reg_ac_rp"),
uiOutput("ajout_dep_ac_rp"),
tags$hr(style="border: 5px solid
h4(HTML("<b><font color=
uiOutput("taille_rond_ac_rp"),
htmlOutput("info_taille_max_rond_ac_rp"),
htmlOutput("info_rapport_rond_ac_rp"),
uiOutput("rapport_rond_ac_rp"),
conditionalPanel(condition = 'input.rapport_rond_ac_rp_id',
uiOutput("valeur_rapport_rond_ac_rp"),
htmlOutput("info_rapport_max_rond_ac_rp")
),
uiOutput("choix_centroid_ac_rp"),
tags$hr(style="border: 5px solid
h4(HTML("<b><font color=
uiOutput("liste_classes_ac_rp"),
uiOutput("methode_ac_rp"),
uiOutput("palette_insee_ac_rp"),
uiOutput("distribution_variable_ac_rp"),
conditionalPanel(condition = 'input.distribution_variable_ac_rp_id',
verticalLayout(
wellPanel(
style="background:
plotOutput("distribution_ac_rp"),
br(),
uiOutput("slider_bornes_ac_rp"),
uiOutput("valid_slider_bornes_ac_rp")
)
)
),
conditionalPanel(condition = 'input.methode_ac_rp_id=="manuel"',
br(),
uiOutput("zone_bornes_max_ac_rp"),
uiOutput("zone_bornes_ac_rp"),
uiOutput("zone_bornes_min_ac_rp"),
br(),
uiOutput("valid_bornes_ac_rp")
),
tags$hr(style="border: 5px solid
h4(HTML("<b><font color=
uiOutput("titre_ronds_legende_ac_rp"),
uiOutput("titre_classes_legende_ac_rp"),
br(),
uiOutput("affiche_legende_ac_rp"),
uiOutput("type_legende_ac_rp"),
br(),
tags$hr(style="border: 5px solid
h4(HTML("<b><font color=
uiOutput("save_carte_ac_rp"),
br(),
conditionalPanel(condition = 'input.mymap_ac_rp_click',
tags$div(class="dropup",
HTML('<button class="btn btn-primary dropdown-toggle" type="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
Exporter en projet Qgis
<span class="caret"></span>
</button>'),
tags$ul(class="dropdown-menu",
wellPanel(
style="background:
h4("Export de la carte en projet Qgis"),
br(),
uiOutput("sortie_qgis_ac_rp"),
br(),
uiOutput("titre1_qgis_ac_rp"),
uiOutput("titre2_qgis_ac_rp"),
uiOutput("source_qgis_ac_rp"),
tags$head(tags$style(HTML('
uiOutput("export_qgis_ac_rp")
)
)
)
),
br(),
uiOutput("aide_image_ac_rp"),
br()
),
mainPanel(
tags$head(
tags$style(HTML(".leaflet-container { background:
),
tabsetPanel(id="onglets_ac_rp",
tabPanel(title=HTML("<b>Carte</b>"),value="carte",
leafletOutput("mymap_ac_rp",width="112%",height = 950)
),
tabPanel(title=HTML(paste0("<b>Donn","\u00e9","es</b>")),value="donnees",
h5("S\u00e9lectionnez une ou plusieurs lignes pour ensuite les visualiser sur la carte."),
DT::dataTableOutput("mydonnees_ac_rp",width="112%",height = 950)),
tabPanel(title=HTML("<b>Maille</b>"),value="maille",
h5("S\u00e9lectionnez une ou plusieurs lignes pour ensuite les visualiser sur la carte."),
DT::dataTableOutput("mymaille_ac_rp",width="112%",height = 950)),
tabPanel(title=HTML("<b>Contour</b>"),value="contour",
h5("S\u00e9lectionnez une ou plusieurs lignes pour ensuite les visualiser sur la carte."),
DT::dataTableOutput("mycontour_ac_rp",width="112%",height = 950))
)
)
)
)
)
server <- function(input, output, session) {
observe({
output$variable_classe_ac_rp <- renderUI({
selectInput("variable_classe_ac_rp_id", label=h5("Variable des classes (en ratio)"), choices = varRatio, selected = varRatio)
})
output$variable_rond_ac_rp <- renderUI({
selectInput("variable_rond_ac_rp_id", label=h5("Variable des ronds (en volume)"), choices = varVolume, selected = varVolume)
})
output$ordre_fonds_ac_rp <- renderUI({
selectInput("ordre_fonds_ac_rp_id", label=h5("Modifier l'ordre des fonds"), choices = liste_fonds$a, multiple=TRUE, selectize=FALSE, selected = NULL)
})
output$monter_fond_ac_rp <- renderUI({
actionButton("monter_fond_ac_rp_id", label="", icon=icon("arrow-up"))
})
output$descendre_fond_ac_rp <- renderUI({
actionButton("descendre_fond_ac_rp_id", label="", icon=icon("arrow-down"))
})
if(!is.null(fondMailleElargi))
{
output$elargi_ac_rp <- renderUI({
checkboxInput("elargi_ac_rp_id", label = HTML("Afficher une repr\u00e9sentation \u00e9largie de l'analyse<br>(parfois long)"),
value = if(is.null(fondMailleElargi)) FALSE else TRUE)
})
output$opacite_elargi_ac_rp <- renderUI({
sliderInput("opacite_elargi_ac_rp_id", label = h5("Opacit\u00e9 de l'analyse \u00e9largie"), value=60, min=0, max=100, step=5, ticks=FALSE)
})
}
output$ajout_territoire_ac_rp <- renderUI({
checkboxInput("ajout_territoire_ac_rp_id", label = "Afficher le fond des territoires",
value = if(is.null(fondSuppl)) FALSE else TRUE)
})
output$ajout_reg_ac_rp <- renderUI({
checkboxInput("ajout_reg_ac_rp_id", label = "Afficher le fond des r\u00e9gions",
value = FALSE)
})
output$ajout_dep_ac_rp <- renderUI({
checkboxInput("ajout_dep_ac_rp_id", label = "Afficher le fond des d\u00e9partements",
value = FALSE)
})
output$taille_rond_ac_rp <- renderUI({
numericInput("taille_rond_ac_rp_id", label = h5("Rayon du rond le plus grand (en m\u00e8tres)"), value=round(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]])/1.25,0), min=0, max=round(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]]),0), step=1000)
})
output$info_taille_max_rond_ac_rp <- renderText({
HTML(paste0("<font size=2 color=white>Valeur max du rayon le plus grand = ", round(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]]),0)," m</font>"))
})
output$info_rapport_rond_ac_rp <- renderText({
HTML(paste0("<font size=2 color=white>Rapport Surface rond / Volume = ", (pi*(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]])/1.25)^2)/as.numeric(calcul_max_rayon_metres_ac_rp()[[2]]),"</font>"))
})
output$rapport_rond_ac_rp <- renderUI({
checkboxInput("rapport_rond_ac_rp_id", label = "Modifier la valeur du rapport (permet la comparaison entre cartes)", value=FALSE)
})
output$valeur_rapport_rond_ac_rp <- renderUI({
numericInput("valeur_rapport_rond_ac_rp_id", label = h5("Nouvelle valeur du rapport Surface rond / Volume"), value=(pi*(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]])/1.25)^2)/as.numeric(calcul_max_rayon_metres_ac_rp()[[2]]), min=0.1, max=(pi*(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]]))^2)/as.numeric(calcul_max_rayon_metres_ac_rp()[[2]]), step=0.1)
})
output$info_rapport_max_rond_ac_rp <- renderText({
HTML(paste0("<font size=2 color=white>Valeur max du rapport = ", (pi*(as.numeric(calcul_max_rayon_metres_ac_rp()[[1]]))^2)/as.numeric(calcul_max_rayon_metres_ac_rp()[[2]]),"</font>"))
})
if(!is.null(fondChx))
{
output$choix_centroid_ac_rp <- renderUI({
radioButtons("choix_centroid_ac_rp_id", label = h5("Les ronds sont centres sur"), choices=c("les centroides des communes"="centroid","les chx des communes"="chx"), selected = if(!is.null(fondChx)) "chx" else "centroid")
})
}else
{
output$choix_centroid_ac_rp <- renderUI({
})
}
# Class-count selector, fed by the admissible counts from nb_classes_ac_rp().
output$liste_classes_ac_rp <- renderUI({
  selectInput("nb_classes_ac_rp_id", label = h5("Nombre de classes"),
              choices = nb_classes_ac_rp()[[1]],
              selected = nb_classes_ac_rp()[[1]][1])
})
# Classification method selector (kmeans preselected).
output$methode_ac_rp <- renderUI({
  selectInput("methode_ac_rp_id", label = h5("M\u00e9thode de calcul des classes"),
              choices = methode_calcul,
              selected = "kmeans")
})
# Colour palette selector, restricted to palettes valid for the data sign.
output$palette_insee_ac_rp <- renderUI({
  selectInput("palette_insee_ac_rp_id", label = h5("Palette de couleurs"),
              choices = nb_classes_ac_rp()[[2]],
              selected = nb_classes_ac_rp()[[2]][1])
})
# Toggle button opening the distribution panel of the ratio variable.
output$distribution_variable_ac_rp <- renderUI({
  bsButton("distribution_variable_ac_rp_id",
           label = "Distribution de la variable",
           style = "btn btn-info",
           icon = icon("chart-bar"),
           type = "toggle",
           block = FALSE,
           disabled = FALSE,
           value = FALSE)
})
# Keep the distribution toggle button pressed once activated: whenever it
# reports TRUE, re-assert TRUE on the widget.
observeEvent(input$distribution_variable_ac_rp_id, {
  if (!input$distribution_variable_ac_rp_id) return()
  updateButton(session, "distribution_variable_ac_rp_id", value = TRUE)
}, ignoreInit = TRUE)
# On press of the "distribution" toggle, build the histogram of the ratio
# variable (bars follow the current class boundaries) plus the slider inputs
# used to adjust those boundaries interactively.
observeEvent(input$distribution_variable_ac_rp_id,{
# Histogram of varRatio; bin breaks are min/boundaries/max, de-duplicated.
output$distribution_ac_rp <- renderPlot({
dt_donnees <- data.frame(VAR=as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]))
ggplot(dt_donnees, aes(x=.data$VAR)) +
# NOTE(review): the stat_bin line below appears truncated in this view
# (the fill= value is cut off) — verify against the original file.
stat_bin(breaks=unique(sort(c(min(dt_donnees$VAR),new_bornes_ac_rp(),max(dt_donnees$VAR, na.rm = TRUE)))), closed = "left", fill="
scale_x_continuous(breaks=unique(sort(c(min(dt_donnees$VAR),new_bornes_ac_rp(),max(dt_donnees$VAR, na.rm = TRUE)))), labels = round(unique(sort(c(min(dt_donnees$VAR),new_bornes_ac_rp(),max(dt_donnees$VAR, na.rm = TRUE)))),2)) +
ggtitle(label=paste0("Distribution de la variable : ",varRatio)) +
xlab(label = varRatio)
})
# One slider per interior boundary; note 1:(n-1)+1 parses as (1:(n-1))+1,
# i.e. indices 2..n, matching the reversed boundary vector.
output$slider_bornes_ac_rp <- renderUI({
lapply(1:(as.numeric(input$nb_classes_ac_rp_id)-1)+1, function(i) {
sliderInput(inputId = paste0("slider_bornes_", i,"_ac_rp_id"), label = NULL,
value = rev(react_bornes_ac_rp()[[1]])[i], min = round(min(react_bornes_ac_rp()[[1]]),3), max = round(max(react_bornes_ac_rp()[[1]]),3), step = 0.001)
})
})
# Validation button; label depends on whether manual mode is active.
# NOTE(review): the actionButton style= string is truncated in this view.
output$valid_slider_bornes_ac_rp <- renderUI({
actionButton("valid_slider_bornes_ac_rp_id",label=label_bouton_ac_rp(), icon=icon("sync"), style="color:
})
},ignoreInit = TRUE)
# Label of the boundary-validation button: validates in manual mode,
# otherwise proposes switching to manual mode.
label_bouton_ac_rp <- eventReactive(input$methode_ac_rp_id, {
  if (input$methode_ac_rp_id == "manuel") {
    "Valider les bornes manuelles"
  } else {
    "Basculer en mode manuel"
  }
})
# Interior class boundaries read back from the sliders (slider i holds the
# boundary between classes i-1 and i, for i in 2..nb_classes).
# FIX: the original grew the result with c() inside a for loop; this
# functional form is equivalent (NULL slider values are dropped by unlist(),
# exactly as c(x, NULL) dropped them) but avoids repeated reallocation.
new_bornes_ac_rp <- reactive({
  idx <- seq(2, as.numeric(input$nb_classes_ac_rp_id))
  bornes <- unlist(lapply(idx, function(i) input[[paste0("slider_bornes_", i, "_ac_rp_id")]]))
  # Preserve the original's empty-vector (not NULL) return when no slider
  # has a value yet.
  if (is.null(bornes)) vector() else bornes
})
# Display the data maximum used as the upper bound for manual boundaries.
# FIX: add na.rm = TRUE so an NA in the ratio variable does not render
# "Borne max : NA" (consistent with the other max() calls in this file,
# e.g. in calcul_max_rayon_metres_ac_rp).
output$zone_bornes_max_ac_rp <- renderUI({
HTML(paste0("Borne max : ", round(max(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]), na.rm = TRUE),3)))
})
# Numeric inputs for manual class boundaries. Their starting values come
# from a kmeans seed classification when entering manual mode, otherwise
# from the currently selected method; the inputs are only rendered in
# manual mode.
output$zone_bornes_ac_rp <- renderUI({
  methode <- input$methode_ac_rp_id
  if (is.null(methode)) return(NULL)
  valeurs <- as.numeric(analyse_ac_rp()[[1]]$donnees[, varRatio])
  # Manual mode is seeded with kmeans boundaries.
  style_calcul <- if (methode == "manuel") "kmeans" else methode
  suppressWarnings(
    bornes_analyse <- classIntervals(valeurs, as.numeric(input$nb_classes_ac_rp_id),
                                     style = style_calcul, rtimes = 10,
                                     intervalClosure = "left")
  )
  carac_bornes <- calcul_bornes(analyse_ac_rp()[[1]]$donnees, bornes_analyse, varRatio,
                                input$nb_classes_ac_rp_id, methode,
                                input$palette_insee_ac_rp_id)
  if (is.null(input$nb_classes_ac_rp_id) || methode != "manuel") return(NULL)
  lapply(seq_len(as.numeric(input$nb_classes_ac_rp_id) - 1), function(i) {
    numericInput(inputId = paste0("bornes_", i, "_ac_rp_id"),
                 label = paste("Choix de la borne ", i),
                 value = round(rev(carac_bornes[[1]])[i + 1], 3))
  })
})
# Display the data minimum used as the lower bound for manual boundaries.
# FIX: add na.rm = TRUE so an NA in the ratio variable does not render
# "Borne min : NA" (mirrors the fix on the max bound output).
output$zone_bornes_min_ac_rp <- renderUI({
HTML(paste0("Borne min : ", round(min(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]), na.rm = TRUE),3)))
})
# Button triggering a map refresh after manual boundaries were edited.
# NOTE(review): the style= string is truncated in this view.
output$valid_bornes_ac_rp <- renderUI({
actionButton("valid_bornes_ac_rp_id",label="Rafraichir la carte", icon=icon("sync"), style="color:
})
# Legend controls: titles for the circle and class legends, a toggle to
# move the legend by clicking on the map, and the legend style.
output$titre_ronds_legende_ac_rp <- renderUI({
  textInput("titre_ronds_legende_ac_rp_id",
            label = h5("Titre de la l\u00e9gende des ronds"),
            value = "")
})
output$titre_classes_legende_ac_rp <- renderUI({
  textInput("titre_classes_legende_ac_rp_id",
            label = h5("Titre de la l\u00e9gende des classes"),
            value = "")
})
output$affiche_legende_ac_rp <- renderUI({
  checkboxInput("affiche_legende_ac_rp_id",
                label = "Activer le d\u00e9placement de la l\u00e9gende au clic",
                value = TRUE)
})
output$type_legende_ac_rp <- renderUI({
  radioButtons("type_legende_ac_rp_id",
               label = h5("Type de l\u00e9gende"),
               choices = list("Litterale" = 1, "En echelle" = 2),
               selected = 1,
               inline = TRUE)
})
# Button saving the current map into a new tab.
# NOTE(review): the style= string is truncated in this view.
output$save_carte_ac_rp <- renderUI({
actionButton("save_carte_ac_rp_id", label=HTML("<font size=3>Sauvegarder la carte dans un onglet</font>"), style="color:
})
# Button opening the Qgis export form.
output$entrees_qgis_ac_rp <- renderUI({
actionButton("entrees_qgis_ac_rp_id", label="Exporter en projet Qgis")
})
# Text field for the Qgis project name, with a fixed ".qgs" suffix addon.
output$sortie_qgis_ac_rp <- renderUI({
tags$div(class="input-group",
HTML('<input type="text" id="sortie_qgis_ac_rp_id" class="form-control" placeholder="Nom du projet" aria-describedby="sortie_qgis_ac_rp_id">
<span class="input-group-addon" id="sortie_qgis_ac_rp_id">.qgs</span>'))
})
# Qgis export metadata: two optional titles and the map source credit.
output$titre1_qgis_ac_rp <- renderUI({
  textInput("titre1_qgis_ac_rp_id",
            label = h5("Titre informatif"),
            value = "",
            placeholder = "Facultatif")
})
output$titre2_qgis_ac_rp <- renderUI({
  textInput("titre2_qgis_ac_rp_id",
            label = h5("Titre descriptif"),
            value = "",
            placeholder = "Facultatif")
})
output$source_qgis_ac_rp <- renderUI({
  textInput("source_qgis_ac_rp_id",
            label = h5("Source de la carte"),
            value = sourc)
})
# Static help dropdown explaining two ways to take a screenshot of the map
# (snipping tool vs. print-screen key). Pure HTML content, no reactivity.
# NOTE(review): the wellPanel style= string is truncated in this view.
output$aide_image_ac_rp <- renderUI({
tags$div(class="dropup",
HTML(paste0('<button class="btn btn-primary dropdown-toggle" type="button" data-toggle="dropdown">
<i class="fa fa-book fa-fw" aria-hidden="true"></i>
Proc','\u00e9','dure pour capture d\'','\u00e9','cran
<span class="caret"></span>
</button>')),
tags$ul(class="dropdown-menu",
wellPanel(
style="background:
div(
HTML("<font size=2>Deux possibilit\u00e9s :</font>"),
br(),
br(),
strong(HTML("<font size=3>Par l'Outil Capture</font>")),
br(),
HTML("<font size=2>1- Ouvrir un logiciel de capture (Outil Capture de Windows par exemple).</font>"),
br(),
HTML(paste0("<font size=2>2- S\u00e9lectionner la zone \u00e0 capturer.</font>")),
br(),
HTML("<font size=2>3- Enregistrer l'image ou copier la dans le presse-papier.</font>"),
br(),
br(),
strong(HTML(paste0("<font size=3>Par impression d'","\u00e9","cran</font>"))),
br(),
HTML("<font size=2>1- Appuyer sur la touche clavier \"Impr ecran\".</font>"),
br(),
HTML("<font size=2>2- Ouvrir un logiciel de retouche image (Paint par exemple).</font>"),
br(),
HTML("<font size=2>3- Coller l'image et l'enregistrer au format voulu (.jpg, .png, .bmp).</font>")
)
)
)
)
})
})
# Move the selected overlay up or down in the 3-slot layer order list.
# The counters nb_up$a / nb_down$a identify which of the two buttons
# actually fired (its click count exceeds the stored counter).
# FIXES: when neither counter advanced (spurious trigger), `ordre` stayed
# empty and `pos_select == ordre[1]` raised an "argument is of length zero"
# error; likewise which() can return integer(0) when the selection is not
# found. Both cases now bail out early.
observeEvent(list(input$monter_fond_ac_rp_id, input$descendre_fond_ac_rp_id), {
  ordre <- c()
  if (as.numeric(input$monter_fond_ac_rp_id) > nb_up$a) {
    ordre <- c(2, 3)          # "up": swap slots 2 and 3
    nb_up$a <- nb_up$a + 1
  }
  if (as.numeric(input$descendre_fond_ac_rp_id) > nb_down$a) {
    ordre <- c(1, 2)          # "down": swap slots 1 and 2
    nb_down$a <- nb_down$a + 1
  }
  if (length(ordre) == 0) return(NULL)
  pos_select <- if (is.null(input$ordre_fonds_ac_rp_id)) 0 else which(liste_fonds$a == input$ordre_fonds_ac_rp_id)
  if (length(pos_select) == 1 && pos_select > 0) {
    if (pos_select == ordre[1]) liste_fonds$a <- liste_fonds$a[c(2, 1, 3)]
    if (pos_select == ordre[2]) liste_fonds$a <- liste_fonds$a[c(1, 3, 2)]
    updateSelectInput(session, "ordre_fonds_ac_rp_id",
                      choices = liste_fonds$a,
                      selected = input$ordre_fonds_ac_rp_id
    )
  }
}, ignoreInit = TRUE)
# Maximal circle radius (metres) respecting the 1/7th semiology rule:
# the circles together may cover at most one seventh of the territory area.
# Returns list(max radius in metres, maximum of the volume variable).
calcul_max_rayon_metres_ac_rp <- reactive({
  dans_donnees <- fondMaille$CODE %in% data[, "CODE"]
  aire_territoire <- as.numeric(sum(st_area(fondMaille[dans_donnees, ])))
  serie <- data[data[, "CODE"] %in% fondMaille$CODE, varVolume]
  suppressWarnings(max_var <- max(serie, na.rm = TRUE))
  serie <- serie[!is.na(serie)]
  # Sum of squared value/max ratios drives the total covered surface.
  somme_quotient <- sum((serie / max_var)^2)
  max_surface_rond <- aire_territoire / (7 * somme_quotient)
  list(sqrt(max_surface_rond / pi), max_var)
})
# Radius (metres) implied by a user-entered ratio value.
rayon_ac_rp <- reactive({
  req(input$valeur_rapport_rond_ac_rp_id)
  Sys.sleep(3)  # deliberate pause, gives the user time to finish typing
  vol_max <- isolate(calcul_max_rayon_metres_ac_rp())[[2]]
  round(sqrt((input$valeur_rapport_rond_ac_rp_id * vol_max) / pi), 0)
})
# Debounced version consumed by the observer below.
rayon_react_ac_rp <- rayon_ac_rp %>% debounce(1000)
# When the debounced radius changes, push it into the radius input and
# refresh the displayed ratio text.
observeEvent(rayon_react_ac_rp(), {
  req(rayon_react_ac_rp())
  rayon <- rayon_react_ac_rp()
  if (length(rayon) == 0) return(NULL)
  if (is.na(rayon) || rayon == 0) return(NULL)
  isolate(updateNumericInput(session, "taille_rond_ac_rp_id", value = rayon))
  isolate(output$info_rapport_rond_ac_rp <- renderText({
    HTML(paste0("<font size=2 color=white>Rapport Surface rond / Volume = ", (pi*(rayon_react_ac_rp())^2)/isolate(calcul_max_rayon_metres_ac_rp())[[2]],"</font>"))
  }))
})
# Ratio implied by the current circle radius, plus its theoretical maximum.
rapport_ac_rp <- reactive({
  req(input$taille_rond_ac_rp_id)
  rayon_max <- isolate(calcul_max_rayon_metres_ac_rp())[[1]]
  vol_max <- isolate(calcul_max_rayon_metres_ac_rp())[[2]]
  list(val = (pi * (input$taille_rond_ac_rp_id)^2) / vol_max,
       max = (pi * rayon_max^2) / vol_max)
})
# Debounced version consumed by the observer below.
rapport_react_ac_rp <- rapport_ac_rp %>% debounce(1000)
# When the debounced ratio changes, push it back into the ratio input and
# refresh the displayed ratio text.
observeEvent(rapport_react_ac_rp(), {
  req(rapport_react_ac_rp())
  valeur <- rapport_react_ac_rp()$val
  if (length(valeur) == 0) return(NULL)
  if (is.na(valeur) || valeur == 0) return(NULL)
  isolate(updateNumericInput(session, "valeur_rapport_rond_ac_rp_id", value = valeur))
  isolate(output$info_rapport_rond_ac_rp <- renderText({
    HTML(paste0("<font size=2 color=white>Rapport Surface rond / Volume = ", rapport_react_ac_rp()$val,"</font>"))
  }))
})
# Circle anchoring mode; defaults to "centroid" until the radio button
# exists in the UI.
choix_centroid_ac_rp <- reactive({
  if (is.null(input$choix_centroid_ac_rp_id)) {
    "centroid"
  } else {
    input$choix_centroid_ac_rp_id
  }
})
# Current class boundaries and colours. Falls back to 3 kmeans classes
# while the inputs are not initialised; in manual mode the boundaries come
# from the last validated manual set (react_bornes_manuel_1_ac_rp).
# FIX: the two fallback branches (inputs NULL / inputs empty string) were
# duplicated verbatim; they are merged using short-circuit `||` (which also
# replaces the vectorised `|` on scalar conditions and guarantees the
# `== ""` comparisons are skipped when an input is NULL).
react_bornes_ac_rp <- reactive({
  if (is.null(input$nb_classes_ac_rp_id) || is.null(input$methode_ac_rp_id) ||
      input$nb_classes_ac_rp_id == "" || input$methode_ac_rp_id == "") {
    # Inputs not ready yet: default to 3 kmeans classes.
    max_classes$a <- 3
    methode <- "kmeans"
    suppressWarnings(bornes_analyse <- classIntervals(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]),max_classes$a,style=methode,rtimes=10,intervalClosure="left"))
  } else {
    max_classes$a <- as.numeric(input$nb_classes_ac_rp_id)
    if (is.na(max_classes$a)) return(NULL)
    methode <- as.character(input$methode_ac_rp_id)
    # Manual mode computes no classification here; boundaries are taken
    # from the validated manual set below.
    if (methode != "manuel") {
      suppressWarnings(bornes_analyse <- classIntervals(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]),max_classes$a,style=methode,rtimes=10,intervalClosure="left"))
    }
  }
  if (methode != "manuel") {
    carac_bornes <- calcul_bornes(analyse_ac_rp()[[1]]$donnees, bornes_analyse, varRatio,
                                  max_classes$a, methode, input$palette_insee_ac_rp_id)
  } else {
    carac_bornes <- react_bornes_manuel_1_ac_rp()
  }
  carac_bornes
})
# Initial classification: 3 kmeans classes; diverging red palette when the
# breaks cross zero, sequential blue otherwise.
react_bornes_init_ac_rp <- reactive({
  valeurs <- as.numeric(analyse_ac_rp()[[1]]$donnees[, varRatio])
  suppressWarnings(
    bornes_analyse <- classIntervals(valeurs, 3, style = "kmeans",
                                     rtimes = 10, intervalClosure = "left")
  )
  palette_init <- if (min(bornes_analyse$brks) < 0 && max(bornes_analyse$brks) >= 0) {
    "Insee_Rouge"
  } else {
    "Insee_Bleu"
  }
  calcul_bornes(analyse_ac_rp()[[1]]$donnees, bornes_analyse, varRatio, 3, "kmeans", palette_init)
})
# Manual boundaries validated by the user. Reads the numeric inputs
# bornes_1 .. bornes_{n-1}, inserts them between the kmeans data min/max,
# passes them through calcul_bornes(), then returns the boundary vector
# sorted in decreasing order (with the calcul_bornes() outer bounds).
# FIXES: the boundary collection no longer grows a vector with c() inside a
# for loop (NULL inputs are still dropped, as c(x, NULL) dropped them), and
# `decreasing = T` is spelled out as TRUE.
react_bornes_manuel_1_ac_rp <- eventReactive(input$valid_bornes_ac_rp_id, {
  suppressWarnings(bornes_analyse <- classIntervals(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]),max_classes$a,style="kmeans",rtimes=10,intervalClosure="left"))
  nb_bornes <- as.numeric(input$nb_classes_ac_rp_id) - 1
  bornes <- unlist(lapply(seq_len(nb_bornes), function(i) input[[paste0("bornes_", i, "_ac_rp_id")]]))
  bornes_analyse$brks <- c(min(bornes_analyse$brks), bornes, max(bornes_analyse$brks))
  carac_bornes <- calcul_bornes(analyse_ac_rp()[[1]]$donnees, bornes_analyse, varRatio,
                                input$nb_classes_ac_rp_id, input$methode_ac_rp_id,
                                input$palette_insee_ac_rp_id)
  # Re-attach the outer bounds computed by calcul_bornes().
  bornes <- c(carac_bornes[[1]][1], bornes, carac_bornes[[1]][length(carac_bornes[[1]])])
  carac_bornes[[1]] <- sort(unique(bornes), decreasing = TRUE)
  carac_bornes
}, ignoreNULL = TRUE)
# Switch to manual mode and copy each slider value into the matching
# numeric "bornes" input.
# NOTE(review): the loop range `0:(n)+1` parses as `(0:n)+1`, i.e. 1..n+1,
# while the numeric inputs are bornes_1..bornes_{n-1} and the sliders are
# slider_bornes_2..slider_bornes_n — the index mapping looks off by one at
# both ends; confirm against the slider/borne definitions above before
# changing anything.
observeEvent(input$valid_slider_bornes_ac_rp_id,{
updateSelectInput(session, inputId = "methode_ac_rp_id", selected = "manuel")
for (i in 0:(as.numeric(input$nb_classes_ac_rp_id))+1) {
updateNumericInput(session, inputId = paste0("bornes_", i,"_ac_rp_id"), value = input[[paste0("slider_bornes_", i,"_ac_rp_id")]])
}
},ignoreInit = TRUE)
# Admissible class counts and palettes for varRatio, depending on the sign
# of the series: a series crossing zero gets diverging palettes (up to 9
# classes); an all-non-negative series gets sequential palettes (up to 6);
# an all-non-positive series gets the "negative" palettes. Short series are
# capped at length(donnees) - 1 classes.
# FIXES: scalar conditions now use `&&`, and the branch structure covers
# series touching zero (min == 0 or max == 0), which previously matched no
# branch and left `max_palettes` undefined, making the reactive error out.
# Behaviour is unchanged for every case the original handled.
nb_classes_ac_rp <- reactive({
  if (is.null(varRatio)) return(NULL)
  donnees <- analyse_ac_rp()[[1]]$donnees[, varRatio]
  # Cap the number of classes for short series (same rule as the original).
  classes_admissibles <- function(plafond) {
    if (length(donnees) > 3 && length(donnees) < plafond) {
      c(3:(length(donnees) - 1))
    } else {
      c(3:plafond)
    }
  }
  suppressWarnings(
    if (min(donnees) < 0 && max(donnees) > 0) {
      # Series crossing zero: diverging palettes, up to 9 classes.
      max_classes <- classes_admissibles(9)
      max_palettes <- c("Insee_Rouge","Insee_Jaune")
    } else if (min(donnees) >= 0) {
      # All values >= 0: sequential palettes, up to 6 classes.
      max_classes <- classes_admissibles(6)
      max_palettes <- c("Insee_Bleu","Insee_Jaune","Insee_Rouge","Insee_Violet","Insee_Turquoise","Insee_Vert","Insee_Gris")
    } else {
      # Remaining case: all values <= 0 (min < 0): negative palettes.
      max_classes <- classes_admissibles(6)
      max_palettes <- c("Insee_Bleu","Violet_Neg","Turquoise_Neg","Vert_Neg","Gris_Neg")
    }
  )
  list(max_classes, max_palettes)
})
# Keep the reactive warm so the UI choices are available immediately.
observe({nb_classes_ac_rp()})
# Download button for the Qgis project archive.
output$export_qgis_ac_rp <- renderUI({
  downloadButton("downloadProjetQgis_ac_rp", label = "Exporter")
})
# Download handler: builds the Qgis project files in the temp directory and
# zips the project file together with its "layers" folder.
output$downloadProjetQgis_ac_rp <- downloadHandler(
  contentType = "zip",
  filename = function() {
    paste0(input$sortie_qgis_ac_rp_id, ".zip")
  },
  content = function(file) {
    # Work inside tempdir(); restore the working directory on exit.
    owd <- setwd(tempdir())
    on.exit(setwd(owd))
    rep_sortie <- dirname(file)
    dir.create("layers", showWarnings = FALSE)
    files <- EXPORT_PROJET_QGIS_AC_RP(file)
    zip::zip(zipfile = paste0("./", basename(file)),
             files = files,
             mode = "cherry-pick")
  }
)
# Export the current map as a Qgis project: writes every layer as a
# shapefile under <rep_sortie>/layers, generates the .qgs project file via
# export_projet_qgis_classes_ronds(), and returns the file list to zip.
# Returns NULL (with an explanatory modal) when the legend has not been
# placed on the map yet.
# NOTE(review): several modalDialog style= strings and HTML colour values
# are truncated in this view; verify against the original file.
EXPORT_PROJET_QGIS_AC_RP <- function(file)
{
showModal(modalDialog(HTML("<i class=\"fa fa-spinner fa-spin fa-2x fa-fw\"></i> <font size=+1>Export du projet Qgis en cours...</font> "), size="m", footer=NULL, style = "color:
sortie <- input$sortie_qgis_ac_rp_id
files <- c("layers", paste0(sortie,".qgs"))
rep_sortie <- dirname(file)
# Fallback class count when the selector is not initialised.
if(is.null(input$nb_classes_ac_rp_id))
{
max_classes <- 4
}else
{
max_classes <- input$nb_classes_ac_rp_id
}
# The legend table needs legende$a / analyse_leg_ac_rp(), which only exist
# after the user double-clicked the map to place the legend.
if(!is.null(lon_lat_ac_rp()[[1]]))
{
suppressWarnings(test_affiche_leg <- try(table_classe <- data.frame(classe=c(max_classes:1),label=legende$a,couleurs=analyse_leg_ac_rp()$pal_classes, stringsAsFactors = F),silent=TRUE))
if(class(test_affiche_leg) %in% "try-error")
{
showModal(modalDialog(HTML("<font size=+1><i class=\"fa fa-hand-pointer-o fa-fw\"></i><b>Double-cliquez</b> d'abord sur la carte pour afficher la l\u00e9gende.</font> "), size="m", footer=NULL, easyClose = TRUE, style = "color:
return(NULL)
}else
{
table_classe <- data.frame(classe=c(max_classes:1),label=legende$a,couleurs=analyse_leg_ac_rp()$pal_classes, stringsAsFactors = F)
}
}else
{
showModal(modalDialog(HTML("<font size=+1><i class=\"fa fa-hand-pointer-o fa-fw\"></i><b>Double-cliquez</b> d'abord sur la carte pour afficher la l\u00e9gende.</font> "), size="m", footer=NULL, easyClose = TRUE, style = "color:
return(NULL)
}
# Extended-zone layers: circles, classified mesh and raw mesh.
if(elargi_ac_rp())
{
analyse_donnees_elargi <- analyse_ac_rp()[[1]][[4]]
analyse_maille_elargi <- fondMailleElargi
names_donnees_elargi <- names(analyse_donnees_elargi)
analyse_donnees_elargi <- data.frame(analyse_donnees_elargi,val=analyse_donnees_elargi[,varRatio],classe=palette_ac_rp()[[1]](analyse_donnees_elargi[,varRatio]))
names(analyse_donnees_elargi) <- c(names_donnees_elargi,"val","classe")
analyse_classes_elargi <- merge(table_classe,analyse_donnees_elargi,by.x="couleurs",by.y="classe")
analyse_classes_elargi <- analyse_classes_elargi[,c("CODE","LIBELLE",varVolume,varRatio,"val","classe")]
# Sort by volume, largest first, to match the circle layer row order.
analyse_classes_elargi <- analyse_classes_elargi[order(analyse_classes_elargi[,varVolume],decreasing = T),]
analyse_ronds_elargi <- analyse_ronds_sf_ac_rp()[[2]]
analyse_ronds_elargi$classe <- analyse_classes_elargi$classe
analyse_ronds_elargi$COL_BOR <- "white"
fond_elargi_ronds <- analyse_ronds_elargi
analyse_maille_elargi <- merge(analyse_maille_elargi,analyse_classes_elargi[,c("CODE",varVolume,varRatio,"val","classe")],by="CODE")
analyse_maille_elargi <- analyse_maille_elargi[,c("CODE","LIBELLE",varVolume,varRatio,"val","classe","geometry")]
analyse_maille_elargi <- st_sf(analyse_maille_elargi,stringsAsFactors = FALSE)
fond_elargi_classes <- analyse_maille_elargi
fond_maille_elargi <- st_transform(fondMailleElargi, crs= as.numeric(code_epsg_ac_rp()))
suppressWarnings(st_write(fond_elargi_ronds, paste0(rep_sortie,"/layers/fond_elargi_ronds_carte.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_elargi_classes, paste0(rep_sortie,"/layers/fond_maille_elargi_carte.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_maille_elargi, paste0(rep_sortie,"/layers/fond_maille_elargi.shp"), delete_dsn = TRUE, quiet = TRUE))
}
# Main-zone layers: same construction as above.
analyse_donnees <- analyse_ac_rp()[[1]][[2]]
analyse_maille <- fondMaille
names_donnees <- names(analyse_donnees)
analyse_donnees <- data.frame(analyse_donnees,val=analyse_donnees[,varRatio],classe=palette_ac_rp()[[1]](analyse_donnees[,varRatio]))
names(analyse_donnees) <- c(names_donnees,"val","classe")
analyse_classes <- merge(table_classe,analyse_donnees,by.x="couleurs",by.y="classe")
analyse_classes <- analyse_classes[,c("CODE","LIBELLE",varVolume,varRatio,"val","classe")]
analyse_classes <- analyse_classes[order(analyse_classes[,varVolume],decreasing = T),]
analyse_ronds <- analyse_ronds_sf_ac_rp()[[1]]
analyse_ronds$classe <- analyse_classes$classe
analyse_ronds$COL_BOR <- "white"
analyse_maille <- merge(analyse_maille,analyse_classes[,c("CODE",varVolume,varRatio,"val","classe")],by="CODE")
analyse_maille <- analyse_maille[,c("CODE","LIBELLE",varVolume,varRatio,"val","classe","geometry")]
analyse_maille <- st_sf(analyse_maille,stringsAsFactors = FALSE)
fond_classes <- analyse_maille
fond_ronds <- analyse_ronds
# Legend circles built at the position the user double-clicked.
fond_ronds_leg <- construction_ronds_legende(lon_lat_ac_rp()[[1]],lon_lat_ac_rp()[[2]],code_epsg_ac_rp(),input$taille_rond_ac_rp_id)[[2]]
# Reproject every background layer to the project CRS.
fond_maille <- st_transform(fondMaille, crs= as.numeric(code_epsg_ac_rp()))
fond_contour <- st_transform(fondContour, crs= as.numeric(code_epsg_ac_rp()))
if(!is.null(fondSuppl) && input$ajout_territoire_ac_rp_id) fond_territoire <- st_transform(fond_territoire_ac_rp(), crs= as.numeric(code_epsg_ac_rp()))
if(input$ajout_dep_ac_rp_id) fond_departement <- st_transform(fond_departement_ac_rp(), crs= as.numeric(code_epsg_ac_rp()))
if(input$ajout_reg_ac_rp_id) fond_region <- st_transform(fond_region_ac_rp(), crs= as.numeric(code_epsg_ac_rp()))
fond_france <- st_transform(fond_habillage_ac_rp()[[1]], crs= as.numeric(code_epsg_ac_rp()))
fond_pays <- st_transform(fond_habillage_ac_rp()[[2]], crs= as.numeric(code_epsg_ac_rp()))
suppressWarnings(st_write(fond_ronds, paste0(rep_sortie,"/layers/fond_ronds_carte.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_classes, paste0(rep_sortie,"/layers/fond_maille_carte.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_ronds_leg, paste0(rep_sortie,"/layers/fond_ronds_leg.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_maille, paste0(rep_sortie,"/layers/fond_maille.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_contour,paste0(rep_sortie,"/layers/fond_contour.shp"), delete_dsn = TRUE, quiet = TRUE))
if(exists("fond_territoire")) if(!is.null(fond_territoire)) suppressWarnings(st_write(fond_territoire, paste0(rep_sortie,"/layers/fond_territoire.shp"), delete_dsn = TRUE, quiet = TRUE))
if(exists("fond_departement")) if(!is.null(fond_departement)) suppressWarnings(st_write(fond_departement, paste0(rep_sortie,"/layers/fond_departement.shp"), delete_dsn = TRUE, quiet = TRUE))
if(exists("fond_region")) if(!is.null(fond_region)) suppressWarnings(st_write(fond_region,paste0(rep_sortie,"/layers/fond_region.shp"), delete_dsn = TRUE, quiet = TRUE))
suppressWarnings(st_write(fond_france,paste0(rep_sortie,"/layers/fond_france.shp"), delete_dsn = TRUE, quiet = TRUE))
if(exists("fond_pays")) if(!is.null(fond_pays)) suppressWarnings(st_write(fond_pays,paste0(rep_sortie,"/layers/fond_pays.shp"), delete_dsn = TRUE, quiet = TRUE))
# Project metadata and ordered layer list (top to bottom) for the .qgs file.
titre1 <- paste0(input$titre1_qgis_ac_rp_id,"\n")
titre2 <- input$titre2_qgis_ac_rp_id
source <- input$source_qgis_ac_rp_id
annee <- format(Sys.time(), format = "%Y")
variable_a_representer <- varRatio
titre_leg_classes <- input$titre_classes_legende_ac_rp_id
l <- c()
l <- c(l,"fond_ronds_leg")
if(elargi_ac_rp())
{
l=c(l,
"fond_ronds_carte",
"fond_elargi_ronds_carte",
"fond_maille_carte",
"fond_maille_elargi_carte",
"fond_maille_elargi"
)
}else
{
l=c(l,
"fond_ronds_carte",
"fond_maille_carte"
)
}
l <- c(l,"fond_france","fond_contour","fond_maille")
if(exists("fond_territoire")) l <- c(l,"fond_territoire")
if(exists("fond_departement")) l <- c(l,"fond_departement")
if(exists("fond_region")) l <- c(l,"fond_region")
if(exists("fond_pays")) l <- c(l,"fond_pays")
export_projet_qgis_classes_ronds(l,rep_sortie,sortie,titre1,titre2,source,titre_leg_classes,table_classe,variable_a_representer,annee)
removeModal()
showModal(modalDialog(HTML(paste0("<font size=+1>Le projet Qgis a \u00e9t\u00e9 cr","\u00e9","ee.</font>")), size="m", footer=NULL, easyClose = TRUE, style = "color:
return(files)
}
# Whether the "extended territory" mode is active; FALSE until the checkbox
# exists in the UI.
elargi_ac_rp <- reactive({
  if (is.null(input$elargi_ac_rp_id)) {
    FALSE
  } else {
    input$elargi_ac_rp_id
  }
})
# EPSG code of the projection matching the map extent: Lambert-93 for
# metropolitan France, the local UTM-based CRS for each DOM, and the
# user-supplied code for foreign territories.
code_epsg_ac_rp <- reactive({
  switch(emprise,
         "FRM" = "2154",
         "971" = "5490",
         "972" = "5490",
         "973" = "2972",
         "974" = "2975",
         "976" = "4471",
         "999" = epsg_etranger)
})
# Core analysis reactive: computes the proportional-circle layer via
# k_ronds() and returns list(analysis, points reprojected to WGS84).
# Returns NULL when k_ronds() fails, or shows an error modal (and sets the
# erreur_maille$a flag) when the mesh level does not match the data.
# FIX: on success the original called k_ronds() a second time just to fetch
# the value already held in `test_k_ronds`; the try() result is now reused.
# NOTE(review): the modal style= and the tooltip paste0() colour strings
# are truncated in this view; they are kept verbatim.
analyse_ac_rp <- reactive({
req(choix_centroid_ac_rp())
suppressWarnings(test_k_ronds <- try(k_ronds(fondMaille,fondMailleElargi,names(fondMaille)[1],data,"CODE",varVolume,elargi_ac_rp(),choix_centroid_ac_rp(),fondChx),silent=T))
if(class(test_k_ronds) %in% "try-error")
{
return(NULL)
}else
{
# Reuse the computed result instead of calling k_ronds() again.
analyse <- test_k_ronds
}
if(is.null(analyse))
{
showModal(modalDialog(HTML(paste0("<font size=+1>La maille ne correspond pas au niveau g\u00e9ographique du fichier de donn","\u00e9","es.<br><br>Veuillez svp choisir une maille adapt","\u00e9","e ou modifier le fichier de donn","\u00e9","es.</font>")), size="l", footer=NULL, easyClose = TRUE, style = "color:
erreur_maille$a <- TRUE
return(NULL)
}
# HTML tooltip texts attached to the data rows.
analyse$donnees[,"TXT1"] <- paste0("<b> <font color=
analyse$donnees[,"TXT2"] <- paste0("<b> <font color=
if(elargi_ac_rp())
{
analyse$donnees_elargi[,"TXT1"] <- paste0("<b> <font color=
analyse$donnees_elargi[,"TXT2"] <- paste0("<b> <font color=
}
analyse_WGS84 <- st_transform(analyse$analyse_points,crs=4326)
return(list(analyse,analyse_WGS84))
})
# Analysis enriched with legend information: class breaks and the colour
# vector (reversed so colours run from the lowest class upwards).
analyse_leg_ac_rp <- reactive({
  analyse <- analyse_ac_rp()[[1]]
  analyse$rupture_classes <- palette_ac_rp()[[2]]
  analyse$pal_classes <- rev(palette_ac_rp()[[3]])
  analyse
})
# Background layers in WGS84: list(France/territory outline, surrounding
# countries). For the DOM the "pays" layer is the region itself, except
# French Guiana which has neighbouring countries.
fond_habillage_ac_rp <- reactive({
  if (emprise == "FRM") {
    fond_pays <- st_transform(sf_paysm(), crs = 4326)
    fond_france <- st_transform(sf_fram(), crs = 4326)
  } else if (emprise != "999") {
    if (emprise == "971") {
      fond_france <- st_transform(sf_reg01(), crs = 4326)
      fond_pays <- fond_france
    }
    if (emprise == "972") {
      fond_france <- st_transform(sf_reg02(), crs = 4326)
      fond_pays <- fond_france
    }
    if (emprise == "973") {
      fond_france <- st_transform(sf_reg03(), crs = 4326)
      fond_pays <- st_transform(sf_pays973(), crs = 4326)
    }
    if (emprise == "974") {
      fond_france <- st_transform(sf_reg04(), crs = 4326)
      fond_pays <- fond_france
    }
    if (emprise == "976") {
      fond_france <- st_transform(sf_reg06(), crs = 4326)
      fond_pays <- fond_france
    }
  } else if (emprise == "999") {
    fond_france <- st_transform(fondEtranger, crs = 4326)
    fond_pays <- fond_france
  }
  list(fond_france, fond_pays)
})
# Contour and mesh layers reprojected to WGS84. Returns NULL (with an error
# modal and the erreur_maille$a flag) when either reprojection fails.
# NOTE(review): the modal style= string is truncated in this view.
fond_contour_maille_ac_rp <- reactive({
test_contour <- try(st_transform(fondContour,crs=4326), silent = TRUE)
test_maille <- try(st_transform(fondMaille,crs=4326), silent = TRUE)
if(any(list(class(test_contour),class(test_maille)) %in% "try-error"))
{
showModal(modalDialog(HTML(paste0("<font size=+1>Une erreur est survenue dans la cr","\u00e9","ation du territoire.<br><br>Veuillez svp v\u00e9rifier vos donn","\u00e9","es et les variables choisies.</font>")), size="m", footer=NULL, easyClose = TRUE, style = "color:
erreur_maille$a <- TRUE
return(NULL)
}else
{
# NOTE(review): the transforms are recomputed here rather than reusing the
# try() results above; presumably cheap, but could reuse test_contour /
# test_maille.
contour_WGS84 <- st_transform(fondContour,crs=4326)
maille_WGS84 <- st_transform(fondMaille,crs=4326)
}
return(list(contour_WGS84,maille_WGS84))
})
# Extended-zone layers (circle points + mesh) in WGS84; NULL when the
# extended mode is off.
fond_elargi_ac_rp <- reactive({
  req(analyse_ac_rp())
  if (!elargi_ac_rp()) return(NULL)
  analyse_WGS84_elargi <- st_transform(analyse_ac_rp()[[1]]$analyse_points_elargi, crs = 4326)
  maille_WGS84_elargi <- st_transform(fondMailleElargi, crs = 4326)
  list(analyse_WGS84_elargi, maille_WGS84_elargi)
})
# Bounding box of the contour layer, as list(c(xmin, xmax), c(ymin, ymax)).
list_bbox_ac_rp <- reactive({
  req(fond_contour_maille_ac_rp())
  bbox <- st_bbox(fond_contour_maille_ac_rp()[[1]])
  list(c(bbox[1], bbox[3]), c(bbox[2], bbox[4]))
})
# Validated circle radius (metres) from the user input; warns (modal) when
# it exceeds the 1/7th-rule maximum but still returns the entered value.
# NOTE(review): the modal style= string is truncated in this view, and the
# `taille_rond <- 1000` assignment below is dead code — req() above already
# guarantees the input is non-NULL, and the variable is never read.
calcul_rond_ac_rp <- reactive({
req(calcul_max_rayon_metres_ac_rp(),input$taille_rond_ac_rp_id)
if(is.null(input$taille_rond_ac_rp_id)) taille_rond <- 1000
if(!is.null(input$taille_rond_ac_rp_id))
{
if(input$taille_rond_ac_rp_id>calcul_max_rayon_metres_ac_rp()[[1]])
{
showModal(modalDialog(HTML(paste0("Le rayon du rond le plus grand est trop \u00e9lev\u00e9 et ne permet pas de respecter la r\u00e8gle s\u00e9miologique des 1/7\u00e8me. Le rayon max conseill\u00e9 est ",round(calcul_max_rayon_metres_ac_rp()[[1]],2)," m\u00e8tres.")), size="l", footer=NULL, easyClose = TRUE, style = "color:
}
taille_rond_m <- input$taille_rond_ac_rp_id
}else
{
taille_rond_m <- NULL
}
return(taille_rond_m)
})
# Build the circle geometries as sf objects: one point per commune (WGS84),
# buffered in the projected CRS with a radius proportional to
# sqrt(volume / max volume). Returns list(main circles, extended circles
# or NULL).
analyse_ronds_sf_ac_rp <- reactive({
req(analyse_ac_rp(),code_epsg_ac_rp(),calcul_rond_ac_rp())
if(elargi_ac_rp())
{
req(fond_elargi_ac_rp())
# Extended zone: same construction as the main zone below.
centres <- rbind(st_coordinates(fond_elargi_ac_rp()[[1]]))
row.names(centres) <- c(1:(nrow(analyse_ac_rp()[[1]]$donnees_elargi)))
ronds <- st_sf(geometry=st_sfc(lapply(c(1:nrow(centres)),function(x) st_point(centres[x,])),crs=4326))
# Buffer radius scales with sqrt(volume) so AREA is proportional to volume.
ronds_pl_elargi <- st_buffer(st_transform(ronds, crs= as.numeric(code_epsg_ac_rp())), calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees_elargi[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]))
dt_ronds_sf <- data.frame(ronds_pl_elargi,stringsAsFactors = F)
analyse_ronds_sf_elargi <- st_sf(cbind(analyse_ac_rp()[[1]]$donnees_elargi,dt_ronds_sf))
}else
{
analyse_ronds_sf_elargi <- NULL
}
# Main zone circles.
centres <- rbind(st_coordinates(analyse_ac_rp()[[2]]))
row.names(centres) <- c(1:(nrow(analyse_ac_rp()[[1]]$donnees)))
ronds <- st_sf(geometry=st_sfc(lapply(c(1:nrow(centres)),function(x) st_point(centres[x,])),crs=4326))
ronds_pl <- st_buffer(st_transform(ronds, crs= as.numeric(code_epsg_ac_rp())), calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]))
dt_ronds_sf <- data.frame(ronds_pl,stringsAsFactors = F)
analyse_ronds_sf <- st_sf(cbind(analyse_ac_rp()[[1]]$donnees,dt_ronds_sf))
return(list(analyse_ronds_sf,analyse_ronds_sf_elargi))
})
# Colour palette for the classes: clamps the outer boundaries to the data
# min/max, falls back to kmeans when the chosen method produces duplicate
# boundaries, and returns list(leaflet colorBin palette, boundaries,
# colour vector).
# NOTE(review): the fallback modal style= string is truncated in this view.
palette_ac_rp <- reactive({
bornes <- react_bornes_ac_rp()[[1]]
if(is.null(bornes)) return(NULL)
# Boundaries are stored in decreasing order: [1] is the upper bound,
# [length] the lower bound; both are clamped to the observed data range.
if(elargi_ac_rp())
{
bornes[length(bornes)] <- min(as.numeric(analyse_ac_rp()[[1]]$donnees_elargi[,varRatio]))
bornes[1] <- max(as.numeric(analyse_ac_rp()[[1]]$donnees_elargi[,varRatio]), na.rm = TRUE)
}else
{
bornes[length(bornes)] <- min(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]))
bornes[1] <- max(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]), na.rm = TRUE)
}
if(length(unique(bornes)) != length(bornes))
{
# Duplicate boundaries: warn the user and fall back to kmeans.
removeModal()
showModal(modalDialog(HTML(paste0("<font size=+1>Les bornes calculees avec la methode '",input$methode_ac_rp_id,"' ne sont pas uniques. La methode kmeans a donc ete retenue.</font>")), size="l", footer=NULL, style = "color:
Sys.sleep(7)
suppressWarnings(bornes_analyse <- classIntervals(as.numeric(analyse_ac_rp()[[1]]$donnees[,varRatio]),max_classes$a,style="kmeans",rtimes=10,intervalClosure="left"))
carac_bornes <- calcul_bornes(analyse_ac_rp()[[1]]$donnees,bornes_analyse,varRatio,max_classes$a,"kmeans",input$palette_insee_ac_rp_id)
updateSelectInput(session,"methode_ac_rp_id",choices = methode_calcul, selected="kmeans")
bornes <- carac_bornes[[1]]
pal_classes <- carac_bornes[[2]]
}else
{
pal_classes <- react_bornes_ac_rp()[[2]]
}
# Missing colours default to grey.
pal_classes[is.na(pal_classes)] <- "grey"
palette<-colorBin(palette=pal_classes, domain=0:100, bins=bornes, na.color="grey")
return(list(palette,bornes,pal_classes))
})
# Optional user-supplied territory layer, reprojected to WGS84 (NULL when
# absent).
fond_territoire_ac_rp <- reactive({
  if (is.null(fondSuppl)) return(NULL)
  st_transform(fondSuppl, crs = 4326)
})
# Region boundaries in WGS84.
fond_region_ac_rp <- reactive({
  st_transform(sf_regm(), crs = 4326)
})
# Department boundaries in WGS84.
fond_departement_ac_rp <- reactive({
  st_transform(sf_depm(), crs = 4326)
})
# Circles matching the rows selected in the data table, extended zone
# (NULL when the extended mode is off).
fond_select_donnees_elargi_ac_rp <- reactive({
  req(analyse_ronds_sf_ac_rp(), analyse_ac_rp())
  if (!elargi_ac_rp()) return(NULL)
  ronds <- analyse_ronds_sf_ac_rp()[[2]]
  codes_sel <- analyse_ac_rp()[[1]]$donnees_elargi[input$mydonnees_ac_rp_rows_selected, "CODE"]
  selection <- ronds[as.data.frame(ronds)[, "CODE"] %in% codes_sel, ]
  st_transform(selection, crs = 4326)
})
# Circles matching the rows selected in the data table, main zone
# (NULL when no row matches).
fond_select_donnees_ac_rp <- reactive({
  req(analyse_ronds_sf_ac_rp(), analyse_ac_rp())
  ronds <- analyse_ronds_sf_ac_rp()[[1]]
  codes_sel <- analyse_ac_rp()[[1]]$donnees[input$mydonnees_ac_rp_rows_selected, "CODE"]
  selection <- ronds[as.data.frame(ronds)[, "CODE"] %in% codes_sel, ]
  if (nrow(selection) == 0) return(NULL)
  st_transform(selection, crs = 4326)
})
# Mesh polygons matching the rows selected in the mesh table, extended zone
# (NULL when the extended mode is off).
fond_select_maille_elargi_ac_rp <- reactive({
  req(fond_elargi_ac_rp())
  if (!elargi_ac_rp()) return(NULL)
  maille <- fond_elargi_ac_rp()[[2]]
  codes_sel <- as.data.frame(fondMailleElargi)[input$mymaille_ac_rp_rows_selected, "CODE"]
  maille[as.data.frame(maille)[, "CODE"] %in% codes_sel, ]
})
# Mesh polygons matching the rows selected in the mesh table, main zone.
fond_select_maille_ac_rp <- reactive({
  req(fond_contour_maille_ac_rp())
  maille <- fond_contour_maille_ac_rp()[[2]]
  codes_sel <- as.data.frame(fondMaille)[input$mymaille_ac_rp_rows_selected, "CODE"]
  maille[as.data.frame(maille)[, "CODE"] %in% codes_sel, ]
})
# Contour polygons matching the rows selected in the contour table.
fond_select_contour_ac_rp <- reactive({
  req(fond_contour_maille_ac_rp())
  contour <- fond_contour_maille_ac_rp()[[1]]
  codes_sel <- as.data.frame(fondContour)[input$mycontour_ac_rp_rows_selected, "CODE"]
  contour[as.data.frame(contour)[, "CODE"] %in% codes_sel, ]
})
# Reactive: builds the initial leaflet map for the "carte" tab of the
# proportional-circles / class view (suffix _ac_rp).
# Sequence: progress modal -> base leaflet widget in the local projection
# (EPSG code from code_epsg_ac_rp(), proj4 string from fondMaille or
# fondEtranger) -> fixed z-index map panes -> background polygons
# (country ring, France, optional fondSuppl territory) -> mesh contour ->
# proportional circles computed by k_ronds() -> choropleth polygons whose
# fill is a colorBin palette over react_bornes_init_ac_rp() breaks.
# The background-only map is stashed in m_save_ac_rp$a so the "save map"
# feature can rebuild layers on top of it later.
# Returns the finished leaflet map, or NULL when ordering the data by
# varVolume/varRatio fails (try() guard around the class extraction).
# NOTE(review): many string literals below (hex colours, popup HTML,
# modal style attributes) appear truncated at a '#' character, presumably
# by an upstream comment-stripping step; this block cannot parse as-is.
# Restore the literals from the original source before running -- TODO
# confirm against upstream. No interior comments are added past the first
# truncated literal because the quote state of the text is broken there.
react_fond_ac_rp <- reactive({
if(input$menu=="carte")
{
showModal(modalDialog(HTML("<i class=\"fa fa-spinner fa-spin fa-2x fa-fw\"></i><font size=+1>\u00c9laboration de la carte...</font> "), size="m", footer=NULL, style = "color:
if(is.null(fondEtranger))
{
proj4 <- st_crs(fondMaille)$proj4string
}else{
proj4 <- st_crs(fondEtranger)$proj4string
}
m <- leaflet(padding = 0,
options = leafletOptions(
preferCanvas = TRUE,
transition = 2,
crs = leafletCRS(crsClass = "L.Proj.CRS",
code = paste0("EPSG:", code_epsg_ac_rp()),
proj4def = proj4,
resolutions = 2^(16:1)
)
)) %>%
setMapWidgetStyle(list(background = "
addTiles_insee(attribution = paste0("<a href=\"http://www.insee.fr\">OCEANIS - \u00A9 IGN - INSEE ",format(Sys.time(), format = "%Y"),"</a>")) %>%
fitBounds(lng1 = min(list_bbox_ac_rp()[[1]]),
lat1 = min(list_bbox_ac_rp()[[2]]),
lng2 = max(list_bbox_ac_rp()[[1]]),
lat2 = max(list_bbox_ac_rp()[[2]])
) %>%
addScaleBar(position = 'bottomright',
options = scaleBarOptions(metric = TRUE, imperial = FALSE)
) %>%
addMapPane(name = "fond_pays", zIndex = 401) %>%
addMapPane(name = "fond_france", zIndex = 402) %>%
addMapPane(name = "fond_dep", zIndex = 403) %>%
addMapPane(name = "fond_reg", zIndex = 404) %>%
addMapPane(name = "fond_territoire", zIndex = 405) %>%
addMapPane(name = "fond_trio3", zIndex = 406) %>%
addMapPane(name = "fond_trio2", zIndex = 407) %>%
addMapPane(name = "fond_trio1", zIndex = 408) %>%
addMapPane(name = "selection", zIndex = 409) %>%
addMapPane(name = "fond_legende", zIndex = 410)
if(emprise %in% c("FRM","973"))
{
m <- addPolygons(map = m, data = fond_habillage_ac_rp()[[2]][,"LIBGEO"], opacity = 1,
stroke = TRUE, color = "white",
weight = 1,
options = pathOptions(pane = "fond_pays", clickable = F),
fill = T, fillColor = "
)
}
m <- addPolygons(map = m, data = fond_habillage_ac_rp()[[1]][,"LIBGEO"], opacity = 1,
stroke = TRUE, color = "black",
weight = 1.5,
options = pathOptions(pane = "fond_france", clickable = F),
fill = T, fillColor = "white", fillOpacity = 1
)
m_save_ac_rp$a <- m
if(!is.null(fondSuppl))
{
m <- addPolygons(map = m, data = fond_territoire_ac_rp(),
stroke = TRUE, color = "
weight = 0.5,
options = pathOptions(pane = "fond_territoire", clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.001,
group = "territoire"
)
}
m <- addPolygons(map = m, data = fond_contour_maille_ac_rp()[[1]], opacity = 0.3,
stroke = TRUE, color = "black", weight = 3,
options = pathOptions(pane = "fond_trio3", clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.3,
group = "maille_contour"
)
analyse <- k_ronds(fondMaille,fondMailleElargi,names(fondMaille)[1],data,"CODE",varVolume,FALSE,"centroid",fondChx)
analyse$donnees[,"TXT1"] <- paste0("<b> <font color=
analyse$donnees[,"TXT2"] <- paste0("<b> <font color=
analyse_WGS84 <- st_transform(analyse$analyse_points,crs=4326)
m <- addCircles(map = m,
lng = st_coordinates(analyse_WGS84)[,1],
lat = st_coordinates(analyse_WGS84)[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 1.5,
radius = (calcul_max_rayon_metres_ac_rp()[[1]]/1.25)*sqrt(analyse$donnees[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = "fond_trio1", clickable = T),
popup = paste0("<b> <font color=
fill = F,
group = "taille"
)
suppressWarnings(test_analyse_maille_classe <- try(analyse$donnees[rev(order(analyse$donnees[,varVolume])),varRatio],silent=T))
if(class(test_analyse_maille_classe) %in% "try-error")
{
return(NULL)
}else
{
analyse_maille_classe <- analyse$donnees[rev(order(analyse$donnees[,varVolume])),varRatio]
}
bornes <- react_bornes_init_ac_rp()[[1]]
bornes[length(bornes)] <- min(as.numeric(analyse$donnees[,varRatio]))
bornes[1] <- max(as.numeric(analyse$donnees[,varRatio]), na.rm = TRUE)
pal_classes <- react_bornes_init_ac_rp()[[2]]
pal_classes[is.na(pal_classes)] <- "grey"
palette<-colorBin(palette=pal_classes, domain=0:100, bins=bornes, na.color="grey")
analyse_maille <- merge(fond_contour_maille_ac_rp()[[2]][,c("CODE","geometry")],analyse$donnees[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille)[3] <- varVolume
names(analyse_maille)[4] <- varRatio
analyse_maille <- analyse_maille[rev(order(as.data.frame(analyse_maille)[,varVolume])),]
analyse_maille <- st_sf(analyse_maille,stringsAsFactors = FALSE)
m <- addPolygons(map = m, data = analyse_maille, opacity = 1,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = "fond_trio2", clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = palette(analyse_maille_classe),
fillOpacity = 1,
group = "classe"
)
removeModal()
showModal(modalDialog(HTML("<font size=+1>Veuillez patientez svp, la carte va s'afficher dans quelques secondes...<br><br><i class=\"fa fa-hand-pointer-o fa-fw\"></i><b>Double-cliquez</b> ensuite sur la carte pour afficher la l\u00e9gende.</font> "), size="m", footer=NULL, easyClose = TRUE, style = "color:
return(m)
}
})
# Toggle the supplementary-territory overlay ("territoire" group) on the
# live map proxy. The existing group is always cleared first so unticking
# the checkbox removes the layer; it is redrawn only when fondSuppl exists
# and the checkbox is ticked. ignoreInit avoids drawing at startup.
# NOTE(review): the colour and popup string literals below are truncated
# at a '#' character (upstream comment stripping); restore before running.
observeEvent(input$ajout_territoire_ac_rp_id,{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "territoire")
if(!is.null(fondSuppl))
{
if(input$ajout_territoire_ac_rp_id)
{
proxy <- addPolygons(map = proxy, data = fond_territoire_ac_rp(),
stroke = TRUE, color = "
weight = 0.5,
options = pathOptions(pane = "fond_territoire", clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.001,
group = "territoire"
)
}
}
},ignoreInit = TRUE)
# Toggle the regional-boundaries overlay ("region" group) on the live map
# proxy. The group is always cleared first so unticking the checkbox
# removes the layer; it is only redrawn for metropolitan France
# (emprise == "FRM") when the checkbox is ticked. ignoreInit avoids a
# useless redraw at app startup.
# Fix: use TRUE/FALSE instead of the reassignable shortcuts T/F.
observeEvent(input$ajout_reg_ac_rp_id,{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "region")
if(emprise=="FRM")
{
if(input$ajout_reg_ac_rp_id)
{
# Non-clickable grey outlines drawn in the dedicated "fond_reg" pane.
proxy <- addPolygons(map = proxy, data = fond_region_ac_rp(),
stroke = TRUE, color = "grey", opacity = 1,
weight = 1.5,
options = pathOptions(pane = "fond_reg", clickable = FALSE),
fill = FALSE,
group = "region"
)
}
}
},ignoreInit = TRUE)
# Toggle the departmental-boundaries overlay ("departement" group) on the
# live map proxy. The group is always cleared first so unticking the
# checkbox removes the layer; it is only redrawn for metropolitan France
# (emprise == "FRM") when the checkbox is ticked. ignoreInit avoids a
# useless redraw at app startup.
# Fix: use TRUE/FALSE instead of the reassignable shortcuts T/F.
observeEvent(input$ajout_dep_ac_rp_id,{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "departement")
if(emprise=="FRM")
{
if(input$ajout_dep_ac_rp_id)
{
# Non-clickable thin grey outlines drawn in the dedicated "fond_dep" pane.
proxy <- addPolygons(map = proxy, data = fond_departement_ac_rp(),
stroke = TRUE, color = "grey", opacity = 1,
weight = 0.5,
options = pathOptions(pane = "fond_dep", clickable = FALSE),
fill = FALSE,
group = "departement"
)
}
}
},ignoreInit = TRUE)
# Reorder map layers when the user moves a layer up/down. liste_fonds$a
# holds the desired stacking order ("analyse", "maille", "contour"); each
# layer is redrawn into pane "fond_trio<i>" where i is its position in
# that list (panes trio1..trio3 have fixed z-indexes). The pane index of
# the circles layer is remembered in ordre_analyse$a for later redraws.
# Bails out on first invocation (both counters at 0) and when ordering
# the analysis data fails (try() guard). ignoreInit set as well.
# NOTE(review): colour/popup string literals below are truncated at '#'
# (upstream comment stripping); no interior comments are added because
# the quote state of the remaining text is broken -- restore the literals
# from the original source before running.
observeEvent(list(input$monter_fond_ac_rp_id,input$descendre_fond_ac_rp_id),{
if(as.numeric(input$monter_fond_ac_rp_id)==0 & as.numeric(input$descendre_fond_ac_rp_id)==0) return(NULL)
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "maille_contour")
clearGroup(map = proxy, group = "taille")
i <- 1
for(fond in liste_fonds$a)
{
if(fond=="analyse")
{
proxy <- addCircles(map = proxy,
lng = st_coordinates(analyse_ac_rp()[[2]])[,1],
lat = st_coordinates(analyse_ac_rp()[[2]])[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 1.5,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
fill = F,
group = "taille"
)
ordre_analyse$a <- i
}
if(fond=="maille")
{
suppressWarnings(test_analyse_maille_classe <- try(analyse_ac_rp()[[1]]$donnees[rev(order(analyse_ac_rp()[[1]]$donnees[,varVolume])),varRatio],silent=T))
if(class(test_analyse_maille_classe) %in% "try-error")
{
return(NULL)
}else
{
analyse_maille_classe <- analyse_ac_rp()[[1]]$donnees[rev(order(analyse_ac_rp()[[1]]$donnees[,varVolume])),varRatio]
}
analyse_maille <- merge(fond_contour_maille_ac_rp()[[2]][,c("CODE","geometry")],analyse_ac_rp()[[1]]$donnees[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille)[3] <- varVolume
names(analyse_maille)[4] <- varRatio
analyse_maille <- analyse_maille[rev(order(as.data.frame(analyse_maille)[,varVolume])),]
analyse_maille <- st_sf(analyse_maille,stringsAsFactors = FALSE)
proxy <- addPolygons(map = proxy, data = analyse_maille, opacity = 1,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = palette_ac_rp()[[1]](analyse_maille_classe),
fillOpacity = 1,
group = "classe"
)
}
if(fond=="contour")
{
proxy <- addPolygons(map = proxy, data = fond_contour_maille_ac_rp()[[1]], opacity = 0.3,
stroke = TRUE, color = "black", weight = 3,
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.3,
group = "maille_contour"
)
}
i <- i + 1
}
},ignoreInit = TRUE)
# Redraw the proportional circles ("taille" group) when the user changes
# the maximum circle size: clears the group and re-adds the circles with
# radii scaled by calcul_rond_ac_rp(), into the pane previously recorded
# in ordre_analyse$a by the layer-ordering observer.
# NOTE(review): the colour and popup string literals below are truncated
# at a '#' character (upstream comment stripping); restore before running.
observeEvent(input$taille_rond_ac_rp_id,{
req(input$taille_rond_ac_rp_id,calcul_rond_ac_rp())
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "taille")
proxy <- addCircles(map = proxy,
lng = st_coordinates(analyse_ac_rp()[[2]])[,1],
lat = st_coordinates(analyse_ac_rp()[[2]])[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 1.5,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = paste0("fond_trio",ordre_analyse$a), clickable = T),
popup = paste0("<b> <font color=
fill = F,
group = "taille"
)
},ignoreInit = TRUE)
# Redraw the proportional circles when the centroid-placement option
# changes: same clear-and-redraw as the taille_rond observer, but keyed
# on choix_centroid (analyse_ac_rp()[[2]] reflects the new anchor points).
# NOTE(review): the colour and popup string literals below are truncated
# at a '#' character (upstream comment stripping); restore before running.
observeEvent(input$choix_centroid_ac_rp_id,{
req(input$choix_centroid_ac_rp_id)
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "taille")
proxy <- addCircles(map = proxy,
lng = st_coordinates(analyse_ac_rp()[[2]])[,1],
lat = st_coordinates(analyse_ac_rp()[[2]])[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 1.5,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = paste0("fond_trio",ordre_analyse$a), clickable = T),
popup = paste0("<b> <font color=
fill = F,
group = "taille"
)
},ignoreInit = TRUE)
# Redraw the choropleth layer ("classe" group) whenever the class count,
# classification method, manual breaks, or palette changes. Rebuilds the
# mesh sf object (geometry merged with the analysis data, sorted by
# descending varVolume so small features draw on top) and fills it with
# the current palette_ac_rp() applied to varRatio values. Bails out when
# ordering the data fails (try() guard). Draws into the pane recorded in
# ordre_analyse$b. ignoreInit avoids a redraw at startup.
# NOTE(review): popup string literals below are truncated at '#'
# (upstream comment stripping); restore before running. Also note the
# class(x) %in% "try-error" test would more idiomatically be
# inherits(x, "try-error") -- left untouched here.
observeEvent(list(input$nb_classes_ac_rp_id,input$methode_ac_rp_id,input$valid_bornes_ac_rp_id,input$palette_insee_ac_rp_id),{
req(input$nb_classes_ac_rp_id,input$methode_ac_rp_id,input$palette_insee_ac_rp_id)
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "classe")
suppressWarnings(test_analyse_maille_classe <- try(analyse_ac_rp()[[1]]$donnees[rev(order(analyse_ac_rp()[[1]]$donnees[,varVolume])),varRatio],silent=T))
if(class(test_analyse_maille_classe) %in% "try-error")
{
return(NULL)
}else
{
analyse_maille_classe <- analyse_ac_rp()[[1]]$donnees[rev(order(analyse_ac_rp()[[1]]$donnees[,varVolume])),varRatio]
}
analyse_maille <- merge(fond_contour_maille_ac_rp()[[2]][,c("CODE","geometry")],analyse_ac_rp()[[1]]$donnees[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille)[3] <- varVolume
names(analyse_maille)[4] <- varRatio
analyse_maille <- analyse_maille[rev(order(as.data.frame(analyse_maille)[,varVolume])),]
analyse_maille <- st_sf(analyse_maille,stringsAsFactors = FALSE)
proxy <- addPolygons(map = proxy, data = analyse_maille, opacity = 1,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = paste0("fond_trio",ordre_analyse$b), clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = palette_ac_rp()[[1]](analyse_maille_classe),
fillOpacity = 1,
group = "classe"
)
},ignoreInit = TRUE)
# Redraw the "extended area" ("elargi") overlay whenever any styling
# input changes (extended toggle, its opacity slider, circle size, class
# settings, palette, manual breaks, centroid choice). The group is
# cleared first; when elargi_ac_rp() is active, both the extended
# choropleth polygons and the extended proportional circles are redrawn
# into pane "fond_trio3" with opacity driven by the slider (value/100).
# ignoreInit avoids drawing at startup.
# NOTE(review): colour/popup string literals below are truncated at '#'
# (upstream comment stripping); restore before running.
observeEvent(list(input$elargi_ac_rp_id,input$opacite_elargi_ac_rp_id,input$taille_rond_ac_rp_id,input$nb_classes_ac_rp_id,input$methode_ac_rp_id,input$palette_insee_ac_rp_id,input$valid_bornes_ac_rp_id,input$choix_centroid_ac_rp_id),{
req(input$opacite_elargi_ac_rp_id,input$taille_rond_ac_rp_id,input$nb_classes_ac_rp_id,input$methode_ac_rp_id,input$palette_insee_ac_rp_id)
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "elargi")
if(elargi_ac_rp())
{
analyse_maille_classe_elargi <- analyse_ac_rp()[[1]]$donnees_elargi[rev(order(analyse_ac_rp()[[1]]$donnees_elargi[,varVolume])),varRatio]
analyse_maille_elargi <- merge(fond_elargi_ac_rp()[[2]][,c("CODE","geometry")],analyse_ac_rp()[[1]]$donnees_elargi[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille_elargi)[3] <- varVolume
names(analyse_maille_elargi)[4] <- varRatio
analyse_maille_elargi <- analyse_maille_elargi[rev(order(as.data.frame(analyse_maille_elargi)[,varVolume])),]
analyse_maille_elargi <- st_sf(analyse_maille_elargi,stringsAsFactors = FALSE)
proxy <- addPolygons(map = proxy, data = analyse_maille_elargi, opacity = input$opacite_elargi_ac_rp_id/100,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = "fond_trio3", clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = palette_ac_rp()[[1]](analyse_maille_classe_elargi),
fillOpacity = input$opacite_elargi_ac_rp_id/100,
group = "elargi"
)
proxy <- addCircles(map = proxy,
lng = st_coordinates(fond_elargi_ac_rp()[[1]])[,1],
lat = st_coordinates(fond_elargi_ac_rp()[[1]])[,2],
stroke = TRUE, color = "
opacity = input$opacite_elargi_ac_rp_id/100,
weight = 1.5,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees_elargi[,varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = "fond_trio3", clickable = T),
popup = paste0("<b> <font color=
fill = F,
group = "elargi"
)
}
},ignoreInit = TRUE)
# Highlight on the map the data rows selected in the "mydonnees" table:
# when the user returns to the "carte" tab (or changes the centroid
# option), draw outline-only circles ("select_donnees" group) in the
# "selection" pane over the matching features, sized like the analysis
# circles. Uses the extended dataset when elargi_ac_rp() is active.
# st_centroid warnings (lon/lat centroids) are suppressed deliberately.
# NOTE(review): the colour string literals below are truncated at '#'
# (upstream comment stripping); restore before running.
observeEvent(list(input$onglets_ac_rp,input$choix_centroid_ac_rp_id),{
req(input$onglets_ac_rp)
if(input$onglets_ac_rp == "carte")
{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "select_donnees")
if(!is.null(input$mydonnees_ac_rp_rows_selected))
{
if(elargi_ac_rp())
{
suppressWarnings(proxy <- addCircles(map = proxy,
lng = st_coordinates(st_centroid(fond_select_donnees_elargi_ac_rp()))[,1],
lat = st_coordinates(st_centroid(fond_select_donnees_elargi_ac_rp()))[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 3,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees_elargi[analyse_ac_rp()[[1]]$donnees_elargi[,"CODE"] %in% analyse_ac_rp()[[1]]$donnees_elargi[input$mydonnees_ac_rp_rows_selected,"CODE"],varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = "selection", clickable = F),
fill = F,
group = "select_donnees")
)
}else
{
suppressWarnings(proxy <- addCircles(map = proxy,
lng = st_coordinates(st_centroid(fond_select_donnees_ac_rp()))[,1],
lat = st_coordinates(st_centroid(fond_select_donnees_ac_rp()))[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 3,
radius = calcul_rond_ac_rp()*sqrt(analyse_ac_rp()[[1]]$donnees[analyse_ac_rp()[[1]]$donnees[,"CODE"] %in% analyse_ac_rp()[[1]]$donnees[input$mydonnees_ac_rp_rows_selected,"CODE"],varVolume]/calcul_max_rayon_metres_ac_rp()[[2]]),
options = pathOptions(pane = "selection", clickable = F),
fill = F,
group = "select_donnees")
)
}
}
}
},ignoreInit = TRUE)
# Highlight on the map the mesh features selected in the "mymaille"
# table: when the user returns to the "carte" tab, draw outline-only
# polygons ("select_maille" group) in the "selection" pane around the
# selected features, using the extended mesh when elargi_ac_rp() is
# active.
# NOTE(review): the colour string literals below are truncated at '#'
# (upstream comment stripping); restore before running.
observeEvent(input$onglets_ac_rp,{
req(input$onglets_ac_rp)
if(input$onglets_ac_rp == "carte")
{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "select_maille")
if(!is.null(input$mymaille_ac_rp_rows_selected))
{
if(elargi_ac_rp())
{
proxy <- addPolygons(map = proxy, data = fond_select_maille_elargi_ac_rp(),
stroke = TRUE, weight = 3,
color="
options = pathOptions(pane = "selection", clickable = F),
fill = F,
group = "select_maille"
)
}else
{
proxy <- addPolygons(map = proxy, data = fond_select_maille_ac_rp(),
stroke = TRUE, weight = 3,
color="
options = pathOptions(pane = "selection", clickable = F),
fill = F,
group = "select_maille"
)
}
}
}
},ignoreInit = TRUE)
# Highlight on the map the contour features selected in the "mycontour"
# table: when the user returns to the "carte" tab, draw outline-only
# polygons ("select_contour" group) in the "selection" pane around the
# features from fond_select_contour_ac_rp().
# NOTE(review): the colour string literal below is truncated at '#'
# (upstream comment stripping); restore before running.
observeEvent(input$onglets_ac_rp,{
req(input$onglets_ac_rp)
if(input$onglets_ac_rp == "carte")
{
proxy <- leafletProxy("mymap_ac_rp")
clearGroup(map = proxy, group = "select_contour")
if(!is.null(input$mycontour_ac_rp_rows_selected))
{
proxy <- addPolygons(map = proxy, data = fond_select_contour_ac_rp(),
stroke = TRUE, weight = 3,
color="
options = pathOptions(pane = "selection", clickable = F),
fill = F,
group = "select_contour"
)
}
}
},ignoreInit = TRUE)
# Reactive: longitude/latitude of the last click on the main map, as a
# two-element list (lon, lat). Both elements are NULL until the user has
# clicked the map (input$mymap_ac_rp_click is NULL before any click).
lon_lat_ac_rp <- reactive({
click <- input$mymap_ac_rp_click
list(click$lng, click$lat)
})
# Draw the map legend ("leg" group, pane "fond_legende") at the location
# of the user's last map click, re-rendering on zoom, click, legend-type
# switch, title edits, circle-size changes, or class/palette changes.
# Two parts are drawn: (1) the class legend -- either stacked labelled
# rectangles (type 1) or a continuous-style column with tick labels
# (type 2) -- over a semi-opaque white background; (2) the proportional-
# circles key (two reference circles + leader lines + value labels)
# built by construction_ronds_legende()/construction_lignes_legende().
# All geometry is laid out in the local projection then reprojected to
# WGS84 (EPSG:4326) for leaflet. Bails out when the legend checkbox is
# off or no click position exists yet.
# NOTE(review): a few string literals near the end (circle/line colours)
# are truncated at '#' (upstream comment stripping); restore before
# running. No comments are added past that point.
observeEvent(list(input$mymap_ac_rp_zoom,input$mymap_ac_rp_click,input$type_legende_ac_rp_id,input$titre_ronds_legende_ac_rp_id,input$titre_classes_legende_ac_rp_id,input$taille_rond_ac_rp_id,input$nb_classes_ac_rp_id,input$methode_ac_rp_id,input$palette_insee_ac_rp_id,input$valid_bornes_ac_rp_id),{
req(input$taille_rond_ac_rp_id)
if(is.null(input$affiche_legende_ac_rp_id)) return(NULL)
if(input$affiche_legende_ac_rp_id==FALSE) return(NULL)
if(is.null(lon_lat_ac_rp()[[1]])) return(NULL)
proxy <- leafletProxy("mymap_ac_rp")
proxy <- clearGroup(map=proxy, group="leg")
proxy <- clearMarkers(map=proxy)
# Legend unit size: 1/20th of the mesh's latitudinal extent; every box,
# offset and margin below is expressed as a multiple of `large`.
large <- as.numeric((st_bbox(fondMaille)[4] - st_bbox(fondMaille)[2]) / 20)
pt_ronds <- st_sfc(st_geometry(st_point(c(lon_lat_ac_rp()[[1]],
lon_lat_ac_rp()[[2]]))),
crs = 4326)
pt_ronds <- st_transform(pt_ronds, crs = as.numeric(code_epsg_ac_rp()))
# Offset the circles key down-right of the click point (in projected units),
# then go back to WGS84 for the construction helpers.
pt_ronds <- st_sfc(st_geometry(st_point(c(st_coordinates(pt_ronds)[,1] + large*3,
st_coordinates(pt_ronds)[,2] - large*3))),
crs = as.numeric(code_epsg_ac_rp()))
pt_ronds <- st_transform(pt_ronds, crs = 4326)
ronds_leg <- construction_ronds_legende(st_coordinates(pt_ronds)[,1],st_coordinates(pt_ronds)[,2],code_epsg_ac_rp(),input$taille_rond_ac_rp_id)
lignes <- construction_lignes_legende(ronds_leg,code_epsg_ac_rp())
pt <- st_sfc(st_geometry(st_point(c(lon_lat_ac_rp()[[1]],lon_lat_ac_rp()[[2]]))), crs = 4326)
pt <- st_transform(pt, crs = as.numeric(code_epsg_ac_rp()))
coord_pt <- st_coordinates(pt)[1:2]
# Anchors: circles key at the click point; class key below the circles.
position_leg_ronds <- t(data.frame(c(coord_pt[1],coord_pt[2])))
position_leg_classes <- t(data.frame(c(coord_pt[1],as.numeric(st_bbox(ronds_leg[[2]])[2]) - large*2)))
if(is.null(input$type_legende_ac_rp_id)) return(NULL)
if(is.null(input$nb_classes_ac_rp_id)) return(NULL)
max_classes <- as.numeric(input$nb_classes_ac_rp_id)
# Legend type 1: one labelled rectangle per class ("X et plus" / "De X a
# moins de Y" / "Moins de X"), stacked with a small gap; labels are also
# accumulated into the shared `legende$a` reactive value.
if(input$type_legende_ac_rp_id==1)
{
for(i in 1:max_classes)
{
x_coord_rectangle <- position_leg_classes[1]
if(i==1)
{
y_coord_rectangle <- position_leg_classes[2]
}else
{
y_coord_rectangle <- y_coord_rectangle - large - large / 4
}
assign(paste0("rectangle_",i),st_sfc(st_polygon(list(matrix(c(x_coord_rectangle, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle),
ncol=2, byrow=TRUE))),
crs = as.numeric(code_epsg_ac_rp())))
}
classes_leg_texte <- analyse_leg_ac_rp()$rupture_classes
label_rectangle <- c()
legende$a <- c()
for(i in 1:max_classes)
{
if(i==1)
{
lbl <- paste0(format(round(classes_leg_texte[i+1],3), big.mark=" ",decimal.mark=",",nsmall=0)," et plus")
label_rectangle <- c(label_rectangle, lbl)
}else if (i>1 && i<max_classes)
{
lbl <- paste0("De ", format(round(classes_leg_texte[i+1],3), big.mark=" ",decimal.mark=",",nsmall=0)," \u00E0 moins de ", format(round(classes_leg_texte[i],3), big.mark=" ",decimal.mark=",",nsmall=0))
label_rectangle <- c(label_rectangle, lbl)
}else
{
lbl <- paste0("Moins de ", format(round(classes_leg_texte[i],3), big.mark=" ",decimal.mark=",",nsmall=0))
label_rectangle <- c(label_rectangle, lbl)
}
legende$a <- c(legende$a,lbl)
}
ltext <- max(nchar(label_rectangle)) / 2.5
vec <- matrix(c(position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext), position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext), position_leg_classes[2] - large * (max_classes + (max_classes-1)/4 + 1),
position_leg_ronds[1] - large / 2, position_leg_classes[2] - large * (max_classes + (max_classes-1)/4 + 1),
position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2),
5,2,byrow=T)
rectangle <- st_sfc(st_polygon(list(vec)), crs = as.numeric(code_epsg_ac_rp()))
rectangle <- st_transform(rectangle, crs = 4326)
proxy <- addPolygons(map = proxy,
data = rectangle,
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 0.8,
group = "leg"
)
for(i in 1:max_classes)
{
proxy <- addPolygons(map = proxy,
data = st_transform(get(paste0("rectangle_",i)), crs = 4326),
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = analyse_leg_ac_rp()$pal_classes[i],
fillOpacity = 1,
group = "leg"
)
pt_label <- st_sfc(st_geometry(st_point(c(max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1]) + large / 10,
mean(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])))),
crs = as.numeric(code_epsg_ac_rp()))
pt_label <- st_transform(pt_label, crs = 4326)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_coordinates(pt_label)[1],
lat = st_coordinates(pt_label)[2],
label = label_rectangle[i],
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
}
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_classes[1],
position_leg_classes[2] + large/2))),
crs = as.numeric(code_epsg_ac_rp()))
pt_titre <- st_transform(pt_titre, crs = 4326)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = input$titre_classes_legende_ac_rp_id,
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
}
# Legend type 2: contiguous rectangles forming a colour column, with a
# short tick line and the break value labelled at each class boundary.
if(input$type_legende_ac_rp_id==2)
{
for(i in 1:max_classes)
{
x_coord_rectangle <- position_leg_classes[1]
if(i==1)
{
y_coord_rectangle <- position_leg_classes[2]
}else
{
y_coord_rectangle <- y_coord_rectangle - large
}
assign(paste0("rectangle_",i),st_sfc(st_polygon(list(matrix(c(x_coord_rectangle, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle),
ncol=2, byrow=TRUE))),
crs = as.numeric(code_epsg_ac_rp())))
}
classes_leg_num <- analyse_leg_ac_rp()$rupture_classes
ltext <- max(nchar(classes_leg_num)) / 2.5
vec <- matrix(c(position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext * 4), position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext * 4), position_leg_classes[2] - large * (max_classes + 1),
position_leg_ronds[1] - large / 2, position_leg_classes[2] - large * (max_classes + 1),
position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2),
5,2,byrow=T)
rectangle <- st_sfc(st_polygon(list(vec)), crs = as.numeric(code_epsg_ac_rp()))
rectangle <- st_transform(rectangle, crs = 4326)
proxy <- addPolygons(map = proxy,
data = rectangle,
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 0.8,
group = "leg"
)
for(i in 1:max_classes)
{
proxy <- addPolygons(map = proxy,
data = st_transform(get(paste0("rectangle_",i)), crs = 4326),
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = analyse_leg_ac_rp()$pal_classes[i],
fillOpacity = 1,
group = "leg"
)
if(i<max_classes)
{
x1 <- max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1])
y1 <- min(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])
x2 <- max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1]) + large*0.2
y2 <- min(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])
ligne <- st_sfc(st_linestring(rbind(c(x1,y1),c(x2,y2))), crs = as.numeric(code_epsg_ac_rp()))
proxy <- addPolygons(map = proxy,
data = st_transform(ligne, crs = 4326),
color = "black",
weight = 1,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = F,
fillOpacity = 1,
group = "leg"
)
pt_label <- st_sfc(st_geometry(st_point(c(x2,y2))),
crs = as.numeric(code_epsg_ac_rp()))
pt_label <- st_transform(pt_label, crs = 4326)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_coordinates(pt_label)[1],
lat = st_coordinates(pt_label)[2],
label = as.character(format(round(classes_leg_num[i+1],3),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
}
}
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_classes[1],
position_leg_classes[2] + large/2))),
crs = as.numeric(code_epsg_ac_rp()))
pt_titre <- st_transform(pt_titre, crs = 4326)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = input$titre_classes_legende_ac_rp_id,
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
}
# Circles key: two reference circles (max radius and max/sqrt(3)) with
# leader lines and value labels, plus the circles-legend title.
# NOTE(review): colour literals truncated at '#' from here on.
suppressWarnings(proxy <- addCircles(map = proxy,
lng = st_coordinates(st_centroid(ronds_leg[[1]]))[,1],
lat = st_coordinates(st_centroid(ronds_leg[[1]]))[,2],
stroke = TRUE,
opacity = 1,
color = "
weight = 2,
radius = c(calcul_rond_ac_rp(),calcul_rond_ac_rp()/sqrt(3)),
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 1,
group = "leg")
)
proxy <- addPolygons(map = proxy,
data = lignes[[1]],
stroke = TRUE,
opacity = 1,
color = "
weight = 2,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = F,
fillOpacity = 1,
group = "leg"
)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_bbox(lignes[[1]][1,])[3],
lat = st_bbox(lignes[[1]][1,])[4],
label = as.character(format(round(calcul_max_rayon_metres_ac_rp()[[2]],0),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_bbox(lignes[[1]][2,])[3],
lat = st_bbox(lignes[[1]][2,])[4],
label = as.character(format(round(calcul_max_rayon_metres_ac_rp()[[2]]/3,0),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_ronds[1],
position_leg_ronds[2]))),
crs = as.numeric(code_epsg_ac_rp()))
pt_titre <- st_transform(pt_titre, crs = 4326)
proxy <- addLabelOnlyMarkers(map = proxy,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = input$titre_ronds_legende_ac_rp_id,
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
})
observeEvent(input$save_carte_ac_rp_id,{
showModal(modalDialog(HTML("<i class=\"fa fa-spinner fa-spin fa-2x fa-fw\"></i><font size=+1>Sauvegarde de la carte en cours...</font> "), size="m", footer=NULL, style = "color:
insert_save$a <- insert_save$a + 1
nb_save_carte <- insert_save$a-remove_carte$a
m_save <- m_save_ac_rp$a
if(nb_save_carte>6)
{
insert_save$a <- insert_save$a - 1
showModal(modalDialog(HTML("<font size=+1>Vous ne pouvez pas sauvegarger plus de 6 cartes. Veuillez en supprimer avant de continuer.</font> "), size="l", footer=NULL, easyClose = TRUE, style = "color:
return(NULL)
}
output[[paste0("mymap_save_",insert_save$a,"_ac_rp")]] <- renderLeaflet({
if(!is.null(fondSuppl))
{
if(isolate(input$ajout_territoire_ac_rp_id))
{
m_save <- addPolygons(map = m_save, data = isolate(fond_territoire_ac_rp()),
stroke = TRUE, color = "
weight = 0.5,
options = pathOptions(pane = "fond_territoire", clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.001
)
}
}
if(isolate(input$ajout_reg_ac_rp_id))
{
m_save <- addPolygons(map = m_save, data = isolate(fond_region_ac_rp()),
stroke = TRUE, color = "grey", opacity = 1,
weight = 1.5,
options = pathOptions(pane = "fond_reg", clickable = F),
fill = F
)
}
if(isolate(input$ajout_dep_ac_rp_id))
{
m_save <- addPolygons(map = m_save, data = isolate(fond_departement_ac_rp()),
stroke = TRUE, color = "grey", opacity = 1,
weight = 0.5,
options = pathOptions(pane = "fond_dep", clickable = F),
fill = F
)
}
i <- 1
for(fond in isolate(liste_fonds$a))
{
if(fond=="analyse")
{
m_save <- addCircles(map = m_save,
lng = st_coordinates(isolate(analyse_ac_rp())[[2]])[,1],
lat = st_coordinates(isolate(analyse_ac_rp())[[2]])[,2],
stroke = TRUE, color = "
opacity = 1,
weight = 1.5,
radius = isolate(calcul_rond_ac_rp())*sqrt(isolate(analyse_ac_rp())[[1]]$donnees[,varVolume]/isolate(calcul_max_rayon_metres_ac_rp())[[2]]),
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
fill = F
)
}
if(fond=="maille")
{
suppressWarnings(test_analyse_maille_classe <- try(isolate(analyse_ac_rp())[[1]]$donnees[rev(order(isolate(analyse_ac_rp())[[1]]$donnees[,varVolume])),varRatio],silent=T))
if(class(test_analyse_maille_classe) %in% "try-error")
{
return(NULL)
}else
{
analyse_maille_classe <- isolate(analyse_ac_rp())[[1]]$donnees[rev(order(isolate(analyse_ac_rp())[[1]]$donnees[,varVolume])),varRatio]
}
analyse_maille <- merge(isolate(fond_contour_maille_ac_rp())[[2]][,c("CODE","geometry")],isolate(analyse_ac_rp())[[1]]$donnees[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille)[3] <- varVolume
names(analyse_maille)[4] <- varRatio
analyse_maille <- analyse_maille[rev(order(as.data.frame(analyse_maille)[,varVolume])),]
analyse_maille <- st_sf(analyse_maille,stringsAsFactors = FALSE)
m_save <- addPolygons(map = m_save, data = analyse_maille, opacity = 1,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = isolate(palette_ac_rp())[[1]](analyse_maille_classe),
fillOpacity = 1
)
}
if(fond=="contour")
{
m_save <- addPolygons(map = m_save, data = isolate(fond_contour_maille_ac_rp())[[1]], opacity = 0.3,
stroke = TRUE, color = "black", weight = 3,
options = pathOptions(pane = paste0("fond_trio",i), clickable = T),
popup = paste0("<b> <font color=
fill = T, fillColor = "white", fillOpacity = 0.3
)
}
i <- i + 1
}
if(isolate(elargi_ac_rp()))
{
analyse_maille_classe_elargi <- isolate(analyse_ac_rp())[[1]]$donnees_elargi[rev(order(isolate(analyse_ac_rp())[[1]]$donnees_elargi[,varVolume])),varRatio]
analyse_maille_elargi <- merge(isolate(fond_elargi_ac_rp())[[2]][,c("CODE","geometry")],isolate(analyse_ac_rp())[[1]]$donnees_elargi[,c("CODE","LIBELLE",varVolume,varRatio,"TXT1","TXT2")],by="CODE")
names(analyse_maille_elargi)[3] <- varVolume
names(analyse_maille_elargi)[4] <- varRatio
analyse_maille_elargi <- analyse_maille_elargi[rev(order(as.data.frame(analyse_maille_elargi)[,varVolume])),]
analyse_maille_elargi <- st_sf(analyse_maille_elargi,stringsAsFactors = FALSE)
m_save <- addPolygons(map = m_save, data = analyse_maille_elargi, opacity = isolate(input$opacite_elargi_ac_rp_id)/100,
stroke = TRUE, color = "white", weight = 1,
options = pathOptions(pane = "fond_trio3", clickable = T),
popup = paste0("<b> <font color=
"<b><font color=
fill = T,
fillColor = isolate(palette_ac_rp())[[1]](analyse_maille_classe_elargi),
fillOpacity = isolate(input$opacite_elargi_ac_rp_id)/100
)
m_save <- addCircles(map = m_save,
lng = st_coordinates(isolate(fond_elargi_ac_rp())[[1]])[,1],
lat = st_coordinates(isolate(fond_elargi_ac_rp())[[1]])[,2],
stroke = TRUE, color = "
opacity = isolate(input$opacite_elargi_ac_rp_id)/100,
weight = 1.5,
radius = isolate(calcul_rond_ac_rp())*sqrt(isolate(analyse_ac_rp())[[1]]$donnees_elargi[,varVolume]/isolate(calcul_max_rayon_metres_ac_rp())[[2]]),
options = pathOptions(pane = "fond_trio3", clickable = T),
popup = paste0("<b> <font color=
fill = F
)
}
if(!is.null(isolate(lon_lat_ac_rp())[[1]]))
{
large <- as.numeric((st_bbox(fondMaille)[4] - st_bbox(fondMaille)[2]) / 20)
pt_ronds <- st_sfc(st_geometry(st_point(c(isolate(lon_lat_ac_rp())[[1]],
isolate(lon_lat_ac_rp())[[2]]))),
crs = 4326)
pt_ronds <- st_transform(pt_ronds, crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_ronds <- st_sfc(st_geometry(st_point(c(st_coordinates(pt_ronds)[,1] + large*3,
st_coordinates(pt_ronds)[,2] - large*3))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_ronds <- st_transform(pt_ronds, crs = 4326)
ronds_leg <- construction_ronds_legende(st_coordinates(pt_ronds)[,1],st_coordinates(pt_ronds)[,2],isolate(code_epsg_ac_rp()),isolate(input$taille_rond_ac_rp_id))
lignes <- construction_lignes_legende(ronds_leg,isolate(code_epsg_ac_rp()))
pt <- st_sfc(st_geometry(st_point(c(isolate(lon_lat_ac_rp())[[1]],isolate(lon_lat_ac_rp())[[2]]))), crs = 4326)
pt <- st_transform(pt, crs = as.numeric(isolate(code_epsg_ac_rp())))
coord_pt <- st_coordinates(pt)[1:2]
position_leg_ronds <- t(data.frame(c(coord_pt[1],coord_pt[2])))
position_leg_classes <- t(data.frame(c(coord_pt[1],as.numeric(st_bbox(ronds_leg[[2]])[2]) - large*2)))
if(is.null(isolate(input$type_legende_ac_rp_id))) return(NULL)
if(is.null(isolate(input$nb_classes_ac_rp_id))) return(NULL)
max_classes <- as.numeric(isolate(input$nb_classes_ac_rp_id))
if(isolate(input$type_legende_ac_rp_id==1))
{
for(i in 1:max_classes)
{
x_coord_rectangle <- position_leg_classes[1]
if(i==1)
{
y_coord_rectangle <- position_leg_classes[2]
}else
{
y_coord_rectangle <- y_coord_rectangle - large - large / 4
}
assign(paste0("rectangle_",i),st_sfc(st_polygon(list(matrix(c(x_coord_rectangle, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle),
ncol=2, byrow=TRUE))),
crs = as.numeric(isolate(code_epsg_ac_rp()))))
}
classes_leg_texte <- isolate(analyse_leg_ac_rp())$rupture_classes
label_rectangle <- c()
for(i in 1:max_classes)
{
if(i==1)
{
lbl <- paste0(format(round(classes_leg_texte[i+1],3), big.mark=" ",decimal.mark=",",nsmall=0)," et plus")
label_rectangle <- c(label_rectangle, lbl)
}else if (i>1 && i<max_classes)
{
lbl <- paste0("De ", format(round(classes_leg_texte[i+1],3), big.mark=" ",decimal.mark=",",nsmall=0)," \u00E0 moins de ", format(round(classes_leg_texte[i],3), big.mark=" ",decimal.mark=",",nsmall=0))
label_rectangle <- c(label_rectangle, lbl)
}else
{
lbl <- paste0("Moins de ", format(round(classes_leg_texte[i],3), big.mark=" ",decimal.mark=",",nsmall=0))
label_rectangle <- c(label_rectangle, lbl)
}
}
ltext <- max(nchar(label_rectangle)) / 2.5
vec <- matrix(c(position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext), position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext), position_leg_classes[2] - large * (max_classes + (max_classes-1)/4 + 1),
position_leg_ronds[1] - large / 2, position_leg_classes[2] - large * (max_classes + (max_classes-1)/4 + 1),
position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2),
5,2,byrow=T)
rectangle <- st_sfc(st_polygon(list(vec)), crs = as.numeric(isolate(code_epsg_ac_rp())))
rectangle <- st_transform(rectangle, crs = 4326)
m_save <- addPolygons(map = m_save,
data = rectangle,
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 0.8,
group = "leg"
)
for(i in 1:max_classes)
{
m_save <- addPolygons(map = m_save,
data = st_transform(get(paste0("rectangle_",i)), crs = 4326),
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = isolate(analyse_leg_ac_rp())$pal_classes[i],
fillOpacity = 1,
group = "leg"
)
pt_label <- st_sfc(st_geometry(st_point(c(max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1]) + large / 10,
mean(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_label <- st_transform(pt_label, crs = 4326)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_coordinates(pt_label)[1],
lat = st_coordinates(pt_label)[2],
label = label_rectangle[i],
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
}
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_classes[1],
position_leg_classes[2] + large/2))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_titre <- st_transform(pt_titre, crs = 4326)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = isolate(input$titre_classes_legende_ac_rp_id),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
}
if(isolate(input$type_legende_ac_rp_id)==2)
{
for(i in 1:max_classes)
{
x_coord_rectangle <- position_leg_classes[1]
if(i==1)
{
y_coord_rectangle <- position_leg_classes[2]
}else
{
y_coord_rectangle <- y_coord_rectangle - large
}
assign(paste0("rectangle_",i),st_sfc(st_polygon(list(matrix(c(x_coord_rectangle, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle,
x_coord_rectangle + large * 1.5, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle - large,
x_coord_rectangle, y_coord_rectangle),
ncol=2, byrow=TRUE))),
crs = as.numeric(isolate(code_epsg_ac_rp()))))
}
classes_leg_num <- isolate(analyse_leg_ac_rp())$rupture_classes
ltext <- max(nchar(classes_leg_num)) / 2.5
vec <- matrix(c(position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext * 4), position_leg_ronds[2] + large / 2,
position_leg_ronds[1] + large * 1.5 + (large * ltext * 4), position_leg_classes[2] - large * (max_classes + 1),
position_leg_ronds[1] - large / 2, position_leg_classes[2] - large * (max_classes + 1),
position_leg_ronds[1] - large / 2, position_leg_ronds[2] + large / 2),
5,2,byrow=T)
rectangle <- st_sfc(st_polygon(list(vec)), crs = as.numeric(isolate(code_epsg_ac_rp())))
rectangle <- st_transform(rectangle, crs = 4326)
m_save <- addPolygons(map = m_save,
data = rectangle,
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 0.8,
group = "leg"
)
for(i in 1:max_classes)
{
m_save <- addPolygons(map = m_save,
data = st_transform(get(paste0("rectangle_",i)), crs = 4326),
stroke = FALSE,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = isolate(analyse_leg_ac_rp())$pal_classes[i],
fillOpacity = 1,
group = "leg"
)
if(i<max_classes)
{
x1 <- max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1])
y1 <- min(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])
x2 <- max(st_coordinates(get(paste0("rectangle_",i))[[1]])[,1]) + large*0.2
y2 <- min(st_coordinates(get(paste0("rectangle_",i))[[1]])[,2])
ligne <- st_sfc(st_linestring(rbind(c(x1,y1),c(x2,y2))), crs = as.numeric(isolate(code_epsg_ac_rp())))
m_save <- addPolygons(map = m_save,
data = st_transform(ligne, crs = 4326),
color = "black",
weight = 1,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = F,
fillOpacity = 1,
group = "leg"
)
pt_label <- st_sfc(st_geometry(st_point(c(x2,y2))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_label <- st_transform(pt_label, crs = 4326)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_coordinates(pt_label)[1],
lat = st_coordinates(pt_label)[2],
label = as.character(format(round(classes_leg_num[i+1],3),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
}
}
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_classes[1],
position_leg_classes[2] + large/2))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_titre <- st_transform(pt_titre, crs = 4326)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = isolate(input$titre_classes_legende_ac_rp_id),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
}
suppressWarnings(m_save <- addCircles(map = m_save,
lng = st_coordinates(st_centroid(ronds_leg[[1]]))[,1],
lat = st_coordinates(st_centroid(ronds_leg[[1]]))[,2],
stroke = TRUE,
opacity = 1,
color = "
weight = 2,
radius = c(isolate(calcul_rond_ac_rp()),isolate(calcul_rond_ac_rp())/sqrt(3)),
options = pathOptions(pane = "fond_legende", clickable = F),
fill = T,
fillColor = "white",
fillOpacity = 1,
group = "leg")
)
m_save <- addPolygons(map = m_save,
data = lignes[[1]],
stroke = TRUE,
opacity = 1,
color = "
weight = 2,
options = pathOptions(pane = "fond_legende", clickable = F),
fill = F,
fillOpacity = 1,
group = "leg"
)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_bbox(lignes[[1]][1,])[3],
lat = st_bbox(lignes[[1]][1,])[4],
label = as.character(format(round(isolate(calcul_max_rayon_metres_ac_rp())[[2]],0),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_bbox(lignes[[1]][2,])[3],
lat = st_bbox(lignes[[1]][2,])[4],
label = as.character(format(round(isolate(calcul_max_rayon_metres_ac_rp())[[2]]/3,0),big.mark=" ",decimal.mark=",",nsmall=0)),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "12px"
)),
group = "leg"
)
pt_titre <- st_sfc(st_geometry(st_point(c(position_leg_ronds[1],
position_leg_ronds[2]))),
crs = as.numeric(isolate(code_epsg_ac_rp())))
pt_titre <- st_transform(pt_titre, crs = 4326)
m_save <- addLabelOnlyMarkers(map = m_save,
lng = st_coordinates(pt_titre)[1],
lat = st_coordinates(pt_titre)[2],
label = isolate(input$titre_ronds_legende_ac_rp_id),
labelOptions = labelOptions(noHide = T, textOnly = TRUE, direction = "right",
style = list(
"color" = "black",
"font-size" = "14px"
)),
group = "leg"
)
}
removeModal()
m_save
})
output[[paste0("remove_carte_",nb_save_carte,"_ac_rp")]] <- renderUI({
actionButton(paste0("remove_carte_",nb_save_carte,"_ac_rp_id"),label="X Supprimer la carte", style="color:
})
appendTab(inputId = "onglets_ac_rp",
tabPanel(title=HTML(paste0("<font color=
select = TRUE,
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_1_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte1",
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_2_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte2",
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_3_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte3",
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_4_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte4",
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_5_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte5",
session = session
)
}, ignoreInit = TRUE)
observeEvent(input$remove_carte_6_ac_rp_id,{
remove_carte$a <- remove_carte$a + 1
removeTab(inputId = "onglets_ac_rp",
target = "carte6",
session = session
)
}, ignoreInit = TRUE)
output$mydonnees_ac_rp <- DT::renderDataTable(DT::datatable({
if(elargi_ac_rp())
data <- analyse_ac_rp()[[1]]$donnees_elargi
else
data <- analyse_ac_rp()[[1]]$donnees
tableau_donnees <- data[,c("CODE","LIBELLE",varVolume,varRatio)]
}, style = 'bootstrap'
))
output$mymaille_ac_rp <- DT::renderDataTable(DT::datatable({
if(elargi_ac_rp())
data <- as.data.frame(fondMailleElargi)
else
data <- as.data.frame(fondMaille)
tableau_maille <- data[,c(1:2)]
}, style = 'bootstrap'
))
output$mycontour_ac_rp <- DT::renderDataTable(DT::datatable({
data <- as.data.frame(fondContour)
tableau_contour <- data[,c(1:2)]
}, style = 'bootstrap'
))
output$mymap_ac_rp <- renderLeaflet({
react_fond_ac_rp()
})
}
runApp(shinyApp(ui = ui, server = server), launch.browser = TRUE)
} |
# Model search / prediction-error estimation for binary drug-target data,
# presumably from the TIMMA framework -- TODO confirm against the timma
# package documentation.
#
# profile_k : integer vector, one entry per drug (row); profile_k[i] + 1
#             selects which slice of the 3-d search-space arrays applies
#             to drug i.
# space     : list of 3-d arrays (drugs x target-sets x slices):
#             d = sensitivities of identical sets, i = superset bounds,
#             o = subset bounds. Assumed layout -- verify against the
#             code that builds `space`.
# sens      : observed sensitivity per drug (same order as rows).
# loo       : TRUE (default) = leave-one-out cross-validated errors;
#             FALSE = in-sample errors.
#
# Returns a numeric vector of absolute prediction errors, one per drug.
# Relies on compiled helpers sumcpp1/maxcpp1/mincpp1 defined elsewhere.
timmaSearchBinary1 <- function(profile_k, space, sens, loo = TRUE) {
    dim_info <- dim(space$d)
    rows <- dim_info[1]   # number of drugs
    cols <- dim_info[2]   # number of candidate target sets
    # Per-drug 2-d views of the search space, filled from the slice chosen
    # by the drug's profile. Sentinels: -Inf = "no superset information",
    # Inf = "no subset information", NA = "not the identical set".
    IM_d <- array(NA, dim = dim_info[1:2])
    IM_superset <- array(-Inf, dim = dim_info[1:2])
    IM_subset <- array(Inf, dim = dim_info[1:2])
    identical_idx <- rep(0, rows)
    for (i in 1:rows) {
        index <- profile_k[i] + 1
        IM_d[i, ] <- space$d[i, , index]
        IM_superset[i, ] <- space$i[i, , index]
        IM_subset[i, ] <- space$o[i, , index]
        # Column holding drug i's own (identical) target set -- the only
        # non-NA entry in its IM_d row; assumes exactly one such entry.
        identical_idx[i] <- which((!is.na(IM_d[i, ])) == TRUE)
    }
    # Column aggregates computed in C++: mean of identical-set values,
    # and the tightest superset/subset bounds with their row indices.
    M_d <- sumcpp1(IM_d, rows, cols)
    maxval <- maxcpp1(IM_superset, rows, cols)
    minval <- mincpp1(IM_subset, rows, cols)
    min_subset <- minval$min
    min_index <- minval$min_idx
    max_superset <- maxval$max
    max_index <- maxval$max_idx
    # Columns with no direct observation (NaN mean) but a finite lower
    # bound from supersets: refine the bound by averaging in sensitivities
    # of drugs whose subset bound undercuts the current superset maximum.
    cell <- is.nan(M_d) & is.finite(max_superset)
    cell <- which(cell == TRUE)
    if (length(cell) != 0) {
        for (i in cell) {
            drug_sub_cell <- !is.infinite(IM_superset[, i])
            index <- max_index[i]
            dec_maxsens <- identical_idx[index]
            supersets_small <- IM_subset[, dec_maxsens] < max_superset[i]
            common_cell <- which(drug_sub_cell & supersets_small)
            if (length(common_cell) != 0) {
                # Running (incremental) average of the bound with each
                # conflicting drug's observed sensitivity.
                k <- 1
                for (j in common_cell) {
                    max_superset[i] <- (max_superset[i] * k + sens[j])/(k + 1)
                    k <- k + 1
                }
            }
        }
    }
    # Symmetric refinement for columns with only a finite upper bound
    # from subsets.
    cell2 <- is.nan(M_d) & is.finite(min_subset)
    cell2 <- which(cell2 == TRUE)
    if (length(cell2) != 0) {
        for (i in cell2) {
            drug_sub_cell <- !is.infinite(IM_subset[, i])
            index <- min_index[i]
            dec_minsens <- identical_idx[index]
            subsets_small <- IM_superset[, dec_minsens] > min_subset[i]
            if (length(subsets_small) == 0) {
                common_cell2 <- vector("numeric")
            } else {
                common_cell2 <- which(drug_sub_cell & subsets_small)
            }
            if (length(common_cell2) != 0) {
                k <- 1
                for (j in common_cell2) {
                    min_subset[i] <- (min_subset[i] * k + sens[j])/(k + 1)
                    k <- k + 1
                }
            }
        }
    }
    # Assemble predictions: direct means where available, otherwise the
    # midpoint of the available bound(s); both bounds -> their average.
    M <- M_d
    M[cell] <- (max_superset[cell] + 1)/2
    M[cell2] <- (min_subset[cell2] + 0)/2
    average_index <- intersect(cell, cell2)
    M[average_index] <- (max_superset[average_index] + min_subset[average_index])/2
    error_predict <- rep(NA, rows)
    pred <- rep(NA, rows)
    if (loo == FALSE) {
        # In-sample: read each drug's prediction straight from its own column.
        pred <- M[identical_idx]
        error_predict <- abs(pred - sens)
    } else {
        # Leave-one-out: rebuild the whole search for each held-out drug i.
        for (i in 1:rows) {
            dim_IMd <- c(rows - 1, cols)
            IM_d_loo <- array(IM_d[-i, ], dim = dim_IMd)
            IM_subset_loo <- array(IM_subset[-i, ], dim = dim_IMd)
            IM_superset_loo <- array(IM_superset[-i, ], dim = dim_IMd)
            sens_loo <- sens[-i]
            drug_idx_loo <- identical_idx[-i]
            M_d_loo <- sumcpp1(IM_d_loo, rows - 1, cols)
            M_loo <- M_d_loo
            maxval <- maxcpp1(IM_superset_loo, rows - 1, cols)
            minval <- mincpp1(IM_subset_loo, rows - 1, cols)
            min_subset_loo <- minval$min
            min_index_loo <- minval$min_idx
            max_superset_loo <- maxval$max
            max_index_loo <- maxval$max_idx
            cell <- is.nan(M_d_loo) & is.finite(max_superset_loo)
            cell <- which(cell == TRUE)
            cell2 <- is.nan(M_d_loo) & is.finite(min_subset_loo)
            cell2 <- which(cell2 == TRUE)
            # Does the held-out drug's own column need bound-based
            # prediction in the reduced data?
            j_max <- which(cell == identical_idx[i])
            j_min <- which(cell2 == identical_idx[i])
            if (length(j_max) != 0 && length(j_min) == 0) {
                # Only a superset bound: refine as above, predict midpoint
                # between the bound and 1.
                cell_index <- cell[j_max]
                drug_sub_cell <- !is.infinite(IM_superset_loo[, cell_index])
                index <- max_index_loo[cell_index]
                dec_maxsens <- drug_idx_loo[index]
                supersets_small <- IM_subset_loo[, dec_maxsens] < max_superset_loo[cell_index]
                common_cell <- which(drug_sub_cell & supersets_small)
                if (length(common_cell) != 0) {
                    k <- 1
                    for (j in common_cell) {
                        max_superset_loo[cell_index] <- (max_superset_loo[cell_index] * k + sens_loo[j])/(k + 1)
                        k <- k + 1
                    }
                }
                pred[i] <- (max_superset_loo[identical_idx[i]] + 1)/2
                error_predict[i] <- abs(pred[i] - sens[i])
            } else if (length(j_max) == 0 && length(j_min) != 0) {
                # Only a subset bound: midpoint between 0 and the bound.
                cell2_index <- cell2[j_min]
                drug_sub_cell <- !is.infinite(IM_subset_loo[, cell2_index])
                index <- min_index_loo[cell2_index]
                dec_minsens <- drug_idx_loo[index]
                supersets_small <- IM_superset_loo[, dec_minsens] > min_subset_loo[cell2_index]
                common_cell <- which(drug_sub_cell & supersets_small)
                if (length(common_cell) != 0) {
                    k <- 1
                    for (j in common_cell) {
                        min_subset_loo[cell2_index] <- (min_subset_loo[cell2_index] * k + sens_loo[j])/(k + 1)
                        k <- k + 1
                    }
                }
                pred[i] <- (min_subset_loo[identical_idx[i]] + 0)/2
                error_predict[i] <- abs(pred[i] - sens[i])
            } else if (length(j_max) != 0 && length(j_min) != 0) {
                # Both bounds available: refine each, predict their average.
                cell_index <- cell[j_max]
                drug_sub_cell <- !is.infinite(IM_superset_loo[, cell_index])
                index <- max_index_loo[cell_index]
                dec_maxsens <- drug_idx_loo[index]
                supersets_small <- IM_subset_loo[, dec_maxsens] < max_superset_loo[cell_index]
                common_cell <- which(drug_sub_cell & supersets_small)
                if (length(common_cell) != 0) {
                    k <- 1
                    for (j in common_cell) {
                        max_superset_loo[cell_index] <- (max_superset_loo[cell_index] * k + sens_loo[j])/(k + 1)
                        k <- k + 1
                    }
                }
                cell2_index <- cell2[j_min]
                drug_sub_cell <- !is.infinite(IM_subset_loo[, cell2_index])
                index <- min_index_loo[cell2_index]
                dec_minsens <- drug_idx_loo[index]
                supersets_small <- IM_superset_loo[, dec_minsens] > min_subset_loo[cell2_index]
                common_cell <- which(drug_sub_cell & supersets_small)
                if (length(common_cell) != 0) {
                    k <- 1
                    for (j in common_cell) {
                        min_subset_loo[cell2_index] <- (min_subset_loo[cell2_index] * k + sens_loo[j])/(k + 1)
                        k <- k + 1
                    }
                }
                pred[i] <- (max_superset_loo[identical_idx[i]] + min_subset_loo[identical_idx[i]])/2
                error_predict[i] <- abs(pred[i] - sens[i])
            } else {
                # Direct observation exists even without drug i.
                pred[i] <- M_loo[identical_idx[i]]
                error_predict[i] <- abs(pred[i] - sens[i])
            }
        }
    }
    return(error_predict)
}
# Test whether two objects share the same internal storage type.
#
# @param x,y Any R objects.
# @return `TRUE` when `typeof(x)` equals `typeof(y)`, else `FALSE`.
same_type <- function(x, y) {
  identical(typeof(x), typeof(y))
}
# Test whether two objects have differing internal storage types
# (logical complement of `same_type()`).
#
# @param x,y Any R objects.
# @return `TRUE` when `typeof(x)` differs from `typeof(y)`, else `FALSE`.
different_type <- function(x, y) {
  !identical(typeof(x), typeof(y))
}
# Append one run record (file name, seed, x/y formulas) to the logfile and
# persist it as a tab-separated file.
#
# @param logfile Existing logfile data frame to append to.
# @param formula Named container with elements "x" and "y" -- assumed
#   coercible via as.character(); TODO confirm against callers.
# @param seed Random seed used for the run.
# @param file_name Name of the output file associated with the run.
# @return The updated logfile data frame, invisibly.
#
# Side effect: writes the full logfile to the global LOGFILE_PATH --
# NOTE(review): confirm LOGFILE_PATH is defined before this is called.
generate_logfile_entry <- function(logfile, formula, seed, file_name) {
  logfile_tmp <- data.frame(file_name = file_name,
                            seed = seed,
                            formula_x = as.character(formula["x"]),
                            formula_y = as.character(formula["y"]),
                            # Use TRUE/FALSE, never the reassignable T/F.
                            stringsAsFactors = FALSE)
  logfile <- dplyr::bind_rows(logfile, logfile_tmp)
  write.table(logfile, LOGFILE_PATH, sep = "\t", quote = FALSE,
              row.names = FALSE)
  # message() (not print()) is the idiomatic channel for status output.
  message("logfile saved")
  # Return the updated logfile so callers can keep it in sync with disk.
  invisible(logfile)
}
# Expand a frequency table into one row per observation.
#
# @param tabdata data.frame or matrix of unique response patterns; one
#   column (or the separate `freq` vector) holds the pattern counts.
# @param freq Either a single column name in `tabdata` containing the
#   counts (default: the last column), or a numeric vector of counts with
#   one entry per row of `tabdata`.
# @param sample If TRUE, randomly permute the expanded rows.
# @return The expanded data with `nrow == sum(freq)` and rownames 1..n.
expand.table <- function(tabdata, freq = colnames(tabdata)[ncol(tabdata)],
                         sample = FALSE) {
    if(missing(tabdata)) missingMsg('tabdata')
    if(is.null(colnames(tabdata)) && is.null(freq))
        stop('Please either supply colnames to tabdata or provide a vector of counts in freq', call.=FALSE)
    stopifnot(is.data.frame(tabdata) || is.matrix(tabdata))
    if(is.character(freq)){
        stopifnot(length(freq) == 1L)
        # Pull the counts out of tabdata and drop that column from the data.
        tmp <- tabdata[,freq]
        tabdata <- tabdata[, colnames(tabdata) != freq, drop=FALSE]
        freq <- tmp
    }
    stopifnot(length(freq) == nrow(tabdata))
    # Counts must be usable as rep() multipliers.
    stopifnot(!anyNA(freq), all(freq >= 0))
    fulldata <- vector('list', nrow(tabdata))
    for (i in seq_len(nrow(tabdata)))
        # drop = FALSE: without it a single remaining data column collapses
        # to a vector, and the rownames<- assignment below then errors.
        fulldata[[i]] <- tabdata[rep(i, freq[i]), , drop = FALSE]
    fulldata <- do.call(rbind, fulldata)
    if(sample) fulldata <- fulldata[sample(seq_len(nrow(fulldata))), , drop = FALSE]
    rownames(fulldata) <- seq_len(nrow(fulldata))
    fulldata
}
# Heatmap of Moran's I for the response and each predictor across distance
# thresholds; tile fill = Moran's I, point size encodes p < 0.05 vs >= 0.05.
#
# @param data Training data frame (or coercible to one).
# @param dependent.variable.name Name of the response column in `data`.
# @param predictor.variable.names Character vector of predictor names, or a
#   "variable_selection" object whose $selected.variables is used.
# @param distance.matrix Distance matrix matching the rows of `data`.
# @param distance.thresholds Thresholds at which Moran's I is computed;
#   defaults via default_distance_thresholds() when NULL.
# @param fill.color Vector of fill colors for the Moran's I gradient.
# @param point.color Color of the p-value indicator points.
# @return A ggplot object.
plot_training_df_moran <- function(
  data = NULL,
  dependent.variable.name = NULL,
  predictor.variable.names = NULL,
  distance.matrix = NULL,
  distance.thresholds = NULL,
  fill.color = viridis::viridis(
    100,
    option = "F",
    direction = -1
  ),
  point.color = "gray30"
){
  # Dummy bindings to silence R CMD check notes about ggplot2 NSE columns.
  distance.threshold <- NULL
  p.value.binary <- NULL
  moran.i <- NULL
  # Fix: scalar `if` conditions should use short-circuiting `||`, not the
  # elementwise `|` (which evaluates every operand and, on non-scalar
  # results, breaks under R >= 4.3 condition-length checks).
  if(
    is.null(data) ||
    is.null(dependent.variable.name) ||
    is.null(predictor.variable.names)
  ){
    stop("No variables to plot.")
  }
  data <- as.data.frame(data)
  # predictor.variable.names is guaranteed non-NULL here; only unwrap
  # variable_selection objects to their selected names.
  if(inherits(predictor.variable.names, "variable_selection")){
    predictor.variable.names <- predictor.variable.names$selected.variables
  }
  if(is.null(distance.matrix)){
    stop("distance.matrix is missing.")
  }
  if(is.null(distance.thresholds)){
    distance.thresholds <- default_distance_thresholds(distance.matrix = distance.matrix)
  }
  # One Moran's I profile per variable (response + predictors), one row per
  # distance threshold.
  df.list <- list()
  for(variable in c(
    dependent.variable.name,
    predictor.variable.names
  )
  ){
    temp.df <- moran_multithreshold(
      x = as.vector(data[, variable]),
      distance.matrix = distance.matrix,
      distance.thresholds = distance.thresholds,
      verbose = FALSE
    )$per.distance
    temp.df$variable <- variable
    df.list[[variable]] <- temp.df
  }
  plot.df <- do.call("rbind", df.list)
  rownames(plot.df) <- NULL
  # Binarize p-values for the point-size legend.
  plot.df$p.value.binary <- "< 0.05"
  plot.df[plot.df$p.value >= 0.05, "p.value.binary"] <- ">= 0.05"
  plot.df$p.value.binary <- factor(
    plot.df$p.value.binary,
    levels = c("< 0.05", ">= 0.05")
  )
  # Factor levels ordered so the response sits at the top of the y axis.
  plot.df$variable <- factor(
    plot.df$variable,
    levels = c(
      rev(predictor.variable.names),
      dependent.variable.name
    )
  )
  p <- ggplot2::ggplot(data = plot.df) +
    ggplot2::scale_fill_gradientn(colors = fill.color) +
    ggplot2::geom_tile(
      ggplot2::aes(
        x = factor(distance.threshold),
        y = variable,
        fill = moran.i
      )
    ) +
    ggplot2::geom_point(
      ggplot2::aes(
        x = factor(distance.threshold),
        y = variable,
        size = p.value.binary
      ),
      color = point.color,
      pch = 1
    ) +
    ggplot2::scale_size_manual(
      breaks = c("< 0.05", ">= 0.05"),
      values = c(2.5, 5),
      drop = FALSE
    ) +
    ggplot2::coord_cartesian(expand = FALSE) +
    ggplot2::ylab("") +
    ggplot2::xlab("Distance threshold") +
    ggplot2::labs(
      fill = "Moran's I",
      size = "p-value"
    )
  p
}
# Smoke tests for the bsts-based model helpers (bsts_spec_static /
# bsts_model) -- NOTE(review): these come from the package under test and
# are not runnable standalone.
context("bsts")
# Toy multivariate series: iris measurements with a minute-resolution
# datetime column prepended.
.data <- iris[, 1:4]
datetime <- seq(from = Sys.time(), length.out = nrow(.data), by = "mins")
.data <- cbind(datetime = datetime, .data)
test_that("bsts_spec_static", {
  .spec <- bsts_spec_static(.data)
  expect_true(inherits(.spec, "cbar.model.spec"))
})
test_that("bsts_model", {
  # Mask a "post" period in column 1, mimicking a counterfactual setup.
  # NOTE(review): column 1 is the datetime column here, and training_data
  # is never passed to bsts_model() -- confirm whether bsts_model(.data)
  # and column 1 are intentional.
  pre_period <- c(1, 100)
  post_period <- c(101, 150)
  training_data <- .data
  training_data[post_period[1]:post_period[2], 1] <- NA
  .model <- bsts_model(.data)
  expect_true(inherits(.model, "bsts"))
  # Exploratory inspection only: the three results below are discarded and
  # nothing is asserted -- consider removing or wrapping in expectations.
  names(.model)
  .model$coefficients
  .model$state.contributions[1000, 1:2, 145:150]
})
# Exploratory calls of the f2 similarity-factor function on the dip1
# example data, exercising the use_EMA options and different bounds.
str(dip1)
f2(data = dip1, tcol = 3:10, grouping = "type")
f2(data = dip1, tcol = 3:10, grouping = "type", use_EMA = "no",
   bounds = c(5, 80))
f2(data = dip1, tcol = 3:10, grouping = "type", use_EMA = "no",
   bounds = c(1, 95))
f2(data = dip1, tcol = 3:10, grouping = "type", use_EMA = "ignore")
# Append duplicated rows labelled "T2" -- presumably built so the f2 call
# below errors (the error is caught and reported as a message).
tmp <- rbind(dip1,
             data.frame(type = "T2",
                        tablet = as.factor(1:6),
                        dip1[7:12, 3:10]))
tryCatch(
  f2(data = tmp, tcol = 3:10, grouping = "type"),
  error = function(e) message(e),
  # Typo fixed in the user-facing hint: "unesed" -> "unused".
  finally = message("\nMaybe you want to remove unused levels in data."))
print01Report <- function(data, modelname="Siena", getDocumentation=FALSE)
{
reportDataObject1 <- function(x)
{
Report(c(x$observations, "observations,\n"), outf)
if (length(x$nodeSets) > 1)
{
Report("Node Sets:\n", outf)
lapply(x$nodeSets, function(z)
{
Report(c(" ", format(attr(z, "nodeSetName"), width=15),
":",
format(length(z), width=3), "nodes\n"), outf)
})
Report("\n", outf)
}
else
{
Report(c(length(x$nodeSets[[1]]), "actors\n"), outf)
}
}
reportDataObject <- function(x, periodFromStart=0, multi=FALSE)
{
reportStart <- function()
{
multipleNodeSets <- length(x$nodeSets) > 1
if (multipleNodeSets)
{
Report("Dependent variables Type NodeSet(s) (R, C)\n",
outf)
Report("------------------- ---- -----------------\n",
outf)
for (i in 1:length(x$depvars))
{
atts <- attributes(x$depvars[[i]])
Report(c(format(atts$name, width=20),
format(atts$type, width=12)), outf)
for (j in 1:length(atts$nodeSet))
{
if (j > 1)
{
Report(', ', outf)
}
Report(c(format(atts$nodeSet[j]),
" (", atts$netdims[j], ")"), sep="", outf)
}
Report("\n", outf)
}
}
else
{
Report(c(x$observations, "observations,\n"), outf)
Report(c(length(x$nodeSets[[1]]), "actors,\n"), outf)
Report(c(sum(types=="oneMode"),
"dependent network variables,\n"),
outf)
Report(c(sum(types=="bipartite"),
"dependent bipartite variables,\n"), outf)
Report(c(sum(types=="behavior"),
"dependent discrete behavior variables,\n"),
outf)
Report(c(sum(types=="continuous"),
"dependent continuous behavior variables,\n"),
outf)
}
Report(c(length(x$cCovars), "constant actor covariates,\n"), outf)
Report(c(length(x$vCovars),
"exogenous changing actor covariates,\n"), outf)
Report(c(length(x$dycCovars), "constant dyadic covariates,\n"),
outf)
Report(c(length(x$dyvCovars),
"exogenous changing dyadic covariates,\n"), outf)
Report(c(length(x$compositionChange),
c('no files','file',
'files')[1 + as.numeric(length(x$compositionChange))],
"with times of composition change.\n"), outf)
if ((length(x$cCovars) > 0 || length(x$dycCovars) > 0) && multi)
{
Report(c("For multi-group projects, constant covariates are",
"treated as changing covariates.\n"), outf)
if (length(x$dycCovars) > 0)
{
Report(c("Note that missings in changing dyadic",
"covariates are not (yet) supported!\n"), outf)
}
}
Report("\n", outf)
}
reportNetworks <- function()
{
Heading(2, outf, "Reading network variables.")
anymissings <- FALSE
for (i in 1:length(x$depvars))
{
depvar <- x$depvars[[i]]
atts <- attributes(depvar)
netname <- atts$name
type <- atts$type
if (!(type %in% c("behavior", "continuous")))
{
Report("Name of ", outf)
if (nNetworks > 1)
{
Report("this ", outf)
}
Report(c("network variable: ", netname, '.\n'),
sep="", outf)
Report(c(type, "network.\n"), outf)
if (type == "bipartite")
{
Report("This is a two-mode network.\n", outf)
Report(c("The number of units in the second mode is ",
atts$netdims[2], ".\n"), sep="", outf)
}
for (k in 1:x$observations)
{
Report(c("For observation moment ", k + periodFromStart,
", degree distributions are as ",
"follows:\nNodes\n"),
sep="", outf)
if (attr(depvar, "sparse"))
{
tmpdepvar <- depvar[[k]]
tmpx1 <- tmpdepvar@x
use <- tmpx1 %in% c(10, 11)
tmpx1[use] <- tmpx1[use] - 10
tmpdepvar@x <- tmpx1
outdeg <- rowSums(tmpdepvar, na.rm=TRUE)
indeg <- colSums(tmpdepvar, na.rm=TRUE)
diag(tmpdepvar) <- 0
missrow <- rowSums(is.na(depvar[[k]]))
misscol <- colSums(is.na(depvar[[k]]))
}
else
{
tmpdepvar <- depvar[, , k]
use <- tmpdepvar %in% c(10, 11)
tmpdepvar[use] <- tmpdepvar[use] - 10
if (attr(depvar, "type") != "bipartite")
{
diag(tmpdepvar) <- 0
}
outdeg <- rowSums(tmpdepvar, na.rm=TRUE)
indeg <- colSums(tmpdepvar, na.rm=TRUE)
missrow <- rowSums(is.na(tmpdepvar))
misscol <- colSums(is.na(tmpdepvar))
}
if (attr(depvar, "type") == "bipartite")
{
tmp <- format(cbind(1:atts$netdims[1], outdeg))
tmp2 <- format(cbind(1:atts$netdims[2], indeg))
}
else
{
tmp <- format(cbind(1:atts$netdims[1], outdeg,
indeg))
}
Report(tmp[, 1], fill=60, outf)
Report("out-degrees\n", outf)
Report(tmp[, 2], fill=60, outf)
if (attr(depvar, "type") == "bipartite")
{
Report("in-degrees\n", outf)
Report(tmp2[, 2], fill=60, outf)
}
else
{
Report("in-degrees\n", outf)
Report(tmp[, 3], fill=60, outf)
}
if (attr(depvar, "structural"))
{
if (attr(depvar, "sparse"))
{
nstruct0 <- sum(depvar[[k]]@x %in% c(10))
nstruct1 <- sum(depvar[[k]]@x %in% c(11))
}
else
{
nstruct0 <- sum(depvar[, , k] %in% c(10))
nstruct1 <- sum(depvar[, , k] %in% c(11))
}
if (nstruct0 + nstruct1 > 0)
{
Report(c("\nThe input file contains codes for ",
"structurally determined values:\n"),
sep="", outf );
if (attr(depvar, "sparse"))
{
nstruct0 <- sum(depvar[[k]]@x %in% c(10))
nstruct1 <- sum(depvar[[k]]@x %in% c(11))
}
else
{
nstruct0 <- sum(depvar[, , k] %in% c(10))
nstruct1 <- sum(depvar[, , k] %in% c(11))
}
Report(c(' ', nstruct0, ' structural zero'),
sep='', outf)
Report(ifelse(nstruct0 > 1,
"s were found (code 10).\n",
" was found (code 10).\n"), outf)
Report(c(' ', nstruct1, ' structural one'),
sep='', outf)
Report(ifelse(nstruct1 > 1,
"s were found (code 11).\n",
" was found (code 11).\n"),
outf)
if (attr(depvar, 'sparse'))
{
nnonactive <-
rowSums(depvar[[k]] == 10 |
depvar[[k]] == 11, na.rm=TRUE)
nnonactive <- nnonactive >= nrow(depvar[[k]])
}
else
{
nnonactive <-
rowSums(depvar[, , k] == 10 |
depvar[, , k] == 11, na.rm=TRUE)
nnonactive <- nnonactive >=
nrow(depvar[, , k])
}
if (sum(nnonactive) == 1)
{
Report(c("Actor ", which(nnonactive),
" is inactive at this ",
"observation.\n"), sep='', outf)
}
else if (sum(nnonactive) > 1)
{
Report(c("Actors", which(nnonactive),
"are inactive at this",
"observation.\n"), fill=80, outf)
}
}
}
if (attr(depvar, "sparse"))
{
depvark <- depvar[[k]]
diag(depvark) <- 0
anymissings <- any(is.na(depvark))
}
else
{
depvark <- depvar[, , k]
diag(depvark) <- 0
anymissings <- any(is.na(depvark))
}
if (anymissings)
{
Report(c("\nFor observation moment ",
k + periodFromStart,
", number of missing values ",
"are:\n"),
sep="", outf)
if (attr(depvar, "type") == "bipartite")
{
Report("Senders\n", outf)
tmp <- format(cbind(1:atts$netdims[1],
missrow))
Report(tmp[, 1], fill=60, outf)
Report("missing in rows\n", outf)
Report(tmp[, 2], fill=60, outf)
tmp <- format(cbind(1:atts$netdims[2],
misscol))
Report("Receivers\n", outf)
Report(tmp[, 1], fill=60, outf)
Report("missing in columns\n", outf)
Report(tmp[, 2], fill=60, outf)
mult <- atts$netdims[2]
}
else
{
Report("Nodes\n", outf)
tmp <- format(cbind(1:atts$netdims[1],
missrow, misscol))
Report(tmp[, 1], fill=60, outf)
Report("missing in rows\n", outf)
Report(tmp[, 2], fill=60, outf)
Report("missing in columns\n", outf)
Report(tmp[, 3], fill=60, outf)
mult <- atts$netdims[1] - 1
}
Report(c("Total number of missing data: ",
sum(missrow),
", corresponding to a fraction of ",
format(round(sum(missrow)/
atts$netdims[1] /
mult, 3),
nsmall=3),
".\n"), sep="", outf)
if (k > 1)
Report(c("In reported in- and outdegrees,",
"missings are not counted.\n"), outf)
Report("\n", outf)
}
else
{
Report(c("\nNo missing data for observation ",
k + periodFromStart, ".\n\n"),
sep= "", outf)
}
}
if (anymissings)
{
Report(c("There are missing data for this",
"network variable,\n"), outf)
Report(c("and the <<carry missings forward>>",
"option is active.\n"), outf)
Report("This means that for each tie variable,\n", outf)
Report(c("the last previous nonmissing value (if any)",
"is imputed.\n"), outf)
Report(c("If there is no previous nonmissing value,",
"the value 0 is imputed.\n"), outf)
}
}
Report("\n", outf)
}
Report("\n", outf)
}
reportBehaviors <- function()
{
Heading(2, outf, "Reading dependent actor variables.")
iBehav <- 0
for (i in 1:length(x$depvars))
{
if (types[i] %in% c("behavior", "continuous"))
{
depvar <- x$depvars[[i]]
atts <- attributes(depvar)
netname <- atts$name
iBehav <- iBehav + 1
mystr <- paste(iBehav, switch(as.character(iBehav),
"1"=, "21"=, "31"= "st",
"2"=, "22"=, "32"= "nd",
"3"=, "23"=, "33"= "rd",
"th"), sep="")
Report(c(mystr, " dependent actor variable named ",
netname,".\n"), sep="", outf)
ranged <- atts$range2
if (types[i] == "behavior")
ranged <- round(ranged)
else
ranged <- signif(ranged, 4)
Report(c("Maximum and minimum ",
ifelse(types[i] == "behavior", "rounded ", ""),
"values are ", ranged[1], " and ", ranged[2],
".\n"), sep="", outf)
if (types[i] == "behavior")
{
if (ranged[1] < 0 )
stop("Negative minima not allowed for discrete ",
"dependent actor variables.\n")
if (ranged[2] > 255 )
stop("Maxima more than 255 not allowed for ",
"discrete dependent actor variables.\n")
}
if (ranged[1] >= ranged[2] )
stop("Dependent actor variables must not be",
" constant.\n")
if (any(is.na(depvar)))
{
Report(c("Missing values in this actor variable are",
"imputed",
"by the mode per observation.\n"), outf)
Report(c("But if there is a previous (or later)",
"nonmissing value,",
"this is used as the imputed value.\n"), outf)
Report("Modal values:\nObservation ", outf)
Report(c(format(1:x$observations+periodFromStart,
width=4), '\n'), outf)
Report(c(format("Modes", width=12),
format(atts$modes, width=4)), outf)
Report("\n", outf)
}
depvar2 <- depvar
depvar2[is.na(depvar2)] <- 0
if (types[i] == "behavior" &&
!isTRUE(all.equal(as.vector(depvar2),
round(as.vector(depvar2)))))
{
Report(c("Non-integer values noted in this behavior",
"variable: they will be truncated.\n")
, outf)
}
Report('\n', outf)
}
}
Report(c("\nA total of",
nBehavs, "dependent actor variable"), outf)
Report(ifelse(nBehavs > 1, "s.\n\n", ".\n\n"), outf)
Report("Number of missing cases per observation:\n", outf)
Report(c(" observation", format(1:x$observations+periodFromStart,
width=10),
" overall\n"), sep="", outf)
for (i in 1:length(x$depvars))
{
if (types[i] %in% c("behavior", "continuous"))
{
depvar <- x$depvars[[i]][, 1, ]
atts <- attributes(x$depvars[[i]])
netname <- atts$name
missings <- colSums(is.na(depvar))
Report(c(format(netname, width=12),
format(c(missings, sum(missings)),
width=10), " (",
format(round(100 * sum(missings)/
nrow(depvar)/ncol(depvar), 1),
nsmall=1, width=4), ' %)\n'), sep="", outf)
}
}
Report("\nMeans per observation:\n", outf)
Report(c(" observation", format(1:x$observations+periodFromStart,
width=10),
" overall\n"), sep="", outf)
for (i in 1:length(x$depvars))
{
if (types[i] %in% c("behavior", "continuous"))
{
depvar <- x$depvars[[i]][, 1, ]
atts <- attributes(x$depvars[[i]])
netname <- atts$name
means <- colMeans(depvar, na.rm=TRUE)
Report(c(format(netname, width=14),
format(round(means, 3), nsmall=3,
width=10), format(round(mean(means),
3), width=10), '\n'), sep="", outf)
}
}
}
reportConstantCovariates <- function()
{
nCovars <- length(x$cCovars)
covars <- names(x$cCovars)
Heading(2, outf, "Reading constant actor covariates.")
Report(c(nCovars, "variable"),outf)
Report(ifelse(nCovars == 1, ", named:\n", "s, named:\n"), outf)
for (i in seq(along=covars))
{
Report(c(format(covars[i], width=15), '\n'), outf)
}
Report(c("\nA total of", nCovars,
"non-changing individual covariate"), outf)
Report(ifelse(nCovars == 1, ".\n\n", "s.\n\n"), outf)
Report("Number of missing cases:\n", outf)
for (i in seq(along=covars))
{
Report(c(format(covars[i], width=15),
sum(is.na(x$cCovars[[i]])), " (",
format(round(100 * sum(is.na(x$cCovars[[i]]))/
length(x$cCovars[[i]]), 1),
width=3, nsmall=1), '%)\n'), outf)
}
Report("\nInformation about covariates:\n", outf)
Report(c(format("minimum maximum mean centered", width=48,
justify="right"), "\n"), outf)
any.cent <- 0
any.noncent <- 0
for (i in seq(along=covars))
{
atts <- attributes(x$cCovars[[i]])
if (atts$centered)
{
cent <- " Y"
any.cent <- any.cent+1
}
else
{
cent <- " N"
any.noncent <- any.noncent+1
}
Report(c(format(covars[i], width=10),
format(round(atts$range2[1], 1),
nsmall=1, width=8),
format(round(atts$range2[2], 1),
nsmall=1, width=7),
format(round(atts$mean, 3),
nsmall=3, width=10), cent, "\n"), outf)
}
if (nData <= 1)
{
if (any.noncent <= 0)
{
Report(c("The mean value", ifelse(nCovars == 1, " is", "s are"),
" subtracted from the",
ifelse(nCovars == 1, " centered", ""), " covariate",
ifelse(nCovars == 1, ".\n\n", "s.\n\n")), sep="", outf)
}
else if (any.cent >= 1)
{
s.plural <- ""
if (any.cent >= 2){s.plural <- "s"}
Report(c("For the centered variable", s.plural,
", the mean value", ifelse(any.cent == 1, " is", "s are"),
" subtracted from the covariate", s.plural,
".\n"), sep="", outf)
}
}
}
    ## Write the "exogenous changing actor covariates" report section.
    ## Covariates that also appear among the constant covariates are excluded
    ## via the `use` mask (they were already reported by
    ## reportConstantCovariates). Uses x, outf, nData, periodFromStart from the
    ## enclosing function's environment.
    reportChangingCovariates <- function()
    {
        nCovars <- length(x$vCovars)
        covars <- names(x$vCovars)
        ## Only report changing covariates that are not duplicated constants.
        use <- ! covars %in% names(x$cCovars)
        nCovars <- length(x$vCovars[use])
        Heading(2, outf, "Reading exogenous changing actor covariates.")
        Report(c(nCovars, "variable"),outf)
        Report(ifelse(nCovars == 1, ", named:\n", "s, named:\n"), outf)
        for (i in seq(along=covars[use]))
        {
            Report(c(format(covars[use][i], width=15), '\n'), outf)
        }
        Report(c("\nA total of", nCovars,
                 "exogenous changing actor covariate"), outf)
        Report(ifelse(nCovars == 1, ".\n\n", "s.\n\n"), outf)
        Report("Number of missing cases per period:\n", outf)
        ## Header row of period numbers; periodFromStart offsets numbering in
        ## multi-group projects.
        Report(c(" period ", format(1:(x$observations - 1) +
                                    periodFromStart, width=8),
                 " overall\n"), sep="", outf)
        for (i in seq(along=covars))
        {
            if (use[i])
            {
                thiscovar <- x$vCovars[[i]]
                ## Missing counts per period (column), plus an overall total.
                misscols <- colSums(is.na(thiscovar))
                Report(c(format(covars[i], width=20),
                         format(misscols, width=7),
                         format(sum(misscols), width=8), " (",
                         format(round(100 * sum(misscols)/nrow(thiscovar)/
                                      ncol(thiscovar), 1), nsmall=1,
                                width=3), '%)\n'), outf)
            }
        }
        Report("\nInformation about changing covariates:\n\n", outf)
        Report(c(format("minimum maximum mean centered", width=48,
                        justify="right"), "\n"), outf)
        any.cent <- 0
        any.noncent <- 0
        for (i in seq(along=covars))
        {
            if (use[i])
            {
                atts <- attributes(x$vCovars[[i]])
                if (atts$centered)
                {
                    cent <- " Y"
                    any.cent <- any.cent+1
                }
                else
                {
                    cent <- " N"
                    any.noncent <- any.noncent+1
                }
                Report(c(format(covars[i], width=39), cent, '\n'), outf)
                ## Per-period range and mean for this covariate.
                for (j in 1:(ncol(x$vCovars[[i]])))
                {
                    Report(c(" period", format(j + periodFromStart,
                                               width=3),
                             format(round(atts$rangep[1, j], 1),
                                    nsmall=1, width=7),
                             format(round(atts$rangep[2, j], 1),
                                    nsmall=1, width=7),
                             format(round(atts$meanp[j], 3),
                                    nsmall=3, width=10), "\n"), outf)
                }
                Report(c(format("Overall", width=29),
                         format(round(atts$mean, 3), width=10, nsmall=3),
                         "\n\n"), outf)
            }
        }
        ## Centering note, single-group projects only.
        if (nData <= 1)
        {
            if (any.noncent <= 0)
            {
                Report(c("The mean value", ifelse(nCovars == 1, " is", "s are"),
                         " subtracted from the",
                         ifelse(nCovars == 1, " centered", ""), " covariate",
                         ifelse(nCovars == 1, ".\n\n", "s.\n\n")), sep="", outf)
            }
            else if (any.cent >= 1)
            {
                s.plural <- ""
                if (any.cent >= 2){s.plural <- "s"}
                Report(c("For the centered variable", s.plural,
                         ", the mean value", ifelse(any.cent == 1, " is", "s are"),
                         " subtracted from the covariate", s.plural,
                         ".\n"), sep="", outf)
            }
        }
    }
    ## Write the "constant dyadic covariates" report section: names, counts of
    ## missing tie variables (diagonal excluded), descriptive statistics, and
    ## centering notes. Uses x and outf from the enclosing environment.
    reportConstantDyadicCovariates <- function()
    {
        nCovars <- length(x$dycCovars)
        covars <- names(x$dycCovars)
        Heading(2, outf, "Reading constant dyadic covariates.")
        for (i in seq(along=covars))
        {
            Report(c("Dyadic covariate named ", covars[i], '.\n'),
                   sep="", outf)
        }
        Report(c("\nA total of", nCovars,
                 "dyadic individual covariate"), outf)
        Report(ifelse(nCovars == 1, ".\n\n", "s.\n\n"), outf)
        Report("Number of tie variables with missing data:\n", outf)
        for (i in seq(along=covars))
        {
            ## Sparse dyadic covariates store the matrix as the first list
            ## element; dense ones are the matrix itself.
            if (attr(x$dycCovars[[i]], "sparse"))
            {
                myvar <- x$dycCovars[[i]][[1]]
            }
            else
            {
                myvar <- x$dycCovars[[i]]
            }
            ## Self-ties are not covariate values: zero the diagonal before
            ## counting NAs, and exclude it from the denominator below.
            diag(myvar) <- 0
            Report(c(format(covars[i], width=30),
                     sum(is.na(myvar)), " (",
                     format(round(100 * sum(is.na(myvar))/
                                  (length(myvar) - nrow(myvar)), 1),
                            width=3, nsmall=1), '%)\n'), outf)
        }
        Report("\nInformation about dyadic covariates:\n", outf)
        Report(c(format("minimum maximum mean centered", width=67,
                        justify="right"), "\n"), outf)
        any.cent <- 0
        any.noncent <- 0
        for (i in seq(along=covars))
        {
            atts <- attributes(x$dycCovars[[i]])
            if (atts$centered)
            {
                cent <- " Y"
                any.cent <- any.cent+1
            }
            else
            {
                cent <- " N"
                any.noncent <- any.noncent+1
            }
            Report(c(format(covars[i], width=30),
                     format(round(atts$range2[1], 1),
                            nsmall=1, width=8),
                     format(round(atts$range2[2], 1),
                            nsmall=1, width=7),
                     format(round(atts$mean, 3),
                            nsmall=3, width=10), cent, "\n"), outf)
        }
        Report('\n', outf)
        ## NOTE(review): s.plural is derived from any.cent but is also used in
        ## the message about NON-centered variables below -- possibly
        ## any.noncent was intended; verify against the RSiena sources.
        s.plural <- ifelse((any.cent >= 2),"s","")
        if (any.noncent >= 1)
        {
            Report(c('The <mean> listed for the non-centered variable',
                     s.plural, ' is the attribute, not the observed mean.',
                     '\n'), sep="", outf)
        }
        if (any.noncent <= 0)
        {
            Report(c("The mean value", ifelse(nCovars == 1, " is", "s are"),
                     " subtracted from the",
                     ifelse(nCovars == 1, " centered", ""), " covariate",
                     ifelse(nCovars == 1, ".\n\n", "s.\n\n")), sep="", outf)
        }
        else if (any.cent >= 1)
        {
            Report(c("For the centered variable", s.plural,
                     ", the mean value", ifelse(any.cent == 1, " is", "s are"),
                     " subtracted from the covariate", s.plural,
                     ".\n"), sep="", outf)
        }
    }
    ## Write the "exogenous (changing) dyadic covariates" report section.
    ## Covariates duplicated among the constant dyadic covariates are excluded
    ## via `use` for the missing-data table. Uses x, outf, periodFromStart
    ## from the enclosing environment.
    reportChangingDyadicCovariates <- function()
    {
        covars <- names(x$dyvCovars)
        use <- ! covars %in% names(x$dycCovars)
        nCovars <- length(x$dyvCovars[use])
        Heading(2, outf, "Reading exogenous dyadic covariates.")
        for (i in seq(along=covars))
        {
            Report(c("Exogenous dyadic covariate named ", covars[i], '.\n'),
                   sep="", outf)
        }
        Report("Number of tie variables with missing data per period:\n",
               outf)
        Report(c(" period ", format(1:(x$observations - 1) +
                                    periodFromStart, width=7),
                 " overall\n"), sep="", outf)
        for (i in seq(along=covars))
        {
            if (use[i])
            {
                sparse <- attr(x$dyvCovars[[i]], "sparse")
                vardims <- attr(x$dyvCovars[[i]], "vardims")
                thiscovar <- x$dyvCovars[[i]]
                ## Dense covariates are 3-d arrays (actor x actor x period),
                ## so dims=2 collapses over the first two dimensions; sparse
                ## ones are lists of per-period matrices.
                if (!sparse)
                {
                    missvals <- colSums(is.na(thiscovar), dims=2)
                }
                else
                {
                    missvals <- sapply(thiscovar, function(x)sum(is.na(x)))
                }
                Report(c(format(covars[i], width=10),
                         format(missvals, width=6),
                         format(sum(missvals), width=9), " (",
                         format(round(100 * sum(missvals)/vardims[1]/
                                      vardims[2]), nsmall=1,
                                width=3), '%)\n'), outf)
            }
        }
        Report("\nInformation about changing dyadic covariates:\n", outf)
        Report(c(format("mean centered", width=36,
                        justify="right"), "\n"), outf)
        any.cent <- 0
        any.noncent <- 0
        ## NOTE(review): unlike the missing-data loop above, this loop does
        ## not filter on use[i], so covariates duplicated among the constant
        ## dyadic covariates are included here -- confirm whether intended.
        for (i in seq(along=covars))
        {
            atts <- attributes(x$dyvCovars[[i]])
            if (atts$centered)
            {
                cent <- " Y"
                any.cent <- any.cent+1
            }
            else
            {
                cent <- " N"
                any.noncent <- any.noncent+1
            }
            Report(c(format(covars[i], width=28), cent, '\n'), outf)
            ## Per-period means; vardims[3] is the number of periods.
            for (j in 1:(atts$vardims[3]))
            {
                Report(c(" period", format(j + periodFromStart,
                                           width=3),
                         format(round(atts$meanp[j], 3),
                                nsmall=3, width=10), "\n"), outf)
            }
            if (!atts$centered)
            {
                Report(c(format("Overall", width=29),
                         format(round(atts$mean, 3), width=10, nsmall=3),
                         "\n"), outf)
            }
            Report("\n", outf)
        }
        Report('\n', outf)
        ## NOTE(review): as in reportConstantDyadicCovariates, s.plural comes
        ## from any.cent but is used for the non-centered message too.
        s.plural <- ifelse((any.cent >= 2),"s","")
        if (any.noncent >= 1)
        {
            Report(c('The <mean> listed for the non-centered variable',
                     s.plural, ' is the attribute, not the observed mean.',
                     '\n'), sep="", outf)
        }
        if (nCovars >= 1)
        {
            if (any.noncent <= 0)
            {
                Report(c("The mean value",
                         ifelse(nCovars == 1, " is", "s are"),
                         " subtracted from the",
                         ifelse(nCovars == 1, " centered", ""), " covariate",
                         ifelse(nCovars == 1, ".\n\n", "s.\n\n")), sep="", outf)
            }
            else if (any.cent >= 1)
            {
                Report(c("For the centered variable", s.plural,
                         ", the mean value", ifelse(any.cent == 1, " is", "s are"),
                         " subtracted from the covariate", s.plural,
                         ".\n"), sep="", outf)
            }
        }
    }
    ## Write the "composition change" report section: for each node set, list
    ## every join/leave event with its time, then summarize joiners/leavers
    ## per period. Uses x (data object) and outf from the enclosing scope.
    reportCompositionChange <- function()
    {
        comps <- x$compositionChange
        Heading(2, outf, "Reading files with times of composition change.")
        for (i in seq(along=comps))
        {
            nodeSet <- attr(comps[[i]], "nodeSet")
            Report(c("\nComposition changes for nodeSet ", nodeSet, '.\n\n'),
                   sep="", outf)
            events <- attr(comps[[i]], "events")
            for (j in 1:nrow(events))
            {
                ## NOTE(review): this locally rebinds x (otherwise the data
                ## object from the enclosing scope) to one event row; comps
                ## was captured before the loop, so later iterations are
                ## unaffected, but the shadowing is fragile.
                x <- events[j, ]
                Report(c("Actor ", format(x$actor, width=2),
                         ifelse(x$event=="join", " joins ", " leaves"),
                         " network at time ",
                         format(round(x$period + x$time, 4), nsmall=4),
                         ".\n"), sep="", outf)
            }
            ## Cross-tabulate events by period and type for the summary lines.
            pertab <- table(events$period, events$event)
            for (period in row.names(pertab))
            {
                joiners <- pertab[period, "join"]
                leavers <- pertab[period, "leave"]
                Report(c("\nIn period ", period, ", ", joiners,
                         ifelse(joiners == 1, " actor", " actors"),
                         " joined and ", leavers,
                         ifelse(leavers == 1, " actor", " actors"),
                         " left the network.\n"), sep="", outf)
            }
        }
    }
types <- lapply(x$depvars, function(z) attr(z, "type"))
reportStart()
nNetworks <- sum(types != "behavior")
nBehavs <- sum(types %in% c("behavior", "continuous"))
if (nNetworks > 0)
{
reportNetworks()
}
if (nBehavs > 0)
{
reportBehaviors()
}
if (length(x$cCovars) > 0)
{
reportConstantCovariates()
}
if (nData > 1 && length(x$vCovars) > length(x$cCovars) ||
(nData ==1 && length(x$vCovars) > 0))
{
reportChangingCovariates()
}
if (length(x$dycCovars) > 0)
{
reportConstantDyadicCovariates()
}
if (nData > 1 && length(x$dyvCovars) > length(x$dycCovars) ||
(nData ==1 && length(x$dyvCovars) > 0))
{
reportChangingDyadicCovariates()
}
if (length(x$compositionChange) > 0)
{
reportCompositionChange()
}
Report("\n\n", outf)
}
if (!(inherits(data, "siena")))
{
stop("The first argument needs to be a siena data object.")
}
if (!(inherits(modelname, "character")))
{
cat("Since version 1.1-279, an effects object should not be given\n")
cat(" in the call of print01Report. Consult the help file.\n")
stop("print01Report needs no effects object.")
}
if (!inherits(getDocumentation, 'logical'))
{
stop('wrong parameters; note: do not include an effects object as parameter!')
}
if (getDocumentation)
{
tt <- getInternals()
return(tt)
}
Report(openfiles=TRUE, type="w", projname=modelname)
Report(" ************************\n", outf)
Report(c(" ", modelname, ".txt\n"),
sep='', outf)
Report(" ************************\n\n", outf)
Report(c("Filename is ", modelname, ".txt.\n\n"), sep="", outf)
Report(c("This file contains primary output for SIENA project <<",
modelname, ">>.\n\n"), sep="", outf)
Report(c("Date and time:", format(Sys.time(), "%d/%m/%Y %X"), "\n\n"), outf)
packageValues <- packageDescription(pkgname, fields=c("Version", "Date"))
rforgeRevision <- packageDescription(pkgname,
fields="Repository/R-Forge/Revision")
if (is.na(rforgeRevision))
{
revision <- ""
}
else
{
revision <- paste(" R-forge revision: ", rforgeRevision, " ", sep="")
}
Report(c(paste(pkgname, "version "), packageValues[[1]],
" (", format(as.Date(packageValues[[2]]), "%d %m %Y"), ")",
revision, "\n\n"), sep="", outf)
if (!inherits(data, 'sienaGroup'))
{
nData <- 1
data <- sienaGroupCreate(list(data), singleOK=TRUE)
}
else
{
nData <- length(data)
}
if (nData > 1)
{
Report("Multi-group input detected\n\n", outf)
for (i in 1:nData)
{
Report(c("Subproject ", i, ": <", names(data)[i], ">\n"), sep="",
outf)
reportDataObject1(data[[i]])
}
Report(c("Multi-group project", modelname, "contains", nData,
"subprojects.\n\n"), outf)
periodFromStart <- 0
for (i in 1:nData)
{
Heading(1, outf,
paste("Subproject ", i, ": <", names(data)[i], ">",
sep="", collapse="")
)
reportDataObject(data[[i]], periodFromStart, multi=TRUE)
periodFromStart <- periodFromStart + data[[i]]$observations
}
}
else
{
Heading(1, outf, "Data input.")
reportDataObject(data[[1]], 0, multi=FALSE)
}
atts <- attributes(data)
nets <- !(atts$types %in% c("behavior", "continuous"))
behs <- atts$types == "behavior"
if (length(data) > 1)
{
Heading(1, outf, "Further processing of multi-group data.")
Report("Series of observations for the multi-group project:\n", outf)
periodFromStart <- 0
for (i in seq(along=data))
{
Report(c(format(1:data[[i]]$observations + periodFromStart), '\n'),
outf)
periodFromStart <- periodFromStart + data[[i]]$observations
}
Report("\n", outf)
if (length(atts$vCovars) == 1)
{
Report(c("The overall mean value ",
format(round(atts$vCovarMean, 4), nsmall=3, width=12),
" is subtracted from covariate ", atts$vCovars,
".\n\n"), sep="", outf)
}
else if (length(atts$vCovars) >= 2)
{
Report(c("The mean values are subtracted from the covariates:\n"), outf)
for (i in seq(along=atts$vCovars))
{
Report(c(format(atts$vCovars[i], width=15),
format(round(atts$vCovarMean[i], 4), nsmall=3, width=12), '\n'), outf)
}
}
}
periodNos <- attr(data, "periodNos")
if (any(atts$anyUpOnly[nets]))
{
netnames <- atts$netnames[nets]
upOnly <- atts$anyUpOnly[nets]
allUpOnly <- atts$allUpOnly[nets]
for (i in which(upOnly))
{
if (sum(nets) > 1)
{
Report(c("Network ", netnames[i], ":\n"), sep = "", outf)
}
if (allUpOnly[i])
{
Report("All network changes are upward.\n", outf)
Report("This will be respected in the simulations.\n", outf)
Report("Therefore, there is no outdegree parameter.\n\n", outf)
}
else
{
Report(c("All network changes are upward for the following",
"periods:\n"), outf)
periodsUp <- unlist(lapply(data, function(x)
{
attr(x$depvars[[match(netnames[i], names(x$depvars))]],
"uponly")
}))
periods <- periodNos[c(1:length(periodsUp))[periodsUp]]
Report(paste(periods, " => ", periods + 1, ";",
sep=""), fill=80, outf)
Report("This will be respected in the simulations.\n\n", outf)
}
}
}
if (any(atts$anyDownOnly[nets]))
{
netnames <- atts$netnames[nets]
downOnly <- atts$anyDownOnly[nets]
allDownOnly <- atts$allDownOnly[nets]
for (i in which(downOnly))
{
if (sum(nets) > 1)
{
Report(c("Network ", netnames[i], "\n"), sep = "", outf)
}
if (allDownOnly[i])
{
Report("All network changes are downward.\n", outf)
Report("This will be respected in the simulations.\n", outf)
Report("Therefore, there is no outdegree parameter.\n\n", outf)
}
else
{
periodsDown <-
unlist(lapply(data, function(x)
{
attr(x$depvars[[match(netnames[i],
names(x$depvars))]],
"downonly")
}))
Report(c("All network changes are downward for the",
"following periods:\n"), outf)
periods <- periodNos[c(1:length(periodsDown))[periodsDown]]
Report(paste(periods, " => ", periods + 1, ";",
sep=""), fill=80, outf)
Report("This will be respected in the simulations.\n\n", outf)
}
}
}
if (any(atts$anyUpOnly[behs]))
{
netnames <- atts$netnames[behs]
upOnlyAndBeh <- atts$anyUpOnly[behs]
allUpOnly <- atts$allUpOnly[behs]
for (i in which(upOnlyAndBeh))
{
Report(c("\nBehavior variable ", netnames[i], ":\n"), sep = "",
outf)
if (allUpOnly[i])
{
Report("All behavior changes are upward.\n", outf)
Report("This will be respected in the simulations.\n", outf)
Report("Therefore, there is no linear shape parameter.\n\n",
outf)
}
else
{
Report(c("All behavior changes are upward for the following",
"periods:\n"), outf)
periodsUp <-
sapply(data, function(x)
{
attr(x$depvars[[match(netnames[i],
names(x$depvars))]],
"uponly")
})
periods <- periodNos[c(1:length(periodsUp))[periodsUp]]
Report(paste(periods, " => ", periods + 1, ";",
sep=""), fill=80, outf)
Report("This will be respected in the simulations.\n\n", outf)
}
}
}
if (any(atts$anyDownOnly[behs]))
{
netnames <- atts$netnames[behs]
downOnly <- atts$anyDownOnly[behs]
allDownOnly <- atts$allDownOnly[behs]
for (i in which(downOnly))
{
Report(c("\nBehavior ", netnames[i], ":\n"), sep = "", outf)
if (allDownOnly[i])
{
Report("All behavior changes are downward.\n", outf)
Report("This will be respected in the simulations.\n", outf)
Report("Therefore, there is no linear shape parameter.\n\n",
outf)
}
else
{
periodsDown <-
sapply(data, function(x)
{
attr(x$depvars[[match(netnames[i],
names(x$depvars))]],
"downonly")
})
Report(c("All behavior changes are downward for the",
"following periods:\n"), outf)
periods <- periodNos[c(1:length(periodsDown))[periodsDown]]
Report(paste(periods, " => ", periods + 1, ";",
sep=""), fill=80, outf)
Report("This will be respected in the simulations.\n\n", outf)
}
}
}
if (any(atts$anyMissing[nets]))
{
netnames <- atts$netnames[nets]
missings <- atts$anyMissing[nets]
for (i in seq(along=netnames[missings]))
{
Report(c("There are missing data for network variable ",
netnames[i], ".\n"), sep = "", outf)
}
}
if (any(atts$anyMissing[!nets]))
{
netnames <- atts$netnames[!nets]
missings <- atts$anyMissing[!nets]
for (i in seq(along=netnames[missings]))
{
Report(c("There are missing data for behavior variable ",
netnames[i], ".\n"), sep = "", outf)
}
}
if (sum(atts$types == 'oneMode') > 0)
{
netnames <- atts$netnames[nets]
if (nData > 1)
{
balmean <-
lapply(data, function(x)
sapply(x$depvars, function(y) attr(y, "balmean")))
}
else
{
balmean <- atts$"balmean"
}
if (nData > 1 || sum(atts$types == "oneMode") > 1)
{
Report(c("The mean structural dissimilarity values subtracted",
"in the\n"), outf)
Report("balance calculations are\n", outf)
}
else
{
Report(c("The mean structural dissimilarity value subtracted",
"in the\n"), outf)
Report("balance calculations is ", outf)
}
for (i in seq(along=atts$types))
{
if (atts$types[i] == "oneMode")
{
if (nData > 1)
{
thisbalmean <- sapply(balmean, function(x)x[[netnames[i]]])
if (sum(atts$types != "behavior") > 1)
{
Report(c("for network ", netnames[i],":"), sep="",
outf)
}
Report("\n", outf)
mystr <- format(paste("Subproject ", 1:nData, " <",
atts$names, "> ", sep=""))
for (j in seq(along=thisbalmean))
{
Report(c(mystr[j], ": ",
format(round(thisbalmean[j], 4), nsmall=4,
width=14), "\n"), sep="", outf)
}
}
else
{
if (sum(atts$types != "behavior") > 1)
{
Report(c("for network ", format(netnames[i], width=12),
format(round(balmean[i], 4),
nsmall=4, width=14), '.\n'),
sep="", outf)
}
else
{
Report(c(format(round(balmean[i], 4), nsmall=4,
width=14), '.\n'), sep="", outf)
}
}
}
}
}
if (sum(atts$types %in% c("behavior", "continuous")) > 0 ||
(nData ==1 && length(atts$cCovars) > 0) ||
length(atts$vCovars) > 0)
{
netnames <- atts$netnames
if (nData > 1)
{
vCovarSim <-
lapply(data, function(x)
sapply(x$vCovars, function(y) attr(y, "simMean")))
behSim <-
lapply(data, function(x)
sapply(x$depvars, function(y) attr(y, "simMean")))
}
else
{
vCovarSim <- atts$"vCovarSim"
behSim <- atts$"bSim"
}
Report(c("\nFor the similarity variable calculated from each actor",
"covariate,\nthe mean is subtracted.\nThese means are:\n"),
outf)
if (nData == 1)
{
for (i in seq(along=atts$cCovars))
{
if (atts$cCovarPoszvar[i])
{
Report(c("Similarity", format(atts$cCovars[i], width=24),
':', format(round(atts$cCovarSim[i], 4), width=12,
nsmall=4), '\n'), outf)
}
}
}
for (i in seq(along=atts$netnames))
{
if ((atts$types[i] %in% c("behavior", "continuous")) && atts$bPoszvar[i])
{
if (nData > 1)
{
thisSim <- sapply(behSim, function(x)x[[netnames[i]]])
Report(c("Similarity ", format(atts$netnames[i], width=24),
":\n"), sep="", outf)
mystr <- format(paste(" Subproject ", 1:nData, " <",
atts$names, "> ", sep=""))
for (j in seq(along=thisSim))
{
Report(c(mystr[j], format(round(thisSim[j], 4),
nsmall=4, width=12), "\n"),
sep="", outf)
}
Report("\n", outf)
}
else
{
Report(c("Similarity", format(atts$netnames[i], width=24),
':', format(round(atts$bSim[i], 4), nsmall=4,
width=12), '\n'), outf)
}
}
}
for (i in seq(along=atts$vCovars))
{
covarnames <- atts$vCovars
if (atts$vCovarPoszvar[i])
{
if (nData > 1)
{
thisSim <- sapply(vCovarSim, function(x)x[[covarnames[i]]])
Report(c("Similarity ", format(covarnames[i], width=24),
":\n"), sep="", outf)
mystr <- format(paste(" Subproject ", 1:nData, " <",
atts$names, "> ", sep=""))
for (j in seq(along=thisSim))
{
Report(c(mystr[j], format(round(thisSim[j], 4),
nsmall=4, width=12), "\n"),
sep="", outf)
}
Report("\n", outf)
}
else
{
Report(c("Similarity", format(atts$vCovars[i], width=24),
':', format(round(atts$vCovarSim[i], 4), width=12,
nsmall=4), '\n'), outf)
}
}
}
}
if (any(atts$anyHigher) || any(atts$anyDisjoint) || any(atts$anyAtLeastOne))
{
Report("\n", outf)
highers <- atts[["anyHigher"]]
disjoints <- atts[["anyDisjoint"]]
atleastones <- atts[["anyAtLeastOne"]]
if (any(highers))
{
higherSplit <- strsplit(names(highers)[highers], ",")
lapply(higherSplit, function(x)
{
Report(c("Network ", x[1], " is higher than network ", x[2],
".\n"), sep="", outf)
Report("This will be respected in the simulations.\n\n",
outf)
})
}
if (any(disjoints))
{
disjointSplit <- strsplit(names(disjoints)[disjoints],',')
lapply(disjointSplit, function(x)
{
Report(c("Network ", x[1], " is disjoint from network ",
x[2], ".\n"), sep="", outf)
Report("This will be respected in the simulations.\n\n",
outf)
})
}
if (any(atleastones))
{
atLeastOneSplit <- strsplit(names(atleastones)[atleastones],',')
lapply(atLeastOneSplit, function(x)
{
Report(c("A link in at least one of networks ",
x[1], " and", x[2],
" always exists.\n"), sep="", outf)
Report("This will be respected in the simulations.\n\n",
outf)
})
}
}
myeff <- getEffects(data)
printInitialDescription(data, myeff, modelName=modelname)
Report(closefiles=TRUE)
} |
segsample <-
    ## Resample segment medians: expand each segment of `mysegs` into either
    ## fixed-size blocks (`blocksize`) or a fixed number of replicates
    ## (`times`), then append a sampled median of `ratcol` per row.
    ## Exactly one of `blocksize`/`times` must be non-zero.
    ##
    ## BUGFIX: the two guard conditions had been corrupted by HTML-entity
    ## mangling ("&times" rendered as the multiplication sign), producing
    ## invalid syntax; restored to `blocksize==0 & times==0` etc.
    function(mysegs,ratcol,startcol="StartProbe",endcol="EndProbe",
             blocksize=0,times=0){
        if(blocksize==0 && times==0)stop("One of blocksize or times must be set")
        if(blocksize!=0 && times!=0)stop("Only one of blocksize or times can be set")
        segtable<-mysegs[,c(startcol,endcol),drop=FALSE]
        ## Replicate each segment row once per complete block it contains.
        if(blocksize!=0)segtable<-
            segtable[rep(seq_len(nrow(segtable)),
                times=(segtable[,endcol]-segtable[,startcol]+1)%/%blocksize),]
        ## Or replicate each segment row exactly `times` times.
        if(times!=0)segtable<-segtable[rep(seq_len(nrow(segtable)),each=times),]
        ## smedian.sample is defined elsewhere in the package.
        return(cbind(segtable, apply(segtable, 1, smedian.sample, v = ratcol)))
    }
NULL
NULL
## S4 generic: add constraints to a conservation planning problem so that
## selected planning units form contiguous units. Dispatches on the problem
## `x`, the zone adjacency matrix `zones` (defaults to within-zone-only
## contiguity), and the connectivity `data`.
methods::setGeneric("add_contiguity_constraints",
  signature = methods::signature("x", "zones", "data"),
  function(x, zones = diag(number_of_zones(x)), data = NULL)
  standardGeneric("add_contiguity_constraints"))
## Core method: validates inputs, then attaches a "ContiguityConstraint"
## object to the problem. The constraint's `calculate` hook derives an
## adjacency matrix from the cost data if none was supplied, and its `apply`
## hook injects the contiguity constraints into the optimization problem.
methods::setMethod("add_contiguity_constraints",
  methods::signature("ConservationProblem", "ANY", "ANY"),
  function(x, zones, data) {
    assertthat::assert_that(inherits(x, "ConservationProblem"),
      inherits(zones, c("matrix", "Matrix")),
      inherits(data, c("NULL", "Matrix")))
    if (!is.null(data)) {
      ## User-supplied connectivity matrix: must be binary, square, symmetric
      ## and conformant with the number of planning units.
      data <- methods::as(data, "dgCMatrix")
      assertthat::assert_that(all(data@x %in% c(0, 1)),
        ncol(data) == nrow(data), number_of_total_units(x) == ncol(data),
        all(is.finite(data@x)), Matrix::isSymmetric(data))
      d <- list(matrix = data)
    } else {
      ## No matrix given: adjacency can only be derived later if the cost
      ## data are spatially referenced.
      assertthat::assert_that(inherits(x$data$cost,
        c("Spatial", "Raster", "sf")),
        msg = paste("argument to data must be supplied because planning unit",
          "data are not in a spatially referenced format"))
      d <- list()
    }
    ## Zone matrix: binary, symmetric, and diagonal must dominate rows/cols.
    zones <- as.matrix(zones)
    assertthat::assert_that(
      isSymmetric(zones), ncol(zones) == number_of_zones(x),
      is.numeric(zones), all(zones %in% c(0, 1)),
      all(colMeans(zones) <= diag(zones)), all(rowMeans(zones) <= diag(zones)))
    colnames(zones) <- x$zone_names()
    rownames(zones) <- colnames(zones)
    x$add_constraint(pproto(
      "ContiguityConstraint",
      Constraint,
      data = d,
      name = "Contiguity constraints",
      parameters = parameters(
        binary_parameter("apply constraints?", 1L),
        binary_matrix_parameter("zones", zones, symmetric = TRUE)),
      ## Lazily compute the adjacency matrix from the cost layer when the
      ## user did not provide one.
      calculate = function(self, x) {
        assertthat::assert_that(inherits(x, "ConservationProblem"))
        if (is.Waiver(self$get_data("matrix"))) {
          data <- adjacency_matrix(x$data$cost)
          data <- methods::as(data, "dgCMatrix")
          self$set_data("matrix", data)
        }
        invisible(TRUE)
      },
      ## Translate the stored adjacency + zone data into constraints on the
      ## optimization problem (via compiled code).
      apply = function(self, x, y) {
        assertthat::assert_that(inherits(x, "OptimizationProblem"),
          inherits(y, "ConservationProblem"))
        if (as.logical(self$parameters$get("apply constraints?")[[1]])) {
          ind <- y$planning_unit_indices()
          d <- self$get_data("matrix")[ind, ind, drop = FALSE]
          z <- self$parameters$get("zones")
          ## Cluster zones that must be jointly contiguous; zones with a
          ## zero diagonal are dropped (multiplied out) below.
          z_cl <- igraph::graph_from_adjacency_matrix(z, diag = FALSE,
            mode = "undirected", weighted = NULL)
          z_cl <- igraph::clusters(z_cl)$membership
          z_cl <- z_cl * diag(z)
          d <- Matrix::forceSymmetric(d, uplo = "L")
          class(d) <- "dgCMatrix"
          if (max(z_cl) > 0)
            rcpp_apply_contiguity_constraints(x$ptr, d, z_cl)
        }
        invisible(TRUE)
      }))
  })
## data.frame method: convert Marxan-style boundary data to a matrix and
## delegate. Zoned boundary columns (zone1/zone2) are not supported here.
methods::setMethod("add_contiguity_constraints",
  methods::signature("ConservationProblem", "ANY", "data.frame"),
  function(x, zones, data) {
    assertthat::assert_that(inherits(data, "data.frame"),
      !assertthat::has_name(data, "zone1"),
      !assertthat::has_name(data, "zone2"))
    add_contiguity_constraints(x, zones, marxan_boundary_data_to_matrix(x, data))
  })
## matrix method: coerce a dense matrix to sparse form and delegate to the
## core (ANY, ANY) method, which performs all validation.
methods::setMethod("add_contiguity_constraints",
  methods::signature("ConservationProblem", "ANY", "matrix"),
  function(x, zones, data) {
    add_contiguity_constraints(x, zones, methods::as(data, "dgCMatrix"))
  })
## Compositional-data FPCA forecast of a density.
## If the rows of `data` already sum to a common constant (i.e. the input is
## already compositional, detected via getmode()), it is used directly;
## otherwise each row is turned into a kernel density estimate on a common
## grid of `m` points, normalized, scaled to 10^6 and zero-replaced to form a
## composition. The composition matrix is then forecast via CoDa_recon().
## NOTE(review): getmode() and CoDa_recon() are package-internal helpers, and
## dpik() comes from KernSmooth -- their contracts are assumed here.
CoDa_FPCA <-
    function(data, normalization, h_scale = 1, m = 5001,
             band_choice = c("Silverman", "DPI"),
             kernel = c("gaussian", "epanechnikov"),
             varprop = 0.99, fmethod)
{
    if(getmode(trunc(diff(apply(data, 1, sum))) == 0))
    {
        CoDa_mat = t(data)
    }
    else
    {
        band_choice = match.arg(band_choice)
        kernel = match.arg(kernel)
        N = nrow(data)
        ## NOTE(review): h_scale is a formal argument with a default, so
        ## exists('h_scale') is always TRUE and this line is dead code.
        if (!exists('h_scale')) h_scale = 1
        ## Per-row bandwidths: Silverman/normal-reference rules of thumb ...
        if(band_choice == "Silverman")
        {
            if(kernel == "gaussian")
            {
                h.hat_5m = sapply(1:N, function(t) 1.06*sd(data[t,])*(length(data[t,])^(-(1/5))))
            }
            if(kernel == "epanechnikov")
            {
                h.hat_5m = sapply(1:N, function(t) 2.34*sd(data[t,])*(length(data[t,])^(-(1/5))))
            }
            h.hat_5m = h_scale * h.hat_5m
        }
        ## ... or direct plug-in bandwidths via KernSmooth::dpik.
        if(band_choice == "DPI")
        {
            if(kernel == "gaussian")
            {
                h.hat_5m = sapply(1:N, function(t) dpik(data[t,], kernel = "normal"))
            }
            if(kernel == "epanechnikov")
            {
                h.hat_5m = sapply(1:N, function(t) dpik(data[t,], kernel = "epanech"))
            }
            h.hat_5m = h_scale * h.hat_5m
        }
        n = N
        ## Common evaluation grid over the full data range; du is the step.
        u = seq(from = min(data), to = max(data), length = m)
        du = u[2] - u[1]
        if(kernel == "gaussian")
        {
            Y = sapply(1:N, function(t) density(data[t,], bw = h.hat_5m[t], kernel = 'gaussian', from = min(data), to = max(data), n = m)$y)
        }
        if(kernel == "epanechnikov")
        {
            Y = sapply(1:N, function(t) density(data[t,], bw = h.hat_5m[t], kernel = 'epanechnikov', from = min(data), to = max(data), n = m)$y)
        }
        ## Renormalize each density so it integrates to 1 on the grid.
        for(t in 1:N)
        {
            Y[,t] = Y[,t]/(sum(Y[,t])*du)
        }
        return_density_train_trans <- Y
        ## Scale to counts out of 10^6 and replace (rounded) zeros by a small
        ## epsilon so the log-ratio transform in CoDa_recon is defined.
        return_density_train_transformation = return_density_train_trans * (10^6)
        n_1 = ncol(return_density_train_transformation)
        epsilon = sapply(1:n_1, function(X) max(return_density_train_transformation[,X] - round(return_density_train_transformation[,X], 2)))
        CoDa_mat = matrix(NA, m, n_1)
        for(ik in 1:n_1)
        {
            ## NOTE(review): if `index` is empty (no zero cells), the
            ## negative-index assignment below operates on zero rows while
            ## the RHS has length m -- verify this case cannot occur.
            index = which(round(return_density_train_transformation[,ik], 2) == 0)
            CoDa_mat[,ik] = replace(return_density_train_transformation[,ik], index, epsilon[ik])
            CoDa_mat[-index,ik] = return_density_train_transformation[-index,ik] * (1 - (length(index) * epsilon[ik])/(10^6))/(10^6)
        }
    }
    ## All columns share the same total; pass it as the closure constant.
    c = colSums(CoDa_mat)[1]
    dum = CoDa_recon(dat = t(CoDa_mat), normalize = normalization,
                     fore_method = fmethod, fh = 1, varprop = varprop, constant = c)
    return(dum$d_x_t_star_fore)
}
# Smoke test: imputation fills every NA and warns that r_max_min had to be
# reduced because the dataset is too small.
test_that("impute_LS_adaptive() works (basic test, check for anyNA and warning)", {
  set.seed(1234)
  ds_mis <- mvtnorm::rmvnorm(20, rep(0, 8), diag(1, 8))
  ds_mis <- delete_MCAR(ds_mis, 0.2, 1:4)
  ds_imp <- expect_warning(
    impute_LS_adaptive(ds_mis, r_max_min = 43, warn_r_max = TRUE),
    "Not enough data for r_max_min = 43. r_max_min reduced to 7!",
    fixed = TRUE,
    all = TRUE
  )
  expect_false(anyNA(ds_imp))
})
# When r_max_min collapses to 0, the adaptive method should degenerate to the
# plain LS-array imputation.
test_that("impute_LS_adaptive() works for small matrices", {
  set.seed(1234)
  ds_mis <- mvtnorm::rmvnorm(20, rep(0, 5), diag(1, 5))
  ds_mis <- delete_MCAR(ds_mis, 0.2, 1:4)
  ds_imp <- expect_warning(impute_LS_adaptive(ds_mis, warn_r_max = TRUE),
    "Not enough data for r_max_min = 100. r_max_min reduced to 0!",
    fixed = TRUE,
    all = TRUE
  )
  expect_false(anyNA(ds_imp))
  expect_equal(ds_imp, impute_LS_array(ds_mis))
})
# Data-frame inputs must be accepted as well as matrices.
test_that("impute_LS_adaptive() works for data frames", {
  set.seed(123)
  ds_mis <- as.data.frame(mvtnorm::rmvnorm(30, rep(0, 9), diag(2, 9)))
  ds_mis <- delete_MCAR(ds_mis, 0.1)
  ds_imp <- expect_warning(impute_LS_adaptive(ds_mis),
    "Not enough data for r_max_min = 100. r_max_min reduced to 10!",
    fixed = TRUE,
    all = TRUE
  )
  expect_false(anyNA(ds_imp))
})
# A completely missing row must be imputed with column means, and all four
# verbosity flags (gene/array, with and without _p suffix) must produce the
# documented messages without changing the imputed values.
test_that("impute_LS_adaptive() works with completely missing row and verbose", {
  set.seed(1234)
  ds_mis <- mvtnorm::rmvnorm(20, rep(0, 7), diag(1, 7))
  ds_mis[5, ] <- NA
  ds_imp_silent <- expect_silent(
    impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE, verbose_gene = FALSE, verbose_array = FALSE
    )
  )
  expect_false(anyNA(ds_imp_silent))
  # The all-NA row falls back to the column means of the LS-gene imputation.
  expect_equal(ds_imp_silent[5, ], suppressWarnings(colMeans(impute_LS_gene(ds_mis))))
  ds_imp_verb1 <- expect_message(
    impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE,
      verbose_gene = TRUE, verbose_array = FALSE
    ),
    "No observed value in row(s) 5. These rows were imputed with column means.",
    fixed = TRUE,
    all = TRUE
  )
  expect_equal(ds_imp_verb1, ds_imp_silent)
  ds_imp_verb2 <- expect_message(
    impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE,
      verbose_gene = FALSE, verbose_array = TRUE
    ),
    "The missing values of following rows were imputed with (parts of) mu: 5",
    fixed = TRUE,
    all = TRUE
  )
  expect_equal(ds_imp_verb2, ds_imp_silent)
  # Both flags at once produce a multi-message transcript; compare it against
  # the recorded snapshot file.
  verify_output(
    test_path("test-impute_LS_adaptive-verbosity.txt"),
    ds_imp_verb3 <- impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE,
      verbose_gene = TRUE, verbose_array = TRUE
    )
  )
  expect_equal(ds_imp_verb3, ds_imp_silent)
  ds_imp_verb4 <- expect_message(
    impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE,
      verbose_gene_p = TRUE, verbose_array_p = FALSE
    ),
    "No observed value in row(s) 5. These rows were imputed with column means.",
    fixed = TRUE,
    all = TRUE
  )
  expect_equal(ds_imp_verb4, ds_imp_silent)
  ds_imp_verb5 <- expect_message(
    impute_LS_adaptive(ds_mis,
      warn_r_max = FALSE,
      verbose_gene_p = FALSE, verbose_array_p = TRUE
    ),
    "The missing values of following rows were imputed with (parts of) mu: 5",
    fixed = TRUE,
    all = TRUE
  )
  expect_equal(ds_imp_verb5, ds_imp_silent)
})
# Regression test against stored reference datasets: the adaptive imputation
# must lie between the LS-array and LS-gene reference imputations.
test_that("impute_LS_adaptive() works with dataset triangle miss", {
  ds_triangle_mis <- readRDS(test_path(file.path("datasets", "ds_triangle_mis.rds")))
  ds_triangle_LS_array_Bo <- readRDS(test_path(file.path("datasets", "ds_triangle_LS_array_Bo.rds")))
  ds_triangle_LS_gene_Bo <- readRDS(test_path(file.path("datasets", "ds_triangle_LS_gene_Bo.rds")))
  set.seed(1234)
  ds_imp <- expect_warning(round(impute_LS_adaptive(ds_triangle_mis, min_common_obs = 5), 3),
    "Not enough data for r_max_min = 100. r_max_min reduced to 24!",
    fixed = TRUE,
    all = TRUE
  )
  # Adaptive result is a convex combination, so it must be bounded elementwise.
  expect_true(all(ds_imp <= pmax(ds_triangle_LS_array_Bo, ds_triangle_LS_gene_Bo)))
  expect_true(all(ds_imp >= pmin(ds_triangle_LS_array_Bo, ds_triangle_LS_gene_Bo)))
})
# Same bounding property on the larger 100x7 MCAR dataset, with a small
# numeric tolerance for rounding.
test_that("impute_LS_adaptive() works with dataset MCAR, 100x7", {
  ds_100x7_LS_array_Bo <- readRDS(test_path(file.path("datasets", "ds_100x7_LS_array_Bo.rds")))
  ds_100x7_LS_gene_Bo <- readRDS(test_path(file.path("datasets", "ds_100x7_LS_gene_Bo.rds")))
  ds_100x7_mis_MCAR <- readRDS(test_path(file.path("datasets", "ds_100x7_mis_MCAR.rds")))
  ds_mis <- ds_100x7_LS_gene_Bo
  ds_mis[is.na(ds_100x7_mis_MCAR)] <- NA
  set.seed(1234)
  ds_imp <- round(impute_LS_adaptive(ds_mis, warn_r_max = FALSE), 3)
  tol <- 0.002
  expect_true(all(ds_imp <= pmax(ds_100x7_LS_array_Bo, ds_100x7_LS_gene_Bo) + tol))
  expect_true(all(ds_imp >= pmin(ds_100x7_LS_array_Bo, ds_100x7_LS_gene_Bo) - tol))
})
## Test whether `x` is a data frame whose every column is numeric.
##
## @param x Any R object.
## @return TRUE if `x` is a data.frame with only numeric columns, else FALSE.
## Fixes: replaced the unsafe T/F literals with TRUE/FALSE and the
## type-unstable sapply() with vapply(); the redundant two-branch return
## collapses to a single logical expression (same truth table).
is.numeric_data.frame <- function(x){
  is.data.frame(x) && all(vapply(x, base::is.numeric, logical(1)))
}
## Extended numeric test: TRUE for anything base::is.numeric() accepts, and
## additionally for data frames whose columns are all numeric.
##
## NOTE(review): this deliberately masks base::is.numeric in the package
## namespace; internal calls must keep using the base:: prefix where base
## semantics are required.
## Fixes: T/F literals -> TRUE/FALSE, sapply -> vapply, and the three-return
## cascade collapsed into one short-circuit expression (same truth table).
is.numeric <- function(x){
  base::is.numeric(x) ||
    (is.data.frame(x) && all(vapply(x, base::is.numeric, logical(1))))
}
## Return the default graphical parameters, obtained by opening a throwaway
## device, snapshotting par(), and closing the device again. Callers can
## restore defaults with par(resetPar()).
resetPar <- function() {
  dev.new()
  defaults <- par(no.readonly = TRUE)
  dev.off()
  defaults
}
## TRUE when `x` is in non-decreasing order (the negation of is.unsorted();
## NA propagates for inputs containing missing values, as before).
is.sorted <- function(x) {
  !is.unsorted(x)
}
## Evaluate `expr`, capturing warning messages and/or the error message
## instead of letting them surface.
##
## @param expr Expression to evaluate (lazily, inside the handlers).
## @param warn Capture (and muffle) warnings?
## @param err  Capture errors? When TRUE, an error yields value = NULL
##             and its message in `error` instead of propagating.
## @return A list with `value`, plus `warnings` (character vector or NULL)
##         and/or `error` (string or NULL) depending on the flags.
## Fixes: replaced the reassignable T/F literals in the defaults and call
## sites with TRUE/FALSE (same values, safe against shadowing).
tryCatchCapture <- function(expr, warn = TRUE, err = TRUE) {
  val <- NULL
  myWarnings <- NULL
  wHandler <- function(w) {
    ## Accumulate the message and resume evaluation without emitting it.
    myWarnings <<- c(myWarnings, w$message)
    invokeRestart("muffleWarning")
  }
  myError <- NULL
  eHandler <- function(e) {
    myError <<- e$message
    NULL
  }
  if (warn && err) {
    val <- tryCatch(withCallingHandlers(expr, warning = wHandler), error = eHandler)
    return(list(value = val, warnings = myWarnings, error = myError))
  }
  if (warn) {
    ## Warnings-only mode: errors intentionally propagate to the caller.
    val <- tryCatch(withCallingHandlers(expr, warning = wHandler))
    return(list(value = val, warnings = myWarnings))
  }
  if (err) {
    val <- tryCatch(expr, error = eHandler)
    return(list(value = val, error = myError))
  }
  ## Neither flag set: plain evaluation.
  val <- expr
  list(value = val)
}
## Script: harvest shinyapps.io links from Twitter and the existing README,
## fetch each app's page title, and regenerate README.Rmd grouped by first
## letter, then commit and push.
## NOTE(review): several lines below are garbled -- they appear truncated at
## the first '#' character (regex fragments, markdown "## " headers and
## "#anchor" links lost), so this script does not parse as-is. The broken
## lines are flagged individually; the original text must be recovered from
## version control before this file can run.
library(magrittr)
## Search recent tweets mentioning shiny apps (no retweets).
sa <- rtweet::search_tweets(
  "url:shinyapps.io OR (shiny app OR application OR rstudio) OR shinyapps OR shinyapp",
  n = 10000, include_rts = FALSE)
## Links already listed in the README ("+ [name](url)" bullet lines).
l <- tfse::readlines("README.Rmd")
links <- unique(sub("/+$", "", gsub(".*\\(|\\)$", "", grep("^\\+ \\[", l, value = TRUE))))
links <- grep("https://[^/]+\\.shinyapps\\.io/[^/]+$", links, value = TRUE)
## Merge tweet URLs with README links, normalize and deduplicate.
links <- c(sa$urls_expanded_url, sa$urls_url, sa$media_expanded_url, sa$ext_media_expanded_url) %>%
  unlist() %>%
## NOTE(review): next line is truncated (regex lost after '#'): 
  sub("/?(\\
  tfse::regmatches_("^https?://[^/]+.shinyapps\\.io/[^/]+/?", drop = TRUE) %>%
  sub("^http:", "https:", .) %>%
  sub("/+$", "", .) %>%
  unique() %>%
  c(links) %>%
  unique() %>%
  sort() ->
  links
## Extract the account ("user") and app name from each URL.
user <- regexpr("(?<=//)[^/]+(?=\\.shinyapps)", links, perl = TRUE)
user <- regmatches(links, user)
app <- regexpr("(?<=shinyapps.io/)[^/]+", links, perl = TRUE)
app <- regmatches(links, app)
d <- data.table::data.table(
  user = user,
  app = app,
  url = links
)
## Markdown bullet per app.
d[, md_url := paste0("+ [**", app, "** by *", user, "*](", url, ")")]
## Best-effort page-title scraper; returns "" on any failure or on the
## shinyapps "Please Wait" placeholder page.
get_title <- function(url) {
  tryCatch({
    h <- tryCatch(readthat::read(url), error = function(e) NULL)
    if (is.null(h) || nchar(h[1]) == 0) {
      return("")
    }
    Sys.sleep(2)
    h <- xml2::read_html(url)
    title <- rvest::html_text(rvest::html_nodes(h, "h1,h2,h3,h4,title,.title"), trim = TRUE)
    if (length(title) == 0) {
      return("")
    }
    title <- title[nchar(title) > 0][1]
    if (grepl("Please.{0,4}Wait", title, ignore.case = TRUE)) {
      return("")
    }
    title
  }, error = function(e) "")
}
## Fetch titles one by one; already-filled slots are skipped so the loop can
## be re-run after interruptions.
o <- vector("list", nrow(d))
for (i in seq_along(o)) {
  if (length(o[[i]]) > 0) {
    cat(i, "\n")
    next
  }
  o[[i]] <- get_title(d[, url][i])
  cat(i, "\n")
}
d[, title := unlist(o)]
dd <- d[!is.na(title), ]
dd[, md_url := paste0(md_url, ": ", title)]
## Group apps alphabetically by first letter for the README sections.
by_app <- data.table::copy(dd[order(!grepl("^[[:alpha:]]", app), tolower(app)), ])
by_app[, letter := toupper(substr(by_app[, sub("^[[:punct:]]", "", app)], 1, 1))]
## NOTE(review): the next four md_url transformations are truncated (text
## lost after '#', likely markdown "## " headers / "#anchor" links):
by_app[, md_url := ifelse(duplicated(letter), md_url, paste0("\n
by_app[, md_url := tfse::trim_ws(gsub("\n+", " ", md_url))]
by_app[, md_url := ifelse(grepl("^
by_app[, md_url := ifelse(grepl("^
by_app2 <- data.table::copy(by_app)
by_app[, md_url := sub("(?<=
## NOTE(review): the table-of-contents lines below are also truncated:
toc <- unique(by_app[, letter])
toc <- paste0("
  paste0("+ [", toc, "](
## Front matter for the generated README.Rmd.
readme_prem <- c(
  '---',
  'title: "ShinyApps"',
  'output: github_document',
  '---',
  '',
  'A collection of links to [Shiny apps](https://shinyapps.io)',
  'that have been shared on Twitter.',
  '',
  toc)
## NOTE(review): by_app_no_title is never defined in this script -- the
## definition was presumably lost with the truncated lines above.
writeLines(c(readme_prem, by_app_no_title[, md_url]), "README-notitle.RMD")
writeLines(c(readme_prem, by_app[, md_url]), "README.Rmd")
rmarkdown::render("README.Rmd")
browseURL("README.html")
unlink("README.html")
## Commit and push the regenerated README.
git2r::add(path = c("README.Rmd", "README.md"))
git2r::commit(message = "Update")
git2r::push()
# Category probabilities of an adjacent-categories (partial-credit type)
# response model for one item.
#
# Given the q = k - 1 linear predictors `eta`, category r (r = 1, ..., q + 1)
# has unnormalized mass exp(eta_1 + ... + eta_{r-1}), with the empty sum
# giving mass 1 for r = 1. The function returns the probabilities of the
# first q categories only; callers obtain the last one as 1 - sum(result).
#
# The original built a (q+1) x (q+1) matrix and took row products with
# apply(), an O(q^2) detour; the row products are exactly the cumulative
# products of c(1, exp(eta)), so a single cumprod() yields the identical
# values in O(q).
#
# @param eta numeric vector of q linear predictors.
# @return numeric vector of length q with the first q category probabilities.
responseFun2 <- function(eta) {
  q <- length(eta)
  # cum[r] = exp(eta_1 + ... + eta_{r-1}); cum[q + 1] is the mass of the
  # last category and enters only the normalizing constant.
  cum <- cumprod(c(1, exp(eta)))
  cum[seq_len(q)] / sum(cum)
}
# Simulate polytomous item-response data with covariate (DIF) effects.
#
# Arguments:
#   model - model label; "GRSM"/"RSM" activate the rating-scale restriction,
#           "GRSM"/"GPCM"/"2PL" activate item-specific discriminations.
#   m     - number of covariates: odd positions ~ N(0, 1), even ~ Bern(0.5).
#   I     - number of items.
#   k     - number of response categories; q = k - 1 thresholds per item.
#   n     - number of persons.
#   gamma - matrix of covariate effects indexed by item row (gamma[ii, ]);
#           assumes ncol(gamma) == m — TODO confirm against callers.
#   seed  - optional RNG seed for reproducibility.
# Returns a list with the simulated data frame (responses then covariates)
# and the true generating parameters.
#
# NOTE(review): left byte-identical on purpose — the draw order of the RNG
# calls defines the output under set.seed(), so restructuring (e.g.
# vectorizing the growing cbind/rbind calls) would change results.
sim_fun <- function(model, m, I, k, n, gamma, seed = NULL){
  if(!is.null(seed)){
    set.seed(seed)
  }
  RSM <- GPCM <- FALSE
  if(model %in% c("GRSM","RSM")){
    RSM <- TRUE
  }
  if(model %in% c("GRSM","GPCM","2PL")){
    GPCM <- TRUE
  }
  q <- k-1
  # Covariate matrix: alternate continuous and binary columns, then scale.
  X <- c()
  for(i in 1:m){
    if(i%%2 == 1){
      X <- cbind(X, rnorm(n))
    }else{
      X <- cbind(X, rbinom(n,1,0.5))
    }
  }
  X <- scale(X)
  # Item thresholds: free per item/category unless the rating-scale
  # restriction applies, in which case one location per item plus shared
  # category parameters alpha (first fixed to 0 for identification).
  if(!RSM){
    delta <- deltaX <- matrix(round(rnorm(q*I,sd=0.5),2),nrow=I)
    alpha <- NA
  }else{
    delta <- round(rnorm(I,sd=0.5),2)
    alpha <- c(0,round(rnorm(q-1,sd=0.5),2))
    deltaX <- t(t(matrix(rep(delta,q),nrow=I))+alpha)
  }
  # Discriminations: all 1 unless a GPCM-type model was requested.
  if(!GPCM){
    sigma <- rep(1,I)
  }else{
    sigma <- seq(0.7,1,length=I)
  }
  theta <- rnorm(n)  # person abilities
  lin_pred<- c()
  probs <- c()
  y <- c()
  # Draw one response per person x item from the category probabilities.
  for(i in 1:n){
    for(ii in 1:I){
      # Linear predictor: ability minus thresholds, shifted by the item's
      # covariate (DIF) effect, scaled by the discrimination.
      eta <- sigma[ii] * (theta[i] - deltaX[ii,] -sum(gamma[ii,]*X[i,]))
      lin_pred <- rbind(lin_pred, eta)
      pi <- responseFun2(eta)
      # Binary special case: plain logistic instead of the q-category rule.
      if(q==1){
        pi <- exp(eta)/(1+exp(eta))
      }
      probs <- rbind(probs,pi)
      # Append the implied probability of the last category and sample.
      pi <- c(pi,1-sum(pi))
      y.sample <- which(rmultinom(1,1,pi)==1)
      y <- c(y,y.sample)
    }
  }
  # Responses were generated item-within-person, so fill row-wise.
  Y <- matrix(y,byrow=TRUE,nrow=n)
  data.sim <- as.data.frame(cbind(Y,X))
  return(list(data=data.sim, theta = theta, alpha = alpha, sigma = sigma,
              delta = delta, gamma = gamma, lin_pred = lin_pred, probs = probs))
}
# Variant of sim_fun(): all covariates binary and left unscaled, smaller
# parameter spread (sd = 0.3), alpha with no fixed zero, and discriminations
# spanning 0.8-1.2. See sim_fun() for the shared argument semantics.
#
# NOTE(review): left byte-identical — RNG draw order defines the output
# under set.seed(), so restructuring would change simulated results.
sim_fun2 <- function(model, m, I, k, n, gamma, seed = NULL){
  if(!is.null(seed)){
    set.seed(seed)
  }
  RSM <- GPCM <- FALSE
  if(model %in% c("GRSM","RSM")){
    RSM <- TRUE
  }
  if(model %in% c("GRSM","GPCM","2PL")){
    GPCM <- TRUE
  }
  q <- k-1
  # Covariates: all Bernoulli(0.5); note no scale() here, unlike sim_fun().
  X <- c()
  for(i in 1:m){
    X <- cbind(X, rbinom(n,1,0.5))
  }
  if(!RSM){
    delta <- deltaX <- matrix(round(rnorm(q*I,sd=0.3),2),nrow=I)
    alpha <- NA
  }else{
    delta <- round(rnorm(I,sd=0.3),2)
    # All q category parameters drawn here (no leading 0 as in sim_fun()).
    alpha <- round(rnorm(q,sd=0.3),2)
    deltaX <- t(t(matrix(rep(delta,q),nrow=I))+alpha)
  }
  if(!GPCM){
    sigma <- rep(1,I)
  }else{
    sigma <- seq(0.8,1.2,length=I)
  }
  theta <- rnorm(n)  # person abilities
  lin_pred<- c()
  probs <- c()
  y <- c()
  # Draw one response per person x item (see sim_fun() for details).
  for(i in 1:n){
    for(ii in 1:I){
      eta <- sigma[ii] * (theta[i] - deltaX[ii,] -sum(gamma[ii,]*X[i,]))
      lin_pred <- rbind(lin_pred, eta)
      pi <- responseFun2(eta)
      if(q==1){
        pi <- exp(eta)/(1+exp(eta))
      }
      probs <- rbind(probs,pi)
      pi <- c(pi,1-sum(pi))
      y.sample <- which(rmultinom(1,1,pi)==1)
      y <- c(y,y.sample)
    }
  }
  Y <- matrix(y,byrow=TRUE,nrow=n)
  data.sim <- as.data.frame(cbind(Y,X))
  return(list(data=data.sim, theta = theta, alpha = alpha, sigma = sigma,
              delta = delta, gamma = gamma, lin_pred = lin_pred, probs = probs))
}
# Variant of sim_fun() with a mixed-type covariate data frame: V1 numeric,
# V2 a binary factor, V3 a 4-level factor; the design matrix (dummy-coded,
# scaled) enters the linear predictor while the raw data frame is returned.
# See sim_fun() for the shared argument semantics.
#
# NOTE(review): left byte-identical — RNG draw order defines the output
# under set.seed(), so restructuring would change simulated results.
sim_fun3 <- function(model, m, I, k, n, gamma, seed = NULL){
  if(!is.null(seed)){
    set.seed(seed)
  }
  RSM <- GPCM <- FALSE
  if(model %in% c("GRSM","RSM")){
    RSM <- TRUE
  }
  if(model %in% c("GRSM","GPCM","2PL")){
    GPCM <- TRUE
  }
  q <- k-1
  # NOTE(review): this loop re-creates X whenever i %% 3 == 1, so for m > 3
  # earlier columns are discarded, and model.matrix() below requires V1-V3
  # to all exist (effectively m == 3) — TODO confirm intended m.
  for(i in 1:m){
    if(i%%3 == 1){
      X <- data.frame(V1= rnorm(n))
    }
    if(i%%3 == 2){
      X$V2 <- factor(rbinom(n,1,0.5))
    }
    if(i%%3 == 0){
      X$V3 <- factor(sample(1:4,n,replace=TRUE))
    }
  }
  # Dummy-coded design matrix without intercept column, standardized; this
  # (not the raw X) is what the DIF effects gamma multiply.
  X2 <- model.matrix(~V1+V2+V3,data=X)[,-1]
  X2 <- scale(X2)
  if(!RSM){
    delta <- deltaX <- matrix(round(rnorm(q*I,sd=0.5,mean=-0.5),2),nrow=I)
    alpha <- NA
  }else{
    delta <- round(rnorm(I,sd=0.5),2)
    alpha <- c(0,round(rnorm(q-1,sd=0.5),2))
    deltaX <- t(t(matrix(rep(delta,q),nrow=I))+alpha)
  }
  if(!GPCM){
    sigma <- rep(1,I)
  }else{
    sigma <- seq(0.7,1,length=I)
  }
  theta <- rnorm(n)  # person abilities
  lin_pred<- c()
  probs <- c()
  y <- c()
  # Draw one response per person x item (see sim_fun() for details).
  for(i in 1:n){
    for(ii in 1:I){
      eta <- sigma[ii] * (theta[i] - deltaX[ii,] -sum(gamma[ii,]*X2[i,]))
      lin_pred <- rbind(lin_pred, eta)
      pi <- responseFun2(eta)
      if(q==1){
        pi <- exp(eta)/(1+exp(eta))
      }
      probs <- rbind(probs,pi)
      pi <- c(pi,1-sum(pi))
      y.sample <- which(rmultinom(1,1,pi)==1)
      y <- c(y,y.sample)
    }
  }
  Y <- matrix(y,byrow=TRUE,nrow=n)
  # Returned data holds the raw covariates X (factors intact), not X2.
  data.sim <- as.data.frame(cbind(Y,X))
  names(data.sim)[1:I] <- paste0("Item",1:I)
  return(list(data=data.sim, theta = theta, alpha = alpha, sigma = sigma,
              delta = delta, gamma = gamma, lin_pred = lin_pred, probs = probs))
}
# Variant of sim_fun() with correlated covariates drawn from a multivariate
# normal with covariance `sigma` (requires rmvnorm(), e.g. from mvtnorm);
# even-indexed columns are dichotomized at 0. See sim_fun() for the shared
# argument semantics.
#
# NOTE(review): the `sigma` argument (covariance matrix for rmvnorm) is
# later overwritten with the item discriminations, and it is the
# discrimination vector that is returned as `sigma`.
# NOTE(review): set.seed(1860) below overrides any user-supplied `seed`
# for every draw after the covariates — TODO confirm this is intended.
# Left byte-identical otherwise: RNG draw order defines the output.
sim_cor <- function(model, m, I, k, n, gamma, sigma, seed = NULL){
  if(!is.null(seed)){
    set.seed(seed)
  }
  RSM <- GPCM <- FALSE
  if(model %in% c("GRSM","RSM")){
    RSM <- TRUE
  }
  if(model %in% c("GRSM","GPCM","2PL")){
    GPCM <- TRUE
  }
  q <- k-1
  # Correlated covariate draws; `sigma` here is the covariance matrix.
  mat1 <- rmvnorm(n, sigma = sigma)
  set.seed(1860)
  # Keep odd columns continuous, dichotomize even columns at 0, then scale.
  X <- c()
  for(i in 1:m){
    if(i%%2 == 1){
      X <- cbind(X, mat1[,i])
    }else{
      X <- cbind(X, mat1[,i]>0)
    }
  }
  X <- scale(X)
  if(!RSM){
    delta <- deltaX <- matrix(round(rnorm(q*I,sd=0.5),2),nrow=I)
    alpha <- NA
  }else{
    delta <- round(rnorm(I,sd=0.5),2)
    alpha <- c(0,round(rnorm(q-1,sd=0.5),2))
    deltaX <- t(t(matrix(rep(delta,q),nrow=I))+alpha)
  }
  # From here on `sigma` means the item discriminations.
  if(!GPCM){
    sigma <- rep(1,I)
  }else{
    sigma <- seq(0.7,1,length=I)
  }
  theta <- rnorm(n)  # person abilities
  lin_pred<- c()
  probs <- c()
  y <- c()
  # Draw one response per person x item (see sim_fun() for details).
  for(i in 1:n){
    for(ii in 1:I){
      eta <- sigma[ii] * (theta[i] - deltaX[ii,] -sum(gamma[ii,]*X[i,]))
      lin_pred <- rbind(lin_pred, eta)
      pi <- responseFun2(eta)
      if(q==1){
        pi <- exp(eta)/(1+exp(eta))
      }
      probs <- rbind(probs,pi)
      pi <- c(pi,1-sum(pi))
      y.sample <- which(rmultinom(1,1,pi)==1)
      y <- c(y,y.sample)
    }
  }
  Y <- matrix(y,byrow=TRUE,nrow=n)
  data.sim <- as.data.frame(cbind(Y,X))
  return(list(data=data.sim, theta = theta, alpha = alpha, sigma = sigma,
              delta = delta, gamma = gamma, lin_pred = lin_pred, probs = probs))
}
context("Natural abundance correction")
library(accucor)
# Read one sheet of expected results from a fixture workbook and coerce
# every "*_Label" column to integer so type differences do not trip the
# expect_equal() comparisons against the corrected output.
#
# @param file Path to the .xlsx workbook.
# @param sheet Sheet name or 1-based index.
# @return A tibble with integer "*_Label" columns.
read_expected <- function(file, sheet) {
  expected <- readxl::read_excel(path = file, sheet = sheet)
  # across() supersedes mutate_at(); return the result explicitly instead
  # of relying on the value of an assignment as the last expression.
  dplyr::mutate(
    expected,
    dplyr::across(dplyr::ends_with("_Label"), as.integer)
  )
}
# Locate a test fixture shipped in accucor's extdata directory.
fixture_path <- function(filename) {
  system.file("extdata", filename, package = "accucor")
}

# Build the expected-output list for a correction run: the "Original"
# element is sheet 1 of `original_file`, and each name in `sheets` is read
# from the sheet of that name in `corrected_file`. By default the original
# data are taken from sheet 1 of the corrected workbook itself.
build_expected <- function(corrected_file,
                           original_file = corrected_file,
                           sheets = c("Corrected", "Normalized", "PoolAfterDF")) {
  per_sheet <- lapply(sheets, function(sheet_name) {
    read_expected(fixture_path(corrected_file), sheet = sheet_name)
  })
  names(per_sheet) <- sheets
  c(
    list("Original" = read_expected(fixture_path(original_file), sheet = 1)),
    per_sheet
  )
}

test_that("Carbon correction (Excel, simple format)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("C_Sample_Input_Simple.xlsx"),
    output_base = FALSE,
    resolution = 100000
  )
  expect_equal(
    corrected,
    build_expected(
      "C_Sample_Input_Simple_corrected.xlsx",
      original_file = "C_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("PoolBeforeDF parameter", {
  corrected <- natural_abundance_correction(
    path = fixture_path("C_Sample_Input_Simple.xlsx"),
    output_base = FALSE,
    report_pool_size_before_df = TRUE,
    resolution = 100000
  )
  # The report_pool_size_before_df flag adds one extra output sheet.
  expect_equal(
    corrected,
    build_expected(
      "C_Sample_Input_Simple_corrected.xlsx",
      original_file = "C_Sample_Input_Simple.xlsx",
      sheets = c("Corrected", "Normalized", "PoolAfterDF", "PoolBeforeDF")
    )
  )
})

test_that("Carbon correction (csv, simple format)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("C_Sample_Input_Simple.csv"),
    output_base = FALSE,
    resolution = 100000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected(
      "C_Sample_Input_Simple_corrected.xlsx",
      original_file = "C_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Carbon correction (Excel, Classic MAVEN copy/paste)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("C_Sample_Input.xlsx"),
    compound_database = fixture_path("KNOWNS.csv"),
    output_base = FALSE,
    resolution = 100000,
    resolution_defined_at = 200
  )
  # "Original" comes from sheet 1 of the corrected workbook here.
  expect_equal(
    corrected,
    build_expected("C_Sample_Input_Simple_corrected.xlsx")
  )
})

test_that("Deuterium correction (Excel, simple format)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("D_Sample_Input_Simple.xlsx"),
    output_base = FALSE,
    resolution = 100000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected(
      "D_Sample_Input_Simple_corrected.xlsx",
      original_file = "D_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Deuterium correction (Excel, Classic Maven Cut/Paste)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("D_Sample_Input.xlsx"),
    compound_database = fixture_path("KNOWNS.csv"),
    output_base = FALSE,
    resolution = 100000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected(
      "D_Sample_Input_Simple_corrected.xlsx",
      original_file = "D_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Nitrogen correction (Excel, simple format)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("N_Sample_Input_Simple.xlsx"),
    output_base = FALSE,
    resolution = 140000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected(
      "N_Sample_Input_Simple_corrected.xlsx",
      original_file = "N_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Nitrogen correction (Excel, Classic Maven Cut/Paste)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("N_Sample_Input.xlsx"),
    compound_database = fixture_path("KNOWNS.csv"),
    output_base = FALSE,
    resolution = 140000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected(
      "N_Sample_Input_Simple_corrected.xlsx",
      original_file = "N_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Carbon correction (csv, El-MAVEN export (with set names))", {
  # This export is expected to raise a warning during parsing.
  corrected <- expect_warning(natural_abundance_correction(
    path = fixture_path("elmaven_export.csv"),
    output_base = FALSE,
    resolution = 140000,
    resolution_defined_at = 200
  ))
  # Attributes may differ for the csv path, hence equivalence, not equality.
  expect_equivalent(
    corrected,
    build_expected("elmaven_export_corrected.xlsx")
  )
})

test_that("Carbon correction (Excel, El-MAVEN export (with set names))", {
  corrected <- natural_abundance_correction(
    path = fixture_path("elmaven_export.xlsx"),
    output_base = FALSE,
    resolution = 140000,
    resolution_defined_at = 200
  )
  expect_equal(
    corrected,
    build_expected("elmaven_export_corrected.xlsx")
  )
})

test_that("Carbon correction (csv, El-MAVEN export (w/o names))", {
  corrected <- natural_abundance_correction(
    path = fixture_path("elmaven_d2_export.csv"),
    resolution = 140000,
    output_base = FALSE
  )
  expect_equal(
    corrected,
    build_expected("elmaven_d2_export_corrected.xlsx")
  )
})

test_that("Carbon correction (csv, El-MAVEN, multiple groups per compound)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("alanine_three_peak_groups.csv"),
    resolution = 140000,
    output_base = FALSE
  )
  expect_equal(
    corrected,
    build_expected("alanine_three_peak_groups_corrected.xlsx")
  )
})

test_that("Carbon correction (dataframe)", {
  # Exercise the data-frame entry point instead of the path-based one.
  input_data <- as.data.frame(
    readxl::read_excel(
      path = fixture_path("C_Sample_Input_Simple.xlsx"),
      sheet = 1
    )
  )
  corrected <- natural_abundance_correction(
    data = input_data,
    resolution = 100000
  )
  expect_equal(
    corrected,
    build_expected(
      "C_Sample_Input_Simple_corrected.xlsx",
      original_file = "C_Sample_Input_Simple.xlsx"
    )
  )
})

test_that("Carbon correction (El-Maven v0.11.0)", {
  corrected <- natural_abundance_correction(
    path = fixture_path("elmaven_v0.11_export.csv"),
    resolution = 100000,
    output_base = FALSE
  )
  expect_equal(
    corrected,
    build_expected("elmaven_v0.11_export_corrected.xlsx")
  )
})
# Distance between two row vectors, delegating to norm.vect() on their
# difference. When the column counts differ the vectors are treated as
# incomparable and 0 is returned (the original's silent fallback).
dist.vect <- function(vector1, vector2) {
  if (ncol(vector1) != ncol(vector2)) {
    return(0)
  }
  norm.vect(vector1 - vector2)
}
library(psychmeta)
# Fixture data for the .distribute_logic() tests below: 120 observed
# correlations paired with a repeating construct layout.
rxyi <- c(
  0.49, 0.4, 0.36, 0.54, 0.56, 0.62, 0.34, 0.4, 0.53, 0.37, 0.53, 0.45, 0.39, 0.43,
  0.36, 0.34, 0.46, 0.19, 0.47, 0.73, 0.48, 0.21, 0.29, 0.23, 0.23, 0.56, 0.37,
  0.37, 0.52, 0.34, 0.43, 0.49, 0.47, 0.4, 0.46, 0.25, 0.4, 0.3, 0.39, 0.48, 0.25,
  0.53, 0.19, 0.32, 0.28, 0.51, 0.38, 0.41, 0.38, 0.36, 0.48, 0.49, 0.39, 0.41,
  0.4, 0.48, 0.4, 0.39, 0.51, 0.43, 0.31, 0.14, 0.1, 0.17, 0.28, 0.38, 0.4, 0.22,
  0.01, 0.38, 0.43, 0.27, 0.07, 0.38, 0.2, 0.17, 0.07, 0.34, 0.39, 0.3, 0.38, 0.3,
  0.29, 0.1, 0.22, 0.22, 0.4, 0.02, 0.12, 0.16, 0.16, 0.19, 0.22, 0.2, 0.34, 0.31,
  0.26, 0.2, 0.21, 0.24, 0.3, 0.24, 0.32, 0.26, 0.25, 0.16, 0.19, 0.19, 0.13, 0.19,
  0.32, 0.3, 0.18, 0.24, 0.41, 0.19, 0.2, 0.21, 0.14, 0.21
)
# Construct pairs cycle through (A-B, A-C, B-C) forty times, matching the
# 120 effect sizes above.
construct_x <- rep(c("A", "A", "B"), 40)
construct_y <- rep(c("B", "C", "C"), 40)
# Every test calls .distribute_logic() with the same construct layout and
# argument names; only the logic inputs vary, so wrap the boilerplate once.
distribute <- function(logic_general, logic_x, logic_y) {
  .distribute_logic(
    logic_general = logic_general,
    logic_x = logic_x,
    logic_y = logic_y,
    name_logic_x = "correct_rxx",
    name_logic_y = "correct_ryy",
    construct_x = construct_x,
    construct_y = construct_y,
    es_length = length(rxyi)
  )
}

# Number of effect sizes the expected vectors must span.
n_es <- length(rxyi)

test_that("Global = NULL, Column all TRUE", {
  # With no per-construct settings, the scalar column values pass through.
  expect_equal(
    distribute(NULL, TRUE, TRUE),
    list(x = TRUE, y = TRUE)
  )
})

test_that("Global all TRUE, Column all TRUE", {
  expect_equal(
    distribute(c(A = TRUE, B = TRUE, C = TRUE), TRUE, TRUE),
    list(x = rep(TRUE, n_es), y = rep(TRUE, n_es))
  )
})

test_that("Global all FALSE, Column all FALSE", {
  # Construct names not present in the data: column defaults still apply.
  expect_equal(
    distribute(c(X = FALSE, Y = FALSE, Z = FALSE), FALSE, FALSE),
    list(x = rep(FALSE, n_es), y = rep(FALSE, n_es))
  )
})

test_that("Global all FALSE, Column all TRUE", {
  # Per-construct FALSE overrides the TRUE column defaults.
  expect_equal(
    distribute(c(A = FALSE, B = FALSE, C = FALSE), TRUE, TRUE),
    list(x = rep(FALSE, n_es), y = rep(FALSE, n_es))
  )
})

test_that("Global Z missing A = TRUE B = FALSE, Column all TRUE", {
  # construct_x cycles A, A, B and construct_y cycles B, C, C; with
  # A = TRUE, B = FALSE and the missing construct falling back to the
  # column value (TRUE), the per-study patterns repeat every 3 entries.
  expect_equal(
    distribute(c(A = TRUE, B = FALSE), TRUE, TRUE),
    list(
      x = rep(c(TRUE, TRUE, FALSE), 40),
      y = rep(c(FALSE, TRUE, TRUE), 40)
    )
  )
})

test_that("Global X = FALSE, Y = TRUE, Z = FALSE, Column rxx = FALSE, ryy = TRUE", {
  # No global name matches the data, so the column values are distributed.
  expect_equal(
    distribute(c(X = FALSE, Y = TRUE, Z = TRUE), FALSE, TRUE),
    list(x = rep(FALSE, n_es), y = rep(TRUE, n_es))
  )
})

test_that("Global X = FALSE, Y = TRUE, Column rxx = TRUE, ryy = FALSE", {
  expect_equal(
    distribute(c(X = FALSE, Y = TRUE), TRUE, FALSE),
    list(x = rep(TRUE, n_es), y = rep(FALSE, n_es))
  )
})
# Sequential forward selection of spatial predictors for spatial random
# forest models.  Ranked spatial predictors are added to the model one at a
# time; for each candidate set a random forest is fitted (one foreach task
# per set) and the maximum Moran's I of its residuals plus its out-of-bag
# R squared are recorded.  The selected set is the one maximizing a weighted
# combination of R squared and a penalization on the number of spatial
# predictors (computed by optimization_function()).
#
# Arguments (selection):
#   spatial.predictors.ranking       - list with a $ranking vector of spatial
#                                      predictor names, best first
#   weight.r.squared                 - weight of R^2 in the optimization
#                                      criterion, clamped to [0, 1]
#   weight.penalization.n.predictors - weight of the size penalization,
#                                      clamped to [0, 1]
#   cluster                          - optional user-supplied parallel
#                                      cluster; if NULL one is created here
#
# Returns a list with:
#   optimization            - data frame, one row per candidate set size
#   best.spatial.predictors - names of the selected spatial predictors
select_spatial_predictors_sequential <- function(
  data = NULL,
  dependent.variable.name = NULL,
  predictor.variable.names = NULL,
  distance.matrix = NULL,
  distance.thresholds = NULL,
  ranger.arguments = NULL,
  spatial.predictors.df = NULL,
  spatial.predictors.ranking = NULL,
  weight.r.squared = 0.75,
  weight.penalization.n.predictors = 0.25,
  verbose = FALSE,
  n.cores = parallel::detectCores() - 1,
  cluster = NULL
){
  # Accept the output of a variable-selection step in place of a plain
  # character vector of predictor names.
  if(!is.null(predictor.variable.names)){
    if(inherits(predictor.variable.names, "variable_selection")){
      predictor.variable.names <- predictor.variable.names$selected.variables
    }
  }
  # Keep only the ranking vector of spatial predictor names.
  spatial.predictors.ranking <- spatial.predictors.ranking$ranking
  # Clamp the optimization weights into [0, 1] and fill in defaults.
  if(is.null(weight.r.squared)){weight.r.squared <- 0.75}
  if(weight.r.squared > 1){weight.r.squared <- 1}
  if(weight.r.squared < 0){weight.r.squared <- 0}
  if(is.null(weight.penalization.n.predictors)){weight.penalization.n.predictors <- 0.25}
  if(weight.penalization.n.predictors > 1){weight.penalization.n.predictors <- 1}
  if(weight.penalization.n.predictors < 0){weight.penalization.n.predictors <- 0}
  # Force lightweight, comparable ranger settings for every candidate fit;
  # data/formula/variable names are supplied per iteration below, and each
  # worker runs single-threaded because parallelism is handled by foreach.
  if(is.null(ranger.arguments)){
    ranger.arguments <- list()
  }
  ranger.arguments$write.forest <- TRUE
  ranger.arguments$importance <- "none"
  ranger.arguments$local.importance <- FALSE
  ranger.arguments$keep.inbag <- FALSE
  ranger.arguments$num.trees <- 500
  ranger.arguments$data <- NULL
  ranger.arguments$formula <- NULL
  ranger.arguments$dependent.variable.name <- NULL
  ranger.arguments$predictor.variable.names <- NULL
  ranger.arguments$num.threads <- 1
  # Reuse a user-supplied cluster, or create one here.  stop.cluster records
  # whether this function owns the cluster and therefore must stop it.
  if(!is.null(cluster)){
    n.cores <- NULL
    stop.cluster <- FALSE
  } else {
    cluster <- parallel::makeCluster(
      n.cores,
      type = "PSOCK"
    )
    stop.cluster <- TRUE
  }
  doParallel::registerDoParallel(cl = cluster)
  # Dummy binding so R CMD check does not flag the foreach iterator symbol.
  spatial.predictors.i <- NULL
  # One task per candidate set size i: fit a model with the first i ranked
  # spatial predictors and record residual autocorrelation and R^2.
  optimization.df <- foreach::foreach(
    spatial.predictors.i = seq(1, length(spatial.predictors.ranking)),
    .combine = "rbind",
    .verbose = verbose
  ) %dopar% {
    spatial.predictors.selected.names.i <- spatial.predictors.ranking[1:spatial.predictors.i]
    # Bind the selected spatial predictor columns to the training data.
    data.i <- data.frame(
      data,
      spatial.predictors.df[, spatial.predictors.selected.names.i]
    )
    colnames(data.i)[(ncol(data)+1):ncol(data.i)] <- spatial.predictors.selected.names.i
    predictor.variable.names.i <- c(
      predictor.variable.names,
      spatial.predictors.selected.names.i
    )
    # seed = iteration index keeps each fit reproducible.
    m.i <- spatialRF::rf(
      data = data.i,
      dependent.variable.name = dependent.variable.name,
      predictor.variable.names = predictor.variable.names.i,
      distance.matrix = distance.matrix,
      distance.thresholds = distance.thresholds,
      ranger.arguments = ranger.arguments,
      seed = spatial.predictors.i,
      verbose = FALSE
    )
    # Summary row: max residual Moran's I, its p-value, and OOB R^2.
    out.df <- data.frame(
      spatial.predictor.index = spatial.predictors.i,
      moran.i = m.i$residuals$autocorrelation$max.moran,
      p.value = m.i$residuals$autocorrelation$per.distance[
        which.max(m.i$residuals$autocorrelation$per.distance$moran.i),
        "p.value"
      ],
      r.squared = m.i$performance$r.squared.oob
    )
    return(out.df)
  }
  # Fix: stop the cluster exactly when it was created inside this function.
  # The stop.cluster flag was previously computed but never used; the old
  # condition (!is.null(n.cores)) was equivalent only by coincidence.
  if(stop.cluster){
    parallel::stopCluster(cl = cluster)
  }
  # Assemble the optimization table; the penalization grows linearly with
  # the number of spatial predictors added.
  optimization.df <- data.frame(
    spatial.predictor.name = spatial.predictors.ranking,
    spatial.predictor.index = optimization.df$spatial.predictor.index,
    moran.i = optimization.df$moran.i,
    p.value = optimization.df$p.value,
    p.value.binary = ifelse(optimization.df$p.value >= 0.05, 1, 0),
    r.squared = optimization.df$r.squared,
    penalization.per.variable = (1/nrow(optimization.df)) * optimization.df$spatial.predictor.index
  )
  # Weighted optimization criterion; the set size maximizing it wins.
  optimization.df$optimization <- optimization_function(
    x = optimization.df,
    weight.r.squared = weight.r.squared,
    weight.penalization.n.predictors = weight.penalization.n.predictors
  )
  optimized.index <- which.max(optimization.df$optimization)
  best.spatial.predictors <- spatial.predictors.ranking[1:optimized.index]
  # Mark the selected predictors in the optimization table.
  optimization.df$selected <- FALSE
  optimization.df[optimization.df$spatial.predictor.name %in% best.spatial.predictors, "selected"] <- TRUE
  out.list <- list()
  out.list$optimization <- optimization.df
  out.list$best.spatial.predictors <- best.spatial.predictors
  out.list
}
# Compute a sample-size "adapted" significance level.  The nominal alpha is
# rescaled by the ratio sqrt(ref.n*(log(ref.n)+q)) / sqrt(n*(log(n)+q)),
# where q = qchisq(1-alpha, 1), ref.n is a reference sample size derived from
# a power analysis (via the pwr package) and n is the observed sample size.
# The matching critical value for the chosen test is computed at the adapted
# alpha.
#
# Arguments:
#   test   - which test family to adapt alpha for (required)
#   ref.n  - reference sample size; computed from pwr::pwr.*.test if NULL
#   n      - experimental (observed) sample size
#   alpha  - nominal significance level
#   power  - desired power used when deriving ref.n
#   efxize - conventional effect size label ("small"/"medium"/"large");
#            mapped to numeric values that differ per test family
#   groups - number of groups (ANOVA only)
#   df     - degrees of freedom (chi-square only)
#
# Returns a list with the adapted alpha, critical value, original alpha,
# reference and experimental n, df (where applicable), power, effect size
# and the test name.
adapt.a <- function (test = c("anova","chisq","cor","one.sample","two.sample","paired"),
                     ref.n = NULL, n = NULL, alpha = .05, power = .80,
                     efxize = c("small","medium","large"), groups = NULL, df = NULL)
{
  # test is mandatory; match.arg resolves partial matches.
  if(missing(test))
  {stop("test must be selected")
  }else{test <- match.arg(test)}
  # Default to a medium effect size when none is supplied.
  if(missing(efxize))
  {
    efxize <- "medium"
    message("No effect size selected. Medium effect size computed.")
  }else{efxize <- efxize}  # no-op branch: keep the user-supplied label
  if(test=="anova")
  {
    if(is.null(groups))
    {stop("ANOVA is selected. Number of groups must be set")}
    # Cohen's f conventions for ANOVA.
    if(efxize=="small")
    {efxize <- .10
    }else if(efxize=="medium")
    {efxize <- .25
    }else if(efxize=="large")
    {efxize <- .40}
    if(!is.numeric(efxize))
    {stop("Effect size must be numeric")}
    # Derive the reference n (per group) from the requested power.
    if(is.null(ref.n))
    {
      ref.n <- pwr::pwr.anova.test(f=efxize,power=power,sig.level=alpha,k=groups)$n
      message("ref.n is observations per group")
    }
    # Numerator of the adaptive-alpha ratio, evaluated at ref.n.
    num <- sqrt(ref.n*(log(ref.n)+qchisq((1-alpha),1)))
  }else if(test=="chisq")
  {
    if(is.null(df))
    {stop("Chi-square is selected. Degrees of freedom must be set")}
    # Cohen's w conventions for chi-square.
    if(efxize=="small")
    {efxize <- .10
    }else if(efxize=="medium")
    {efxize <- .30
    }else if(efxize=="large")
    {efxize <- .50}
    if(!is.numeric(efxize))
    {stop("Effect size must be numeric")}
    if(is.null(ref.n))
    {ref.n <- pwr::pwr.chisq.test(w=efxize,df=df,power=power,sig.level=alpha)$N}
    num <- sqrt(ref.n*(log(ref.n)+qchisq((1-alpha),1)))
  }else if(test=="cor")
  {
    # Correlation r conventions.
    if(efxize=="small")
    {efxize <- .10
    }else if(efxize=="medium")
    {efxize <- .30
    }else if(efxize=="large")
    {efxize <- .50}
    if(!is.numeric(efxize))
    {stop("Effect size must be numeric")}
    if(is.null(ref.n))
    {ref.n <- pwr::pwr.r.test(r=efxize,power=power,sig.level=alpha)$n}
    num <- sqrt(ref.n*(log(ref.n)+qchisq((1-alpha),1)))
  }else if(any(c("one.sample","two.sample","paired") %in% test))
  {
    # Cohen's d conventions for t-tests; `test` doubles as the pwr "type".
    if(efxize=="small")
    {efxize <- .20
    }else if(efxize=="medium")
    {efxize <- .50
    }else if(efxize=="large")
    {efxize <- .80}
    if(!is.numeric(efxize))
    {stop("Effect size must be numeric")}
    if(is.null(ref.n))
    {ref.n <- pwr::pwr.t.test(d=efxize,power=power,sig.level=alpha,type=test)$n}
    num <- sqrt(ref.n*(log(ref.n)+qchisq((1-alpha),1)))
  }else{stop("test does not exist")}  # unreachable after match.arg, kept as a guard
  # Denominator of the adaptive ratio, evaluated at the observed n;
  # adj.a is the adapted significance level.
  denom <- (sqrt(n*(log(n)+qchisq((1-alpha),1))))
  adj.a <- alpha*num/denom
  # Translate the adapted alpha into the test's critical value.
  if(test=="anova")
  {
    # Critical F at df1 = groups-1, df2 = n-groups.
    critical.f <- function (groups, n, a)
    {
      df1 <- groups - 1
      df2 <- n - groups
      cvf <- qf(a, df1, df2, lower.tail = FALSE)
      return(cvf)
    }
    cv <- critical.f(groups, n, adj.a)
  }else if(test=="chisq")
  {
    # Critical chi-square at the supplied df.
    critical.chi <- function (df, a)
    {
      cvchi <- qchisq(a, df, lower.tail = FALSE)
      return(cvchi)
    }
    cv <- critical.chi(df, adj.a)
  }else if(test=="cor")
  {
    # Critical |r| derived from the two-tailed critical t with n-2 df.
    critical.r <- function (n, a)
    {
      df <- n - 2
      critical.t <- qt( a/2, df, lower.tail = FALSE )
      cvr <- sqrt( (critical.t^2) / ( (critical.t^2) + df ) )
      return(cvr)
    }
    cv <- critical.r(n, adj.a)
  }else if(any(c("one.sample","two.sample","paired") %in% test))
  {
    # Two-tailed critical t with n-2 df.
    critical.t <- function (n, a)
    {
      df <- n - 2
      cvt <- qt( a/2, df, lower.tail = FALSE )
      return(cvt)
    }
    cv <- critical.t(n, adj.a)
  }
  # Assemble the result list.
  output <- list()
  output$adapt.a <- adj.a
  output$crit.value <- cv
  output$orig.a <- alpha
  output$ref.n <- ref.n
  output$exp.n <- n
  if(test=="anova")
  {
    output$groups <- groups
    output$df <- c((groups - 1), (n - groups))
  }
  if(test=="chisq")
  {output$df <- df}
  output$power <- power
  output$efxize <- efxize
  output$test <- test
  return(output)
}
# summary method for penalized Cox models (coxph with penalized terms such as
# frailty or pspline).  Builds a per-term coefficient/test table, delegating
# to each penalized term's own print function where one exists, and attaches
# confidence intervals plus likelihood-ratio and Wald tests.
#
# Arguments:
#   conf.int - confidence level for hazard-ratio intervals (numeric; also
#              acts as an on/off switch when used in `if` below)
#   scale    - multiplier applied to coefficients/SEs before exponentiating
#   terms    - if TRUE, report a single chi-square test per multi-df term
#   maxlabel - truncation width for row labels
summary.coxph.penal <- function(object, conf.int = 0.95, scale=1,
                                 terms=FALSE, maxlabel=25, ...) {
    beta <- object$coefficients
    # A model with neither coefficients nor frailty terms has nothing to show.
    if (length(beta)==0 && length(object$frail)==0)
        stop("Penalized summary function can't be used for a null model")
    if (length(beta) > 0) {
        nacoef <- !(is.na(beta))
        beta2 <- beta[nacoef]
        if(is.null(beta2) | is.null(object$var))
            stop("Input is not valid")
        se <- sqrt(diag(object$var))
    }
    # pterms flags each model term: 0 = ordinary, >0 = penalized
    # (2 appears to mark frailty/sparse terms -- see the frail branch below).
    pterms <- object$pterms
    nterms <- length(pterms)
    npenal <- sum(pterms>0)
    # print.map[i] gives the index into object$printfun for term i,
    # or 0 if that term has no custom print function.
    print.map <- rep(0,nterms)
    if (!is.null(object$printfun)) {
        temp <- unlist(lapply(object$printfun, is.null))
        print.map[pterms>0] <- (1:npenal) * (!temp)
    }
    print1 <- NULL   # accumulated coefficient table rows
    pname1 <- NULL   # accumulated row labels
    # assign2 (if present) maps terms to coefficient positions; otherwise
    # drop the intercept entry of the standard assign list.
    if (is.null(object$assign2)) alist <- object$assign[-1]
    else alist <- object$assign2
    print2 <- NULL   # extra per-term history lines from print functions
    for (i in 1:nterms) {
        kk <- alist[[i]]  # coefficient indices belonging to term i
        if (print.map[i] >0) {
            # Penalized term with its own print function.
            j <- print.map[i]
            if (pterms[i]==2)
                # Frailty-style term: pass the frailty estimates and their
                # variance (third argument deliberately empty).
                temp <- (object$printfun[[j]])(object$frail, object$fvar, ,
                                               object$df[i], object$history[[j]])
            else temp <- (object$printfun[[j]])(beta[kk], object$var[kk,kk],
                                                object$var2[kk,kk],
                                                object$df[i], object$history[[j]])
            print1 <- rbind(print1, temp$coef)
            # Label rows; multi-row results get "term, rowname" labels.
            if (is.matrix(temp$coef)) {
                xx <- dimnames(temp$coef)[[1]]
                if (is.null(xx))
                    xx <- rep(names(pterms)[i], nrow(temp$coef))
                else xx <- paste(names(pterms)[i], xx, sep=', ')
                pname1 <- c(pname1, xx)
            }
            else pname1 <- c(pname1, names(pterms)[i])
            print2 <- c(print2, temp$history)
        }
        else if (terms && length(kk)>1) {
            # Collapse a multi-coefficient term into a single Wald chi-square.
            pname1 <- c(pname1, names(pterms)[i])
            temp <- coxph.wtest(object$var[kk,kk], beta[kk])$test
            print1 <- rbind(print1, c(NA, NA, NA,
                                      temp, object$df[i], pchisq(temp, 1, lower.tail=FALSE)))
        }
        else {
            # Ordinary coefficients: per-coefficient Wald chi-square,
            # with both penalized (var) and unpenalized (var2) SEs.
            pname1 <- c(pname1, names(beta)[kk])
            tempe<- (diag(object$var))[kk]
            temp <- beta[kk]^2/ tempe
            print1 <- rbind(print1, cbind(beta[kk], sqrt(tempe),
                                          sqrt((diag(object$var2))[kk]),
                                          temp, 1, pchisq(temp, 1, lower.tail=FALSE)))
        }
    }
    dimnames(print1) <- list(substring(pname1,1, maxlabel),
                             c("coef","se(coef)", "se2", "Chisq","DF","p"))
    # Carry over the standard coxph summary components that exist.
    rval <- object[match(c("call", "fail", "na.action", "n", "nevent", "loglik",
                           "iter", "df"), names(object), nomatch=0)]
    rval$coefficients <- print1
    rval$print2 <- print2
    # NOTE(review): `conf.int &` relies on numeric truthiness and elementwise
    # `&`; `conf.int > 0 &&` would be the explicit form -- behavior unchanged.
    if(conf.int & length(beta) >0 ) {
        z <- qnorm((1 + conf.int)/2, 0, 1)
        beta <- beta * scale
        se <- se * scale
        # Hazard-ratio table: exp(coef), its reciprocal, and CI bounds.
        tmp <- cbind(exp(beta), exp(-beta), exp(beta - z * se),
                     exp(beta + z * se))
        dimnames(tmp) <- list(substring(names(beta),1, maxlabel),
                              c("exp(coef)", "exp(-coef)",
                                paste("lower .", round(100 * conf.int, 2), sep = ""),
                                paste("upper .", round(100 * conf.int, 2), sep = "")))
        rval$conf.int <- tmp
    }
    # Likelihood-ratio test on the total effective degrees of freedom.
    df <- sum(object$df)
    logtest <- -2 * (object$loglik[1] - object$loglik[2])
    rval$logtest <- c(test = logtest, df=df,
                      pvalue= pchisq(logtest,df, lower.tail=FALSE))
    if (!is.null(object$waldtest))
        rval$waldtest <- c(test= object$wald.test, df=df,
                           pvalue = pchisq(object$wald.test, df, lower.tail=FALSE))
    if (!is.null(object$concordance)) {
        ctemp <- object$concordance
        rval$concordance <- ctemp[c("concordance", "std")]
        names(rval$concordance) <- c("C", "se(C)")
    }
    class(rval) <- "summary.coxph.penal"
    rval
}
# Path of the RDS file holding the prepared client environment; written by
# .onLoad() via `<<-` so other functions in the package can read it.
env_file <- NULL

# Package load hook.  Prepares the environment that asciicast subprocesses
# need: sources the bundled client.R into a fresh environment, locates the
# compiled processx "client" shared library (trying the arch-specific libs
# dir, then plain libs, then src for in-development installs), serializes
# everything to a temp RDS file, and registers the lazyrmd onload hook.
.onLoad <- function(libname, pkgname) {
  env <- new.env(parent = emptyenv())
  # Client code runs in an environment chained to baseenv(), so it cannot
  # accidentally see attached packages.
  env$`__asciicast_data__` <- new.env(parent = baseenv())
  client_file <- system.file("client.R", package = "asciicast")
  if (client_file == "") stop("Cannot find client R file")
  source(
    client_file, local = env$`__asciicast_data__`,
    keep.source = FALSE)
  arch <- .Platform$r_arch
  ext <- .Platform$dynlib.ext
  # Note: the shared library is looked up in the *processx* package, which
  # ships the client DLL that asciicast reuses.
  sofile <- system.file(
    "libs", arch, paste0("client", ext),
    package = "processx")
  # Fallback: builds without an arch-specific subdirectory.
  if (sofile == "") {
    sofile <- system.file(
      "libs", paste0("client", ext),
      package = "processx")
  }
  # Fallback: processx loaded from a source tree (e.g. devtools::load_all).
  if (sofile == "") {
    sofile <- system.file(
      "src", paste0("client", ext),
      package = "processx")
  }
  if (sofile == "") stop("Cannot find client file")
  env$`__asciicast_data__`$sofile <- sofile
  # Persist the environment for subprocesses; version = 2 keeps the file
  # readable by older R, and compression is skipped for faster reads.
  env_file <<- tempfile()
  saveRDS(env, file = env_file, version = 2, compress = FALSE)
  # Register knitr/rmarkdown integration; recording is only advertised on
  # CI when the platform supports it, and disabled on CRAN.
  lazyrmd$onload_hook(
    local = FALSE,
    ci = function() is_recording_supported(),
    cran = "no-code"
  )
  invisible()
}
# Compare centile curves of one or more fitted gamlss models.  With extra
# models passed via `...`, all models' centile curves are drawn on one plot
# (different line types per model) and, for each model and each requested
# centile, the empirical percentage of observations below the fitted curve
# is printed.  With a single model it draws a basic centile plot.
#
# Key arguments:
#   obj, ...  - fitted gamlss model(s); the first model supplies y and limits
#   xvar      - explanatory variable for the x axis (recovered from the model
#               call if missing in the multi-model branch)
#   cent      - centiles to draw, in percent
#   no.data   - if TRUE, suppress the data points
#   plot      - if FALSE, only print the empirical coverage percentages
centiles.com <- function( obj,
                          ...,
                          xvar,
                          cent = c(.4,10,50,90,99.6),
                          legend = TRUE,
                          ylab = "y",
                          xlab = "x",
                          xleg = min(xvar),
                          yleg = max(obj$y),
                          xlim = range(xvar),
                          ylim = NULL,
                          no.data = FALSE,
                          color = TRUE,
                          main = NULL,
                          plot = TRUE
)
{
  # ---- multi-model branch: obj plus at least one model in `...` ----
  if (length(list(...)))
  {
    object <- list(obj, ...)
    nobj <- length(object)
    isgamlss <- unlist(lapply(object, is.gamlss))
    if (!any(isgamlss)) stop("some of the objects are not gamlss")
    # Recover xvar from the first model's formula/data when not supplied.
    if (missing(xvar))
    {
      xvar <- all.vars(obj$call$formula)[[2]]
      if (any(grepl("data", names(obj$call))))
      {
        DaTa <- eval(obj$call[["data"]])
        xvar <- get(xvar, envir=as.environment(DaTa))
      }
    }
    # If xvar was supplied but cannot be evaluated directly, look it up
    # by name inside the model's data.
    xvarO <- deparse(substitute(xvar))
    xvar <- try(xvar, silent = TRUE)
    if (any(class(xvar)%in%"try-error"))
    {
      DaTa <- eval(obj$call[["data"]])
      xvar <- get(xvarO, envir=as.environment(DaTa))
    }
    # Per-model family name, quantile function name ("q" + family), and
    # number of distribution parameters (mu/sigma/nu/tau).
    fname <- lapply(object, function(x) x$family[1])
    qfun <- lapply(fname, function(x) paste("q",x,sep=""))
    lenpar <- lapply(object, function(x) length(x$parameters) )
    # Sort by xvar so lines() draws curves left to right.
    oxvar <- xvar[order(xvar)]
    oyvar <- object[[1]]$y[order(xvar)]
    if (is.null(ylim)) ylim <- range( object[[1]]$y)
    Title <- if (is.null(main)) paste("Centile curves") else main
    if (plot)
    {
      if (no.data==FALSE) type<-"p" else type<-"n"
      plot(oxvar, oyvar, type=type, pch = 15, cex = 0.5, col = gray(0.7),
           xlab= xlab, ylab=ylab, xlim=xlim, ylim=ylim)
      title(Title)
    }
    ltype <- 0
    # One line type per model; colors cycle per centile within a model.
    for (iii in 1:nobj)
    {
      cat("******** Model", iii,"******** \n" )
      lpar <- lenpar[[iii]]
      if (color==TRUE) col <- 3 else col <- 1
      ltype <- ltype+1
      ii <- 0
      per <- rep(0,length(cent))
      for(var in cent)
      {
        # Build a call to the family's quantile function with as many
        # fitted parameter vectors as the distribution has.
        if(lpar==1)
        {
          newcall <-call(qfun[[iii]],var/100,
                         mu=fitted(object[[iii]],"mu")[order(xvar)])
        }
        else if(lpar==2)
        {
          newcall <-call(qfun[[iii]],var/100,
                         mu=fitted(object[[iii]],"mu")[order(xvar)],
                         sigma=fitted(object[[iii]],"sigma")[order(xvar)])
        }
        else if(lpar==3)
        {
          newcall <-call(qfun[[iii]],var/100,
                         mu=fitted(object[[iii]],"mu")[order(xvar)],
                         sigma=fitted(object[[iii]],"sigma")[order(xvar)],
                         nu=fitted(object[[iii]],"nu")[order(xvar)])
        }
        else
        {
          newcall <-call(qfun[[iii]],var/100,
                         mu=fitted(object[[iii]],"mu")[order(xvar)],
                         sigma=fitted(object[[iii]],"sigma")[order(xvar)],
                         nu=fitted(object[[iii]],"nu")[order(xvar)],
                         tau=fitted(object[[iii]],"tau")[order(xvar)])
        }
        ii <- ii+1
        ll<- eval(newcall)  # fitted centile curve values
        if (plot)
        {
          lines(oxvar,ll,col=col, lty=ltype)
          # NOTE(review): the legend is redrawn on every centile iteration;
          # harmless visually but redundant.
          if (color==TRUE) colleg <- c(3,4,5,6,7,8,9,10) else colleg <- c(1)
          if (legend==TRUE) legend(list(x=xleg,y=yleg), legend = cent,
                                   col=colleg, lty=1, ncol=1, bg="white")
        }
        if (color==TRUE) col <- col+1
        # Empirical percentage of observations falling below this centile.
        per[ii]<-(1-sum(oyvar>ll)/length(oyvar))*100
        cat("% of cases below ", var,"centile is ", per[ii], "\n" )
      }
    }
  }
  # ---- single-model branch ----
  else
  {
    if (!is.gamlss(obj)) stop(paste("This is not an gamlss object", "\n", ""))
    if(is.null(xvar)) stop(paste("The xvar argument is not specified", "\n", ""))
    fname <- obj$family[1]
    qfun <- paste("q",fname,sep="")
    Title <- paste("Centile curves using",fname, sep=" ")
    oxvar <- xvar[order(xvar)]
    oyvar <- obj$y[order(xvar)]
    if (plot)
    {
      if (no.data==FALSE) type <- "p" else type <- "n"
      # NOTE(review): `ylim` is passed positionally here (not `ylim=ylim`
      # as in the other branch) -- behavior preserved; verify intent.
      plot(oxvar, oyvar, type = type , pch = 15, cex = 0.5, col = gray(0.7),
           xlab = xlab, ylab = ylab ,xlim = xlim, ylim, ...)
      title(Title)
    }
    if (color==TRUE) col <- 3 else col <- 1
    lpar <- length(obj$parameters)
    ii <- 0
    per <- rep(0,length(cent))
    for(var in cent)
    {
      # Same quantile-call construction as above, for a single model.
      if(lpar==1)
      {
        newcall <-call(qfun,var/100,
                       mu=fitted(obj,"mu")[order(xvar)])
      }
      else if(lpar==2)
      {
        newcall <-call(qfun,var/100,
                       mu=fitted(obj,"mu")[order(xvar)],
                       sigma=fitted(obj,"sigma")[order(xvar)])
      }
      else if(lpar==3)
      {
        newcall <-call(qfun,var/100,
                       mu=fitted(obj,"mu")[order(xvar)],
                       sigma=fitted(obj,"sigma")[order(xvar)],
                       nu=fitted(obj,"nu")[order(xvar)])
      }
      else
      {
        newcall <-call(qfun,var/100,
                       mu=fitted(obj,"mu")[order(xvar)],
                       sigma=fitted(obj,"sigma")[order(xvar)],
                       nu=fitted(obj,"nu")[order(xvar)],
                       tau=fitted(obj,"tau")[order(xvar)])
      }
      ii <- ii+1
      ll<- eval(newcall)
      if (plot)
      {
        lines(oxvar,ll,col=col, lty=1)
        if (color==TRUE) colleg <- c(3,4,5,6,7,8,9,10) else colleg <- c(1)
        if (legend==TRUE) legend(list(x=xleg,y=yleg), legend = cent,
                                 col=colleg, lty=1, ncol=1, bg="white")
      }
      if (color==TRUE) col <- col+1
      per[ii]<-(1-sum(oyvar>ll)/length(oyvar))*100
      cat("% of cases below ", var,"centile is ", per[ii], "\n" )
    }
  }
}
# Workhorse of the `import` package: implements import::from() (exported
# objects only) and import:::from() (including unexported objects).  Imports
# named objects from a package or a script file into a destination -- either
# an attached search-path entry (default "imports") or, with the `{env}`
# brace notation, a caller-supplied environment.
#
# Arguments (selection):
#   .from            - package name or script path (symbol unless
#                      .character_only = TRUE)
#   ...              - objects to import, optionally renamed (new = old)
#   .into            - destination: search-path name or `{environment()}`
#   .all / .except   - import everything exported, minus exceptions
#   .chdir           - change directory while sourcing a script
from <- function(.from, ..., .into = "imports",
                 .library = .libPaths()[1L], .directory=".",
                 .all=(length(.except) > 0), .except=character(),
                 .chdir = TRUE, .character_only = FALSE)
{
  # Inspect how we were invoked: import::from vs import:::from.  The
  # triple-colon form also imports unexported objects.
  cl <- match.call()[[1L]]
  exports_only <- identical(cl, call("::", quote(import), quote(from)))
  if (!exports_only && !identical(cl, call(":::", quote(import), quote(from))))
    stop("Use `import::` or `import:::` when importing objects.", call. = FALSE)
  if (missing(.from))
    stop("Argument `.from` must be specified for import::from.", call. = FALSE)
  if (identical(cl, call(":::", quote(import), quote(from))) &&
      (.all!=FALSE || length(.except)!=0))
    stop("`import:::` must not be used in conjunction with .all or .except", call. = FALSE)
  # An empty .into string means "the caller's environment" (brace notation).
  if (!missing(.into) && is.character(.into) && .into == "")
    .into = quote({environment()})
  # Guard against import::from being used recursively inside a sourced
  # module; redirect into the local environment and warn.
  if (detect_bad_recursion(.traceback(0))) {
    .into = quote({environment()})
    warning(paste0("import::from() or import::into() was used recursively, to import \n",
                   "        a module from within a module. Please rely on import::here() \n",
                   "        when using the import package in this way.\n",
                   "        See vignette(import) for further details."))
  }
  # Named list of symbols to import (possibly renamed via `new = old`).
  symbols <- symbol_list(..., .character_only = .character_only, .all = .all)
  from <-
    `if`(isTRUE(.character_only), .from, symbol_as_character(substitute(.from)))
  # `{env}` notation: .into was written as `{expr}` and evaluates to an
  # environment in the caller's frame.
  into_expr <- substitute(.into)
  `{env}` <- identical(into_expr[[1]], quote(`{`))
  if (`{env}`) {
    into <- eval.parent(.into)
    if (!is.environment(into))
      stop("into is not an environment, but {env} notation was used.", call. = FALSE)
  } else {
    into <- symbol_as_character(into_expr)
  }
  # Attach a named environment on the search path only when we are not
  # inside a package and not using the brace notation.
  use_into <- !exists(".packageName", parent.frame(), inherits = TRUE) &&
    !`{env}` &&
    !into == ""
  into_exists <- !`{env}` && (into %in% search())
  # Alias attach() so R CMD check does not complain about its direct use.
  make_attach <- attach
  if (use_into && !into_exists)
    make_attach(NULL, 2L, name = into)
  # ---- source of the objects: script file or installed package ----
  from_is_script <- is_script(from, .directory)
  if (from_is_script) {
    # Scripts are sourced once into a cached environment in `scripts`,
    # and re-sourced when the file's mtime is newer than the cache.
    from_created <- from %in% ls(scripts, all.names = TRUE)
    if (!from_created || modified(from, .directory) > modified(scripts[[from]])) {
      attached <- search()
      if (!from_created)
        assign(from, new.env(parent = parent.frame()), scripts)
      modified(scripts[[from]]) <- modified(from, .directory)
      scripts[[from]][[".packageName"]] <- from
      # Detect library() calls inside the sourced module, which leak
      # attachments onto the search path.
      packages_before <- .packages()
      suppress_output(sys.source(file_path(.directory, from), scripts[[from]], chdir = .chdir))
      packages_after <- .packages()
      if ( !identical(packages_before,packages_after) ) {
        warning("A package was loaded using 'library(...)' from within an import::*() module.\n",
                "        Please rely on import::here() to load objects from packages within an \n",
                "        import::*() module. See vignette(import) for further details." )
      }
      # Undo any search-path attachments made while sourcing the script.
      on.exit({
        to_deattach <- Filter(function(.) !. %in% attached, search())
        for (d in to_deattach)
          detach(d, character.only = TRUE)
      })
    }
    pkg <- scripts[[from]]
    pkg_name <- from
    all_objects <- ls(scripts[[from]])
  } else {
    # Package source: load (not attach) its namespace.
    spec <- package_specs(from)
    all_objects <- getNamespaceExports(spec$pkg)
    pkg <- tryCatch(
      loadNamespace(spec$pkg, lib.loc = .library,
                    versionCheck = spec$version_check),
      error = function(e) stop(conditionMessage(e), call. = FALSE)
    )
    pkg_name <- spec$pkg
  }
  # .all: append every available object, keeping explicit renames first.
  if (.all) {
    all_objects <- setdiff(all_objects, "__last_modified__")
    names(all_objects) <- all_objects
    symbols <- c(symbols,all_objects)
    symbols <- symbols[!duplicated(symbols)]
  }
  if (length(.except)>0) {
    symbols <- symbols[!(symbols %in% .except)]
  }
  # Assign each requested object (under its possibly-new name) into the
  # destination, and record a `pkg::name` alias for the `?` redirect.
  for (s in seq_along(symbols)) {
    import_call <-
      make_import_call(
        list(new = names(symbols)[s],
             nm = symbols[s],
             ns = pkg,
             inh = !exports_only,
             pos = if (use_into || `{env}`) into else -1),
        exports_only && !from_is_script)
    if (!from_is_script)
      import_aliases[[names(symbols)[s]]] <-
        call("::", as.symbol(pkg_name), as.symbol(symbols[s]))
    tryCatch(eval.parent(import_call),
             error = function(e) stop(e$message, call. = FALSE))
  }
  # Install a `?` redirect into the destination so help() finds the
  # original package's documentation for imported names.
  if (!`{env}` && into != "" && !exists("?", into, mode = "function", inherits = FALSE)) {
    assign("?", `?redirect`, into)
  }
  invisible(as.environment(into))
}
# Construct the weighted fixed-effects matrix wU = sqrt(weights) * U for an
# LKrig object.  A user-supplied U matrix takes precedence; otherwise U is
# built by evaluating the fixed-part function from the LKinfo component.
# Returns NULL when the model has no fixed component.
LKrigMakewU <- function(object, verbose = FALSE) {
  info <- object$LKinfo
  rootWeights <- sqrt(object$weights)
  if (!is.null(object$U)) {
    # Explicit U provided with the object -- just apply the weights.
    wU <- rootWeights * object$U
  } else if (!is.null(info$fixedFunction)) {
    # Evaluate the fixed-part basis function at the object's locations,
    # forwarding any extra arguments stored in LKinfo.
    fixedArgs <- c(list(x = object$x,
                        Z = object$Z,
                        distance.type = info$distance.type),
                   info$fixedFunctionArgs)
    wU <- rootWeights * do.call(info$fixedFunction, fixedArgs)
  } else {
    # No fixed component in this model.
    wU <- NULL
  }
  if (verbose) {
    cat("dim wU:", dim(wU), fill = TRUE)
  }
  return(wU)
}
# Plot the sorted per-project mean monthly donation (log y-axis) from the
# Overney et al. 2020 donations dataset.
source("ESEUR_config.r")

library("plyr")

# One row per project/period with earnings after adoption.
donations <- read.csv(paste0(ESEUR_dir, "ecosystems/overney20donations.csv.xz"),
                      as.is = TRUE)

# Mean earning after adoption for each project; ddply names the result V1.
proj_mean <- ddply(donations, .(project_id),
                   function(df) mean(df$earning_after_adoption))

plot(sort(proj_mean$V1), log = "y", col = point_col,
     xaxs = "i",
     xlab = "Project", ylab = "Monthly donation (dollars)\n")
context("metadata cache 2/3")

# check_update() should refresh both the primary and replica caches from the
# configured CRAN repo, but must NOT re-download when the in-memory data is
# already current -- verified by corrupting the on-disk files and checking
# they are left untouched.
test_that("check_update", {
  # Network-dependent test; skip offline and on CRAN.
  skip_if_offline()
  skip_on_cran()

  withr::local_options(
    list(repos = c(CRAN = "https://cloud.r-project.org"))
  )

  # Fresh primary and replica cache directories, cleaned up afterwards.
  dir.create(pri <- fs::path_norm(tempfile()))
  on.exit(unlink(pri, recursive = TRUE), add = TRUE)
  dir.create(rep <- fs::path_norm(tempfile()))
  on.exit(unlink(rep, recursive = TRUE), add = TRUE)
  cmc <- cranlike_metadata_cache$new(pri, rep, "source", bioc = FALSE)

  data <- cmc$check_update()
  check_packages_data(data)

  # The returned data is also stored in the object, with a fresh timestamp.
  expect_identical(get_private(cmc)$data, data)
  expect_true(Sys.time() - get_private(cmc)$data_time < oneminute())

  # Replica cache files were (re)written just now.
  rep_files <- get_private(cmc)$get_cache_files("replica")
  expect_true(file.exists(rep_files$rds))
  expect_true(Sys.time() - file_get_time(rep_files$rds) < oneminute())

  # Primary cache files were (re)written just now.
  pri_files <- get_private(cmc)$get_cache_files("primary")
  expect_true(file.exists(pri_files$rds))
  expect_true(Sys.time() - file_get_time(pri_files$rds) < oneminute())

  # PACKAGES files and their etags exist in both caches.
  expect_true(all(file.exists(rep_files$pkgs$path)))
  expect_true(all(file.exists(rep_files$pkgs$etag)))
  expect_true(all(file.exists(pri_files$pkgs$path)))
  expect_true(all(file.exists(pri_files$pkgs$etag)))

  # Corrupt the cache files on disk; a second check_update() must trust the
  # in-memory data and leave the corrupted files alone.
  cat("foobar\n", file = rep_files$pkgs$path[1])
  cat("foobar2\n", file = rep_files$rds)
  cat("foobar\n", file = pri_files$pkgs$path[1])
  cat("foobar2\n", file = pri_files$rds)
  data2 <- cmc$check_update()
  expect_identical(data, data2)
  expect_equal(read_lines(rep_files$pkgs$path[1]), "foobar")

  # cleanup(force = TRUE) removes everything from both caches.
  cmc$cleanup(force = TRUE)
  expect_false(file.exists(pri_files$rds))
  expect_false(any(file.exists(pri_files$pkgs$path)))
  expect_false(file.exists(rep_files$rds))
  expect_false(any(file.exists(rep_files$pkgs$path)))
})
# Calling deps() on a cache that has PACKAGES data but no parsed RDS should
# trigger an automatic update of both caches.
test_that("deps will auto-update as needed", {
  # Network-dependent test; skip offline and on CRAN.
  skip_if_offline()
  skip_on_cran()

  withr::local_options(list(repos = NULL))

  # Fresh primary and replica cache directories, cleaned up afterwards.
  dir.create(pri <- fs::path_norm(tempfile()))
  on.exit(unlink(pri, recursive = TRUE), add = TRUE)
  dir.create(rep <- fs::path_norm(tempfile()))
  on.exit(unlink(rep, recursive = TRUE), add = TRUE)
  cmc <- cranlike_metadata_cache$new(pri, rep, "source", bioc = FALSE)

  # Seed the primary cache with a fixture PACKAGES file only (no RDS).
  pri_files <- get_private(cmc)$get_cache_files("primary")
  mkdirp(dirname(pri_files$pkgs$path))
  fs::file_copy(get_fixture("PACKAGES-src.gz"), pri_files$pkgs$path)

  cmc$deps("A3", recursive = FALSE)

  # deps() must have populated the in-memory data with a fresh timestamp...
  expect_false(is.null(get_private(cmc)$data))
  expect_true(Sys.time() - get_private(cmc)$data_time < oneminute())

  # ...and written fresh RDS files to both the replica and primary caches.
  rep_files <- get_private(cmc)$get_cache_files("replica")
  expect_true(file.exists(rep_files$rds))
  expect_true(Sys.time() - file_get_time(rep_files$rds) < oneminute())
  pri_files <- get_private(cmc)$get_cache_files("primary")
  expect_true(file.exists(pri_files$rds))
  expect_true(Sys.time() - file_get_time(pri_files$rds) < oneminute())
  expect_true(all(file.exists(rep_files$pkgs$path)))
  expect_true(all(file.exists(pri_files$pkgs$path)))
})
# deps() and the lower-level extract_deps() should agree, correctly
# partition dependencies into known packages / base packages / unknown
# packages, and honor the recursive and base-package flags.
test_that("deps, extract_deps", {
  # Network-dependent test; skip offline and on CRAN.
  skip_if_offline()
  skip_on_cran()

  withr::local_options(list(repos = NULL))

  # Fresh caches seeded from a fixture PACKAGES file, backdated half a day
  # so the cache is considered valid (no network refresh).
  dir.create(pri <- fs::path_norm(tempfile()))
  on.exit(unlink(pri, recursive = TRUE), add = TRUE)
  dir.create(rep <- fs::path_norm(tempfile()))
  on.exit(unlink(rep, recursive = TRUE), add = TRUE)
  cmc <- cranlike_metadata_cache$new(pri, rep, "source", bioc = FALSE,
                                     cran_mirror = "mirror")

  pri_files <- get_private(cmc)$get_cache_files("primary")
  mkdirp(dirname(pri_files$pkgs$path))
  fs::file_copy(get_fixture("PACKAGES-src.gz"), pri_files$pkgs$path)
  file_set_time(pri_files$pkgs$path, Sys.time() - 1/2 * oneday())

  # Reference parse of the same fixture, for comparing with extract_deps().
  pkgs <- read_packages_file(
    get_fixture("PACKAGES-src.gz"),
    mirror = "mirror", repodir = "src/contrib", platform = "source",
    rversion = "*", type = "cran")

  # Non-recursive, no base packages: only the package itself.
  deps <- cmc$deps("abc", FALSE, FALSE)
  expect_identical(deps$package, "abc")
  expect_identical(attr(deps, "base"), character())
  expect_identical(attr(deps, "unknown"), character())
  deps2 <- extract_deps(pkgs, "abc", FALSE, FALSE)
  expect_identical(deps, deps2)

  # Recursive, no base packages: known deps listed, base deps excluded,
  # packages missing from the fixture reported as "unknown".
  deps <- extract_deps(pkgs, "abc", TRUE, FALSE)
  expect_identical(deps$package, c("abc", "abc.data", "MASS", "nnet"))
  expect_identical(attr(deps, "base"), character())
  expect_identical(attr(deps, "unknown"), c("quantreg", "locfit"))
  deps2 <- extract_deps(pkgs, "abc", TRUE, FALSE)
  expect_identical(deps, deps2)

  # Recursive, with base packages: base deps now reported in attr "base".
  deps <- extract_deps(pkgs, "abc", TRUE, TRUE)
  expect_identical(deps$package, c("abc", "abc.data", "MASS", "nnet"))
  expect_identical(
    sort(attr(deps, "base")),
    sort(c("grDevices", "graphics", "stats", "utils", "methods")))
  expect_identical(attr(deps, "unknown"), c("quantreg", "locfit"))
  deps2 <- extract_deps(pkgs, "abc", TRUE, TRUE)
  expect_identical(deps, deps2)

  # Custom dependency types (Depends + Suggests only).
  deps <- extract_deps(pkgs, "nnet", c("Depends", "Suggests"), FALSE)
  expect_identical(deps$package, c("MASS", "nnet"))
  expect_identical(attr(deps, "base"), c("stats", "utils"))
  expect_identical(attr(deps, "unknown"), character())
  deps2 <- extract_deps(pkgs, "nnet", c("Depends", "Suggests"), FALSE)
  expect_identical(deps, deps2)
})
# Return a year-by-variable data frame of GLTC values for one lake.
# `types` are variable names (matched case-insensitively) and rows come from
# the global `gltc_values` table, filtered to the lake's site IDs.  Returns
# an empty data.frame when nothing matches or reshaping fails.
subset_lake_data = function(lake_name, types){
  check_lake(lake_name)
  # Dummy bindings to satisfy R CMD check for the NSE column names used in
  # dplyr::filter()/select() below; the "_private" values are never read.
  siteID <- "_private"
  variable <- "_private"
  year <- "_private"
  value <- "_private"
  IDs <- get_site_ID(lake_name)
  df <- data.frame()
  # Any failure while accumulating or reshaping falls back to whatever was
  # collected so far (possibly the initial empty data.frame).
  df = tryCatch({
    # Collect matching rows across all of the lake's site IDs.
    for (i in 1:length(IDs)){
      vals <- filter(gltc_values, tolower(variable) %in% tolower(types), siteID == IDs[i]) %>%
        select(variable, year, value)
      df <- rbind(vals, df)
    }
    # Reshape long -> wide: one row per year, one column per variable,
    # then turn the year rownames into a proper column.
    df <- acast(df, year ~ variable)
    df <- cbind(data.frame(year = as.numeric(row.names(df))), df)
    rownames(df) <- NULL
    df
  }, error = function(e) {
    return(df)
  })
  # Normalize a zero-row result to a plain empty data.frame.
  if (nrow(df) == 0) df = data.frame()
  return(df)
}
# Convert a tibble/data frame with x, y, z columns back into a RasterLayer.
#
# Fix: the original code defined an S3 generic
# (`function(x) UseMethod("util_tibble2raster")`) and then immediately
# overwrote it with the plain implementation bound to the same name, so the
# generic was dead code and dispatch never occurred.  The dead generic is
# removed here; runtime behavior is unchanged.
#
# Args:
#   x: data frame with integer cell coordinates `x` (column), `y` (row) and
#      cell values `z`.
# Returns:
#   A raster::RasterLayer with extent c(0, max(x$x), 0, max(x$y)).
util_tibble2raster <- function(x) {
  # byrow = TRUE assumes x$z is ordered one raster row at a time --
  # TODO(review): confirm against the companion raster->tibble converter.
  r <- raster::raster(matrix(x$z, max(x$y), max(x$x), byrow = TRUE))
  # Extent expressed in cell units.
  raster::extent(r) <- c(0, max(x$x), 0, max(x$y))
  return(r)
}
# Declare symbols that are used via non-standard evaluation or supplied by
# suggested packages at run time, so `R CMD check` does not report them as
# undefined global variables.
utils::globalVariables(c("%dopar%", "CRS", "SpatialPoints", "bbox", "clusterEvalQ", "coordinates", "error", "foreach", "get.knnx", "makeCluster", "proj4string", "rasterToPoints", "registerDoParallel", "registerDoSNOW", "spDists", "stopCluster", "xres", "yres", "ginv"))
# Smoke-test script exercising the string-utility functions of
# spatstat.utils; it simply calls each helper on representative inputs.
require(spatstat.utils)

# Parenthesizing and splitting.
a <- paren(character(0))
a <- paren("hello", "")
a <- paren("hello", "{")
strsplitretain("hello, world")

# Line truncation and blank detection.
truncline(c("Now is the time for all good people",
            "to come to the aid of the Party"),
          15)
is.blank(c("a", " ", "b"))

# Padding to a fixed width in each alignment mode.
onetwo <- c("one", "two")
padtowidth(onetwo, 10, "left")
padtowidth(onetwo, 10, "right")
padtowidth(onetwo, 10, "centre")

# Output helpers.
splat("Hello world", indent="zzz")
choptext("Hello\nWorld")
exhibitStringList("Letters", letters)
exhibitStringList("Letters", letters[1:4])
numalign(42, 1e4)
singlestring(1:5)

# Three-valued ("verbal") logic over character truth values; note the
# in-place change of x[1] before the final call is intentional.
x <- c("TRUE", "unknown", "not known")
verbalogic(x, "and")
verbalogic(x, "or")
verbalogic(x, "not")
x[1] <- "FALSE"
verbalogic(x, "and")

# Name sanitizing and pasting helpers.
sensiblevarname("$@wtf%!", "variablenumberone")
nzpaste(c("Hello", "", "World"))
substringcount("v", "vavavoom")

# Parseability checks and expression/formula pasting.
huh <- c("42", "y <- x", "$%^%$")
is.parseable(huh)
make.parseable(huh)
paste.expr(expression(y == x))
pasteFormula(y ~ x + z)
gsubdot("cbind(est,theo)", ". ~ r")

# Human-readable representations of numbers, with and without units.
simplenumber(0)
simplenumber(1/3)
simplenumber(2/3)
simplenumber(-2)
simplenumber(0, unit="km")
simplenumber(1/3, unit="km")
simplenumber(2/3, unit="km")
simplenumber(-2, unit="km")

# Interval labels as produced for cut().
makeCutLabels(0:3)
lsa.bin.log.reg <- function(data.file, data.object, split.vars, bin.dep.var, bckg.indep.cont.vars, bckg.indep.cat.vars, bckg.cat.contrasts, bckg.ref.cats, PV.root.indep, standardize = FALSE, weight.var, norm.weight = FALSE, include.missing = FALSE, shortcut = FALSE, output.file, open.output = TRUE) {
tmp.options <- options(scipen = 999, digits = 22)
on.exit(expr = options(tmp.options), add = TRUE)
warnings.collector <- list()
if(missing("bckg.indep.cont.vars") & missing("bckg.indep.cat.vars") & missing("PV.root.indep")) {
stop('No independent variables ("bckg.indep.cont.vars", "bckg.indep.cat.vars" or "PV.root.indep") were passed to the call. All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(bin.dep.var) && length(bin.dep.var) > 1) {
stop('Only one binary dependent variable can be passed at a time. All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(bckg.indep.cat.vars) && !missing(bckg.ref.cats) && length(bckg.indep.cat.vars) != length(bckg.ref.cats)) {
stop('"bckg.indep.cat.vars" and "bckg.ref.cats" must have equal length. All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(bckg.indep.cat.vars) && !missing(bckg.cat.contrasts) && length(bckg.indep.cat.vars) != length(bckg.cat.contrasts)) {
stop('"bckg.indep.cat.vars" and "bckg.cat.contrasts" must have equal length. All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(bckg.ref.cats) && !is.numeric(bckg.ref.cats)) {
stop('The reference category passed to "bckg.ref.cats" must be a numeric value. All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(bckg.indep.cat.vars) & missing(bckg.cat.contrasts)) {
bckg.cat.contrasts <- rep(x = "dummy", times = length(bckg.indep.cat.vars))
warnings.collector[["contrast.cat.set.default"]] <- 'Independent categorical background variable(s) were passed to "bckg.indep.cat.vars", but no contrast coding schemes were provided for the "bckg.cat.contrasts" argument. "dummy" coding was set as default for all variables passed to "bckg.indep.cat.vars".'
}
if(!missing(bckg.indep.cat.vars) && any(!bckg.cat.contrasts %in% c("dummy", "simple", "deviation"))) {
stop('An unsupported contrast coding scheme was passed to the "bckg.indep.cat.vars". All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!missing(data.file) == TRUE && !missing(data.object) == TRUE) {
stop('Either "data.file" or "data.object" has to be provided, but not both. All operations stop here. Check your input.\n\n', call. = FALSE)
} else if(!missing(data.file)) {
if(file.exists(data.file) == FALSE) {
stop('The file specified in the "data.file" argument does not exist. All operations stop here. Check your input.\n\n', call. = FALSE)
}
ptm.data.import <- proc.time()
data <- copy(import.data(path = data.file))
used.data <- deparse(substitute(data.file))
message('\nData file ', used.data, ' imported in ', format(as.POSIXct("0001-01-01 00:00:00") + {proc.time() - ptm.data.import}[[3]], "%H:%M:%OS3"))
} else if(!missing(data.object)) {
if(length(all.vars(match.call())) == 0) {
stop('The object specified in the "data.object" argument is quoted, is this an object or a path to a file? All operations stop here. Check your input.\n\n', call. = FALSE)
}
if(!exists(all.vars(match.call()))) {
stop('The object specified in the "data.object" argument does not exist. All operations stop here. Check your input.\n\n', call. = FALSE)
}
data <- copy(data.object)
used.data <- deparse(substitute(data.object))
message('\nUsing data from object "', used.data, '".')
}
if(!"lsa.data" %in% class(data)) {
stop('\nThe data is not of class "lsa.data". All operations stop here. Check your input.\n\n', call. = FALSE)
}
vars.list <- get.analysis.and.design.vars(data)
if(!missing(bckg.indep.cat.vars) & missing(bckg.ref.cats)) {
bckg.ref.cats <- sapply(X = data[ , mget(vars.list[["bckg.indep.cat.vars"]])], FUN = function(i) {
min(na.omit(as.numeric(i)))
})
warnings.collector[["ref.cat.set.default"]] <- 'Independent categorical background variable(s) were passed to "bckg.indep.cat.vars", but no reference categories were provided for the "bckg.ref.cats" argument. Default reference categories were set: the minimum value(s) available in the data for categorical independent variable(s).'
}
action.args.list <- get.action.arguments()
file.attributes <- get.file.attributes(imported.object = data)
tryCatch({
if(file.attributes[["lsa.study"]] %in% c("PIRLS", "prePIRLS", "ePIRLS", "RLII", "TIMSS", "preTIMSS", "TIMSS Advanced", "TiPi") & missing(shortcut)) {
action.args.list[["shortcut"]] <- FALSE
}
data <- produce.analysis.data.table(data.object = data, object.variables = vars.list, action.arguments = action.args.list, imported.file.attributes = file.attributes)
max.two.cats <- sapply(X = data, FUN = function(i) {
length(unique(na.omit(i[ , get(bin.dep.var)])))
})
if(na.omit(unique(max.two.cats)) != 2) {
stop('The variable passed to "bin.dep.var" is not binary. All operations stop here. Check your input.\n\n', call. = FALSE)
}
lapply(X = data, FUN = function(i) {
i[get(bin.dep.var) == min(get(bin.dep.var), na.rm = TRUE), (bin.dep.var) := 0]
i[get(bin.dep.var) == max(get(bin.dep.var), na.rm = TRUE), (bin.dep.var) := 1]
})
countries.with.all.NA.vars <- sapply(X = data, FUN = function(i) {
any(sapply(X = i[ , mget(unname(unlist(vars.list[c("bin.dep.var", "bckg.indep.cont.vars", "bckg.indep.cat.vars", "PV.names")])))], FUN = function(j) {
all(is.na(j))
}) == TRUE)
})
countries.with.all.NA.vars <- names(Filter(isTRUE, countries.with.all.NA.vars))
if(length(countries.with.all.NA.vars) > 0) {
warnings.collector[["countries.with.all.NA.vars"]] <- paste0('One or more countries in the data have one or more variables in the regression model which have only missing values and have been removed: ', paste(countries.with.all.NA.vars, collapse = ", "), ".")
if(length(countries.with.all.NA.vars) == length(names(data))) {
stop('One or more variables in the model has missing values in all countries. All operations stop here. Check the data for all variables.\n\n', call. = FALSE)
} else {
data[countries.with.all.NA.vars] <- NULL
}
}
if(!missing(bckg.indep.cat.vars)) {
countries.with.constant.cat.vars <- names(Filter(isTRUE, lapply(X = data, FUN = function(i) {
any(Filter(isTRUE, lapply(X = i[ , mget(unname(unlist(vars.list["bckg.indep.cat.vars"])))], FUN = function(j) {
length(unique(j)) < 2
})) == TRUE)
})))
if(length(countries.with.constant.cat.vars) > 0) {
warnings.collector[["countries.with.constant.cat.vars"]] <- paste0('One or more countries in the data have one or more variables in "bckg.indep.cat.vars" which are constant and have been removed: ', paste(countries.with.all.NA.vars, collapse = ", "), ".")
data[countries.with.constant.cat.vars] <- NULL
}
}
if(!is.null(vars.list[["split.vars"]])) {
data <- lapply(X = data, FUN = function(i) {
rows.to.remove <- lapply(X = vars.list[["bckg.indep.cat.vars"]], FUN = function(j) {
tmp <- dcast(i, formula(paste0(vars.list[["split.vars"]][length(vars.list[["split.vars"]])], " ~ ", j)), value.var = j, fun.aggregate = length)
tmp1 <- tmp[ , mget(colnames(tmp)[2:length(colnames(tmp))])]
tmp[ , JUSTONEVALID := apply(tmp1, 1, function(j) {
if(sum(j > 0) == 1) {
FALSE
} else {
TRUE
}
})]
tmp[JUSTONEVALID == FALSE, get(vars.list[["split.vars"]][length(vars.list[["split.vars"]])])]
})
i[!get(vars.list[["split.vars"]][length(vars.list[["split.vars"]])]) %in% unlist(rows.to.remove), ]
})
}
data <- lapply(X = data, FUN = function(i) {
i <- na.omit(object = i, cols = unlist(vars.list[c("bin.dep.var", "bckg.indep.cont.vars", "bckg.indep.cat.vars", "bckg.cat.contrasts", "bckg.ref.cats")]))
i[get(vars.list[["weight.var"]]) > 0, ]
})
if(standardize == TRUE) {
data <- lapply(X = data, FUN = function(i) {
all.model.vars <- unlist(x = Filter(Negate(is.null), vars.list[c("bckg.indep.cont.vars", "PV.names")]), use.names = FALSE)
i[ , (all.model.vars) := lapply(.SD, scale), .SDcols = all.model.vars]
})
}
if(!is.null(vars.list[["bckg.indep.cat.vars"]])) {
bckg.cat.vars.new.names <- unlist(Map(f = function(input1, input2) {
if(input2 == "dummy") {
paste0(input1, "_DY")
} else if(input2 == "deviation") {
paste0(input1, "_DN")
} else if(input2 == "simple") {
paste0(input1, "_SC")
}
}, input1 = as.list(vars.list[["bckg.indep.cat.vars"]]), input2 = as.list(bckg.cat.contrasts)))
contrast.columns <- copy(lapply(X = data, FUN = function(i) {
i[ , mget(vars.list[["bckg.indep.cat.vars"]])]
}))
contrast.columns <- lapply(X = contrast.columns, FUN = function(i) {
i[ , (bckg.cat.vars.new.names) := lapply(.SD, factor), .SDcols = vars.list[["bckg.indep.cat.vars"]]]
tmp.contr.cols <- Map(f = function(input1, input2, input3) {
if(input2 == "dummy") {
contrasts(input1) <- contr.treatment(n = length(levels(input1)), base = input3)
} else if(input2 == "deviation") {
input1 <- factor(x = input1, levels = c(levels(input1)[!levels(input1) == input3], input3))
deviation.contrasts <- contr.sum(n = length(levels(input1)))
dimnames(deviation.contrasts) <- list(levels(input1), grep(pattern = input3, x = levels(input1), value = TRUE, invert = TRUE))
contrasts(input1) <- deviation.contrasts
} else if(input2 == "simple") {
input1 <- factor(x = input1, levels = c(levels(input1)[levels(input1) == input3], levels(input1)[!levels(input1) == input3]))
contr.treatment.matrix <- contr.treatment(n = length(levels(input1)))
effect.contrasts.matrix <- matrix(rep(x = 1/4, times = length(levels(input1))*(length(levels(input1)) - 1)), ncol = (length(levels(input1)) - 1))
contr.treatment.matrix <- contr.treatment.matrix - effect.contrasts.matrix
dimnames(contr.treatment.matrix) <- list(levels(input1), grep(pattern = input3, x = levels(input1), value = TRUE, invert = TRUE))
contrasts(input1) <- contr.treatment.matrix
}
return(data.table(input1))
}, input1 = i[ , mget(bckg.cat.vars.new.names)], input2 = as.list(bckg.cat.contrasts), input3 = as.list(bckg.ref.cats))
tmp.contr.cols <- do.call(cbind, tmp.contr.cols)
setnames(x = tmp.contr.cols, bckg.cat.vars.new.names)
})
data <- Map(f = cbind, data, contrast.columns)
}
vars.list[["pcts.var"]] <- tmp.pcts.var
vars.list[["group.vars"]] <- tmp.group.vars
analysis.info <- list()
model.stats <- list()
number.of.countries <- length(names(data))
if(number.of.countries == 1) {
message("\nValid data from one country have been found. Some computations can be rather intensive. Please be patient.\n")
} else if(number.of.countries > 1) {
message("\nValid data from ", number.of.countries, " countries have been found. Some computations can be rather intensive. Please be patient.\n")
}
counter <- 0
# Per-country workhorse for the binary logistic regression analysis.
# Receives one country's data.table and returns a data.table of merged
# estimates (counts, sums of weights, percentages, regression coefficients,
# odds ratios and confidence intervals). Relies on objects from the
# enclosing function's scope (vars.list, key.vars, shortcut, norm.weight,
# include.missing, file.attributes, design.weight.variables, used.data,
# DESIGN, number.of.countries) and writes into the enclosing scope via
# `<<-` (model.stats, analysis.info, counter).
compute.all.stats <- function(data) {
# Assemble the right-hand side of the regression formula from the
# PV (plausible value) roots and/or background variables in vars.list.
independent.variables <- grep(pattern = ".indep", x = names(vars.list), value = TRUE)
if("PV.root.indep" %in% independent.variables) {
independent.variables.PV <- lapply(X = vars.list[["PV.root.indep"]], FUN = function(i) {
as.list(grep(pattern = i, x = unlist(vars.list[["PV.names"]]), value = TRUE))
})
}
if(any(c("bckg.indep.cont.vars", "bckg.indep.cat.vars") %in% independent.variables)) {
if(exists("bckg.cat.vars.new.names")) {
independent.variables.bckg <- paste(unlist(c(vars.list[["bckg.indep.cont.vars"]], bckg.cat.vars.new.names)), collapse = " + ")
} else {
independent.variables.bckg <- paste(unlist(vars.list[["bckg.indep.cont.vars"]]), collapse = " + ")
}
}
# Combine PV-based and background-based predictors: one formula string per
# plausible value when PVs are involved, a single string otherwise.
if(exists("independent.variables.PV") & exists("independent.variables.bckg")) {
independent.variables <- do.call(cbind, independent.variables.PV)
independent.variables <- cbind(independent.variables, independent.variables.bckg)
independent.variables <- as.list(apply(X = independent.variables, MARGIN = 1, FUN = function(i) {
paste(i, collapse = " + ")
}))
} else if(exists("independent.variables.PV") & !exists("independent.variables.bckg")) {
independent.variables <- lapply(X = vars.list[["PV.root.indep"]], FUN = function(i) {
as.list(grep(pattern = i, x = unlist(vars.list[["PV.names"]]), value = TRUE))
})
independent.variables <- do.call(cbind, independent.variables)
independent.variables <- as.list(apply(X = independent.variables, MARGIN = 1, FUN = function(i) {
paste(i, collapse = " + ")
}))
} else if(!exists("independent.variables.PV") & exists("independent.variables.bckg")) {
if(exists("bckg.cat.vars.new.names")) {
independent.variables <- paste(unlist(Filter(Negate(is.null), c(vars.list["bckg.indep.cont.vars"], bckg.cat.vars.new.names))), collapse = " + ")
} else {
independent.variables <- paste(unlist(Filter(Negate(is.null), vars.list["bckg.indep.cont.vars"])), collapse = " + ")
}
}
# Character => single formula; list => one formula per plausible value.
if(is.character(independent.variables)) {
regression.formula <- paste(c(bin.dep.var, independent.variables), collapse = " ~ ")
} else if(is.list(independent.variables)) {
regression.formula <- Map(f = paste, bin.dep.var, independent.variables, sep = " ~ ")
}
# Identify the replication weight columns present in this country's data.
rep.wgts.names <- paste(c("REPWGT", unlist(lapply(X = design.weight.variables[grep("rep.wgts", names(design.weight.variables), value = TRUE)], FUN = function(i) {
unique(gsub(pattern = "[[:digit:]]*$", replacement = "", x = i))
}))), collapse = "|")
rep.wgts.names <- grep(pattern = rep.wgts.names, x = names(data), value = TRUE)
all.weights <- c(vars.list[["weight.var"]], rep.wgts.names)
# Optionally normalize all weights so they sum to the number of cases.
if(norm.weight == TRUE) {
data[ , (all.weights) := lapply(.SD, function(i) {
length(i) * i / sum(i)
}), .SDcols = all.weights]
}
cnt.start.time <- format(Sys.time(), format = "%Y-%m-%d %H:%M:%OS3")
# Compute counts, sums of weights and weighted percentages, with or
# without listwise deletion on the key variables.
if(include.missing == FALSE) {
data1 <- na.omit(object = copy(data), cols = key.vars)
if(!is.null(vars.list[["pcts.var"]])) {
percentages <- na.omit(data1[ , c(.(na.omit(unique(get(vars.list[["pcts.var"]])))), Map(f = wgt.pct, variable = .(get(vars.list[["pcts.var"]])), weight = mget(all.weights))), by = eval(vars.list[["group.vars"]])])
number.of.cases <- na.omit(data1[eval(parse(text = vars.list[["weight.var"]])) > 0, .(n_Cases = .N), by = key.vars])
sum.of.weights <- na.omit(data1[ , lapply(.SD, sum), by = key.vars, .SDcols = all.weights])
} else {
percentages <- na.omit(data1[ , c(.(na.omit(unique(get(key.vars)))), Map(f = wgt.pct, variable = .(get(key.vars)), weight = mget(all.weights)))])
number.of.cases <- na.omit(data1[ , .(n_Cases = .N), by = key.vars])
sum.of.weights <- na.omit(data1[ , lapply(.SD, sum), by = key.vars, .SDcols = all.weights])
}
} else if (include.missing == TRUE) {
data1 <- copy(data)
if(!is.null(vars.list[["pcts.var"]])) {
percentages <- data1[ , c(.(na.omit(unique(get(vars.list[["pcts.var"]])))), Map(f = wgt.pct, variable = .(get(vars.list[["pcts.var"]])), weight = mget(all.weights))), by = eval(vars.list[["group.vars"]])]
number.of.cases <- data1[eval(parse(text = vars.list[["weight.var"]])) > 0, .(n_Cases = .N), by = key.vars]
sum.of.weights <- data1[ , lapply(.SD, sum), by = key.vars, .SDcols = all.weights]
} else {
percentages <- data[ , c(.(na.omit(unique(get(key.vars)))), Map(f = wgt.pct, variable = .(get(key.vars)), weight = mget(all.weights)))]
number.of.cases <- data[ , .(n_Cases = .N), by = key.vars]
sum.of.weights <- data[ , lapply(.SD, sum), by = key.vars, .SDcols = all.weights]
}
}
percentages <- list(percentages)
sum.of.weights <- list(sum.of.weights)
# Reshape the percentage estimates (modified in place by reference).
if(!is.null(vars.list[["pcts.var"]])) {
reshape.list.statistics.bckg(estimate.object = percentages, estimate.name = "Percentages_", bckg.vars.vector = vars.list[["pcts.var"]], weighting.variable = vars.list[["weight.var"]], data.key.variables = key.vars, new.names.vector = vars.list[["pcts.var"]], replication.weights = rep.wgts.names, study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
} else {
reshape.list.statistics.bckg(estimate.object = percentages, estimate.name = "Percentages_", bckg.vars.vector = NULL, weighting.variable = vars.list[["weight.var"]], data.key.variables = key.vars, new.names.vector = key.vars, replication.weights = rep.wgts.names, study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
}
percentages <- rbindlist(percentages)
# Fill gaps for groups that have cases but no percentage rows: 100% with SE 0.
if(nrow(number.of.cases) > nrow(percentages)) {
percentages <- merge(number.of.cases[ , mget(key.vars)], percentages, all.x = TRUE)
percentages[ , (grep(pattern = "Percentages_[[:alnum:]]+$", x = colnames(percentages), value = TRUE)) := lapply(.SD, function(i){i[is.na(i)] <- 100; i}), .SDcols = grep(pattern = "Percentages_[[:alnum:]]+$", x = colnames(percentages), value = TRUE)]
percentages[ , (grep(pattern = "Percentages_[[:alnum:]]+_SE$", x = colnames(percentages), value = TRUE)) := lapply(.SD, function(i){i[is.na(i)] <- 0; i}), .SDcols = grep(pattern = "Percentages_[[:alnum:]]+_SE$", x = colnames(percentages), value = TRUE)]
}
reshape.list.statistics.bckg(estimate.object = sum.of.weights, estimate.name = "Sum_", weighting.variable = vars.list[["weight.var"]], data.key.variables = key.vars, new.names.vector = vars.list[["weight.var"]], replication.weights = rep.wgts.names, study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
# When PVs are predictors, split the data into one table per plausible
# value, each containing only the columns needed for that regression.
if(!is.null(vars.list[["PV.root.indep"]])) {
PV.names.to.split.by <- transpose(vars.list[["PV.names"]])
PV.names.to.keep <- lapply(X = PV.names.to.split.by, FUN = function(i) {
grep(pattern = paste(c(key.vars, i, vars.list[["bin.dep.var"]], vars.list[["bckg.indep.cont.vars"]], vars.list[["bckg.indep.cat.vars"]], all.weights, vars.list[["jk.zones"]], vars.list[["rep.ind"]]), collapse = "|"), x = colnames(data1), value = TRUE)
})
data1 <- lapply(X = PV.names.to.keep, FUN = function(i) {
data1[ , mget(i)]
})
}
# Fit the weighted logistic regressions (once for background-only models,
# once per plausible value for PV models).
if(is.null(vars.list[["PV.root.indep"]])) {
if(exists("bckg.cat.vars.new.names")) {
bckg.regression <- list(compute.logistic.regression.all.repwgt(data.object = data1, vars.vector = c(vars.list[["bin.dep.var"]], vars.list[["bckg.indep.cont.vars"]], bckg.cat.vars.new.names), weight.var = all.weights, keys = key.vars, reg.formula = regression.formula))
} else {
bckg.regression <- list(compute.logistic.regression.all.repwgt(data.object = data1, vars.vector = c(vars.list[["bin.dep.var"]], vars.list[["bckg.indep.cont.vars"]]), weight.var = all.weights, keys = key.vars, reg.formula = regression.formula))
}
lapply(X = bckg.regression, FUN = function(i) {
setnames(x = i, old = "V1", new = "Variable")
})
} else if(!is.null(vars.list[["PV.root.indep"]])) {
PV.regression <- list(lapply(X = seq_along(data1), FUN = function(i) {
compute.logistic.regression.all.repwgt(data.object = data1[[i]], vars.vector = grep(pattern = paste(c(vars.list[["PV.root.indep"]], vars.list[["bin.dep.var"]], vars.list[["bckg.indep.cont.vars"]], vars.list[["bckg.indep.cat.vars"]]), collapse = "|"), x = colnames(data1[[i]]), value = TRUE), weight.var = all.weights, keys = key.vars, reg.formula = regression.formula[[i]])
}))
# Separate the odds-ratio rows (suffix "_odds") from the coefficient rows.
PV.regression["odds.ratios"] <- lapply(X = PV.regression, FUN = function(i) {
lapply(X = i, function(j) {
j <- j[V1 %in% grep(pattern = "_odds$", x = V1, value = TRUE)]
j[ , V1 := gsub(pattern = "_odds$", replacement = "", x = V1)]
})
})
PV.regression[1] <- lapply(X = PV.regression[1], FUN = function(i) {
lapply(X = i, function(j) {
j[!V1 %in% grep(pattern = "_odds$", x = V1, value = TRUE), ]
})
})
# Harmonize term names across PVs (strip PV indices, for some studies
# replacing digits with "N") and fix a common ordering via a factor.
PV.regression <- lapply(X = PV.regression, FUN = function(i) {
lapply(X = i, FUN = function(j) {
j[ , V1 := as.character(V1)]
PV.values.names <- grep(pattern = paste(vars.list[["PV.root.indep"]], collapse = "|"), x = j[ , V1], value = TRUE)
new.V1.values <- unname(sapply(X = j[ , V1], FUN = function(k) {
ifelse(test = k %in% PV.values.names, yes = gsub(pattern = "[[:digit:]]+$", replacement = "", x = k), no = k)
}))
j[ , V1 := new.V1.values]
if(exists("bckg.cat.vars.new.names")) {
new.cat.indep.vars.vals <- unique(grep(pattern = paste(bckg.cat.vars.new.names, collapse = "|"), x = j[ , V1], value = TRUE))
if(file.attributes[["lsa.study"]] %in% c("PISA", "PISA for Development", "ICCS", "ICILS")) {
PV.root.indep.names <- unique(gsub(pattern = "[[:digit:]]+", replacement = "N", x = grep(pattern = paste(vars.list[["PV.root.indep"]], collapse = "|"), x = j[ , V1], value = TRUE)))
j[ , V1 := sapply(.SD, FUN = function(k) {
ifelse(test = grepl(pattern = paste(vars.list[["PV.root.indep"]], collapse = "|"), x = k), yes = gsub(pattern = "[[:digit:]]+", replacement = "N", x = k), no = k)
}), .SDcols = "V1"]
j[ , V1 := factor(x = V1, levels = c("(Intercept)", PV.root.indep.names, vars.list[["bckg.indep.cont.vars"]], new.cat.indep.vars.vals, "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), labels = c("(Intercept)", PV.root.indep.names, vars.list[["bckg.indep.cont.vars"]], new.cat.indep.vars.vals, "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"))]
} else {
j[ , V1 := factor(x = V1, levels = c("(Intercept)", vars.list[["PV.root.indep"]], vars.list[["bckg.indep.cont.vars"]], new.cat.indep.vars.vals, "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), labels = c("(Intercept)", vars.list[["PV.root.indep"]], vars.list[["bckg.indep.cont.vars"]], new.cat.indep.vars.vals, "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"))]
}
} else {
if(file.attributes[["lsa.study"]] %in% c("PISA", "PISA for Development", "ICCS", "ICILS")) {
PV.root.indep.names <- unique(gsub(pattern = "[[:digit:]]+", replacement = "N", x = grep(pattern = paste(vars.list[["PV.root.indep"]], collapse = "|"), x = j[ , V1], value = TRUE)))
j[ , V1 := sapply(.SD, FUN = function(k) {
ifelse(test = grepl(pattern = paste(vars.list[["PV.root.indep"]], collapse = "|"), x = k), yes = gsub(pattern = "[[:digit:]]+", replacement = "N", x = k), no = k)
}), .SDcols = "V1"]
j[ , V1 := factor(x = V1, levels = c("(Intercept)", PV.root.indep.names, vars.list[["bckg.indep.cont.vars"]], "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), labels = c("(Intercept)", PV.root.indep.names, vars.list[["bckg.indep.cont.vars"]], "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"))]
} else {
j[ , V1 := factor(x = V1, levels = c("(Intercept)", vars.list[["PV.root.indep"]], vars.list[["bckg.indep.cont.vars"]], "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), labels = c("(Intercept)", vars.list[["PV.root.indep"]], vars.list[["bckg.indep.cont.vars"]], "null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"))]
}
}
setkeyv(x = j, cols = c(key.vars, "V1"))
})
})
PV.regression <- lapply(X = PV.regression, FUN = function(i) {
lapply(X = i, FUN = function(j) {
setnames(x = j, old = c("V1", all.weights), new = c("Variable", paste0("V", 1:length(all.weights))))
})
})
}
# Reshape regression output and split off the model-fit statistics
# (deviances, AIC/BIC, pseudo R-squared) into country.model.stats.
if(is.null(vars.list[["PV.root.indep"]])) {
reshape.list.statistics.bckg(estimate.object = bckg.regression, estimate.name = "Coefficients", data.key.variables = key.vars, new.names.vector = "", bckg.vars.vector = vars.list[["bckg.indep.vars"]], weighting.variable = vars.list[["weight.var"]], replication.weights = rep.wgts.names, study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
bckg.regression <- bckg.regression[[1]]
country.model.stats <- bckg.regression[Variable %in% c("null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), ]
setnames(x = country.model.stats, old = c("Variable", "Coefficients", "Coefficients_SE"), new = c("Statistic", "Estimate", "Estimate_SE"))
bckg.regression <- bckg.regression[!Variable %in% c("null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), ]
} else if(!is.null(vars.list[["PV.root.indep"]])) {
reshape.list.statistics.PV(estimate.object = PV.regression, estimate.name = "Coefficients", PV.vars.vector = "", weighting.variable = vars.list[["weight.var"]], replication.weights = rep.wgts.names, study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
lapply(X = PV.regression[["odds.ratios"]], FUN = function(i) {
i[ , Variable := paste0(Variable, "_odds")]
})
# Recombine coefficient and odds-ratio tables, one table per PV.
PV.regression <- lapply(X = PV.regression, FUN = function(i) {
rbindlist(l = i, idcol = "DDD")
})
PV.regression <- rbindlist(l = PV.regression)
PV.regression <- split(x = PV.regression, by = "DDD")
PV.regression <- list(lapply(X = PV.regression, FUN = function(i) {
i[ , DDD := NULL]
}))
# Give each PV's coefficient columns a unique suffix before merging.
reset.coefficients.colnames <- function(input1, input2) {
setnames(x = input1, old = grep(pattern = "^Coefficients$", x = colnames(input1), value = TRUE), new = paste0("Coefficients_", input2))
setnames(x = input1, old = grep(pattern = "^Coefficients_SumSq$", x = colnames(input1), value = TRUE), new = paste0("Coefficients_", input2, "_SumSq"))
}
PV.regression <- lapply(X = PV.regression, FUN = function(i) {
list(Map(f = reset.coefficients.colnames, input1 = i, input2 = as.list(paste(vars.list[["bin.dep.var"]], 1:length(vars.list[["PV.names"]][[1]]), sep = "0"))))[[1]]
})
PV.regression <- lapply(X = PV.regression, FUN = function(i) {
Reduce(function(...) merge(...), i)
})
# Pool estimates across plausible values (Rubin-style aggregation done
# by the helper, modified in place).
aggregate.PV.estimates(estimate.object = PV.regression, estimate.name = "Coefficients_", root.PV = vars.list[["bin.dep.var"]], PV.vars.vector = paste(vars.list[["bin.dep.var"]], 1:length(vars.list[["PV.names"]][[1]]), sep = "0"), data.key.variables = c(key.vars, "Variable"), study.name = file.attributes[["lsa.study"]], SE.design = shortcut)
if(file.attributes[["lsa.study"]] %in% c("PISA", "PISA for Development", "ICCS", "ICILS")) {
lapply(X = PV.regression, FUN = function(i) {
coefficient.cols <- grep(pattern = "^Coefficients_[[:graph:]]+$", x = colnames(i), value = TRUE)
if(length(coefficient.cols) > 0) {
main.coeff.col <- coefficient.cols[!coefficient.cols %in% grep(pattern = "_SE$|_SVR$|_MVR$", x = coefficient.cols, value = TRUE)]
setnames(x = i, old = main.coeff.col, new = paste0("Coefficients_", vars.list[["bin.dep.var"]]))
setnames(x = i, old = grep(pattern = "^Coefficients_[[:graph:]]+_SE$", x = colnames(i), value = TRUE), new = paste0("Coefficients_", vars.list[["bin.dep.var"]], "_SE"))
setnames(x = i, old = grep(pattern = "^Coefficients_[[:graph:]]+_SVR$", x = colnames(i), value = TRUE), new = paste0("Coefficients_", vars.list[["bin.dep.var"]], "_SVR"))
setnames(x = i, old = grep(pattern = "^Coefficients_[[:graph:]]+_MVR$", x = colnames(i), value = TRUE), new = paste0("Coefficients_", vars.list[["bin.dep.var"]], "_MVR"))
} else {
i
}
})
}
PV.regression <- PV.regression[[1]]
coeff.colnames <- grep(pattern = "^Coefficients_", x = colnames(PV.regression), value = TRUE)
country.model.stats <- PV.regression[Variable %in% c("null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), ]
colnames(country.model.stats) <- gsub(pattern = paste(paste0("_", unlist(vars.list)), collapse = "|"), replacement = "", x = colnames(country.model.stats))
setnames(x = country.model.stats, old = c("Variable", "Coefficients", grep(pattern = "Coefficients_", x = colnames(country.model.stats), value = TRUE)), new = c("Statistic", "Estimate", gsub(pattern = "Coefficients_", replacement = "Estimate_", x = grep(pattern = "Coefficients_", x = colnames(country.model.stats), value = TRUE))))
PV.regression <- PV.regression[!Variable %in% c("null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), ]
merged.PV.estimates <- PV.regression
PV.regression <- NULL
}
# Label the model-fit statistics and stash them in the enclosing scope.
country.model.stats[ , Statistic := factor(x = Statistic, levels = c("null.deviance", "deviance", "df.null", "df.residual", "aic", "bic", "chi.square", "r2hl", "r2cs", "r2n"), labels = c("Null Deviance (-2LL)", "Deviance (-2LL)", "DF Null", "DF Residual", "AIC", "BIC", "Chi-Square", "R-Squared (Hosmer & Lemeshow)", "R-Squared (Cox & Snell)", "R-Squared (Nagelkerke)"))]
setkeyv(x = country.model.stats, cols = c(key.vars, "Statistic"))
cnt.model.name <- unique(country.model.stats[ , get(key.vars[1])])
model.stats[[cnt.model.name]] <<- country.model.stats
country.analysis.info <- produce.analysis.info(cnt.ID = unique(data[ , get(key.vars[1])]), data = used.data, study = file.attributes[["lsa.study"]], cycle = file.attributes[["lsa.cycle"]], weight.variable = vars.list[["weight.var"]], rep.design = DESIGN, used.shortcut = shortcut, number.of.reps = rep.wgts.names, in.time = cnt.start.time)
analysis.info[[country.analysis.info[ , COUNTRY]]] <<- country.analysis.info
# Merge counts, sums of weights, percentages and regression estimates.
if("PV.root.indep" %in% names(vars.list) == FALSE) {
merged.outputs <- Reduce(function(...) merge(..., all = TRUE), list(number.of.cases, sum.of.weights, percentages, bckg.regression))
} else if("PV.root.indep" %in% names(vars.list) == TRUE) {
merged.outputs <- Reduce(function(...) merge(..., all = TRUE), list(number.of.cases, sum.of.weights, percentages, merged.PV.estimates))
colnames(merged.outputs) <- gsub(pattern = paste(paste0("Coefficients_", unlist(vars.list[["bin.dep.var"]])), collapse = "|"), replacement = "Coefficients", x = colnames(merged.outputs))
}
# Wald z statistic and two-sided p-value; infinities (SE == 0) become NaN.
merged.outputs[ , Wald_Statistic := Coefficients/Coefficients_SE]
merged.outputs[ , Wald_Statistic := lapply(.SD, function(i) {
ifelse(test = is.infinite(i), yes = NA, no = i)
}), .SDcols = "Wald_Statistic"]
merged.outputs[ , p_value := 2 * pnorm(q = abs(Wald_Statistic), lower.tail = FALSE)]
merged.outputs[ , (c("Wald_Statistic", "p_value")) := lapply(.SD, function(i) {
ifelse(test = is.na(i), yes = NaN, no = i)
}), .SDcols = c("Wald_Statistic", "p_value")]
# Move the "_odds" rows into dedicated Odds_Ratio columns and merge back.
odds.ratios.estimates <- merged.outputs[Variable %in% grep(pattern = "_odds$", x = Variable, value = TRUE), mget(c(key.vars, "Variable", "Coefficients", "Coefficients_SE"))]
odds.ratios.estimates[ , Variable := droplevels(Variable)]
setnames(x = odds.ratios.estimates, old = c("Coefficients", "Coefficients_SE"), new = c("Odds_Ratio", "Odds_Ratio_SE"))
odds.ratios.estimates[ , Variable := gsub(pattern = "_odds$", replacement = "", x = Variable)]
setkeyv(x = odds.ratios.estimates, cols = c(key.vars, "Variable"))
merged.outputs <- merged.outputs[!Variable %in% grep(pattern = "_odds$", x = Variable, value = TRUE), ]
merged.outputs[ , Variable := droplevels(Variable)]
setkeyv(x = merged.outputs, cols = c(key.vars, "Variable"))
merged.outputs <- merge(x = merged.outputs, y = odds.ratios.estimates)
# 95% Wald confidence intervals on the logit scale and, exponentiated,
# on the odds-ratio scale.
merged.outputs[ , Wald_L95CI := Coefficients - qnorm(0.975) * Coefficients_SE]
merged.outputs[ , Wald_U95CI := Coefficients + qnorm(0.975) * Coefficients_SE]
merged.outputs[ , Odds_L95CI := exp(Wald_L95CI)]
merged.outputs[ , Odds_U95CI := exp(Wald_U95CI)]
odds.ratios.estimates <- NULL
# Progress report: "(k/N) COUNTRY processed in ...".
counter <<- counter + 1
message(" ",
if(nchar(counter) == 1) {
paste0("( ", counter, "/", number.of.countries, ") ")
} else if(nchar(counter) == 2) {
paste0("(", counter, "/", number.of.countries, ") ")
},
paste0(str_pad(string = unique(merged.outputs[[1]]), width = 40, side = "right"), " processed in ", country.analysis.info[ , DURATION]))
return(merged.outputs)
}
estimates <- rbindlist(lapply(X = data, FUN = compute.all.stats))
estimates[ , colnames(estimates)[1] := as.character(estimates[ , get(colnames(estimates)[1])])]
setkeyv(x = estimates, cols = key.vars)
total.exec.time <- rbindlist(analysis.info)[ , DURATION]
total.exec.time.millisec <- sum(as.numeric(str_extract(string = total.exec.time, pattern = "[[:digit:]]{3}$")))/1000
total.exec.time <- sum(as.ITime(total.exec.time), total.exec.time.millisec)
if(length(unique(estimates[ , get(key.vars[1])])) > 1) {
message("\nAll ", length(unique(estimates[ , get(key.vars[1])])), " countries with valid data processed in ", format(as.POSIXct("0001-01-01 00:00:00") + total.exec.time - 1, "%H:%M:%OS3"))
} else {
message("")
}
ptm.add.table.average <- proc.time()
estimates <- compute.table.average(output.obj = estimates, object.variables = vars.list, data.key.variables = c(key.vars, "Variable"), data.properties = file.attributes)
estimates[eval(parse(text = colnames(estimates)[1])) == "Table Average", Wald_Statistic := Coefficients/Coefficients_SE]
estimates[eval(parse(text = colnames(estimates)[1])) == "Table Average", p_value := 2 * pnorm(q = abs(Wald_Statistic), lower.tail = FALSE)]
if(standardize == TRUE) {
if(!is.null(vars.list[["PV.names"]])) {
estimates[Variable == "(Intercept)", (c("Coefficients", "Coefficients_SE", "Coefficients_SVR", "Coefficients_MVR", "Wald_Statistic", "p_value")) := NaN]
} else {
estimates[Variable == "(Intercept)", (c("Coefficients", "Coefficients_SE", "Wald_Statistic", "p_value")) := NaN]
}
}
message('"Table Average" added to the estimates in ', format(as.POSIXct("0001-01-01 00:00:00") + {proc.time() - ptm.add.table.average}[[3]], "%H:%M:%OS3"))
ptm.add.model.stats <- proc.time()
model.stats <- rbindlist(l = model.stats)
setkeyv(x = model.stats, cols = c(key.vars, "Statistic"))
model.stats <- compute.table.average(output.obj = model.stats, object.variables = vars.list, data.key.variables = c(key.vars, "Statistic"), data.properties = file.attributes)
model.stats[eval(parse(text = colnames(model.stats)[1])) == "Table Average" & Statistic %in% c("Null Deviance (-2LL)", "Deviance (-2LL)", "DF Null", "DF Residual"), Estimate := NaN]
model.stats[eval(parse(text = colnames(model.stats)[1])) == "Table Average" & Statistic %in% c("Null Deviance (-2LL)", "Deviance (-2LL)", "DF Null", "DF Residual"), Estimate_SE := NaN]
message('\nModel statistics table assembled in ', format(as.POSIXct("0001-01-01 00:00:00") + {proc.time() - ptm.add.model.stats}[[3]], "%H:%M:%OS3"), "\n")
export.results(output.object = estimates, analysis.type = action.args.list[["executed.analysis.function"]], model.stats.obj = model.stats, analysis.info.obj = rbindlist(l = analysis.info), destination.file = output.file, open.exported.file = open.output)
if(exists("removed.countries.where.any.split.var.is.all.NA") && length(removed.countries.where.any.split.var.is.all.NA) > 0) {
warning('Some of the countries had one or more splitting variables which contains only missing values. These countries are: "', paste(removed.countries.where.any.split.var.is.all.NA, collapse = '", "'), '".', call. = FALSE)
}
}, interrupt = function(f) {
message("\nInterrupted by the user. Computations are not finished and output file is not produced.\n")
})
vars.list.analysis.vars <- grep(pattern = "split.vars|bckg.dep.var|bckg.indep.cont.vars|bckg.indep.cat.vars", x = names(vars.list), value = TRUE)
vars.list.analysis.vars <- unlist(vars.list[vars.list.analysis.vars])
vars.list.analysis.vars <- grep(pattern = paste(unique(unlist(studies.all.design.variables)), collapse = "|"), x = vars.list.analysis.vars, value = TRUE)
if(length(vars.list.analysis.vars) > 0) {
warning('Some of the variables specified as analysis variables (in "split.vars" and/or background variables - dependent or independent) are design variables (sampling variables or PVs). This kind of variables shall not be used for analysis. Check your input.', call. = FALSE)
}
if(length(warnings.collector) > 0) {
if(!is.null(warnings.collector[["ref.cat.set.default"]])) {
warning(warnings.collector[["ref.cat.set.default"]], call. = FALSE)
}
if(!is.null(warnings.collector[["contrast.cat.set.default"]])) {
warning(warnings.collector[["contrast.cat.set.default"]], call. = FALSE)
}
if(!is.null(warnings.collector[["countries.with.all.NA.vars"]])) {
warning(warnings.collector[["countries.with.all.NA.vars"]], call. = FALSE)
}
if(!is.null(warnings.collector[["countries.with.constant.cat.vars"]])) {
warning(warnings.collector[["countries.with.constant.cat.vars"]], call. = FALSE)
}
}
} |
htmlH4 <- function(children=NULL, id=NULL, n_clicks=NULL, n_clicks_timestamp=NULL, key=NULL, role=NULL, accessKey=NULL, className=NULL, contentEditable=NULL, contextMenu=NULL, dir=NULL, draggable=NULL, hidden=NULL, lang=NULL, spellCheck=NULL, style=NULL, tabIndex=NULL, title=NULL, loading_state=NULL, ...) {
  # Dash component constructor for the HTML <h4> element.
  # Wildcard attributes (data-* / aria-*) may be supplied through `...`;
  # they are validated and their names recorded for propNames.
  wildcard_names <- names(dash_assert_valid_wildcards(attrib = list('data', 'aria'), ...))
  # Gather every declared prop plus the wildcards, then strip the unset
  # (NULL) entries so only explicitly supplied values are serialized.
  props <- list(children=children, id=id, n_clicks=n_clicks, n_clicks_timestamp=n_clicks_timestamp, key=key, role=role, accessKey=accessKey, className=className, contentEditable=contentEditable, contextMenu=contextMenu, dir=dir, draggable=draggable, hidden=hidden, lang=lang, spellCheck=spellCheck, style=style, tabIndex=tabIndex, title=title, loading_state=loading_state, ...)
  props <- Filter(Negate(is.null), props)
  # Assemble the component payload and tag it with the dash classes.
  structure(
    list(
      props = props,
      type = 'H4',
      namespace = 'dash_html_components',
      propNames = c('children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state', wildcard_names),
      package = 'dashHtmlComponents'
    ),
    class = c('dash_component', 'list')
  )
}
#' Thin a set of point records: drop every point that has a close neighbour
#' and, optionally, points falling outside a raster mask.
#'
#' @param coord data.frame of records; coordinates live in `coord_col`.
#' @param merge_dist distance threshold passed against
#'   raster::pointDistance(lonlat = TRUE) output.
#' @param coord_col names of the longitude/latitude columns.
#' @param filter_layer optional raster; only points extracting to 1 are kept.
#' @param na.rm drop records with missing coordinates first (see NOTE).
#' @return The filtered subset of `coord`; a message reports the before and
#'   after row counts.
clean_points <- function(coord,
                         merge_dist,
                         coord_col = c("Lon", "Lat"),
                         filter_layer = NULL,
                         na.rm = FALSE
) {
  # NOTE(review): the NA filter only runs in the ncol(coord) <= 2 branch,
  # and the `|` keeps rows with at least one non-NA coordinate; `&` (both
  # coordinates present) and applying the filter in the ncol > 2 branch too
  # may have been intended -- confirm.
  coord_only <-
    if (ncol(coord) > 2) coord[coord_col] else
    {if (na.rm == TRUE) coord[!is.na(coord$Lon) | !is.na(coord$Lat),] else coord}
  # TRUE wherever two distinct points lie closer than merge_dist.
  dist_mat <- raster::pointDistance(coord_only, lonlat = TRUE, allpairs = TRUE) < merge_dist
  # Ignore self-distances when counting neighbours.
  diag(dist_mat) <- NA
  # Keep only points with no neighbour within merge_dist. Every member of a
  # close cluster is dropped (nothing is "merged" to a representative point).
  logical_dist <- colSums(dist_mat, na.rm = TRUE) == 0
  # Optional raster mask: keep points whose extracted cell value equals 1.
  logical_raster <-
    if (!is.null(filter_layer)) {
      raster::extract(filter_layer, coord_only) == 1
    } else TRUE
  coord_clean <- coord[logical_dist & logical_raster,]
  # Report entry counts before and after cleaning.
  rbind(n_entries_species = nrow(coord),
        n_entries_clean = nrow(coord_clean)) %>%
    message
  return(coord_clean)
}
# Silence R CMD check's NOTE about the magrittr placeholder `.`.
utils::globalVariables(".")
#' Front end for MOS ensemble postprocessing fits.
#'
#' Validates its inputs, then rewrites its own call so that the requested
#' model-specific fitting function (e.g. ensembleMOSnormal) is evaluated
#' with the remaining arguments in the caller's frame.
#'
#' @param ensembleData An 'ensembleData' object.
#' @param model One of "normal", "truncnormal", "lognormal", "csg0", "gev0".
#' @return Whatever the dispatched model-specific fitter returns.
ensembleMOS <-
  function(ensembleData, trainingDays, consecutive = FALSE, dates = NULL,
           control = NULL, warmStart = FALSE,
           model = NULL, exchangeable = NULL)
{
  if (!inherits(ensembleData, "ensembleData")) stop("not an ensembleData object")
  theCall <- match.call()
  # The dispatched fitters do not take a `model` argument.
  theCall$model <- NULL
  if (is.null(model)) stop("unspecified model")
  # Map the model keyword onto the name of its fitting function.
  fitter <- switch(model,
                   "normal"      = "ensembleMOSnormal",
                   "truncnormal" = "ensembleMOStruncnormal",
                   "lognormal"   = "ensembleMOSlognormal",
                   "csg0"        = "ensembleMOScsg0",
                   "gev0"        = "ensembleMOSgev0",
                   stop("unrecognized model"))
  theCall[[1]] <- as.name(fitter)
  # Strip a leading subclass (if any) so the fitter sees plain ensembleData,
  # and embed the modified object directly in the call.
  if (length(attr(ensembleData, "class")) > 2) {
    attr(ensembleData, "class") <- attr(ensembleData, "class")[-1]
    theCall$ensembleData <- ensembleData
  }
  eval(theCall, parent.frame())
}
# Tests for getModelFitness(): input validation and the shape of its output.
context("Expected input arguments and output in getModelFitness")
# Candidate model strings: a population of 15 non-longitudinal models over 6
# variables, with one constraint row (variables 1 and 2).
models <- modelPop(nPop=15, numVar=6, longitudinal=FALSE,
                   consMatrix = matrix(c(1, 2), 1, 2))
# Each malformed argument must raise its documented error message.
test_that("Incorrect/missing input arguments yields errors in getModelFitness", {
  # Missing / non-numeric / non-tabular data.
  expect_error(getModelFitness(theData=NULL, allModelString=models,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Data cannot be missing")
  expect_error(getModelFitness(theData=1:10, allModelString=models,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Data should be either a data frame or a matrix of numerical values.")
  expect_error(getModelFitness(theData=c("a", "b"), allModelString=models,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Data should be either a data frame or a matrix of numerical values.")
  expect_error(getModelFitness(theData=data.frame(letter=letters[1:3], number=1:3),
                               allModelString=models,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Data should be either a data frame or a matrix of numerical values.")
  # allModelString must be a matrix and must be present.
  expect_error(getModelFitness(theData=crossdata6V, allModelString=1:3,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Argument allModelString should be formed in a matrix.")
  expect_error(getModelFitness(theData=crossdata6V, allModelString=NULL,
                               longitudinal=FALSE, co="covariance", mixture=FALSE),
               "Argument allModelString cannot be missing.")
  # longitudinal / mixture must be scalar logicals.
  expect_error(getModelFitness(theData=crossdata6V, allModelString=models,
                               longitudinal=NULL, co="covariance", mixture=FALSE),
               "Argument longitudinal cannot be missing.")
  expect_error(getModelFitness(theData=crossdata6V, allModelString=models,
                               longitudinal=1:3, co="covariance", mixture=FALSE),
               "Argument longitudinal should be either logical TRUE or FALSE.")
  expect_error(getModelFitness(theData=crossdata6V, allModelString=models,
                               longitudinal=FALSE, co="covariance", mixture=1:3),
               "Argument mixture should be either logical TRUE or FALSE.")
  # co must be the string "covariance" or "correlation".
  expect_error(getModelFitness(theData=crossdata6V, allModelString=models,
                               longitudinal=FALSE, co="wrongString", mixture=FALSE),
               "Argument co should be either covariance or correlation matrix.")
  expect_error(getModelFitness(theData=crossdata6V, allModelString=models,
                               longitudinal=FALSE, co=20, mixture=FALSE),
               "Argument co should be a string of characters, e.g., either covariance or correlation.")
})
# With valid input the result is a matrix: one row per candidate model and
# two extra columns beyond the model string (presumably fitness values --
# confirm against getModelFitness's docs).
test_that("Correct input arguments yield expected output in modelFitness.", {
  skip_on_cran()
  result <- getModelFitness(theData=crossdata6V, allModelString=models,
                            longitudinal=FALSE, co="covariance", mixture=FALSE)
  expect_true(is.matrix(result))
  expect_equal(nrow(result), nrow(models))
  expect_equal(ncol(result), ncol(models) + 2)
})
#' Build long-format data lists for hazard-style regressions.
#'
#' Creates one "observed" data set plus one counterfactual data set per
#' treatment level, each expanded so that every subject contributes one row
#' per relevant time point, with counting-process style event indicators.
#'
#' @param dat Input data with (at least) columns id, ftime, ftype, trt.
#' @param J Vector of failure-type codes to track.
#' @param ntrt Number of treatment levels.
#' @param uniqtrt The unique treatment values themselves.
#' @param t0 Final time point for the counterfactual data sets.
#' @param bounds Optional data.frame of time-specific bounds (t, l<j>, u<j>).
#' @param ... Unused; absorbed for call compatibility.
#' @return Named list: first element "obs", then one element per treatment.
makeDataList <- function(dat, J, ntrt, uniqtrt, t0, bounds = NULL, ...) {
  n <- nrow(dat)
  # Element 1 holds the "observed" long data; elements 2..(ntrt+1) hold one
  # counterfactual ("treatment set to uniqtrt[i]") long data set each.
  dataList <- vector(mode = "list", length = ntrt + 1)
  # Rank of each subject's failure time among the sorted unique failure
  # times; each subject contributes that many rows (one per failure time up
  # to and including their own).
  rankftime <- match(dat$ftime, sort(unique(dat$ftime)))
  dataList[[1]] <- dat[rep(1:nrow(dat), rankftime), ]
  # Counting-process indicators: N<j> is 1 only on the subject's last
  # replicated row (their own event time); cumsum(rankftime) indexes those.
  for (j in J) {
    dataList[[1]][[paste0("N", j)]] <- 0
    dataList[[1]][[paste0("N", j)]][cumsum(rankftime)] <- as.numeric(dat$ftype == j)
  }
  # Censoring indicator (ftype == 0), same row placement.
  dataList[[1]]$C <- 0
  dataList[[1]]$C[cumsum(rankftime)] <- as.numeric(dat$ftype == 0)
  n.row.ii <- nrow(dataList[[1]])
  uniqftime <- unique(dat$ftime)
  orduniqftime <- uniqftime[order(uniqftime)]
  # Recover the time index from the row names: replication produces names
  # like "5", "5.1", "5.2"; rename the bare first occurrence to "5.0" so
  # every name splits into (id, k), then map k to the (k+1)-th ordered
  # unique failure time. NOTE(review): fragile -- relies on R's
  # make.unique-style row-name suffixes and on ids containing no ".".
  row.names(dataList[[1]])[row.names(dataList[[1]]) %in%
    paste(row.names(dat))] <- paste0(row.names(dat), ".0")
  dataList[[1]]$t <- orduniqftime[as.numeric(paste(unlist(strsplit(
    row.names(dataList[[1]]), ".",
    fixed = TRUE
  ))[seq(2, n.row.ii * 2, 2)])) + 1]
  # Attach time-specific bounds l<j>/u<j> when provided: missing columns
  # default to 0 / 1, rows are matched by left join on t, and any rows the
  # join leaves NA are filled with the same defaults.
  if (!is.null(bounds)) {
    boundFormat <- data.frame(t = bounds$t)
    for (j in J) {
      if (paste("l", j, sep = "") %in% colnames(bounds)) {
        boundFormat[[paste0("l", j)]] <- bounds[, paste0("l", j)]
      } else {
        boundFormat[[paste0("l", j)]] <- 0
      }
      if (paste("u", j, sep = "") %in% names(bounds)) {
        boundFormat[[paste0("u", j)]] <- bounds[, paste0("u", j)]
      } else {
        boundFormat[[paste0("u", j)]] <- 1
      }
    }
    suppressMessages(
      dataList[[1]] <- plyr::join(
        x = dataList[[1]], y = boundFormat,
        type = "left"
      )
    )
    for (j in J) {
      tmp <- is.na(dataList[[1]][, paste0("l", j)])
      dataList[[1]][tmp, paste0("l", j)] <- 0
      tmp <- is.na(dataList[[1]][, paste0("u", j)])
      dataList[[1]][tmp, paste0("u", j)] <- 1
    }
  } else {
    for (j in J) {
      dataList[[1]][[paste0("l", j)]] <- 0
      dataList[[1]][[paste0("u", j)]] <- 1
    }
  }
  # Counterfactual data sets: every subject is expanded to t0 rows
  # (t = 1..t0); event/censoring indicators switch on from the subject's
  # observed ftime onward; trt is set to uniqtrt[i] and ftime to t0.
  for (i in seq_len(ntrt)) {
    dataList[[i + 1]] <- dat[sort(rep(1:nrow(dat), t0)), ]
    dataList[[i + 1]]$t <- rep(1:t0, n)
    for (j in J) {
      typejEvents <- dat$id[which(dat$ftype == j)]
      dataList[[i + 1]][[paste0("N", j)]] <- 0
      dataList[[i + 1]][[paste0("N", j)]][dataList[[i + 1]]$id %in% typejEvents &
        dataList[[i + 1]]$t >= dataList[[i + 1]]$ftime] <- 1
    }
    censEvents <- dat$id[which(dat$ftype == 0)]
    dataList[[i + 1]]$C <- 0
    dataList[[i + 1]]$C[dataList[[i + 1]]$id %in% censEvents &
      dataList[[i + 1]]$t >= dataList[[i + 1]]$ftime] <- 1
    dataList[[i + 1]]$trt <- uniqtrt[i]
    dataList[[i + 1]]$ftime <- t0
    if (!is.null(bounds)) {
      suppressMessages(
        dataList[[i + 1]] <- plyr::join(
          x = dataList[[i + 1]], y = boundFormat,
          type = "left"
        )
      )
      for (j in J) {
        tmp <- is.na(dataList[[i + 1]][, paste0("l", j)])
        dataList[[i + 1]][tmp, paste0("l", j)] <- 0
        tmp <- is.na(dataList[[i + 1]][, paste0("u", j)])
        dataList[[i + 1]][tmp, paste0("u", j)] <- 1
      }
    } else {
      # NOTE(review): default bounds here are eps / 1-eps, while the
      # observed data set above uses 0 / 1 -- confirm the asymmetry is
      # intentional.
      for (j in J) {
        dataList[[i + 1]][[paste0("l", j)]] <- .Machine$double.eps
        dataList[[i + 1]][[paste0("u", j)]] <- 1 - .Machine$double.eps
      }
    }
  }
  names(dataList) <- c("obs", uniqtrt)
  return(dataList)
}
# Tests: building sparse matrices from (i, j, value) triplet lists, through
# both the spam.list() interface and the spam() generic, under both list
# construction methods ('EP' and 'BS'); the two methods must agree.
# NOTE(review): rm(list = ls()) wipes the calling environment (an
# anti-pattern in test files) and the bare `method='BS'` assignments inside
# two of the blocks below are dead code -- candidates for cleanup.
rm(list = ls())
source("helper.R")
context("test-spamlist.R")
test_that("spam.list", {
  # Empty triplets give an all-zero matrix of the requested dimensions.
  spamtest_eq(spam( list(ind=numeric(0), j=numeric(0), numeric(0)),nrow=4,ncol=3),
              spam(0,4,3),rel=FALSE)
  # Reference matrix: anti-diagonal entries i/j assigned by matrix indexing.
  i <- c(1,2,3,4,5)
  j <- c(5,4,3,2,1)
  ss3 <- spam(0,5,5)
  ss3[cbind(i,j)] <- i/j
  spamtest_eq(spam.list(list(i=i,j=j,i/j)), ss3)
  # Explicit dimensions: padding up (13x13) and truncating down (3x3, 2x2).
  pad(ss3) <- c(13,13)
  spamtest_eq(spam.list(list(i=i,j=j,i/j),13,13), ss3)
  pad(ss3) <- c(3,3)
  spamtest_eq(spam.list(list(i=i,j=j,i/j),3,3), ss3)
  pad(ss3) <- c(2,2)
  spamtest_eq(spam.list(list(i=i,j=j,i/j),2,2), ss3,rel=F)
  # The 'EP' and 'BS' list methods must produce identical matrices.
  spamtest_eq({options(spam.listmethod='EP');
    spam.list(list(i=i,j=j,i/j),ncol=3)},
    {options(spam.listmethod='BS');
      method='BS';spam.list(list(i=i,j=j,i/j),ncol=3)})
  spamtest_eq({options(spam.listmethod='EP');
    spam.list(list(i=i,j=j,i/j),ncol=3,nrow=4)},
    {options(spam.listmethod='BS');
      spam.list(list(i=i,j=j,i/j),ncol=3,nrow=4)})
  # Degenerate 1x1 target truncates everything away.
  spamtest_eq(spam.list(list(i=i,j=j,i/j),ncol=1,nrow=1),
              0,rel=F)
  # Larger random triplet set (with repeats); methods must still agree.
  set.seed(2011)
  m = 1000
  rmax = 30
  cmax = 40
  i = floor(runif(m) * rmax) + 1
  j = floor(runif(m) * cmax) + 1
  val = floor(10 * runif(m)) + 1
  options(spam.listmethod='EP')
  ss1 <- spam.list(list(i=i,j=j,val))
  options(spam.listmethod='BS')
  ss2 <- spam.list(list(i=i,j=j,val))
  spamtest_eq(ss1,ss2,rel=F)
})
# The same battery of checks, driven through the spam() generic instead of
# spam.list().
test_that("spam with list", {
  spamtest_eq(spam( list(ind=numeric(0), j=numeric(0), numeric(0)),nrow=4,ncol=3),
              spam(0,4,3),rel=FALSE)
  i <- c(1,2,3,4,5)
  j <- c(5,4,3,2,1)
  ss3 <- spam(0,5,5)
  ss3[cbind(i,j)] <- i/j
  spamtest_eq(spam(list(i=i,j=j,i/j)), ss3)
  pad(ss3) <- c(13,13)
  spamtest_eq(spam(list(i=i,j=j,i/j),13,13), ss3)
  pad(ss3) <- c(3,3)
  spamtest_eq(spam(list(i=i,j=j,i/j),3,3), ss3)
  pad(ss3) <- c(2,2)
  spamtest_eq(spam(list(i=i,j=j,i/j),2,2), ss3,rel=F)
  spamtest_eq({options(spam.listmethod='EP');
    spam(list(i=i,j=j,i/j),ncol=3)},
    {options(spam.listmethod='BS');
      method='BS';spam(list(i=i,j=j,i/j),ncol=3)})
  spamtest_eq({options(spam.listmethod='EP');
    spam(list(i=i,j=j,i/j),ncol=3,nrow=4)},
    {options(spam.listmethod='BS');
      spam(list(i=i,j=j,i/j),ncol=3,nrow=4)})
  spamtest_eq(spam(list(i=i,j=j,i/j),ncol=1,nrow=1),
              0,rel=F)
  set.seed(2011)
  m = 1000
  rmax = 30
  cmax = 40
  i = floor(runif(m) * rmax) + 1
  j = floor(runif(m) * cmax) + 1
  val = floor(10 * runif(m)) + 1
  options(spam.listmethod='EP')
  ss1 <- spam(list(i=i,j=j,val))
  options(spam.listmethod='BS')
  ss2 <- spam(list(i=i,j=j,val))
  spamtest_eq(ss1,ss2,rel=F)
})
# Tests for merge_vim(): combining (cross-validated) variable-importance
# estimates into a single object.
library("testthat")
library("SuperLearner")
# Data-generating model: y = 1 + 0.5*x1 + 0.75*x2 + N(0,1), so each
# covariate's R^2-based importance has the closed form used below.
set.seed(4747)
p <- 2
n <- 5e4
x <- replicate(p, stats::rnorm(n, 0, 1))
x_df <- as.data.frame(x)
y <- 1 + 0.5 * x[, 1] + 0.75 * x[, 2] + stats::rnorm(n, 0, 1)
# Empirical variance of y; importance of x_j is beta_j^2 * Var(x_j) / Var(y).
true_var <- mean((y - mean(y)) ^ 2)
r2_one <- 0.5 ^ 2 * 1 / true_var
r2_two <- 0.75 ^ 2 * 1 / true_var
# Two sample-splitting folds.
folds <- sample(rep(seq_len(2), length = length(y)))
y_1 <- y[folds == 1]
y_2 <- y[folds == 2]
x_1 <- subset(x_df, folds == 1)
x_2 <- subset(x_df, folds == 2)
learners <- c("SL.glm")
V <- 2
# "Full" regressions of y on all covariates, fit separately in each fold.
set.seed(1234)
full_fit_1 <- SuperLearner::SuperLearner(Y = y_1, X = x_1,
                                         SL.library = learners, cvControl = list(V = V))
full_fitted_1 <- SuperLearner::predict.SuperLearner(full_fit_1)$pred
full_fit_2 <- SuperLearner::SuperLearner(Y = y_2, X = x_2,
                                         SL.library = learners, cvControl = list(V = V))
full_fitted_2 <- SuperLearner::predict.SuperLearner(full_fit_2)$pred
# "Reduced" regressions: predict the opposite fold's full fitted values from
# the covariates with the feature of interest removed (column 2, column 1).
reduced_fit_1 <- SuperLearner::SuperLearner(Y = full_fitted_2,
                                            X = x_2[, -2, drop = FALSE],
                                            SL.library = learners, cvControl = list(V = V))
reduced_fitted_1 <- SuperLearner::predict.SuperLearner(reduced_fit_1)$pred
reduced_fit_2 <- SuperLearner::SuperLearner(Y = full_fitted_1,
                                            X = x_1[, -1, drop = FALSE],
                                            SL.library = learners, cvControl = list(V = V))
reduced_fitted_2 <- SuperLearner::predict.SuperLearner(reduced_fit_2)$pred
set.seed(4747)
# Merging two precomputed vim objects must keep each estimate near its
# truth and yield a printable object.
test_that("Merging variable importance estimates works", {
  est_1 <- vim(Y = y, f1 = full_fitted_1, f2 = reduced_fitted_1,
               run_regression = FALSE, indx = 2, type = "r_squared",
               sample_splitting_folds = folds)
  expect_warning(est_2 <- vim(Y = y, f1 = full_fitted_2, f2 = reduced_fitted_2,
                              run_regression = FALSE, indx = 1, type = "r_squared",
                              sample_splitting_folds = folds))
  merged_ests <- merge_vim(est_1, est_2)
  expect_equal(merged_ests$est[1], r2_two, tolerance = 0.2, scale = 1)
  expect_equal(merged_ests$est[2], r2_one, tolerance = 0.4, scale = 1)
  expect_output(print(merged_ests), "Estimate", fixed = TRUE)
})
# Same check for the cross-validated estimator, with vim() running the
# regressions internally.
test_that("Merging cross-validated variable importance estimates works", {
  est_1 <- cv_vim(Y = y, X = x_df, run_regression = TRUE, indx = 2,
                  V = V, cvControl = list(V = V), SL.library = learners,
                  env = environment(), na.rm = TRUE)
  est_2 <- cv_vim(Y = y, X = x_df, run_regression = TRUE, indx = 1,
                  V = V, cvControl = list(V = V), SL.library = learners,
                  env = environment(), na.rm = TRUE)
  merged_ests <- merge_vim(est_1, est_2)
  expect_equal(merged_ests$est[1], r2_two, tolerance = 0.1, scale = 1)
  expect_equal(merged_ests$est[2], r2_one, tolerance = 0.1, scale = 1)
  expect_output(print(merged_ests), "Estimate", fixed = TRUE)
})
# Placeholder NULL (commonly anchors a roxygen documentation block above);
# evaluates to NULL with no side effects.
NULL
#' Validation step: do the specified columns have class `logical`?
#'
#' When `x` is an agent, one validation step per resolved column is
#' appended. When `x` is a bare table, the validation is run immediately
#' through a disposable agent and the table itself is returned.
#'
#' @param x A pointblank agent or a table object.
#' @param columns Columns to check (tidyselect expression).
#' @param actions Action levels to apply to the step(s).
#' @param step_id Optional custom step id(s).
#' @param label,brief Optional step annotations.
#' @param active Whether the step should be evaluated during interrogation.
#' @return The agent (with steps added), or the original table.
col_is_logical <- function(x,
                           columns,
                           actions = NULL,
                           step_id = NULL,
                           label = NULL,
                           brief = NULL,
                           active = TRUE) {
  # This step type takes no preconditions and no comparison values.
  preconditions <- NULL
  values <- NULL
  # Human-readable rendering of the columns expression (outer quotes
  # stripped), stored alongside each step for reporting.
  columns_expr <-
    rlang::as_label(rlang::quo(!!enquo(columns))) %>%
    gsub("^\"|\"$", "", .)
  # Capture and resolve the tidyselect column spec against x.
  columns <- rlang::enquo(columns)
  columns <- resolve_columns(x = x, var_expr = columns, preconditions = NULL)
  # Bare table: run through a quiet throwaway agent (actions fire during
  # interrogate()) and return the original table so this call can sit
  # inside a data pipeline.
  if (is_a_table_object(x)) {
    secret_agent <-
      create_agent(x, label = "::QUIET::") %>%
      col_is_logical(
        columns = columns,
        label = label,
        brief = brief,
        actions = prime_actions(actions),
        active = active
      ) %>%
      interrogate()
    return(x)
  }
  # Otherwise x is an agent: generate briefs if none supplied.
  agent <- x
  if (is.null(brief)) {
    brief <-
      generate_autobriefs(
        agent, columns, preconditions, values, "col_is_logical"
      )
  }
  # Normalize/validate step ids, then register one step per column.
  step_id <- normalize_step_id(step_id, columns, agent)
  i_o <- get_next_validation_set_row(agent)
  check_step_id_duplicates(step_id, agent)
  for (i in seq(columns)) {
    agent <-
      create_validation_step(
        agent = agent,
        assertion_type = "col_is_logical",
        i_o = i_o,
        columns_expr = columns_expr,
        column = columns[i],
        preconditions = NULL,
        actions = covert_actions(actions, agent),
        step_id = step_id[i],
        label = label,
        brief = brief[i],
        active = active
      )
  }
  agent
}
#' testthat expectation: the specified columns are of class `logical`.
#'
#' Runs the validation through a quiet agent and converts the step-level
#' "notify" states into a single testthat expectation.
#'
#' @param object Table to validate.
#' @param columns Columns to check (tidyselect expression).
#' @param threshold Failure threshold that triggers the notify condition.
#' @return Invisibly, `object` (so the expectation can be chained).
expect_col_is_logical <- function(object,
                                  columns,
                                  threshold = 1) {
  fn_name <- "expect_col_is_logical"
  # Interrogate a disposable agent and keep its validation-set table.
  vs <-
    create_agent(tbl = object, label = "::QUIET::") %>%
    col_is_logical(
      columns = {{ columns }},
      actions = action_levels(notify_at = threshold)
    ) %>%
    interrogate() %>%
    .$validation_set
  # One notify flag per validation step (one step per column).
  x <- vs$notify
  # Report failure amounts in the units the threshold was expressed in.
  threshold_type <- get_threshold_type(threshold = threshold)
  if (threshold_type == "proportional") {
    failed_amount <- vs$f_failed
  } else {
    failed_amount <- vs$n_failed
  }
  # Collapse multiple steps to the first failing one (if any).
  if (length(x) > 1 && any(x)) {
    fail_idx <- which(x)[1]
    failed_amount <- failed_amount[fail_idx]
    x <- TRUE
  } else {
    x <- any(x)
    fail_idx <- 1
  }
  # Re-raise anything captured during interrogation of the first step.
  if (inherits(vs$capture_stack[[1]]$warning, "simpleWarning")) {
    warning(conditionMessage(vs$capture_stack[[1]]$warning))
  }
  if (inherits(vs$capture_stack[[1]]$error, "simpleError")) {
    stop(conditionMessage(vs$capture_stack[[1]]$error))
  }
  # The expectation passes when no step reached the notify state.
  act <- testthat::quasi_label(enquo(x), arg = "object")
  # column_text / col_type feed the glue failure-message template below.
  column_text <- prep_column_text(vs$column[[fail_idx]])
  col_type <- "logical"
  testthat::expect(
    ok = identical(!as.vector(act$val), TRUE),
    failure_message = glue::glue(
      failure_message_gluestring(
        fn_name = fn_name, lang = "en"
      )
    )
  )
  act$val <- object
  invisible(act$val)
}
#' Test whether the specified columns in a table are logical-valued.
#'
#' Runs the `col_is_logical()` validation through a quiet throwaway agent
#' and returns TRUE only when no validation step reached the "notify"
#' state. Warnings/errors captured during interrogation are re-raised.
#'
#' @param object Table to validate.
#' @param columns Columns to check (tidyselect expression).
#' @param threshold Failure threshold that triggers the notify condition.
#' @return A length-1 logical.
test_col_is_logical <- function(object,
                                columns,
                                threshold = 1) {
  # Build a quiet agent, declare the validation step, and interrogate.
  quiet_agent <- create_agent(tbl = object, label = "::QUIET::")
  quiet_agent <- col_is_logical(
    quiet_agent,
    columns = {{ columns }},
    actions = action_levels(notify_at = threshold)
  )
  vs <- interrogate(quiet_agent)$validation_set
  # Surface any warning or error captured for the first step.
  first_stack <- vs$capture_stack[[1]]
  if (inherits(first_stack$warning, "simpleWarning")) {
    warning(conditionMessage(first_stack$warning))
  }
  if (inherits(first_stack$error, "simpleError")) {
    stop(conditionMessage(first_stack$error))
  }
  # TRUE only when every step stayed below the notify threshold.
  all(!vs$notify)
}
#' Likelihood-ratio test of linear restrictions beta = H %*% phi in a
#' Johansen cointegration model.
#'
#' Solves the restricted eigenvalue problem and compares the restricted
#' eigenvalues with the unrestricted ones via
#' N * sum(log((1 - lambda.res) / (1 - lambda))), asymptotically
#' chi-squared with r * (P - ncol(H)) degrees of freedom.
#'
#' @param z A fitted 'ca.jo' object.
#' @param H Restriction matrix with P rows (P = system dimension, +1 when a
#'   restricted deterministic term is included, i.e. ecdet != "none").
#' @param r Assumed cointegration rank, 1 <= r < z@P.
#' @return A 'cajo.test' S4 object.
blrtest <- function(z, H, r){
  # Fix: inherits() replaces the fragile class(z) == "ca.jo" comparison,
  # which misbehaves when the object carries more than one class.
  if(!inherits(z, "ca.jo")){
    stop("\nPlease, provide object of class 'ca.jo' as 'z'.\n")
  }
  if(r >= z@P || r < 1){
    stop("\nCount of cointegrating relationships is out of allowable range.\n")
  }
  # A restricted deterministic term adds one row to the system.
  if(z@ecdet == "none"){
    P <- z@P
  }else{
    P <- z@P + 1
  }
  r <- as.integer(r)
  H <- as.matrix(H)
  if(nrow(H) != P){
    stop("\nRow number of 'H' is unequal to VAR order.\n")
  }
  type <- "Estimation and testing under linear restrictions on beta"
  # Product-moment matrices of the residual blocks (Johansen notation).
  N <- nrow(z@Z0)
  M00 <- crossprod(z@Z0)/N
  M11 <- crossprod(z@Z1)/N
  MKK <- crossprod(z@ZK)/N
  M01 <- crossprod(z@Z0, z@Z1)/N
  M0K <- crossprod(z@Z0, z@ZK)/N
  MK0 <- crossprod(z@ZK, z@Z0)/N
  M10 <- crossprod(z@Z1, z@Z0)/N
  M1K <- crossprod(z@Z1, z@ZK)/N
  MK1 <- crossprod(z@ZK, z@Z1)/N
  M11inv <- solve(M11)
  # Concentrated (partialled-out) covariance matrices S_ij.
  S00 <- M00 - M01%*%M11inv%*%M10
  S0K <- M0K - M01%*%M11inv%*%M1K
  SK0 <- MK0 - MK1%*%M11inv%*%M10
  SKK <- MKK - MK1%*%M11inv%*%M1K
  # Pivoted Cholesky factor of H'SKK H; the pivot order is undone so C
  # refers to the original column ordering.
  Ctemp <- chol(t(H)%*%SKK%*%H, pivot=TRUE)
  pivot <- attr(Ctemp, "pivot")
  oo <- order(pivot)
  C <- t(Ctemp[,oo])
  Cinv <- solve(C)
  S00inv <- solve(S00)
  # Restricted eigenvalue problem; eigenvectors are mapped back through H.
  # Fix: eigen() returns component "vectors" -- the original's $vector
  # relied on partial matching.
  valeigen <- eigen(Cinv%*%t(H)%*%SK0%*%S00inv%*%S0K%*%H%*%t(Cinv))
  e <- valeigen$vectors
  V <- H%*%t(Cinv)%*%e
  Vorg <- V
  # Normalize each cointegrating vector on its first element.
  idx <- ncol(V)
  V <- sapply(seq_len(idx), function(x) V[,x]/V[1,x])
  # Loadings, long-run impact matrix, residual covariance and short-run
  # coefficient matrix under the restriction.
  W <- S0K%*%V%*%solve(t(V)%*%SKK%*%V)
  PI <- W %*% t(V)
  DELTA <- S00 - S0K%*%V%*%solve(t(V)%*%SKK%*%V)%*%t(V)%*%SK0
  GAMMA <- M01%*%M11inv - PI%*%MK1%*%M11inv
  # LR statistic from restricted vs unrestricted eigenvalues.
  lambda.res <- valeigen$values
  lambda <- z@lambda
  teststat <- N*sum(log((1-lambda.res[1:r])/(1-lambda[1:r])))
  df <- r*(P - ncol(H))
  pval <- c(1-pchisq(teststat, df), df)
  new("cajo.test", Z0=z@Z0, Z1=z@Z1, ZK=z@ZK, ecdet=z@ecdet, H=H, A=NULL, B=NULL, type=type, teststat=teststat, pval=pval, lambda=lambda.res, Vorg=Vorg, V=V, W=W, PI=PI, DELTA=DELTA, DELTA.bb=NULL, DELTA.ab=NULL, DELTA.aa.b=NULL, GAMMA=GAMMA, test.name="Johansen-Procedure")
}
#' Mediation analysis: continuous outcome, continuous mediator, with
#' delta-method inference.
#'
#' Fits Y ~ X + M (+ covariateY) and M ~ X (+ covariateM) by least squares,
#' then derives the natural indirect effect (NIE), total effect (TE) and
#' proportion mediated (PM) with delta-method variances.
#'
#' @param data data.frame with outcome, mediator, exposure and covariates.
#' @param outcome,mediator,exposure Column names of Y, M and X.
#' @param covariateY,covariateM Covariate names per model (NULL for none).
#' @param x0,x1 Reference and comparison exposure levels.
#' @return List with point_est, var_est, sd_est and 95% Wald ci_est.
mediate_contY_contM=function(data,
                             outcome="Y",
                             mediator="M",
                             exposure="X",
                             covariateY=c("X1","X2"),
                             covariateM=c("X1","X2"),x0=0,x1=1) {
  data = as.data.frame(data)
  # Outcome model: Y ~ X + M (+ optional covariates).
  if (is.null(covariateY)) {
    formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
  } else {
    formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
  }
  # Mediator model: M ~ X (+ optional covariates).
  if (is.null(covariateM)) {
    formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
  } else {
    formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
  }
  model_Y=summary(lm(formula_Y,data=data))
  model_M=summary(lm(formula_M,data=data))
  # Coefficients and (unscaled) covariance of each fit.
  # NOTE(review): cov.unscaled is (X'X)^-1; the usual delta-method input is
  # sigma^2 * (X'X)^-1 (i.e. vcov) -- confirm the intended scaling.
  beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
  gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
  nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
  # Joint block-diagonal covariance of (gamma, beta): the two regressions
  # are fit separately, so the cross blocks are zero.
  S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
  S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
  colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
  # Symbolic names for the covariate coefficients (used by deriv()).
  # Bug fix: the original also executed names(cY) <- ... / names(cM) <- ...
  # here, but cY/cM are not arguments of this function (they belong to the
  # binary-mediator variant), so any non-NULL covariate input crashed with
  # "object 'cY' not found".
  if (!is.null(covariateY)) {
    beta_c=paste("beta_",covariateY,sep="")
  }
  if (!is.null(covariateM)) {
    gamma_c=paste("gamma_",covariateM,sep="")
  }
  # NIE = beta2 * gamma1 * (x1 - x0): the mediated effect of X on Y.
  NIEa_fun = function() {
    output = "beta2*gamma1*(x1-x0)"
    return(output)
  }
  # Full parameter vector, in the same order as the rows/cols of S.
  variable=c("gamma0","gamma1",if(!is.null(covariateM)) {gamma_c},"beta0","beta1","beta2",if(!is.null(covariateY)) {beta_c})
  NIEa_D=deriv(parse(text=NIEa_fun()),variable)
  # Bind fitted coefficient values to the symbolic names before eval().
  gamma0=gamma[1];gamma1=gamma[2];
  if(!is.null(covariateM)) {
    for (i in (1:length(covariateM))) {assign(gamma_c[i],gamma[2+i])}
  }
  beta0=beta[1];beta1=beta[2];beta2=beta[3]
  if(!is.null(covariateY)) {
    for (i in (1:length(covariateY))) {assign(beta_c[i],beta[3+i])}
  }
  # TE = (beta2*gamma1 + beta1) * (x1 - x0): total effect.
  TEa_fun = function() {
    output = "(beta2*gamma1+beta1)*(x1-x0)"
    return(output)
  }
  TEa_D=deriv(parse(text=TEa_fun()),variable)
  # PM = NIE / TE; the (x1 - x0) factors cancel.
  PMa_fun = function() {
    .UP = "beta2*gamma1"
    .BOT = "beta2*gamma1+beta1"
    output=paste("(",.UP,")/(",.BOT,")")
    return(output)
  }
  PMa_D=deriv(parse(text=PMa_fun()),variable)
  # Evaluate each quantity and its gradient; delta-method variance is
  # g' S g for gradient g.
  NIEa_D = eval(NIEa_D)
  NIEa_p = NIEa_D[1]
  lambda= t(attr(NIEa_D,"gradient"))
  V_NIEa = as.vector(t(lambda) %*% S %*% lambda)
  TEa_D = eval(TEa_D)
  TEa_p = TEa_D[1]
  lambda= t(attr(TEa_D,"gradient"))
  V_TEa = as.vector(t(lambda) %*% S %*% lambda)
  PMa_D = eval(PMa_D)
  PMa_p = PMa_D[1]
  lambda= t(attr(PMa_D,"gradient"))
  V_PMa = as.vector(t(lambda) %*% S %*% lambda)
  # Assemble point estimates, variances, SDs and 95% Wald intervals.
  point_est = c(NIEa_p,TEa_p,PMa_p);
  names(point_est)=c("NIE","TE","PM")
  var_est = c(V_NIEa,V_TEa,V_PMa);
  names(var_est)=c("NIE","TE","PM")
  sd_est = sqrt(var_est)
  names(sd_est)=c("NIE","TE","PM")
  ci_est = rbind(point_est-1.96*sd_est,point_est+1.96*sd_est)
  rownames(ci_est) = c("Lower boundary","Upper boundary")
  return(list(point_est=point_est,var_est=var_est,sd_est=sd_est,ci_est=ci_est))
}
#' Bootstrap percentile CIs for NIE/TE/PM (continuous Y, continuous M).
#'
#' Re-fits the outcome and mediator regressions on R bootstrap resamples
#' and returns percentile confidence intervals for the natural indirect
#' effect, total effect and proportion mediated.
#'
#' @param data data.frame with outcome, mediator, exposure and covariates.
#' @param outcome,mediator,exposure Column names of Y, M and X.
#' @param covariateY,covariateM Covariate names per model (NULL for none).
#' @param x0,x1 Reference and comparison exposure levels.
#' @param R Number of bootstrap replicates.
#' @return Named numeric vector CI_NIE_Low, CI_NIE_High, ..., CI_PM_High.
Mediate_contY_contM_bootci=function(data,
                                    outcome="Y",
                                    mediator="M",
                                    exposure="X",
                                    covariateY=c("X1","X2"),
                                    covariateM=c("X1","X2"),
                                    x0=0,x1=1,R=1000) {
  data = as.data.frame(data)
  # Statistic for boot(): refit both models on the resample and return the
  # three point estimates.
  get_par_boot=function(data=data,indices) {
    data=data[indices,]
    # Outcome model: Y ~ X + M (+ optional covariates).
    if (is.null(covariateY)) {
      formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
    } else {
      formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
    }
    # Mediator model: M ~ X (+ optional covariates).
    if (is.null(covariateM)) {
      formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
    } else {
      formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
    }
    model_Y=summary(lm(formula_Y,data=data))
    model_M=summary(lm(formula_M,data=data))
    beta=model_Y$coef[,1]
    gamma=model_M$coef[,1]
    # Bug fix: the original executed names(cY) <- ... / names(cM) <- ...
    # here, but cY/cM are not in scope in this function (they belong to
    # the binary-mediator variant), so any non-NULL covariate input
    # crashed with "object 'cY' not found". Only point estimates are
    # needed per replicate, so the covariance bookkeeping and the
    # eval(parse()) machinery of the delta-method version are dropped;
    # the closed forms below are algebraically identical.
    gamma1=gamma[[2]]
    beta1=beta[[2]];beta2=beta[[3]]
    # NIE = beta2*gamma1*(x1-x0); TE = (beta2*gamma1+beta1)*(x1-x0);
    # PM = NIE / TE (the (x1-x0) factors cancel).
    NIEa_p = beta2*gamma1*(x1-x0)
    TEa_p = (beta2*gamma1+beta1)*(x1-x0)
    PMa_p = NIEa_p/TEa_p
    point_est = c(NIEa_p,TEa_p,PMa_p)
    names(point_est)=c("NIE","TE","PM")
    return(point_est)
  }
  # Percentile bootstrap intervals for each statistic.
  boot.par=boot::boot(data=data, statistic=get_par_boot, R=R)
  boot.parciNIEa <- boot::boot.ci(boot.par, index=1, type=c("perc"))
  boot.parciTEa <- boot::boot.ci(boot.par, index=2, type=c("perc"))
  boot.parciPMa <- boot::boot.ci(boot.par, index=3, type=c("perc"))
  # Elements 4 and 5 of the "percent" component are the CI endpoints.
  ci_est_prec = c(boot.parciNIEa$percent[4:5],
                  boot.parciTEa$percent[4:5],
                  boot.parciPMa$percent[4:5])
  names(ci_est_prec)=c(paste(rep("CI_",6),rep(c("NIE","TE","PM"),each=2),rep(c("_Low","_High"),times=3),sep=""))
  return(ci_est_prec)
}
#' Mediation analysis: continuous outcome, binary mediator (logit link),
#' with delta-method inference on NIE, TE and PM.
#'
#' @param data data.frame with outcome, mediator, exposure and covariates.
#' @param outcome,mediator,exposure Column names of Y, M and X.
#' @param covariateY,covariateM Covariate names per model (NULL for none).
#' @param x0,x1 Reference and comparison exposure levels.
#' @param cY,cM Covariate values at which effects are evaluated; lengths
#'   must match covariateY/covariateM.
#'   NOTE(review): the defaults (length 2) do not match the default
#'   covariate lists (lengths 8 and 7) -- confirm intended defaults.
#' @return List with point_est, var_est, sd_est and 95% Wald ci_est.
mediate_contY_binaM=function(data,
                             outcome="Y",
                             mediator="M",
                             exposure="X",
                             covariateY=c("X1","X2","X3","X4","X5","X6","X7","X8"),
                             covariateM=c("X1","X2","X3","X4","X5","X6","X7"),
                             x0=0,x1=1,cY=c(0,0),cM=c(0,0)) {
  data = as.data.frame(data)
  # Outcome model: linear; mediator model: logistic.
  if (is.null(covariateY)) {
    formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
  } else {
    formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
  }
  if (is.null(covariateM)) {
    formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
  } else {
    formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
  }
  model_Y=summary(lm(formula_Y,data=data))
  model_M=summary(glm(formula_M,family=binomial(link="logit"),data=data))
  # Coefficients and (unscaled) covariance of each fit.
  # NOTE(review): for the linear outcome model cov.unscaled is (X'X)^-1,
  # not sigma^2 * (X'X)^-1 -- confirm the intended delta-method scaling.
  beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
  gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
  nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
  # Joint block-diagonal covariance of (gamma, beta); the two models are
  # fit separately, so the cross blocks are zero.
  S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
  S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
  colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
  # Name the fixed covariate values after their coefficients (dropping
  # intercept/exposure/mediator names) and build symbolic coefficient
  # names for deriv().
  if (is.null(covariateY)==0) {
    names(cY) = paste(names(beta),"_betafix",sep="")[-c(1:3)]
    beta_c=paste("beta_",covariateY,sep="")
  }
  if (is.null(covariateM)==0) {
    names(cM) = paste(names(gamma),"_gammafix",sep="")[-c(1:2)]
    gamma_c=paste("gamma_",covariateM,sep="")
  }
  # .A / .B: odds-style exp() terms of the mediator model at x0 and
  # (shifted by beta2) at x1, with the numeric cM values inlined into the
  # expression text; they feed the NIE/TE/PM formulas below.
  .A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
    paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
    ,")")
  .B= paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
    paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
    ,")")
  # NIE as a symbolic expression in the model coefficients.
  NIEa_fun = function() {
    output=paste("beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
    return(output)
  }
  # Full parameter vector, in the same order as the rows/cols of S.
  variable=c("gamma0","gamma1",if(is.null(covariateM)==0) {gamma_c},"beta0","beta1","beta2",if(is.null(covariateY)==0) {beta_c})
  NIEa_D=deriv(parse(text=NIEa_fun()),variable)
  # Bind fitted coefficient values to the symbolic names before eval().
  gamma0=gamma[1];gamma1=gamma[2];
  if(is.null(covariateM)==0) {
    for (i in (1:length(covariateM))) {assign(gamma_c[i],gamma[2+i])}
  }
  beta0=beta[1];beta1=beta[2];beta2=beta[3]
  if(is.null(covariateY)==0) {
    for (i in (1:length(covariateY))) {assign(beta_c[i],beta[3+i])}
  }
  # TE = direct part beta1*(x1-x0) plus the mediated part.
  TEa_fun = function() {
    output = paste("beta1*",x1-x0,"+","beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
    return(output)
  }
  TEa_D=deriv(parse(text=TEa_fun()),variable)
  # PM = NIE / TE.
  PMa_fun = function() {
    .UP = paste("beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
    .BOT = paste("beta1*",x1-x0,"+","beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
    output=paste("(",.UP,")/(",.BOT,")")
    return(output)
  }
  PMa_D=deriv(parse(text=PMa_fun()),variable)
  # Evaluate each quantity and its gradient; delta-method variance is
  # g' S g for gradient g.
  NIEa_D = eval(NIEa_D)
  NIEa_p = NIEa_D[1]
  lambda= t(attr(NIEa_D,"gradient"))
  V_NIEa = as.vector(t(lambda) %*% S %*% lambda)
  TEa_D = eval(TEa_D)
  TEa_p = TEa_D[1]
  lambda= t(attr(TEa_D,"gradient"))
  V_TEa = as.vector(t(lambda) %*% S %*% lambda)
  PMa_D = eval(PMa_D)
  PMa_p = PMa_D[1]
  lambda= t(attr(PMa_D,"gradient"))
  V_PMa = as.vector(t(lambda) %*% S %*% lambda)
  # Assemble point estimates, variances, SDs and 95% Wald intervals.
  point_est = c(NIEa_p,TEa_p,PMa_p);
  names(point_est)=c("NIE","TE","PM")
  var_est = c(V_NIEa,V_TEa,V_PMa);
  names(var_est)=c("NIE","TE","PM")
  sd_est = sqrt(var_est)
  names(sd_est)=c("NIE","TE","PM")
  ci_est = rbind(point_est-1.96*sd_est,point_est+1.96*sd_est)
  rownames(ci_est) = c("Lower boundary","Upper boundary")
  return(list(point_est=point_est,var_est=var_est,sd_est=sd_est,ci_est=ci_est))
}
#' Bootstrap percentile CIs for NIE/TE/PM (continuous Y, binary mediator).
#'
#' Re-fits the linear outcome model and logistic mediator model on R
#' bootstrap resamples and returns percentile confidence intervals.
#'
#' @param data data.frame with outcome, mediator, exposure and covariates.
#' @param outcome,mediator,exposure Column names of Y, M and X.
#' @param covariateY,covariateM Covariate names per model (NULL for none).
#' @param x0,x1 Reference and comparison exposure levels.
#' @param cY,cM Covariate values at which effects are evaluated.
#' @param R Number of bootstrap replicates.
#' @return Named numeric vector CI_NIE_Low, CI_NIE_High, ..., CI_PM_High.
Mediate_contY_binaM_bootci=function(data,
                                    outcome="Y",
                                    mediator="M",
                                    exposure="X",
                                    covariateY=c("X1","X2"),
                                    covariateM=c("X1","X2"),
                                    x0=0,x1=1,cY=c(0,0),cM=c(0,0),R=1000) {
  data = as.data.frame(data)
  # Statistic for boot(): refit both models on the resample and return the
  # three point estimates (same formulas as mediate_contY_binaM, evaluated
  # directly rather than differentiated).
  get_par_boot=function(data=data,indices) {
    data=data[indices,]
    if (is.null(covariateY)) {
      formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
    } else {
      formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
    }
    if (is.null(covariateM)) {
      formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
    } else {
      formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
    }
    model_Y=summary(lm(formula_Y,data=data))
    model_M=summary(glm(formula_M,family=binomial(link="logit"),data=data))
    beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
    gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
    nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
    # NOTE(review): S is assembled here but never used on this point-
    # estimate-only path -- dead code inherited from the delta-method
    # variant.
    S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
    S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
    colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
    # Name fixed covariate values and build symbolic coefficient names.
    if (is.null(covariateY)==0) {
      names(cY) = paste(names(beta),"_betafix",sep="")[-c(1:3)]
      beta_c=paste("beta_",covariateY,sep="")
    }
    if (is.null(covariateM)==0) {
      names(cM) = paste(names(gamma),"_gammafix",sep="")[-c(1:2)]
      gamma_c=paste("gamma_",covariateM,sep="")
    }
    # .A / .B: exp() terms of the mediator model at x0 and (shifted by
    # beta2) at x1, with the numeric cM values inlined.
    .A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
      paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
      ,")")
    .B= paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
      paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
      ,")")
    NIEa_fun = function() {
      output = paste("beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
      return(output)
    }
    variable=c("gamma0","gamma1",if(is.null(covariateM)==0) {gamma_c},"beta0","beta1","beta2",if(is.null(covariateY)==0) {beta_c})
    # Bind fitted coefficient values before evaluating the expressions.
    gamma0=gamma[1];gamma1=gamma[2];
    if(is.null(covariateM)==0) {
      for (i in (1:length(covariateM))) {assign(gamma_c[i],gamma[2+i])}
    }
    beta0=beta[1];beta1=beta[2];beta2=beta[3]
    if(is.null(covariateY)==0) {
      for (i in (1:length(covariateY))) {assign(beta_c[i],beta[3+i])}
    }
    TEa_fun = function() {
      output = paste("beta1*",x1-x0,"+","beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
      return(output)
    }
    PMa_fun = function() {
      .UP = paste("beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
      .BOT = paste("beta1*",x1-x0,"+","beta2*((",.A,")/(1+",.A,")-(",.B,")/(1+",.B,"))")
      output=paste("(",.UP,")/(",.BOT,")")
      return(output)
    }
    # Point estimates only; the bootstrap supplies the uncertainty.
    NIEa_p = eval(parse(text=NIEa_fun()))
    TEa_p = eval(parse(text=TEa_fun()))
    PMa_p = eval(parse(text=PMa_fun()))
    point_est = c(NIEa_p,TEa_p,PMa_p);
    names(point_est)=c("NIE","TE","PM")
    return(point_est)
  }
  # Percentile bootstrap intervals; elements 4 and 5 of "percent" are the
  # interval endpoints.
  boot.par=boot::boot(data=data, statistic=get_par_boot, R=R)
  boot.parciNIEa <- boot::boot.ci(boot.par, index=1, type=c("perc"))
  boot.parciTEa <- boot::boot.ci(boot.par, index=2, type=c("perc"))
  boot.parciPMa <- boot::boot.ci(boot.par, index=3, type=c("perc"))
  ci_est_prec = c(boot.parciNIEa$percent[4:5],
                  boot.parciTEa$percent[4:5],
                  boot.parciPMa$percent[4:5])
  names(ci_est_prec)=c(paste(rep("CI_",6),rep(c("NIE","TE","PM"),each=2),rep(c("_Low","_High"),times=3),sep=""))
  return(ci_est_prec)
}
mediate_binaY_contM=function(data,
outcome="Y",
mediator="M",
exposure="X",
covariateY=c("X1","X2","X3","X4","X5","X6","X7","X8"),
covariateM=c("X1","X2","X3","X4","X5","X6","X7"),
x0=0,x1=1,cY=c(0,0),cM=c(0,0)) {
data = as.data.frame(data)
HermiteCoefs=function (order) {
x <- 1
if (order > 0)
for (n in 1:order) x <- c(0, 2 * x) - c(((0:(n - 1)) *
x)[-1L], 0, 0)
return(x)
}
gauss.hermite=function (f, mu = 0, sd = 1, ..., order = 5) {
stopifnot(is.function(f))
stopifnot(length(mu) == 1)
stopifnot(length(sd) == 1)
Hn <- HermiteCoefs(order)
Hn1 <- HermiteCoefs(order - 1)
x <- sort(Re(polyroot(Hn)))
Hn1x <- matrix(Hn1, nrow = 1) %*% t(outer(x, 0:(order - 1),
"^"))
w <- 2^(order - 1) * factorial(order) * sqrt(pi)/(order *
Hn1x)^2
ww <- w/sqrt(pi)
xx <- mu + sd * sqrt(2) * x
ans <- 0
for (i in seq_along(x)) ans <- ans + ww[i] * f(xx[i], ...)
return(ans)
}
mygrad=function (f, x0,heps = 1e-5, ...) {
if (!is.numeric(x0))
stop("Argument 'x0' must be a numeric value.")
fun <- match.fun(f)
f <- function(x) fun(x, ...)
p =length(f(x0))
n <- length(x0)
hh <- rep(0, n)
gr <- matrix(0,nrow=n,ncol=p)
for (i in 1:n) {
hh[i] <- heps
gr[i,] <- (f(x0 + hh) - f(x0 - hh))/(2 * heps)
hh[i] <- 0
}
return(gr)
}
NIE_unbiased = function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
x0s=x0s,x1=x1,cY=cY,cM=cM) {
gamma0 = theta[1]
gamma1 = theta[2]
gamma_c = theta[loc_gamma_c]
beta0 = theta[loc_beta_0[1]]
beta1 = theta[loc_beta_0[2]]
beta2 = theta[loc_beta_0[3]]
beta_c = theta[loc_beta_c]
sigma2 = theta[length(theta)]
if (is.null(loc_beta_c)) {
f11 = function(x) {exp(beta0+beta1*x1+beta2*x)/(1+exp(beta0+beta1*x1+beta2*x))}
f10 = function(x) {exp(beta0+beta1*x1+beta2*x)/(1+exp(beta0+beta1*x1+beta2*x))}
} else {
f11 = function(x) {exp(beta0+beta1*x1+beta2*x+sum(beta_c*cY))/(1+exp(beta0+beta1*x1+beta2*x+sum(beta_c*cY)))}
f10 = function(x) {exp(beta0+beta1*x1+beta2*x+sum(beta_c*cY))/(1+exp(beta0+beta1*x1+beta2*x+sum(beta_c*cY)))}
}
if (is.null(loc_gamma_c)) {
p11s= gauss.hermite(f=f11,mu=gamma0+gamma1*x1,sd=sqrt(sigma2),order=40)
p10s= gauss.hermite(f=f10,mu=gamma0+gamma1*x0s,sd=sqrt(sigma2),order=40)
} else {
p11s= gauss.hermite(f=f11,mu=gamma0+gamma1*x1+sum(gamma_c*cM),sd=sqrt(sigma2),order=40)
p10s= gauss.hermite(f=f10,mu=gamma0+gamma1*x0s+sum(gamma_c*cM),sd=sqrt(sigma2),order=40)
}
output = log((p11s)/(1-p11s)) - log((p10s)/(1-p10s))
return(output)
}
# Exact (non-rare-outcome) total effect on the log-odds scale.
# theta packs c(gamma, beta, sigma2); the loc_* arguments give the index
# positions of each parameter group inside theta.
# NOTE(review): relies on gauss.hermite() from the enclosing scope.
TE_unbiased = function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                       x0s=x0s,x1=x1,cY=cY,cM=cM) {
  gamma0 <- theta[1]
  gamma1 <- theta[2]
  gamma_c <- theta[loc_gamma_c]
  beta0 <- theta[loc_beta_0[1]]
  beta1 <- theta[loc_beta_0[2]]
  beta2 <- theta[loc_beta_0[3]]
  beta_c <- theta[loc_beta_c]
  sigma2 <- theta[length(theta)]
  # Fixed covariate contributions (0 when the model has no covariates).
  offY <- if (is.null(loc_beta_c)) 0 else sum(beta_c * cY)
  offM <- if (is.null(loc_gamma_c)) 0 else sum(gamma_c * cM)
  # Outcome probability at exposure level xval, as a function of mediator m.
  pY <- function(xval) {
    function(m) {
      e <- exp(beta0 + beta1 * xval + beta2 * m + offY)
      e / (1 + e)
    }
  }
  # Both exposure and mediator distribution move together: x1 vs x0s.
  p11s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x1 + offM,
                        sd = sqrt(sigma2), order = 40)
  p00s <- gauss.hermite(f = pY(x0s), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sqrt(sigma2), order = 40)
  log(p11s / (1 - p11s)) - log(p00s / (1 - p00s))
}
# Exact (non-rare-outcome) proportion mediated: NIE / TE on the log-odds
# scale, both computed from the same quadrature probabilities.
# NOTE(review): relies on gauss.hermite() from the enclosing scope.
PM_unbiased=function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                     x0s=x0s,x1=x1,cY=cY,cM=cM) {
  gamma0 <- theta[1]
  gamma1 <- theta[2]
  gamma_c <- theta[loc_gamma_c]
  beta0 <- theta[loc_beta_0[1]]
  beta1 <- theta[loc_beta_0[2]]
  beta2 <- theta[loc_beta_0[3]]
  beta_c <- theta[loc_beta_c]
  sigma2 <- theta[length(theta)]
  # Fixed covariate contributions (0 when the model has no covariates).
  offY <- if (is.null(loc_beta_c)) 0 else sum(beta_c * cY)
  offM <- if (is.null(loc_gamma_c)) 0 else sum(gamma_c * cM)
  # Outcome probability at exposure level xval, as a function of mediator m.
  pY <- function(xval) {
    function(m) {
      e <- exp(beta0 + beta1 * xval + beta2 * m + offY)
      e / (1 + e)
    }
  }
  sd.M <- sqrt(sigma2)
  p11s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x1 + offM,
                        sd = sd.M, order = 40)
  # Exposure held at x1 in the outcome model, mediator distribution at x0s.
  p10s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sd.M, order = 40)
  p00s <- gauss.hermite(f = pY(x0s), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sd.M, order = 40)
  te <- log(p11s / (1 - p11s)) - log(p00s / (1 - p00s))
  nie <- log(p11s / (1 - p11s)) - log(p10s / (1 - p10s))
  nie / te
}
# ---- Fit working models: logistic outcome, linear (Gaussian) mediator ----
if (is.null(covariateY)) {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
} else {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
}
if (is.null(covariateM)) {
formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
} else {
formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
}
model_Y=summary(glm(formula_Y,family=binomial(link="logit"),data=data))
model_M=summary(lm(formula_M,data=data))
# beta: outcome-model coefficients; gamma: mediator-model coefficients.
beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
# Block-diagonal joint covariance of (gamma, beta) for the delta method.
S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
# Rare-outcome (approximate) effects: product-of-coefficients NIE, additive
# TE, and mediation proportion PM, each with a delta-method variance.
NIE=beta[3]*gamma[2]
V_NIE = gamma[2]^2 * cov_beta[3,3] + beta[3]^2 * cov_gamma[2,2]
TE = beta[2]+beta[3]*gamma[2]
lambda = matrix(c(0,beta[3],rep(0,length(covariateM)),0,1,gamma[2],rep(0,length(covariateY))),ncol=1)
V_TE=as.vector(t(lambda) %*% S %*% lambda)
lambda = matrix(c(0,beta[2]*beta[3]/(beta[2]+beta[3]*gamma[2])^2,rep(0,length(covariateM)),0,-beta[3]*gamma[2]/(beta[2]+beta[3]*gamma[2])^2,beta[2]*gamma[2]/(beta[2]+beta[3]*gamma[2])^2,rep(0,length(covariateY))),ncol=1)
PM = beta[3]*gamma[2]/(beta[2]+beta[3]*gamma[2])
V_PM=as.vector(t(lambda) %*% S %*% lambda)
# Mediator residual variance appended to theta for the exact effects.
# NOTE(review): summary.lm's $sigma is the residual standard deviation, yet
# it is stored as "sigma2" and later used as sqrt(sigma2) by the *_unbiased
# functions -- confirm which scale is intended.
sigma2=model_M$sigma
var_sigma2=2*model_M$sigma^2*(1/model_M$df[2])
theta=c(gamma,beta,sigma2)
S=matrix(0,ncol=nbeta+ngamma+1,nrow=nbeta+ngamma+1)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
S[(nbeta+ngamma+1),(nbeta+ngamma+1)]=var_sigma2
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""),"sigma2_M")
# Index positions of each parameter group inside theta; a NULL loc_* means
# the corresponding covariate set is absent.
loc_gamma_0 = 1:2
loc_gamma_c = 3:(3+length(covariateM)-1)
loc_beta_0 = (ngamma+1):(ngamma+3)
loc_beta_c = (ngamma+4):(ngamma+4+length(covariateY)-1)
if (is.null(covariateM)) {loc_gamma_c=NULL}
if (is.null(covariateY)) {loc_beta_c=NULL}
x0s=x0
# Exact (non-rare) effects; variances via numeric gradients from mygrad().
NIE_nonrare_p = NIE_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                             x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(NIE_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_NIE_nonrare = as.vector(t(lambda) %*% S %*% lambda)
TE_nonrare_p = TE_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                           x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(TE_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_TE_nonrare = as.vector(t(lambda) %*% S %*% lambda)
PM_nonrare_p = PM_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                           x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(PM_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_PM_nonrare = as.vector(t(lambda) %*% S %*% lambda)
# Collect approximate and exact estimates; Wald 95% CIs from delta SEs.
point_est = c(NIE,TE,PM,NIE_nonrare_p,TE_nonrare_p,PM_nonrare_p);
names(point_est)=c("NIE","TE","PM","NIE_nonrare","TE_nonrare","PM_nonrare")
var_est = c(V_NIE,V_TE,V_PM,V_NIE_nonrare,V_TE_nonrare,V_PM_nonrare);
names(var_est)=c("NIE","TE","PM","NIE_nonrare","TE_nonrare","PM_nonrare")
sd_est = sqrt(var_est)
names(sd_est)=c("NIE","TE","PM","NIE_nonrare","TE_nonrare","PM_nonrare")
ci_est = rbind(point_est-1.96*sd_est,point_est+1.96*sd_est)
rownames(ci_est) = c("Lower boundary","Upper boundary")
return(list(point_est=point_est,var_est=var_est,sd_est=sd_est,ci_est=ci_est))
}
Mediate_binaY_contM_bootci=function(data,
outcome="Y",
mediator="M",
exposure="X",
covariateY=c("X1","X2"),
covariateM=c("X1","X2"),
x0=0,x1=1,cY=c(0,0),cM=c(0,0),R=1000) {
# Coefficients (ascending powers) of the physicists' Hermite polynomial H_n,
# built by iterating the recurrence H_{n+1}(t) = 2 t H_n(t) - H_n'(t).
HermiteCoefs=function (order) {
  coefs <- 1
  if (order > 0) {
    for (n in seq_len(order)) {
      shifted <- c(0, 2 * coefs)                        # 2 t H_n(t)
      derived <- c(((0:(n - 1)) * coefs)[-1L], 0, 0)    # H_n'(t), zero-padded
      coefs <- shifted - derived
    }
  }
  coefs
}
# Gauss-Hermite quadrature approximation of E[f(Z)] for Z ~ N(mu, sd^2).
# Nodes are the roots of H_order; weights use the standard formula based on
# H_{order-1} evaluated at the nodes, then normalized by sqrt(pi) so they
# sum to one. Extra arguments in ... are forwarded to f.
# NOTE(review): relies on HermiteCoefs() from the enclosing scope.
gauss.hermite=function (f, mu = 0, sd = 1, ..., order = 5) {
  stopifnot(is.function(f))
  stopifnot(length(mu) == 1)
  stopifnot(length(sd) == 1)
  Hn <- HermiteCoefs(order)
  Hn1 <- HermiteCoefs(order - 1)
  nodes <- sort(Re(polyroot(Hn)))
  # H_{order-1} evaluated at every node (1 x order matrix).
  Hn1.at.nodes <- matrix(Hn1, nrow = 1) %*% t(outer(nodes, 0:(order - 1), "^"))
  raw.w <- 2^(order - 1) * factorial(order) * sqrt(pi)/(order * Hn1.at.nodes)^2
  ww <- raw.w/sqrt(pi)
  xx <- mu + sd * sqrt(2) * nodes   # map standard nodes onto N(mu, sd^2)
  terms <- lapply(seq_along(nodes), function(i) ww[i] * f(xx[i], ...))
  Reduce(`+`, terms, 0)
}
# Numeric Jacobian by central differences: row i holds the partial
# derivatives of (possibly vector-valued) f with respect to x0[i], using
# step size heps. Extra arguments in ... are forwarded to f.
mygrad=function (f, x0,heps = 1e-5, ...) {
  if (!is.numeric(x0))
    stop("Argument 'x0' must be a numeric value.")
  target <- match.fun(f)
  eval_f <- function(x) target(x, ...)
  n.out <- length(eval_f(x0))   # output dimension probed at x0
  n.in <- length(x0)
  jac <- matrix(0, nrow = n.in, ncol = n.out)
  for (k in seq_len(n.in)) {
    step <- rep(0, n.in)
    step[k] <- heps
    jac[k, ] <- (eval_f(x0 + step) - eval_f(x0 - step))/(2 * heps)
  }
  jac
}
# Exact (non-rare-outcome) natural indirect effect on the log-odds scale.
# theta packs c(gamma, beta, sigma2); the loc_* arguments give the index
# positions of each parameter group inside theta.
# NOTE(review): relies on gauss.hermite() from the enclosing scope.
NIE_unbiased = function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                        x0s=x0s,x1=x1,cY=cY,cM=cM) {
  gamma0 <- theta[1]
  gamma1 <- theta[2]
  gamma_c <- theta[loc_gamma_c]
  beta0 <- theta[loc_beta_0[1]]
  beta1 <- theta[loc_beta_0[2]]
  beta2 <- theta[loc_beta_0[3]]
  beta_c <- theta[loc_beta_c]
  sigma2 <- theta[length(theta)]
  # Fixed covariate contributions (0 when the model has no covariates).
  offY <- if (is.null(loc_beta_c)) 0 else sum(beta_c * cY)
  offM <- if (is.null(loc_gamma_c)) 0 else sum(gamma_c * cM)
  # P(Y=1 | X=x1, M=m, cY): exposure fixed at x1 for both counterfactuals.
  pY <- function(m) {
    e <- exp(beta0 + beta1 * x1 + beta2 * m + offY)
    e / (1 + e)
  }
  p11s <- gauss.hermite(f = pY, mu = gamma0 + gamma1 * x1 + offM,
                        sd = sqrt(sigma2), order = 40)
  p10s <- gauss.hermite(f = pY, mu = gamma0 + gamma1 * x0s + offM,
                        sd = sqrt(sigma2), order = 40)
  log(p11s / (1 - p11s)) - log(p10s / (1 - p10s))
}
# Exact (non-rare-outcome) total effect on the log-odds scale.
# theta packs c(gamma, beta, sigma2); the loc_* arguments give the index
# positions of each parameter group inside theta.
# FIX: the no-covariate branch previously computed p00s with order=20 while
# p11s and every other quadrature call in this file (including the first
# TE_unbiased definition) use order=40; the mismatch made the two integrals
# differently accurate. Both now use order=40.
# NOTE(review): relies on gauss.hermite() from the enclosing scope.
TE_unbiased = function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                       x0s=x0s,x1=x1,cY=cY,cM=cM) {
  gamma0 <- theta[1]
  gamma1 <- theta[2]
  gamma_c <- theta[loc_gamma_c]
  beta0 <- theta[loc_beta_0[1]]
  beta1 <- theta[loc_beta_0[2]]
  beta2 <- theta[loc_beta_0[3]]
  beta_c <- theta[loc_beta_c]
  sigma2 <- theta[length(theta)]
  # Fixed covariate contributions (0 when the model has no covariates).
  offY <- if (is.null(loc_beta_c)) 0 else sum(beta_c * cY)
  offM <- if (is.null(loc_gamma_c)) 0 else sum(gamma_c * cM)
  # Outcome probability at exposure level xval, as a function of mediator m.
  pY <- function(xval) {
    function(m) {
      e <- exp(beta0 + beta1 * xval + beta2 * m + offY)
      e / (1 + e)
    }
  }
  # Both exposure and mediator distribution move together: x1 vs x0s.
  p11s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x1 + offM,
                        sd = sqrt(sigma2), order = 40)
  p00s <- gauss.hermite(f = pY(x0s), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sqrt(sigma2), order = 40)
  log(p11s / (1 - p11s)) - log(p00s / (1 - p00s))
}
# Exact (non-rare-outcome) proportion mediated: NIE / TE on the log-odds
# scale, both computed from the same quadrature probabilities.
# NOTE(review): relies on gauss.hermite() from the enclosing scope.
PM_unbiased=function(theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                     x0s=x0s,x1=x1,cY=cY,cM=cM) {
  gamma0 <- theta[1]
  gamma1 <- theta[2]
  gamma_c <- theta[loc_gamma_c]
  beta0 <- theta[loc_beta_0[1]]
  beta1 <- theta[loc_beta_0[2]]
  beta2 <- theta[loc_beta_0[3]]
  beta_c <- theta[loc_beta_c]
  sigma2 <- theta[length(theta)]
  # Fixed covariate contributions (0 when the model has no covariates).
  offY <- if (is.null(loc_beta_c)) 0 else sum(beta_c * cY)
  offM <- if (is.null(loc_gamma_c)) 0 else sum(gamma_c * cM)
  # Outcome probability at exposure level xval, as a function of mediator m.
  pY <- function(xval) {
    function(m) {
      e <- exp(beta0 + beta1 * xval + beta2 * m + offY)
      e / (1 + e)
    }
  }
  sd.M <- sqrt(sigma2)
  p11s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x1 + offM,
                        sd = sd.M, order = 40)
  # Exposure held at x1 in the outcome model, mediator distribution at x0s.
  p10s <- gauss.hermite(f = pY(x1), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sd.M, order = 40)
  p00s <- gauss.hermite(f = pY(x0s), mu = gamma0 + gamma1 * x0s + offM,
                        sd = sd.M, order = 40)
  te <- log(p11s / (1 - p11s)) - log(p00s / (1 - p00s))
  nie <- log(p11s / (1 - p11s)) - log(p10s / (1 - p10s))
  nie / te
}
data = as.data.frame(data)
# Bootstrap statistic: refit both models on the resampled rows and return
# the six point estimates (approximate and exact NIE/TE/PM).
get_par_boot=function(data=data,indices) {
data=data[indices,]
if (is.null(covariateY)) {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
} else {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
}
if (is.null(covariateM)) {
formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
} else {
formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
}
model_Y=summary(glm(formula_Y,family=binomial(link="logit"),data=data))
model_M=summary(lm(formula_M,data=data))
beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
# Approximate (rare-outcome) effects.
NIE=beta[3]*gamma[2]
V_NIE = gamma[2]^2 * cov_beta[3,3] + beta[3]^2 * cov_gamma[2,2]
TE = beta[2]+beta[3]*gamma[2]
lambda = matrix(c(0,beta[3],rep(0,length(covariateM)),0,1,gamma[2],rep(0,length(covariateY))),ncol=1)
V_TE=as.vector(t(lambda) %*% S %*% lambda)
lambda = matrix(c(0,beta[2]*beta[3]/(beta[2]+beta[3]*gamma[2])^2,rep(0,length(covariateM)),0,-beta[3]*gamma[2]/(beta[2]+beta[3]*gamma[2])^2,beta[2]*gamma[2]/(beta[2]+beta[3]*gamma[2])^2,rep(0,length(covariateY))),ncol=1)
PM = beta[3]*gamma[2]/(beta[2]+beta[3]*gamma[2])
V_PM=as.vector(t(lambda) %*% S %*% lambda)
sigma2=model_M$sigma
var_sigma2=2*model_M$sigma^2*(1/model_M$df[2])
theta=c(gamma,beta,sigma2)
S=matrix(0,ncol=nbeta+ngamma+1,nrow=nbeta+ngamma+1)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
S[(nbeta+ngamma+1),(nbeta+ngamma+1)]=var_sigma2
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""),"sigma2_M")
loc_gamma_0 = 1:2
loc_gamma_c = 3:(3+length(covariateM)-1)
loc_beta_0 = (ngamma+1):(ngamma+3)
loc_beta_c = (ngamma+4):(ngamma+4+length(covariateY)-1)
if (is.null(covariateM)) {loc_gamma_c=NULL}
if (is.null(covariateY)) {loc_beta_c=NULL}
x0s=x0
# NOTE(review): the V_* variance computations below (and the mygrad calls)
# are discarded -- only point_est is returned -- so each bootstrap replicate
# pays for gradients it never uses; candidates for removal.
NIE_nonrare_p = NIE_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                             x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(NIE_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_NIE_nonrare = as.vector(t(lambda) %*% S %*% lambda)
TE_nonrare_p = TE_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                           x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(TE_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_TE_nonrare = as.vector(t(lambda) %*% S %*% lambda)
PM_nonrare_p = PM_unbiased(theta=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
                           x0s=x0s,x1=x1,cY=cY,cM=cM)
lambda= mygrad(PM_unbiased,x0=theta,loc_gamma_0=loc_gamma_0,loc_gamma_c=loc_gamma_c,loc_beta_0=loc_beta_0,loc_beta_c=loc_beta_c,
               x0s=x0s,x1=x1,cY=cY,cM=cM)
V_PM_nonrare = as.vector(t(lambda) %*% S %*% lambda)
point_est = c(NIE,TE,PM,NIE_nonrare_p,TE_nonrare_p,PM_nonrare_p);
names(point_est)=c("NIE","TE","PM","NIE_nonrare","TE_nonrare","PM_nonrare")
return(point_est)
}
# Run R bootstrap replicates, then extract percentile 95% CIs per effect.
boot.par=boot::boot(data=data, statistic=get_par_boot, R=R)
boot.parciNIE <- boot::boot.ci(boot.par, index=1, type=c("perc"))
boot.parciTE <- boot::boot.ci(boot.par, index=2, type=c("perc"))
boot.parciPM <- boot::boot.ci(boot.par, index=3, type=c("perc"))
boot.parciNIE_nonrare <- boot::boot.ci(boot.par, index=4, type=c("perc"))
boot.parciTE_nonrare <- boot::boot.ci(boot.par, index=5, type=c("perc"))
boot.parciPM_nonrare <- boot::boot.ci(boot.par, index=6, type=c("perc"))
# $percent[4:5] holds the lower/upper percentile interval endpoints.
ci_est_prec = c(boot.parciNIE$percent[4:5],
                boot.parciTE$percent[4:5],
                boot.parciPM$percent[4:5],
                boot.parciNIE_nonrare$percent[4:5],
                boot.parciTE_nonrare$percent[4:5],
                boot.parciPM_nonrare$percent[4:5])
names(ci_est_prec)=c(paste(rep("CI_",6),rep(c("NIE","TE","PM"),each=2),rep(c("_Low","_High"),times=3),sep=""),
                     paste(rep("CI_",6),rep(c("NIE_nonrare","TE_nonrare","PM_nonrare"),each=2),rep(c("_Low","_High"),times=3),sep=""))
return(ci_est_prec)
}
# Mediation effects for a binary outcome AND a binary mediator (both fit by
# logistic regression). Returns delta-method point estimates, variances,
# SEs and 95% Wald CIs for the approximate (NIEa/TEa/PMa) and exact
# (NIE/TE/PM) effects. Effect formulas are assembled as text and
# differentiated symbolically with deriv(); the resulting gradients drive
# the delta method against the joint covariance S.
mediate_binaY_binaM=function(data,
                             outcome="Y",
                             mediator="M",
                             exposure="X",
                             covariateY=c("X1","X2"),
                             covariateM=c("X1","X2"),
                             x0=0,x1=1,cY=c(0,0),cM=c(0,0)) {
data = as.data.frame(data)
if (is.null(covariateY)) {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
} else {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
}
if (is.null(covariateM)) {
formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
} else {
formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
}
model_Y=summary(glm(formula_Y,family=binomial(link="logit"),data=data))
model_M=summary(glm(formula_M,family=binomial(link="logit"),data=data))
beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
# Block-diagonal joint covariance of (gamma, beta) for the delta method.
S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
# Symbol names for the text expressions; is.null(x)==0 means "x not NULL".
if (is.null(covariateY)==0) {
names(cY) = paste(names(beta),"_betafix",sep="")[-c(1:3)]
beta_c=paste("beta_",covariateY,sep="")
}
if (is.null(covariateM)==0) {
names(cM) = paste(names(gamma),"_gammafix",sep="")[-c(1:2)]
gamma_c=paste("gamma_",covariateM,sep="")
}
# .A-.D: exp() pieces of the approximate-effect expressions (mediator model
# linear predictor at x0/x1, optionally shifted by beta2).
.A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.B= paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.C=paste("exp(gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.D= paste("exp(beta2+gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
# Approximate NIE expression as text.
NIEa_fun = function() {
output = paste("log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
return(output)
}
variable=c("gamma0","gamma1",if(is.null(covariateM)==0) {gamma_c},"beta0","beta1","beta2",if(is.null(covariateY)==0) {beta_c})
NIEa_D=deriv(parse(text=NIEa_fun()),variable)
# Bind fitted coefficients to the symbol names used in the expressions so
# that eval() below can resolve them.
gamma0=gamma[1];gamma1=gamma[2];
if(is.null(covariateM)==0) {
for (i in (1:length(covariateM))) {assign(gamma_c[i],gamma[2+i])}
}
beta0=beta[1];beta1=beta[2];beta2=beta[3]
if(is.null(covariateY)==0) {
for (i in (1:length(covariateY))) {assign(beta_c[i],beta[3+i])}
}
TEa_fun = function() {
output = paste("beta1*",(x1-x0),"+","log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
return(output)
}
TEa_D=deriv(parse(text=TEa_fun()),variable)
PMa_fun = function() {
.UP = paste("log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
.BOT = paste("beta1*",(x1-x0),"+","log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
output=paste("(",.UP,")/(",.BOT,")")
return(output)
}
PMa_D=deriv(parse(text=PMa_fun()),variable)
# Evaluate each deriv() object: [1] is the value, attr "gradient" the
# gradient row used for the delta-method variance.
NIEa_D = eval(NIEa_D)
NIEa_p = NIEa_D[1]
lambda= t(attr(NIEa_D,"gradient"))
V_NIEa = as.vector(t(lambda) %*% S %*% lambda)
TEa_D = eval(TEa_D)
TEa_p = TEa_D[1]
lambda= t(attr(TEa_D,"gradient"))
V_TEa = as.vector(t(lambda) %*% S %*% lambda)
PMa_D = eval(PMa_D)
PMa_p = PMa_D[1]
lambda= t(attr(PMa_D,"gradient"))
V_PMa = as.vector(t(lambda) %*% S %*% lambda)
# .A-.H: exp() pieces for the exact-effect expressions (.E-.H add the
# outcome-model linear predictor at x0/x1).
.A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.B = paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.C =paste("exp(gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.D = paste("exp(beta2+gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.E = paste("exp(beta0+beta1*",x1,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.F = paste("exp(beta0+beta2+beta1*",x1,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.G = paste("exp(beta0+beta1*",x0,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.H = paste("exp(beta0+beta2+beta1*",x0,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
NIE_fun = function() {
.A1 = paste("(1+",.A,"+",.E,"*",.A,"+",.F,")")
.A2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.B1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.B2 = paste("(1+", .F,"+",.D,"*(1+",.E,"))")
output = paste("log(",.A1,"/",.A2,")+","log(",.B1,"/",.B2,")")
return(output)
}
NIE_D=deriv(parse(text=NIE_fun()),variable)
TE_fun = function() {
.C1 = paste("(1+",.A,"+",.G,"*",.A,"+",.H,")")
.C2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.D1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.D2 = paste("(1+", .H,"+",.D,"*(1+",.G,"))")
output = paste("beta1*",(x1-x0),"+log(",.C1,"/",.C2,")+","log(",.D1,"/",.D2,")")
return(output)
}
TE_D=deriv(parse(text=TE_fun()),variable)
PM_fun = function() {
.A1 = paste("(1+",.A,"+",.E,"*",.A,"+",.F,")")
.A2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.B1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.B2 = paste("(1+", .F,"+",.D,"*(1+",.E,"))")
.C1 = paste("(1+",.A,"+",.G,"*",.A,"+",.H,")")
.C2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.D1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.D2 = paste("(1+", .H,"+",.D,"*(1+",.G,"))")
output1 = paste("(log(",.A1,"/",.A2,")+","log(",.B1,"/",.B2,"))")
output2 = paste("(beta1*",(x1-x0),"+log(",.C1,"/",.C2,")+","log(",.D1,"/",.D2,"))")
return(paste(output1,"/",output2))
}
PM_D=deriv(parse(text=PM_fun()),variable)
NIE_D = eval(NIE_D)
NIE_p = NIE_D[1]
lambda= t(attr(NIE_D,"gradient"))
V_NIE = as.vector(t(lambda) %*% S %*% lambda)
TE_D = eval(TE_D)
TE_p = TE_D[1]
lambda= t(attr(TE_D,"gradient"))
V_TE = as.vector(t(lambda) %*% S %*% lambda)
PM_D = eval(PM_D)
PM_p = PM_D[1]
lambda= t(attr(PM_D,"gradient"))
V_PM = as.vector(t(lambda) %*% S %*% lambda)
# Collect approximate and exact estimates; Wald 95% CIs from delta SEs.
point_est = c(NIEa_p,TEa_p,PMa_p,NIE_p,TE_p,PM_p);
names(point_est)=c("NIEa","TEa","PMa","NIE","TE","PM")
var_est = c(V_NIEa,V_TEa,V_PMa,V_NIE,V_TE,V_PM);
names(var_est)=c("NIEa","TEa","PMa","NIE","TE","PM")
sd_est = sqrt(var_est)
names(sd_est)=c("NIEa","TEa","PMa","NIE","TE","PM")
ci_est = rbind(point_est-1.96*sd_est,point_est+1.96*sd_est)
rownames(ci_est) = c("Lower boundary","Upper boundary")
return(list(point_est=point_est,var_est=var_est,sd_est=sd_est,ci_est=ci_est))
}
# Percentile-bootstrap CIs (R replicates) for the binary-outcome /
# binary-mediator effects. The bootstrap statistic mirrors
# mediate_binaY_binaM() but only evaluates the text expressions for point
# estimates (no deriv()/delta-method variances are needed per replicate).
Mediate_binaY_binaM_bootci=function(data,
                                    outcome="Y",
                                    mediator="M",
                                    exposure="X",
                                    covariateY=c("X1","X2"),
                                    covariateM=c("X1","X2"),
                                    x0=0,x1=1,cY=c(0,0),cM=c(0,0),R=1000) {
data = as.data.frame(data)
# Bootstrap statistic: refit both logistic models on the resampled rows and
# return the six point estimates.
get_par_boot=function(data=data,indices) {
data=data[indices,]
if (is.null(covariateY)) {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,sep=""))
} else {
formula_Y=as.formula(paste(outcome,"~",exposure,"+",mediator,"+",paste(covariateY,collapse="+"),sep=""))
}
if (is.null(covariateM)) {
formula_M=as.formula(paste(mediator,"~",exposure,sep=""))
} else {
formula_M=as.formula(paste(mediator,"~",exposure,"+",paste(covariateM,collapse="+"),sep=""))
}
model_Y=summary(glm(formula_Y,family=binomial(link="logit"),data=data))
model_M=summary(glm(formula_M,family=binomial(link="logit"),data=data))
beta=model_Y$coef[,1];cov_beta=model_Y$cov.unscaled
gamma=model_M$coef[,1];cov_gamma=model_M$cov.unscaled
nbeta=dim(cov_beta)[1];ngamma=dim(cov_gamma)[1]
# NOTE(review): S is assembled but unused here (point estimates only).
S=matrix(0,ncol=nbeta+ngamma,nrow=nbeta+ngamma)
S[1:ngamma,1:ngamma]=cov_gamma; S[(ngamma+1):(nbeta+ngamma),(ngamma+1):(nbeta+ngamma)]=cov_beta
colnames(S)=rownames(S)=c(paste(names(gamma),"_gamma",sep=""),paste(names(beta),"_beta",sep=""))
if (is.null(covariateY)==0) {
names(cY) = paste(names(beta),"_betafix",sep="")[-c(1:3)]
beta_c=paste("beta_",covariateY,sep="")
}
if (is.null(covariateM)==0) {
names(cM) = paste(names(gamma),"_gammafix",sep="")[-c(1:2)]
gamma_c=paste("gamma_",covariateM,sep="")
}
# .A-.D: exp() pieces of the approximate-effect text expressions.
.A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.B= paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.C=paste("exp(gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.D= paste("exp(beta2+gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
NIEa_fun = function() {
output = paste("log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
return(output)
}
# NOTE(review): "variable" is defined but never used in this bootstrap
# variant (no deriv() calls) -- apparent leftover from the delta version.
variable=c("gamma0","gamma1",if(is.null(covariateM)==0) {gamma_c},"beta0","beta1","beta2",if(is.null(covariateY)==0) {beta_c})
# Bind fitted coefficients to the symbol names referenced by the text
# expressions so eval(parse(...)) below can resolve them.
gamma0=gamma[1];gamma1=gamma[2];
if(is.null(covariateM)==0) {
for (i in (1:length(covariateM))) {assign(gamma_c[i],gamma[2+i])}
}
beta0=beta[1];beta1=beta[2];beta2=beta[3]
if(is.null(covariateY)==0) {
for (i in (1:length(covariateY))) {assign(beta_c[i],beta[3+i])}
}
TEa_fun = function() {
output = paste("beta1*",(x1-x0),"+","log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
return(output)
}
PMa_fun = function() {
.UP = paste("log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
.BOT = paste("beta1*",(x1-x0),"+","log(","(1+",.A,")*","(1+",.B,")/((","1+",.C,")*(","1+",.D,"))",")")
output=paste("(",.UP,")/(",.BOT,")")
return(output)
}
# Approximate effects: evaluate the text expressions directly.
NIEa_p = eval(parse(text=NIEa_fun()))
TEa_p = eval(parse(text=TEa_fun()))
PMa_p = eval(parse(text=PMa_fun()))
# .A-.H: exp() pieces for the exact-effect expressions (.E-.H add the
# outcome-model linear predictor at x0/x1).
.A = paste("exp(gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.B = paste("exp(beta2+gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.C =paste("exp(gamma0+gamma1*",x1,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.D = paste("exp(beta2+gamma0+gamma1*",x0,if(is.null(covariateM)==0) {
                 paste("+",paste(paste(gamma_c,"*",cM),collapse = "+"))}
           ,")")
.E = paste("exp(beta0+beta1*",x1,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.F = paste("exp(beta0+beta2+beta1*",x1,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.G = paste("exp(beta0+beta1*",x0,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
.H = paste("exp(beta0+beta2+beta1*",x0,if(is.null(covariateY)==0) {
                 paste("+",paste(paste(beta_c,"*",cY),collapse = "+"))}
           ,")")
NIE_fun = function() {
.A1 = paste("(1+",.A,"+",.E,"*",.A,"+",.F,")")
.A2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.B1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.B2 = paste("(1+", .F,"+",.D,"*(1+",.E,"))")
output = paste("log(",.A1,"/",.A2,")+","log(",.B1,"/",.B2,")")
return(output)
}
TE_fun = function() {
.C1 = paste("(1+",.A,"+",.G,"*",.A,"+",.H,")")
.C2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.D1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.D2 = paste("(1+", .H,"+",.D,"*(1+",.G,"))")
output = paste("beta1*",(x1-x0),"+log(",.C1,"/",.C2,")+","log(",.D1,"/",.D2,")")
return(output)
}
PM_fun = function() {
.A1 = paste("(1+",.A,"+",.E,"*",.A,"+",.F,")")
.A2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.B1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.B2 = paste("(1+", .F,"+",.D,"*(1+",.E,"))")
.C1 = paste("(1+",.A,"+",.G,"*",.A,"+",.H,")")
.C2 = paste("(1+",.C,"+",.E,"*",.C,"+",.F,")")
.D1 = paste("(1+", .F,"+",.B,"*(1+",.E,"))")
.D2 = paste("(1+", .H,"+",.D,"*(1+",.G,"))")
output1 = paste("(log(",.A1,"/",.A2,")+","log(",.B1,"/",.B2,"))")
output2 = paste("(beta1*",(x1-x0),"+log(",.C1,"/",.C2,")+","log(",.D1,"/",.D2,"))")
return(paste(output1,"/",output2))
}
# Exact effects: evaluate the text expressions directly.
NIE_p = eval(parse(text=NIE_fun()))
TE_p = eval(parse(text=TE_fun()))
PM_p = eval(parse(text=PM_fun()))
point_est = c(NIEa_p,TEa_p,PMa_p,NIE_p,TE_p,PM_p);
names(point_est)=c("NIEa","TEa","PMa","NIE","TE","PM")
return(point_est)
}
# Run R bootstrap replicates, then extract percentile 95% CIs per effect;
# $percent[4:5] holds the lower/upper interval endpoints.
boot.par=boot::boot(data=data, statistic=get_par_boot, R=R)
boot.parciNIEa <- boot::boot.ci(boot.par, index=1, type=c("perc"))
boot.parciTEa <- boot::boot.ci(boot.par, index=2, type=c("perc"))
boot.parciPMa <- boot::boot.ci(boot.par, index=3, type=c("perc"))
boot.parciNIE<- boot::boot.ci(boot.par, index=4, type=c("perc"))
boot.parciTE <- boot::boot.ci(boot.par, index=5, type=c("perc"))
boot.parciPM <- boot::boot.ci(boot.par, index=6, type=c("perc"))
ci_est_prec = c(boot.parciNIEa$percent[4:5],
                boot.parciTEa$percent[4:5],
                boot.parciPMa$percent[4:5],
                boot.parciNIE$percent[4:5],
                boot.parciTE$percent[4:5],
                boot.parciPM$percent[4:5])
names(ci_est_prec)=c(paste(rep("CI_",6),rep(c("NIEa","TEa","PMa"),each=2),rep(c("_Low","_High"),times=3),sep=""),
                     paste(rep("CI_",6),rep(c("NIE","TE","PM"),each=2),rep(c("_Low","_High"),times=3),sep=""))
return(ci_est_prec)
}
# Top-level user interface: dispatch to the estimator matching the
# outcome/mediator measurement types (0 = continuous, 1 = binary),
# optionally add percentile bootstrap CIs (boot == 1, R replicates), then
# print and invisibly return the assembled "mediate" result table.
mediate=function(data,
                 outcome="Y1",
                 mediator="Mc",
                 exposure="X",
                 binary.outcome=0,
                 binary.mediator=0,
                 covariate.outcome=c("C1","C2"),
                 covariate.mediator=c("C1","C2"),
                 x0=0,
                 x1=1,
                 c.outcome=c(0,0),
                 c.mediator=c(0,0),
                 boot=0,
                 R=2000) {
  data <- as.data.frame(data)
  # Internal aliases matching the worker functions' argument names.
  covariateY <- covariate.outcome
  covariateM <- covariate.mediator
  cY <- c.outcome
  cM <- c.mediator
  if (binary.outcome == 0 && binary.mediator == 0) {
    # Continuous outcome, continuous mediator (cY/cM not needed here).
    delta_res <- mediate_contY_contM(data = data, outcome = outcome,
                                     mediator = mediator, exposure = exposure,
                                     covariateY = covariateY,
                                     covariateM = covariateM, x0 = x0, x1 = x1)
    if (boot == 1) {
      boot_res <- Mediate_contY_contM_bootci(data = data, outcome = outcome,
                                             mediator = mediator,
                                             exposure = exposure,
                                             covariateY = covariateY,
                                             covariateM = covariateM,
                                             x0 = x0, x1 = x1, R = R)
    }
  } else if (binary.outcome == 0 && binary.mediator == 1) {
    delta_res <- mediate_contY_binaM(data = data, outcome = outcome,
                                     mediator = mediator, exposure = exposure,
                                     covariateY = covariateY,
                                     covariateM = covariateM,
                                     x0 = x0, x1 = x1, cY = cY, cM = cM)
    if (boot == 1) {
      boot_res <- Mediate_contY_binaM_bootci(data = data, outcome = outcome,
                                             mediator = mediator,
                                             exposure = exposure,
                                             covariateY = covariateY,
                                             covariateM = covariateM,
                                             x0 = x0, x1 = x1, R = R,
                                             cY = cY, cM = cM)
    }
  } else if (binary.outcome == 1 && binary.mediator == 0) {
    delta_res <- mediate_binaY_contM(data = data, outcome = outcome,
                                     mediator = mediator, exposure = exposure,
                                     covariateY = covariateY,
                                     covariateM = covariateM,
                                     x0 = x0, x1 = x1, cY = cY, cM = cM)
    if (boot == 1) {
      boot_res <- Mediate_binaY_contM_bootci(data = data, outcome = outcome,
                                             mediator = mediator,
                                             exposure = exposure,
                                             covariateY = covariateY,
                                             covariateM = covariateM,
                                             x0 = x0, x1 = x1, R = R,
                                             cY = cY, cM = cM)
    }
  } else if (binary.outcome == 1 && binary.mediator == 1) {
    delta_res <- mediate_binaY_binaM(data = data, outcome = outcome,
                                     mediator = mediator, exposure = exposure,
                                     covariateY = covariateY,
                                     covariateM = covariateM,
                                     x0 = x0, x1 = x1, cY = cY, cM = cM)
    if (boot == 1) {
      boot_res <- Mediate_binaY_binaM_bootci(data = data, outcome = outcome,
                                             mediator = mediator,
                                             exposure = exposure,
                                             covariateY = covariateY,
                                             covariateM = covariateM,
                                             x0 = x0, x1 = x1, R = R,
                                             cY = cY, cM = cM)
    }
  }
  # A binary outcome reports both approximate and exact effect columns.
  if (binary.outcome == 1) {
    effect.names <- c("Approximate NIE", "Approximate TE", "Approximate MP",
                      "Exact NIE", "Exact TE", "Exact MP")
    lower.idx <- c(1, 3, 5, 7, 9, 11)
  } else {
    effect.names <- c("NIE", "TE", "MP")
    lower.idx <- c(1, 3, 5)
  }
  # Rows: point estimate, delta-method SE, delta-method CI bounds (the CI
  # component delta_res[[4]] is itself a 2-row matrix).
  res <- as.data.frame(rbind(delta_res[[1]], delta_res[[3]], delta_res[[4]]))
  colnames(res) <- effect.names
  rownames(res) <- c("point estimate", "S.E by Delta Method",
                     "CI Lower by Delta Method", "CI Upper by Delta Method")
  if (boot == 1) {
    # boot_res interleaves (low, high) per effect; split into two rows.
    ci_boot <- as.data.frame(rbind(boot_res[lower.idx],
                                   boot_res[lower.idx + 1]))
    colnames(ci_boot) <- effect.names
    rownames(ci_boot) <- c("CI Lower by Bootstrap Method",
                           "CI Upper by Bootstrap Method")
    res <- rbind(res, ci_boot)
  }
  res <- list(res = res, class = "mediate")
  attr(res, "class") <- "mediate"
  print.mediate(res)
  invisible(res)
}
# Pretty-printer for "mediate" results: one row per effect with the point
# estimate (S.E.) and the delta-method / bootstrap confidence intervals.
# Layout is inferred from x$res: 4 data rows means no bootstrap CIs; 3
# columns means a continuous outcome (single effect set).
print.mediate=function(x, ...) {
  res <- format(x$res, digits = 3)
  has.boot <- dim(res)[1] != 4
  cont.outcome <- dim(res)[2] == 3
  n.effects <- if (cont.outcome) 3 else 6
  n.cols <- if (has.boot) 3 else 2
  out <- as.data.frame(matrix(0, ncol = n.cols, nrow = n.effects))
  colnames(out) <- if (has.boot) {
    c("Point (S.E.)", " 95% CI by Delta Approach", " 95% CI by Bootstrap")
  } else {
    c("Point (S.E.)", " 95% CI by Delta Approach")
  }
  rownames(out) <- if (cont.outcome) {
    c("NIE: ", "TE: ", "MP: ")
  } else {
    c("NIE: Approximate ", "NIE: Exact ",
      "TE: Approximate ", "TE: Exact ",
      "MP: Approximate ", "MP: Exact ")
  }
  # res rows: 1 point, 2 SE, 3-4 delta CI, 5-6 bootstrap CI (if present).
  for (i in seq_len(n.effects)) {
    out[i, 1] <- paste0(res[1, i], " (", res[2, i], ")")
    out[i, 2] <- paste0("(", res[3, i], ",", res[4, i], ")")
    if (has.boot) {
      out[i, 3] <- paste0("(", res[5, i], ",", res[6, i], ")")
    }
  }
  cat("Mediation Analysis Results\n")
  print.data.frame(out)
}
# --- Shiny server fragment: switch UI from single-document view to the
# document list view. NOTE(review): depends on 'session_variables',
# 'remove_tab_*', 'add_tab_doc_list_tekst', 'show_ui', 'plot_mode',
# 'min_rad', 'format_date', 'INCLUDE_EXTRA', 'session' and 'output' from
# the enclosing server function -- confirm against the surrounding module.
# Close the open single-document tabs (and the optional extra tab) first.
if (session_variables$doc_tab_open == TRUE) {
remove_tab_doc_tekst()
remove_tab_doc_info()
if (INCLUDE_EXTRA == TRUE) {
remove_tab_extra()
}
session_variables$doc_tab_open <- FALSE
}
# Open the document-list tab, or just focus it if it already exists.
if (session_variables$doc_list_open == FALSE) {
add_tab_doc_list_tekst(365)
session_variables$doc_list_open <- TRUE
} else if (session_variables$doc_list_open == TRUE) {
shiny::updateTabsetPanel(session,
inputId = "dokumentboks",
selected = 'document_list_title')
}
# Reveal the corpus/document boxes and refresh their title outputs.
show_ui("day_corpus_box")
show_ui("document_box")
output$document_list_title <- shiny::renderText({
"Document list"
})
output$title <- shiny::renderText({
format_date(session_variables[[plot_mode$mode]]$Date[min_rad])
})
output$document_box_title <- shiny::renderText({
"Document list"
})
# animint regression-test fixtures for ".variable .value" selector support.
acontext("variable value")
# Segmentation "problems" at two resolutions (bases.per.problem 100 and 50).
problems <-
  data.frame(problemStart=c(100, 200, 100, 150, 200, 250),
             problemEnd=c(200, 300, 150, 200, 250, 300),
             problem.i=c(1, 2, 1, 2, 3, 4),
             bases.per.problem=c(100, 100, 50, 50, 50, 50))
problems$problem.name <- with(problems, {
  sprintf("size.%d.problem.%d", bases.per.problem, problem.i)
})
sizes <- data.frame(bases.per.problem=c(50, 100),
                    problems=c(2, 4))
# Peaks are problem intervals shrunk by 10 bases on each side.
problems$peakStart <- problems$problemStart + 10
problems$peakEnd <- problems$problemEnd - 10
samples <-
  rbind(data.frame(problems, sample.id="sample1", peaks=1),
        data.frame(problems, sample.id="sample1", peaks=2),
        data.frame(problems, sample.id="sample2", peaks=2))
# One candidate peak count (0/1/2) per problem, each with an error label.
peaks <-
  expand.grid(peaks=0:2,
              problem.name=problems$problem.name)
peaks$error.type <-
  c("false positive", "false negative", "correct")
rownames(problems) <- problems$problem.name
peaks$bases.per.problem <-
  problems[paste(peaks$problem.name), "bases.per.problem"]
peak.problems <-
  rbind(data.frame(problems, peaks=1),
        data.frame(problems, peaks=2))
# Two random error curves used to exercise manual color/size scales.
one.error <-
  data.frame(bases.per.problem=1:10,
             errors=rnorm(10),
             chunks="one")
two.error <-
  data.frame(bases.per.problem=1:10,
             errors=rnorm(10),
             chunks="two")
# Named elements exercise the .variable/.value selector syntax.
showSelected.vec <- c(problem.name="peaks", "bases.per.problem")
clickSelects.vec <- c(problem.name="peaks")
# Interactive viz: error curves, clickable problems, size and peak selectors.
viz <-
  list(errorLines=ggplot()+
         scale_color_manual(values=c(one="red", two="black"))+
         scale_size_manual(values=c(one=1, two=2))+
         geom_line(aes(bases.per.problem, errors,
                       color=chunks, size=chunks),
                   data=one.error)+
         geom_line(aes(bases.per.problem, errors,
                       color=chunks, size=chunks),
                   data=two.error),
       problems=ggplot()+
         ggtitle("select problem")+
         geom_segment(aes(problemStart, problem.i,
                          xend=problemEnd, yend=problem.i),
                      clickSelects="problem.name",
                      showSelected="bases.per.problem",
                      size=5,
                      data=data.frame(problems, sample.id="problems"))+
         geom_text(aes(200, 5,
                       label=paste("problem size", bases.per.problem)),
                   showSelected="bases.per.problem",
                   data=data.frame(sizes, sample.id="problems"))+
         # Named showSelected vector -> one selector per problem.name value.
         geom_segment(aes(peakStart, problem.i,
                          xend=peakEnd, yend=problem.i),
                      showSelected=showSelected.vec,
                      clickSelects="problem.name",
                      data=data.frame(peak.problems, sample.id="problems"),
                      size=10,
                      color="deepskyblue")+
         geom_segment(aes(peakStart, 0,
                          xend=peakEnd, yend=0),
                      showSelected=showSelected.vec,
                      clickSelects="problem.name",
                      data=samples,
                      size=10,
                      color="deepskyblue")+
         theme_bw()+
         theme(panel.margin=grid::unit(0, "cm"))+
         facet_grid(sample.id ~ .),
       title="viz with .variable .value",
       sizes=ggplot()+
         ggtitle("select problem size")+
         geom_point(aes(bases.per.problem, problems),
                    clickSelects="bases.per.problem",
                    size=10,
                    data=sizes),
       peaks=ggplot()+
         ggtitle("select number of peaks")+
         geom_point(aes(peaks, peaks,
                        color=error.type,
                        id=peaks),
                    showSelected=c("problem.name", "bases.per.problem"),
                    clickSelects = clickSelects.vec,
                    size=10,
                    data=peaks)+
         geom_text(aes(1, 3, label=problem.name),
                   showSelected=c("problem.name", "bases.per.problem"),
                   data=problems))
# Render the viz and inspect the generated HTML/selector state.
info <- animint2HTML(viz)
test_that("No widgets for .variable .value selectors", {
  computed.vec <- getSelectorWidgets(info$html)
  expected.vec <- c(
    "chunks", "problem.name", "bases.per.problem",
    "error.type")
  expect_identical(sort(computed.vec), sort(expected.vec))
})
circle.xpath <- '//svg[@id="plot_peaks"]//circle'
title.xpath <- paste0(circle.xpath, '//title')
test_that("clickSelects.variable tooltip/title", {
  circle.list <- getNodeSet(info$html, circle.xpath)
  expect_equal(length(circle.list), 3)
  title.list <- getNodeSet(info$html, title.xpath)
  title.vec <- sapply(title.list, xmlValue)
  expect_identical(title.vec, paste("size.100.problem.1", 0:2))
})
test_that("two lines rendered in first plot", {
  path.list <- getNodeSet(
    info$html, '//svg[@id="plot_errorLines"]//g[@class="PANEL1"]//path')
  style.strs <- sapply(path.list, function(x) xmlAttrs(x)["style"])
  # Parse "name: value;" pairs out of the inline style attribute.
  pattern <-
    paste0("(?<name>\\S+?)",
           ": *",
           "(?<value>.+?)",
           ";")
  style.matrices <- str_match_all_perl(style.strs, pattern)
  size.vec <- sapply(style.matrices, function(m)m["stroke-width", "value"])
  size.num <- as.numeric(sub("px", "", size.vec))
  expect_equal(size.num, c(1, 2))
  color.vec <- sapply(style.matrices, function(m)m["stroke", "value"])
  expect_color(color.vec, c("red", "black"))
})
test_that(".variable and .value makes compiler create selectors", {
  selector.names <- sort(names(info$selectors))
  # One extra selector is expected for every distinct problem.name value.
  problem.selectors <- paste0(problems$problem.name)
  expected.names <-
    sort(c("problem.name",
           "error.type",
           "chunks",
           problem.selectors,
           "bases.per.problem"))
  expect_identical(selector.names, expected.names)
  selected <- sapply(info$selectors[problem.selectors], "[[", "selected")
  expect_true(all(selected == "1"))
})
test_that(".variable and .value renders correctly at first", {
  node.list <-
    getNodeSet(info$html, '//g[@class="geom6_segment_problems"]//line')
  expect_equal(length(node.list), 2)
})
# Clicking the peak-count circles should update the rendered segments.
test_that("clicking reduces the number of peaks", {
  no.peaks.html <- clickHTML(id=0)
  node.list <-
    getNodeSet(no.peaks.html, '//g[@class="geom6_segment_problems"]//line')
  expect_equal(length(node.list), 1)
})
test_that("clicking increases the number of peaks", {
  more.peaks.html <- clickHTML(id=2)
  node.list <-
    getNodeSet(more.peaks.html, '//g[@class="geom6_segment_problems"]//line')
  expect_equal(length(node.list), 3)
})
# Equivalent viz built with an explicit for loop: one geom (and one
# "<problem.name>peaks" selector variable) added per problem.
viz.for <-
  list(problems=ggplot()+
         ggtitle("select problem")+
         geom_segment(aes(problemStart, problem.i,
                          xend=problemEnd, yend=problem.i),
                      clickSelects="problem.name",
                      showSelected="bases.per.problem",
                      size=5,
                      data=data.frame(problems, sample.id="problems"))+
         geom_text(aes(200, 5,
                       label=paste("problem size", bases.per.problem)),
                   showSelected="bases.per.problem",
                   data=data.frame(sizes, sample.id="problems"))+
         theme_bw()+
         theme(panel.margin=grid::unit(0, "cm"))+
         facet_grid(sample.id ~ .),
       title="viz with for loop",
       sizes=ggplot()+
         ggtitle("select problem size")+
         geom_point(aes(bases.per.problem, problems),
                    clickSelects="bases.per.problem",
                    size=10,
                    data=sizes),
       peaks=ggplot()+
         ggtitle("select number of peaks")+
         geom_text(aes(1, 3, label=problem.name),
                   showSelected="problem.name",
                   data=problems))
pp.list <- split(peak.problems, peak.problems$problem.name)
s.list <- split(samples, samples$problem.name)
p.list <- split(peaks, peaks$problem.name)
for(problem.name in names(p.list)){
  # Per-problem selector column, e.g. "size.100.problem.1peaks".
  s.name <- paste0(problem.name, "peaks")
  p <- p.list[[problem.name]]
  p[[s.name]] <- p$peaks
  pp <- pp.list[[problem.name]]
  pp[[s.name]] <- pp$peaks
  pp$problem.nodots <- gsub("[.]", "", pp$problem.name)
  s <- s.list[[problem.name]]
  s[[s.name]] <- s$peaks
  p$bases.per.problem <- pp$bases.per.problem[1]
  viz.for$problems <- viz.for$problems+
    geom_segment(aes_string("peakStart", "problem.i",
                            id="problem.nodots",
                            xend="peakEnd", yend="problem.i"),
                 showSelected=c(s.name, "bases.per.problem"),
                 clickSelects="problem.name",
                 data=data.frame(pp, sample.id="problems"),
                 size=10,
                 color="deepskyblue")+
    geom_segment(aes_string("peakStart", "0",
                            xend="peakEnd", yend="0"),
                 showSelected=c(s.name, "bases.per.problem"),
                 clickSelects="problem.name",
                 data=s,
                 size=10,
                 color="deepskyblue")
  viz.for$peaks <- viz.for$peaks+
    geom_point(aes_string("peaks", "peaks"),
               showSelected=c("problem.name", "bases.per.problem"),
               clickSelects=s.name,
               size=10,
               data=p)
}
# Render the for-loop viz; here the per-problem selectors DO get widgets.
info <- animint2HTML(viz.for)
test_that("Widgets for regular selectors", {
  computed.vec <- getSelectorWidgets(info$html)
  expected.vec <- c(
    "problem.name", "bases.per.problem",
    "size.100.problem.1peaks", "size.100.problem.2peaks",
    "size.50.problem.1peaks", "size.50.problem.2peaks",
    "size.50.problem.3peaks", "size.50.problem.4peaks")
  expect_identical(sort(computed.vec), sort(expected.vec))
})
# Read the per-chunk download indicators from the rendered table.
chunk.counts <- function(html=getHTML()){
  node.set <-
    getNodeSet(html, '//td[@class="downloaded"]')
  as.integer(sapply(node.set, xmlValue))
}
test_that("counts of chunks downloaded or not at first", {
  value.vec <- chunk.counts()
  expect_equal(value.vec,
               c(1, 1, 1, 1, 1, 1,
                 0, 0, 0, 0, 0, 0, 0, 0,
                 1, 1, 1,
                 0, 0, 0, 0, 0))
})
test_that("changing problem downloads one chunk", {
  clickID('size100problem2')
  Sys.sleep(1)
  value.vec <- chunk.counts()
  expect_equal(value.vec,
               c(1, 1, 1, 1, 1, 1,
                 0, 0, 0, 0, 0, 0, 0, 0,
                 1, 1, 1, 1,
                 0, 0, 0, 0))
})
test_that("clickSelects tooltip/title", {
  circle.list <- getNodeSet(info$html, circle.xpath)
  expect_equal(length(circle.list), 3)
  title.list <- getNodeSet(info$html, title.xpath)
  title.vec <- sapply(title.list, xmlValue)
  expect_identical(title.vec, paste("size.100.problem.1peaks", 0:2))
})
bstick.chclust <- function(n, ng = 10, plot = TRUE, ...) {
  # Compare CONISS cluster dispersions against a broken-stick model to help
  # choose the number of groups.
  #
  # n:    a chclust object (must have been fitted with method "coniss").
  # ng:   maximum number of groups to display.
  # plot: if TRUE, draw dispersion and broken-stick curves.
  #
  # Returns (invisibly) a data.frame with nGroups, dispersion and bstick.
  if (n$method != "coniss")
    stop("bstick cannot display conslink results")
  heights <- rev(n$height)
  total <- heights[1]
  drops <- abs(diff(heights))
  expected <- bstick(length(n$height), total)
  idx <- 1:(ng - 1)
  ylim <- range(drops[idx], expected[idx])
  if (plot) {
    plot(2:ng, drops[idx], type = "o", ylim = ylim,
         ylab = "Sum of Squares", xlab = "Number of groups")
    lines(2:ng, expected[idx], col = "red", type = "o")
  }
  invisible(data.frame(nGroups = 2:(ng),
                       dispersion = drops[idx],
                       bstick = expected[idx]))
}
# Compute jackknife figure-of-merit values and pseudovalues for every
# (modality i, reader j) combination of an ROC/FROC/LROC dataset.
# Returns a list with jkPseudoValues, jkFomValues and caseTransitions.
UtilPseudoValues <- function(dataset, FOM, FPFValue = 0.2) {
  dataType <- dataset$descriptions$type
  # LROC data must be converted to ROC for the Wilcoxon FOM; PCL/ALROC use
  # the LROC ratings directly.
  if (dataType != "LROC") {
    NL <- dataset$ratings$NL
    LL <- dataset$ratings$LL
  } else {
    if (FOM == "Wilcoxon"){
      datasetRoc <- DfLroc2Roc(dataset)
      NL <- datasetRoc$ratings$NL
      LL <- datasetRoc$ratings$LL
    } else if (FOM %in% c("PCL", "ALROC")){
      NL <- dataset$ratings$NL
      LL <- dataset$ratings$LL
    } else stop("incorrect FOM for LROC data")
  }
  # Array dims: I modalities, J readers, K cases (K1 normal + K2 abnormal).
  maxNL <- dim(NL)[4]
  maxLL <- dim(LL)[4]
  I <- dim(NL)[1]
  J <- dim(NL)[2]
  K <- dim(NL)[3]
  K2 <- dim(LL)[3]
  K1 <- K - K2
  # The jackknife dimension depends on which cases the FOM is sensitive to:
  # normal only, abnormal only, or all cases.
  if (FOM %in% c("MaxNLF", "ExpTrnsfmSp", "HrSp")) {
    jkFomValues <- array(dim = c(I, J, K1))
    jkPseudoValues <- array(dim = c(I, J, K1))
  } else if (FOM %in% c("MaxLLF", "HrSe")) {
    jkFomValues <- array(dim = c(I, J, K2))
    jkPseudoValues <- array(dim = c(I, J, K2))
  } else if (FOM %in% c("Wilcoxon", "HrAuc", "SongA1",
                        "AFROC", "AFROC1", "wAFROC1", "wAFROC",
                        "MaxNLFAllCases", "ROI", "SongA2",
                        "PCL", "ALROC")) {
    jkFomValues <- array(dim = c(I, J, K))
    jkPseudoValues <- array(dim = c(I, J, K))
  } else stop("Illegal FOM specified")
  # truthTableStr marks which cases were actually interpreted in each (i,j).
  t <- dataset$descriptions$truthTableStr
  fomArray <- UtilFigureOfMerit(dataset, FOM, FPFValue)
  lastCase <- 0
  caseTransitions <- array(dim = J)
  for (i in 1:I) {
    for (j in 1:J) {
      # Logical masks of the normal/abnormal/all cases seen by reader j in
      # modality i (handles split-plot designs with missing cells).
      k1_ij_logi <- !is.na(t[i,j,,1])
      k2_ij_logi <- !is.na(t[i,j,,2])[(K1+1):K]
      k_ij_logi <- !is.na(t[i,j,,1]) | !is.na(t[i,j,,2])
      if (sum(k_ij_logi) == 0) next
      perCase_ij <- dataset$lesions$perCase[k2_ij_logi]
      K1_ij <- sum(!is.na(t[i,j,,1]))
      K2_ij <- sum(!is.na(t[i,j,,2]))
      K_ij <- K1_ij + K2_ij
      lID_ij <- dataset$lesions$IDs[k2_ij_logi,1:maxLL, drop = FALSE]
      lW_ij <- dataset$lesions$weights[k2_ij_logi,1:maxLL, drop = FALSE]
      nl_ij <- NL[i, j, k_ij_logi, 1:maxNL]; dim(nl_ij) <- c(K_ij, maxNL)
      ll_ij <- LL[i, j, k2_ij_logi, 1:maxLL]; dim(ll_ij) <- c(K2_ij, maxLL)
      if (FOM %in% c("MaxNLF", "ExpTrnsfmSp", "HrSp")) {
        # Jackknife over NORMAL cases only: delete case k, recompute FOM.
        for (k in 1:K1_ij) {
          kIndxNor <- which(k1_ij_logi)[k];if (is.na(kIndxNor))
            stop("Indexing error in UtilPseudoValues")
          nlij_jk <- nl_ij[-k, ];dim(nlij_jk) <- c(K_ij - 1, maxNL)
          llij_jk <- ll_ij;dim(llij_jk) <- c(K2_ij, maxLL)
          lV_j_jk <- perCase_ij
          lW_j_jk <- lW_ij;dim(lW_j_jk) <- c(K2_ij, maxLL)
          lID_j_jk <- lID_ij;dim(lID_j_jk) <- c(K2_ij, maxLL)
          if (is.na(jkFomValues[i, j, kIndxNor])) {
            jkFomValues[i, j, kIndxNor] <-
              MyFom_ij(nlij_jk, llij_jk, lV_j_jk,
                       lID_j_jk, lW_j_jk, maxNL, maxLL,
                       K1_ij - 1, K2_ij, FOM, FPFValue)
          } else stop("overwriting UtilPseudoValues")
          if (is.na(jkPseudoValues[i, j, kIndxNor])) {
            jkPseudoValues[i, j, kIndxNor] <-
              fomArray[i, j] * K1_ij - jkFomValues[i, j, kIndxNor] * (K1_ij - 1)
          } else stop("overwriting UtilPseudoValues")
        }
      } else if (FOM %in% c("MaxLLF", "HrSe")) {
        # Jackknife over ABNORMAL cases only.
        for (k in 1:K2_ij) {
          kIndxAbn <- which(k2_ij_logi)[k];if (is.na(kIndxAbn))
            stop("Indexing error in UtilPseudoValues")
          nlij_jk <- nl_ij[-(k+K1_ij), ];dim(nlij_jk) <- c(K_ij - 1, maxNL)
          llij_jk <- ll_ij[-k, ];dim(llij_jk) <- c(K2_ij - 1, maxLL)
          lV_j_jk <- perCase_ij[-k]
          lW_j_jk <- lW_ij[-k, ];dim(lW_j_jk) <- c(K2_ij - 1, maxLL)
          lID_j_jk <- lID_ij[-k, ];dim(lID_j_jk) <- c(K2_ij - 1, maxLL)
          if (is.na(jkFomValues[i, j, kIndxAbn])) {
            jkFomValues[i, j, kIndxAbn] <-
              MyFom_ij(nlij_jk, llij_jk, lV_j_jk,
                       lID_j_jk, lW_j_jk, maxNL, maxLL,
                       K1_ij, K2_ij - 1, FOM, FPFValue)
          } else stop("overwriting UtilPseudoValues 3")
          if (is.na(jkPseudoValues[i, j, kIndxAbn])) {
            jkPseudoValues[i, j, kIndxAbn] <-
              fomArray[i, j] * K2_ij - jkFomValues[i, j, kIndxAbn] * (K2_ij - 1)
          } else stop("overwriting UtilPseudoValues")
        }
      } else {
        # Jackknife over ALL cases; branch on whether the deleted case is
        # normal (k <= K1_ij) or abnormal.
        for (k in 1:K_ij) {
          kIndxAll <- which(k_ij_logi)[k];if (is.na(kIndxAll))
            stop("Indexing error in UtilPseudoValues")
          if (k <= K1_ij) {
            nlij_jk <- nl_ij[-k, ];dim(nlij_jk) <- c(K_ij - 1, maxNL)
            llij_jk <- ll_ij;dim(llij_jk) <- c(K2_ij, maxLL)
            lV_j_jk <- perCase_ij
            lID_j_jk <- lID_ij;dim(lID_j_jk) <- c(K2_ij, maxLL)
            lW_j_jk <- lW_ij;dim(lW_j_jk) <- c(K2_ij, maxLL)
            if (is.na(jkFomValues[i, j, kIndxAll])) {
              jkFomValues[i, j, kIndxAll] <-
                MyFom_ij(nlij_jk, llij_jk, lV_j_jk,
                         lID_j_jk, lW_j_jk, maxNL, maxLL,
                         K1_ij - 1, K2_ij, FOM, FPFValue)
            } else stop("overwriting UtilPseudoValues")
            if (is.na(jkPseudoValues[i, j, kIndxAll])) {
              jkPseudoValues[i, j, kIndxAll] <-
                fomArray[i, j] * K_ij - jkFomValues[i, j, kIndxAll] * (K_ij - 1)
            } else stop("overwriting UtilPseudoValues")
          } else {
            nlij_jk <- nl_ij[-k, ];dim(nlij_jk) <- c(K_ij - 1, maxNL)
            llij_jk <- ll_ij[-(k - K1_ij), ];dim(llij_jk) <- c(K2_ij - 1, maxLL)
            lV_j_jk <- perCase_ij[-(k - K1_ij)]
            lW_j_jk <- lW_ij[-(k - K1_ij), ];dim(lW_j_jk) <- c(K2_ij - 1, maxLL)
            lID_j_jk <- lID_ij[-(k - K1_ij), ];dim(lID_j_jk) <- c(K2_ij - 1, maxLL)
            if (is.na(jkFomValues[i, j, kIndxAll])) {
              jkFomValues[i, j, kIndxAll] <-
                MyFom_ij(nlij_jk, llij_jk, lV_j_jk,
                         lID_j_jk, lW_j_jk, maxNL, maxLL,
                         K1_ij, K2_ij - 1, FOM, FPFValue)
            } else stop("overwriting UtilPseudoValues")
            if (is.na(jkPseudoValues[i, j, kIndxAll])) {
              jkPseudoValues[i, j, kIndxAll] <-
                fomArray[i, j] * K_ij - jkFomValues[i, j, kIndxAll] * (K_ij - 1)
            } else stop("overwriting UtilPseudoValues")
          }
        }
      }
      # Re-center pseudovalues so their mean equals the full-sample FOM.
      if (FOM %in% c("MaxNLF", "ExpTrnsfmSp", "HrSp")) {
        jkPseudoValues[i, j, which(k1_ij_logi)] <-
          jkPseudoValues[i, j, which(k1_ij_logi)] +
          (fomArray[i, j] - mean(jkPseudoValues[i, j, which(k1_ij_logi)]))
      } else if (FOM %in% c("MaxLLF", "HrSe")) {
        jkPseudoValues[i, j, which(k2_ij_logi)] <-
          jkPseudoValues[i, j, which(k2_ij_logi)] +
          (fomArray[i, j] - mean(jkPseudoValues[i, j, which(k2_ij_logi)]))
      } else {
        jkPseudoValues[i, j, which(k_ij_logi)] <-
          jkPseudoValues[i, j, which(k_ij_logi)] +
          (fomArray[i, j] - mean(jkPseudoValues[i, j, which(k_ij_logi)]))
      }
      caseTransitions[j] <- lastCase
      lastCase <- (lastCase + K_ij) %% K
    }
  }
  caseTransitions <- c(caseTransitions, K)
  return(list(
    jkPseudoValues = jkPseudoValues,
    jkFomValues = jkFomValues,
    caseTransitions = caseTransitions
  ))
}
combineStreamflow <- function(flowlist, mult, approx = FALSE) {
  # Combine multiple streamflow series into one weighted sum.
  #
  # flowlist: list with an element 'flows', a multi-column time series
  #           (xts) of streamflow values.
  # mult:     numeric multipliers applied column-wise; column i is scaled
  #           by mult[i].
  # approx:   if TRUE, linearly interpolate NA values in the combined
  #           series (zoo::na.approx).
  #
  # Returns an xts series of the row-wise sums of the scaled columns.
  flows <- flowlist$flows
  # seq_along() is safe when 'mult' is empty; 1:length(mult) would have
  # iterated over c(1, 0) and indexed column 0.
  for (i in seq_along(mult)) {
    flows[, i] <- flows[, i] * mult[i]
  }
  flows <- as.xts(rowSums(flows), order.by = index(flows))
  if (approx) {
    flows <- na.approx(flows)
  }
  return(flows)
}
.newMethodObj_CaseCohort <- function(info, par, minData, ...) {
  # Assemble the case-cohort method object: sampling weights (wg, wb),
  # total parameter count np, and the shared baseline information object.
  baseInfo <- .newBaseInfo(par = par, minData = minData)
  nParams <- length(x = baseInfo$beta) + length(x = baseInfo$et)
  return( list("wg" = info$wg,
               "wb" = info$wb,
               "np" = nParams,
               "baseInfo" = baseInfo) )
}
.loglik_CaseCohort <- function(object, ...) {
  # Weighted negative log-likelihood for interval-censored case-cohort data.
  # del1/del2 indicate the censoring pattern (event before U / in (U,V]);
  # Su and Sv are the survival probabilities at U and V.
  base <- object$baseInfo
  Su <- base$U$S
  Sv <- base$V$S
  del3 <- 1.0 - base$del1 - base$del2
  contrib <- base$del1 * log(x = 1.0 - Su) +
    base$del2 * log(x = Su - Sv) +
    del3 * log(x = Sv)
  negLL <- -sum(object$wg * object$wb * contrib)
  # Guard against log() of non-positive survival differences.
  if (is.nan(x = negLL)) return( Inf )
  return( negLL )
}
.dloglik_CaseCohort <- function(object, ...) {
  # Gradient of the weighted negative log-likelihood with respect to the
  # model parameters (eta, beta), combining the three censoring patterns.
  base <- object$baseInfo
  Su <- base$U$S
  Sv <- base$V$S
  dSu <- .deriv1S(object = base$U, et = base$et, beta = base$beta)
  dSv <- .deriv1S(object = base$V, et = base$et, beta = base$beta)
  gradBeforeU <- -dSu / {1.0 - Su}
  gradBetween <- {dSu - dSv} / {Su - Sv}
  gradAfterV <- dSv / Sv
  del3 <- 1.0 - base$del1 - base$del2
  score <- base$del1 * gradBeforeU +
    base$del2 * gradBetween +
    del3 * gradAfterV
  return( unname(-score * object$wb * object$wg) )
}
# Hessian of the weighted negative log-likelihood, accumulated subject by
# subject from first/second derivatives of S at U and V.
.ddloglik_CaseCohort <- function(object, ...) {
  n <- length(x = object$baseInfo$del1)
  np <- object$np
  res <- matrix(data = 0.0, nrow = np, ncol = np)
  Su <- object$baseInfo$U$S
  Sv <- object$baseInfo$V$S
  # Recycle scalar weights to one weight per subject.
  if (length(x = object$wb) == 1L) {
    object$wb <- rep(x = object$wb, times = n)
  }
  if (length(x = object$wg) == 1L) {
    object$wg <- rep(x = object$wg, times = n)
  }
  for (i in 1L:n) {
    # .derivS returns per-subject gradient (St) and Hessian (Stt) of S.
    dSu <- .derivS(object = object$baseInfo$U,
                   i = i,
                   et = object$baseInfo$et,
                   beta = object$baseInfo$beta)
    dSv <- .derivS(object = object$baseInfo$V,
                   i = i,
                   et = object$baseInfo$et,
                   beta = object$baseInfo$beta)
    # Second derivative of log-contributions for each censoring pattern;
    # %o% builds the outer-product (gradient squared) term.
    temp11 <- -dSu$Stt / {1.0-Su[i]} - dSu$St %o% dSu$St / {{1.0-Su[i]}^2}
    temp12 <- {dSu$Stt - dSv$Stt} / {Su[i]-Sv[i]} -
      {dSu$St - dSv$St} %o% {dSu$St - dSv$St} / {{Su[i]-Sv[i]}^2}
    temp13 <- dSv$Stt / Sv[i] - dSv$St %o% dSv$St/{Sv[i]^2}
    res <- res + {object$baseInfo$del1[i]*temp11 +
        object$baseInfo$del2[i]*temp12 +
        {1.0-object$baseInfo$del1[i]-object$baseInfo$del2[i]}*temp13}*
      object$wb[i]*object$wg[i]
  }
  return( unname(-res) )
}
.se_CaseCohort <- function(object, B, argList, ...) {
  # Bootstrap standard errors for beta via weighted perturbation resampling:
  # each replicate refits the model with iid Exp(1) subject weights.
  nBeta <- length(x = object$baseInfo$beta)
  nObs <- length(x = object$baseInfo$del1)
  boot <- matrix(data = NA, nrow = B, ncol = nBeta)
  for (b in 1L:B) {
    argList[[ "info" ]] <- list("wg" = object$wg,
                                "wb" = rexp(n = nObs, rate = 1.0))
    fit <- .myOptim(argList = argList)
    if (is.null(x = fit)) {
      warning(paste("optim did not converge for bootstrap iteration", b))
    } else {
      # Keep the beta estimates from the final optimization stage.
      boot[b, ] <- fit[[ length(x = fit) ]]$par[1L:nBeta]
    }
  }
  # Column-wise SD across successful replicates.
  se <- drop(x = apply(X = boot,
                       MARGIN = 2L,
                       FUN = sd,
                       na.rm = TRUE))
  names(x = se) <- names(x = object$baseInfo$beta)
  return( se )
}
.pValue <- function(object, se, ...) {
  # Two-sided Wald p-values for each regression coefficient: 2*(1 - Phi(|z|)).
  z <- abs(x = object$baseInfo$beta / se)
  pv <- 2.0 * {1.0 - pnorm(q = z, mean = 0.0, sd = 1.0)}
  names(x = pv) <- names(x = object$baseInfo$beta)
  return( pv )
}
.AIC <- function(object, value, ...) {
  # Akaike information criterion: 2 * (negative log-likelihood + #parameters),
  # where the parameters are the regression coefficients plus baseline terms.
  nPar <- length(object$baseInfo$beta) + length(object$baseInfo$et)
  return( 2.0 * {value + nPar} )
}
drop.scope.svisit <-
function (terms1, terms2, model = c("sta", "det"))
{
  # Droppable terms for one sub-model ("sta" or "det") of an svisit fit,
  # analogous to stats::drop.scope but dispatching terms() with the chosen
  # sub-model. 'terms2', when given, is the lower bound kept in the model.
  model <- match.arg(model)
  terms1 <- terms(terms1, model)
  lower <- if (missing(terms2)) {
    numeric(0)
  } else {
    attr(terms(terms2, model), "factors")
  }
  factor.scope(attr(terms1, "factors"), list(drop = lower))$drop
}
param_defaults <- function(values) {
  # Default parameter set of the Lemna growth model.
  #
  # values: optional named list of overrides; names not belonging to the
  #         model raise a warning but are still stored (as in the original).
  #
  # Returns a named list of model parameters.
  defaults <- list(
    k_photo_fixed = FALSE,
    k_photo_max = 0.47,
    k_loss = 0.05,
    BM_threshold = 5e-4,
    BM_min = 0,
    T_opt = 26.7,
    T_min = 8,
    T_max = 40.5,
    Q10 = 2,
    T_ref = 25,
    alpha = 5e-5,
    beta = 0.025,
    N_50 = 0.034,
    P_50 = 0.0043,
    BM_L = 177,
    E_max = 1,
    EC50_int = NA,
    b = NA,
    P = NA,
    r_A_DW = 1000,
    r_FW_DW = 16.7,
    r_FW_V = 1,
    r_DW_FN = 1e-4,
    K_pw = 1,
    k_met = 0
  )
  if (!missing(values)) {
    known <- names(defaults)
    for (nm in names(values)) {
      if (!nm %in% known) {
        warning(paste("parameter", nm, "is not part of the Lemna model"))
      }
      defaults[[nm]] <- values[[nm]]
    }
  }
  defaults
}
param_new <- function(values) {
  # Empty (all-NA) Lemna model parameter set; every parameter must be
  # supplied by the user before the model can be run.
  #
  # values: optional named list of overrides; names not belonging to the
  #         model raise a warning but are still stored (as in param_defaults).
  #
  # Returns a named list of model parameters, all NA unless overridden.
  nms <- c("k_photo_fixed", "k_photo_max", "k_loss", "BM_threshold",
           "BM_min", "T_opt", "T_min", "T_max", "Q10", "T_ref",
           "alpha", "beta", "N_50", "P_50", "BM_L", "E_max",
           "EC50_int", "b", "P", "r_A_DW", "r_FW_DW", "r_FW_V",
           "r_DW_FN", "K_pw", "k_met")
  params <- setNames(as.list(rep(NA, length(nms))), nms)
  if (!missing(values)) {
    for (nm in names(values)) {
      if (!nm %in% names(params)) {
        warning(paste("parameter", nm, "is not part of the Lemna model"))
      }
      params[[nm]] <- values[[nm]]
    }
  }
  params
}
# --- Base-R coercion generics and default methods (base/coerce.R style).
# These delegate to .Internal primitives and must not be reimplemented in
# plain R; bodies are kept verbatim.
# as.single: double storage tagged with Csingle=TRUE (for .Fortran/.C).
as.single <- function(x,...) UseMethod("as.single")
as.single.default <- function(x,...)
    structure(.Internal(as.vector(x,"double")), Csingle=TRUE)
as.character.default <- function(x,...) .Internal(as.vector(x, "character"))
as.expression <- function(x,...) UseMethod("as.expression")
as.expression.default <- function(x,...) .Internal(as.vector(x, "expression"))
as.list <- function(x,...) UseMethod("as.list")
as.list.default <- function (x, ...)
    if (typeof(x) == "list") x else .Internal(as.vector(x, "list"))
# A function becomes its formals plus body as the last element.
as.list.function <- function (x, ...) c(formals(x), list(body(x)))
# Dropping class and row.names turns a data.frame into its column list.
as.list.data.frame <- function(x,...) {
    x <- unclass(x)
    attr(x,"row.names") <- NULL
    x
}
as.vector.data.frame <- function(x, mode = "any") {
    x <- as.list.data.frame(x)
    if(mode %in% c("any", "list"))
        x
    else as.vector(x, mode=mode)
}
as.list.environment <- function(x, all.names=FALSE, sorted=FALSE, ...)
    .Internal(env2list(x, all.names, sorted))
as.vector <- function(x, mode = "any") .Internal(as.vector(x, mode))
as.matrix <- function(x, ...) UseMethod("as.matrix")
# Default: wrap a vector as a one-column matrix, preserving names as rownames.
as.matrix.default <- function(x, ...) {
    if (is.matrix(x)) x
    else
	array(x, c(length(x), 1L),
	      if(!is.null(names(x))) list(names(x), NULL) else NULL)
}
as.null <- function(x,...) UseMethod("as.null")
as.null.default <- function(x,...) NULL
as.function <- function(x,...) UseMethod("as.function")
as.function.default <- function (x, envir = parent.frame(), ...)
    if (is.function(x)) x else .Internal(as.function.default(x, envir))
as.array <- function(x, ...) UseMethod("as.array")
# Default: give a vector a dim attribute (1-d array), keeping names.
as.array.default <- function(x, ...)
{
    if(is.array(x)) return(x)
    n <- names(x)
    dim(x) <- length(x)
    if(length(n)) dimnames(x) <- list(n)
    return(x)
}
as.symbol <- function(x) .Internal(as.vector(x, "symbol"))
as.name <- as.symbol
# Deliberately unsupported coercion (see base R sources).
as.qr <- function(x) stop("you cannot be serious", domain = NA)
# MRPCA: missing-value imputation via iterative (probabilistic) PCA
# reconstruction.
#
# data:    complete reference matrix (used only when real=FALSE to score
#          imputation accuracy against the truth).
# data0:   matrix with NA entries to impute.
# real:    TRUE for real data (no accuracy metrics), FALSE for simulations.
# example: if TRUE, use the relaxed variance threshold (0.7) as for real data.
#
# Returns the imputed matrix XMRPCA, accuracy metrics (MSE/MAE/RE or 'NULL'),
# a GCV score, and the elapsed time.
MRPCA=function(data=0,data0,real=TRUE,example=FALSE)
{
  # Cumulative-variance threshold used to pick the number of components k.
  if (real||example){
    etatol=0.7
  }else{
    etatol=0.9
  }
  lll=0
  time=system.time(
    while(lll==0){
      X0=data0
      n=nrow(X0);p=ncol(X0)
      # Linear indices / row / column positions of the missing entries.
      mr=which(is.na(X0)==TRUE)
      m=nrow(as.matrix(mr))
      cm0=colMeans(X0,na.rm=T)
      ina=as.matrix(mr%%n)
      jna=as.matrix(floor((mr+n-1)/n))
      # Initialize missing cells with their column means.
      data0[is.na(data0)]=cm0[ceiling(which(is.na(X0))/n)]
      X=as.matrix(data0)
      Z=scale(X,center=TRUE,scale=FALSE)
      niter=0;d=1;tol=1e-5;nb=10
      # Outer loop: alternate PCA fit and reconstruction until the centered
      # matrix stops changing (Frobenius norm < tol) or nb iterations.
      while((d>=tol) & (niter<=nb)){
        niter=niter+1
        Xold=X
        Zold=Z
        R=cor(Z)
        lambda=svd(R)$d
        l=lambda/sum(lambda)
        # Cumulative explained variance; k = first index reaching etatol.
        J=rep(l,times=p);dim(J)=c(p,p)
        upper.tri(J,diag=T);J[lower.tri(J)]=0
        eta=matrix(colSums(J),nrow = 1,ncol = p,byrow = FALSE)
        ww=which(eta>=etatol)
        k=ww[1]
        Lambda=svd(Z)$d
        A=svd(Z)$v
        B=svd(Z)$u
        Lambdak=diag(sqrt(lambda[1:k]),k,k)
        Ak=matrix(A[,1:k],p,k);Bk=matrix(B[,1:k],n,k)
        # Residual eigenvalues give the noise-variance estimate (PPCA).
        Lambdapk=diag(sqrt(lambda[(k+1):p]),p-k,p-k)
        sigma2hat=sum(diag(Lambdapk%*%Lambdapk))/(p-k)
        # Row by row, rebuild the missing coordinates from the k-component
        # probabilistic PCA reconstruction.
        for( i in 1:n){
          M=is.na(X0[i,])
          job=which(M==FALSE);jna=which(M==TRUE)
          piob=nrow(as.matrix(job));pina=nrow(as.matrix(jna))
          while((piob>0)&(pina>0)){
            # Qi permutes coordinates: observed first, missing last.
            Qi=matrix(0,p,p)
            for( u in 1:piob){
              Qi[job[u],u]=1
            }
            for( v in 1:pina){
              Qi[jna[v],v+piob]=1
            }
            zi=Z[i,]
            zQi=zi%*%Qi
            ZQi=Z%*%Qi
            AQi=t(t(Ak)%*%Qi)
            ziob=matrix(zQi[,1:piob],1,piob)
            zina=matrix(zQi[,piob+(1:pina)],1,pina)
            Ziob=matrix(ZQi[,1:piob],n,piob,byrow=FALSE)
            Zina=matrix(ZQi[,piob+(1:pina)],n,pina,byrow=FALSE)
            Aiob=matrix(AQi[1:piob,],piob,k,byrow=FALSE)
            Aina=matrix(AQi[piob+(1:pina),],pina,k,byrow=FALSE)
            Cihat=n^(-1/2)*Aina%*%(Lambdak%*%Lambdak-sigma2hat*diag(k))^(1/2)
            tihat=n^(1/2)*solve(Lambdak)%*%(Lambdak%*%Lambdak-sigma2hat*diag(k))^(1/2)%*%Bk[i,]
            zinahat=Cihat%*%tihat
            ZQi[i,piob+(1:pina)]=zinahat
            # Permute back and store the updated row.
            Zi=ZQi%*%t(Qi)
            Z=Zi
            pina=0
          }
        }
        ZMRPCA=Znew=Z
        d=sqrt(sum(diag((t(Zold-Znew)%*%(Zold-Znew)))))
      }
      # Undo the centering to recover the imputed data matrix.
      XMRPCA=Xnew=Znew+matrix(rep(1,n*p),ncol=p)%*%diag(cm0)
      lll=1
    }
  )
  # Accuracy metrics only make sense when the true values are known.
  if(real){
    MSEMRPCA= MAEMRPCA= REMRPCA='NULL'
  }else{
    MSEMRPCA=(1/m)*t(Xnew[mr]-data[mr])%*%(Xnew[mr]-data[mr])
    MAEMRPCA=(1/m)*sum(abs(Xnew[mr]-data[mr]))
    REMRPCA=(sum(abs(data[mr]-Xnew[mr])))/(sum(data[mr]))
  }
  # Generalized cross-validation score based on residual eigenvalues of the
  # imputed matrix's correlation structure.
  lambdaMRPCA=svd(cor(XMRPCA))$d
  lMRPCA=lambdaMRPCA/sum(lambdaMRPCA);J=rep(lMRPCA,times=p);dim(J)=c(p,p)
  upper.tri(J,diag=T);J[lower.tri(J)]=0;dim(J)=c(p,p)
  etaMRPCA=matrix(colSums(J),nrow = 1,ncol = p,byrow = FALSE)
  wwMRPCA=which(etaMRPCA>=etatol);kMRPCA=wwMRPCA[1]
  lambdaMRPCApk=lambdaMRPCA[(kMRPCA+1):p]
  GCVMRPCA=sum(lambdaMRPCApk)*p/(p-kMRPCA)^2
  return(list(XMRPCA=XMRPCA,MSEMRPCA=MSEMRPCA,MAEMRPCA=MAEMRPCA,REMRPCA=REMRPCA,GCVMRPCA=GCVMRPCA,timeMRPCA=time))
}
# Full matrix of second derivatives of lambda (dominant eigenvalue) with
# respect to every nonzero entry of the projection matrix A, labelled by
# "rowcol" index strings.
`fullsecder` <-
function(A){
  q <- A != 0
  size <- dim(A)
  qq <- matrix(q,ncol=1)
  D <- NULL
  # One column of second derivatives per nonzero a[i,j], masked to the
  # nonzero positions of A.
  for(j in 1:size[2]){
    for(i in 1:size[1]){
      if(A[i,j]!=0){
        d2 <- secder(A,i,j)
        D <- cbind(D, matrix(d2,ncol=1)*qq)
      }
    }
  }
  # Keep only rows corresponding to nonzero entries.
  qq <- which(D[,1] !=0)
  D <- D[qq,]
  dd <- dim(D)
  # NOTE(review): rows/columns are re-ordered using A > 0 here but A != 0
  # above; these differ if A has negative entries -- confirm A is a
  # nonnegative projection matrix.
  uu <- which(A>0, arr.ind=TRUE)
  o <- order(uu[,1])
  uu <- uu[o,]
  D <- D[o,o]
  m <- length(uu[,1])
  # Build "ij" labels (row index pasted to column index) for dimnames.
  uuu <- rep(0,m)
  for(i in 1:m) uuu[i] <- paste(uu[i,1],uu[i,2],sep="")
  D <- matrix(D, nrow=dd[1], ncol=dd[2], dimnames=list(uuu,uuu))
  D
}
# Fit a MARSS state-space model by quasi-Newton optimization (optim/BFGS).
# Variance matrices Q, R and V0 are parameterized through their Cholesky
# factors so the search stays in the positive-definite cone; the chol
# parameters are mapped back to variances before the Kalman filter runs.
MARSSoptim <- function(MLEobj) {
  # Objective: negative log-likelihood at parameter vector x. The chol
  # parameterization of Q/R/V0 is undone (L %*% t(L)) before MARSSkf.
  neglogLik <- function(x, MLEobj = NULL) {
    MLEobj <- MARSSvectorizeparam(MLEobj, x)
    free <- MLEobj$marss$free
    pars <- MLEobj$par
    par.dims <- attr(MLEobj[["marss"]], "model.dims")
    for (elem in c("Q", "R", "V0")) {
      if (!is.fixed(free[[elem]]))
      {
        d <- sub3D(free[[elem]], t = 1)
        par.dim <- par.dims[[elem]][1:2]
        L <- unvec(d %*% pars[[elem]], dim = par.dim)
        the.par <- tcrossprod(L)
        # Project the full variance matrix back onto the free parameters.
        MLEobj$par[[elem]] <- solve(crossprod(d)) %*% t(d) %*% vec(the.par)
      }
    }
    MLEobj$marss$fixed <- MLEobj$fixed.original
    MLEobj$marss$free <- MLEobj$free.original
    negLL <- MARSSkf(MLEobj, only.logLik = TRUE, return.lag.one = FALSE)$logLik
    -1 * negLL
  }
  if (!inherits(MLEobj, "marssMLE")) {
    stop("Stopped in MARSSoptim(). Object of class marssMLE is required.\n", call. = FALSE)
  }
  # The chol trick only works when the estimated Q/R blocks are time-constant.
  for (elem in c("Q", "R")) {
    if (dim(MLEobj$model$free[[elem]])[3] > 1) {
      stop(paste("Stopped in MARSSoptim() because this function does not allow estimated part of ", elem, " to be time-varying.\n", sep = ""), call. = FALSE)
    }
  }
  MODELobj <- MLEobj[["marss"]]
  y <- MODELobj$data
  free <- MODELobj$free
  fixed <- MODELobj$fixed
  tmp.inits <- MLEobj$start
  control <- MLEobj$control
  par.dims <- attr(MODELobj, "model.dims")
  m <- par.dims[["x"]][1]
  n <- par.dims[["y"]][1]
  # Pass through only the control options that optim() understands.
  control.names <- c("trace", "fnscale", "parscale", "ndeps", "maxit", "abstol", "reltol", "alpha", "beta", "gamma", "REPORT", "type", "lmm", "factr", "pgtol", "temp", "tmax")
  optim.control <- list()
  for (elem in control.names) {
    if (!is.null(control[[elem]])) optim.control[[elem]] <- control[[elem]]
  }
  if (is.null(control[["lower"]])) {
    lower <- -Inf
  } else {
    lower <- control[["lower"]]
  }
  if (is.null(control[["upper"]])) {
    upper <- Inf
  } else {
    upper <- control$upper
  }
  if (control$trace == -1) optim.control$trace <- 0
  # Build a working copy whose Q/R/V0 start values are chol factors.
  tmp.MLEobj <- MLEobj
  tmp.MLEobj$fixed.original <- tmp.MLEobj$marss$fixed
  tmp.MLEobj$free.original <- tmp.MLEobj$marss$free
  tmp.MLEobj$par <- tmp.inits
  for (elem in c("Q", "R", "V0")) {
    d <- sub3D(free[[elem]], t = 1)
    f <- sub3D(fixed[[elem]], t = 1)
    the.par <- unvec(f + d %*% tmp.inits[[elem]], dim = par.dims[[elem]][1:2])
    # Zero diagonals would break chol(); temporarily set them to 1.
    is.zero <- diag(the.par) == 0
    if (any(is.zero)) diag(the.par)[is.zero] <- 1
    the.par <- t(chol(the.par))
    if (any(is.zero)) diag(the.par)[is.zero] <- 0
    if (!is.fixed(free[[elem]])) {
      tmp.MLEobj$par[[elem]] <- solve(crossprod(d)) %*% t(d) %*% vec(the.par)
    } else {
      tmp.MLEobj$par[[elem]] <- matrix(0, 0, 1)
    }
    # Restrict the free structure to the lower triangle (chol factor).
    tmp.list.mat <- fixed.free.to.formula(f, d, par.dims[[elem]][1:2])
    tmp.list.mat[upper.tri(tmp.list.mat)] <- 0
    tmp.MLEobj$marss$free[[elem]] <- convert.model.mat(tmp.list.mat)$free
  }
  pars <- MARSSvectorizeparam(tmp.MLEobj)
  if (substr(tmp.MLEobj$method, 1, 4) == "BFGS") {
    optim.method <- "BFGS"
  } else {
    optim.method <- "something wrong"
  }
  kf.function <- MLEobj$fun.kf
  optim.output <- try(optim(pars, neglogLik, MLEobj = tmp.MLEobj, method = optim.method, lower = lower, upper = upper, control = optim.control, hessian = FALSE), silent = TRUE)
  # If the fast Kalman routine fails, retry with the slower MARSSkfss.
  if (inherits(optim.output, "try-error")) {
    if (MLEobj$fun.kf != "MARSSkfss") {
      cat("MARSSkfas returned error. Trying MARSSkfss.\n")
      tmp.MLEobj$fun.kf <- "MARSSkfss"
      kf.function <- "MARSSkfss"
      optim.output <- try(optim(pars, neglogLik, MLEobj = tmp.MLEobj, method = optim.method, lower = lower, upper = upper, control = optim.control, hessian = FALSE), silent = TRUE)
    }
  }
  if (inherits(optim.output, "try-error")) {
    optim.output <- list(convergence = 53, message = c("MARSSkfas and MARSSkfss tried to compute log likelihood and encountered numerical problems.\n", sep = ""))
  }
  MLEobj.return <- MLEobj
  MLEobj.return$iter.record <- optim.output$message
  MLEobj.return$start <- tmp.inits
  MLEobj.return$convergence <- optim.output$convergence
  if (optim.output$convergence %in% c(1, 0)) {
    if ((!control$silent || control$silent == 2) && optim.output$convergence == 0) cat(paste("Success! Converged in ", optim.output$counts[1], " iterations.\n", "Function ", kf.function, " used for likelihood calculation.\n", sep = ""))
    if ((!control$silent || control$silent == 2) && optim.output$convergence == 1) cat(paste("Warning! Max iterations of ", control$maxit, " reached before convergence.\n", "Function ", kf.function, " used for likelihood calculation.\n", sep = ""))
    # Map the fitted chol parameters back to variance parameters.
    tmp.MLEobj <- MARSSvectorizeparam(tmp.MLEobj, optim.output$par)
    for (elem in c("Q", "R", "V0")) {
      if (!is.fixed(MODELobj$free[[elem]]))
      {
        d <- sub3D(tmp.MLEobj$marss$free[[elem]], t = 1)
        par.dim <- par.dims[[elem]][1:2]
        L <- unvec(tmp.MLEobj$marss$free[[elem]][, , 1] %*% tmp.MLEobj$par[[elem]], dim = par.dim)
        the.par <- tcrossprod(L)
        tmp.MLEobj$par[[elem]] <- solve(crossprod(d)) %*% t(d) %*% vec(the.par)
      }
    }
    pars <- MARSSvectorizeparam(tmp.MLEobj)
    MLEobj.return <- MARSSvectorizeparam(MLEobj.return, pars)
    # Final Kalman smoother pass at the fitted parameters.
    kf.out <- try(MARSSkf(MLEobj.return), silent = TRUE)
    if (inherits(kf.out, "try-error")) {
      MLEobj.return$numIter <- optim.output$counts[1]
      MLEobj.return$logLik <- -1 * optim.output$value
      MLEobj.return$errors <- c(paste0("\nWARNING: optim() successfully fit the model but ", kf.function, " returned an error with the fitted model. Try MARSSinfo('optimerror54') for insight.", sep = ""), "\nError: ", kf.out[1])
      MLEobj.return$convergence <- 54
      MLEobj.return <- MARSSaic(MLEobj.return)
      kf.out <- NULL
    }
  } else {
    if (optim.output$convergence == 10) optim.output$message <- c("degeneracy of the Nelder-Mead simplex\n", paste("Function ", kf.function, " used for likelihood calculation.\n", sep = ""), optim.output$message)
    optim.output$counts <- NULL
    if (!control$silent) cat("MARSSoptim() stopped with errors. No parameter estimates returned.\n")
    if (control$silent == 2) cat("MARSSoptim() stopped with errors. No parameter estimates returned. See $errors in output for details.\n")
    MLEobj.return$par <- NULL
    MLEobj.return$errors <- optim.output$message
    kf.out <- NULL
  }
  if (!is.null(kf.out)) {
    if (control$trace > 0) MLEobj.return$kf <- kf.out
    MLEobj.return$states <- kf.out$xtT
    MLEobj.return$numIter <- optim.output$counts[1]
    MLEobj.return$logLik <- kf.out$logLik
  }
  MLEobj.return$method <- MLEobj$method
  if (!is.null(kf.out)) MLEobj.return <- MARSSaic(MLEobj.return)
  return(MLEobj.return)
}
# Renjin hamcrest test: internal structure of a POSIXlt datetime value.
library(hamcrest)
x <- as.POSIXlt("2015-01-02 03:04:06.07", tz = "UTC")
# POSIXlt is a named list of broken-down time components.
assertThat(names(unclass(x)), identicalTo(c("sec", "min", "hour", "mday", "mon", "year", "wday", "yday", "isdst")))
assertThat(names(attributes(x)), identicalTo(c("names", "class", "tzone")))
# Components: fractional seconds kept in 'sec'; 'mon' is 0-based; 'year'
# counts from 1900; 2015-01-02 is a Friday (wday = 5) and yday is 0-based.
assertTrue(identical(x, structure(
  list(
    sec = 6.07,
    min = 4L,
    hour = 3L,
    mday = 2L,
    mon = 0L,
    year = 115L,
    wday = 5L,
    yday = 1L,
    isdst = 0L),
  class = c("POSIXlt", "POSIXt"),
  tzone = "UTC")))
# Roxygen documentation placeholder for the preceding block.
NULL
# R6 wrapper around a SQL query: holds the connection, the SQL string and
# the expected result columns, with eager and paged fetch helpers.
Query <- R6::R6Class("Query",
  private = list(
    # Cached character vector of result column names.
    .vars = NULL
  ),
  public = list(
    con = NULL,
    sql = NULL,
    initialize = function(con, sql, vars) {
      self$con <- con
      self$sql <- sql
      private$.vars <- vars
    },
    print = function(...) {
      cat("<Query> ", self$sql, "\n", sep = "")
      print(self$con)
    },
    # Fetch up to n rows (default: all); warns if the result was truncated.
    fetch = function(n = -1L) {
      res <- dbSendQuery(self$con, self$sql)
      on.exit(dbClearResult(res))
      out <- dbFetch(res, n)
      res_warn_incomplete(res)
      out
    },
    # Stream the result in chunks, invoking callback(chunk) for each one.
    fetch_paged = function(chunk_size = 1e4, callback) {
      qry <- dbSendQuery(self$con, self$sql)
      on.exit(dbClearResult(qry))
      while (!dbHasCompleted(qry)) {
        chunk <- dbFetch(qry, chunk_size)
        callback(chunk)
      }
      invisible(TRUE)
    },
    vars = function() {
      private$.vars
    },
    ncol = function() {
      length(self$vars())
    }
  )
)
# Build an RVineMatrix object for a C-vine from a node order and the pair-copula
# families/parameters given in lower-triangular (tree-by-tree) order.
#
# order:  permutation of 1:d giving the C-vine node ordering.
# family: vector of d*(d-1)/2 copula family codes.
# par:    first copula parameters, same length as family.
# par2:   second copula parameters (default 0), same length as family.
#
# Returns the RVineMatrix object describing the C-vine.
C2RVine <- function(order, family, par, par2 = rep(0, length(family))) {
  dd <- length(family)
  if (dd < 1)
    stop("Length of 'family' has to be positive.")
  d <- (1 + sqrt(1 + 8 * dd))/2
  # dd must be a triangular number d*(d-1)/2; a non-integer d would silently
  # truncate the matrix dimensions below.
  if (abs(d - round(d)) > .Machine$double.eps^0.5)
    stop("Length of 'family' must equal d*(d-1)/2 for some integer dimension d.")
  d <- round(d)
  if (length(par) != length(par2))
    stop("Lengths of 'par' and 'par2' do not match.")
  if (length(par) != dd)
    stop("Lengths of 'family' and 'par' do not match.")
  if (length(order) != d)
    stop("Length of 'order' and dimension of the C-vine do not match.")
  BiCopCheck(family, par, par2)
  Matrix <- matrix(0, d, d)
  Copula.Params <- matrix(0, d, d)
  Copula.Params2 <- matrix(0, d, d)
  Copula.Types <- matrix(0, d, d)
  # Structure matrix: row d - i + 1 carries order[i] in its first d - i + 1
  # columns, producing the canonical C-vine layout.
  for (i in 1:d) {
    for (j in 1:(d - i + 1)) {
      Matrix[(d - i + 1), j] <- order[i]
    }
  }
  # Fill family/parameter matrices tree by tree, consuming the flat inputs.
  k <- 1
  for (i in 1:(d - 1)) {
    for (j in 1:(d - i)) {
      Copula.Types[(d - i + 1), (d - j - i + 1)] <- family[k]
      Copula.Params[(d - i + 1), (d - j - i + 1)] <- par[k]
      Copula.Params2[(d - i + 1), (d - j - i + 1)] <- par2[k]
      k <- k + 1
    }
  }
  RVineMatrix(Matrix = Matrix,
              family = Copula.Types,
              par = Copula.Params,
              par2 = Copula.Params2)
}
setMethod(
  f = "overhead",
  signature = "USL",
  # Decompose the USL prediction at each load point into its ideal (1/x),
  # contention (alpha term) and coherency (beta term) components.
  #
  # object:  a fitted USL model.
  # newdata: optional data frame of load values; defaults to the model frame.
  #
  # Returns a length(x) x 3 matrix with columns ideal/contention/coherency.
  definition = function(object, newdata) {
    if (missing(newdata)) newdata <- object@frame
    x <- newdata[, object@regr, drop=TRUE]
    y.ideal <- 1 / x
    y.contention <- coef(object)[['alpha']] * (x - 1) / x
    y.coherency <- coef(object)[['beta']] * (1/2) * (x - 1)
    col.names <- c("ideal", "contention", "coherency")
    # seq_along(x), not seq(x): for a length-1 numeric vector seq(x) expands
    # to 1:x, yielding dimnames of the wrong length for single-row newdata.
    matrix(c(y.ideal, y.contention, y.coherency),
           nrow = length(x), dimnames = list(seq_along(x), col.names))
  }
)
# Display a numeric matrix/data.frame as a heat-map, either via lattice
# (latticeVersion=TRUE: levelplot with optional grid lines and in-cell values)
# or via base graphics::image().
# data: numeric matrix/data.frame (a vector is coerced to a 1-column matrix);
# transp: transpose before plotting; NAcol: color for NA cells; rowNa/colNa:
# row/column labels (default dimnames); col: color gradient (2-3 anchor colors
# for lattice, or a palette name for the base version); nColor: number of
# color bins (lattice); balanceCol/centColShift: control how the neg/pos
# gradients are balanced around the value closest to 0; remaining arguments
# tune labels, ticks and sizes.
imageW <- function(data, latticeVersion=FALSE, transp=TRUE, NAcol="grey95", rowNa=NULL, colNa=NULL, tit=NULL, xLab=NA, yLab=NA, las=2,
    col=NULL, nColor=9, balanceCol=TRUE, gridCol="grey75", gridLty=1, centColShift=0, cexDispl=NULL, panel.background.col="white",
    rotXlab=0, rotYlab=0, cexXlab=0.7, cexAxs=NULL, cexYlab=0.9, Xtck=0, Ytck=0, cexTit=1.6, silent=FALSE, debug=FALSE, callFrom=NULL, ...) {
  fxNa <- wrMisc::.composeCallName(callFrom, newNa="imageW")
  argNa <- deparse(substitute(data))
  if(debug) silent <- FALSE
  # Validate input: only numeric data is plotted; vectors become 1-col matrices.
  doPlot <- if(length(data) >0) is.numeric(data) else FALSE
  if(length(dim(data)) <2) data <- try(matrix(as.numeric(data), ncol=1, dimnames=list(names(data), NULL)))
  if("try-error" %in% class(data)) doPlot <- FALSE else {
    if(is.data.frame(data) & doPlot) {doPlot <- is.numeric(as.matrix(data)); data <- as.matrix(data)}}
  if(doPlot) {
    # Fill in default row/column labels from dimnames or indices.
    if(length(rowNa) <nrow(data)) rowNa <- rownames(data)
    if(length(rowNa) <1) rowNa <- if(length(nrow(data)) >1) 1:nrow(data) else ""
    if(length(colNa) < ncol(data)) colNa <- colnames(data)
    if(length(colNa) <1) colNa <- if(length(ncol(data)) >1) 1:ncol(data) else ""
    if(is.null(xLab)) xLab <- ""
    if(is.null(yLab)) yLab <- ""
    if(latticeVersion) {
      # Lattice branch: reshape data to long (x, y, z) form for levelplot.
      if(!transp) data <- t(data)
      if(length(rotXlab)==0 & any(las %in% c(2,3))) rotXlab <- 0
      if(length(rotYlab)==0 & any(las %in% c(0,3))) rotYlab <- 0
      ma2 <- expand.grid(1:ncol(data), 1:nrow(data))
      ma2 <- cbind(ma2, as.numeric(t(data[nrow(data):1,])))
      colnames(ma2) <- c("x","y","z")
      if(any(is.na(xLab))) xLab <- NULL
      if(any(is.na(yLab))) yLab <- NULL
      if(length(col) <2) col <- c("blue","grey80","red")
      nCol2 <- try(round(nColor[1]))
      msg <- " argument 'nColor' should contain integer at least as high as numbers of colors defined to pass through; resetting to default=9"
      if("try-error" %in% class(nCol2)) nCol2 <- NULL
      if(nCol2 < length(col)) { if(!silent) message(fxNa,msg)
        nCol2 <- 9 }
      # Build nCol2 equally spaced breaks over the data range.
      miMa <- range(data, na.rm=TRUE)
      width <- (miMa[2] - miMa[1])/ nCol2
      bre <- miMa[1] + (0:nCol2) *width
      # clo0br: index of the break just above the data value closest to 0;
      # used to split the palette into a negative and a positive gradient.
      clo0 <- which.min(abs(as.numeric(data)))
      clo0br <- min(which(bre >= as.numeric(data)[clo0]))
      if(clo0br >1 & clo0br < length(bre) -1 & length(col) >2) {
        maxLe <- max(clo0br -1, length(bre) -clo0br) -as.integer(balanceCol)
        # NOTE(review): length(clo0br-1) is always 1 (length of a scalar);
        # presumably clo0br-1 was intended -- only reached when balanceCol=FALSE.
        negCol <- try(grDevices::colorRampPalette(col[1:2])(if(balanceCol) maxLe else length(clo0br-1)))
        if("try-error" %in% class(negCol)) {if(!silent) message(fxNa,"invalid color-gradient for neg values")
        }
        negCol <- negCol[-length(negCol)]
        # NOTE(review): same pattern here -- length(length(bre)-1-clo0br) is 1.
        posCol <- try((grDevices::colorRampPalette(col[2:3])(if(balanceCol) maxLe else length(length(bre) -1 -clo0br))) [])
        if("try-error" %in% class(posCol)) { if(!silent) message(fxNa,"invalid color-gradient for pos values")
        }
        if(debug) message(fxNa, "/1 clo0br ",clo0br," max nCol ",nCol2," le negCol ",length(negCol)," le posCol ",length(posCol))
        if(balanceCol) {
          # Adjust neg/pos gradient lengths so both sides line up with the
          # break closest to 0, optionally shifted by centColShift bins.
          centColShift <- if(length(centColShift) <1 | !is.numeric(centColShift)) 0 else as.integer(centColShift)
          .keepLastN <- function(x,lastN) x[(length(x) -lastN +1):length(x)]
          if(length(negCol) != clo0br -2 +centColShift) {
            if(debug) message(fxNa," correct negCol (prev=",length(negCol),") centColShift=",centColShift," to : ",clo0br -2 +centColShift)
            if(length(negCol) > clo0br -2 +centColShift) negCol <- .keepLastN(negCol, clo0br -2 +centColShift)
            if(length(negCol) < clo0br -2 +centColShift) {negCol <- grDevices::colorRampPalette(col[1:2])(clo0br -1 +centColShift)
              negCol <- negCol[-length(negCol)] }
          }
          if(length(posCol) != length(bre) -length(negCol) -1) {
            if(debug) message(fxNa," corr posCol (prev ",length(posCol),") to ",maxLe + centColShift," to ",length(bre) -length(negCol) -1)
            if(length(posCol) > length(bre) -length(negCol) -1) posCol <- posCol[1:(length(bre) -clo0br)]
            if(length(posCol) < length(bre) -length(negCol) -1) {
              posCol <- grDevices::colorRampPalette(col[2:3])(length(bre) -length(negCol) -1) }
          }
        }
        cols <- c(negCol, posCol)
        if(debug) message(fxNa, "/2 clo0br ",clo0br," max nCol ",nCol2," le cols ",length(cols)," le negCol ",length(negCol)," le posCol ",length(posCol))
      } else {
        # Data does not straddle an interior break: one continuous gradient.
        cols <- if(length(col)==2) grDevices::colorRampPalette(col[1:2])(length(bre) -1) else {
          c(grDevices::colorRampPalette(col[1:2])(floor(length(bre)/2)), (grDevices::colorRampPalette(col[2:3])(length(bre) -floor(length(bre)/2)))[-1])
        }
      }
      # Custom panel: NA background, optional in-cell values, optional grid.
      myPanel <- function(...) {
        grid::grid.rect(gp=grid::gpar(col=NA, fill=NAcol))
        lattice::panel.levelplot(...)
        argXYZ <- list(...)
        if(length(cexDispl)==1 & is.numeric(cexDispl)) lattice::panel.text(argXYZ$x, argXYZ$y, signif(argXYZ$z,2), cex=cexDispl)
        if(any(is.na(gridCol))) gridCol <- NULL
        chGri <- (1:6) %in% gridLty
        if(length(gridCol) >0 & any(chGri)) {
          lattice::panel.abline(h=0.5 +1:(nrow(data) -1), col=gridCol, lty=gridLty)
          lattice::panel.abline(v=0.5 +1:(ncol(data) -1), col=gridCol, lty=gridLty) }
      }
      if(doPlot) lattice::levelplot(z ~ x *y, data = ma2, aspect=nrow(data)/ncol(data), col.regions=cols,
        region = TRUE, cuts =length(cols) -1, xlab = xLab, ylab = yLab, main = tit,
        scales=list(relation="free", x=list(at=1:ncol(data), labels=if(transp) colNa else rowNa, cex=cexXlab, rot=rotXlab, tck=as.numeric(Xtck)),
          y=list(at=nrow(data):1, labels=if(transp) rowNa else colNa, cex=cexYlab, rot=rotYlab, tck=as.numeric(Ytck))),
        par.settings=list(axis.line=list(col='black')),
        panel=myPanel
      )
    } else {
      # Base-graphics branch: graphics::image() with optional RColorBrewer
      # palettes selected by name.
      if(transp) data <- t(data)
      if(ncol(data) >1) data <- data[,ncol(data):1]
      if(identical(col,"heat.colors") | identical(col,"heatColors")) col <- rev(grDevices::heat.colors(sort(c(15, prod(dim(data)) +2))[2] ))
      chRCo <- requireNamespace("RColorBrewer", quietly=TRUE)
      msgRCo <- c(fxNa,": package 'RColorBrewer' not installed",", ignore argument 'col'")
      if(identical(col,"YlOrRd")) {if(chRCo) col <- RColorBrewer::brewer.pal(9,"YlOrRd") else { col <- NULL; if(!silent) message(msgRCo) }}
      if(identical(col,"RdYlGn")) {if(chRCo) col <- RColorBrewer::brewer.pal(11,"RdYlGn") else { col <- NULL; if(!silent) message(msgRCo) }}
      if(identical(col,"Spectral")) {if(chRCo) col <- RColorBrewer::brewer.pal(11,"Spectral") else { col <- NULL; if(!silent) message(msgRCo) }}
      if(identical(col,"RdBu")) {if(chRCo) col <- RColorBrewer::brewer.pal(11,"RdBu") else { col <- NULL; if(!silent) message(msgRCo) }}
      if(length(col) <1) { if(!chRCo) message(msgRCo[1:2]," using rainbow colors instead of 'RdYlBu'")
        col <- if(chRCo) grDevices::colorRampPalette(rev(RColorBrewer::brewer.pal(n=7, name="RdYlBu")))(60) else grDevices::rainbow(60)}
      # Map NAs to a value just below the data minimum so they pick up the
      # dedicated NAcol entry prepended to the palette.
      chNa <- is.na(data)
      if(any(chNa) & length(NAcol) >0) { if(!is.matrix(data)) data <- as.matrix(data)
        mi <- min(data, na.rm=TRUE)
        if(any(chNa)) data[which(chNa)] <- min(data, na.rm=TRUE) -diff(range(data, na.rm=TRUE))*1.1/(length(col))
        col <- c(NAcol,col) }
      yAt <- (0:(length(rowNa)-1))/(length(rowNa)-1)
      if(doPlot) {
        graphics::image(data, col=col, xaxt="n", yaxt="n", main=tit, xlab=if(transp) xLab else yLab, ylab=if(transp) yLab else xLab, cex.main=cexTit)
        graphics::mtext(at=(0:(length(colNa)-1))/(length(colNa)-1), colNa, side=if(transp) 1 else 2, line=0.3, las=las, cex=cexYlab)
        graphics::mtext(at=if(transp) rev(yAt) else yAt, rowNa, side=if(transp) 2 else 1, line=0.3, las=las, cex=cexXlab)
        graphics::box(col=grDevices::grey(0.8)) }}
  } else if(!silent) message(fxNa,": argument 'data' invalid, please furnish matrix or data.frame with min 2 lines & min 1 col")
}
# torch module computing the pairwise distance between two input tensors by
# delegating to nnf_pairwise_distance().
nn_pairwise_distance <- nn_module(
  "nn_pairwise_distance",
  # p: degree of the norm (stored on the module under the name `norm`);
  # eps: small constant forwarded to nnf_pairwise_distance();
  # keepdim: whether the reduced dimension is kept in the output.
  initialize = function(p = 2, eps = 1e-6, keepdim = FALSE) {
    self$norm <- p
    self$eps <- eps
    self$keepdim <- keepdim
  },
  # Forward pass: distance between x1 and x2 using the stored settings.
  forward = function(x1, x2) {
    nnf_pairwise_distance(x1, x2, p = self$norm, eps = self$eps,
                          keepdim = self$keepdim)
  }
)
# Reshape raw outbreaker MCMC output into an 'outbreaker_chains' data.frame.
#
# param: list of per-iteration values; alpha, t_inf and kappa are lists of
#        length-N vectors (one element per saved iteration).
# data:  list containing N, the number of cases.
#
# Returns a data.frame of class c("outbreaker_chains", "data.frame") with one
# row per iteration and columns alpha_i, t_inf_i, kappa_i for i in 1..N.
outbreaker_mcmc_shape <- function(param, data) {
  # Turn a list of length-N vectors into an iterations x cases matrix with
  # prefixed column names; errors when any element has the wrong length.
  as_case_matrix <- function(values, prefix, what) {
    if (!all(vapply(values, length, integer(1)) == data$N)) {
      stop(sprintf("some %s are missing in the param", what))
    }
    m <- matrix(unlist(values), ncol = data$N, byrow = TRUE)
    colnames(m) <- paste(prefix, seq_len(data$N), sep = ".")
    m
  }

  param$alpha <- as_case_matrix(param$alpha, "alpha", "ancestries")
  param$t_inf <- as_case_matrix(param$t_inf, "t_inf", "infection dates")
  # Fixed: the kappa check previously reused the alpha error message
  # ("some ancestries are missing in the param").
  param$kappa <- as_case_matrix(param$kappa, "kappa",
                                "numbers of generations (kappa)")

  param <- data.frame(step = param$step,
                      post = param$post, like = param$like, prior = param$prior,
                      a = param$a, b = param$b,
                      pi = param$pi,
                      param$alpha, param$t_inf, param$kappa)
  # Normalise "alpha.1"-style names to "alpha_1".
  names(param) <- gsub("[.]", "_", names(param))
  class(param) <- c("outbreaker_chains", "data.frame")
  return(param)
}
# Cross-validated LDA accuracy using the first k+1 columns of `data` as the
# model terms, for k = 1..mcep; returns the smallest k achieving the maximal
# leave-one-out accuracy against the response column `y`.
Lopt.get <- function(data, mcep=10){
  accuracy <- numeric(mcep)
  for (k in 1:mcep) {
    # Build the formula "y ~ col1+col2+..." from the first k+1 column names.
    rhs <- paste(colnames(data[, 1:(k + 1)]), collapse = "+")
    fmla <- as.formula(paste0("y ~ ", rhs))
    # CV = TRUE makes lda() return leave-one-out class predictions.
    fit <- lda(fmla, data = data, CV = TRUE)
    accuracy[k] <- mean(fit$class == data$y)
  }
  min(which(accuracy == max(accuracy)))
}
# Example addresses (US landmarks plus international cities) shipped as the
# `sample_addresses` package dataset for demonstrating geocoding.
sample_addresses <- tibble::tribble(
  ~name, ~addr,
  "White House", "1600 Pennsylvania Ave NW Washington, DC",
  "Transamerica Pyramid", "600 Montgomery St, San Francisco, CA 94111",
  "NY Stock Exchange", "11 Wall Street, New York, New York",
  "Willis Tower", "233 S Wacker Dr, Chicago, IL 60606",
  "Chateau Frontenac", "1 Rue des Carrieres, Quebec, QC G1R 4P5, Canada",
  "Nashville", "Nashville, TN" ,
  "Nairobi", "Nairobi, Kenya",
  "Istanbul", "Istanbul, Turkey",
  "Tokyo", "Tokyo, Japan",
  )
# Write the dataset to data/ so it is installed with the package.
usethis::use_data(sample_addresses, overwrite = TRUE)
# Keep (or, when `reverse` is TRUE, drop) the fraction `percentage` of cases
# with the shortest throughput times in an event log.
#
# eventlog:   an event log accepted by throughput_time()/filter_case().
# percentage: fraction of cases to select, in (0, 1].
# reverse:    passed to filter_case() to invert the selection.
filter_throughput_time_percentile <- function(eventlog,
											  percentage,
											  reverse) {
	# Rank cases by throughput time and take the fastest ceiling(n * percentage)
	# case identifiers (first column of the per-case summary).
	case_selection <- eventlog %>%
		throughput_time("case") %>%
		arrange(throughput_time) %>%
		slice(1:ceiling(n()*percentage)) %>%
		pull(1)
	filter_case(eventlog, case_selection, reverse)
}
# Convert a matrix into a long-format data.table with one row per cell.
#
# x:          a matrix.
# row.name:   name for the row-identifier column.
# col.name:   name for the column-identifier column.
# value.name: name for the cell-value column.
#
# Returns a data.table with nrow(x) * ncol(x) rows, cells in column-major order.
tidy_matrix <- function(x, row.name = 'row', col.name = 'col',
    value.name = 'value', ...){
    stopifnot(is.matrix(x))
    # Fall back to integer indices when dimnames are absent; previously an
    # unnamed matrix yielded NULL id columns and setnames() then failed.
    rn <- rownames(x)
    if (is.null(rn)) rn <- seq_len(nrow(x))
    cn <- colnames(x)
    if (is.null(cn)) cn <- seq_len(ncol(x))
    out <- data.table::data.table(x = rep(rn, ncol(x)),
        y = rep(cn, each = nrow(x)),
        z = c(x))
    data.table::setnames(out, c(row.name, col.name, value.name))
    out
}
# Long-format view of an adjacency matrix: delegates to tidy_matrix() with
# graph-flavoured column names "from", "to" and "n".
tidy_adjacency_matrix <- function(x, ...) {
  tidy_matrix(
    x,
    row.name = "from",
    col.name = "to",
    value.name = "n",
    ...
  )
}
# Like cat(), but with an empty default separator (analogous to paste0()).
cat0 <- function(..., sep = "") {
  cat(..., sep = sep)
}
# Migrate a netmeta-family object created under an older package version to
# the current object layout, renaming and adding components as needed.
# x: an object of class netmeta, summary.netmeta/summary.netcomb, netcomb,
#    discomb, netsplit, netrank, rankogram, netimpact, netbind, netposet or
#    netcontrib; any other object is returned unchanged.
updateversion <- function(x) {
  # Parse "major-minor" from x$version; objects without a version stamp are
  # treated as version 0-0 (oldest possible, all migrations apply).
  if (is.null(x$version)) {
    major <- 0
    minor <- 0
  }
  else {
    version <- as.numeric(unlist(strsplit(x$version, "-")))
    major <- version[1]
    minor <- version[2]
  }
  # Decide which migration steps apply based on the stored version.
  update.0.9.6 <- FALSE
  update.0.9.7 <- FALSE
  update.1.3.0 <- FALSE
  update.2.0.0 <- FALSE
  if (!((major == 0.9 & minor > 5) | major > 0.9)) {
    update.0.9.6 <- TRUE
    update.0.9.7 <- TRUE
  }
  if (!((major == 0.9 & minor > 6) | major > 0.9))
    update.0.9.7 <- TRUE
  if (major < 1.3)
    update.1.3.0 <- TRUE
  if (major < 2.0)
    update.2.0.0 <- TRUE
  if (inherits(x, "netmeta")) {
    # 0.9-6: add prediction flag and design-based Q decomposition.
    if (update.0.9.6) {
      x$prediction <- FALSE
      x$df.Q <- x$df
      x$d <- nma.krahn(x)$d
      if (is.null(x$d))
        x$d <- 1
      if (x$d > 1) {
        dd <- decomp.design(x)
        x$Q.heterogeneity <- dd$Q.decomp$Q[2]
        x$Q.inconsistency <- dd$Q.decomp$Q[3]
        x$df.Q.heterogeneity <- dd$Q.decomp$df[2]
        x$df.Q.inconsistency <- dd$Q.decomp$df[3]
        x$pval.Q.heterogeneity <- dd$Q.decomp$pval[2]
        x$pval.Q.inconsistency <- dd$Q.decomp$pval[3]
      }
      else {
        x$Q.heterogeneity <- NA
        x$Q.inconsistency <- NA
        x$df.Q.heterogeneity <- NA
        x$df.Q.inconsistency <- NA
        x$pval.Q.heterogeneity <- NA
        x$pval.Q.inconsistency <- NA
      }
      x$df <- NULL
      x$baseline.reference <- TRUE
      x$version <- packageDescription("netmeta")$Version
    }
    # 0.9-7: introduce back-transformation flag.
    if (update.0.9.7)
      x$backtransf <- TRUE
    # 1.3-0: rename zval.* components to statistic.* and add per-study
    # multi-arm bookkeeping (n.arms / multiarm).
    if (update.1.3.0) {
      x$statistic.fixed <- x$zval.fixed
      x$statistic.random <- x$zval.random
      x$statistic.direct.fixed <- x$zval.direct.fixed
      x$statistic.direct.random <- x$zval.direct.random
      x$statistic.indirect.fixed <- x$zval.indirect.fixed
      x$statistic.indirect.random <- x$zval.indirect.random
      x$statistic.nma.fixed <- x$zval.nma.fixed
      x$statistic.nma.random <- x$zval.nma.random
      x$zval.fixed <- x$zval.random <-
        x$zval.nma.fixed <- x$zval.nma.random <-
          x$zval.direct.fixed <- x$zval.direct.random <-
            x$zval.indirect.fixed <- x$zval.indirect.random <- NULL
      if (any(x$narms > 2)) {
        # Merge per-study arm counts back onto the pairwise comparisons,
        # preserving the original row order via the .order helper column.
        tdata1 <- data.frame(studlab = x$studlab,
                             .order = seq(along = x$studlab))
        tdata2 <- data.frame(studlab = as.character(x$studies),
                             narms = x$narms)
        tdata12 <- merge(tdata1, tdata2,
                         by = "studlab", all.x = TRUE, all.y = FALSE,
                         sort = FALSE)
        tdata12 <- tdata12[order(tdata12$.order), ]
        x$n.arms <- tdata12$narms
        x$multiarm <- tdata12$narms > 2
      }
      else {
        x$n.arms <- rep(2, length(x$studlab))
        x$multiarm <- rep(FALSE, length(x$studlab))
      }
    }
    # 2.0-0: rename comb.fixed / comb.random / level.comb.
    if (update.2.0.0) {
      x$fixed <- x$comb.fixed
      x$random <- x$comb.random
      x$level.ma <- x$level.comb
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
    }
    return(x)
  }
  if (inherits(x, c("summary.netmeta", "summary.netcomb"))) {
    if (update.2.0.0) {
      # NOTE(review): flags are written onto the embedded x$x object here
      # (x$x$fixed / x$x$random), unlike most other branches -- confirm this
      # asymmetry is intentional.
      x$level.ma <- x$level.comb
      x$x$fixed <- x$comb.fixed
      x$x$random <- x$comb.random
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "netcomb") && !inherits(x, "discomb")) {
    # 1.3-0: zval.* -> statistic.* renaming for component NMA objects.
    if (update.1.3.0) {
      x$statistic.fixed <- x$zval.fixed
      x$statistic.random <- x$zval.random
      x$statistic.nma.fixed <- x$zval.nma.fixed
      x$statistic.nma.random <- x$zval.nma.random
      x$statistic.cnma.fixed <- x$zval.cnma.fixed
      x$statistic.cnma.random <- x$zval.cnma.random
      x$statistic.Comb.fixed <- x$zval.Comb.fixed
      x$statistic.Comb.random <- x$zval.Comb.random
      x$statistic.Comp.fixed <- x$zval.Comp.fixed
      x$statistic.Comp.random <- x$zval.Comp.random
      x$zval.fixed <- x$zval.random <-
        x$zval.nma.fixed <- x$zval.nma.random <-
          x$zval.cnma.fixed <- x$zval.cnma.random <-
            x$zval.Comb.fixed <- x$zval.Comb.random <-
              x$zval.Comp.fixed <- x$zval.Comp.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    if (update.2.0.0) {
      x$fixed <- x$comb.fixed
      x$random <- x$comb.random
      x$level.ma <- x$level.comb
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
    }
    return(x)
  }
  if (inherits(x, "discomb")) {
    # Same migrations as netcomb, applied to disconnected-network objects.
    if (update.1.3.0) {
      x$statistic.fixed <- x$zval.fixed
      x$statistic.random <- x$zval.random
      x$statistic.nma.fixed <- x$zval.nma.fixed
      x$statistic.nma.random <- x$zval.nma.random
      x$statistic.cnma.fixed <- x$zval.cnma.fixed
      x$statistic.cnma.random <- x$zval.cnma.random
      x$statistic.Comb.fixed <- x$zval.Comb.fixed
      x$statistic.Comb.random <- x$zval.Comb.random
      x$statistic.Comp.fixed <- x$zval.Comp.fixed
      x$statistic.Comp.random <- x$zval.Comp.random
      x$zval.fixed <- x$zval.random <-
        x$zval.nma.fixed <- x$zval.nma.random <-
          x$zval.cnma.fixed <- x$zval.cnma.random <-
            x$zval.Comb.fixed <- x$zval.Comb.random <-
              x$zval.Comp.fixed <- x$zval.Comp.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    if (update.2.0.0) {
      x$fixed <- x$comb.fixed
      x$random <- x$comb.random
      x$level.ma <- x$level.comb
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
    }
    return(x)
  }
  if (inherits(x, "netsplit")) {
    if (update.1.3.0) {
      x$statistic.fixed <- x$zval.fixed
      x$statistic.random <- x$zval.random
      x$statistic.nma.fixed <- x$zval.nma.fixed
      x$statistic.nma.random <- x$zval.nma.random
      x$statistic.cnma.fixed <- x$zval.cnma.fixed
      x$statistic.cnma.random <- x$zval.cnma.random
      x$statistic.Comb.fixed <- x$zval.Comb.fixed
      x$statistic.Comb.random <- x$zval.Comb.random
      x$statistic.Comp.fixed <- x$zval.Comp.fixed
      x$statistic.Comp.random <- x$zval.Comp.random
      x$zval.fixed <- x$zval.random <-
        x$zval.nma.fixed <- x$zval.nma.random <-
          x$zval.cnma.fixed <- x$zval.cnma.random <-
            x$zval.Comb.fixed <- x$zval.Comb.random <-
              x$zval.Comp.fixed <- x$zval.Comp.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    if (update.2.0.0) {
      x$x$fixed <- x$comb.fixed
      x$x$random <- x$comb.random
      x$level.ma <- x$level.comb
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
    }
    return(x)
  }
  if (inherits(x, "netrank")) {
    # Wrapper objects: recursively migrate the embedded netmeta object.
    if (update.2.0.0) {
      x$x <- updateversion(x$x)
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "rankogram")) {
    if (update.2.0.0) {
      if (is.null(x$cumulative.rankprob))
        x$cumulative.rankprob <- FALSE
      if (is.null(x$nchar.trts))
        x$nchar.trts <- 666
      x$fixed <- x$comb.fixed
      x$random <- x$comb.random
      x$comb.fixed <- x$comb.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "netimpact")) {
    if (update.2.0.0) {
      x$x <- updateversion(x$x)
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "netbind")) {
    if (update.2.0.0) {
      x$x$fixed <- x$comb.fixed
      x$x$random <- x$comb.random
      x$x$level.ma <- x$level.comb
      x$comb.fixed <- x$comb.random <- x$level.comb <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "netposet")) {
    if (update.2.0.0) {
      x$fixed <- x$comb.fixed
      x$random <- x$comb.random
      x$comb.fixed <- x$comb.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  if (inherits(x, "netcontrib")) {
    if (update.2.0.0) {
      x$x$fixed <- x$comb.fixed
      x$x$random <- x$comb.random
      x$comb.fixed <- x$comb.random <- NULL
      x$version <- packageDescription("netmeta")$Version
    }
    return(x)
  }
  # Unknown class: return unchanged.
  x
}
# Density of a gamma distribution truncated to the interval [a, b].
#
# x:     quantile vector.
# shape: gamma shape parameter(s), > 0.
# scale: gamma scale parameter(s), > 0.
# a, b:  truncation bounds, a < b (defaults give the untruncated density).
#
# Returns dgamma(x, shape, scale) / (F(b) - F(a)) inside [a, b], 0 outside.
dtgamma <-
function (x, shape, scale = 1, a = 0, b = Inf)
{
    stopifnot(all(shape > 0), all(scale > 0), a < b)
    Fa <- pgamma(a, shape, scale = scale)
    Fb <- pgamma(b, shape, scale = scale)
    y <- dgamma(x, shape, scale = scale)
    # which() drops NA positions, so NA quantiles stay NA instead of erroring
    # in the subscripted assignment.
    y[which(x < a | x > b)] <- 0
    return(y/(Fb - Fa))
}
# Build an SDMXDimension S4 object from a dimension XML node, supporting both
# SDMX 2.1 (concept/codelist references nested under ConceptIdentity and
# Enumeration elements) and pre-2.1 (everything as attributes of the node).
SDMXDimension <- function(xmlObj, namespaces){
  # First child of the first node matching `xpath`, or NULL when absent.
  firstChildOf <- function(xpath, ns) {
    nodes <- getNodeSet(xmlDoc(xmlObj), xpath, namespaces = ns)
    if (length(nodes) > 0) xmlChildren(nodes[[1]])[[1]] else NULL
  }
  # Coalesce helpers for the slot defaults used below.
  naIfNull <- function(v) if (is.null(v)) as.character(NA) else v
  flagOr <- function(v, default) if (is.null(v)) default else as.logical(v)

  sdmxVersion <- version.SDMXSchema(xmlDoc(xmlObj), namespaces)
  VERSION.21 <- sdmxVersion == "2.1"
  messageNs <- findNamespace(namespaces, "message")
  strNs <- findNamespace(namespaces, "structure")

  conceptRefXML <- NULL
  codelistRefXML <- NULL
  if (VERSION.21) {
    strNsMap <- c(str = as.character(strNs))
    conceptRefXML <- firstChildOf("//str:ConceptIdentity", strNsMap)
    codelistRefXML <- firstChildOf("//str:Enumeration", strNsMap)
  }

  conceptRef <- conceptVersion <- conceptAgency <- NULL
  conceptSchemeRef <- conceptSchemeAgency <- NULL
  codelist <- codelistVersion <- codelistAgency <- NULL
  isMeasureDimension <- isFrequencyDimension <- isEntityDimension <- NULL
  isCountDimension <- isNonObservationTimeDimension <- isIdentityDimension <- NULL
  crossSectionalAttachDataset <- crossSectionalAttachGroup <- NULL
  crossSectionalAttachSection <- crossSectionalAttachObservation <- NULL

  if (VERSION.21) {
    # SDMX 2.1: concept and codelist references live on nested Ref elements.
    if (!is.null(conceptRefXML)) {
      conceptRef <- xmlGetAttr(conceptRefXML, "id")
      conceptVersion <- xmlGetAttr(conceptRefXML, "maintainableParentVersion")
      conceptAgency <- xmlGetAttr(conceptRefXML, "agencyID")
    }
    if (!is.null(codelistRefXML)) {
      codelist <- xmlGetAttr(codelistRefXML, "id")
      codelistVersion <- xmlGetAttr(codelistRefXML, "version")
      codelistAgency <- xmlGetAttr(codelistRefXML, "agencyID")
    }
  } else {
    # Pre-2.1: everything is carried as attributes of the dimension node.
    conceptRef <- xmlGetAttr(xmlObj, "conceptRef")
    conceptVersion <- xmlGetAttr(xmlObj, "conceptVersion")
    conceptAgency <- xmlGetAttr(xmlObj, "conceptAgency")
    conceptSchemeRef <- xmlGetAttr(xmlObj, "conceptSchemeRef")
    conceptSchemeAgency <- xmlGetAttr(xmlObj, "conceptSchemeAgency")
    codelist <- xmlGetAttr(xmlObj, "codelist")
    codelistVersion <- xmlGetAttr(xmlObj, "codelistVersion")
    codelistAgency <- xmlGetAttr(xmlObj, "codelistAgency")
    isMeasureDimension <- xmlGetAttr(xmlObj, "isMeasureDimension")
    isFrequencyDimension <- xmlGetAttr(xmlObj, "isFrequencyDimension")
    isEntityDimension <- xmlGetAttr(xmlObj, "isEntityDimension")
    isCountDimension <- xmlGetAttr(xmlObj, "isCountDimension")
    isNonObservationTimeDimension <- xmlGetAttr(xmlObj, "isNonObservationTimeDimension")
    isIdentityDimension <- xmlGetAttr(xmlObj, "isIdentityDimension")
    crossSectionalAttachDataset <- xmlGetAttr(xmlObj, "crossSectionalAttachDataset")
    crossSectionalAttachGroup <- xmlGetAttr(xmlObj, "crossSectionalAttachGroup")
    crossSectionalAttachSection <- xmlGetAttr(xmlObj, "crossSectionalAttachSection")
    crossSectionalAttachObservation <- xmlGetAttr(xmlObj, "crossSectionalAttachObservation")
  }

  # Missing character slots default to NA, boolean flags to FALSE, and the
  # cross-sectional attachment flags to logical NA.
  obj <- new("SDMXDimension",
    conceptRef = naIfNull(conceptRef),
    conceptVersion = naIfNull(conceptVersion),
    conceptAgency = naIfNull(conceptAgency),
    conceptSchemeRef = naIfNull(conceptSchemeRef),
    conceptSchemeAgency = naIfNull(conceptSchemeAgency),
    codelist = naIfNull(codelist),
    codelistVersion = naIfNull(codelistVersion),
    codelistAgency = naIfNull(codelistAgency),
    isMeasureDimension = flagOr(isMeasureDimension, FALSE),
    isFrequencyDimension = flagOr(isFrequencyDimension, FALSE),
    isEntityDimension = flagOr(isEntityDimension, FALSE),
    isCountDimension = flagOr(isCountDimension, FALSE),
    isNonObservationTimeDimension = flagOr(isNonObservationTimeDimension, FALSE),
    isIdentityDimension = flagOr(isIdentityDimension, FALSE),
    crossSectionalAttachDataset = flagOr(crossSectionalAttachDataset, NA),
    crossSectionalAttachGroup = flagOr(crossSectionalAttachGroup, NA),
    crossSectionalAttachSection = flagOr(crossSectionalAttachSection, NA),
    crossSectionalAttachObservation = flagOr(crossSectionalAttachObservation, NA)
  )
}
# Reference class holding the variance of the incremental net monetary benefit
# (INMB) computed from per-arm standard deviations and their correlation.
# Fields: sde/sdc (SDs of effectiveness and cost), rho (their correlation) and
# object_lambda (a Lambda object providing the willingness-to-pay value).
# NOTE(review): the var_inmb field written via <<- is presumably declared on
# the parent class VAR_INMB_DIRECT -- confirm in its definition.
setRefClass(
  "VAR_INMB",
  fields = c(
    sde = "numeric",
    sdc = "numeric",
    rho="numeric",
    object_lambda = "ANY"
  ),
  methods=list(
    # Validate all inputs (scalar, positive SDs, rho in range, Lambda class)
    # and compute the initial variance.
    initialize = function(sde,sdc,rho,object_lambda){
      check_heritage (object_lambda, "Lambda")
      object_lambda <<- object_lambda
      check_1(list(sde=sde,sdc=sdc,rho=rho))
      check_positif(list(sde=sde, sdc=sdc))
      check_rho(rho)
      sde <<- sde
      sdc <<- sdc
      rho <<- rho
      set_var_inmb()
    },
    # Recompute var_inmb from the current fields:
    # 2 * (lambda^2 * sde^2 + sdc^2 - 2 * lambda * rho * sde * sdc).
    set_var_inmb = function(){
      var_inmb <<- 2 * (object_lambda$get_lambda()^2 * sde^2 + sdc^2 -
                          2*object_lambda$get_lambda() * rho * sde * sdc)
    },
    # Setters validate the new value, store it and refresh var_inmb.
    set_sdc = function(sdc){
      check_1(list(sdc=sdc))
      check_positif(list(sdc=sdc))
      sdc <<- sdc
      set_var_inmb()
    },
    set_sde = function(sde){
      check_1(list(sde=sde))
      check_positif(list(sde=sde))
      sde <<- sde
      set_var_inmb()
    },
    set_rho = function(rho){
      check_1(list(rho=rho))
      check_rho(rho)
      rho <<- rho
      set_var_inmb()
    },
    # Getters recompute before returning so the value always reflects the
    # current fields (including a possibly-updated lambda object).
    get_var_inmb = function(){
      set_var_inmb()
      return(var_inmb)
    },
    # Per-arm variance: half of the total INMB variance.
    get_var_inmb_exp = function(){
      set_var_inmb()
      return (var_inmb/2)
    },
    get_var_inmb_ref = function(){
      set_var_inmb()
      return (var_inmb/2)
    },
    set_object_lambda = function(object_lambda){
      check_heritage (object_lambda, "Lambda")
      object_lambda <<- object_lambda
      set_var_inmb()
    }
  ),
  contains = "VAR_INMB_DIRECT"
)
# Instantiate a VAR_INMB reference object from the given standard deviations,
# correlation and Lambda object, and return it.
# Fixed: the result was previously assigned to a throwaway local, which made
# the function return the object invisibly.
create_object_var_inmb <- function(sdc, sde, rho, object_lambda){
  methods::new(Class = "VAR_INMB", sdc = sdc, sde = sde, rho = rho,
               object_lambda = object_lambda)
}
# Length of the longest common subsequence of two state sequence objects,
# computed by the compiled C routine C_cLCS.
# seq1, seq2: "stslist" sequence objects sharing the same alphabet.
# Returns the LCS length as an integer.
seqLLCS <- function(seq1, seq2) {
  if (!(inherits(seq1, "stslist") && inherits(seq2, "stslist"))) {
    stop(" [!] sequences must be sequence objects")
  }
  alph1 <- alphabet(seq1)
  alph2 <- alphabet(seq2)
  if (length(alph1) != length(alph2) || any(alph1 != alph2)) {
    stop(" [!] The alphabet of both sequences have to be same.")
  }
  len1 <- seqlength(seq1)
  len2 <- seqlength(seq2)
  # The C routine receives both sequences as integer codes plus their lengths
  # and fills the `result` slot with the LCS length.
  .C(C_cLCS, as.integer(seq1), as.integer(seq2),
     as.double(c(len1, len2)), result = as.integer(0))$result
}
# Collect t-test options through the native dialog `menu_ttest`, then build
# and evaluate the corresponding t.test() call.
menu.ttest <- function()
{
    # vars: the two variable names; ints: alternative index, paired flag,
    # var.equal flag and a cancel indicator; level: confidence level.
    z <- .C("menu_ttest", vars=character(2), ints=integer(4), level=double(1))
    # Fourth int > 1 signals the dialog was cancelled.
    if (z$ints[4] > 1) return(invisible())
    oc <- call("t.test", x = as.name(z$vars[1]), y = as.name(z$vars[2]),
               alternative = c("two.sided", "less", "greater")[1+z$ints[1]],
               paired = z$ints[2] != 0,
               var.equal = z$ints[3] != 0,
               conf.level = z$level)
    eval(oc)
}
# Variant where the native code itself runs the test; nothing is returned.
menu.ttest2 <- function()
{
    .C("menu_ttest2")
    return(invisible())
}
# Variant using the .Call interface.
menu.ttest3 <- function() .Call("menu_ttest3")
# Remove the Statistics menu added by .onAttach().
del.ttest <- function() winMenuDel("Statistics")
# On package attach (interactive sessions only): register the Windows menu
# entries that launch the three t-test dialogs.
.onAttach <- function(libname, pkgname)
{
    if(interactive()) {
        winMenuAdd("Statistics")
        winMenuAdd("Statistics/Classical tests")
        winMenuAddItem("Statistics/Classical tests", "t-test:1", "menu.ttest()")
        winMenuAddItem("Statistics/Classical tests", "t-test:2", "menu.ttest2()")
        winMenuAddItem("Statistics/Classical tests", "t-test:3", "menu.ttest3()")
        packageStartupMessage("To remove the Statistics menu use del.ttest()")
    }
}
# Clean up the menu when the package is detached.
.onDetach <- function(libpath) del.ttest()
test_that("`plot.see_point_estimate()` works", {
  # Skip (and report the skip) when suggested packages are missing, instead of
  # the previous `if (require(...))` gating, which silently passed with an
  # empty test body and attached packages as a side effect.
  skip_if_not_installed("bayestestR")
  skip_if_not_installed("rstanarm")
  set.seed(123)
  m <- rstanarm::stan_glm(Sepal.Length ~ Petal.Width * Species,
    data = iris,
    refresh = 0
  )
  result <- bayestestR::point_estimate(m, centrality = "median")
  expect_s3_class(plot(result), "ggplot")
})
# Attribute accessors for MEA objects. Each generic has a .MEA method reading
# the attribute from a single MEA object and a .default method that coerces a
# plain list to a MEAlist first; id/group/session/uid return one value per
# element of a MEAlist, the remaining accessors return the shared attribute.
id <- function(mea) {
  UseMethod("id", mea)
}
id.MEA <- function(mea){
  attr(mea, "id")
}
id.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  sapply(mea, attr, "id")
}
# Group label of the MEA recording(s).
group <- function(mea) {
  UseMethod("group", mea)
}
group.MEA <- function(mea){
  attr(mea, "group")
}
group.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  sapply(mea, attr, "group")
}
# Session identifier of the MEA recording(s).
session <- function(mea) {
  UseMethod("session", mea)
}
session.MEA <- function(mea){
  attr(mea, "session")
}
session.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  sapply(mea, attr, "session")
}
# Sampling rate shared by the recording(s).
sampRate <- function(mea) {
  UseMethod("sampRate", mea)
}
sampRate.MEA <- function(mea){
  attr(mea, "sampRate")
}
sampRate.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  attr(mea, "sampRate")
}
# Label of the first subject/signal.
s1Name <- function(mea) {
  UseMethod("s1Name", mea)
}
s1Name.MEA <- function(mea){
  attr(mea, "s1Name")
}
s1Name.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  attr(mea, "s1Name")
}
# Label of the second subject/signal.
s2Name <- function(mea) {
  UseMethod("s2Name", mea)
}
s2Name.MEA <- function(mea){
  attr(mea, "s2Name")
}
s2Name.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  attr(mea, "s2Name")
}
# Unique identifier of the MEA recording(s).
uid <- function(mea) {
  UseMethod("uid", mea)
}
uid.MEA <- function(mea){
  attr(mea, "uid")
}
uid.default <- function(mea){
  if (is.list(mea)) mea = MEAlist(mea)
  sapply(mea, attr, "uid")
}
# Generic accessor for cross-correlation results produced by MEAccf().
getCCF <- function (mea, type) {
  UseMethod("getCCF", mea)
}
# Extract cross-correlation results from a single MEA object.
#
# mea:  a MEA object on which MEAccf() has been run.
# type: one of "matrix" (lag columns -lag..lag only), "fullMatrix" (all ccf
#       columns), a single lag label from lagNames(), or an aggregate name
#       from ccfResNames().
getCCF.MEA <- function(mea, type) {
  if (!hasCCF(mea)) stop("No ccf computation found, please refer to MEAccf() function.")
  if (type %in% lagNames(mea)) {
    return(mea$ccf[[type]])
  } else if (type %in% names(mea$ccfRes)) {
    return(mea$ccfRes[[type]])
  } else if (type == "matrix") {
    # Select only the lag columns, from -lag to +lag.
    l <- attr(mea, which = "ccf")$lag
    return(mea$ccf[paste0("lag", seq(-l, l))])
  } else if (type == "fullMatrix") {
    return(mea$ccf)
  } else {
    # Fixed: used `call.=F` -- `F` is an ordinary, reassignable binding.
    stop("'type' must be either \"matrix\", \"fullMatrix\", a lag label extracted with lagNames(), or one of:\r\n\"", paste0(ccfResNames(mea), collapse = "\", \""), "\"", call. = FALSE)
  }
}
# List dispatch: coerce to a MEAlist and apply getCCF() to every element;
# "grandAver" yields one scalar per recording, so it is simplified to a vector.
getCCF.default <- function (mea, type) {
  # NOTE(review): MEAlist() is applied twice here (conditionally, then
  # unconditionally); presumably idempotent on its own output -- confirm
  # against the MEAlist() constructor.
  if (is.list(mea)) mea = MEAlist(mea)
  mea <- MEAlist(mea)
  if(type=="grandAver"){
    sapply(mea, getCCF, type)
  } else lapply(mea, getCCF, type)
}
# Names of the lag columns available after MEAccf() (e.g. "lag-2"..."lag2").
lagNames <- function (mea) {
  UseMethod("lagNames", mea)
}
lagNames.MEA <- function (mea) {
  if (!hasCCF(mea)) stop("No ccf computation found, please refer to MEAccf() function.")
  names(mea$ccf)
}
lagNames.default <- function (mea){
  if (is.list(mea)) mea = MEAlist(mea)
  mea <- MEAlist(mea)
  # Lag labels are identical across elements; read them from the first one.
  names(mea[[1]]$ccf)
}
# Names of the aggregated ccf summaries stored in $ccfRes.
ccfResNames <- function (mea) {
  UseMethod("ccfResNames", mea)
}
ccfResNames.MEA <- function (mea) {
  if (!hasCCF(mea)) stop("No ccf computation found, please refer to MEAccf() function.")
  names(mea$ccfRes)
}
ccfResNames.default <- function (mea){
  if (is.list(mea)) mea = MEAlist(mea)
  mea <- MEAlist(mea)
  names(mea[[1]]$ccfRes)
}
# Convert a timestamp of assorted classes to epoch milliseconds, returned as a
# non-scientific character string.
#
# timestamp:   the value(s) to convert.
# time_class:  class() of the timestamps (vector of class names).
# time_format: for numeric input, "ms" or "sec"; for character input, the
#              format string handed to as.POSIXct(). Ignored otherwise.
# tz:          time zone used when parsing character timestamps.
to_ms <- function(timestamp, time_class, time_format, tz) {
  if ("numeric" %in% time_class || "integer" %in% time_class) {
    if (time_format == "ms") {
      return(format(timestamp, scientific = FALSE))
    }
    if (time_format == "sec") {
      return(format(timestamp * 1000, scientific = FALSE))
    }
    cat("\nSet time_format = \"ms\" if time stamps are in milliseconds")
    cat("\nor time_format = \"sec\" if time stamps are in seconds.")
    stop("Wrong arguments.", call. = FALSE)
  }
  if ("POSIXt" %in% time_class || "POSIXct" %in% time_class || "POSIXlt" %in% time_class) {
    return(format(as.numeric(timestamp) * 1000, scientific = FALSE))
  }
  if ("Date" %in% time_class) {
    origin <- as.Date("1970-01-01")
    return(format(as.numeric(difftime(timestamp, origin, units = "secs")) * 1000, scientific = FALSE))
  }
  if ("chron" %in% time_class) {
    if (!requireNamespace("chron", quietly = TRUE)) {
      # Fixed: the message was built by reassigning `msg` three times, so only
      # the final install hint survived; also fixed the "chrone" typo.
      msg <- paste0(
        "It seems that time stamps are 'chron' objects, so ",
        'the "chron" package should be installed. \n',
        'Please install it with: install.packages("chron")')
      stop(msg, call. = FALSE)
    }
    origin <- chron::as.chron("1970-01-01 00:00:00", "%Y-%m-%d %H:%M:%S")
    return(format(as.numeric(difftime(timestamp, origin, units = "secs")) * 1000, scientific = FALSE))
  }
  if ("timeDate" %in% time_class) {
    if (!requireNamespace("timeDate", quietly = TRUE)) {
      # Fixed: same reassignment bug, plus the install hint wrongly pointed
      # at the "chron" package.
      msg <- paste0(
        "It seems that time stamps are 'timeDate' objects, so ",
        'the "timeDate" package should be installed. \n',
        'Please install it with: install.packages("timeDate")')
      stop(msg, call. = FALSE)
    }
    origin <- timeDate::timeDate("1970-01-01 00:00:00",
                                 format = "%Y-%m-%d %H:%M:%S", FinCenter = "GMT")
    return(format(as.numeric(difftime(timestamp, origin, units = "secs")) * 1000, scientific = FALSE))
  }
  if (time_class == "character") {
    return(
      format(
        as.numeric(
          as.POSIXct(timestamp, tz = tz, format = time_format, origin = "1970-01-01 00:00:00")) * 1000,
        scientific = FALSE))
  }
  msg <- "Can not understand what time format is used."
  stop(msg, call. = FALSE)
}
# Regression tests for EbayesThresh: each test pins a function's output to
# values recorded from package version 1.3.2, so any change in numerical
# behaviour is detected.
context("EbayesThresh")
# beta.laplace with the default scale s = 1 on a small grid of points.
test_that("beta.laplace recovers result from v1.3.2 package when s=1",{
  x <- c(-2,1,0,-4,5)
  y <- c(+0.889852029651143,
         -0.380041716606011,
         -0.561817771773154,
         +285.459466672351,
         +15639.8849145429)
  expect_equal(beta.laplace(x),y,tolerance = 1e-6)
})
# Posterior mean under the Laplace prior; defaults s = 1 and w = 0.5.
test_that("postmean recovers result from v1.3.2 package when s=1 and w=0.5",{
  x <- c(-2,1,0,-4,5)
  y <- c(-1.01158962199946,
         +0.270953305249239,
         0,
         -3.48800924041643,
         +4.4997151290092)
  expect_equal(postmean(x, s=1),y,tolerance = 1e-6)
})
# Posterior median under the Laplace prior; defaults s = 1 and w = 0.5.
# Note the median thresholds small |x| (entries 2 and 3) to exactly zero.
test_that("postmed recovers result from v1.3.2 package when s=1 and w=0.5",{
  x <- c(-2,1,0,-4,5)
  y <- c(-0.829992882781227,
         0,
         0,
         -3.49568406354978,
         +4.49992059554046)
  expect_equal(postmed(x, s=1),y,tolerance = 1e-6)
})
# Threshold from mixing weight (tfromw) on an evenly spaced grid of weights.
test_that("tfromw recovers result from v1.3.2 package when s=1",{
  w <- seq(0.2,0.8,0.2)
  y <- c(+2.44873377028853,
         +1.92279064562172,
         +1.40956187155098,
         +0.767900790087879)
  expect_equal(tfromw(w),y,tolerance = 1e-6)
})
# Threshold estimated directly from data (tfromx); seeded for reproducibility.
test_that("tfromx recovers result from v1.3.2 package when s=1",{
  set.seed(123)
  x <- rnorm(100)
  y <- c(+3.03485425654799)
  expect_equal(tfromx(x),y,tolerance = 1e-6)
})
# Mixing weight from threshold (wfromt); tighter tolerance (1e-10) because
# the expected values span several orders of magnitude.
test_that("wfromt recovers result from v1.3.2 package when s=1",{
  y <- c(+0.734187187788918,
         +0.368633767549335,
         +0.0661925474440213,
         +0.00348003803260551,
         +6.39312743790131e-05)
  expect_equal(wfromt(1:5),y,tolerance = 1e-10)
})
# Marginal-likelihood mixing weight estimated from data (wfromx).
test_that("wfromx recovers result from v1.3.2 package when s=1",{
  set.seed(123)
  x <- rnorm(100)
  y <- 0.0609124723599925
  expect_equal(wfromx(x),y,tolerance = 1e-6)
})
# Joint estimation of the weight w and the Laplace scale a (wandafromx);
# expected result is a named list.
test_that("wandafromx recovers result from v1.3.2 package when s=1",{
  set.seed(123)
  x <- rnorm(100)
  y <- list(w = 0.371583145802847,a = 3)
  expect_equal(wandafromx(x),y,tolerance = 1e-6)
})
# End-to-end thresholding: a sparse signal (first 50 entries zero) plus unit
# Gaussian noise; the expected shrunken estimates were recorded from v1.3.2.
test_that("ebayesthresh recovers result from v1.3.2 when s=1 (1st test)",{
  set.seed(123)
  mu <- c(rep(0, 50), rnorm(50, sd=2))
  x <- mu + rnorm(100)
  y <- c(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
         0.461501932251305,0,-1.18081131503738,
         0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
         0.87949485647377,0,0,0,0,0,2.18630700818864,0,0,
         2.83745453837867,0,-3.69477351123594,0,0,0,0,0,0,0,
         3.36467539856769,0,-4.07317568417785,0,-0.141700670497741,
         -0.343683094401104,0,-1.65608219851322,0,0,-3.5191992174366,
         0,0,-2.66613200774895,1.45289341778632,1.15387225239285,
         0,0,0,0,1.59321391647936,0,0,0,-0.925903664068815,0,0,
         -3.59828116583749,2.11687940619439,0,-2.04737147690392,
         -1.08310726061405,0,3.13925967166472,0)
  expect_equal(ebayesthresh(x, sdev = 1),y,tolerance = 1e-6)
})
# Same end-to-end check as the 1st test but with a different random seed,
# guarding against a fix that only happens to match one realisation.
test_that("ebayesthresh recovers result from v1.3.2 when s=1 (2nd test)",{
  set.seed(120)
  mu <- c(rep(0, 50), rnorm(50, sd=2))
  x <- mu + rnorm(100)
  y <- c(0,0,0,0,0,0,0,0,0,0,0,0,-1.04121693067389,
         0,0,0,0,0,0,0,0,0,0,-1.35978570570315,0,0,-0.240101857101125,
         0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
         3.83090910100726,0.0712822438458154,0,-3.59696604125997,
         3.86540675250451,0.0897481774495907,0,0,0,0,-3.41370542189694,
         0,0,3.49859567543283,0,0,-1.81030820004674,3.71037602480949,
         0,0,0,2.81312247449243,0,0,0,-2.00442016176308,-4.6661776361885,
         0,0,1.67416061651496,0,0,-1.53846644772252,0,5.8288454853458,
         0,0,0,0,0,0,3.00394257291387,0,0,-2.26047995381615,0,0)
  expect_equal(ebayesthresh(x, sdev = 1),y,tolerance = 1e-6)
})
# Scalar sdev and an equal-length vector of identical sdevs must agree.
test_that(paste("ebayesthresh returns the same result with sdev=1",
                "and sdev=rep(1, n) (3rd test)"),{
  set.seed(120)
  mu <- c(rep(0, 50), rnorm(50, sd=2))
  x <- mu + rnorm(100)
  expect_equal(ebayesthresh(x, sdev = rep(1,100)),
               ebayesthresh(x, sdev = 1),
               tolerance = 1e-6)
})
# With heterogeneous variances the result must be invariant to the order in
# which observations are supplied: permute, threshold, then un-permute with
# order(i) and compare against the unpermuted run.
test_that(paste("ebayesthresh with heterogeneous variance in which",
                "samples are presented in different orders (4th test)"),{
  set.seed(120)
  mu <- c(rep(0, 25), rnorm(25, sd=2))
  s <- rchisq(50, 1)
  x <- mu + rnorm(50, sd=s)
  i <- sample(50)
  expect_equal(ebayesthresh(x,sdev = s),
               ebayesthresh(x[i],sdev = s[i])[order(i)],
               tolerance = 1e-6)
})
#' Download spatial data of population arrangements.
#'
#' @param year year of the data set (defaults to 2015).
#' @param simplified whether to fetch the simplified geometry version.
#' @param showProgress show a download progress bar?
#' @return an sf object read from the downloaded geopackage.
read_pop_arrangements <- function(year=2015, simplified=TRUE, showProgress=TRUE){
  # NOTE(review): "pop_arrengements" looks misspelled but must match the key
  # used in the package metadata — confirm before "fixing" the spelling.
  metadata <- select_metadata(geography = "pop_arrengements",
                              year = year,
                              simplified = simplified)
  gpkg_url <- as.character(metadata$download_path)
  download_gpkg(gpkg_url, progress_bar = showProgress)
}
# Tests for querying webgeom (WFS geometry) objects; these hit a remote
# service, hence the skip_on_cran() guards.
context("query webgeom")
test_that("query geoms", {
  testthat::skip_on_cran()
  # An unconfigured webgeom can still list the available geometries...
  expect_is(query(webgeom(), 'geoms'),'character')
  wg <- webgeom(geom = "sample:CONUS_states",
                attribute = "STATE",
                values = "New Hampshire")
  expect_is(query(wg, 'geoms'),'character')
  # ...but querying attributes/values errors, presumably because no geom is
  # configured yet — TODO confirm against the query() implementation.
  expect_error(query(webgeom(), 'attributes'))
  expect_error(query(webgeom(), 'values'))
})
# Once a geom is configured, its attribute names can be queried.
test_that("query attributes", {
  testthat::skip_on_cran()
  wg <- webgeom(geom = "sample:CONUS_states",
                attribute = "STATE",
                values = "New Hampshire")
  expect_is(query(wg, 'attributes'),'character')
})
# Once a geom and attribute are configured, attribute values can be queried.
test_that("query values", {
  testthat::skip_on_cran()
  wg <- webgeom(geom = "sample:CONUS_states",
                attribute = "STATE",
                values = "New Hampshire")
  expect_is(query(wg, 'values'),'character')
})
# Values for an attribute should come back de-duplicated.
# FIX: this test also queries the remote WFS, so it needs the same
# skip_on_cran() guard as every other query test above (it was missing).
test_that("query values returns only unique", {
  testthat::skip_on_cran()
  expect_false(any(
    duplicated(
      query(webgeom(geom="sample:Counties" , attribute = "STATE_FIPS"), 'values'))
    )
  )
})
# Building the WFS GetFeature POST body from a saved webgeom fixture must
# reproduce the stored reference XML (normalised for Windows line endings).
context("Create WFS post XML works")
test_that("two states", {
  fixture_wg <- readRDS("data/test_wfsgetfeature_wg.rds")
  generated_xml <- geoknife:::wfsFilterFeatureXML(fixture_wg)
  reference_file <- "data/test_wfsgetfeature.xml"
  # Strip carriage returns so the comparison is line-ending agnostic.
  expected_xml <- gsub("\r", "", readChar(reference_file, file.info(reference_file)$size))
  expect_equal(generated_xml, expected_xml)
})
#' Test whether a coord object is (or inherits from) CoordPolar.
#'
#' @param coord object to test.
#' @return TRUE if "CoordPolar" is in the object's class hierarchy.
is.CoordPolar <- function(coord) {
  # inherits() is the idiomatic class test; equivalent to the original
  # `"CoordPolar" %in% class(coord)` for S3 objects.
  inherits(coord, "CoordPolar")
}
#' Test whether a coord object is (or inherits from) CoordSerialaxes.
#'
#' @param coord object to test.
#' @return TRUE if "CoordSerialaxes" is in the object's class hierarchy.
is.CoordSerialaxes <- function(coord) {
  # inherits() is the idiomatic class test; equivalent to the original
  # `"CoordSerialaxes" %in% class(coord)` for S3 objects.
  inherits(coord, "CoordSerialaxes")
}
#' Test whether a coord object is (or inherits from) CoordFlip.
#'
#' @param coord object to test.
#' @return TRUE if "CoordFlip" is in the object's class hierarchy.
is.CoordFlip <- function(coord) {
  # inherits() is the idiomatic class test; equivalent to the original
  # `"CoordFlip" %in% class(coord)` for S3 objects.
  inherits(coord, "CoordFlip")
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.