context("PipeOpOVRSplit")
test_that("PipeOpOVRSplit - basic properties", {
po = PipeOpOVRSplit$new()
expect_pipeop(po)
expect_data_table(po$input, nrows = 1)
expect_data_table(po$output, nrows = 1)
expect_pipeop_class(PipeOpOVRSplit)
})
test_that("PipeOpOVRSplit - train and predict", {
dat = data.table(target = as.factor(rep(c("a", "b", "rest"), each = 10)), feature = rnorm(30))
tsk = TaskClassif$new("test", backend = dat, target = "target")
po = PipeOpOVRSplit$new()
tout = train_pipeop(po, list(tsk))
expect_equal(po$state$levels, tsk$class_names)
expect_multiplicity(tout[[1]])
expect_list(tout[[1]], len = 3)
expect_named(tout[[1]], tsk$class_names)
expect_true(all(pmap_lgl(list(tout[[1]], names(tout[[1]])), .f = function(task, name) {
expect_task(task)
all(task$target_names == tsk$target_names) && task$positive == name && task$negative == "rest." &&
all.equal(task$truth(), factor(ifelse(tsk$truth() == task$positive, task$positive, "rest."), levels = c(task$positive, "rest.")))
})))
pout = predict_pipeop(po, list(tsk))
expect_multiplicity(pout[[1]])
expect_list(pout[[1]], len = 3)
expect_named(pout[[1]], tsk$class_names)
expect_true(all(pmap_lgl(list(pout[[1]], names(pout[[1]])), .f = function(task, name) {
expect_task(task)
task$target_names == tsk$target_names && task$positive == name && task$negative == "rest." &&
all.equal(task$truth(), factor(ifelse(tsk$truth() == task$positive, task$positive, "rest."), levels = c(task$positive, "rest.")))
})))
})
context("PipeOpOVRUnite")
test_that("PipeOpOVRUnite - basic properties", {
po = PipeOpOVRUnite$new()
expect_pipeop(po)
expect_data_table(po$input, nrows = 1)
expect_data_table(po$output, nrows = 1)
expect_pipeop_class(PipeOpOVRUnite)
})
test_that("PipeOpOVRUnite - train and predict", {
feature = rep(c(1, 0), c(10, 20))
dat1 = data.table(target = as.factor(rep(c("a", "rest"), c(10, 20))), feature = feature)
dat2 = data.table(target = as.factor(rep(c("rest", "b", "rest"), c(10, 10, 10))), feature = feature)
dat3 = data.table(target = as.factor(rep(c("rest", "c"), c(20, 10))), feature = feature)
tsk1 = TaskClassif$new("t1", backend = dat1, target = "target", positive = "a")
tsk2 = TaskClassif$new("t2", backend = dat2, target = "target", positive = "b")
tsk3 = TaskClassif$new("t3", backend = dat3, target = "target", positive = "c")
po = PipeOpOVRUnite$new()
lrn = LearnerClassifRpart$new()
lrn$predict_type = "prob"
tin = map(list(tsk1, tsk2, tsk3), .f = function(task) {
lrn$train(task)
lrn$predict(task)
})
pout = po$predict(list(as.Multiplicity(tin)))
expect_prediction_classif(pout[[1]])
lrn$predict_type = "response"
tin = map(list(tsk1, tsk2, tsk3), .f = function(task) {
lrn$train(task)
lrn$predict(task)
})
pout = po$predict(list(as.Multiplicity(tin)))
expect_prediction_classif(pout[[1]])
na_response = tin[[1]]$response
na_response[1] = NA
tin[[1]] = PredictionClassif$new(row_ids = tin[[1]]$row_ids, truth = tin[[1]]$truth, response = na_response)
pout = po$predict(list(as.Multiplicity(tin)))
expect_prediction_classif(pout[[1]])
expect_equal(pout[[1]]$prob[1, ], c(a = 1/3, b = 1/3, c = 1/3))
tin[[1]] = PredictionClassif$new(row_ids = tin[[1]]$row_ids, truth = tin[[1]]$truth)
expect_error(po$predict(list(as.Multiplicity(tin))), regexp = "PipeOpOVRUnite input predictions had missing 'prob' and missing 'response' values")
})
context("PipeOpOVRSplit and PipeOpOVRUnite")
test_that("PipeOpOVRSplit and PipeOpOVRUnite - train and predict", {
feature = rep(c(1, 0), c(10, 20))
dat0 = data.table(target = as.factor(rep(c("a", "b", "c"), each = 10)), feature = feature)
dat1 = data.table(target = as.factor(rep(c("a", "rest"), c(10, 20))), feature = feature)
dat2 = data.table(target = as.factor(rep(c("rest", "b", "rest"), c(10, 10, 10))), feature = feature)
dat3 = data.table(target = as.factor(rep(c("rest", "c"), c(20, 10))), feature = feature)
tsk0 = TaskClassif$new("t0", backend = dat0, target = "target")
tsk1 = TaskClassif$new("t1", backend = dat1, target = "target", positive = "a")
tsk2 = TaskClassif$new("t2", backend = dat2, target = "target", positive = "b")
tsk3 = TaskClassif$new("t3", backend = dat3, target = "target", positive = "c")
po = PipeOpOVRUnite$new()
lrn = LearnerClassifRpart$new()
tin = map(list(tsk1, tsk2, tsk3), .f = function(task) {
lrn$train(task)
lrn$predict(task)
})
pout_ref = po$predict(list(as.Multiplicity(tin)))
gr = PipeOpOVRSplit$new() %>>% LearnerClassifRpart$new() %>>% PipeOpOVRUnite$new()
expect_graph(gr)
tout = gr$train(tsk0)
expect_list(gr$state$ovrunite, len = 0)
expect_null(tout[[1]])
pout = gr$predict(tsk0)
expect_equal(pout_ref[[1]]$prob, pout[[1]]$prob)
gr$param_set$values$ovrunite.weights = rep(0, 3)
expect_true(all.equal(unique(gr$predict(tsk0)[[1]]$prob), t(c(a = 1/3, b = 1/3, c = 1/3))))
})
test_that("PipeOpOVRSplit and PipeOpOVRUnite - task size", {
gr = PipeOpOVRSplit$new() %>>% LearnerClassifRpart$new() %>>% PipeOpOVRUnite$new()
gr$train(tsk("iris")$filter(c(1:30, 51:80, 101:130)))
prd = gr$predict(tsk("iris")$filter(c(1:30, 51:80, 101:130)))[[1]]
expect_prediction_classif(prd)
expect_true(nrow(prd$data$prob) == 90)
})
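# path.lines: draws one or more thermodynamic process paths (computed by
# path.calc) on the P-v or T-s plane, with direction arrows, a legend giving
# compression/expansion, temperatures and W/Q totals, and optional shading
# under a path, between two paths, or around a closed cycle.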
path.lines <- function(x,plane='Pv',shade.between=FALSE,lab.cycle=FALSE,shade.cycle=FALSE){
if(!any(sapply(x, class) == "list")) x <- list(x)
nl <- length(x)
if(plane == 'Pv'){
xlab=~"Specific volume "*italic('v')*"("*m^3*"/kg)"
ylab=~"Pressure "*italic('P')*"(bar)"
}
if(plane == 'Ts'){
xlab=~"Specific entropy "*italic('s')*"((kJ/K)/kg)"
ylab=~"Temperature "*italic('T')*"("*degree*"C)"
}
y <- list(); leg <- array(); linetype<- array()
pr <- array(); nv <- array();nT <- array(); nP <- array()
ns <- array();nxx <- array(); nyy <- array()
vl <- matrix(nrow=2,ncol=nl)
pl <- vl; lab <- vl
for(i in 1:nl){
y[[i]] <- path.calc(x[[i]])
if(plane == 'Ts'){
y[[i]]$xx <- y[[i]]$s
y[[i]]$yy <- y[[i]]$T
}
if(plane == 'Pv'){
y[[i]]$xx <- y[[i]]$v
y[[i]]$yy <- y[[i]]$P
}
nv[i] <- length(y[[i]]$v)
nP[i] <- length(y[[i]]$P)
ns[i] <- length(y[[i]]$s)
nT[i] <- length(y[[i]]$T)
nxx[i] <- length(y[[i]]$xx)
nyy[i] <- length(y[[i]]$yy)
lab[,i] <- x[[i]]$lab
dirpr <- sign(y[[i]]$v[1]- y[[i]]$v[nv[i]])
if(dirpr==1)pr[i] <- " compr "
if(dirpr==0)pr[i] <- " "
if(dirpr==-1)pr[i] <- " expan "
leg[i] <- as.expression(bquote(.(lab[1,i])*"-"*.(lab[2,i])*":"*.(x[[i]]$path)~.(pr[i])~
.(round(y[[i]]$T[1],1))*degree*"C->"*.(round(y[[i]]$T[nT[i]],1))*
degree*"C, W="*.(round(y[[i]]$WQtot[1],1))*"kJ/kg, Q="*
.(round(y[[i]]$WQtot[2],1))*"kJ/kg"))
vl[,i] <- c(min(y[[i]]$xx),max(y[[i]]$xx))
pl[,i] <- c(min(y[[i]]$yy),max(y[[i]]$yy))
linetype[i]=1
if(nl>1){
if(x[[i]]$path=='isotherm') linetype[i]=1
if(x[[i]]$path=='adiabat' ) linetype[i]=2
}
else linetype[i]=1
}
xlim<- c(0.8*min(vl[1,]), 1.2*max(vl[2,]))
ylim<- c(0.8*0, 1.2*max(pl[2,]))
plot(y[[1]]$xx,y[[1]]$yy, xlab=xlab, ylab=ylab,
xlim=xlim,ylim=ylim,lwd=2,cex.axis=0.8,cex.lab=0.9,type="n")
for(i in 1:nl){
lines(y[[i]]$xx,y[[i]]$yy,lwd=2,lty=linetype[i])
x0 <- y[[i]]$xx[0.5*nxx[i]];y0=y[[i]]$yy[0.5*nyy[i]]
x1 <- y[[i]]$xx[0.6*nxx[i]];y1=y[[i]]$yy[0.6*nyy[i]]
arrows(x0,y0,x1,y1,length=0.15,lwd=2)
if(is.null(x[[i]]$shade.under)==FALSE){
if(x[[i]]$shade.under==TRUE)
polygon(c(y[[i]]$xx,rev(y[[i]]$xx)), c(y[[i]]$yy,rep(ylim[1],nyy[i])),
density=10)
}
pv <- matrix(nrow=2,ncol=2)
pv[1,] <- c(y[[i]]$xx[1],y[[i]]$yy[1])
pv[2,] <- c(y[[i]]$xx[nxx[i]],y[[i]]$yy[nyy[i]])
if(trimws(pr[i])=='compr'){ # pr values are padded with spaces where assigned above
pv[c(2,1),] <- pv[c(1,2),]
lab[,i] <- rev(lab[,i])
}
if(lab.cycle == FALSE){
dirlab <- sign(pv[2,1]-pv[1,1]);if (dirlab==0) dirlab<-1
xs <- 0.02*dirlab*(1+ 0.5*rnorm(1))
ys <- 0.02*dirlab*(1+ 0.5*rnorm(1))
text(pv[1,1]-xs*xlim[2],pv[1,2]+ys*ylim[2],lab[1,i],cex=0.7)
text(pv[2,1]+xs*xlim[2],pv[2,2]+ys*ylim[2],lab[2,i],cex=0.7)
}
else{
dirlab <- c(-1,-1,1,1)
xs <- 0.02*dirlab[i]; ys <- 0.02*dirlab[i]
text(pv[1,1]+xs*xlim[2],pv[1,2]+ys*ylim[2],lab[1,i],cex=0.7)
}
}
if(shade.cycle==TRUE){
xbox <- y[[1]]$xx; for(i in 2:nl) xbox <- c(xbox,y[[i]]$xx)
ybox <- y[[1]]$yy; for(i in 2:nl) ybox <- c(ybox,y[[i]]$yy)
polygon(c(xbox),c(ybox),density=10)
}
if(shade.between==TRUE){
npts <- length(y[[1]]$xx)
xleft <- c(y[[1]]$xx[1],y[[2]]$xx[1])
yleft <- c(y[[1]]$yy[1],y[[2]]$yy[1])
xright <- c(y[[1]]$xx[npts],y[[2]]$xx[npts])
yright <- c(y[[1]]$yy[npts],y[[2]]$yy[npts])
xbox <- c(xleft, y[[1]]$xx, xright, rev(y[[2]]$xx))
ybox <- c(yleft, y[[1]]$yy, yright, rev(y[[2]]$yy))
polygon(c(xbox),c(ybox),density=10)
}
legend("top", leg, cex=0.8)
}
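# stylerignore support: env_add_stylerignore() caches the terminal tokens of
# stylerignore segments (keyed by the first pos_id in each segment),
# add_stylerignore() flags the token ranges enclosed by the start/stop
# markers, and apply_stylerignore() merges the cached text, spacing and
# newlines back into the styled parse data.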
env_add_stylerignore <- function(pd_flat) {
if (!env_current$any_stylerignore) {
env_current$stylerignore <- pd_flat[0, ]
return()
}
pd_flat_temp <- pd_flat[pd_flat$terminal | pd_flat$is_cached, ] %>%
default_style_guide_attributes()
is_stylerignore_switchpoint <- pd_flat_temp$stylerignore != lag(
pd_flat_temp$stylerignore,
default = pd_flat_temp$stylerignore[1]
)
pd_flat_temp$first_pos_id_in_segment <- split(
pd_flat_temp$pos_id, cumsum(is_stylerignore_switchpoint)
) %>%
map(~ rep(.x[1], length(.x))) %>%
unlist()
pd_flat_temp$lag_spaces <- lag(pd_flat_temp$spaces, default = 0)
is_terminal_to_ignore <- pd_flat_temp$terminal & pd_flat_temp$stylerignore
env_current$stylerignore <- pd_flat_temp[is_terminal_to_ignore, ]
}
add_stylerignore <- function(pd_flat) {
parse_text <- trimws(pd_flat$text)
start_candidate <- parse_text == option_read("styler.ignore_start")
pd_flat$stylerignore <- rep(FALSE, length(start_candidate))
env_current$any_stylerignore <- any(start_candidate)
if (!env_current$any_stylerignore) {
return(pd_flat)
}
pd_flat_terminals <- pd_flat[pd_flat$terminal, ]
pd_flat_lag_line2 <- lag(pd_flat$line2, default = 0)
on_same_line <- pd_flat$line1 == pd_flat_lag_line2
cumsum_start <- cumsum(start_candidate & !on_same_line)
cumsum_stop <- cumsum(parse_text == option_read("styler.ignore_stop"))
pd_flat$indicator_off <- cumsum_start + cumsum_stop
is_invalid <- cumsum_start - cumsum_stop < 0 | cumsum_start - cumsum_stop > 1
if (any(is_invalid)) {
warn(paste0(
"Invalid stylerignore sequences found, potentially ignoring some of the ",
"markers set.\nSee `help(\"stylerignore\", \"styler\")`."
))
}
to_ignore <- as.logical(pd_flat$indicator_off %% 2)
to_ignore[is_invalid] <- FALSE
single_lines_to_ignore <- pd_flat$line1[start_candidate & on_same_line]
to_ignore[pd_flat$line1 %in% single_lines_to_ignore] <- TRUE
pd_flat$indicator_off <- NULL
pd_flat[to_ignore, "stylerignore"] <- TRUE
pd_flat
}
apply_stylerignore <- function(flattened_pd) {
if (!env_current$any_stylerignore) {
return(flattened_pd)
}
env_current$stylerignore$pos_id_ <- env_current$stylerignore$pos_id
colnames_required_apply_stylerignore <- c(
"pos_id_", "lag_newlines", "lag_spaces", "text", "first_pos_id_in_segment"
)
to_ignore <- flattened_pd$stylerignore == TRUE
not_first <- flattened_pd$stylerignore == lag(
flattened_pd$stylerignore,
default = FALSE
)
flattened_pd <- merge(
flattened_pd[!(to_ignore & not_first), ],
env_current$stylerignore[, colnames_required_apply_stylerignore],
by.x = "pos_id", by.y = "first_pos_id_in_segment", all.x = TRUE,
sort = FALSE
) %>%
as_tibble()
flattened_pd %>%
stylerignore_consolidate_col("lag_newlines") %>%
stylerignore_consolidate_col("lag_spaces") %>%
stylerignore_consolidate_col("text") %>%
stylerignore_consolidate_col("pos_id", "pos_id", "pos_id_") %>%
arrange_pos_id()
}
stylerignore_consolidate_col <- function(flattened_pd,
col,
col_x = paste0(col, ".x"),
col_y = paste0(col, ".y")) {
flattened_pd[[col]] <- ifelse(is.na(flattened_pd[[col_y]]),
flattened_pd[[col_x]],
flattened_pd[[col_y]]
)
if (col != col_x) {
flattened_pd[[col_x]] <- NULL
}
if (col != col_y) {
flattened_pd[[col_y]] <- NULL
}
flattened_pd
}
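# kerasR example: fits a small embedding -> GRU -> dense(1) + sigmoid network
# on random integer sequences with a mean-squared-error loss, guarded by
# keras_available().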
if(keras_available()) {
X_train <- matrix(sample(0:19, 100 * 100, TRUE), ncol = 100)
Y_train <- rnorm(100)
mod <- Sequential()
mod$add(Embedding(input_dim = 20, output_dim = 10,
input_length = 100))
mod$add(Dropout(0.5))
mod$add(GRU(16))
mod$add(Dense(1))
mod$add(Activation("sigmoid"))
keras_compile(mod, loss = "mse", optimizer = RMSprop())
keras_fit(mod, X_train, Y_train, epochs = 3, verbose = 0)
}
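# calmate example: CalMaTe calibration of total copy numbers and allele B
# fractions for the GSE8605 data set (Mapping10K_Xba142 chip type).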
library("calmate");
verbose <- Arguments$getVerbose(-10, timestamp=TRUE);
dataSet <- "GSE8605";
chipType <- "Mapping10K_Xba142";
tags <- "ACC,-XY,BPN,-XY,RMA,FLN,-XY";
dsT <- AromaUnitTotalCnBinarySet$byName(dataSet, tags=tags, chipType=chipType);
dsB <- AromaUnitFracBCnBinarySet$byName(dataSet, tags=tags, chipType=chipType);
dsList <- list(total=dsT, fracB=dsB);
print(dsList);
asn <- CalMaTeCalibration(dsList, fB1=0.33, fB2=0.66);
print(asn);
dsNList <- process(asn, verbose=verbose);
print(dsNList);
stop(); # intentional halt: the per-unit extraction and plotting below only run if this line is removed
ugp <- getAromaUgpFile(dsT);
chr <- 17;
units <- getUnitsOnChromosome(ugp, chr);
ii <- 1;
df <- getFile(dsList$total, ii);
dfR <- getAverageFile(dsList$total, verbose=verbose);
gamma <- extractRawCopyNumbers(df, logBase=NULL, chromosome=chr);
gammaR <- extractRawCopyNumbers(dfR, logBase=NULL, chromosome=chr);
gamma <- 2*divideBy(gamma, gammaR);
df <- getFile(dsList$fracB, ii);
beta <- extractRawAlleleBFractions(df, chromosome=chr);
dfN <- getFile(dsNList$fracB, ii);
betaN <- extractRawAlleleBFractions(dfN, chromosome=chr);
dfN <- getFile(dsNList$total, ii);
gammaN <- extractRawCopyNumbers(dfN, logBase=NULL, chromosome=chr);
subplots(4, ncol=2, byrow=FALSE);
plot(beta);
title(sprintf("%s", getName(beta)));
plot(gamma);
plot(betaN);
title(sprintf("%s (CalMaTe)", getName(betaN)));
plot(gammaN);
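# fun.N_1: fits a full quadratic surface in two predictors (intercept, linear,
# squared and interaction terms) via the SVD pseudoinverse, and reports the
# leave-one-out cross-validation RMSE from PRESS residuals err / (1 - h_ii),
# where h_ii = rowSums(U * U) are the hat-matrix diagonals.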
fun.N_1 <- function(x, y) {
nombres <- colnames(x)
resultado <- vector(mode = "list", length = 2)
names(resultado) <- c("coef", "CV")
tol <- sqrt(.Machine$double.eps)
x <- cbind(1, x[, 1], x[, 2],
I(x[, 1]^2), I(x[, 2]^2),
x[, 1] * x[, 2])
Xsvd <- svd(x)
D <- 1 / Xsvd$d
D[Xsvd$d <= tol] <- 0 # zero the reciprocals of near-zero singular values (pseudoinverse)
C <- Xsvd$v %*% (crossprod(Xsvd$u, y) * D)
rownames(C) <- c("Ind", nombres, paste0(nombres, "^2"), "interac")
err <- (x %*% C) - y
CV <- sqrt(mean((err / (1 - rowSums(Xsvd$u * Xsvd$u)))^2))
CV <- round(CV, digits = 6)
resultado$coef <- C
resultado$CV <- CV
class(resultado) <- "neurona"
return(resultado)
}
st_sample = function(x, size, ...) UseMethod("st_sample")
st_sample.sf = function(x, size, ...) st_sample(st_geometry(x), size, ...)
st_sample.sfc = function(x, size, ..., type = "random", exact = TRUE, warn_if_not_integer = TRUE,
by_polygon = FALSE) {
if (!missing(size) && warn_if_not_integer && any(size %% 1 != 0))
warning("size is not an integer")
if (!missing(size) && length(size) > 1) {
size = rep(size, length.out = length(x))
ret = lapply(1:length(x), function(i) st_sample(x[i], size[i], type = type, exact = exact, ...))
st_set_crs(do.call(c, ret), st_crs(x))
} else {
res = switch(max(st_dimension(x)) + 1,
st_multipoints_sample(do.call(c, x), size = size, ..., type = type),
st_ll_sample(st_cast(x, "LINESTRING"), size = size, ..., type = type),
st_poly_sample(x, size = size, ..., type = type, by_polygon = by_polygon))
if (exact & type == "random" & all(st_geometry_type(res) == "POINT")) {
diff = size - length(res)
if (diff > 0) {
res_additional = st_sample_exact(x = x, size = diff, ...,
type = type, by_polygon = by_polygon)
res = c(res, res_additional)
} else if (diff < 0) {
res = res[1:size]
}
}
res
}
}
st_sample.sfg = function(x, size, ...) {
st_sample(st_geometry(x), size, ...)
}
st_poly_sample = function(x, size, ..., type = "random",
offset = st_sample(st_as_sfc(st_bbox(x)), 1)[[1]],
by_polygon = FALSE) {
if (by_polygon && inherits(x, "sfc_MULTIPOLYGON")) {
sum_a = units::drop_units(sum(st_area(x)))
x = lapply(suppressWarnings(st_cast(st_geometry(x), "POLYGON")), st_sfc, crs = st_crs(x))
a = sapply(x, st_area)
ret = mapply(st_poly_sample, x, size = size * a / sum_a, type = type, ...)
do.call(c, ret)
} else if (type %in% c("hexagonal", "regular", "random")) {
if (isTRUE(st_is_longlat(x))) {
if (type == "regular") {
message_longlat("st_sample")
x = st_set_crs(x, NA)
}
if (type == "hexagonal")
stop("hexagonal sampling on geographic coordinates not supported; consider projecting first")
}
a0 = as.numeric(st_area(st_make_grid(x, n = c(1,1))))
a1 = as.numeric(sum(st_area(x)))
if (is.finite(a0) && is.finite(a1) && a0 > 0 && a1 > 0) {
r = round(size * a0 / a1)
size = if (r == 0)
rbinom(1, 1, size * a0 / a1)
else
r
}
bb = st_bbox(x)
pts = if (type == "hexagonal") {
dx = sqrt(a0 / size / (sqrt(3)/2))
hex_grid_points(x, pt = offset, dx = dx)
} else if (type == "regular") {
dx = as.numeric(sqrt(a0 / size))
offset = c((offset[1] - bb["xmin"]) %% dx,
(offset[2] - bb["ymin"]) %% dx) + bb[c("xmin", "ymin")]
n = c(round((bb["xmax"] - offset[1])/dx), round((bb["ymax"] - offset[2])/dx))
st_make_grid(x, cellsize = c(dx, dx), offset = offset, n = n, what = "corners")
} else if (type == "random") {
lon = runif(size, bb[1], bb[3])
lat = if (isTRUE(st_is_longlat(x))) {
toRad = pi/180
lat0 = (sin(bb[2] * toRad) + 1)/2
lat1 = (sin(bb[4] * toRad) + 1)/2
y = runif(size, lat0, lat1)
asin(2 * y - 1) / toRad
} else
runif(size, bb[2], bb[4])
m = cbind(lon, lat)
st_sfc(lapply(seq_len(nrow(m)), function(i) st_point(m[i,])), crs = st_crs(x))
}
pts[x]
} else {
if (!requireNamespace("spatstat.random", quietly = TRUE))
stop("package spatstat.random required, please install it (or the full spatstat package) first")
spatstat_fun = try(get(paste0("r", type), asNamespace("spatstat.random")), silent = TRUE)
if (inherits(spatstat_fun, "try-error"))
stop(paste0("r", type), " is not an exported function from spatstat.random.")
pts = try(spatstat_fun(..., win = spatstat.geom::as.owin(x)), silent = TRUE)
if (inherits(pts, "try-error"))
stop("The spatstat function ", paste0("r", type),
" did not return a valid result. Consult the help file.\n",
"Error message from spatstat:\n", pts)
st_as_sf(pts)[-1,]
}
}
st_multipoints_sample = function(x, size, ..., type = "random") {
if (!inherits(x, "MULTIPOINT"))
stop("points sampling only implemented for MULTIPOINT; use sample to sample individual features", call.=FALSE)
m = unclass(x)
st_sfc(st_multipoint(m[sample(nrow(m), size, ...),]), crs = st_crs(x))
}
st_ll_sample = function (x, size, ..., type = "random", offset = runif(1)) {
crs = st_crs(x)
if (isTRUE(st_is_longlat(x))) {
message_longlat("st_sample")
st_crs(x) = NA_crs_
}
l = st_length(x)
if (inherits(l, "units"))
l = drop_units(l)
if (type == "random") {
d = runif(size, 0, sum(l))
} else if (type == "regular") {
d = ((1:size) - (1. - (offset %% 1)))/size * sum(l)
} else {
stop(paste("sampling type", type, "not available for LINESTRING"))
}
lcs = c(0, cumsum(l))
if (sum(l) == 0) {
grp = list(0)
message("line is of length zero, only one point is sampled")
} else {
grp = split(d, cut(d, lcs, include.lowest = TRUE))
grp = lapply(seq_along(x), function(i) grp[[i]] - lcs[i])
}
st_sfc(CPL_gdal_linestring_sample(x, grp), crs = crs)
}
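# hex_grid_points: hexagonal lattice covering the bbox of obj; rows are spaced
# dy = sqrt(3) * dx / 2 apart and alternate rows are shifted by dx / 2, giving
# the centres of a regular hexagonal sampling grid anchored at pt.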
hex_grid_points = function(obj, pt, dx) {
bb = st_bbox(obj)
dy = sqrt(3) * dx / 2
xlim = bb[c("xmin", "xmax")]
ylim = bb[c("ymin", "ymax")]
offset = c(x = (pt[1] - xlim[1]) %% dx, y = (pt[2] - ylim[1]) %% (2 * dy))
x = seq(xlim[1] - dx, xlim[2] + dx, dx) + offset[1]
y = seq(ylim[1] - 2 * dy, ylim[2] + 2 * dy, dy) + offset[2]
y <- rep(y, each = length(x))
x <- rep(c(x, x + dx / 2), length.out = length(y))
xy = cbind(x, y)[x >= xlim[1] & x <= xlim[2] & y >= ylim[1] & y <= ylim[2], ]
st_sfc(lapply(seq_len(nrow(xy)), function(i) st_point(xy[i,])), crs = st_crs(bb))
}
st_sample_exact = function(x, size, ..., type, by_polygon) {
random_pt = st_sample(x = x, size = size, ..., type = type, exact = FALSE)
while (length(random_pt) < size) {
diff = size - length(random_pt)
random_pt_new = st_sample(x, size = diff, ..., type = type, exact = FALSE, by_polygon = by_polygon)
random_pt = c(random_pt, random_pt_new)
}
if(length(random_pt) > size) {
random_pt = random_pt[1:size]
}
random_pt
}
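# qsiniw: quantile function of the sine inverse Weibull distribution, the
# inverse of F(x) = sin((pi/2) * exp(-alpha * x^(-theta))). Note that
# log.p = TRUE returns the log of the quantile rather than accepting
# log-probabilities.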
qsiniw<-function(p,alpha,theta,lower = TRUE,log.p = FALSE){
if (log.p == TRUE) {
if (lower == TRUE){
log((-log((2/pi)*asin(p))/alpha)^(-1/theta))
}else{
log((-log((2/pi)*asin(1-p))/alpha)^(-1/theta))
}
} else {
if (lower == TRUE){
(-log((2/pi)*asin(p))/alpha)^(-1/theta)
}else{
(-log((2/pi)*asin(1-p))/alpha)^(-1/theta)
}
}
}
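# AdditionTCFIP: environment-as-object fixture bundling small arithmetic
# functions with their declared return types and test-case definitions,
# including cases that deliberately warn, error, or violate type checking.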
AdditionTCFIP <- function() {
self <- environment()
class(self) <- append('AdditionTCFIP', class(self))
addNumeric <- function(x_n, y_n) x_n + y_n
addDouble <- function(x_d, y_d = 0.0, ...) sum(x_d, y_d, ...) # '...' cannot appear directly in arithmetic, so sum() folds any extra addends
addInteger <- function(x_i, y_i) x_i + y_i
divideByZero <- function(x_n) x_n / 0
generateWarning <- function(x_ = 8L) 1:3 + 1:7 + x_
generateError <- function() stop('generated error')
function_return_types <- data.table(
function_name = c('addNumeric', 'addDouble', 'addInteger',
'divideByZero', 'generateWarning', 'generateError'),
return_value = c('x_n', 'x_d', 'x_i','x_d', 'x_w', 'x_er')
)
test_case_definitions <- data.table(
function_name = c('addInteger', 'divideByZero', "divideByZero", 'generateWarning', 'generateError'),
standard_evaluation = c('correct', 'correct', 'correct', 'correct', 'failure'),
type_checking_enforcement = c('erroneous', 'correct', 'correct', 'correct', 'correct'),
test_case = list(
TestCaseDefinition(list(34L, 44.5), 78L, 'sum 1 integer and 1 double'),
TestCaseDefinition(list(1), Inf, '1 / 0'),
TestCaseDefinition(list(0), NaN, '0 / 0'),
TestCaseDefinition(list(0), 1:3 + 1:7, 'generate warning'),
TestCaseDefinition(list(), NA, 'generate error')
)
)
self
}
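# Stored mock of an httr-style "response" object: an API request that came
# back with HTTP 413 (payload too large), kept as a test fixture.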
structure(
list(
url = "https://quickstats.nass.usda.gov/api/api_GET?key=API_KEY&commodity_desc=CORN&year=2012&agg_level_desc=STATE&statisticcat_desc=AREA%20HARVESTED&domaincat_desc=NOT%20SPECIFIED&state_alpha=VA&format=JSON",
status_code = 413
),
class = "response"
)
library(R2MLwiN)
mlwin <- getOption("MLwiN_path")
while (file.access(mlwin, mode = 1) != 0) {
cat("Please specify the root MLwiN folder or the full path to the MLwiN executable:\n")
mlwin <- scan(what = character(0), sep = "\n")
mlwin <- gsub("\\", "/", mlwin, fixed = TRUE)
}
options(MLwiN_path = mlwin)
data(gcsemv1, package = "R2MLwiN")
summary(gcsemv1)
(mymodel1 <- runMLwiN(c(written, csework) ~ 1 + (1 | student), D = "Multivariate Normal", estoptions = list(sort.ignore = TRUE),
data = gcsemv1))
(mymodel2 <- runMLwiN(c(written, csework) ~ 1 + female + (1 | school) + (1 | student), D = "Multivariate Normal",
data = gcsemv1))
mymodel2@RP["RP2_cov_Intercept_written_Intercept_csework"]/sqrt(mymodel2@RP["RP2_var_Intercept_written"] * mymodel2@RP["RP2_var_Intercept_csework"])
mymodel2@RP["RP1_cov_Intercept_written_Intercept_csework"]/sqrt(mymodel2@RP["RP1_var_Intercept_written"] * mymodel2@RP["RP1_var_Intercept_csework"])
(mymodel3 <- runMLwiN(c(written, csework) ~ 1 + female + (1 + female | school) + (1 | student), D = "Multivariate Normal",
data = gcsemv1))
(mymodel4 <- runMLwiN(c(written, csework) ~ 1 + female + (1 + female[1] | school) + (1 | student), D = "Multivariate Normal",
estoptions = list(resi.store = TRUE), data = gcsemv1))
mymodel4@RP["RP2_cov_Intercept_written_Intercept_csework"]/sqrt(mymodel4@RP["RP2_var_Intercept_written"] * mymodel4@RP["RP2_var_Intercept_csework"])
mymodel4@RP["RP2_cov_Intercept_written_femaleFemale_1"]/sqrt(mymodel4@RP["RP2_var_Intercept_written"] * mymodel4@RP["RP2_var_femaleFemale_1"])
mymodel4@RP["RP2_cov_Intercept_csework_femaleFemale_1"]/sqrt(mymodel4@RP["RP2_var_Intercept_csework"] * mymodel4@RP["RP2_var_femaleFemale_1"])
u0 <- mymodel4@residual$lev_2_resi_est_Intercept.written
u1 <- mymodel4@residual$lev_2_resi_est_Intercept.csework
u2 <- mymodel4@residual$lev_2_resi_est_femaleFemale.1
plot(u0, u0, asp = 1)
plot(u0, u1, asp = 1)
plot(u0, u2, asp = 1)
plot(u1, u1, asp = 1)
plot(u1, u2, asp = 1)
plot(u2, u2, asp = 1)
data(tutorial, package = "R2MLwiN")
tutorial$binexam <- as.integer(tutorial$normexam > 0)
tutorial$binlrt <- as.integer(tutorial$standlrt > 0)
(mymodel5 <- runMLwiN(c(logit(binexam), logit(binlrt)) ~ 1, D = c("Mixed", "Binomial", "Binomial"), estoptions = list(sort.ignore = TRUE),
data = tutorial))
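# replace_NAs_with_next_or_previous_non_NA: fill NAs with the previous (or,
# after reversing, the next) non-NA value. The idiom c(NA, x[v])[cumsum(v) + 1]
# picks, for each position, the latest non-NA value seen so far (NA before
# the first one).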
replace_NAs_with_next_or_previous_non_NA <- function(x, direction = c("previous", "next"), remove_na = FALSE) {
if (direction == "next") {
x <- rev(x)
}
v <- !is.na(x)
x <- c(NA, x[v])[cumsum(v) + 1]
if (direction == "next") {
x <- rev(x)
}
if (remove_na == TRUE) {
x <- x[!is.na(x)]
}
return(x)
}
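# compute_counts: tallies outcomes per date (and optional grouping variables),
# optionally binning ages with 'breaks' or a demographics table, dropping
# Feb 29, completing missing date/group combinations with zero counts, and
# attaching population sizes when demographics are supplied.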
compute_counts <- function(dat, by = NULL, demo = NULL,
date = "date",
age = "age",
agegroup = "agegroup",
breaks = NULL){
if(!is.character(by) & !is.null(by)) stop("by needs to be a character vector or NULL.")
if(!is.null(demo) & (agegroup %in% by)){
names(demo)[names(demo) == agegroup] <- "agegroup"
if(is.null(breaks)){
start <- grep("\\d+", demo$agegroup, value = TRUE) %>% unique() %>% as.numeric() %>% sort()
breaks <- c(start, Inf)
} else{
demo <- collapse_age_dist(demo, breaks)
}
}
if(agegroup %in% by){
if(is.null(breaks)){
stop("Need to provide age breaks or a demographics table")
} else{
if(age %in% names(dat)){
dat$agegroup <- group_age(dat[[age]], breaks)
} else stop(age, " not a column in dat")
}
}
if(!is.null(by))
if(!all(by %in% names(dat))) stop("by needs to be a subset of: ", paste(setdiff(names(dat), date), collapse = ", "))
names(dat)[names(dat) == date] <- "date"
by <- c("date", by)
dat <- drop_na(dat, by)
counts <- dat %>% filter(!(lubridate::month(date) == 2 & lubridate::day(date) == 29)) %>%
group_by_at(by) %>%
summarise(outcome = n()) %>%
ungroup() %>%
complete(!!!syms(by), fill = list(outcome = 0)) %>%
arrange(date)
if(!is.null(demo)){
by <- intersect(names(demo), names(counts))
demo <- demo %>%
group_by_at(by) %>%
summarize(population = sum(population)) %>%
ungroup()
counts <- left_join(counts, demo, by = by)
}
return(counts)
}
PrepareGeneList <- function(path, fileNm="datalist1"){
data <- readRDS(file=paste0(path, fileNm));
data <- as.data.frame(data$prot.mat);
data <- cbind(rownames(data), data)
colnames(data) <- c("#NAME", colnames(data)[-1]) # NOTE: the first column name was truncated in the source; "#NAME" is a guessed placeholder for the row-name column
dest.file <- paste0(path, fileNm, ".txt")
write.table(data, dest.file, append = FALSE, sep = "\t", row.names = FALSE, col.names = TRUE, quote = FALSE);
return(dest.file);
}
rm(list = ls())
library(tidyverse)
library(forcats)
library(grid)
library(ggtext)
source("theme/theme_swd.R")
theme_set(theme_swd() + theme(
axis.title = element_blank(),
axis.ticks.y = element_blank(),
axis.line.y = element_blank(),
axis.text = element_blank(),
axis.line.x = element_blank(),
plot.title = element_text(margin = margin(b = 1, unit = "cm")),
plot.subtitle = element_text(color = GRAY3, size = 12, hjust = 0),
plot.caption = element_text(margin = margin(t = 1, unit = "cm")),
plot.margin = unit(c(1, 1, 1, 6), "cm")
))
df <- read_csv(file.path("data", "FIG0605.csv")) %>%
pivot_longer(cols = -Category, names_to = "importance", values_to = "value") %>%
mutate(value = as.numeric(str_remove(value, "%"))/100) %>%
mutate(Category = fct_rev(fct_relevel(factor(Category), "Education", "Agriculture & rural development","Poverty reduction","Reconstruction",
"Economic growth","Health","Job creation","Governanace",
"Anti-corruption","Transport","Energy","Law & Justice",
"Basic infrastructure","Public sector reform","Public financial management"))) %>%
mutate(importance = fct_rev(fct_relevel(factor(importance), "Most important", "2nd Most Important", "3rd Most Important"))) %>%
mutate(fill = case_when(
(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction") & (importance == "Most important") ~ BLUE1,
(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction") & (importance == "2nd Most Important") ~ BLUE2,
(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction") & (importance == "3rd Most Important") ~ BLUE3,
importance == "Most important" ~ GRAY3,
importance == "2nd Most Important" ~ GRAY6,
T ~ GRAY9
))
df_text_labels <- df %>% filter(importance == "Overall") %>% select(Category, value) %>%
mutate(color = case_when(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction" ~ BLUE1,
T ~ GRAY6)) %>%
mutate(label = case_when(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction" ~ paste0("<b>",Category,"</b>"),
T ~ as.character(Category))) %>%
mutate(value_label = case_when(Category == "Education" | Category == "Agriculture & rural development" | Category == "Poverty reduction" ~ paste0("<b>",scales::percent(accuracy = 1, x = value),"</b>"),
T ~ as.character(scales::percent(accuracy = 1, x = value))))
pt <- df %>% filter(importance != "Overall") %>%
ggplot(aes(x = Category, y = value, group = importance, fill = fill)) +
geom_col(position = "stack", width = .69) +
scale_fill_identity(guide = F) +
geom_text(aes(label = scales::percent(accuracy = 1, x = value)), position = position_stack(vjust = 0),
color = "white") +
geom_richtext(data = df_text_labels,
aes(label = label, x = Category, y = 0, color = color, group = NA, fill = NA), fill = NA, label.color = NA, hjust = 1, vjust = .5, nudge_y = -.05) +
geom_richtext(data = df_text_labels,
aes(label = value_label, x = Category, y = 0, color = color, group = NA, fill = NA), fill = NA, label.color = NA, hjust = 1, vjust = .5, nudge_y = -.005) +
scale_color_identity() +
coord_flip(clip = "off") +
labs(title = "Top 15 development priorities, according to survey",
caption = "N = 4,392. Based on response to item, When considering the development priorities, which one development priority is the most important? Which one is\nthe second most important priority? Which one is the third most important priority? Respondents chose from a list. Top 15 shown.")
width <- 8
height <- 6
dev.new(width = width, height = height, noRStudioGD = T)
pt
ggsave(file.path("plot output", "FIG0605.png"), pt, width = width, height = height)
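# summary.rsm: prints the initial settings of an "rsm" fit, the chosen optimal
# number of clusters K_star, and the best lower bound over Klist.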
summary.rsm <-
function(object,...){
Y = object
if(!inherits(Y, "rsm")) stop("Entry must be of class rsm")
vec <- rep(-Inf, max(Y$Klist))
for(K in Y$Klist) vec[K] <- Y$output[[K]]$lower
vec <- vec[-c(1:(Y$Klist[1]-1))]
cat(paste("Initial settings:\n", Y$N, "vertices \n", Y$R, "subgraphs\n", Y$C, "relations types\n"))
cat("\nThe optimal number of clusters is K = ", Y$K_star, ";\nwith the lower bound equal to ", max(vec), "\n")
}
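# pbat log utilities: wrappers around compiled "kludge" CSV converters plus
# set-style vector helpers used to locate the log files a pbat run produced.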
kludgeConvertAwfulR <- function( csv.infilename, csv.outfilename ) {
warning( "Kludge pbat input level 2 reached (output is unfixable, padded)." );
.C( "kludgeConvertAwful", as.character(csv.infilename), as.character(csv.outfilename) );
}
kludgeConvertR <- function( csv.infilename, csv.outfilename ) {
warning( "Kludge pbat input level 1 reached (tries to fix output, should be okay?)." );
status = as.integer(0);
status <- .C( "kludgeConvert", as.character(csv.infilename), as.character(csv.outfilename), status )[[3]];
print( status )
}
vectorIntersection <- function( a, b ) {
remList <- c();
for( i in 1:length(a) ) {
if( sum(a[i]==b) < 1 )
remList <- c(remList, i);
}
if( length(remList) > 0 )
a <- a[-remList];
return(a);
}
vectorSubtraction <- function( a, b ) {
remList <- c();
for( i in 1:length(a) ) {
if( sum(a[i]==b) > 0 )
remList <- c(remList, i);
}
if( length(remList) > 0 )
a <- a[-remList];
return(a);
}
getPbatlogs <- function() {
strs <- dir(pattern="pbatlog.*");
datStrs <- dir(pattern="pbatlog.*dat");
headerStrs <- dir(pattern="pbatlog.*header");
return( vectorSubtraction( vectorSubtraction( strs, datStrs ), headerStrs ) );
}
getPbatlog <- function( beforeLogs, afterLogs ) {
log <- vectorSubtraction( afterLogs, beforeLogs );
if( length(log)!=1 ) {
if( length(log)<1 )
stop( "Pbat terminated before a log-file could be written." );
stop( "Two possible logs were found - if you are running pbat twice simultaneously in the same directory, bad things happen." );
}
return(log);
}
strsplitFix2 <- function( x, split ) {
if( length(x) > 1 ) stop( "strSplitFix(...) only works on a single string." );
if( length(x)==0 || x=="" ) return("");
res=unlist( strsplit( x, split, fixed=TRUE ) );
for( i in 1:length(res) ){
if( substring(res[i],1,1)==" " )
res[i] <- substring(res[i],2);
if( substring(res[i],strlen(res[i]))==" " )
res[i] <- substring(res[i],1,strlen(res[i])-1);
}
return( res[res!=""] );
}
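# loadPbatlog: splits a pbat logfile into its call header and CSV results via
# the compiled 'launchPbatlog' routine, then reads the CSV, falling back to
# two increasingly aggressive kludge converters when it is malformed.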
loadPbatlog <- function( log ){
callfile <- paste( log, ".call", sep="" );
resultfile <- paste( log, ".csv", sep="" );
.C( "launchPbatlog", log, callfile, resultfile, as.integer(0) );
pbatCall <- NULL; pbatData <- NULL;
try( pbatCall <- readLines( callfile ) );
if( file.info(resultfile)$size == 0 ) {
warning( "Empty output. Generally this indicates the number of informative families in the markers specified is below your current 'min.info' threshold (or pbat crashed)." );
return( list( call=pbatCall, data=NA ) );
}
read <- FALSE;
try( { pbatData <- read.csv( resultfile, strip.white=TRUE );
read <- TRUE; } );
if( !read ) {
kludgeLog <- paste( resultfile, ".kludge.csv", sep="" );
kludgeConvertR( resultfile, kludgeLog );
try( { pbatData <- read.csv( kludgeLog, strip.white=TRUE );
read <- TRUE } );
if( !read ) {
kludgeConvertAwfulR( resultfile, kludgeLog );
try( { pbatData <- read.csv( kludgeLog, strip.white=TRUE );
read <- TRUE } );
}
if( !read )
warning( "Data could not be read in, despite kludges." );
}
return( list( call=pbatCall, data=pbatData ) );
}
loadPbatlog.slow.but.good <- function( log ){
pbatCall <- NULL; pbatData <- NULL;
if( !file.exists(log) ) {
print( "Nonexistent pbat output file; potentially safe to ignore if running a smaller analysis with multiple processes (the output changes in every PBAT release). Ensure that the output is proper length." );
return( NULL );
}
lines <- readLines( log );
NUMLINES <- length(lines);
if( NUMLINES < 1 ) {
print( "Empty pbat output file; safe to ignore if running a smaller analysis with multiple processes. Ensure that the output is proper length." );
return( NULL );
}
and.symbol <- -1;
for( i in 1:NUMLINES ){
if( !is.null(lines[i]) && lines[i]!="" ) {
if( strfindf(lines[i], "&") != -1 ){
if( and.symbol == -1 ) and.symbol <- i;
break;
}
}
}
if( and.symbol == -1 ) {
if( pbat.getNumProcesses() < 2 ) {
print( "ERROR: No data could be found in the file. The pbat output is as follows:" );
print( lines );
}
print( "No output in the logfile - just batch commands. (1) Pbat may have crashed. (2) You may be doing a relatively small analysis, so that some processes had nothing to do (so completely safe to ignore in that circumstance). Ensure output is proper length." );
return(NULL);
}
if( and.symbol > 1 )
pbatCall <- lines[1:(and.symbol-1)];
dataNames <- NULL;
firstLine <- strsplitFix2( lines[and.symbol], "&" );
if( firstLine[1] == "Group" ){
dataNames <- firstLine;
and.symbol <- and.symbol + 1;
}
if( and.symbol>NUMLINES && length(dataNames)>0 ) {
pbatData <- data.frame( matrix( NA, 1, length(dataNames) ) );
names(pbatData) <- dataNames;
return( list( call=pbatCall, data=pbatData ) );
}
for( i in and.symbol:NUMLINES ){
nextLine <- strsplitFix2( lines[i], "&" );
pbatData <- rbind( pbatData, nextLine );
}
row.names(pbatData) <- 1:nrow(pbatData);
pbatData <- data.frame( pbatData );
if( !is.null(dataNames) ){
if( length(dataNames) == ncol(pbatData) ) {
names(pbatData) <- dataNames;
}else{
print( dataNames );
warning( "Data Names do not match the data!" );
}
}
if( !is.null(pbatData) ){
for( i in 1:ncol(pbatData) ){
suppressWarnings(
curcol <- as.numeric(as.character(pbatData[,i]))
);
if( !is.na(sum(curcol)) )
pbatData[,i] <- curcol;
}
}
return( list( call=pbatCall, data=pbatData ) );
}
loadPbatlog.bad <- function( log ) {
pbatCall <- NULL; pbatData <- NULL;
if( !file.exists(log) )
stop( paste("Cannot load the pbat logfile '",log,"'; file does not exist",sep="") );
if( file.exists(paste(log,".dat",sep="")) && file.exists(paste(log,".header",sep="")) ) {
header <- read.table( paste(log,".header",sep=""),
sep="&", comment.char="", header=TRUE );
pbatData <- read.table( log, sep="&", header=FALSE );
print(length(pbatData))
print(length(header))
warning( "header and data do not match!!!" );
logfile <- file( paste(log,".dat",sep=""), open="r", blocking=FALSE );
pbatCall <- readLines(logfile);
NUMLINES <- length(pbatCall);
close(logfile);
}else {
logfile <- file(log, open="r", blocking=FALSE);
tmp <- readLines(logfile);
NUMLINES <- length(tmp);
close(logfile);
header <- TRUE;
addiLine <- NULL;
if( NUMLINES>0 ) {
logfile <- file(log, open="r", blocking=FALSE);
on.exit(close(logfile));
MARKERSTR <- "Group&";
line <- readLines( logfile, n=1 );
namesVector <- NULL;
lastLine=-1;
for( i in 1:NUMLINES ){
if( substring(line,1,strlen(MARKERSTR))==MARKERSTR ) {
namesVector <- make.names( unlist(strsplit(line,"&",fixed=TRUE)) );
break;
}else if( strfindf(line,"&")!=-1 ){
addiLine <- unlist(strsplit(line,"&",fixed=TRUE));
namesVector <- "BAD";
header <- FALSE;
break;
}else{
pbatCall <- c(pbatCall, line);
line <- readLines( logfile, n=1 );
}
lastLine=i;
}
if( !is.null(namesVector) && lastLine<NUMLINES ) {
pbatData <- read.table( logfile, header=FALSE, sep="&" );
if( length(namesVector)!=length(pbatData) && header==TRUE ) {
warning( "Names vector is of improper length! I do not know what to do!" );
}else{
if( header ) {
names(pbatData) <- namesVector;
}else{
warning( paste("Could not load in header for '",log,"' (bug workaround for multiple processes; safe to ignore).") );
pbatData <- rbind( addiLine, pbatData );
if( pbatData[2,1]==999 )
pbatData[2,1] <- -999;
}
}
} else if( lastLine>=NUMLINES ) {
pbatData <- read.table( log, header=FALSE, sep="&" );
}
} else{
warning( "No logfile exists." );
pbatCall="";
pbatData="";
}
}
return( list( call=pbatCall, data=pbatData ) );
}
loadCurrentPbatLog <- function( beforeLogs ) {
afterLogs <- getPbatlogs();
strLog <- getPbatlog( beforeLogs, afterLogs );
return( loadPbatlog( strLog ) );
}
loadPbatlogExtended <- function( log ) {
callfile <- paste( log, ".call", sep="" );
resultfile <- paste( log, ".csv", sep="" );
numProcesses <- pbat.getNumProcesses();
if( numProcesses==1 ) return( loadPbatlog( log ) );
.C( "launchPbatlogExtended", log, callfile, resultfile, as.integer(numProcesses) );
pbatCall <- NULL; pbatData <- NULL;
try( pbatCall <- readLines( callfile ) );
if( file.info(resultfile)$size == 0 ) {
warning( "Empty output. Generally this indicates the number of informative families in the markers specified is below your current 'min.info' threshold (or pbat crashed)." );
return( list( call=pbatCall, data=NA ) );
}
read <- FALSE;
try( { pbatData <- read.csv( resultfile, strip.white=TRUE );
read <- TRUE; } );
if( !read ) {
kludgeLog <- paste( resultfile, ".kludge.csv", sep="" );
kludgeConvertR( resultfile, kludgeLog );
try( { pbatData <- read.csv( kludgeLog, strip.white=TRUE );
read <- TRUE } );
if( !read ) {
kludgeConvertAwfulR( resultfile, kludgeLog );
try( { pbatData <- read.csv( kludgeLog, strip.white=TRUE );
read <- TRUE } );
}
if( !read )
warning( "Data could not be read in, despite kludges." );
}
return( list( call=pbatCall, data=pbatData ) );
}
loadPbatlogExtended.slower <- function( log ) {
numProcesses <- pbat.getNumProcesses();
if( numProcesses == 1 )
return( loadPbatlog(log) );
res <- loadPbatlog(paste(log,"_1_",numProcesses,sep=""));
for( i in 2:numProcesses ){
res2 <- loadPbatlog(paste(log,"_",i,"_",numProcesses,sep=""));
if( !is.null(res2) ) {
if( is.null( res$data ) ) {
res$data <- res2$data;
}else if( !is.null(res2$data) ) {
names( res2$data ) <- names( res$data );
res$data <- rbind( res$data, res2$data );
}
}
}
res$data <- res$data[!is.na(res$data[,1]),];
rownames(res$data) <- 1:nrow(res$data);
return(res);
}
loadPbatlogConcatenate <- function( log, filename, clean=FALSE ) {
numProcesses <- pbat.getNumProcesses();
if( numProcesses == 1 ) {
if( !clean ){
file.copy( from=log, to=filename );
}else{
file.rename( from=log, to=filename );
}
return(invisible()); # single-process output needs no concatenation
}
firstlog <- paste(log,"_1_",numProcesses,sep="");
if( !clean ){
file.copy( from=firstlog, to=filename );
}else{
file.rename( from=firstlog, to=filename );
}
for( i in 2:numProcesses ){
nextlog <- paste(log,"_",i,"_",numProcesses,sep="");
if( file.exists( nextlog ) ){
file.append( filename, nextlog );
if( clean ) file.remove( nextlog );
}else{
cat( "Warning, not all output files exist; PBAT may have crashed or not finished running. See also 'is.finished()'\n" );
}
}
cat( "Output has been concatenated and left in '", filename, "'.\n", sep="" );
return(invisible());
}
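# deriv_xexp uses the identity d^n/dx^n [x * exp(x)] = exp(x) * (x + n), which
# follows by induction since d/dx [exp(x) * (x + n)] = exp(x) * (x + n + 1).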
xexp <- function(x) {
return(x * exp(x))
}
deriv_xexp <- function(x, degree = 1) {
stopifnot(is.numeric(degree),
degree >= 0)
return(exp(x) * (x + degree))
}
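# OpenMx OMP test: with OMP_NUM_THREADS=1 in the environment, requesting two
# threads via mxOption should make mxRun() error.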
library(OpenMx)
library(testthat)
context("omp")
skip_if_not(imxHasOpenMP())
oldONT <- Sys.getenv("OMP_NUM_THREADS")
Sys.setenv(OMP_NUM_THREADS = '1')
mxOption(key='Number of Threads', value=2)
mData = matrix (1)
dimnames(mData) = list(c("X"), c("X"))
m1 = mxModel("one_is_the_loneliest_number", type="RAM",
manifestVars = "X",
mxPath(from="X", to = "X", arrows=2, lbound=0, labels= "X"),
mxData(mData, type="cov", numObs = 10)
)
expect_error(mxRun(m1), "2 threads requested.")
if (nchar(oldONT) == 0) {
Sys.unsetenv('OMP_NUM_THREADS')
} else {
Sys.setenv(OMP_NUM_THREADS = oldONT)
}
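# arpv.plot: plots a fuzzy P-value either as its distribution function
# phi(alpha) (df = TRUE) or, via the differences diff(phi)/diff(alpha), as the
# piecewise-constant density of the randomized P-value.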
arpv.plot <- function(alpha, phi, df = TRUE, verticals = TRUE) {
if (! is.numeric(alpha)) stop("alpha not numeric")
if (! is.numeric(phi)) stop("phi not numeric")
if (! is.logical(df)) stop("df not logical")
if (length(alpha) != length(phi)) stop("alpha and phi not same length")
if (! all(0 <= alpha & alpha <= 1)) stop("alpha not in [0, 1]")
if (! all(0 <= phi & phi <= 1)) stop("phi not in [0, 1]")
if (df) {
plot(alpha, phi, xlab = "significance level",
ylab = "fuzzy P-value", type = "l")
u <- par("usr")
lines(c(u[1], alpha[1]), c(0, 0))
lines(c(alpha[length(alpha)], u[2]), c(1, 1))
} else {
dens <- diff(phi) / diff(alpha)
plot(range(alpha), range(0, dens), type = "n",
xlab = "significance level",
ylab = "density of randomized P-value")
nalpha <- length(alpha)
ndens <- length(dens)
segments(alpha[-nalpha], dens, alpha[-1], dens)
if (verticals) {
jalpha <- c(1, nalpha)
jdens <- c(1, ndens)
segments(alpha[jalpha], rep(0, 2), alpha[jalpha], dens[jdens],
lty = 2)
if (nalpha > 2)
segments(alpha[-jalpha], pmin(dens[-1], dens[-ndens]),
alpha[-jalpha], pmax(dens[-1], dens[-ndens]), lty = 2)
}
}
}
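# Y.matrix.gen: builds the nobs x (k-1) response matrix whose i-th row is the
# column of XI (from XI.gen) for class y.train[i]; this vertex coding is the
# usual device in multicategory large-margin classification.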
Y.matrix.gen <- function(k, kd, nobs, y.train) {
XI <- XI.gen(k = k, kd = kd)
Y.matrix <- matrix(data = 0.0, nrow = nobs, ncol = k-1L)
for( ii in 1L:nobs ) Y.matrix[ii,] <- XI[,y.train[ii]]
return( Y.matrix )
}
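# Hypergeometric2F1: evaluates 2F1(a, b; c; x) by numerical integration for
# the two parameter combinations needed here, (b = 1, c = 5/2) and
# (b = 1/2, c = 3/2); returns 1 at x = 0 and 0 for NaN input or result.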
Hypergeometric2F1 <- function(a,b,c,x)
{
ret <- NA
if(is.nan(x)) {
ret <- 0
} else if(x==0) ret <- 1
if(is.na(ret))
{
if(b==1 && c==5/2) {
integrand1 <- function(u) u^(-2*a+1)*sqrt(u^2-(1-x))
result <- integrate(integrand1, lower=sqrt(1-x),upper=1,rel.tol=1e-12)$value
result <- result*3/(x*sqrt(x))
ret <- result
} else if(b==1/2 && c==3/2)
{
if(x==1) {
ret <- sqrt(pi)*gamma(1-a)/(2*gamma(3/2-a))
} else {
integrand2 <- function(u) u^(2*a-2)*asin(sqrt(1-x)/u)
result <- integrate(integrand2, lower=sqrt(1-x),upper=1,rel.tol=1e-12)$value
result <- result*(2*a-1)/sqrt(1-x)^(2*a-1)+pi/2-1/sqrt(1-x)^(2*a-1)*asin(sqrt(1-x))
result <- result/sqrt(x)
ret <- result
}
}
}
if(is.nan(ret)) ret <- 0
ret
}
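# Shiny module: removeByID_UI renders a numeric input for a record ID;
# removeByID_MOD returns a reactive producing rvs$occs with that occurrence
# removed, logging an error when no data is loaded or the ID is unknown.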
removeByID_UI <- function(id) {
ns <- NS(id)
tagList(
numericInput(ns("removeID"), label="Enter the record ID to be removed", value = 0)
)
}
removeByID_MOD <- function(input, output, session, rvs) {
reactive({
if (is.null(rvs$occs)) {
rvs %>% writeLog(type = 'error', "Before processing occurrences,
obtain the data in component 1.")
return()
}
if (!(input$removeID %in% rvs$occs$occID)) {
rvs %>% writeLog(type = 'error','Entered ID not found.')
return()
}
i <- which(input$removeID == rvs$occs$occID)
occs.remID <- rvs$occs[-i,]
rvs$removedIDs <- c(rvs$removedIDs, input$removeID)
rvs %>% writeLog("Removed occurrence with ID = ", input$removeID,
". Updated data has n = ", nrow(occs.remID), " records.")
return(occs.remID)
})
}
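# transformdata: reshapes weekly year/week/rate surveillance data into a
# week-by-season matrix, handling seasons that span the year boundary and
# 53-week years, and dropping seasons whose share of missing weeks exceeds
# i.max.na.per percent.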
transformdata <- function(i.data, i.range.x = NA, i.name = "rates", i.max.na.per = 100, i.function = NULL) {
if (is.null(i.range.x)) i.range.x <- NA
if (any(is.na(i.range.x)) | !is.numeric(i.range.x) | length(i.range.x) != 2) i.range.x <- c(min(as.numeric(i.data$week)), max(as.numeric(i.data$week)))
if (i.range.x[1] < 1) i.range.x[1] <- 1
if (i.range.x[1] >= 52) i.range.x[1] <- 52
if (i.range.x[2] < 1) i.range.x[2] <- 1
if (i.range.x[2] >= 52) i.range.x[2] <- 52
if (i.range.x[1] == i.range.x[2]) i.range.x[2] <- i.range.x[2] - 1
if (i.range.x[2] == 0) i.range.x[2] <- 52
if (!all(c("year", "week") %in% tolower(names(i.data)))) stop("Input data must have a year, week, rate format\n")
if (!(i.name %in% names(i.data))) stop(paste0(i.name, " variable not found in input data\n"))
data <- i.data[tolower(names(i.data)) %in% c("year", "week") | names(i.data) %in% i.name]
names(data)[names(data) == i.name] <- "rates"
names(data) <- tolower(names(data))
year <- week <- NULL
data <- data %>%
filter(!is.na(year) & !is.na(week))
week.f <- i.range.x[1]
week.l <- i.range.x[2]
data$season <- ""
if (week.f > week.l) {
i.range.x.length <- 52 - week.f + 1 + week.l
i.range.x.values.52 <- data.frame(week = c(week.f:52, 1:week.l), week.no = 1:i.range.x.length)
i.range.x.values.53 <- data.frame(week = c(week.f:53, 1:(week.l - 1)), week.no = 1:i.range.x.length)
data$season[data$week < week.f] <- paste(data$year[data$week < week.f] - 1, data$year[data$week < week.f], sep = "/")
data$season[data$week >= week.f] <- paste(data$year[data$week >= week.f], data$year[data$week >= week.f] + 1, sep = "/")
seasons.all <- unique(data$season)
seasons.53 <- unique(subset(data, data$week == 53)$season)
seasons.52 <- seasons.all[!(seasons.all %in% seasons.53)]
data.out <- rbind(
merge(data.frame(season = seasons.52, stringsAsFactors = F), i.range.x.values.52, stringsAsFactors = F),
merge(data.frame(season = seasons.53, stringsAsFactors = F), i.range.x.values.53, stringsAsFactors = F)
)
data.out <- merge(data.out, data, by = c("season", "week"), all.x = T)
data.out$year[data.out$week >= week.f] <- as.numeric(substr(data.out$season[data.out$week >= week.f], 1, 4))
data.out$year[data.out$week < week.f] <- as.numeric(substr(data.out$season[data.out$week < week.f], 6, 9))
} else {
i.range.x.length <- week.l - week.f + 1
if (week.l == 53) {
i.range.x.values.52 <- data.frame(week = week.f:52, week.no = 1:(i.range.x.length - 1))
i.range.x.values.53 <- data.frame(week = (week.f + 1):53, week.no = 1:(i.range.x.length - 1))
} else {
i.range.x.values.52 <- data.frame(week = week.f:week.l, week.no = 1:i.range.x.length)
i.range.x.values.53 <- data.frame(week = week.f:week.l, week.no = 1:i.range.x.length)
}
data$season <- paste(data$year, data$year, sep = "/")
seasons.all <- unique(data$season)
seasons.53 <- unique(subset(data, data$week == 53)$season)
seasons.52 <- seasons.all[!(seasons.all %in% seasons.53)]
data.out <- rbind(
merge(data.frame(season = seasons.52, stringsAsFactors = F), i.range.x.values.52, stringsAsFactors = F),
merge(data.frame(season = seasons.53, stringsAsFactors = F), i.range.x.values.53, stringsAsFactors = F)
)
data.out <- merge(data.out, data, by = c("season", "week"), all.x = T)
data.out$year <- as.numeric(substr(data.out$season, 1, 4))
}
data.out$yrweek <- data.out$year * 100 + data.out$week
data.out <- subset(data.out, !is.na(data.out$week.no))
data.out$week <- NULL
week.no <- season <- rates <- NULL
if (!is.null(i.function)) data.out <- aggregate(rates ~ season + week.no, data = data.out, FUN = i.function)
data.out <- data.out %>%
select(week.no, season, rates) %>%
spread(season, rates)
data.out <- merge(i.range.x.values.52, data.out, by = "week.no", all.x = T)
data.out <- data.out[apply(data.out, 2, function(x) sum(is.na(x)) / length(x) < i.max.na.per / 100)]
data.out <- data.out[order(data.out$week.no), ]
rownames(data.out) <- data.out$week
data.out$week <- NULL
data.out$week.no <- NULL
transformdata.output <- list(data = data.out)
transformdata.output$call <- match.call()
return(transformdata.output)
}
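# generate.rho builds the (21p x 21p) regularization matrix for sparse inverse
# covariance estimation from a trial rho (via the compiled 'guess_rho_matrix'
# routine); precision() then runs the compiled 'glassofast' graphical lasso on
# the shrunken covariance to obtain the precision matrix.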
generate.rho <- function( wtsum,
pa.vec,
p,
rhodefault = -1,
maxgapf = 0.9 )
{
if( rhodefault < 0 )
{
trialrho <- max( 0.001, 1.0 / wtsum )
} else
{
trialrho <- rhodefault
}
if( trialrho <= 0 | trialrho >= 1 )
{
stop( 'Sorry - failed to find suitable value for rho (0 < rho < 1)!' )
}
rho <- numeric( length = p * 21 * p * 21 )
rho.raw <- .C( 'guess_rho_matrix',
as.double( rho ),
as.double( pa.vec ),
as.double( p ),
as.double( maxgapf ),
as.double( trialrho ) )
rho.vec <- unlist( rho.raw[ 1 ] )
rho.mat <- matrix( rho.vec, p * 21, p * 21, byrow = TRUE )
sum( rho.mat )
return( rho.mat )
}
precision <- function( S.shrinked,
rho )
{
p <- nrow( S.shrinked )
X <- matrix( 0, p, p )
W <- matrix( 0, p, p )
Wd <- rep(0,p)
Wdj <- rep(0,p)
info <- 0
P <- matrix( .Fortran( 'glassofast',
as.integer( nrow( S.shrinked ) ),
as.double( S.shrinked ),
as.double( rho ),
as.double( 1e-4 ),
as.integer( 1000 ),
as.integer( 0 ),
as.integer( 0 ),
as.double( X ),
as.double( W ),
as.double( Wd ),
as.double( Wdj ),
as.integer( info ) )[[ 8 ]], p, p )
return( P )
}
listFilesInZip <- function(zippath){
utils::unzip(zipfile = zippath, list = TRUE)
}
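# as_survey_design: srvyr front end that tidy-selects the design variables and
# passes them to survey::svydesign(), with methods for data frames, existing
# survey.design2 objects, and lazy database-backed tables.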
as_survey_design <- function(.data, ...) {
UseMethod("as_survey_design")
}
as_survey_design.data.frame <-
function(.data, ids = NULL, probs = NULL, strata = NULL,
variables = NULL, fpc = NULL, nest = FALSE,
check_strata = !nest, weights = NULL, pps = FALSE,
variance = c("HT", "YG"), ...) {
ids <- srvyr_select_vars(rlang::enquo(ids), .data, check_ids = TRUE)
probs <- srvyr_select_vars(rlang::enquo(probs), .data)
strata <- srvyr_select_vars(rlang::enquo(strata), .data)
fpc <- srvyr_select_vars(rlang::enquo(fpc), .data)
weights <- srvyr_select_vars(rlang::enquo(weights), .data)
variables <- srvyr_select_vars(rlang::enquo(variables), .data)
if (is.null(ids)) ids <- ~1
out <- survey::svydesign(
ids, probs, strata, variables, fpc, .data, nest, check_strata, weights, pps
)
as_tbl_svy(
out,
list(ids = ids, probs = probs, strata = strata, fpc = fpc, weights = weights)
)
}
as_survey_design.survey.design2 <- function(.data, ...) {
as_tbl_svy(.data)
}
as_survey_design.tbl_lazy <-
function(.data, ids = NULL, probs = NULL, strata = NULL,
variables = NULL, fpc = NULL, nest = FALSE,
check_strata = !nest, weights = NULL, pps = FALSE,
variance = c("HT", "YG"), ...) {
ids <- rlang::enquo(ids)
probs <- rlang::enquo(probs)
strata <- rlang::enquo(strata)
fpc <- rlang::enquo(fpc)
weights <- rlang::enquo(weights)
variables <- rlang::enquo(variables)
survey_vars_local <- get_lazy_vars(
data = .data, id = !!ids, !!probs, !!strata, !!fpc, !!weights, !!variables
)
ids <- srvyr_select_vars(ids, survey_vars_local, check_ids = TRUE)
probs <- srvyr_select_vars(probs, survey_vars_local)
strata <- srvyr_select_vars(strata, survey_vars_local)
fpc <- srvyr_select_vars(fpc, survey_vars_local)
weights <- srvyr_select_vars(weights, survey_vars_local)
variables <- srvyr_select_vars(variables, survey_vars_local)
if (is.null(ids)) ids <- ~1
out <- survey::svydesign(
ids, probs, strata, variables, fpc, survey_vars_local, nest, check_strata, weights, pps
)
out$variables <- .data
as_tbl_svy(
out,
list(ids = ids, probs = probs, strata = strata, fpc = fpc, weights = weights)
)
}
as_survey_design_ <- function(.data, ids = NULL, probs = NULL, strata = NULL,
variables = NULL, fpc = NULL, nest = FALSE,
check_strata = !nest, weights = NULL, pps = FALSE,
variance = c("HT", "YG")) {
as_survey_design(
.data,
ids = !!n_compat_lazy(ids),
probs = !!n_compat_lazy(probs),
strata = !!n_compat_lazy(strata),
variables = !!n_compat_lazy(variables),
fpc = !!n_compat_lazy(fpc),
nest = nest,
check_strata = check_strata,
weights = !!n_compat_lazy(weights),
pps = pps,
variance = variance
)
}
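# penalty_goric: GORIC penalty term computed as a level-probability (LP)
# weighted count of free parameters; with correction = TRUE the small-sample
# term N * (lPT + 1) / (N - lPT - 2) replaces lPT + 1.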
penalty_goric <- function(Amat, meq, LP, correction = FALSE,
sample.nobs = NULL, ...) {
if (correction) {
N <- sample.nobs
if (all(c(Amat) == 0)) {
lPT <- ncol(Amat)
PT <- ( (N * (lPT + 1) / (N - lPT - 2)) )
} else {
if (attr(LP, "method") == "boot") {
lPT <- 0 : ncol(Amat)
PT <- sum( ( (N * (lPT + 1) / (N - lPT - 2) ) ) * LP)
} else if (attr(LP, "method") == "pmvnorm") {
min.col <- ncol(Amat) - nrow(Amat)
max.col <- ncol(Amat) - meq
lPT <- min.col : max.col
PT <- sum( ( (N * (lPT + 1) / (N - lPT - 2) ) ) * LP)
}
}
} else {
if (all(c(Amat) == 0)) {
PT <- 1 + ncol(Amat)
} else {
if (attr(LP, "method") == "boot") {
PT <- 1 + sum(0 : ncol(Amat) * LP)
} else if (attr(LP, "method") == "pmvnorm") {
min.col <- ncol(Amat) - nrow(Amat)
max.col <- ncol(Amat) - meq
PT <- 1 + sum(min.col : max.col * LP)
}
}
}
return(PT)
}
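# rgt: draws from the location-scale Student t distribution, mu + sigma * t_df.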
rgt <-
function (n, mu = 0, sigma = 1, df = stop("no df arg")) {
mu + sigma * rt(n, df = df)
}
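# Tests for eatGADS collapseMC_Text()/append_varLabel(): collapsing a labeled
# multiple-choice variable with its open text field, with and without variable
# and label suffixes, missing codes, and labeled missings.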
mc <- as.factor(c("Ger", "other", "other", "Aus"))
mt <- data.frame(ID = 1:4, mc = mc, text = c(NA, "Eng", "Aus", "Aus2"), stringsAsFactors = FALSE)
mt_gads <- import_DF(mt)
test_that("Errors collapse mc text",{
expect_error(collapseMC_Text(mt_gads, mc_var = "some_var", text_var = "text", mc_code4text = "other"), "'mc_var' is not a variable in the GADSdat.")
expect_error(collapseMC_Text(mt_gads, mc_var = "mc", text_var = "some_var", mc_code4text = "other"), "'text_var' is not a variable in the GADSdat.")
mtcars_g <- import_DF(mtcars)
expect_error(collapseMC_Text(mtcars_g, mc_var = "cyl", text_var = "gear", mc_code4text = "other"), "'mc_var' must be a labeled integer.")
})
test_that("Errors mc_value4text collapse mc text",{
expect_error(collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = 3), "'mc_code4text' must be a character of length 1.")
expect_error(collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = c("1", "2")), "'mc_code4text' must be a character of length 1.")
expect_error(collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = "other_"), "'mc_code4text' must be a 'valLabel' entry for 'mc_var'.")
})
test_that("Append variable label",{
mt_gads2 <- changeVarLabels(mt_gads, varName = "ID", varLabel = "id")
mt_gads2t <- append_varLabel(mt_gads2, varName = "ID", label_suffix = "(recoded)")
expect_equal(mt_gads2t$labels$varLabel[1], "id (recoded)")
mt_gads2u <- append_varLabel(mt_gads2, varName = "mc", label_suffix = "(recoded)")
expect_equal(mt_gads2u$labels$varLabel[2], "(recoded)")
mt_gads2v <- append_varLabel(mt_gads2, varName = "mc", label_suffix = "")
expect_equal(mt_gads2v$labels$varLabel[2], NA_character_)
})
test_that("Combine mc and text",{
test <- collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = "other")
expect_true("mc_r" %in% names(test$dat))
expect_equal(test$labels[6, "varLabel"], "(recoded)")
expect_equal(test$dat$mc_r, c(2, 4, 1, 1))
test_dat <- extractData(test)
expect_equal(test_dat$mc_r, c("Ger", "Eng", "Aus", "Aus"))
})
test_that("Combine mc and text into old variables",{
test <- collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = "other", var_suffix = NULL, label_suffix = NULL)
expect_false("mc_r" %in% names(test$dat))
expect_equal(test$dat$mc, c(2, 4, 1, 1))
test_dat <- extractData(test)
expect_equal(test_dat$mc, c("Ger", "Eng", "Aus", "Aus"))
})
test_that("Combine mc and text into old variables via empty string",{
test <- collapseMC_Text(mt_gads, mc_var = "mc", text_var = "text", mc_code4text = "other", var_suffix = "", label_suffix = NULL)
expect_false("mc_r" %in% names(test$dat))
expect_equal(test$dat$mc, c(2, 4, 1, 1))
test_dat <- extractData(test)
expect_equal(test_dat$mc, c("Ger", "Eng", "Aus", "Aus"))
})
test_that("Combine mc and text with Missings on mcs",{
mt_gads2 <- recodeGADS(mt_gads, varName = "mc", oldValues = c(1, 2, 3), newValues = c(-9, -8, 1), existingMeta = "value")
mt_gads2 <- changeValLabels(mt_gads2, "mc", value = c(1, -8, -9), c("Aus", "missing other", "missing"))
mt_gads2 <- checkMissings(mt_gads2)
test <- collapseMC_Text(mt_gads2, mc_var = "mc", text_var = "text", mc_code4text = "Aus")
expect_equal(test$dat$mc_r, c(-8, 3, 1, 2))
test_dat <- extractData(test)
expect_equal(test_dat$mc_r, c(NA, "Eng", "Aus", "Aus2"))
})
test_that("Combinations of mc_code4text and missing in text variable",{
mt_gads2 <- recodeGADS(mt_gads, varName = "mc", oldValues = c(1, 2, 3), newValues = c(-9, 3, 1), existingMeta = "value")
mt_gads2 <- changeValLabels(mt_gads2, "mc", value = c(1, 3, -9), c("Aus", "other", "missing"))
mt_gads2 <- checkMissings(mt_gads2)
test <- collapseMC_Text(mt_gads2, mc_var = "mc", text_var = "text", mc_code4text = "other")
expect_equal(test$dat$mc_r, c(3, 1, 1, 4))
test_dat <- extractData(test)
expect_equal(test_dat$mc_r, c("other", "Aus", "Aus", "Aus2"))
})
suppressWarnings(testMC <- import_spss("helper_spss_recodeMC.sav"))
test_that("Combination of mc_code4text and labeled missing in text variable",{
test <- collapseMC_Text(testMC, mc_var = "mc", text_var = "text", mc_code4text = "other")
expect_equal(test$dat$mc_r, c(-9, 1, 2, -9, 4, 3, -9))
test_dat <- extractData(test)
expect_equal(test_dat$mc_r, c(NA, "Ger", "Eng", NA, "Aus", "other", NA))
})
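# parseIndex: reads a package INDEX-style file and splits each entry line into
# the index name (text before the first run of spaces) and its description.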
parseIndex <- function (file)
{
empty <- data.frame(index = character(), description = character(),
stringsAsFactors = FALSE)
if (!file.exists(file)) return(empty)
rl <- readLines(file)
if (!length(rl)) return(empty)
lines <- (regexpr("^ ", rl) > 0) # NOTE: pattern appears truncated in the source; "^ " (continuation lines starting with a space) is a guessed reconstruction
index <- gsub(" +.*$", "", rl[!lines])
description <- gsub("^.*? +", "", rl[!lines])
return(data.frame(index = index, description = description,
stringsAsFactors = FALSE))
} |
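# Hedged usage sketch for parseIndex(): writes a tiny INDEX-style file (the
# "name  description" layout is an assumption drawn from the gsub() patterns
# above) and parses it back into a two-column data frame.
idx_file <- tempfile()
writeLines(c("alpha   first entry", "beta    second entry"), idx_file)
parseIndex(idx_file) |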
test_that("TWIT_paginte_max_id respects max_id and since_id", {
simple_timeline <- function(...) {
r <- TWIT_paginate_max_id(NULL, "/1.1/statuses/user_timeline",
list(screen_name = "JustinBieber"),
n = 100,
...
)
tweets_with_users(r)[1:10]
}
base <- simple_timeline()
older <- simple_timeline(max_id = base)
expect_true(min(format_date(older$created_at)) < min(format_date(base$created_at)))
base2 <- simple_timeline(since_id = older)
expect_length(intersect(base$id, base2$id), nrow(base))
})
test_that("TWIT_paginte_cursor respects cursor", {
page1 <- get_followers("JustinBieber")
page2 <- get_followers("JustinBieber", cursor = page1)
expect_length(intersect(page1$from_id, page2$from_id), 0)
}) |
L_1way_RM_ANOVA <- function(dat, group, ID, contrast1=NULL, contrast2=NULL, verb=TRUE) {
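  # Support (log-likelihood ratio) analysis for a one-way repeated-measures
  # ANOVA: dat = response vector, group = within-subjects factor, ID = subject
  # factor; contrast1/contrast2 default to linear and quadratic polynomial
  # contrasts when group has more than two levels.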
m1=anova(lm(dat~ID + group))
within_ss <- sum(m1$`Sum Sq`[2:3])
eta_sq <- m1$`Sum Sq`[2]/within_ss
N <- m1$Df[1]+1
S_12 <- -0.5 * N * (log(m1$`Sum Sq`[3]) - log(within_ss))
k <- m1$Df[2]+1
nulfg <- 0
if (k > 2) {
if (is.null(contrast1)) {
conta <- contr.poly(k, scores = 1:k)
contrast1 <- conta[,1]
contrast2 <- conta[,2]
nulfg <- NULL
}}
gp_means <- tapply(dat, group, mean)
SS_1 <- N * sum(contrast1*gp_means)^2/(sum(contrast1^2))
SS_2 <- N * sum(contrast2*gp_means)^2/(sum(contrast2^2))
r_SS_1 <- within_ss - SS_1
r_SS_2 <- within_ss - SS_2
S_cont_12 <- -0.5*N*(log(r_SS_1) - log(r_SS_2))
S_cont1_means <- -0.5*N*(log(r_SS_1) - log(m1$`Sum Sq`[3]))
datf <- data.frame(dat,group)
mean_out <- aggregate(datf[1],datf[2],mean)
plot(mean_out)
Fval <- m1$`F value`[2]
dfv <- m1$Df
Pval <- m1$`Pr(>F)`[2]
Fval_c1 <- SS_1/m1$`Mean Sq`[3]
Pval_c1 <- pf(Fval_c1, 1, m1$Df[3], lower.tail = FALSE)
if(verb) cat("\nSupport for group means versus null = ", round(S_12,3), sep= "",
"\n Support for contrast 1 ", if (is.null(nulfg)) "(linear) ",
"versus group means model = ", round(S_cont1_means,3),
"\n Support for contrast 1 versus contrast 2 ", if (is.null(nulfg)) "(quadratic) ", "= ", round(S_cont_12,3),
"\n\nOverall analysis F(",dfv[2],",",dfv[3],") = ", round(Fval,3),
", p = ", round(Pval,5), ", partial eta-squared = ", round(eta_sq,3),
"\nContrast 1 F(1,",dfv[3],") = ", round(Fval_c1,3),
", p = ", round(Pval_c1,5), "\n ")
df1 <- c(1, m1$Df[3])
invisible(list(S.12 = S_12, S.1m = S_cont1_means, S.cont.12 = S_cont_12,
contrast1 = contrast1, contrast2 = contrast2,
gp.means = mean_out, df = m1$Df, F.val = Fval, P.val = Pval,
eta.sq = eta_sq, Fval.c1 = Fval_c1, df.1 = df1, P.val1 = Pval_c1))
} |
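# Hedged usage sketch for L_1way_RM_ANOVA(): simulated data for 10 subjects
# measured under 3 conditions (all object names below are illustrative).
set.seed(42)
ID <- factor(rep(1:10, each = 3))
group <- factor(rep(c("c1", "c2", "c3"), times = 10))
dat <- rnorm(30, mean = as.numeric(group))
res <- L_1way_RM_ANOVA(dat, group, ID, verb = FALSE)
res$S.12 |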
require(SkewHyperbolic)
options(digits=20)
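# Round-trip regression checks: qskewhyp(pskewhyp(x)) should recover x to
# within the integration (intTol) and root-finding (uniTol) tolerances, for
# several parameter sets param = c(mu, delta, beta, nu).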
param <- c(0,1,0,10)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
x <- rskewhyp(1, param = param)
x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param) - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, method = "integrate") - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - x
qskewhyp(pskewhyp(10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - 10
qskewhyp(pskewhyp(-10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") + 10
param <- c(0,1,10,20)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
x <- rskewhyp(1, param = param)
x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param) - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, method = "integrate") - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - x
qskewhyp(pskewhyp(10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - 10
qskewhyp(pskewhyp(-10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") + 10
param <- c(0,1,1,1)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
param <- c(0,1,-10,5)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
param <- c(0,1,5,5)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
x <- rskewhyp(1, param = param)
x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param) - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, method = "integrate") - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - x
qskewhyp(pskewhyp(10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - 10
qskewhyp(pskewhyp(-10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") + 10
param <- c(1,2,20,10)
q <- c(-Inf,-1,0,1,Inf)
pskewhyp(q, param = param)
pskewhyp(q, param = param, lower.tail = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE)
pskewhyp(q, param = param, valueOnly = FALSE, intTol = 10^(-12))
x <- rskewhyp(1, param = param)
x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param) - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, method = "integrate") - x
qskewhyp(pskewhyp(x, param = param),
param = param, uniTol = 10^(-10)) - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-8)),
param = param, uniTol = 10^(-8), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-10)),
param = param, uniTol = 10^(-10), method = "integrate") - x
qskewhyp(pskewhyp(x, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - x
qskewhyp(pskewhyp(10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") - 10
qskewhyp(pskewhyp(-10, param = param, intTol = 10^(-12)),
param = param, uniTol = 10^(-12), method = "integrate") + 10 |
partition_ranges <- function(df, start_var, end_var, fmt = "%Y-%m-%d", vars_to_keep = NULL, partition_by = "year") {
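  # Splits each [start_var, end_var] date range into one row per calendar year
  # or month (partition_by), clipping the first and last piece to the original
  # start/end dates; vars_to_keep names id columns carried along.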
partitioned <- copy(df)
  if (!inherits(partitioned, "data.table")) setDT(partitioned)
  # coerce start/end columns to Date if needed; inherits() stays valid even
  # when a column carries more than one class
  if (!inherits(partitioned[[start_var]], "Date") || !inherits(partitioned[[end_var]], "Date")) {
    for (j in c(start_var, end_var)) set(partitioned, j = j, value = as.Date(as.character(partitioned[[j]]), format = fmt))
  }
if (partition_by == "year") {
grpRnD <- c("rl", vars_to_keep)
partitioned <- partitioned[partitioned[, rep(.I, 1 +
year(get(end_var)) - year(get(start_var)))]][
, `:=`(rl, rleid(get(start_var)))][
, `:=`((start_var), pmax(get(start_var)[1], as.Date(paste0(year(get(start_var)[1]) + 0:(.N - 1), "-01-01")))), by = mget(grpRnD)][
, `:=`((end_var), pmin(get(end_var)[.N], as.Date(paste0(year(get(end_var)[.N]) - (.N - 1):0, "-12-31")))), by = mget(grpRnD)][
, `:=`("rl", NULL)]
}
else if (partition_by == "month") {
grpRnD <- c("nrow", vars_to_keep, start_var, end_var)
grp2ndlev <- c(vars_to_keep, start_var)
partitionedIn <- partitioned[(format(get(start_var),
"%Y-%m") == format(get(end_var), "%Y-%m")),
]
partitionedOut <- partitioned[!(format(get(start_var),
"%Y-%m") == format(get(end_var), "%Y-%m")),
]
partitionedOut <- partitionedOut[, st_seq := as.Date(paste0(format(get(start_var), "%Y-%m"), "-01"))][
, end_seq := {
end_seq <- as.POSIXlt(as.Date(paste0(format(get(end_var), "%Y-%m"), "-01")))
end_seq$mon <- end_seq$mon + 1
return(as.Date(as.character(as.POSIXct(end_seq))) - 1)
}
]
partitionedOut <- partitionedOut[, `:=`(nrow, 1:.N)][
, .(seqs = seq.Date(st_seq, end_seq, by = "month")), by = mget(grpRnD)][
, `:=`(seqs, c(get(start_var)[1], seqs[-1])), by = nrow][
, `:=`(seqsEnd, {
tmp <- as.POSIXlt(as.Date(paste0(format(seqs, "%Y-%m"), "-01")))
tmp$mon <- tmp$mon + 1
return(as.Date(as.character(as.POSIXct(tmp))) - 1)
})][, `:=`((start_var), seqs)][, lapply(.SD, function(x) pmin(x, seqsEnd)),
by = mget(grp2ndlev),
.SDcols = substitute(end_var)]
nms <- names(partitionedOut)
if (nrow(partitionedIn) > 0) {
partitionedIn <- partitionedIn[, names(partitionedIn) %in%
nms, with = FALSE]
partitioned <- rbind(partitionedIn, partitionedOut)
}
else {
partitioned <- partitionedOut
}
partitioned <- setorderv(partitioned, nms)
}
else {
stop("partition_by argument has to be either 'year' or 'month'.")
}
  if (!inherits(df, "data.table")) {
return(setDF(partitioned))
}
else {
return(partitioned)
}
} |
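# Hedged usage sketch for partition_ranges(): one range spanning three calendar
# years becomes three clipped rows (column names are illustrative).
library(data.table)
rng <- data.frame(id = "a",
                  start = as.Date("2019-11-20"),
                  end = as.Date("2021-02-15"))
partition_ranges(rng, start_var = "start", end_var = "end", vars_to_keep = "id") |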
NULL
setGeneric("getLineup", function(obj) standardGeneric("getLineup")) |
setGeneric(
name="query",
def=function(self, resource, ...){standardGeneric("query")}
) |
reconcile_v_fmt <- function(v_fmt){
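  # Collapses a vector of sprintf-style formats (e.g. "%8.2f") into a single
  # consensus format string: the most frequent leading characters, width,
  # justification and type, combined with the maximum number of digits.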
v_fmt_tbl <- tibble(v_fmt=v_fmt) %>%
mutate(leading_chars = {str_extract(v_fmt,'^.*(?=%)') %>%
replace_na('')},
width = {str_extract(v_fmt,'(?<!\\.)[0-9]+') %>%
replace_na('')},
digits = {str_extract(v_fmt,'(?<=\\.)[0-9]+') %>%
as.numeric() %>%
{ifelse(is.na(.),-1,.)}},
justification = {str_extract(v_fmt,'-') %>%
replace_na('')},
type = str_extract(v_fmt,'[a-z]')) %>%
summarize(leading_chars = {table(.data$leading_chars) %>%
sort(decreasing = TRUE) %>%
head(1) %>%
names()},
width = {table(.data$width) %>%
sort(decreasing = TRUE) %>%
head(1) %>%
names()},
justification = {table(.data$justification) %>%
sort(decreasing = TRUE) %>%
head(1) %>%
names()},
type = {table(.data$type) %>%
sort(decreasing = TRUE) %>%
head(1) %>%
names()},
digits = {max(.data$digits,na.rm=TRUE) %>%
{ifelse(. < 0,'',str_c('.',.))}}
) %>%
glue_data('{leading_chars}%{justification}{width}{digits}{type}')
return(v_fmt_tbl)
} |
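# Hedged usage sketch for reconcile_v_fmt() (assumes dplyr, tibble, stringr,
# tidyr and glue are attached):
library(dplyr); library(tibble); library(stringr); library(tidyr); library(glue)
reconcile_v_fmt(c("%8.2f", "%8.1f", "%-8.3f"))  # consensus should be "%8.3f" |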
ParLin_expectreg_hetero<-function(X,Y,Z,omega=0.3,kernel=gaussK,heteroscedastic=c("X", "Z", "Z and X") )
{
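  # Partially linear expectile regression under heteroscedasticity: columns of
  # Values are (X..., Y, Z); X enters linearly after plug-in detrending on Z,
  # Z enters nonparametrically, and 'heteroscedastic' selects which covariates
  # drive the variance part.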
Values=cbind(X,Y,Z)
Values=Values[order(Values[,ncol(Values)-1]),]
m_X_plug<-NULL
if(NCOL(X)==1)
{
for(i in 1:NCOL(X))
{
data<-data.frame(x=Values[,i])
data$z<-Values[,ncol(Values)]
m_X_plug=cbind(m_X_plug,locpol::locLinSmootherC(data$z, data$x, xeval=data$z, bw=locpol::pluginBw(data$z, data$x, deg=1,kernel=kernel),kernel=kernel)[,2])
}
data<-data.frame(y=Values[,ncol(Values)-1])
data$z<-Values[,ncol(Values)]
m_Y_plug=locpol::locLinSmootherC(data$z, data$y, xeval=data$z, bw=locpol::pluginBw(data$z, data$y, deg=1,kernel=kernel),kernel=kernel)[,2]
Xtilde_plug=Values[,1:(ncol(Values)-2)]-as.numeric(m_X_plug)
Ytilde_plug=Values[,ncol(Values)-1]-m_Y_plug
fmla <- as.formula(paste("Ytilde_plug ~Xtilde_plug "))
expect_linear_plug=expectreg::expectreg.ls(fmla,estimate="laws",smooth="schall",expectiles=omega)
delta<-data.frame(expect_linear_plug$intercepts,expect_linear_plug$coefficients)
dfnam <- paste("delta", 0:(ncol(Values)-2), sep = "")
colnames(delta) <- dfnam
if(heteroscedastic=="Z")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates<-expectreg_locpol(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,omega=omega,h=h_GenROT(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,kernel=kernel,omega),kernel=kernel,starting_value = "mean",grid=grid)[,1]
l1 = list(Linear=delta,Nonlinear=Estimates)
}
if(heteroscedastic=="X")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates_g<-expectreg_locpol(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,omega=omega,h=h_GenROT(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,kernel=kernel,omega),kernel=kernel,starting_value = "mean",grid=grid)[,1]
Y_g_omega=Y_last-Estimates_g
Estimates_g_omega<-expectreg_locpol(X=Values[,1],Y=as.numeric(Y_g_omega),omega=omega
,kernel=kernel,h=h_GenROT(X=Values[,1],Y=as.numeric(Y_g_omega),omega=omega,kernel=kernel),starting_value="mean",grid=cbind(Values[,1:(ncol(Values)-2)]))[,1]
l1 = list(Linear=delta,Nonlinear_g=Estimates_g,Nonlinear_g_omega=Estimates_g_omega)
}
if(heteroscedastic=="Z and X")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates<-expectreg_loclin_bivariate(Z1=Values[,ncol(Values)],Z2=Values[,1],Y=as.numeric(Y_last),omega=omega
,kernel=kernel,h=h_GenROT_bivariate(Z1=Values[,ncol(Values)],Z2=Values[,1],Y=as.numeric(Y_last),omega=omega,kernel=kernel),grid=cbind(Values[,ncol(Values)],Values[,1]))
l1 = list(Linear=delta,Nonlinear=Estimates)
}
} else {
# multivariate X: the same plug-in detrending as above, with additive rb()
# terms, one per column of X
for(i in 1:ncol(X))
{
data<-data.frame(x=Values[,i])
data$z<-Values[,ncol(Values)]
m_X_plug=cbind(m_X_plug,locpol::locLinSmootherC(data$z, data$x, xeval=data$z, bw=locpol::pluginBw(data$z, data$x, deg=1,kernel=kernel),kernel=kernel)[,2])
}
data<-data.frame(y=Values[,ncol(Values)-1])
data$z<-Values[,ncol(Values)]
m_Y_plug=locpol::locLinSmootherC(data$z, data$y, xeval=data$z, bw=locpol::pluginBw(data$z, data$y, deg=1,kernel=kernel),kernel=kernel)[,2]
Xtilde_plug=Values[,1:(ncol(Values)-2)]-m_X_plug
Ytilde_plug=Values[,ncol(Values)-1]-m_Y_plug
xnam <- paste("Xtilde_plug[,", 1:(ncol(Values)-2), sep = "")
xnam <- paste("rb(", xnam,"],type='parametric')")
fmla <- as.formula(paste("Ytilde_plug ~ ", paste(xnam, collapse = "+")))
expect_linear_plug=expectreg::expectreg.ls(fmla,estimate="laws",smooth="schall",expectiles=omega)
delta<-data.frame(expect_linear_plug$intercepts,expect_linear_plug$coefficients)
dfnam <- paste("delta", 0:(ncol(Values)-2), sep = "")
colnames(delta) <- dfnam
if(heteroscedastic=="Z")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates<-expectreg_locpol(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,omega=omega,h=h_GenROT(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,kernel=kernel,omega),kernel=kernel,starting_value = "mean",grid=grid)[,1]
l1 = list(Linear=delta,Nonlinear=Estimates)
}
if(heteroscedastic=="X")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates_g<-expectreg_locpol(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,omega=omega,h=h_GenROT(X=Values[,ncol(Values)],Y=as.numeric(Y_last),j=0,p=1,kernel=kernel,omega),kernel=kernel,starting_value = "mean",grid=grid)[,1]
Y_g_omega=Y_last-Estimates_g
Estimates_g_omega<-expectreg_loclin_bivariate(Z1=Values[,1],Z2=Values[,2],Y=as.numeric(Y_g_omega),omega=omega
,kernel=kernel,h=h_GenROT_bivariate(Z1=Values[,1],Z2=Values[,2],Y=as.numeric(Y_g_omega),omega=omega,kernel=kernel),grid=cbind(Values[,1:(ncol(Values)-2)]))
l1 = list(Linear=delta,Nonlinear_g=Estimates_g,Nonlinear_g_omega=Estimates_g_omega)
}
if(heteroscedastic=="Z and X")
{
grid=Values[,ncol(Values)]
Y_last=Values[,ncol(Values)-1]-(as.matrix(delta[,2:ncol(delta)]))%*%t(Values[,1:(ncol(Values)-2)])-delta[,1]
Estimates<-expectreg_loclin_trivariate(Z=Values[,ncol(Values)],X1=Values[,ncol(Values)-3],X2=Values[,ncol(Values)-2],Y=as.numeric(Y_last),omega=omega,h=1,kernel=kernel)
l1 = list(Linear=delta,Nonlinear=Estimates)
}
}
return(l1)
} |
ecospat.CCV.createDataSplitTable <- function(NbRunEval,
DataSplit,
validation.method,
NbSites,
sp.data=NULL,
minNbPresences=NULL,
minNbAbsences=NULL,
maxNbTry=1000){
stopifnot(DataSplit >= 50 & DataSplit <=100)
stopifnot(NbRunEval>=0)
stopifnot(validation.method %in% c("cross-validation", "split-sample"))
if(is.null(sp.data)){
stopifnot(NbSites>0)
}else{
stopifnot(!is.null(minNbPresences) & !is.null(minNbAbsences) & minNbPresences >= 0 & minNbAbsences >= 0 & maxNbTry>0 & maxNbTry < 1000000000)
stopifnot(is.data.frame(sp.data))
}
if(is.null(sp.data)){
DataSplitTable <- matrix(data=FALSE, nrow=NbSites, ncol=NbRunEval)
if(validation.method=="split-sample"){
for(i in 1:NbRunEval){
DataSplitTable[sample(1:NbSites,round(DataSplit/100*NbSites), replace=FALSE),i] <- TRUE
}
}
if(validation.method=="cross-validation"){
grouper <- sample(rep(1:NbRunEval,each=ceiling(NbSites/NbRunEval)),NbSites, replace = FALSE)
iner <- round(DataSplit*NbRunEval/100)
for(i in 1:NbRunEval){
DataSplitTable[which(grouper %in% ((i:(i+iner-1)%%NbRunEval)+1)),i] <- TRUE
}
}
return(DataSplitTable)
}
if(!is.null(sp.data)){
    create.SpRunMatrix <- function(sp.data,
                                   DataSplitTable){
      # presences per species (rows) in each run (columns)
      apply(DataSplitTable, 2, function(x){colSums(x*sp.data)})
    }
Nb.sp.dropped <- dim(sp.data)[2]
trys <- 1
while(trys <= maxNbTry & Nb.sp.dropped > 0){
DataSplitTable <- matrix(data=FALSE, nrow=dim(sp.data)[1], ncol=NbRunEval)
if(validation.method=="split-sample"){
for(i in 1:NbRunEval){
DataSplitTable[sample(1:dim(sp.data)[1],round(DataSplit/100*dim(sp.data)[1]), replace=FALSE),i] <- TRUE
}
}
if(validation.method=="cross-validation"){
grouper <- sample(rep(1:NbRunEval,each=ceiling(dim(sp.data)[1]/NbRunEval)),dim(sp.data)[1], replace = FALSE)
iner <- round(DataSplit*NbRunEval/100)
for(i in 1:NbRunEval){
DataSplitTable[which(grouper %in% ((i:(i+iner-1)%%NbRunEval)+1)),i] <- TRUE
}
}
SpRunMatrix <- create.SpRunMatrix(sp.data = sp.data, DataSplitTable = DataSplitTable)
sp.names.all <- rownames(SpRunMatrix)
sp.names.ok <- intersect(names(which(apply(SpRunMatrix,1,min) >= minNbPresences)), names(which(apply(min(colSums(DataSplitTable))-SpRunMatrix,1,min) >= minNbAbsences)))
sp.names.droped <- setdiff(sp.names.all, sp.names.ok)
if(length(sp.names.droped) < Nb.sp.dropped){
DataSplitTable.final <- DataSplitTable
Nb.sp.dropped <- length(sp.names.droped)
sp.names.droped.best <- sp.names.droped
}
trys <- trys+1
}
message(paste("The following species will not have the desired minimum number of presence/absence data in each run: ", paste(sp.names.droped.best, sep="", collapse=", "),"\n\n",sep=""))
return(DataSplitTable.final)
}
}
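# Hedged usage sketch for ecospat.CCV.createDataSplitTable(): a 10-fold
# cross-validation table for 100 sites, calibrating on ~90% of sites per run.
dst <- ecospat.CCV.createDataSplitTable(NbRunEval = 10,
                                        DataSplit = 90,
                                        validation.method = "cross-validation",
                                        NbSites = 100)
colSums(dst)  # roughly 90 calibration sites in each run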
ecospat.CCV.modeling <- function(sp.data,
env.data,
xy,
DataSplitTable=NULL,
DataSplit = 70,
NbRunEval = 25,
minNbPredictors =5,
validation.method = "cross-validation",
models.sdm = c("GLM","RF"),
models.esm = "CTA",
modeling.options.sdm = NULL,
modeling.options.esm = NULL,
ensemble.metric = "AUC",
ESM = "YES",
parallel = FALSE,
cpus = 4,
VarImport = 10,
modeling.id = as.character(format(Sys.time(), '%s'))){
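  # Community cross-validation: fits per-species ensemble models (standard
  # biomod2 models, ESMs for rarer species, or both, depending on 'ESM'),
  # collects evaluation scores, variable importances and per-run predictions,
  # and saves everything under 'modeling.id'.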
stopifnot(dim(sp.data)[1]==dim(xy)[1])
stopifnot(dim(env.data)[1]==dim(xy)[1] | data.class(env.data)=="RasterStack")
stopifnot(dim(DataSplitTable)[1]==dim(xy)[1] | is.null(DataSplitTable))
stopifnot(DataSplit >= 50 & DataSplit <=100)
stopifnot(NbRunEval>=0)
stopifnot(minNbPredictors>1)
stopifnot(validation.method %in% c("cross-validation", "split-sample"))
stopifnot(models.sdm %in% c('GLM','GBM','GAM','CTA','ANN','SRE','FDA','MARS','RF','MAXENT.Phillips','MAXENT.Tsuruoka'))
stopifnot(models.esm %in% c('GLM','GBM','GAM','CTA','ANN','SRE','FDA','MARS','RF','MAXENT.Phillips','MAXENT.Tsuruoka'))
stopifnot(ensemble.metric %in% c("AUC","TSS","KAPPA") & length(ensemble.metric)==1)
stopifnot(ESM %in% c("YES","NO","ALL"))
stopifnot(is.logical(parallel))
stopifnot(cpus>=1)
stopifnot(is.numeric(VarImport))
eval.metrics.sdm=c('KAPPA', 'TSS', 'ROC')
eval.metrics.esm=c('KAPPA', 'TSS', 'AUC')
eval.metrics.names= c('KAPPA', 'TSS', 'AUC')
ensemble.metric.esm <- ensemble.metric
if(ensemble.metric=="AUC"){
ensemble.metric.sdm <- "ROC"
}else{
ensemble.metric.sdm <- ensemble.metric
}
if(data.class(env.data)=="RasterStack"){
NbPredictors <- dim(env.data)[3]
NamesPredictors <- names(env.data)
}else{
NbPredictors <- dim(env.data)[2]
NamesPredictors <- colnames(env.data)
}
if(length(models.esm)==1){
ef.counter <- 1
}else{
ef.counter <- length(models.esm)+1
}
colnames(sp.data) <- gsub("_",".", colnames(sp.data))
dir.create(modeling.id)
oldwd <- getwd()
on.exit(setwd(oldwd))
setwd(modeling.id)
  create.SpRunMatrix <- function(sp.data,
                                 DataSplitTable){
    # presences per species (rows) in each calibration run (columns)
    apply(DataSplitTable, 2, function(x){colSums(x*sp.data)})
  }
BiomodSF <- function(sp.name,
DataSplitTable,
sp.data,
env.data,
xy,
models,
models.options,
eval.metrics,
ensemble.metric,
VarImport){
MyBiomodData <- BIOMOD_FormatingData(resp.var = as.numeric(sp.data[,sp.name]),
expl.var = env.data,
resp.xy = xy,
resp.name = sp.name,
na.rm=FALSE)
if(is.null(models.options)){
MyBiomodOptions <- BIOMOD_ModelingOptions()
}else{
MyBiomodOptions <- BIOMOD_ModelingOptions(models.options)
}
MyBiomodModelOut <- BIOMOD_Modeling(data = MyBiomodData,
models = models,
models.options = MyBiomodOptions,
models.eval.meth = eval.metrics,
DataSplitTable = DataSplitTable,
Prevalence=NULL,
modeling.id = "ccv")
MyBiomodEnsemble <- BIOMOD_EnsembleModeling(modeling.output = MyBiomodModelOut,
chosen.models = "all",
em.by = "PA_dataset+repet",
eval.metric = ensemble.metric,
eval.metric.quality.threshold = NULL,
models.eval.meth =eval.metrics,
prob.mean = FALSE,
prob.cv = FALSE,
prob.ci = FALSE,
prob.ci.alpha = 0.05,
prob.median = FALSE,
committee.averaging = FALSE,
prob.mean.weight = TRUE,
prob.mean.weight.decay = 'proportional',
VarImport = VarImport)
}
ESMSF <- function(sp.name,
DataSplitTable,
sp.data,
env.data,
xy,
models,
models.options,
ensemble.metric){
MyESMData <- BIOMOD_FormatingData(resp.var = as.numeric(sp.data[,sp.name]),
expl.var = env.data,
resp.xy = xy,
resp.name = sp.name,
na.rm = FALSE)
if(is.null(models.options)){
MyBiomodOptions <- BIOMOD_ModelingOptions()
}else{
MyBiomodOptions <- BIOMOD_ModelingOptions(models.options)
}
MyESMModelOut <- ecospat.ESM.Modeling(data=MyESMData,
DataSplitTable = DataSplitTable,
weighting.score = ensemble.metric,
models=models,
Prevalence=NULL,
modeling.id="ccv",
models.options=MyBiomodOptions,
parallel=FALSE)
MyESMEnsemble <- ecospat.ESM.EnsembleModeling(ESM.modeling.output = MyESMModelOut,
weighting.score = ensemble.metric,
models=models)
}
get.ESMvariableContribution <- function(output_EF, output, NamesPredictors){
Variable.Contribution <- rep(NA, times=length(NamesPredictors))
names(Variable.Contribution)<- NamesPredictors
for(v in NamesPredictors){
cb1<-rep(combn(NamesPredictors,2)[1,],each=length(output$models))
cb2<-rep(combn(NamesPredictors,2)[2,],each=length(output$models))
pos<-c(which(cb1==v),which(cb2==v))
Variable.Contribution[which(NamesPredictors==v)]<-mean(output_EF$weights[pos])-mean(output_EF$weights)
}
Variable.Contribution[which(is.na(Variable.Contribution))]<-0
return((Variable.Contribution-min(Variable.Contribution))/(max(Variable.Contribution)-min(Variable.Contribution)))
}
if(is.null(DataSplitTable)){
DataSplitTable <- ecospat.CCV.createDataSplitTable(NbSites = dim(xy)[1], NbRunEval = NbRunEval, DataSplit = DataSplit, validation.method = validation.method)
}else{
NbRunEval <- dim(DataSplitTable)[2]
}
SpRunMatrix <- create.SpRunMatrix(sp.data = sp.data, DataSplitTable = DataSplitTable)
if(ESM=="NO"){
sp.names.all <- rownames(SpRunMatrix)
sp.names.ok <- intersect(names(which(apply(SpRunMatrix,1,min) >= minNbPredictors*NbPredictors)), names(which(apply(min(colSums(DataSplitTable))-SpRunMatrix,1,min) >= minNbPredictors*NbPredictors)))
sp.names.droped <- setdiff(sp.names.all, sp.names.ok)
message(paste("The following species will not be modelled due to limited presence data: ", paste(sp.names.droped, sep="", collapse=", "),"\n\n",sep=""))
speciesData.calibration <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
speciesData.evaluation <- singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
for(i in 1:NbRunEval){
speciesData.calibration[,1:sum(DataSplitTable[,i]),i] <- t(sp.data[which(DataSplitTable[,i]), sp.names.ok])
speciesData.evaluation[,1:sum(!DataSplitTable[,i]),i] <- t(sp.data[which(!DataSplitTable[,i]), sp.names.ok])
}
singleSpecies.ensembleEvaluationScore <- array(data=NA, dim=c(length(eval.metrics.names), length(sp.names.ok), NbRunEval), dimnames = list(eval.metrics.names,sp.names.ok,1:NbRunEval))
singleSpecies.ensembleVariableImportance <- array(data=NA, dim=c(NbPredictors, length(sp.names.ok), NbRunEval), dimnames = list(NamesPredictors,sp.names.ok,1:NbRunEval))
singleSpecies.calibrationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
if(parallel){
sfInit(parallel=TRUE, cpus=cpus)
sfLibrary('biomod2', character.only=TRUE)
sfLapply(sp.names.ok,
BiomodSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.sdm,
models.options=modeling.options.sdm,
eval.metrics=eval.metrics.sdm,
ensemble.metric=ensemble.metric.sdm,
VarImport = VarImport)
sfStop( nostop=FALSE )
}else{
lapply(sp.names.ok,
BiomodSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.sdm,
models.options=modeling.options.sdm,
eval.metrics=eval.metrics.sdm,
ensemble.metric=ensemble.metric.sdm,
VarImport = VarImport)
}
for(i in sp.names.ok){
load(paste(i,"/",i,".ccvensemble.models.out", sep=""))
temp.evaluations <- get_evaluations(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
for(l in 1:length(temp.evaluations)){
singleSpecies.ensembleEvaluationScore[,i,l] <- temp.evaluations[[l]][,1]
}
temp.variableimprtance <- get_variables_importance(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
singleSpecies.ensembleVariableImportance[,i,] <- round(apply(temp.variableimprtance,c(1,3), mean, na.rm = TRUE),2)
temp.predictions <- get_predictions(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
for(l in 1:dim(temp.predictions)[2]){
singleSpecies.calibrationSites.ensemblePredictions[i,1:sum(DataSplitTable[,l]),l] <- temp.predictions[which(DataSplitTable[,l]),l]
singleSpecies.evaluationSites.ensemblePredictions[i,1:sum(!DataSplitTable[,l]),l] <- temp.predictions[which(!DataSplitTable[,l]),l]
}
}
}
if(ESM=="ALL"){
sp.names.all <- rownames(SpRunMatrix)
sp.names.ok <- intersect(names(which(apply(SpRunMatrix,1,min) >= minNbPredictors*2)), names(which(apply(min(colSums(DataSplitTable))-SpRunMatrix,1,min) >= minNbPredictors*2)))
sp.names.droped <- setdiff(sp.names.all, sp.names.ok)
message(paste("The following species will not be modelled due to limited presence data: ", paste(sp.names.droped, sep="", collapse=", "),"\n\n",sep=""))
speciesData.calibration <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
speciesData.evaluation <- singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
for(i in 1:NbRunEval){
speciesData.calibration[,1:sum(DataSplitTable[,i]),i] <- t(sp.data[which(DataSplitTable[,i]), sp.names.ok])
speciesData.evaluation[,1:sum(!DataSplitTable[,i]),i] <- t(sp.data[which(!DataSplitTable[,i]), sp.names.ok])
}
singleSpecies.ensembleEvaluationScore <- array(data=NA, dim=c(length(eval.metrics.names), length(sp.names.ok), NbRunEval), dimnames = list(eval.metrics.names,sp.names.ok,1:NbRunEval))
singleSpecies.ensembleVariableImportance <- array(data=NA, dim=c(NbPredictors, length(sp.names.ok), NbRunEval), dimnames = list(NamesPredictors,sp.names.ok,1:NbRunEval))
singleSpecies.calibrationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
if(parallel){
sfInit(parallel=TRUE, cpus=cpus)
sfLibrary('biomod2', character.only=TRUE)
sfLibrary('ecospat', character.only=TRUE)
sfLibrary('gtools', character.only=TRUE)
sfLapply(sp.names.ok,
ESMSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.esm,
models.options=modeling.options.esm,
ensemble.metric=ensemble.metric.esm)
sfStop( nostop=FALSE)
}else{
lapply(sp.names.ok,
ESMSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.esm,
models.options=modeling.options.esm,
ensemble.metric=ensemble.metric.esm)
}
for(i in sp.names.ok){
load(list.files(path=paste("ESM.BIOMOD.output_",i,sep=""), pattern="ESM_EnsembleModeling", full.names = TRUE))
output_EF <- eval(parse(text="output"))
load(list.files(path=paste("ESM.BIOMOD.output_",i,sep=""), pattern="ESM_Modeling", full.names = TRUE))
singleSpecies.ensembleEvaluationScore[,i,] <- t(output_EF$ESM.evaluations[seq(ef.counter,dim(output_EF$ESM.evaluations)[1], ef.counter),c(5,11,6)])
singleSpecies.ensembleVariableImportance[,i,] <- round(get.ESMvariableContribution(output_EF = output_EF, output = eval(parse(text="output")), NamesPredictors = NamesPredictors),2)
for(l in 1:NbRunEval){
singleSpecies.calibrationSites.ensemblePredictions[i,1:sum(DataSplitTable[,l]),l] <- output_EF$ESM.fit[which(DataSplitTable[,l]),ef.counter*l+1]
singleSpecies.evaluationSites.ensemblePredictions[i,1:sum(!DataSplitTable[,l]),l] <- output_EF$ESM.fit[which(!DataSplitTable[,l]),ef.counter*l+1]
}
}
}
if(ESM=="YES"){
sp.names.all <- rownames(SpRunMatrix)
sp.names.bm.ok <- intersect(names(which(apply(SpRunMatrix,1, min) >= minNbPredictors*NbPredictors)), names(which(apply(min(colSums(DataSplitTable))-SpRunMatrix,1,min) >= minNbPredictors*NbPredictors)))
message(paste("The following species will be run with standard biomod2 models: ", paste(sp.names.bm.ok, sep="", collapse=", "),"\n\n",sep=""))
sp.names.bm.droped <- setdiff(sp.names.all, sp.names.bm.ok)
if(length(sp.names.bm.droped)>1){
sp.names.esm.ok <- intersect(names(which(apply(SpRunMatrix[sp.names.bm.droped,],1,min) >= minNbPredictors*2)), names(which(apply(min(colSums(DataSplitTable))-SpRunMatrix[sp.names.bm.droped,],1,min) >= minNbPredictors*2)))
message(paste("The following species will be run with ESM models: ", paste(sp.names.esm.ok, sep="", collapse=", "),"\n\n",sep=""))
sp.names.droped <- setdiff(sp.names.all, c(sp.names.bm.ok, sp.names.esm.ok))
message(paste("The following species will not be modelled due to limited presence data: ", paste(sp.names.droped, sep="", collapse=", "),"\n\n",sep=""))
sp.names.ok <- sort(c(sp.names.bm.ok, sp.names.esm.ok))
}else{
    if(length(sp.names.bm.droped)==1){
      if(min(SpRunMatrix[sp.names.bm.droped,]) >= minNbPredictors*2 & min(colSums(DataSplitTable))-min(SpRunMatrix[sp.names.bm.droped,])>= minNbPredictors*2){
        sp.names.esm.ok <- sp.names.bm.droped
      }else{
        sp.names.esm.ok <- NULL
      }
      message(paste("The following species will be run with ESM models: ", paste(sp.names.esm.ok, sep="", collapse=", "),"\n\n",sep=""))
      message(paste("The following species will not be modelled due to limited presence data:","\n\n", sep=""))
      sp.names.ok <- sort(c(sp.names.bm.ok, sp.names.esm.ok))
}else{
sp.names.esm.ok <- NULL
message(paste("The following species will be run with ESM models:","\n\n", sep=""))
message(paste("The following species will not be modelled due to limited presence data:","\n\n", sep=""))
sp.names.ok <- sort(sp.names.bm.ok)
}
}
speciesData.calibration <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
speciesData.evaluation <- singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
for(i in 1:NbRunEval){
speciesData.calibration[,1:sum(DataSplitTable[,i]),i] <- t(sp.data[which(DataSplitTable[,i]), sp.names.ok])
speciesData.evaluation[,1:sum(!DataSplitTable[,i]),i] <- t(sp.data[which(!DataSplitTable[,i]), sp.names.ok])
}
singleSpecies.ensembleEvaluationScore <- array(data=NA, dim=c(length(eval.metrics.names), length(sp.names.ok), NbRunEval), dimnames = list(eval.metrics.names,sp.names.ok,1:NbRunEval))
singleSpecies.ensembleVariableImportance <- array(data=NA, dim=c(NbPredictors, length(sp.names.ok), NbRunEval), dimnames = list(NamesPredictors,sp.names.ok,1:NbRunEval))
singleSpecies.calibrationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("c",1:max(colSums(DataSplitTable)),sep="_"), 1:NbRunEval))
singleSpecies.evaluationSites.ensemblePredictions <- array(data=NA, dim=c(length(sp.names.ok), max(colSums(!DataSplitTable)), NbRunEval), dimnames=list(sp.names.ok, paste("e",1:max(colSums(!DataSplitTable)),sep="_"), 1:NbRunEval))
if(parallel){
sfInit(parallel=TRUE, cpus=cpus)
sfLibrary('biomod2', character.only=TRUE)
sfLibrary('ecospat', character.only=TRUE)
sfLibrary('gtools', character.only=TRUE)
sfLapply(sp.names.bm.ok,
BiomodSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy, models=models.sdm,
models.options=modeling.options.sdm,
eval.metrics=eval.metrics.sdm,
ensemble.metric=ensemble.metric.sdm,
VarImport = VarImport)
sfLapply(sp.names.esm.ok,
ESMSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.esm,
models.options=modeling.options.esm,
ensemble.metric=ensemble.metric.esm)
sfStop( nostop=FALSE )
}else{
lapply(sp.names.bm.ok,
BiomodSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy, models=models.sdm,
models.options=modeling.options.sdm,
eval.metrics=eval.metrics.sdm,
ensemble.metric=ensemble.metric.sdm,
VarImport = VarImport)
lapply(sp.names.esm.ok,
ESMSF,
DataSplitTable=DataSplitTable,
sp.data=sp.data,
env.data=env.data,
xy=xy,
models=models.esm,
models.options=modeling.options.esm,
ensemble.metric=ensemble.metric.esm)
}
for(i in sp.names.bm.ok){
load(paste(i,"/",i,".ccvensemble.models.out", sep=""))
temp.evaluations <- get_evaluations(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
for(l in 1:length(temp.evaluations)){
singleSpecies.ensembleEvaluationScore[,i,l] <- temp.evaluations[[l]][,1]
}
temp.variableimprtance <- get_variables_importance(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
singleSpecies.ensembleVariableImportance[,i,] <- round(apply(temp.variableimprtance,c(1,3), mean, na.rm = TRUE),2)
temp.predictions <- get_predictions(eval(parse(text=paste(i,".ccvensemble.models.out",sep=""))))
for(l in 1:dim(temp.predictions)[2]){
singleSpecies.calibrationSites.ensemblePredictions[i,1:sum(DataSplitTable[,l]),l] <- temp.predictions[which(DataSplitTable[,l]),l]
singleSpecies.evaluationSites.ensemblePredictions[i,1:sum(!DataSplitTable[,l]),l] <- temp.predictions[which(!DataSplitTable[,l]),l]
}
}
if(!is.null(sp.names.esm.ok)){
for(i in sp.names.esm.ok){
load(list.files(path=paste("ESM.BIOMOD.output_",i,sep=""), pattern="ESM_EnsembleModeling", full.names = TRUE))
output_EF <- eval(parse(text="output"))
load(list.files(path=paste("ESM.BIOMOD.output_",i,sep=""), pattern="ESM_Modeling", full.names = TRUE))
singleSpecies.ensembleEvaluationScore[,i,] <- t(output_EF$ESM.evaluations[seq(ef.counter,dim(output_EF$ESM.evaluations)[1], ef.counter),c(5,11,6)])
singleSpecies.ensembleVariableImportance[,i,] <- round(get.ESMvariableContribution(output_EF = output_EF, output = eval(parse(text="output")), NamesPredictors = NamesPredictors),2)
for(l in 1:NbRunEval){
singleSpecies.calibrationSites.ensemblePredictions[i,1:sum(DataSplitTable[,l]),l] <- output_EF$ESM.fit[which(DataSplitTable[,l]),ef.counter*l+1]
singleSpecies.evaluationSites.ensemblePredictions[i,1:sum(!DataSplitTable[,l]),l] <- output_EF$ESM.fit[which(!DataSplitTable[,l]),ef.counter*l+1]
}
}
}
}
all.predictions.caliSites <- array(data=NA,
dim=c(dim(sp.data),dim(DataSplitTable)[2]),
dimnames=list(unlist(dimnames(sp.data)[1]),
unlist(dimnames(sp.data)[2]),
1:dim(DataSplitTable)[2]))
all.predictions.evalSites <- array(data=NA,
dim=c(dim(sp.data),dim(DataSplitTable)[2]),
dimnames=list(unlist(dimnames(sp.data)[1]),
unlist(dimnames(sp.data)[2]),
1:dim(DataSplitTable)[2]))
for(i in 1:dim(DataSplitTable)[2]){
all.predictions.caliSites[DataSplitTable[,i],,i] <- t(singleSpecies.calibrationSites.ensemblePredictions[,,i])[1:dim(all.predictions.caliSites[DataSplitTable[,i],,i])[1]]
all.predictions.evalSites[!DataSplitTable[,i],,i] <- t(singleSpecies.evaluationSites.ensemblePredictions[,,i])[1:dim(all.predictions.evalSites[!DataSplitTable[,i],,i])[1]]
}
allSites.averagePredictions.cali <- apply(all.predictions.caliSites, 1:2, mean, na.rm = TRUE)
allSites.averagePredictions.eval <- apply(all.predictions.evalSites, 1:2, mean, na.rm = TRUE)
save(singleSpecies.ensembleEvaluationScore, file="singleSpecies.ensembleEvaluationScore.RData")
save(singleSpecies.calibrationSites.ensemblePredictions, file="singleSpecies.calibrationSites.ensemblePredictions.RData")
save(singleSpecies.evaluationSites.ensemblePredictions, file="singleSpecies.evaluationSites.ensemblePredictions.RData")
save(singleSpecies.ensembleVariableImportance, file="singleSpecies.ensembleVariableImportance.RData")
save(DataSplitTable, file="DataSplitTable.RData")
save(speciesData.calibration, file="speciesData.calibration.RData")
save(speciesData.evaluation, file="speciesData.evaluation.RData")
save(allSites.averagePredictions.cali, file="allSites.averagePredictions.cali.RData")
save(allSites.averagePredictions.eval, file="allSites.averagePredictions.eval.RData")
ccv.modeling.data <- list(modeling.id = modeling.id,
output.files = c("singleSpecies.ensembleEvaluationScore.RData",
"singleSpecies.calibrationSites.ensemblePredictions.RData",
"singleSpecies.evaluationSites.ensemblePredictions.RData",
"singleSpecies.ensembleVariableImportance.RData",
"DataSplitTable.RData",
"speciesData.calibration.RData",
"speciesData.evaluation.RData",
"allSites.averagePredictions.cali.RData",
"allSites.averagePredictions.eval.RData"),
speciesData.calibration = speciesData.calibration,
speciesData.evaluation = speciesData.evaluation,
speciesData.full = sp.data,
DataSplitTable = DataSplitTable,
singleSpecies.ensembleEvaluationScore = singleSpecies.ensembleEvaluationScore,
singleSpecies.ensembleVariableImportance = singleSpecies.ensembleVariableImportance,
singleSpecies.calibrationSites.ensemblePredictions=singleSpecies.calibrationSites.ensemblePredictions,
singleSpecies.evaluationSites.ensemblePredictions=singleSpecies.evaluationSites.ensemblePredictions,
allSites.averagePredictions.cali=allSites.averagePredictions.cali,
allSites.averagePredictions.eval=allSites.averagePredictions.eval)
save(ccv.modeling.data, file=paste("../",modeling.id,".ccv.modeling.RData", sep=""))
setwd("../")
return(ccv.modeling.data)
}
ecospat.CCV.communityEvaluation.bin <- function(ccv.modeling.data,
thresholds= c("MAX.KAPPA", "MAX.ROC","PS_SDM"),
community.metrics=c("SR.deviation","Sorensen"),
parallel=FALSE,
cpus=4,
fix.threshold=0.5,
MCE=5,
MEM=NULL){
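  # Binarises the CCV ensemble predictions with each requested thresholding
  # rule and evaluates the resulting communities (SR deviation, Sorensen, ...)
  # on the calibration and evaluation sites of every run.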
stopifnot(names(ccv.modeling.data)==c("modeling.id",
"output.files",
"speciesData.calibration",
"speciesData.evaluation",
"speciesData.full",
"DataSplitTable",
"singleSpecies.ensembleEvaluationScore",
"singleSpecies.ensembleVariableImportance",
"singleSpecies.calibrationSites.ensemblePredictions",
"singleSpecies.evaluationSites.ensemblePredictions",
"allSites.averagePredictions.cali",
"allSites.averagePredictions.eval"))
possible.thresholds <- c("FIXED",
"MAX.KAPPA",
"MAX.ACCURACY",
"MAX.TSS",
"SENS_SPEC",
"MAX.ROC",
"OBS.PREVALENCE",
"AVG.PROBABILITY",
"MCE",
"PS_SDM",
"MEM")
stopifnot(thresholds %in% possible.thresholds)
stopifnot(community.metrics %in% c("SR.deviation",
"community.overprediction",
"community.underprediction",
"community.accuracy",
"community.sensitivity",
"community.specificity",
"community.kappa",
"community.tss",
"Sorensen",
"Jaccard",
"Simpson"))
stopifnot(is.logical(parallel))
stopifnot(cpus>=1)
stopifnot(!("FIXED" %in% thresholds & (fix.threshold<=0 | fix.threshold>=1)))
stopifnot(!("MCE" %in% thresholds & (MCE<=0 | MCE>=100)))
stopifnot(!("MEM" %in% thresholds & length(MEM)!=dim(ccv.modeling.data$speciesData.full)[1]))
community.metrics.calculation <- function(errors, potential.community.metrics){
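    # 'errors' encodes each species x site cell as 2*predicted + observed:
    # 3 = true presence (a), 2 = overprediction (b), 1 = underprediction (c),
    # 0 = true absence (d); the metrics fill temp.matrix in the order given by
    # potential.community.metrics.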
temp.matrix <- matrix(data=NA, nrow=1, ncol=length(potential.community.metrics))
a <- length(which(errors == 3))
b <- length(which(errors == 2))
c <- length(which(errors == 1))
d <- length(which(errors == 0))
n <- a+b+c+d
temp.matrix[1] <- b-c
if(b==0 & d==0){
temp.matrix[2] <- 0
}else{
temp.matrix[2] <- round(b/(b + d), digits=3)
}
if(a==0 & c==0){
temp.matrix[3] <- 0
}else{
temp.matrix[3] <- round(c/(a + c), digits=3)
}
if(n==0){
temp.matrix[4] <- 1
}else{
temp.matrix[4] <- round((a + d)/n, digits=3)
}
if(a==0 & c==0){
temp.matrix[5] <- 1
}else{
temp.matrix[5] <- round(a/(a + c), digits=3)
}
if(b==0 & d==0){
temp.matrix[6] <- 1
}else{
temp.matrix[6] <- round(d/(b + d), digits=3)
}
if(n==0){
temp.matrix[7] <- 1
}else{
temp.matrix[7] <- round((((a + d)/n) - (((a + c) * (a + b) + (b + d) * (d + c))/(n^2)))/(1 - (((a + c) * (a + b) + (b + d) * (d + c))/(n^2))), digits=3)
}
temp.matrix[8] <- round(temp.matrix[5] + temp.matrix[6] - 1, digits=3)
if(a==0 & b==0 & c==0){
temp.matrix[9] <- 1
}else{
temp.matrix[9] <- round((2 * a)/(2 * a + b + c), digits=3)
}
if(a==0 & b==0 & c==0){
temp.matrix[10] <- 1
}else{
temp.matrix[10] <- round(a/(a+b+c), digits=3)
}
if((a==0 & b==0) | (a==0 & c==0)){
temp.matrix[11] <- 1
}else{
temp.matrix[11] <- round(a/min(c(a+b,a+c)), digits=3)
}
return(temp.matrix)
}
community.compairison <- function(sp.data.cali, sp.data.eval, PA.cali, PA.eval, community.metrics, save.dir, run){
potential.community.metrics <- c("SR.deviation",
"community.overprediction",
"community.underprediction",
"community.accuracy",
"community.sensitivity",
"community.specificity",
"community.kappa",
"community.tss",
"Sorensen",
"Jaccard",
"Simpson")
community.metrics.cali <- array(data=NA,
dim=c(dim(sp.data.cali)[1],length(potential.community.metrics)),
dimnames=list(unlist(dimnames(sp.data.cali)[1]),potential.community.metrics))
community.metrics.eval <- array(data=NA,
dim=c(dim(sp.data.eval)[1],length(potential.community.metrics)),
dimnames=list(unlist(dimnames(sp.data.eval)[1]),potential.community.metrics))
error.matrix.cali <- 2 * PA.cali + sp.data.cali
error.matrix.eval <- 2 * PA.eval + sp.data.eval
community.metrics.cali[,] <- t(apply(error.matrix.cali, 1, community.metrics.calculation, potential.community.metrics=potential.community.metrics))
community.metrics.cali.selected <- community.metrics.cali[,community.metrics]
community.metrics.eval[,] <- t(apply(error.matrix.eval, 1, community.metrics.calculation, potential.community.metrics=potential.community.metrics))
community.metrics.eval.selected <- community.metrics.eval[,community.metrics]
save(community.metrics.cali.selected, file=paste(save.dir,"community.metrics.cali_",run,".RData", sep=""))
save(community.metrics.eval.selected, file=paste(save.dir,"community.metrics.eval_",run,".RData", sep=""))
}
community.thresholding <- function(run, ccv.modeling.data, thresholds, community.metrics, fix.threshold, MCE, MEM){
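    # Each block below binarises the predictions (stored as integers in
    # 0-1000, hence the /1000) with one rule; thresholds are optimised on the
    # calibration sites via optimal.thresholds() (PresenceAbsence opt.methods
    # codes, e.g. 4 = MaxKappa, 3 = MaxSens+Spec) and applied to both the
    # calibration and the evaluation sites.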
if("FIXED" %in% thresholds){
PA.FIXED.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.FIXED.cali[PA.FIXED.cali >= fix.threshold] <- 1
PA.FIXED.cali[PA.FIXED.cali < fix.threshold] <- 0
PA.FIXED.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
PA.FIXED.eval[PA.FIXED.eval >= fix.threshold] <- 1
PA.FIXED.eval[PA.FIXED.eval < fix.threshold] <- 0
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/FIXED", sep=""), recursive=TRUE)
save(PA.FIXED.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/FIXED/PA.FIXED.cali_",run,".RData", sep=""))
save(PA.FIXED.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/FIXED/PA.FIXED.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.FIXED.cali,
PA.eval = PA.FIXED.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/FIXED/FIXED_", sep=""),
run=run)
}
if("MAX.KAPPA" %in% thresholds){
PA.MAX.KAPPA.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MAX.KAPPA.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.MAX.KAPPA.cali)[2]){
if(sum(!is.na(PA.MAX.KAPPA.cali[,s]))==0){
PA.MAX.KAPPA.cali[,s] <- NA
PA.MAX.KAPPA.eval[,s] <- NA
}else{
MAX.KAPPA.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.MAX.KAPPA.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=4)[,2]
PA.MAX.KAPPA.cali[PA.MAX.KAPPA.cali[,s] >= MAX.KAPPA.threshold,s] <- 1
PA.MAX.KAPPA.cali[PA.MAX.KAPPA.cali[,s] <= MAX.KAPPA.threshold,s] <- 0
PA.MAX.KAPPA.eval[PA.MAX.KAPPA.eval[,s] >= MAX.KAPPA.threshold,s] <- 1
PA.MAX.KAPPA.eval[PA.MAX.KAPPA.eval[,s] <= MAX.KAPPA.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.KAPPA", sep=""), recursive=TRUE)
save(PA.MAX.KAPPA.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.KAPPA/PA.MAX.KAPPA.cali_",run,".RData", sep=""))
save(PA.MAX.KAPPA.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.KAPPA/PA.MAX.KAPPA.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MAX.KAPPA.cali,
PA.eval = PA.MAX.KAPPA.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.KAPPA/MAX.KAPPA_", sep=""),
run=run)
}
if("MAX.ACCURACY" %in% thresholds){
PA.MAX.ACCURACY.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MAX.ACCURACY.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.MAX.ACCURACY.cali)[2]){
if(sum(!is.na(PA.MAX.ACCURACY.cali[,s]))==0){
PA.MAX.ACCURACY.cali[,s] <- NA
PA.MAX.ACCURACY.eval[,s] <- NA
}else{
MAX.ACCURACY.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.MAX.ACCURACY.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=5)[,2]
PA.MAX.ACCURACY.cali[PA.MAX.ACCURACY.cali[,s] >= MAX.ACCURACY.threshold,s] <- 1
PA.MAX.ACCURACY.cali[PA.MAX.ACCURACY.cali[,s] <= MAX.ACCURACY.threshold,s] <- 0
PA.MAX.ACCURACY.eval[PA.MAX.ACCURACY.eval[,s] >= MAX.ACCURACY.threshold,s] <- 1
PA.MAX.ACCURACY.eval[PA.MAX.ACCURACY.eval[,s] <= MAX.ACCURACY.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ACCURACY", sep=""), recursive=TRUE)
save(PA.MAX.ACCURACY.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ACCURACY/PA.MAX.ACCURACY.cali_",run,".RData", sep=""))
save(PA.MAX.ACCURACY.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ACCURACY/PA.MAX.ACCURACY.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MAX.ACCURACY.cali,
PA.eval = PA.MAX.ACCURACY.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ACCURACY/MAX.ACCURACY_", sep=""),
run=run)
}
if("MAX.TSS" %in% thresholds){
PA.MAX.TSS.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MAX.TSS.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.MAX.TSS.cali)[2]){
if(sum(!is.na(PA.MAX.TSS.cali[,s]))==0){
PA.MAX.TSS.cali[,s] <- NA
PA.MAX.TSS.eval[,s] <- NA
}else{
MAX.TSS.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.MAX.TSS.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=3)[,2]
PA.MAX.TSS.cali[PA.MAX.TSS.cali[,s] >= MAX.TSS.threshold,s] <- 1
PA.MAX.TSS.cali[PA.MAX.TSS.cali[,s] <= MAX.TSS.threshold,s] <- 0
PA.MAX.TSS.eval[PA.MAX.TSS.eval[,s] >= MAX.TSS.threshold,s] <- 1
PA.MAX.TSS.eval[PA.MAX.TSS.eval[,s] <= MAX.TSS.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.TSS", sep=""), recursive=TRUE)
save(PA.MAX.TSS.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.TSS/PA.MAX.TSS.cali_",run,".RData", sep=""))
save(PA.MAX.TSS.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.TSS/PA.MAX.TSS.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MAX.TSS.cali,
PA.eval = PA.MAX.TSS.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.TSS/MAX.TSS_", sep=""),
run=run)
}
if("SENS_SPEC" %in% thresholds){
PA.SENS_SPEC.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.SENS_SPEC.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.SENS_SPEC.cali)[2]){
if(sum(!is.na(PA.SENS_SPEC.cali[,s]))==0){
PA.SENS_SPEC.cali[,s] <- NA
PA.SENS_SPEC.eval[,s] <- NA
}else{
SENS_SPEC.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.SENS_SPEC.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=2)[,2]
PA.SENS_SPEC.cali[PA.SENS_SPEC.cali[,s] >= SENS_SPEC.threshold,s] <- 1
PA.SENS_SPEC.cali[PA.SENS_SPEC.cali[,s] <= SENS_SPEC.threshold,s] <- 0
PA.SENS_SPEC.eval[PA.SENS_SPEC.eval[,s] >= SENS_SPEC.threshold,s] <- 1
PA.SENS_SPEC.eval[PA.SENS_SPEC.eval[,s] <= SENS_SPEC.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/SENS_SPEC", sep=""), recursive=TRUE)
save(PA.SENS_SPEC.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/SENS_SPEC/PA.SENS_SPEC.cali_",run,".RData", sep=""))
save(PA.SENS_SPEC.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/SENS_SPEC/PA.SENS_SPEC.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.SENS_SPEC.cali,
PA.eval = PA.SENS_SPEC.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/SENS_SPEC/SENS_SPEC_", sep=""),
run=run)
}
if("MAX.ROC" %in% thresholds){
PA.MAX.ROC.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MAX.ROC.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.MAX.ROC.cali)[2]){
if(sum(!is.na(PA.MAX.ROC.cali[,s]))==0){
PA.MAX.ROC.cali[,s] <- NA
PA.MAX.ROC.eval[,s] <- NA
}else{
MAX.ROC.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.MAX.ROC.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=9)[,2]
PA.MAX.ROC.cali[PA.MAX.ROC.cali[,s] >= MAX.ROC.threshold,s] <- 1
PA.MAX.ROC.cali[PA.MAX.ROC.cali[,s] <= MAX.ROC.threshold,s] <- 0
PA.MAX.ROC.eval[PA.MAX.ROC.eval[,s] >= MAX.ROC.threshold,s] <- 1
PA.MAX.ROC.eval[PA.MAX.ROC.eval[,s] <= MAX.ROC.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ROC", sep=""), recursive=TRUE)
save(PA.MAX.ROC.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ROC/PA.MAX.ROC.cali_",run,".RData", sep=""))
save(PA.MAX.ROC.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ROC/PA.MAX.ROC.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MAX.ROC.cali,
PA.eval = PA.MAX.ROC.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MAX.ROC/MAX.ROC_", sep=""),
run=run)
}
if("OBS.PREVALENCE" %in% thresholds){
PA.OBS.PREVALENCE.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.OBS.PREVALENCE.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.OBS.PREVALENCE.cali)[2]){
if(sum(!is.na(PA.OBS.PREVALENCE.cali[,s]))==0){
PA.OBS.PREVALENCE.cali[,s] <- NA
PA.OBS.PREVALENCE.eval[,s] <- NA
}else{
OBS.PREVALENCE.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.OBS.PREVALENCE.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=6)[,2]
PA.OBS.PREVALENCE.cali[PA.OBS.PREVALENCE.cali[,s] >= OBS.PREVALENCE.threshold,s] <- 1
PA.OBS.PREVALENCE.cali[PA.OBS.PREVALENCE.cali[,s] <= OBS.PREVALENCE.threshold,s] <- 0
PA.OBS.PREVALENCE.eval[PA.OBS.PREVALENCE.eval[,s] >= OBS.PREVALENCE.threshold,s] <- 1
PA.OBS.PREVALENCE.eval[PA.OBS.PREVALENCE.eval[,s] <= OBS.PREVALENCE.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/OBS.PREVALENCE", sep=""), recursive=TRUE)
save(PA.OBS.PREVALENCE.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/OBS.PREVALENCE/PA.OBS.PREVALENCE.cali_",run,".RData", sep=""))
save(PA.OBS.PREVALENCE.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/OBS.PREVALENCE/PA.OBS.PREVALENCE.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.OBS.PREVALENCE.cali,
PA.eval = PA.OBS.PREVALENCE.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/OBS.PREVALENCE/OBS.PREVALENCE_", sep=""),
run=run)
}
if("AVG.PROBABILITY" %in% thresholds){
PA.AVG.PROBABILITY.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.AVG.PROBABILITY.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.AVG.PROBABILITY.cali)[2]){
if(sum(!is.na(PA.AVG.PROBABILITY.cali[,s]))==0){
PA.AVG.PROBABILITY.cali[,s] <- NA
PA.AVG.PROBABILITY.eval[,s] <- NA
}else{
AVG.PROBABILITY.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.AVG.PROBABILITY.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=8)[,2]
PA.AVG.PROBABILITY.cali[PA.AVG.PROBABILITY.cali[,s] >= AVG.PROBABILITY.threshold,s] <- 1
PA.AVG.PROBABILITY.cali[PA.AVG.PROBABILITY.cali[,s] < AVG.PROBABILITY.threshold,s] <- 0
PA.AVG.PROBABILITY.eval[PA.AVG.PROBABILITY.eval[,s] >= AVG.PROBABILITY.threshold,s] <- 1
PA.AVG.PROBABILITY.eval[PA.AVG.PROBABILITY.eval[,s] < AVG.PROBABILITY.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/AVG.PROBABILITY", sep=""), recursive=TRUE)
save(PA.AVG.PROBABILITY.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/AVG.PROBABILITY/PA.AVG.PROBABILITY.cali_",run,".RData", sep=""))
save(PA.AVG.PROBABILITY.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/AVG.PROBABILITY/PA.AVG.PROBABILITY.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.AVG.PROBABILITY.cali,
PA.eval = PA.AVG.PROBABILITY.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/AVG.PROBABILITY/AVG.PROBABILITY_", sep=""),
run=run)
}
if("MCE" %in% thresholds){
PA.MCE.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MCE.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
for(s in 1:dim(PA.MCE.cali)[2]){
if(sum(!is.na(PA.MCE.cali[,s]))==0){
PA.MCE.cali[,s] <- NA
PA.MCE.eval[,s] <- NA
}else{
MCE.threshold <- optimal.thresholds(DATA=na.omit(data.frame(unlist(dimnames(PA.MCE.cali)[1]),
t(ccv.modeling.data$speciesData.calibration[,,run])[,s],
t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])[,s]/1000)),
threshold=101, opt.methods=10, req.sens=(100-MCE)/100)[,2]
PA.MCE.cali[PA.MCE.cali[,s] >= MCE.threshold,s] <- 1
PA.MCE.cali[PA.MCE.cali[,s] < MCE.threshold,s] <- 0
PA.MCE.eval[PA.MCE.eval[,s] >= MCE.threshold,s] <- 1
PA.MCE.eval[PA.MCE.eval[,s] < MCE.threshold,s] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MCE", sep=""), recursive=TRUE)
save(PA.MCE.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MCE/PA.MCE.cali_",run,".RData", sep=""))
save(PA.MCE.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MCE/PA.MCE.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MCE.cali,
PA.eval = PA.MCE.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MCE/MCE_", sep=""),
run=run)
}
if("PS_SDM" %in% thresholds){
PA.PS_SDM.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.PS_SDM.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
SR.cali <- rowSums(PA.PS_SDM.cali, na.rm = TRUE)
SR.eval <- rowSums(PA.PS_SDM.eval, na.rm = TRUE)
for(p in 1:dim(PA.PS_SDM.cali)[1]){
if(round(SR.cali[p])==0){
PA.PS_SDM.cali[p,] <- 0
}else{
pS_SDM.threshold <- sort(PA.PS_SDM.cali[p,], decreasing = TRUE)[round(SR.cali[p])]
PA.PS_SDM.cali[p,PA.PS_SDM.cali[p,]>=as.numeric(pS_SDM.threshold)] <- 1
PA.PS_SDM.cali[p,PA.PS_SDM.cali[p,]<as.numeric(pS_SDM.threshold)] <- 0
}
}
for(p in 1:dim(PA.PS_SDM.eval)[1]){
if(round(SR.eval[p])==0){
PA.PS_SDM.eval[p,] <- 0
}else{
pS_SDM.threshold <- sort(PA.PS_SDM.eval[p,], decreasing = TRUE)[round(SR.eval[p])]
PA.PS_SDM.eval[p,PA.PS_SDM.eval[p,]>=as.numeric(pS_SDM.threshold)] <- 1
PA.PS_SDM.eval[p,PA.PS_SDM.eval[p,]<as.numeric(pS_SDM.threshold)] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/PS_SDM", sep=""), recursive = TRUE)
save(PA.PS_SDM.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/PS_SDM/PA.PS_SDM.cali_",run,".RData", sep=""))
save(PA.PS_SDM.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/PS_SDM/PA.PS_SDM.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.PS_SDM.cali,
PA.eval = PA.PS_SDM.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/PS_SDM/PS_SDM_", sep=""),
run=run)
}
if("MEM" %in% thresholds){
PA.MEM.cali <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,run])/1000
PA.MEM.eval <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,run])/1000
SR.cali <- MEM[which(ccv.modeling.data$DataSplitTable[,run])]
SR.eval <- MEM[which(!ccv.modeling.data$DataSplitTable[,run])]
for(p in 1:length(SR.cali)){
if(round(SR.cali[p])==0){
PA.MEM.cali[p,] <- 0
}else{
MEM.threshold <- sort(PA.MEM.cali[p,], decreasing = TRUE)[round(SR.cali[p])]
PA.MEM.cali[p,PA.MEM.cali[p,]>=as.numeric(MEM.threshold)] <- 1
PA.MEM.cali[p,PA.MEM.cali[p,]<as.numeric(MEM.threshold)] <- 0
}
}
for(p in 1:length(SR.eval)){
if(round(SR.eval[p])==0){
PA.MEM.eval[p,] <- 0
}else{
MEM.threshold <- sort(PA.MEM.eval[p,], decreasing = TRUE)[round(SR.eval[p])]
PA.MEM.eval[p,PA.MEM.eval[p,]>=as.numeric(MEM.threshold)] <- 1
PA.MEM.eval[p,PA.MEM.eval[p,]<as.numeric(MEM.threshold)] <- 0
}
}
dir.create(paste(ccv.modeling.data$modeling.id, "/Thresholding/MEM", sep=""), recursive = TRUE)
save(PA.MEM.cali, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MEM/PA.MEM.cali_",run,".RData", sep=""))
save(PA.MEM.eval, file=paste(ccv.modeling.data$modeling.id, "/Thresholding/MEM/PA.MEM.eval_",run,".RData", sep=""))
community.compairison(sp.data.cali=t(ccv.modeling.data$speciesData.calibration[,,run]),
sp.data.eval= t(ccv.modeling.data$speciesData.evaluation[,,run]),
PA.cali= PA.MEM.cali,
PA.eval = PA.MEM.eval,
community.metrics = community.metrics,
save.dir=paste(ccv.modeling.data$modeling.id, "/Thresholding/MEM/MEM_", sep=""),
run=run)
}
}
if(parallel){
sfInit(parallel=TRUE, cpus=cpus)
sfLibrary('PresenceAbsence', character.only=TRUE)
sfExport('community.metrics.calculation')
sfExport('community.compairison')
sfLapply(1:dim(ccv.modeling.data$DataSplitTable)[2],
community.thresholding,
ccv.modeling.data=ccv.modeling.data,
thresholds=thresholds,
community.metrics=community.metrics,
fix.threshold=fix.threshold,
MCE=MCE,
MEM=MEM)
sfStop( nostop=FALSE )
}else{
lapply(1:dim(ccv.modeling.data$DataSplitTable)[2],
community.thresholding,
ccv.modeling.data=ccv.modeling.data,
thresholds=thresholds,
community.metrics=community.metrics,
fix.threshold=fix.threshold,
MCE=MCE,
MEM=MEM)
}
ccv.metrics.allsites.cali <- array(data=NA,
dim=c(dim(ccv.modeling.data$speciesData.full)[1],
length(thresholds),
length(community.metrics),
dim(ccv.modeling.data$speciesData.calibration)[3]),
dimnames=list(unlist(dimnames(ccv.modeling.data$speciesData.full)[1]),
thresholds,
community.metrics,
unlist(dimnames(ccv.modeling.data$speciesData.calibration)[3])))
ccv.metrics.allsites.eval <- array(data=NA,
dim=c(dim(ccv.modeling.data$speciesData.full)[1],
length(thresholds),
length(community.metrics),
dim(ccv.modeling.data$speciesData.evaluation)[3]),
dimnames=list(unlist(dimnames(ccv.modeling.data$speciesData.full)[1]),
thresholds,
community.metrics,
unlist(dimnames(ccv.modeling.data$speciesData.evaluation)[3])))
ccv.PA.allSites <- array(data=NA,
dim=c(dim(ccv.modeling.data$speciesData.full)[2],
dim(ccv.modeling.data$speciesData.full)[1],
length(thresholds),
dim(ccv.modeling.data$speciesData.evaluation)[3]),
dimnames=list(unlist(dimnames(ccv.modeling.data$speciesData.full)[2]),
unlist(dimnames(ccv.modeling.data$speciesData.full)[1]),
thresholds,
unlist(dimnames(ccv.modeling.data$speciesData.evaluation)[3])))
for(th in thresholds){
for(r in 1:dim(ccv.modeling.data$DataSplitTable)[2]){
load(paste(ccv.modeling.data$modeling.id,"/Thresholding/",th,"/",th,"_community.metrics.cali_",r,".RData", sep=""))
load(paste(ccv.modeling.data$modeling.id,"/Thresholding/",th,"/",th,"_community.metrics.eval_",r,".RData", sep=""))
load(paste(ccv.modeling.data$modeling.id,"/Thresholding/",th,"/",th,"_community.metrics.cali_",r,".RData", sep=""))
load(paste(ccv.modeling.data$modeling.id,"/Thresholding/",th,"/PA.",th,".cali_",r,".RData", sep=""))
load(paste(ccv.modeling.data$modeling.id,"/Thresholding/",th,"/PA.",th,".eval_",r,".RData", sep=""))
ccv.metrics.allsites.cali[which(ccv.modeling.data$DataSplitTable[,r]),th,,r] <- community.metrics.cali.selected[1:length(which(ccv.modeling.data$DataSplitTable[,r])),]
ccv.metrics.allsites.eval[which(!ccv.modeling.data$DataSplitTable[,r]),th,,r] <- community.metrics.eval.selected[1:length(which(!ccv.modeling.data$DataSplitTable[,r])),]
ccv.PA.allSites[,which(ccv.modeling.data$DataSplitTable[,r]),th,r] <- get(paste("PA.",th,".cali", sep=""))[1:length(which(ccv.modeling.data$DataSplitTable[,r]))]
ccv.PA.allSites[,which(!ccv.modeling.data$DataSplitTable[,r]),th,r] <- get(paste("PA.",th,".eval", sep=""))[1:length(which(!ccv.modeling.data$DataSplitTable[,r]))]
}
}
ccv.evaluationMetrics.bin <- list(DataSplitTable = ccv.modeling.data$DataSplitTable,
CommunityEvaluationMetrics.CalibrationSites = ccv.metrics.allsites.cali,
CommunityEvaluationMetrics.EvaluationSites = ccv.metrics.allsites.eval,
PA.allSites = ccv.PA.allSites)
save(ccv.evaluationMetrics.bin, file=paste(ccv.modeling.data$modeling.id,".ccv.evaluationMetrics.bin.RData", sep=""))
return(ccv.evaluationMetrics.bin)
}
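# Usage sketch for ecospat.CCV.communityEvaluation.bin above (illustrative:
# 'ccv.data' stands for the object returned by ecospat.CCV.modeling(), and the
# metric names are assumptions, so the call is shown commented out):
# eval.bin <- ecospat.CCV.communityEvaluation.bin(ccv.data,
#                 thresholds = c("MAX.ROC", "PS_SDM"),
#                 community.metrics = c("SR.deviation", "Sorensen"),
#                 parallel = FALSE)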
ecospat.CCV.communityEvaluation.prob <- function(ccv.modeling.data,
community.metrics=c('SR.deviation','community.AUC','Max.Sorensen','Max.Jaccard','probabilistic.Sorensen','probabilistic.Jaccard'),
parallel = FALSE,
cpus = 4){
stopifnot(names(ccv.modeling.data)==c("modeling.id",
"output.files",
"speciesData.calibration",
"speciesData.evaluation",
"speciesData.full",
"DataSplitTable",
"singleSpecies.ensembleEvaluationScore",
"singleSpecies.ensembleVariableImportance",
"singleSpecies.calibrationSites.ensemblePredictions",
"singleSpecies.evaluationSites.ensemblePredictions",
"allSites.averagePredictions.cali",
"allSites.averagePredictions.eval"))
stopifnot(community.metrics %in% c("SR.deviation","community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard"))
SR.prob <- function(data){
Sj <- as.numeric(data[1])
pjk <- as.numeric(data[-1][!is.na(data[-1])])
return(dpoibin(kk=Sj, pp=pjk))
}
SR.mean.sd <- function(data){
data <- data[!is.na(data)]
SR.mean <- sum(data[-1])
SR.dev <- SR.mean - data[[1]]
SR.sd <- sqrt(sum((1-data[-1])*data[-1]))
if(SR.dev >= 0){
SR.prob <- ppoibin(data[[1]], data[-1])
}else{
SR.prob <- 1-ppoibin(data[[1]]-1, data[-1])
}
return(unlist(c(SR.mean=SR.mean,SR.dev=SR.dev,SR.sd=SR.sd, SR.prob=SR.prob)))
}
Community.AUC <- function(data){
obs.data <- as.numeric(data[1:(length(data)/2)])
pred.data <- as.numeric(data[((length(data)/2)+1):length(data)])
obs.data <- obs.data[!is.na(pred.data)]
pred.data <- pred.data[!is.na(pred.data)]
if(sum(is.na(obs.data))==length(obs.data) & sum(is.na(pred.data))==length(pred.data)){
return(NA)
}else{
if(sum(obs.data)==0 | sum(obs.data)==length(obs.data)){
return(1)
}else{
auc.return <- unlist(auc(DATA=data.frame(id=1:length(obs.data),
obs=obs.data,
pred=pred.data), na.rm = TRUE))[1]
return(auc.return)
}
}
}
composition.prob <- function(data){
obs.data <- data[1:(length(data)/2)]
pred.data <- data[((length(data)/2)+1):length(data)]
obs.data <- obs.data[!is.na(pred.data)]
pred.data <- pred.data[!is.na(pred.data)]
if(sum(obs.data==1)>0 & sum(obs.data==0)>0){
prob.list <- c(pred.data[which(obs.data==1)],1-pred.data[which(obs.data==0)])
}
if(sum(obs.data==1)>0 & sum(obs.data==0)==0){
prob.list <- pred.data[which(obs.data==1)]
}
if(sum(obs.data==1)==0 & sum(obs.data==0)>0){
prob.list <- 1-pred.data[which(obs.data==0)]
}
return(prod(prob.list))
}
MaxSorensen <- function(data){
obs.data <- as.numeric(data[1:(length(data)/2)])
pred.data <- as.numeric(data[((length(data)/2)+1):length(data)])
temp.Sorensen <- rep(NA,101)
th <- seq(0,1,0.01)
for(i in 1:101){
pred.temp <- pred.data
pred.temp[pred.temp>=th[i]] <- 1
pred.temp[pred.temp<th[i]] <- 0
errors <- 2*pred.temp+obs.data
a <- length(which(errors == 3))
b <- length(which(errors == 2))
c <- length(which(errors == 1))
if(a==0 & b==0 & c==0){
Sorensen <- 1
}else{
Sorensen <- round((2 * a)/(2 * a + b + c), digits=3)
}
temp.Sorensen[i] <- Sorensen
}
return(max(temp.Sorensen))
}
MaxJaccard <- function(data){
obs.data <- as.numeric(data[1:(length(data)/2)])
pred.data <- as.numeric(data[((length(data)/2)+1):length(data)])
temp.Jaccard <- rep(NA,101)
th <- seq(0,1,0.01)
for(i in 1:101){
pred.temp <- pred.data
pred.temp[pred.temp>=th[i]] <- 1
pred.temp[pred.temp<th[i]] <- 0
errors <- 2*pred.temp+obs.data
a <- length(which(errors == 3))
b <- length(which(errors == 2))
c <- length(which(errors == 1))
if(a==0 & b==0 & c==0){
Jaccard <- 1
}else{
Jaccard <- round((a)/(a + b + c), digits=3)
}
temp.Jaccard[i] <- Jaccard
}
return(max(temp.Jaccard))
}
probabilisticSorensen <- function(data){
temp.df <- data.frame(obs=as.numeric(data[1:(length(data)/2)]),pred=as.numeric(data[((length(data)/2)+1):length(data)]))
temp.df <- temp.df[order(-temp.df$pred),]
AnB <- 2* sum(temp.df$pred[temp.df$obs==1])
AuB <- sum(temp.df$pred[temp.df$pred>=min(temp.df$pred[temp.df$obs==1])]) + sum(temp.df$pred[temp.df$obs==1])
return(AnB/AuB)
}
probabilisticJaccard <- function(data){
temp.df <- data.frame(obs=as.numeric(data[1:(length(data)/2)]),pred=as.numeric(data[((length(data)/2)+1):length(data)]))
temp.df <- temp.df[order(-temp.df$pred),]
AnB <- sum(temp.df$pred[temp.df$obs==1])
AuB <- sum(temp.df$pred[temp.df$pred>=min(temp.df$pred[temp.df$obs==1])])
return(AnB/AuB)
}
prob.community.metrics <- function(obs, pred, metrics){
obs <- obs[,order(colnames(obs))]
pred <- pred[,order(colnames(pred))]
if(!identical(dim(obs),dim(pred))){stop("Dimensions of obs and pred differ")}
if(!identical(colnames(obs), colnames(pred))){stop("Column names of obs and pred differ, make sure the species are matching")}
Null.pred.05 <- pred
Null.pred.05[,] <- 0.5
Null.pred.average.SR <- pred
Null.pred.average.SR[,] <- mean(rowSums(obs))/dim(obs)[2]
Null.pred.prevalence <- pred
Null.pred.prevalence[,] <- rep(colSums(obs)/dim(obs)[1], each=dim(obs)[1])
if("SR.deviation" %in% metrics){
SR.obs <- rowSums(obs)
SR.Null.pred.05 <- apply(data.frame(SR.obs=SR.obs, Null.pred.05),1, SR.prob)
SR.Null.pred.average.SR <- apply(data.frame(SR.obs=SR.obs, Null.pred.average.SR),1, SR.prob)
SR.Null.pred.prevalence <- apply(data.frame(SR.obs=SR.obs, Null.pred.prevalence),1, SR.prob)
SR.pred <- apply(data.frame(SR.obs=SR.obs, pred),1,SR.prob)
SR.stat <- data.frame(t(apply(data.frame(SR.obs=SR.obs, pred),1,SR.mean.sd)))
SR.results <- signif(data.frame(SR.obs, SR.stat, SR.imp.05=SR.pred/SR.Null.pred.05, SR.imp.average.SR=SR.pred/SR.Null.pred.average.SR, SR.imp.prevalence=SR.pred/SR.Null.pred.prevalence),3)
}
if(length(intersect(metrics,c("community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard")))>0){
composition.Null.pred.05 <- rep(0.5^dim(Null.pred.05)[2], dim(Null.pred.05)[1])
composition.Null.pred.average.SR <- apply(data.frame(obs, Null.pred.average.SR),1, composition.prob)
composition.Null.pred.prevalence <- apply(data.frame(obs, Null.pred.prevalence),1, composition.prob)
composition.pred <- apply(data.frame(obs, pred),1, composition.prob)
composition.results <- signif(data.frame(composition.imp.05 = composition.pred/composition.Null.pred.05, composition.imp.average.SR = composition.pred/composition.Null.pred.average.SR, composition.imp.prevalence = composition.pred/composition.Null.pred.prevalence),3)
if("probabilistic.Sorensen" %in% metrics){
Sorensen.stat <- data.frame(t(apply(data.frame(obs, pred),1, probabilisticSorensen)))
composition.results <- signif(data.frame(probabilistic.Sorensen=unlist(Sorensen.stat), composition.results),3)
}
if("probabilistic.Jaccard" %in% metrics){
Jaccard.stat <- data.frame(t(apply(data.frame(obs, pred),1, probabilisticJaccard)))
composition.results <- signif(data.frame(probabilistic.Jaccard=unlist(Jaccard.stat), composition.results),3)
}
if("Max.Sorensen" %in% metrics){
Sorensen.stat <- data.frame(t(apply(data.frame(obs, pred),1, MaxSorensen)))
composition.results <- signif(data.frame(Max.Sorensen=unlist(Sorensen.stat), composition.results),3)
}
if("Max.Jaccard" %in% metrics){
Jaccard.stat <- data.frame(t(apply(data.frame(obs, pred),1, MaxJaccard)))
composition.results <- signif(data.frame(Max.Jaccard=unlist(Jaccard.stat), composition.results),3)
}
if("community.AUC" %in% metrics){
AUC.stat <- apply(data.frame(obs, pred),1, Community.AUC)
composition.results <- signif(data.frame(Community.AUC=AUC.stat, composition.results),3)
}
}
if("SR.deviation" %in% metrics & length(intersect(metrics,c("community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard")))==0){
return(SR.results)
}
if(!("SR.deviation" %in% metrics) & length(intersect(metrics,c("community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard")))>0){
return(composition.results)
}
if("SR.deviation" %in% metrics & length(intersect(metrics,c("community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard")))>0){
return(data.frame(SR.results,composition.results))
}
}
nb.mes <- 0
if("SR.deviation" %in% community.metrics){
nb.mes <- nb.mes+8
}
if(length(intersect(community.metrics,c("community.AUC","Max.Sorensen","Max.Jaccard","probabilistic.Sorensen","probabilistic.Jaccard")))>0){
nb.mes <- nb.mes+3
}
if("community.AUC" %in% community.metrics){
nb.mes <- nb.mes+1
}
if("Max.Sorensen" %in% community.metrics){
nb.mes <- nb.mes+1
}
if("Max.Jaccard" %in% community.metrics){
nb.mes <- nb.mes+1
}
if("probabilistic.Sorensen" %in% community.metrics){
nb.mes <- nb.mes+1
}
if("probabilistic.Jaccard" %in% community.metrics){
nb.mes <- nb.mes+1
}
ccv.cali <- array(data=NA, dim=c(dim(ccv.modeling.data$speciesData.calibration)[2],nb.mes, dim(ccv.modeling.data$speciesData.calibration)[3]))
ccv.eval <- array(data=NA, dim=c(dim(ccv.modeling.data$speciesData.evaluation)[2],nb.mes, dim(ccv.modeling.data$speciesData.evaluation)[3]))
if(parallel){
sfInit(parallel=TRUE, cpus=cpus)
sfExport("SR.mean.sd", "SR.prob","prob.community.metics", "composition.prob","Community.AUC", "MaxJaccard", "MaxSorensen", "probabilisticJaccard", "probabilisticSorensen")
sfExport("ccv.modeling.data", "community.metrics")
sfLibrary("poibin", character.only=TRUE )
sfLibrary("PresenceAbsence", character.only=TRUE )
temp <- sfLapply(1:dim(ccv.modeling.data$speciesData.calibration)[3],
function(x){
obs.temp <- t(ccv.modeling.data$speciesData.calibration[,,x])[rowSums(is.na(t(ccv.modeling.data$speciesData.calibration[,,x])))!=ncol(t(ccv.modeling.data$speciesData.calibration[,,x])),]
pred.temp <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])[rowSums(is.na(t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])))!=ncol(t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])),]/1000
stopifnot(dim(obs.temp)==dim(pred.temp))
prob.community.metrics(obs=obs.temp,
pred=pred.temp,
metrics=community.metrics)
})
for(i in 1:dim(ccv.modeling.data$speciesData.calibration)[3]){
ccv.cali[1:dim(temp[[i]])[1],,i] <- unlist(temp[[i]])
}
dimnames(ccv.cali) <- list(dimnames(ccv.modeling.data$speciesData.calibration)[[2]], unlist(dimnames(temp[[1]])[2]), dimnames(ccv.modeling.data$speciesData.calibration)[[3]])
temp <- sfLapply(1:dim(ccv.modeling.data$speciesData.evaluation)[3],
function(x){
obs.temp <- t(ccv.modeling.data$speciesData.evaluation[,,x])[rowSums(is.na(t(ccv.modeling.data$speciesData.evaluation[,,x])))!=ncol(t(ccv.modeling.data$speciesData.evaluation[,,x])),]
pred.temp <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])[rowSums(is.na(t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])))!=ncol(t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])),]/1000
stopifnot(dim(obs.temp)==dim(pred.temp))
prob.community.metrics(obs=obs.temp,
pred=pred.temp,
metrics=community.metrics)
})
for(i in 1:dim(ccv.modeling.data$speciesData.evaluation)[3]){
ccv.eval[1:dim(temp[[i]])[1],,i] <- unlist(temp[[i]])
}
dimnames(ccv.eval) <- list(dimnames(ccv.modeling.data$speciesData.evaluation)[[2]], unlist(dimnames(temp[[1]])[2]), dimnames(ccv.modeling.data$speciesData.evaluation)[[3]])
sfStop( nostop=FALSE )
}else{
temp <- lapply(1:dim(ccv.modeling.data$speciesData.calibration)[3],
function(x){
obs.temp <- t(ccv.modeling.data$speciesData.calibration[,,x])[rowSums(is.na(t(ccv.modeling.data$speciesData.calibration[,,x])))!=ncol(t(ccv.modeling.data$speciesData.calibration[,,x])),]
pred.temp <- t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])[rowSums(is.na(t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])))!=ncol(t(ccv.modeling.data$singleSpecies.calibrationSites.ensemblePredictions[,,x])),]/1000
stopifnot(dim(obs.temp)==dim(pred.temp))
prob.community.metrics(obs=obs.temp,
pred=pred.temp,
metrics=community.metrics)
})
for(i in 1:dim(ccv.modeling.data$speciesData.calibration)[3]){
ccv.cali[1:dim(temp[[i]])[1],,i] <- unlist(temp[[i]])
}
dimnames(ccv.cali) <- list(dimnames(ccv.modeling.data$speciesData.calibration)[[2]], unlist(dimnames(temp[[1]])[2]), dimnames(ccv.modeling.data$speciesData.calibration)[[3]])
temp <- lapply(1:dim(ccv.modeling.data$speciesData.evaluation)[3],
function(x){
obs.temp <- t(ccv.modeling.data$speciesData.evaluation[,,x])[rowSums(is.na(t(ccv.modeling.data$speciesData.evaluation[,,x])))!=ncol(t(ccv.modeling.data$speciesData.evaluation[,,x])),]
pred.temp <- t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])[rowSums(is.na(t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])))!=ncol(t(ccv.modeling.data$singleSpecies.evaluationSites.ensemblePredictions[,,x])),]/1000
stopifnot(dim(obs.temp)==dim(pred.temp))
prob.community.metrics(obs=obs.temp,
pred=pred.temp,
metrics=community.metrics)
})
for(i in 1:dim(ccv.modeling.data$speciesData.evaluation)[3]){
ccv.eval[1:dim(temp[[i]])[1],,i] <- unlist(temp[[i]])
}
dimnames(ccv.eval) <- list(dimnames(ccv.modeling.data$speciesData.evaluation)[[2]], unlist(dimnames(temp[[1]])[2]), dimnames(ccv.modeling.data$speciesData.evaluation)[[3]])
}
CommunityEvaluationMetrics.CalibrationSites <- array(data=NA,
dim=c(dim(ccv.modeling.data$speciesData.full)[1],nb.mes, dim(ccv.modeling.data$speciesData.calibration)[3]),
dimnames=list(unlist(dimnames(ccv.modeling.data$speciesData.full)[1]), unlist(dimnames(ccv.cali)[2]), unlist(dimnames(ccv.cali)[3])))
CommunityEvaluationMetrics.EvaluationSites <- array(data=NA,
dim=c(dim(ccv.modeling.data$speciesData.full)[1],nb.mes, dim(ccv.modeling.data$speciesData.calibration)[3]),
dimnames=list(unlist(dimnames(ccv.modeling.data$speciesData.full)[1]), unlist(dimnames(ccv.cali)[2]), unlist(dimnames(ccv.cali)[3])))
for(r in 1:dim(ccv.modeling.data$speciesData.calibration)[3]){
CommunityEvaluationMetrics.CalibrationSites[which(ccv.modeling.data$DataSplitTable[,r]),,r] <- ccv.cali[1:sum(ccv.modeling.data$DataSplitTable[,r]),,r]
CommunityEvaluationMetrics.EvaluationSites[which(!ccv.modeling.data$DataSplitTable[,r]),,r] <- ccv.eval[1:sum(!ccv.modeling.data$DataSplitTable[,r]),,r]
}
ccv.evaluationMetrics.prob <- list(DataSplitTable = ccv.modeling.data$DataSplitTable,
CommunityEvaluationMetrics.CalibrationSites=CommunityEvaluationMetrics.CalibrationSites,
CommunityEvaluationMetrics.EvaluationSites=CommunityEvaluationMetrics.EvaluationSites)
save(ccv.evaluationMetrics.prob, file=paste(ccv.modeling.data$modeling.id,".ccv.evaluationMetrics.prob.RData", sep=""))
return(ccv.evaluationMetrics.prob)
} |
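# Usage sketch for ecospat.CCV.communityEvaluation.prob above (illustrative:
# 'ccv.data' stands for the object returned by ecospat.CCV.modeling(), so the
# call is shown commented out):
# eval.prob <- ecospat.CCV.communityEvaluation.prob(ccv.data,
#                  community.metrics = c("SR.deviation", "community.AUC"),
#                  parallel = FALSE, cpus = 4)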
arr2dl <- function(x, ...){
if (class(x) != "array"){
stop("x must be a array.", call. = FALSE)
}
x <- as.vector(x)
result <- chrvec2dl(x, ...)
return(result)
}
arr2vbt <- function(x, ...){
if (class(x) != "array"){
stop("x must be a array.", call. = FALSE)
}
x <- as.vector(x)
x <- chrvec2dl(x, ...)
result <- dl2vbt(x)
return(result)
} |
bumpVersion <- function(element = "patch", pkg.repo = ".",
news = file.path(pkg.repo, "NEWS.md"),
plain_news = TRUE) {
desc <- readLines(paste(pkg.repo, "DESCRIPTION", sep = "/"))
old.ver <- substr(desc[grep("Version*", desc)], 10,
nchar(desc[grep("Version*", desc)]))
old <- as.numeric(unlist(strsplit(old.ver, "\\.")))
new.v <- switch(element,
major = c(old[1] + 1, 0, 0),
minor = c(old[1], old[2] + 1, 0),
patch = c(old[1], old[2], old[3] + 1))
new.ver <- paste(new.v[1], new.v[2], new.v[3], sep = ".")
new.v <- new.v[1] * 100 + new.v[2] * 10 + new.v[3]
old <- old[1] * 100 + old[2] * 10 + old[3]
desc[grep("^Version", desc)] <- paste0("Version: ", new.ver)
desc[grep("^Date", desc)] <- paste0("Date: ", Sys.Date())
writeLines(desc, paste(pkg.repo, "DESCRIPTION", sep = "/"))
pkg.name <- substr(desc[grep("^Package:", desc)], 10,
nchar(desc[grep("^Package:", desc)]))
pkg_fl = paste(pkg.repo, "man",
paste(pkg.name, "-package.Rd", sep = ""),
sep = "/")
if (file.exists(pkg_fl)) {
pkg.doc <- readLines(pkg_fl)
pkg.doc[grep("^Version", pkg.doc)] <- paste("Version: \\tab ",
new.ver, "\\cr", sep = "")
pkg.doc[grep("^Date", pkg.doc)] <- paste("Date: \\tab ",
Sys.Date(), "\\cr", sep = "")
writeLines(pkg.doc, paste(pkg.repo, "man",
paste(pkg.name, "-package.Rd", sep = ""),
sep = "/"))
}
if (file.exists(news)) {
newsfile <- readLines(news)
newsfile[1] <- paste("
writeLines(newsfile, con = news)
if (basename(news) == "NEWS.md") {
nfl = gsub("
writeLines(nfl, con = gsub(".md", "", news))
}
}
} |
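# Usage sketch for bumpVersion above (run from inside a package source tree;
# the version numbers and path are illustrative):
# bumpVersion("patch")                        # e.g. 1.2.3 -> 1.2.4
# bumpVersion("minor", pkg.repo = "~/mypkg")  # e.g. 1.2.3 -> 1.3.0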
readTreeMask =function (rwl, stc = c(5, 2, 1))
{
if (sum(stc) != 8)
stop("Site-Tree-Core mask does not sum to 8")
ids = colnames(rwl)
test = function (x, site.chars=stc) {
out=c(NA,NA,NA)
out[1] = substring(x, 1, site.chars[1])
out[2] = substring(x, site.chars[1]+1, sum(site.chars[1:2]))
out[3] = substring(x, sum(site.chars[1:2])+1, sum(site.chars[1:2])+site.chars[3])
return(out)
}
out = t(sapply(ids, test,site.chars=stc))
out = data.frame(out)
tree.series = ids
tree.vec = as.numeric(out[, 2])
tree.ids = unique(out[, 2])
core.vec = rep(NA, length(tree.vec))
n.trees = length(tree.ids)
for (i in 1:n.trees) {
idx.tree = tree.vec == as.numeric(tree.ids[i])
n.cores = sum(idx.tree, na.rm = TRUE)
core.vec[which(idx.tree)] = seq(1, n.cores)
}
out =data.frame(out,tree = tree.vec, core = core.vec)
out<-out[order(out[,4]),]
colnames(out) = c("Site", "Tree", "Core","tree", "core")
return(out)
} |
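# Example for readTreeMask above, using the default 5-2-1 site/tree/core ID
# layout (the series names are illustrative):
rwl <- data.frame(ABCDE011 = 1:3, ABCDE012 = 1:3, ABCDE021 = 1:3)
readTreeMask(rwl)  # splits each ID into Site/Tree/Core and numbers cores per tree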
context("rbind and cbind sim_geno")
test_that("rbind.sim_geno works for grav", {
grav2 <- read_cross2(system.file("extdata", "grav2.zip", package="qtl2"))
grav2 <- grav2[1:30,1:2]
map <- insert_pseudomarkers(grav2$gmap, step=5)
draws <- sim_geno(grav2, map, error_prob=0.002, n_draws=8)
drawsA <- draws[1:5,]
drawsB <- draws[6:12,]
drawsC <- draws[13:20,]
drawsAB <- draws[1:12,]
drawsABC <- draws[1:20,]
drawsBACA <- draws[c(6:12, 1:5, 13:20, 1:5),]
expect_equal(rbind(drawsA, drawsB), drawsAB)
expect_equal(rbind(drawsA, drawsB, drawsC), drawsABC)
expect_equal(rbind(drawsB, drawsA, drawsC, drawsA), drawsBACA)
})
test_that("rbind.sim_geno works for iron", {
skip_on_cran()
iron <- read_cross2(system.file("extdata", "iron.zip", package="qtl2"))
map <- insert_pseudomarkers(iron$gmap, step=1)
draws <- sim_geno(iron, map, error_prob=0.002, n_draws=5)
drawsA <- draws[2:20,]
drawsB <- draws[41:60,]
drawsC <- draws[102:201,]
drawsAB <- draws[c(2:20,41:60),]
drawsABC <- draws[c(2:20,41:60,102:201),]
drawsBACA <- draws[c(41:60,2:20,102:201,2:20),]
expect_equal(rbind(drawsA, drawsB), drawsAB)
expect_equal(rbind(drawsA, drawsB, drawsC), drawsABC)
expect_equal(rbind(drawsB, drawsA, drawsC, drawsA), drawsBACA)
})
test_that("cbind.simgeno for grav", {
grav2 <- read_cross2(system.file("extdata", "grav2.zip", package="qtl2"))
grav2 <- grav2[1:30,]
map <- insert_pseudomarkers(grav2$gmap, step=5)
draws <- sim_geno(grav2[1:10,], map, error_prob=0.002, n_draws=8)
drawsA <- draws[,1:2]
drawsB <- draws[,5]
drawsC <- draws[,3:4]
drawsAB <- draws[,c(1:2,5)]
drawsABC <- draws[,c(1,2,5,3,4)]
drawsBACA <- draws[,c(5,1:2,3:4,1:2)]
expect_equal(cbind(drawsA, drawsB), drawsAB)
expect_equal(cbind(drawsA, drawsB, drawsC), drawsABC)
expect_equal(cbind(drawsB, drawsA, drawsC, drawsA), drawsBACA)
})
test_that("cbind.sim_geno works for iron", {
skip_on_cran()
iron <- read_cross2(system.file("extdata", "iron.zip", package="qtl2"))
map <- insert_pseudomarkers(iron$gmap, step=1)
draws <- sim_geno(iron[6:21,], map, error_prob=0.002, n_draws=6)
drawsA <- draws[,2:3]
drawsB <- draws[,c(4,5,8)]
drawsC <- draws[,c(19,"X")]
drawsAB <- draws[,c(2:3,4,5,8)]
drawsABC <- draws[,c(2:3,4,5,8,19,"X")]
drawsBACA <- draws[,c(4,5,8,2,3,19,"X",2,3)]
expect_equal(cbind(drawsA, drawsB), drawsAB)
expect_equal(cbind(drawsA, drawsB, drawsC), drawsABC)
expect_equal(cbind(drawsB, drawsA, drawsC, drawsA), drawsBACA)
}) |
dMetselaar_model<- function(t, x, parms, temp_profile) {
temp <- temp_profile(t)
with(as.list(c(x, parms)),{
D_T <- D_R * 10^( -(temp-temp_ref)/z)
dN <- - N * p * (1/D_T)^p * (t/Delta)^(p-1) * log(10)
res <- c(dN)
return(list(res))
})
} |
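# Usage sketch for dMetselaar_model above. Everything below is illustrative:
# the parameter values and the linear heating profile are assumptions, not
# values from the original source. The function follows the deSolve signature,
# so it can be passed straight to deSolve::ode(), which forwards the extra
# 'temp_profile' argument.
library(deSolve)
temp_profile <- approxfun(x = c(0, 30), y = c(55, 65), rule = 2)  # hypothetical 55->65 C ramp
parms <- c(D_R = 5, temp_ref = 60, z = 7, p = 1.5, Delta = 1)
out <- ode(y = c(N = 1e6), times = seq(0, 30, by = 1),
func = dMetselaar_model, parms = parms, temp_profile = temp_profile)
head(out)  # columns: time and the surviving population N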
common.prefix <- function(cnames) {
n.char <- nchar(cnames)
n <- length(cnames)
y <- "";
for (i in dec(min(n.char), 1)) {
first.prefix <- substr(cnames[1], 1, i)
prefix.match <- TRUE
for (j in inc(2, n)) {
if (first.prefix != substr(cnames[j], 1, i)) {
prefix.match <- FALSE
break
}
}
if (prefix.match) {
y <- first.prefix
break
}
}
y
}
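# Example for common.prefix above: it returns the longest leading substring
# shared by all names. It relies on dplR's internal dec()/inc() sequence
# helpers, so the call is shown commented out:
# common.prefix(c("ABC01A", "ABC01B", "ABC02A"))  # "ABC0"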
consistent.ids.titles <- function(ids, titles) {
unique.trees.1 <- unique(ids$tree)
unique.trees.2 <- unique(titles$tree)
if (length(unique.trees.1) != length(unique.trees.2)) {
return(FALSE)
}
for (tree in unique.trees.1) {
idx.t <- which(ids$tree %in% tree)
if (length(idx.t) > 1) {
if (any(titles$tree[idx.t] != titles$tree[idx.t[1]])) {
return(FALSE)
}
unique.cores.1 <- unique(ids$core[idx.t])
unique.cores.2 <- unique(titles$core[idx.t])
if (length(unique.cores.1) != length(unique.cores.2)) {
return(FALSE)
}
for (core in unique.cores.1) {
idx.c <- idx.t[ids$core[idx.t] %in% core]
if (length(idx.c) > 1) {
if (any(titles$core[idx.c] != titles$core[idx.c[1]])) {
return(FALSE)
}
unique.radii.1 <- unique(ids$radius[idx.c])
unique.radii.2 <- unique(titles$radius[idx.c])
if (length(unique.radii.1) != length(unique.radii.2)) {
return(FALSE)
}
for (radius in unique.radii.1) {
idx.r <- idx.c[ids$radius[idx.c] %in% radius]
length.idx <- length(idx.r)
if (length.idx > 1) {
if (any(titles$radius[idx.r] !=
titles$radius[idx.r[1]])) {
return(FALSE)
}
unique.mments.1 <- unique(ids$measurement[idx.r])
unique.mments.2 <- unique(titles$measurement[idx.r])
if (length(unique.mments.1) != length.idx ||
length(unique.mments.2) != length.idx) {
return(FALSE)
}
}
}
}
}
}
}
return(TRUE)
}
create.title.hierarchy <- function(cnames, ids) {
n <- length(cnames)
max.nchar <- max(nchar(cnames))
out.t <- character(length = n)
out.c <- character(length = n)
out.r <- character(length = n)
out.m <- character(length = n)
unique.trees <- unique(ids$tree)
t.names <- character(length = length(unique.trees))
names(t.names) <- unique.trees
t.map <- list()
for (tree in unique.trees) {
idx.t <- which(ids$tree %in% tree)
t.map[[tree]] <- idx.t
if (length(idx.t) > 1) {
cp <- common.prefix(cnames[idx.t])
t.names[tree] <- cp
chars.used <- nchar(cp)
unique.cores <- unique(ids$core[idx.t])
n.uc <- length(unique.cores)
c.names <- character(length = n.uc)
names(c.names) <- unique.cores
c.map <- list()
for (core in unique.cores) {
idx.c <- idx.t[ids$core[idx.t] %in% core]
c.map[[core]] <- idx.c
if (length(idx.c) > 1) {
if (n.uc == 1) {
c.names[core] <- "1"
chars.used.2 <- chars.used
} else {
cp <- common.prefix(substr(cnames[idx.c],
chars.used+1,
max.nchar))
c.names[core] <- cp
chars.used.2 <- chars.used + nchar(cp)
}
unique.radii <- unique(ids$radius[idx.c])
n.ur <- length(unique.radii)
r.names <- character(length = n.ur)
names(r.names) <- unique.radii
r.map <- list()
for (radius in unique.radii) {
idx.r <- idx.c[ids$radius[idx.c] %in% radius]
r.map[[radius]] <- idx.r
if (length(idx.r) > 1) {
if (n.ur == 1) {
r.names[radius] <- "1"
chars.used.3 <- chars.used.2
} else {
cp <- common.prefix(substr(cnames[idx.r],
chars.used.2+1,
max.nchar))
r.names[radius] <- cp
chars.used.3 <- chars.used.2 + nchar(cp)
}
for (idx.m in idx.r) {
out.m[idx.m] <- substr(cnames[idx.m],
chars.used.3+1,
max.nchar)
}
suppressWarnings(out.m[idx.r] <-
fix.names(out.m[idx.r],
basic.charset = FALSE))
} else {
if (n.ur == 1) {
r.names[radius] <- "1"
} else {
r.names[radius] <-
substr(cnames[idx.r], chars.used.2+1,
max.nchar)
}
out.m[idx.r] <- "1"
}
}
suppressWarnings(r.names <-
fix.names(r.names,
basic.charset = FALSE))
for (radius in unique.radii) {
out.r[r.map[[radius]]] <- r.names[radius]
}
} else {
if (n.uc == 1) {
c.names[core] <- "1"
} else {
c.names[core] <-
substr(cnames[idx.c], chars.used+1, max.nchar)
}
out.r[idx.c] <- out.m[idx.c] <- "1"
}
}
suppressWarnings(c.names <-
fix.names(c.names, basic.charset = FALSE))
for (core in unique.cores) {
out.c[c.map[[core]]] <- c.names[core]
}
} else {
t.names[tree] <- cnames[idx.t]
out.c[idx.t] <- out.r[idx.t] <- out.m[idx.t] <- "1"
}
}
suppressWarnings(t.names <- fix.names(t.names, basic.charset = FALSE))
for (tree in unique.trees) {
out.t[t.map[[tree]]] <- t.names[tree]
}
data.frame(tree = out.t, core = out.c, radius = out.r, measurement = out.m)
}
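# Example for create.title.hierarchy above (relies on dplR's internal
# fix.names(), so shown commented out): shared column-name prefixes become
# the tree/core/radius/measurement titles.
# cnames <- c("TRE01A", "TRE01B", "TRE02A")
# ids <- data.frame(tree = c(1, 1, 2), core = c(1, 2, 1), radius = 1, measurement = 1)
# create.title.hierarchy(cnames, ids)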
po.to.wc <- function(po) {
data.frame(n.missing.heartwood = as.integer(po[[2]] - 1),
row.names = po[[1]])
}
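# Example for po.to.wc above: a two-column pith-offset table (series name,
# rings to pith) becomes a wood-completeness data frame (values illustrative):
po <- data.frame(series = c("A", "B"), pith.offset = c(1, 4))
po.to.wc(po)  # n.missing.heartwood is 0 for "A" and 3 for "B"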
expand.metadata <- function(md.in, crn, default.value="") {
if (is.null(md.in)) {
md.out <- lapply(crn, function(x) rep(default.value, length(x)))
} else if (is.character(md.in)) {
if (length(md.in) == 0) {
md.out <- lapply(crn, function(x) rep(default.value, length(x)))
} else {
md.in2 <- rep(md.in, length.out=length(crn))
md.out <- list()
for (k in seq_along(crn)) {
md.out[[k]] <- rep(md.in2[k], length(crn[[k]]))
}
}
}
md.out
}
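# Example for expand.metadata above: a single character value is recycled to
# one entry per column of each chronology data frame (values illustrative):
crn <- list(data.frame(std = 1:3), data.frame(std = 1:3, res = 4:6))
expand.metadata("index", crn)  # list("index", c("index", "index"))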
write.tridas <- function(rwl.df = NULL, fname, crn = NULL,
prec = NULL,
ids = NULL, titles = NULL,
crn.types = NULL,
crn.titles = NULL,
crn.units = NULL,
tridas.measuring.method = NA,
other.measuring.method = "unknown",
sample.type = "core",
wood.completeness = NULL,
taxon = "",
tridas.variable = "ring width",
other.variable = NA,
project.info = list(
type = c("unknown"),
description = NULL,
title = "",
category = "",
investigator = "",
period = ""
),
lab.info = data.frame(
name = "",
acronym = NA,
identifier = NA,
domain = "",
addressLine1 = NA,
addressLine2 = NA,
cityOrTown = NA,
stateProvinceRegion = NA,
postalCode = NA,
country = NA
),
research.info = data.frame(
identifier = NULL,
domain = NULL,
description = NULL
),
site.info = list(
type = "unknown",
description = NULL,
title = ""
),
random.identifiers = FALSE,
identifier.domain = lab.info$name[1],
...) {
if (!is.data.frame(lab.info) || nrow(lab.info) < 1) {
stop("'lab.info' must be a data.frame with at least one row")
}
lab.names <- names(lab.info)
if (!("name" %in% lab.names)) {
stop("\"name\" is a required variable in 'lab.info'")
}
identifier.present <- "identifier" %in% lab.names
if (identifier.present && !("domain" %in% lab.names)) {
stop("\"domain\" is required together with \"identifier\" in 'lab.info'")
}
if (!is.data.frame(research.info) || nrow(research.info) < 1) {
research.present <- FALSE
} else {
research.names <- names(research.info)
if (!("identifier" %in% research.names)) {
stop("\"identifier\" is a required variable in 'research.info'")
}
if (!("domain" %in% research.names)) {
stop("\"domain\" is a required variable in 'research.info'")
}
if (!("description" %in% research.names)) {
stop("\"description\" is a required variable in 'research.info'")
}
research.present <- TRUE
}
check.char.vars <- function(must.exist) {
for (var.specs in must.exist) {
base.name <- var.specs[1]
this.var <- get(base.name)
specs.length <- length(var.specs)
if (specs.length < 3) {
if (!is.character(this.var)) {
if (specs.length == 1) {
default.value <- ""
} else {
default.value <- var.specs[2]
}
warning(gettextf("'%s' must be of type character - inserting \"%s\"",
base.name, default.value))
assign(base.name, default.value, inherits = TRUE)
}
} else {
default.value <- var.specs[2]
if (!is.list(this.var)) {
warning(gettextf("'%s' must be a list. Creating one.",
base.name))
this.var <- list()
}
for (component.name in var.specs[3:specs.length]) {
if (!is.character(this.var[[component.name]])) {
warning(gettextf("'%s$%s' must be of type character - inserting \"%s\"",
base.name, component.name,
default.value))
this.var[[component.name]] <- default.value
}
}
assign(base.name, this.var, inherits = TRUE)
}
}
}
check.char.vars(list(c("project.info", "",
"title", "category", "investigator", "period"),
c("project.info", "unknown",
"type")))
if (random.identifiers) {
check.char.vars(list("identifier.domain"))
}
if (!is.na(tridas.variable)) {
tridas.variable2 <-
tridas.vocabulary("variable", term=tridas.variable)
} else {
tridas.variable2 <- NA
}
address.order <- c("addressLine1",
"addressLine2",
"cityOrTown",
"stateProvinceRegion",
"postalCode",
"country")
if (random.identifiers) {
ugen <- uuid.gen(paste0("dplR",
packageDescription("dplR", fields = "Version"),
fname))
}
doc <- simpleXML(fname, root="tridas",
xml.ns="http://www.tridas.org/1.2.2")
on.exit(doc$close())
doc.addTag <- doc$addTag
doc.addTag.nc <- doc$addTag.noCheck
doc.closeTag <- doc$closeTag
doc.addTag.nc("project", close = FALSE)
doc.addTag("title", project.info$title[1])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
for (t in project.info$type) {
doc.addTag("type", t)
}
doc.addTag("description", project.info$description[1])
acronym.present <- "acronym" %in% lab.names
address.order <- address.order[address.order %in% lab.names]
for (i in seq_len(nrow(lab.info))) {
doc.addTag.nc("laboratory", close = FALSE)
if (identifier.present) {
this.identifier <- lab.info$identifier[i]
if (!is.na(this.identifier) && nzchar(this.identifier)) {
doc.addTag("identifier",
this.identifier,
attrs = c(domain = lab.info$domain[i]))
}
}
if (acronym.present) {
this.acronym <- lab.info$acronym[i]
if (!is.na(this.acronym) && nzchar(this.acronym)) {
doc.addTag("name", lab.info$name[i],
attrs = c(acronym = this.acronym))
} else {
doc.addTag("name", lab.info$name[i])
}
} else {
doc.addTag("name", lab.info$name[i])
}
doc.addTag.nc("address", close = FALSE)
for (address.line in address.order) {
address.text <- lab.info[[address.line]][i]
if (!is.na(address.text) && nzchar(address.text)) {
doc.addTag(address.line, address.text)
}
}
doc.closeTag()
doc.closeTag()
}
doc.addTag("category", project.info$category[1])
doc.addTag("investigator", project.info$investigator[1])
doc.addTag("period", project.info$period[1])
if (research.present) {
for (i in seq_len(nrow(research.info))) {
doc.addTag.nc("research", close = FALSE)
doc.addTag("identifier",
research.info$identifier[i],
attrs = c(domain = research.info$domain[i]))
doc.addTag("description", research.info$description[i])
doc.closeTag()
}
}
if (!is.null(rwl.df)) {
if (!is.data.frame(rwl.df)) {
stop("'rwl.df' must be a data.frame")
}
check.char.vars(list(c("site.info", "unknown", "type"),
c("site.info", "", "title")))
n.col <- ncol(rwl.df)
cnames <- names(rwl.df)
stopifnot(is.character(cnames), !is.na(cnames),
Encoding(cnames) != "bytes")
ids2 <- ids
titles2 <- titles
if (is.null(ids2)) {
ones <- rep(1, n.col)
ids2 <- data.frame(tree = seq_len(n.col),
core = ones,
radius = ones,
measurement = ones)
} else if (is.data.frame(ids2) && nrow(ids2) == n.col) {
ncol.ids <- ncol(ids2)
if (ncol.ids == 2) {
ones <- rep(1, n.col)
if (!all(c("tree","core") %in% names(ids2))) {
stop("2-col 'ids' needs \"tree\" and \"core\" columns")
}
ids2 <- data.frame(ids2,
radius = ones,
measurement = ones)
} else if(ncol.ids == 3) {
if (!all(c("tree","core","radius") %in% names(ids2))) {
stop("3-col 'ids' needs \"tree\", \"core\", and \"radius\" columns")
}
ids2 <- data.frame(ids2,
measurement = rep(1, n.col))
} else if (ncol.ids == 4) {
if (!all(c("tree","core","radius","measurement") %in%
names(ids2))) {
stop("4-col 'ids' needs \"tree\", \"core\", \"radius\", and \"measurement\" columns")
}
} else {
stop("argument 'ids' is in wrong format (2, 3, or 4 columns required)")
}
} else {
stop("argument 'ids' is not data.frame or has wrong number of rows")
}
if (!all(vapply(ids2, is.numeric, TRUE))) {
stop("'ids' must have numeric columns")
}
if (is.null(titles2)) {
titles2 <- create.title.hierarchy(cnames, ids2)
} else if (is.data.frame(titles2) && nrow(titles2) == n.col) {
if (ncol(titles2) != 4 ||
!all(c("tree", "core", "radius", "measurement") %in%
names(titles2))) {
stop("columns needed in 'titles': \"tree\", \"core\", \"radius\", and \"measurement\"")
}
} else {
stop("argument 'titles' is not data.frame or has wrong number of rows")
}
if (!consistent.ids.titles(ids2, titles2)) {
stop("'ids' and 'titles' not consistent or duplicates present")
}
if (!is.null(prec)) {
if (prec == 0.001) {
data.unit <- "micrometres"
rwl.df2 <- round(rwl.df * 1000)
} else if (prec == 0.01) {
data.unit <- "1/100th millimetres"
rwl.df2 <- round(rwl.df * 100)
} else if (prec == 0.05) {
data.unit <- "1/20th millimetres"
rwl.df2 <- round(rwl.df * 20)
} else if (prec == 0.1) {
data.unit <- "1/10th millimetres"
rwl.df2 <- round(rwl.df * 10)
} else if (prec == 1) {
data.unit <- "millimetres"
rwl.df2 <- round(rwl.df)
} else if (prec == 10) {
data.unit <- "centimetres"
rwl.df2 <- round(rwl.df / 10)
} else if (prec == 100) {
data.unit <- "centimetres"
rwl.df2 <- round(rwl.df / 100) * 10
} else if (prec == 1000) {
data.unit <- "metres"
rwl.df2 <- round(rwl.df / 1000)
} else {
warning("unknown 'prec' specified: no unit conversion or rounding done")
data.unit <- "millimetres"
rwl.df2 <- rwl.df
}
} else {
data.unit <- "millimetres"
rwl.df2 <- rwl.df
}
tridas.measuring.method2 <- tridas.measuring.method
if (!all(is.na(tridas.measuring.method2))) {
for (k in seq_along(tridas.measuring.method2)) {
if (!is.na(this.mm <- tridas.measuring.method2[k])) {
tridas.measuring.method2[k] <-
tridas.vocabulary("measuring method", term=this.mm)
}
}
}
if (length(tridas.measuring.method2) != n.col) {
tridas.measuring.method2 <-
rep(tridas.measuring.method2, length.out = n.col)
}
check.char.vars(list(c("other.measuring.method", "unknown")))
other.measuring.method2 <- other.measuring.method
if (length(other.measuring.method2) != n.col) {
other.measuring.method2 <-
rep(other.measuring.method2, length.out = n.col)
}
check.char.vars(list(c("sample.type", "core")))
if (length(sample.type) != n.col) {
sample.type2 <- rep(sample.type, length.out = n.col)
} else {
sample.type2 <- sample.type
}
wood.completeness2 <- wood.completeness
if (!is.null(wood.completeness2)) {
if (nrow(wood.completeness2) != n.col) {
stop("'nrow(wood.completeness)' must be equal to 'ncol(rwl.df)'")
}
if (any(row.names(wood.completeness2) != cnames)) {
stop("row names of 'wood.completeness' must match column names of 'rwl.df'")
}
names.wc <- names(wood.completeness2)
wc <- TRUE
names.complex <- c("pith.presence", "heartwood.presence",
"sapwood.presence")
names.nonnegative <-
c("n.unmeasured.inner", "n.missing.sapwood", "n.sapwood",
"n.missing.heartwood", "n.unmeasured.outer")
for (nam in names.complex[!(names.complex %in% names.wc)]) {
wood.completeness2[[nam]] <- rep("unknown", n.col)
}
for (nam in names.nonnegative[names.nonnegative %in% names.wc]) {
temp <- na.omit(wood.completeness2[[nam]])
if (any(!is.int(temp) | temp < 0)) {
stop(gettextf("some values in 'wood.completeness$%s' are invalid, i.e. not integer or < 0", nam))
}
}
for (nam in names.complex) {
wood.completeness2[[nam]][is.na(wood.completeness2[[nam]])] <-
"unknown"
wood.completeness2[[nam]] <-
tridas.vocabulary("complex presence / absence",
term = wood.completeness2[[nam]])
}
if (!("bark.presence" %in% names.wc)) {
wood.completeness2$bark.presence <- rep("unknown", n.col)
}
idx.bark.na <- which(is.na(wood.completeness2$bark.presence))
if (length(idx.bark.na) > 0) {
wood.completeness2$bark.presence[idx.bark.na] <- "unknown"
}
wood.completeness2$bark.presence <-
tridas.vocabulary("presence / absence",
term = wood.completeness2$bark.presence)
if ("last.ring.presence" %in% names.wc) {
idx.notna <-
which(!is.na(wood.completeness2$last.ring.presence))
wood.completeness2$last.ring.presence[idx.notna] <-
tridas.vocabulary("presence / absence",
term = wood.completeness2$last.ring.presence[idx.notna])
wc.lrp <- TRUE
if ("last.ring.details" %in% names.wc) {
wc.lrd <- TRUE
} else {
wc.lrd <- FALSE
}
} else {
wc.lrp <- FALSE
}
if ("n.missing.sapwood" %in% names.wc) {
wc.nms <- TRUE
if ("missing.sapwood.foundation" %in% names.wc) {
wc.msf <- TRUE
} else {
wc.msf <- FALSE
}
} else {
wc.nms <- FALSE
}
if ("n.sapwood" %in% names.wc) {
wc.ns <- TRUE
} else {
wc.ns <- FALSE
}
if ("n.missing.heartwood" %in% names.wc) {
wc.nmh <- TRUE
if ("missing.heartwood.foundation" %in% names.wc) {
wc.mhf <- TRUE
} else {
wc.mhf <- FALSE
}
} else {
wc.nmh <- FALSE
}
if ("n.unmeasured.inner" %in% names.wc) {
wc.nui <- TRUE
} else {
wc.nui <- FALSE
}
if ("n.unmeasured.outer" %in% names.wc) {
wc.nuo <- TRUE
} else {
wc.nuo <- FALSE
}
} else {
wc <- FALSE
}
}
crn2 <- crn
crn.types2 <- crn.types
crn.units2 <- crn.units
crn.titles2 <- crn.titles
if (!is.null(crn2)) {
if (is.data.frame(crn2)) {
crn2 <- list(crn2)
}
if (!is.list(crn.types2)) {
crn.types2 <- expand.metadata(crn.types2, crn2, "")
} else {
crn.types2 <- rep(crn.types2, length.out=length(crn2))
}
if (!is.list(crn.units2)) {
crn.units2 <- expand.metadata(crn.units2, crn2, NA)
} else {
crn.units2 <- rep(crn.units2, length.out=length(crn2))
}
if (!is.null(crn.titles2)) {
titles.present <- TRUE
if (!is.list(crn.titles2)) {
crn.titles2 <- list(crn.titles2)
}
crn.titles2 <- rep(crn.titles2, length.out=length(crn2))
} else {
titles.present <- FALSE
}
if (titles.present && length(crn2) != length(crn.titles2)) {
titles.present <- FALSE
}
}
if (!is.null(rwl.df)) {
doc.addTag.nc("object", close = FALSE)
doc.addTag("title", site.info$title[1])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
doc.addTag("type", site.info$type[1])
if (is.character(site.info$description)) {
doc.addTag("description", site.info$description)
}
unique.trees <- unique(ids2$tree)
yrs.all <- as.numeric(row.names(rwl.df2))
for (tree in unique.trees) {
idx.t <- which(ids2$tree %in% tree)
doc.addTag.nc("element", close = FALSE)
doc.addTag("title", titles2$tree[idx.t[1]])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
doc.addTag("taxon", taxon);
unique.cores <- unique(ids2$core[idx.t])
for (core in unique.cores) {
idx.c <- idx.t[ids2$core[idx.t] %in% core]
doc.addTag.nc("sample", close = FALSE)
doc.addTag("title", titles2$core[idx.c[1]])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
doc.addTag("type", sample.type2[idx.c[1]])
unique.radii <- unique(ids2$radius[idx.c])
for (radius in unique.radii) {
idx.r <- idx.c[ids2$radius[idx.c] %in% radius]
doc.addTag.nc("radius", close = FALSE)
doc.addTag("title", titles2$radius[idx.r[1]])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
for (idx.m in idx.r) {
doc.addTag.nc("measurementSeries", close = FALSE)
doc.addTag("title", titles2$measurement[idx.r])
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
doc.addTag("comments", cnames[idx.m])
if (wc) {
doc.addTag.nc("woodCompleteness",
close = FALSE)
if (wc.nui &&
!is.na(this.val <-
wood.completeness2$n.unmeasured.inner[idx.m])) {
doc.addTag.nc("nrOfUnmeasuredInnerRings",
this.val)
}
if (wc.nuo &&
!is.na(this.val <-
wood.completeness2$n.unmeasured.outer[idx.m])) {
doc.addTag.nc("nrOfUnmeasuredOuterRings",
this.val)
}
doc.addTag.nc("pith",
attrs = c(presence = wood.completeness2$pith.presence[idx.m]))
doc.addTag.nc("heartwood",
attrs = c(presence = wood.completeness2$heartwood.presence[idx.m]),
close = FALSE)
if (wc.nmh &&
!is.na(this.val <-
wood.completeness2$n.missing.heartwood[idx.m])) {
doc.addTag.nc("missingHeartwoodRingsToPith",
this.val)
if (wc.mhf &&
!is.na(this.val <-
wood.completeness2$missing.heartwood.foundation[idx.m])) {
doc.addTag("missingHeartwoodRingsToPithFoundation",
this.val)
}
}
doc.closeTag()
doc.addTag.nc("sapwood",
attrs = c(presence = wood.completeness2$sapwood.presence[idx.m]),
close = FALSE)
if (wc.ns &&
!is.na(this.val <-
wood.completeness2$n.sapwood[idx.m])) {
doc.addTag.nc("nrOfSapwoodRings",
this.val)
}
if (wc.lrp &&
!is.na(this.val <-
wood.completeness2$last.ring.presence[idx.m])) {
if (wc.lrd &&
!is.na(this.detail <-
wood.completeness2$last.ring.details[idx.m])) {
doc.addTag("lastRingUnderBark",
this.detail,
attrs = c(presence = this.val))
} else {
doc.addTag("lastRingUnderBark",
attrs = c(presence = this.val))
}
}
if (wc.nms &&
!is.na(this.val <-
wood.completeness2$n.missing.sapwood[idx.m])) {
doc.addTag.nc("missingSapwoodRingsToBark",
this.val)
if (wc.msf &&
!is.na(this.val <-
wood.completeness2$missing.sapwood.foundation[idx.m])) {
doc.addTag("missingSapwoodRingsToBarkFoundation",
this.val)
}
}
doc.closeTag()
doc.addTag.nc("bark",
attrs = c(presence = wood.completeness2$bark.presence[idx.m]))
doc.closeTag()
}
if (!is.na(this.mm <-
tridas.measuring.method2[idx.m])) {
doc.addTag.nc("measuringMethod", NULL,
attrs = c(normalTridas=this.mm))
} else {
doc.addTag("measuringMethod",
other.measuring.method2[idx.m])
}
doc.addTag.nc("interpretation", close = FALSE)
series <- as.numeric(rwl.df2[[idx.m]])
idx <- !is.na(series)
series <- series[idx]
yrs <- yrs.all[idx]
min.year <- min(yrs)
max.year <- max(yrs)
if (min.year < 1) {
doc.addTag.nc("firstYear",
1 - min.year,
attrs = c(suffix = "BC"))
} else {
doc.addTag.nc("firstYear",
min.year,
attrs = c(suffix = "AD"))
}
if (max.year < 1) {
doc.addTag.nc("lastYear",
1 - max.year,
attrs = c(suffix = "BC"))
} else {
doc.addTag.nc("lastYear",
max.year,
attrs = c(suffix = "AD"))
}
doc.closeTag()
doc.addTag.nc("values", close = FALSE)
if (!is.na(tridas.variable2)) {
doc.addTag.nc("variable", NULL,
attrs = c(normalTridas = tridas.variable2))
} else {
doc.addTag("variable", other.variable)
}
doc.addTag.nc("unit", NULL,
attrs = c(normalTridas = data.unit))
for (i in seq_along(series)) {
doc.addTag.nc("value", NULL,
attrs = c(value = series[i]))
}
doc.closeTag()
doc.closeTag()
}
doc.closeTag()
}
doc.closeTag()
}
doc.closeTag()
}
doc.closeTag()
}
if (!is.null(crn2)) {
for (i in seq_along(crn2)) {
this.frame <- crn2[[i]]
yrs.all <- as.numeric(row.names(this.frame))
crn.names <- names(this.frame)
depth.idx <- grep("^samp[.]depth", crn.names)
n.depth <- length(depth.idx)
if (n.depth > 0) {
depth.present <- TRUE
series.idx <- setdiff(seq_along(crn.names), depth.idx)
n.series <- length(series.idx)
depth.idx <- rep(depth.idx, length.out = n.series)
} else {
depth.present <- FALSE
n.series <- length(crn.names)
series.idx <- seq_len(n.series)
}
this.typevec <- as.character(crn.types2[[i]])
n.type <- length(this.typevec)
if (n.type == 0) {
this.typevec <- rep("", n.series)
} else {
this.typevec <- rep(this.typevec, length.out = n.series)
}
this.unitvec <- as.character(crn.units2[[i]])
n.unit <- length(this.unitvec)
if (n.unit == 0) {
this.unitvec <- rep(NA, n.series)
} else {
this.unitvec <- rep(this.unitvec, length.out = n.series)
}
if (titles.present) {
this.titlevec <- as.character(crn.titles2[[i]])
n.title <- length(this.titlevec)
if (n.title == 0) {
this.titlevec <- rep(NA, n.series)
} else {
this.titlevec <- rep(this.titlevec, length.out = n.series)
}
}
if (depth.present) {
n.depth <- length(depth.idx)
}
for (j in seq_len(n.series)) {
this.idx <- series.idx[j]
series <- as.numeric(this.frame[[this.idx]])
if (depth.present) {
samp.depth <- as.numeric(this.frame[[depth.idx[j]]])
}
doc.addTag.nc("derivedSeries", close = FALSE)
this.crn.name <- crn.names[this.idx]
if (titles.present) {
this.title <- this.titlevec[j]
if (is.na(this.title)) {
this.title <- this.crn.name
this.title.present <- FALSE
} else {
this.title.present <- TRUE
}
} else {
this.title <- this.crn.name
this.title.present <- FALSE
}
doc.addTag("title", this.title)
if (random.identifiers) {
doc.addTag("identifier",
ugen(),
attrs = c(domain = identifier.domain))
}
if (this.title.present && this.title != this.crn.name) {
doc.addTag("comments", this.crn.name)
}
doc.addTag("type", this.typevec[j])
doc.addTag.nc("linkSeries")
doc.addTag.nc("interpretation", close = FALSE)
idx <- !is.na(series)
series <- series[idx]
yrs <- yrs.all[idx]
min.year <- min(yrs)
max.year <- max(yrs)
if (min.year < 1) {
doc.addTag.nc("firstYear",
1 - min.year,
attrs = c(suffix = "BC"))
} else {
doc.addTag.nc("firstYear",
min.year,
attrs = c(suffix = "AD"))
}
if (max.year < 1) {
doc.addTag.nc("lastYear",
1 - max.year,
attrs = c(suffix = "BC"))
} else {
doc.addTag.nc("lastYear",
max.year,
attrs = c(suffix = "AD"))
}
doc.closeTag()
doc.addTag.nc("values", close = FALSE)
if (!is.na(tridas.variable2)) {
doc.addTag.nc("variable", NULL,
attrs = c(normalTridas=tridas.variable2))
} else {
doc.addTag("variable", other.variable)
}
this.unit <- this.unitvec[j]
if (is.na(this.unit)) {
doc.addTag.nc("unitless", NULL)
} else {
doc.addTag("unit", this.unit)
}
if (depth.present) {
for (v in seq_along(series)) {
doc.addTag.nc("value", NULL,
attrs = c(count = samp.depth[v],
value = series[v]))
}
} else {
for (v in seq_along(series)) {
doc.addTag.nc("value", NULL,
attrs = c(value = series[v]))
}
}
doc.closeTag()
doc.closeTag()
}
}
}
doc.closeTag()
fname
} |
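# Minimal usage sketch for write.tridas above (it depends on dplR internals
# such as simpleXML() and tridas.vocabulary(), so the call is shown commented
# out; all data values are illustrative):
# rwl <- data.frame(T1A = c(1.21, 1.33, NA), T1B = c(1.10, 1.02, 0.95),
#                   row.names = 2001:2003)
# ids <- data.frame(tree = c(1, 1), core = c(1, 2))
# write.tridas(rwl.df = rwl, fname = "example.xml", ids = ids,
#              prec = 0.01, taxon = "Pinus sylvestris")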
RUS <- function(level){
x <- NULL
if(level==1){
x1 <- github.cssegisanddata.covid19(country = "Russia")
x2 <- ourworldindata.org(id = "RUS")
x <- full_join(x1, x2, by = "date")
}
if(level==2){
x <- github.cssegisanddata.covid19unified(iso = "RUS", level = level)
x$id <- id(x$id, iso = "RUS", ds = "github.cssegisanddata.covid19unified", level = level)
}
return(x)
} |
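# Usage sketch for RUS above (shown commented out because
# github.cssegisanddata.covid19() and friends are COVID19 package internals):
# x1 <- RUS(level = 1)  # country level: two sources joined by date
# x2 <- RUS(level = 2)  # region level via the unified CSSE dataset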
kronecker <- function (X, Y, FUN = "*", make.dimnames = FALSE, ...)
{
if (.isMethodsDispatchOn() && (isS4(X) || isS4(Y))) {
return(methods::kronecker(X, Y, FUN = FUN,
make.dimnames = make.dimnames, ...))
}
.kronecker(X, Y, FUN = FUN, make.dimnames = make.dimnames, ...)
}
.kronecker <- function (X, Y, FUN = "*", make.dimnames = FALSE, ...)
{
X <- as.array(X)
Y <- as.array(Y)
if (make.dimnames) {
dnx <- dimnames(X)
dny <- dimnames(Y)
}
dX <- dim(X)
dY <- dim(Y)
ld <- length(dX) - length(dY)
if (ld < 0L)
dX <- dim(X) <- c(dX, rep.int(1, -ld))
else if (ld > 0L)
dY <- dim(Y) <- c(dY, rep.int(1, ld))
opobj <- outer(X, Y, FUN, ...)
dp <- as.vector(t(matrix(1L:(2*length(dX)), ncol = 2)[, 2:1]))
opobj <- aperm(opobj, dp)
dim(opobj) <- dX * dY
if (make.dimnames && !(is.null(dnx) && is.null(dny))) {
if (is.null(dnx))
dnx <- vector("list", length(dX))
else if (ld < 0L)
dnx <- c(dnx, vector("list", -ld))
tmp <- which(sapply(dnx, is.null))
dnx[tmp] <- lapply(tmp, function(i) rep.int("", dX[i]))
if (is.null(dny))
dny <- vector("list", length(dY))
else if (ld > 0)
dny <- c(dny, vector("list", ld))
tmp <- which(sapply(dny, is.null))
dny[tmp] <- lapply(tmp, function(i) rep.int("", dY[i]))
k <- length(dim(opobj))
dno <- vector("list", k)
for (i in 1L:k) {
tmp <- outer(dnx[[i]], dny[[i]], FUN=paste, sep=":")
dno[[i]] <- as.vector(t(tmp))
}
dimnames(opobj) <- dno
}
opobj
}
`%x%` <- function(X, Y) kronecker(X, Y) |
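# Worked example for kronecker()/%x% above: every element of X scales a full
# copy of Y, so a 2x2 X and a 1x2 Y give a 2x4 result.
A <- matrix(1:4, nrow = 2)     # column-major: rows are (1, 3) and (2, 4)
B <- matrix(c(0, 1), nrow = 1)
A %x% B                        # rows: (0, 1, 0, 3) and (0, 2, 0, 4)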
design.strip <-
function (trt1, trt2,r, serie = 2, seed = 0, kinds = "Super-Duper",randomization=TRUE)
{
number<-10
if(serie>0) number<-10^serie
n1<-length(trt1)
n2<-length(trt2)
if (seed == 0) {
genera<-runif(1)
seed <-.Random.seed[3]
}
set.seed(seed, kinds)
a<-trt1[1:n1]
b<-trt2[1:n2]
if(randomization){
a<-sample(trt1,n1)
b<-sample(trt2,n2)
}
fila<-rep(b,n1)
columna <- a[gl(n1,n2)]
block <- rep(1,n1*n2)
if (r > 1) {
for (i in 2:r) {
a<-trt1[1:n1]
b<-trt2[1:n2]
if(randomization){
a<-sample(trt1,n1)
b<-sample(trt2,n2)
}
fila<-c(fila,rep(b,n1))
columna <- c(columna,a[gl(n1,n2)])
block <- c(block,rep(i,n1*n2))
}}
parameters<-list(design="strip",trt1=trt1,trt2=trt2,r=r,serie=serie,seed=seed,kinds=kinds)
plots <- block*number+1:(n1*n2)
book <- data.frame(plots, block = as.factor(block), column=as.factor(columna),row = as.factor(fila))
names(book)[3] <- deparse(substitute(trt1))
names(book)[4] <- deparse(substitute(trt2))
outdesign<-list(parameters=parameters,book=book)
return(outdesign)
} |
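# Example for design.strip() above: a strip-plot design with 3 x 2 treatment
# levels replicated in two blocks; seed fixed for reproducibility.
variety <- c("A", "B", "C")
dose <- c("low", "high")
d <- design.strip(variety, dose, r = 2, seed = 42)
head(d$book)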
saveOutput <- function(peakdet, filename){
outmat <- cbind(peakdet$peakgenes, peakdet$peakloc, peakdet$peakheight)
colnames(outmat) <- c('Gene Name', 'Peak Location', 'Peak Height')
write.table(outmat, filename, sep = '\t', quote = FALSE, row.names = FALSE, col.names = TRUE)
} |
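# Example for saveOutput() above: 'peakdet' is assumed to be a list carrying
# equal-length peakgenes, peakloc and peakheight vectors (hypothetical values).
pd <- list(peakgenes = c("g1", "g2"), peakloc = c(104L, 257L), peakheight = c(3.2, 5.1))
saveOutput(pd, tempfile(fileext = ".txt"))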
\donttest{
library(ggPMX)
report_dir <- tempdir()
ctr <- theophylline()
ctr %>% pmx_report(
name = "my_report",
save_dir = report_dir,
format="report")
ctr <- theophylline()
ctr %>% pmx_report(
name = "my_report",
save_dir = report_dir,
format="plots")
ctr <- theophylline()
ctr %>% pmx_report(
name = "my_report",
save_dir = report_dir,
format="both")
ctr <- theophylline()
ctr %>% pmx_report(
name = "my_report",
save_dir = report_dir,
footnote=TRUE,
format="plots")
ctr <- theophylline()
custom_template <-
file.path( system.file(package = "ggPMX"),"examples","templates","custom_report.Rmd")
ctr %>% pmx_report(
name="report2",
save_dir = report_dir,
template=custom_template,
format="both"
)
ctr <- theophylline()
misc_template <-
file.path( system.file(package = "ggPMX"),"examples","templates","misc.Rmd")
ctr %>% pmx_report(
name="misc",
save_dir = report_dir,
template=misc_template,
format="both"
)
} |
get_budget <- function(year,
period,
cod,
simple = FALSE,
annex = NULL,
verbose = FALSE) {
get(
type = "rreo",
an_exercicio = year,
nr_periodo = period,
id_ente = cod,
co_tipo_demonstrativo = if (simple) "RREO Simplificado" else "RREO",
no_anexo = if (!is.null(annex)) paste0("RREO-Anexo ", annex) else annex,
verbose = verbose
)
} |
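# Usage sketch for get_budget() above: 'get' is the package-internal request
# helper (not base::get), so this needs the rest of the package and network
# access; the argument values are hypothetical.
if (FALSE) {
  rreo <- get_budget(year = 2020, period = 1, cod = 1, annex = "01")
}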
"gamList"
"sds"
"crv"
"countMatrix"
"celltype" |
library("MVA")
set.seed(280875)
library("lattice")
lattice.options(default.theme =
function()
standard.theme("pdf", color = FALSE))
if (file.exists("deparse.R")) {
if (!file.exists("figs")) dir.create("figs")
source("deparse.R")
options(prompt = "R> ", continue = "+ ", width = 64,
digits = 4, show.signif.stars = FALSE, useFancyQuotes = FALSE)
options(SweaveHooks = list(onefig = function() {par(mfrow = c(1,1))},
twofig = function() {par(mfrow = c(1,2))},
figtwo = function() {par(mfrow = c(2,1))},
threefig = function() {par(mfrow = c(1,3))},
figthree = function() {par(mfrow = c(3,1))},
fourfig = function() {par(mfrow = c(2,2))},
sixfig = function() {par(mfrow = c(3,2))},
nomar = function() par("mai" = c(0, 0, 0, 0))))
}
bc <- c(
0.290,
0.202, 0.415,
-0.055, 0.285, 0.419,
-0.105, -0.376, -0.521, -0.877,
-0.252, -0.349, -0.441, -0.076, 0.206,
-0.229, -0.164, -0.145, 0.023, 0.034, 0.192,
0.058, -0.129, -0.076, -0.131, 0.151, 0.077, 0.423)
blood_sd <- c(rblood = 0.371, plate = 41.253, wblood = 1.935,
neut = 0.077, lymph = 0.071, bilir = 4.037,
sodium = 2.732, potass = 0.297)
blood_corr <- diag(length(blood_sd)) / 2
blood_corr[upper.tri(blood_corr)] <- bc
blood_corr <- blood_corr + t(blood_corr)
blood_cov <- blood_corr * outer(blood_sd, blood_sd, "*")
blood_corr
blood_sd
blood_pcacov <- princomp(covmat = blood_cov)
summary(blood_pcacov, loadings = TRUE)
blood_pcacor <- princomp(covmat = blood_corr)
summary(blood_pcacor, loadings = TRUE)
plot(blood_pcacor$sdev^2, xlab = "Component number",
ylab = "Component variance", type = "l", main = "Scree diagram")
plot(log(blood_pcacor$sdev^2), xlab = "Component number",
ylab = "log(Component variance)", type="l",
main = "Log(eigenvalue) diagram")
"headsize" <-
matrix(c(191, 195, 181, 183, 176, 208, 189, 197, 188, 192, 179, 183, 174, 190, 188, 163, 195, 186, 181, 175, 192, 174,
176, 197, 190, 155, 149, 148, 153, 144, 157, 150, 159, 152, 150, 158, 147, 150, 159, 151, 137, 155, 153,
145, 140, 154, 143, 139, 167, 163, 179, 201, 185, 188, 171, 192, 190, 189, 197, 187, 186, 174, 185, 195,
187, 161, 183, 173, 182, 165, 185, 178, 176, 200, 187, 145, 152, 149, 149, 142, 152, 149, 152, 159, 151,
148, 147, 152, 157, 158, 130, 158, 148, 146, 137, 152, 147, 143, 158, 150)
, nrow = 25, ncol = 4
, dimnames = list(character(0)
, c("head1", "breadth1", "head2", "breadth2")))
x <- headsize
headsize <- as.data.frame(headsize)
toLatex(HSAURtable(headsize), pcol = 2,
caption = "Head Size Data.",
label = "ch:PCA:headsize:tab", rownames = FALSE)
headsize <- x
head_dat <- headsize[, c("head1", "head2")]
colMeans(head_dat)
cov(head_dat)
head_pca <- princomp(x = head_dat)
head_pca
print(summary(head_pca), loadings = TRUE)
s1 <- round(diag(cov(head_pca$scores))[1], 3)
s2 <- round(diag(cov(head_pca$scores))[2], 3)
s <- summary(head_pca)
l1 <- round(s$loadings[,1], 2)
l2 <- round(s$loadings[,2], 2)
diag(cov(head_pca$scores))
a1 <- 183.84 - 0.721 * 185.72 / 0.693   # line through the mean along the first PC
b1 <- 0.721 / 0.693
a2 <- 183.84 - (-0.693 * 185.72 / 0.721) # and along the second PC
b2 <- -0.693 / 0.721
plot(head_dat, xlab = "First son's head length (mm)",
ylab = "Second son's head length")
abline(a1, b1)
abline(a2, b2, lty = 2)
xlim <- range(head_pca$scores[,1])
plot(head_pca$scores, xlim = xlim, ylim = xlim)
data("heptathlon",package="HSAUR2")
toLatex(HSAURtable(heptathlon), pcol = 1,
caption = "Results of Olympic heptathlon, Seoul, 1988.",
label = "ch:PCA:heptathlon:tab",
rownames = TRUE)
heptathlon$hurdles <- with(heptathlon, max(hurdles)-hurdles)
heptathlon$run200m <- with(heptathlon, max(run200m)-run200m)
heptathlon$run800m <- with(heptathlon, max(run800m)-run800m)
score <- which(colnames(heptathlon) == "score")
round(cor(heptathlon[,-score]), 2)
plot(heptathlon[,-score])
plot(heptathlon[,-score], pch = ".", cex = 1.5)
heptathlon <- heptathlon[-grep("PNG", rownames(heptathlon)),]
score <- which(colnames(heptathlon) == "score")
round(cor(heptathlon[,-score]), 2)
plot(heptathlon[,-score], pch = ".", cex = 1.5)
op <- options(digits = 2)
heptathlon_pca <- prcomp(heptathlon[, -score], scale = TRUE)
print(heptathlon_pca)
summary(heptathlon_pca)
a1 <- heptathlon_pca$rotation[,1]
a1
center <- heptathlon_pca$center
scale <- heptathlon_pca$scale
hm <- as.matrix(heptathlon[,-score])
drop(scale(hm, center = center, scale = scale) %*%
heptathlon_pca$rotation[,1])
predict(heptathlon_pca)[,1]
sdev <- heptathlon_pca$sdev
prop12 <- round(sum(sdev[1:2]^2)/sum(sdev^2)*100, 0)
plot(heptathlon_pca, main = "")
cor(heptathlon$score, heptathlon_pca$x[,1])
plot(heptathlon$score, heptathlon_pca$x[,1])
data("USairpollution", package = "HSAUR2")
panel.hist <- function(x, ...) {
usr <- par("usr"); on.exit(par(usr))
par(usr = c(usr[1:2], 0, 1.5) )
h <- hist(x, plot = FALSE)
breaks <- h$breaks; nB <- length(breaks)
y <- h$counts; y <- y/max(y)
rect(breaks[-nB], 0, breaks[-1], y, col="grey", ...)
}
USairpollution$negtemp <- USairpollution$temp * (-1)
USairpollution$temp <- NULL
pairs(USairpollution[,-1], diag.panel = panel.hist,
pch = ".", cex = 1.5)
cor(USairpollution[,-1])
usair_pca <- princomp(USairpollution[,-1], cor = TRUE)
summary(usair_pca, loadings = TRUE)
pairs(usair_pca$scores[,1:3], ylim = c(-6, 4), xlim = c(-6, 4),
panel = function(x,y, ...) {
text(x, y, abbreviate(row.names(USairpollution)),
cex = 0.6)
bvbox(cbind(x,y), add = TRUE)
})
out <- sapply(1:6, function(i) {
plot(USairpollution$SO2,usair_pca$scores[,i],
xlab = paste("PC", i, sep = ""),
ylab = "Sulphur dioxide concentration")
})
usair_reg <- lm(SO2 ~ usair_pca$scores,
data = USairpollution)
summary(usair_reg)
tmp <- heptathlon[, -score]
rownames(tmp) <- abbreviate(gsub(" \\(.*", "", rownames(tmp)))
biplot(prcomp(tmp, scale = TRUE), col = c("black", "darkgray"), xlim =
c(-0.5, 0.7), cex = 0.7)
headsize.std <- sweep(headsize, 2,
apply(headsize, 2, sd), FUN = "/")
R <- cor(headsize.std)
r11 <- R[1:2, 1:2]
r22 <- R[-(1:2), -(1:2)]
r12 <- R[1:2, -(1:2)]
r21 <- R[-(1:2), 1:2]
(E1 <- solve(r11) %*% r12 %*% solve(r22) %*%r21)
(E2 <- solve(r22) %*% r21 %*% solve(r11) %*%r12)
(e1 <- eigen(E1))
(e2 <- eigen(E2))
p <- function(x) formatC(x, format = "f", digits = 2)
f <- function(x, add = 0) paste(ifelse(x < 0, "-", "+"), p(abs(x)), "x_", 1:length(x) + add,
collapse = "")
ff <- function(x, xname) paste(ifelse(x < 0, "-", "+"), p(abs(x)), "\\\\text{", xname, "}",
collapse = "")
girth1 <- headsize.std[,1:2] %*% e1$vectors[,1]
girth2 <- headsize.std[,3:4] %*% e2$vectors[,1]
shape1 <- headsize.std[,1:2] %*% e1$vectors[,2]
shape2 <- headsize.std[,3:4] %*% e2$vectors[,2]
(g <- cor(girth1, girth2))
(s <- cor(shape1, shape2))
plot(girth1, girth2)
plot(shape1, shape2)
depr <- c(
0.212,
0.124, 0.098,
-0.164, 0.308, 0.044,
-0.101, -0.207, -0.106, -0.208,
-0.158, -0.183, -0.180, -0.192, 0.492)
LAdepr <- diag(6) / 2
LAdepr[upper.tri(LAdepr)] <- depr
LAdepr <- LAdepr + t(LAdepr)
rownames(LAdepr) <- colnames(LAdepr) <- c("CESD", "Health", "Gender", "Age", "Edu", "Income")
x <- LAdepr
LAdepr <- as.data.frame(LAdepr)
toLatex(HSAURtable(LAdepr),
caption = "Los Angeles Depression Data.",
label = "ch:PCA:LAdepr:tab", rownames = FALSE)
LAdepr <- x
r11 <- LAdepr[1:2, 1:2]
r22 <- LAdepr[-(1:2), -(1:2)]
r12 <- LAdepr[1:2, -(1:2)]
r21 <- LAdepr[-(1:2), 1:2]
(E1 <- solve(r11) %*% r12 %*% solve(r22) %*%r21)
(E2 <- solve(r22) %*% r21 %*% solve(r11) %*%r12)
(e1 <- eigen(E1))
(e2 <- eigen(E2)) |
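# The canonical correlations between the {CESD, Health} and {Gender, Age, Edu,
# Income} variable sets above are the square roots of the (shared) eigenvalues
# of E1 and E2; the first one is the maximal correlation attainable between
# linear combinations of the two sets.
sqrt(e1$values)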
fredr_category_children <- function(category_id,
...,
realtime_start = NULL,
realtime_end = NULL) {
check_dots_empty(...)
check_not_null(category_id, "category_id")
user_args <- capture_args(
category_id = category_id,
realtime_start = realtime_start,
realtime_end = realtime_end
)
fredr_args <- list(
endpoint = "category/children"
)
do.call(fredr_request, c(fredr_args, user_args))
} |
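# Usage sketch for fredr_category_children() above: requires a FRED API key to
# be set for the request to succeed; category 0 is the root of the FRED
# category tree.
if (FALSE) {
  fredr_category_children(category_id = 0L)
}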
updateCompareObject.matches <- function(x, compObj){
compObj$matches <- x
return(compObj)
} |
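# Example for updateCompareObject.matches() above: stores a match result under
# the 'matches' field of a compareObject-style list (a plain list works here).
cmp <- updateCompareObject.matches(matrix(TRUE, 2, 2), list())
str(cmp$matches)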
NULL
add_duration_to_duration <- function(dur2, dur1)
new("Duration", [email protected] + [email protected])
add_duration_to_date <- function(dur, date) {
if (is.Date(date)) {
date <- as.POSIXct(date)
ans <- with_tz(date + [email protected], "UTC")
if (all(is.na(ans))) return(as.Date(ans))
if (all(hour(na.omit(ans)) == 0 &
minute(na.omit(ans)) == 0 &
second(na.omit(ans)) == 0)) {
return(as.Date(ans))
}
return(ans)
}
new <- date + [email protected]
attr(new, "tzone") <- tz(date)
reclass_date(new, date)
}
add_period_to_period <- function(per2, per1) {
new("Period", [email protected] + [email protected],
year = per1@year + per2@year,
month = per1@month + per2@month,
day = per1@day + per2@day,
hour = per1@hour + per2@hour,
minute = per1@minute + per2@minute)
}
add_period_to_date <- function(per, date) {
lt <- as.POSIXlt(date)
ms <- month(per) + year(per) * 12
lt <- add_months(lt, ms)
if (is.Date(date)) {
new <- update(as.Date(lt),
days = mday(lt) + per@day,
hours = per@hour,
minutes = per@minute,
seconds = [email protected])
return(new)
}
new <- update(lt,
days = mday(lt) + per@day,
hours = hour(lt) + per@hour,
minutes = minute(lt) + per@minute,
seconds = second(lt) + [email protected])
reclass_date(new, date)
}
add_months <- function(mt, mos) {
nnas <- !is.na(mos)
if (all(mos[nnas] == 0L)) {
return(mt)
}
mt$mon <- mt$mon + mos
ndays <- as.numeric(format.POSIXlt(mt, "%d", usetz = FALSE))
mt$mon[mt$mday != ndays] <- NA
mt
}
add_number_to_duration <- function(num, dur) {
new("Duration", [email protected] + num)
}
add_number_to_period <- function(num, per) {
slot(per, ".Data") <- [email protected] + num
per
}
setMethod("+", signature(e1 = "Duration", e2 = "Duration"),
function(e1, e2) add_duration_to_duration(e2, e1))
setMethod("+", signature(e1 = "Duration", e2 = "Date"),
function(e1, e2) add_duration_to_date(e1, e2))
setMethod("+", signature(e1 = "Duration", e2 = "difftime"),
function(e1, e2) add_duration_to_duration(as.duration(e2), e1))
setMethod("+", signature(e1 = "Duration", e2 = "numeric"),
function(e1, e2) add_number_to_duration(e2, e1))
setMethod("+", signature(e1 = "Duration", e2 = "POSIXct"),
function(e1, e2) add_duration_to_date(e1, e2))
setMethod("+", signature(e1 = "Duration", e2 = "POSIXlt"),
function(e1, e2) add_duration_to_date(e1, e2))
setMethod("+", signature(e1 = "Period", e2 = "Period"),
function(e1, e2) add_period_to_period(e2, e1))
setMethod("+", signature(e1 = "Period", e2 = "Date"),
function(e1, e2) add_period_to_date(e1, e2))
setMethod("+", signature(e1 = "Period", e2 = "numeric"),
function(e1, e2) add_number_to_period(e2, e1))
setMethod("+", signature(e1 = "Period", e2 = "POSIXct"),
function(e1, e2) add_period_to_date(e1, e2))
setMethod("+", signature(e1 = "Period", e2 = "POSIXlt"),
function(e1, e2) add_period_to_date(e1, e2))
setMethod("+", signature(e1 = "Date", e2 = "Duration"),
function(e1, e2) add_duration_to_date(e2, e1))
setMethod("+", signature(e1 = "Date", e2 = "Period"),
function(e1, e2) add_period_to_date(e2, e1))
setMethod("+", signature(e1 = "difftime", e2 = "Duration"),
function(e1, e2) as.difftime(e2, units = "secs") + e1)
setMethod("+", signature(e1 = "numeric", e2 = "Duration"),
function(e1, e2) add_number_to_duration(e1, e2))
setMethod("+", signature(e1 = "numeric", e2 = "Period"),
function(e1, e2) add_number_to_period(e1, e2))
setMethod("+", signature(e1 = "POSIXct", e2 = "Duration"),
function(e1, e2) add_duration_to_date(e2, e1))
setMethod("+", signature(e1 = "POSIXct", e2 = "Period"),
function(e1, e2) add_period_to_date(e2, e1))
setMethod("+", signature(e1 = "POSIXlt", e2 = "Duration"),
function(e1, e2) add_duration_to_date(e2, e1))
setMethod("+", signature(e1 = "POSIXlt", e2 = "Period"),
function(e1, e2) add_period_to_date(e2, e1)) |
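# Quick sanity check of the "+" methods above (lubridate semantics): Durations
# are exact spans of seconds, while Periods follow the civil calendar, so
# impossible dates roll to NA. Guarded because it needs the Duration/Period
# classes from the surrounding package.
if (FALSE) {
  as.POSIXct("2020-01-01", tz = "UTC") + new("Duration", 86400) # exactly +1 day
  as.Date("2020-01-31") + new("Period", 0, year = 0, month = 1, day = 0,
                              hour = 0, minute = 0)             # NA: no Feb 31
}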
CDpre <- function(DATA, Jk, R, Posit, GroupStructure, LASSO, MaxIter){
DATA <- data.matrix(DATA)
DistPosition <- setdiff(1:R, Posit)
I_Data <- dim(DATA)[1]
sumJk <- dim(DATA)[2]
eps <- 10^(-12)
if(missing(MaxIter)){
MaxIter <- 400
}
P <- matrix(stats::rnorm(sumJk * R), nrow = sumJk, ncol = R)
P[GroupStructure == 0]<-0
Pt <- t(P)
PIndexforLasso <- Pt
PIndexforLasso[Posit, ] <- 1
PIndexforLasso[DistPosition, ] <- 0
PIndexforGLasso <- Pt
PIndexforGLasso[Posit, ] <- 0
PIndexforGLasso[DistPosition, ] <- 1
pen1 <- LASSO*sum(abs(P[, Posit]))
sqP <- P^2
residual <- sum(DATA^2)
Lossc <- residual + pen1
conv <- 0
iter <- 1
Lossvec <- array()
while (conv == 0){
if (LASSO == 0){
SVD_DATA <- svd(DATA, R, R)
Tmat <- SVD_DATA$u
}
else {
A <- Pt %*% t(DATA)
SVD_DATA <- svd(A, R, R)
Tmat <- SVD_DATA$v %*% t(SVD_DATA$u)
}
residual <- sum((DATA - Tmat %*% Pt)^2)
Lossu <- residual + pen1
if (LASSO == 0){
P <- t(DATA) %*% Tmat
P[GroupStructure == 0] <- 0
Pt <- t(P)
}
else{
for (r in 1:R){
if (r %in% Posit) {
for (j in 1:sumJk){
ols <- t(DATA[, j]) %*% Tmat[, r]
Lambda <- 0.5 * LASSO
if (ols < 0 & abs(ols) > Lambda) {
P[j, r] <- ols + Lambda
}
else if (ols > 0 & abs(ols) > Lambda) {
P[j, r] <- ols - Lambda
}
else {
P[j, r] <- 0
}
}
}
else {
for (j in 1:sumJk){
P[j, r] <- t(DATA[, j]) %*% Tmat[, r]
}
}
}
P[GroupStructure == 0] <- 0
Pt <- t(P)
}
pen1 <- LASSO*sum(abs(P[, Posit]))
sqP <- P^2
residual <- sum((DATA - Tmat %*% Pt)^2)
Lossu2 <- residual + pen1
    if (abs(Lossc - Lossu) < 10^(-9) || iter > MaxIter || LASSO == 0) {
      Loss <- Lossu
      lassopen <- pen1
      P[abs(P) <= 2 * eps] <- 0
      conv <- 1
    }
Lossvec[iter] <- Lossu
iter <- iter + 1
Lossc <- Lossu2
}
return_varselect <- list()
return_varselect$Pmatrix <- P
return_varselect$Tmatrix <- Tmat
return_varselect$Loss <- Loss
return_varselect$Lossvec <- Lossvec
return(return_varselect)
} |
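# Usage sketch for CDpre() above: simulated two-block data; GroupStructure is
# a binary sumJk x R matrix marking which loadings are free, and Posit gives
# the component(s) subject to the lasso penalty (values here are hypothetical).
if (FALSE) {
  X <- matrix(rnorm(20 * 6), 20, 6)
  fit <- CDpre(X, Jk = c(3, 3), R = 2, Posit = 1,
               GroupStructure = matrix(1, 6, 2), LASSO = 0.1, MaxIter = 100)
  fit$Loss
}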
recombination_wright <- function(X, M, ...) {
env <- parent.frame()
assertthat::assert_that(is.matrix(X), is.numeric(X),
is.matrix(M), is.numeric(M),
assertthat::are_equal(dim(X), dim(M)),
all(assertthat::has_name(env, c("J", "probpars", "nfe"))))
f.X <- env$J
f.M <- evaluate_population(probpars = env$probpars,
Pop = M)
env$nfe <- env$nfe + nrow(M)
X.is.best <- matrix(rep(f.X <= f.M,
times = ncol(X)),
ncol = ncol(X),
byrow = FALSE)
C1 <- X * X.is.best + M * !X.is.best
C2 <- M * X.is.best + X * !X.is.best
return (randM(X) * (C1 - C2) + C1)
} |
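# Note on recombination_wright() above: this is Wright's heuristic crossover;
# C1/C2 are each pair's better/worse parent, and offspring are sampled on the
# segment starting at C1 and extending away from C2 (randM() is assumed to be
# the framework's uniform random-matrix helper).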
HollBivSym<-function(x,y=NULL){
check<-0
if((is.null(ncol(x))||ncol(x)==1)&&!is.null(y)){
check=1
}
if(max(dim(x)[2],1)==2){
y<-x[!is.na(x[,2]),2]
x<-x[!is.na(x[,1]),1]
check=1
}
 if(!check){
   stop('invalid form for entered data')
 }
obs.data<-cbind(x,y)
a.vec<-apply(obs.data,1,min)
b.vec<-apply(obs.data,1,max)
n<-length(a.vec)
test<-function(r,c) {as.numeric((a.vec[c]<b.vec[r])&&(b.vec[r]<=b.vec[c])&&(a.vec[r]<=a.vec[c]))}
myVecFun <- Vectorize(test,vectorize.args = c('r','c'))
d.mat<-outer(1:n, 1:n, FUN=myVecFun)
A.calc<-function(r.vec){
s.vec<-2*r.vec-1
T.vec<-s.vec%*%d.mat
A.obs<-sum(T.vec*T.vec)/n^2
return(A.obs)
}
A.obs<-A.calc(apply(obs.data,1,function(x){x[1]<x[2]}))
return(A.obs)
} |
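# Example for HollBivSym() above: Hollander's A statistic for bivariate
# symmetry on simulated paired data.
set.seed(1)
x <- rnorm(15)
y <- x + rnorm(15, sd = 0.5)
HollBivSym(x, y)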
ic_find <- function(x, pattern) {
  # Flag the elements of 'x' that contain "<pattern>:".
  pattern <- paste0(pattern, ":")
  grepl(pattern, x)
} |
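# Example for ic_find() above:
ic_find(c("age: 30", "name unknown"), "age")  # TRUE FALSE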
"print.jointPenal" <- function (x, digits = max(options()$digits - 4, 6), ...)
{
if(is.null(x$family)){
if (x$istop == 1){
if (any(x$nvartimedep != 0)) par(mfrow=c(1,2))
if ((x$nvartimedep[1] != 0) & (x$istop == 1)){
for (i in 0:(x$nvartimedep[1]-1)){
matplot(x$BetaTpsMat[,1],x$BetaTpsMat[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Recurrent : ",x$Names.vardep[i+1]),ylim=c(min(x$BetaTpsMat[,-1]),max(x$BetaTpsMat[,-1])))
}
}
if ((x$nvartimedep[2] != 0) & (x$istop == 1)){
for (i in 0:(x$nvartimedep[2]-1)){
matplot(x$BetaTpsMatDc[,1],x$BetaTpsMatDc[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Death : ",x$Names.vardepdc[i+1]),ylim=c(min(x$BetaTpsMatDc[,-1]),max(x$BetaTpsMatDc[,-1])))
}
}
}
if (!is.null(cl <- x$call)){
cat("Call:\n")
dput(cl)
if (x$AG == TRUE){
cat("\n Calendar timescale")
}
if (x$intcens == TRUE){
cat("\n interval censored data used")
}
cat("\n")
}
if (!is.null(x$fail)) {
cat(" frailtyPenal failed.", x$fail, "\n")
return()
}
savedig <- options(digits = digits)
on.exit(options(savedig))
coef <- x$coef
nvar <- length(x$coef)
if (is.null(coef)){
x$varH<-matrix(x$varH)
x$varHIH<-matrix(x$varHIH)
}
if (x$typeof == 0){
if (x$n.knots.temp < 4){
cat("\n")
cat(" The minimum number of knots is 4","\n")
cat("\n")
}
if (x$n.knots.temp > 20){
cat("\n")
cat(" The maximum number of knots is 20","\n")
}
}else{
if ((x$typeof == 1) & (x$indic.nb.intR == 1)) cat(" The maximum number of time intervals is 20","\n")
if ((x$typeof == 1) & (x$indic.nb.intD == 1)) cat(" The maximum number of time intervals is 20","\n")
}
if (x$logNormal == 0) frail <- x$theta
else frail <- x$sigma2
indic_alpha <- x$indic_alpha
if (x$istop == 1){
if (!is.null(coef)){
if (indic_alpha == 1 || x$joint.clust==2) {
seH <- sqrt(diag(x$varH))[-c(1,2)]
seHIH <- sqrt(diag(x$varHIH))[-c(1,2)]
}else{
seH <- sqrt(diag(x$varH))[-1]
seHIH <- sqrt(diag(x$varHIH))[-1]
}
if (x$typeof == 0){
tmp <- cbind(coef, exp(coef), seH, seHIH, coef/seH, ifelse(signif(1 - pchisq((coef/seH)^2, 1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((coef/seH)^2, 1), digits - 1)))
if(x$global_chisq.test==1) tmpwald <- cbind(x$global_chisq, x$dof_chisq, ifelse(x$p.global_chisq == 0, "< 1e-16", x$p.global_chisq))
if(x$global_chisq.test_d==1) tmpwalddc <- cbind(x$global_chisq_d, x$dof_chisq_d, ifelse(x$p.global_chisq_d == 0, "< 1e-16", x$p.global_chisq_d))
}else{
tmp <- cbind(coef, exp(coef), seH, coef/seH, ifelse(signif(1 - pchisq((coef/seH)^2, 1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((coef/seH)^2, 1), digits - 1)))
if(x$global_chisq.test==1) tmpwald <- cbind(x$global_chisq, x$dof_chisq, ifelse(x$p.global_chisq == 0, "< 1e-16", x$p.global_chisq))
if(x$global_chisq.test_d==1) tmpwalddc <- cbind(x$global_chisq_d, x$dof_chisq_d, ifelse(x$p.global_chisq_d == 0, "< 1e-16", x$p.global_chisq_d))
}
cat("\n")
if (x$joint.clust == 0) cat(" For clustered data","\n")
if (x$joint.clust == 0){
if (x$logNormal == 0){
cat(" Joint gamma frailty model for a survival and a terminal event processes","\n")
}else{
cat(" Joint Log-Normal frailty model for a survival and a terminal event processes","\n")
}
}else{
if ((x$logNormal == 0)&(x$joint.clust==1)){
cat(" Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else if ((x$logNormal == 0)&(x$joint.clust==2)){
cat(" General Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else{
cat(" Joint Log-Normal frailty model for recurrent and a terminal event processes","\n")
}
}
if (x$typeof == 0){
cat(" using a Penalized Likelihood on the hazard function","\n")
}else{
cat(" using a Parametrical approach for the hazard function","\n")
}
if (any(x$nvartimedep != 0)) cat(" and some time-dependent covariates","\n")
if (x$n.strat>1) cat(" (Stratification structure used for recurrences) :",x$n.strat,"strata \n")
if(x$ncc==TRUE)cat(" and considering weights for the nested case-control design \n")
if (x$typeof == 0){
if(x$global_chisq.test==1){
dimnames(tmpwald) <- list(x$names.factor,c("chisq", "df", "global p"))
}
if(x$global_chisq.test_d==1){
dimnames(tmpwalddc) <- list(x$names.factordc,c("chisq", "df", "global p"))
}
dimnames(tmp) <- list(names(coef), c("coef", "exp(coef)",
"SE coef (H)", "SE coef (HIH)", "z", "p"))
}else{
if(x$global_chisq.test==1){
dimnames(tmpwald) <- list(x$names.factor,c("chisq", "df", "global p"))
}
if(x$global_chisq.test_d==1){
dimnames(tmpwalddc) <- list(x$names.factordc,c("chisq", "df", "global p"))
}
dimnames(tmp) <- list(names(coef), c("coef", "exp(coef)",
"SE coef (H)", "z", "p"))
}
cat("\n")
if (x$nvarnotdep[1] == 0){
if (x$joint.clust == 0) cat("Survival event:\n")
if ((x$joint.clust == 1) | (x$joint.clust == 2)) cat("Recurrences:\n")
cat("------------- \n")
cat("No constant coefficients, only time-varying effects of the covariates \n")
}else{
if (x$noVar1 == 0){
if (x$joint.clust == 0) cat("Survival event:\n")
if (x$joint.clust >= 1) cat("Recurrences:\n")
cat("------------- \n")
prmatrix(tmp[1:x$nvarnotdep[1], ,drop=FALSE])
if(x$global_chisq.test==1){
cat("\n")
prmatrix(tmpwald)
}
}
}
cat("\n")
if (x$nvarnotdep[2] == 0){
cat("Terminal event:\n")
cat("---------------- \n")
cat("No constant coefficients, only time-varying effects of the covariates \n")
}else{
if (x$noVar2 == 0){
cat("Terminal event:\n")
cat("---------------- \n")
prmatrix(tmp[-c(1:x$nvarnotdep[1]), ,drop=FALSE])
if(x$global_chisq.test_d==1){
cat("\n")
prmatrix(tmpwalddc)
}
}
}
cat("\n")
}
temp <- diag(x$varH)[1]
seH.frail <- sqrt(((2 * (frail^0.5))^2) * temp)
temp <- diag(x$varHIH)[1]
seHIH.frail <- sqrt(((2 * (frail^0.5))^2) * temp)
if (x$noVar1 == 1){
cat("\n")
if (x$joint.clust == 0) cat(" Survival event: No covariates \n")
if (x$joint.clust >= 1) cat(" Recurrences: No covariates \n")
cat(" ----------- \n")
}
if (x$noVar2 == 1){
cat("\n")
cat(" Terminal event: No covariates \n")
cat(" -------------- \n")
cat("\n")
}
cat(" Frailty parameters: \n")
if (x$logNormal == 0){
if (indic_alpha == 1 & x$joint.clust<=1){
cat(" theta (variance of Frailties, w):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
cat(" alpha (w^alpha for terminal event):", x$alpha, "(SE (H):",sqrt(diag(x$varH))[2], ")", "p =", ifelse(signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1)), "\n")
}else if (x$joint.clust ==2) {
cat(" theta (variance of u, association between recurrences and terminal event):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
cat(" eta (variance of v, intra-subject correlation):", x$eta, "(SE (H):",sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]), ")", "p =", ifelse(signif(1 - pnorm (x$eta/sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]),1), digits - 1) == 0, "< 1e-16", signif(1 - pnorm (x$eta/sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]),1), digits - 1)), "\n")
} else {
cat(" theta (variance of Frailties, w):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
cat(" alpha is fixed (=1) \n")
}
}else{
cat(" sigma square (variance of Frailties, eta):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
if (indic_alpha == 1) cat(" alpha (exp(alpha.eta) for terminal event):", x$alpha, "(SE (H):",sqrt(diag(x$varH))[2], ")", "p =", ifelse(signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1)), "\n")
else cat(" alpha is fixed (=1) \n")
}
cat(" \n")
if (x$typeof == 0){
cat(paste("Penalized marginal log-likelihood =", round(x$logLikPenal,2)))
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"likelihood =",signif(x$EPS[2],3),"gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat("Likelihood Cross-Validation (LCV) criterion in the semi parametrical case:\n")
cat(" approximate LCV =",x$LCV,"\n")
}else{
cat(paste(" marginal log-likelihood =", round(x$logLik,2)))
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"likelihood =",signif(x$EPS[2],3),"gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat(" AIC = Aikaike information Criterion =",x$AIC,"\n")
cat("\n")
cat("The expression of the Aikaike Criterion is:","\n")
cat(" 'AIC = (1/n)[np - l(.)]'","\n")
if (x$typeof == 2){
cat("\n")
cat(" Scale for the weibull hazard function is :",round(x$scale.weib[1],2),round(x$scale.weib[2],2),"\n")
cat(" Shape for the weibull hazard function is :",round(x$shape.weib[1],2),round(x$shape.weib[2],2),"\n")
cat("\n")
cat("The expression of the Weibull hazard function is:","\n")
cat(" 'lambda(t) = (shape.(t^(shape-1)))/(scale^shape)'","\n")
cat("The expression of the Weibull survival function is:","\n")
cat(" 'S(t) = exp[- (t/scale)^shape]'")
cat("\n")
}
}
cat("\n")
if (x$joint.clust == 0){
cat(" n observations=", x$n, " n subjects=", x$ind, " n groups=", x$groups)
}else{
cat(" n observations=", x$n, " n subjects=", x$groups)
}
if (length(x$na.action)){
cat(" (", length(x$na.action), " observation deleted due to missing) \n")
}else{
cat("\n")
}
if (x$joint.clust == 0){
cat(" n events=", x$n.events)
}else{
cat(" n recurrent events=", x$n.events)
}
cat("\n")
cat(" n terminal events=", x$n.deaths)
cat("\n")
cat(" n censored events=" ,x$n.censored)
cat("\n")
cat(" number of iterations: ", x$n.iter,"\n")
if (x$logNormal == 0) {
cat(" Number of nodes for the Gauss-Laguerre quadrature: ", x$nb.gl,"\n")
}
else {cat(" Number of nodes for the Gauss-Hermite quadrature: ", x$nb.gh,"\n")}
if ((x$typeof == 1) & (x$indic.nb.intR == 1)){
cat(" Exact number of time intervals used: 20","\n")
}else{
if (x$typeof == 1) cat(" Exact number of time intervals used: ",x$nbintervR,"\n")
}
if ((x$typeof == 1) & (x$indic.nb.intD == 1)){
cat(" Exact number of time intervals used: 20","\n")
}else{
if (x$typeof == 1) cat(" Exact number of time intervals used: ",x$nbintervDC,"\n")
}
if (x$typeof == 0){
cat("\n")
cat(" Exact number of knots used: ", x$n.knots, "\n")
cat(" Value of the smoothing parameters: ", x$kappa, sep=" ")
cat("\n")
}
}else{
if (!is.null(coef)){
cat("\n")
if (x$joint.clust == 0) cat(" For clustered data","\n")
if (x$joint.clust == 0){
if (x$logNormal == 0){
cat(" Joint gamma frailty model for a survival and a terminal event processes","\n")
}else{
cat(" Joint Log-Normal frailty model for a survival and a terminal event processes","\n")
}
}else{
if ((x$logNormal == 0)&(x$joint.clust==1)){
cat(" Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else if ((x$logNormal == 0)&(x$joint.clust==2)){
cat(" General Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else{
cat(" Joint Log-Normal frailty model for recurrent and a terminal event processes","\n")
}
}
if (x$typeof == 0){
cat(" using a Penalized Likelihood on the hazard function","\n")
}else{
cat(" using a Parametrical approach for the hazard function","\n")
}
if (any(x$nvartimedep != 0)) cat(" and some time-dependent covariates","\n")
if (x$noVar1 == 1){
cat("\n")
if (x$joint.clust == 0) cat(" Survival event: No covariates \n")
if (x$joint.clust >= 1) cat(" Recurrences: No covariates \n")
cat(" ----------- \n")
}
if (x$noVar2 == 1){
cat("\n")
cat(" Terminal event: No covariates \n")
cat(" -------------- \n")
cat("\n")
}
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"likelihood =",signif(x$EPS[2],3),"gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat(" n=", x$n)
if (length(x$na.action)){
cat(" (", length(x$na.action), " observation deleted due to missing) \n")
}else{
cat("\n")
}
if (x$joint.clust == 0){
cat(" n events=", x$n.events)
}else{
cat(" n recurrent events=", x$n.events)
}
cat("\n")
cat(" n terminal events=", x$n.deaths)
cat("\n")
if (x$logNormal == 0) {
cat(" Number of nodes for the Gauss-Laguerre quadrature: ", x$nb.gl,"\n")
}
else {cat(" Number of nodes for the Gauss-Hermite quadrature: ", x$nb.gh,"\n")}
}
}
invisible()
}
else{
if (x$istop == 1){
if(x$family[2] != 3){
if ((x$nvartimedep[1] != 0) & (x$istop == 1)){
for (i in 0:(x$nvartimedep[1]-1)){
matplot(x$BetaTpsMat[,1],x$BetaTpsMat[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Recurrent : ",x$Names.vardep[i+1]),ylim=c(min(x$BetaTpsMat[,-1]),max(x$BetaTpsMat[,-1])))
}
}
}else{
if ((x$nvartimedep[1] != 0) & (x$istop == 1)){
par(mfrow=c(1,2))
trapz <- function(x,y){
idx = 2:length(x)
return (as.double( (x[idx] - x[idx-1]) %*% (y[idx] + y[idx-1])) / 2)
}
for (i in 0:(x$nvartimedep[1]-1)){
matplot(x$BetaTpsMat[,1],x$BetaTpsMat[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Recurrent : ",x$Names.vardep[i+1]),ylim=c(min(x$BetaTpsMat[,-1]),max(x$BetaTpsMat[,-1])))
nblignes = nrow(x$BetaTpsMat)-1
matcumul = matrix(NA, nrow = nblignes, ncol = 3)
abs = x$BetaTpsMat[, 1]
ord = x$BetaTpsMat[,(2:4)+4*i]
for(j in 1:nblignes){
matcumul[j, ] = c(
trapz(abs[1:(j+1)], ord[1:(j+1), 1]),
trapz(abs[1:(j+1)], ord[1:(j+1), 2]),
trapz(abs[1:(j+1)], ord[1:(j+1), 3])
)
}
matplot(x=abs[-1],
y=matcumul[,(1:3)],
col="blue", type="l", lty=c(1,2,2),
xlab="t",
ylab="Cumulative effect",
main=paste("Recurrent : ",x$Names.vardep[i+1])
)
}
}
}
if(x$family[1] != 3){
if ((x$nvartimedep[2] != 0) & (x$istop == 1)){
for (i in 0:(x$nvartimedep[2]-1)){
matplot(x$BetaTpsMatDc[,1],x$BetaTpsMatDc[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Death : ",x$Names.vardepdc[i+1]),ylim=c(min(x$BetaTpsMatDc[,-1]),max(x$BetaTpsMatDc[,-1])))
}
}
}else{
if ((x$nvartimedep[2] != 0) & (x$istop == 1)){
par(mfrow=c(1,2))
trapz <- function(x,y){
idx = 2:length(x)
return (as.double( (x[idx] - x[idx-1]) %*% (y[idx] + y[idx-1])) / 2)
}
for (i in 0:(x$nvartimedep[2]-1)){
matplot(x$BetaTpsMatDc[,1],x$BetaTpsMatDc[,(2:4)+4*i],col="blue",type="l",lty=c(1,2,2),xlab="t",ylab="beta(t)",main=paste("Death : ",x$Names.vardepdc[i+1]),ylim=c(min(x$BetaTpsMatDc[,-1]),max(x$BetaTpsMatDc[,-1])))
nblignes = nrow(x$BetaTpsMatDc)-1
matcumul = matrix(NA, nrow = nblignes, ncol = 3)
abs = x$BetaTpsMatDc[, 1]
ord = x$BetaTpsMatDc[,(2:4)+4*i]
for(j in 1:nblignes){
matcumul[j, ] = c(
trapz(abs[1:(j+1)], ord[1:(j+1), 1]),
trapz(abs[1:(j+1)], ord[1:(j+1), 2]),
trapz(abs[1:(j+1)], ord[1:(j+1), 3])
)
}
matplot(x=abs[-1],
y=matcumul[,(1:3)],
col="blue", type="l", lty=c(1,2,2),
main=paste("Death : ",x$Names.vardepdc[i+1]),
xlab="t",
ylab="Cumulative effect"
)
}
}
}
}
if (!is.null(cl <- x$call)){
cat("Call:\n")
dput(cl)
if (x$AG == TRUE){
cat("\n Calendar timescale")
}
if (x$intcens == TRUE){
cat("\n interval censored data used")
}
cat("\n")
}
if (!is.null(x$fail)) {
cat(" frailtyPenal failed.", x$fail, "\n")
return()
}
savedig <- options(digits = digits)
on.exit(options(savedig))
coef <- x$coef
nvar <- length(x$coef)
if (is.null(coef)){
x$varH<-matrix(x$varH)
x$varHIH<-matrix(x$varHIH)
}
if (x$typeof == 0){
if (x$n.knots.temp < 4){
cat("\n")
cat(" The minimum number of knots is 4","\n")
cat("\n")
}
if (x$n.knots.temp > 20){
cat("\n")
cat(" The maximum number of knots is 20","\n")
}
}else{
if ((x$typeof == 1) & (x$indic.nb.intR == 1)) cat(" The maximum number of time intervals is 20","\n")
if ((x$typeof == 1) & (x$indic.nb.intD == 1)) cat(" The maximum number of time intervals is 20","\n")
}
if (x$logNormal == 0) frail <- x$theta
else frail <- x$sigma2
indic_alpha <- x$indic_alpha
if (x$istop == 1){
if (!is.null(coef)){
if (indic_alpha == 1 || x$joint.clust==2) {
seH <- sqrt(diag(x$varH))[-c(1,2)]
seHIH <- sqrt(diag(x$varHIH))[-c(1,2)]
}else{
seH <- sqrt(diag(x$varH))[-1]
seHIH <- sqrt(diag(x$varHIH))[-1]
}
if (x$typeof == 0){
tmp <- cbind(coef, exp(coef), seH, seHIH, coef/seH, ifelse(signif(1 - pchisq((coef/seH)^2, 1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((coef/seH)^2, 1), digits - 1)))
if(x$global_chisq.test==1) tmpwald <- cbind(x$global_chisq, x$dof_chisq, ifelse(x$p.global_chisq == 0, "< 1e-16", x$p.global_chisq))
if(x$global_chisq.test_d==1) tmpwalddc <- cbind(x$global_chisq_d, x$dof_chisq_d, ifelse(x$p.global_chisq_d == 0, "< 1e-16", x$p.global_chisq_d))
}else{
tmp <- cbind(coef, exp(coef), seH, coef/seH, ifelse(signif(1 - pchisq((coef/seH)^2, 1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((coef/seH)^2, 1), digits - 1)))
if(x$global_chisq.test==1) tmpwald <- cbind(x$global_chisq, x$dof_chisq, ifelse(x$p.global_chisq == 0, "< 1e-16", x$p.global_chisq))
if(x$global_chisq.test_d==1) tmpwalddc <- cbind(x$global_chisq_d, x$dof_chisq_d, ifelse(x$p.global_chisq_d == 0, "< 1e-16", x$p.global_chisq_d))
}
if (x$joint.clust == 0) cat(" For clustered data","\n")
if (x$joint.clust == 0){
if (x$logNormal == 0){
cat(" Generalized Joint Survival Model with Shared Gamma Frailty","\n")
cat(" for a survival and a terminal event processes")
}else{
cat(" Generalized Joint Survival Model with Shared Log-Normal Frailty","\n")
cat(" a survival and a terminal event processes")
}
}else{
if ((x$logNormal == 0)&(x$joint.clust==1)){
cat(" Generalized Joint Survival Model with Shared Gamma Frailty","\n")
cat(" for recurrent events and a terminal event")
}
else if ((x$logNormal == 0)&(x$joint.clust==2)){
cat(" General Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else{
cat(" Generalized Joint Survival Model with Shared Log-Normal Frailty","\n")
cat(" for recurrent events and a terminal event")
}
}
if (x$typeof == 0){
cat(" using a Penalized Likelihood on the hazard function","\n")
}else{
cat(" using parametrical approaches","\n")
}
if (any(x$nvartimedep != 0)) cat(" and some time-dependent covariates","\n")
if (x$n.strat>1) cat(" (Stratification structure used for recurrences) :",x$n.strat,"strata \n")
if(x$ncc==TRUE)cat(" and considering weights for the nested case-control design \n")
if (x$typeof == 0){
if(x$global_chisq.test==1){
dimnames(tmpwald) <- list(x$names.factor,c("chisq", "df", "global p"))
}
if(x$global_chisq.test_d==1){
dimnames(tmpwalddc) <- list(x$names.factordc,c("chisq", "df", "global p"))
}
dimnames(tmp) <- list(names(coef), c("coef", "exp(coef)",
"SE coef (H)", "SE coef (HIH)", "z", "p"))
}else{
if(x$global_chisq.test==1){
dimnames(tmpwald) <- list(x$names.factor,c("chisq", "df", "global p"))
}
if(x$global_chisq.test_d==1){
dimnames(tmpwalddc) <- list(x$names.factordc,c("chisq", "df", "global p"))
}
dimnames(tmp) <- list(names(coef), c("coef", "exp(coef)",
"SE coef (H)", "z", "p"))
}
cat("\n")
if (x$nvarnotdep[1] == 0){
if (x$joint.clust == 0) cat("Survival event:\n")
if ((x$joint.clust == 1) | (x$joint.clust == 2)) cat("Recurrences:\n")
cat("------------- \n")
cat("No constant coefficients, only time-varying effects of the covariates \n")
}else{
if (x$noVar1 == 0){
if (x$joint.clust == 0) cat("Survival event:\n")
if (x$joint.clust >= 1) cat("\n Recurrences:\n")
cat("------------- \n")
if (x$family[2]==0){
if(!(x$typeof==0)){
cat(" Parametrical approach with link g() = log(-log()) ", "\n")
cat(" S(t) = [ g^-1(eta) ]^frailty ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X", "\n")
cat(" (Proportional Hazards Frailty Model with a Weibull distribution) ", "\n")
cat(" Expression of the Weibull hazard function: 'lambda(t) = (shape.(t^(shape-1)))/(scale^shape)'", "\n")
cat(" Expression of the Weibull survival function: 'S(t) = exp[- (t/scale)^shape]'")
cat("\n\n")
}else{
cat(" Semi-Parametrical approach ", "\n")
cat(" Expression of the hazard function: 'lambda(t) = lambda_0(t) * exp(beta(t)'X)'", "\n")
cat(" (Baseline hazard function lambda_0(.) estimated using M-splines)")
cat("\n\n")
}
}
else if (x$family[2]==1){
cat(" Parametrical approach with link g() = -logit() ", "\n")
cat(" S(t) = [ g^-1(eta) ]^frailty ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X ", "\n")
cat(" (Proportional Odds Frailty Model with a log-logistic distribution) ", "\n")
cat(" Expression of the log-logistic hazard function: 'lambda(t) = 1 / [ 1+exp(-eta) ] * d.eta/d.t'", "\n")
cat(" Expression of the log-logistic survival function: 'S(t) = 1 / [ 1 + (t/scale)^shape ]'")
cat("\n\n")
}
else if (x$family[2]==2){
cat(" Parametrical approach with link g() = -PHI^-1() ", "\n")
cat(" S(t) = [ g^-1(eta) ]^frailty ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X ", "\n")
cat(" (Probit Frailty Model with a log-normal distribution) ", "\n")
cat(" Expression of the log-normal hazard function: 'lambda(t) = phi(-eta)/PHI(-eta) * d.eta/d.t'", "\n")
cat(" Expression of the log-normal survival function: 'S(t) = PHI(-eta)'")
cat("\n\n")
}
else if (x$family[2]==3){
if(!(x$typeof==0)){
cat(" Parametrical approach with link g() = -log() ", "\n")
cat(" S(t) = [ g^-1(eta) ]^frailty ", "\n")
cat(" eta = (t/scale)^shape + t*beta'X", "\n")
cat(" (Additive Hazards Frailty Model with a Weibull distribution) ", "\n")
cat(" Expression of the Weibull hazard function: 'lambda(t) = (shape.(t^(shape-1)))/(scale^shape)'", "\n")
cat(" Expression of the Weibull survival function: 'S(t) = exp[- (t/scale)^shape]'")
cat("\n\n")
}else{
cat(" Semi-Parametrical approach ", "\n")
cat(" Expression of the hazard function: 'lambda(t) = lambda_0(t) + beta(t)'X'", "\n")
cat(" (Baseline hazard function lambda_0(.) estimated using M-splines)")
cat("\n\n")
}
}
              tmp.mieux <- data.frame(tmp)
              tmp.mieux[] <- lapply(tmp.mieux, type.convert, as.is = TRUE)
tmp.mieux.rec = tmp.mieux[1:x$nvarnotdep[1], -2]
if(x$typeof == 0){
names(tmp.mieux.rec) = c("coef", "SE coef (H)", "SE coef (HIH)", "z", "p")
}
if(x$typeof == 2){
names(tmp.mieux.rec) = c("coef", "SE coef (H)", "z", "p")
}
prmatrix(tmp.mieux.rec, quote=FALSE)
if(x$global_chisq.test==1){
cat("\n")
                tmpwald.mieux <- data.frame(tmpwald)
                tmpwald.mieux[] <- lapply(tmpwald.mieux, type.convert, as.is = TRUE)
prmatrix(tmpwald.mieux, quote=FALSE)
}
cat("\n")
if ( (x$family[2]==0)&(!(x$typeof==0)) ){
cat("Scale for the Weibull hazard function:", round(x$scale.param[1],2), "\n")
cat("Shape for the Weibull hazard function:", round(x$shape.param[1],2), "\n")
}
else if ( (x$family[2]==1)&(!(x$typeof==0)) ){
cat("Scale for the log-logistic hazard function:", round(x$scale.param[1],2), "\n")
cat("Shape for the log-logistic hazard function:", round(x$shape.param[1],2), "\n")
}
else if ( (x$family[2]==2)&(!(x$typeof==0)) ){
cat("Scale for the log-normal hazard function:", round(x$scale.param[1],2), "\n")
cat("Shape for the log-normal hazard function:", round(x$shape.param[1],2), "\n")
}
else if ( (x$family[2]==3)&(!(x$typeof==0)) ){
cat("Scale for the Weibull hazard function:", round(x$scale.param[1],2), "\n")
cat("Shape for the Weibull hazard function:", round(x$shape.param[1],2), "\n")
}
}
}
cat("\n")
if (x$nvarnotdep[2] == 0){
cat("\n Terminal event:\n")
cat("---------------- \n")
cat("No constant coefficients, only time-varying effects of the covariates \n")
}else{
if (x$noVar2 == 0){
cat("\n Terminal event:\n")
cat("---------------- \n")
if (x$family[1]==0){
if(!(x$typeof==0)){
cat(" Parametrical approach with link g() = log(-log()) ", "\n")
cat(" S(t) = [ g^-1(eta) ] ^ (frailty^gamma) ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X", "\n")
cat(" (Proportional Hazards Frailty Model with a Weibull distribution) ", "\n")
cat(" Expression of the Weibull hazard function: 'lambda(t) = (shape.(t^(shape-1)))/(scale^shape)'", "\n")
cat(" Expression of the Weibull survival function: 'S(t) = exp[- (t/scale)^shape]'")
cat("\n\n")
}else{
cat(" Semi-Parametrical approach ", "\n")
cat(" Expression of the hazard function: 'lambda(t) = lambda_0(t) * exp(beta(t)'X)'", "\n")
cat(" (Baseline hazard function lambda_0(.) estimated using M-splines)")
cat("\n\n")
}
}
else if (x$family[1]==1){
cat(" Parametrical approach with link g() = -logit() ", "\n")
cat(" S(t) = [ g^-1(eta) ] ^ (frailty^gamma) ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X ", "\n")
cat(" (Proportional Odds Frailty Model with a log-logistic distribution) ", "\n")
cat(" Expression of the log-logistic hazard function: 'lambda(t) = 1 / [ 1+exp(-eta) ] * d.eta/d.t'", "\n")
cat(" Expression of the log-logistic survival function: 'S(t) = 1 / [ 1 + (t/scale)^shape ]'")
cat("\n\n")
}
else if (x$family[1]==2){
cat(" Parametrical approach with link g() = -PHI^-1() ", "\n")
cat(" S(t) = [ g^-1(eta) ] ^ (frailty^gamma) ", "\n")
cat(" eta = shape.log(t) - shape.log(scale) + beta'X ", "\n")
cat(" (Probit Frailty Model with a log-normal distribution) ", "\n")
cat(" Expression of the log-normal hazard function: 'lambda(t) = phi(-eta)/PHI(-eta) * d.eta/d.t'", "\n")
cat(" Expression of the log-normal survival function: 'S(t) = PHI(-eta)'")
cat("\n\n")
}
else if (x$family[1]==3){
if(!(x$typeof==0)){
cat(" Parametrical approach with link g() = -log() ", "\n")
cat(" S(t) = [ g^-1(eta) ]^frailty ", "\n")
cat(" eta = (t/scale)^shape + t*beta'X", "\n")
cat(" (Additive Hazards Frailty Model with a Weibull distribution) ", "\n")
cat(" Expression of the Weibull hazard function: 'lambda(t) = (shape.(t^(shape-1)))/(scale^shape)'", "\n")
cat(" Expression of the Weibull survival function: 'S(t) = exp[- (t/scale)^shape]'")
cat("\n\n")
}else{
cat(" Semi-Parametrical approach ", "\n")
cat(" Expression of the hazard function: 'lambda(t) = lambda_0(t) + beta(t)'X'", "\n")
cat(" (Baseline hazard function lambda_0(.) estimated using M-splines)")
cat("\n\n")
}
}
tmp.mieux.dc = tmp.mieux[-c(1:x$nvarnotdep[1]), -2]
if(x$typeof==0){
names(tmp.mieux.dc) = c("coef", "SE coef (H)", "SE coef (HIH)", "z", "p")
}
if(x$typeof==2){
names(tmp.mieux.dc) = c("coef", "SE coef (H)", "z", "p")
}
row.names(tmp.mieux.dc) = names(coef)[-c(1:x$nvarnotdep[1])]
prmatrix(tmp.mieux.dc, quote=FALSE)
if(x$global_chisq.test_d==1){
cat("\n")
                tmpwalddc.mieux <- data.frame(tmpwalddc)
                tmpwalddc.mieux[] <- lapply(tmpwalddc.mieux, type.convert, as.is = TRUE)
prmatrix(tmpwalddc.mieux, quote=FALSE)
}
cat("\n")
if ( (x$family[1]==0)&(!(x$typeof==0)) ){
cat("Scale for the Weibull hazard function:", round(x$scale.param[2],2), "\n")
cat("Shape for the Weibull hazard function:", round(x$shape.param[2],2), "\n")
}
else if ( (x$family[1]==1)&(!(x$typeof==0)) ){
cat("Scale for the log-logistic hazard function:", round(x$scale.param[2],2), "\n")
cat("Shape for the log-logistic hazard function:", round(x$shape.param[2],2), "\n")
}
else if ( (x$family[1]==2)&(!(x$typeof==0)) ){
cat("Scale for the log-normal hazard function:", round(x$scale.param[2],2), "\n")
cat("Shape for the log-normal hazard function:", round(x$shape.param[2],2), "\n")
}
else if ( (x$family[1]==3)&(!(x$typeof==0)) ){
cat("Scale for the Weibull hazard function:", round(x$scale.param[2],2), "\n")
cat("Shape for the Weibull hazard function:", round(x$shape.param[2],2), "\n")
}
}
}
cat("\n")
}
temp <- diag(x$varH)[1]
seH.frail <- sqrt(((2 * (frail^0.5))^2) * temp)
temp <- diag(x$varHIH)[1]
seHIH.frail <- sqrt(((2 * (frail^0.5))^2) * temp)
if (x$noVar1 == 1){
cat("\n")
if (x$joint.clust == 0) cat(" Survival event: No covariates \n")
if (x$joint.clust >= 1) cat(" Recurrences: No covariates \n")
cat(" ----------- \n")
}
if (x$noVar2 == 1){
cat("\n")
cat(" Terminal event: No covariates \n")
cat(" -------------- \n")
cat("\n")
}
cat("\nFrailty parameters: \n")
if (x$logNormal == 0){
if (indic_alpha == 1 & x$joint.clust<=1){
cat(" theta (variance of frailties, u_i): ", frail, " (SE(H): ",seH.frail, "), ",
"p = ", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), sep="", "\n")
cat(" alpha ((u_i)^alpha for terminal event): ", x$alpha, " (SE(H): ",sqrt(diag(x$varH))[2], "), ",
"p = ", ifelse(signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1)), sep="", "\n")
}else if (x$joint.clust ==2) {
cat(" theta (variance of u, association between recurrences and terminal event):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
cat(" eta (variance of v, intra-subject correlation):", x$eta, "(SE (H):",sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]), ")", "p =", ifelse(signif(1 - pnorm (x$eta/sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]),1), digits - 1) == 0, "< 1e-16", signif(1 - pnorm (x$eta/sqrt(((2 * (x$eta^0.5))^2) * diag(x$varH)[2]),1), digits - 1)), "\n")
} else {
cat(" theta (variance of Frailties, w):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
cat(" gamma is fixed (=1) \n")
}
}else{
cat(" sigma square (variance of Frailties, eta):", frail, "(SE (H):",seH.frail, ")", "p =", ifelse(signif(1 - pnorm(frail/seH.frail), digits - 1) == 0, "< 1e-16", signif(1 - pnorm(frail/seH.frail), digits - 1)), "\n")
if (indic_alpha == 1) cat(" alpha (exp(alpha.eta) for terminal event):", x$alpha, "(SE (H):",sqrt(diag(x$varH))[2], ")", "p =", ifelse(signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1) == 0, "< 1e-16", signif(1 - pchisq((x$alpha/sqrt(diag(x$varH))[2])^2,1), digits - 1)), "\n")
else cat(" alpha is fixed (=1) \n")
}
cat(" \n")
if (x$typeof == 0){
cat(paste("Penalized marginal log-likelihood =", round(x$logLikPenal,2)))
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"; likelihood =",signif(x$EPS[2],3),"; gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat("Likelihood Cross-Validation (LCV) criterion in the semi parametrical case:\n")
cat(" approximate LCV =",x$LCV,"\n")
}else{
cat(paste("Marginal log-likelihood =", round(x$logLik,2)))
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"; likelihood =",signif(x$EPS[2],3),"; gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat("AIC (Aikaike Information Criterion) =",x$AIC,"\n")
cat("The expression of the Aikaike Information Criterion is:","\n")
cat(" 'AIC = (1/n)[np - l(.)]'","\n")
}
cat("\n")
if (x$joint.clust == 0){
cat(" n.observations =", x$n, " n.subjects =", x$ind, " n.groups =", x$groups)
}else{
cat(" n.observations = ", x$n, ", n.subjects = ", x$groups, sep="")
}
if (length(x$na.action)){
cat(" (", length(x$na.action), " observation deleted due to missing) \n")
}else{
cat("\n")
}
if (x$joint.clust == 0){
cat(" n.events =", x$n.events)
}else{
cat(" n.recurrent events =", x$n.events)
}
cat("\n")
cat(" n.terminal events =", x$n.deaths)
cat("\n")
cat(" n.censored events =" ,x$n.censored)
cat("\n")
cat(" number of iterations:", x$n.iter,"\n")
if (x$logNormal == 0) {
cat(" Number of nodes for the Gauss-Laguerre quadrature:", x$nb.gl,"\n")
}
else {cat(" Number of nodes for the Gauss-Hermite quadrature:", x$nb.gh,"\n")}
if ((x$typeof == 1) & (x$indic.nb.intR == 1)){
cat(" Exact number of time intervals used: 20","\n")
}else{
if (x$typeof == 1) cat(" Exact number of time intervals used: ",x$nbintervR,"\n")
}
if ((x$typeof == 1) & (x$indic.nb.intD == 1)){
cat(" Exact number of time intervals used: 20","\n")
}else{
if (x$typeof == 1) cat(" Exact number of time intervals used:",x$nbintervDC,"\n")
}
if (x$typeof == 0){
cat("\n")
cat(" Exact number of knots used:", x$n.knots, "\n")
cat(" Value of the smoothing parameters:", x$kappa, sep=" ")
cat("\n")
}
}else{
if (!is.null(coef)){
cat("\n")
if (x$joint.clust == 0) cat(" For clustered data","\n")
if (x$joint.clust == 0){
if (x$logNormal == 0){
cat(" Joint gamma frailty model for a survival and a terminal event processes","\n")
}else{
cat(" Joint Log-Normal frailty model for a survival and a terminal event processes","\n")
}
}else{
if ((x$logNormal == 0)&(x$joint.clust==1)){
cat(" Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else if ((x$logNormal == 0)&(x$joint.clust==2)){
cat(" General Joint gamma frailty model for recurrent and a terminal event processes","\n")
}
else{
cat(" Joint Log-Normal frailty model for recurrent and a terminal event processes","\n")
}
}
if (x$typeof == 0){
cat(" using a Penalized Likelihood on the hazard function","\n")
}else{
cat(" using a Parametrical approach for the hazard function","\n")
}
if (any(x$nvartimedep != 0)) cat(" and some time-dependent covariates","\n")
if (x$noVar1 == 1){
cat("\n")
if (x$joint.clust == 0) cat(" Survival event: No covariates \n")
if (x$joint.clust >= 1) cat(" Recurrences: No covariates \n")
cat(" ----------- \n")
}
if (x$noVar2 == 1){
cat("\n")
cat(" Terminal event: No covariates \n")
cat(" -------------- \n")
cat("\n")
}
cat("\n")
cat(" Convergence criteria: \n")
cat(" parameters =",signif(x$EPS[1],3),"likelihood =",signif(x$EPS[2],3),"gradient =",signif(x$EPS[3],3),"\n")
cat("\n")
cat(" n=", x$n)
if (length(x$na.action)){
cat(" (", length(x$na.action), " observation deleted due to missing) \n")
}else{
cat("\n")
}
if (x$joint.clust == 0){
cat(" n events=", x$n.events)
}else{
cat(" n recurrent events=", x$n.events)
}
cat("\n")
cat(" n terminal events=", x$n.deaths)
cat("\n")
if (x$logNormal == 0) {
cat(" Number of nodes for the Gauss-Laguerre quadrature: ", x$nb.gl,"\n")
}
else {cat(" Number of nodes for the Gauss-Hermite quadrature: ", x$nb.gh,"\n")}
}
}
invisible()
}
} |
Initialize.corRStruct <- function(object, data, ...)
{
form <- formula(object)
if (!is.null(getGroupsFormula(form))) {
attr(object, "groups") <- getGroups(object, form, data = data)
attr(object, "Dim") <- Dim(object, attr(object, "groups"))
} else {
attr(object, "Dim") <- Dim(object, as.factor(rep(1, nrow(data))))
}
attr(object, "covariate") <- getCovariate(object, data = data)
object
}
Dim.corRStruct <- function(object, groups, ...)
{
if (missing(groups)) return(attr(object, "Dim"))
ugrp <- unique(groups)
groups <- factor(groups, levels = ugrp)
len <- table(groups)
list(N = length(groups),
M = length(len),
maxLen = max(len),
sumLenSq = sum(len^2),
len = len,
start = match(ugrp, groups) - 1)
}
print.corRStruct <- function(x, ...)
{
aux <- coef(x)
if (length(aux) > 0) {
cat("Correlation structure of class", class(x)[1], "representing\n")
print(aux, ...)
} else {
cat("Uninitialized correlation structure of class", class(x)[1], "\n")
}
}
Initialize.corRSpatial <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- attr(getCovariate(object), "minD") * 0.9
} else if (!all(inbounds(val, attr(object, "bounds")))) {
    stop("initial value out of bounds")
}
attributes(val) <- attributes(object)
val
}
Dim.corRSpatial <- function(object, groups, ...)
{
if (missing(groups)) return(attr(object, "Dim"))
val <- Dim.corRStruct(object, groups)
val[["start"]] <-
c(0, cumsum(val[["len"]] * (val[["len"]] - 1)/2)[-val[["M"]]])
names(val)[3] <- "spClass"
val[[3]] <- match(class(object)[1], c("corRExp", "corRExpwr", "corRGaus",
"corRGneit", "corRLin", "corRMatern", "corRCauchy",
"corRSpher"), 0)
val
}
getCovariate.corRSpatial <- function(object, form = formula(object), data)
{
covar <- attr(object, "covariate")
if (is.null(covar)) {
if (missing(data)) {
stop("Need data to calculate covariate")
}
covForm <- terms(getCovariateFormula(form))
attr(covForm, "intercept") <- 0
if (length(all.vars(covForm)) > 0) {
covar <- model.matrix(covForm,
model.frame(covForm, data, drop.unused.levels = TRUE))
} else {
covar <- as.matrix(1:nrow(data))
}
if (is.null(getGroupsFormula(form))) {
attr(covar, "assign") <- NULL
attr(covar, "contrasts") <- NULL
attr(covar, "dist") <- as.vector(dist2(covar,
method = attr(object, "metric"),
r = attr(object, "radius")))
attr(covar, "minD") <- min(attr(covar, "dist"))
} else {
grps <- getGroups(object, data = data)
covar <- lapply(split(as.data.frame(covar), grps),
function(el, metric, radius) {
el <- as.matrix(el)
attr(el, "dist") <- as.vector(dist2(el, metric, r = radius))
el
}, metric = attr(object, "metric"), radius = attr(object, "radius"))
attr(covar, "minD") <- min(unlist(lapply(covar, attr, which = "dist")))
}
if (attr(covar, "minD") == 0) {
stop("Cannot have zero distances in \"corRSpatial\"")
}
}
covar
}
corMatrix.corRSpatial <- function(object, covariate = getCovariate(object),
corr = TRUE, ...)
{
if (data.class(covariate) == "list") {
dist <- unlist(lapply(covariate, attr, which = "dist"))
len <- unlist(lapply(covariate, nrow))
} else {
dist <- attr(covariate, "dist")
len <- nrow(covariate)
names(len) <- 1
}
par <- coef(object)
val <- switch(class(object)[1],
corRExp = cor.exp(dist, par[1]),
corRExpwr = cor.exp(dist, par[1], par[2]),
corRGaus = cor.exp(dist, par[1], 2),
corRGneit = cor.gneiting(dist, par[1]),
corRLin = cor.lin(dist, par[1]),
corRMatern = cor.matern(dist, par[1], par[2]),
corRCauchy = cor.cauchy(dist, par[1]),
corRSpher = cor.spher(dist, par[1]),
corRWave = cor.wave(dist, par[1])
)
val <- split(val, rep(names(len), len * (len - 1) / 2))
lD <- NULL
for(i in names(val)) {
x <- matrix(0, len[i], len[i])
x[lower.tri(x)] <- val[[i]]
if (corr) {
val[[i]] <- x + t(x)
diag(val[[i]]) <- 1
} else {
diag(x) <- 1
l <- chol(t(x))
val[[i]] <- t(backsolve(l, diag(len[i])))
lD <- c(lD, diag(l))
}
}
if (length(len) == 1) val <- val[[1]]
if (!is.null(lD)) attr(val, "logDet") <- -1 * sum(log(lD))
val
}
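# Note on corMatrix.corRSpatial() above: with corr = FALSE it returns, per
# group, t(solve(L)) where L = chol(t(R)) is the Cholesky factor of the
# correlation matrix, i.e. a factor that decorrelates the data, and
# attr(., "logDet") carries -sum(log(diag(L))), half the negative
# log-determinant of R.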
corFactor.corRSpatial <- function(object, ...)
{
val <- corMatrix(object, corr = FALSE, ...)
lD <- attr(val, "logDet")
if (is.list(val)) val <- unlist(val)
else val <- as.vector(val)
names(val) <- NULL
attr(val, "logDet") <- lD
val
}
coef.corRSpatial <- function(object, ...)
{
val <- as.vector(object)
if (length(val) == 0) {
return(val)
}
names(val) <- rownames(attr(object, "bounds"))
val
}
"coef<-.corRSpatial" <- function(object, ..., value)
{
  if (!all(inbounds(value, attr(object, "bounds")))) stop("coefficient value out of bounds")
object[] <- value
object
}
corRExp <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRExp", "corRSpatial", "corRStruct")
value
}
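# Usage sketch for corRExp(): these constructors mirror nlme's corStruct API,
# so Initialize() attaches the distance covariate and coef() exposes the range
# parameter ('sites' is a hypothetical data frame with lon/lat columns).
if (FALSE) {
  cs <- corRExp(form = ~ lon + lat, metric = "haversine")
  cs <- Initialize(cs, data = sites)
  coef(cs)
}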
corRExpwr <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, 0, Inf, 2, 1, 3), ncol=3,
dimnames = list(c("range", "shape"), c("lower", "upper", "type")))
class(value) <- c("corRExpwr", "corRSpatial", "corRStruct")
value
}
Initialize.corRExpwr <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- c(attr(getCovariate(object), "minD") * 0.9, 1)
} else if (!all(inbounds(val, attr(object, "bounds")))) {
    stop("initial value out of bounds")
}
attributes(val) <- attributes(object)
val
}
corRGaus <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRGaus", "corRSpatial", "corRStruct")
value
}
corRGneit <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRGneit", "corRSpatial", "corRStruct")
value
}
corRLin <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRLin", "corRSpatial", "corRStruct")
value
}
corRMatern <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, 0, Inf, 2, 1, 3), ncol=3,
dimnames = list(c("range", "scale"), c("lower", "upper", "type")))
class(value) <- c("corRMatern", "corRSpatial", "corRStruct")
value
}
Initialize.corRMatern <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- c(attr(getCovariate(object), "minD") * 0.9, 0.5)
} else if (!all(inbounds(val, attr(object, "bounds")))) {
stop("initial parameter values are out of bounds")
}
attributes(val) <- attributes(object)
val
}
corRCauchy <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRCauchy", "corRSpatial", "corRStruct")
value
}
corRSpher <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRSpher", "corRSpatial", "corRStruct")
value
}
corRWave <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, Inf, 1), ncol=3,
dimnames = list("range", c("lower", "upper", "type")))
class(value) <- c("corRWave", "corRSpatial", "corRStruct")
value
}
Initialize.corRSpatioTemporal <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- attr(getCovariate(object), "minD") * 0.9
val[val == 0] <- 1
} else if (!all(inbounds(val, attr(object, "bounds")))) {
stop("initial parameter values are out of bounds")
}
attributes(val) <- attributes(object)
val
}
Dim.corRSpatioTemporal <- function(object, groups, ...)
{
if (missing(groups)) return(attr(object, "Dim"))
val <- Dim.corRStruct(object, groups)
val[["start"]] <-
c(0, cumsum(val[["len"]] * (val[["len"]] - 1)/2)[-val[["M"]]])
names(val)[3] <- "spClass"
val[[3]] <- match(class(object)[1], c("corRExp2", "corRExpwr2"), 0)
val
}
getCovariate.corRSpatioTemporal <- function(object, form = formula(object), data)
{
covar <- attr(object, "covariate")
if (is.null(covar)) {
if (missing(data)) {
stop("Need data to calculate covariate")
}
covForm <- terms(getCovariateFormula(form))
attr(covForm, "intercept") <- 0
tcovar <- length(all.vars(covForm))
if (tcovar >= 2) {
covar <- model.matrix(covForm,
model.frame(covForm, data, drop.unused.levels = TRUE))
} else if (tcovar == 1) {
covar <- model.matrix(covForm,
model.frame(covForm, data, drop.unused.levels = TRUE))
covar <- cbind(covar, 1:nrow(data))
tcovar <- 2
} else {
covar <- cbind(1:nrow(data), 1:nrow(data))
tcovar <- 2
}
if (nrow(covar) > nrow(unique(covar))) {
stop("Cannot have zero distances in \"corRSpatioTemporal\"")
}
if (is.null(getGroupsFormula(form))) {
attr(covar, "assign") <- NULL
attr(covar, "contrasts") <- NULL
x <- as.vector(dist2(covar[, -tcovar], method = attr(object, "metric"),
r = attr(object, "radius")))
attr(covar, "dist") <- x
minD <- ifelse(any(x > 0), min(x[x > 0]), 0)
idx <- lower.tri(matrix(0, nrow(covar), nrow(covar)))
x <- abs(covar[col(idx)[idx], tcovar] - covar[row(idx)[idx], tcovar])
attr(covar, "period") <- x
minD <- c(minD, ifelse(any(x > 0), min(x[x > 0]), 0))
} else {
grps <- getGroups(object, data = data)
covar <- lapply(split(as.data.frame(covar), grps),
function(el, metric, radius) {
el <- as.matrix(el)
attr(el, "dist") <- as.vector(dist2(el[, -tcovar], metric,
r = radius))
idx <- lower.tri(matrix(0, nrow(el), nrow(el)))
attr(el, "period") <- abs(el[col(idx)[idx], tcovar] -
el[row(idx)[idx], tcovar])
el
}, metric = attr(object, "metric"), radius = attr(object, "radius"))
x <- unlist(lapply(covar, attr, which = "dist"))
minD <- ifelse(any(x > 0), min(x[x > 0]), 0)
x <- unlist(lapply(covar, attr, which = "period"))
minD <- c(minD, ifelse(any(x > 0), min(x[x > 0]), 0))
}
attr(covar, "minD") <- minD
}
covar
}
corMatrix.corRSpatioTemporal <- function(object, covariate = getCovariate(object),
corr = TRUE, ...)
{
if (data.class(covariate) == "list") {
dist <- unlist(lapply(covariate, attr, which = "dist"))
period <- unlist(lapply(covariate, attr, which = "period"))
len <- unlist(lapply(covariate, nrow))
} else {
dist <- attr(covariate, "dist")
period <- attr(covariate, "period")
len <- nrow(covariate)
names(len) <- 1
}
par <- coef(object)
val <- switch(class(object)[1],
corRExp2 = cor.exp2(dist, period, par[1], 1, par[2], 1, par[3]),
corRExpwr2 = cor.exp2(dist, period, par[1], par[2], par[3], par[4], par[5])
)
val <- split(val, rep(names(len), len * (len - 1) / 2))
lD <- NULL
for(i in names(val)) {
x <- matrix(0, len[i], len[i])
x[lower.tri(x)] <- val[[i]]
if (corr) {
val[[i]] <- x + t(x)
diag(val[[i]]) <- 1
} else {
diag(x) <- 1
l <- chol(t(x))
val[[i]] <- t(backsolve(l, diag(len[i])))
lD <- c(lD, diag(l))
}
}
if (length(len) == 1) val <- val[[1]]
if (!is.null(lD)) attr(val, "logDet") <- -1 * sum(log(lD))
val
}
corFactor.corRSpatioTemporal <- function(object, ...)
{
val <- corMatrix(object, corr = FALSE, ...)
lD <- attr(val, "logDet")
if (is.list(val)) val <- unlist(val)
else val <- as.vector(val)
names(val) <- NULL
attr(val, "logDet") <- lD
val
}
coef.corRSpatioTemporal <- function(object, ...)
{
val <- as.vector(object)
if (length(val) == 0) {
return(val)
}
names(val) <- rownames(attr(object, "bounds"))
val
}
"coef<-.corRSpatioTemporal" <- function(object, ..., value)
{
if (!all(inbounds(value, attr(object, "bounds")))) stop("coefficient values are out of bounds")
object[] <- value
object
}
corRExp2 <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, 0, 0, Inf, Inf, Inf, 1, 1, 2), ncol=3,
dimnames = list(c("spatial range", "temporal range", "interaction"),
c("lower", "upper", "type")))
class(value) <- c("corRExp2", "corRSpatioTemporal", "corRStruct")
value
}
Initialize.corRExp2 <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- attr(getCovariate(object), "minD") * 0.9
val[val == 0] <- 1
val <- c(val, 0)
} else if (!all(inbounds(val, attr(object, "bounds")))) {
stop("initial parameter values are out of bounds")
}
attributes(val) <- attributes(object)
val
}
corRExpwr2 <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, 0, 0, 0, 0, Inf, 2, Inf, 2, Inf,
1, 3, 1, 3, 2), ncol=3,
dimnames = list(c("spatial range", "spatial shape", "temporal range",
"temporal shape", "interaction"),
c("lower", "upper", "type")))
class(value) <- c("corRExpwr2", "corRSpatioTemporal", "corRStruct")
value
}
Initialize.corRExpwr2 <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- attr(getCovariate(object), "minD") * 0.9
val[val == 0] <- 1
val <- c(val[1], 1, val[2], 1, 0)
} else if (!all(inbounds(val, attr(object, "bounds")))) {
stop("initial parameter values are out of bounds")
}
attributes(val) <- attributes(object)
val
}
corRExpwr2Dt <- function(value = numeric(0), form = ~ 1,
metric = c("euclidean", "maximum", "manhattan", "haversine"), radius = 3956)
{
attr(value, "formula") <- form
attr(value, "metric") <- match.arg(metric)
attr(value, "radius") <- radius
attr(value, "bounds") <- matrix(c(0, 0, 0, 0, Inf, 2, Inf, Inf,
1, 3, 1, 2), ncol=3,
dimnames = list(c("spatial range", "spatial shape", "temporal range",
"interaction"), c("lower", "upper", "type")))
class(value) <- c("corRExpwr2Dt", "corRSpatioTemporal", "corRStruct")
value
}
Initialize.corRExpwr2Dt <- function(object, data, ...)
{
if (!is.null(attr(object, "covariate"))) {
return(object)
}
object <- Initialize.corRStruct(object, data)
val <- as.vector(object)
if (length(val) == 0) {
val <- attr(getCovariate(object), "minD") * 0.9
val[val == 0] <- 1
val <- c(val[1], 1, val[2], 0)
} else if (!all(inbounds(val, attr(object, "bounds")))) {
stop("initial parameter values are out of bounds")
}
attributes(val) <- attributes(object)
val
}
Dim.corRExpwr2Dt <- function(object, groups, ...)
{
if (missing(groups)) return(attr(object, "Dim"))
val <- Dim.corRStruct(object, groups)
val[["start"]] <-
c(0, cumsum(val[["len"]] * (val[["len"]] - 1)/2)[-val[["M"]]])
names(val)[3] <- "spClass"
val[[3]] <- match(class(object)[1], c("corRExpwr2Dt"), 0)
val
}
getCovariate.corRExpwr2Dt <- function(object, form = formula(object), data)
{
covar <- attr(object, "covariate")
if (is.null(covar)) {
if (missing(data)) {
stop("Need data to calculate covariate")
}
covForm <- terms(getCovariateFormula(form))
attr(covForm, "intercept") <- 0
tcovar <- length(all.vars(covForm)) + c(-1, 0)
if (tcovar[1] >= 2) {
covar <- model.matrix(covForm,
model.frame(covForm, data, drop.unused.levels = TRUE))
} else if (tcovar[1] == 1) {
covar <- model.matrix(covForm,
model.frame(covForm, data, drop.unused.levels = TRUE))
covar <- cbind(1:nrow(data), covar)
tcovar <- tcovar + 1
} else {
covar <- matrix(1:nrow(data), nrow(data), 3)
tcovar <- c(2, 3)
}
if (nrow(covar) > nrow(unique(covar))) {
stop("Cannot have duplicate sites in \"corRExpwr2Dt\"")
} else if (any(covar[,tcovar[1]] > covar[,tcovar[2]])) {
stop("Temporal limits must be ascending in \"corRExpwr2Dt\"")
}
if (is.null(getGroupsFormula(form))) {
attr(covar, "assign") <- NULL
attr(covar, "contrasts") <- NULL
x <- as.vector(dist2(covar[, -tcovar], method = attr(object, "metric"),
r = attr(object, "radius")))
attr(covar, "dist") <- x
minD <- ifelse(any(x > 0), min(x[x > 0]), 0)
idx <- lower.tri(matrix(0, nrow(covar), nrow(covar)))
t1 <- covar[col(idx)[idx], tcovar]
t2 <- covar[row(idx)[idx], tcovar]
attr(covar, "t1") <- t1
attr(covar, "t2") <- t2
x <- abs((t2 - t1) %*% c(0.5, 0.5))
minD <- c(minD, ifelse(any(x > 0), min(x[x > 0]), 0))
} else {
grps <- getGroups(object, data = data)
covar <- lapply(split(as.data.frame(covar), grps),
function(el, metric, radius) {
el <- as.matrix(el)
attr(el, "dist") <- as.vector(dist2(el[, -tcovar], metric,
r = radius))
idx <- lower.tri(matrix(0, nrow(el), nrow(el)))
attr(el, "t1") <- el[col(idx)[idx], tcovar]
attr(el, "t2") <- el[row(idx)[idx], tcovar]
el
}, metric = attr(object, "metric"), radius = attr(object, "radius"))
x <- unlist(lapply(covar, attr, which = "dist"))
minD <- ifelse(any(x > 0), min(x[x > 0]), 0)
x <- rapply(covar,
function(x) abs((attr(x, "t2") - attr(x, "t1")) %*% c(0.5, 0.5)))
minD <- c(minD, ifelse(any(x > 0), min(x[x > 0]), 0))
}
attr(covar, "minD") <- minD
}
covar
}
corMatrix.corRExpwr2Dt <- function(object, covariate = getCovariate(object),
corr = TRUE, ...)
{
if (data.class(covariate) == "list") covar <- covariate
else covar <- list(covariate)
par <- coef(object)
val <- list()
lD <- NULL
for(i in seq(covar)) {
r <- cor.exp2dt(attr(covar[[i]], "dist"),
attr(covar[[i]], "t1"), attr(covar[[i]], "t2"),
par[1], par[2], par[3], par[4])
x <- matrix(0, nrow(covar[[i]]), nrow(covar[[i]]))
x[lower.tri(x)] <- r
idx <- ncol(covar[[i]]) + c(-1, 0)
if (corr) {
val[[i]] <- x + t(x)
diag(val[[i]]) <- cor.exp2dt(0, covar[[i]][,idx], covar[[i]][,idx],
par[1], par[2], par[3], par[4])
} else {
diag(x) <- cor.exp2dt(0, covar[[i]][,idx], covar[[i]][,idx],
par[1], par[2], par[3], par[4])
l <- chol(t(x))
val[[i]] <- t(backsolve(l, diag(nrow(covar[[i]]))))
lD <- c(lD, diag(l))
}
}
if (length(val) == 1) val <- val[[1]]
if (!is.null(lD)) attr(val, "logDet") <- -1 * sum(log(lD))
val
}
corFactor.corRExpwr2Dt <- function(object, ...)
{
val <- corMatrix(object, corr = FALSE, ...)
lD <- attr(val, "logDet")
if (is.list(val)) val <- unlist(val)
else val <- as.vector(val)
names(val) <- NULL
attr(val, "logDet") <- lD
val
}
dist2 <- function(x, method = c("euclidean", "maximum", "manhattan", "canberra",
"binary", "minkowski", "haversine"), diag = FALSE, upper = FALSE,
p = 2, r = 3956)
{
METHOD <- match.arg(method)
switch(METHOD,
haversine = {
m <- matrix(NA, nrow(x), nrow(x))
idx <- lower.tri(m)
m[idx] <- haversine(x[col(m)[idx],1:2], x[row(m)[idx],1:2], r)
d <- as.dist(m, diag = diag, upper = upper)
},
{
d <- stats::dist(x, method = METHOD, diag = diag, upper = upper, p = p)
}
)
d
}
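## Sanity check (hypothetical points): for every metric except "haversine",
## dist2() should agree with stats::dist().
if (FALSE) {
xy <- matrix(runif(10), 5, 2)
all.equal(dist2(xy, method = "manhattan"), dist(xy, method = "manhattan"))
}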
anisotropic <- function(x, par, system = c("cartesian", "polar", "spherical"))
{
X <- as.matrix(x)
system <- match.arg(system)
d <- ncol(x)
if (d == 2) {
if (length(par) != 2)
stop("parameter vector must consist of two elements",
" - an anisotropy angle and ratio")
if ((r <- par[2]) < 1) stop("anisotropy ratios must be >= 1")
S <- diag(c(1, 1 / r))
alpha <- par[1]
R <- matrix(c(cos(alpha), -sin(alpha),
sin(alpha), cos(alpha)), 2, 2)
switch(system,
cartesian = {
Y <- X %*% S %*% R
},
polar = {
theta <- X[,2]
Z <- X[,1] * cbind(cos(theta), sin(theta)) %*% S %*% R
x <- Z[,1]
y <- Z[,2]
theta <- atan2(y, x) # vectorized; the scalar if/else chain failed for vector input
theta[theta < 0] <- theta[theta < 0] + 2 * pi
Y <- cbind(sqrt(x^2 + y^2), theta)
},
spherical = {
stop("spherical coordinates must be of three dimensions")
}
)
} else if (d == 3) {
if (length(par) != 5)
stop("parameter vector must consist of five elements",
" - three anisotropy angles and two ratios")
if (any((r <- par[4:5]) < 1)) stop("anisotropy ratios must be >= 1")
S <- diag(c(1, 1 / r))
alpha <- par[1]
beta <- par[2]
theta <- par[3]
R1 <- matrix(c(cos(alpha), -sin(alpha), 0,
sin(alpha), cos(alpha), 0,
0, 0, 1), 3, 3)
R2 <- matrix(c( cos(beta), 0, sin(beta),
0, 1, 0,
-sin(beta), 0, cos(beta)), 3, 3)
R3 <- matrix(c(1, 0, 0,
0, cos(theta), -sin(theta),
0, sin(theta), cos(theta)), 3, 3)
R <- R1 %*% R2 %*% R3
switch(system,
cartesian = {
Y <- X %*% S %*% R
},
polar = {
stop("polar coordinates must be of two dimensions")
},
spherical = {
phi <- X[,2]
theta <- X[,3]
Z <- X[,1] * cbind(sin(phi) * cos(theta), sin(phi) * sin(theta),
cos(phi)) %*% S %*% R
x <- Z[,1]
y <- Z[,2]
z <- Z[,3]
Y <- cbind(sqrt(x^2 + y^2 + z^2), atan(y / x),
atan(sqrt(x^2 + y^2) / z))
}
)
} else {
stop("anisotropy supported only for 2-D and 3-D coordinate systems")
}
Y
}
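## Sketch of a 2-D anisotropy correction (hypothetical parameters): rotate
## the coordinates by pi/4 and shrink the minor axis by a ratio of 2 before
## distances are computed.
if (FALSE) {
xy <- matrix(c(1, 0, 0, 1, 1, 1), ncol = 2, byrow = TRUE)
anisotropic(xy, par = c(pi / 4, 2), system = "cartesian")
}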
haversine <- function(x, y, r = 3956)
{
if(is.vector(x)) x <- matrix(x, 1, 2)
if(is.vector(y)) y <- matrix(y, 1, 2)
rad <- pi / 180
z <- sin((y - x) * (rad / 2))^2
a <- z[,2] + cos(rad * x[,2]) * cos(rad * y[,2]) * z[,1]
(2 * r) * atan2(sqrt(a), sqrt(1 - a))
}
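## Great-circle distance between two (longitude, latitude) points given in
## degrees; the default r = 3956 yields miles (coordinates are hypothetical):
if (FALSE) {
haversine(c(-74.006, 40.713), c(-0.128, 51.507)) # New York to London
}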
cor.exp <- function(x, range = 1, p = 1)
{
if (range <= 0 || p <= 0)
stop("Exponential correlation parameters must be > 0")
if (p == 1) exp(x / (-1 * range))
else exp(-1 * (x / range)^p)
}
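## cor.exp() covers both the exponential (p = 1) and the powered-exponential
## model; at a distance equal to the range the exponential correlation is
## exp(-1):
if (FALSE) {
cor.exp(1, range = 1) # exp(-1), about 0.368
cor.exp(0.5, range = 1, p = 2) # Gaussian-type decay, exp(-0.25)
}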
cor.gneiting <- function(x, range = 1)
{
if (range <= 0)
stop("Gneiting correlation parameter must be > 0")
range <- range / 0.3008965026325734
r <- (x < range)
x0 <- x[r] / range
r[r] <- (1 + 8 * x0 + 25 * x0^2 + 32 * x0^3) * (1 - x0)^8
r
}
cor.lin <- function(x, range = 1)
{
if (range <= 0)
stop("Linear correlation parameter must be > 0")
r <- (x < range)
r[r] <- 1 - x[r] / range
r
}
cor.matern <- function(x, range = 1, scale = 1)
{
if(range <= 0 || scale <= 0)
stop("Matern correlation parameters must be > 0")
idx <- (x > 0)
r <- as.double(!idx)
x0 <- x[idx] / range
r[idx] <- x0^scale * besselK(x0, scale) / (2^(scale - 1) * gamma(scale))
r
}
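## Consistency check: with scale (smoothness) 0.5 the Matern model should
## reduce to the exponential model.
if (FALSE) {
x <- seq(0.1, 3, by = 0.1)
all.equal(cor.matern(x, range = 1, scale = 0.5), cor.exp(x, range = 1))
}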
cor.cauchy <- function(x, range = 1)
{
if (range <= 0)
stop("Cauchy correlation parameter must be > 0")
1 / (1 + (x / range)^2)
}
cor.spher <- function(x, range = 1)
{
if (range <= 0)
stop("Spherical correlation parameter must be > 0")
r <- (x < range)
x0 <- x[r] / range
r[r] <- 1 - 1.5 * x0 + 0.5 * x0^3
r
}
cor.wave <- function(x, range = 1)
{
if (range <= 0)
stop("Sine wave correlation parameter must be > 0")
x0 <- (x / range)
sin(x0) / x0
}
cor.exp2 <- function(x, t, x.range = 1, x.p = 1, t.range = 1, t.p = 1, lambda = 0)
{
if (t.range <= 0 || x.range <= 0 || x.p <= 0 || t.p <= 0 || lambda < 0)
stop("Exponential correlation parameters must be > 0")
x0 <- if (x.p == 1) x / (-1 * x.range)
else -1 * (x / x.range)^x.p
t0 <- if (t.p == 1) t / (-1 * t.range)
else -1 * (t / t.range)^t.p
exp(x0 - lambda * x0 * t0 + t0)
}
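## With no space-time interaction (lambda = 0) the correlation factorises
## into the product of the two marginal exponential correlations
## (hypothetical values):
if (FALSE) {
cor.exp2(1, 2, x.range = 2, t.range = 3, lambda = 0) # exp(-1/2 - 2/3)
cor.exp(1, range = 2) * cor.exp(2, range = 3) # same value
}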
cor.exp2dt <- function(x, t1, t2, x.range = 1, x.p = 1, t.range = 1, lambda = 0)
{
if (t.range <= 0 || x.range <= 0 || x.p <= 0 || lambda < 0)
stop("Exponential correlation parameters must be > 0")
if (is.vector(t1)) t1 <- matrix(t1, 1, 2)
if (is.vector(t2)) t2 <- matrix(t2, 1, 2)
x0 <- if (x.p == 1) x / (-1 * x.range)
else -1 * (x / x.range)^x.p
overlap <- pmin(t1[,2], t2[,2]) - pmax(t1[,1], t2[,1])
overlap[overlap < 0] <- 0
norm <- (t1 %*% c(-1, 1)) * (t2 %*% c(-1, 1))
if (lambda == 0) theta <- t.range
else theta <- t.range / (1 - lambda * x0)
val <- (theta^2 * exp(abs(t1[, c(1,1,2,2)] - t2[, c(2,1,2,1)]) /
(-1 * theta)) %*% c(1, -1, -1, 1) + 2 * theta * overlap) / norm
exp(x0) * as.vector(val)
} |
context("count")
s1 <- system.file('examples/SAScode/MainAnalysis.SAS', package='sasMap')
s2 <- system.file('examples/SAScode/Macros/Util1.SAS', package='sasMap')
sasCode1 <- loadSAS(s1)
sasCode2 <- loadSAS(s2)
test_that("Counts data steps", {
expect_equal(countDataSteps(sasCode1), 2)
expect_equal(countDataSteps(sasCode2), 0)
})
test_that("Counts proc steps", {
expect_equal(countProcSteps(sasCode1), 0)
expect_equal(countProcSteps(sasCode2), 2)
})
test_that("Counts lines", {
expect_equal(countLines(sasCode1), 15)
expect_equal(countLines(sasCode2), 20)
})
test_that("Counts statements", {
expect_equal(countStatements(sasCode1), 10)
expect_equal(countStatements(sasCode2), 14)
}) |
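# annu.fv(): future value of an annuity. For an ordinary annuity (type = 0)
#   FV = pmt * ((1 + i)^n - 1) / i,
# and for an annuity-due (type = 1) each payment earns one extra period of
# interest, so the same amount is multiplied by (1 + i).
# Illustrative calls (hypothetical values):
#   annu.fv(100, i = 0.05, n = 10)            # about 1257.79
#   annu.fv(100, i = 0.05, n = 10, type = 1)  # about 1320.68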
annu.fv <- function(pmt,i,n,type = 0){
if(type == 1){
fv <- pmt*((((1+i)^n)-1)/i)*(1+i)
}else{
fv <- pmt*((((1+i)^n)-1)/i)
}
return(fv)
} |
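# IRT.predict(): expected item responses, category probabilities and
# (standardized) residuals implied by a fitted IRT model, delegated to the
# Rcpp kernel cdm_rcpp_irt_predict(). A hypothetical call, assuming `mod`
# has an IRT.irfprob() method and `resp` is the item response matrix:
#   pred <- IRT.predict(mod, dat = resp)
#   str(pred$expected)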
IRT.predict <- function( object, dat, group=1 )
{
resp <- as.matrix(dat)
irf1 <- IRT.irfprob( object )
irf1[ is.na(irf1) ] <- 0
N <- nrow(resp)
I <- ncol(resp)
TP <- dim(irf1)[3]
K <- dim(irf1)[2]
if ( length( dim(irf1) )==4 ){
irf1 <- irf1[,,,group]
}
irf1_ <- as.numeric(irf1)
res0 <- cdm_rcpp_irt_predict( resp=resp, irf1=irf1_, K=K, TP=TP )
probs.categ <- array( res0$probs_categ, dim=c(N,K,TP,I) )
pred <- res0$pred
var1 <- res0$var1
resid1 <- res0$resid1
sresid1 <- res0$sresid1
res <- list( "expected"=pred, "probs.categ"=probs.categ,
"variance"=var1, "residuals"=resid1, "stand.resid"=sresid1 )
return(res)
} |
"ch3a" |
dmsen <- function(x, mu = rep(0, d), Sigma, theta = Inf, formula = "direct") {
if (missing(Sigma)) {
stop("Sigma is missing")
}
if (theta < 0) {
stop("theta must be greater than, or equal to, 0")
}
if (is.matrix(Sigma)) {
d <- ncol(Sigma)
}
if (!is.matrix(Sigma)) {
d <- 1
}
if (is.vector(x)) {
x <- matrix(x, length(x), 1)
Sigma <- matrix(Sigma, nrow = d, ncol = d)
}
if (formula == "direct") {
delta <- sapply(1:nrow(x), function(i) t(as.vector(t(x[i, ]) - mu)) %*% solve(Sigma) %*% as.vector(t(x[i, ]) - mu))
delta <- replace(delta, delta == 0, 1 / (theta * (2 *
pi)^(d / 2) * (d / 2 + 1)) * (1 - (1 - theta)^(d / 2 +
1)))
pdfgamma <- expint::gammainc(a = (d / 2 + 1), x = 1 / 2 *
delta + theta) * (1 / 2 * delta + theta)^(-(d / 2 +
1))
pdfconst <- (2 * pi)^(-d / 2) * theta * exp(theta) * det(Sigma)^(-1 / 2)
PDF <- pdfconst * pdfgamma
}
if (formula == "indirect") {
delta <- sapply(1:nrow(x), function(i) t(as.vector(t(x[i, ]) - mu)) %*% solve(Sigma) %*% as.vector(t(x[i, ]) - mu))
intf <- function(w, gamm) {
w^(d / 2) * exp(-w * gamm)
}
pdfinteg <- sapply(1:nrow(x), function(i) {
stats::integrate(intf,
lower = 1, upper = Inf, gamm = delta[i] / 2 + theta
)$value
})
pdfconst <- (2 * pi)^(-d / 2) * theta * exp(theta) * det(Sigma)^(-1 / 2)
PDF <- pdfconst * pdfinteg
}
if (formula == "series") {
delta <- sapply(1:nrow(x), function(i) t(as.vector(t(x[i, ]) - mu)) %*% solve(Sigma) %*% as.vector(t(x[i, ]) - mu))
delta <- replace(delta, delta == 0, 1 / (theta * (2 *
pi)^(d / 2) * (d / 2 + 1)) * (1 - (1 - theta)^(d / 2 +
1)))
n <- d / 2
term <- sapply(1:length(delta), function(j) {
exp(-delta[j] / 2 -
theta) * (delta[j] / 2 + theta)^(-1) * (1 + sum(sapply(
1:floor(n),
function(i) {
prod(seq(from = n, to = n - i + 1, by = -1)) *
(delta[j] / 2 + theta)^(-i)
}
)))
})
if (d %% 2 == 1) {
term <- term + sapply(1:length(delta), function(j) {
prod(seq(
from = n,
to = 0.5, by = -1
)) * sqrt(pi) * 2 * 1 / (delta[j] / 2 +
theta)^(floor(n) + 1 + 1 / 2) * (1 - stats::pnorm(sqrt(2) *
sqrt(delta[j] / 2 + theta)))
})
}
PDF <- (2 * pi)^(-d / 2) * det(Sigma)^(-1 / 2) * theta *
exp(theta) * term
}
return(PDF)
} |
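## Hypothetical usage of dmsen() above -- the density of a scale mixture of
## multivariate normals with shifted-exponential mixing weight (rate theta);
## away from x = mu the three formulas should agree:
if (FALSE) {
x <- rbind(c(0.5, 0.5), c(1, 1))
dmsen(x, mu = c(0, 0), Sigma = diag(2), theta = 1, formula = "direct")
dmsen(x, mu = c(0, 0), Sigma = diag(2), theta = 1, formula = "indirect")
}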
test_logistic_cost <- function() {
x <- c(1, 2, 3, 4, 5, 6, 7)
y <- c(0, 0, 1, 0, 1, 1, 0)
w <- c(1, 1, 1, 1, 1, 1, 1)
il <- numeric(length(x))
sm3 <- summarize_input(x,y,w,0,3,-1)
expect3 <- list(max_x = 4, min_x = 1, saw_y_pos = TRUE, max_x_pos = 3, min_x_pos = 3,
saw_y_neg = TRUE, max_x_neg = 4, min_x_neg = 1, total_w = 4,
total_wy = 1, k_points = 4, saw_data = TRUE, x_varies = TRUE,
y_varies = TRUE, seperable = FALSE)
msg <- wrapr::map_to_char(sm3)
expect_equal(sm3, expect3, info = msg)
for(k in wrapr::seqi(0, 4)) {
m1 <- logistic_solve1(x, y, w, il, 0, k, -1)
msg <- paste("k", k, wrapr::map_to_char(m1))
expect_true(is.numeric(m1), info = msg)
expect_equal(2, length(m1), info = msg)
expect_true(!any(is.na(m1)), info = msg)
expect_true(!any(is.nan(m1)), info = msg)
expect_true(!any(is.infinite(m1)), info = msg)
lf <- logistic_fits(x, y, w, 0, k)
msg <- paste("k", k, wrapr::map_to_char(m1), wrapr::map_to_char(lf))
expect_true(is.numeric(lf), info = msg)
expect_equal(k+1, length(lf), info = msg)
expect_true(!any(is.na(lf)), info = msg)
expect_true(!any(is.nan(lf)), info = msg)
expect_true(!any(is.infinite(lf)), info = msg)
if(k>=3) {
d <- data.frame(x = x[1:(k+1)], y = y[1:(k+1)])
m <- glm(y~x, data=d, family = binomial)
cm <- as.numeric(coef(m))
diff1 <- max(abs(m1-cm))
msg1 <- paste("coef problem", k, diff1,
"RcppDynProg", wrapr::map_to_char(m1),
"glm", wrapr::map_to_char(cm))
expect_true(diff1<=1e-3, info = msg1)
p <- as.numeric(predict(m, newdata = d, type = "link"))
diff2 <- max(abs(lf-p))
msg2 <- paste("link problem", k, diff2,
"RcppDynProg", wrapr::map_to_char(lf),
"glm", wrapr::map_to_char(p))
expect_true(diff2<=1e-3, info = msg2)
}
}
invisible(NULL)
}
test_logistic_cost() |
context("XLSX Lines")
library(xml2)
test_that("segments don't have fill", {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
segments(0.5, 0.5, 1, 1)
dev.off()
doc <- read_xml(file)
fill_node <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:solidFill", ns = xml_ns( doc ) )
expect_is( fill_node, "xml_missing")
})
test_that("lines don't have fill", {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
lines(c(0.5, 1, 0.5), c(0.5, 1, 1))
dev.off()
doc <- read_xml(file)
fill_node <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:solidFill", ns = xml_ns( doc ) )
expect_is( fill_node, "xml_missing")
})
test_that("polygons do have fill", {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
polygon(c(0.5, 1, 0.5), c(0.5, 1, 1), col = "red", border = "blue")
dev.off()
doc <- read_xml(file)
fill_node <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:solidFill", ns = xml_ns( doc ) )
expect_is( fill_node, "xml_node")
})
test_that("polygons without border", {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
polygon(c(0.5, 1, 0.5), c(0.5, 1, 1), col = "red", border = NA)
dev.off()
doc <- read_xml(file)
fill_color <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:solidFill/a:srgbClr", ns = xml_ns( doc ) )
expect_equal(xml_attr(fill_color, "val"), "FF0000")
line_color <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:ln", ns = xml_ns( doc ))
expect_is( line_color, "xml_missing")
})
dash_array <- function(...) {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot(1:3, ..., axes = FALSE, xlab = "", ylab = "", type = "l")
dev.off()
doc <- read_xml(file)
dash <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:ln/a:prstDash", ns = xml_ns( doc ))
dash
}
custom_dash_array <- function(...) {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot(1:3, ..., axes = FALSE, xlab = "", ylab = "", type = "l")
dev.off()
doc <- read_xml(file)
dash <- xml_find_all(doc, ".//xdr:sp/xdr:spPr/a:ln/a:custDash/a:ds", ns = xml_ns( doc ))
as.character( unlist(lapply( dash, xml_attrs)) )
}
test_that("lty are ok", {
expect_equal(xml_attr(dash_array(lty = 1), "val"), "solid")
expect_equal(xml_attr(dash_array(lty = 2), "val"), "dash")
expect_equal(xml_attr(dash_array(lty = 3), "val"), "dot")
expect_equal(custom_dash_array(lty = 4), c("100000", "300000", "400000", "300000"))
expect_equal(xml_attr(dash_array(lty = 5), "val"), "lgDash")
expect_equal(custom_dash_array(lty = 6), c("200000", "200000", "600000", "200000"))
expect_equal(custom_dash_array(lty = "1F"), c("100000", "1500000"))
expect_equal(custom_dash_array(lty = "1234"), c("100000", "200000", "300000", "400000"))
})
test_that("lty scales with lwd", {
expect_equal(custom_dash_array(lty = 4), c("100000", "300000", "400000", "300000"))
expect_equal(custom_dash_array(lty = 4, lwd = 2), c("200000", "600000", "800000", "600000"))
})
test_that("line join shapes", {
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
lines(c(0.3, 0.5, 0.7), c(0.1, 0.9, 0.1), lwd = 15, ljoin = "round")
dev.off()
doc <- read_xml(file)
join_shape <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:ln/a:round", ns = xml_ns( doc ) )
expect_is(join_shape, "xml_node")
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
lines(c(0.3, 0.5, 0.7), c(0.1, 0.9, 0.1), lwd = 15, ljoin = "mitre")
dev.off()
doc <- read_xml(file)
join_shape <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:ln/a:miter", ns = xml_ns( doc ) )
expect_is(join_shape, "xml_node")
file <- tempfile()
dml_xlsx( file = file, bg = "transparent" )
plot.new()
lines(c(0.3, 0.5, 0.7), c(0.1, 0.9, 0.1), lwd = 15, ljoin = "bevel")
dev.off()
doc <- read_xml(file)
join_shape <- xml_find_first(doc, ".//xdr:sp/xdr:spPr/a:ln/a:bevel", ns = xml_ns( doc ) )
expect_is(join_shape, "xml_node")
}) |
.dccspec = function(uspec, VAR = FALSE, robust = FALSE, lag = 1, lag.max = NULL,
lag.criterion = c("AIC", "HQ", "SC", "FPE"), external.regressors = NULL,
robust.control = list("gamma" = 0.25, "delta" = 0.01, "nc" = 10, "ns" = 500),
dccOrder = c(1,1), asymmetric = FALSE, distribution = c("mvnorm", "mvt", "mvlaplace"),
start.pars = list(), fixed.pars = list())
{
VAR.opt = list()
if(is.null(VAR)) VAR.opt$VAR = FALSE else VAR.opt$VAR = as.logical(VAR)
if(is.null(robust)) VAR.opt$robust = FALSE else VAR.opt$robust = as.logical(robust)
if(is.null(lag)) VAR.opt$lag = 1 else VAR.opt$lag = as.integer(lag)
if(is.null(lag.max)) VAR.opt$lag.max = NULL else VAR.opt$lag.max = as.integer(max(1, lag.max))
if(is.null(lag.criterion)) VAR.opt$lag.criterion = "AIC" else VAR.opt$lag.criterion = lag.criterion[1]
if(is.null(external.regressors)) VAR.opt$external.regressors = NULL else VAR.opt$external.regressors = external.regressors
rc = list("gamma" = 0.25, "delta" = 0.01, "nc" = 10, "ns" = 500)
rcmatch = match(names(robust.control), c("gamma", "delta", "nc", "ns"))
if(length(rcmatch[!is.na(rcmatch)]) > 0){
rx = which(!is.na(rcmatch))
rc[rcmatch[!is.na(rcmatch)]] = robust.control[rx]
}
VAR.opt$robust.control = rc
.eps = .Machine$double.eps
modeldata = list()
modeldesc = list()
m = length(uspec@spec)
if(is.null(distribution)) distribution = "mvnorm"
distribution = distribution[1]
valid.distributions = c("mvnorm", "mvt", "mvlaplace")
if(!any(distribution == valid.distributions)) stop("\nInvalid Distribution Choice\n", call. = FALSE)
modelinc = rep(0, 10)
names(modelinc) = c("var", "mvmxreg", "dcca", "dccb", "dccg", "mshape", "mskew", "aux", "aux", "aux")
if(distribution == "mvt") modelinc[6] = 1
if(is.null(dccOrder)){
modelinc[3] = 1
modelinc[4] = 1
} else{
modelinc[3] = as.integer( dccOrder[1] )
modelinc[4] = as.integer( dccOrder[2] )
}
if( asymmetric ) modelinc[5] = modelinc[3]
if( VAR ){
if(is.null(VAR.opt$lag)) modelinc[1] = 1 else modelinc[1] = as.integer( VAR.opt$lag )
if(!is.null(VAR.opt$external.regressors)){
if(!is.matrix(VAR.opt$external.regressors)) stop("\nexternal.regressors must be a matrix.")
modelinc[2] = dim(VAR.opt$external.regressors)[2]
modeldata$mexdata = VAR.opt$external.regressors
} else{
modeldata$mexdata = NULL
}
}
modelinc[10] = which(c("mvnorm", "mvt", "mvlaplace") == distribution)
maxdccOrder = max(dccOrder)
modeldesc$distribution = distribution
modeldesc$dccmodel = ifelse(asymmetric, "ADCC", "DCC")
if( !is(uspec, "uGARCHmultispec") ) stop("\ndccspec-->error: uspec must be a uGARCHmultispec object")
varmodel = list()
umodel = vector(mode ="list")
if( modelinc[1]>0 ){
varmodel$robust = VAR.opt$robust
varmodel$lag.max = VAR.opt$lag.max
varmodel$lag.criterion = VAR.opt$lag.criterion
varmodel$robust.control = VAR.opt$robust.control
umodel$modelinc = matrix(0, ncol = m, nrow = 21)
rownames(umodel$modelinc) = names(uspec@spec[[1]]@model$modelinc[1:21])
umodel$modeldesc = list()
umodel$vt = sapply(uspec@spec, FUN = function(x) x@model$modelinc[22])
umodel$modeldesc$vmodel = vector(mode = "character", length = m)
umodel$modeldesc$vsubmodel = vector(mode = "character", length = m)
umodel$start.pars = umodel$fixed.pars = vector(mode = "list", length = m)
umodel$modeldesc$distribution = vector(mode = "character", length = m)
umodel$modeldata = list()
umodel$modeldata$vexdata = vector(mode = "list", length = m)
for(i in 1:m){
umodel$modeldesc$vmodel[i] = uspec@spec[[i]]@model$modeldesc$vmodel
umodel$modeldesc$vsubmodel[i] = ifelse(is.null(uspec@spec[[i]]@model$modeldesc$vsubmodel),"GARCH",uspec@spec[[i]]@model$modeldesc$vsubmodel)
umodel$modeldesc$distribution[i] = uspec@spec[[i]]@model$modeldesc$distribution
umodel$modelinc[,i] = uspec@spec[[i]]@model$modelinc[1:21]
umodel$modelinc[1:6,i] = 0
umodel$modeldata$vexdata[[i]] = if(is.null(uspec@spec[[i]]@model$modeldata$vexdata)) NA else uspec@spec[[i]]@model$modeldata$vexdata
umodel$start.pars[[i]] = if(is.null(uspec@spec[[i]]@model$start.pars)) NA else uspec@spec[[i]]@model$start.pars
umodel$fixed.pars[[i]] = if(is.null(uspec@spec[[i]]@model$fixed.pars)) NA else uspec@spec[[i]]@model$fixed.pars
umodel$modeldata$mexdata[[i]] = NA
}
} else{
varmodel$lag.max = 1
varmodel$lag.criterion = "HQ"
umodel$modelinc = matrix(0, ncol = m, nrow = 21)
rownames(umodel$modelinc) = names(uspec@spec[[1]]@model$modelinc[1:21])
umodel$modeldesc = list()
umodel$vt = sapply(uspec@spec, FUN = function(x) x@model$modelinc[22])
umodel$modeldesc$vmodel = vector(mode = "character", length = m)
umodel$modeldesc$vsubmodel = vector(mode = "character", length = m)
umodel$start.pars = umodel$fixed.pars = vector(mode = "list", length = m)
umodel$modeldesc$distribution = vector(mode = "character", length = m)
umodel$modeldata = list()
umodel$modeldata$mexdata = vector(mode = "list", length = m)
umodel$modeldata$vexdata = vector(mode = "list", length = m)
for(i in 1:m){
umodel$modeldesc$vmodel[i] = uspec@spec[[i]]@model$modeldesc$vmodel
umodel$modeldesc$vsubmodel[i] = ifelse(is.null(uspec@spec[[i]]@model$modeldesc$vsubmodel),"GARCH",uspec@spec[[i]]@model$modeldesc$vsubmodel)
umodel$modeldesc$distribution[i] = uspec@spec[[i]]@model$modeldesc$distribution
umodel$modelinc[,i] = uspec@spec[[i]]@model$modelinc[1:21]
umodel$modeldata$mexdata[[i]] = if(is.null(uspec@spec[[i]]@model$modeldata$mexdata)) NA else uspec@spec[[i]]@model$modeldata$mexdata
umodel$modeldata$vexdata[[i]] = if(is.null(uspec@spec[[i]]@model$modeldata$vexdata)) NA else uspec@spec[[i]]@model$modeldata$vexdata
umodel$start.pars[[i]] = if(is.null(uspec@spec[[i]]@model$start.pars)) NA else uspec@spec[[i]]@model$start.pars
umodel$fixed.pars[[i]] = if(is.null(uspec@spec[[i]]@model$fixed.pars)) NA else uspec@spec[[i]]@model$fixed.pars
}
}
maxgarchOrder = max( sapply(uspec@spec, FUN = function(x) x@model$maxOrder) )
if(modelinc[1]>0){
maxgarchOrder = max(c(maxgarchOrder, modelinc[1]))
}
pos = 1
pos.matrix = matrix(0, ncol = 3, nrow = 5)
colnames(pos.matrix) = c("start", "stop", "include")
rownames(pos.matrix) = c("dcca", "dccb", "dccg", "mshape", "mskew")
for(i in 1:5){
if( modelinc[2+i] > 0 ){
pos.matrix[i,1:3] = c(pos, pos+modelinc[2+i]-1, 1)
pos = max(pos.matrix[1:i,2]+1)
}
}
mm = sum(modelinc[3:7])
mm = mm - length( which(modelinc[c(3:7)]>0) )
pars = matrix(0, ncol = 6, nrow = 5 + mm)
colnames(pars) = c("Level", "Fixed", "Include", "Estimate", "LB", "UB")
pidx = matrix(NA, nrow = 5, ncol = 2)
colnames(pidx) = c("begin", "end")
rownames(pidx) = c("dcca", "dccb", "dccg", "mshape", "mskew")
fixed.names = names(fixed.pars)
start.names = names(start.pars)
fixed.pars = unlist(fixed.pars)
start.pars = unlist(start.pars)
pn = 1
pnames = NULL
nx = 0
if(pos.matrix[1,3] == 1){
pn = length( seq(pos.matrix[1,1], pos.matrix[1,2], by = 1) )
for(i in 1:pn){
nnx = paste("dcca", i, sep="")
pars[(nx+i), 1] = 0.05/pn
if(any(substr(start.names, 1, nchar(nnx))==nnx)){
nix = which(start.names == nnx)
pars[(nx+i), 1] = start.pars[nix]
}
pars[(nx+i), 3] = 1
pars[(nx+i), 5] = .eps
pars[(nx+i), 6] = 1
if(any(substr(fixed.names, 1, nchar(nnx))==nnx)){
nix = which(fixed.names == nnx)
pars[(nx+i), 1] = fixed.pars[nix]
pars[(nx+i), 2] = 1
} else{
pars[(nx+i), 4] = 1
}
pnames = c(pnames, nnx)
}
} else{
pnames = c(pnames, "dcca")
}
pidx[1,1] = 1
pidx[1,2] = pn
nx = pn
pn = 1
pidx[2,1] = nx+1
if(pos.matrix[2,3] == 1){
pn = length( seq(pos.matrix[2,1], pos.matrix[2,2], by = 1) )
for(i in 1:pn){
nnx = paste("dccb", i, sep="")
pars[(nx+i), 1] = 0.9/pn
if(any(substr(start.names, 1, nchar(nnx))==nnx)){
nix = which(start.names == nnx)
pars[(nx+i), 1] = start.pars[nix]
}
pars[(nx+i), 3] = 1
pars[(nx+i), 5] = .eps
pars[(nx+i), 6] = 1
if(any(substr(fixed.names, 1, nchar(nnx))==nnx)){
nix = which(fixed.names == nnx)
pars[(nx+i), 1] = fixed.pars[nix]
pars[(nx+i), 2] = 1
} else{
pars[(nx+i), 4] = 1
}
pnames = c(pnames, nnx)
}
} else{
pnames = c(pnames, "dccb")
}
pidx[2,2] = nx+pn
nx = nx + pn
pn = 1
pidx[3,1] = nx+1
if(pos.matrix[3,3] == 1){
pn = length( seq(pos.matrix[3,1], pos.matrix[3,2], by = 1) )
for(i in 1:pn){
nnx = paste("dccg", i, sep="")
pars[(nx+i), 1] = 0.05/pn
if(any(substr(start.names, 1, nchar(nnx))==nnx)){
nix = which(start.names == nnx)
pars[(nx+i), 1] = start.pars[nix]
}
pars[(nx+i), 3] = 1
pars[(nx+i), 5] = .eps
pars[(nx+i), 6] = 1
if(any(substr(fixed.names, 1, nchar(nnx))==nnx)){
nix = which(fixed.names == nnx)
pars[(nx+i), 1] = fixed.pars[nix]
pars[(nx+i), 2] = 1
} else{
pars[(nx+i), 4] = 1
}
pnames = c(pnames, nnx)
}
} else{
pnames = c(pnames, "dccg")
}
pidx[3,2] = nx+pn
nx = nx + pn
pn = 1
pidx[4,1] = nx+1
if(modelinc[6]<=1){
if(pos.matrix[4,3]==1){
pars[nx+pn, 3] = 1
pars[nx+pn, 1] = 5
pars[(nx+pn), 5] = 4
pars[(nx+pn), 6] = 50
if(any(substr(start.names, 1, 6) == "mshape")) pars[nx+pn, 1] = start.pars["mshape"]
if(any(substr(fixed.names, 1, 6) == "mshape")){
pars[nx+pn,2] = 1
pars[nx+pn, 1] = fixed.pars["mshape"]
} else{
pars[nx+pn,4] = 1
}
}
pnames = c(pnames, "mshape")
} else{
if(pos.matrix[4,3] == 1){
pn = length( seq(pos.matrix[4,1], pos.matrix[4,2], by = 1) )
for(i in 1:pn){
nnx = paste("mshape", i, sep="")
pars[(nx+i), 1] = 5
if(any(substr(start.names, 1, nchar(nnx))==nnx)){
nix = which(start.names == nnx)
pars[(nx+i), 1] = start.pars[nix]
}
pars[(nx+i), 3] = 1
pars[(nx+i), 5] = 4
pars[(nx+i), 6] = 50
if(any(substr(fixed.names, 1, nchar(nnx))==nnx)){
nix = which(fixed.names == nnx)
pars[(nx+i), 1] = fixed.pars[nix]
pars[(nx+i), 2] = 1
} else{
pars[(nx+i), 4] = 1
}
pnames = c(pnames, nnx)
}
} else{
pnames = c(pnames, "mshape")
}
}
pidx[4,2] = nx+pn
nx = nx + pn
pn = 1
pidx[5,1] = nx+1
if(modelinc[7]<=1){
if(pos.matrix[5,3]==1){
pars[nx+pn, 3] = 1
pars[nx+pn, 1] = 0.5
pars[(nx+pn), 5] = -1
pars[(nx+pn), 6] = 1
if(any(substr(start.names, 1, 5) == "mskew")) pars[nx+pn, 1] = start.pars["mskew"]
if(any(substr(fixed.names, 1, 5) == "mskew")){
pars[nx+pn,2] = 1
pars[nx+pn, 1] = fixed.pars["mskew"]
} else{
pars[nx+pn,4] = 1
}
}
pnames = c(pnames, "mskew")
} else{
if(pos.matrix[5,3] == 1){
pn = length( seq(pos.matrix[5,1], pos.matrix[5,2], by = 1) )
for(i in 1:pn){
nnx = paste("mskew", i, sep="")
pars[(nx+i), 1] = 3
pars[(nx+i), 5] = -1
pars[(nx+i), 6] = 1
if(any(substr(start.names, 1, nchar(nnx))==nnx)){
nix = which(start.names == nnx)
pars[(nx+i), 1] = start.pars[nix]
}
if(any(substr(fixed.names, 1, nchar(nnx))==nnx)){
nix = which(fixed.names == nnx)
pars[(nx+i), 1] = fixed.pars[nix]
pars[(nx+i), 2] = 1
} else{
pars[(nx+i), 4] = 1
}
pnames = c(pnames, nnx)
}
} else{
pnames = c(pnames, "mskew")
}
}
pidx[5,2] = nx+pn
rownames(pars) = pnames
modeldesc$type = "2-step"
model = list(modelinc = modelinc, modeldesc = modeldesc, modeldata = modeldata, varmodel = varmodel,
pars = pars, start.pars = start.pars, fixed.pars = fixed.pars, maxgarchOrder = maxgarchOrder,
maxdccOrder = maxdccOrder, pos.matrix = pos.matrix, pidx = pidx)
model$DCC = ifelse(asymmetric, "aDCC", "DCC")
ans = new("DCCspec",
model = model,
umodel = umodel)
return(ans)
}
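## Hypothetical sketch of building a specification (assumes rugarch is
## available; .dccspec is the internal constructor, normally reached through
## the exported dccspec() wrapper):
if (FALSE) {
us = rugarch::ugarchspec(mean.model = list(armaOrder = c(0, 0)))
uspec = rugarch::multispec(list(us, us))
spec = .dccspec(uspec, dccOrder = c(1, 1), asymmetric = FALSE,
distribution = "mvnorm")
spec@model$pars
}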
.dccfit = function(spec, data, out.sample = 0, solver = "solnp",
solver.control = list(), fit.control = list(eval.se = TRUE,
stationarity = TRUE, scale = FALSE),
cluster = NULL, fit = NULL, VAR.fit = NULL, verbose = FALSE,
realizedVol = NULL, ...)
{
tic = Sys.time()
.eps = .Machine$double.eps
model = spec@model
umodel = spec@umodel
ufit.control = list()
if(is.null(fit.control$stationarity)){
ufit.control$stationarity = TRUE
} else {
ufit.control$stationarity = fit.control$stationarity
fit.control$stationarity = NULL
}
if(is.null(fit.control$scale)){
ufit.control$scale = TRUE
} else{
ufit.control$scale = fit.control$scale
fit.control$scale = NULL
}
if(is.null(fit.control$eval.se)) fit.control$eval.se = TRUE
if(length(solver)==2){
garch.solver = solver[1]
solver = solver[2]
} else{
garch.solver = solver[1]
}
solver = match.arg(tolower(solver)[1], c("solnp", "nlminb", "lbfgs","gosolnp"))
m = dim(data)[2]
if( is.null( colnames(data) ) ) cnames = paste("Asset_", 1:m, sep = "") else cnames = colnames(data)
colnames(umodel$modelinc) = cnames
xdata = .extractmdata(data)
if(!is.numeric(out.sample))
stop("\ndccfit-->error: out.sample must be numeric\n")
if(as.numeric(out.sample) < 0)
stop("\ndccfit-->error: out.sample must be positive\n")
n.start = round(out.sample, 0)
n = dim(xdata$data)[1]
if( (n-n.start) < 100)
stop("\ndccfit-->error: function requires at least 100 data points to run\n")
data = xdata$data
index = xdata$index
period = xdata$period
model$modeldata$data = data
model$modeldata$index = index
model$modeldata$period = period
T = model$modeldata$T = n - n.start
model$modeldata$n.start = n.start
model$modeldata$asset.names = cnames
if( model$modelinc[1]>0 ){
tmp = mvmean.varfit(model = model, data = data, VAR.fit = VAR.fit, T = T,
out.sample = out.sample, cluster = cluster)
model = tmp$model
zdata = tmp$zdata
mu = tmp$mu
varcoef = tmp$varcoef
p = tmp$p
N = tmp$N
} else{
zdata = data
ex = NULL
}
T = dim(zdata)[1] - out.sample
mspec = .makemultispec(umodel$modelinc, umodel$modeldesc$vmodel, umodel$modeldesc$vsubmodel,
umodel$modeldata$mexdata, umodel$modeldata$vexdata, umodel$start.pars,
umodel$fixed.pars, umodel$vt)
if( !is.null(fit) && is(fit, "uGARCHmultifit") ){
if(model$modelinc[1]>0){
for(i in 1:m){
if(sum(fit@fit[[i]]@model$modelinc[1:6])>0)
stop("\nThe user supplied fit object has a non-null mean specification but VAR already chosen for mean filtration!!!")
}
}
fitlist = fit
if(spec@model$modelinc[1]>0) model$mu = mu else model$mu = fitted(fitlist)
model$residuals = res = residuals(fitlist)
model$sigma = sig = sigma(fitlist)
if(umodel$modeldesc$vmodel[1]=="realGARCH") plik = sapply(fitlist@fit, function(x) sum(-x@fit$partial.log.likelihoods)) else plik = sapply(fitlist@fit, function(x) sum(-x@fit$log.likelihoods))
} else{
fitlist = multifit(multispec = mspec, data = xts(zdata, index), out.sample = n.start,
solver = garch.solver, solver.control = solver.control,
fit.control = ufit.control, cluster = cluster, realizedVol = realizedVol)
converge = sapply(fitlist@fit, FUN = function(x) x@fit$convergence)
if( any( converge == 1 ) ){
pr = which(converge != 1)
cat("\nNon-Converged:\n")
print(pr)
cat("\ndccfit-->error: convergence problem in univariate fit...")
cat("\n...returning uGARCHmultifit object instead...check and resubmit...")
return( fitlist )
}
if(umodel$modeldesc$vmodel[1]=="realGARCH") plik = sapply(fitlist@fit, function(x) sum(-x@fit$partial.log.likelihoods)) else plik = sapply(fitlist@fit, function(x) sum(-x@fit$log.likelihoods))
if(spec@model$modelinc[1]>0) model$mu = mu else model$mu = fitted(fitlist)
model$residuals = res = residuals(fitlist)
model$sigma = sig = sigma(fitlist)
}
stdresid = res/sig
modelinc = model$modelinc
midx = .fullinc(modelinc, umodel)
midx["omega",1:m]=1
mpars = midx*0
eidx = .estindfn(midx, mspec, model$pars)
unipars = lapply(fitlist@fit, FUN = function(x) x@fit$ipars[x@fit$ipars[,3]==1,1])
if(is.list(unipars)){
for(i in 1:length(unipars)){
uninames = names(unipars[[i]])
mpars[uninames, i] = unipars[[i]]
}
} else{
uninames = rownames(unipars)
mpars[uninames, 1:NCOL(unipars)] = unipars
}
mpars[which(midx[,m+1]==1), m+1] = as.numeric( model$pars[model$pars[,3]==1,1] )
ipars = model$pars
LB = ipars[,5]
UB = ipars[,6]
estidx = as.logical( ipars[,4] )
npars = sum(estidx)
Qbar = cov(stdresid)
if(modelinc[5]>0){
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid
Nbar = cov(astdresid)
} else{
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid*0
Nbar = matrix(0, m, m)
}
H = sig^2
mgarchenv = new.env(hash = TRUE)
arglist = list()
arglist$mgarchenv = mgarchenv
arglist$verbose = verbose
arglist$cluster = cluster
arglist$eval.se = fit.control$eval.se
arglist$solver = solver
arglist$fit.control = fit.control
arglist$cnames = cnames
arglist$m = m
arglist$T = T
arglist$data = zdata
arglist$index = index
arglist$realizedVol = realizedVol
arglist$model = model
arglist$fitlist = fitlist
arglist$umodel = umodel
arglist$midx = midx
arglist$eidx = eidx
arglist$mpars = mpars
arglist$ipars = ipars
arglist$estidx = estidx
arglist$dccN = npars
arglist$stdresid = stdresid
arglist$astdresid = astdresid
arglist$Ibar = Ibar
arglist$Qbar = Qbar
arglist$Nbar = Nbar
arglist$H = H
if(any(ipars[,2]==1)){
if(npars == 0){
if(fit.control$eval.se==0) {
warning("\ndccfit-->warning: all parameters fixed...returning dccfilter object instead\n")
xspex = spec
for(i in 1:m) xspex@umodel$fixed.pars[[i]] = as.list(fitlist@fit[[i]]@model$pars[fitlist@fit[[i]]@model$pars[,3]==1,1])
return(dccfilter(spec = xspex, data = xts(data, index), out.sample = out.sample,
cluster = cluster, VAR.fit = VAR.fit, realizedVol = realizedVol, ...))
} else{
use.solver = 0
ipars[ipars[,2]==1, 4] = 1
ipars[ipars[,2]==1, 2] = 0
arglist$pars = ipars
estidx = as.logical( ipars[,4] )
arglist$estidx = estidx
}
} else{
use.solver = 1
}
} else{
use.solver = 1
}
assign("rmgarch_llh", 1, envir = mgarchenv)
ILB = 0
IUB = 1
if(model$modelinc[5]> 0) Ifn = .adcccon else Ifn = .dcccon
if( solver == "solnp" | solver == "gosolnp") fit.control$stationarity = FALSE else fit.control$stationarity = TRUE
arglist$fit.control = fit.control
if( use.solver )
{
arglist$returnType = "llh"
solution = switch(model$modeldesc$distribution,
mvnorm = .dccsolver(solver, pars = ipars[estidx, 1], fun = normal.dccLLH1, Ifn, ILB,
IUB, gr = NULL, hessian = NULL, control = solver.control,
LB = ipars[estidx, 5], UB = ipars[estidx, 6], arglist = arglist),
mvlaplace = .dccsolver(solver, pars = ipars[estidx, 1], fun = laplace.dccLLH1, Ifn, ILB,
IUB, gr = NULL, hessian = NULL, control = solver.control,
LB = ipars[estidx, 5], UB = ipars[estidx, 6], arglist = arglist),
mvt = .dccsolver(solver, pars = ipars[estidx, 1], fun = student.dccLLH1, Ifn, ILB,
IUB, gr = NULL, hessian = NULL, control = solver.control,
LB = ipars[estidx, 5], UB = ipars[estidx, 6], arglist = arglist))
sol = solution$sol
hess = solution$hess
timer = Sys.time()-tic
convergence = sol$convergence
mpars[which(eidx[,(m+1)]==1, arr.ind = TRUE),m+1] = sol$pars
ipars[estidx, 1] = sol$pars
arglist$mpars = mpars
arglist$ipars = ipars
} else{
hess = NULL
timer = Sys.time()-tic
convergence = 0
sol = list()
sol$message = "all parameters fixed"
}
fit = list()
if( convergence == 0 ){
fit = switch(model$modeldesc$distribution,
mvnorm = .dccmakefitmodel(garchmodel = "dccnorm", f = normal.dccLLH2,
arglist = arglist, timer = 0, message = sol$message, fname = "normal.dccLLH2"),
mvlaplace = .dccmakefitmodel(garchmodel = "dcclaplace", f = laplace.dccLLH2,
arglist = arglist, timer = 0, message = sol$message, fname = "laplace.dccLLH2"),
mvt =.dccmakefitmodel(garchmodel = "dccstudent", f = student.dccLLH2,
arglist = arglist, timer = 0, message = sol$message, fname = "student.dccLLH2"))
fit$timer = Sys.time() - tic
} else{
fit$message = sol$message
fit$convergence = 1
}
fit$Nbar = Nbar
fit$Qbar = Qbar
fit$realizedVol = realizedVol
fit$plik = plik
model$mpars = mpars
model$ipars = ipars
model$pars[,1] = ipars[,1]
model$midx = midx
model$eidx = eidx
model$umodel = umodel
ans = new("DCCfit",
mfit = fit,
model = model)
return(ans)
}
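## Continuing the sketch above with a hypothetical T x 2 matrix (or xts) of
## returns `rets`, assuming the solver converges:
if (FALSE) {
fit = .dccfit(spec, data = rets, out.sample = 0, solver = "solnp")
rcor(fit)[, , 1] # first conditional correlation matrix
}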
.dccfilter = function(spec, data, out.sample = 0, filter.control = list(n.old = NULL),
cluster = NULL, varcoef = NULL, realizedVol = NULL, ...)
{
tic = Sys.time()
model = spec@model
umodel = spec@umodel
n.old = filter.control$n.old
m = dim(data)[2]
if( is.null( colnames(data) ) ) cnames = paste("Asset_", 1:m, sep = "") else cnames = colnames(data)
colnames(umodel$modelinc) = cnames
xdata = .extractmdata(data)
if(!is.numeric(out.sample))
stop("\ndccfilter-->error: out.sample must be numeric\n")
if(as.numeric(out.sample) < 0)
stop("\ndccfilter-->error: out.sample must be positive\n")
n.start = round(out.sample, 0)
n = dim(xdata$data)[1]
if( (n-n.start) < 100)
stop("\ndccfilter-->error: function requires at least 100 data points to run\n")
data = xdata$data
index = xdata$index
period = xdata$period
model$modeldata$data = data
model$modeldata$index = index
model$modeldata$period = period
T = model$modeldata$T = n - n.start
model$modeldata$n.start = n.start
model$modeldata$asset.names = cnames
if( spec@model$modelinc[1]>0 ){
tmp = mvmean.varfilter(model = model, data = data, varcoef = varcoef,
T = T, out.sample = out.sample)
model = tmp$model
zdata = tmp$zdata
mu = tmp$mu
p = tmp$p
N = tmp$N
} else{
zdata = data
ex = NULL
}
T = dim(zdata)[1] - out.sample
if(is.null(filter.control$n.old)) n.old = T
if(model$modelinc[1]>0){
for(i in 1:m){
if(sum(umodel$modelinc[1:6,i])>0)
stop("\nThe user supplied univariate spec object has a non-null mean specification but VAR already chosen for mean filtration!!!")
}
}
mspec = .makemultispec(umodel$modelinc, umodel$modeldesc$vmodel, umodel$modeldesc$vsubmodel,
umodel$modeldata$mexdata, umodel$modeldata$vexdata, umodel$start.pars,
umodel$fixed.pars, NULL)
filterlist = multifilter(multifitORspec = mspec, data = xts(zdata, index[1:nrow(zdata)]),
out.sample = out.sample, cluster = cluster, n.old = n.old, realizedVol = realizedVol, ...)
if(spec@model$modelinc[1]>0) model$mu = mu else model$mu = fitted(filterlist)
model$residuals = res = residuals(filterlist)
model$sigma = sig = sigma(filterlist)
stdresid = res/sig
if(is.null(filter.control$n.old)) dcc.old = dim(stdresid)[1] else dcc.old = n.old
modelinc = model$modelinc
midx = .fullinc(modelinc, umodel)
mpars = midx*0
eidx = midx
unipars = sapply(filterlist@filter, FUN = function(x) x@filter$ipars[x@filter$ipars[,3]==1,1])
if(is.list(unipars)){
for(i in 1:length(unipars)){
uninames = names(unipars[[i]])
mpars[uninames, i] = unipars[[i]]
}
} else{
uninames = rownames(unipars)
mpars[uninames, 1:NCOL(unipars)] = unipars
}
mpars[which(midx[,m+1]==1, arr.ind = TRUE), m+1] = as.numeric( model$pars[model$pars[,3]==1,1] )
ipars = model$pars
estidx = as.logical( ipars[,3] )
npars = sum(estidx)
Qbar = cov(stdresid[1:dcc.old, ])
if(modelinc[5]>0){
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid
Nbar = cov(astdresid[1:dcc.old, ])
} else{
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid*0
Nbar = matrix(0, m, m)
}
H = sig^2
mgarchenv = new.env(hash = TRUE)
arglist = list()
arglist$mgarchenv = mgarchenv
arglist$verbose = FALSE
arglist$cluster = cluster
arglist$filter.control = filter.control
arglist$cnames = cnames
arglist$m = m
arglist$T = T
arglist$n.old = n.old
arglist$dcc.old = dcc.old
arglist$data = zdata
arglist$index = index
arglist$model = model
arglist$filterlist = filterlist
arglist$realizedVol = realizedVol
arglist$umodel = umodel
arglist$midx = midx
arglist$eidx = eidx
arglist$mpars = mpars
arglist$ipars = ipars
arglist$estidx = estidx
arglist$dccN = npars
arglist$stdresid = stdresid
arglist$astdresid = astdresid
arglist$Ibar = Ibar
arglist$Qbar = Qbar
arglist$Nbar = Nbar
arglist$H = H
assign("rmgarch_llh", 1, envir = mgarchenv)
filt = switch(model$modeldesc$distribution,
mvnorm = .dccmakefiltermodel(garchmodel = "dccnorm", f = normalfilter.dccLLH2,
arglist = arglist, timer = 0, message = 0, fname = "normalfilter.dccLLH2"),
mvlaplace = .dccmakefiltermodel(garchmodel = "dcclaplace", f = laplacefilter.dccLLH2,
arglist = arglist, timer = 0, message = 0, fname = "laplacefilter.dccLLH2"),
mvt = .dccmakefiltermodel(garchmodel = "dccstudent", f = studentfilter.dccLLH2,
arglist = arglist, timer = 0, message = 0, fname = "studentfilter.dccLLH2"))
model$mpars = mpars
model$ipars = ipars
model$pars[,1] = ipars[,1]
model$midx = midx
model$eidx = eidx
model$umodel = umodel
filt$Nbar = Nbar
filt$Qbar = Qbar
filt$realizedVol = realizedVol
ans = new("DCCfilter",
mfilter = filt,
model = model)
return(ans)
}
.dccforecast = function(fit, n.ahead = 1, n.roll = 0, external.forecasts = list(mregfor = NULL, vregfor = NULL),
cluster = NULL, ...)
{
model = fit@model
modelinc = model$modelinc
ns = model$modeldata$n.start
if( n.roll > ns ) stop("n.roll must not be greater than out.sample!")
if(n.roll>1 && n.ahead>1) stop("\ndccforecast-->error: n.ahead must be equal to 1 when using n.roll\n")
if( fit@model$modelinc[5] > 0 && n.ahead > 1) stop("\ndccforecast-->error: the asymmetric DCC specification only supports n.ahead = 1 currently.\n")
tf = n.ahead + n.roll
if( !is.null( external.forecasts$mregfor ) ){
mregfor = external.forecasts$mregfor
if( !is.matrix(mregfor) ) stop("\nmregfor must be a matrix.")
if( dim(mregfor)[1] < tf ) stop("\nmregfor must have at least n.ahead + n.roll observations to be used")
mregfor = mregfor[1:tf, , drop = FALSE]
} else{
mregfor = NULL
}
if( !is.null( external.forecasts$vregfor ) ){
vregfor = external.forecasts$vregfor
if( !is.matrix(vregfor) ) stop("\nvregfor must be a matrix.")
if( dim(vregfor)[1] < tf ) stop("\nvregfor must have at least n.ahead + n.roll observations to be used")
vregfor = vregfor[1:tf, , drop = FALSE]
}
if( modelinc[1]>0 ){
if( modelinc[2] > 0 ){
if( is.null(external.forecasts$mregfor ) ){
warning("\nExternal Regressor Forecasts Matrix NULL...setting to zero...\n")
mregfor = matrix(0, ncol = modelinc[2], nrow = (n.roll + n.ahead) )
} else{
if( dim(mregfor)[2] != modelinc[2] ) stop("\ndccforecast-->error: wrong number of external regressors!...", call. = FALSE)
if( dim(mregfor)[1] < (n.roll + n.ahead) ) stop("\ndccforecast-->error: external regressor matrix has less points than requested forecast length (1+n.roll) x n.ahead!...", call. = FALSE)
}
} else{
mregfor = NULL
}
if(n.roll>1 && n.ahead>1) stop("\ndccforecast-->error: n.ahead must be equal to 1 when using n.roll\n")
if( n.ahead == 1 && (n.roll > ns) ) stop("\ndccforecast-->error: n.roll greater than out.sample!", call. = FALSE)
mu = varxforecast(X = fit@model$modeldata$data, Bcoef = model$varcoef, p = modelinc[1],
out.sample = ns, n.ahead = n.ahead, n.roll = n.roll, mregfor = mregfor)
} else{
mu = NULL
}
exf = external.forecasts
if( modelinc[1] > 0 ){
exf$mregfor = NULL
}
ans = .dccforecastm(fit, n.ahead = n.ahead, n.roll = n.roll, external.forecasts = exf, cluster = cluster, realizedVol = fit@mfit$realizedVol, ...)
if(modelinc[1]==0) mu = ans$mu
model$n.roll = n.roll
model$n.ahead = n.ahead
model$H = rcov(fit)
mforecast = list( H = ans$H, R = ans$R, Q = ans$Q, Rbar = ans$Rbar, mu = mu )
ans = new("DCCforecast",
mforecast = mforecast,
model = model)
return( ans )
}
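## Hypothetical forecast from the fit above: the H component of the
## mforecast slot holds one list element per roll, each an array of
## n.ahead forecast covariance matrices.
if (FALSE) {
fc = .dccforecast(fit, n.ahead = 10, n.roll = 0)
fc@mforecast$H[[1]]
}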
.dccforecastm = function(fit, n.ahead = 1, n.roll = 10, external.forecasts = list(mregfor = NULL, vregfor = NULL),
cluster = NULL, realizedVol = NULL, ...)
{
model = fit@model
modelinc = model$modelinc
umodel = model$umodel
m = dim(umodel$modelinc)[2]
ns = fit@model$modeldata$n.start
Data = fit@model$modeldata$data
if(modelinc[1]>0){
zdata = varxfilter(Data, p = model$modelinc[1], Bcoef = model$varcoef,
exogen = fit@model$modeldata$mexdata, postpad = c("constant"))$xresiduals
} else{
zdata = Data
}
fpars = lapply(1:m, FUN = function(i) fit@model$mpars[fit@model$midx[,i]==1,i])
mspec = .makemultispec(umodel$modelinc, umodel$modeldesc$vmodel, umodel$modeldesc$vsubmodel,
umodel$modeldata$mexdata, umodel$modeldata$vexdata, umodel$start.pars,
fpars, NULL)
filterlist = multifilter(multifitORspec = mspec, data = xts(zdata, fit@model$modeldata$index[1:nrow(zdata)]), out.sample = 0,
n.old = fit@model$modeldata$T, cluster = cluster, realizedVol = realizedVol)
n.roll = n.roll + 1
m = length(mspec@spec)
out.sample = fit@model$modeldata$n.start
mo = max(fit@model$maxdccOrder)
forclist = multiforecast(multifitORspec = mspec, data = xts(zdata, fit@model$modeldata$index[1:nrow(zdata)]), n.ahead = n.ahead,
out.sample = ns, n.roll = n.roll - 1, external.forecasts = external.forecasts,
cluster = cluster, realizedVol = realizedVol, ...)
if(modelinc[1] == 0){
mu = array(NA, dim=c(n.ahead, m, n.roll))
f = lapply(forclist@forecast, function(x) fitted(x))
for(i in 1:n.roll) mu[,,i] = matrix( sapply( f, function(x) x[,i] ), ncol = m)
} else{
mu = NULL
}
sig = sigma(filterlist)
resid = residuals(filterlist)
stdresid = resid/sig
if(modelinc[5]>0){
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid
} else{
Ibar = .asymI(stdresid)
astdresid = Ibar*stdresid*0
}
T = dim(fit@mfit$H)[3]
Rbar = Rtfor = Htfor = Qtfor = vector(mode = "list", length = n.roll)
Qstart = last( rcor(fit, type = "Q"), mo )
Rstart = last( rcor(fit, type = "R"), mo )
Hstart = last( rcov(fit), mo)
f = lapply(forclist@forecast, function(x) sigma(x))
for(i in 1:n.roll){
xQbar = cov(stdresid[1:(T + i - 1), ])
if(modelinc[5]>0) xNbar = cov(astdresid[1:(T + i - 1), ]) else xNbar = matrix(0, m, m)
xstdresids = stdresid[(T - mo + i ):(T + i - 1), , drop = FALSE]
xastdresids = astdresid[(T - mo + i ):(T + i - 1), , drop = FALSE]
xfsig = matrix( sapply( f, function(x) x[,i] ), ncol = m)
ans = .dccforecastn(model, Qstart, Rstart, Hstart, xQbar, xNbar, xstdresids, xastdresids, xfsig, n.ahead, mo)
Rtfor[[i]] = ans$Rtfor
Qtfor[[i]] = ans$Qtfor
Htfor[[i]] = ans$Htfor
Rbar[[i]] = ans$Rbar
Qstart = last( rugarch:::.abind(Qstart, ans$Qtfor[, , 1]), mo )
Rstart = last( rugarch:::.abind(Rstart, ans$Rtfor[, , 1]), mo )
Hstart = last( rugarch:::.abind(Hstart, ans$Htfor[, , 1]), mo )
}
forc = list( H = Htfor, R = Rtfor, Q = Qtfor, Rbar = Rbar, mu = mu )
return(forc)
}
.dccforecastn = function(model, Qstart, Rstart, Hstart, Qbar, Nbar, stdresids,
astdresids, fsig, n.ahead, mo)
{
m = dim(Qbar)[1]
modelinc = model$modelinc
Qtfor = Rtfor = Htfor = array(NA, dim = c(m, m, n.ahead + mo))
Qtfor[ , , 1:mo] = Qstart[, , 1:mo]
Rtfor[ , , 1:mo] = Rstart[, , 1:mo]
Htfor[ , , 1:mo] = Hstart[, , 1:mo]
pars = model$ipars[,1]
idx = model$pidx
dccsum = sum(pars[idx["dcca",1]:idx["dcca",2]]) + sum(pars[idx["dccb",1]:idx["dccb",2]])
Qt_1 = (1 - dccsum) * Qbar - sum(pars[idx["dccg",1]:idx["dccg",2]])*Nbar
for(i in 1:n.ahead){
Qtfor[, , mo + i] = Qt_1
if( i == 1 ){
if(modelinc[3]>0){
for(j in 1:modelinc[3]){
Qtfor[ , , mo + 1] = Qtfor[ , , mo + 1] + pars[idx["dcca",1]+j-1] * (stdresids[(mo + 1 - j), ] %*% t(stdresids[(mo + 1 - j), ]))
}
}
if(modelinc[5]>0){
for(j in 1:modelinc[5]){
Qtfor[ , , mo + 1] = Qtfor[ , , mo + 1] + pars[idx["dccg",1]+j-1] * (astdresids[(mo + 1 - j), ] %*% t(astdresids[(mo + 1 - j), ]))
}
}
if(modelinc[4]>0){
for(j in 1:modelinc[4]){
Qtfor[ , , mo + 1] = Qtfor[ , , mo + 1] + pars[idx["dccb",1]+j-1] * Qtfor[ , , mo + 1 - j]
}
}
Qtmp = diag( 1/sqrt( diag(Qtfor[ , , mo + 1]) ) , m, m)
Rtfor[ , , mo + 1] = Qtmp %*% Qtfor[ , , mo + 1] %*% t(Qtmp)
Dtmp = diag(fsig[1, ], m, m)
Htfor[ , , mo + 1] = Dtmp %*% Rtfor[ , , mo + 1] %*% Dtmp
Qt_1star = diag( 1/sqrt( diag(Qtfor[, , mo + 1]) ) , m, m)
ER_1 = Qt_1star %*% Qtfor[, , mo + 1] %*% t(Qt_1star)
Qbarstar = diag( 1/sqrt( diag(Qbar) ) , m, m)
Rbar = Qbarstar %*% Qbar %*% t(Qbarstar)
} else{
Rtfor[, , mo + i] = (1 - dccsum^(i - 1) ) * Rbar + dccsum^(i - 1) * ER_1
Dtmp = diag(fsig[i, ], m, m)
Htfor[, , mo + i] = Dtmp %*% Rtfor[, , mo + i] %*% Dtmp
Qtfor[, , mo + i] = Qtfor[, , mo + 1]
}
}
return( list( Rtfor = Rtfor[, , -(1:mo), drop = FALSE], Htfor = Htfor[, , -(1:mo), drop = FALSE],
Qtfor = Qtfor[, , -(1:mo), drop = FALSE], Rbar = Rbar, ER_1 = ER_1) )
}
.dccsim.fit = function(fitORspec, n.sim = 1000, n.start = 0, m.sim = 1,
startMethod = c("unconditional", "sample"), presigma = NULL,
preresiduals = NULL, prereturns = NULL, preQ = NULL, preZ = NULL,
Qbar = NULL, Nbar = NULL, rseed = NULL, mexsimdata = NULL,
vexsimdata = NULL, cluster = NULL, VAR.fit = NULL, prerealized = NULL, ...)
{
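# Simulate from a fitted DCC model: draw standardized innovations from the
# multivariate distribution, simulate the Q/R path with .dccsimf, then push
# the innovations through ugarchpath for the univariate variance dynamics.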
fit = fitORspec
T = fit@model$modeldata$T
Data = fit@model$modeldata$data[1:T,]
m = dim(Data)[2]
mo = fit@model$maxdccOrder
mg = fit@model$maxgarchOrder
startMethod = startMethod[1]
if( is.null(rseed) ){
rseed = as.integer(runif(1, 1, Sys.time()))
} else {
if(length(rseed) == 1) rseed = as.integer(rseed[1]) else rseed = as.integer( rseed[1:m.sim] )
}
model = fit@model
umodel = model$umodel
modelinc = model$modelinc
fpars = lapply(1:m, FUN = function(i) fit@model$mpars[fit@model$midx[,i]==1,i])
mspec = .makemultispec(umodel$modelinc, umodel$modeldesc$vmodel, umodel$modeldesc$vsubmodel,
umodel$modeldata$mexdata, umodel$modeldata$vexdata, umodel$start.pars,
fpars, NULL)
if(startMethod == "sample"){
if(is.null(preZ)){
preZ = matrix(tail(residuals(fit)/sigma(fit), mo), ncol = m)
} else{
preZ = matrix(tail(preZ, 1), ncol = m, nrow = mo, byrow = TRUE)
}
if(is.null(preQ)){
preQ = fit@mfit$Q[[length(fit@mfit$Q)]]
} else{
dcc.symcheck(preQ, m, d = NULL)
}
Rbar = preQ/(sqrt(diag(preQ)) %*% t(sqrt(diag(preQ))) )
} else{
if(is.null(preZ)){
preZ = matrix(0, ncol = m, nrow = mo)
} else{
preZ = matrix(tail(preZ, 1), ncol = m, nrow = mo, byrow = TRUE)
}
Rbar = cor(Data)
if(is.null(preQ)){
preQ = Rbar
} else{
dcc.symcheck(preQ, m, d = NULL)
Rbar = preQ/(sqrt(diag(preQ)) %*% t(sqrt(diag(preQ))) )
}
}
if(is.null(Qbar)){
Qbar = fit@mfit$Qbar
} else{
dcc.symcheck(Qbar, m, d = NULL)
}
if(model$modelinc[5]>0){
if(is.null(Nbar)){
Nbar = fit@mfit$Nbar
} else{
dcc.symcheck(Nbar, m, d = NULL)
}
} else{
Nbar = matrix(0, m, m)
}
uncv = sapply(mspec@spec, FUN = function(x) uncvariance(x))
if( !is.null(presigma) ){
if( !is.matrix(presigma) )
stop("\ndccsim-->error: presigma must be a matrix.")
if( dim(presigma)[2] != m )
stop("\ndccsim-->error: wrong column dimension for presigma.")
if( dim(presigma)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for presigma (need ", mg, " rows.", sep = ""))
} else{
if(startMethod == "sample"){
mx = max(sapply(mspec@spec, FUN = function(x) x@model$maxOrder))
presigma = matrix(NA, ncol = m, nrow = mx)
tmp = last(fit@mfit$H, mx)
for(i in 1:mx) presigma[i,] = sqrt(diag(tmp[,,i]))
}
}
if( !is.null(preresiduals) ){
if( !is.matrix(preresiduals) )
stop("\ndccsim-->error: preresiduals must be a matrix.")
if( dim(preresiduals)[2] != m )
stop("\ndccsim-->error: wrong column dimension for preresiduals.")
if( dim(preresiduals)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for preresiduals (need ", mg, " rows.", sep = ""))
} else{
if(startMethod == "sample"){
mx = max(sapply(mspec@spec, FUN = function(x) x@model$maxOrder))
preresiduals = matrix(NA, ncol = m, nrow = mx)
tmp = tail(fit@model$residuals, mx)
for(i in 1:mx) preresiduals[i,] = tmp[i,]
}
}
if( !is.null(prereturns) ){
if( !is.matrix(prereturns) )
stop("\ndccsim-->error: prereturns must be a matrix.")
if( dim(prereturns)[2] != m )
stop("\ndccsim-->error: wrong column dimension for prereturns.")
if( dim(prereturns)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for prereturns (need ", mg, " rows.", sep = ""))
} else{
if(startMethod == "sample"){
mx = max(sapply(mspec@spec, FUN = function(x) x@model$maxOrder))
prereturns = matrix(NA, ncol = m, nrow = mx)
tmp = tail(Data, mx)
for(i in 1:mx) prereturns[i,] = tmp[i,]
}
}
if(fit@model$umodel$modeldesc$vmodel[1]=="realGARCH"){
if( !is.null(prerealized) ){
if( !is.matrix(prerealized) )
stop("\ndccsim-->error: prerealized must be a matrix.")
if( dim(prerealized)[2] != m )
stop("\ndccsim-->error: wrong column dimension for prerealized.")
if( dim(prerealized)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for prerealized (need ", mg, " rows.", sep = ""))
} else{
if(startMethod == "sample"){
mx = max(sapply(mspec@spec, FUN = function(x) x@model$maxOrder))
prerealized = matrix(NA, ncol = m, nrow = mx)
tmp = tail(fit@mfit$realizedVol[1:T,], mx)
for(i in 1:mx) prerealized[i,] = tmp[i,]
}
}
} else{
mx = max(sapply(mspec@spec, FUN = function(x) x@model$maxOrder))
prerealized = matrix(NA, ncol = m, nrow = mx)
}
if(fit@model$modeldesc$distribution == "mvnorm"){
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rnorm(m * (n.sim + n.start) * m.sim, 0, 1), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rnorm(m * (n.sim + n.start), 0, 1), nrow = n.sim + n.start, ncol = m))
}
}
} else if(fit@model$modeldesc$distribution == "mvlaplace"){
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rugarch:::rged(m * (n.sim + n.start) * m.sim, 0, 1, shape = 1), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rugarch:::rged(m * (n.sim + n.start), 0, 1, shape = 1), nrow = n.sim + n.start, ncol = m))
}
}
} else{
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rugarch:::rstd(m * (n.sim + n.start) * m.sim, 0, 1, shape = rshape(fit)), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rugarch:::rstd(m * (n.sim + n.start), 0, 1, shape = rshape(fit)), nrow = n.sim + n.start, ncol = m))
}
}
}
if(length(rseed) == 1){
rseed = c(rseed, (1:m.sim)*(rseed+1))
}
simRes = simX = simR = simQ = simH = simSeries = vector(mode = "list", length = m.sim)
if( !is.null(cluster) ){
simH = vector(mode = "list", length = m.sim)
simX = vector(mode = "list", length = m.sim)
clusterEvalQ(cluster, require(rmgarch))
clusterExport(cluster, c("model", "z", "preQ", "Rbar",
"Qbar", "Nbar", "mo", "n.sim", "n.start", "m",
"rseed",".dccsimf"), envir = environment())
mtmp = parLapply(cluster, as.list(1:m.sim), fun = function(j){
.dccsimf(model, Z = z[,,j], Qbar = Qbar,
preQ = preQ, Nbar = Nbar, Rbar = Rbar, mo = mo,
n.sim, n.start, m, rseed[j])
})
simR = lapply(mtmp, FUN = function(x) if(is.matrix(x$R)) array(x$R, dim = c(m, m, n.sim)) else last(x$R, n.sim))
simQ = lapply(mtmp, FUN = function(x) if(is.matrix(x$Q)) array(x$Q, dim = c(m, m, n.sim)) else last(x$Q, n.sim))
simZ = vector(mode = "list", length = m)
for(i in 1:m) simZ[[i]] = sapply(mtmp, FUN = function(x) x$Z[,i])
clusterExport(cluster, c("fit", "n.sim", "n.start", "m.sim",
"startMethod", "simZ", "presigma", "preresiduals",
"prereturns", "mexsimdata", "vexsimdata", "prerealized"),
envir = environment())
xtmp = parLapply(cluster, as.list(1:m), fun = function(j){
maxx = mspec@spec[[j]]@model$maxOrder;
htmp = ugarchpath(mspec@spec[[j]], n.sim = n.sim + n.start, n.start = 0, m.sim = m.sim,
custom.dist = list(name = "sample", distfit = matrix(simZ[[j]][-(1:mo), ], ncol = m.sim)),
presigma = if( is.null(presigma) ) NA else tail(presigma[,j], maxx),
preresiduals = if( is.null(preresiduals) ) NA else tail(preresiduals[,j], maxx),
prereturns = if( is.null(prereturns) || model$modelinc[1]>0 ) NA else tail(prereturns[,j], maxx),
mexsimdata = if( model$modelinc[1]==0 ) mexsimdata[[j]] else NULL,
vexsimdata = vexsimdata[[j]], prerealized = tail(prerealized[,j], maxx));
h = matrix(tail(htmp@path$sigmaSim^2, n.sim), nrow = n.sim);
x = matrix(htmp@path$seriesSim, nrow = n.sim + n.start);
return(list(h = h, x = x))
})
} else{
simH = vector(mode = "list", length = m.sim)
simX = vector(mode = "list", length = m.sim)
mtmp = lapply(as.list(1:m.sim), FUN = function(j){
.dccsimf(model, Z = z[,,j], Qbar = Qbar, preQ = preQ,
Nbar = Nbar, Rbar = Rbar, mo = mo, n.sim, n.start,
m, rseed[j])
})
simR = lapply(mtmp, FUN = function(x) if(is.matrix(x$R)) array(x$R, dim = c(m, m, n.sim)) else last(x$R, n.sim))
simQ = lapply(mtmp, FUN = function(x) if(is.matrix(x$Q)) array(x$Q, dim = c(m, m, n.sim)) else last(x$Q, n.sim))
simZ = vector(mode = "list", length = m)
for(i in 1:m) simZ[[i]] = sapply(mtmp, FUN = function(x) x$Z[,i])
xtmp = lapply(as.list(1:m), FUN = function(j){
maxx = mspec@spec[[j]]@model$maxOrder;
htmp = ugarchpath(mspec@spec[[j]], n.sim = n.sim + n.start, n.start = 0, m.sim = m.sim,
custom.dist = list(name = "sample", distfit = matrix(simZ[[j]][-(1:mo), ], ncol = m.sim)),
presigma = if( is.null(presigma) ) NA else tail(presigma[,j], maxx),
preresiduals = if( is.null(preresiduals) ) NA else tail(preresiduals[,j], maxx),
prereturns = if( is.null(prereturns) || model$modelinc[1]>0 ) NA else tail(prereturns[,j], maxx),
mexsimdata = if( model$modelinc[1]==0 ) mexsimdata[[j]] else NULL,
vexsimdata = vexsimdata[[j]], prerealized = tail(prerealized[,j], maxx));
h = matrix(tail(htmp@path$sigmaSim^2, n.sim), nrow = n.sim);
x = matrix(htmp@path$seriesSim, nrow = n.sim + n.start);
return(list(h = h, x = x))})
}
H = array(NA, dim = c(n.sim, m, m.sim))
tmpH = array(NA, dim = c(m, m, n.sim))
for(i in 1:n.sim) H[i,,] = t(sapply(xtmp, FUN = function(x) as.numeric(x$h[i,])))
for(i in 1:m.sim){
for(j in 1:n.sim){
tmpH[ , , j] = diag(sqrt( H[j, , i]) ) %*% simR[[i]][ , , j] %*% diag(sqrt( H[j, , i] ) )
}
simH[[i]] = tmpH
}
if(model$modelinc[1]>0){
simxX = array(NA, dim = c(n.sim+n.start, m, m.sim))
for(i in 1:m.sim) simxX[,,i] = sapply(xtmp, FUN = function(x) as.numeric(x$x[,i]))
simX = vector(mode = "list", length = m.sim)
for(i in 1:m.sim) simX[[i]] = matrix(simxX[,,i], nrow = n.sim+n.start)
} else{
simxX = array(NA, dim = c(n.sim+n.start, m, m.sim))
for(i in 1:m.sim) simxX[,,i] = sapply(xtmp, FUN = function(x) as.numeric(x$x[,i]))
simX = vector(mode = "list", length = m.sim)
for(i in 1:m.sim) simX[[i]] = matrix(tail(matrix(simxX[,,i], ncol = m), n.sim), nrow = n.sim)
}
if( model$modelinc[1]>0 ){
simRes = simX
simX = mvmean.varsim(model = model, Data = Data, res = simX,
mexsimdata = mexsimdata, prereturns = prereturns, m.sim = m.sim,
n.sim = n.sim, n.start = n.start, startMethod = startMethod,
cluster = cluster)
} else{
for(j in 1:m.sim) simX[[j]] = tail(simX[[j]], n.sim)
}
msim = list()
msim$simH = simH
msim$simR = simR
msim$simQ = simQ
msim$simX = simX
msim$simZ = simZ
msim$simRes = simRes
msim$rseed = rseed
model$n.sim = n.sim
model$m.sim = m.sim
model$n.start = n.start
model$startMethod = startMethod[1]
ans = new("DCCsim",
msim = msim,
model = model)
return( ans )
}
.dccsim.spec = function(fitORspec, n.sim = 1000, n.start = 0, m.sim = 1,
startMethod = c("unconditional", "sample"), presigma = NULL,
preresiduals = NULL, prereturns = NULL, preQ = NULL, preZ = NULL,
Qbar = NULL, Nbar = NULL, rseed = NULL, mexsimdata = NULL,
vexsimdata = NULL, cluster = NULL, VAR.fit = NULL,
prerealized = NULL, ...)
{
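# Simulate from a DCC spec with fixed parameters: unlike the fitted-object
# method, preQ and Qbar (and Nbar for aDCC) must be supplied, and VAR.fit is
# required whenever a VAR mean model is part of the specification.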
spec = fitORspec
startMethod = startMethod[1]
if( spec@model$modelinc[1]>0 ){
if( is.null(VAR.fit) ) stop("\ndccsim-->error: VAR.fit must not be NULL for VAR method when calling dccsim using spec!", call. = FALSE)
}
model = spec@model
model$umodel = spec@umodel
m = dim(spec@umodel$modelinc)[2]
mo = spec@model$maxdccOrder
mg = spec@model$maxgarchOrder
model$modeldata$asset.names = paste("Asset", 1:m, sep = "")
if( is.null(rseed) ){
rseed = as.integer(runif(1, 1, Sys.time()))
} else {
if(length(rseed) == 1) rseed = as.integer(rseed[1]) else rseed = as.integer( rseed[1:m.sim] )
}
if(is.null(preZ)){
preZ = matrix(0, ncol = m, nrow = mo)
} else{
preZ = matrix(tail(preZ, 1), ncol = m, nrow = mo, byrow = TRUE)
}
if(is.null(preQ)){
stop("\ndccsim-->error: preQ cannot be NULL when method uses spec!")
} else{
dcc.symcheck(preQ, m, d = NULL)
Rbar = preQ/(sqrt(diag(preQ)) %*% t(sqrt(diag(preQ))) )
}
if(is.null(Qbar)){
stop("\ndccsim-->error: Qbar cannot be NULL when method uses spec!")
} else{
dcc.symcheck(Qbar, m, d = NULL)
}
if(model$modelinc[5]>0){
if(is.null(Nbar)){
stop("\ndccsim-->error: Nbar cannot be NULL for aDCC when method uses spec!")
} else{
dcc.symcheck(Nbar, m, d = NULL)
}
} else{
Nbar = matrix(0, m, m)
}
model = spec@model
umodel = spec@umodel
modelinc = model$modelinc
midx = .fullinc(modelinc, umodel)
mspec = .makemultispec(umodel$modelinc, umodel$modeldesc$vmodel, umodel$modeldesc$vsubmodel,
umodel$modeldata$mexdata, umodel$modeldata$vexdata, umodel$start.pars,
umodel$fixed.pars, NULL)
uncv = sapply(mspec@spec, FUN = function(x) uncvariance(x))
if( !is.null(presigma) ){
if( !is.matrix(presigma) )
stop("\ndccsim-->error: presigma must be a matrix.")
if( dim(presigma)[2] != m )
stop("\ndccsim-->error: wrong column dimension for presigma.")
if( dim(presigma)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for presigma (need ", mg, " rows.", sep = ""))
}
if( !is.null(preresiduals) ){
if( !is.matrix(preresiduals) )
stop("\ndccsim-->error: preresiduals must be a matrix.")
if( dim(preresiduals)[2] != m )
stop("\ndccsim-->error: wrong column dimension for preresiduals.")
if( dim(preresiduals)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for preresiduals (need ", mg, " rows.", sep = ""))
}
if( !is.null(prereturns) ){
if( !is.matrix(prereturns) )
stop("\ndccsim-->error: prereturns must be a matrix.")
if( dim(prereturns)[2] != m )
stop("\ndccsim-->error: wrong column dimension for prereturns.")
if( dim(prereturns)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for prereturns (need ", mg, " rows.", sep = ""))
}
if(spec@umodel$modeldesc$vmodel[1]=="realGARCH"){
if( !is.null(prerealized) ){
if( !is.matrix(prerealized) )
stop("\ndccsim-->error: prerealized must be a matrix.")
if( dim(prerealized)[2] != m )
stop("\ndccsim-->error: wrong column dimension for prerealized.")
if( dim(prerealized)[1] != mg )
stop(paste("\ndccsim-->error: wrong row dimension for prerealized (need ", mg, " rows.", sep = ""))
}
} else{
prerealized = matrix(NA, ncol = m, nrow = mg)
}
if(model$modeldesc$distribution == "mvnorm"){
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rnorm(m * (n.sim + n.start) * m.sim, 0, 1), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rnorm(m * (n.sim + n.start), 0, 1), nrow = n.sim + n.start, ncol = m))
}
}
} else if(model$modeldesc$distribution == "mvlaplace"){
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rugarch:::rged(m * (n.sim + n.start) * m.sim, 0, 1, shape = 1), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rugarch:::rged(m * (n.sim + n.start), 0, 1, shape = 1), nrow = n.sim + n.start, ncol = m))
}
}
} else{
if(length(rseed) == 1){
set.seed( rseed )
tmp = matrix(rugarch:::rstd(m * (n.sim + n.start) * m.sim, 0, 1, shape = model$pars["mshape",1]), ncol = m, nrow = n.sim+n.start)
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim) z[,,i] = rbind(preZ, tmp)
} else{
z = array(NA, dim = c(n.sim + n.start + mo, m, m.sim))
for(i in 1:m.sim){
set.seed( rseed[i] )
z[,,i] = rbind(preZ, matrix(rugarch:::rstd(m * (n.sim + n.start), 0, 1, shape = model$pars["mshape",1]), nrow = n.sim + n.start, ncol = m))
}
}
}
if(length(rseed) == 1){
rseed = c(rseed, as.integer(runif(m.sim, 1, Sys.time())))
}
simRes = simX = simR = simQ = simH = simSeries = vector(mode = "list", length = m.sim)
if( !is.null(cluster) ){
simH = vector(mode = "list", length = m.sim)
simX = vector(mode = "list", length = m.sim)
clusterEvalQ(cluster, require(rmgarch))
clusterExport(cluster, c("model", "z", "preQ", "Rbar",
"Qbar", "Nbar", "mo", "n.sim", "n.start", "m",
"rseed",".dccsimf"), envir = environment())
mtmp = parLapply(cluster, as.list(1:m.sim), fun = function(j){
.dccsimf(model, Z = z[,,j], Qbar = Qbar,
preQ = preQ, Nbar = Nbar, Rbar = Rbar, mo = mo,
n.sim, n.start, m, rseed[j])
})
simR = lapply(mtmp, FUN = function(x) if(is.matrix(x$R)) array(x$R, dim = c(m, m, n.sim)) else last(x$R, n.sim))
simQ = lapply(mtmp, FUN = function(x) if(is.matrix(x$Q)) array(x$Q, dim = c(m, m, n.sim)) else last(x$Q, n.sim))
simZ = vector(mode = "list", length = m)
for(i in 1:m) simZ[[i]] = sapply(mtmp, FUN = function(x) x$Z[,i])
clusterExport(cluster, c("mspec", "n.sim", "n.start", "m.sim",
"startMethod", "simZ", "presigma", "preresiduals",
"prereturns", "mexsimdata", "vexsimdata", "prerealized"),
envir = environment())
xtmp = parLapply(cluster, as.list(1:m), fun = function(j){
maxx = mspec@spec[[j]]@model$maxOrder;
htmp = ugarchpath(mspec@spec[[j]], n.sim = n.sim + n.start, n.start = 0, m.sim = m.sim,
custom.dist = list(name = "sample", distfit = matrix(simZ[[j]][-(1:mo), ], ncol = m.sim)),
presigma = if( is.null(presigma) ) NA else tail(presigma[,j], maxx),
preresiduals = if( is.null(preresiduals) ) NA else tail(preresiduals[,j], maxx),
prereturns = if( is.null(prereturns) || model$modelinc[1]>0 ) NA else tail(prereturns[,j], maxx),
mexsimdata = if( model$modelinc[1]==0 ) mexsimdata[[j]] else NULL,
vexsimdata = vexsimdata[[j]], prerealized = tail(prerealized[,j], maxx))
h = matrix(tail(htmp@path$sigmaSim^2, n.sim), nrow = n.sim)
x = matrix(htmp@path$seriesSim, nrow = n.sim + n.start)
xres = matrix(htmp@path$residSim, nrow = n.sim + n.start)
return(list(h = h, x = x, xres = xres))
})
} else{
simH = vector(mode = "list", length = m.sim)
simX = vector(mode = "list", length = m.sim)
mtmp = lapply(as.list(1:m.sim), FUN = function(j)
.dccsimf(model, Z = z[,,j], Qbar = Qbar, preQ = preQ, Nbar = Nbar,
Rbar = Rbar, mo = mo, n.sim, n.start, m, rseed[j]))
simR = lapply(mtmp, FUN = function(x) if(is.matrix(x$R)) array(x$R, dim = c(m, m, n.sim)) else last(x$R, n.sim))
simQ = lapply(mtmp, FUN = function(x) if(is.matrix(x$Q)) array(x$Q, dim = c(m, m, n.sim)) else last(x$Q, n.sim))
simZ = vector(mode = "list", length = m)
for(i in 1:m) simZ[[i]] = sapply(mtmp, FUN = function(x) x$Z[,i])
xtmp = lapply(as.list(1:m), FUN = function(j){
maxx = mspec@spec[[j]]@model$maxOrder;
htmp = ugarchpath(mspec@spec[[j]], n.sim = n.sim + n.start, n.start = 0, m.sim = m.sim,
custom.dist = list(name = "sample", distfit = matrix(simZ[[j]][-(1:mo), ], ncol = m.sim)),
presigma = if( is.null(presigma) ) NA else tail(presigma[,j], maxx),
preresiduals = if( is.null(preresiduals) ) NA else tail(preresiduals[,j], maxx),
prereturns = if( is.null(prereturns) || model$modelinc[1]>0 ) NA else tail(prereturns[,j], maxx),
mexsimdata = if( model$modelinc[1]==0 ) mexsimdata[[j]] else NULL, vexsimdata = vexsimdata[[j]],
prerealized = tail(prerealized[,j], maxx));
h = matrix(tail(htmp@path$sigmaSim^2, n.sim), nrow = n.sim);
x = matrix(htmp@path$seriesSim, nrow = n.sim + n.start);
xres = matrix(htmp@path$residSim, nrow = n.sim + n.start);
return(list(h = h, x = x, xres = xres))
})
}
H = array(NA, dim = c(n.sim, m, m.sim))
tmpH = array(NA, dim = c(m, m, n.sim))
for(i in 1:n.sim) H[i,,] = t(sapply(xtmp, FUN = function(x) as.numeric(x$h[i,])))
for(i in 1:m.sim){
for(j in 1:n.sim){
tmpH[ , , j] = diag(sqrt( H[j, , i]) ) %*% simR[[i]][ , , j] %*% diag(sqrt( H[j, , i] ) )
}
simH[[i]] = tmpH
}
if(model$modelinc[1]>0){
simxX = array(NA, dim = c(n.sim+n.start, m, m.sim))
for(i in 1:m.sim) simxX[,,i] = sapply(xtmp, FUN = function(x) as.numeric(x$x[,i]))
simX = vector(mode = "list", length = m.sim)
for(i in 1:m.sim) simX[[i]] = matrix(simxX[,,i], nrow = n.sim+n.start)
} else{
simxX = array(NA, dim = c(n.sim+n.start, m, m.sim))
for(i in 1:m.sim) simxX[,,i] = sapply(xtmp, FUN = function(x) as.numeric(x$x[,i]))
simX = vector(mode = "list", length = m.sim)
for(i in 1:m.sim) simX[[i]] = matrix(tail(matrix(simxX[,,i], ncol = m), n.sim), nrow = n.sim)
}
simxRes = array(NA, dim = c(n.sim, m, m.sim))
for(i in 1:n.sim) simxRes[i,,] = t(sapply(xtmp, FUN = function(x) as.numeric(x$xres[i,])))
simRes = vector(mode = "list", length = m.sim)
for(i in 1:m.sim) simRes[[i]] = matrix(simxRes[,,i], nrow = n.sim)
if( model$modelinc[1]>0 ){
model$varcoef = VAR.fit$Bcoef
Data = VAR.fit$xfitted
simRes = simX
simX = mvmean.varsim(model = model, Data = Data, res = simX,
mexsimdata = mexsimdata, prereturns = prereturns, m.sim = m.sim,
n.sim = n.sim, n.start = n.start, startMethod = startMethod,
cluster = cluster)
} else{
for(j in 1:m.sim) simX[[j]] = tail(simX[[j]], n.sim)
}
msim = list()
msim$simH = simH
msim$simR = simR
msim$simQ = simQ
msim$simX = simX
msim$simZ = simZ
msim$simRes = simRes
msim$rseed = rseed
msim$model$Data = NULL
model$n.sim = n.sim
model$m.sim = m.sim
model$n.start = n.start
model$startMethod = "unconditional"
ans = new("DCCsim",
msim = msim,
model = model)
return( ans )
}
.rolldcc.assets = function(spec, data, n.ahead = 1, forecast.length = 50, refit.every = 25,
n.start = NULL, refit.window = c("recursive", "moving"), window.size = NULL,
solver = "solnp", solver.control = list(),
fit.control = list(eval.se = TRUE, stationarity = TRUE, scale = FALSE),
cluster = NULL, save.fit = FALSE, save.wdir = NULL, realizedVol = NULL,...)
{
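# Rolling estimation: loop over refit windows, re-estimating the univariate
# GARCH models and the DCC fit in each window; the cluster (if any) is used
# to parallelize the univariate fits within a window.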
if(spec@model$DCC=="FDCC") stop("\nFDCC model rolling estimation not yet implemented.")
model = spec@model
verbose = FALSE
model$umodel = spec@umodel
if(is.null(solver.control$trace)) solver.control$trace = 0
if(is.null(fit.control$stationarity)) fit.control$stationarity = TRUE
if(is.null(fit.control$eval.se)) fit.control$eval.se = FALSE
if(is.null(fit.control$scale)) fit.control$scale = FALSE
mm = match(names(fit.control), c("stationarity", "eval.se", "scale"))
if(any(is.na(mm))){
idx = which(is.na(mm))
enx = NULL
for(i in 1:length(idx)) enx = c(enx, names(fit.control)[idx[i]])
warning(paste(c("unidentified option(s) in fit.control:\n", enx), sep="", collapse=" "), call. = FALSE, domain = NULL)
}
asset.names = colnames(data)
xdata = .extractmdata(data)
data = xts(xdata$data, xdata$index)
index = xdata$index
period = xdata$period
if(is.null(fit.control$stationarity)) fit.control$stationarity = 1
if(is.null(fit.control$fixed.se)) fit.control$fixed.se = 0
T = dim(data)[1]
if(n.ahead>1)
stop("\ndccroll:--> n.ahead>1 not supported...try again.")
if(is.null(n.start)){
if(is.null(forecast.length))
stop("\ndccroll:--> forecast.length amd n.start are both NULL....try again.")
n.start = T - forecast.length
} else{
forecast.length = T - n.start
}
if(T<=n.start)
stop("\ndccroll:--> start cannot be greater than length of data")
s = seq(n.start+refit.every, T, by = refit.every)
m = length(s)
out.sample = rep(refit.every, m)
if(s[m]<T){
s = c(s,T)
m = length(s)
out.sample = c(out.sample, s[m]-s[m-1])
}
if(refit.window == "recursive"){
rollind = lapply(1:m, FUN = function(i) 1:s[i])
} else{
if(!is.null(window.size)){
if(window.size<100) stop("\ndccroll:--> window size must be at least 100.")
rollind = lapply(1:m, FUN = function(i) max(1, (s[i]-window.size-out.sample[i])):s[i])
} else{
rollind = lapply(1:m, FUN = function(i) (1+(i-1)*refit.every):s[i])
}
}
cf = lik = forc = vector(mode = "list", length = m)
plik = vector(mode = "list", length = m)
mspec = .makemultispec(model$umodel$modelinc, model$umodel$modeldesc$vmodel, model$umodel$modeldesc$vsubmodel,
model$umodel$modeldata$mexdata, model$umodel$modeldata$vexdata, model$umodel$start.pars,
model$umodel$fixed.pars, model$umodel$vt)
for(i in 1:m){
if(!is.null(realizedVol)){
mfit = multifit(mspec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver[1], fit.control = fit.control, cluster = cluster,
realizedVol = realizedVol[rollind[[i]],], solver.control = solver.control)
k=1
while(k==1){
conv = sapply(mfit@fit, function(x) convergence(x))
if(any(conv==1)){
idx = which(conv==1)
for(j in idx){ mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i], solver = "gosolnp", fit.control = fit.control,
realizedVol = realizedVol[rollind[[i]],j])
}
} else{
k=0
}
}
k=1
while(k==1){
tmp = sapply(mfit@fit, function(x){
L = try(likelihood(x), silent=TRUE)
if(inherits(L, 'try-error') || !is.numeric(L)) L = 1e10
L})
conv=diff(log(abs(tmp)))
if(any(conv>1)){
idx = which(conv>1)+1
for(j in idx){
mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i],
solver = "gosolnp", fit.control = fit.control,
realizedVol = realizedVol[rollind[[i]],j])
}
} else{
k=0
}
}
mcfit = dccfit(spec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver, fit.control = fit.control, solver.control=solver.control,
cluster = NULL, realizedVol = realizedVol[rollind[[i]],],
fit = mfit)
plik[[i]] = mcfit@mfit$plik
} else{
mfit = multifit(mspec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver[1], fit.control = fit.control, solver.control = solver.control,
cluster = cluster)
k=1
while(k==1){
conv = sapply(mfit@fit, function(x) convergence(x))
if(any(conv==1)){
idx = which(conv==1)
for(j in idx){ mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i], solver = "gosolnp", fit.control = fit.control)
}
} else{
k=0
}
}
mcfit = dccfit(spec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver, fit.control = fit.control, solver.control = solver.control,
cluster = cluster, fit = mfit)
plik[[i]] = mcfit@mfit$plik
}
cf[[i]] = mcfit@model$mpars
lik[[i]] = likelihood(mcfit)
forc[[i]] = dccforecast(mcfit, n.ahead = 1, n.roll = out.sample[i]-1, cluster = cluster)
if(save.fit){
saveRDS(mcfit,file=paste0(save.wdir,"/dccroll_",i,".rds"))
}
}
model$n.start = n.start
model$n.refits = m
model$refit.every = refit.every
model$refit.window = refit.window
model$window.size = window.size
model$forecast.length = forecast.length
model$n.start = n.start
model$rollind = rollind
model$out.sample = out.sample
model$modeldata$asset.names = asset.names
model$rollcoef = cf
model$rolllik = lik
model$index = index
model$period = period
model$data = xdata$data
model$plik = plik
ans = new("DCCroll",
mforecast = forc,
model = model)
return(ans)
}
.rolldcc.windows = function(spec, data, n.ahead = 1, forecast.length = 50, refit.every = 25,
n.start = NULL, refit.window = c("recursive", "moving"), window.size = NULL,
solver = "solnp", solver.control = list(),
fit.control = list(eval.se = TRUE, stationarity = TRUE, scale = FALSE),
cluster = NULL, save.fit = FALSE, save.wdir = NULL, realizedVol = NULL,...)
{
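# Rolling estimation parallelized across refit windows: each window is sent
# to a cluster node via parLapply, so the fits inside a window run serially
# (cluster = NULL in the inner calls).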
if(spec@model$DCC=="FDCC") stop("\nFDCC model rolling estimation not yet implemented.")
model = spec@model
verbose = FALSE
model$umodel = spec@umodel
if(is.null(solver.control$trace)) solver.control$trace = 0
if(is.null(fit.control$stationarity)) fit.control$stationarity = TRUE
if(is.null(fit.control$eval.se)) fit.control$eval.se = FALSE
if(is.null(fit.control$scale)) fit.control$scale = FALSE
mm = match(names(fit.control), c("stationarity", "eval.se", "scale"))
if(any(is.na(mm))){
idx = which(is.na(mm))
enx = NULL
for(i in 1:length(idx)) enx = c(enx, names(fit.control)[idx[i]])
warning(paste(c("unidentified option(s) in fit.control:\n", enx), sep="", collapse=" "), call. = FALSE, domain = NULL)
}
asset.names = colnames(data)
xdata = .extractmdata(data)
data = xts(xdata$data, xdata$index)
index = xdata$index
period = xdata$period
if(is.null(fit.control$stationarity)) fit.control$stationarity = 1
if(is.null(fit.control$fixed.se)) fit.control$fixed.se = 0
T = dim(data)[1]
if(n.ahead>1)
stop("\ndccroll:--> n.ahead>1 not supported...try again.")
if(is.null(n.start)){
if(is.null(forecast.length))
stop("\ndccroll:--> forecast.length amd n.start are both NULL....try again.")
n.start = T - forecast.length
} else{
forecast.length = T - n.start
}
if(T<=n.start)
stop("\ndccroll:--> start cannot be greater than length of data")
s = seq(n.start+refit.every, T, by = refit.every)
m = length(s)
out.sample = rep(refit.every, m)
if(s[m]<T){
s = c(s,T)
m = length(s)
out.sample = c(out.sample, s[m]-s[m-1])
}
if(refit.window == "recursive"){
rollind = lapply(1:m, FUN = function(i) 1:s[i])
} else{
if(!is.null(window.size)){
if(window.size<100) stop("\ndccroll:--> window size must be at least 100.")
rollind = lapply(1:m, FUN = function(i) max(1, (s[i]-window.size-out.sample[i])):s[i])
} else{
rollind = lapply(1:m, FUN = function(i) (1+(i-1)*refit.every):s[i])
}
}
cf = lik = forc = vector(mode = "list", length = m)
plik = vector(mode = "list", length = m)
mspec = .makemultispec(model$umodel$modelinc, model$umodel$modeldesc$vmodel, model$umodel$modeldesc$vsubmodel,
model$umodel$modeldata$mexdata, model$umodel$modeldata$vexdata, model$umodel$start.pars,
model$umodel$fixed.pars, model$umodel$vt)
res = parLapply(cluster, as.list(1:m), function(i){
if(!is.null(realizedVol)){
mfit = multifit(mspec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver[1], fit.control = fit.control, cluster = NULL,
realizedVol = realizedVol[rollind[[i]],], solver.control = solver.control)
k=1
while(k==1){
conv = sapply(mfit@fit, function(x) convergence(x))
if(any(conv==1)){
idx = which(conv==1)
for(j in idx){ mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i], solver = "gosolnp", fit.control = fit.control,
realizedVol = realizedVol[rollind[[i]],j])
}
} else{
k=0
}
}
k=1
while(k==1){
tmp = sapply(mfit@fit, function(x){
L = try(likelihood(x), silent=TRUE)
if(inherits(L, 'try-error') || !is.numeric(L)) L = 1e10
L})
conv=diff(log(abs(tmp)))
if(any(conv>1)){
idx = which(conv>1)+1
for(j in idx){
mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i],
solver = "gosolnp", fit.control = fit.control,
realizedVol = realizedVol[rollind[[i]],j])
}
} else{
k=0
}
}
mcfit = dccfit(spec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver, fit.control = fit.control, solver.control=solver.control,
cluster = NULL, realizedVol = realizedVol[rollind[[i]],],
fit = mfit)
plik = mcfit@mfit$plik
} else{
mfit = multifit(mspec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver[1], fit.control = fit.control, solver.control = solver.control,
cluster = NULL)
k=1
while(k==1){
conv = sapply(mfit@fit, function(x) convergence(x))
if(any(conv==1)){
idx = which(conv==1)
for(j in idx){ mfit@fit[[j]] = ugarchfit(mspec@spec[[j]], data[rollind[[i]],j],
out.sample = out.sample[i], solver = "gosolnp", fit.control = fit.control)
}
} else{
k=0
}
}
mcfit = dccfit(spec, data[rollind[[i]],], out.sample = out.sample[i],
solver = solver, fit.control = fit.control, solver.control = solver.control,
cluster = NULL, fit = mfit)
plik = mcfit@mfit$plik
}
cf = mcfit@model$mpars
lik = likelihood(mcfit)
forc = dccforecast(mcfit, n.ahead = 1, n.roll = out.sample[i]-1, cluster = NULL)
if(save.fit){
saveRDS(mcfit,file=paste0(save.wdir,"/dccroll_",i,".rds"))
}
return(list(cf=cf, lik=lik, forc=forc, plik=plik))
})
model$n.start = n.start
model$n.refits = m
model$refit.every = refit.every
model$refit.window = refit.window
model$window.size = window.size
model$forecast.length = forecast.length
model$n.start = n.start
model$rollind = rollind
model$out.sample = out.sample
model$modeldata$asset.names = asset.names
model$rollcoef = lapply(res, function(x) x$cf)
model$rolllik = lapply(res, function(x) x$lik)
model$index = index
model$period = period
model$data = xdata$data
model$plik = lapply(res, function(x) x$plik)
forc = lapply(res, function(x) x$forc)
ans = new("DCCroll",
mforecast = forc,
model = model)
return(ans)
}
.dccsimf = function(model, Z, Qbar, preQ, Nbar, Rbar, mo, n.sim, n.start, m, rseed)
{
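# Core correlation path simulator: dispatches by distribution to the
# compiled routines (dccsimmvn / dccsimmvl / dccsimmvt) in the rmgarch
# package, which return the simulated Q, R and Z arrays.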
modelinc = model$modelinc
ipars = model$pars
idx = model$pidx
n = n.sim + n.start + mo
set.seed(rseed[1]+1)
NZ = matrix(rnorm(m * (n.sim+n.start+mo)), nrow = n.sim + n.start + mo, ncol = m)
sumdcca = sum(ipars[idx["dcca",1]:idx["dcca",2],1])
sumdccb = sum(ipars[idx["dccb",1]:idx["dccb",2],1])
sumdcc = sumdcca + sumdccb
sumdccg = sum(ipars[idx["dccg",1]:idx["dccg",2],1])
res = switch(model$modeldesc$distribution,
mvnorm = .Call( "dccsimmvn", model = as.integer(modelinc), pars = as.numeric(ipars[,1]),
idx = as.integer(idx[,1]-1), Qbar = as.matrix(Qbar), preQ = as.matrix(preQ),
Rbar = as.matrix(Rbar), Nbar = as.matrix(Nbar), Z = as.matrix(Z),
NZ = as.matrix(NZ), epars = c(sumdcc, sumdccg, mo), PACKAGE = "rmgarch"),
mvlaplace = .Call( "dccsimmvl", model = as.integer(modelinc), pars = as.numeric(ipars[,1]),
idx = as.integer(idx[,1]-1), Qbar = as.matrix(Qbar), preQ = as.matrix(preQ),
Rbar = as.matrix(Rbar), Nbar = as.matrix(Nbar), Z = as.matrix(Z),
NZ = as.matrix(NZ), epars = c(sumdcc, sumdccg, mo), PACKAGE = "rmgarch"),
mvt = .Call( "dccsimmvt", model = as.integer(modelinc), pars = as.numeric(ipars[,1]),
idx = as.integer(idx[,1]-1), Qbar = as.matrix(Qbar), preQ = as.matrix(preQ),
Rbar = as.matrix(Rbar), Nbar = as.matrix(Nbar), Z = as.matrix(Z),
NZ = as.matrix(NZ), epars = c(sumdcc, sumdccg, mo), PACKAGE = "rmgarch"))
Q = array(NA, dim = c(m, m, n.sim + n.start + mo))
R = array(NA, dim = c(m, m, n.sim + n.start + mo))
for(i in 1:(n.sim + n.start + mo)){
R[,,i] = res[[2]][[i]]
Q[,,i] = res[[1]][[i]]
}
ans = list( Q = Q, R = R, Z = res[[3]])
return( ans )
}
.asymI = function(x){
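# Asymmetry indicator I(x < 0): 1 for negative values, 0 otherwise; the
# x == 0 case (which the formula maps to 0.5) is explicitly zeroed.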
ans = (-sign(x)+1)/2
ans[ans==0.5] = 0
ans
}
.makemultispec = function(modelinc, vmodel, vsubmodel, mexdata, vexdata, spars, fpars, vt){
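# Assemble a multispec of univariate ugarchspec objects from the stacked
# model-inclusion matrix (rows = parameters, columns = assets).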
m = dim(modelinc)[2]
mspec = vector(mode = "list", length = m)
dist = c("norm", "snorm", "std", "sstd","ged", "sged", "nig", "ghyp", "jsu", "ghst")
for(i in 1:m){
if(is.null(vt)){
vtarget = FALSE
} else{
if(!is.na(vt[i])) vtarget = vt[i] else vtarget = ifelse(modelinc[7,i]==0, TRUE, FALSE)
}
mspec[[i]] = ugarchspec(variance.model = list(model = vmodel[i], garchOrder = modelinc[8:9,i],
submodel = vsubmodel[i], external.regressors = if(is.na(vexdata[[i]][1])) NULL else vexdata[[i]],
variance.targeting = vtarget),
mean.model = list(armaOrder = modelinc[2:3,i], include.mean = as.logical(modelinc[1,i]),
archm = ifelse(modelinc[5,i]>0, TRUE, FALSE), archpow = modelinc[5,i],
arfima = modelinc[4,i], external.regressors = if(is.na(mexdata[[i]][1])) NULL else mexdata[[i]]),
distribution.model = dist[modelinc[21,i]], start.pars = if(is.na(spars[[i]][1])) NULL else spars[[i]],
fixed.pars = if(is.na(fpars[[i]][1])) NULL else fpars[[i]])
}
ans = multispec( mspec )
return(ans)
}
.fullinc = function(modelinc, umodel){
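# Build the full (univariate + DCC) parameter-inclusion matrix: one row per
# possible parameter, one column per asset plus a final "Joint" column for
# the DCC and multivariate-distribution parameters.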
m = dim(umodel$modelinc)[2]
vecmax = rep(0, 19)
names(vecmax) = rownames(umodel$modelinc[1:19,])
vecmax = apply(umodel$modelinc, 1, FUN = function(x) max(x) )
maxOrder = apply(umodel$modelinc, 2, FUN = function(x) max(c(x[2], x[3], x[8], x[9])))
sumv = 19 + sum(pmax(1, vecmax[c(2,3,6,8,9,10,11,12,13,15,16)])) - 11
tmpmat = matrix(0, ncol = m+1, nrow = sumv)
nx = 0
pnames = NULL
if(vecmax[1]>0){
tmpmat[1, 1:m] = umodel$modelinc[1,]
}
nx = nx + max(1, vecmax[1])
pnames = c(pnames, "mu")
if(vecmax[2]>0){
for(i in 1:vecmax[2]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[2,] >= i)
pnames = c(pnames, paste("ar", i, sep = ""))
}
} else{
pnames = c(pnames, "ar")
}
nx = nx + max(1, vecmax[2])
if(vecmax[3]>0){
for(i in 1:vecmax[3]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[3,] >= i)
pnames = c(pnames, paste("ma", i, sep = ""))
}
} else{
pnames = c(pnames, "ma")
}
nx = nx + max(1, vecmax[3])
if(vecmax[4]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[4, ]
}
nx = nx + max(1, vecmax[4])
pnames = c(pnames, "arfima")
nx = nx + max(1, vecmax[5])
pnames = c(pnames, "archm")
if(vecmax[6]>0){
for(i in 1:vecmax[6]){
tmpmat[nx+i, 1:m] = as.integer(umodel$modelinc[6, ] >= i)
pnames = c(pnames, paste("mxreg", i, sep = ""))
}
} else{
pnames = c(pnames, "mxreg")
}
nx = nx + max(1, vecmax[6])
if(vecmax[7]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[7, ]
}
nx = nx + max(1, vecmax[7])
pnames = c(pnames, "omega")
if(vecmax[8]>0){
for(i in 1:vecmax[8]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[8, ] >= i)
pnames = c(pnames, paste("alpha", i, sep = ""))
}
} else{
pnames = c(pnames, "alpha")
}
nx = nx + max(1, vecmax[8])
if(vecmax[9]>0){
for(i in 1:vecmax[9]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[9, ] >= i)
pnames = c(pnames, paste("beta", i, sep = ""))
}
} else{
pnames = c(pnames, "beta")
}
nx = nx + max(1, vecmax[9])
if(vecmax[10]>0){
for(i in 1:vecmax[10]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[10, ] >= i)
pnames = c(pnames, paste("gamma", i, sep = ""))
}
} else{
pnames = c(pnames, "gamma")
}
nx = nx + max(1, vecmax[10])
if(vecmax[11]>0){
for(i in 1:vecmax[11]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[11, ] >= i)
pnames = c(pnames, paste("eta1", i, sep = ""))
}
} else{
pnames = c(pnames, "eta1")
}
nx = nx + max(1, vecmax[11])
if(vecmax[12]>0){
for(i in 1:vecmax[12]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[12, ] >= i)
pnames = c(pnames, paste("eta2", i, sep = ""))
}
} else{
pnames = c(pnames, "eta2")
}
nx = nx + max(1, vecmax[12])
if(vecmax[13]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[13, ]
}
nx = nx + max(1, vecmax[13])
pnames = c(pnames, "delta")
if(vecmax[14]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[14, ]
}
nx = nx + max(1, vecmax[14])
pnames = c(pnames, "lambda")
if(vecmax[15]>0){
for(i in 1:vecmax[15]){
tmpmat[nx+i, 1:m] = as.integer( umodel$modelinc[15, ] >= i)
pnames = c(pnames, paste("vxreg", i, sep = ""))
}
} else{
pnames = c(pnames, "vxreg")
}
nx = nx + max(1, vecmax[15])
if(vecmax[16]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[16, ]
}
nx = nx + max(1, vecmax[16])
pnames = c(pnames, "skew")
if(vecmax[17]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[17, ]
}
nx = nx + max(1, vecmax[17])
pnames = c(pnames, "shape")
if(vecmax[18]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[18, ]
}
nx = nx + max(1, vecmax[18])
pnames = c(pnames, "ghlambda")
if(vecmax[19]>0){
tmpmat[nx+1, 1:m] = umodel$modelinc[19, ]
}
nx = nx + max(1, vecmax[19])
pnames = c(pnames, "xi")
sumdcc = 5 + sum(pmax(1, modelinc[c(3,4,5,6,7)])) - 5
tmpmat = rbind(tmpmat, matrix(0, ncol = m+1, nrow = sumdcc))
if(modelinc[3]>0){
for(i in 1:modelinc[3]){
tmpmat[nx+i, m+1] = 1
pnames = c(pnames, paste("dcca", i, sep = ""))
}
} else{
pnames = c(pnames, "dcca")
}
nx = nx + max(1, modelinc[3])
if(modelinc[4]>0){
for(i in 1:modelinc[4]){
tmpmat[nx+i, m+1] = 1
pnames = c(pnames, paste("dccb", i, sep = ""))
}
} else{
pnames = c(pnames, "dccb")
}
nx = nx + max(1, modelinc[4])
if(modelinc[5]>0){
for(i in 1:modelinc[5]){
tmpmat[nx+i, m+1] = 1
pnames = c(pnames, paste("dccg", i, sep = ""))
}
} else{
pnames = c(pnames, "dccg")
}
nx = nx + max(1, modelinc[5])
if(modelinc[6]>0){
for(i in 1:modelinc[6]){
tmpmat[nx+i, m+1] = 1
if(modelinc[6]>1) pnames = c(pnames, paste("mshape", i, sep = "")) else pnames = c(pnames, "mshape")
}
} else{
pnames = c(pnames, "mshape")
}
nx = nx + max(1, modelinc[6])
if(modelinc[7]>0){
for(i in 1:modelinc[7]){
tmpmat[nx+i, m+1] = 1
if(modelinc[7]>1) pnames = c(pnames, paste("mskew", i, sep = "")) else pnames = c(pnames, "mskew")
}
} else{
pnames = c(pnames, "mskew")
}
colnames(tmpmat) = c(paste("Asset", 1:m, sep = ""), "Joint")
rownames(tmpmat) = pnames
return(tmpmat)
}
.estindfn = function(midx, mspec, dccpars){
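# Estimation-index matrix: flags which parameters are estimated, copying
# column 4 of each univariate parameter matrix and of the joint DCC block.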
m = dim(midx)[2]-1
eidx = midx*0
rnx = rownames(midx)
for(i in 1:m){
um = mspec@spec[[i]]@model$pars
zi = match(rownames(um), rnx)
zi = zi[!is.na(zi)]
zx = match(rnx, rownames(um))
zx = zx[!is.na(zx)]
eidx[zi,i] = um[zx,4]
}
zi = match(rownames(dccpars), rnx)
eidx[zi,m+1] = dccpars[,4]
return(eidx)
}
cat("\014")
rm(list = ls())
setwd("~/git/of_dollars_and_data")
source(file.path(paste0(getwd(),"/header.R")))
library(scales)
library(readxl)
library(lubridate)
library(zoo)
library(ggrepel)
library(tidyverse)
folder_name <- "_jkb/0002_lifestyle_creep"
out_path <- paste0(exportdir, folder_name)
dir.create(file.path(paste0(out_path)), showWarnings = FALSE)
inc <- 100000
raise <- 100000
withdrawal_pct <- 0.04
annual_ret <- 0.04
initial_savings <- 0.1
savings_rates <- c(0.1)
for(initial_savings_rate in savings_rates){
annual_savings <- inc*initial_savings_rate
annual_expenditure <- inc*(1-initial_savings_rate)
retirement_target <- annual_expenditure/withdrawal_pct
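# Closed-form years to retirement under a constant income: solving
# target = savings * ((1 + r)^n - 1) / r for n gives
# n = log(1 + target * r / savings) / log(1 + r).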
n_periods_baseline <- log(1 + (retirement_target/annual_savings)*annual_ret)/log(1 + annual_ret)
print(n_periods_baseline)
raise_saved_pcts <- c(0.1)
tmp <- data.frame(
annual_ret = rep(annual_ret, length(raise_saved_pcts)),
raise_saved_pct = raise_saved_pcts
)
counter_f <- 1
for(r in raise_saved_pcts){
df <- data.frame(year = c(),
saving_amount = c(),
total_saved = c(),
retirement_target = c(),
income = c(),
pct_total_retirement = c())
counter <- 1
retire_pct <- 0
while(retire_pct < 1){
if(counter == 1){
df[counter, "year"] <- counter
df[counter, "saving_amount"] <- annual_savings
df[counter, "total_saved"] <- annual_savings
df[counter, "retirement_target"] <- retirement_target
df[counter, "income"] <- inc
} else{
if(counter <= 10){
df[counter, "income"] <- inc
df[counter, "saving_amount"] <- annual_savings
} else{
df[counter, "income"] <- inc + raise
df[counter, "saving_amount"] <- (inc*initial_savings_rate) + (raise*r)
}
df[counter, "year"] <- counter
df[counter, "total_saved"] <- (df[(counter-1), "total_saved"] * (1 + annual_ret)) + df[counter, "saving_amount"]
df[counter, "retirement_target"] <- (df[counter, "income"] - df[counter, "saving_amount"])/withdrawal_pct
}
df[counter, "pct_total_retirement"] <- df[counter, "total_saved"]/df[counter, "retirement_target"]
retire_pct <- df[counter, "pct_total_retirement"]
counter <- counter + 1
}
tmp[counter_f, "n_periods"] <- counter - 1
tmp[counter_f, "better_than_baseline"] <- ifelse(tmp[counter_f, "n_periods"] < n_periods_baseline, 1, 0)
if(r == initial_savings_rate && initial_savings_rate == 0.6){
assign("investigate", df, envir = .GlobalEnv)
}
counter_f <- counter_f + 1
}
tmp2 <- tmp %>%
filter(better_than_baseline == 1) %>%
head(1) %>%
select(-better_than_baseline) %>%
mutate(savings_rate = initial_savings_rate)
if(initial_savings_rate == savings_rates[1]){
final_results <- tmp2
} else{
final_results <- final_results %>% bind_rows(tmp2) %>%
select(savings_rate, raise_saved_pct, n_periods)
}
}
crossref <- function(...) {
.Defunct("rcrossref", msg = "Crossref functionality moved to package rcrossref")
}
.interpolateGridDay<-function(object, grid, latitude, d) {
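# Interpolate one day of weather over the target grid: min/max temperature,
# precipitation, relative humidity, radiation, wind, and finally Penman PET.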
i = which(object@dates == d)
if(length(i)==0) stop("Date not found. Date 'd' has to be comprised within the dates specified in 'object'.")
sp = spTransform(SpatialPoints(coordinates(grid), grid@proj4string), object@proj4string)
cc = sp@coords
z = grid@data$elevation
mPar = object@params
if(!("debug" %in% names(mPar))) mPar$debug = FALSE
tmin = .interpolateTemperatureSeriesPoints(Xp= cc[,1], Yp =cc[,2], Zp = z,
X = object@coords[,1],
Y = object@coords[,2],
Z = object@elevation,
T = as.matrix(object@MinTemperature)[,i,drop=FALSE],
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_MinTemperature,
N = mPar$N_MinTemperature,
iterations = mPar$iterations,
debug = mPar$debug)
tmax = .interpolateTemperatureSeriesPoints(Xp= cc[,1], Yp =cc[,2], Zp = z,
X = object@coords[,1],
Y = object@coords[,2],
Z = object@elevation,
T = as.matrix(object@MaxTemperature)[,i,drop=FALSE],
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_MaxTemperature,
N = mPar$N_MaxTemperature,
iterations = mPar$iterations,
debug = mPar$debug)
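# Daily mean temperature as a fixed weighting of the daily extremes
# (0.606 * tmax + 0.394 * tmin) rather than their simple average.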
tmean = 0.606*tmax+0.394*tmin
prec = .interpolatePrecipitationSeriesPoints(Xp= cc[,1], Yp =cc[,2], Zp = z,
X = object@coords[,1],
Y = object@coords[,2],
Z = object@elevation,
P = as.matrix(object@Precipitation)[,i,drop=FALSE],
Psmooth = object@SmoothedPrecipitation[,i,drop=FALSE],
iniRp = mPar$initial_Rp,
alpha_event = mPar$alpha_PrecipitationEvent,
alpha_amount = mPar$alpha_PrecipitationAmount,
N_event = mPar$N_PrecipitationEvent,
N_amount = mPar$N_PrecipitationAmount,
iterations = mPar$iterations,
popcrit = mPar$pop_crit,
fmax = mPar$f_max,
debug = mPar$debug)
if(is.null(object@RelativeHumidity)) {
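# No humidity observations available: approximate the dew point by tmin, so
# vapour pressure is the saturation value at tmin and rhmax is pinned at 100.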
rhmean = .relativeHumidityFromMinMaxTemp(tmin, tmax)
VP = .temp2SVP(tmin)
rhmax = rep(100, length(rhmean))
rhmin = pmax(0,.relativeHumidityFromDewpointTemp(tmax, tmin))
} else {
TdewM = .dewpointTemperatureFromRH(0.606*as.matrix(object@MaxTemperature[,i,drop=FALSE])+0.394*as.matrix(object@MinTemperature[,i,drop=FALSE]),
as.matrix(object@RelativeHumidity))
tdew = .interpolateTdewSeriesPoints(Xp= cc[,1], Yp =cc[,2], Zp = z,
X = object@coords[,1],
Y = object@coords[,2],
Z = object@elevation,
T = TdewM,
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_DewTemperature,
N = mPar$N_DewTemperature,
iterations = mPar$iterations,
debug = mPar$debug)
rhmean = .relativeHumidityFromDewpointTemp(tmean, tdew)
VP = .temp2SVP(tdew)
rhmin = pmax(0,.relativeHumidityFromDewpointTemp(tmax, tdew))
rhmax = pmin(100,.relativeHumidityFromDewpointTemp(tmin, tdew))
}
doy = as.numeric(format(object@dates[i],"%j"))
J = radiation_dateStringToJulianDays(d)
diffTemp = tmax-tmin
diffTempMonth = .interpolateTemperatureSeriesPoints(Xp= cc[,1], Yp =cc[,2], Zp = z,
X = object@coords[,1],
Y = object@coords[,2],
Z = object@elevation,
T = as.matrix(object@SmoothedTemperatureRange)[,i,drop=FALSE],
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_MinTemperature,
N = mPar$N_MinTemperature,
iterations = mPar$iterations,
debug = mPar$debug)
latrad = latitude * (pi/180)
asprad = grid$aspect * (pi/180)
slorad = grid$slope * (pi/180)
rad = .radiationPoints(latrad, grid$elevation, slorad, asprad, J,
diffTemp, diffTempMonth, VP, prec)
if((!is.null(object@WFIndex)) && (!is.null(object@WFFactor))) {
wstopo = getGridTopology(object@WindFields$windSpeed)
wdtopo = getGridTopology(object@WindFields$windDirection)
indws = getGridIndex(cc, wstopo)
indwd = getGridIndex(cc, wdtopo)
WS = as.matrix(object@WindFields$windSpeed@data[indws,])
WD = as.matrix(object@WindFields$windDirection@data[indwd,])
Wp = .interpolateWindFieldSeriesPoints(Xp= cc[,1], Yp =cc[,2], WS[,i,drop=FALSE], WD[,i,drop=FALSE],
X = object@coords[,1],
Y = object@coords[,2],
I = object@WFIndex[,i,drop=FALSE],
F = object@WFFactor[,i,drop=FALSE],
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_Wind,
N = mPar$N_Wind,
iterations = mPar$iterations)
Ws = as.vector(Wp$WS)
Wd = as.vector(Wp$WD)
} else if((!is.null(object@WindSpeed)) && (!is.null(object@WindDirection))) {
Wp = .interpolateWindStationSeriesPoints(Xp= cc[,1], Yp =cc[,2],
WS = object@WindSpeed[,i,drop=FALSE], WD = object@WindDirection[,i,drop=FALSE],
X = object@coords[,1],
Y = object@coords[,2],
iniRp = mPar$initial_Rp,
alpha = mPar$alpha_Wind,
N = mPar$N_Wind,
iterations = mPar$iterations)
Ws = as.vector(Wp$WS)
Wd = as.vector(Wp$WD)
} else {
Ws = rep(NA,nrow(cc))
Wd = rep(NA,nrow(cc))
}
pet = .PenmanPETPointsDay(latrad, grid$elevation, slorad, asprad, J, tmin, tmax,
rhmin, rhmax, rad, Ws, mPar$wind_height,
0.001, 0.25);
df = data.frame(MeanTemperature = as.vector(tmean),
MinTemperature = as.vector(tmin),
MaxTemperature = as.vector(tmax),
Precipitation = as.vector(prec),
MeanRelativeHumidity = rhmean,
MinRelativeHumidity = rhmin,
MaxRelativeHumidity = rhmax,
Radiation = rad,
WindSpeed = Ws,
WindDirection = Wd,
PET = pet)
return(SpatialGridDataFrame(grid@grid, df, grid@proj4string))
}
interpolationgrid<-function(object, grid, dates = NULL,
exportFile = NULL, exportFormat = "netCDF", add = FALSE, overwrite = FALSE,
verbose = TRUE) {
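# Public entry point: validate inputs, reproject the grid if its CRS differs
# from the data, then either return an in-memory SpatialGridMeteorology or
# stream each interpolated day to a netCDF file when exportFile is given.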
if(!inherits(object,"MeteorologyInterpolationData")) stop("'object' has to be of class 'MeteorologyInterpolationData'.")
if(!inherits(grid,"SpatialGridTopography")) stop("'grid' has to be of class 'SpatialGridTopography'.")
if(!is.null(dates)) {
if(class(dates)!="Date") stop("'dates' has to be of class 'Date'.")
if(sum(as.character(dates) %in% as.character(object@dates))<length(dates))
stop("At least one of the dates is outside the time period for which interpolation is possible.")
}
else dates = object@dates
bbox = object@bbox
if(proj4string(grid)!=proj4string(object)) {
warning("CRS projection in 'grid' adapted to that of 'object'.")
sp = spTransform(SpatialPoints(coordinates(grid), grid@proj4string), object@proj4string)
gbbox = sp@bbox
} else {
gbbox = grid@bbox
}
insidebox = (gbbox[1,1]>=bbox[1,1]) && (gbbox[1,2]<=bbox[1,2]) && (gbbox[2,1]>=bbox[2,1]) && (gbbox[2,2]<=bbox[2,2])
if(!insidebox) warning("Boundary box of target grid is not within boundary box of interpolation data object.")
longlat = spTransform(as(grid,"SpatialPoints"),CRS(SRS_string = "EPSG:4326"))
latitude = longlat@coords[,2]
ndates = length(dates)
export = !is.null(exportFile)
if((ndates==1) && !export) return(.interpolateGridDay(object, grid, latitude, dates))
l = vector("list", ndates)
if(export) nc = .openwritegridNetCDF(grid@grid, proj4string(grid), vars = NULL,
dates = dates, file = exportFile, add = add, overwrite = overwrite, verbose = verbose)
for(i in 1:ndates) {
if(verbose) cat(paste("Interpolating day '", dates[i], "' (",i,"/",ndates,") - ",sep=""))
m = .interpolateGridDay(object, grid, latitude, dates[i])
if(export) {
dl = list(m@data)
names(dl) = as.character(dates[i])
.writemeteorologygridNetCDF(dl, m@grid, proj4string(m), nc, byPixel = FALSE, verbose = verbose)
} else {
l[[i]] = m@data
if(verbose) cat("done.\n")
}
}
if(!export) {
names(l) = dates
return(SpatialGridMeteorology(grid@grid, grid@proj4string, l, dates))
} else {
.closeNetCDF(exportFile,nc, verbose = verbose)
}
}
context("Geoms")
library("ggplot2")
test_that("geom_linerangeh() flips", {
v <- range_p_orig + geom_linerange(aes(ymin = lower, ymax = upper))
h <- range_p + geom_linerangeh(aes(xmin = lower, xmax = upper))
check_horizontal(v, h, "geom_linerangeh()")
})
test_that("geom_pointangeh() flips", {
v <- range_p_orig + geom_pointrange(aes(ymin = lower, ymax = upper))
h <- range_p + geom_pointrangeh(aes(xmin = lower, xmax = upper))
check_horizontal(v, h, "geom_pointrangeh()")
v_facet <- ggplot(range_df, aes(trt, resp)) + facet_wrap(~group) +
geom_pointrange(aes(ymin = lower, ymax = upper))
h_facet <- ggplot(range_df, aes(resp, trt)) + facet_wrap(~group) +
geom_pointrangeh(aes(xmin = lower, xmax = upper))
check_horizontal(v_facet, h_facet, "geom_pointrangeh() + facet_wrap()")
v_dodge <- range_p_orig + geom_pointrange(aes(ymin = lower, ymax = upper),
position = position_dodge(0.3))
h_dodge <- range_p + geom_pointrangeh(aes(xmin = lower, xmax = upper),
position = position_dodgev(0.3))
check_horizontal(v_dodge, h_dodge, "geom_pointrangeh() + position_dodgev()")
})
test_that("geom_crossbarh() flips", {
v <- range_p_orig + geom_crossbar(aes(ymin = lower, ymax = upper))
h <- range_p + geom_crossbarh(aes(xmin = lower, xmax = upper))
check_horizontal(v, h, "geom_crossbarh()")
})
test_that("geom_errorbarh() flips", {
v <- range_p_orig + geom_errorbar(aes(ymin = lower, ymax = upper))
h <- range_p + geom_errorbarh(aes(xmin = lower, xmax = upper))
check_horizontal(v, h, "geom_errorbarh()")
})
test_that("geom_barh() flips", {
v <- range_p_orig + geom_bar(aes(fill = group),
stat = "identity", position = "dodge")
h <- range_p + geom_barh(aes(fill = group),
stat = "identity", position = "dodgev")
check_horizontal(v, h, "geom_barh()")
v_facet <- ggplot(range_df, aes(trt, resp)) + facet_wrap(~group) +
geom_bar(position = "dodge", stat = "identity")
h_facet <- ggplot(range_df, aes(resp, trt)) + facet_wrap(~group) +
geom_barh(position = "dodgev", stat = "identity")
check_horizontal(v_facet, h_facet, "geom_barh() + facet_wrap()")
v <- ggplot(mpg, aes(x = class)) + geom_bar()
h <- ggplot(mpg, aes(y = class)) + geom_barh()
check_horizontal(v, h, "geom_barh() with count stat")
})
test_that("geom_colh() flips", {
df <- data.frame(trt = c("a", "b", "c"), outcome = c(2.3, 1.9, 3.2))
v <- ggplot(df, aes(trt, outcome)) + geom_col()
h <- ggplot(df, aes(outcome, trt)) + geom_colh()
check_horizontal(v, h, "geom_colh()")
})
test_that("geom_histogramh() flips", {
v <- ggplot(mtcars, aes(drat)) + geom_histogram(bins = 10)
h <- ggplot(mtcars, aes(y = drat)) + geom_histogramh(bins = 10)
check_horizontal(v, h, "geom_histogramh()", TRUE)
v_fill_stack <- ggplot(mtcars, aes(drat, fill = factor(cyl))) + geom_histogram(bins = 10, position = position_stack())
h_fill_stack <- ggplot(mtcars, aes(y = drat, fill = factor(cyl))) + geom_histogramh(bins = 10, position = position_stackv())
check_horizontal(v_fill_stack, h_fill_stack, "geom_histogramh() + position_stack() with fill", TRUE)
v_fill_facet_nudge <- ggplot(mtcars, aes(drat, fill = factor(cyl))) + facet_wrap(~am) + geom_histogram(bins = 10, position = position_nudge())
h_fill_facet_nudge <- ggplot(mtcars, aes(y = drat, fill = factor(cyl))) + facet_wrap(~am) + geom_histogramh(bins = 10, position = position_nudge())
check_horizontal(v_fill_facet_nudge, h_fill_facet_nudge, "geom_histogramh() + position_nudge() with fill")
})
test_that("geom_violinh() flips", {
v <- ggplot(mtcars, aes(factor(cyl), mpg, fill = factor(am))) + geom_violin()
h <- ggplot(mtcars, aes(mpg, factor(cyl), fill = factor(am))) + geom_violinh()
check_horizontal(v, h, "geom_violinh()")
v_facet <- ggplot(mtcars, aes(factor(cyl), mpg, fill = factor(am))) + facet_wrap(~vs) + geom_violin()
h_facet <- ggplot(mtcars, aes(mpg, factor(cyl), fill = factor(am))) + facet_wrap(~vs) + geom_violinh()
check_horizontal(v_facet, h_facet, "geom_violinh() + facet_wrap()")
set.seed(111)
dat <- data.frame(x = LETTERS[1:3], y = rnorm(90))
dat <- dat[dat$x != "C" | c(TRUE, FALSE), ]
v <- ggplot(dat, aes(x = x, y = y)) + geom_violin(draw_quantiles = c(0.25, 0.5, 0.75))
h <- ggplot(dat, aes(x = y, y = x)) + geom_violinh(draw_quantiles = c(0.25, 0.5, 0.75))
check_horizontal(v, h, "geom_violinh() + draw_quantiles")
})
test_that("geom_boxploth() flips", {
v <- ggplot(mpg, aes(class, hwy)) + geom_boxplot()
h <- ggplot(mpg, aes(hwy, class)) + geom_boxploth()
check_horizontal(v, h, "geom_boxploth()")
v_fill <- ggplot(mpg, aes(class, hwy, fill = factor(cyl))) + geom_boxplot()
h_fill <- ggplot(mpg, aes(hwy, class, fill = factor(cyl))) + geom_boxploth()
check_horizontal(v_fill, h_fill, "geom_boxploth() with fill")
v_facet_fill <- ggplot(mpg, aes(class, hwy, fill = factor(cyl))) + facet_wrap(~model) + geom_boxplot()
h_facet_fill <- ggplot(mpg, aes(hwy, class, fill = factor(cyl))) + facet_wrap(~model) + geom_boxploth()
check_horizontal(v_facet_fill, h_facet_fill, "geom_boxploth() + facet_wrap() with fill")
df <- data.frame(x = 1:10, y = rep(1:2, 5))
h_continuous <- ggplot(df) + geom_boxploth(aes(x = x, y = y, group = 1))
v_continuous <- ggplot(df) + geom_boxplot(aes(x = y, y = x, group = 1))
check_horizontal(v_continuous, h_continuous, "geom_boxploth() and continuous y scale")
})
test_that("facet_grid() with free scales flips", {
v <- ggplot(mtcars, aes(factor(cyl), disp)) + geom_boxplot() + facet_grid(am ~ ., scales = "free")
h <- ggplot(mtcars, aes(disp, factor(cyl))) + geom_boxploth() + facet_grid(. ~ am, scales = "free")
check_horizontal(v, h, "facet_grid() with free scales")
})
test_that("scale information is preserved", {
v <- range_p_orig +
geom_pointrange(aes(ymin = lower, ymax = upper))+
scale_y_continuous(breaks = c(1, 2, 3, 4, 5),
labels = c("1/1", "2/1", "3/1", "4/1", "5/1"))
h <- range_p +
geom_pointrangeh(aes(xmin = lower, xmax = upper)) +
scale_x_continuous(breaks = c(1, 2, 3, 4, 5),
labels = c("1/1", "2/1", "3/1", "4/1", "5/1"))
check_horizontal(v, h, "scales")
})
pat_filter <- function(
pat,
...
) {
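# Defensive check: if 'pat' is an undefined symbol, lazy evaluation raises
# "object ... not found" inside try(), which is caught to add a hint.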
result <- try({
if ( !pat_isPat(pat) )
stop("First argument is not of class 'pat'.")
}, silent = TRUE)
if ( class(result) %in% "try-error" ) {
err_msg <- geterrmessage()
if ( stringr::str_detect(err_msg, "object .* not found") ) {
stop(paste0(err_msg, "\n(Did you forget to pass in the 'pat' object?)"))
}
}
if ( pat_isEmpty(pat) )
stop("Parameter 'pat' has no data.")
pat <- pat_distinct(pat)
pat$data <-
dplyr::filter(pat$data,...)
pat <- pat_distinct(pat)
return(pat)
}
heat_ppoints <- function(x, y, z, legend = "horizontal",
proj = "none", parameters,
orientation, lratio = 0.2,
map = "none", n = 5, ...) {
if (missing(parameters)) parameters <- NULL
if (missing(orientation)) orientation <- NULL
arglist <- list(...)
xyz <- heat_ppoints_xyz_setup(x = x, y = y, z = z,
tx = deparse(substitute(x)),
ty = deparse(substitute(y)),
arglist = arglist)
object <- heat_ppoints_setup(xyz, legend, proj, parameters,
orientation, lratio, map, n = n)
if (legend != "none") {
.legend.mar(object$legend.mar)
}
.legend.scale.args(object$legend.scale.args)
if (legend == "none") {
do.call(object$plotf, object$arglist)
} else {
autolayout(size = c(1, 1), legend = legend, lratio = lratio, show = FALSE,
reverse = TRUE)
autolegend()
do.call(object$plotf, object$arglist)
}
  if (!is.null(arglist$pch) && !is.null(arglist$border_col)) {
    if (arglist$pch >= 19) {
      arglist$bg <- arglist$col
      arglist$col <- arglist$border_col
    }
  }
if (object$axes) {
do.call("paxes", object$paxes.args)
}
if (!is.null(object$lines.args$x)) {
do.call("plines", object$lines.args)
}
if (!is.null(object$points.args$x)) {
f <- autoimage::ppoints
do.call(f, object$points.args)
}
if (!is.null(object$text.args$x)) {
do.call("ptext", object$text.args)
}
return(invisible(structure(object, class = "heat_ppoints")))
}
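# Example usage (illustrative, with simulated coordinates and values):
# x <- runif(100, -105, -100)
# y <- runif(100, 35, 40)
# z <- rnorm(100)
# heat_ppoints(x, y, z, legend = "horizontal", n = 6)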
heat_ppoints_setup <- function(xyz, legend = "none",
proj = "none",
parameters = NULL,
orientation = NULL,
lratio = 0.2, map = "none",
n) {
x <- xyz$x
y <- xyz$y
z <- xyz$z
arglist <- xyz$arglist
  if (length(proj) != 1 || !is.character(proj)) {
    stop("proj should be a single character string")
  }
legend <- try(match.arg(legend, c("none", "horizontal", "vertical")),
silent = TRUE)
if (length(legend) != 1) {
stop("legend should be a single character string")
}
  if (inherits(legend, "try-error")) {
stop("invalid legend argument.
legend should be \"none\", \"horizontal\", or \"vertical\".")
}
  if (length(lratio) != 1 || !is.numeric(lratio) || lratio <= 0) {
    stop("lratio should be a positive number")
  }
if (is.null(arglist$pch)) {
arglist$pch <- 16
}
if (!is.null(arglist$col)) {
n <- length(arglist$col)
}
  if (length(n) != 1 || !is.numeric(n) || n <= 1) {
    stop("n should be a single number greater than 1")
  }
zlim_breaks <- zlim_breaks_setup(arglist$zlim,
arglist$breaks, n,
range(z, na.rm = TRUE),
arglist$col)
arglist$zlim <- zlim_breaks$zlim
arglist$breaks <- zlim_breaks$breaks
if (is.null(arglist$col)) {
arglist$col <- colorspace::sequential_hcl(n = length(arglist$breaks) - 1, palette = "Viridis")
}
legend.scale.args <- list()
legend.scale.args$zlim <- arglist$zlim
legend.scale.args$breaks <- arglist$breaks
legend.scale.args$col <- arglist$col
legend.scale.args$axis.args <- arglist$legend.axis.args
  hpcol <- as.character(cut(z, breaks = arglist$breaks, labels = arglist$col))
  arglist$col <- hpcol
  if (!is.null(arglist$pch) && !is.null(arglist$border_col)) {
    if (arglist$pch >= 19) {
      arglist$bg <- arglist$col
      arglist$col <- arglist$border_col
    }
  }
legend.mar <- arglist$legend.mar
if (is.null(legend.mar)) {
legend.mar <- automar(legend)
}
if (map != "none") arglist$lines <- map_setup(map)
lines.args <- lines_args_setup(arglist, proj)
points.args <- points_args_setup(arglist, proj)
  text.args <- text_args_setup(arglist, proj)
paxes.args <- paxes_args_setup(arglist, proj)
axes <- axes_setup(arglist)
if (proj != "none") {
arglist$asp <- 1
which.in <- which(x >= arglist$xlim[1] & x <= arglist$xlim[2] &
y >= arglist$ylim[1] & y <= arglist$ylim[2])
projectxy <- mapproj::mapproject(x, y, projection = proj,
parameters = parameters,
orientation = orientation)
x <- projectxy$x
y <- projectxy$y
    sx <- seq(arglist$xlim[1], arglist$xlim[2], len = 100)
    sy <- seq(arglist$ylim[1], arglist$ylim[2], len = 100)
    sg <- expand.grid(sx, sy)
project_lim <- mapproj::mapproject(sg[,1], sg[,2],
projection = proj,
parameters = parameters,
orientation = orientation)
arglist$xlim <- range(project_lim$x, na.rm = TRUE)
arglist$ylim <- range(project_lim$y, na.rm = TRUE)
}
arglist$x <- x
arglist$y <- y
plotf <- graphics::plot
arglist <- arglist_clean(arglist, image = FALSE)
object <- list(plotf = plotf, arglist = arglist,
legend = legend,
legend.scale.args = legend.scale.args,
legend.mar = legend.mar, proj = proj,
points.args = points.args,
lines.args = lines.args,
text.args = text.args,
paxes.args = paxes.args,
axes = axes)
return(object)
}
heat_ppoints_xyz_setup <- function(x, y, z, tx, ty, arglist) {
if (is.null(arglist$xlab)) arglist$xlab <- tx
if (is.null(arglist$ylab)) arglist$ylab <- ty
  if (!is.vector(x) || !is.numeric(x)) {
    stop("x must be a numeric vector")
  }
  if (!is.vector(y) || !is.numeric(y)) {
    stop("y must be a numeric vector")
  }
  if (!is.vector(z) || !is.numeric(z)) {
    stop("z must be a numeric vector")
  }
if (length(x) != length(y)) stop("length(x) != length(y)")
if (length(x) != length(z)) stop("length(x) != length(z)")
if (is.null(arglist$xlim)) {
arglist$xlim <- range(x, na.rm = TRUE)
}
if (is.null(arglist$ylim)) {
arglist$ylim <- range(y, na.rm = TRUE)
}
return(list(x = x, y = y, z = z, arglist = arglist))
} |
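# EkNNval: classify a test set with the evidential K-nearest-neighbor rule.
# Each of the K neighbors contributes a simple mass function that is combined
# across neighbors (Dempster's rule with normalization); returns the combined
# masses (m), the predicted classes (ypred) and, when test labels are given,
# the error rate (err).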
EkNNval <- function(xtrain,ytrain,xtst,K,ytst=NULL,param=NULL){
xtst<-as.matrix(xtst)
xtrain<-as.matrix(xtrain)
  ytrain<-as.integer(as.factor(ytrain))
  if(!is.null(ytst)) ytst<-as.integer(as.factor(ytst))
if(is.null(param)) param<-EkNNinit(xtrain,ytrain)
Napp<-nrow(xtrain)
M<-max(ytrain)
N<-nrow(xtst)
knn<-get.knnx(xtrain, xtst, k=K)
knn$nn.dist<-knn$nn.dist^2
is<-t(knn$nn.index)
ds<-t(knn$nn.dist)
m = rbind(matrix(0,M,N),rep(1,N))
for(i in 1:N){
for(j in 1:K){
m1 <- rep(0,M+1)
m1[ytrain[is[j,i]]] <- param$alpha*exp(-param$gamma[ytrain[is[j,i]]]^2*ds[j,i])
m1[M+1] <- 1 - m1[ytrain[is[j,i]]]
m[1:M,i] <- m1[1:M]*m[1:M,i] + m1[1:M]*m[M+1,i] + m[1:M,i]*m1[M+1]
m[M+1,i] <- m1[M+1] * m[M+1,i]
m<-m/matrix(colSums(m),M+1,N,byrow=TRUE)
}
}
m<-t(m)
ypred<-max.col(m[,1:M])
if(!is.null(ytst)) err<-length(which(ypred != ytst))/N else err<-NULL
return(list(m=m,ypred=ypred,err=err))
} |
gexp.default <- function(x = NULL,
mu = 26,
err = NULL,
errp = NULL,
r = 5L,
fl = NULL,
blkl = NULL,
rowl = NULL,
coll = NULL,
fe = NULL,
inte = NULL,
blke = NULL,
rowe = NULL,
cole = NULL,
contrasts = NULL,
type = c('SIMPLE', 'FE', 'SPE'),
design = c('CRD', 'RCBD', 'LSD'),
round = 2L,
...)
{
toe <- match.arg(type)
des <- match.arg(design)
option <- paste(toe,
des,
sep = '_')
qualiquanti <- checkQualiQuanti(fl)
obj <- list(mu = mu,
err = err,
errp = errp,
r = r,
fl = fl,
blkl = blkl,
rowl = rowl,
coll = coll,
fe = fe,
inte = inte,
blke = blke,
rowe = rowe,
cole = cole,
contrasts = contrasts,
round = round,
qualiquanti = qualiquanti)
class(obj) <- tolower(option)
  res <- gexp(obj,
              ...)
  return(res)
}
checkQualiQuanti <- function(fl){
if(is.null(fl)){
quali <- TRUE
quanti <- FALSE
posquanti <- NULL
  } else {
    isnum <- vapply(fl, is.numeric, logical(1))
    quanti <- all(isnum)
    quali <- all(!isnum)
    posquanti <- which(isnum)
  }
res <- list(quali = quali,
quanti = quanti,
posquanti = posquanti)
return(res)
} |
library(ggplot2)
this_base <- "fig04-08_judith-leyster-exhibition-strip-plot"
my_data <- data.frame(
  minutes = c(4, 7, 7, 9, 10, 10, 11,
11, 13, 14, 15, 15, 20, 21,
22, 22, 23, 27, 27, 28, 28,
29, 31, 32, 33, 33, 35, 38,
38, 39, 40, 40, 40, 40, 42,
42, 42, 43, 45, 47, 48, 48,
49, 49, 55, 58, 66, 72, 73)
)
summ_stats <-
with(my_data,
data.frame(stat = c("Mean", "Median", "Quartile", "Quartile"),
val = c(mean(minutes), median(minutes),
quantile(minutes, probs = c(0.25, 0.75)))))
p <- ggplot(my_data, aes(x = minutes, y = factor(1))) +
geom_point(shape = 1) +
scale_x_continuous(breaks = c(20, 40, 60), limits = c(0, 80),
expand = c(0, 0)) +
labs(x = "Minutes", y = NULL) +
ggtitle("Fig 4.8 Judith Leyster Exhibition: Strip Plot") +
theme_bw() +
theme(panel.grid.major = element_blank(),
panel.grid.minor = element_blank(),
plot.title = element_text(size = rel(1.5), face = "bold"),
axis.ticks.y = element_blank(),
axis.text.y = element_blank(),
legend.position = "top",
legend.title = element_blank())
p <- p + geom_vline(data = summ_stats, aes(xintercept = val, linetype = stat),
                    show.legend = TRUE)
p
ggsave(paste0(this_base, ".png"),
p, width = 6, height = 3) |
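# searchFacebook: search public posts matching a string through the Facebook
# Graph API (v1.0 tokens only; post search was removed in API v2.0), following
# pagination links until 'n' posts have been collected.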
searchFacebook <- function(string, token, n=200, since=NULL, until=NULL)
{
tkversion <- getTokenVersion(token)
if (tkversion=="v2"){
stop("Searching for posts was deprecated with version 2.0 of",
" the Facebook Graph API.\nFor more details see ?searchFacebook")
}
if (length(string)>1){ string <- paste(string, collapse=" ") }
url <- paste("https://graph.facebook.com/search?q=", string,
"&type=post&limit=", sep="")
    if (n<=200){
        url <- paste(url, n, sep="")
    } else {
        url <- paste(url, "200", sep="")
    }
url <- paste(url,
"&fields=from.fields(name,id),message,created_time,type,link,likes.summary(true),comments.summary(true),shares",
sep="")
if (!is.null(since)){
url <- paste(url, "&since=", since, sep="")
}
if (!is.null(until)){
url <- paste(url, "&until=", until, sep="")
}
url <- utils::URLencode(url)
content <- callAPI(url=url, token=token)
l <- length(content$data); cat(l, "posts ")
error <- 0
while (length(content$error_code)>0){
cat("Error!\n")
Sys.sleep(0.5)
error <- error + 1
content <- callAPI(url=url, token=token)
if (error==3){ stop(content$error_msg) }
}
if (length(content$data)==0){
message("No public posts mentioning the string were found")
return(data.frame())
}
df <- searchDataToDF(content$data)
if (n>200){
df.list <- list(df)
while (l<n & length(content$data)>0 &
!is.null(content$paging$`next`)){
url <- content$paging$`next`
if (!is.null(since)){
url <- paste(url, "&since=", since, sep="")
}
content <- callAPI(url=url, token=token)
l <- l + length(content$data)
if (length(content$data)>0){ cat(l, " ") }
error <- 0
while (length(content$error_code)>0){
cat("Error!\n")
Sys.sleep(0.5)
error <- error + 1
content <- callAPI(url=url, token=token)
if (error==3){ stop(content$error_msg) }
}
df.list <- c(df.list, list(searchDataToDF(content$data)))
}
df <- do.call(rbind, df.list)
}
return(df)
} |
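# pls2B: two-block partial least squares between two blocks of variables
# (matrices or landmark arrays), based on the SVD of the between-block
# covariance matrix. Singular values can be assessed by permutation
# (rounds > 0) and predictions by leave-one-out cross-validation (cv = TRUE).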
pls2B <- function(x, y, tol=1e-12, same.config=FALSE, rounds=0,useCor=FALSE,cv=FALSE,cvlv=NULL, mc.cores=parallel::detectCores()) {
landmarks <- landmarksx <- landmarksy <- FALSE
xorig <- x
yorig <- y
win <- FALSE
if(.Platform$OS.type == "windows")
win <- TRUE
else
registerDoParallel(cores=mc.cores)
if (length(dim(x)) == 3) {
landmarks <- TRUE
landmarksx <- TRUE
x <- vecx(x)
}
if (length(dim(y)) == 3) {
landmarksy <- TRUE
y <- vecx(y)
} else
landmarks <- FALSE
xdim <- dim(x)
ydim <- dim(y)
if (same.config && !landmarks)
warning("the option same.config requires landmark array as input")
    xs <- x <- scale(x,scale = FALSE)
    ys <- y <- scale(y,scale = FALSE)
if (useCor) {
xs <- scale(x,scale = TRUE)
ys <- scale(y,scale = TRUE)
}
svd.cova <- svd2B(xs,ys,scale = useCor)
svs <- svd.cova$d
svs <- svs[which(svs > tol)]
svs <- svs^2
covas <- (svs/sum(svs))*100
l.covas <- length(covas)
svd.cova$d <- svd.cova$d[1:l.covas,drop=FALSE]
svd.cova$u <- svd.cova$u[,1:l.covas,drop=FALSE]
svd.cova$v <- svd.cova$v[,1:l.covas,drop=FALSE]
Xscores <- x%*%svd.cova$u
Yscores <- y%*%svd.cova$v
    cors <- numeric(l.covas)
    for (i in seq_len(l.covas))
        cors[i] <- cor(Xscores[,i],Yscores[,i])
permupls <- function(i)
{
x.sample <- sample(1:xdim[1])
y.sample <- sample(x.sample)
if (same.config && landmarks) {
tmparr <- .bindArr2(xorig[,,x.sample],yorig[,,y.sample],along=1)
tmpproc <- ProcGPA(tmparr,silent=TRUE)
x1 <- vecx(tmpproc$rotated[1:dim(xorig)[1],,])
y1 <- vecx(tmpproc$rotated[1:dim(yorig)[1],,])
} else {
x1 <- x
y1 <- y
}
svd.cova.tmp <- svd2B(x1[x.sample,],y1[y.sample,],u=F,v=F,scale = useCor)
svs.tmp <- svd.cova.tmp$d
return(svs.tmp[1:l.covas])
}
p.values <- rep(NA,l.covas)
if (rounds > 0) {
if (win)
permuscores <- foreach(i = 1:rounds, .combine = cbind) %do% permupls(i)
else
permuscores <- foreach(i = 1:rounds, .combine = cbind) %dopar% permupls(i)
p.val <- function(x,rand.x)
{
p.value <- length(which(rand.x >= x))
if (p.value > 0)
p.value <- p.value/rounds
else
p.value <- 1/rounds
return(p.value)
}
for (i in 1:l.covas)
p.values[i] <- p.val(svd.cova$d[i],permuscores[i,])
}
xlm <- lm(Xscores ~ Yscores -1)
ylm <- lm(Yscores ~ Xscores -1)
Cova <- data.frame(svd.cova$d[1:l.covas],covas,cors,p.values)
colnames(Cova) <- c("singular value","% total covar.","Corr. coefficient", "p-value")
out <- list(svd=svd.cova,Xscores=Xscores,Yscores=Yscores,CoVar=Cova)
out$x <- xorig
out$y <- yorig
out$xcenter <- attributes(x)$"scaled:center"
out$ycenter <- attributes(y)$"scaled:center"
out$xlm <- xlm
out$ylm <- ylm
class(out) <- "pls2B"
if (cv) {
if (is.null(cvlv))
cvlv <- nrow(Cova)-1
else
cvlv <- min(nrow(Cova),cvlv,(nrow(x)-2))
cvarrayX <- array(NA,dim=c(dim(x),cvlv))
cvarrayY <- array(NA,dim=c(dim(y),cvlv))
dimnames(cvarrayX)[1:2] <- dimnames(x)
dimnames(cvarrayY)[1:2] <- dimnames(y)
dimnames(cvarrayX)[[3]] <- dimnames(cvarrayY)[[3]] <- paste("LV",1:cvlv)
if (landmarksx)
x <- vecx(xorig)
if (landmarksy)
y <- vecx(yorig)
for (i in 1:xdim[1]) {
tmppls <- pls2B(x[-i,],y[-i,],useCor = useCor,tol=tol)
for (j in 1:cvlv) {
cvarrayY[i,,j] <- predictPLSfromData(tmppls,x=x[i,],ncomp=j)
cvarrayX[i,,j] <- predictPLSfromData(tmppls,y=y[i,],ncomp=j)
}
}
out$predicted.x <- cvarrayX
out$predicted.y <- cvarrayY
}
return(out)
}
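# Example usage (illustrative, with random matrices):
# X <- matrix(rnorm(100 * 5), 100, 5)
# Y <- matrix(rnorm(100 * 3), 100, 3)
# fit <- pls2B(X, Y, rounds = 0)
# print(fit)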
print.pls2B <- function(x,...) {
cat(" Covariance explained by the singular values\n\n")
df <- x$CoVar
df <- df[,colSums(is.na(df)) != nrow(df)]
print( df,row.names=FALSE)
}
getPLSfromScores <- function(pls,x,y) {
if (!missing(x) && !missing(y))
stop("either x or y must be missing")
svdpls <- pls$svd
if (missing(y)) {
if (is.vector(x) || length(x) == 1) {
xl <- length(x)
x <- t(x)
}
else if (is.matrix(x))
xl <- ncol(x)
out <- t(svdpls$u[,1:xl,drop=FALSE]%*%t(x))
out <- sweep(out,2,-pls$xcenter)
if (length(dim(pls$x)) == 3) {
if (is.matrix(x) && nrow(x) > 1) {
out <- vecx(out,revert = T,lmdim = dim(pls$x)[2])
} else {
out <- matrix(out,dim(pls$x)[1],dim(pls$x)[2])
}
}
return(out)
}
if (missing(x)) {
if (is.vector(y) || length(y) == 1) {
xl <- length(y)
y <- t(y)
} else if (is.matrix(y))
xl <- ncol(y)
out <- t(svdpls$v[,1:xl]%*%t(y))
out <- sweep(out,2,-pls$ycenter)
if (length(dim(pls$y)) == 3) {
if (is.matrix(y) && nrow(y) > 1) {
out <- vecx(out,revert = T,lmdim = dim(pls$y)[2])
} else {
out <- matrix(out,dim(pls$y)[1],dim(pls$y)[2])
}
}
return(out)
}
}
predictPLSfromScores <- function(pls,x,y) {
if (!missing(x) && !missing(y))
stop("either x or y must be missing")
svdpls <- pls$svd
if (missing(y)) {
pls$ylm$coefficients <- as.matrix(pls$ylm$coefficients)
if (is.vector(x) || length(x) == 1) {
xl <- length(x)
x <- t(x)
}
else if (is.matrix(x))
xl <- ncol(x)
yest <- t(t(pls$ylm$coefficients[1:xl,,drop=FALSE])%*%t(x))
out <- t(svdpls$v%*%t(yest))
out <- sweep(out,2,-pls$ycenter)
if (length(dim(pls$y)) == 3) {
if (is.matrix(x) && nrow(x) > 1) {
out <- vecx(out,revert = T,lmdim = dim(pls$x)[2])
} else {
out <- matrix(out,dim(pls$y)[1],dim(pls$y)[2])
}
}
}
if (missing(x)) {
pls$xlm$coefficients <- as.matrix(pls$xlm$coefficients)
if (is.vector(y) || length(y) == 1) {
xl <- length(y)
y <- t(y)
}
else if (is.matrix(y))
xl <- ncol(y)
xest <- t(t(pls$xlm$coefficients[c(1:xl),,drop=FALSE])%*%t(y))
out <- t(svdpls$u%*%t(xest))
out <- sweep(out,2,-pls$xcenter)
if (length(dim(pls$x)) == 3) {
if (is.matrix(y) && nrow(y) > 1) {
out <- vecx(out,revert = T,lmdim = dim(pls$x)[2])
} else {
out <- matrix(out,dim(pls$x)[1],dim(pls$x)[2])
}
}
}
return(out)
}
getPLSscores <- function(pls,x,y) {
if (!missing(x) && !missing(y))
stop("either x or y must be missing")
if (missing(y)) {
if (length(dim(x)) == 3 || (is.matrix(x) && is.matrix(pls$x))) {
if (length(dim(x)) == 3)
x <- vecx(x)
out <- NULL
for(i in 1:nrow(x))
out <- rbind(out,getPLSscores(pls,x=x[i,]))
} else {
if (is.matrix(x))
x <- as.vector(x)
x <- x-pls$xcenter
out <- t(t(pls$svd$u)%*%x)
}
}
if (missing(x)) {
if (length(dim(y)) == 3 || (is.matrix(y) && is.matrix(pls$y))) {
if (length(dim(y)) == 3)
y <- vecx(y)
out <- NULL
for(i in 1:nrow(y))
out <- rbind(out,getPLSscores(pls,y=y[i,]))
} else {
if (is.matrix(y))
y <- as.vector(y)
y <- y-pls$ycenter
out <- t(t(pls$svd$v)%*%y)
}
}
return(out)
}
predictPLSfromData <- function(pls,x,y,ncomp=NULL) {
if (!missing(x) && !missing(y))
stop("either x or y must be missing")
if (is.null(ncomp))
ncomp <- ncol(pls$Xscores)
if (missing(y)) {
scores <- getPLSscores(pls,x=x)[,1:ncomp,drop=F]
out <- predictPLSfromScores(pls,x=scores)
}
if (missing(x)) {
scores <- getPLSscores(pls,y=y)[,1:ncomp,drop=F]
out <- predictPLSfromScores(pls,y=scores)
}
return(out)
}
plsCoVar <- function(pls,i,sdx=3,sdy=3) {
x <- t(t(c(-1,1)*sdx*sd(pls$Xscores[,i])))
y <- t(t(c(-1,1)*sdy*sd(pls$Yscores[,i])))
x0 <- matrix(0,2,i); x0[,i] <- x
y0 <- matrix(0,2,i); y0[,i] <- y
xnames <- paste(c("neg","pos"),"x_sd",sdx,sep="_")
ynames <- paste(c("neg","pos"),"y_sd",sdy,sep="_")
pls1x <- getPLSfromScores(pls,x=x0)
if (is.matrix(pls1x))
rownames(pls1x) <- xnames
else
dimnames(pls1x)[[3]] <- xnames
pls1y <- getPLSfromScores(pls,y=y0)
if (is.matrix(pls1y))
rownames(pls1y) <- ynames
else
dimnames(pls1y)[[3]] <- ynames
pls1out <- list(x=pls1x,y=pls1y)
return(pls1out)
}
svd2B <- function(x,y,scale=FALSE,u=TRUE,v=TRUE) {
xs <- scale(x,scale = scale)
ys <- scale(y,scale = scale)
svdx <- svd(xs)
svdy <- svd(ys)
u1 <- t(t(svdx$u)*svdx$d)
u2 <- t(t(svdy$u)*svdy$d)
utu <- crossprod(u1,u2)
svdutu <- svd(utu)
svdutu$d <- svdutu$d/(nrow(x) -1 )
if (u)
svdutu$u <- as.matrix((svdx$v)%*%svdutu$u)
else
svdutu$u <- NULL
if (v)
svdutu$v <- as.matrix((svdy$v)%*%svdutu$v)
else
svdutu$v <- NULL
return(svdutu)
}
getPLSCommonShape <- function(pls) {
out <- NULL
xdim <- dim(pls$x)
ydim <- dim(pls$y)
lmdim <- xdim[2]
nlmx <- xdim[1]
nlmy <- ydim[1]
if (xdim[2] != ydim[2])
stop("landmarks need to be of same dimensionality")
if (length(xdim) != 3 || length(ydim) != 3)
stop("this function only works on landmark data")
XscoresScaled <- pls$Xscores
YscoresScaled <- pls$Yscores
for (i in 1:ncol(pls$Xscores)) {
tmp <- cbind(pls$Xscores[,i],pls$Yscores[,i])
tmppca <- prcompfast(tmp,retx = FALSE)$rotation[,1]
if (prod(tmppca) > 0)
tmppca <- abs(tmppca)
xtmp <- matrix(pls$svd$u[,i]*tmppca[1],nlmx,lmdim)
ytmp <- matrix(pls$svd$v[,i]*tmppca[2],nlmy,lmdim)
tmpvec <- c(rbind(xtmp,ytmp))
XscoresScaled[,i] <- XscoresScaled[,i]/tmppca[1]
YscoresScaled[,i] <- YscoresScaled[,i]/tmppca[2]
out <- cbind(out,tmpvec)
}
commoncenter <- c(rbind(matrix(pls$xcenter,nlmx,lmdim),matrix(pls$ycenter,nlmy,lmdim)))
return(list(shapevectors=out,XscoresScaled=XscoresScaled,YscoresScaled=YscoresScaled,commoncenter=commoncenter,lmdim=lmdim))
}
plsCoVarCommonShape <- function(pls,i,sdcommon=1) {
commonshape <- getPLSCommonShape(pls)
sdi <- sd(c(commonshape$XscoresScaled[,i],commonshape$YscoresScaled[,i]))
sdvec <- t(commonshape$shapevectors[,i]%*%t(c(-1,1)*sdcommon*sdi))
sdvec <- sweep(sdvec,2,-commonshape$commoncenter)
out <- vecx(sdvec,revert = TRUE,lmdim = commonshape$lmdim)
return(out)
} |
context("graph generators")
test_that("graph generation: simple 2o graph", {
g = mcGP(lower = 0, upper = 100)
g = addCoordinates(g, n = 50L, generator = coordUniform)
g = addWeights(g, method = "euclidean", symmetric = TRUE)
g = addWeights(g, method = "random", weight.fun = runif, symmetric = TRUE)
expect_class(g, "mcGP")
expect_true(g$n.nodes == 50L)
expect_true(g$n.clusters == 0L)
expect_true(g$n.weights == 2L)
expect_set_equal(g$weight.types, c("distance", "random"))
expect_true(isSymmetricMatrix(g$weights[[1L]]))
expect_true(isSymmetricMatrix(g$weights[[2L]]))
expect_output(print(g), regexp = "MULTI")
pls = plot(g)
expect_list(pls, types = "ggplot", len = 2L, any.missing = FALSE, all.missing = FALSE)
})
test_that("graph generation: complex clustered graph", {
g = mcGP(lower = 0, upper = 100)
g = addCenters(g, n.centers = 3L, generator = coordLHS)
  g = addCoordinates(g, n = c(5L, 10L, 15L), by.centers = TRUE, generator = coordUniform, lower = c(0, 0), upper = c(1, 1))
g = addCoordinates(g, n = 22, by.centers = TRUE, generator = coordUniform, lower = c(0, 0), upper = c(1, 1))
g = addCoordinates(g, n = 100L, generator = coordGrid)
g = addWeights(g, method = "random", weight.fun = rnorm, mean = 5, sd = 1.3)
g = addWeights(g, method = "minkowski", p = 2.5, symmetric = FALSE)
pls = plot(g, show.cluster.centers = TRUE)
expect_list(pls, types = "ggplot", len = 2L, any.missing = FALSE, all.missing = FALSE)
g = addWeights(g, method = "random", weight.fun = function(n) {
sample(c(1, -10), n, replace = TRUE) * rexp(n, rate = 0.1) * 1:n
})
expect_class(g, "mcGP")
expect_true(g$n.nodes == 152L)
expect_true(g$n.clusters == 3L)
expect_true(g$n.weights == 3L)
expect_list(g$weights, types = "matrix", any.missing = FALSE, all.missing = FALSE, len = g$n.weights)
expect_true(isSymmetricMatrix(g$weights[[1L]]))
expect_true(isSymmetricMatrix(g$weights[[2L]]))
expect_true(isSymmetricMatrix(g$weights[[3L]]))
  expect_error(plot(g), regexp = "not supported")
})
test_that("graph generation: manual passing of coordinates weights works", {
g = mcGP(lower = 0, upper = 10)
center.coordinates = matrix(c(1, 2, 2, 5, 8, 3), byrow = TRUE, ncol = 2L)
g = addCenters(g, center.coordinates = center.coordinates)
expect_equal(center.coordinates, g$center.coordinates)
expect_true(g$n.clusters == nrow(center.coordinates))
weights = diag(10)
g = addWeights(g, weights = weights)
g = addWeights(g, method = "random", weight.fun = rnorm, mean = 5, sd = 1.3)
weights[1, 4] = 4
g = addWeights(g, weights = weights)
expect_class(g, "mcGP")
expect_true(g$n.nodes == 10L)
expect_true(g$n.clusters == 3L)
expect_true(g$n.weights == 3L)
expect_list(g$weights, types = "matrix", any.missing = FALSE, all.missing = FALSE, len = g$n.weights)
expect_true(isSymmetricMatrix(g$weights[[1L]]))
expect_true(isSymmetricMatrix(g$weights[[2L]]))
expect_false(isSymmetricMatrix(g$weights[[3L]]))
})
test_that("graph generation: check correct error messages", {
expect_error(mcGP(lower = 10, upper = 5))
g = mcGP(lower = 0, upper = 100)
expect_error(addWeights(g, method = "euclidean"), regexp = "number of nodes")
}) |
context("test-power_oneway_within")
test_that("error messages", {
expect_error(power_oneway_within(), "argument \"design_result\" is missing, with no default" )
})
test_that("2w and 3w", {
K <- 2
n <- 34
sd <- 1
r <- 0.5
  alpha <- 0.05
f <- 0.25
f2 <- f^2
ES <- f2/(f2+1)
mu <- mu_from_ES(K = K, ES = ES)
design = paste(K,"w",sep="")
design_result1 <- ANOVA_design(design = design,
n = n,
mu = mu,
sd = sd,
r = r, plot = FALSE)
expect_equal(power_oneway_within(design_result1, alpha_level = 0.05)$power,
pwr::pwr.t.test(d = 0.5,
n = 34,
sig.level = 0.05,
type = "paired",
alternative = "two.sided")$power*100,
tolerance = .001)
K <- 3
n <- 20
sd <- 1
r <- 0.8
f <- 0.25
f2 <- f^2
ES <- f2 / (f2 + 1)
mu <- mu_from_ES(K = K, ES = ES)
design = paste(K,"w",sep = "")
design_result2 <- ANOVA_design(design = design,
n = n,
mu = mu,
sd = sd,
r = r, plot = FALSE)
f <- 0.25
k <- 1
m <- 3
n <- 20
e <- 1
r <- 0.8
alpha <- 0.05
df1 <- (m - 1) * e
df2 <- (n - k) * (m - 1) * e
lambda <- (n * m * f^2) / (1 - r)
  F_critical <- qf(alpha,
                   df1,
                   df2,
                   lower.tail = FALSE)
  pow <- pf(F_critical,
            df1,
            df2,
            lambda,
            lower.tail = FALSE)
expect_equal(power_oneway_within(design_result2, alpha_level = 0.05)$power,
pow*100,
tolerance = .01)
}) |
ee = expect_equal
co = container(a = 1, b = 2, f = mean, 3)
co2 = clone(co)
ee(discard_at(co), co2)
ee(discard_at(co, "a"), container(b = 2, f = mean, 3))
original_was_not_touched = ee(co, co2)
expect_true(original_was_not_touched)
ee(discard_at(co, "a"), discard_at(co, 1))
ee(discard_at(co, "b"), discard_at(co, 2))
ee(discard_at(co, 1:4), container())
ee(discard_at(co, "b", "a", 4:3, 1), container())
ee(discard_at(co, "a", 1), discard_at(co, 1))
ee(discard_at(co, "a", "x"), discard_at(co, "a"))
ee(discard_at(co, "x", "a"), discard_at(co, "a"))
ee(discard_at(co, "a", 5), discard_at(co, "a"))
ee(discard_at(co, 6, "a", 5), discard_at(co, "a"))
ee(ref_discard_at(co, 1:4), container())
ee(co, container())
d = dict(a = 1, b = 2, f = mean)
d2 = clone(d)
ee(discard_at(d, "a", "f", "b"), dict())
original_was_not_touched = ee(d, d2)
expect_true(original_was_not_touched)
expect_true(is_empty(discard_at(d, names(d))))
ee(ref_discard_at(d, "a", "f", "b"), dict())
discard_was_done_on_original = ee(d, dict())
expect_true(discard_was_done_on_original)
d = dict.table(a = 1, b = 2, f = mean)
d2 = clone(d)
expect_true(is_empty(discard_at(d, 1, "b", 3)))
expect_true(is_empty(discard_at(d, 1:3)))
expect_true(is_empty(discard_at(d, 3:1)))
ee(d, d2)
expect_true(is_empty(discard_at(d, colnames(d))))
expect_true(is_empty(discard_at(d, rev(colnames(d)))))
ee(d, d2)
expect_silent(ref_discard_at(d, "x", 4, 11))
d_was_not_altered = ee(d, d2)
expect_true(d_was_not_altered)
ee(ref_discard_at(d, "b"), d2[, c(1, 3)])
expect_silent(ref_discard_at(d, "a"))
expect_false(ncol(d) == ncol(d2)) |
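# doPareto: order the rows of 'df_final' into Pareto fronts. The 'objective'
# data.frame lists the score columns ("mark") and the direction to optimize
# ("obj", either "max" or "min"); the fronts are computed with rPref::psel().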
doPareto <- function(df_final,
objective,
nr.fronts){
checkInputParam <- function(df_final, objective, nr.fronts) {
if(missing(df_final) || is.null(df_final) ||
missing(objective) || is.null(objective) ||
missing(nr.fronts) || is.null(nr.fronts)) {
stop("arguments df_final, objective, nr.fronts must be specified")
}
    if(!is.data.frame(df_final)){
      stop("df_final must be a data.frame")
    }
    if(!is.data.frame(objective)){
      stop("objective must be a data.frame")
    }
if(!c("mark")%in%colnames(objective)){
stop('objective has to contain "mark" column to indicate z scores you
want to operate')
}
if(!c("obj")%in%colnames(objective)){
stop('objective has to contain "obj" column to indicate the objective
of max or min')
}
if(!all(objective$obj %in% c("min","max"))){
stop('the value of "obj" column of objective has to be "max" or "min"')
}
if(length(unique(objective$obj)) > 2) {
stop('the value of "obj" column of objective has to be "max" or "min"')
}
if(!all(objective$mark %in% colnames(df_final))){
stop('the value of "mark" column of objective has to be one of the column
of the df_final')
}
}
checkInputParam(df_final = df_final,
objective = objective,
nr.fronts = nr.fronts)
p <- rPref::empty()
for (i in 1:nrow(objective)) {
if(objective[i,]$obj=="max"){
m <- as.character(objective[i,]$mark)
a <- rPref::high_(m)
}else
{
m <- as.character(objective[i,]$mark)
a <- rPref::low_(m)
}
p <- p*a
}
res <- rPref::psel(df = df_final, pref = p, top_level = nr.fronts)
names(res)[names(res) == '.level'] <- 'front'
return(res)
} |
jamoviBAplotHistogramClass <- if (requireNamespace('jmvcore', quietly = TRUE)) R6::R6Class(
"jamoviBAplotHistogramClass",
inherit = jamoviBAplotHistogramBase,
private = list(
.run = function() {
if ( !is.null(self$options$method1) && !is.null(self$options$method2) ) {
method1 <- self$options$method1
method2 <- self$options$method2
data <- self$data
data[[method1]] <- jmvcore::toNumeric(data[[method1]])
data[[method2]] <- jmvcore::toNumeric(data[[method2]])
results <- blandr.statistics( data[[method1]] , data[[method2]] )
image <- self$results$plot
image$setState(results)
}
},
.plot=function(image, ggtheme, ...) {
if ( !is.null(self$options$method1) && !is.null(self$options$method2) ) {
plotData <- image$state
plot <- blandr.plot.normality( plotData )
plot <- plot + ggtheme
print(plot)
TRUE
}
})
) |
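# Helpers for autoplot() methods on spatiotemporal resamplings: attach
# train/test fold indicators and coordinates to the task data, validate
# requested fold ids, and order the indicator levels for plotting.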
prepare_autoplot_cstf = function(task, resampling) {
data = task$data()
data$row_id = task$row_ids
data$indicator = ""
coords = task$coordinates()
coords$row_id = task$row_ids
if (grepl("Repeated", class(resampling)[1])) {
n_iters = resampling$iters / resampling$repeats(resampling$iters)
} else {
n_iters = resampling$iters
}
for (i in seq_len(n_iters)) {
row_id_test = resampling$instance$test[[i]]
row_id_train = resampling$instance$train[[i]]
data$test[data$row_id %in% row_id_test] = i
data$train[data$row_id %in% row_id_train] = i
}
data$Date = as.Date(data$Date)
data_coords = merge(data, coords, by = "row_id")
return(data_coords)
}
assert_autoplot = function(object, fold_id, task) {
if (!object$is_instantiated) {
object = object$instantiate(task)
}
if (!is.null(fold_id)) {
if (length(fold_id) > object$iters) {
stopf("More folds specified than stored in resampling.")
}
if (length(fold_id) == 1 && fold_id > object$iters) {
stopf("Specified a fold id which exceeds the total number of folds.")
}
if (any(fold_id > object$iters)) {
stopf("Specified a fold id which exceeds the total number of folds.")
}
}
return(object)
}
reorder_levels = function(object) {
object$indicator = as.factor(as.character(object$indicator))
if ("Omitted" %in% levels(object$indicator)) {
object$indicator = ordered(object$indicator,
levels = c("Train", "Test", "Omitted"))
} else {
object$indicator = ordered(object$indicator, levels = c("Train", "Test"))
}
return(object)
} |
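# rating.period: estimate monthly ("month") or annual ("year") constituent
# loads from a datetime/flow/concentration table. A log10-log10 OLS rating
# curve (concentration ~ flow) is fitted per period on complete, non-zero
# records; predicted concentrations are aggregated to daily means, multiplied
# by daily mean flow (x 86400 s/day), and summed per period.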
rating.period <-
function(db, ncomp, period){
if (period=="month"){
notNA<-na.omit(db)
db.i<-notNA[!rowSums(notNA[,-(1)] == 0) >= 1,]
index.u<-(which(db[,-which(names(db) %in% c("datetime"))]==0))
if (length(index.u)==0) {
db.u<-db}
if(length(index.u)!=0) {
db.u<-db[-index.u,]}
new<-db.i
new$newdate<-format(as.POSIXct(new$datetime), format="%Y-%m")
maximum<-length(unique(new$newdate))
index<-vector(length=maximum)
for (i in 1:(maximum)){
index[i]<-length(which(new$newdate==(unique(new$newdate)[i])))}
result <- vector("list",maximum)
for (i in 1:(maximum)){
seldata<-subset(new, new$newdate==unique(new$newdate)[i])
result[[i]]<-log10(seldata[,-which(names(seldata) %in% c("datetime", "newdate"))])
}
ols.model<-vector("list", maximum)
mat<-matrix(nrow=2, ncol=ncomp)
for (j in 1:maximum){
mat.conc<-(result[j])
sel<-do.call(rbind, mat.conc)
for (i in 1:ncomp){
ols<-(lm(sel[,i+1]~sel[,1]))$coefficients
mat[,i]<-ols}
ols.model[[j]]<-mat}
db.u$newdate<-format(as.POSIXct(db.u$datetime), format="%Y-%m")
maximum2<-length(unique(db.u$newdate))
index2<-vector(length=maximum2)
for (i in 1:(maximum2)){
index2[i]<-length(which(db.u$newdate==(unique(db.u$newdate)[i])))}
result2 <- vector("list",maximum2)
for (i in 1:(maximum2)){
seldata2<-subset(db.u, db.u$newdate==unique(db.u$newdate)[i])
result2[[i]]<-log10(seldata2[,-which(names(seldata2) %in% c("datetime", "newdate"))])
}
logconc<-vector("list", maximum2)
for (j in 1:maximum2){
coefficic<-ols.model[j]
coeffsel<-do.call(rbind, coefficic)
flowsel<-result2[j]
sel<-do.call(rbind, flowsel)
matload<-matrix(nrow=nrow(sel), ncol=ncomp)
for(i in 1:ncomp){
matload[,i]<-10^(as.matrix(((coeffsel[1,i]))+coeffsel[2,i]*((sel$flow))))}
logconc[[j]]<-matload}
concfinal<-do.call(rbind, logconc)
colnames(concfinal)<-c(names(db.u[3:(ncomp+2)]))
concdate<-cbind.data.frame(db.u$datetime, concfinal)
colnames(concdate)[1]<-c("datetime")
n<-nrow(db.u)
concdate$newdate<-format(as.POSIXct(concdate$datetime), format="%Y-%m-%d")
dateday<-aggregate(concdate[,2]~newdate, concdate, mean)
colnames(dateday)[1]<-c("datetime")
agg.dataC<-matrix(nrow=nrow(dateday), ncol=(ncomp))
for (i in 1:ncomp) {
agg.data<-aggregate(concdate[,i+1]~newdate, concdate, mean)
agg.dataC[,i]<-as.matrix(agg.data[,2])
concent<-cbind.data.frame(dateday$datetime, agg.dataC)
colnames(concent)[1]<-"datetime"
colnames(concent)[2:(ncomp+1)]<-c(names(db)[3:(ncomp+2)])}
db.u$newdate<-format(as.POSIXct(db.u$datetime), format="%Y-%m-%d")
agg.dataQ<-aggregate(flow~newdate, db.u, mean)
agg.dataQ$newdate<-as.POSIXct(agg.dataQ$newdate, format = c("%Y-%m-%d"))
prodCQ<-as.matrix(concent[,-which(names(concent) %in% c("datetime"))]*(agg.dataQ[,"flow"]*86400))
prodCQdate<-cbind.data.frame(agg.dataQ[,1], prodCQ)
colnames(prodCQdate)[1]<-"newdate"
prodCQdate$newdate<-format(as.POSIXct(prodCQdate$newdate), format="%Y-%m")
aggrg.data<-matrix(nrow=length(unique(prodCQdate$newdate)), ncol=(ncomp))
for(i in 1:(ncomp)){
agg.init<-aggregate(prodCQdate[,i+1]~newdate, prodCQdate, sum)
aggrg.data[,i]<-agg.init[,2]
colnames(aggrg.data)<-c(names(db.u)[3:(ncomp+2)])
rownames(aggrg.data)<-agg.init[,1]}
return(aggrg.data)}
if (period=="year"){
notNA<-na.omit(db)
db.i<-notNA[!rowSums(notNA[,-(1)] == 0) >= 1,]
index.u<-(which(db[,-which(names(db) %in% c("datetime"))]==0))
if (length(index.u)==0) {
db.u<-db}
if(length(index.u)!=0) {
db.u<-db[-index.u,]}
new<-db.i
new$newdate<-format(as.POSIXct(new$datetime), format="%Y")
maximum<-length(unique(new$newdate))
index<-vector(length=maximum)
for (i in 1:(maximum)){
index[i]<-length(which(new$newdate==(unique(new$newdate)[i])))}
result <- vector("list",maximum)
for (i in 1:(maximum)){
seldata<-subset(new, new$newdate==unique(new$newdate)[i])
result[[i]]<-log10(seldata[,-which(names(seldata) %in% c("datetime", "newdate"))])
}
ols.model<-vector("list", maximum)
mat<-matrix(nrow=2, ncol=ncomp)
for (j in 1:maximum){
mat.conc<-(result[j])
sel<-do.call(rbind, mat.conc)
for (i in 1:ncomp){
ols<-(lm(sel[,i+1]~sel[,1]))$coefficients
mat[,i]<-ols}
ols.model[[j]]<-mat}
db.u$newdate<-format(as.POSIXct(db.u$datetime), format="%Y")
maximum2<-length(unique(db.u$newdate))
index2<-vector(length=maximum2)
for (i in 1:(maximum2)){
index2[i]<-length(which(db.u$newdate==(unique(db.u$newdate)[i])))}
result2 <- vector("list",maximum2)
for (i in 1:(maximum2)){
seldata2<-subset(db.u, db.u$newdate==unique(db.u$newdate)[i])
result2[[i]]<-log10(seldata2[,-which(names(seldata2) %in% c("datetime", "newdate"))])
}
logconc<-vector("list", maximum2)
for (j in 1:maximum2){
coefficic<-ols.model[j]
coeffsel<-do.call(rbind, coefficic)
flowsel<-result2[j]
sel<-do.call(rbind, flowsel)
matload<-matrix(nrow=nrow(sel), ncol=ncomp)
for(i in 1:ncomp){
matload[,i]<-10^(as.matrix(((coeffsel[1,i]))+coeffsel[2,i]*((sel$flow))))}
logconc[[j]]<-matload}
concfinal<-do.call(rbind, logconc)
colnames(concfinal)<-c(names(db.u[3:(ncomp+2)]))
concdate<-cbind.data.frame(db.u$datetime, concfinal)
colnames(concdate)[1]<-c("datetime")
n<-nrow(db.u)
concdate$newdate<-format(as.POSIXct(concdate$datetime), format="%Y-%m-%d")
dateday<-aggregate(concdate[,2]~newdate, concdate, mean)
colnames(dateday)[1]<-c("datetime")
agg.dataC<-matrix(nrow=nrow(dateday), ncol=(ncomp))
for (i in 1:ncomp) {
agg.data<-aggregate(concdate[,i+1]~newdate, concdate, mean)
agg.dataC[,i]<-as.matrix(agg.data[,2])
concent<-cbind.data.frame(dateday$datetime, agg.dataC)
colnames(concent)[1]<-"datetime"
colnames(concent)[2:(ncomp+1)]<-c(names(db)[3:(ncomp+2)])}
db.u$newdate<-format(as.POSIXct(db.u$datetime), format="%Y-%m-%d")
agg.dataQ<-aggregate(flow~newdate, db.u, mean)
agg.dataQ$newdate<-as.POSIXct(agg.dataQ$newdate, format = c("%Y-%m-%d"))
prodCQ<-as.matrix(concent[,-which(names(concent) %in% c("datetime"))]*(agg.dataQ[,"flow"]*86400))
prodCQdate<-cbind.data.frame(agg.dataQ[,1], prodCQ)
colnames(prodCQdate)[1]<-"newdate"
prodCQdate$newdate<-format(as.POSIXct(prodCQdate$newdate), format="%Y")
aggrg.data<-matrix(nrow=length(unique(prodCQdate$newdate)), ncol=(ncomp))
for(i in 1:(ncomp)){
agg.init<-aggregate(prodCQdate[,i+1]~newdate, prodCQdate, sum)
aggrg.data[,i]<-agg.init[,2]
colnames(aggrg.data)<-c(names(db.u)[3:(ncomp+2)])
rownames(aggrg.data)<-agg.init[,1]}
return(aggrg.data)}} |
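# senWilcox: sensitivity analysis for Wilcoxon's signed rank test applied to
# matched pair differences 'd', with sensitivity parameter gamma >= 1;
# optionally returns a point estimate and confidence interval for an additive
# treatment effect.
# Example usage (illustrative):
# d <- rnorm(50, mean = 0.5)
# senWilcox(d, gamma = 2, conf.int = TRUE, alternative = "greater")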
senWilcox<-function(d,gamma=1,conf.int=FALSE,alpha=0.05,alternative="greater"){
stopifnot(is.vector(d)&(length(d)>1))
stopifnot(is.vector(gamma)&(length(gamma)==1)&(gamma>=1))
stopifnot((alternative=="greater")|(alternative=="less")|(alternative=="twosided"))
stopifnot(is.vector(alpha)&(length(alpha)==1)&(alpha>0)&(alpha<1))
pr<-gamma/(1+gamma)
if (alternative=="twosided") crit<-stats::qnorm(1-(alpha/2))
else crit<-stats::qnorm(1-(alpha))
int<-c(min(d),max(d))
devu<-function(taus){
ntaus<-length(taus)
res<-rep(NA,ntaus)
for (i in 1:ntaus){
dt<-d-taus[i]
adt<-abs(dt)
rk<-rank(adt)*(adt>0)
sg<-1*(dt>0)
ts<-sum(sg*rk)
ex<-sum(pr*rk)
va<-sum(rk*rk)*pr*(1-pr)
res[i]<-(ts-ex)/sqrt(va)
}
res
}
devl<-function(taus){
ntaus<-length(taus)
res<-rep(NA,ntaus)
for (i in 1:ntaus){
dt<-taus[i]-d
adt<-abs(dt)
rk<-rank(adt)*(adt>0)
sg<-1*(dt>0)
ts<-sum(sg*rk)
ex<-sum(pr*rk)
va<-sum(rk*rk)*pr*(1-pr)
res[i]<-(ts-ex)/sqrt(va)
}
res
}
if (alternative=="greater") pval <- 1-stats::pnorm(devu(0))
else if (alternative=="less") pval <- 1-stats::pnorm(devl(0))
else {
pvall <- 1-stats::pnorm(devl(0))
pvalu <- 1-stats::pnorm(devu(0))
pval <- min(1,2*min(pvall,pvalu))
}
estimate<-c(-Inf,Inf)
names(estimate)<-c("low","high")
ci<-estimate
if ((alternative!="less")&(conf.int==TRUE)) {
estimate[1]<-stats::uniroot(devu,int)$root
devuCI<-function(taus){devu(taus)-crit}
ci[1]<-stats::uniroot(devuCI,int)$root
}
if ((alternative!="greater")&(conf.int==TRUE)) {
estimate[2]<-stats::uniroot(devl,int)$root
devuCI<-function(taus){devl(taus)-crit}
ci[2]<-stats::uniroot(devuCI,int)$root
}
if (conf.int==TRUE) list(pval=pval,estimate=estimate,ci=ci)
else list(pval=pval)
} |
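# get_parameters(): S3 generic returning a data.frame with the parameter names
# and point estimates of a fitted model; the methods below cover many model
# classes, some adding a Component, Response or Group column.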
get_parameters <- function(x, ...) {
UseMethod("get_parameters")
}
get_parameters.default <- function(x, verbose = TRUE, ...) {
if (inherits(x, "list") && .obj_has_name(x, "gam")) {
x <- x$gam
class(x) <- c(class(x), c("glm", "lm"))
return(get_parameters.gam(x, ...))
}
tryCatch(
{
cf <- stats::coef(x)
params <- names(cf)
if (is.null(params)) {
        params <- as.character(seq_along(cf))
}
params <- data.frame(
Parameter = params,
Estimate = unname(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
},
    error = function(e) {
      if (isTRUE(verbose)) {
        warning(sprintf("Parameters can't be retrieved for objects of class '%s'.", class(x)[1]), call. = FALSE)
      }
      return(NULL)
    }
)
}
get_parameters.summary.lm <- function(x, ...) {
cf <- stats::coef(x)
params <- data.frame(
Parameter = names(cf[, 1]),
Estimate = unname(cf[, 1]),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.data.frame <- function(x, ...) {
stop("A data frame is no valid object for this function")
}
get_parameters.rms <- get_parameters.default
get_parameters.tobit <- get_parameters.default
get_parameters.model_fit <- function(x, ...) {
get_parameters(x$fit, ...)
}
get_parameters.bfsl <- function(x, ...) {
cf <- stats::coef(x)
params <- data.frame(
Parameter = rownames(cf),
Estimate = unname(cf[, "Estimate"]),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.selection <- function(x, component = c("all", "selection", "outcome", "auxiliary"), ...) {
component <- match.arg(component)
s <- summary(x)
rn <- row.names(s$estimate)
estimates <- as.data.frame(s$estimate, row.names = FALSE)
params <- data.frame(
Parameter = rn,
Estimate = estimates[[1]],
Component = "auxiliary",
stringsAsFactors = FALSE,
row.names = NULL
)
params$Component[s$param$index$betaS] <- "selection"
params$Component[s$param$index$betaO] <- "outcome"
if (component != "all") {
params <- params[params$Component == component, , drop = FALSE]
}
text_remove_backticks(params)
}
get_parameters.epi.2by2 <- function(x, ...) {
coef_names <- grepl("^([^NNT]*)(\\.strata\\.wald)", names(x$massoc.detail), perl = TRUE)
cf <- x$massoc.detail[coef_names]
names(cf) <- gsub(".strata.wald", "", names(cf), fixed = TRUE)
params <- data.frame(
Parameter = names(cf),
Estimate = unname(unlist(lapply(cf, function(i) i["est"]))),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.Rchoice <- function(x, ...) {
cf <- stats::coef(x)
params <- data.frame(
Parameter = find_parameters(x, flatten = TRUE),
Estimate = as.vector(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.btergm <- function(x, ...) {
cf <- x@coef
params <- data.frame(
Parameter = names(cf),
Estimate = as.vector(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.mediate <- function(x, ...) {
info <- model_info(x$model.y, verbose = FALSE)
if (info$is_linear && !x$INT) {
out <- data.frame(
Parameter = c("ACME", "ADE", "Total Effect", "Prop. Mediated"),
Estimate = c(x$d1, x$z0, x$tau.coef, x$n0),
stringsAsFactors = FALSE
)
} else {
out <- data.frame(
Parameter = c(
"ACME (control)", "ACME (treated)", "ADE (control)",
"ADE (treated)", "Total Effect", "Prop. Mediated (control)",
"Prop. Mediated (treated)", "ACME (average)", "ADE (average)",
"Prop. Mediated (average)"
),
Estimate = c(x$d0, x$d1, x$z0, x$z1, x$tau.coef, x$n0, x$n1, x$d.avg, x$z.avg, x$n.avg),
stringsAsFactors = FALSE
)
}
text_remove_backticks(out)
}
get_parameters.ridgelm <- function(x, ...) {
out <- data.frame(
Parameter = names(x$coef),
Estimate = as.vector(x$coef),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.ivFixed <- function(x, ...) {
out <- data.frame(
Parameter = rownames(x$coefficients),
Estimate = as.vector(x$coefficients),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.ivprobit <- function(x, ...) {
out <- data.frame(
Parameter = x$names,
Estimate = as.vector(x$coefficients),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.survreg <- function(x, ...) {
s <- summary(x)
out <- data.frame(
Parameter = rownames(s$table),
Estimate = as.vector(s$table[, 1]),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.riskRegression <- function(x, ...) {
junk <- utils::capture.output(cs <- stats::coef(x))
out <- data.frame(
Parameter = as.vector(cs[, 1]),
Estimate = as.numeric(cs[, 2]),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.mipo <- function(x, ...) {
out <- data.frame(
Parameter = as.vector(summary(x)$term),
Estimate = as.vector(summary(x)$estimate),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.mira <- function(x, ...) {
check_if_installed("mice")
get_parameters(mice::pool(x), ...)
}
get_parameters.margins <- function(x, ...) {
s <- summary(x)
param <- as.vector(s$factor)
estimate_pos <- which(colnames(s) == "AME")
if (estimate_pos > 2) {
out <- s[1:(estimate_pos - 1)]
r <- apply(out, 1, function(i) paste0(colnames(out), " [", i, "]"))
param <- unname(sapply(as.data.frame(r), paste, collapse = ", "))
}
out <- data.frame(
Parameter = param,
Estimate = as.vector(summary(x)$AME),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.glht <- function(x, ...) {
s <- summary(x)
alt <- switch(x$alternative,
two.sided = "==",
less = ">=",
greater = "<="
)
out <- data.frame(
Parameter = paste(names(s$test$coefficients), alt, x$rhs),
Estimate = unname(s$test$coefficients),
stringsAsFactors = FALSE
)
text_remove_backticks(out)
}
get_parameters.mle2 <- function(x, ...) {
check_if_installed("bbmle")
s <- bbmle::summary(x)
params <- data.frame(
Parameter = names(s@coef[, 1]),
Estimate = unname(s@coef[, 1]),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.mle <- get_parameters.mle2
get_parameters.lrm <- function(x, ...) {
tryCatch(
{
cf <- stats::coef(x)
params <- data.frame(
Parameter = names(cf),
Estimate = unname(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
},
error = function(x) {
NULL
}
)
}
get_parameters.orm <- get_parameters.lrm
get_parameters.multinom <- function(x, ...) {
params <- stats::coef(x)
if (is.matrix(params)) {
out <- data.frame()
    for (i in seq_len(nrow(params))) {
out <- rbind(out, data.frame(
Parameter = colnames(params),
Estimate = unname(params[i, ]),
Response = rownames(params)[i],
stringsAsFactors = FALSE,
row.names = NULL
))
}
} else {
out <- data.frame(
Parameter = names(params),
Estimate = unname(params),
stringsAsFactors = FALSE,
row.names = NULL
)
}
text_remove_backticks(out)
}
get_parameters.brmultinom <- get_parameters.multinom
get_parameters.mlm <- function(x, ...) {
cs <- stats::coef(summary(x))
out <- lapply(names(cs), function(i) {
params <- data.frame(
Parameter = rownames(cs[[i]]),
Estimate = cs[[i]][, 1],
Response = gsub("^Response (.*)", "\\1", i),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
})
do.call(rbind, out)
}
get_parameters.gbm <- function(x, ...) {
s <- summary(x, plotit = FALSE)
params <- data.frame(
Parameter = as.character(s$var),
Estimate = s$rel.inf,
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.BBreg <- function(x, ...) {
pars <- summary(x)$coefficients
params <- data.frame(
Parameter = rownames(pars),
Estimate = pars[, "Estimate"],
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.rma <- function(x, ...) {
tryCatch(
{
cf <- stats::coef(x)
params <- data.frame(
Parameter = names(cf),
Estimate = unname(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
params$Parameter[grepl("intrcpt", params$Parameter)] <- "(Intercept)"
text_remove_backticks(params)
},
error = function(x) {
NULL
}
)
}
get_parameters.meta_random <- function(x, ...) {
tryCatch(
{
cf <- x$estimates
params <- data.frame(
Parameter = rownames(cf),
Estimate = unname(cf[, 1]),
stringsAsFactors = FALSE,
row.names = NULL
)
params$Parameter[grepl("d", params$Parameter)] <- "(Intercept)"
text_remove_backticks(params)
},
error = function(x) {
NULL
}
)
}
get_parameters.meta_fixed <- get_parameters.meta_random
get_parameters.meta_bma <- get_parameters.meta_random
get_parameters.metaplus <- function(x, ...) {
params <- data.frame(
Parameter = rownames(x$results),
Estimate = unname(x$results[, 1]),
stringsAsFactors = FALSE,
row.names = NULL
)
params$Parameter[grepl("muhat", params$Parameter)] <- "(Intercept)"
text_remove_backticks(params)
}
get_parameters.blavaan <- function(x, summary = FALSE, centrality = "mean", ...) {
check_if_installed("lavaan")
check_if_installed("blavaan")
draws <- blavaan::blavInspect(x, "draws")
posteriors <- as.data.frame(as.matrix(draws))
param_tab <- lavaan::parameterEstimates(x)
params <- paste0(param_tab$lhs, param_tab$op, param_tab$rhs)
coef_labels <- names(lavaan::coef(x))
if ("group" %in% colnames(param_tab) && .n_unique(param_tab$group) > 1) {
params <- paste0(params, " (group ", param_tab$group, ")")
groups <- grepl("(.*)\\.g(.*)", coef_labels)
coef_labels[!groups] <- paste0(coef_labels[!groups], " (group 1)")
coef_labels[groups] <- gsub("(.*)\\.g(.*)", "\\1 \\(group \\2\\)", coef_labels[groups])
}
are_labels <- !coef_labels %in% params
if (any(are_labels)) {
unique_labels <- unique(coef_labels[are_labels])
for (ll in seq_along(unique_labels)) {
coef_labels[coef_labels == unique_labels[ll]] <-
params[param_tab$label == unique_labels[ll]]
}
}
colnames(posteriors) <- coef_labels
if (isTRUE(summary)) {
posteriors <- .summary_of_posteriors(posteriors, centrality = centrality)
posteriors$Component <- NA
posteriors$Component[grepl("=~", posteriors$Parameter, fixed = TRUE)] <- "latent"
posteriors$Component[grepl("~~", posteriors$Parameter, fixed = TRUE)] <- "residual"
posteriors$Component[grepl("~1", posteriors$Parameter, fixed = TRUE)] <- "intercept"
posteriors$Component[is.na(posteriors$Component)] <- "regression"
}
posteriors
}
get_parameters.lavaan <- function(x, ...) {
check_if_installed("lavaan")
params <- lavaan::parameterEstimates(x)
params$parameter <- paste0(params$lhs, params$op, params$rhs)
params$comp <- NA
params$comp[params$op == "~"] <- "regression"
params$comp[params$op == "=~"] <- "latent"
params$comp[params$op == "~~"] <- "residual"
params$comp[params$op == "~1"] <- "intercept"
params <- data.frame(
Parameter = params$parameter,
Estimate = params$est,
Component = params$comp,
stringsAsFactors = FALSE
)
text_remove_backticks(params)
}
get_parameters.polr <- function(x, ...) {
pars <- c(sprintf("Intercept: %s", names(x$zeta)), names(x$coefficients))
params <- data.frame(
Parameter = pars,
Estimate = c(unname(x$zeta), unname(x$coefficients)),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.bracl <- function(x, ...) {
pars <- stats::coef(x)
params <- data.frame(
Parameter = names(pars),
Estimate = unname(pars),
Response = gsub("(.*):(.*)", "\\1", names(pars)),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.aov <- function(x, ...) {
cf <- stats::coef(x)
params <- data.frame(
Parameter = names(cf),
Estimate = unname(cf),
stringsAsFactors = FALSE,
row.names = NULL
)
text_remove_backticks(params)
}
get_parameters.aovlist <- function(x, ...) {
cs <- stats::coef(x)
out <- do.call(rbind, lapply(names(cs), function(i) {
params <- data.frame(
Parameter = names(cs[[i]]),
Estimate = unname(cs[[i]]),
Group = i,
stringsAsFactors = FALSE
)
text_remove_backticks(params)
}))
rownames(out) <- NULL
out
}
get_parameters.manova <- function(x, ...) {
params <- stats::na.omit(stats::coef(x))
out <- .gather(as.data.frame(params), names_to = "Response", values_to = "Estimate")
out$Parameter <- rownames(out)
out <- out[c("Parameter", "Estimate", "Response")]
rownames(out) <- NULL
pattern <- paste0("(", paste0(paste0(".", unique(out$Response)), collapse = "|"), ")$")
out$Parameter <- gsub(pattern, "", out$Parameter)
text_remove_backticks(out)
}
get_parameters.maov <- get_parameters.manova
get_parameters.afex_aov <- function(x, ...) {
if (!is.null(x$aov)) {
get_parameters(x$aov, ...)
} else {
get_parameters(x$lm, ...)
}
}
get_parameters.pgmm <- function(x, component = c("conditional", "all"), ...) {
component <- match.arg(component)
cs <- stats::coef(summary(x, time.dummies = TRUE, robust = FALSE))
params <- data.frame(
Parameter = rownames(cs),
Estimate = unname(cs[, 1]),
Component = "conditional",
stringsAsFactors = FALSE,
row.names = NULL
)
params$Component[params$Parameter %in% x$args$namest] <- "time_dummies"
if (component == "conditional") {
params <- params[params$Component == "conditional", ]
params <- .remove_column(params, "Component")
}
text_remove_backticks(params)
}
.get_armsim_fixef_parms <- function(x) {
sn <- methods::slotNames(x)
as.data.frame(methods::slot(x, sn[1]))
}
.get_armsim_ranef_parms <- function(x) {
dat <- NULL
if (methods::.hasSlot(x, "ranef")) {
re <- x@ranef
dat <- data.frame()
    for (i in seq_along(re)) {
      dn <- dimnames(re[[i]])[[2]]
      cn <- dimnames(re[[i]])[[3]]
      l <- lapply(seq_along(dn), function(j) {
d <- as.data.frame(re[[i]][, j, ])
colnames(d) <- sprintf("%s.%s", cn, dn[j])
d
})
if (ncol(dat) == 0) {
dat <- do.call(cbind, l)
} else {
dat <- cbind(dat, do.call(cbind, l))
}
}
}
dat
} |
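# cmat.star: assemble the intermediate correlation matrix Sigma used to
# generate mixed ordinal/normal data with a target correlation matrix,
# combining ordinal-ordinal (IntermediateOO) and ordinal-normal
# (IntermediateON) blocks; a nearPD correction is applied when the assembled
# matrix is not positive definite.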
cmat.star <-
function(plist, CorrMat, no.ord, no.norm){
  if (no.norm==0 && no.ord>1) {
Sigma = IntermediateOO(plist, CorrMat)
}
  if (no.norm>1 && no.ord==0) {
Sigma = CorrMat
}
  if (no.norm==1 && no.ord==1) {
if ( validate.target.cormat(plist, CorrMat, no.ord, no.norm)) {
ON = IntermediateON(plist, CorrMat[(no.ord+1):nrow(CorrMat), 1:no.ord] )
Sigma = diag(2)
Sigma[lower.tri((Sigma))] = ON
Sigma = Sigma + t(Sigma)
diag(Sigma) = 1}
}
  if (no.norm>1 && no.ord==1) {
if ( validate.target.cormat(plist, CorrMat, no.ord, no.norm)) {
ON = IntermediateON(plist, CorrMat[(no.ord+1):nrow(CorrMat), 1:no.ord] )
NN = CorrMat[(no.ord+1):ncol(CorrMat), (no.ord+1):ncol(CorrMat) ]
Sigma = cbind(c(1,ON), rbind(ON,NN) )
if(!is.positive.definite(Sigma)){
warning( "Intermediate correlation matrix is not positive definite. A nearPD function is applied.")
Sigma=as.matrix(nearPD(Sigma, corr = TRUE, keepDiag = TRUE)$mat)
}
Sigma = ( Sigma+t(Sigma) )/2
}
}
  if (no.norm==1 && no.ord>1) {
if ( validate.target.cormat(plist, CorrMat, no.ord, no.norm)) {
OO = IntermediateOO(plist, CorrMat[1:no.ord,1:no.ord])
ON = IntermediateON(plist, CorrMat[(no.ord+1):nrow(CorrMat), 1:no.ord] )
Sigma = cbind(rbind(OO,ON), c(ON,1) )
if(!is.positive.definite(Sigma)){
warning( "Intermediate correlation matrix is not positive definite. A nearPD function is applied.")
Sigma=as.matrix(nearPD(Sigma, corr = TRUE, keepDiag = TRUE)$mat)
}
Sigma = ( Sigma+t(Sigma) )/2
}
}
  if (no.norm>1 && no.ord>1) {
if ( validate.target.cormat(plist, CorrMat, no.ord, no.norm)) {
OO = IntermediateOO(plist, CorrMat[1:no.ord,1:no.ord])
ON = IntermediateON(plist, CorrMat[(no.ord+1):nrow(CorrMat), 1:no.ord] )
NN = CorrMat[(no.ord+1):ncol(CorrMat), (no.ord+1):ncol(CorrMat) ]
Sigma = cbind(rbind(OO,ON), rbind(t(ON),NN) )
if(!is.positive.definite(Sigma)){
warning( "Intermediate correlation matrix is not positive definite. A nearPD function is applied.")
Sigma=as.matrix(nearPD(Sigma, corr = TRUE, keepDiag = TRUE)$mat)
}
Sigma = ( Sigma+t(Sigma) )/2
}
}
rownames(Sigma)<-NULL
return(Sigma)
} |
plot.reco <- function(x, ...){
x <- x[[1]]
plot(R ~ Temp, data=x$model, ...)
nd <- seq(min(x$model$Temp), max(x$model$Temp), length.out=100)
lines(predict(x, newdata=data.frame(Temp=nd)) ~ nd)
} |
info_sidra <- function(x, wb = FALSE) {
if (!is.logical(wb)) {
stop("'wb' argument must be TRUE or FALSE")
  } else if (!wb) {
a <- xml2::read_html(paste0("http://api.sidra.ibge.gov.br/desctabapi.aspx?c=", x))
tab1 = a %>%
rvest::html_nodes("
rvest::html_text()
tab2 = a %>%
rvest::html_nodes("
rvest::html_text()
table <- list("table" = paste0("Tabela ", tab1, ": ", tab2))
p1 = a %>%
rvest::html_nodes("
rvest::html_text()
period <- list("period" = p1)
v1 <- a %>% rvest::html_nodes("
rvest::html_text()
v2 <- a %>% rvest::html_table(fill = TRUE, trim = TRUE)
v2 <- v2[[2]]
v3 <- data.frame(cod = apply(v2, 1, stringr::str_extract,"[[:digit:]]+"),
desc = apply(v2, 1, stringr::str_replace_all, "([[:digit:]])", ""))
v3$cod <- stringr::str_trim(v3$cod)
v3$desc <- stringr::str_trim(v3$desc)
v3$desc <- stringr::str_replace(v3$desc, " - casas decimais: padr\uE3o = , m\uE1ximo =", "")
variables <- list("variable" = v3)
c1 <- rvest::html_nodes(a, "table") %>%
rvest::html_table(fill = TRUE, trim = TRUE) %>%
unlist() %>%
stringr::str_extract("\\C[0-9]+") %>%
stringr::str_subset("\\C[0-9]+") %>%
base::tolower()
if (length(c1) >= 1) {
lc1 <- length(c1)
c2 <- a %>% rvest::html_nodes(".tituloLinha:nth-child(4)") %>% rvest::html_text()
c3 <- a %>% rvest::html_nodes(".tituloLinha:nth-child(5)") %>% rvest::html_text()
c4 <- paste(c1, "=", c2, c3)
c5 <- list()
for (i in 0:(lc1-1)) {
c5[[i+1]] <- a %>% rvest::html_nodes(paste0("
rvest::html_text() %>% stringr::str_replace("\\[[^]]*]", "NA")
c5[[i+1]] <- c5[[i+1]][c5[[i+1]] != "NA"]
c5[[i+1]] <- data.frame(cod = c5[[i+1]][seq(1, length(c5[[i+1]]), 2)],
desc = c5[[i+1]][seq(2, length(c5[[i+1]]), 2)])
}
names(c5) <- c4
classific_category <- list("classific_category" = c5)
} else {
classific_category <- list("classific_category" = NULL)
}
trad.geo <- data.frame(cod = as.character(c("n1","n2","n3","n8","n9","n7","n13","n14","n15","n23","n6","n10",
"n11","n102")),
cod2 = as.character(c("Brazil","Region","State","MesoRegion","MicroRegion",
"MetroRegion","MetroRegionDiv","IRD","UrbAglo","PopArrang",
"City", "District","subdistrict","Neighborhood")),
level = c(1:14),
order = c(1:5, 10:14, 6:9))
n1 <- rvest::html_nodes(a, "table") %>%
rvest::html_table(fill = TRUE, trim = TRUE) %>%
unlist() %>%
stringr::str_extract("N[0-9]+") %>%
stringr::str_subset("N[0-9]+") %>%
tolower() %>%
as.data.frame()
n2 <- a %>% rvest::html_nodes("p+
n3 <- a %>% rvest::html_nodes("p+
n4 <- data.frame(desc = paste(n2, n3))
n5 <- cbind(n1, n4)
ngeo <- merge(trad.geo, n5, by.x = "cod", by.y = ".")
ngeo <- ngeo[c("cod2","desc")]
names(ngeo) <- c("cod","desc")
ngeo <- list(geo = ngeo)
info <- c(table, period, variables, classific_category, ngeo)
return(info)
  } else {
p <- readline(prompt = "Can the web browser be open? (y/n): ")
if (p == "y" | p == "Y") {
shell.exec(paste0("http://api.sidra.ibge.gov.br/desctabapi.aspx?c=", x))
} else {
stop(paste("Sorry, I need your permission to show the parameters of the table", x))
}
}
} |
renewDis <- function(ttf, ttr, time, n, printSummary=TRUE){
ttf <- as.numeric(ttf)
ttr <- as.numeric(ttr)
time <- as.numeric(time)
n <- as.integer(n)
if(length(time) != 1 || length(n) != 1)
stop("time and n should be of lenght 1")
res <- as.matrix(rep(as.numeric(NA), n), ncol=1)
for(i in 1:n){
tt <- 0
it <- 1
while(tt < time){
tt <- sum(c(tt, sample(ttf, 1)))
tt <- sum(c(tt, sample(ttr, 1)))
it <- it + 1
}
res[i,1] <- it
}
if(printSummary){
cat(paste(" The estimated MEAN NUMBER Of RENEWALS is", round(mean(res[,1]), 2)))
cat("\n")
cat("number of renewals EBD\n")
print(summary(c(res)))
}
invisible(as.numeric(res))
} |
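# WilliamsDesign.Equivalence: sample size for an equivalence test under a
# Williams crossover design, from the normal quantiles, the variability
# 'sigma', the number of sequences, the true difference 'delta' and the
# equivalence 'margin'.
# Example usage (illustrative values):
# WilliamsDesign.Equivalence(alpha = 0.05, beta = 0.2, sigma = 0.015,
#                            sequence = 4, delta = 0.05, margin = 0.2)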
WilliamsDesign.Equivalence <-
function(alpha,beta,sigma,sequence,delta,margin){
n<-(qnorm(1-alpha)+qnorm(1-beta/2))^2*sigma/(sequence*(margin-abs(delta))^2)
n
} |
add_timings <- function(
trajectory,
timings
) {
assert_that(is_data_wrapper(trajectory))
if (is.data.frame(timings)) {
timings <- tibble::deframe(timings)
}
  if (is.numeric(timings)) {
timings <- as.list(timings)
}
assert_that(is.list(timings))
trajectory %>% extend_with(
"dynwrap::with_timings",
timings = timings
)
}
is_wrapper_with_timings <- function(trajectory) {
is_data_wrapper(trajectory) && "dynwrap::with_timings" %in% class(trajectory)
}
add_timing_checkpoint <- function(timings, name) {
if (is.null(timings)) {
timings <- list()
}
timings[[name]] <- as.numeric(Sys.time())
timings
} |
source("RSquared.R")
library(LiblineaR)
set.seed(1)
N=10
df=data.frame(x1 = (1:N)/N*10 + 2*rnorm(N), x2 = (1:N)/N*10 + 2*rnorm(N), x3 = (1:N)/N*10 + 2*rnorm(N))
df$y.regr = apply(as.matrix(df),1,mean) + 2*rnorm(N)
df$y.logical = df$y.regr > 5.5
df$y.int = ifelse(df$y.logical, 1L, -1L)
df$y.double = as.double(df$y.int)
df$y.char = as.character(df$y.int)
df$y.factor = factor(df$y.int)
df$y.factorRev = factor(df$y.int, levels=rev(levels(df$y.factor)))
df$y.factorExtra = factor(df$y.int, levels=c(-1,1,99), labels=c("no","yes","maybe"))
df$y.multiclass = cut(df$y.regr, breaks=c(-99,4,7,99))
regrTargets = "y.regr"
classifTargets = setdiff(grep("^y",colnames(df), value=TRUE), regrTargets)
binTargets = setdiff(classifTargets,"y.multiclass")
testClassif = function(rev,yy,weighted,tt) {
cat("Testing",rev,yy,weighted,tt,"\n")
if(rev)
is=1:nrow(df)
else
is=nrow(df):1
nis = which(!df[is,"y.logical"])
if(weighted)
wi=c("1"=2,"TRUE"=2,"yes"=2, "(7,99]"=1,
"(4,7]"=50,
"-1"=100,"FALSE"=100,"no"=100,"(-99,4]"=150)
else
wi=NULL
y = df[is,yy]
x = df[is,1:3]
m = LiblineaR(x, y, type = tt, wi=wi)
p = predict(m, newx = x)
res=c(
type=tt,
target=yy,
weighted=weighted,
y1 = as.character(y[1]),
perf=(mean(as.character(y)==as.character(p$predictions))),
perfNeg=(mean(as.character(y[nis])==as.character(p$predictions[nis]))),
dimW=paste(dim(m$W), collapse = " "),
sumW=sum(m$W[,1:3]),
biasW=m$W[1,][["Bias"]],
classNames=paste(m$ClassNames, collapse = " "),
yLev=paste(levels(y), collapse=" "),
predLev=paste(levels(p$predictions), collapse=" "),
yClass=class(y),
predClass=class(p$predictions),
weights=paste(colnames(m$W),"=",round(m$W[1,],3),collapse = " ; ")
)
return(res)
}
testRegr = function(rev,yy,tt) {
cat("Testing",rev,yy,tt,"\n")
if(rev)
is=1:nrow(df)
else
is=nrow(df):1
y = df[is,yy]
x = df[is,1:3]
m = LiblineaR(x, y, type = tt, svr_eps=.1)
p = predict(m, newx = x)
res=c(
type=tt,
target=yy,
weighted=FALSE,
y1 = as.character(y[1]),
perf=RSquared(p$predictions, y),
perfNeg=0,
dimW=paste(dim(m$W), collapse = " "),
sumW=sum(m$W[,1:3]),
biasW=m$W[1,][["Bias"]],
classNames=paste(m$ClassNames, collapse = " "),
yLev=paste(levels(y), collapse=" "),
predLev=paste(levels(p$predictions), collapse=" "),
yClass=class(y),
predClass=class(p$predictions),
weights=paste(colnames(m$W),"=",round(m$W[1,],3),collapse = " ; ")
)
return(res)
}
allRes=NULL
for(tt in 0:7) {
for(weighted in c(FALSE,TRUE)) {
for(rev in c(FALSE,TRUE)) {
for (yy in classifTargets) {
res = testClassif(rev,yy,weighted,tt)
allRes=rbind(allRes,res)
}
}
}
}
for(tt in 11:13) {
for(rev in c(FALSE,TRUE)) {
for (yy in regrTargets) {
res = testRegr(rev,yy,tt)
allRes=rbind(allRes,res)
}
}
}
allRes = as.data.frame(allRes, stringsAsFactors = FALSE)
allRes$dimOK=(allRes$type=="4" | allRes$target=="y.multiclass" | allRes$dimW=="1 4")
allRes$perfOK=(allRes$target=="y.multiclass" & allRes$perf>=.6 |
ifelse(allRes$weighted, allRes$perfNeg>=.9, allRes$perf>=.75))
allRes$sumOK=(!allRes$target%in%c("y.int","y.double") | allRes$type=="4" | as.numeric(allRes$sumW)>0)
allRes$biasOK=(!allRes$target%in%c("y.int","y.double") | allRes$type=="4" | as.numeric(allRes$biasW)<0)
allRes$levelsOK=(allRes$target%in%c("y.char","y.double") | (allRes$yLev==allRes$predLev & allRes$yClass==allRes$predClass)) |
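# A quick way to surface failing checks from the grid above; column names are
# the ones defined in this script.
checkCols <- c("dimOK", "perfOK", "sumOK", "biasOK", "levelsOK")
failing <- allRes[!apply(allRes[, checkCols], 1, all), ]
if (nrow(failing) > 0) print(failing[, c("type", "target", "weighted", checkCols)])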
mdes.bcrd4r2 <- function(score = NULL, dists = "normal", k1 = -6, k2 = 6,
order = 1, interaction = FALSE, treat.lower = TRUE, cutoff = 0, p = NULL,
power = .80, alpha = .05, two.tailed = TRUE, df = n4 - g4 - 1,
rho2, rho3, rho4, omega3, omega4,
r21 = 0, r22 = 0, r2t3 = 0, r2t4 = 0, g4 = 0,
rate.tp = 1, rate.cc = 0, n1, n2, n3, n4) {
user.parms <- as.list(match.call())
.error.handler(user.parms)
if(df < 1) stop("Insufficient degrees of freedom", call. = FALSE)
if(!is.null(score) & order == 0) warning("Ignoring information from the 'score' object \n", call. = FALSE)
if(order == 0) {
d <- 1
if(is.null(p)) stop("'p' cannot be NULL in random assignment designs", call. = FALSE)
idx.score <- intersect(c("dists", "k1", "k2", "interaction", "treat.lower", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: Ignoring argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
cutoff <- if (treat.lower) p else 1 - p
interaction <- FALSE
dists <- "uniform"
k1 <- 0
k2 <- 1
} else if(order %in% 1:8) {
if(is.null(score)) {
score <- inspect.score(order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
if("p" %in% names(user.parms)) warning("Using 'p' from the 'score' object, ignoring 'p' in the function call", call. = FALSE)
if(!inherits(score, "score")) {
score <- inspect.score(score = score, order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
idx.score <- intersect(c("dists", "k1", "k2", "order", "interaction", "treat.lower", "p", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: 'score' object overwrites argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
}
}
d <- score$rdde
p <- score$p
cutoff <- score$cutoff
treat.lower <- score$treat.lower
order <- score$order
interaction <- score$interaction
dists <- score$parms$dists
k1 <- score$parms$k1
k2 <- score$parms$k2
} else if(order > 8) {
stop("'order' > 8 is not allowed", call. = FALSE)
}
sse <- (1/(rate.tp - rate.cc)) * sqrt(rho4 * omega4 * (1 - r2t4) / n4 +
rho3 * omega3 * (1 - r2t3) / (n4 * n3) +
d * (rho2 * (1 - r22) / (p * (1 - p) * n4 * n3 * n2) +
(1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n4 * n3 * n2 * n1)))
mdes <- .mdes(power, alpha, sse, df, two.tailed)
colnames(mdes) <- c("mdes", paste0(100 * (1 - round(alpha, 2)), "%lcl"),
paste0(100 * (1 - round(alpha, 2)), "%ucl"))
mdes.out <- list(parms = list(dists = dists, k1 = k1, k2 = k2,
order = order, interaction = interaction,
treat.lower = treat.lower, p = p, cutoff = cutoff,
power = power, alpha = alpha, two.tailed = two.tailed,
rho2 = rho2, rho3 = rho3, rho4 = rho4,
omega3 = omega3, omega4 = omega4,
r21 = r21, r22 = r22, r2t3 = r2t3, r2t4 = r2t4,
g4 = g4, rate.tp = rate.tp, rate.cc = rate.cc,
n1 = n1, n2 = n2, n3 = n3, n4 = n4),
df = df,
sse = sse,
mdes = mdes)
class(mdes.out) <- c("mdes", "bcrd4r2")
.summary.mdes(mdes.out)
return(invisible(mdes.out))
}
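# Illustrative call, assuming the package's internal helpers (.error.handler,
# .mdes, .summary.mdes) are available; they are not defined in this snippet.
# Design values are made up for demonstration. order = 0 requests the
# random-assignment special case, so 'p' must be supplied.
mdes.bcrd4r2(order = 0, p = .50,
             rho2 = .15, rho3 = .10, rho4 = .05,
             omega3 = .30, omega4 = .30,
             n1 = 20, n2 = 2, n3 = 10, n4 = 20)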
power.bcrd4r2 <- function(score = NULL, dists = "normal", k1 = -6, k2 = 6,
order = 1, interaction = FALSE, treat.lower = TRUE, cutoff = 0, p = NULL,
es = .25, alpha = .05, two.tailed = TRUE, df = n4 - g4 - 1,
rho2, rho3, rho4, omega3, omega4,
r21 = 0, r22 = 0, r2t3 = 0, r2t4 = 0, g4 = 0,
rate.tp = 1, rate.cc = 0, n1, n2, n3, n4) {
user.parms <- as.list(match.call())
.error.handler(user.parms)
if(df < 1) stop("Insufficient degrees of freedom", call. = FALSE)
if(!is.null(score) & order == 0) warning("Ignoring information from the 'score' object \n", call. = FALSE)
if(order == 0) {
d <- 1
if(is.null(p)) stop("'p' cannot be NULL in random assignment designs", call. = FALSE)
idx.score <- intersect(c("dists", "k1", "k2", "interaction", "treat.lower", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: Ignoring argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
cutoff <- if (treat.lower) p else 1 - p
interaction <- FALSE
dists <- "uniform"
k1 <- 0
k2 <- 1
} else if(order %in% 1:8) {
if(is.null(score)) {
score <- inspect.score(order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
if("p" %in% names(user.parms)) warning("Using 'p' from the 'score' object, ignoring 'p' in the function call", call. = FALSE)
if(!inherits(score, "score")) {
score <- inspect.score(score = score, order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
idx.score <- intersect(c("dists", "k1", "k2", "order", "interaction", "treat.lower", "p", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: 'score' object overwrites argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
}
}
d <- score$rdde
p <- score$p
cutoff <- score$cutoff
treat.lower <- score$treat.lower
order <- score$order
interaction <- score$interaction
dists <- score$parms$dists
k1 <- score$parms$k1
k2 <- score$parms$k2
} else if(order > 8) {
stop("'order' > 8 is not allowed", call. = FALSE)
}
sse <- (1/(rate.tp - rate.cc)) * sqrt(rho4 * omega4 * (1 - r2t4) / n4 +
rho3 * omega3 * (1 - r2t3) / (n4 * n3) +
d * (rho2 * (1 - r22) / (p * (1 - p) * n4 * n3 * n2) +
(1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n4 * n3 * n2 * n1)))
power <- .power(es, alpha, sse, df, two.tailed)
power.out <- list(parms = list(dists = dists, k1 = k1, k2 = k2,
order = order, interaction = interaction,
treat.lower = treat.lower, p = p, cutoff = cutoff,
es = es, alpha = alpha, two.tailed = two.tailed,
rho2 = rho2, rho3 = rho3, rho4 = rho4,
omega3 = omega3, omega4 = omega4,
r21 = r21, r22 = r22, r2t3 = r2t3, r2t4 = r2t4,
g4 = g4, rate.tp = rate.tp, rate.cc = rate.cc,
n1 = n1, n2 = n2, n3 = n3, n4 = n4),
df = df,
sse = sse,
power = power)
class(power.out) <- c("power", "bcrd4r2")
.summary.power(power.out)
return(invisible(power.out))
}
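# Companion call to the mdes example above: power for a fixed effect size
# under the same made-up design values (again relies on the package's
# internal helpers .power and .summary.power).
power.bcrd4r2(order = 0, p = .50, es = .25,
              rho2 = .15, rho3 = .10, rho4 = .05,
              omega3 = .30, omega4 = .30,
              n1 = 20, n2 = 2, n3 = 10, n4 = 20)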
cosa.bcrd4r2 <- function(score = NULL, dists = "normal", k1 = -6, k2 = 6, rhots = NULL,
order = 1, interaction = FALSE,
treat.lower = TRUE, cutoff = 0, p = NULL,
cn1 = 0, cn2 = 0, cn3 = 0, cn4 = 0, cost = NULL,
n1 = NULL, n2 = NULL, n3 = NULL, n4 = NULL,
n0 = c(10, 3, 100, 5 + g4), p0 = .499,
constrain = "power", round = TRUE, max.power = FALSE,
local.solver = c("LBFGS", "SLSQP"),
power = .80, es = .25, alpha = .05, two.tailed = TRUE,
rho2, rho3, rho4, omega3, omega4,
g4 = 0, r21 = 0, r22 = 0, r2t3 = 0, r2t4 = 0) {
user.parms <- as.list(match.call())
.error.handler(user.parms, fun = "cosa")
if(!is.null(rhots)) {
if(rhots == 0) {
if(order != 0) {
order <- 0
warning("'order' argument is ignored, forcing 'order = 0' because 'rhots = 0'", call. = FALSE)
}
} else {
stop("'rhots' argument will be removed in the future, arbitrary correlations are not allowed,
use inspect.score() function instead", call. = FALSE)
}
}
if(!is.null(score) & order == 0) warning("Ignoring information from the 'score' object \n", call. = FALSE)
if(order == 0) {
d <- 1
idx.score <- intersect(c("dists", "k1", "k2", "interaction", "treat.lower", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: Ignoring argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
cutoff <- NA
interaction <- FALSE
dists <- "uniform"
k1 <- 0
k2 <- 1
} else if(order %in% 1:8) {
if(is.null(score)) {
score <- inspect.score(order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
if("p" %in% names(user.parms)) warning("Using 'p' from the 'score' object, ignoring 'p' in the function call", call. = FALSE)
if(!inherits(score, "score")) {
score <- inspect.score(score = score, order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
p = p, k1 = k1, k2 = k2, dists = dists)
} else {
idx.score <- intersect(c("dists", "k1", "k2", "order", "interaction", "treat.lower", "p", "cutoff"), names(user.parms))
if(length(idx.score) > 0) cat("\nCAUTION: 'score' object overwrites argument(s):",
sQuote(names(user.parms[idx.score])), "\n")
}
}
d <- score$rdde
p <- score$p
cutoff <- score$cutoff
treat.lower <- score$treat.lower
order <- score$order
interaction <- score$interaction
dists <- score$parms$dists
k1 <- score$parms$k1
k2 <- score$parms$k2
} else if(order > 8) {
stop("'order' > 8 is not allowed", call. = FALSE)
}
fun <- "cosa.bcrd4r2"
lb <- c(1, 1, 1, g4 + 2)
if(!is.null(n4)) {
if(n4[1] < lb[4]) stop("Lower bound for 'n4' violate minimum degrees of freedom requirement", call. = FALSE)
}
.df <- quote(n4 - g4 - 1)
.sse <- quote(sqrt(rho4 * omega4 * (1 - r2t4) / n4 +
rho3 * omega3 * (1 - r2t3) / (n4 * n3) +
d * (rho2 * (1 - r22) / (p * (1 - p) * n4 * n3 * n2) +
(1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n4 * n3 * n2 * n1))))
.cost <- quote(n4 * cn4 +
n4 * n3 * cn3 +
n4 * n3 * n2 * (cn2[2] + p * (cn2[1] - cn2[2])) +
n4 * n3 * n2 * n1 * (cn1[2] + p * (cn1[1] - cn1[2])))
.var.jacob <- expression(
c(
-d * (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n2 * n3 * n4 * n1^2),
-d * rho2 * (1 - r22) / (p * (1 - p) * n2^2 * n3 * n4) -
d * (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n2^2 * n3 * n4 * n1),
-rho3 * omega3 * (1 - r2t3) / (n3^2 * n4) -
d * rho2 * (1 - r22) / (p * (1 - p) * n2 * n3^2 * n4) -
d * (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n2 * n3^2 * n4 * n1),
-rho4 * omega4 * (1 - r2t4) / n4^2 -
rho3 * omega3 * (1 - r2t3) / (n3 * n4^2) -
d * rho2 * (1 - r22) / (p * (1 - p) * n2 * n3 * n4^2) -
d * (1 - rho4 - rho3 - rho2) * (1 - r21) / (p * (1 - p) * n2 * n3 * n4^2 * n1),
-(1 - 2 * p) * d * rho2 * (1 - r22) / ((1 - p)^2 * p^2 * n2 * n3 * n4) -
(1 - 2 * p) * d * (1 - rho4 - rho3 - rho2) * (1 - r21) / ((1 - p)^2 * p^2 * n2 * n3 * n4 * n1)
)
)
.cost.jacob <- expression(
c(
n4 * n3 * n2 * (p * cn1[1] + (1 - p) * cn1[2]),
n4 * n3 * (p * cn2[1] + (1 - p) * cn2[2]) +
n4 * n3 * n1 * (p * cn1[1] + (1 - p) * cn1[2]),
n4 * cn3 +
n4 * n2 * (p * cn2[1] + (1 - p) * cn2[2]) +
n4 * n2 * n1 * (p * cn1[1] + (1 - p) * cn1[2]),
cn4 +
n3 * cn3 +
n3 * n2 * (p * cn2[1] + (1 - p) * cn2[2]) +
n3 * n2 * n1 * (p * cn1[1] + (1 - p) * cn1[2]),
n4 * n3 * n2 * (cn2[1] - cn2[2]) +
n4 * n3 * n2 * n1 * (cn1[1] - cn1[2])
)
)
if(all(cn1 == 0) & all(cn2 == 0) & is.null(p)) p <- .50
cosa <- .cosa(order = order, interaction = interaction,
cn1 = cn1, cn2 = cn2, cn3 = cn3, cn4 = cn4, cost = cost,
constrain = constrain, round = round,
max.power = max.power, local.solver = local.solver,
power = power, es = es, alpha = alpha, two.tailed = two.tailed,
rho2 = rho2, rho3 = rho3, rho4 = rho4,
omega3 = omega3, omega4 = omega4,
r21 = r21, r22 = r22, r2t3 = r2t3, r2t4 = r2t4,
g4 = g4, p0 = p0, p = p, n0 = n0,
n1 = n1, n2 = n2, n3 = n3, n4 = n4)
cosa.out <- list(parms = list(dists = dists, k1 = k1, k2 = k2,
order = order, interaction = interaction,
treat.lower = treat.lower, cutoff = cutoff,
cn1 = cn1, cn2 = cn2, cn3 = cn3, cn4 = cn4, cost = cost,
constrain = constrain, round = round,
max.power = max.power, local.solver = local.solver,
power = power, es = es, alpha = alpha, two.tailed = two.tailed,
rho2 = rho2, rho3 = rho3, rho4 = rho4,
omega3 = omega3, omega4 = omega4,
r21 = r21, r22 = r22, r2t3 = r2t3, r2t4 = r2t4,
g4 = g4, p0 = p0, p = p, n0 = n0,
n1 = n1, n2 = n2, n3 = n3, n4 = n4),
cosa = cosa)
class(cosa.out) <- c("cosa", "bcrd4r2")
.summary.cosa(cosa.out)
return(invisible(cosa.out))
} |
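# Sketch of a constrained optimal sample allocation call with hypothetical
# per-unit costs (treatment/control pairs for cn1 and cn2); relies on the
# package's internal .cosa solver and .summary.cosa, not defined here.
cosa.bcrd4r2(order = 0, constrain = "power", power = .80, es = .25,
             cn1 = c(5, 2), cn2 = c(20, 10), cn3 = 50, cn4 = 100,
             rho2 = .15, rho3 = .10, rho4 = .05,
             omega3 = .30, omega4 = .30)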
print.summary.ldbglm <- function(x, digits = 2, ...){
if (!inherits(x,"summary.ldbglm"))
stop("use only with \"summary.ldbglm\" objects")
x$call[[1]]<-as.name("ldbglm")
cat("\nCall: ", paste(deparse(x$call), sep = "\n", collapse = "\n"),
"\n", sep = "")
if (!is.character(x$family))
cat(gettextf("\nfamily: %s",x$family$family),"\n")
else
cat("\nfamily: gaussian\n")
cat("\nDeviance Residuals:\n")
print(summary(as.numeric(format(round(x$deviance.resid,digits=4)))))
if (x$family$family %in% c("poisson","binomial"))
cat(gettextf("\n(Dispersion parameter for %s family taken to be %i)",
x$family$family,x$dispersion),"\n")
else
cat(gettextf("\n(Dispersion parameter for %s family taken to be %f)",
x$family$family,x$dispersion),"\n")
cat(gettextf("\n Null deviance: %s on %i degrees of freedom",
format(round(x$null.deviance,digits)),x$df.null))
cat(gettextf("\nResidual deviance: %s on %s equivalent number of degrees of freedom",
format(round(x$residual.deviance,digits=digits)),format(round(x$df.residual,digits=digits))),"\n")
cat(gettextf("\nNumber of Observations: %i",x$nobs))
cat(gettextf("\nTrace of smoothing matrix: %s",format(round(x$trace.hat,2))),"\n")
cat("\nSummary of distances between data:\n")
print(x$summary.dist1)
if (x$method.h!="user.h")
cat(gettextf("\nOptimal bandwidth h : %f",x$h.opt),"\n")
else
cat(gettextf("\nUser bandwidth h : %f",x$h.opt),"\n")
cat(paste(gettextf("Percentile of bandwidth in the distance matrix= %s",
format(round(x$percentile.h.opt,2))),"%",sep=""),"\n\n")
if(!is.null(x$crit.value)){
cat("Bandwidth choice based on ",x$method.h,"\n")
cat(paste(x$method.h, "value criterion :", format(round(x$crit.value,digits)),"\n"))
}
cat(gettextf("\nKind of kernel= %s",x$kind.kernel),"\n")
cat("\n")
} |