context("Declarations: Complete Random Assignments")
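# Helper: conducts one assignment from `declaration`, then checks the assignment
# length, that condition probabilities are numeric, the expected treated count
# (esum), the expected probability (eprob), and that clusters are never split
# across conditions.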
test_declaration <- function(declaration, esum, eprob, conditions) {
Z <- conduct_ra(declaration)
if (!is.null(declaration$N))
expect_length(Z, declaration$N)
prob <-
obtain_condition_probabilities(declaration = declaration, assignment = conditions)
expect_true(is.numeric(prob))
if (!is.na(esum))
expect_equal(sum(Z), esum)
if (!is.na(eprob))
expect_true(all(prob == eprob))
if (is.vector(declaration$clusters)) {
expect_true(all(colSums(table(
Z, declaration$clusters
) != 0) == 1))
}
}
test_that("default", {
declaration <- declare_ra(N = 100)
test_declaration(declaration, 50, .5, 0:1)
})
test_that("N=101, prob=.34", {
declaration <- declare_ra(N = 100, prob = .34)
test_declaration(declaration, 34, .34, 1)
})
test_that("N=100, m=50", {
declaration <- declare_ra(N = 100, m = 50)
test_declaration(declaration, 50, .5, 0:1)
})
test_that("N=100, m_each", {
declaration <- declare_ra(
N = 100,
m_each = c(30, 70),
conditions = c("control", "treatment")
)
test_declaration(declaration, NA, .3, "control")
test_declaration(declaration, NA, .7, "treatment")
})
test_that("m_each=c(30, 30, 40)", {
declaration <- declare_ra(N = 100, m_each = c(30, 30, 40))
test_declaration(declaration, NA, .3, "T1")
test_declaration(declaration, NA, .3, "T2")
test_declaration(declaration, NA, .4, "T3")
})
test_that("named conditions m_Each", {
declaration <- declare_ra(
N = 100,
m_each = c(30, 30, 40),
conditions = c("control", "placebo", "treatment")
)
test_declaration(declaration, NA, .3, "control")
test_declaration(declaration, NA, .3, "placebo")
test_declaration(declaration, NA, .4, "treatment")
})
test_that("names", {
declaration <-
declare_ra(N = 100,
conditions = c("control", "placebo", "treatment"))
test_declaration(declaration, NA, 1 / 3, "control")
test_declaration(declaration, NA, 1 / 3, "placebo")
test_declaration(declaration, NA, 1 / 3, "treatment")
})
test_that("num_arms", {
declaration <- declare_ra(N = 100, num_arms = 3)
test_declaration(declaration, NA, 1 / 3, "T1")
test_declaration(declaration, NA, 1 / 3, "T2")
test_declaration(declaration, NA, 1 / 3, "T3")
})
test_that("simple + m fails", {
expect_error(declare_ra(N = 101, m = 34, simple = TRUE))
})
context("Declarations: Simple Random Assignments")
test_that("simple", {
declaration <- declare_ra(N = 100, simple = TRUE)
test_declaration(declaration, NA, .5, 0)
})
test_that("simple p = .4", {
declaration <- declare_ra(N = 100, prob = .4, simple = TRUE)
test_declaration(declaration, NA, .4, 1)
})
test_that("simple named prob each", {
declaration <- declare_ra(
N = 100,
prob_each = c(0.3, 0.7),
conditions = c("control", "treatment"),
simple = TRUE
)
test_declaration(declaration, NA, .3, "control")
test_declaration(declaration, NA, .7, "treatment")
})
test_that("simple num_arms = 3", {
declaration <- declare_ra(N = 100,
num_arms = 3,
simple = TRUE)
test_declaration(declaration, NA, 1 / 3, "T1")
test_declaration(declaration, NA, 1 / 3, "T2")
test_declaration(declaration, NA, 1 / 3, "T3")
})
test_that("simple 3 armed prob each", {
declaration <-
declare_ra(N = 100,
prob_each = c(0.3, 0.3, 0.4),
simple = TRUE)
test_declaration(declaration, NA, .3, "T1")
test_declaration(declaration, NA, .3, "T2")
test_declaration(declaration, NA, .4, "T3")
})
test_that("simple 3 arm prob each named", {
declaration <- declare_ra(
N = 100,
prob_each = c(0.3, 0.3, 0.4),
conditions = c("control", "placebo", "treatment"),
simple = TRUE
)
test_declaration(declaration, NA, .3, "control")
test_declaration(declaration, NA, .3, "placebo")
test_declaration(declaration, NA, .4, "treatment")
})
test_that("simple names 3 armed", {
declaration <-
declare_ra(
N = 100,
conditions = c("control", "placebo", "treatment"),
simple = TRUE
)
test_declaration(declaration, NA, 1 / 3, "control")
test_declaration(declaration, NA, 1 / 3, "placebo")
test_declaration(declaration, NA, 1 / 3, "treatment")
})
context("Declarations: Block Random Assignments")
test_that("Blocks default", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
declaration <- declare_ra(blocks = blocks)
test_declaration(declaration, 175, .5, 1)
})
test_that("Blocks default w/ factor", {
blocks <- gl(3, 100)
declaration <- declare_ra(blocks = blocks)
test_declaration(declaration, 150, .5, 1)
})
test_that("blocks m_each", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
block_m_each <- rbind(c(25, 25),
c(50, 50),
c(100, 100))
declaration <-
declare_ra(blocks = blocks, block_m_each = block_m_each)
test_declaration(declaration, 175, .5, 1)
})
test_that("block_m_each different", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
block_m_each <- rbind(c(10, 40),
c(30, 70),
c(50, 150))
declaration <-
declare_ra(blocks = blocks, block_m_each = block_m_each)
test_declaration(declaration, 260, NA, "A")
expect_equal(
obtain_condition_probabilities(declaration = declaration, assignment = 1)[c(1, 88, 175)],
c(.8, .7, .75)
)
})
test_that("block_m_eahc named", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
block_m_each <- rbind(c(10, 40),
c(30, 70),
c(50, 150))
declaration <-
declare_ra(
blocks = blocks,
block_m_each = block_m_each,
conditions = c("control", "treatment")
)
expect_equal(
obtain_condition_probabilities(declaration = declaration,
assignment = "treatment")[c(1, 88, 175)],
c(.8, .7, .75)
)
})
test_that("Three arm block_m_each", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
block_m_each <- rbind(c(10, 20, 20),
c(30, 50, 20),
c(50, 75, 75))
declaration <-
declare_ra(blocks = blocks, block_m_each = block_m_each)
test_declaration(declaration, NA, NA, "T1")
expect_equal(
obtain_condition_probabilities(declaration = declaration,
assignment = "T1")[c(1, 88, 175)],
c(.2, .3, .25)
)
})
test_that("blocks num_arms = 3 ", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
declaration <- declare_ra(blocks = blocks, num_arms = 3)
test_declaration(declaration, NA, 1 / 3, "T1")
expect_true(all(table(conduct_ra(declaration), blocks) > 10))
})
test_that("block_m_each named", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
block_m_each <- rbind(c(10, 20, 20),
c(30, 50, 20),
c(50, 75, 75))
declaration <-
declare_ra(
blocks = blocks,
block_m_each = block_m_each,
conditions = c("control", "placebo", "treatment")
)
test_declaration(declaration, NA, NA, "treatment")
})
test_that("blocks prob_each", {
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
declaration <-
declare_ra(blocks = blocks, prob_each = c(.1, .1, .8))
test_declaration(declaration, NA, .1, "T1")
})
context("Declarations: Cluster Random Assignments")
clusters <- rep(letters, times = 1:26)
test_that("Two Group clusters", {
declaration <- declare_ra(clusters = clusters)
test_declaration(declaration, NA, .5, 1)
})
test_that("clusters, m=13", {
declaration <- declare_ra(clusters = clusters, m = 13)
test_declaration(declaration, NA, .5, 1)
})
test_that("cluster m_each", {
declaration <- declare_ra(
clusters = clusters,
m_each = c(10, 16),
conditions = c("control", "treatment")
)
test_declaration(declaration, NA, 16 / 26, "treatment")
})
test_that("Multi-arm Designs", {
declaration <- declare_ra(clusters = clusters, num_arms = 3)
test_declaration(declaration, NA, 1 / 3, "T1")
})
test_that("clusters m_each three arms", {
declaration <- declare_ra(clusters = clusters, m_each = c(7, 7, 12))
test_declaration(declaration, NA, 7 / 26, "T1")
})
test_that("clusters m_each three arms named", {
declaration <- declare_ra(
clusters = clusters,
m_each = c(7, 7, 12),
conditions = c("control", "placebo", "treatment")
)
test_declaration(declaration, NA, 7 / 26, "placebo")
})
test_that("clusters three conditons", {
declaration <- declare_ra(clusters = clusters,
conditions = c("control", "placebo", "treatment"))
test_declaration(declaration, NA, 1 / 3, "placebo")
})
test_that("cluster prob_each three arm", {
declaration <-
declare_ra(clusters = clusters, prob_each = c(.1, .2, .7))
test_declaration(declaration, NA, .2, "T2")
})
context("Declarations: Blocked and Cluster Random Assignments")
clusters <- rep(letters, times = 1:26)
blocks <- rep(NA, length(clusters))
blocks[clusters %in% letters[1:5]] <- "block_1"
blocks[clusters %in% letters[6:10]] <- "block_2"
blocks[clusters %in% letters[11:15]] <- "block_3"
blocks[clusters %in% letters[16:20]] <- "block_4"
blocks[clusters %in% letters[21:26]] <- "block_5"
test_that("blocks, clusters", {
declaration <- declare_ra(clusters = clusters, blocks = blocks)
test_declaration(declaration, NA, .5, 1)
})
test_that("blocks, clusters numarm=3", {
declaration <-
declare_ra(clusters = clusters,
blocks = blocks,
num_arms = 3)
test_declaration(declaration, NA, 1 / 3, "T1")
})
test_that("blocks clusters probeach three arm", {
declaration <-
declare_ra(
clusters = clusters,
blocks = blocks,
prob_each = c(.2, .5, .3)
)
test_declaration(declaration, NA, .2, "T1")
})
test_that("block clusters block_m_each", {
block_m_each <- rbind(c(2, 3),
c(1, 4),
c(3, 2),
c(2, 3),
c(5, 1))
declaration <-
declare_ra(clusters = clusters,
blocks = blocks,
block_m_each = block_m_each)
test_declaration(declaration, NA, NA, "T1")
expect_equal(
obtain_condition_probabilities(declaration = declaration,
assignment = 1)[c(1, 23, 56, 122)],
c(.6, .8, .4, .6)
)
})
test_that("big permutation matrix", {
pm <- obtain_permutation_matrix(declare_ra(N = 12))
expect_equal(ncol(unique(pm, MARGIN = 2)), ncol(pm))
})
test_that("check errors", {
expect_error(declare_ra(clusters = c(1, 1, 1, 1), blocks = c(1, 2, 1, 2)))
expect_error(declare_ra(N = 9, blocks = c(1, 1, 2, 2)))
expect_error(declare_ra(prob = .2))
expect_error(declare_ra(
N = 4,
prob = .2,
prob_each = .3
))
})
test_that("check deprecations", {
d <- declare_ra(N = 10, n = 4)
expect_warning(d$ra_function())
expect_warning(d$ra_type)
expect_warning(d$cleaned_arguments)
})
test_that("conduct_ra auto-declare", {
expect_equal(conduct_ra(N = 1, prob = 1), 1)
expect_error(conduct_ra(sleep))
})
test_that("obtain_condition_probabilities auto-declare", {
expect_equal(obtain_condition_probabilities(assignment = 1), .5)
expect_error(obtain_condition_probabilities(sleep))
})
test_that("print and summary", {
d <- declare_ra(N = 10, n = 4)
expect_output(print(d))
expect_output(summary(d))
})
test_that("_unit",{
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
d <- declare_ra(blocks = blocks, prob_unit = rep(c(.1, .2, .3), c(50, 100, 200)))
expect_equal(table(blocks, conduct_ra(d)),
structure(
c(45L, 80L, 140L, 5L, 20L, 60L),
.Dim = 3:2,
.Dimnames = list(blocks = c("A", "B", "C"), c("0", "1")),
class = "table"
))
expect_error(declare_ra(blocks = blocks, prob_unit = rep(c(.1, .2, .3), c(200, 100, 50))))
})
|
context("select input")
test_that("id argument", {
expect_missing_id_error(selectInput())
expect_silent(selectInput("ID"))
})
test_that("choices argument", {
expect_error(selectInput("ID", 1, 1:2))
})
test_that("selected argument", {
expect_error(selectInput("ID", 1:3, selected = 1:2))
})
test_that("returns tag", {
expect_is(selectInput("ID", letters[1:3]), "shiny.tag")
})
test_that("map_* helper", {
items <- map_selectitems(1:3, 1:3, 1)
expect_length(items, 3)
})
test_that("has dependencies", {
expect_dependencies(selectInput("ID"))
})
|
FitCalibCoxRSInts<- function(w, w.res, Q, hz.times, n.int = 5, order = 2 , tm, event, pts.for.ints)
{
n.fail <- 0
n.int.org <- n.int
if (pts.for.ints[1] != 0) {pts.for.ints <- c(0, pts.for.ints)}
r <- length(pts.for.ints)
event.index <- which(event==1)
lr.for.fit.all <- as.data.frame(FindIntervalCalibCPP(w = w, wres = w.res))
Q.all <- Q
all.fit.cox.res <- list()
for (j in 1:r)
{
n.int <- n.int.org
point <- pts.for.ints[j]
lr.for.fit <- lr.for.fit.all[tm>=point, ]
Q <- Q.all[tm>=point, ]
Q <- Q[!(lr.for.fit[,1]==0 & lr.for.fit[,2]==Inf),]
lr.for.fit <- lr.for.fit[!(lr.for.fit[,1]==0 & lr.for.fit[,2]==Inf),]
colnames(lr.for.fit) <- c("left","right")
d1 <- lr.for.fit[,1]==0
d3 <- lr.for.fit[,2]==Inf
d2 <- 1 - d1 - d3
fit.cox.point <- tryCatch(ICsurv::fast.PH.ICsurv.EM(d1 = d1, d2 = d2, d3 = d3,Li = lr.for.fit[,1],
Ri = lr.for.fit[,2], n.int = n.int, order = order, Xp = Q, g0 =rep(1,n.int + order), b0 = rep(0,ncol(Q)),
t.seq = hz.times, tol = 0.001), error = function(e){e})
while(inherits(fit.cox.point, "error") & n.int >= 2) {
n.int <- n.int - 1
fit.cox.point <- tryCatch(ICsurv::fast.PH.ICsurv.EM(d1 = d1, d2 = d2, d3 = d3,Li = lr.for.fit[,1],
Ri = lr.for.fit[,2], n.int = n.int, order = order, Xp = Q, g0 =rep(1,n.int + order), b0 = rep(0,ncol(Q)),
t.seq = hz.times, tol = 0.001), error = function(e){e})
}
if (inherits(fit.cox.point, "error")) {
fit.cox.point <- FitCalibCox(w = w, w.res = w.res, Q = Q.all, hz.times = hz.times, n.int = n.int.org, order = order)
warning(paste("In point", point, "Calibration was used instead of risk set calibration"),immediate. = T)
n.fail <- n.fail + 1
} else {
if (max(abs(fit.cox.point$b)) > 3.5)
{
fit.cox.point <- FitCalibCox(w = w, w.res = w.res, Q = Q.all, hz.times = hz.times, n.int = n.int.org, order = order)
warning(paste("In point", point, "Calibration was used instead of risk set calibration"),immediate. = T)
n.fail <- n.fail + 1
} else {
ti <- c(lr.for.fit[d1 == 0,1], lr.for.fit[d3 == 0,2])
fit.cox.point$knots <- seq(min(ti) - 1e-05, max(ti) + 1e-05, length.out = (n.int + 2))
fit.cox.point$order <- order
}}
all.fit.cox.res[[j]] <- fit.cox.point
}
if (n.fail > 0) {warning(paste("In ", round(100*n.fail/r,0), "% of the event times there were no sufficient data to fit a risk-set calibration model"))}
if (n.fail/r > 0.5) stop("In more of 50% of the intervals there were no sufficient data to fit a risk-set calibration model")
return(all.fit.cox.res)
}
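## The error-recovery idiom above, isolated as a minimal sketch: retry a fit with
## fewer interior knots until it succeeds or n.int drops below 2. `risky_fit` is a
## hypothetical stand-in for the ICsurv::fast.PH.ICsurv.EM call.
fit_with_shrinking_knots <- function(risky_fit, n.int) {
  fit <- tryCatch(risky_fit(n.int), error = function(e) e)
  while (inherits(fit, "error") && n.int >= 2) {
    n.int <- n.int - 1
    fit <- tryCatch(risky_fit(n.int), error = function(e) e)
  }
  fit  # either a successful fit or the last error (n.int exhausted)
}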
|
"_PACKAGE"
proxy_prefun <- function(x, y, pairwise, params, reg_entry) {
params$pairwise <- pairwise
list(x = x, y = y, pairwise = pairwise, p = params, reg_entry = reg_entry)
}
.onLoad <- function(lib, pkg) {
if (!check_consistency("DTW2", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = dtw2_proxy, names=c("DTW2", "dtw2"),
loop = TRUE, type = "metric", distance = TRUE,
description = "DTW with L2 norm",
PACKAGE = "dtwclust")
if (!check_consistency("DTW_BASIC", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = dtw_basic_proxy, names=c("DTW_BASIC", "dtw_basic"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Basic and maybe faster DTW distance",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (!check_consistency("LB_Keogh", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = lb_keogh_proxy, names=c("LBK", "LB_Keogh", "lbk"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Keogh's DTW lower bound for the Sakoe-Chiba band",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (!check_consistency("LB_Improved", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = lb_improved_proxy, names=c("LBI", "LB_Improved", "lbi"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Lemire's improved DTW lower bound for the Sakoe-Chiba band",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (!check_consistency("SBD", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = sbd_proxy, names=c("SBD", "sbd"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Paparrizos and Gravanos' shape-based distance for time series",
PACKAGE = "dtwclust", PREFUN = proxy_prefun,
convert = function(d) { 2 - d })
if (!check_consistency("DTW_LB", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = dtw_lb, names=c("DTW_LB", "dtw_lb"),
loop = FALSE, type = "metric", distance = TRUE,
description = "DTW distance aided with Lemire's lower bound",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (!check_consistency("GAK", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = gak_proxy, names=c("GAK", "gak"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Fast (triangular) global alignment kernel distance",
PACKAGE = "dtwclust", PREFUN = proxy_prefun,
convert = function(d) { 1 - d })
if (!check_consistency("uGAK", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = gak_simil, names=c("uGAK", "ugak"),
loop = FALSE, type = "metric", distance = FALSE,
description = "Fast (triangular) global alignment kernel similarity",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (!check_consistency("sdtw", "dist", silent = TRUE))
proxy::pr_DB$set_entry(FUN = sdtw_proxy, names=c("sdtw", "SDTW", "soft-DTW"),
loop = FALSE, type = "metric", distance = TRUE,
description = "Soft-DTW",
PACKAGE = "dtwclust", PREFUN = proxy_prefun)
if (is.null(foreach::getDoParName())) foreach::registerDoSEQ()
}
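## Once registered, the entries are reachable through the proxy interface, e.g.
## (sketch, assuming two numeric series ts1 and ts2):
## dm <- proxy::dist(list(ts1, ts2), method = "dtw_basic", window.size = 10L)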
.onAttach <- function(lib, pkg) {
RNGkind(dtwclust_rngkind)
packageStartupMessage("dtwclust:\n",
"Setting random number generator to L'Ecuyer-CMRG (see RNGkind()).\n",
'To read the included vignettes type: browseVignettes("dtwclust").\n',
'See news(package = "dtwclust") after package updates.')
if (grepl("\\.9000$", utils::packageVersion("dtwclust")))
packageStartupMessage("This is a developer version of dtwclust.")
}
.onUnload <- function(libpath) {
if (check_consistency("DTW2", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("DTW2")
if (check_consistency("DTW_BASIC", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("DTW_BASIC")
if (check_consistency("LB_Keogh", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("LB_Keogh")
if (check_consistency("LB_Improved", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("LB_Improved")
if (check_consistency("SBD", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("SBD")
if (check_consistency("DTW_LB", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("DTW_LB")
if (check_consistency("GAK", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("GAK")
if (check_consistency("uGAK", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("uGAK")
if (check_consistency("sdtw", "dist", silent = TRUE)) proxy::pr_DB$delete_entry("sdtw")
library.dynam.unload("dtwclust", libpath)
}
release_questions <- function() {
c(
"Changed .Rbuildignore to exclude test rds files?",
"Built the binary with --compact-vignettes=both?",
"Set vignette's cache to FALSE?"
)
}
|
GetIntersects <- function(data, start_col, sets, num_sets){
end_col <- as.numeric(((start_col + num_sets) -1))
set_cols <- data[ ,start_col:end_col]
temp_data <- data[which(rowSums(data[ ,start_col:end_col]) == length(sets)), ]
unwanted <- colnames(set_cols[ ,!(colnames(set_cols) %in% sets), drop = F])
temp_data <- (temp_data[ ,!(colnames(data) %in% unwanted), drop = F])
new_end <- ((start_col + length(sets)) -1 )
if(new_end == start_col){
temp_data <- temp_data[ which(temp_data[ ,start_col] == 1), ]
return(temp_data)
}
else{
temp_data <- temp_data[ which(rowSums(temp_data[ ,start_col:new_end]) == length(sets)) , ]
return(temp_data)
}
}
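## Reading GetIntersects(): membership flags occupy columns
## start_col:(start_col + num_sets - 1); a row survives only if its total
## membership count equals length(sets) AND it belongs to every requested set,
## i.e. the exclusive intersection of `sets`.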
QuerieInterData <- function(query, data1, first_col, num_sets, data2, exp, names, palette){
rows <- data.frame()
if(length(query) == 0){
return(NULL)
}
for(i in 1:length(query)){
index_q <- unlist(query[[i]]$params)
inter_color <- query[[i]]$color
test <- as.character(index_q[1])
check <- match(test, names)
if(is.na(check) == T){
inter_data <- NULL
}
else{
for( i in 1:length(index_q)){
double_check <- match(index_q[i], names)
if(is.na(double_check) == T){
warning("Intersection or set may not be present in data set. Please refer to matrix.")
}
}
inter_data <- OverlayEdit(data1, data2, first_col, num_sets, index_q, exp, inter_color)
}
rows <- rbind(rows, inter_data)
}
if(nrow(rows) != 0){
rows <- cbind(rows$x, rows$color)
rows <- as.data.frame(rows)
colnames(rows) <- c("x", "color")
}
else{
rows <- NULL
}
return(rows)
}
QuerieInterBar <- function(q, data1, first_col, num_sets, data2, exp, names, palette){
rows <- data.frame()
act <- c()
if(length(q) == 0){
return(NULL)
}
for(i in 1:length(q)){
index_q <- unlist(q[[i]]$params)
inter_color <- q[[i]]$color
test <- as.character(index_q[1])
check <- match(test, names)
if(is.na(check) == T){
inter_data <- NULL
}
else{
inter_data <- OverlayEdit(data1, data2, first_col, num_sets, index_q, exp, inter_color)
}
if((isTRUE(q[[i]]$active) == T) && (is.null(inter_data) == F)){
act[i] <- T
}
else if((isTRUE(q[[i]]$active) == F) && (is.null(inter_data) == F)){
act[i] <- F
}
rows <- rbind(rows, inter_data)
}
rows <- cbind(rows, act)
return(rows)
}
QuerieInterAtt <- function(q, data, first_col, num_sets, att_x, att_y, exp, names, palette){
rows <- data.frame()
if(length(q) == 0){
return(NULL)
}
for(i in 1:length(q)){
index_q <- unlist(q[[i]]$params)
inter_color <- unlist(q[[i]]$color)
test <- as.character(index_q[1])
check <- match(test, names)
if(is.na(check) == T){
intersect <- NULL
}
else{
intersect <- GetIntersects(data, first_col, index_q, num_sets)
if(is.na(att_y[i]) == T){
if(is.null(exp) == F){
intersect <- Subset_att(intersect, exp)
}
if(nrow(intersect) != 0){
intersect$color <- inter_color
}
}
else if(is.na(att_y[i]) == F){
if(is.null(exp) == F){
intersect <- Subset_att(intersect, exp)
}
intersect$color <- inter_color
}
}
intersect <- intersect[ ,-which(names(intersect) %in% index_q)]
rows <- rbind(rows, intersect)
}
return(rows)
}
|
matchEnsembleMembers.ensembleMOSlognormal <-
function(fit, ensembleData)
{
if (!is.null(dim(fit$B))) {
fitMems <- dimnames(fit$B)[[1]]
}
else {
fitMems <- names(fit$B)
}
ensMems <- ensembleMemberLabels(ensembleData)
if (!is.null(fitMems) && !is.null(ensMems)
&& length(fitMems) > length(ensMems))
stop("model fit has more ensemble members than ensemble data")
WARN <- rep(FALSE,3)
WARN[1] <- is.null(fitMems) && !is.null(ensMems)
WARN[2] <- !is.null(fitMems) && is.null(ensMems)
WARN[3] <- is.null(fitMems) && is.null(ensMems)
if (any(WARN) && length(fitMems) != length(ensMems))
stop("model fit and ensemble data differ in ensemble size")
if (any(WARN))
warning("cannot check correspondence between model fit and ensemble data members")
M <- match(fitMems, ensMems, nomatch = 0)
if (any(!M)) stop("ensembleData is missing a member used in fit")
M
}
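## M maps each fit member label to its position among the ensembleData member
## labels; a 0 (no match) is ruled out by the stop() above.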
|
createObj<- function(obj,...)
{
UseMethod("createObj", obj)
}
createObj.default<- function(obj, data,...)
{
myobj<- GTree(obj$graph, Data = data)
return(myobj)
}
createObj.MLE<- function(obj, data)
{
myobj<- CovSelectTree(obj$graph, Data = data)
Ubar<- getNoDataNodes(myobj)
if (length(Ubar)!=0)
stop("Covariance Selection Model estimation is impossible with missing variables in the data.")
return(myobj)
}
createObj.HRMBG<- function(obj, data)
{
myobj<- BlockGraph(obj$graph, data)
return(myobj)
}
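## S3 dispatch sketch: objects of class "MLE" or "HRMBG" route to their methods,
## anything else to createObj.default (`g` and `mydata` are hypothetical):
## est <- structure(list(graph = g), class = "MLE")
## tree <- createObj(est, mydata)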
|
load(file.path("..", "data", "data-summary-rankEN.RData"))
context("summary function for rankEN")
test_that("binMS summary: compare outputs from binMS.format", {
expect_identical(out_v1, target_v1)
expect_identical(out_v2, target_v2)
})
|
tdmReadAndSplit <- function(opts,tdm,nExp=0,dset=NULL) {
tdm<-tdmDefaultsFill(tdm)
if (opts$READ.INI) {
if (is.null(dset)) {
dset <- tdmReadDataset(opts);
}
testit::assert ("tdmReadDataset does not return a data frame. Check opts$READ.TrnFn", is.data.frame(dset));
if (is.null(tdm$SPLIT.SEED)) {
theSeed=tdmRandomSeed();
} else {
theSeed=tdm$SPLIT.SEED;
}
if (tdm$umode=="SP_T") {
cvi <- splitTestTrnVa(opts,tdm,dset,theSeed,nExp);
} else if (tdm$umode=="TST") {
if (!any(names(dset)==opts$TST.COL)) {
stop(sprintf("Data frame dset does not contain a column opts$TST.COL=\"%s\". \n%s",
opts$TST.COL,"This might be due to a missing opts$READ.TstFn when using tdm$umode==\"TST\"."));
}
cvi=dset[,opts$TST.COL];
} else {
cvi = rep(0,nrow(dset));
}
dataObj <- list(
dset=dset
, cvi=cvi
, filename=opts$filename
, theSeed=theSeed
, opts = opts
, tdm = tdm
);
class(dataObj) <- "TDMdata";
checkData(dataObj,nExp,opts);
} else {
dataObj <- NULL;
}
dataObj;
}
tdmReadTaskData <- function(envT,tdm) {
opts <- tdmEnvTGetOpts(envT,1);
dataObj <- tdmReadAndSplit(opts,tdm);
}
checkData <- function(dataObj,nExp,opts) {
checkFactor <- function(set,txt,fact) {
for (i in which(fact==TRUE)) {
if (nlevels(set[,i])>32) {
strng = sprintf("Column %s of %s has %d levels. Consider to use tdmPreGroupLevels() or as.numeric().",names(dset)[i], txt,nlevels(set[,i]));
cat("NOTE:",strng);
warning(strng);
}
}
}
dset <- dsetTrnVa(dataObj,nExp);
tset <- dsetTest(dataObj,nExp);
testit::assert ("dsetTrnVa does not return a data frame. Check opts$READ.TrnFn", is.data.frame(dset));
testit::assert ("dsetTest does not return a data frame. Check opts$READ.TrnFn",
if (!is.null(tset)) {is.data.frame(tset)} else {TRUE});
dfactor <- sapply(dset,is.factor);
tfactor <- sapply(tset,is.factor);
if (any(dfactor!=tfactor)) {
w = which(dfactor!=tfactor);
strng = paste("dataObj has columns with different mode in train-vali- and test part:",sprintf("%s,",names(dset)[w]));
cat("NOTE:",strng);
warning(strng);
}
checkFactor(dset,"train-validation set",dfactor);
checkFactor(tset,"test set",tfactor);
if (!is.null(tset)) {
firstRows <- min(100,nrow(dset),nrow(tset))
activeCols <- setdiff(names(dset),opts$TST.COL)
if (all(dset[1:firstRows,activeCols]==tset[1:firstRows,activeCols])) {
warning(sprintf("Data sets dset and tset might be identical, since the first %d rows %s"
,firstRows,"are identical. \n Check the reading functions opts$READ.TrnFn and opts$READ.TstFn."))
}
}
}
splitTestTrnVa <- function(opts,tdm,dset,theSeed,nExp) {
if (exists(".Random.seed")) SAVESEED<-.Random.seed
if (is.null(tdm$TST.testFrac)) stop("tdm$TST.testFrac is NULL. Consider using 'tdm <- tdmDefaultsFill(tdm);'")
set.seed(theSeed+nExp);
L = nrow(dset);
if (is.null(L)) stop("No data"); if (L==0) stop("Empty data frame");
if (!is.null(tdm$stratified)) {
cat1(opts,opts$filename,": Stratified random test-trainVali-index w.r.t. variable",tdm$stratified
,"and with tdm$TST.testFrac = ",tdm$TST.testFrac*100,"%\n");
if (!any(names(dset)==tdm$stratified)) stop("The value of tdm$stratified does not match any column name in dset!");
rv <- dset[,tdm$stratified];
urv <- unique(rv);
tfr <- sapply(urv,function(x) { round((1-tdm$TST.testFrac)*length(which(rv==x))) });
tfr[tfr<1] <- 1;
cvi <- rep(1,L);
for (i in 1:length(urv)) cvi[ sample(which(rv==urv[i]), tfr[i]) ] <- 0;
} else {
p <- sample(L)
tfr <- (1-tdm$TST.testFrac)*L;
cat1(opts,opts$filename,": Setting data aside for testing with tdm$TST.testFrac = ",tdm$TST.testFrac*100,"%\n");
cvi <- rep(0,L);
cvi[p[(tfr+1):L]] <- 1;
}
cat1(opts,"*** from tdmReadAndSplit: *** \n");
wI <- which(cvi==1);
cat1(opts,"dset contains",L,"records; we put ",length(wI),"records aside into test set\n")
if (length(wI)<300) print1(opts,wI);
if (exists("SAVESEED")) assign(".Random.seed", SAVESEED, envir=globalenv());
return(cvi)
}
dsetTrnVa <- function(x,...) UseMethod("dsetTrnVa");
dsetTrnVa.default <- function(x,...) stop("Method dsetTrnVa only allowed for objects of class TDMdata. Consider opts$READ.INI=TRUE");
dsetTrnVa.TDMdata <- function(x,...) {
dots = list(...)
if (is.null(dots$nExp)) dots$nExp=0
if (x$tdm$umode=="SP_T") {
x$cvi <- splitTestTrnVa(x$opts,x$tdm,x$dset,x$theSeed,dots$nExp);
}
ind=which(x$cvi==0);
x$dset[ind,];
}
dsetTest <- function(x,...) UseMethod("dsetTest");
dsetTest.default <- function(x,...) stop("Method dsetTest only allowed for objects of class TDMdata");
dsetTest.TDMdata <- function(x,...) {
dots = list(...)
if (is.null(dots$nExp)) dots$nExp=0
if (x$tdm$umode=="SP_T") {
x$cvi <- splitTestTrnVa(x$opts,x$tdm,x$dset,x$theSeed,dots$nExp);
}
ind = which(x$cvi>0);
if (length(ind)==0) {
NULL;
} else {
x$dset[ind,];
}
}
print.TDMdata <- function(x,...) {
nTrn = length(which(x$cvi==0))
nTst = length(which(x$cvi==1))
cat(sprintf("TDMdata object with %d records (%d test, %d train-vali records).\n",nrow(x$dset),nTst,nTrn));
cat(sprintf("TDMdata object with %d variables:\n",length(x$dset)));
print(names(x$dset));
}
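## Typical flow (sketch, assuming opts and tdm prepared via tdmDefaultsFill with
## opts$READ.INI == TRUE):
## dataObj <- tdmReadAndSplit(opts, tdm)
## trnva   <- dsetTrnVa(dataObj)   # records with cvi == 0
## tst     <- dsetTest(dataObj)    # records with cvi > 0, or NULL if none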
|
summary.APLE <-
function(object, ...) {
if(names(object)[1] == "preVFS") {
results <- c(
AnnualErosionPRemoval = mean(object$pErosion),
AnnualErosionPRemovalsd = sd(object$pErosion),
AnnualTotalPRemoval = mean(object$pTotal),
AnnualTotalPRemovalsd = sd(object$pTotal),
AnnualLossErosionPre = mean(object$preVFS$lossErosion),
AnnualLossDissolvedSoilPre = mean(object$preVFS$lossDissolvedSoil),
AnnualLossDissolvedManurePre = mean(object$preVFS$lossDissolvedManure),
AnnualLossDissolvedFertPre = mean(object$preVFS$lossDissolvedFert),
AnnualLossTotalPre = mean(object$preVFS$lossTotal),
AnnualLossErosionPost = mean(object$postVFS$lossErosion),
AnnualLossDissolvedSoilPost = mean(object$postVFS$lossDissolvedSoil),
AnnualLossDissolvedManurePost = mean(object$postVFS$lossDissolvedManure),
AnnualLossDissolvedFertPost = mean(object$postVFS$lossDissolvedFert),
AnnualLossTotalPost = mean(object$postVFS$lossTotal))
} else {
results <- c(
AnnualLossErosion = mean(object$lossErosion),
AnnualLossDissolvedSoil = mean(object$lossDissolvedSoil),
AnnualLossDissolvedManure = mean(object$lossDissolvedManure),
AnnualLossDissolvedFert = mean(object$lossDissolvedFert),
AnnualLossTotal = mean(object$lossTotal))
}
results
}
|
phiMethods <- c("extremes","range")
phi.setup <- function(y, method = phiMethods,
extr.type = NULL, coef=1.5, control.pts = NULL) {
method <- match.arg(method, phiMethods)
control.pts <- do.call(paste("phi",method,sep="."),
c(list(y=y), extr.type = extr.type,
list(control.pts=control.pts),coef = coef))
list(method = method,
npts = control.pts$npts, control.pts = control.pts$control.pts)
}
phi.control <- function(y, method="extremes", extr.type="both", coef=1.5, control.pts=NULL) {
call <- match.call()
phiP <- phi.setup(y, method, extr.type, coef, control.pts)
phiP
}
phi.extremes <- function(y, extr.type = c("both","high","low"), control.pts,
coef=1.5) {
extr.type <- match.arg(extr.type)
control.pts <- NULL
extr <- boxplot.stats(y,coef=coef)
r <- range(y)
if(extr.type %in% c("both","low") &&
any(extr$out < extr$stats[1])) {
control.pts <- rbind(control.pts,c(extr$stats[1],1,0))
} else {
control.pts <- rbind(control.pts,c(r[1],0,0))
}
if(extr$stats[3]!= r[1]){
control.pts <- rbind(control.pts,c(extr$stats[3],0,0))
}
if(extr.type %in% c("both","high") &&
any(extr$out > extr$stats[5])) {
control.pts <- rbind(control.pts,c(extr$stats[5],1,0))
} else {
if(extr$stats[3] != r[2]){
control.pts <- rbind(control.pts,c(r[2],0,0))
}
}
npts <- NROW(control.pts)
list(npts = npts,
control.pts = as.numeric(t(control.pts)))
}
phi.range <- function(y, extr.type, coef, control.pts, ...) {
if(!is.null(names(control.pts)))
control.pts <- matrix(control.pts$control.pts,nrow=control.pts$npts,byrow=T)
extr.type <- NULL
coef <- NULL
if(missing(control.pts) || !is.matrix(control.pts) ||
(NCOL(control.pts) > 3 || NCOL(control.pts) < 2))
stop('The control.pts must be given as a matrix in the form: \n',
'< x, y, m > or, alternatively, < x, y >')
npts <- NROW(control.pts)
dx <- control.pts[-1L,1L] - control.pts[-npts,1L]
if(any(is.na(dx)) || any(dx == 0))
stop("'x' must be *strictly* increasing (non - NA)")
if(any(control.pts[,2L] > 1 | control.pts[,2L] < 0))
stop("phi relevance function maps values only in [0,1]")
control.pts <- control.pts[order(control.pts[,1L]),]
if(NCOL(control.pts) == 2) {
dx <- control.pts[-1L,1L] - control.pts[-npts,1L]
dy <- control.pts[-1L,2L] - control.pts[-npts,2L]
Sx <- dy / dx
m <- c(0, (Sx[-1L] + Sx[-(npts-1)]) / 2, 0)
control.pts <- cbind(control.pts,m)
}
r <- range(y)
npts <- NROW(matrix(control.pts,ncol=3))
list(npts = npts,
control.pts = as.numeric(t(control.pts)))
}
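## Example (sketch): control points derived from the boxplot extremes of y.
## y <- c(rnorm(100), 10)                               # one high outlier
## p <- phi.control(y)                                  # method = "extremes"
## matrix(p$control.pts, nrow = p$npts, byrow = TRUE)   # columns: x, phi(x), slope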
|
InterSIM <- function(n.sample=500,cluster.sample.prop=c(0.30,0.30,0.40),
delta.methyl=2.0,delta.expr=2.0,delta.protein=2.0,
p.DMP=0.2,p.DEG=NULL,p.DEP=NULL,
sigma.methyl=NULL,sigma.expr=NULL,sigma.protein=NULL,
cor.methyl.expr=NULL,cor.expr.protein=NULL,
do.plot=FALSE, sample.cluster=TRUE, feature.cluster=TRUE)
{
if (sum(cluster.sample.prop)!=1) stop("The proportions must sum up to 1")
if (length(cluster.sample.prop)<=1) stop("Number of proportions must be larger than 1")
if (p.DMP<0 | p.DMP>1) stop("p.DMP must be between 0 and 1")
if (!is.null(p.DEG) && (p.DEG<0 | p.DEG>1)) stop("p.DEG must be between 0 and 1")
if (!is.null(p.DEP) && (p.DEP<0 | p.DEP>1)) stop("p.DEP must be between 0 and 1")
n.cluster <- length(cluster.sample.prop)
n.sample.in.cluster <- c(round(cluster.sample.prop[-n.cluster]*n.sample),
n.sample - sum(round(cluster.sample.prop[-n.cluster]*n.sample)))
cluster.id <- do.call(c,sapply(1:n.cluster, function(x) rep(x,n.sample.in.cluster[x])))
n.CpG <- ncol(cov.M)
if (!is.null(sigma.methyl)){
if (sigma.methyl=="indep") cov.str <- diag(diag(cov.M))
else cov.str <- sigma.methyl
} else cov.str <- cov.M
DMP <- sapply(1:n.cluster,function(x) rbinom(n.CpG, 1, prob = p.DMP))
rownames(DMP) <- names(mean.M)
d <- lapply(1:n.cluster,function(i) {
effect <- mean.M + DMP[,i]*delta.methyl
mvrnorm(n=n.sample.in.cluster[i], mu=effect, Sigma=cov.str)})
sim.methyl <- do.call(rbind,d)
sim.methyl <- rev.logit(sim.methyl)
n.gene <- ncol(cov.expr)
if (!is.null(sigma.expr)){
if (sigma.expr=="indep") cov.str <- diag(diag(cov.expr))
else cov.str <- sigma.expr
} else cov.str <- cov.expr
if (!is.null(cor.methyl.expr)){
rho.m.e <- cor.methyl.expr
} else rho.m.e <- rho.methyl.expr
if (!is.null(p.DEG)){
DEG <- sapply(1:n.cluster,function(x) rbinom(n.gene, 1, prob = p.DEG))
rownames(DEG) <- names(mean.expr)
} else { DEG <- sapply(1:n.cluster,function(x){
cg.name <- rownames(subset(DMP,DMP[,x]==1))
gene.name <- as.character(CpG.gene.map.for.DEG[cg.name,]$tmp.gene)
as.numeric(names(mean.expr) %in% gene.name)})
rownames(DEG) <- names(mean.expr)}
if(delta.expr==0) rho.m.e <- 0
d <- lapply(1:n.cluster,function(i) {
effect <- (rho.m.e*methyl.gene.level.mean+sqrt(1-rho.m.e^2)*mean.expr) + DEG[,i]*delta.expr
mvrnorm(n=n.sample.in.cluster[i], mu=effect, Sigma=cov.str)})
sim.expr <- do.call(rbind,d)
n.protein <- ncol(cov.protein)
if (!is.null(sigma.protein)){
if (sigma.protein=="indep") cov.str <- diag(diag(cov.protein))
else cov.str <- sigma.protein
} else cov.str <- cov.protein
if (!is.null(cor.expr.protein)){
rho.e.p <- cor.expr.protein
} else rho.e.p <- rho.expr.protein
if (!is.null(p.DEP)){
DEP <- sapply(1:n.cluster,function(x) rbinom(n.protein, 1, prob = p.DEP))
rownames(DEP) <- names(mean.protein)
} else { DEP <- sapply(1:n.cluster,function(x){
gene.name <- rownames(subset(DEG,DEG[,x]==1))
protein.name <- rownames(protein.gene.map.for.DEP[protein.gene.map.for.DEP$gene %in% gene.name,])
as.numeric(names(mean.protein) %in% protein.name)})
rownames(DEP) <- names(mean.protein)}
if(delta.protein==0) rho.e.p <- 0
d <- lapply(1:n.cluster,function(i) {
effect <- (rho.e.p*mean.expr.with.mapped.protein+sqrt(1-rho.e.p^2)*mean.protein) + DEP[,i]*delta.protein
mvrnorm(n=n.sample.in.cluster[i], mu=effect, Sigma=cov.str)})
sim.protein <- do.call(rbind,d)
indices <- sample(1:n.sample)
cluster.id <- cluster.id[indices]
sim.methyl <- sim.methyl[indices,]
sim.expr <- sim.expr[indices,]
sim.protein <- sim.protein[indices,]
rownames(sim.methyl) <- paste("subject",1:nrow(sim.methyl),sep="")
rownames(sim.expr) <- paste("subject",1:nrow(sim.expr),sep="")
rownames(sim.protein) <- paste("subject",1:nrow(sim.protein),sep="")
d.cluster <- data.frame(rownames(sim.methyl),cluster.id)
colnames(d.cluster)[1] <- "subjects"
if(do.plot){
hmcol <- colorRampPalette(c("blue","deepskyblue","white","orangered","red3"))(100)
if (dev.interactive()) dev.off()
if(sample.cluster && feature.cluster) {
dev.new(width=15, height=5)
par(mfrow=c(1,3))
aheatmap(t(sim.methyl),color=hmcol,Rowv=FALSE, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Methylation",fontsize=10,breaks=0.5)
aheatmap(t(sim.expr),color=hmcol,Rowv=FALSE, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Gene expression",fontsize=10,breaks=0.5)
aheatmap(t(sim.protein),color=hmcol,Rowv=FALSE, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Protein expression",fontsize=10,breaks=0.5)}
else if(sample.cluster) {
dev.new(width=15, height=5)
par(mfrow=c(1,3))
aheatmap(t(sim.methyl),color=hmcol,Rowv=NA, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Methylation",fontsize=8,breaks=0.5)
aheatmap(t(sim.expr),color=hmcol,Rowv=NA, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Gene expression",fontsize=8,breaks=0.5)
aheatmap(t(sim.protein),color=hmcol,Rowv=NA, Colv=FALSE, labRow=NA, labCol=NA,annLegend=T,main="Protein expression",fontsize=8,breaks=0.5)}
else if(feature.cluster){
dev.new(width=15, height=5)
par(mfrow=c(1,3))
aheatmap(t(sim.methyl),color=hmcol,Rowv=FALSE, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Methylation",fontsize=8,breaks=0.5)
aheatmap(t(sim.expr),color=hmcol,Rowv=FALSE, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Gene expression",fontsize=8,breaks=0.5)
aheatmap(t(sim.protein),color=hmcol,Rowv=FALSE, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Protein expression",fontsize=8,breaks=0.5)}
else {
dev.new(width=15, height=5)
par(mfrow=c(1,3))
aheatmap(t(sim.methyl),color=hmcol,Rowv=NA, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Methylation",fontsize=8,breaks=0.5)
aheatmap(t(sim.expr),color=hmcol,Rowv=NA, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Gene expression",fontsize=8,breaks=0.5)
aheatmap(t(sim.protein),color=hmcol,Rowv=NA, Colv=NA, labRow=NA, labCol=NA,annLegend=T,main="Protein expression",fontsize=8,breaks=0.5)}
}
return(list(dat.methyl=sim.methyl,dat.expr=sim.expr,dat.protein=sim.protein,clustering.assignment=d.cluster))
}
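## Example call (sketch; the function also depends on the package's internal
## mean/covariance objects such as mean.M and cov.M):
## sim <- InterSIM(n.sample = 100, cluster.sample.prop = c(.5, .5), do.plot = FALSE)
## table(sim$clustering.assignment$cluster.id)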
|
.calc.ll.grm <- function( theta, b, b.cat, freq.categories){
eps <- 10^(-10)
TP <- length(theta)
I <- length(b)
K <- length(b.cat)
prob1 <- prob <- array( 1, dim=c(TP,I,K+1) )
for (kk in 1:K){
prob1[,,kk+1] <- stats::plogis( theta + matrix( b, nrow=TP, ncol=I, byrow=TRUE) + b.cat[kk] )
prob[,,kk] <- prob1[,,kk]-prob1[,,kk+1]
}
kk <- K+1
prob[,,kk] <- prob1[,,kk]
prob[ prob < eps ] <- eps
ll <- freq.categories * log( prob )
res <- list("ll"=ll, "prob"=prob )
return(res)
}
.update.theta.grm <- function( theta, b, b.cat, freq.categories,
numdiff.parm, max.increment){
h <- numdiff.parm
ll0 <- .calc.ll.grm( theta, b, b.cat, freq.categories)
prob.grm <- ll0$prob
ll0 <- ll0$ll
ll1 <- .calc.ll.grm( theta+h, b, b.cat, freq.categories)$ll
ll2 <- .calc.ll.grm( theta-h, b, b.cat, freq.categories)$ll
ll0 <- rowSums(ll0)
ll1 <- rowSums(ll1)
ll2 <- rowSums(ll2)
d1 <- ( ll1 - ll2 ) / ( 2 * h )
d2 <- ( ll1 + ll2 - 2*ll0 ) / h^2
d2[ abs(d2) < 10^(-10) ] <- 10^(-10)
increment <- - d1 / d2
increment <- ifelse( abs( increment) > abs(max.increment),
sign(increment)*max.increment, increment )
theta <- theta + increment
res <- list("theta"=theta, "ll"=sum(ll0), "prob.grm"=prob.grm )
return(res)
}
.update.b.grm <- function( theta, b, b.cat, freq.categories,
numdiff.parm, max.increment){
h <- numdiff.parm
ll0 <- .calc.ll.grm( theta, b, b.cat, freq.categories)
ll0 <- ll0$ll
ll1 <- .calc.ll.grm( theta, b+h, b.cat, freq.categories)$ll
ll2 <- .calc.ll.grm( theta, b-h, b.cat, freq.categories)$ll
ll0 <- rowSums( colSums(ll0))
ll1 <- rowSums( colSums(ll1))
ll2 <- rowSums( colSums(ll2))
d1 <- ( ll1 - ll2 ) / ( 2 * h )
d2 <- ( ll1 + ll2 - 2*ll0 ) / h^2
d2[ abs(d2) < 10^(-10) ] <- 10^(-10)
increment <- - d1 / d2
increment <- ifelse( abs( increment) > abs(max.increment),
sign(increment)*max.increment, increment )
b <- b + increment
res <- list("b"=b, "ll"=sum(ll0) )
return(res)
}
.update.bcat.grm <- function( theta, b, b.cat, freq.categories,
numdiff.parm, max.increment){
h <- numdiff.parm
b.catN <- 0*b.cat
for (kk in seq(1,length(b.cat))){
e1 <- b.catN
e1[kk] <- 1
ll0 <- .calc.ll.grm( theta, b, b.cat, freq.categories)
ll0 <- ll0$ll
ll1 <- .calc.ll.grm( theta, b, b.cat+h*e1, freq.categories)$ll
ll2 <- .calc.ll.grm( theta, b, b.cat-h*e1, freq.categories)$ll
ll0 <- sum(ll0)
ll1 <- sum(ll1)
ll2 <- sum(ll2)
d1 <- ( ll1 - ll2 ) / ( 2 * h )
d2 <- ( ll1 + ll2 - 2*ll0 ) / h^2
d2[ abs(d2) < 10^(-10) ] <- 10^(-10)
increment <- - d1 / d2
increment <- ifelse( abs( increment) > abs(max.increment),
sign(increment)*max.increment, increment )
b.cat[kk] <- b.cat[kk] + increment
}
res <- list("b.cat"=b.cat, "ll"=sum(ll0) )
return(res)
}
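## All three updaters above share one damped Newton step on a numerically
## differentiated log-likelihood; the common core, as a minimal sketch:
newton_increment <- function(ll0, ll1, ll2, h, max.increment) {
  d1 <- (ll1 - ll2) / (2 * h)          # central difference, first derivative
  d2 <- (ll1 + ll2 - 2 * ll0) / h^2    # second derivative
  d2[abs(d2) < 1e-10] <- 1e-10         # guard against near-zero curvature
  increment <- -d1 / d2
  ifelse(abs(increment) > abs(max.increment),
         sign(increment) * max.increment, increment)
}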
|
import <- function(path, type="auto", pattern, excludePattern=NULL,
removeEmptySpectra=TRUE, centroided=FALSE, massRange=c(0, Inf),
minIntensity=0, mc.cores=1L, verbose=interactive(), ...) {
isUrl <- .isUrl(path)
if (any(isUrl)) {
path[isUrl] <- .download(path[isUrl], verbose=verbose)
on.exit(.cleanupDownloadedTmpFiles())
}
isReadable <- file.exists(path) & file.access(path, mode=4) == 0
if (any(!isReadable)) {
stop(sQuote(path[!isReadable]), " doesn't exist or isn't readable!")
}
isCompressed <- .isPackedOrCompressed(path)
if (any(isCompressed)) {
path[isCompressed] <- .uncompress(path[isCompressed], verbose=verbose)
on.exit(.cleanupUncompressedTmpFiles(), add=TRUE)
}
i <- pmatch(tolower(type), c("auto", importFormats$type), nomatch=0,
duplicates.ok=FALSE)-1
if (i == -1) {
stop("File type ", sQuote(type), " is not supported!")
} else if (i == 0) {
if (!missing(pattern)) {
warning("User defined ", sQuote("pattern"), " is ignored in auto-mode.")
}
return(.importAuto(path=path, excludePattern=excludePattern,
removeEmptySpectra=removeEmptySpectra,
centroided=centroided, massRange=massRange,
minIntensity=minIntensity, verbose=verbose, ...))
} else {
if (missing(pattern)) {
pattern <- importFormats$pattern[i]
}
handler <- get(importFormats$handler[i], mode="function")
s <- unlist(MALDIquant:::.lapply(.files(path=path, pattern=pattern,
excludePattern=excludePattern),
handler, centroided=centroided,
massRange=massRange,
minIntensity=minIntensity,
mc.cores=mc.cores,
verbose=verbose, ...))
if (is.null(s)) {
stop("Import failed! Unsupported file type?")
}
if (removeEmptySpectra) {
emptyIdx <- MALDIquant::findEmptyMassObjects(s)
if (length(emptyIdx)) {
.msg(verbose, "Remove ", length(emptyIdx), " empty spectra.")
return(s[-emptyIdx])
}
}
return(s)
}
}
importTxt <- function(path, ...) {
import(path=path, type="txt", ...)
}
importTab <- function(path, ...) {
import(path=path, type="tab", ...)
}
importCsv <- function(path, ...) {
import(path=path, type="csv", ...)
}
importBrukerFlex <- function(path, ...) {
import(path=path, type="fid", ...)
}
importMzXml <- function(path, ...) {
import(path=path, type="mzxml", ...)
}
importMzMl <- function(path, ...) {
import(path=path, type="mzml", ...)
}
importImzMl <- function(path, coordinates=NULL, ...) {
import(path=path, type="imzml", coordinates=coordinates, ...)
}
importCiphergenXml <- function(path, ...) {
import(path=path, type="ciphergen", ...)
}
importAnalyze <- function(path, ...) {
import(path=path, type="analyze", ...)
}
importCdf <- function(path, ...) {
import(path=path, type="cdf", ...)
}
importMsd <- function(path, ...) {
import(path=path, type="msd", ...)
}
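## Usage sketch: importMzMl("run1.mzML") is shorthand for
## import("run1.mzML", type = "mzml"); with the default type = "auto" the format
## is inferred instead. (File name is illustrative.)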
|
render_compass = function(angle = 0, position = "SE", altitude = NULL, zscale = 1,
x = NULL, y = NULL, z = NULL, compass_radius = NULL, scale_distance = 1,
color_n = "darkred", color_arrow = "grey90",
color_background = "grey60", color_bevel = "grey20",
position_circular = FALSE, clear_compass = FALSE) {
if(clear_compass) {
rgl::rgl.pop(tag = c("north_symbol","arrow_symbol","bevel_symbol","background_symbol"))
return(invisible())
}
if(rgl::rgl.cur() == 0) {
stop("No rgl window currently open.")
}
radius = 1.3
if(is.null(compass_radius)) {
id_base = get_ids_with_labels("surface")$id
if(length(id_base) == 0) {
id_base = get_ids_with_labels("surface_tris")$id
}
fullverts = rgl::rgl.attrib(id_base,"vertices")
xyz_range = apply(fullverts,2,range,na.rm=TRUE)
widths = xyz_range[2,c(1,3)] - xyz_range[1,c(1,3)]
maxwidth = max(widths)
compass_radius = c(maxwidth/10,maxwidth/10,maxwidth/10)
radius = maxwidth/10
} else if (length(compass_radius) == 1) {
radius = compass_radius / 1.5
compass_radius = c(radius,radius,radius)
} else {
stop("radius must be NULL or numeric vector of length 1")
}
if(is.null(x) || is.null(y) || is.null(z)) {
id_shadow = get_ids_with_labels("shadow")$id
if(length(id_shadow) < 1) {
id_base = get_ids_with_labels("surface")$id
if(length(id_base) == 0) {
id_base = get_ids_with_labels("surface_tris")$id
}
fullverts = rgl::rgl.attrib(id_base,"vertices")
} else {
fullverts = rgl::rgl.attrib(id_shadow,"vertices")
}
xyz_range = apply(fullverts,2,range,na.rm=TRUE) * scale_distance *
matrix(c(1,1,1/scale_distance,1/scale_distance,1,1),ncol=3,nrow=2)
radial_dist = sqrt((xyz_range[1,1] - radius)^2 + (xyz_range[1,3] - radius)^2)
if(is.null(altitude)) {
y = xyz_range[2,2]
} else {
y = altitude/zscale
}
if(position == "N") {
x = 0
if(position_circular) {
z = -radial_dist
} else {
z = xyz_range[1,3] - radius
}
} else if (position == "NE") {
x = xyz_range[2,1] + radius
z = xyz_range[1,3] - radius
} else if (position == "E") {
if(position_circular) {
x = radial_dist
} else {
x = xyz_range[2,1] + radius
}
z = 0
} else if (position == "SE") {
x = xyz_range[2,1] + radius
z = xyz_range[2,3] + radius
} else if (position == "S") {
x = 0
if(position_circular) {
z = radial_dist
} else {
z = xyz_range[2,3] + radius
}
} else if (position == "SW") {
x = xyz_range[1,1] - radius
z = xyz_range[2,3] + radius
} else if (position == "W") {
if(position_circular) {
x = -radial_dist
} else {
x = xyz_range[1,1] - radius
}
z = 0
} else if (position == "NW") {
x = xyz_range[1,1] - radius
z = xyz_range[1,3] - radius
}
}
north_symbol = .north_symbol_rgl
change_color_shape = function(shapes, color, shape_index) {
color = convert_color(color, as_hex = TRUE)
shapes[[shape_index]]$material$color = color
shapes
}
rotate_vertices = function(shapes, angle) {
shapes$vb = apply(shapes$vb, 2, `%*%`,
rgl::rotationMatrix(angle*pi/180,0,1,0))
shapes
}
north_symbol = change_color_shape(north_symbol, color_n, 1)
north_symbol = change_color_shape(north_symbol, color_arrow, 2)
north_symbol = change_color_shape(north_symbol, color_bevel, 3)
north_symbol = change_color_shape(north_symbol, color_background, 4)
shade3d(translate3d(scale3d(rotate_vertices(north_symbol[[1]],angle),
compass_radius[1],compass_radius[2],compass_radius[3]),
x, y, z),
lit=FALSE, tag = "north_symbol", skipRedraw = FALSE)
shade3d(translate3d(scale3d(rotate_vertices(north_symbol[[2]],angle),
compass_radius[1],compass_radius[2],compass_radius[3]),
x, y, z),
lit=FALSE, tag = "arrow_symbol", skipRedraw = FALSE)
shade3d(translate3d(scale3d(rotate_vertices(north_symbol[[3]],angle),
compass_radius[1],compass_radius[2],compass_radius[3]),
x, y, z),
lit=FALSE, tag = "bevel_symbol", skipRedraw = FALSE)
shade3d(translate3d(scale3d(rotate_vertices(north_symbol[[4]],angle),
compass_radius[1],compass_radius[2],compass_radius[3]),
x, y, z),
lit=FALSE, tag = "background_symbol", skipRedraw = FALSE)
}
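## Placement note: "N","NE",...,"NW" put the compass just outside the
## corresponding edge/corner of the surface bounding box; position_circular = TRUE
## instead keeps N/E/S/W at a fixed radial distance from the center.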
|
test_that("running two make_query in rapid succession will not trigger HTTP 429", {
skip_on_cran()
skip_on_ci()
skip_if(Sys.getenv("TWITTER_BEARER") == "")
skip_if(!dir.exists("_snaps"))
params <-
list(query = "from:Peter_Tolochko -is:retweet", max_results = 15,
start_time = "2020-02-03T00:00:00Z", end_time = "2020-11-03T00:00:00Z",
tweet.fields = "attachments,author_id,conversation_id,created_at,entities,geo,id,in_reply_to_user_id,lang,public_metrics,possibly_sensitive,referenced_tweets,source,text,withheld",
user.fields = "created_at,description,entities,id,location,name,pinned_tweet_id,profile_image_url,protected,public_metrics,url,username,verified,withheld",
expansions = "author_id,entities.mentions.username,geo.place_id,in_reply_to_user_id,referenced_tweets.id,referenced_tweets.id.author_id",
place.fields = "contained_within,country,country_code,full_name,geo,id,name,place_type")
endpoint_url <- "https://api.twitter.com/2/tweets/search/all"
expect_snapshot(academictwitteR:::make_query(url = endpoint_url, params = params, bearer_token = get_bearer()))
expect_snapshot(academictwitteR:::make_query(url = endpoint_url, params = params, bearer_token = get_bearer()))
})
|
fusionAnchors <- function(fusionPlot, drawAnchors = TRUE, showvalues = FALSE, anchorSides = "0", anchorRadius = "3", anchorAlpha = "100", anchorBorderThickness = "1",
anchorBorderColor = "
AnchorsAttrs <- list()
AnchorsAttrs$drawAnchors <- as.numeric(drawAnchors)
AnchorsAttrs$showvalues <- as.numeric(showvalues)
AnchorsAttrs$anchorSides <- anchorSides
AnchorsAttrs$anchorRadius <- anchorRadius
AnchorsAttrs$anchorAlpha <- anchorAlpha
AnchorsAttrs$anchorBorderThickness <- anchorBorderThickness
AnchorsAttrs$anchorBorderColor <- anchorBorderColor
AnchorsAttrs$anchorBgColor <- anchorBgColor
AnchorsAttrs$anchorBgAlpha <- anchorBgAlpha
AnchorsAttrs$anchorImageAlpha <- anchorImageAlpha
AnchorsAttrs$anchorImageScale <- anchorImageScale
fusionPlot$x$drawAnchors <- AnchorsAttrs$drawAnchors
fusionPlot$x$showvalues <- AnchorsAttrs$showvalues
fusionPlot$x$anchorSides <- AnchorsAttrs$anchorSides
fusionPlot$x$anchorRadius <- AnchorsAttrs$anchorRadius
fusionPlot$x$anchorAlpha <- AnchorsAttrs$anchorAlpha
fusionPlot$x$anchorBorderThickness <- AnchorsAttrs$anchorBorderThickness
fusionPlot$x$anchorBorderColor <- AnchorsAttrs$anchorBorderColor
fusionPlot$x$anchorBgColor <- AnchorsAttrs$anchorBgColor
fusionPlot$x$anchorBgAlpha <- AnchorsAttrs$anchorBgAlpha
fusionPlot$x$anchorImageAlpha <- AnchorsAttrs$anchorImageAlpha
fusionPlot$x$anchorImageScale <- AnchorsAttrs$anchorImageScale
return(fusionPlot)
}
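## Sketch: every argument is copied verbatim onto fusionPlot$x, so the modifier
## composes with other fusion* helpers (hypothetical plot object `fp`):
## fp <- fusionAnchors(fp, drawAnchors = TRUE, anchorRadius = "4")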
|
isFitLogit <- function(fit){
if ("lrm" %in% class(fit))
return (TRUE)
if ("glm" %in% class(fit) &&
fit$family$link == "logit")
return (TRUE)
return (FALSE)
}
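## Example (sketch):
## fit <- glm(am ~ mpg, data = mtcars, family = binomial(link = "logit"))
## isFitLogit(fit)                            # TRUE
## isFitLogit(lm(mpg ~ wt, data = mtcars))    # FALSE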
|
highbrow <- function(input=NULL, output=NULL, browse=TRUE) {
if (is.null(input)) {
stop("Please supply some input", call. = FALSE)
}
if (!inherits(input, "list")) {
stop("Please supply a list object", call. = FALSE)
}
plos_check_dois(names(input))
input <- lapply(input, function(x) ifelse(length(x) == 0, "no data", x))
tmp <- NULL
outlist <- list()
for (i in seq_along(input)) {
tmp$doi <- names(input[i])
content_tmp <- input[[i]][[1]]
if (length(content_tmp) > 1) {
content_tmp <- paste(content_tmp, collapse = ' ... ')
}
tmp$content <- content_tmp
outlist[[i]] <- tmp
}
template <-
'<!DOCTYPE html>
<head>
<meta charset="utf-8">
<title>rplos - view highlighs</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="View highlights from rplos search">
<meta name="author" content="rplos">
<!-- Le styles -->
<link href="http://netdna.bootstrapcdn.com/bootstrap/3.0.2/css/bootstrap.min.css" rel="stylesheet">
<link href="http://netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css" rel="stylesheet">
</head>
<body>
<div class="container">
<center><h2>rplos <i class="fa fa-lightbulb-o"></i> highlights</h2></center>
<table class="table table-striped table-hover" align="center">
<thead>
<tr>
<th>DOI</th>
<th>Fragment(s)</th>
</tr>
</thead>
<tbody>
{{#outlist}}
<tr><td><a href="https://doi.org/{{doi}}" class="btn btn-info btn-xs" role="button">{{doi}}</a></td><td>{{content}}</td></tr>
{{/outlist}}
</tbody>
</table>
</div>
<script src="http://code.jquery.com/jquery-2.0.3.min.js"></script>
<script src="http://netdna.bootstrapcdn.com/bootstrap/3.0.2/js/bootstrap.min.js"></script>
</body>
</html>'
rendered <- whisker.render(template)
rendered <- gsub("<em>", "<b>", rendered)
rendered <- gsub("</em>", "</b>", rendered)
if (is.null(output)) {
output <- tempfile(fileext = ".html")
}
write(rendered, file = output)
if (browse) utils::browseURL(output) else output
}
|
library(gsignal)
library(testthat)
tol <- 1e-6
test_that("parameters to buffer() are correct", {
expect_error(buffer())
expect_error(buffer(x = 1:10, n = 4.1))
expect_error(buffer(x = 1:10, n = 4, p = 3.1))
expect_error(buffer(x = 1:10, n = 4, p = 4))
expect_error(buffer(x = 1:10, n = 4, p = 1, opt = 10:11))
expect_error(buffer(x = 1:10, n = 4, p = 1, opt = 'badstring'))
expect_error(buffer(x = 1:10, n = 3, p = -2, opt = 4))
expect_error(buffer(x = 1:10, n = 4, zopt = 5))
})
test_that("buffer() tests returning only y are correct", {
expect_equal(buffer(1:10, 4), matrix(c(1:10, 0, 0), 4, 3))
expect_equal(buffer(1:10, 4, 1), matrix(c(0:3, 3:6, 6:9, 9, 10, 0, 0), 4, 4))
expect_equal(buffer(1:10, 4, 2), matrix(c(0, 0:2, 1:4, 3:6, 5:8, 7:10), 4, 5))
expect_equal(buffer(1:10, 4, 3), rbind(c(0, 0, 0:7), c(0, 0:8), 0:9, 1:10))
expect_equal(buffer(1:10, 4, -1), matrix(c(1:4, 6:9), 4, 2))
expect_equal(buffer(1:10, 4, -2), matrix(c(1:4, 7:10), 4, 2))
expect_equal(buffer(1:10, 4, -3), matrix(c(1:4, 8:10, 0), 4, 2))
expect_equal(buffer(1:10, 4, 1, 11), matrix(c(11,1:3,3:6,6:9,9,10,0,0), 4, 4))
expect_equal(buffer(1:10, 4, 1, 'nodelay'), matrix(c(1:4,4:7,7:10), 4, 3))
expect_equal(buffer(1:10, 4, 2, 'nodelay'), matrix(c(1:4,3:6,5:8,7:10), 4, 4))
expect_equal(buffer(1:10, 4, 3, c(11, 12, 13)),
rbind(c(11:13, 1:7), c(12:13, 1:8), c(13, 1:9), 1:10))
expect_equal(buffer(1:10, 4, 3, 'nodelay'), rbind(1:8, 2:9, 3:10, c(4:10, 0)))
expect_equal(buffer(1:11, 4, -2, 1), matrix(c(2:5, 8:11), 4, 2))
})
test_that("buffer() tests returning y, and z are correct", {
buf <- buffer(1:12, 4, zopt = TRUE)
expect_equal(buf$y, matrix(1:12, 4, 3))
expect_equal(buf$z, NULL)
buf <- buffer(1:11, 4, zopt = TRUE)
expect_equal(buf$y, matrix(1:8, 4, 2))
expect_equal(buf$z, 9:11)
buf <- buffer(t(1:12), 4, zopt = TRUE)
expect_equal(buf$y, matrix(1:12, 4, 3))
expect_equal(buf$z, NULL)
buf <- buffer(t(1:11), 4, zopt = TRUE)
expect_equal(buf$y, matrix(1:8, 4, 2))
expect_equal(buf$z, 9:11)
})
test_that("buffer() tests returning y, z, and opt are correct", {
buf <- buffer(1:15, 4, -2, 1, zopt = TRUE)
expect_equal(buf$y, matrix(c(2:5,8:11), 4, 2))
expect_equal(buf$z, c(14,15))
expect_equal(buf$opt, 0L)
buf <- buffer(1:11, 4, -2, 1, zopt = TRUE)
expect_equal(buf$y, matrix(c(2:5,8:11), 4, 2))
expect_equal(buf$z, NULL)
expect_equal(buf$opt, 2)
buf <- buffer(t(1:15), 4, -2, 1, zopt = TRUE)
expect_equal(buf$y, matrix(c(2:5,8:11), 4, 2))
expect_equal(buf$z, c(14,15))
expect_equal(buf$opt, 0L)
buf <- buffer(t(1:11), 4, -2, 1, zopt = TRUE)
expect_equal(buf$y, matrix(c(2:5,8:11), 4, 2))
expect_equal(buf$z, NULL)
expect_equal(buf$opt, 2)
buf <- buffer(1:11, 5, 2, c(-1,0), zopt = TRUE)
expect_equal(buf$y, matrix(c(-1:3,2:6,5:9), 5, 3))
expect_equal(buf$z, c(10, 11))
expect_equal(buf$opt, c(8, 9))
buf <- buffer(t(1:11), 5, 2, c(-1,0), zopt = TRUE)
expect_equal(buf$y, matrix(c(-1:3,2:6,5:9), 5, 3))
expect_equal(buf$z, c(10, 11))
expect_equal(buf$opt, c(8, 9))
buf <- buffer(t(1:10), 6, 4, zopt = TRUE)
expect_equal(buf$y, matrix(c(rep(0, 4), 1:2, rep(0, 2), 1:4, 1:6, 3:8, 5:10), 6, 5))
expect_equal(buf$z, NULL)
expect_equal(buf$opt, 7:10)
})
test_that("buffer() works correctly with continuous buffering", {
data <- buffer(1:1100, 11)
n <- 4
p <- 1
buf <- list(y = NULL, z = NULL, opt = -5)
for (i in seq_len(ncol(data))) {
x <- data[,i]
buf <- buffer(x = c(buf$z,x), n, p, opt=buf$opt, zopt = TRUE)
}
expect_equal(buf$y, matrix(c(1089:1092, 1092:1095, 1095:1098), 4, 3))
expect_equal(buf$z, c(1099, 1100))
expect_equal(buf$opt, 1098)
data <- buffer(1:1100, 11)
n <- 4
p <- -2
buf <- list(y = NULL, z = NULL, opt = 1)
for (i in seq_len(ncol(data))) {
x <- data[,i]
buf <- buffer(x = c(buf$z,x), n, p, opt=buf$opt, zopt = TRUE)
}
expect_equal(buf$y, matrix(c(1088:1091, 1094:1097), 4, 2))
expect_equal(buf$z, 1100)
expect_equal(buf$opt, 0)
})
test_that("parameters to chirp() are correct", {
expect_error(chirp())
expect_error(chirp(1, 2, 3, 4, 5, 6, 7))
expect_error(chirp(0, shape = "foo"))
})
test_that("chirp() works for linear, quadratic and logarithmic shapes", {
t <- seq(0, 5, 0.001)
y <- chirp (t)
expect_equal(sum(head(y)), 5.999952, tolerance = tol)
expect_equal(sum(tail(y)), 2.146626e-05, tolerance = tol)
t <- seq(-2, 15, 0.001)
y <- chirp (t, 400, 10, 100, "quadratic")
expect_equal(sum(head(y)), 0.8976858, tolerance = tol)
expect_equal(sum(tail(y)), 0.4537373, tolerance = tol)
t <- seq(0, 5, 1/8000)
y <- chirp (t, 200, 2, 500, "logarithmic")
expect_equal(sum(head(y)), -4.56818, tolerance = tol)
expect_equal(sum(tail(y)), 0.8268064, tolerance = tol)
})
test_that("parameters to cmorwavf() are correct", {
expect_error(cmorwavf(n = -1))
expect_error(cmorwavf(n = 2.5))
expect_error(cmorwavf(fb = -1))
expect_error(cmorwavf(fb = 0))
expect_error(cmorwavf(fc = -1))
expect_error(cmorwavf(fc = 0))
})
test_that("cmorwavf() works correctly", {
expect_equal(round(mean(Re(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), 4), 0)
expect_equal(round(mean(Im(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), 4), 0)
expect_lt(max(Re(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), 1L)
expect_lt(max(Im(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), 1L)
expect_gt(min(Re(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), -1L)
expect_gt(min(Im(cmorwavf(-8, 8, 1000, 1.5, 1)$psi)), -1L)
})
test_that("parameters to diric() are correct", {
expect_error(diric())
expect_error(diric(seq(-2*pi, 2*pi, len = 301)))
expect_error(diric(seq(-2*pi, 2*pi, len = 301), 0))
expect_error(diric(seq(-2*pi, 2*pi, len = 301), -1))
expect_error(diric(seq(-2*pi, 2*pi, len = 301), 2.5))
})
test_that("parameters to gauspuls() are correct", {
expect_error(gauspuls())
expect_error(gauspuls(seq(-2*pi, 2*pi, len = 301), -1))
expect_error(gauspuls(seq(-2*pi, 2*pi, len = 301), 2, 0))
expect_error(gauspuls(seq(-2*pi, 2*pi, len = 301), 2, -1))
})
test_that("parameters to gmonopuls() are correct", {
expect_error(gmonopuls())
expect_error(gmonopuls(seq(-2*pi, 2*pi, len = 301), -1))
})
test_that("parameters to mexihat() are correct", {
expect_error(mexihat(n = -1))
expect_error(mexihat(n = 2.5))
})
test_that("parameters to meyeraux() are correct", {
expect_error(meyeraux())
})
test_that("parameters to morlet() are correct", {
expect_error(morlet(n = -1))
expect_error(morlet(n = 2.5))
})
test_that("parameters to pulstran() are correct", {
expect_error(pulstran())
expect_error(pulstran(NULL))
expect_error(pulstran(1, 2, 3, 4, 5, 6))
expect_error(pulstran(d = seq(0, 0.1, 0.01)))
})
test_that("rectpuls() works correctly", {
t <- seq(0, 1, 0.01)
d <- seq(0, 1, 0.1)
expect_equal(pulstran(NA, d, 'sin'), NA_integer_)
expect_equal(pulstran(t, NULL, 'sin'), rep(0L, length(t)))
expect_equal(pulstran(seq(0, 0.1, 0.001)), rep(0L, length(seq(0, 0.1, 0.001))))
expect_equal(length(pulstran(t, d, 'sin')), length(t))
})
test_that("parameters to rectpuls() are correct", {
expect_error(rectpuls())
expect_error(rectpuls(NULL, 0.1))
expect_error(rectpuls(seq(-2*pi, 2*pi, len = 301), -1))
expect_error(rectpuls(seq(-2*pi, 2*pi, len = 301), 1, 3))
expect_error(rectpuls(seq(-2*pi, 2*pi, len = 301), 1i))
})
test_that("rectpuls() works correctly", {
expect_equal(rectpuls(0, 0), 0L)
expect_equal(rectpuls(0, 0.1), 1L)
expect_equal(rectpuls(rep(0L, 10)), rep(1L, 10))
expect_equal(rectpuls(-1:1), c(0, 1, 0))
expect_equal(rectpuls(-5:5, 9), c(0, rep(1L, 9), 0))
})
test_that("parameters to sawtooth() are correct", {
expect_error(sawtooth())
expect_error(sawtooth(NULL, 0.1))
expect_error(sawtooth(0:10, -1))
expect_error(sawtooth(0:10, 2))
expect_error(sawtooth(0:10, 1, 3))
expect_error(sawtooth(0:10, 1i))
})
test_that("sawtooth() works correctly", {
expect_equal(sawtooth(0, 0), 1L)
expect_equal(sawtooth(0, 1), -1L)
expect_equal(sawtooth(rep(0L, 10)), rep(-1L, 10))
})
test_that("parameters to square() are correct", {
expect_error(square())
expect_error(square(NULL, 1))
expect_error(square(0:10, -1))
expect_error(square(0:10, 150))
expect_error(square(0:10, 1, 3))
expect_error(square(0:10, 1i))
})
test_that("square() works correctly", {
expect_equal(square(0, 0), -1L)
expect_equal(square(0, 1), 1L)
expect_equal(square(rep(0L, 10)), rep(1L, 10))
expect_equal(square(1:12, 50), rep(c(rep(1,3), rep(-1, 3)), 2))
})
test_that("parameters to tripuls() are correct", {
expect_error(tripuls())
expect_error(tripuls(NULL, 1))
expect_error(tripuls(0:10, c(0,1)))
expect_error(tripuls(0:10, 1, -2))
expect_error(tripuls(0:10, 1, 2))
expect_error(tripuls(0:10, 1i))
})
test_that("tripuls() works correctly", {
expect_equal(tripuls(0, 1), 1L)
expect_equal(tripuls(rep(0L, 10)), rep(1L, 10))
})
test_that("parameters to shanwavf() are correct", {
expect_error(shanwavf(n = -1))
expect_error(shanwavf(n = 2.5))
expect_error(shanwavf(fb = -1))
expect_error(shanwavf(fb = 0))
expect_error(shanwavf(fc = -1))
expect_error(shanwavf(fc = 0))
})
test_that("shanwavf() works correctly", {
expect_equal(mean(Re(shanwavf(-20, 20, 1000, 1.5, 1)$psi)), 0, tolerance = 1e-3)
expect_equal(mean(Im(shanwavf(-20, 20, 1000, 1.5, 1)$psi)), 0, tolerance = 1e-3)
})
test_that("parameters to shiftdata() are correct", {
expect_error(shiftdata())
expect_error(shiftdata(1, 2, 3))
expect_error(shiftdata(1, 2.5))
expect_error(shiftdata(1, 2i))
expect_error(shiftdata(1:5, 2))
expect_error(shiftdata(array(1:24, c(2,3)), 3))
})
test_that("shiftdata() works correctly", {
sd <- shiftdata(matrix(1:9, 3, 3, byrow = TRUE), 2)
expect_equal(sd$x, matrix(c(1, 4, 7, 2, 5, 8, 3, 6, 9), 3, 3, byrow = TRUE))
expect_equal(sd$perm, c(2,1))
expect_equal(sd$nshifts, NA)
sd <- shiftdata(array(c(27, 63, 67, 42, 48, 74, 11, 5, 93, 15, 34, 70, 23, 60, 54, 81, 28, 38), c(3, 3, 2)), 2)
expect_equal(sd$x, array(c(27, 42, 11, 63, 48, 5, 67, 74, 93, 15, 23, 81, 34, 60, 28, 70, 54, 38), c(3, 3, 2)))
expect_equal(sd$perm, c(2, 1, 3))
expect_equal(sd$nshifts, NA)
X <- array(round(runif(4 * 4 * 4 * 4) * 100), c(4, 4, 4, 4))
Y <- shiftdata(X, 3)
T <- NULL
for (i in 1:3) {
for (j in 1:3) {
for (k in 1:2) {
for (l in 1:2) {
T <- c(T, Y$x[k, i, j, l] - X[i, j, k ,l])
}
}
}
}
expect_equal(T, rep(0L, length(T)))
})
test_that("parameters to unshiftdata() are correct", {
expect_error(unshiftdata())
expect_error(unshiftdata(1, 2, 3))
expect_error(unshiftdata(1))
expect_error(unshiftdata(2i))
expect_error(unshiftdata(list(x=array(1:5), perm = 2i, nshifts = 0)))
expect_error(unshiftdata(list(x=array(1:5), perm = NULL, nshifts = NULL)))
})
test_that("unshiftdata() works correctly", {
x <- 1:5
sd <- shiftdata(x)
x2 <- unshiftdata(sd)
expect_equal(array(x), x2)
x <- array(round(runif(3 * 3) * 100), c(3, 3))
sd <- shiftdata(x, 2)
x2 <- unshiftdata(sd)
expect_equal(x, x2)
x <- array(round(runif(4 * 4 * 4 * 4) * 100), c(4, 4, 4, 4))
sd <- shiftdata(x, 3)
x2 <- unshiftdata(sd)
expect_equal(x, x2)
x <- array(round(runif(1 * 1 * 3 * 4) * 100), c(1, 1, 3, 4))
sd <- shiftdata(x)
x2 <- unshiftdata(sd)
expect_equal(x, x2)
})
test_that("parameters to sigmoid_train() are correct", {
expect_error(sigmoid_train())
expect_error(sigmoid_train(1:10, NULL, NULL))
expect_error(sigmoid_train(1:10, rbind(c(1,2),1), NULL))
expect_error(sigmoid_train(1:10, rbind(c(1,2),1), 2i))
})
test_that("sigmoid_train() works correctly", {
st <- sigmoid_train(1:10, rbind(c(2,3)), 1)
expect_equal(st$y, st$s, tolerance = tol)
st <- sigmoid_train(1:10, c(2,3), 1)
expect_equal(st$y, st$s, tolerance = tol)
})
test_that("parameters to specgram() are correct", {
expect_error(specgram())
expect_error(specgram(matrix(1:10, 2, 5)))
expect_error(specgram(x = 1:10, n = 4.1))
expect_warning(specgram(x = 1:10, n = 11))
expect_warning(specgram(x = 1:10, n = 2, window = 1:11))
expect_error(specgram(x = 1:10, n = 2, overlap = 3))
})
test_that("specgram() works correctly", {
sp <- specgram(chirp(seq(-2, 15, by = 0.001), 400, 10, 100, 'quadratic'))
expect_equal(length(sp$f), 128L)
expect_equal(length(sp$t), 131L)
expect_equal(nrow(sp$S), length(sp$f))
expect_equal(ncol(sp$S), length(sp$t))
})
test_that("parameters to uencode() are correct", {
expect_error(uencode())
expect_error(uencode(1))
expect_error(uencode(1, 2, 3, 4, 5))
expect_error(uencode(1, 100))
expect_error(uencode(1, 4, 0))
expect_error(uencode(1, 4, -1))
expect_error(uencode(1, 4, 2, 'invalid'))
})
test_that("uencode() works correctly", {
expect_equal(uencode(seq(-3, 3, 0.5), 2),
c(0, 0, 0, 0, 0, 1, 2, 3, 3, 3, 3, 3, 3))
expect_equal(uencode(seq(-4, 4, 0.5), 3, 4),
c(0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 7))
expect_equal(uencode(seq(-8, 8, 0.5), 4, 8, FALSE),
c(0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10,
10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 15))
expect_equal(uencode(seq(-8, 8, 0.5), 4, 8, TRUE),
c(-8, -8, -7, -7, -6, -6, -5, -5, -4, -4, -3, -3, -2, -2, -1, -1, 0,
0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 7))
expect_equal(uencode(matrix(c(-2, 1, -1, 2), 2, 2), 2),
matrix(c(0, 3, 0, 3), 2, 2))
expect_equal(uencode(matrix(c(1+1i, 2+1i, 3+1i, 4+2i, 5+2i, 6+2i, 7+3i, 8+3i, 9+3i), 3, 3, byrow = TRUE), 2),
matrix(rep(3, 9), 3, 3))
})
test_that("parameters to udecode() are correct", {
expect_error(udecode())
expect_error(udecode(1))
expect_error(udecode(1, 2, 3, 4, 5))
expect_error(udecode(1, 100))
expect_error(udecode(1, 4, 0))
expect_error(udecode(1, 4, -1))
expect_error(udecode(1, 4, 2, 'invalid'))
})
test_that("udecode() works correctly", {
expect_equal(udecode(c(rep(0, 5), 1, 2, rep(3, 6)), 2),
c(-1, -1, -1, -1, -1, -0.5, 0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5))
expect_equal(udecode(0:10, 2, 1, TRUE),
c(-1, -0.5, 0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5))
expect_equal(udecode(0:10, 2, 1, FALSE),
c(-1, -0.5, 0, 0.5, -1, -0.5, 0, 0.5, -1, -0.5, 0))
expect_equal(udecode(-4:3, 3, 2), c(-2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5))
expect_equal(udecode(-7:7, 3, 2, TRUE),
c(-2, -2, -2, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, 1.5, 1.5, 1.5, 1.5))
expect_equal(udecode(-7:7, 3, 2, FALSE),
c(0.5, 1, 1.5, -2, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, -2, -1.5, -1, -0.5))
expect_equal(udecode(matrix(c(-2, 1, -1, 2), 2, 2), 2),
matrix(c(-1, 0.5, -0.5, 0.5), 2, 2))
expect_equal(udecode(matrix(c(1+1i, 2+1i, 3+1i, 4+2i, 5+2i, 6+2i, 7+3i, 8+3i, 9+3i), 3, 3, byrow = TRUE), 2),
matrix(complex(real = c(-0.5, 0.0, rep(0.5, 7)), imaginary = c(rep(-0.5, 3), rep(0, 3), rep(0.5,3))), 3, 3))
})
test_that("parameters to sinetone() are correct", {
expect_error(sinetone())
expect_error(sinetone('invalid'))
expect_error(sinetone(-1))
expect_error(sinetone(1, 'invalid'))
expect_error(sinetone(1, 0))
expect_error(sinetone(1, 1, 'invalid'))
expect_error(sinetone(1, 1, 0))
expect_error(sinetone(1, 1, 1, 'invalid'))
expect_error(sinetone(1, 1, 1, 1, 1))
})
test_that("sinetone() works correctly", {
y <- sinetone(0)
expect_equal(length(y), 8000)
expect_equal(y, rep(0, 8000))
y <- sinetone(18e6, 150e6, 19550/150e6, 1)
expect_equal(length(y), 19550)
})
test_that("parameters to sinewave() are correct", {
expect_error(sinewave())
expect_error(sinewave(1, 'invalid'))
expect_error(sinewave(1, 1, 'invalid'))
expect_error(sinewave(1, 2, 3, 4))
})
test_that("sinetone() works correctly", {
expect_equal(sinewave(1), 0)
expect_equal(sinewave(1, 4, 1), 1)
expect_equal(sinewave(1, 12, 1), 1 / 2, tolerance = tol)
expect_equal(sinewave(1, 12, 2), sqrt(3) / 2, tolerance = tol)
expect_equal(sinewave(1, 20, 1), (sqrt(5) - 1) / 4, tolerance = tol)
expect_equal(sinewave(1), sinewave(1, 1, 0), tolerance = tol)
expect_equal(sinewave(3, 4), sinewave(3, 4, 0), tolerance = tol)
})
|
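# rm.dupl: drop duplicated spatial locations from a space-time object.
# zerodist() finds pairs of points in obj@sp closer than zero.tol; from each
# pair the point with more NAs in the data column(s) zcol is removed (ties
# drop the first point), and spatial row names are renumbered.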
rm.dupl <- function(obj,
zcol=1,
zero.tol=0){
zerod=zerodist(obj@sp, zero=zero.tol)
if(nrow(zerod)!=0){
zs=zerod[,2]
numNA <- apply(matrix(obj@data[,zcol],
nrow=length(obj@sp),byrow=F), MARGIN=1,
FUN=function(x) sum(is.na(x)))
for(i in 1:length(zerod[,1])) {
if(numNA[zerod[i,1]]>=numNA[zerod[i,2]]){
zs[i]=zerod[i,1] }
}
res = obj[-zs,drop=F]
row.names(res@sp)=1:nrow(res@sp)
} else {
res= obj}
return(res)
}
|
"session_dataset"
|
library(OpenMx)
data(demoOneFactor)
manifests <- names(demoOneFactor)
latents <- c("G")
base <- mxModel(
"OneFactorCov", type="RAM",
manifestVars = manifests,
latentVars = latents,
mxPath(from=latents, to=manifests, values=0, free=FALSE, labels=paste0('l',1:length(manifests))),
mxPath(from=manifests, arrows=2, values=rlnorm(length(manifests)), lbound=.01),
mxPath(from=latents, arrows=2, free=FALSE, values=1.0),
mxPath(from = 'one', to = manifests, values=0, free=TRUE, labels=paste0('m',1:length(manifests))),
mxData(demoOneFactor, type="raw"))
base <- mxRun(base)
set.seed(1)
got <- mxParametricBootstrap(base, paste0('l', 1:length(manifests)),
alternative="two.sided",
alpha=0.05,
correction="none",
replications=10)
omxCheckEquals(got['l2', 'note'], "< 1/10")
got2 <- mxParametricBootstrap(base, paste0('l', 1:length(manifests)), "two.sided",
replications=100,
previousRun=got)
omxCheckCloseEnough(got2[,'p'], c(.32,.32,.32,.25,.32), .12)
omxCheckEquals(attr(got,'bootData')[5,],
attr(got2,'bootData')[5,])
got3 <- mxParametricBootstrap(base, paste0('l', 1:length(manifests)), "two.sided",
previousRun=got2)
omxCheckCloseEnough(got3[,'p'], c(.36,.36,.36,.325,.36), .08)
gc()
before <- proc.time()[['elapsed']]
got4 <- mxParametricBootstrap(base, paste0('l', 1:length(manifests)), "two.sided",
previousRun=got3)
elapsed <- proc.time()[['elapsed']] - before
omxCheckCloseEnough(elapsed, 0, 1.)
|
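# as.RollingLDA: build (or convert "x" to) a RollingLDA object from its parts.
# Components missing from the call are taken from "x" or filled with defaults,
# and the assembled object is validated with is.RollingLDA() before returning.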
as.RollingLDA = function(x, id, lda, docs, dates, vocab, chunks, param){
if (!missing(x)){
if (!is.RollingLDA(x)){
is.RollingLDA(x, verbose = TRUE)
stop("\"x\" is not a RollingLDA object")
}
if (missing(id)) id = x$id
if (missing(lda)) lda = x$lda
if (missing(docs)) docs = x$docs
if (missing(dates)) dates = x$dates
if (missing(vocab)) vocab = x$vocab
if (missing(chunks)) chunks = x$chunks
if (missing(param)) param = x$param
}
if (missing(id)) id = "rolling - converted"
if (!is.LDA(lda)){
is.LDA(lda, verbose = TRUE)
stop("\"lda\" not an LDA object")
}
if (is.null(names(dates))) names(dates) = names(docs)
dates = as.Date(dates[match(names(docs), names(dates))]) # reorder dates to the order of docs
if (missing(vocab)) vocab = colnames(getTopics(lda))
if (missing(chunks)){
chunks = data.table(
chunk.id = 0L,
start.date = min(dates),
end.date = max(dates),
memory = NA_Date_,
n = length(docs),
n.discarded = NA_integer_,
n.memory = NA_integer_,
n.vocab = length(vocab)
)
}
if (missing(param)) param = .defaultParam()
res = list(
id = id,
lda = lda,
docs = docs,
dates = dates,
vocab = vocab,
chunks = chunks,
param = param)
class(res) = "RollingLDA"
if (!is.RollingLDA(res)){
is.RollingLDA(res, verbose = TRUE)
stop("input arguments do not create a RollingLDA object")
}
res
}
is.RollingLDA = function(obj, verbose = FALSE){
assert_flag(verbose)
if (!inherits(obj, "RollingLDA")){
if (verbose) message("object is not of class \"RollingLDA\"")
return(FALSE)
}
if (!is.list(obj)){
if (verbose) message("object is not a list")
return(FALSE)
}
testNames = c("id", "lda", "docs", "dates", "vocab", "chunks", "param")
if (!test_list(obj, types = c("character", "LDA", "list", "Date", "character", "data.table", "list"),
names = "named", any.missing = FALSE)){
if (verbose) message(check_list(obj, types = c("character", "LDA", "list", "Date", "character", "data.table", "list"),
names = "named", any.missing = FALSE))
return(FALSE)
}
if (!test_set_equal(names(obj), testNames)){
if (verbose) message(check_set_equal(names(obj), testNames))
return(FALSE)
}
if (verbose) message("id: ", appendLF = FALSE)
id = getID(obj)
if (!is.character(id) || !(length(id) == 1)){
if (verbose) message("not a character of length 1")
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("lda: ", appendLF = FALSE)
lda = try(getLDA(obj), silent = !verbose)
if(inherits(lda, "try-error")){
return(FALSE)
}
if(!is.LDA(lda)){
if (verbose) message("not an \"LDA\" object")
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("docs: ", appendLF = FALSE)
docs = getDocs(obj)
if (!test_list(docs, min.len = 1, names = "unique", types = "matrix", any.missing = FALSE)){
if (verbose) message(check_list(docs, min.len = 1, names = "unique", types = "matrix", any.missing = FALSE))
return(FALSE)
}
if (!all(sapply(docs, nrow) == 2)){
if (verbose) message("not all elements have two rows")
return(FALSE)
}
if (!all(sapply(docs, function(x) all(x[2,] == 1)))){
if (verbose) message("not all values in the second row equal 1")
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("dates: ", appendLF = FALSE)
dates = getDates(obj)
if (!test_date(dates, any.missing = FALSE)){
if (verbose) message(check_date(dates, any.missing = FALSE))
return(FALSE)
}
if (!all(names(dates) %in% names(docs)) || !all(names(docs) %in% names(dates))){
if (verbose) message("not same names as \"docs\"")
return(FALSE)
}
if (length(dates) != length(docs)){
if (verbose) message("not same length as \"docs\"")
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("vocab: ", appendLF = FALSE)
vocab = getVocab(obj)
if (!test_character(vocab, any.missing = FALSE, unique = TRUE)){
if (verbose) message(check_character(vocab, any.missing = FALSE, unique = TRUE))
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("chunks: ", appendLF = FALSE)
chunks = getChunks(obj)
if (!is.data.table(chunks) ||
!all(c("chunk.id", "start.date", "end.date", "memory", "n", "n.discarded",
"n.memory", "n.vocab") %in% colnames(chunks))){
if (verbose) message("not a data.table with standard parameters")
return(FALSE)
}
if (anyDuplicated(chunks$chunk.id)){
if (verbose) message("duplicated \"chunk.id\"")
return(FALSE)
}
if (!is.integer(chunks$chunk.id)){
if (verbose) message("\"chunk.id\" is not an integer")
return(FALSE)
}
if (!is.integer(chunks$n)){
if (verbose) message("\"n\" is not an integer")
return(FALSE)
}
if (!is.integer(chunks$n.discarded)){
if (verbose) message("\"n.discarded\" is not an integer")
return(FALSE)
}
if (!is.integer(chunks$n.memory)){
if (verbose) message("\"n.memory\" is not an integer")
return(FALSE)
}
if (!is.integer(chunks$n.vocab)){
if (verbose) message("\"n.vocab\" is not an integer")
return(FALSE)
}
if (!is.Date(chunks$start.date)){
if (verbose) message("\"start.date\" is not a Date object")
return(FALSE)
}
if (!is.Date(chunks$end.date)){
if (verbose) message("\"end.date\" is not a Date object")
return(FALSE)
}
if (!is.Date(chunks$memory)){
if (verbose) message("\"memory\" is not a Date object")
return(FALSE)
}
if (any(is.na(chunks$chunk.id))){
if (verbose) message("NA(s) in \"chunk.id\"")
return(FALSE)
}
if (any(is.na(chunks$n))){
if (verbose) message("NA(s) in \"n\"")
return(FALSE)
}
if (any(is.na(chunks$n.vocab))){
if (verbose) message("NA(s) in \"n.vocab\"")
return(FALSE)
}
if (any(is.na(chunks$start.date))){
if (verbose) message("NA(s) in \"start.date\"")
return(FALSE)
}
if (any(is.na(chunks$end.date))){
if (verbose) message("NA(s) in \"end.date\"")
return(FALSE)
}
if (length(dates) != sum(chunks$n)){
if (verbose) message("sum of \"n\" does not match number of texts")
return(FALSE)
}
if (length(vocab) != max(chunks$n.vocab)){
if (verbose) message("max of \"n.vocab\" does not match number of vocabularies")
return(FALSE)
}
if (is.unsorted(chunks$n.vocab)){
if (verbose) message("\"n.vocab\" is not monotonously increasing")
return(FALSE)
}
if (min(dates) < min(chunks$start.date)){
if (verbose) message("minimum of \"start.date\" is larger than minimum of text's dates")
return(FALSE)
}
if (max(dates) > max(chunks$end.date)){
if (verbose) message("maximum of \"end.date\" is smaller than maximum of text's dates")
return(FALSE)
}
if (verbose) message("checked")
if (verbose) message("param: ", appendLF = FALSE)
param = getParam(obj)
testNames = c("vocab.abs", "vocab.rel", "vocab.fallback", "doc.abs")
if (!test_list(param, types = c("numeric", "integer"), names = "named", any.missing = FALSE)){
if (verbose) message(check_list(param, types = c("numeric", "integer"), names = "named", any.missing = FALSE))
return(FALSE)
}
if (!test_set_equal(names(param), testNames)){
if (verbose) message(check_set_equal(names(param), testNames))
return(FALSE)
}
if (param$vocab.abs < 0){
if (verbose) message("\"vocab.abs\" is smaller than 0")
return(FALSE)
}
if (param$vocab.rel < 0){
if (verbose) message("\"vocab.rel\" is smaller than 0")
return(FALSE)
}
if (param$vocab.rel > 1){
if (verbose) message("\"vocab.rel\" is greater than 0")
return(FALSE)
}
if (param$vocab.fallback < 0){
if (verbose) message("\"vocab.fallback\" is smaller than 0")
return(FALSE)
}
if (param$doc.abs < 0){
if (verbose) message("\"doc.abs\" is smaller than 0")
return(FALSE)
}
if (verbose) message("checked")
return(TRUE)
}
print.RollingLDA = function(x, ...){
elements = paste0("\"", names(which(!sapply(x, is.null))), "\"")
cat(
"RollingLDA Object named \"", getID(x),
"\" with elements\n", paste0(elements, collapse = ", "), "\n ",
nrow(getChunks(x)), " Chunks with Texts from ",
as.character(min(getDates(x))), " to ", as.character(max(getDates(x))),
"\n ", paste0(paste0(names(getParam(x)), ": ",
unlist(getParam(x))), collapse = ", "),
"\n\n", sep = "")
print(getLDA(x))
}
|
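# Tokenize text into character shingles: overlapping character n-grams of
# lengths n_min through n, built from tokenize_characters() output.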
tokenize_character_shingles <- function(x,
n = 3L,
n_min = n,
lowercase = TRUE,
strip_non_alphanum = TRUE,
simplify = FALSE) {
UseMethod("tokenize_character_shingles")
}
tokenize_character_shingles.data.frame <-
function(x,
n = 3L,
n_min = n,
lowercase = TRUE,
strip_non_alphanum = TRUE,
simplify = FALSE) {
x <- corpus_df_as_corpus_vector(x)
tokenize_character_shingles(x, n, n_min, lowercase, strip_non_alphanum, simplify)
}
tokenize_character_shingles.default <-
function(x,
n = 3L,
n_min = n,
lowercase = TRUE,
strip_non_alphanum = TRUE,
simplify = FALSE) {
check_input(x)
named <- names(x)
if (n < n_min || n_min <= 0)
stop("n and n_min must be integers, n_min must not be greater than n, ",
"and n_min must be greater than 0.")
chars <- tokenize_characters(x, lowercase = lowercase,
strip_non_alphanum = strip_non_alphanum)
out <-
generate_ngrams_batch(
chars,
ngram_min = n_min,
ngram_max = n,
stopwords = "",
ngram_delim = ""
)
if (!is.null(named))
names(out) <- named
simplify_list(out, simplify)
}
|
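# nhppSpike: inject nSpike simulated copy-number aberrations into a smoothed
# rate curve. Spike widths are negative-binomial, gains/losses are drawn
# uniformly from [minGain, maxGain] / [minLoss, maxLoss] (gain with
# probability pGain), and the spiked rates are rescaled to keep the original
# mean intensity.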
nhppSpike <-
function(smoothRates, nSpike=25, cptLenR=4, cptLenMean=10, minGain=1.5, maxGain=10, minLoss=0.01, maxLoss=0.5, pGain=0.6) {
grid.mid = smoothRates$x
spikeRate = smoothRates$y
gridSize = grid.mid[2] - grid.mid[1]
grid.fix = grid.mid - gridSize/2
nGrid = length(grid.fix)
gridL = sample(1:nGrid, nSpike)
cptGridLen = rnbinom(nSpike, size=cptLenR, mu=cptLenMean)
gridR = gridL+cptGridLen
gridR[gridR>nGrid] = nGrid
relCN = sample(0:1, nSpike, replace=TRUE, prob=c(1-pGain, pGain))
relCN[1] = 0
relCN[2] = 1
relCN[relCN==0] = runif(sum(relCN==0), min=minLoss, max=maxLoss)
relCN[relCN==1] = runif(sum(relCN==1), min=minGain, max=maxGain)
spikeMat = cbind(gridL, gridR, grid.fix[gridL], grid.fix[gridR]+gridSize, relCN)
colnames(spikeMat) = c("gridL", "gridR", "readL", "readR", "relCN")
for(i in 1:nSpike) {
spikeRate[gridL[i]:gridR[i]] = spikeRate[gridL[i]:gridR[i]]*relCN[i]
}
spikeRate = spikeRate/mean(spikeRate)*mean(smoothRates$y)
caseRates = list(x=grid.mid, y=spikeRate)
return(list(spikeMat = spikeMat, caseRates=caseRates))
}
|
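# busca_cep: look up a Brazilian postal code (CEP) via a token-authenticated
# API; "base_url", "msg" and parse_api() are package-internal objects defined
# elsewhere in the package.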
busca_cep <- function(cep = "01001000", token = NULL) {
if (nchar(cep) != 8) {
stop("O cep deve ter 8 digitos.")
}
if (is.null(token)) {
stop(msg)
}
url <- paste0(base_url, "cep?cep=", cep)
auth <- paste0("Token token=", token)
r <- GET(url, add_headers(Authorization = auth)) %>% content("parsed")
CEP <- parse_api(r)
return(CEP)
}
|
source("tinytestSettings.R")
using(ttdo)
library(OmicNavigator)
testUrls <- c("http://somewhere.net", "https://secure.com/", "C:/path/to/file")
expect_identical_xl(
OmicNavigator:::isUrl(testUrls),
c(TRUE, TRUE, FALSE)
)
|
package_review <- function(path = ".", config = get_config()) {
cli_h1("docreview Results")
results <- list()
if (config$functions$active) {
function_checks <- config$functions
results$functions <- function_review(path, function_checks)
function_results_display(results$functions$details, function_checks)
}
if (config$vignettes$active) {
vignette_checks <- config$vignettes
results$vignettes <- vignette_review(path, vignette_checks)
vignette_results_display(results$vignettes$details, vignette_checks)
}
check_results(results, config)
}
check_results <- function(results, config) {
if (config$error_on_failure || config$error_on_warning) {
total_failures <- sum(map_dbl(results, "failures"))
total_warnings <- sum(map_dbl(results, "warnings"))
if (config$error_on_warning && total_warnings > 0) {
rlang::abort(
paste("\nFailures found by docreview:", total_failures, "\nWarnings found by docreview:", total_warnings),
call = NULL
)
}
if (config$error_on_failure && total_failures > 0) {
rlang::abort(
paste("\nFailures found by docreview:", total_failures),
call = NULL
)
}
}
invisible(results)
}
get_config <- function(config_path = system.file("configs/docreview.yml", package = "docreview", mustWork = TRUE)) {
read_yaml(config_path)
}
|
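# panorama: plot the temporal coverage ("longevity") of a collection of
# station series, one row per station, with yearly markers colored by the
# fraction of available data (or by the yearly mean when color.by.data = TRUE).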
panorama <-
function(collection, main, cut,
ylab.push.factor = 10,
cut.col = "darkred",
cut.lty = 1,
cut.lwd = 2,
col = "RoyalBlue",
col.ramp = c("red", "pink","blue"),
col.line = "gray30",
mar = c(5, 4+ylab.push.factor, 3, 2),
cex.axis = 0.8,
cex.yaxis = 0.7,
xlab = "Year",
color.by.data = FALSE,
...) {
if ( length(names(collection$catalog)) != 0 ) {
catalogo = list(collection$catalog)
datos = list(collection$data)
} else {
catalogo = collection$catalog
datos = collection$data
}
if ( length(catalogo) != length(datos) ) { stop("Collection: catalog and data lengths differ.") }
disponibles <- function(x) { return( start(x)[1] : end(x)[1] ) }
n = length(catalogo)
xcol = col
colf = function(x) { colorRamp(col.ramp)(x) }
# "datos" is a local variable, not a formal argument, so missing() would fail here
if ( ! is.null(datos) ) {
dpa = list()
xcol = list()
for ( k in 1 : n ) {
s = start(datos[[k]])
e = end(datos[[k]])
f = frequency(datos[[k]])
kk = 1
an = array()
for ( a in s[1] : e[1] ) {
if ( color.by.data == FALSE ) {
an[[kk]] = sum(!is.na(window(datos[[k]], start=c(a,1), end=c(a, f), extend=T))) / f
} else {
an[[kk]] = mean((window(datos[[k]], start=c(a,1), end=c(a, f), extend=T)), na.rm=TRUE)
}
dpa[[k]] = an
kk = kk + 1
}
xcol[[k]] = rgb(colf(an)/255)
}
}
dis = unlist(lapply(datos, function(x) { c(start(x)[1], end(x)[1]) } ))
ylabs = unlist(lapply(catalogo, function(x) { x$Name } ))
xdat = range(dis)
xlim = xdat + c(0, 1)
xlim.names = c(0, 0)
ylim.names = c(0, 4)
ylim = c(0.5, n)
old.par <- par(no.readonly = TRUE)
on.exit(par(old.par))
layout(1)
par(bty="n", mar = mar, ...)
plot(axes=F, xdat, ylim+ylim.names,type="n", xlab=xlab,ylab=NA, xlim=xlim+xlim.names, ylim=ylim+ylim.names)
abline(h=seq(1,n,2), col=col.line, lty=3, lwd=1)
if (!missing(cut)) { abline(v=cut, col=cut.col, lty=cut.lty, lwd=cut.lwd) }
points(disponibles(datos[[1]]), rep(1, length(disponibles(datos[[1]]))), pch=22, bg=xcol[[1]])
text(xdat[1]+5, ylim[2]-1.5+ylim.names[2],labels="Available data", pos=3, cex=0.85)
points(xdat[1], ylim[2]-2+ylim.names[2], pch=22, bg=rgb(colf(1)/255))
text(xdat[1], ylim[2]-2+ylim.names[2],labels="100%", pos=4, cex=0.85)
points(xdat[1]+5, ylim[2]-2+ylim.names[2], pch=22, bg=rgb(colf(0.5)/255))
text(xdat[1]+5, ylim[2]-2+ylim.names[2],labels="50%", pos=4, cex=0.85)
points(xdat[1]+10, ylim[2]-2+ylim.names[2], pch=22, bg=rgb(colf(0.0)/255))
text(xdat[1]+10, ylim[2]-2+ylim.names[2],labels="0%", pos=4, cex=0.85)
if ( n > 1 ) {
for ( f in 2:n ) {
if (is.null(datos)) {
points(disponibles(datos[[f]]), rep(f, length(disponibles(datos[[f]]))), pch=22, bg=xcol, type="p")
} else {
points(disponibles(datos[[f]]), rep(f, length(disponibles(datos[[f]]))), pch=22, bg=xcol[[f]], type="p")
}
}
}
axis(1)
axis(2, 1:n, ylabs, hadj=1, las=1,tick=F, cex.axis=cex.yaxis)
axis(4, 1:n, 1:n, las=1,tick=F, hadj=0.5, col.axis=col.line, cex.axis=cex.yaxis)
if (missing(main)) {
main="Longevity of stations"
}
title(main=main)
invisible()
}
|
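# Goodness-of-fit tests for Benford's law on first significant digits.
# Each *.benftest() function returns an object of class "htest"; p-values are
# asymptotic or simulated from the null distribution via simulateH0().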
chisq.benftest<-function(x=NULL,digits=1,pvalmethod="asymptotic",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("asymptotic", "simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
chi_square<-n*sum((rel_freq_of_digits-rel_freq_of_digits_H0)^2/rel_freq_of_digits_H0)
if(pvalmethod==1)
{
pval<-1-pchisq(chi_square,df=length(signifd.seq(digits))-1)
}
if(pvalmethod==2)
{
dist_chisquareH0<-simulateH0(teststatistic="chisq",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_chisquareH0<=chi_square)/length(dist_chisquareH0)
}
RVAL <- list(statistic = c(chisq = chi_square), p.value = pval, method = "Chi-Square Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
ks.benftest<-function(x=NULL,digits=1,pvalmethod="simulate",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
cum_sum_Ds<-cumsum(rel_freq_of_digits)-cumsum(rel_freq_of_digits_H0)
K_S_D<-max(max(cum_sum_Ds),abs(min(cum_sum_Ds)))*sqrt(n)
if(pvalmethod==1)
{
dist_K_S_D_H0<-simulateH0(teststatistic="ks",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_K_S_D_H0<=K_S_D)/length(dist_K_S_D_H0)
}
RVAL <- list(statistic = c(D = K_S_D), p.value = pval, method = "K-S Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
mdist.benftest<-function(x=NULL,digits=1,pvalmethod="simulate",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
m_star<-sqrt(n)*max(abs(rel_freq_of_digits-rel_freq_of_digits_H0))
if(pvalmethod==1)
{
dist_m_star_H0<-simulateH0(teststatistic="mdist",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_m_star_H0<=m_star)/length(dist_m_star_H0)
}
RVAL <- list(statistic = c(m_star = m_star), p.value = pval, method = "Chebyshev Distance Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
edist.benftest<-function(x=NULL,digits=1,pvalmethod="simulate",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
d_star<-sqrt(n)*sqrt(sum((rel_freq_of_digits-rel_freq_of_digits_H0)^2))
if(pvalmethod==1)
{
dist_d_star_H0<-simulateH0(teststatistic="edist",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_d_star_H0<=d_star)/length(dist_d_star_H0)
}
RVAL <- list(statistic = c(d_star = d_star), p.value = pval, method = "Euclidean Distance Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
usq.benftest<-function(x=NULL,digits=1,pvalmethod="simulate",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
cum_sum_Ds<-cumsum(rel_freq_of_digits-rel_freq_of_digits_H0)
U_square<-(n/length(rel_freq_of_digits))*(sum(cum_sum_Ds^2)-((sum(cum_sum_Ds)^2)/length(rel_freq_of_digits)))
if(pvalmethod==1)
{
dist_U_square_H0<-simulateH0(teststatistic="usq",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_U_square_H0<=U_square)/length(dist_U_square_H0)
}
RVAL <- list(statistic = c(U_square = U_square), p.value = pval, method = "Freedman-Watson U-squared Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
meandigit.benftest<-function(x=NULL,digits=1,pvalmethod="asymptotic",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("asymptotic", "simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
mu_emp<-mean(first_digits)
mu_bed<-sum(signifd.seq(digits)*pbenf(digits))
var_bed<-sum(((signifd.seq(digits)-mu_bed)^2)*pbenf(digits))
a_star<-abs(mu_emp-mu_bed)/(max(signifd.seq(digits))-mu_bed)
if(pvalmethod==1)
{
pval<-(1-pnorm(a_star,mean=0,sd=sqrt(var_bed/n)/(max(signifd.seq(digits))-mu_bed)))*2
}
if(pvalmethod==2)
{
dist_a_star_H0<-simulateH0(teststatistic="meandigit",n=n,digits=digits,pvalsims=pvalsims)
pval<-1-sum(dist_a_star_H0<=a_star)/length(dist_a_star_H0)
}
RVAL <- list(statistic = c(a_star = a_star), p.value = pval, method = "Judge-Schechter Normed Deviation Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
jpsq.benftest<-function(x=NULL,digits=1,pvalmethod="simulate",pvalsims=10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("simulate"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
J_stat_squ<-cor(rel_freq_of_digits,rel_freq_of_digits_H0)
J_stat_squ<-sign(J_stat_squ)*(J_stat_squ^2)
if(pvalmethod==1)
{
dist_J_stat_H0<- simulateH0(teststatistic="jpsq",n=n,digits=digits,pvalsims=pvalsims)
pval<-sum(dist_J_stat_H0<=J_stat_squ)/length(dist_J_stat_H0)
}
RVAL <- list(statistic = c(J_stat_squ = J_stat_squ), p.value = pval, method = "JP-Square Correlation Statistic Test for Benford Distribution",
data.name = deparse(substitute(x)))
class(RVAL) <- "htest"
return(RVAL)
}
jointdigit.benftest<-function(x = NULL, digits = 1, eigenvalues="all", tol = 1e-15, pvalmethod = "asymptotic", pvalsims = 10000)
{
if(!is.numeric(x)){stop("x must be numeric.")}
pvalmethod <- pmatch(pvalmethod, c("asymptotic"))
if (is.na(pvalmethod)){stop("invalid 'pvalmethod' argument")}
if((length(pvalsims)!=1)){stop("'pvalsims' argument takes only single integer!")}
if((length(digits)!=1)){stop("'digits' argument takes only single integer!")}
decompose=TRUE
first_digits<-signifd(x,digits)
n<-length(first_digits)
freq_of_digits<-table(c(first_digits,signifd.seq(digits)))-1
rel_freq_of_digits<-freq_of_digits/n
rel_freq_of_digits_H0<-pbenf(digits)
covariance_matrix<-outer(rel_freq_of_digits_H0,rel_freq_of_digits_H0,"*")*-1
diag(covariance_matrix)<-rel_freq_of_digits_H0*(1-rel_freq_of_digits_H0)
if(decompose)
{
eigenval_vect<-eigen(covariance_matrix,symmetric = TRUE)
eigenval_vect_result<-eigenval_vect
eigen_to_keep<-abs(eigenval_vect$values)>tol
eigenval_vect$values<-eigenval_vect$values[eigen_to_keep]
eigenval_vect$vectors<-eigenval_vect$vectors[,eigen_to_keep]
if(length(eigenvalues)>0)
{
if(is.character(eigenvalues))
{
if(length(eigenvalues)==1)
{
eigenvalues <- pmatch(tolower(eigenvalues), c("all","kaiser"))
if(eigenvalues == 1)
{
eigen_to_keep<-1:length(eigenval_vect$values)
}
if(eigenvalues == 2)
{
eigen_to_keep<-which(eigenval_vect$values>=mean(eigenval_vect$values))
}
}
else
{stop("Error: 'is.character(eigenvalues) && length(eigenvalues)!=1', use only one string!")}
}
else
{
if(is.numeric(eigenvalues)&all(eigenvalues>=0,na.rm = TRUE))
{
eigen_to_keep<-eigenvalues[!is.na(eigenvalues)]
eigen_to_keep<-eigen_to_keep[eigen_to_keep<=length(eigenval_vect$values)]
if(length(eigen_to_keep)<=0)
{stop("Error: No eigenvalues remain.")}
}
else
{stop("Error: non string value for eigenvalues must numeric vector of eigenvalue indexes! No negative indexing allowed.")}
}
}else{stop("Error: 'length(eigenvalues)<=0'!")}
eigenval_vect$values<-eigenval_vect$values[eigen_to_keep]
eigenval_vect$vectors<-eigenval_vect$vectors[,eigen_to_keep]
principal_components<-rel_freq_of_digits%*%eigenval_vect$vectors
true_components_means<-rel_freq_of_digits_H0%*%eigenval_vect$vectors
if(length(eigenval_vect$values)==1)
{
hotelling_T<-(n/eigenval_vect$values)*((principal_components-true_components_means)^2)
}else{
hotelling_T<-n*(principal_components-true_components_means)%*%solve(diag(eigenval_vect$values))%*%t(principal_components-true_components_means)
}
deg_free<-length(principal_components)
}else{
hotelling_T<-n*(rel_freq_of_digits-rel_freq_of_digits_H0)%*%solve(covariance_matrix)%*%t(rel_freq_of_digits-rel_freq_of_digits_H0)
deg_free<-length(rel_freq_of_digits)
}
if(pvalmethod==1)
{
pval<-1-pchisq(q = hotelling_T,df = deg_free)
}
# no simulated p-value branch: pmatch() above only accepts "asymptotic"
RVAL <- list(statistic = c(Tsquare = hotelling_T), p.value = pval, method = "Joint Digits Test",
data.name = deparse(substitute(x)),eigenvalues_tested=eigen_to_keep,eigen_val_vect=eigenval_vect_result)
class(RVAL) <- "htest"
return(RVAL)
}
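# Helpers: signifd() extracts the first `digits` significant digits of |x|;
# signifd.seq() enumerates the possible leading sequences (1:9, 10:99, ...);
# pbenf()/qbenf() give the Benford pmf and cdf over those sequences; rbenf()
# draws Benford-distributed values as 10^U with U ~ Uniform(0, 1).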
signifd<-function(x=NULL, digits=1)
{
if(!is.numeric(x)){stop("x needs to be numeric.")}
x<-abs(x)
return(trunc((10^((floor(log10(x))*-1)+digits-1))*x))
}
signifd.seq<-function(digits=1)
{return(seq(from=10^(digits-1),to=(10^(digits))-1))}
qbenf<-function(digits=1)
{
return(cumsum(pbenf(digits)))
}
pbenf<-function(digits=1)
{
pbenf_for_seq<-function(leaddigit=10)
{
return(log10(1+(1/leaddigit)))
}
benf_table<-table(signifd.seq(digits))-1
benf_table<-benf_table+sapply(signifd.seq(digits),FUN=pbenf_for_seq)
return(benf_table)
}
rbenf<-function(n)
{
return(10^(runif(n)))
}
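# simulateH0: simulate the null distribution of the chosen test statistic
# under Benford's law; the sampling is done in compiled C routines
# (.C interface) shipped with the package.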
simulateH0<-function(teststatistic="chisq",n=10,digits=1,pvalsims=10)
{
teststatistic<-match.arg(arg = teststatistic, choices = c("chisq","edist","jpsq","ks","mdist","meandigit","usq"), several.ok = FALSE)
if(teststatistic=="chisq")
{
H0_chi_square<-rep(0,pvalsims)
H0_chi_square<- .C("compute_H0_chi_square", H0_chi_square = as.double(H0_chi_square), digits = as.integer(digits),
pbenf = as.double(pbenf(digits)),qbenf=as.double(qbenf(digits)),n = as.integer(n),
n_sim=as.integer(pvalsims))$H0_chi_square
return(H0_chi_square)
}
if(teststatistic=="edist")
{
H0_dstar <- rep(0, pvalsims)
H0_dstar <- .C("compute_H0_dstar", H0_dstar = as.double(H0_dstar),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_dstar
return(H0_dstar)
}
if(teststatistic=="jpsq")
{
H0_J_stat <- rep(0, pvalsims)
H0_J_stat <- .C("compute_H0_J_stat", H0_J_stat = as.double(H0_J_stat),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_J_stat
return(H0_J_stat)
}
if(teststatistic=="ks")
{
H0_KSD <- rep(0, pvalsims)
H0_KSD <- .C("compute_H0_KSD", H0_KSD = as.double(H0_KSD),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_KSD
return(H0_KSD)
}
if(teststatistic=="mdist")
{
H0_mstar <- rep(0, pvalsims)
H0_mstar <- .C("compute_H0_mstar", H0_mstar = as.double(H0_mstar),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_mstar
return(H0_mstar)
}
if(teststatistic=="meandigit")
{
H0_astar <- rep(0, pvalsims)
H0_astar <- .C("compute_H0_astar", H0_astar = as.double(H0_astar),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_astar
return(H0_astar)
}
if(teststatistic=="usq")
{
H0_U_square <- rep(0, pvalsims)
H0_U_square <- .C("compute_H0_U_square", H0_U_square = as.double(H0_U_square),
digits = as.integer(digits), pbenf = as.double(pbenf(digits)),
qbenf = as.double(qbenf(digits)), n = as.integer(n),
n_sim = as.integer(pvalsims))$H0_U_square
return(H0_U_square)
}
}
signifd.analysis<-function(x=NULL,digits=1,graphical_analysis=TRUE,freq=FALSE,alphas=20,tick_col="red",ci_col="darkgreen",ci_lines=c(.05))
{
if(length(alphas)==1)
{
if(alphas>1)
{
alphas=seq(from=0,to=.5,length.out=alphas+2)[-c(1,alphas+2)]
}
}
n<-length(x)
first_digits<-signifd(x, digits)
pdf_benf<-pbenf(digits)
freq_of_digits <- table(c(first_digits, signifd.seq(digits))) - 1
E_vals<-pdf_benf*n
Var_vals<-pdf_benf*n*(1-pdf_benf)
Cov_vals<-outer(pdf_benf,pdf_benf)*-1*n
diag(Cov_vals)<-Var_vals
pval<-rep(0,length(pdf_benf))
for(i in 1:length(pval))
{
pval[i]<-pnorm(q=freq_of_digits[i],mean=E_vals[i],sd=sqrt(Var_vals[i]))
if(pval[i]>.5)
{pval[i]<- (1- pval[i])*2}else{pval[i]<- pval[i]*2}
}
if(graphical_analysis)
{
mids<-seq(from=0,to=1,length.out=length(E_vals)+2)
ci_line_length<-(mids[2]-mids[1])*(2/5)
mids<-mids[-c(1,length(mids))]
numformat <- function(val,trailing=4) { sub("^(-?)0.", "\\1.", sprintf(paste(sep="","%.",trailing,"f"), val)) }
trailing<-0
ci_cols<-colorRampPalette(colors=c("white",ci_col),interpolate="linear")(length(alphas)+1)[-1]
ci_cols<-c(ci_cols,rev(ci_cols))
alphas<-c(alphas/2,0.5,rev(1-(alphas/2)))
cis<-sapply(alphas,FUN=qnorm,mean=E_vals,sd=sqrt(Var_vals))
CIs=t(cis)
colnames(CIs)<-signifd.seq(digits)
rownames(CIs)<-alphas
if(!freq)
{
cis<-cis/n
freq_of_digits<-freq_of_digits/n
cis[cis>1]<-1
trailing<-4
}
results<-list(summary=rbind(freq=freq_of_digits,pvals=pval),CIs=CIs)
cis[cis<0]<-0
lr_mid<-cbind(mids-ci_line_length,mids+ci_line_length)
plot(x=0,y=0,xlim=c(0,1),ylim=c(0,max(cis)*1.3),type="n",axes=FALSE,xlab="summary",ylab="")
for(i in 1:dim(cis)[1])
{
for(j in 1:(dim(cis)[2]-1))
{
polygon(x=lr_mid[i,c(1,1,2,2)],y=cis[i,c(j,j+1,j+1,j)],col=ci_cols[j],border=FALSE)
}
}
dim_cis<-dim(cis)
dim(cis)<-NULL
posy<-seq(from=0,to=max(cis)*1.3,length.out=10)
axis(side=2,at=round(posy-5*(10^-(digits+1)),digits),las=1)
if(any(ci_lines!=FALSE))
{
if((!is.logical(ci_lines))&(all(ci_lines<1)&all(ci_lines>0)))
{
ci_lines<-c(ci_lines/2,0.5,rev(1-(ci_lines/2)))
cis<-sapply(ci_lines,FUN=qnorm,mean=E_vals,sd=sqrt(Var_vals))
if(!freq)
{cis<-cis/n}
CIs=t(cis)
colnames(CIs)<-signifd.seq(digits)
rownames(CIs)<-ci_lines
results$CIs<-CIs
dim_cis<-dim(cis)
dim(cis)<-NULL
if(!freq)
{cis[cis>1]<-1}
cis[cis<0]<-0
j<-1
for(i in 1:length(cis))
{
lines(lr_mid[j,],rep(cis[i],2))
if(j==dim(lr_mid)[1])
{j<-1}
else
{j<-j+1}
}
}
else
{
j<-1
for(i in 1:length(cis))
{
lines(lr_mid[j,],rep(cis[i],2))
if(j==dim(lr_mid)[1])
{j<-1}
else
{j<-j+1}
}
}
}
points(mids,freq_of_digits,col=tick_col,pch=3)
if(digits==1)
{
mtext(c("digit: ",names(E_vals)),side=1,line=0,at=c(-1*ci_line_length,mids))
if(freq){mtext(c("freq: ",numformat(freq_of_digits,trailing)),side=1,line=1,at=c(-1*ci_line_length,mids))}
else{mtext(c("rel freq: ",numformat(freq_of_digits,trailing)),side=1,line=1,at=c(-1*ci_line_length,mids))}
mtext(c("pval: ",numformat(pval)),side=1,line=2,at=c(-1*ci_line_length,mids))
}
dim(cis)<-dim_cis
abline(h=0)
}
if(!graphical_analysis)
{
if(any(ci_lines!=FALSE)&(!is.logical(ci_lines))&(all(ci_lines<1)&all(ci_lines>0)))
{alphas<-c(ci_lines/2,0.5,rev(1-(ci_lines/2)))}
else
{alphas<-c(alphas/2,0.5,rev(1-(alphas/2)))}
cis<-sapply(alphas,FUN=qnorm,mean=E_vals,sd=sqrt(Var_vals))
if(!freq)
{
cis<-cis/n
freq_of_digits<-freq_of_digits/n
}
CIs=t(cis)
colnames(CIs)<-signifd.seq(digits)
rownames(CIs)<-alphas
results<-list(summary=rbind(freq=freq_of_digits,pvals=pval),CIs=CIs)
return(results)
}
return(results)
}
|
context("Labellers")
test_that("label_bquote has access to functions in the calling environment", {
labels <- data.frame(lab = letters[1:2])
attr(labels, "facet") <- "wrap"
labeller <- label_bquote(rows = .(paste0(lab, ":")))
labels_calc <- labeller(labels)
expect_equal(labels_calc[[1]][[1]], "a:")
})
|
testDat <- read.csv("testDat.csv", stringsAsFactors = FALSE)
testTP <- createTimePoints(dat = testDat, experimentName = "testExp",
genotype = "Genotype", timePoint = "timepoints",
plotId = "pos", repId = "Replicate", rowNum = "y",
colNum = "x", addCheck = TRUE,
checkGenotypes = "check1")
testFitMod <- fitModels(testTP, trait = "t1", quiet = TRUE)
testFitMod2 <- fitModels(testTP, trait = "t1", geno.decomp = "repId",
quiet = TRUE)
testFitMod3 <- fitModels(testTP, trait = "t1", useCheck = TRUE, quiet = TRUE)
if (at_home()) {
testFitModAs <- fitModels(testTP, trait = "t1", engine = "asreml",
quiet = TRUE)
testFitModAs2 <- fitModels(testTP, trait = "t1", engine = "asreml",
spatial = TRUE, quiet = TRUE)
}
tmpFile <- tempfile(fileext = ".pdf")
expect_error(plot(testFitMod, plotType = "test"), "should be one of")
expect_error(plot(testFitMod, title = 1), "title should be NULL or a character")
expect_error(plot(testFitMod, plotType = "rawPred", genotypes = 1),
"genotypes should be NULL or a character vector")
expect_error(plot(testFitMod, plotType = "rawPred", genotypes = "g1"),
"All genotypes should be in testFitMod")
expect_silent(p0 <- plot(testFitMod, plotType = "rawPred", outFile = tmpFile))
expect_inherits(p0, "list")
expect_equal(length(p0), 1)
expect_inherits(p0[[1]], "ggplot")
geoms0 <- sapply(p0[[1]]$layers, function(x) class(x$geom)[1])
expect_equal(geoms0, c("GeomPoint", "GeomPoint"))
expect_silent(p1 <- plot(testFitMod[1], plotType = "rawPred",
outFile = tmpFile))
geoms1 <- sapply(p1[[1]]$layers, function(x) class(x$geom)[1])
expect_equal(geoms1, c("GeomPoint", "GeomPoint"))
expect_silent(p2 <- plot(testFitMod, plotType = "rawPred", genotypes = "G12",
outFile = tmpFile))
nCol <- ggplot2::ggplot_build(p2[[1]])$layout$facet$params$ncol
nRow <- ggplot2::ggplot_build(p2[[1]])$layout$facet$params$nrow
expect_equal(nRow, 1)
expect_equal(nCol, 1)
expect_silent(p3 <- plot(testFitMod2, plotType = "rawPred", outFile = tmpFile))
expect_silent(p4 <- plot(testFitMod3, plotType = "rawPred", outFile = tmpFile))
expect_silent(p5 <- plot(testFitMod3, plotType = "rawPred", plotChecks = TRUE,
outFile = tmpFile))
expect_equal(nrow(p4[[1]]$data), 105)
expect_equal(nrow(p5[[1]]$data), 125)
if (at_home() && FALSE) {
expect_silent(plot(testFitModAs, plotType = "rawPred"))
}
expect_error(plot(testFitMod, plotType = "corrPred", genotypes = 1),
"genotypes should be NULL or a character vector")
expect_error(plot(testFitMod, plotType = "corrPred", genotypes = "g1"),
"All genotypes should be in testFitMod")
expect_silent(p0 <- plot(testFitMod, plotType = "corrPred", outFile = tmpFile))
expect_inherits(p0, "list")
expect_equal(length(p0), 1)
expect_inherits(p0[[1]], "ggplot")
geoms0 <- sapply(p0[[1]]$layers, function(x) class(x$geom)[1])
expect_equal(geoms0, c("GeomPoint", "GeomPoint"))
expect_silent(p1 <- plot(testFitMod[1], plotType = "corrPred",
outFile = tmpFile))
geoms1 <- sapply(p1[[1]]$layers, function(x) class(x$geom)[1])
expect_equal(geoms1, c("GeomPoint", "GeomPoint"))
expect_silent(p2 <- plot(testFitMod, plotType = "corrPred", genotypes = "G12",
outFile = tmpFile))
nCol <- ggplot2::ggplot_build(p2[[1]])$layout$facet$params$ncol
nRow <- ggplot2::ggplot_build(p2[[1]])$layout$facet$params$nrow
expect_equal(nRow, 1)
expect_equal(nCol, 1)
expect_silent(p3 <- plot(testFitMod2, plotType = "corrPred", outFile = tmpFile))
expect_silent(p4 <- plot(testFitMod3, plotType = "corrPred", outFile = tmpFile))
expect_silent(p5 <- plot(testFitMod3, plotType = "corrPred", plotChecks = TRUE,
outFile = tmpFile))
expect_equal(nrow(p4[[1]]$data), 105)
expect_equal(nrow(p5[[1]]$data), 125)
if (at_home() && FALSE) {
expect_silent(plot(testFitModAs, plotType = "corrPred"))
}
expect_silent(p0 <- plot(testFitMod, plotType = "herit", outFile = tmpFile))
expect_inherits(p0, "ggplot")
geoms0 <- sapply(p0$layers, function(x) class(x$geom)[1])
expect_equal(geoms0, c("GeomPoint", "GeomLine"))
expect_silent(p1 <- plot(testFitMod[1], plotType = "herit", outFile = tmpFile))
geoms1 <- sapply(p1$layers, function(x) class(x$geom)[1])
expect_equal(geoms1, c("GeomPoint"))
expect_silent(p2 <- plot(testFitMod, plotType = "herit", yLim = c(0, 1),
outFile = tmpFile))
expect_equal(as.list(p2$scales$get_scales("y"))$limits, c(0, 1))
expect_silent(p3 <- plot(testFitMod2, plotType = "herit", outFile = tmpFile))
expect_silent(p0 <- plot(testFitMod, plotType = "effDim", outFile = tmpFile))
expect_inherits(p0, "ggplot")
geoms0 <- sapply(p0$layers, function(x) class(x$geom)[1])
expect_equal(geoms0, c("GeomPoint", "GeomLine"))
expect_silent(p1 <- plot(testFitMod[1], plotType = "effDim", outFile = tmpFile))
geoms1 <- sapply(p1$layers, function(x) class(x$geom)[1])
expect_equal(geoms1, c("GeomPoint"))
expect_silent(p2 <- plot(testFitMod, plotType = "effDim", yLim = c(0, 100),
outFile = tmpFile))
expect_equal(as.list(p2$scales$get_scales("y"))$limits, c(0, 100))
expect_error(plot(testFitMod, plotType = "effDim", EDType = "ED"),
"should be one of")
expect_silent(p3 <- plot(testFitMod, plotType = "effDim", EDType = "ratio",
outFile = tmpFile))
expect_equal(as.list(p3$scales$get_scales("y"))$limits, c(0, 0.777730459700606))
expect_silent(p4 <- plot(testFitMod, plotType = "effDim", whichED = "colId",
outFile = tmpFile))
expect_silent(p5 <- plot(testFitMod2, plotType = "effDim", outFile = tmpFile))
if (at_home() && FALSE) {
expect_error(plot(testFitModAs, plotType = "effDim"),
"only be plotted for models fitted with SpATS")
}
expect_silent(p0 <- plot(testFitMod, plotType = "variance", outFile = tmpFile))
expect_inherits(p0, "ggplot")
geoms0 <- sapply(p0$layers, function(x) class(x$geom)[1])
expect_equal(geoms0, c("GeomPoint", "GeomLine"))
expect_silent(p1 <- plot(testFitMod[1], plotType = "variance",
outFile = tmpFile))
geoms1 <- sapply(p1$layers, function(x) class(x$geom)[1])
expect_equal(geoms1, c("GeomPoint"))
expect_silent(p2 <- plot(testFitMod, plotType = "variance", yLim = c(0, 1e-3),
outFile = tmpFile))
expect_equal(as.list(p2$scales$get_scales("y"))$limits, c(0, 1e-3))
expect_silent(p3 <- plot(testFitMod2, plotType = "variance", outFile = tmpFile))
expect_silent(p0 <- plot(testFitMod, plotType = "spatial", outFile = tmpFile))
expect_inherits(p0, "list")
expect_equal(length(p0), 5)
expect_inherits(p0[[1]], "list")
expect_equal(length(p0[[1]]), 6)
expect_inherits(p0[[1]][[1]], "ggplot")
expect_error(plot(testFitMod, plotType = "spatial", spaTrend = "sTr"),
"should be one of")
expect_silent(p1 <- plot(testFitMod, plotType = "spatial",
spaTrend = "percentage", outFile = tmpFile))
expect_silent(plot(testFitMod3, plotType = "spatial", outFile = tmpFile))
expect_silent(p2 <- plot(testFitMod2, plotType = "spatial", outFile = tmpFile))
if (at_home()) {
expect_error(plot(testFitModAs, plotType = "spatial"),
"when setting spatial = TRUE when fitting the asreml models")
p3 <- plot(testFitModAs2, plotType = "spatial")
expect_equal(length(p3), 5)
}
tmpFile2 <- tempfile(fileext = ".gif")
expect_silent(p0 <- plot(testFitMod, plotType = "timeLapse",
outFile = tmpFile2))
expect_silent(p1 <- plot(testFitMod2, plotType = "timeLapse",
outFile = tmpFile2))
unlink(tmpFile)
unlink(tmpFile2)
|
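# rmsse: root mean squared scaled error. Out-of-sample errors are scaled by
# the in-sample mean absolute first difference (the naive one-step MAE).
# A minimal usage sketch with made-up data:
# set.seed(1); y <- cumsum(rnorm(30))
# rmsse(forecast = rep(y[20], 10), outsampletrue = y[21:30], insampletrue = y[1:20])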
rmsse <- function(forecast, outsampletrue, insampletrue)
{
if(length(forecast) != length(outsampletrue))
stop("RMSSE: the lengths of input vectors must be the same.")
n = length(insampletrue)
insamplerr = numeric(n - 1)
for(i in 1:(n - 1))
{
insamplerr[i] = abs(insampletrue[i+1] - insampletrue[i])
}
qt = (outsampletrue - forecast)/(sum(insamplerr)/(n - 1))
scalederror = sqrt(mean(qt^2))
return(round(scalederror, 6))
}
|
NULL
summary.lmerModLmerTest <- function(object, ...,
ddf=c("Satterthwaite", "Kenward-Roger", "lme4")) {
ddf <- match.arg(ddf)
if(!inherits(object, "lmerModLmerTest") && !inherits(object, "lmerMod")) {
stop("Cannot compute summary for objects of class: ",
paste(class(object), collapse = ", "))
}
if(!inherits(object, "lmerModLmerTest") && inherits(object, "lmerMod")) {
message("Coercing object to class 'lmerModLmerTest'")
object <- as_lmerModLmerTest(object)
if(!inherits(object, "lmerModLmerTest")) {
warning("Failed to coerce object to class 'lmerModLmerTest'")
return(summary(object))
}
}
summ <- summary(as(object, "lmerMod"), ...)
if(ddf == "lme4") return(summ)
summ$coefficients <- get_coefmat(object, ddf=ddf)
ddf_nm <- switch(ddf, "Satterthwaite" = "Satterthwaite's",
"Kenward-Roger" = "Kenward-Roger's")
summ$objClass <- class(object)
summ$methTitle <- paste0(summ$methTitle, ". t-tests use ", ddf_nm, " method")
class(summ) <- c("summary.lmerModLmerTest", class(summ))
summ
}
get_coefmat <- function(model, ddf=c("Satterthwaite", "Kenward-Roger")) {
ddf <- match.arg(ddf)
p <- length(fixef(model))
if(p < 1)
return(as.matrix(contest1D(model, numeric(0L), ddf=ddf)))
Lmat <- diag(p)
tab <- rbindall(lapply(1:p, function(i) contest1D(model, Lmat[i, ], ddf=ddf)))
rownames(tab) <- names(fixef(model))
as.matrix(tab)
}
|
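# Gibbs_LA_IYE: one Gibbs sweep for a Bayesian factor model with (adaptive)
# lasso shrinkage. Updates the loadings "la" row by row (structured entries
# from a normal prior, unstructured entries with lasso scales taul_sq /
# gammal_sq), updates the uniquenesses "psx", and re-imputes latent responses
# when missing or categorical data are present.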
Gibbs_LA_IYE <- function(y, mu, ome, la, psx, gammal_sq, thd, const, prior, alas) {
Q <- const$Q
J <- const$J
N <- const$N
K <- const$K
Jp <- const$Jp
Nmis <- const$Nmis
a_gamma <- prior$a_gaml_sq
b_gamma <- prior$b_gaml_sq
Pmean <- prior$m_LA
Sigla <- prior$s_LA
sub_sl <- const$sub_sl
len_sl <- const$len_sl
sub_ul <- const$sub_ul
len_ul <- const$len_ul
taul_sq <- gammal_sq
a_gams <- prior$a_gams
b_gams <- prior$b_gams
temp <- y - mu - la %*% ome
S <- temp %*% t(temp)
for (j in 1:J) {
subs <- sub_sl[j, ]
len <- len_sl[j]
if (len > 0)
{
yj <- y[j, ] - matrix(la[j, (!subs)], nrow = 1) %*% matrix(ome[(!subs), ], ncol = N)
yj <- as.vector(yj)
if (len == 1) {
omesub <- matrix(ome[subs, ], nrow = 1)
} else {
omesub <- ome[subs, ]
}
PSiginv <- diag(len) * Sigla
vtmp <- chol2inv(chol(tcrossprod(omesub)/psx[j, j] + PSiginv))
mtmp <- (omesub %*% yj/psx[j, j] + PSiginv %*% rep(Pmean,len))
la[j, subs] <- mvrnorm(1, vtmp %*% mtmp, Sigma = vtmp)
}
subs <- sub_ul[j, ]
len <- len_ul[j]
if (len > 0) {
yj <- y[j, ] - matrix(la[j, (!subs)], nrow = 1) %*% matrix(ome[(!subs), ], ncol = N)
yj <- as.vector(yj)
Cadj <- pmax((la[j, subs])^2, 10^(-6))
mu_p <- pmin(sqrt(gammal_sq[j, subs]/Cadj), 10^12)
taul_sq[j, subs] <- 1/rinvgauss1(len, mean = mu_p, dispersion = 1/gammal_sq[j, subs])
if(alas) gammal_sq[j, subs] <- rgamma(len, shape = a_gamma + 1, rate = b_gamma + taul_sq[j, subs]/2)
if (len == 1) {
omesub <- matrix(ome[subs, ], nrow = 1)
invD_tau <- 1/taul_sq[j, subs]
} else {
omesub <- ome[subs, ]
invD_tau <- diag(1/taul_sq[j, subs])
}
vtmp <- chol2inv(chol(tcrossprod(omesub)/psx[j, j] + invD_tau))
mtmp <- (omesub %*% yj/psx[j, j])
la[j, subs] <- mvrnorm(1, vtmp %*% mtmp, Sigma = vtmp)
tmp <- t(la[j, subs]) %*% invD_tau %*% la[j, subs]
psx[j, j] <- 1/rgamma(1, shape = a_gams + (N + len)/2 - 1, rate = b_gams + (S[j, j] +
tmp)/2)
} else {
psx[j, j] <- 1/rgamma(1, shape = a_gams + (N - 1)/2, rate = b_gams + (S[j, j])/2)
}
}
if(!alas)
gammal_sq[Q==-1]<- rgamma(1, shape=a_gamma+sum(Q==-1), rate=b_gamma + sum(taul_sq)/2)
if (Nmis > 0 || Jp > 0) {
ysta <- la %*% ome
spsxa <- sqrt(diag(psx))
ysa <- matrix(rnorm(N * J), J, N) + ysta/spsxa
ysa <- ysa/apply(ysa, 1, sd)
if (Jp > 0) {
pind <- const$cati
zind <- const$zind
ys <- ysa[pind, ]
acc <- ((ys > 0) ==(zind>1))
ys <- ys * acc + (1 - acc) * y[pind, ]
accr <- c( mean(acc, na.rm = T))
out <- list(la = la, gammal_sq = gammal_sq, ys = ys, thd = thd, accr = accr, psx = psx,
ysm = ysa)
} else {
out <- list(la = la, gammal_sq = gammal_sq, psx = psx, ysm = ysa)
}
} else {
out <- list(la = la, gammal_sq = gammal_sq, psx = psx)
}
return(out)
}
|
bhl_getpartmetadata <- function(partid, key = NULL, ...) {
args <- bhlc(list(op = "GetPartMetadata", apikey = check_key(key),
format = as_f("list"), id = partid))
bhl_GET("list", args, ...)
}
|
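# getOBO: parse an OBO ontology file into a TidySet. "key: value" lines are
# grouped into one record per term (each "id" starts a record), obsolete
# terms are dropped, and "is_a"/"xref" fields are split into set and
# cross-reference columns.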
getOBO <- function(x) {
data <- readLines(x)
n <- vapply(data, nchar, numeric(1L))
data <- data[n != 0]
kv0 <- strsplit(data, ": ", fixed = TRUE)
kv <- kv0[lengths(kv0) == 2]
k <- vapply(kv, "[", character(1L), i = 1)
v <- vapply(kv, "[", character(1L), i = 2)
d <- which(k == "id")
tk <- vector("list", length(d))
keys <- k[d[1]:length(k)]
df <- data.frame(matrix(ncol = length(unique(keys)), nrow = 0),
stringsAsFactors = FALSE
)
colnames(df) <- unique(keys)
for (i in seq_along(d)) {
if (i == length(d)) {
l <- seq(from = d[i], to = length(kv), by = 1)
} else {
l <- seq(from = d[i], to = d[i + 1] - 1, by = 1)
}
ch <- v[l]
names(ch) <- k[l]
keys <- unique(k[l])
m <- max(table(k[l]))
lr <- lapply(keys, function(a, y) {
rep_len(y[names(y) == a], m)
}, y = ch)
names(lr) <- keys
not_pres <- setdiff(colnames(df), keys)
sub_df <- as.data.frame(lr, stringsAsFactors = FALSE)
sub_df[, not_pres] <- NA
df <- rbind(df, sub_df)
}
if ("is_obsolete" %in% colnames(df)) {
df <- df[is.na(df[, "is_obsolete"]), ]
}
strs <- strsplit(df$is_a, " ! ")
df$sets <- vapply(strs, "[", character(1L), i = 1)
df$set_name <- vapply(strs, "[", character(1L), i = 2)
strs <- strsplit(df$xref, ":")
df$ref_origin <- vapply(strs, "[", character(1L), i = 1)
df$ref_code <- vapply(strs, "[", character(1L), i = 2)
df$fuzzy <- 1
colnames(df)[colnames(df) == "id"] <- "elements"
df <- df[!is.na(df$sets), ]
keep_columns <- setdiff(colnames(df), c("xref", "is_obsolete", "is_a"))
df <- df[, keep_columns]
tidySet.data.frame(df)
}
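# getGAF: read a GO annotation file (GAF) into a TidySet, dropping optional
# columns that are entirely NA and moving per-set / per-element metadata out
# of the relations table with move_to().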
getGAF <- function(x) {
df <- read.delim(x,
header = FALSE, comment.char = "!",
stringsAsFactors = FALSE
)
gaf_columns <- c(
"DB", "DB_Object_ID", "DB_Object_Symbol", "Qualifier",
"O_ID", "DB_Reference", "Evidence_Code", "With_From",
"Aspect", "DB_Object_Name", "DB_Object_Synonym",
"DB_Object_Type", "Taxon", "Date", "Assigned_By",
"Annotation_Extension", "Gene_Product_Form_ID"
)
colnames(df) <- gaf_columns
optional_columns <- c(4, 8, 10, 11, 16, 17)
remove <- apply(df[, optional_columns], 2, function(x) {
all(is.na(x))
})
if (any(remove)) { # guard: "-integer(0)" would select zero columns
df <- df[, -optional_columns[remove]]
}
GO <- grepl("^GO:", df$O_ID)
df$Aspect[GO] <- gsub("P", "BP", df$Aspect[GO])
df$Aspect[GO] <- gsub("C", "CC", df$Aspect[GO])
df$Aspect[GO] <- gsub("F", "MF", df$Aspect[GO])
elements <- c(1, 2, 3, 10, 11, 12, 13, 17)
sets <- c(5, 6, 9, 16)
colnames(df) <- gsub("O_ID", "sets", colnames(df))
colnames(df) <- gsub("DB_Object_Symbol", "elements", colnames(df))
TS <- tidySet(df)
columns_gaf <- function(names, originals) {
names[names %in% originals]
}
sets_columns <- columns_gaf(gaf_columns[sets], colnames(df))
nColm <- vapply(sets_columns, function(x) {
nrow(unique(df[, c("sets", x)]))
}, numeric(1))
sets_columns <- sets_columns[nColm <= length(unique(df$sets))]
elements_columns <- columns_gaf(gaf_columns[elements], colnames(df))
nColm <- vapply(sets_columns, function(x) {
nrow(unique(df[, c("elements", x)]))
}, numeric(1))
elements_columns <- elements_columns[nColm <= length(unique(df$elements))]
TS <- move_to(TS, "relations", "sets", sets_columns)
TS <- move_to(TS, "relations", "elements", elements_columns)
TS
}
|
test_that("return argument pulls returns right piece of code", {
x <- expression(y <- eventReactive(input$button, {print(input$n)}))
code_as_call <- as.call(x)[[1]]
all_args <- full_argument_names(code_as_call[[3]])
expect_equal(
object = all_args,
expected = c("", "eventExpr", "valueExpr")
)
get_event <- return_inner_expression(code_as_call[[3]], "eventExpr")
get_value <- return_inner_expression(code_as_call[[3]], "valueExpr")
expect_equal(
object = as.call(get_event),
expected = as.call(quote(input$button))
)
expect_equal(
object = as.character(get_value)[[2]],
expected = "print(input$n)"
)
})
test_that("full_argument_names works", {
all_args <- c("", "pattern", "replacement", "x")
expect_equal(
object = full_argument_names(parse(text = "gsub(' ', '_', 'a b c')")[[1]]),
expected = all_args
)
expect_equal(
object = full_argument_names(expression(gsub(x = "a b c", " ", "_"))[[1]]),
expected = all_args[c(1,4,2,3)]
)
expect_equal(
object = full_argument_names(expression(gsub(x = "a b c", pat = " ", rep = "_"))[[1]]),
expected = all_args[c(1,4,2,3)]
)
})
|
library(spacetime)
data(air)
rural_PM10 = as(rural[1:5,], "data.frame")
rural_PM10$PM10[is.na(rural_PM10$PM10)] = 0
library(googleVis)
TimeLine <- gvisAnnotatedTimeLine(
rural_PM10,
datevar="time",
numvar="PM10",
idvar="sp.ID",
options=list(displayAnnotations=FALSE, width=900, height=600)
)
rural_PM10$Annotation = rural_PM10$Title = as.character(NA)
row = which(rural_PM10$sp.ID == "DEBE056" &
rural_PM10$time == as.Date("2003-12-31"))
row
rural_PM10[row, "Title"] = "DEBE056"
rural_PM10[row, "Annotation"] = "Period with missing values drawn as line"
summary(rural_PM10)
AnnoTimeLine <- gvisAnnotatedTimeLine(
rural_PM10,
datevar="time",
numvar="PM10",
idvar="sp.ID",
titlevar="Title", annotationvar="Annotation",
options=list(displayAnnotations=TRUE,
zoomStartTime = as.Date("2003-07-01"),
zoomEndTime = as.Date("2004-07-01"),
width=1200, height=600)
)
plot(AnnoTimeLine)
publish = FALSE
if (publish) {
fname = paste(tempdir(), "/", AnnoTimeLine$chartid, ".html", sep="")
target = "[email protected]:WWW/googleVis"
scpcmd = paste("scp", fname, target)
system(scpcmd)
}
r = rural_PM10[1:100,]
r$PM10[20:80] = NA
TimeLine <- gvisAnnotatedTimeLine(
r,
datevar="time",
numvar="PM10",
idvar="sp.ID",
options=list(displayAnnotations=FALSE, width=900, height=600)
)
plot(TimeLine)
LineChart = gvisLineChart(
r, "time", "PM10"
)
plot(LineChart)
r2 = as.data.frame(as(rural[6:10,"2008"], "xts"))
r2$time = as.Date(rownames(r2))
LineChart = gvisLineChart(
r2, "time", c("DEBE032", "DEHE046", "DEUB007", "DENW081", "DESH008"),
options = list(width = 1200, focusTarget = "category",
title = "PM10, 2008, for 5 German rural background stations",
vAxis.logScale = TRUE)
)
plot(LineChart)
stopifnot(!is.projected(rural@sp))
sp = rural@sp
coord = coordinates(sp)
df = data.frame(cc = paste(coord[,2], coord[,1], sep=":"),
name = rownames(coord),
stringsAsFactors = FALSE)
M2 <- gvisMap(df, "cc", "name",
options=list(showTip = TRUE, mapType = 'normal',
enableScrollWheel = TRUE, width = 1200, height = 700,
useMapTypeControl = TRUE))
plot(M2)
|
fbGetAds <- function(accounts_id = getOption("rfacebookstat.accounts_id"),
api_version = getOption("rfacebookstat.api_version"),
username = getOption("rfacebookstat.username"),
token_path = fbTokenPath(),
access_token = getOption("rfacebookstat.access_token")) {
if ( is.null(access_token) ) {
if ( Sys.getenv("RFB_API_TOKEN") != "" ) {
access_token <- Sys.getenv("RFB_API_TOKEN")
} else {
access_token <- fbAuth(username = username,
token_path = token_path)$access_token
}
}
  if ( inherits(access_token, "fb_access_token") ) {
access_token <- access_token$access_token
}
if ( is.null(accounts_id) ) {
message("...Loading your account list.")
accounts_id <- suppressMessages(fbGetAdAccounts()$id)
message("...Loading ads from ", length(accounts_id), " account", ifelse( length(accounts_id) > 1, "s", "" ))
}
rq_ids <- list()
out_headers <- list()
result <- list()
accounts_id <- ifelse(grepl("^act_", accounts_id), accounts_id, paste0("act_",accounts_id))
if ( length(accounts_id) > 1 ) {
pgbar <- TRUE
pb_step <- 1
pb <- txtProgressBar(pb_step,
length(accounts_id),
style = 3,
title = "Loading:",
label = "load" )
} else {
pgbar <- FALSE
}
for ( account_id in accounts_id ) {
url <- str_interp("https://graph.facebook.com/${api_version}/${account_id}/ads")
api_answer <- GET(url,
query = list(fields = "id,name,object_url,adlabels,adset_id,bid_amount,bid_type,campaign_id,account_id,configured_status,effective_status,creative",
limit = 1000,
filtering = "[{'field':'ad.delivery_info','operator':'NOT_IN','value':['stupid_filter']}]",
access_token = access_token))
rq_ids <- append(rq_ids, setNames(list(status_code(api_answer)), api_answer$headers$`x-fb-trace-id`))
out_headers <- append(out_headers, setNames(list(headers(api_answer)), api_answer$headers$`x-fb-trace-id`))
pars_answer <- content(api_answer, as = "parsed")
    if(!is.null(pars_answer$error)) {
      stop(pars_answer$error$message)
    }
if (length(pars_answer$data) == 0) {
if( pgbar ) {
pb_step <- pb_step + 1
setTxtProgressBar(pb, pb_step)
}
next
}
result <- append(result,
lapply( pars_answer$data, fbParserAds ))
while (!is.null(pars_answer$paging$`next`)) {
api_answer <- GET(pars_answer$paging$`next`)
pars_answer <- content(api_answer, as = "parsed")
result <- append(result,
lapply( pars_answer$data, fbParserAds ))
}
if (pgbar) Sys.sleep(0.2)
if( pgbar ) {
pb_step <- pb_step + 1
setTxtProgressBar(pb, pb_step)
}
}
result <- map_df(result, flatten)
attr(result, "request_ids") <- rq_ids
attr(result, "headers") <- out_headers
if( pgbar ) close(pb)
return(result)
}
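# Usage sketch (hypothetical account id; assumes a valid token is available via
# getOption("rfacebookstat.access_token"), the RFB_API_TOKEN env var, or fbAuth()):
# ads <- fbGetAds(accounts_id = "act_123456789")
# str(ads)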
|
expit1 <- function(lp,ref=1){
k = length(lp)+1
G = matrix(diag(k)[,-ref],k,k-1)
p = exp(G%*%lp); p = p/sum(p)
p = as.vector(p)
Der = (diag(p)-p%o%p)%*%G
out = list(p=p,Der=Der)
}
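## Worked example: expit1() is the inverse multinomial-logit map. With all
## k - 1 log-odds equal to zero (relative to reference category `ref`), the
## implied probabilities are uniform; `Der` is the Jacobian dp/dlp.
res <- expit1(lp = c(0, 0), ref = 1)
res$p    # 1/3 1/3 1/3
res$Der  # 3 x 2 Jacobian matrix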
|
dCorrs <- function(rho1, n1, rho2, n2, corrType = "pearson"){
  if (length(unique(c(length(rho1), length(n1), length(rho2), length(n2)))) != 1) stop("All of the input vectors must be the same length.")
zr1 = atanh(rho1)
zr2 = atanh(rho2)
if(corrType == "pearson"){
diff12 = (zr2 - zr1)/sqrt((1/(n1 - 3)) + (1/(n2 - 3)))
}
if(corrType == "spearman"){
diff12 = (zr2 - zr1)/sqrt((1.06/(n1 - 3)) + (1.06/(n2 - 3)))
}
return(diff12)
}
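## Worked example (illustrative): compare r = 0.30 (n = 100) with r = 0.50
## (n = 100). dCorrs() returns the Fisher z-statistic for the difference, so a
## two-sided p-value follows from the normal approximation.
z12 <- dCorrs(rho1 = 0.30, n1 = 100, rho2 = 0.50, n2 = 100)  # approx. 1.67
2 * pnorm(-abs(z12))                                         # approx. 0.095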
|
[
{
"title": "Not Just Normal… Gaussian",
"href": "http://www.cerebralmastication.com/2009/06/not-just-normal-gaussian/"
},
{
"title": "I like you and you like me…but what does it all mean. (Part 1)",
"href": "https://web.archive.org/web/https://mathewanalytics.com/2014/08/20/i-like-you-and-you-like-me-but-what-does-it-all-mean/"
},
{
"title": "Open Source/Science – drilling down to unknown depths in unexpected places",
"href": "https://web.archive.org/web/http://pineda-krch.com/2007/08/12/open-sourcescience-drilling-down-to-unknown-depths-in-unexpected-places/"
},
{
"title": "Learning to code in R",
"href": "http://www.quantumforest.com/2013/04/learning-to-code-in-r/"
},
{
"title": "sab-R-metrics: Intermediate Boxplots and Histograms",
"href": "http://princeofslides.blogspot.com/2011/01/sab-r-metrics-intermediate-boxplots-and.html"
},
{
"title": "New surveys show continued popularity of R",
"href": "http://blog.revolutionanalytics.com/2015/11/new-surveys-show-continued-popularity-of-r.html"
},
{
"title": "resizing plot panels to fit data distribution",
"href": "https://metvurst.wordpress.com/2013/03/04/resizing-plot-panels-to-fit-data-distribution/"
},
{
"title": "Who is the most complete athlete? – An insight with the Mahalanobis distance (sport & data analysis)",
"href": "http://rsnippets.blogspot.com/2012/09/plotting-watts-strogatz-model.html"
},
{
"title": "Interactive maps of Crime data in Greater London",
"href": "http://r-video-tutorial.blogspot.com/2015/05/interactive-maps-of-crime-data-in.html"
},
{
"title": "Local Council Spending Data – Time Series Charts",
"href": "https://blog.ouseful.info/2013/11/06/local-council-spending-data-time-series-charts/"
},
{
"title": "Missouri: Comparison of Registered Voter Counts to Census Voting Age Population",
"href": "https://web.archive.org/web/http://watchdoglabs.org/blog/2012/08/13/missouri-comparison-of-registered-voter-counts-to-census-voting-age-population/"
},
{
"title": "A must-read paper on statistical analysis of experimental data",
"href": "http://andrewgelman.com/2013/02/13/17262/"
},
{
"title": "analyze the national household travel survey (nhts) with r and monetdb",
"href": "http://www.asdfree.com/2014/03/analyze-national-household-travel.html"
},
{
"title": "Enhancements to the AzureML package to connect R to AzureML Studio",
"href": "http://blog.revolutionanalytics.com/2015/11/azureml-update.html"
},
{
"title": "Chances of making an NFL field goal",
"href": "http://blog.revolutionanalytics.com/2013/01/chances-of-making-an-nfl-field-goal.html"
},
{
"title": "Implementing the stochastic simulation algorithm in R",
"href": "https://web.archive.org/web/http://pineda-krch.com/2007/09/09/implementing-the-stochastic-simulation-algorithm-in-r/"
},
{
"title": "More on causes of death in Netherlands over the years",
"href": "http://wiekvoet.blogspot.com/2015/07/more-on-causes-of-death-in-netherlands.html"
},
{
"title": "A Few Tips for Writing an R Book",
"href": "http://yihui.name/en/2013/06/tips-for-writing-an-r-book/"
},
{
"title": "The virtues of incoherence?",
"href": "http://andrewgelman.com/2011/07/08/the_virtues_of/"
},
{
"title": "What’s Happening, Man?",
"href": "https://web.archive.org/web/http://www.znmeb.mobi/2009/10/14/whats-happening-man/"
},
{
"title": "How-to go parallel in R – basics + tips",
"href": "http://gforge.se/2015/02/how-to-go-parallel-in-r-basics-tips/"
},
{
"title": "rCharts version of d3 horizon",
"href": "http://timelyportfolio.blogspot.com/2013/07/rcharts-version-of-d3-horizon.html"
},
{
"title": "Measuring associations between non-numeric variables",
"href": "http://exploringdatablog.blogspot.com/2012/02/measuring-associations-between-non.html"
},
{
"title": "\"R\": PLS Regression (Gasoline) – 003",
"href": "http://nir-quimiometria.blogspot.com/2012/02/pls-regression-in-r-gasoline.html"
},
{
"title": "ThinkStats … in R :: Example/Chapter 2 :: Example 2.1-2.3",
"href": "http://rud.is/b/2012/03/14/thinkstats-in-r-examplechapter-2-example-2-1-2-3/"
},
{
"title": "using the httr package to retrieve data from apis in R",
"href": "http://www.numbrcrunch.com/blog/using-the-httr-package-to-retrieve-data-from-apis-in-r"
},
{
"title": "Mickey Mouse Models",
"href": "https://web.archive.org/web/http://mickeymousemodels.blogspot.com/2011/04/mickey-mouse-models.html"
},
{
"title": "FALSE: Clinton Funded by \"Grassroots\"",
"href": "http://www.econometricsbysimulation.com/2016/03/false-clintons-funded-by-grassroots.html"
},
{
"title": "A twitter feed for new R packages",
"href": "http://blog.revolutionanalytics.com/2011/01/a-twitter-feed-for-new-r-packages.html"
},
{
"title": "Chicago Half Marathon 2010",
"href": "http://dirk.eddelbuettel.com/blog/2010/09/12/"
},
{
"title": "RStudio presents Essential Tools for Data Science with R",
"href": "https://blog.rstudio.org/2014/07/16/rstudio-presents-essential-tools-for-data-science-with-r/"
},
{
"title": "structure and uncertainty, Bristol, Sept. 26",
"href": "https://xianblog.wordpress.com/2012/09/27/structure-and-uncertainty-bristol-sept-26/"
},
{
"title": "NLP on NPR’s Commencement Addresses",
"href": "http://data-steve.github.io/nlp-on-commencement-addresses/"
},
{
"title": "Ramarro: “R for Developers” free (web) book",
"href": "http://www.milanor.net/blog/ramarro-r-for-developers-free-web-book/"
},
{
"title": "d3-ify Systematic Investor Cluster Weight",
"href": "http://timelyportfolio.blogspot.com/2013/09/d3-ify-systematic-investor-cluster.html"
},
{
"title": "Quality comparison of floating-point maths libraries",
"href": "http://shape-of-code.coding-guidelines.com/2011/04/11/quality-comparison-of-floating-point-maths-libraries/"
},
{
"title": "Some sort of update to ggplot2",
"href": "http://andrewgelman.com/2009/11/22/some_sort_of_up/"
},
{
"title": "Hash Table Performance in R: Part I",
"href": "http://jeffreyhorner.tumblr.com/post/114524915928/hash-table-performance-in-r-part-i"
},
{
"title": "Revolution R: 100% R and More – slides and replay",
"href": "http://blog.revolutionanalytics.com/2011/08/revolution-r-100-r-and-more-slides-and-replay.html"
},
{
"title": "Of Needles and Haystacks: Building an Accurate Statewide Dropout Early Warning System in Wisconsin",
"href": "http://jaredknowles.com/journal/2014/8/24/of-needles-and-haystacks-building-an-accurate-statewide-dropout-early-warning-system-in-wisconsin"
},
{
"title": "Adding Annotation to R Objects",
"href": "https://matloff.wordpress.com/2014/04/01/adding-annotation-to-r-objects/"
},
{
"title": "Statistics doesn’t have to be so hard: simulate!",
"href": "http://blog.revolutionanalytics.com/2014/10/statistics-doesnt-have-to-be-that-hard.html"
},
{
"title": "reports 0.1.2 Released",
"href": "https://trinkerrstuff.wordpress.com/2013/03/12/reports-0-1-2-released/"
},
{
"title": "Hunspell: Spell Checker and Text Parser for R",
"href": "https://www.opencpu.org/posts/hunspell-release/"
},
{
"title": "Call for participation: DMApps 2013 – an International Workshop on Data Mining Applications in Industry and Government",
"href": "https://rdatamining.wordpress.com/2013/03/10/call-for-participation-dmapps-2013-an-international-workshop-on-data-mining-applications-in-industry-and-government/"
},
{
"title": "Announcing Packrat v0.4",
"href": "https://blog.rstudio.org/2014/07/22/announcing-packrat-v0-4/"
},
{
"title": "IIATMS Guest Contribution",
"href": "http://princeofslides.blogspot.com/2010/09/iiatms-guest-contribution.html"
},
{
"title": "Google Summer of Code 2009",
"href": "http://dirk.eddelbuettel.com/blog/2009/01/07/"
},
{
"title": "Forecasts and ggplot",
"href": "http://robjhyndman.com/hyndsight/forecasts-and-ggplot/"
},
{
"title": "Candlestick charts using Quandl and Plotly",
"href": "http://moderndata.plot.ly/candlestick-charts-using-quandl-and-plotly/"
}
]
|
fit_AR1_t <- function(y, random_walk = FALSE, zero_mean = FALSE, fast_and_heuristic = TRUE, remove_outliers = FALSE, outlier_prob_th = 1e-3,
verbose = TRUE,
return_iterates = FALSE, return_condMean_Gaussian = FALSE,
tol = 1e-8, maxiter = 100, n_chain = 10, n_thin = 1, K = 30) {
if (!is.matrix(try(as.matrix(y), silent = TRUE))) stop("\"y\" must be coercible to a vector or matrix.")
if (tol <= 0) stop("\"tol\" must be greater than 0.")
if (maxiter < 1) stop("\"maxiter\" must be greater than 1.")
if (round(n_chain)!=n_chain | n_chain<=0) stop("\"n_chain\" must be a positive integer.")
if (round(n_thin)!=n_thin | n_thin<=0) stop("\"n_thin\" must be a positive integer.")
if (round(K)!=K | K<=0) stop("\"K\" must be a positive integer.")
if (NCOL(y) > 1) {
estimation_list <- apply(y, MARGIN = 2, FUN = fit_AR1_t, random_walk, zero_mean, fast_and_heuristic, remove_outliers, outlier_prob_th, verbose = FALSE,
return_iterates, return_condMean_Gaussian, tol, maxiter, n_chain, n_thin, K)
phi0_vct <- unlist(lapply(estimation_list, function(x) x$phi0))
phi1_vct <- unlist(lapply(estimation_list, function(x) x$phi1))
sigma2_vct <- unlist(lapply(estimation_list, function(x) x$sigma2))
nu_vct <- unlist(lapply(estimation_list, function(x) x$nu))
if (verbose)
for (i in 1:length(estimation_list))
message(names(estimation_list)[i], ": ",
length(estimation_list[[i]]$index_miss), " inner missing values and ",
length(estimation_list[[i]]$index_outliers), " outliers detected.")
return(c(estimation_list, list("phi0_vct" = phi0_vct,
"phi1_vct" = phi1_vct,
"sigma2_vct" = sigma2_vct,
"nu_vct" = nu_vct)))
}
if (!is.numeric(y)) stop("\"y\" only allows numerical or NA values.")
if (sum(!is.na(y)) < 5L) stop("Each time series in \"y\" must have at least 5 observations.")
y_name <- colnames(y)
y <- as.numeric(y)
if(remove_outliers) {
fitted_with_outliers <- if (!any_inner_NA(y)) fit_AR1_t_complete(y[!is.na(y)], random_walk, zero_mean, return_iterates, tol, maxiter)
else fit_AR1_t(y, random_walk, zero_mean, fast_and_heuristic, remove_outliers = FALSE, outlier_prob_th, verbose = FALSE,
return_iterates, return_condMean_Gaussian, tol, maxiter, n_chain, n_thin, K)
idx_outliers <- find_outliers_AR1_t(y, fitted_with_outliers, outlier_prob_th)
if (!is.null(idx_outliers))
y[idx_outliers] <- NA
}
index_obs <- which(!is.na(y))
y <- y[min(index_obs):max(index_obs)]
idx_offset <- min(index_obs) - 1L
index_obs <- which(!is.na(y))
if (!anyNA(y))
results <- fit_AR1_t_complete(y, random_walk, zero_mean, return_iterates, tol, maxiter)
else {
n <- index_obs <- index_miss <- n_obs <- y_obs <- delta_index_obs <- n_block <- n_in_block <-
first_index_in_block <- last_index_in_block <- previous_obs_before_block <- next_obs_after_block <- NULL
list2env(findMissingBlock(y), envir = environment())
if (fast_and_heuristic)
results <- fit_AR1_t_heuristic(y, index_miss, random_walk, zero_mean, return_iterates, return_condMean_Gaussian, tol, maxiter)
else {
phi0 <- phi1 <- sigma2 <- nu <- gamma <- c()
estimation_Gaussian <- fit_AR1_Gaussian(y, random_walk, zero_mean, verbose = FALSE, return_condMeanCov = TRUE)
phi0[1] <- estimation_Gaussian$phi0
phi1[1] <- estimation_Gaussian$phi1
sigma2[1] <- estimation_Gaussian$sigma2
nu[1] <- 3
y_samples <- matrix(estimation_Gaussian$cond_mean_y, n, n_chain)
tau_samples <- matrix(NA, n, n_chain)
s <- s_approx <- rep(0, 7)
for (k in 1:maxiter) {
for (j in 1:n_chain) {
sample <- sampling_latent_variables(y_sample_init = y_samples[, j], n_thin, n_block, n_in_block,
first_index_in_block, last_index_in_block, previous_obs_before_block, next_obs_after_block,
phi0[k], phi1[k], sigma2[k], nu[k])
y_samples[, j] <- sample$y
tau_samples[, j] <- sample$tau
}
if (k <= K)
gamma[k] <- 1
else
gamma[k] <- 1/(k - K)
s[1] <- sum(log(tau_samples[2:n,]) - tau_samples[2:n,]) / n_chain
s[2] <- sum(tau_samples[2:n,] * y_samples[2:n,]^2) / n_chain
s[3] <- sum(tau_samples[2:n,] ) / n_chain
s[4] <- sum(tau_samples[2:n,] * y_samples[1:(n-1),]^2) / n_chain
s[5] <- sum(tau_samples[2:n,] * y_samples[2:n,]) / n_chain
s[6] <- sum(tau_samples[2:n,] * y_samples[2:n,] * y_samples[1:(n - 1),]) / n_chain
s[7] <- sum(tau_samples[2:n,] * y_samples[1:(n-1),]) / n_chain
s_approx <- s_approx + gamma[k] * (s - s_approx)
if (!random_walk && !zero_mean) {
phi1[k+1] <- ( s_approx[3] * s_approx[6] - s_approx[5] * s_approx[7] ) / ( s_approx[3] * s_approx[4] - s_approx[7]^2 )
phi0[k+1] <- (s_approx[5] - phi1[k+1] * s_approx[7] ) / s_approx[3]
} else if (random_walk && !zero_mean){
phi1[k+1] <- 1
phi0[k+1] <- (s_approx[5] - s_approx[7] ) / s_approx[3]
} else if (!random_walk && zero_mean){
phi1[k+1] <- s_approx[6] / s_approx[4]
phi0[k+1] <- 0
} else{
phi1[k+1] <- 1
phi0[k+1] <- 0
}
sigma2[k+1] <- (s_approx[2] + phi0[k+1]^2 * s_approx[3] + phi1[k+1]^2 * s_approx[4] - 2 * phi0[k+1] * s_approx[5]
- 2 * phi1[k+1] * s_approx[6] + 2 * phi0[k+1] * phi1[k+1] * s_approx[7]) / (n - 1)
f_nu <- function(nu, n, s_approx1)
return(-sum(0.5 * nu * s_approx1 + (0.5 * nu * log(0.5 * nu) - lgamma(0.5 * nu)) * (n - 1)))
          optimization_result <- optimize(f_nu, c(1e-6, 1e6), n, s_approx[1])
          nu[k + 1] <- optimization_result$minimum
}
results <- list("phi0" = phi0[k + 1],
"phi1" = phi1[k + 1],
"sigma2" = sigma2[k + 1],
"nu" = nu[k + 1])
if (return_iterates)
results <- c(results, list("phi0_iterates" = phi0,
"phi1_iterates" = phi1,
"sigma2_iterates" = sigma2,
"nu_iterates" = nu))
if(return_condMean_Gaussian)
results <- c(results, list("cond_mean_y_Gaussian" = estimation_Gaussian$cond_mean_y))
}
}
results <- c(results, list("index_miss" = if (sum(is.na(y)) == 0) NULL
else which(is.na(y)) + idx_offset))
if(!remove_outliers) idx_outliers <- NULL
results <- c(results, list("index_outliers" = if(is.null(idx_outliers)) NULL
else idx_outliers + idx_offset))
if (verbose)
message(y_name, ": ",
length(results$index_miss), " inner missing values and ",
length(results$index_outliers), " outliers detected.")
return(results)
}
impute_AR1_t <- function(y, n_samples = 1,
random_walk = FALSE, zero_mean = FALSE,
fast_and_heuristic = TRUE, remove_outliers = FALSE, outlier_prob_th = 1e-3,
verbose = TRUE, return_estimates = FALSE,
tol = 1e-8, maxiter = 100, K = 30,
n_burn = 100, n_thin = 50) {
if (!is.matrix(try(as.matrix(y), silent = TRUE))) stop("\"y\" must be coercible to a vector or matrix.")
if (round(n_samples)!=n_samples | n_samples<=0) stop("\"n_samples\" must be a positive integer.")
if (round(n_burn)!=n_burn | n_burn<=0) stop("\"n_burn\" must be a positive integer.")
if (round(n_thin)!=n_thin | n_thin<=0) stop("\"n_thin\" must be a positive integer.")
if (NCOL(y) > 1) {
results_list <- lapply(c(1:NCOL(y)), FUN = function(i) {
impute_AR1_t(y[, i, drop = FALSE], n_samples, random_walk, zero_mean, fast_and_heuristic, remove_outliers, outlier_prob_th, verbose = FALSE,
return_estimates, tol, maxiter, K, n_burn, n_thin)
})
names(results_list) <- colnames(y)
if (n_samples == 1 && !return_estimates) {
results <- do.call(cbind, results_list)
attr(results, "index_miss") <- lapply(results_list, FUN = function(res) attr(res, "index_miss"))
attr(results, "index_outliers") <- lapply(results_list, FUN = function(res) attr(res, "index_outliers"))
} else if (n_samples == 1 && return_estimates) {
results <- do.call(mapply, c("FUN" = cbind, results_list, "SIMPLIFY" = FALSE))
attr(results$y_imputed, "index_miss") <- lapply(results_list, FUN = function(res) attr(res$y_imputed, "index_miss"))
attr(results$y_imputed, "index_outliers") <- lapply(results_list, FUN = function(res) attr(res$y_imputed, "index_outliers"))
} else {
results <- do.call(mapply, c("FUN" = cbind, results_list, "SIMPLIFY" = FALSE))
index_miss_list <- lapply(results_list, FUN = function(res) attr(res$y_imputed.1, "index_miss"))
index_outliers_list <- lapply(results_list, FUN = function(res) attr(res$y_imputed.1, "index_outliers"))
for (i in 1:n_samples) {
attr(results[[i]], "index_miss") <- index_miss_list
attr(results[[i]], "index_outliers") <- index_outliers_list
}
if (return_estimates) {
results$phi0 <- as.vector(results$phi0)
results$phi1 <- as.vector(results$phi1)
results$sigma2 <- as.vector(results$sigma2)
results$nu <- as.vector(results$nu)
}
}
if (verbose)
for (i in 1:length(results_list))
message(names(results_list)[i], ": ",
length(attr(results_list[[i]], "index_miss")), " inner missing values imputed and ",
length(attr(results_list[[i]], "index_outliers")), " outliers detected and corrected.")
return(results)
}
if (!is.numeric(y)) stop("\"y\" only allows numerical or NA values.")
if (sum(!is.na(y)) < 5) stop("Each time series in \"y\" must have at least 5 observations.")
y_attrib <- attributes(y)
y_name <- colnames(y)
y <- as.numeric(y)
y_imputed <- matrix(rep(y, times = n_samples), ncol = n_samples)
if (remove_outliers) {
fitted <- fit_AR1_t(y, random_walk, zero_mean, fast_and_heuristic, remove_outliers = TRUE, outlier_prob_th = outlier_prob_th, verbose = FALSE,
tol = tol, maxiter = maxiter, K = K)
if (!is.null(index_outliers <- fitted$index_outliers))
y[index_outliers] <- NA
}
if (!any_inner_NA(y)) {
if (return_estimates && !remove_outliers)
fitted <- fit_AR1_t(y, random_walk, zero_mean, fast_and_heuristic, remove_outliers = FALSE, verbose = FALSE, tol = tol, maxiter = maxiter, K = K)
} else {
fitted <- fit_AR1_t(y, random_walk, zero_mean, fast_and_heuristic, remove_outliers = FALSE, verbose = FALSE,
return_condMean_Gaussian = TRUE, tol = tol, maxiter = maxiter, K = K)
index_obs <- which(!is.na(y))
index_obs_min <- min(index_obs)
index_miss_middle <- which(is_inner_NA(y))
if (length(index_miss_middle) > 0) {
y_middle <- y[min(index_obs):max(index_obs)]
index_miss_deleted <- index_miss_middle - (index_obs_min - 1)
n <- index_obs <- index_miss <- n_obs <- y_obs <- delta_index_obs <- n_block <- n_in_block <-
first_index_in_block <- last_index_in_block <- previous_obs_before_block <- next_obs_after_block <- NULL
list2env(findMissingBlock(y_middle), envir = environment())
y_middle_tmp <- fitted$cond_mean_y_Gaussian
for (i in 1:n_burn) {
sample <- sampling_latent_variables(y_middle_tmp, n_thin = 1, n_block, n_in_block,
first_index_in_block, last_index_in_block, previous_obs_before_block, next_obs_after_block,
fitted$phi0, fitted$phi1, fitted$sigma2, fitted$nu)
y_middle_tmp <- sample$y
}
for (j in 1:n_samples) {
sample <- sampling_latent_variables(y_middle_tmp, n_thin, n_block, n_in_block,
first_index_in_block, last_index_in_block, previous_obs_before_block, next_obs_after_block,
fitted$phi0, fitted$phi1, fitted$sigma2, fitted$nu)
y_imputed[index_miss_middle, j] <- sample$y[index_miss_deleted]
}
}
}
index_miss <- which(is_inner_NA(y))
if (length(index_miss) == 0) index_miss <- NULL
if(!remove_outliers) index_outliers <- NULL
if (n_samples == 1) {
attributes(y_imputed) <- y_attrib
attr(y_imputed, "index_miss") <- index_miss
attr(y_imputed, "index_outliers") <- index_outliers
results <- if (!return_estimates) y_imputed else list("y_imputed" = y_imputed)
} else {
    y_imputed <- lapply(split(y_imputed, col(y_imputed)), FUN = function(x) {
      attributes(x) <- y_attrib
      attr(x, "index_miss") <- index_miss
      attr(x, "index_outliers") <- index_outliers
      return(x) })
results <- c("y_imputed" = y_imputed)
}
if (return_estimates) results <- c(results, list("phi0" = fitted$phi0,
"phi1" = fitted$phi1,
"sigma2" = fitted$sigma2,
"nu" = fitted$nu))
if (verbose)
message(y_name, ": ",
length(index_miss), " missing values imputed and ",
length(index_outliers), " outliers detected and corrected.")
return(results)
}
sampling_latent_variables <- function(y_sample_init, n_thin, n_block, n_in_block,
first_index_in_block, last_index_in_block, previous_obs_before_block, next_obs_after_block,
phi0, phi1, sigma2, nu) {
n <- length(y_sample_init)
tau_tmp <- vector(length = n)
y_tmp <- y_sample_init
max_n_in_block <- max( n_in_block )
phi1_exponential <- phi1^( 0:(max_n_in_block + 1) )
sum_phi1_exponential <- cumsum(phi1_exponential)
for(j in 1:n_thin){
for (i_tau in 2:n) {
tau_tmp[i_tau] <- rgamma(n = 1, shape = 0.5 * nu + 0.5,
rate = 0.5 * ( (y_tmp[i_tau] - phi0 - phi1 * y_tmp[i_tau - 1])^2 / sigma2 + nu) )
}
for (d in 1:n_block ) {
n_in_d_block <- n_in_block[d]
mu_cd <- ( sum_phi1_exponential[1:(n_in_d_block + 1)] * phi0
+ phi1_exponential[2:(n_in_d_block + 2)] * previous_obs_before_block[d] )
mu1 <- mu_cd[1:n_in_d_block]
mu2 <- mu_cd[n_in_d_block + 1]
sigma_cd <- matrix( nrow = n_in_d_block + 1, ncol = n_in_d_block + 1)
for(i in 1 : (n_in_d_block + 1) ){
if(i == 1){ sigma_cd[1, 1] <- sigma2/tau_tmp[ first_index_in_block[d] ]
} else {
sigma_cd[i, i] <- sigma_cd[ i - 1, i - 1 ] * phi1^2 + sigma2/tau_tmp[ first_index_in_block[d] + i - 1]
}
if( i != n_in_d_block + 1){
sigma_cd[ i, (i + 1) : (n_in_d_block + 1)] <- sigma_cd[ i, i ] * phi1_exponential[ 2:(n_in_d_block + 1 - i + 1) ]
sigma_cd[ (i + 1) : (n_in_d_block + 1), i ] <- sigma_cd[ i, i ] * phi1_exponential[ 2:(n_in_d_block + 1 - i + 1) ]
}
}
sigma11 <- sigma_cd[ 1 : n_in_d_block, 1 : n_in_d_block]
sigma12 <- sigma_cd[ 1 : n_in_d_block, n_in_d_block + 1]
sigma22 <- sigma_cd[ n_in_d_block + 1, n_in_d_block + 1]
mu_d <- mu1 + sigma12 / sigma22 * ( next_obs_after_block[d] - mu2 )
sigma_d <- sigma11 - sigma12 %*% t( sigma12 )/sigma22
y_tmp[ first_index_in_block[d] : last_index_in_block[d]] <- MASS::mvrnorm( n = 1, mu = mu_d, Sigma = sigma_d )
}
}
return(list("y" = y_tmp,
"tau" = tau_tmp))
}
fit_AR1_t_complete <- function(y, random_walk = FALSE, zero_mean = FALSE,
return_iterates = FALSE,
tol = 1e-10, maxiter = 1000) {
if (anyNA(y)) stop("Function fit_AR1_t_complete() cannot accept NAs.")
phi0 <- phi1 <- sigma2 <- nu <- c()
estimation_Gaussian <- fit_AR1_Gaussian_complete(y, random_walk, zero_mean)
phi0[1] <- estimation_Gaussian$phi0
phi1[1] <- estimation_Gaussian$phi1
sigma2[1] <- estimation_Gaussian$sigma2
nu[1] <- 3
n <- length(y)
tmp <- (y[-1] - phi0[1] - phi1[1] * y[-n])^2/sigma2[1]
exp_tau <- vector( length = n )
if (return_iterates) {
f = vector()
f[1] = sum( log( gamma( 0.5 * (nu[1] + 1) )/gamma( 0.5 * nu[1] )/sqrt( pi * nu[1] * sigma2[1] ) )
                - 0.5 * (nu[1] + 1) * log( (y[-1] - phi0[1] - phi1[1] * y[-n])^2/sigma2[1]/nu[1] + 1 ) )
}
for ( k in 1:maxiter) {
exp_tau = (nu[k] + 1)/( nu[k] + tmp )
s_tau = sum( exp_tau )
s_tau_y2 = sum( exp_tau * y[-1] )
s_tau_y1 = sum( exp_tau * y[-n] )
s_tau_y1y2 = sum( exp_tau * y[-n] * y[-1] )
s_tau_y1y1 = sum( exp_tau * y[-n] * y[-n] )
if (!random_walk && !zero_mean) {
phi1[k+1] <- (s_tau * s_tau_y1y2 - s_tau_y2 * s_tau_y1 )/(s_tau * s_tau_y1y1 - s_tau_y1^2)
phi0[k+1] <- (s_tau_y2 - phi1[k+1] * s_tau_y1)/s_tau
} else if (random_walk && !zero_mean){
phi1[k+1] <- 1
phi0[k+1] <- (s_tau_y2 - s_tau_y1)/s_tau
} else if (!random_walk && zero_mean){
phi1[k+1] <- s_tau_y1y2 / s_tau_y1y1
phi0[k+1] <- 0
} else{
phi1[k+1] <- 1
phi0[k+1] <- 0
}
sigma2[k+1] = sum( exp_tau * (y[-1] - phi0[k+1] - phi1[k+1] * y[-n])^2 )/(n - 1)
tmp = (y[-1] - phi0[k+1] - phi1[k+1] * y[-n])^2/sigma2[k+1]
    f_nu = function(nu) {
      -sum(-0.5 * (nu + 1) * log(nu + tmp)
           + lgamma(0.5 * (nu + 1)) - lgamma(0.5 * nu) + 0.5 * nu * log(nu))
    }
    opt_rst = optimise(f_nu, c(1e-6, 1e6))
    nu[k+1] = opt_rst$minimum
if (return_iterates) f[k+1] = sum( log( gamma( 0.5 * (nu[k+1] + 1) )/gamma( 0.5 * nu[k+1] )/sqrt( pi * nu[k+1] * sigma2[k+1] ) )
- 0.5 * (nu[k+1] + 1) * log( tmp/nu[k+1] + 1 ) )
if (abs(phi0[k + 1] - phi0[k]) <= tol * (abs(phi0[k + 1]) + abs(phi0[k]))/2
&& abs(phi1[k + 1] - phi1[k]) <= tol * (abs(phi1[k + 1]) + abs(phi1[k]))/2
&& abs(sigma2[k + 1] - sigma2[k]) <= tol * (abs(sigma2[k + 1]) + abs(sigma2[k]))/2
&& KLgamma(nu[k]/2, nu[k]/2, nu[k+1]/2, nu[k+1]/2) <= tol)
break
}
results <- list("phi0" = phi0[k+1],
"phi1" = phi1[k+1],
"sigma2" = sigma2[k+1],
"nu" = nu[k+1])
if (return_iterates)
results <- c(results, list("phi0_iterate" = phi0,
"phi1_iterate" = phi1,
"sigma2_iterate" = sigma2,
"nu_iterate" = nu,
"f_iterate" = f))
return(results)
}
fit_AR1_t_heuristic <- function(y, index_miss, random_walk = FALSE, zero_mean = TRUE,
return_iterates = FALSE, return_condMean_Gaussian = FALSE,
tol = 1e-10, maxiter = 1000) {
phi0 <- phi1 <- sigma2 <- nu <- gamma <- c()
estimation_Gaussian <- fit_AR1_Gaussian(y, random_walk, zero_mean, verbose = FALSE, return_condMeanCov = return_condMean_Gaussian)
phi0[1] <- estimation_Gaussian$phi0
phi1[1] <- estimation_Gaussian$phi1
sigma2[1] <- estimation_Gaussian$sigma2
nu[1] <- 3
index_miss_p <- c(0, index_miss, length(y) + 1)
delta_index_miss_p <- diff(index_miss_p)
index_delta_index_miss_p <- which(delta_index_miss_p > 2)
n_obs_block <- length(index_delta_index_miss_p)
n_in_obs_block <- delta_index_miss_p[index_delta_index_miss_p] - 1
m <- 0
y_obs2 <- y_obs1 <- c()
for (i in 1:n_obs_block) {
y_obs1[(m + 1):(m + n_in_obs_block[i] - 1)] <- y[(index_miss_p[index_delta_index_miss_p[i]] + 1):(index_miss_p[index_delta_index_miss_p[i] + 1] - 2)]
y_obs2[(m + 1):(m + n_in_obs_block[i] - 1)] <- y[(index_miss_p[index_delta_index_miss_p[i]] + 2):(index_miss_p[index_delta_index_miss_p[i] + 1] - 1)]
m <- m + n_in_obs_block[i] - 1
}
n_y_obs1 <- length(y_obs1)
tmp <- (y_obs2 - phi0[1] - phi1[1] * y_obs1)^2/sigma2[1]
exp_tau <- vector( length = n_y_obs1 )
if (return_iterates) {
f = vector()
f[1] = sum( log( gamma( 0.5 * (nu[1] + 1) )/gamma( 0.5 * nu[1] )/sqrt( pi * nu[1] * sigma2[1] ) )
                - 0.5 * (nu[1] + 1) * log( (y_obs2 - phi0[1] - phi1[1] * y_obs1)^2/sigma2[1]/nu[1] + 1 ) )
}
for ( k in 1:maxiter) {
exp_tau = (nu[k] + 1)/( nu[k] + tmp )
s_tau = sum( exp_tau )
s_tau_y2 = sum( exp_tau * y_obs2 )
s_tau_y1 = sum( exp_tau * y_obs1 )
s_tau_y1y2 = sum( exp_tau * y_obs1 * y_obs2 )
s_tau_y1y1 = sum( exp_tau * y_obs1 * y_obs1 )
if (!random_walk && !zero_mean) {
phi1[k+1] <- (s_tau * s_tau_y1y2 - s_tau_y2 * s_tau_y1 )/(s_tau * s_tau_y1y1 - s_tau_y1^2)
phi0[k+1] <- (s_tau_y2 - phi1[k+1] * s_tau_y1)/s_tau
} else if (random_walk && !zero_mean){
phi1[k+1] <- 1
phi0[k+1] <- (s_tau_y2 - s_tau_y1)/s_tau
} else if (!random_walk && zero_mean){
phi1[k+1] <- s_tau_y1y2 / s_tau_y1y1
phi0[k+1] <- 0
} else{
phi1[k+1] <- 1
phi0[k+1] <- 0
}
sigma2[k+1] = sum( exp_tau * (y_obs2 - phi0[k+1] - phi1[k+1] * y_obs1)^2 )/n_y_obs1
tmp = (y_obs2 - phi0[k+1] - phi1[k+1] * y_obs1)^2/sigma2[k+1]
    f_nu = function(nu) {
      -sum(-0.5 * (nu + 1) * log(nu + tmp)
           + lgamma(0.5 * (nu + 1)) - lgamma(0.5 * nu) + 0.5 * nu * log(nu))
    }
    opt_rst = optimise(f_nu, c(1e-6, 1e6))
    nu[k+1] = opt_rst$minimum
if (return_iterates) f[k+1] = sum( log( gamma( 0.5 * (nu[k+1] + 1) )/gamma( 0.5 * nu[k+1] )/sqrt( pi * nu[k+1] * sigma2[k+1] ) )
- 0.5 * (nu[k+1] + 1) * log( tmp/nu[k+1] + 1 ) )
if (abs(phi0[k + 1] - phi0[k]) <= tol * (abs(phi0[k + 1]) + abs(phi0[k]))/2
&& abs(phi1[k + 1] - phi1[k]) <= tol * (abs(phi1[k + 1]) + abs(phi1[k]))/2
&& abs(sigma2[k + 1] - sigma2[k]) <= tol * (abs(sigma2[k + 1]) + abs(sigma2[k]))/2
&& KLgamma(nu[k]/2, nu[k]/2, nu[k+1]/2, nu[k+1]/2) <= tol)
break
}
results <- list("phi0" = phi0[k+1],
"phi1" = phi1[k+1],
"sigma2" = sigma2[k+1],
"nu" = nu[k+1])
if (return_iterates)
results <- c(results, list("phi0_iterate" = phi0,
"phi1_iterate" = phi1,
"sigma2_iterate" = sigma2,
"nu_iterate" = nu,
"f_iterate" = f))
if(return_condMean_Gaussian)
results <- c(results, list("cond_mean_y_Gaussian" = estimation_Gaussian$cond_mean_y))
return(results)
}
KLgamma <- function(shape1, rate1, shape2, rate2) {
h <- function(shape1, rate1, shape2, rate2)
- shape2/ rate2 / shape1 - 1/rate1 * log(shape1) - lgamma(1/rate1) + (1/rate1-1)*(psigamma(1/rate2) + log(shape2))
return(h(shape2,1/rate2,shape2,1/rate2) - h(shape1,1/rate1,shape2,1/rate2))
}
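## Usage sketch (assumes the companion helpers referenced above --
## fit_AR1_Gaussian(), fit_AR1_Gaussian_complete(), findMissingBlock(),
## any_inner_NA(), is_inner_NA() -- are loaded from the same package):
## simulate a Student-t AR(1) series, delete a block, then fit and impute.
# set.seed(42)
# n <- 300; y <- numeric(n)
# for (t in 2:n) y[t] <- 0.95 * y[t - 1] + 0.1 * rt(1, df = 4)
# y[100:120] <- NA
# fit <- fit_AR1_t(y)       # list with phi0, phi1, sigma2, nu, index_miss
# y_imp <- impute_AR1_t(y)  # series with the NA block filled in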
|
theme_min <- function (size = 11, font = "sans", face = 'plain',
backgroundColor = 'white', panelColor = 'white',
axisColor = 'black', gridColor = 'grey70', textColor = 'black'){
theme(
panel.border = element_rect(colour = gridColor, linetype = "solid", fill = NA),
axis.text.x = element_text(vjust = 1, hjust = 0.5,
colour = axisColor, family = font, face = face, size = 9),
axis.text.y = element_text(hjust = 1, vjust = 0.5,
colour = axisColor, family = font, face = face, size = 9),
axis.title.x = element_text(family = font, face = face, colour = axisColor, size = size),
axis.title.y = element_text(angle = 90, family = font, face = face, colour = axisColor, size = size),
axis.line = element_blank(),
axis.ticks = element_blank(),
legend.background = element_rect(fill = NA, colour = gridColor),
legend.key = element_blank(),
legend.key.size = unit(1.5, 'lines'),
legend.text = element_text(hjust = 0, family = font, face = face, colour = textColor, size = size),
legend.title = element_text(hjust = 0, family = font, face = face, colour = textColor, size = size),
panel.background = element_rect(fill = panelColor, colour = NA),
plot.background = element_rect(fill = backgroundColor, colour = NA),
panel.grid.major = element_line(colour = gridColor, size = 0.33, linetype = "dotted"),
panel.grid.minor = element_blank(),
strip.background = element_rect(fill = NA, colour = NA),
strip.text.x = element_text(hjust = 0, family = font, face = face, colour = textColor, size = size),
strip.text.y = element_text(angle = -90, family = font, face = face, colour = textColor, size = size),
plot.title = element_text(hjust = 0, vjust = 1, family = font, face = face, colour = textColor, size = 15),
plot.margin = unit(c(0.3, 0.1, 0.1, 0.1), 'lines'))
}
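## Usage sketch (assumes ggplot2 is attached and grid is available, since
## theme_min() calls theme(), element_*() and unit() directly):
# library(ggplot2); library(grid)
# ggplot(mtcars, aes(wt, mpg)) + geom_point() + theme_min(size = 12)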
|
predict_surrogate <- function(explainer, new_observation, ..., type = "localModel") {
switch (type,
"localModel" = predict_surrogate_local_model(explainer, new_observation, ...),
"lime" = predict_surrogate_lime(explainer, new_observation, ...),
"iml" = predict_surrogate_iml(explainer, new_observation, ...),
stop("The type argument shall be either 'localModel' or 'iml' or 'lime'")
)
}
predict_surrogate_local_model <- function(explainer,
new_observation,
size = 1000,
seed = 1313, ...) {
localModel::individual_surrogate_model(explainer,
new_observation,
size = size,
seed = seed)
}
predict_model.dalex_explainer <- function(x, newdata, ...) {
class(x) = "explainer"
pred <- predict(x, newdata)
return(as.data.frame(pred))
}
model_type.dalex_explainer <- function(x, ...) {
return("regression")
}
predict_surrogate_lime <- function(explainer, new_observation, n_features = 4, n_permutations = 1000, labels = unique(explainer$y)[1], ...) {
class(explainer) <- "dalex_explainer"
lime_model <- lime::lime(x = explainer$data[,colnames(new_observation)],
model = explainer)
lime_expl <- lime::explain(x = new_observation,
explainer = lime_model,
n_features = n_features,
n_permutations = n_permutations,
...)
class(lime_expl) <- c("predict_surrogate_lime", class(lime_expl))
lime_expl
}
plot.predict_surrogate_lime <- function(x, ...) {
class(x) <- class(x)[-1]
lime::plot_features(x, ...)
}
predict_surrogate_iml <- function(explainer, new_observation, k = 4, ...) {
iml_model <- iml::Predictor$new(model = explainer$model, data = explainer$data[,colnames(new_observation)])
iml::LocalModel$new(predictor = iml_model, x.interest = new_observation, k = k)
}
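# Usage sketch (assumes DALEX plus the chosen backend -- localModel, lime, or
# iml -- are installed; `apartments` is a dataset shipped with DALEX):
# library(DALEX)
# model <- lm(m2.price ~ ., data = apartments)
# expl <- explain(model, data = apartments[, -1], y = apartments$m2.price)
# predict_surrogate(expl, apartments[1, -1], type = "localModel")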
|
getLHS <- function(n, dimension, Q = 1e4, radius = qnorm(1e-5, lower.tail = FALSE)){
lhs <- foreach(icount(Q)) %do% {
lhsDesign(n, dimension)$design
}
crit <- sapply(lhs, function(l) min(stats::dist(l)))
ind <- which.max(crit)
lhs <- t(lhs[[ind]])
  rownames(lhs) <- rep(c("x", "y"), length.out = dimension)
qnorm(lhs)
}
lhsDesign <- function(n, dimension, randomized=TRUE, seed=NULL){
if (randomized) ran = matrix(runif(n*dimension),nrow=n,ncol=dimension)
else ran = matrix(0.5,nrow=n,ncol=dimension)
x = matrix(0,nrow=n,ncol=dimension)
  for (i in 1:dimension) {
    idx = sample(1:n)
    P = (idx - ran[,i]) / n
    x[,i] <- P
  }
return(list(n=n,dimension=dimension,design=x,randomized=randomized,seed=seed))
}
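## Worked example: lhsDesign() stratifies each dimension into n equal bins and
## places exactly one point per bin, so each column of `design`, scaled by n
## and rounded up, is a permutation of 1:n. (getLHS() additionally needs the
## foreach and iterators packages for foreach()/icount().)
set.seed(1)
d <- lhsDesign(n = 6, dimension = 2)$design
sort(ceiling(d[, 1] * 6))  # 1 2 3 4 5 6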
|
geom_grob <- function(mapping = NULL,
data = NULL,
stat = "identity",
position = "identity",
...,
nudge_x = 0,
nudge_y = 0,
add.segments = TRUE,
arrow = NULL,
na.rm = FALSE,
show.legend = FALSE,
inherit.aes = FALSE) {
if (!missing(nudge_x) || !missing(nudge_y)) {
if (!missing(position)) {
rlang::abort("You must specify either `position` or `nudge_x`/`nudge_y`.")
}
position <-
position_nudge_center(nudge_x, nudge_y,
kept.origin = ifelse(add.segments,
"original", "none"))
}
ggplot2::layer(
data = data,
mapping = mapping,
stat = stat,
geom = GeomGrob,
position = position,
show.legend = show.legend,
inherit.aes = inherit.aes,
params = list(
add.segments = add.segments,
arrow = arrow,
na.rm = na.rm,
...
)
)
}
grob_draw_panel_fun <-
function(data,
panel_params,
coord,
na.rm = FALSE,
add.segments = TRUE,
arrow = NULL) {
if (nrow(data) == 0) {
return(grid::nullGrob())
}
if (!grid::is.grob(data$label[[1]])) {
warning("Skipping as object mapped to 'label' is not a list of \"grob\" objects.")
return(grid::nullGrob())
}
add.segments <- add.segments && all(c("x_orig", "y_orig") %in% colnames(data))
data <- coord$transform(data, panel_params)
if (add.segments) {
data_orig <- data.frame(x = data$x_orig, y = data$y_orig)
data_orig <- coord$transform(data_orig, panel_params)
data$x_orig <- data_orig$x
data$y_orig <- data_orig$y
}
if (is.character(data$vjust)) {
data$vjust <-
compute_just2d(data = data,
coord = coord,
panel_params = panel_params,
just = data$vjust,
a = "y", b = "x")
}
if (is.character(data$hjust)) {
data$hjust <-
compute_just2d(data = data,
coord = coord,
panel_params = panel_params,
just = data$hjust,
a = "x", b = "y")
}
all.grobs <- grid::gList()
user.grobs <- data[["label"]]
for (row.idx in 1:nrow(data)) {
row <- data[row.idx, , drop = FALSE]
user.grob <- user.grobs[[row.idx]]
user.grob$vp <-
grid::viewport(x = grid::unit(row$x, "native"),
y = grid::unit(row$y, "native"),
width = grid::unit(row$vp.width, "npc"),
height = grid::unit(row$vp.height, "npc"),
just = c(row$hjust, row$vjust),
angle = row$angle,
name = paste("inset.grob.vp", row$PANEL,
"row", row.idx, sep = "."))
user.grob$name <- paste("inset.grob", row.idx, sep = ".")
if (add.segments) {
segment.grob <-
grid::segmentsGrob(x0 = row$x,
y0 = row$y,
x1 = row$x_orig,
y1 = row$y_orig,
arrow = arrow,
gp = grid::gpar(col = ggplot2::alpha(row$segment.colour,
row$segment.alpha)),
name = paste("inset.grob.segment", row.idx, sep = "."))
all.grobs <- grid::gList(all.grobs, segment.grob, user.grob)
} else {
all.grobs <- grid::gList(all.grobs, user.grob)
}
}
grid::grobTree(children = all.grobs, name = "geom.grob.panel")
}
GeomGrob <-
ggplot2::ggproto("GeomGrob", ggplot2::Geom,
required_aes = c("x", "y", "label"),
default_aes = ggplot2::aes(
colour = "black", angle = 0, hjust = 0.5,
vjust = 0.5, alpha = NA, family = "", fontface = 1,
vp.width = 1/5, vp.height = 1/5,
segment.linetype = 1,
segment.colour = "grey33",
segment.size = 0.5,
segment.alpha = 1
),
draw_panel = grob_draw_panel_fun,
draw_key = function(...) {
grid::nullGrob()
}
)
geom_grob_npc <- function(mapping = NULL,
data = NULL,
stat = "identity",
position = "identity",
...,
na.rm = FALSE,
show.legend = FALSE,
inherit.aes = FALSE) {
layer(
data = data,
mapping = mapping,
stat = stat,
geom = GeomGrobNpc,
position = position,
show.legend = show.legend,
inherit.aes = inherit.aes,
params = list(
na.rm = na.rm,
...
)
)
}
grobnpc_draw_panel_fun <-
function(data, panel_params, coord,
na.rm = FALSE) {
if (nrow(data) == 0) {
return(grid::nullGrob())
}
if (!grid::is.grob(data$label[[1]])) {
warning("Skipping as object mapped to 'label' is not a list of \"grob\".")
return(grid::nullGrob())
}
data$npcx <- compute_npcx(data$npcx)
data$npcy <- compute_npcy(data$npcy)
if (is.character(data$vjust)) {
data$vjust <- compute_just(data$vjust, data$npcy)
}
if (is.character(data$hjust)) {
data$hjust <- compute_just(data$hjust, data$npcx)
}
user.grobs <- grid::gList()
for (row.idx in 1:nrow(data)) {
userGrob <- data$label[[row.idx]]
userGrob$vp <-
grid::viewport(x = grid::unit(data$npcx[row.idx], "npc"),
y = grid::unit(data$npcy[row.idx], "npc"),
width = grid::unit(data$vp.width[row.idx], "npc"),
height = grid::unit(data$vp.height[row.idx], "npc"),
just = c(data$hjust[row.idx], data$vjust[row.idx]),
angle = data$angle[row.idx],
name = paste("geom_grob.panel", data$PANEL[row.idx],
"row", row.idx, sep = "."))
userGrob$name <- paste("inset.grob", row.idx, sep = ".")
user.grobs[[row.idx]] <- userGrob
}
grid.name <- paste("geom_grob.panel",
data$PANEL[row.idx], sep = ".")
grid::gTree(children = user.grobs, name = grid.name)
}
GeomGrobNpc <-
ggplot2::ggproto("GeomGrobNpc", ggplot2::Geom,
required_aes = c("npcx", "npcy", "label"),
default_aes = ggplot2::aes(
colour = "black", angle = 0, hjust = "inward",
vjust = "inward", alpha = NA, family = "", fontface = 1,
vp.width = 1/5, vp.height = 1/5
),
draw_panel = grobnpc_draw_panel_fun,
draw_key = function(...) {
grid::nullGrob()
}
)
|
test_that("A label can be extracted from a region of a loaded annotation", {
testthat::skip_on_cran();
skip_if(tests_running_on_cran_under_macos(), message = "Skipping on CRAN under MacOS, required test data cannot be downloaded.");
fsbrain::download_optional_data();
subjects_dir = fsbrain::get_optional_data_filepath("subjects_dir");
skip_if_not(dir.exists(subjects_dir), message="Test data missing.");
num_verts_bankssts_subject1_lh = 1722;
num_verts_subject1_lh = 149244;
num_verts_subject1_lh_nonbankssts = num_verts_subject1_lh - num_verts_bankssts_subject1_lh;
annotdata = subject.annot(subjects_dir, "subject1", "lh", "aparc");
label = label.from.annotdata(annotdata, "bankssts");
expect_equal(length(label), num_verts_bankssts_subject1_lh);
label = label.from.annotdata(annotdata, "bankssts", invert=TRUE);
expect_equal(length(label), num_verts_subject1_lh_nonbankssts);
expect_error(label.from.annotdata(annotdata, "nosuchregioninatlas"));
label = label.from.annotdata(annotdata, "nosuchregioninatlas", error_on_invalid_region=FALSE)
expect_equal(length(label), 0);
})
test_that("A label can be extracted from a region of an annotation file", {
testthat::skip_on_cran();
skip_if(tests_running_on_cran_under_macos(), message = "Skipping on CRAN under MacOS, required test data cannot be downloaded.");
fsbrain::download_optional_data();
subjects_dir = fsbrain::get_optional_data_filepath("subjects_dir");
skip_if_not(dir.exists(subjects_dir), message="Test data missing.");
num_verts_bankssts_subject1_lh = 1722;
label = subject.label.from.annot(subjects_dir, 'subject1', 'lh', 'aparc', 'bankssts');
expect_equal(length(label), num_verts_bankssts_subject1_lh);
})
test_that("Labels can be extracted from a region of an annotation file for a group of subjects", {
testthat::skip_on_cran();
skip_if(tests_running_on_cran_under_macos(), message = "Skipping on CRAN under MacOS, required test data cannot be downloaded.");
fsbrain::download_optional_data();
subjects_dir = fsbrain::get_optional_data_filepath("subjects_dir");
subjects_list = c('subject1', 'subject2');
skip_if_not(dir.exists(subjects_dir), message="Test data missing.");
num_verts_bankssts_subject1_lh = 1722;
num_verts_bankssts_subject2_lh = 1722;
labels = group.label.from.annot(subjects_dir, subjects_list, 'lh', 'aparc', 'bankssts');
expect_equal(length(labels), 2);
expect_equal(length(labels$subject1), num_verts_bankssts_subject1_lh);
expect_equal(length(labels$subject2), num_verts_bankssts_subject2_lh);
})
test_that("Labels can be merged into an annotation", {
label1 = c(46666, 46777);
label2 = c(99888, 99889);
label_vertices = list("region1"=label1, "region2"=label2);
colortable_df = data.frame("struct_index"=seq(0, 2), "struct_name"=c("unknown", "region1", "region2"), "r"=c(255L, 255L, 0L), "g"=c(255L, 0L, 255L), "b"=c(255L, 0L, 0L), "a"=c(0L, 0L, 0L));
annot = label.to.annot(label_vertices, 100000, colortable_df);
expect_equal(length(annot$vertices), 100000);
expect_equal(length(annot$label_codes), 100000);
expect_equal(length(annot$label_names), 100000);
expect_equal(length(annot$hex_colors_rgb), 100000);
expect_equal(nrow(annot$colortable_df), 3);
expect_equal(nrow(annot$colortable$table), 3);
})
|
extract.compared.vectors <- function(
output_from_compare.vectors,
vector_names = NULL,
only_match_vector_names = FALSE,
degrees_of_comparison = NULL,
elements_of_output = NULL
){
output <- output_from_compare.vectors
if(!is.null(degrees_of_comparison)){
output <- output[
which(
sapply(
purrr::map(output, "elements_involved"),
function(x){length(x) %in% degrees_of_comparison}
)
)
]
}
if(!is.null(vector_names)){
if(only_match_vector_names == TRUE){
output <- output[
sapply(
purrr::map(output, "elements_involved"),
function(x){setequal(x, vector_names)}
)
]
} else {
output <- output[
sapply(
purrr::map(output, "elements_involved"),
function(x){all(vector_names %in% x)}
)
]
}
}
if(length(output) > 0){
output <- output[
sapply(
output,
function(x){!is.null(x)}
)
]
}
if(!is.null(elements_of_output)){
output <- lapply(output, function(x){
return(x[elements_of_output])
})
}
if(length(output) == 1){
output <- output[[1]]
}
return(output)
}
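## Minimal sketch with a hand-built stand-in for compare.vectors() output
## (each comparison carries an `elements_involved` entry naming the vectors
## compared; purrr is required, as the function uses purrr::map() internally):
# cmp <- list(
#   list(elements_involved = c("a", "b"), overlap = "x"),
#   list(elements_involved = c("a", "b", "c"), overlap = character(0)))
# extract.compared.vectors(cmp, degrees_of_comparison = 2)  # pairwise only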
|
setMethod("dim", signature(x = "ratingMatrix"),
function(x) dim(x@data))
setMethod("dimnames", signature(x = "ratingMatrix"),
function(x) dimnames(x@data))
setReplaceMethod("dimnames", signature(x = "ratingMatrix",
value = "list"), function(x, value) {
dimnames(x@data) <- value
x
})
setAs("ratingMatrix", "list", function(from) getList(from))
setMethod("getData.frame", signature(from = "ratingMatrix"),
function(from, decode = TRUE, ratings = TRUE,...) {
dgT <- as(from, "dgTMatrix")
if(decode) {
df <- data.frame(user=rownames(from)[dgT@i+1L],
item=colnames(from)[dgT@j+1L],
rating=dgT@x)
}else{
df <- data.frame(user=dgT@i+1L,
item=dgT@j+1L,
rating=dgT@x)
}
if(!ratings) df <- df[,-3]
df[order(df[,1]),]
})
setAs("ratingMatrix", "data.frame", function(from) getData.frame(from))
setMethod("colCounts", signature(x = "ratingMatrix"),
function(x, ...) colSums(hasRating(x)))
setMethod("rowCounts", signature(x = "ratingMatrix"),
function(x, ...) rowSums(hasRating(x)))
setMethod("colSums", signature(x = "ratingMatrix"),
function(x, na.rm = FALSE, dims = 1, ...) colSums(as(x, "dgCMatrix"), na.rm, dims, ...))
setMethod("rowSums", signature(x = "ratingMatrix"),
function(x, na.rm = FALSE, dims = 1, ...) rowSums(as(x, "dgCMatrix"), na.rm, dims, ...))
setMethod("colMeans", signature(x = "ratingMatrix"),
function(x, na.rm = FALSE, dims = 1, ...)
colSums(x, dims, na.rm, ...) / colCounts(x, dims, na.rm, ...))
setMethod("rowMeans", signature(x = "ratingMatrix"),
function(x, na.rm = FALSE, dims = 1, ...)
rowSums(x, dims, na.rm, ...) / rowCounts(x, dims, na.rm, ...))
setMethod("hasRating", signature(x = "ratingMatrix"),
function(x, ...) as(x, "ngCMatrix"))
setMethod("nratings", signature(x = "ratingMatrix"),
function(x, ...) sum(hasRating(x)))
setMethod("getNormalize", signature(x = "ratingMatrix"),
function(x, ...) x@normalize)
setMethod("getRatingMatrix", signature(x = "ratingMatrix"),
function(x, ...) x@data)
setMethod("getRatings", signature(x = "ratingMatrix"),
function(x, ...) as(x, "dgCMatrix")@x)
setMethod("[", signature(x = "ratingMatrix"),
function(x, i, j, ..., drop) {
if(!missing(drop) && drop) warning("drop not implemented for ratingMatrix!")
if(missing(i)) i <- 1:nrow(x)
if(missing(j)) j <- 1:ncol(x)
if(is.null(i)) i <- integer(0)
if(is.null(j)) j <- integer(0)
x@data <- x@data[i,j, ..., drop=FALSE]
x
})
setMethod("sample", signature(x = "ratingMatrix"),
function(x, size, replace = FALSE, prob = NULL){
index <- sample(c(1:nrow(x)), size = size,
replace = replace, prob = prob)
x[index,]
})
setMethod("show", signature(object = "ratingMatrix"),
function(object) {
cat(nrow(object), 'x', ncol(object), "rating matrix of class",
sQuote(class(object)), "with",
nratings(object), "ratings.\n")
if(!is.null(object@normalize$row))
cat("Normalized using",object@normalize$row$method,"on rows.\n")
if(!is.null(object@normalize$col))
cat("Normalized using",object@normalize$col$method,"on columns.\n")
invisible(NULL)
})
setMethod("image", signature(x = "ratingMatrix"),
function(x, xlab = "Items (Columns)", ylab = "Users (Rows)",
colorkey=TRUE, ...) {
if(is(x, "binaryRatingMatrix")) colorkey <- FALSE
Matrix::image(as(x, "dgTMatrix"), ylab = ylab, xlab = xlab,
colorkey = colorkey, ...)
})
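## Usage sketch (assumes recommenderlab, which defines ratingMatrix and the
## generics above, is installed; MovieLense is one of its bundled datasets):
# library(recommenderlab)
# data(MovieLense)
# dim(MovieLense)              # users x items, via the dim() method above
# nratings(MovieLense)         # total number of stored ratings
# head(rowCounts(MovieLense))  # ratings per user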
|
knitr::opts_chunk$set(echo = TRUE)
library(GSIF)
library(rgdal)
library(raster)
library(geoR)
library(ranger)
library(gstat)
library(intamap)
library(plyr)
library(plotKML)
library(scales)
library(RCurl)
library(parallel)
library(lattice)
library(gridExtra)
source('./RF_vs_kriging/R/RFsp_functions.R')
demo(meuse, echo=FALSE)
grid.dist0 <- GSIF::buffer.dist(meuse["zinc"], meuse.grid[1], as.factor(1:nrow(meuse)))
dn0 <- paste(names(grid.dist0), collapse="+")
fm0 <- as.formula(paste("zinc ~ ", dn0))
fm0
ov.zinc <- over(meuse["zinc"], grid.dist0)
rm.zinc <- cbind(meuse@data["zinc"], ov.zinc)
m.zinc <- ranger(fm0, rm.zinc, quantreg=TRUE, num.trees=150, seed=1)
m.zinc
zinc.rfd <- predict(m.zinc, grid.dist0@data, type="quantiles", quantiles=quantiles)$predictions
str(zinc.rfd)
meuse.grid$zinc_rfd = zinc.rfd[,2]
meuse.grid$zinc_rfd_range = (zinc.rfd[,3]-zinc.rfd[,1])/2
zinc.geo <- as.geodata(meuse["zinc"])
ini.v <- c(var(log1p(zinc.geo$data)),500)
zinc.vgm <- likfit(zinc.geo, lambda=0, ini=ini.v, cov.model="exponential")
zinc.vgm
locs = meuse.grid@coords
zinc.ok <- krige.conv(zinc.geo, locations=locs, krige=krige.control(obj.model=zinc.vgm))
meuse.grid$zinc_ok = zinc.ok$predict
meuse.grid$zinc_ok_range = sqrt(zinc.ok$krige.var)
meuse.grid$SW_occurrence = readGDAL("./RF_vs_kriging/data/meuse/Meuse_GlobalSurfaceWater_occurrence.tif")$band1[meuse.grid@grid.index]
meuse.grid$AHN = readGDAL("./RF_vs_kriging/data/meuse/ahn.asc")$band1[meuse.grid@grid.index]
grids.spc = GSIF::spc(meuse.grid, as.formula("~ SW_occurrence + AHN + ffreq + dist"))
fm1 <- as.formula(paste("zinc ~ ", dn0, " + ", paste(names(grids.spc@predicted), collapse = "+")))
fm1
ov.zinc1 <- over(meuse["zinc"], grids.spc@predicted)
rm.zinc1 <- do.call(cbind, list(meuse@data["zinc"], ov.zinc, ov.zinc1))
m1.zinc <- ranger(fm1, rm.zinc1, importance="impurity", quantreg=TRUE, num.trees=150, seed=1)
m1.zinc
xl <- as.list(ranger::importance(m1.zinc))
par(mfrow=c(1,1),oma=c(0.7,2,0,1), mar=c(4,3.5,1,0))
plot(vv <- t(data.frame(xl[order(unlist(xl), decreasing=TRUE)[10:1]])), 1:10, type = "n", ylab = "", yaxt = "n", xlab = "Variable Importance (Node Impurity)")
abline(h = 1:10, lty = "dotted", col = "grey60")
points(vv, 1:10)
axis(2, 1:10, labels = dimnames(vv)[[1]], las = 2)
zinc.geo$covariate = ov.zinc1
sic.t = ~ PC1 + PC2 + PC3 + PC4 + PC5
zinc1.vgm <- likfit(zinc.geo, trend = sic.t, lambda=0, ini=ini.v, cov.model="exponential")
zinc1.vgm
KC = krige.control(trend.d = sic.t, trend.l = ~ grids.spc@predicted$PC1 + grids.spc@predicted$PC2 + grids.spc@predicted$PC3 + grids.spc@predicted$PC4 + grids.spc@predicted$PC5, obj.model = zinc1.vgm)
zinc.uk <- krige.conv(zinc.geo, locations=locs, krige=KC)
meuse.grid$zinc_UK = zinc.uk$predict
meuse@data = cbind(meuse@data, data.frame(model.matrix(~soil-1, meuse@data)))
summary(as.factor(meuse$soil1))
fm.s1 = as.formula(paste("soil1 ~ ", paste(names(grid.dist0), collapse="+"), " + SW_occurrence + dist"))
rm.s1 <- do.call(cbind, list(meuse@data["soil1"], over(meuse["soil1"], meuse.grid), over(meuse["soil1"], grid.dist0)))
m1.s1 <- ranger(fm.s1, rm.s1, mtry=22, num.trees=150, seed=1, quantreg=TRUE)
m1.s1
fm.s1c <- as.formula(paste("soil1c ~ ", paste(names(grid.dist0), collapse="+"), " + SW_occurrence + dist"))
rm.s1$soil1c = as.factor(rm.s1$soil1)
m2.s1 <- ranger(fm.s1c, rm.s1, mtry=22, num.trees=150, seed=1, probability=TRUE, keep.inbag=TRUE)
m2.s1
pred.regr <- predict(m1.s1, cbind(meuse.grid@data, grid.dist0@data), type="response")
pred.clas <- predict(m2.s1, cbind(meuse.grid@data, grid.dist0@data), type="se")
fm.s = as.formula(paste("soil ~ ", paste(names(grid.dist0), collapse="+"), " + SW_occurrence + dist"))
fm.s
rm.s <- do.call(cbind, list(meuse@data["soil"], over(meuse["soil"], meuse.grid), over(meuse["soil"], grid.dist0)))
m.s <- ranger(fm.s, rm.s, mtry=22, num.trees=150, seed=1, probability=TRUE, keep.inbag=TRUE)
m.s
m.s0 <- ranger(fm.s, rm.s, mtry=22, num.trees=150, seed=1)
m.s0
pred.soil_rfc = predict(m.s, cbind(meuse.grid@data, grid.dist0@data), type="se")
pred.grids = meuse.grid["soil"]
pred.grids@data = do.call(cbind, list(pred.grids@data, data.frame(pred.soil_rfc$predictions), data.frame(pred.soil_rfc$se)))
names(pred.grids) = c("soil", paste0("pred_soil", 1:3), paste0("se_soil", 1:3))
str(pred.grids@data)
library(intamap)
library(gstat)
data(sic2004)
coordinates(sic.val) <- ~x+y
sic.val$value <- sic.val$joker
coordinates(sic.test) <- ~x+y
pred.sic2004 <- interpolate(sic.val, sic.test, maximumTime = 90)
sd(sic.test$joker-pred.sic2004$predictions$mean)
bbox=sic.val@bbox
bbox[,"min"]=bbox[,"min"]-4000
bbox[,"max"]=bbox[,"max"]+4000
de2km = plotKML::vect2rast(sic.val, cell.size=2000, bbox=bbox)
de2km$mask = 1
de2km = as(de2km["mask"], "SpatialPixelsDataFrame")
de.dist0 <- GSIF::buffer.dist(sic.val["joker"], de2km, as.factor(1:nrow(sic.val@data)))
ov.de = over(sic.val["joker"], de.dist0)
de.dn0 <- paste(names(de.dist0), collapse="+")
de.fm1 <- as.formula(paste("joker ~ ", de.dn0))
de.rm = do.call(cbind, list(sic.val@data["joker"], ov.de))
m1.gamma <- ranger(de.fm1, de.rm[complete.cases(de.rm),], mtry=1)
m1.gamma
de2km$gamma_rfd1 = predict(m1.gamma, de.dist0@data)$predictions
ov.test <- over(sic.test, de2km["gamma_rfd1"])
sd(sic.test$joker-ov.test$gamma_rfd1, na.rm=TRUE)
par(oma=c(0,0,0,1), mar=c(0,0,4,3))
plot(raster(de2km["gamma_rfd1"]), col=rev(bpy.colors()))
points(sic.val, pch="+")
carson <- read.csv(file="./RF_vs_kriging/data/NRCS/carson_CLYPPT.csv")
str(carson)
carson$DEPTH.f = ifelse(is.na(carson$DEPTH), 20, carson$DEPTH)
carson1km <- readRDS("./RF_vs_kriging/data/NRCS/carson_covs1km.rds")
coordinates(carson) <- ~X+Y
proj4string(carson) = carson1km@proj4string
rm.carson <- cbind(as.data.frame(carson), over(carson["CLYPPT"], carson1km))
fm.clay <- as.formula(paste("CLYPPT ~ DEPTH.f + ", paste(names(carson1km), collapse = "+")))
fm.clay
rm.carson <- rm.carson[complete.cases(rm.carson[,all.vars(fm.clay)]),]
rm.carson.s <- rm.carson[sample.int(nrow(rm.carson), size=2000),]
m.clay <- ranger(fm.clay, rm.carson.s, num.trees=150, mtry=25, case.weights=1/(rm.carson.s$CLYPPT.sd^2), quantreg = TRUE)
m.clay
geochem = readRDS("./RF_vs_kriging/data/geochem/geochem.rds")
usa5km = readRDS("./RF_vs_kriging/data/geochem/usa5km.rds")
str(usa5km@data)
for(i in c("PB_ICP40","CU_ICP40","K_ICP40","MG_ICP40")) { geochem[,i] = ifelse(geochem[,i] < 0, abs(geochem[,i])/2, geochem[,i]) }
coordinates(geochem) = ~coords.x1 + coords.x2
proj4string(geochem) = "+proj=longlat +ellps=clrk66 +towgs84=-9.0,151.0,185.0,0.0,0.0,0.0,0.0 +no_defs"
geochem$TYPEDESC = as.factor(paste(geochem$TYPEDESC))
summary(geochem$TYPEDESC)
geochem = spTransform(geochem, CRS(proj4string(usa5km)))
usa5km.spc = spc(usa5km, ~geomap+globedem+dTRI+nlights03+dairp+sdroads)
ov.geochem = over(x=geochem, y=usa5km.spc@predicted)
t.vars = c("PB_ICP40","CU_ICP40","K_ICP40","MG_ICP40")
df.lst = lapply(t.vars, function(i){cbind(geochem@data[,c(i,"TYPEDESC")], ov.geochem)})
names(df.lst) = t.vars
for(i in t.vars){colnames(df.lst[[i]])[1] = "Y"}
for(i in t.vars){df.lst[[i]]$TYPE = i}
rm.geochem = do.call(rbind, df.lst)
type.mat = data.frame(model.matrix(~TYPE-1, rm.geochem))
typed.mat = data.frame(model.matrix(~TYPEDESC-1, rm.geochem))
rm.geochem.e = do.call(cbind, list(rm.geochem[,c("Y",paste0("PC",1:21))], type.mat, typed.mat))
fm.g = as.formula(paste0("Y ~ ", paste0(names(rm.geochem.e)[-1], collapse = "+")))
fm.g
m1.geochem <- ranger::ranger(fm.g, rm.geochem.e[complete.cases(rm.geochem.e),], importance = "impurity", seed = 1)
m1.geochem
co_prec = readRDS("./RF_vs_kriging/data/st_prec/boulder_prcp.rds")
str(co_prec)
co_prec$cdate = floor(unclass(as.POSIXct(paste(co_prec$DATE), format="%Y-%m-%d"))/86400)
co_prec$doy = as.integer(strftime(as.POSIXct(paste(co_prec$DATE), format="%Y-%m-%d"), format = "%j"))
co_locs.sp = co_prec[!duplicated(co_prec$STATION),c("STATION","LATITUDE","LONGITUDE")]
coordinates(co_locs.sp) = ~ LONGITUDE + LATITUDE
proj4string(co_locs.sp) = CRS("+proj=longlat +datum=WGS84")
co_grids = readRDS("./RF_vs_kriging/data/st_prec/boulder_grids.rds")
co_grids = as(co_grids, "SpatialPixelsDataFrame")
co_locs.sp = spTransform(co_locs.sp, co_grids@proj4string)
sel.co <- over(co_locs.sp, co_grids[1])
co_locs.sp <- co_locs.sp[!is.na(sel.co$elev_1km),]
grid.distP <- GSIF::buffer.dist(co_locs.sp["STATION"], co_grids[1], as.factor(1:nrow(co_locs.sp)))
dnP <- paste(names(grid.distP), collapse="+")
fmP <- as.formula(paste("PRCP ~ cdate + doy + elev_1km + PRISM_prec +", dnP))
fmP
ov.prec <- do.call(cbind, list(co_locs.sp@data, over(co_locs.sp, grid.distP), over(co_locs.sp, co_grids[c("elev_1km","PRISM_prec")])))
rm.prec <- plyr::join(co_prec, ov.prec)
rm.prec <- rm.prec[complete.cases(rm.prec[,c("PRCP","elev_1km","cdate")]),]
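## Hedged sketch of the natural next step (not part of the original snippet):
## fit the space-time ranger model on the assembled regression matrix;
## num.trees and mtry below are illustrative values only.
# m1.prec <- ranger::ranger(fmP, rm.prec, num.trees = 150, mtry = 25)
# m1.prec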
|
if (identical(Sys.getenv("NOT_CRAN"), "true")) {
library(testthat)
library(healthcareai)
Sys.setenv("R_TESTS" = "")
test_check("healthcareai", filter = "^[(a-o)|(A-O)]")
}
|
ddiscexp <- function(x, lambda, threshold=0, log=FALSE) {
if (log) {
C <- log(1-exp(-lambda)) + lambda*threshold
f <- function(x) {C -lambda*x}
} else {
C <- (1-exp(-lambda))*exp(lambda*threshold)
f <- function(x) {C*exp(-lambda*x)}
}
d <- ifelse(x<threshold,NA,f(x))
return(d)
}
discexp.loglike <- function(x, lambda, threshold=0) {
return(sum(suppressWarnings(ddiscexp(x,lambda,threshold,log=TRUE))))
}
discexp.fit <- function(x,threshold=0) {
x <- x[x>=threshold]
n <- length(x)
lambda <- log(1+n/sum(x-threshold))
loglike <- discexp.loglike(x,lambda,threshold)
fit <- list(type="discexp", lambda=lambda, loglike=loglike,
threshold=threshold, method="formula", samples.over.threshold=n)
return(fit)
}
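## Minimal self-check for the discrete exponential fit above: a geometric
## sample with success probability 1-exp(-lambda) follows ddiscexp() with
## threshold 0, so discexp.fit() should recover lambda (values illustrative).
set.seed(101)
x.sim <- rgeom(5000, prob = 1 - exp(-0.5))
fit.sim <- discexp.fit(x.sim)
fit.sim$lambda # should be close to 0.5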
|
.prepare_calcregion <- function(calcregion, imgdim, psf, finesample)
{
.Call('R_profit_adjust_mask', calcregion, imgdim, psf, finesample)
}
.profitParsePSF <- function(psf, modellist, psfdim=dim(psf), finesample=1L)
{
haspsf = length(psf) > 0
if(!is.null(modellist$psf))
{
psftype = "analytical"
haspsf= TRUE
if(all(names(modellist) %in% c("pointsource", "psf","sky"))) psf = matrix(1,1,1)
else
{
stopifnot(!is.null(psfdim))
psf = profitMakePointSource(modellist=modellist$psf,finesample=finesample,
image=matrix(0,psfdim[1],psfdim[2]), returnfine = TRUE)
sumpsf = sum(psf)
psfsumdiff = !abs(sumpsf-1) < 1e-2
if(psfsumdiff) stop(paste0("Error; model psf has |sum - 1| = ",abs(sumpsf-1)," > 1e-2; ",
"please adjust your PSF model or psf dimensions until it is properly normalized."))
psf = psf/sumpsf
}
} else if(haspsf) {
psftype = "empirical"
} else {
psftype = "none"
}
return(list(has=haspsf,psf=psf,type=psftype))
}
profitDataBenchmark <- function(modellist, calcregion, imgdim,
finesample=1L, psf=NULL, fitpsf=FALSE, omp_threads=NULL, openclenv=NULL,
openclenv_int=openclenv, openclenv_conv=openclenv,
nbenchmark=0L, nbenchint=nbenchmark, nbenchconv=nbenchmark,
benchintmethods=c("brute"), benchconvmethods = c("brute","fftw"),
benchprecisions="double", benchconvprecisions=benchprecisions,
benchintprecisions=benchprecisions,
benchopenclenvs = profitGetOpenCLEnvs(make.envs = TRUE),
printbenchmark=FALSE, printbenchint=printbenchmark, printbenchconv=printbenchmark)
{
profitCheckIsPositiveInteger(finesample)
haspsf = .profitParsePSF(psf, modellist, finesample=finesample)$has
usecalcregion = haspsf
if(haspsf)
{
modelimg = profitMakeModel(modellist, dim=imgdim, finesample=finesample, psf=psf,
returnfine = TRUE, returncrop = FALSE, openclenv=openclenv_int, omp_threads=omp_threads)
imgdim = dim(modelimg$z)/finesample
}
benches=list()
if((length(benchintmethods) > 1) && nbenchint > 0)
{
image = matrix(0,imgdim[1],imgdim[2])
benches$benchint = profitBenchmark(image=image, modellist = modellist,
nbench = nbenchint, methods = benchintmethods, precisions = benchintprecisions,
openclenvs = benchopenclenvs, omp_threads = omp_threads, finesample = finesample)
if(printbenchint)
{
print(profitBenchmarkResultStripPointers(benches$benchint$result)[
c("name","env_name","version","dev_name",paste0("tinms.mean_",c("single","double")))])
}
bestint = profitBenchmarkResultBest(benches$benchint$result)
print(paste0("Best integrator: '", bestint$name, "' device: '", bestint$dev_name,
"', t=[",sprintf("%.2e",bestint$time)," ms]"))
openclenv_int = bestint$openclenv
} else {
if(identical(openclenv_int,"get")) openclenv_int = openclenv
}
convopt = list(convolver=NULL,openclenv=openclenv_conv)
if(haspsf)
{
dimregion = dim(calcregion)
dimmodel = dim(modelimg$z)
dimdiff = (dimmodel - dimregion)/2
if(any(dimdiff>0))
{
benchregion = matrix(0,dimmodel[1],dimmodel[2])
benchregion[(1:dimregion[1])+dimdiff[1],(1:dimregion[2])+dimdiff[2]] = calcregion
} else {
benchregion = calcregion
}
if(nbenchconv > 0)
{
benches$benchconv = profitBenchmark(image = modelimg$z, psf=psf,
nbench = nbenchconv, calcregion = benchregion,
reusepsffft = !fitpsf, methods = benchconvmethods,
openclenvs = benchopenclenvs, omp_threads = omp_threads)
if(printbenchconv)
{
print(profitBenchmarkResultStripPointers(benches$benchconv$result)[
c("name","env_name","version","dev_name",paste0("tinms.mean_",c("single","double")))])
}
bestconv = profitBenchmarkResultBest(benches$benchconv$result)
print(paste0("Best convolver: '", bestconv$name, "' device: '", bestconv$dev_name,
"', t=[",sprintf("%.2e",bestconv$time)," ms]"))
convopt$convolver = bestconv$convolver
convopt$openclenv = bestconv$openclenv
usecalcregion = bestconv$usecalcregion
} else {
convpsf = psf
if(finesample > 1) convpsf = profitUpsample(psf, finesample)
if(identical(openclenv_conv,"get")) openclenv_conv = profitOpenCLEnv()
if(is.character(benchconvmethods) && length(benchconvmethods) > 0)
{
convmethod = benchconvmethods[1]
} else {
if(is.null(openclenv_conv)) convmethod = "brute"
else convmethod = "opencl"
}
convopt$convolver = profitMakeConvolver(convmethod,dim(modelimg),psf = convpsf,
openclenv=openclenv_conv)
}
}
rv = list(
benches=benches, convopt=convopt, usecalcregion=usecalcregion,
openclenv = openclenv_int)
class(rv)="profit.data.benchmark"
return(rv)
}
profitDataSetOptionsFromBenchmarks <- function(Data, benchmarks)
{
if(!inherits(Data, 'profit.data'))
{
stop("The Data must be of class profit.data, as generated by the profitSetupData function!")
}
if(!inherits(benchmarks, 'profit.data.benchmark'))
{
stop("The benchmarks must be of class profit.data.benchmark, as generated by the profitDataBenchmark function!")
}
for(var in names(benchmarks))
{
Data[[var]] = benchmarks[[var]]
}
return(Data)
}
profitSetupData=function(image, region, sigma, segim, mask, modellist,
tofit, tolog, priors, intervals, constraints, psf=NULL, psfdim=dim(psf),
finesample=1L, psffinesampled=FALSE, magzero=0, algo.func='LA',
like.func="norm", magmu=FALSE, verbose=FALSE, omp_threads = NULL,
openclenv=NULL, openclenv_int=openclenv, openclenv_conv=openclenv,
nbenchmark=0L, nbenchint=nbenchmark, nbenchconv=nbenchmark,
benchintmethods=c("brute"), benchconvmethods = c("brute","fftw"),
benchprecisions="double", benchconvprecisions=benchprecisions,
benchintprecisions=benchprecisions,
benchopenclenvs = profitGetOpenCLEnvs(make.envs = TRUE),
printbenchmark=FALSE, printbenchint=printbenchmark, printbenchconv=printbenchmark)
{
profitCheckIsPositiveInteger(finesample)
stopifnot(all(is.integer(c(nbenchconv,nbenchint))) && nbenchint >= 0L && nbenchconv >=0L)
if(missing(image)){stop("User must supply an image matrix input!")}
if(missing(modellist)){stop("User must supply a modellist input!")}
imagedim = dim(image)
if(missing(mask)){mask=matrix(0,imagedim[1],imagedim[2])}
if(missing(sigma)){sigma=sqrt(abs(image))}
if(missing(segim)){segim=matrix(1,imagedim[1],imagedim[2])}
if(missing(tofit)){
tofit=relist(rep(TRUE,length(unlist(modellist))),modellist)
}else{
if(length(unlist(tofit)) != length(unlist(modellist))){
tofit_temp=relist(rep(TRUE,length(unlist(modellist))),modellist)
compnames=names(tofit)
for(i in compnames){
subnames=names(tofit[[i]])
for(j in subnames){
subsubnames=names(tofit[[i]][[j]])
if(is.null(subsubnames)){
tofit_temp[[i]][[j]]=tofit[[i]][[j]]
}else{
for (k in subsubnames){
tofit_temp[[i]][[j]][[k]]=tofit[[i]][[j]][[k]]
}
}
}
}
tofit=tofit_temp
}
}
if(missing(tolog)){
tolog=relist(rep(FALSE,length(unlist(modellist))),modellist)
}else{
if(length(unlist(tolog)) != length(unlist(modellist))){
tolog_temp=relist(rep(FALSE,length(unlist(modellist))),modellist)
compnames=names(tolog)
for(i in compnames){
subnames=names(tolog[[i]])
for(j in subnames){
subsubnames=names(tolog[[i]][[j]])
if(is.null(subsubnames)){
tolog_temp[[i]][[j]]=tolog[[i]][[j]]
}else{
for (k in subsubnames){
tolog_temp[[i]][[j]][[k]]=tolog[[i]][[j]][[k]]
}
}
}
}
tolog=tolog_temp
}
}
if(missing(priors)){priors={}}
if(missing(intervals)){intervals={}}
if(missing(constraints)){constraints={}}
if(missing(region)){
segimkeep = segim[ceiling(imagedim[1]/2),ceiling(imagedim[2]/2)]
region = segim==segimkeep & mask!=1
}else{
region=region==TRUE
}
psf = .profitParsePSF(psf, modellist, psfdim, finesample)
psftype = psf$type
haspsf = psf$has
psf = psf$psf
if(haspsf)
{
psf[psf<0] = 0
dimpsf = dim(psf)
if(psftype == "empirical")
{
xeven = dimpsf[1]%%2==0
yeven = dimpsf[2]%%2==0
if(((finesample > 1L) && !psffinesampled) || xeven || yeven)
{
xrange = seq(0.5*(1+xeven),dimpsf[1]-0.5*(1+xeven),1/finesample)
yrange = seq(0.5*(1+yeven),dimpsf[2]-0.5*(1+yeven),1/finesample)
regrid=expand.grid(xrange-dimpsf[1]/2,yrange-dimpsf[2]/2)
psf=matrix(profitInterp2d(regrid[,1],regrid[,2],psf)[,3],length(xrange),length(yrange))
}
} else if(psftype == "analytical")
{
psffinesampled = finesample > 1
}
psf = psf/sum(psf)
}
if (!is.null(openclenv)) {
  # an externalptr openclenv is used as-is; the string "get" requests a fresh environment
  if (identical(openclenv, "get")) {
    openclenv = profitOpenCLEnv()
  }
}
calcregion = .prepare_calcregion(region, imagedim, psf, finesample)
fitpsf = psftype == "analytical" && any(unlist(tofit$psf)) && any(!(names(modellist) %in% c("psf","pointsource","sky")))
benchmarks = profitDataBenchmark(modellist = modellist, calcregion = calcregion, imgdim = imagedim,
finesample = finesample, psf=psf, fitpsf = fitpsf, omp_threads = omp_threads,
openclenv = openclenv, openclenv_int = openclenv_int, openclenv_conv = openclenv_conv,
nbenchmark = nbenchmark, nbenchint = nbenchint, nbenchconv = nbenchconv,
benchintmethods = benchintmethods, benchconvmethods = benchconvmethods,
benchprecisions = benchprecisions, benchconvprecisions = benchconvprecisions,
benchintprecisions = benchintprecisions, benchopenclenvs = benchopenclenvs,
printbenchmark = printbenchmark, printbenchint=printbenchint, printbenchconv=printbenchconv)
init = unlist(modellist)
init[unlist(tolog)]=log10(init[unlist(tolog)])
init=init[which(unlist(tofit))]
parm.names=names(init)
mon.names=c("LL","LP","time")
if(profitParseLikefunc(like.func) == "t") mon.names=c(mon.names,"dof")
profit.data=list(
init=init, image=image, mask=mask, sigma=sigma, segim=segim, modellist=modellist,
psf=psf, psftype=psftype, fitpsf=fitpsf,
algo.func=algo.func, mon.names=mon.names, parm.names=parm.names, N=length(which(as.logical(region))),
region=region, calcregion=calcregion, tofit=tofit, tolog=tolog, priors=priors, intervals=intervals, constraints=constraints,
like.func = like.func, magzero=magzero, finesample=finesample, imagedim=imagedim, verbose=verbose, magmu=magmu,
openclenv=openclenv, omp_threads=omp_threads)
class(profit.data)="profit.data"
profit.data = profitDataSetOptionsFromBenchmarks(profit.data, benchmarks)
return(profit.data)
}
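## Hedged workflow sketch (argument values are assumptions; see the ProFit docs):
## run the benchmarks separately and copy the chosen options back into the
## profit.data object via profitDataSetOptionsFromBenchmarks().
# Data <- profitSetupData(image = image, modellist = modellist, tofit = tofit)
# bench <- profitDataBenchmark(modellist, Data$calcregion, dim(image), nbenchmark = 2L)
# Data <- profitDataSetOptionsFromBenchmarks(Data, bench)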
|
NULL
default_table_width_unit <- "\\textwidth"
print_latex <- function (ht, ...) {
cat(to_latex(ht, ...))
}
to_latex <- function (ht, ...) UseMethod("to_latex")
to_latex.huxtable <- function (ht, tabular_only = FALSE, ...){
assert_that(is.flag(tabular_only))
tabular <- build_tabular(ht)
commands <- "
\\providecommand{\\huxb}[2]{\\arrayrulecolor[RGB]{#1}\\global\\arrayrulewidth=#2pt}
\\providecommand{\\huxvb}[2]{\\color[RGB]{#1}\\vrule width #2pt}
\\providecommand{\\huxtpad}[1]{\\rule{0pt}{#1}}
\\providecommand{\\huxbpad}[1]{\\rule[-#1]{0pt}{#1}}
"
if (tabular_only) return(maybe_markdown_fence(paste0(commands, tabular)))
tabular <- paste0("\\setlength{\\tabcolsep}{0pt}\n", tabular)
resize_box <- if (is.na(height <- height(ht))) c("", "") else {
if (is.numeric(height)) height <- sprintf("%.3g\\textheight", height)
c(sprintf("\\resizebox*{!}{%s}{", height), "}")
}
table_env <- table_environment(ht)
table_env <- switch(position(ht),
"wrapleft" = c("\\begin{wraptable}{l}{%s}", "\\end{wraptable}"),
"wrapright" = c("\\begin{wraptable}{r}{%s}", "\\end{wraptable}"),
c(
sprintf("\\begin{%s}[%s]", table_env, latex_float(ht)),
sprintf("\\end{%s}", table_env)
)
)
wraptable_width <- latex_table_width(ht)
if (is.na(wraptable_width)) wraptable_width <- "0.25\\textwidth"
if (position(ht) %in% c("wrapleft", "wrapright")) {
table_env[1] <- sprintf(table_env[1], wraptable_width)
}
table_env <- paste0("\n", table_env, "\n")
cap <- build_latex_caption(ht)
pos_text <- switch(position(ht),
wrapleft = ,
left = c("\\begin{raggedright}\n", "\\par\\end{raggedright}\n"),
center = c("\\begin{centerbox}\n", "\\par\\end{centerbox}\n"),
wrapright = ,
right = c("\\begin{raggedleft}\n", "\\par\\end{raggedleft}\n")
)
cap_top <- grepl("top", caption_pos(ht))
cap <- if (cap_top) c(cap, "") else c("", cap)
tpt <- c("\\begin{threeparttable}\n", "\n\\end{threeparttable}")
res <- if (is.na(caption_width(ht))) {
nest_strings(table_env, pos_text, tpt, cap, tabular)
} else {
nest_strings(table_env, cap, pos_text, tabular)
}
res <- paste0(commands, res)
return(maybe_markdown_fence(res))
}
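## Hedged usage sketch for the printer above (assumes the public huxtable API):
# ht <- huxtable::hux(a = 1:2, b = c("x", "y"))
# cat(to_latex(ht, tabular_only = TRUE))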
build_latex_caption <- function (ht) {
lab <- make_label(ht)
cap_has_label <- FALSE
if (is.na(cap <- make_caption(ht, lab, "latex"))) {
cap <- ""
} else {
cap_has_label <- ! is.null(attr(cap, "has_label"))
hpos <- get_caption_hpos(ht)
cap_just <- switch(hpos,
left = "raggedright",
center = "centering",
right = "raggedleft"
)
cap_width <- caption_width(ht)
if (is.na(cap_width)) {
cap_margins <- ""
} else {
if (! is.na(suppressWarnings(as.numeric(cap_width)))) {
cap_width <- sprintf("%s\\textwidth", cap_width)
}
cap_margin_width <- paste("\\textwidth - ", cap_width)
cap_margins <- switch(hpos,
right = c(cap_margin_width, "0pt"),
center = rep(paste0("(", cap_margin_width, ")/2"), 2),
left = c("0pt", cap_margin_width)
)
cap_margins <- sprintf("margin={%s,%s},", cap_margins[1], cap_margins[2])
}
cap <- sprintf(
"\\captionsetup{justification=%s,%ssinglelinecheck=off}\n\\caption{%s}\n",
cap_just, cap_margins, cap)
}
lab <- if (is.na(lab) || cap_has_label) "" else sprintf("\\label{%s}\n", lab)
cap <- paste(cap, lab)
return(cap)
}
build_tabular <- function (ht) {
if (! check_positive_dims(ht)) return("")
multirow <- multicol <- bg_color <- inner_cell <- contents <- matrix("", nrow(ht), ncol(ht))
real_align <- real_align(ht)
display_cells <- display_cells(ht, all = TRUE)
start_end_cols <- as.matrix(display_cells[, c("display_col", "end_col")])
width_spec <- apply(start_end_cols, 1, function (x) compute_width(ht, x[1], x[2]))
cb <- get_visible_borders(ht)
cbc <- collapsed_border_colors(ht)
cbs <- collapsed_border_styles(ht)
dc_pos_matrix <- as.matrix(display_cells[, c("display_row", "display_col")])
dc_map <- matrix(1:length(contents), nrow(ht), ncol(ht))
dc_map <- c(dc_map[dc_pos_matrix])
dc_idx <- ! display_cells$shadowed
left_idx <- display_cells$col == display_cells$display_col
right_idx <- display_cells$col == display_cells$end_col
bottom_idx <- display_cells$row == display_cells$end_row
multirow_idx <- display_cells$rowspan > 1
bl_idx <- bottom_idx & left_idx
blm_idx <- bl_idx & multirow_idx
bl_dc <- dc_map[bl_idx]
lh_dc <- dc_map[left_idx]
horiz_b <- cb$horiz
hb_maxes <- apply(horiz_b, 1, max)
if (any(horiz_b > 0 & horiz_b < hb_maxes[row(horiz_b)])) warning(
"Multiple horizontal border widths in a single row; using the maximum.")
has_own_border <- horiz_b > 0
horiz_b[] <- hb_maxes[row(horiz_b)]
hb_default <- is.na(cbc$horiz)
hb_colors <- format_color(cbc$horiz, default = "black")
hb_chars <- ifelse(cbs$horiz == "double", "=", "-")
bg_colors <- background_color(ht)[dc_map]
dim(bg_colors) <- dim(ht)
bg_colors <- rbind(rep(NA, ncol(horiz_b)), bg_colors)
bg_colors <- format_color(bg_colors, default = "white")
hhline_colors <- bg_colors
hhline_colors[has_own_border] <- hb_colors[has_own_border]
hhlines_horiz <- sprintf(">{\\huxb{%s}{%.4g}}%s", hhline_colors, horiz_b, hb_chars)
dim(hhlines_horiz) <- dim(horiz_b)
no_hborder_in_row <- hb_maxes[row(hhlines_horiz)] == 0
hhlines_horiz[no_hborder_in_row] <- ""
vert_b <- cb$vert
vert_b <- rbind(vert_b[1, ], vert_b)
vert_bs <- rbind(cbs$vert[1, ], cbs$vert)
vert_bc <- cbind(NA, cbc$horiz)
no_left_hb <- cbind(0, cb$horiz) == 0
no_lr_hb <- no_left_hb & cbind(cb$horiz, 0) == 0
no_lrb_b <- no_lr_hb & rbind(cb$vert, 0) == 0
vert_bc[no_left_hb] <- cbind(cbc$horiz, NA)[no_left_hb]
vert_bc[no_lr_hb] <- rbind(cbc$vert, NA)[no_lr_hb]
vert_bc[no_lrb_b] <- rbind(NA, cbc$vert)[no_lrb_b]
vert_bc <- format_color(vert_bc, default = "black")
hhlines_vert <- rep("", length(vert_b))
has_vert_b <- vert_b > 0
has_horiz_b <- cbind(cb$horiz[, 1], cb$horiz) > 0
vert_bchars <- rep("", length(vert_bc))
vert_bchars[! vert_bs == "double" & ! has_horiz_b] <- "|"
vert_bchars[vert_bs == "double" & ! has_horiz_b] <- "||"
hhlines_vert[has_vert_b] <- sprintf(">{\\huxb{%s}{%.4g}}%s",
vert_bc[has_vert_b],
vert_b[has_vert_b],
vert_bchars[has_vert_b])
hhlines_vert[vert_bchars == ""] <- ""
dim(hhlines_vert) <- c(nrow(horiz_b), ncol(horiz_b) + 1)
hhlines <- matrix("", nrow(hhlines_horiz), ncol(hhlines_horiz) + ncol(hhlines_vert))
hhlines[, seq(2, ncol(hhlines), 2)] <- hhlines_horiz
hhlines[, seq(1, ncol(hhlines), 2)] <- hhlines_vert
hhlines <- apply(hhlines, 1, paste0, collapse = "")
hhlines <- sprintf("\n\n\\hhline{%s}\n\\arrayrulecolor{black}\n", hhlines)
inner_cell_bldc <- clean_contents(ht, output_type = "latex")[bl_dc]
fs_bldc <- font_size(ht)[bl_dc]
line_space_bldc <- round(fs_bldc * 1.2, 2)
has_fs_bldc <- ! is.na(fs_bldc)
inner_cell_bldc[has_fs_bldc] <- sprintf("{\\fontsize{%.4gpt}{%.4gpt}\\selectfont %s}",
fs_bldc[has_fs_bldc], line_space_bldc[has_fs_bldc], inner_cell_bldc[has_fs_bldc])
tc_bldc <- text_color(ht)[bl_dc]
tcf_bldc <- format_color(tc_bldc)
has_tc_bldc <- ! is.na(tc_bldc)
inner_cell_bldc[has_tc_bldc] <- sprintf("\\textcolor[RGB]{%s}{%s}", tcf_bldc[has_tc_bldc],
inner_cell_bldc[has_tc_bldc])
bold_bldc <- bold(ht)[bl_dc]
italic_bldc <- italic(ht)[bl_dc]
inner_cell_bldc[bold_bldc] <- sprintf("\\textbf{%s}", inner_cell_bldc[bold_bldc])
inner_cell_bldc[italic_bldc] <- sprintf("\\textit{%s}", inner_cell_bldc[italic_bldc])
font_bldc <- font(ht)[bl_dc]
has_font_bldc <- ! is.na(font_bldc)
font_template <- if (getOption("huxtable.latex_use_fontspec", FALSE)) {
"{\\fontspec{%s} %s}"
} else {
"{\\fontfamily{%s}\\selectfont %s}"
}
inner_cell_bldc[has_font_bldc] <- sprintf(font_template,
font_bldc[has_font_bldc],
inner_cell_bldc[has_font_bldc])
rt_bldc <- rotation(ht)[bl_dc]
has_rt_bldc <- rt_bldc != 0
inner_cell_bldc[has_rt_bldc] <- sprintf("\\rotatebox{%.4g}{%s}", rt_bldc[has_rt_bldc],
inner_cell_bldc[has_rt_bldc])
pad_bldc <- list()
pad_bldc$left <- left_padding(ht)[bl_dc]
pad_bldc$right <- right_padding(ht)[bl_dc]
pad_bldc$top <- top_padding(ht)[bl_dc]
pad_bldc$bottom <- bottom_padding(ht)[bl_dc]
align_bldc <- real_align[bl_dc]
valign_bldc <- valign(ht)[bl_dc]
wrap_bldc <- wrap(ht)[bl_dc] & ! is.na(width(ht))
has_pad_bldc <- lapply(pad_bldc, Negate(is.na))
pad_bldc <- lapply(pad_bldc, function (x) if (is.numeric(x)) sprintf("%.4gpt", x) else x)
tpad_tex_bldc <- rep("", length(pad_bldc$top))
tpad_tex_bldc[has_pad_bldc$top] <- sprintf("\\huxtpad{%s + 1em}",
pad_bldc$top[has_pad_bldc$top])
bpad_tex_bldc <- rep("", length(pad_bldc$bottom))
bpad_vals_bldc <- pad_bldc$bottom[has_pad_bldc$bottom]
bpad_tex_bldc[has_pad_bldc$bottom] <- sprintf("\\huxbpad{%s}", bpad_vals_bldc)
align_tex_key <- c("left" = "\\raggedright ", "right" = "\\raggedleft ", "center" = "\\centering ")
align_tex_bldc <- align_tex_key[align_bldc]
lpad_tex_bldc <- ifelse(has_pad_bldc$left & ! wrap_bldc,
sprintf("\\hspace{%s} ", pad_bldc$left), "")
rpad_tex_bldc <- ifelse(has_pad_bldc$right & ! wrap_bldc,
sprintf(" \\hspace{%s}", pad_bldc$right), "")
inner_cell_bldc <- paste0(tpad_tex_bldc, align_tex_bldc, lpad_tex_bldc, inner_cell_bldc,
rpad_tex_bldc, bpad_tex_bldc)
if (any(wrap_bldc)) {
valign_tex_key <- c("top" = "b", "middle" = "c", "bottom" = "t")
valign_bldc <- valign_tex_key[valign_bldc]
width_spec_bldc <- width_spec[bl_dc]
left_pad_bldc <- ifelse(has_pad_bldc$left, sprintf("\\hspace{%s}", pad_bldc$left), "")
hpad_loss_left_bldc <- ifelse(has_pad_bldc$left, paste0("-", pad_bldc$left), "")
hpad_loss_right_bldc <- ifelse(has_pad_bldc$right, paste0("-", pad_bldc$right), "")
inner_cell_bldc[wrap_bldc] <- sprintf("%s\\parbox[%s]{%s%s%s}{%s}",
left_pad_bldc[wrap_bldc],
valign_bldc[wrap_bldc],
width_spec_bldc[wrap_bldc],
hpad_loss_left_bldc[wrap_bldc],
hpad_loss_right_bldc[wrap_bldc],
inner_cell_bldc[wrap_bldc]
)
}
row_height <- row_height(ht)
row_height_tex_bldc <- if (all(is.na(row_height))) {
rep("", sum(dc_idx))
} else {
start_end_rows_bldc <- display_cells[dc_map, c("display_row", "end_row")][bl_idx, ]
row_seqs_bldc <- apply(start_end_rows_bldc, 1, function (x) seq(x[1], x[2]))
rh_bldc <- sapply(row_seqs_bldc, function (x) {
rh <- row_height[x]
if (is.numeric(rh)) sprintf("%.4g\\textheight", sum(rh)) else paste(rh, collapse = "+")
})
sprintf("\\rule{0pt}{%s}", rh_bldc)
}
inner_cell_bldc <- paste0(inner_cell_bldc, row_height_tex_bldc)
inner_cell[bl_idx] <- inner_cell_bldc
bg_color_lhdc <- background_color(ht)[lh_dc]
has_bg_color_lhdc <- ! is.na(bg_color_lhdc)
bg_color_lhdc <- format_color(bg_color_lhdc)
bg_color_lhdc <- sprintf("\\cellcolor[RGB]{%s}", bg_color_lhdc)
bg_color_lhdc[! has_bg_color_lhdc] <- ""
bg_color[left_idx] <- bg_color_lhdc
colspan_lhdc <- colspan(ht)[lh_dc]
wrap_lhdc <- wrap(ht)[lh_dc] & ! is.na(width(ht))
valign_lhdc <- valign(ht)[lh_dc]
real_align_lhdc <- real_align[lh_dc]
colspec_tex_key <- c("left" = "l", "center" = "c", "right" = "r")
real_align_lhdc <- colspec_tex_key[real_align_lhdc]
colspec_lhdc <- real_align_lhdc
width_spec_lhdc <- width_spec[lh_dc]
colspec_lhdc[wrap_lhdc] <- {
pmb <- valign_lhdc[wrap_lhdc]
pmb_tex_key <- c("top" = "p", "bottom" = "b", "middle" = "m")
pmb <- pmb_tex_key[pmb]
sprintf("%s{%s}", pmb, width_spec_lhdc[wrap_lhdc])
}
bord <- cb$vert
bcol <- cbc$vert
has_bord <- ! is.na(bord)
bs_double <- cbs$vert == "double"
bcol <- format_color(bcol, default = "black")
bord_tex <- rep("", length(bord))
bord_tex[has_bord] <- sprintf("!{\\huxvb{%s}{%.4g}}", bcol[has_bord], bord[has_bord])
bord_tex[bs_double] <- paste0(bord_tex[bs_double], bord_tex[bs_double])
dim(bord_tex) <- dim(cb$vert)
lborders <- matrix("", nrow(contents), ncol(contents))
lborders[, 1] <- bord_tex[, 1]
rborders <- bord_tex[, - 1]
for (r in seq_len(nrow(ht))) {
row_idx <- row(ht) == r
rborders[left_idx & row_idx] <- rborders[right_idx & row_idx]
}
multicol[left_idx] <- sprintf("\\multicolumn{%d}{%s%s%s}{",
colspan_lhdc,
lborders[left_idx],
colspec_lhdc,
rborders[left_idx]
)
rowspan_blm <- rowspan(ht)[dc_map][blm_idx]
valign_blm <- valign(ht)[dc_map][blm_idx]
valign_multirow_key <- c(
"top" = "t",
"middle" = "c",
"bottom" = "b"
)
valign_blm <- valign_multirow_key[valign_blm]
vert_adj_blm <- sprintf("%dex", 0)
multirow_blm_tex <- sprintf("\\multirow[%s]{-%s}{*}[%s]{", valign_blm,
rowspan_blm, vert_adj_blm)
multirow[blm_idx] <- multirow_blm_tex
closer <- function (x) ifelse(nzchar(x), "}", "")
contents <- paste0(
multicol,
multirow,
bg_color,
inner_cell,
closer(multirow),
closer(multicol)
)
dim(contents) <- dim(ht)
content_rows <- apply(contents, 1, function (x) {
x <- x[nzchar(x)]
row <- paste(x, collapse = " &\n")
paste(row, "\\tabularnewline[-0.5pt]")
})
table_body <- paste(content_rows, hhlines[-1], sep = "\n", collapse = "\n")
table_body <- paste(hhlines[1], table_body, sep = "\n")
tenv <- tabular_environment(ht)
if (is.na(tenv)) tenv <- if (is.na(width(ht))) "tabular" else "tabularx"
tenv_tex <- paste0(c("\\begin{", "\\end{"), tenv, "}")
width_spec <- if (tenv %in% c("tabularx", "tabular*", "tabulary")) {
tw <- latex_table_width(ht)
paste0("{", tw, "}")
} else {
""
}
colspec_top <- if (is.na(width(ht))) {
rep("l", ncol(ht))
} else {
sapply(seq_len(ncol(ht)), function (mycol) {
sprintf("p{%s}", compute_width(ht, mycol, mycol))
})
}
colspec_top <- paste0(colspec_top, collapse = " ")
colspec_top <- sprintf("{%s}\n", colspec_top)
res <- paste0(tenv_tex[1], width_spec, colspec_top, table_body, tenv_tex[2])
return(res)
}
latex_table_width <- function (ht) {
tw <- width(ht)
if (is.numeric(tw) && ! is.na(tw)) {
tw <- paste0(tw, default_table_width_unit)
}
return(tw)
}
compute_width <- function (ht, start_col, end_col) {
table_width <- width(ht)
if (is.numeric(table_width)) {
table_unit <- default_table_width_unit
table_width <- as.numeric(table_width)
} else {
table_unit <- gsub("\\d", "", table_width)
table_width <- as.numeric(gsub("\\D", "", table_width))
}
cw <- col_width(ht)[start_col:end_col]
cw[is.na(cw)] <- 1 / ncol(ht)
cw <- if (! is.numeric(cw)) {
paste(cw, collapse = "+")
} else {
cw <- sum(as.numeric(cw))
cw <- cw * table_width
paste0(cw, table_unit)
}
if (end_col > start_col) {
extra_seps <- (end_col - start_col) * 2
cw <- paste0(cw, "+", extra_seps, "\\tabcolsep")
}
cw
}
maybe_markdown_fence <- function (text) {
fence <- FALSE
if (requireNamespace("knitr", quietly = TRUE)) {
in_rmarkdown <- ! is.null(knitr::opts_knit$get("rmarkdown.pandoc.to"))
if (in_rmarkdown && requireNamespace("rmarkdown", quietly = TRUE)) {
fence <- rmarkdown::pandoc_version() >= "2.0.0"
}
}
if (fence) {
text <- paste("\n\n```{=latex}\n", text, "\n```\n\n")
}
return(text)
}
|
projection <- function(a){
d <- dim(a)[2]
if(sum(t(a)%*%a)==0){
return(0)
}
pa <- a%*%matpower(t(a)%*%a,-1)%*%t(a)
return(pa)
}
matpower <- function(a,alpha){
small <- 0.000001
p1<-nrow(a)
eva<-eigen(a)$values
eve<-as.matrix(eigen(a)$vectors)
eve<-eve/t(matrix((diag(t(eve)%*%eve)^0.5),p1,p1))
index<-(1:p1)[eva>small]
evai<-eva
evai[index]<-(eva[index])^(alpha)
foo <- NULL
if(length(evai) == 1) foo <- diag(evai, nrow = 1)
else foo <- diag(evai)
ai<-as.matrix(eve)%*%foo%*%t(as.matrix(eve))
return(ai)
}
get1Dobj <- function(w,A,B){
small <- 0.000001
p <- dim(A)[1]
foo <- eigen((A+B), symmetric = TRUE)
if(p == 1) B.int <- foo$vec %*% 1/foo$val %*% t(foo$vec)
else B.int <- foo$vec %*% diag(1/foo$val) %*% t(foo$vec)
Fw <- log(t(w)%*%A%*%w + small) + log(t(w)%*%B.int%*%w + small) - 2*log(t(w)%*%w)
return(Fw)
}
get1Dini <- function(A,B){
p <- dim(A)[1]
vecs <- cbind(eigen(A, symmetric = TRUE)$vectors,
eigen(A+B, symmetric = TRUE)$vectors)
idx <- order(apply(vecs,2,get1Dobj,A,B))[1]
w <- vecs[,idx]
return(w)
}
get1Dderiv <- function(w,A,B){
p <- dim(A)[1]
foo <- eigen((A + B), symmetric = TRUE)
if(p == 1) B.int <- foo$vec %*% 1/foo$val %*% t(foo$vec)
else B.int <- foo$vec %*% diag(1/foo$val) %*% t(foo$vec)
dF <- c(2/(t(w)%*%A%*%w))*A%*%w + c(2/(t(w)%*%B.int%*%w))*B.int%*%w - c(4/(t(w)%*%w))*w
return(dF)
}
manifold1Dplus <- function(M,U,u){
p <- dim(M)[1]
Mnew <- M
Unew <- U
G <- matrix(0,p,u)
G0 <- diag(1,p)
for(i in 1:u){
ans <- optim(get1Dini(Mnew,Unew),get1Dobj,get1Dderiv,
A=Mnew,B=Unew,method="CG",
control=list(maxit=500,type=2))
w <- c(ans$par)
gk <- c(1/sqrt(sum(w^2)))*w
if(p == 1) G[,i] <- G0 * gk
else G[,i] <- G0%*%gk
G0 <- qr.Q(qr(G[,1:i]),complete=T)
G0 <- G0[,(i+1):p]
Mnew <- t(G0)%*%M%*%G0
Unew <- t(G0)%*%U%*%G0
}
return(G)
}
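## Minimal self-check for the 1D algorithm above: u orthonormal directions
## extracted from two synthetic positive definite matrices (illustrative data).
set.seed(11)
p.ex <- 5
M.ex <- crossprod(matrix(rnorm(p.ex^2), p.ex))
U.ex <- crossprod(matrix(rnorm(p.ex^2), p.ex))
G.ex <- manifold1Dplus(M.ex, U.ex, u = 2)
round(crossprod(G.ex), 6) # ~ identity: the columns are orthonormal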
|
ic.ranks = function(y, sigma = rep(1,length(y)), Method = c("ExactLR","BoundLR","Tukey","SeqTukey","ApproximateLR", "TukeyNoTies", "OnlyBlock", "RescaledExactLR", "RescaledTukey"), BoundChoice = c("Upper", "Lower"), ApproxAlgo = c("Exact","Upper"), alpha = 0.05, control = list(crit = NULL, trace = TRUE, adjustL = FALSE, adjustU = FALSE, n_adjust = length(y)-1, N = 10^4, MM = 10^3, gridSize = 5, RandPermut = 0, SwapPerm = TRUE))
{
if(length(Method) != 1) Method = "SeqTukey"
trace = control$trace
if(is.null(trace)) trace = TRUE
RandPermut = control$RandPermut
SwapPerm = control$SwapPerm
if(is.null(RandPermut)) RandPermut = 0
if(is.null(SwapPerm)) SwapPerm = TRUE
if(!(Method %in% c("ExactLR","BoundLR","Tukey","SeqTukey","ApproximateLR", "TukeyNoTies", "OnlyBlock", "RescaledExactLR", "RescaledTukey"))) {print("Error! Method not supported."); return(0)}
n = length(y)
if(length(sigma) == 1) sigma = rep(sigma,n)
if(length(sigma) != n) {print("Error: sigma and y must have the same length!"); return(0)}
if(n == 1) return(1)
ind = sort.int(y, index.return = T)$ix
y = y[ind]
sigma = sigma[ind]
if(any(ind != 1:n)) print("The sample had to be sorted in ascending order. Results are shown for the sorted sample.")
ranks = NULL
if(n <= 2 & Method == "BoundLR") {print("Upper- and Lower-bound CIs require at least three centers"); return(0)}
EqSigIndex = FALSE
if(length(unique(round(sigma,16)))==1)
{
EqSigIndex = TRUE
}
if(Method == "ExactLR")
{
crit = qchisq(1-alpha,(n-1):1)
if(sum((y - sum(y/sigma^2)/(sum(1/sigma^2)))^2/sigma^2) < qchisq(1-alpha,n-1))
{
if(trace==TRUE) cat("Process ended with trivial confidence intervals.\n")
return(list(Lower = rep(1,n), Upper = rep(n,n)))
}
if(EqSigIndex)
{
ranks = PartitioningRankingLevelEqSig(y, sigma, crit, n, trace)
}else
{
ranks = PartitioningRankingLevelUneqSig(y, sigma, crit, n, trace, RandPermut, SwapPerm)
}
ranks = list(Lower = ranks[,1], Upper = ranks[,2])
}
if(Method == "RescaledExactLR")
{
MM = control$MM
if(is.null(MM)) MM = 1000
gridSize = control$gridSize
if(is.null(gridSize)) gridSize = 5
if(EqSigIndex)
{
crit = matrix(0, nrow = n-1, ncol = gridSize)
alph = seq(from=alpha, to = 0.4, length = gridSize)
for(ss in 1:gridSize)
{
for(i in 3:(n+1))
{
w = as.numeric(abs(Stirling1.all(i))/factorial(i))
crit[n-(i-2),ss] = GeneralizedInvCDF(function(x) 1-sum(w[(i-1):1]*pchisq(x,df=1:(i-1),lower.tail=FALSE)),1-alph[ss], Bsup=100,npoints=10^4)
}
}
ranks = PartitioningRankingLevelEqSigRescaled(y, sigma, crit, matrix(rnorm(n*MM,sd=sigma[1]),nrow=n,ncol=MM),MM, n, RandPermut, alpha, gridSize, trace)
ranks = list(Lower = ranks[,1], Upper = ranks[,2])
}else
{
cat("\n The standard deviations are not the same.\n The rescaled partitioning procedure is not implemented with this option.\n")
}
}
if(Method == "OnlyBlock")
{
crit = qchisq(1-alpha,(n-1):1)
if(sum((y - sum(y/sigma^2)/(sum(1/sigma^2)))^2/sigma^2) < qchisq(1-alpha,n-1))
{
if(trace==TRUE) cat("Process ended with trivial confidence intervals.\n")
return(list(Lower = rep(1,n), Upper = rep(n,n)))
}
ranks = OnlyBlockRanking(y, sigma, crit, n, trace, RandPermut, SwapPerm)
ranks = list(Lower = ranks[,1], Upper = ranks[,2])
}
if(Method == "BoundLR")
{
if(length(BoundChoice)!= 1) BoundChoice = "Upper"
if(!(BoundChoice %in% c("Upper", "Lower"))){print("Error! Could not recognize your choice whether it is upper or lower bound."); return(0)}
adjustL = control$adjustL
if(is.null(adjustL)) adjustL = FALSE
adjustU = control$adjustU
if(is.null(adjustU)) adjustU = FALSE
n_adjust = control$n_adjust
if(is.null(n_adjust)) n_adjust = n-1
n_adjust = floor(n_adjust)
if(n_adjust > n-1 | n_adjust<1){n_adjust = n-1; cat(paste("n_adjust can only take values between 1 and ", n-1)); cat(". The default value is used instead.")}
if(sum((y - sum(y/sigma^2)/(sum(1/sigma^2)))^2/sigma^2) < qchisq(1-alpha,n-1))
{
if(trace==TRUE) cat("Process ended with trivial confidence intervals.\n")
return(list(Lower = rep(1,n), Upper = rep(n,n)))
}
if(BoundChoice == "Lower")
{
if(trace == TRUE) cat('\n Calculate lower bounds for simultaneous confidence intervals for ranks using the partitioning principle and the LRT.\n')
ranks = ApproximatePartition(y,sigma,"Lower", alpha, 1, trace, RandPermut)
if(adjustL == TRUE)
{
ind = which(ranks$Upper == n)[1]
n_adjust = min(ranks$BlockMax[1:ind])+1
ranks = ApproximatePartition(y,sigma,"Lower", alpha, n_adjust, trace, RandPermut, SwapPerm)
if(trace == TRUE) cat(paste("\n Adjustment on the lower bound. Intersection with the chi-square quantile curve at n_adjust = ",n_adjust))
}
}else{
if(trace == TRUE) cat('\n Calculate upper (conservative) bounds for simultaneous confidence intervals for ranks using the partitioning principle and the LRT.\n')
if(adjustU == FALSE) n_adjust = n-1
if(adjustU == TRUE & trace == TRUE) cat(paste("Adjustment on the upper bound by the user. Tangent on the chi-square quantile at n_adjust = ", n_adjust))
ranks = ApproximatePartition(y,sigma,"Upper", alpha, n_adjust, trace, RandPermut, SwapPerm)
}
}
if(Method == "ApproximateLR")
{
if(length(ApproxAlgo)!= 1) ApproxAlgo = "Upper"
if(!(ApproxAlgo %in% c("Exact","Upper"))) {print("Error! Approximate algorithm not supported."); return(0)}
if(trace == TRUE) cat('\n Calculate approximate simultaneous confidence intervals for ranks using the partitioning principle and the LRT.\n')
if(ApproxAlgo == "Upper")
{
if(trace == TRUE) cat('\n A fast (cubic complex) algorithm is being used.\n')
ranks = ApproximatePartitionCorrectOrder(y,sigma,"Upper",alpha,n-1)
}else
{
crit = qchisq(1-alpha,(n-1):1)
if(trace == TRUE) cat('\n A slow (exponentially complex) algorithm is being used.\n')
res = ApproximatePartitionCorrectOrder(y, sigma, BoundChoice = "Lower", alpha,1)
ind = which(res$Upper == n)[1]
n_adjust = min(res$BlockMax[1:ind])+1
res = ApproximatePartitionCorrectOrder(y,sigma,"Lower", alpha, n_adjust)
Lower = res$Lower-1; Upper = res$Upper-1; MinBlock = res$BlockMax
res = ApproximatePartitionCorrectOrder(y,sigma,"Upper", alpha, floor(n/2))
MaxBlock = res$BlockMax
ranks = PartitioningRankingBlockCorrectOrder(y, sigma, crit, MinBlock, MaxBlock, Lower, Upper, n, trace)
ranks = list(Lower = ranks[,1], Upper = ranks[,2])
}
}
if(Method == "Tukey")
{
N = control$N
if(is.null(N)) N = 10^4
crit = control$crit
if(is.null(crit))
{
if(length(unique(sigma)) == 1 & alpha<0.3)
{
crit = qtukey(1-alpha,n,Inf)/sqrt(2)
}else
{
x=t(mapply(rnorm,N,0,sigma))
if(n<100){
Cp=contrMat(rep(1,n), type ="Tukey")
S<-Cp%*%diag(sigma^2)%*%t(Cp)
std=diag(S)^(-1/2)
d=diag(std)%*%Cp%*%x
crit=quantile(apply(abs(d),2,max),1-alpha)
rm(d); rm(std); rm(S); rm(Cp); rm(x)
}else
{
Diff = numeric(N)
for(i in 1:N)
{
for(j in 1:(n-1))
{
Diff[i] = max(Diff[i],abs(x[j,i]-x[(j+1):n,i])/sqrt(sigma[j]^2+sigma[(j+1):n]^2))
}
}
crit = quantile(Diff,1-alpha)
rm(Diff)
rm(x)
}
}
}
ranks = tukey(y,sigma,crit)
if(trace == TRUE) cat(paste("\n Confidence intervals for ranks calculated using Tukey's HSD procedure at simultaneous level", 1-alpha))
}
if(Method == "SeqTukey")
{
N = control$N
if(is.null(N)) N = 10^4
ranks = StepDownTukeySeqRej(y,sigma,alpha, N)
if(trace == TRUE)
{
cat(paste("\n Confidence intervals for ranks calculated using a sequential-rejective variant of Tukey's HSD procedure at simultaneous level", 1-alpha))
cat(paste("\n Number of iterations = ",ranks$NbSteps))
cat("\n")
}
}
if(Method == "TukeyNoTies")
{
if(trace == TRUE) cat('\n Calculating an adjusted alpha...\n')
N = control$N
MM = control$MM
if(is.null(N)) N = 10^4
if(is.null(MM)) MM = 10^3
d = numeric(N)
sigmaTree1 = sigma
if(length(unique(sigma)) > 1 | alpha<0.3)
{
OddInd = seq(from=1, to = n, by = 2); EvenInd = seq(from=2,to = n, by = 2)
sigmaTree1 = sort(sigma)
sigmaTree1 = sigmaTree1[c(EvenInd, OddInd[((n+1)/2):1])]
x=t(mapply(rnorm,N,0,sigmaTree1))
if(n<100)
{
Cp=contrMat(rep(1,n), type ="Tukey")
S<-Cp%*%diag(sigmaTree1^2)%*%t(Cp)
std=diag(S)^(-1/2)
d=diag(std)%*%Cp%*%x
d = apply(abs(d),2,max)
rm(std); rm(S); rm(Cp); rm(x)
}else
{
d = numeric(N)
for(i in 1:N)
{
for(j in 1:(n-1))
{
d[i] = max(d[i],abs(x[j,i]-x[(j+1):n,i])/sqrt(sigmaTree1[j]^2+sigmaTree1[(j+1):n]^2))
}
}
rm(x)
}
}
x=t(mapply(rnorm,MM,0,sigmaTree1))
TukeyCoverage = function(a)
{
if(trace == TRUE) {cat(a);cat('\n')}
q = 1
if(length(unique(sigmaTree1)) > 1 | alpha<0.3)
{
q=quantile(d,1-a)
}else
{
q = qtukey(1-a,n,Inf)/sqrt(2)
}
TrueLowerRank = 1:n; TrueUpperRank = 1:n
coverageTuk = MM
for(i in 1:MM)
{
y = x[,i]
ind = sort.int(y, index.return = T)$ix
y = y[ind]
resTukey = tukey(y,sigmaTree1[ind], q)
if(sum(TrueLowerRank[ind]<resTukey$Lower | TrueUpperRank[ind]>resTukey$Upper)>0) coverageTuk = coverageTuk - 1
}
coverageTuk/MM
}
alphaTuk = uniroot(function(a)TukeyCoverage(a)-(1-alpha), c(alpha,0.9), maxiter=15)$root
rm(x)
if(trace == TRUE) cat("Applying Tukey's HSD using the new alpha...\n")
crit = 1
if(length(unique(sigma)) == 1 & alphaTuk<0.3)
{
crit = qtukey(1-alphaTuk,n,Inf)/sqrt(2)
}else
{
x=t(mapply(rnorm,N,0,sigma))
if(n<100)
{
Cp=contrMat(rep(1,n), type ="Tukey")
S<-Cp%*%diag(sigma^2)%*%t(Cp)
std=diag(S)^(-1/2)
d=diag(std)%*%Cp%*%x
crit=quantile(apply(abs(d),2,max),1-alphaTuk)
rm(d); rm(std); rm(S); rm(Cp); rm(x)
}else
{
Diff = numeric(N)
for(i in 1:N)
{
for(j in 1:(n-1))
{
Diff[i] = max(Diff[i],abs(x[j,i]-x[(j+1):n,i])/sqrt(sigma[j]^2+sigma[(j+1):n]^2))
}
}
crit = quantile(Diff,1-alphaTuk)
rm(Diff)
rm(x)
}
}
ranks = tukey(y,sigma, crit)
if(trace == TRUE)
{
cat(paste("\n Confidence intervals for ranks calculated using a rescaled version \n of Tukey's HSD procedure at simultaneous level", 1-alpha))
cat(paste("\n Rescaled significance level is ",alphaTuk)); cat(".")
}
}
if(Method == "RescaledTukey")
{
N = control$N
MM = control$MM
gridSize = control$gridSize
if(is.null(N)) N = 10^4
if(is.null(MM)) MM = 10^3
if(is.null(gridSize)) gridSize = 5
if(EqSigIndex == 1 & alpha<0.3)
{
crit = qtukey(1-seq(from=alpha, to = 0.4, length = gridSize),n,Inf)/sqrt(2)
}else
{
x=t(mapply(rnorm,N,0,sigma))
Cp=contrMat(rep(1,n), type ="Tukey")
S<-Cp%*%diag(sigma^2)%*%t(Cp)
std=diag(S)^(-1/2)
d=diag(std)%*%Cp%*%x
crit=quantile(apply(abs(d),2,max),1-seq(from=alpha, to = 0.4, length = gridSize))
rm(d); rm(std); rm(S); rm(Cp); rm(x)
}
if(EqSigIndex == 1)
{
ranks = TukeyRankingLevelEqSigRescaled(y, sigma, as.matrix(crit), t(mapply(rnorm,MM,0,sigma)), MM, n, RandPermut, alpha, gridSize, trace)
}else
{
ranks = TukeyRankingLevelUneqSigRescaled(y, sigma, as.matrix(crit), t(mapply(rnorm,MM,0,sigma)), MM, n, RandPermut, alpha, gridSize, trace)
}
if(trace == TRUE) cat(paste("\n Confidence intervals for ranks calculated using a rescaled version \n of Tukey's HSD procedure at simultaneous level", 1-alpha))
ranks = list(Lower = ranks[,1], Upper = ranks[,2])
}
if(trace == TRUE) cat(paste(paste("\n Number of compared centers is ",n),"\n"))
return(list(Lower = ranks$Lower, Upper = ranks$Upper))
}
GeneralizedInvCDF = function(CdfFun, proba = 0.95, Binf = 0, Bsup = 100,npoints=1000)
{
knots = seq(from=Binf,to=Bsup,length=npoints)
yVal = as.numeric(sapply(1:npoints, function(ll)CdfFun(knots[ll])))
ind = min(which(yVal>=proba))
knots[ind]
}
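## Quick sanity check of the generalized inverse CDF against qnorm
## (bounds and grid size here are illustrative).
GeneralizedInvCDF(pnorm, proba = 0.975, Binf = -10, Bsup = 10, npoints = 10^5)
qnorm(0.975) # ~ 1.959964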
ApproximatePartition = function(y, sigma, BoundChoice = c("Upper", "Lower"), alpha = 0.05, n_adjust, trace = FALSE, RandPermut = 0, SwapPerm = TRUE)
{
critFun = function(x)
{
if(x<=0) return(0)
slop*x
}
n = length(y)
z = qchisq(1-alpha,1:(n-1))
slop = z[n_adjust] - z[n_adjust-1]; Intercept = z[n_adjust] - slop*n_adjust
if(BoundChoice == "Lower") {slop = (z[n-1] - z[n_adjust])/(n-n_adjust-1); Intercept = z[n_adjust] - slop*n_adjust}
if(trace == TRUE) cat('\n Calculate simultaneous confidence intervals using the correctly ordered hypotheses.\n')
EmpOrder = 1:n
res = ApproximatePartitionCorrectOrder(y, sigma, BoundChoice = BoundChoice, alpha = alpha, n_adjust=n_adjust)
Lower = res$Lower; Upper = res$Upper
if(sum(Lower==rep(1,n) & Upper==rep(n,n)) == n) return(list(Lower=Lower,Upper=Upper))
minY = min(y)
maxY = max(y)
res = ApproximatePartitionPermutations(y, sigma, Lower, Upper, n, slop, Intercept, minY, maxY, trace, SwapPerm, RandPermut)
Lower = res[,1]; Upper = res[,2]
return(list(Lower = Lower, Upper = Upper))
}
ApproximatePartitionCorrectOrder = function(y, sigma, BoundChoice = c("Upper", "Lower"), alpha = 0.05, n_adjust)
{
n = length(y)
if(sum((y - sum(y/sigma^2)/(sum(1/sigma^2)))^2/sigma^2) < qchisq(1-alpha,n-1))
{
return(list(Lower = rep(1,n), Upper = rep(n,n), BlockMax = rep(n-1,n)))
}
critFun = function(x)
{
if(x<=0) return(0)
slop*x
}
z = qchisq(1-alpha,1:(n-1))
slop = z[n_adjust] - z[n_adjust-1]; Intercept = z[n_adjust] - slop*n_adjust
if(BoundChoice == "Lower") {slop = (z[n-1] - z[n_adjust])/(n-n_adjust-1); Intercept = z[n_adjust] - slop*n_adjust}
LogL = matrix(0, nrow = n, ncol = n)
IndividContribBlock = matrix(0, nrow = n, ncol = n)
for(j in 2:n)
{
for(i in (j-1):1)
{
LogL[i,j] = sum((y[i:j] - sum(y[i:j]/sigma[i:j]^2)/sum(1/sigma[i:j]^2))^2/sigma[i:j]^2)
IndividContribBlock[i,j] = min(IndividContribBlock[i,i:(j-1)]+IndividContribBlock[(i+1):j,j], LogL[i,j] - critFun(j-i))
}
}
Lower = 1:n; Upper = 1:n
BlockMax = numeric(n)
for(j in (n-1):2)
{
if(LogL[1,j] - critFun(j-1) + IndividContribBlock[j+1,n] - Intercept < 0)
{
Lower[1:j] = 1
Upper[1:j] = pmax(Upper[1:j], j)
BlockMax[1] = j - 1
break
}
}
for(i in 2:(n-1))
{
if(LogL[i,n] - critFun(n-i) + IndividContribBlock[1,i-1] - Intercept < 0)
{
Lower[i:n] = pmin(Lower[i:n],i)
Upper[i:n] = n
break
}
}
for(i in 2:(n-2))
{
for(j in (n-1):(i+1))
{
if(LogL[i,j] - critFun(j-i) + IndividContribBlock[1,i-1] + IndividContribBlock[j+1,n] - Intercept < 0)
{
Lower[i:j] = pmin(Lower[i:j],i)
Upper[i:j] = pmax(Upper[i:j], j)
BlockMax[i] = j-i
break
}
}
}
return(list(Lower = Lower, Upper = Upper, BlockMax = BlockMax))
}
tukey = function(y,sigma, qq) {
n=length(y)
ranks=matrix(0,n,2)
for(j in 1:n)
{
stat = (y[j]-y)/sqrt(sigma[j]^2+sigma^2)
ranks[j,1]=1+sum(stat>qq)
ranks[j,2]=n-sum(stat<(-qq))
}
return(list(Lower = ranks[,1], Upper = ranks[,2]))
}
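## Minimal sketch of the Tukey rank CIs on five illustrative means with a
## common standard error; qtukey/sqrt(2) is the usual HSD critical value.
set.seed(21)
y.ex <- sort(rnorm(5, mean = c(0, 2, 4, 6, 8)))
tukey(y.ex, sigma = rep(1, 5), qq = qtukey(0.95, 5, Inf)/sqrt(2))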
StepDownTukeySeqRej = function(y,sigma,alpha=0.05,N = 10^4)
{
n = length(y)
Diff = NULL
PosPairs = matrix(0,ncol=2,nrow = n*(n-1)/2)
NegPairs = matrix(0,ncol=2,nrow = n*(n-1)/2)
for(j in 1:(n-1))
{
PosPairs[(j-1)*n-j*(j-1)/2+1:(n-j),1] = rep(j,n-j)
PosPairs[(j-1)*n-j*(j-1)/2+1:(n-j),2] = (j+1):n
}
NegPairs[,2] = PosPairs[,1]
NegPairs[,1] = PosPairs[,2]
x=t(mapply(rnorm,N,0,sigma))
Diff = numeric(N)
for(k in 1:N)
{
for(j in 1:(n-1))
{
Diff[k] = max(Diff[k],abs(x[j,k]-x[(j+1):n,k])/sqrt(sigma[j]^2+sigma[(j+1):n]^2))
}
}
qDown = quantile(Diff,1-alpha)
rm(Diff)
NbNRejOld = n*(n-1)/2; NbNRejNew = 0
NBSteps = 0
while(NbNRejOld>NbNRejNew)
{
NBSteps = NBSteps +1
NewPairs = NULL
for(i in 1:length(PosPairs[,1]))
{
T = abs(y[PosPairs[i,1]]-y[PosPairs[i,2]])/sqrt(sigma[PosPairs[i,1]]^2+sigma[PosPairs[i,2]]^2)
if(T<qDown)
{
NewPairs = rbind(NewPairs,PosPairs[i,])
}
}
NbNRejOld = length(PosPairs[,1])
PosPairs = NewPairs
NbNRejNew = length(PosPairs[,1])
AllPairs = rbind(PosPairs, NegPairs)
Diff = numeric(N)
for(i in 1:N)
{
Diff[i] = max((x[AllPairs[,2],i]-x[AllPairs[,1],i])/sqrt(sigma[AllPairs[,1]]^2+sigma[AllPairs[,2]]^2))
}
qDown = quantile(Diff,1-alpha)
rm(Diff)
}
rm(x)
ranks = matrix(0,nrow = n,ncol = 2)
ranks[,1] = 1:n
ranks[,2] = 1:n
ranks[1,2] = 1+sum(PosPairs[,1]==1)
for(i in 2:(n-1))
{
ranks[i,1] = i - sum(PosPairs[,2] == i)
ranks[i,2] = i + sum(PosPairs[,1]==i)
}
ranks[n,1] = n - sum(PosPairs[,2] == n)
return(list(Lower = ranks[,1], Upper = ranks[,2], NbSteps = NBSteps))
}
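## End-to-end sketch: simultaneous confidence intervals for ranks via the
## plain Tukey path (equal sigmas, alpha < 0.3), which only needs base R.
set.seed(31)
y.demo <- sort(rnorm(5, mean = c(0, 1, 3, 6, 10)))
ic.ranks(y.demo, sigma = rep(1, 5), Method = "Tukey", alpha = 0.05)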
|
clean_string <- function(v){
  v <- gsub('[\n\t\r]', '', v)      # drop newlines, tabs and carriage returns
  v <- gsub('^\\s+|\\s+$', '', v)   # trim leading/trailing whitespace
  v <- gsub(' {2,}', ' ', v)        # collapse runs of spaces
  return(v)
}
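## Example: newlines are removed outright, then spacing is normalised.
clean_string(" a \n b   c ") # -> "a b c"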
|
context("Two-year trend")
library(EGRET)
eList <- Choptank_eList
test_that("trendSetUp",{
caseSetUp <- trendSetUp(eList,
year1=1990,
year2=2012,
nBoot = 50,
bootBreak = 39,
blockLength = 200)
df <- data.frame(year1 = 1990,
yearData1 = 1980,
year2 = 2012,
yearData2 = 2011,
numSamples = 606,
nBoot = 50,
bootBreak = 39,
blockLength = 200,
confStop = 0.7)
expect_equal(caseSetUp, df)
expect_error( caseSetUp <- trendSetUp(eList,
year1=1970,
year2=2012,
nBoot = 50,
bootBreak = 39,
blockLength = 200))
expect_error( caseSetUp <- trendSetUp(eList,
year1=1980,
year2=2013,
nBoot = 50,
bootBreak = 39,
blockLength = 200))
})
test_that("setForBoot", {
INFO <- eList$INFO
eList <- setForBoot(eList, caseSetUp)
INFO2 <- eList$INFO
expect_gt(ncol(INFO2),ncol(INFO))
expect_true(all(c("DecLow","DecHigh") %in% names(INFO2)))
})
test_that("wordLike", {
likeList <- c(0.01, 0.5, 0.55, 0.99)
Trends <- wordLike(likeList)
expect_equal(Trends, c("Upward trend in concentration is highly unlikely",
"Downward trend in concentration is about as likely as not",
"Upward trend in flux is about as likely as not",
"Downward trend in flux is highly likely"))
})
test_that("blockSample", {
skip_on_cran()
Sample <- eList$Sample
suppressWarnings(RNGversion("3.5.0"))
set.seed(1)
bsReturn <- blockSample(Sample, 25)
expect_equal(bsReturn$ConcLow[1], 0.62)
expect_equal(bsReturn$Date[1], as.Date("1979-10-24"))
})
test_that("pVal", {
s <- c(0.01, 0.5, 0.55, 0.99)
pValue <- pVal(s)
expect_equal(pValue, 0.4)
})
test_that("makeTwoYearsResults", {
testthat::skip_on_cran()
twoResultsWaterYear <- EGRETci:::makeTwoYearsResults(eList, 1985, 2005)
expect_equal(floor(twoResultsWaterYear[1:2]), c(1,0))
})
test_that("makeCombo", {
surfaces1 <- c(1,2,3)
surfaces2 <- c(4, NA, 5)
surfaces <- EGRETci:::makeCombo(surfaces1, surfaces2)
expect_equal(surfaces, c(5,2,8))
})
test_that("paVector", {
year <- 2000
paStart <- 10
paLong <- 12
vectorYear <- c(seq(1999,2001,0.0833))
paIndexWaterYear <- paVector(year, paStart, paLong, vectorYear)
expect_equal(paIndexWaterYear, 10:21)
paStart <- 11
paLong <- 3
paIndexWinter <- paVector(year, paStart, paLong, vectorYear)
expect_equal(paIndexWinter, 11:13)
paStart <- 6
paLong <- 3
paIndexSummer <- paVector(year, paStart, paLong, vectorYear)
expect_equal(paIndexSummer, 18:20)
paStart <- 10
paLong <- 3
paIndexLate <- paVector(year, paStart, paLong, vectorYear)
expect_equal(paIndexLate, 22:24)
paCalendarYear <- paVector(year, 1, 12, vectorYear)
expect_equal(paCalendarYear, 14:24)
})
test_that("estSliceSurfacesSimpleAlt", {
testthat::skip_on_cran()
eList <- Choptank_eList
caseSetUp <- trendSetUp(eList,
year1=1990,
year2=2012,
nBoot = 50,
bootBreak = 39,
blockLength = 200)
eList <- setForBoot(eList,caseSetUp)
surfaces <- EGRETci:::estSliceSurfacesSimpleAlt(eList, 1990)
expect_equal(surfaces[1:14,1,3], as.numeric(rep(NA, 14)))
expect_equal(surfaces[1,173,1], 0.16541093)
})
test_that("wBT", {
testthat::skip_on_cran()
eList <- Choptank_eList
caseSetUp <- trendSetUp(eList,
year1=1985,
year2=2005,
nBoot = 5,
bootBreak = 39,
blockLength = 200)
eList <- setForBoot(eList,caseSetUp)
eBoot <- wBT(eList,caseSetUp,saveOutput = FALSE)
bootOut <- eBoot$bootOut
expect_true(bootOut$rejectC)
expect_equal(signif(bootOut$lowC, digits = 6), 0.298427)
expect_equal(signif(bootOut$likeCUp, digits = 6), 0.916667)
expect_true(bootOut$rejectF)
expect_equal(eBoot$wordsOut, c("Upward trend in concentration is very likely" ,
"Downward trend in concentration is very unlikely",
"Upward trend in flux is very likely",
"Downward trend in flux is very unlikely"))
expect_equal(signif(eBoot$xConc, digits = 2), c(0.31,0.35,0.30,0.34,0.31))
expect_equal(signif(eBoot$pFlux, digits = 2), c(18,30,31,21,18))
expect_equal(signif(eBoot$xFlux, digits = 2), c(0.022,0.034,0.034,0.025,0.021))
expect_equal(signif(eBoot$pConc, digits = 2), c(30,35,30,33,31))
})
test_that("runPairsBoot", {
testthat::skip_on_cran()
eList <- EGRET::Choptank_eList
year1 <- 1985
year2 <- 2009
pairOut_2 <- EGRET::runPairs(eList, year1, year2, windowSide = 7)
boot_pair_out <- runPairsBoot(eList, pairOut_2, nBoot = 3, jitterOn = TRUE)
expect_true(all(c("bootOut","wordsOut","xConc","xFlux",
"pConc","pFlux","startSeed") %in% names(boot_pair_out)))
expect_true(boot_pair_out$bootOut$rejectC)
expect_true(all(c("Upward trend in concentration is likely",
"Downward trend in concentration is unlikely",
"Upward trend in flux is likely",
"Downward trend in flux is unlikely") %in% boot_pair_out$wordsOut))
expect_equal(round(boot_pair_out$xConc[1:2], digits = 2), c(0.38,0.40))
expect_equal(round(boot_pair_out$xFlux[1:2], digits = 2), c(0.05,0.06))
expect_equal(round(boot_pair_out$pConc[1:2], digits = 2), c(36.68,40.19))
expect_equal(round(boot_pair_out$pFlux[1:2], digits = 2), c(48.39,56.03))
})
test_that("runGroupBoot", {
testthat::skip_on_cran()
eList <- EGRET::Choptank_eList
year1 <- 1985
year2 <- 2009
groupResults <- EGRET::runGroups(eList,
group1firstYear = 1995,
group1lastYear = 2004,
group2firstYear = 2005,
group2lastYear = 2014,
windowSide = 7, wall = TRUE,
sample1EndDate = "2004-10-30",
paStart = 4, paLong = 2,
verbose = FALSE)
boot_group_out <- suppressWarnings(runGroupsBoot(eList, groupResults, nBoot = 3,
jitterOn = TRUE))
expect_true(all(c("bootOut","wordsOut","xConc","xFlux",
"pConc","pFlux","startSeed") %in% names(boot_group_out)))
expect_true(boot_group_out$bootOut$rejectC)
expect_true(all(c("Upward trend in concentration is likely",
"Downward trend in concentration is unlikely",
"Upward trend in flux is likely",
"Downward trend in flux is unlikely") %in% boot_group_out$wordsOut))
expect_equal(round(boot_group_out$xConc[1:2], digits = 2), c(0.1,0.21))
expect_equal(round(boot_group_out$xFlux[1:2], digits = 2), c(0.00,0.01))
expect_equal(round(boot_group_out$pConc[1:2], digits = 2), c(8.24,18.73))
expect_equal(round(boot_group_out$pFlux[1:2], digits = 2), c(0.08,7.75))
})
|
context("Multihash")
test_that("Multihash for connections or raw vectors", {
desc <- system.file('DESCRIPTION')
buf <- readBin(desc, raw(), 1e5)
algos <- c("md5", "sha1", "sha256", "sha512")
out1 <- multihash(buf, algos = algos)
out2 <- multihash(file(desc), algos = algos)
expect_identical(out1, out2)
expect_named(out1, algos)
expect_equal(out1$md5, md5(file(desc)))
expect_equal(out1$sha1, sha1(file(desc)))
expect_equal(out1$sha256, sha256(file(desc)))
expect_equal(out1$sha512, sha512(file(desc)))
})
test_that("Multihash for text vectors", {
algos <- c("md5", "sha1", "sha256", "sha512")
out0 <- multihash(character(), algos = algos)
expect_is(out0, 'data.frame')
expect_named(out0, algos)
expect_equal(nrow(out0), 0)
out1 <- multihash("foo", algos = algos)
expect_is(out1, 'data.frame')
expect_named(out1, algos)
expect_equal(nrow(out1), 1)
out2 <- multihash(c("foo", "bar"), algos = algos)
expect_is(out2, 'data.frame')
expect_named(out2, algos)
expect_equal(nrow(out2), 2)
expect_equal(out2[1,], out1)
})
|
multiplierProposal <- function(x, a){
m <- exp( a * (runif(1) - 0.5) )
return( setNames(c(m * x, m), c("prop","prop.ratio") ) )
}
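## Hedged sketch: one Metropolis-Hastings step for a positive parameter using
## the multiplier proposal above; the exponential target is an illustrative
## assumption, and 'prop.ratio' supplies the Hastings (Jacobian) correction.
set.seed(41)
x.cur <- 1.0
prop <- multiplierProposal(x.cur, a = 1.5)
log.acc <- dexp(prop["prop"], rate = 1, log = TRUE) -
  dexp(x.cur, rate = 1, log = TRUE) + log(prop["prop.ratio"])
if (log(runif(1)) < log.acc) x.cur <- unname(prop["prop"])
x.cur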
|
source("ESEUR_config.r")
library("circular")
library("extRemes")
library("lubridate")
library("plyr")
read_csv=function(f_str)
{
t=read.csv(paste0(ESEUR_dir, "regression/", f_str), as.is=TRUE)
t$date=as.Date(t$date_time, format="%Y-%m-%d")
t$wday=wday(t$date, week_start=1)
t$is_work=(t$wday < 6)
t$week=week(t$date)
t$month=month(t$date)
t$year=year(t$date)
return(t)
}
mail_p_day=function(df_date)
{
core_days=count(df_date)
min_day=min(df_date)
max_day=max(df_date)
num_days=1+as.integer(max_day-min_day)
all_days=data.frame(date=min_day+0:(num_days-1), freq=rep(0, num_days))
all_days$freq[all_days$date %in% core_days$x]=core_days$freq
all_days$mday=day(all_days$date)
all_days$wday=wday(all_days$date, week_start=1)
all_days$is_work=(all_days$wday < 6)
all_days$week=week(all_days$date)
all_days$month=month(all_days$date)
return(all_days)
}
emails_per_week=function(df)
{
mem=ddply(df, .(year, week), function(df) nrow(df))
mem$total_weeks=mem$year+mem$week
return(mem)
}
max_email_month=function(df)
{
mem=ddply(df, .(X1, month), function(df) max(df$freq))
return(mem)
}
seasons=function(vec)
{
mon_ts=ts(vec, start=c(0, 0), frequency=12)
plot(stl(mon_ts, s.window="periodic"))
}
rev_df=function(df)
{
df_names=names(df)
t=df[nrow(df):1, ]
names(t)=df_names
return(t)
}
plot_rose=function(weeks)
{
lib_circ=circular(weeks*360/52, units="degrees", template="clock12")
rose.diag(weeks, bins=52, shrink=1.5, prop=6, axes=FALSE, col="blue")
axis.circular(at=circular((0:11)*360/12, units="degrees", rotation="clock"),
labels=c("Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"),
col="green")
arrows.circular(mean(weeks), y=rho.circular(weeks), col="red", lwd=3)
}
mk_days_before=function(last_meeting)
{
# days between the end of this meeting and the start of the next one
# (relies on the global data frame 'meeting' built below)
days=as.integer(meeting$start[last_meeting+1]-meeting$end[last_meeting])
d_before=rbind(data.frame(date=meeting$start[last_meeting]+(1:6),
days_before=rep(NA, 6)),
data.frame(date=meeting$end[last_meeting]+(1:days),
days_before=days:1)
)
return(d_before)
}
weekly_email=function(df)
{
s_week=which(df$wday == 1)
t_week=adply(s_week, .margins=1, function(X) data.frame(
freq=c(sum(df$freq[X:(X+4)]),
sum(df$freq[c(X+5, X+6)])),
days_before=c(df$days_before[X]-2,
df$days_before[X]-5),
is_work=c(TRUE,FALSE),
date=df$date[X]
))
return(t_week)
}
core=read_csv("Cpp_core.txt")
lib=read_csv("Cpp_lib.txt")
core_days=mail_p_day(core$date)
lib_days=mail_p_day(lib$date)
meeting=read.csv(paste0(ESEUR_dir, "regression/Cpp_meeting-dates.txt"), as.is=TRUE)
meeting$start=as.Date(meeting$start, format="%Y-%m-%d")
meeting$end=meeting$start+5
meeting=rev_df(meeting)
timeline=adply(1:(nrow(meeting)-1), .margins=1, mk_days_before)
t=merge(timeline, lib_days, by="date")
days_before=subset(t, !is.na(days_before))
weeks_before=weekly_email(days_before)
month_max=max_email_month(days_before)
max_mod=fevd(month_max$V1, type="GEV", period.basis="month")
plot(max_mod, rperiods=c(6, 12, 18, 24, 36, 72, 120), type="rl", col="red",
main="")
|
turbulence <-
function(mast, turb.set, dir.set, num.sectors=12, bins=c(5,10,15,20), subset, digits=3, print=TRUE) {
if(!inherits(mast, "mast")) stop(substitute(mast), " is not a mast object")
num.sets <- length(mast$sets)
if(!missing(turb.set) && missing(dir.set)) dir.set <- turb.set
if(missing(turb.set) && !missing(dir.set)) turb.set <- dir.set
if(!is.numeric(num.sectors)) stop("'num.sectors' must be numeric")
if(num.sectors<=1) stop("There must be at least 2 sectors")
if(!is.numeric(turb.set)) turb.set <- match(turb.set, names(mast$sets))
if(is.na(turb.set)) stop("'turb.set' not found")
if(turb.set<=0 || turb.set>num.sets) stop("'turb.set' not found")
if(!is.numeric(dir.set)) dir.set <- match(dir.set, names(mast$sets))
if(is.na(dir.set)) stop("'dir.set' not found")
if(dir.set<=0 || dir.set>num.sets) stop("'dir.set' not found")
if(is.null(mast$sets[[turb.set]]$data$turb.int)) stop("'set' does not contain turbulence intensity data")
if(is.null(mast$sets[[dir.set]]$data$dir.avg)) stop("'set' does not contain wind direction data")
if(any(bins<0)) stop("'bins' must be NULL or a vector of positives")
if(missing(subset)) subset <- c(NA, NA)
start.end <- subset.int(mast$timestamp, subset)
start <- start.end[1]
end <- start.end[2]
v <- mast$sets[[turb.set]]$data$v.avg[start:end]
tu <- mast$sets[[turb.set]]$data$turb.int[start:end]
d <- mast$sets[[dir.set]]$data$dir.avg[start:end]
sector.width <- 360/num.sectors
sectors <- seq(0, 360-sector.width, by=sector.width)
sector.edges <- c(sectors-sector.width/2, tail(sectors, n=1)+sector.width/2)%%360
if(!is.null(bins)) if(head(bins, 1)!=0) bins <- c(0, bins)
num.classes <- length(bins)
v.max <- max(v, na.rm=TRUE)
if(num.classes>2) {
for(i in (num.classes-1):2) {
if(bins[i+1]>=v.max & bins[i]>=v.max) {
bins <- head(bins, -1)
num.classes <- length(bins)
}
}
}
if(!is.null(bins)) if(num.classes==2 && bins[num.classes]>=v.max) stop("Only one wind class found")
turb.tbl <- matrix(NA, nrow=num.sectors+1, ncol=num.classes+1)
idx.val <- !is.na(tu) & !is.na(d)
idx.v <- !is.na(v)
for(s in 1:num.sectors) {
low <- sector.edges[s]
high <- sector.edges[s+1]
if(low<high) idx.dir <- d>=low & d<high
else idx.dir <- d>=low | d<high
if(length(tu[idx.val & idx.dir])<3) turb.tbl[s,1] <- NA
else turb.tbl[s,1] <- mean(tu[idx.val & idx.dir])
if(!is.null(bins)) {
for(c in 1:(num.classes-1)) {
idx.class <- v>=bins[c] & v<bins[c+1]
if(length(tu[idx.val & idx.v & idx.dir & idx.class])<3) turb.tbl[s,c+1] <- NA
else turb.tbl[s,c+1] <- mean(tu[idx.val & idx.v & idx.dir & idx.class])
}
if(length(tu[idx.val & idx.v & idx.dir & v>=bins[num.classes]])<3) turb.tbl[s,num.classes+1] <- NA
else turb.tbl[s,num.classes+1] <- mean(tu[idx.val & idx.v & idx.dir & v>=bins[num.classes]])
}
}
if(length(tu)<3) turb.tbl[num.sectors+1,1] <- NA
else turb.tbl[num.sectors+1,1] <- mean(tu, na.rm=TRUE)
if(!is.null(bins)) {
for(i in 1:(num.classes-1)) {
idx.class <- v>=bins[i] & v<bins[i+1]
if(length(tu[idx.val & idx.v & idx.class])<3) turb.tbl[num.sectors+1,i+1] <- NA
else turb.tbl[num.sectors+1,i+1] <- mean(tu[idx.val & idx.v & idx.class], na.rm=TRUE)
}
if(length(tu[idx.val & idx.v & v>=bins[num.classes]])<3) turb.tbl[num.sectors+1,num.classes+1] <- NA
else turb.tbl[num.sectors+1,num.classes+1] <- mean(tu[idx.val & idx.v & v>=bins[num.classes]])
}
r.names <- c(paste0("s", 1:num.sectors), "all")
if(num.sectors==4) r.names <- c("n","e","s","w","all")
if(num.sectors==8) r.names <- c("n","ne","e","se","s","sw","w","nw","all")
if(num.sectors==12) r.names <- c("n","nne","ene","e","ese","sse","s","ssw","wsw","w","wnw","nnw","all")
if(num.sectors==16) r.names <- c("n","nne","ne","ene","e","ese","se","sse","s","ssw","sw","wsw","w","wnw","nw","nnw","all")
turb.tbl <- data.frame(turb.tbl, row.names=r.names)
c.names <- c("total")
if(!is.null(bins)) {
for(i in 1:(num.classes-1)) c.names <- append(c.names, paste(bins[i], bins[i+1], sep="-"))
c.names <- append(c.names, paste0(">", bins[num.classes]))
}
names(turb.tbl) <- c.names
for(i in 1:length(turb.tbl)) turb.tbl[,i][is.nan(turb.tbl[,i]) | is.na(turb.tbl[,i])] <- 0
if(sum(turb.tbl[,length(turb.tbl)], na.rm=TRUE)==0) turb.tbl[,length(turb.tbl)] <- NULL
attr(turb.tbl, "call") <- list(func="turbulence", mast=deparse(substitute(mast)), turb.set=turb.set, dir.set=dir.set, num.sectors=num.sectors, bins=bins, subset=subset, digits=digits, print=print)
turb.tbl <- round(turb.tbl, digits)
class(turb.tbl) <- "turbulence"
if(print) print(turb.tbl)
invisible(turb.tbl)
}
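## --- Hedged usage sketch (not part of the original source) ---
## Assumes a bReeze-style "mast" object whose sets carry v.avg, turb.int and
## dir.avg channels (e.g. built with bReeze::mast()); 'my.mast' is hypothetical.
## tu <- turbulence(my.mast, turb.set = 1, dir.set = 1, num.sectors = 12)
## tu["all", ]   # mean turbulence intensity per wind-speed bin, over all sectors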
|
NULL
calc.npar <- function(object)
{
nth <- 0
if(!is.null(object$thetas)) nth <- length(object$thetas)
if(!any(is.null(object$sethetas))){
nseth <- length(object$sethetas[!is.na(object$sethetas)])
}
else {
nseth <- 0
}
nom <- 0
if(!(is.null(object$omega)||any(is.na(object$omega)))) {
for(i in 1:length(object$omega)) {
sel <- object$omega[[i]] != 0
if(!any(sel==TRUE)){
nom <- nom + 1
}
nom <- length(object$omega[[i]][sel]) + nom
}
}
if(!any(is.null(object$seomegas))) {
nseom <- 0
for(i in 1:length(object$seomegas)) {
sel <- object$seomegas[[i]] != 0
sel2 <- !is.na(object$seomegas[[i]])
sel3 <- sel & sel2
nseom <- length(object$seomegas[[i]][sel3]) + nseom
}
}
else {
nseom <- 0
}
nsi <- 0
if(!(is.null(object$sigma)||any(is.na(object$sigma)))) {
for(i in 1:length(object$sigma)) {
sel <- object$sigma[[i]] != 0
nsi <- length(object$sigma[[i]][sel]) + nsi
}
}
if(!any(is.null(object$sesigmas))) {
nsesi <- 0
for(i in 1:length(object$sesigmas)) {
sel <- object$sesigmas[[i]] != 0
sel2 <- !is.na(object$sesigmas[[i]])
sel3 <- sel & sel2
nsesi <- length(object$sesigmas[[i]][sel3]) + nsesi
}
}
else {
nsesi <- 0
}
npar <- nth + nom + nsi
if(nseth > 0 || nseom > 0 || nsesi > 0) {
ret.list <- list(npar = npar, nth = nth, nseth = nseth, nom =
nom, nseom = nseom, nsi = nsi, nsesi = nsesi)
}
else {
ret.list <- list(npar = npar, nth = nth, nom = nom, nsi = nsi)
}
return(ret.list)
}
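## --- Hedged usage sketch (not part of the original source) ---
## calc.npar() only inspects a handful of list elements, so a hand-built stub
## (structure assumed) is enough to exercise the parameter counting.
mock <- list(thetas   = c(0.5, 1.2),
             sethetas = c(0.1, NA),
             omega    = list(matrix(c(0.04, 0, 0, 0.09), 2, 2)),
             seomegas = NULL,
             sigma    = list(matrix(0.02)),
             sesigmas = NULL)
calc.npar(mock)  # npar = nth + nom + nsi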
|
.__A__ <-
".1"
.__A__.1 <-
function (ns)
Rcpp::loadModule(module = "stan_fit4model_mod", what = TRUE,
env = ns, loadNow = TRUE)
|
t.test(Leniency~Group, alt = "less", data = Smiles)
|
HARviewer <- function(har, width = NULL, height = NULL, elementId = NULL){
x <- list(
data = har,
options = list(
rowHeight = 23,
showAlignmentHelpers = TRUE,
showIndicatorIcons = TRUE,
leftColumnWith = 25
)
)
htmlwidgets::createWidget("HARviewer", x, width = width, height = height,
package = "HARtools", elementId = elementId)
}
HARviewer_html <- function(id, style, class, ...){
htmltools::renderTags(
htmltools::tags$div(
htmltools::tags$select(id="page-selector"),
htmltools::tags$div(id = id, style = style, class = class)
)
)
}
HARviewerOutput <- function(outputId, width = "100%", height = "400px") {
htmlwidgets::shinyWidgetOutput(outputId, "HARviewer", width, height,
package = "HARtools")
}
renderHARviewer <- function(expr, env = parent.frame(), quoted = FALSE) {
if (!quoted) { expr <- substitute(expr) }
htmlwidgets::shinyRenderWidget(expr, HARviewerOutput, env, quoted = TRUE)
}
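## --- Hedged usage sketch (not part of the original source) ---
## 'readHAR' is assumed to be the HARtools import helper; any parsed HAR
## list should work as the 'har' argument.
## har <- readHAR("example.har")
## HARviewer(har, height = "600px")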
|
addCovariates <- function (object, spatialdata, columns = NULL, strict = FALSE, replace = FALSE) {
if (!(inherits(object, 'mask') | inherits(object, 'traps')))
object <- matrix(unlist(object), ncol = 2)
if (!ms(object) & ms(spatialdata))
stop ("mismatch of single session object, multisession spatialdata")
if (ms(object)) {
nsession <- length(object)
out <- object
for (session in 1:nsession) {
if (ms(spatialdata)) {
out[[session]] <- addCovariates(out[[session]], spatialdata[[session]])
}
else {
out[[session]] <- addCovariates(out[[session]], spatialdata)
}
}
out
}
else {
if (is.character(spatialdata))
type <- "shapefile"
else if (inherits(spatialdata, "SpatialPolygonsDataFrame"))
type <- "SPDF"
else if (inherits(spatialdata, "SpatialGridDataFrame"))
type <- "SGDF"
else if (inherits(spatialdata, "mask"))
type <- "mask"
else if (inherits(spatialdata, "traps"))
type <- "traps"
else if (inherits(spatialdata, "RasterLayer"))
type <- "raster"
else if (inherits(spatialdata, "SpatRaster"))
type <- "SpatRaster"
else
stop ("spatialdata type unrecognised or unsupported")
if (type == "shapefile") {
polyfilename <- spatialdata
if (!requireNamespace('rgdal', quietly = TRUE)) {
stop("package rgdal is required to read shapefiles")
}
else {
isshp <- function(filename) {
nch <- nchar(filename)
tolower(substring(filename, nch-3,nch)) == ".shp"
}
if (!isshp(polyfilename)) {
polyfilename <- paste0(polyfilename, ".shp")
}
spatialdata <- basename(spatialdata)
if (isshp(spatialdata)) {
spatialdata <- substring(spatialdata, 1, nchar(spatialdata)-4)
}
spatialdata <- rgdal::readOGR(dsn = polyfilename, layer = spatialdata)
}
}
if (type %in% c("shapefile", "SPDF", "SGDF")) {
xy <- matrix(unlist(object), ncol = 2)
xy <- sp::SpatialPoints(xy)
sp::proj4string(spatialdata) <- sp::CRS()
df <- sp::over (xy, spatialdata)
}
else if (type == "raster") {
df <- data.frame(raster = extract(spatialdata, as.matrix(object)))
if (!is.null(columns)) {
names(df) <- columns
}
}
else if (type == "SpatRaster") {
df <- data.frame(raster = extract(spatialdata, as.matrix(object)))
if (!is.null(columns)) {
names(df) <- columns
}
}
else {
if (is.null(covariates(spatialdata)))
stop ("spatialdata does not have covariates")
index <- nearesttrap(object, spatialdata)
df <- covariates(spatialdata)[index,, drop=FALSE]
if (strict & type %in% c("mask")) {
incell <- function (xy, m, mask) {
sp2 <- spacing(mask) / 2
mxy <- mask[m,]
((xy[,1] + sp2) >= mxy[,1]) &
((xy[,1] - sp2) <= mxy[,1]) &
((xy[,2] + sp2) >= mxy[,2]) &
((xy[,2] - sp2) <= mxy[,2])
}
cellOK <- incell(object, index, spatialdata)
df[!cellOK,] <- NA
if (any(!cellOK))
warning ("some requested points lie outside mask")
}
}
if (!is.null(columns))
df <- df[,columns, drop = FALSE]
fn <- function(x) {
if (is.numeric(x))
!any(is.na(x))
else
!any((x == "") | is.na(x))
}
OK <- all(apply(df, 2, fn))
if (!OK)
warning ("missing values among new covariates")
rownames(df) <- 1:nrow(df)
if (is.null(covariates(object)))
covariates(object) <- df
else {
if (replace) {
repeated <- names(covariates(object)) %in% names(df)
covariates(object) <- covariates(object)[,!repeated]
}
covariates(object) <- cbind(covariates(object), df)
}
object
}
}
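## --- Hedged usage sketch (not part of the original source) ---
## secr-style call assumed: pull covariates from a habitat mask onto a traps
## object, keeping only a hypothetical 'habclass' column.
## mytraps <- addCovariates(mytraps, habitat.mask, columns = "habclass")
## covariates(mytraps)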
|
library(dplyr)
library(mosaic)     # shuffle(), prop1()
library(ggformula)  # gf_dhistogram(), gf_vline()
data(iris)
Setosa <- iris %>% filter(Species == "setosa")
# unnormalized correlation statistic: sum(xy) - n*mean(x)*mean(y)
corStat <- function(x, y) {sum(x * y) - length(x) * mean(x) * mean(y)}
testStat <- with(Setosa, corStat(Sepal.Length, Petal.Length)); testStat
SetosaSims <-
expand.grid(rep = 1:10000) %>%
group_by(rep) %>%
mutate(
simStat = with(Setosa, corStat(Sepal.Length, shuffle(Petal.Length)))
)
gf_dhistogram( ~ simStat, data = SetosaSims) %>%
gf_vline(xintercept = testStat)
prop1( ~ (simStat >= testStat), data = SetosaSims)      # one-sided permutation p-value
2 * prop1( ~ (simStat >= testStat), data = SetosaSims)  # doubled for a two-sided test
|
as_group_map_function <- function(.f, error_call = caller_env()) {
.f <- rlang::as_function(.f)
if (length(form <- formals(.f)) < 2 && ! "..." %in% names(form)){
bullets <- c(
"`.f` must accept at least two arguments.",
i = "You can use `...` to absorb unused components."
)
abort(bullets, call = error_call)
}
.f
}
group_map <- function(.data, .f, ..., .keep = FALSE) {
lifecycle::signal_stage("experimental", "group_map()")
UseMethod("group_map")
}
group_map.data.frame <- function(.data, .f, ..., .keep = FALSE, keep = deprecated()) {
if (!missing(keep)) {
lifecycle::deprecate_warn("1.0.0", "group_map(keep = )", "group_map(.keep = )")
.keep <- keep
}
.f <- as_group_map_function(.f)
chunks <- if (is_grouped_df(.data)) {
group_split(.data, .keep = isTRUE(.keep))
} else {
group_split(.data)
}
keys <- group_keys(.data)
group_keys <- map(seq_len(nrow(keys)), function(i) keys[i, , drop = FALSE])
if (length(chunks)) {
map2(chunks, group_keys, .f, ...)
} else {
structure(list(), ptype = .f(attr(chunks, "ptype"), keys[integer(0L), ], ...))
}
}
group_modify <- function(.data, .f, ..., .keep = FALSE) {
lifecycle::signal_stage("experimental", "group_modify()")
UseMethod("group_modify")
}
group_modify.data.frame <- function(.data, .f, ..., .keep = FALSE, keep = deprecated()) {
if (!missing(keep)) {
lifecycle::deprecate_warn("1.0.0", "group_modify(keep = )", "group_modify(.keep = )")
.keep <- keep
}
.f <- as_group_map_function(.f)
.f(.data, group_keys(.data), ...)
}
group_modify.grouped_df <- function(.data, .f, ..., .keep = FALSE, keep = deprecated()) {
if (!missing(keep)) {
lifecycle::deprecate_warn("1.0.0", "group_modify(keep = )", "group_modify(.keep = )")
.keep <- keep
}
tbl_group_vars <- group_vars(.data)
.f <- as_group_map_function(.f)
error_call <- current_env()
fun <- function(.x, .y){
res <- .f(.x, .y, ...)
if (!inherits(res, "data.frame")) {
abort("The result of `.f` must be a data frame.", call = error_call)
}
if (any(bad <- names(res) %in% tbl_group_vars)) {
msg <- glue(
"The returned data frame cannot contain the original grouping variables: {names}.",
names = paste(names(res)[bad], collapse = ", ")
)
abort(msg, call = error_call)
}
bind_cols(.y[rep(1L, nrow(res)), , drop = FALSE], res)
}
chunks <- group_map(.data, fun, .keep = .keep)
res <- if (length(chunks) > 0L) {
bind_rows(!!!chunks)
} else {
attr(chunks, "ptype")
}
grouped_df(res, group_vars(.data), group_by_drop_default(.data))
}
group_walk <- function(.data, .f, ...) {
lifecycle::signal_stage("experimental", "group_walk()")
group_map(.data, .f, ...)
invisible(.data)
}
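## --- Hedged usage sketch (not part of the original source) ---
## group_modify() keeps the grouping keys and row-binds the per-group results:
## library(dplyr)
## mtcars %>%
##   group_by(cyl) %>%
##   group_modify(~ data.frame(mean_mpg = mean(.x$mpg), n = nrow(.x)))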
|
mr_place_types <- function(...) {
jsonlite::fromJSON(
getter(
file.path(mr_base(), 'getGazetteerTypes.json'),
format = "application/json; charset=UTF-8;",
...
)
)
}
|
findNSCdesigns <- function(nmin, nmax, p0, p1, alpha, power, progressBar=FALSE)
{
nr.lists <- findN1N2R1R2NSC(nmin, nmax)
n1.vec <- nr.lists$n1
n2.vec <- nr.lists$n2
n.vec <- nr.lists$n
r1 <- nr.lists$r1
r <- nr.lists$r
alpha.power.nsc <- vector("list", length(n.vec))
l <- 1
ns <- nr.lists$ns
if(progressBar==TRUE) pb <- txtProgressBar(min = 0, max = nrow(ns), style = 3)
for(i in 1:nrow(ns))
{
n1 <- n1.vec[i]
n2 <- n2.vec[i]
n <- n.vec[i]
n.to.n1 <- 1:n1
Sm <- 0:n
m <- 1:n
for(j in 1:length(r1[[i]]))
{
r1.j <- r1[[i]][[j]]
r2.j <- r[[i]][[j]]
cp.subset1.Sm.list <- lapply(r2.j, function(x) {which(r1.j < Sm & Sm < (x+1)) - 1})
cp.subset2.Sm <- 0:r1.j
cp.subset2.m <- lapply(cp.subset2.Sm, function(x) {which(n1-n.to.n1 >= r1.j-x+1 & n.to.n1 >= x)})
for(k in 1:length(r[[i]][[j]]))
{
alpha.power.nsc[[l]] <- findNSCerrorRates(n1=n1, n2=n2, r1=r1.j, r2=r[[i]][[j]][k], p0=p0, p1=p1, Sm=Sm, m=m, n.to.n1=n.to.n1,
cp.subset2.Sm=cp.subset2.Sm, cp.subset2.m=cp.subset2.m, cp.subset1.Sm=cp.subset1.Sm.list[[k]])
l <- l+1
}
}
if(progressBar==TRUE) setTxtProgressBar(pb, i)
}
alpha.power.nsc <- do.call(rbind.data.frame, alpha.power.nsc)
names(alpha.power.nsc) <- c("n1", "n2", "n", "r1", "r", "alpha", "power")
nsc.search <- alpha.power.nsc$power > power & alpha.power.nsc$alpha < alpha
results.nsc.search <- alpha.power.nsc[nsc.search, ]
if(sum(nsc.search)>0)
{
ess.nsc.search <- apply(results.nsc.search, 1, function(x) {findNSCdesignOCs(n1=x[1], n2=x[2], r1=x[4], r2=x[5], p0=p0, p1=p1)})
ess.n.search <- as.data.frame(t(ess.nsc.search))
discard <- rep(NA, nrow(ess.n.search))
for(i in 1:nrow(ess.n.search))
{
discard[i] <- sum(ess.n.search$EssH0[i] > ess.n.search$EssH0 & ess.n.search$Ess[i] > ess.n.search$Ess & ess.n.search$n[i] >= ess.n.search$n)
}
subset.nsc <- ess.n.search[discard==0,]
duplicates <- duplicated(subset.nsc[, c("n", "Ess", "EssH0")])
subset.nsc <- subset.nsc[!duplicates,]
rm(results.nsc.search)
} else {subset.nsc <- rep(NA, 9)}
names(subset.nsc) <- c("n1", "n2", "n", "r1", "r", "alpha", "power", "EssH0", "Ess")
nsc.input <- data.frame(nmin=nmin, nmax=nmax, p0=p0, p1=p1, alpha=alpha, power=power)
nsc.output <- list(input=nsc.input,
all.des=subset.nsc)
return(nsc.output)
}
findNSCerrorRates <- function(n1, n2, r1, r2, p1, p0, theta0=0, theta1=1,
cp.subset2.Sm=cp.subset2.Sm, cp.subset2.m=cp.subset2.m, Sm=Sm, m=m, n.to.n1=n.to.n1, cp.subset1.Sm=cp.subset1.Sm)
{
n <- n1+n2
q1 <- 1-p1
q0 <- 1-p0
mat <- matrix(c(rep(0, n*(r2+1)), rep(1, n*((n+1)-(r2+1)))), nrow = n+1, byrow=T)
cp.subset1.m <- list()
i <- 1
for(k in cp.subset1.Sm)
{
cp.subset1.m[[i]] <- which(n-m >= r2-k+1 & m>=k)
i <- i+1
}
cp.subset21.Sm <- c(cp.subset2.Sm, cp.subset1.Sm)
cp.subset21.m <- c(cp.subset2.m, cp.subset1.m)
for(i in 1:length(cp.subset21.Sm))
{
mat[cp.subset21.Sm[i]+1, cp.subset21.m[[i]]] <- 0.5
}
NAs <- rbind(FALSE, lower.tri(mat)[-nrow(mat),])
mat[NAs] <- NA
pascal.list <- list(1, c(1,1))
for(i in 3:(n+2))
{
column <- mat[!is.na(mat[,i-2]), i-2]
CPzero.or.one <- which(column!=0.5)
newnew <- pascal.list[[i-1]]
newnew[CPzero.or.one] <- 0
pascal.list[[i]] <- c(0, newnew) + c(newnew, 0)
}
pascal.list <- pascal.list[c(-1, -length(pascal.list))]
needed <- (r2+1):n
coeffs2 <- p1^(r2+1)*q1^(needed-(r2+1))
coeffs2.p0 <- p0^(r2+1)*q0^(needed-(r2+1))
pascal.element.r2plus1 <- sapply(pascal.list, "[", (r2+2))[(r2+1):n]
output <- c(n1=n1, n2=n2, n=n, r1=r1, r=r2, alpha=sum(pascal.element.r2plus1*coeffs2.p0), power=sum(pascal.element.r2plus1*coeffs2))
output
}
findN1N2R1R2NSC <- function(nmin, nmax)
{
nposs <- nmin:nmax
n1.list <- list()
n2.list <- list()
for(i in 1:length(nposs))
{
n1.list[[i]] <- 1:(nposs[i]-1)
n2.list[[i]] <- nposs[i]-n1.list[[i]]
}
n1.list2 <- rev(unlist(n1.list))
n2.list2 <- rev(unlist(n2.list))
length(n1.list2)
ns <- cbind(n1.list2, n2.list2)
n.list2 <- apply(ns, 1, sum)
ns <- cbind(ns, n.list2)
colnames(ns) <- c("n1", "n2", "n")
r1 <- list()
for(i in 1:nrow(ns))
{
r1[[i]] <- 0:(ns[i,"n1"]-1)
}
length(unlist(r1))
r <- vector("list", (nrow(ns)))
for(i in 1:nrow(ns))
{
for(j in 1:length(r1[[i]]))
{
r[[i]][[j]] <- (r1[[i]][j]+1):(ns[i,"n"]-1)
keep.these <- r[[i]][[j]]-r1[[i]][j] <= ns[i,"n2"]
r[[i]][[j]] <- r[[i]][[j]][keep.these]
}
}
rm(nposs, keep.these)
return(list(ns=ns, n1=n1.list2, n2=n2.list2, n=n.list2, r1=r1, r=r))
}
findNSCdesignOCs <- function(n1, n2, r1, r2, p0=p0, p1=p1, theta0=0, theta1=1)
{
n <- as.numeric(n1+n2)
q1 <- 1-p1
q0 <- 1-p0
mat <- matrix(c(rep(0, n*(r2+1)), rep(1, n*((n+1)-(r2+1)))), nrow = n+1, byrow=T)
Sm <- 0:n
cp.subset1.Sm <- which(r1 < Sm & Sm < (r2+1)) - 1
m <- 1:n
cp.subset1.m <- list()
i <- 1
for(k in cp.subset1.Sm)
{
cp.subset1.m[[i]] <- which(n-m >= r2-k+1 & m>=k)
i <- i+1
}
cp.subset2.Sm <- 0:r1
n.to.n1 <- 1:n1
cp.subset2.m <- list()
i <- 1
for(k in cp.subset2.Sm)
{
cp.subset2.m[[i]] <- which(n1-n.to.n1 >= r1-k+1 & n.to.n1 >= k)
i <- i+1
}
cp.subset21.Sm <- c(cp.subset2.Sm, cp.subset1.Sm)
cp.subset21.m <- c(cp.subset2.m, cp.subset1.m)
for(i in 1:length(cp.subset21.Sm))
{
mat[cp.subset21.Sm[i]+1, cp.subset21.m[[i]]] <- 0.5
}
NAs <- rbind(FALSE, lower.tri(mat)[-nrow(mat),])
mat[NAs] <- NA
cp.sm <- c(cp.subset2.Sm, cp.subset1.Sm)
cp.m <- c(cp.subset2.m, cp.subset1.m)
pascal.list <- list(1, c(1,1))
for(i in 3:(n+2))
{
column <- mat[!is.na(mat[,i-2]), i-2]
CPzero.or.one <- which(column!=0.5)
newnew <- pascal.list[[i-1]]
newnew[CPzero.or.one] <- 0
pascal.list[[i]] <- c(0, newnew) + c(newnew, 0)
}
pascal.list <- pascal.list[c(-1, -length(pascal.list))]
coeffs <- list()
coeffs.p0 <- list()
for(i in 1:n){
j <- 1:(i+1)
coeffs[[i]] <- p1^(j-1)*q1^(i+1-j)
coeffs.p0[[i]] <- p0^(j-1)*q0^(i+1-j)
}
needed <- (r2+1):n
coeffs2 <- p1^(r2+1)*q1^(needed-(r2+1))
coeffs2.p0 <- p0^(r2+1)*q0^(needed-(r2+1))
pascal.element.r2plus1 <- sapply(pascal.list, "[", (r2+2))[(r2+1):n]
final.probs <- Map("*", pascal.list, coeffs)
final.probs.p0 <- Map("*", pascal.list, coeffs.p0)
final.probs.mat <- matrix(unlist(lapply(final.probs, '[', 1:max(sapply(final.probs, length)))), ncol = n, byrow = F)
final.probs.mat.p0 <- matrix(unlist(lapply(final.probs.p0, '[', 1:max(sapply(final.probs.p0, length)))), ncol = n, byrow = F)
rows.with.cp1 <- r2+1+1
columns.of.rows.w.cp1 <- (r2+1):n
success.n <- (r2+1):n
success.Sm <- rep(r2+1, length(columns.of.rows.w.cp1))
success.prob <- pascal.element.r2plus1*coeffs2
success.prob.p0 <-pascal.element.r2plus1*coeffs2.p0
success <- cbind(success.Sm, success.n, success.prob, success.prob.p0)
colnames(success) <- c("Sm", "m", "prob", "prob.p0")
m.fail <- rep(NA, r2+1)
prob.fail <- rep(NA, r2+1)
prob.fail.p0 <- rep(NA, r2+1)
for(i in 1:(r2+1))
{
m.fail[i] <- max(which(final.probs.mat[i ,]!=0))
prob.fail[i] <- final.probs.mat[i, m.fail[i]]
prob.fail.p0[i] <- final.probs.mat.p0[i, m.fail[i]]
}
Sm.fail <- 0:r2
fail.deets <- cbind(Sm.fail, m.fail, prob.fail, prob.fail.p0)
output <- rbind(fail.deets, success)
rownames(output) <- NULL
output <- as.data.frame(output)
output$success <- c(rep("Fail", length(m.fail)), rep("Success", nrow(success)))
names(output) <- c("Sm", "m", "prob", "prob.p0", "success")
sample.size.expd <- sum(output$m*output$prob)
sample.size.expd.p0 <- sum(output$m*output$prob.p0)
output <- c(n1=n1, n2=n2, n=n, r1=r1, r=r2, alpha=sum(success.prob.p0), power=sum(success.prob), EssH0=sample.size.expd.p0, Ess=sample.size.expd)
output
}
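## --- Hedged usage sketch (not part of the original source) ---
## A small NSC design search; wide (nmin, nmax) ranges get slow quickly.
## des <- findNSCdesigns(nmin = 15, nmax = 17, p0 = 0.1, p1 = 0.4,
##                       alpha = 0.15, power = 0.8, progressBar = TRUE)
## des$all.des   # admissible designs with alpha, power, ESS under H0/H1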
|
estimateDistances <- function(odenet, equilibrium
, distGround=c("combined", "individual", "fixed", c("A", "B", "123", "A"))
, optim.control=list()) {
UseMethod("estimateDistances")
}
estimateDistances.ODEnetwork <- function(odenet, equilibrium, distGround="combined"
, optim.control=list()) {
cN <- length(odenet$masses)
assertNumeric(equilibrium, any.missing=FALSE, len=cN)
assertVector(equilibrium, strict=TRUE)
assert(
checkCharacter(distGround, any.missing=FALSE, len=1L)
, checkCharacter(distGround, any.missing=FALSE, len=cN)
)
if (cN > 1 && length(distGround) == 1) {
assertChoice(distGround, c("combined", "individual", "fixed"))
}
names(equilibrium) <- NULL
if (cN == 1 && distGround != "fixed") {
odenet <- updateOscillators(odenet, ParamVec=c(r.1=equilibrium))
return(odenet)
}
cParams <- numeric()
if (length(distGround) == 1) {
if (distGround == "combined") {
cParams <- c(r.glob = stats::median(equilibrium))
} else if (distGround == "individual") {
cParams <- c(equilibrium)
names(cParams) <- paste("r.glob", 1:cN, sep=".")
}
} else {
for (grp in unique(distGround)) {
cParams <- c(cParams, stats::median(equilibrium[distGround == grp]))
names(cParams)[length(cParams)] <- paste("r.glob", paste(which(distGround == grp), collapse = "."), sep = ".")
}
}
locat.spring <- which(odenet$springs != 0, arr.ind=TRUE)
locat.ok <- apply(locat.spring, 1, function(x) x[1] < x[2])
if (sum(locat.ok) == 0) {
message("All parameters are fixed.")
return(odenet)
}
locat.spring <- matrix(locat.spring[locat.ok, ], ncol=2)
if (is.null(nrow(locat.spring))) locat.spring <- t(locat.spring)
for (i in 1:nrow(locat.spring)) {
cParams <- c(cParams, odenet$distances[locat.spring[i,1], locat.spring[i,2]])
names(cParams)[length(cParams)] <- paste(c("r", locat.spring[i ,]), collapse = ".")
}
mK <- odenet$springs
diag(mK) <- -rowSums(mK)
mK <- -mK
bTarget <- -mK %*% equilibrium
dista <- odenet$distances
if (nrow(dista) == 30) {
dista <- rep(345, 30)
dista[c(12, 20)] <- 220
dista <- diag(dista)
}
for (i in 1:nrow(locat.spring)) {
row <- locat.spring[i,1]
col <- locat.spring[i,2]
dista[row, col] <- diff(c(dista[row,row], dista[col, col]))
}
pTarget <- dista[locat.spring]
names(pTarget) <- paste("r.", apply(locat.spring, 1, paste, collapse="."), sep="")
pTarget <- c(cParams[grep("glob", names(cParams))], pTarget)
if (nrow(dista) == 30) {
pTarget[c(1, 2)] <- c(345, 220)
}
distCost <- function(cParameters, pTarget) {
odenet <- updateOscillators(odenet, ParamVec=splitGlobalParams(cParameters))
mR <- odenet$distances
diag(mR) <- -diag(mR)
mR[lower.tri(mR)] <- -mR[lower.tri(mR)]
b <- diag(odenet$springs %*% t(mR))
delta.b <- sum((b-bTarget)^2)
return(delta.b
+ sum((cParameters-pTarget)^2) * exp(-10*delta.b)
)
}
splitGlobalParams <- function(cParameters) {
if (sum(grepl("r\\.glob", names(cParameters))) > 0) {
globVal <- cParameters[grep("r\\.glob", names(cParameters))]
cParameters <- cParameters[-grep("r\\.glob", names(cParameters))]
if (length(globVal) == 1) {
lstMassGrps <- list(1:length(odenet$masses))
} else {
lstMassGrps <- gsub("r\\.glob\\.", "", names(globVal))
lstMassGrps <- strsplit(lstMassGrps, ".", fixed = TRUE)
}
for (i in length(lstMassGrps):1) {
cParameters <- c(rep(globVal[i], length(lstMassGrps[[i]])), cParameters)
names(cParameters)[1:length(lstMassGrps[[i]])] <- paste("r", lstMassGrps[[i]], sep = ".")
}
}
return(cParameters)
}
firstFit <- stats::optim(cParams, distCost, pTarget=pTarget, method="BFGS", control=optim.control)
checkFit <- stats::optim(firstFit$par, distCost, pTarget=pTarget, method="BFGS", control=optim.control)
if (checkFit$value/firstFit$value < 0.999)
warning("Optimization by estimateDistances() seems to be unsuccessful!")
odenet <- updateOscillators(odenet, ParamVec=splitGlobalParams(checkFit$par))
return(odenet)
}
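## --- Hedged usage sketch (not part of the original source) ---
## ODEnetwork constructor signature assumed from its documentation:
## odenet <- ODEnetwork(masses = c(1, 2), dampers = diag(c(0.1, 0.5)),
##                      springs = diag(c(4, 10)))
## estimateDistances(odenet, equilibrium = c(1, 3))$distances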
|
gisco_get_airports <- function(year = "2013",
country = NULL,
cache_dir = NULL,
update_cache = FALSE,
verbose = FALSE) {
year <- as.character(year)
if (!(year %in% c("2013"))) {
stop("Year should be 2013")
}
if (year == "2013") {
url <- paste0(
"https://ec.europa.eu/eurostat/cache/GISCO/",
"geodatafiles/Airports-2013-SHP.zip"
)
}
data_sf <- gsc_load_shp(url, cache_dir, verbose, update_cache)
data_sf <- sf::st_make_valid(data_sf)
data_sf <- sf::st_transform(data_sf, 4326)
if (!is.null(country) && "CNTR_CODE" %in% names(data_sf)) {
country <- gsc_helper_countrynames(country, "eurostat")
data_sf <- data_sf[data_sf$CNTR_CODE %in% country, ]
}
return(data_sf)
}
gisco_get_ports <- function(year = "2013",
country = NULL,
cache_dir = NULL,
update_cache = FALSE,
verbose = FALSE) {
year <- as.character(year)
if (!(year %in% c("2013"))) {
stop("Year should be 2013")
}
if (year == "2013") {
url <- paste0(
"https://ec.europa.eu/eurostat/cache/GISCO/",
"geodatafiles/PORT_2013_SH.zip"
)
}
data_sf <- gsc_load_shp(url, cache_dir, verbose, update_cache)
data_sf <- sf::st_make_valid(data_sf)
data_sf <- sf::st_transform(data_sf, 4326)
data_sf$CNTR_ISO2 <- substr(data_sf$PORT_ID, 1, 2)
if (!is.null(country) && "PORT_ID" %in% names(data_sf)) {
country <- gsc_helper_countrynames(country, "iso2c")
data_sf <- data_sf[data_sf$CNTR_ISO2 %in% country, ]
}
return(data_sf)
}
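## --- Hedged usage sketch (not part of the original source) ---
## Both getters hit the GISCO server, so network access (or a warm cache
## directory) is required:
## airports <- gisco_get_airports(country = c("ES", "PT"))
## ports <- gisco_get_ports(cache_dir = tempdir(), verbose = TRUE)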
|
setMethod(
"subset",
"SpatialStack",
function(x, i, drop=TRUE){
if(any(is.na(i))) stop("Cannot use NAs in subsetting of SpatialStacks.")
x@Spatials <- x@Spatials[i]
newNames<- names(x@Spatials)
if(any(is.na(newNames))) stop("Invalid subset.")
if(length(x@Spatials)==1 && drop){
return(x@Spatials[[1]])
}else{
x@bbox <- reBBOX(x@Spatials)
return(x)
}
}
)
setMethod(
"[",
"SpatialStack",
function(x, i, drop=TRUE){
subset(x=x, i=i, drop=drop)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="character", value="VectorSpatialClasses"),
definition=function(x,i,j=NULL,..., value){
if(is.character(i)){
i <- which(names(x@Spatials)==i)
}
x[i] <- value
return(x)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="logical", value="VectorSpatialClasses"),
definition=function(x,i,j=NULL,..., value){
if(is.logical(i)){
i <- which(i)
}
x[i] <- value
return(x)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="numeric", value="VectorSpatialClasses"),
definition=function(x,i,j=NULL,..., value){
if(!missing(j)) stop("Multiple dimensions are not allowed for SpatialStacks.")
lays <- nlayers(x)
if(any(grepl("Spatial", class(value)))){
for(k in 1L:length(i)){
x@Spatials[[i[k]]] <- value
}
}
if(nlayers(x)>lays) stop("Out of bounds replacement is not allowed. ")
return(x)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="character", value="SpatialStack"),
definition=function(x,i,j=NULL,..., value){
if(is.character(i)){
newI <- NULL
for(k in 1:length(i)){
newI <- c(newI, which(names(x@Spatials)==i[k]))
}
}
x[newI] <- value
return(x)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="logical", value="SpatialStack"),
definition=function(x,i,j=NULL,..., value){
if(is.logical(i)){
i <- which(i)
}
x[i] <- value
return(x)
}
)
setReplaceMethod(
"[",
signature(x="SpatialStack", i="numeric", value="SpatialStack"),
definition=function(x,i,j=NULL,..., value){
if(!missing(j)) stop("Multiple dimensions are not allowed for SpatialStacks.")
lays <- nlayers(x)
if(length(i)!=nlayers(value)) stop("Length of replacement value is not the same as the length of subscript.")
for(k in 1L:length(i)){
x@Spatials[[i[k]]] <- value[k]
}
if(nlayers(x)>lays) stop("Out of bounds replacement is not allowed. ")
return(x)
}
)
setMethod(
"[[",
"SpatialStack",
function(x, i, drop=TRUE){
subset(x=x, i=i, drop=drop)
}
)
setReplaceMethod(
"[[",
signature(x="SpatialStack", i="character"),
definition=function(x,i,..., value){
if(is.character(i)){
i <- which(names(x@Spatials)==i)
}
x[i] <- value
return(x)
}
)
setReplaceMethod(
"[[",
signature(x="SpatialStack", i="logical"),
definition=function(x,i,..., value){
if(is.logical(i)){
i <- which(i)
}
x[i] <- value
return(x)
}
)
setReplaceMethod(
"[[",
signature(x="SpatialStack", i="numeric", value="VectorSpatialClasses"),
definition=function(x,i,j=NULL,..., value){
if(!missing(j)) stop("Multiple dimensions are not allowed for SpatialStacks.")
lays <- nlayers(x)
if(any(grepl("Spatial", class(value)))){
for(k in 1L:length(i)){
x@Spatials[[i[k]]] <- value
}
}
if(nlayers(x)>lays) stop("Out of bounds replacement is not allowed. ")
return(x)
}
)
|
epmc_db <- function(ext_id = NULL,
data_src = "med",
db = NULL,
limit = 100,
verbose = TRUE) {
val_input(ext_id, data_src, limit, verbose)
if (is.null(db))
stop("Please restrict response to a database")
if (!toupper(db) %in% supported_db)
stop(
paste0(
"Data source '", db, "' not supported. Try one of the following sources: ",
paste0(supported_db, collapse = ", ")
)
)
path <- mk_path(data_src, ext_id, req_method = "databaseLinks")
hit_count <- get_counts(path = path, database = db)
if (hit_count == 0) {
message("No links found")
out <- NULL
} else {
msg(hit_count = hit_count,
limit = limit,
verbose = verbose)
if (limit <= batch_size()) {
req <-
rebi_GET(path = path,
query = list(format = "json", pageSize = limit, database = db))
out <- dplyr::bind_cols(req$dbCrossReferenceList$dbCrossReference$dbCrossReferenceInfo)
} else {
query <-
make_path(hit_count = hit_count,
limit = limit, database = db)
out <- purrr::map_df(query, function(x) {
req <- rebi_GET(path = path, query = x)
dplyr::bind_cols(req$dbCrossReferenceList$dbCrossReference$dbCrossReferenceInfo)
})
}
attr(out, "hit_count") <- hit_count
}
tibble::as_tibble(out)
}
supported_db <-
c("ARXPR",
"CHEBI",
"CHEMBL",
"EMBL",
"INTACT",
"INTERPRO",
"OMIM",
"PDB",
"UNIPROT",
"PRIDE")
|
animateCC = function(filename, out_type = c("html", "gif"), out_name = "aniCC"){
if (!requireNamespace("animation", quietly = TRUE)) {
stop("You need to install the animation package.")
}
if (filename$expt != "CC" | filename$file_type != "full") {
stop("This file is not from a chronocoulometry simulation created using ccSim.")
}
out_type = match.arg(out_type)
if (out_type == "html"){
old.ani = animation::ani.options(interval = 0.2, verbose = FALSE)
} else {
old.ani = animation::ani.options(interval = 0.2, loop = 1)
}
time_increment = round(length(filename$time)/40, digits = 0)
if (out_type == "html"){
animation::saveHTML({
old.par = par(mfrow = c(2, 1))
for (i in seq(1, length(filename$time), time_increment)) {
plot(x = filename$distance, y = filename$oxdata[i, ],
type = "l", lwd = 3, col = "blue",
ylim = c(0, 1050 * filename$conc.bulk),
xlab = "distance from electrode (cm)",
ylab = "concentration (mM)")
grid()
lines(x = filename$distance, y = filename$reddata[i, ],
lwd = 3, col = "red")
if (filename$mechanism != "E") {
lines(x = filename$distance,
y = filename$chemdata[i, ],
lwd = 3, col = "green")
legend(x = "right", legend = c("Ox", "Red", "Chem"),
fill = c("blue", "red", "green"),
bty = "n", inset = 0.05)
} else {
legend(x = "right", legend = c("Ox", "Red"),
fill = c("blue", "red"),
bty = "n", inset = 0.05)
}
plot(x = filename$time[1:i], y = filename$charge[1:i],
col = "blue", type = "l", lwd = 3,
xlim = c(min(filename$time), max(filename$time)),
ylim = c(min(filename$charge), max(filename$charge)),
xlab = "time (s)", ylab = expression(paste("charge (", mu, "C)")))
grid()
}
par(old.par)
},
img.name = paste0(out_name,"_plot"),
imgdir = paste0(out_name,"_dir"),
htmlfile = paste0(out_name,".html"),
navigator = FALSE
)
} else {
animation::saveGIF({
old.par = par(mfrow = c(2, 1))
for (i in seq(1, length(filename$time), time_increment)) {
plot(x = filename$distance, y = filename$oxdata[i, ],
type = "l", lwd = 3, col = "blue",
ylim = c(0, 1050 * filename$conc.bulk),
xlab = "distance from electrode (cm)",
ylab = "concentration (mM)")
grid()
lines(x = filename$distance, y = filename$reddata[i, ],
lwd = 3, col = "red")
if (filename$mechanism != "E") {
lines(x = filename$distance,
y = filename$chemdata[i, ],
lwd = 3, col = "green")
legend(x = "right", legend = c("Ox", "Red", "Chem"),
fill = c("blue", "red", "green"),
bty = "n", inset = 0.05)
} else {
legend(x = "right", legend = c("Ox", "Red"), fill = c("blue", "red"),
bty = "n", inset = 0.05)
}
plot(x = filename$time[1:i], y = filename$charge[1:i],
col = "blue", type = "l", lwd = 3,
xlim = c(min(filename$time), max(filename$time)),
ylim = c(min(filename$charge), max(filename$charge)),
xlab = "time (s)", ylab = expression(paste("charge (", mu, "C)")))
grid()
}
par(old.par)}, movie.name = paste0(out_name,".gif")
)
}
animation::ani.options(old.ani)
}
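## --- Hedged usage sketch (not part of the original source) ---
## 'ccSim' is the chronocoulometry simulator the checks above expect; the
## call below assumes its defaults produce a full "CC" result object.
## cc <- ccSim()
## animateCC(cc, out_type = "html", out_name = "ccDemo")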
|
node_sets <- function(M, six_node){
M[M > 0] <- 1
dimnames(M) <- NULL
sets <- stats::setNames(object = rep(NA, 17), nm = paste0("m",1:17))
nr <- nrow(M)
nc <- ncol(M)
m1_1 <- choose(nr,1) * choose(nc,1)
m1_2 <- choose(nr,1) * choose(nc,2)
m2_1 <- choose(nr,2) * choose(nc,1)
m3_1 <- choose(nr,3) * choose(nc,1)
m2_2 <- choose(nr,2) * choose(nc,2)
m1_3 <- choose(nr,1) * choose(nc,3)
m4_1 <- choose(nr,4) * choose(nc,1)
m3_2 <- choose(nr,3) * choose(nc,2)
m2_3 <- choose(nr,2) * choose(nc,3)
m1_4 <- choose(nr,1) * choose(nc,4)
if(six_node == TRUE){
m5_1 <- choose(nr,5) * choose(nc,1)
m4_2 <- choose(nr,4) * choose(nc,2)
m3_3 <- choose(nr,3) * choose(nc,3)
m2_4 <- choose(nr,2) * choose(nc,4)
m1_5 <- choose(nr,1) * choose(nc,5)
}
if(six_node == TRUE){
sets <- stats::setNames(c(m1_1,
m1_2,
m2_1,
m3_1,
rep(m2_2,2),
m1_3,
m4_1,
rep(m3_2,4),
rep(m2_3,4),
m1_4,
m5_1,
rep(m4_2,6),
rep(m3_3,13),
rep(m2_4,6),
m1_5),
nm = paste0("m", 1:44))
} else {
sets <- stats::setNames(c(m1_1,
m1_2,
m2_1,
m3_1,
rep(m2_2,2),
m1_3,
m4_1,
rep(m3_2,4),
rep(m2_3,4),
m1_4),
nm = paste0("m", 1:17))
}
return(sets)
}
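## --- Hedged usage sketch (not part of the original source) ---
## Counts of candidate node sets per motif size class for a tiny bipartite
## incidence matrix (six_node = FALSE gives the 17 classes used above).
M <- matrix(c(1, 0, 1,
              0, 1, 1), nrow = 2, byrow = TRUE)
node_sets(M, six_node = FALSE)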
|
get_pars <- function(fitted_model, conf_int = 0.05) {
if (!("model" %in% names(fitted_model)) || class(fitted_model$model)[1] != "stanfit") {
stop("Error: input isn't a stanfit object")
}
p <- get_fitted(fitted_model, conf_int = conf_int)
pars <- rstan::extract(fitted_model$model)
n_group <- dim(pars$beta)[2]
n_cov <- dim(pars$beta)[3]
betas <- expand.grid(
"group" = seq(1, n_group),
"cov" = seq(1, n_cov),
"par" = NA,
"mean" = NA,
"median" = NA,
"lo" = NA,
"hi" = NA
)
for (i in 1:nrow(betas)) {
betas$mean[i] <- mean(pars$beta[, betas$group[i], betas$cov[i]])
betas$median[i] <- median(pars$beta[, betas$group[i], betas$cov[i]])
betas$lo[i] <- quantile(pars$beta[, betas$group[i], betas$cov[i]], conf_int / 2.0)
betas$hi[i] <- quantile(pars$beta[, betas$group[i], betas$cov[i]], 1 - conf_int / 2.0)
betas$par[i] <- fitted_model$par_names[betas$cov[i]]
}
par_list <- list(p = p, betas = betas)
if (fitted_model$overdispersion == TRUE) {
phi <- data.frame(
"mean" = mean(pars$phi),
"median" = median(pars$phi),
"lo" = quantile(pars$phi, conf_int / 2.0),
"hi" = quantile(pars$phi, 1 - conf_int / 2.0)
)
par_list$phi <- phi
}
return(par_list)
}
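## --- Hedged usage sketch (not part of the original source) ---
## Assumes a fitted object that stores a stanfit in $model plus the
## $par_names and $overdispersion fields used above ('fit_model' is a
## hypothetical fitting call):
## fitted <- fit_model(...)
## pars <- get_pars(fitted, conf_int = 0.05)
## pars$betas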
|
expected <- eval(parse(text="numeric(0)"));
test(id=0, code={
argv <- eval(parse(text="list(NULL, \"double\")"));
.Internal(`as.vector`(argv[[1]], argv[[2]]));
}, o=expected);
|
Request <- setRefClass(
'Request',
fields = c('FORM_DATA_MEDIA_TYPES','PARSEABLE_DATA_MEDIA_TYPES','env'),
methods = list(
initialize = function(env,...){
env <<- env
FORM_DATA_MEDIA_TYPES <<- c(
'application/x-www-form-urlencoded',
'multipart/form-data'
)
PARSEABLE_DATA_MEDIA_TYPES <<- c(
'multipart/related',
'multipart/mixed'
)
if (exists('HTTP_X_SCRIPT_NAME',env)){
env[['HTTP_X_SCRIPT_NAME']] <<- sub('/$','',env[['HTTP_X_SCRIPT_NAME']])
env[['SCRIPT_NAME']] <<- paste(env[['HTTP_X_SCRIPT_NAME']],env[['SCRIPT_NAME']],sep='')
}
callSuper(...)
},
body = function() env[["rook.input"]],
scheme = function() env[["rook.url_scheme"]],
port = function() as.integer(env[["SERVER_PORT"]]),
request_method = function() env[["REQUEST_METHOD"]],
query_string = function() env[["QUERY_STRING"]],
content_length = function() env[['CONTENT_LENGTH']],
content_type = function() env[['CONTENT_TYPE']],
media_type = function(){
if (is.null(content_type())) return(NULL)
tolower(strsplit(content_type(),'\\s*[;,]\\s*')[[1]][1])
},
media_type_params = function(){
if (is.null(content_type())) return(NULL)
params <- list()
for(i in strsplit(content_type(),'\\s*[;,]\\s*')[[1]][-1]){
x <- strsplit(i,'=')[[1]]
params[[tolower(x[1])]] <- x[2]
}
params
},
content_charset = function() media_type_params()[['charset']],
host_with_port = function(){
if(exists('HTTP_X_FORWARDED_HOST',env)){
x <- strsplit(env[['HTTP_X_FORWARDED_HOST']],',\\s?')[[1]]
return(x[length(x)])
} else if (exists('HTTP_HOST',env)){
env[['HTTP_HOST']]
} else {
if (exists('SERVER_NAME',env))
host <- env[['SERVER_NAME']]
else
host <- env[['SERVER_ADDR']]
paste(host,env[['SERVER_PORT']],sep=':')
}
},
host = function() sub(':\\d+','',host_with_port(),perl=TRUE),
script_name = function(s=NULL){
if (!is.null(s) && is.character(s)) env[['SCRIPT_NAME']] <<- s
env[['SCRIPT_NAME']]
},
path_info = function(s=NULL){
if (!is.null(s) && is.character(s)) env[['PATH_INFO']] <<- s
env[['PATH_INFO']]
},
delete = function() request_method() == 'DELETE',
get = function() request_method() == 'GET',
head = function() request_method() == 'HEAD',
options = function() request_method() == 'OPTIONS',
post = function() request_method() == 'POST',
put = function() request_method() == 'PUT',
trace = function() request_method() == 'TRACE',
form_data = function(){
(post() && is.null(media_type())) || any(FORM_DATA_MEDIA_TYPES==media_type())
},
parseable_data = function(){
any(PARSEABLE_DATA_MEDIA_TYPES==media_type())
},
GET = function(){
if (!exists('rook.request.query_list',env))
env[['rook.request.query_list']] <<- Utils$parse_query(query_string())
env[['rook.request.query_list']]
},
POST = function(){
if (!exists('rook.input',env))
stop("Missing rook.input")
if (exists('rook.request.form_list',env))
env[['rook.request.form_list']]
else if (form_data() || parseable_data()){
env[['rook.request.form_list']] <<- Multipart$parse(env)
if (length(env[['rook.request.form_list']]) == 0){
form_vars <- env[['rook.input']]$read()
env[['rook.request.form_list']] <<- Utils$parse_query(rawToChar(form_vars))
}
}
env[['rook.request.form_list']]
},
params = function() c(GET(),POST()) ,
referer = function(){
if (!is.null(env[['HTTP_REFERER']])) env[['HTTP_REFERER']] else '/'
},
referrer = function() referer(),
user_agent = function() env[['HTTP_USER_AGENT']],
cookies = function(){
if (exists('rook.request.cookie_list',env))
return(env[['rook.request.cookie_list']])
if (!is.null(env[['HTTP_COOKIE']]))
env[['rook.request.cookie_list']] <<- Utils$parse_query(env[['HTTP_COOKIE']])
else
env[['rook.request.cookie_list']] <<- NULL
},
xhr = function() {
(exists('HTTP_X_REQUESTED_WITH',env) &&
env[['HTTP_X_REQUESTED_WITH']] == 'XMLHttpRequest')
},
url = function(){
x <- paste(scheme(),'://',host(),sep='')
if ( (scheme() == 'https' && port() != 443) || (scheme() == 'http' && port() != 80))
x <- paste(x,':',port(),sep='')
x <- paste(x,fullpath(),sep='')
x
},
path = function() paste(script_name(),path_info(),sep=''),
fullpath = function(){
if (is.null(query_string()))
path()
else
paste(path(),'?',query_string(),sep='')
},
to_url = function(url,...) {
newurl <- paste(script_name(),url,sep='')
opt <- list(...)
if (length(opt)){
newurl <- paste(
newurl,'?',
paste(names(opt),opt,sep='=',collapse='&'),
sep=''
)
}
newurl
},
accept_encoding = function() env[['HTTP_ACCEPT_ENCODING']],
ip = function() env[['REMOTE_ADDR']]
)
)
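## --- Hedged usage sketch (not part of the original source) ---
## Minimal Rook-style app built on Request; 'env' is supplied by the server.
app <- function(env) {
  req <- Request$new(env)
  body <- paste("method:", req$request_method(), "path:", req$path())
  list(status = 200L,
       headers = list("Content-Type" = "text/plain"),
       body = body)
}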
|
InventoryGrowthFusion <- function(data, cov.data=NULL, time_data = NULL, n.iter=5000, n.chunk = n.iter, n.burn = min(n.chunk, 2000), random = NULL, fixed = NULL,time_varying=NULL, burnin_plot = FALSE, save.jags = "IGF.txt", z0 = NULL, save.state=TRUE,restart = NULL) {
burnin.variables <- c("tau_add", "tau_dbh", "tau_inc", "mu")
out.variables <- c("deviance", "tau_add", "tau_dbh", "tau_inc", "mu")
if(!exists("model")) model = 0
if(length(n.chunk)>1){
k_restart = n.chunk[2]
n.chunk = n.chunk[1]
} else {
k_restart = 1
}
max.chunks <- ceiling(n.iter/n.chunk)
if(max.chunks < k_restart){
PEcAn.logger::logger.warn("MCMC already complete",max.chunks,k_restart)
return(NULL)
}
avail.chunks <- k_restart:ceiling(n.iter/n.chunk)
check.dup.data <- function(data,loc){
if(any(duplicated(names(data)))){PEcAn.logger::logger.error("duplicated variable at",loc,names(data))}
}
## NOTE: the "## ..." placeholder markers in this template are assumed
## reconstructions; this copy lost all comments, which also truncated the
## sub()/gsub() pattern strings further below.
TreeDataFusionMV <- "
model{
for(i in 1:ni){
for(t in 1:nt){
z[i,t] ~ dnorm(x[i,t],tau_dbh)
}
for(t in 2:nt){
inc[i,t] <- x[i,t]-x[i,t-1]
y[i,t] ~ dnorm(inc[i,t],tau_inc)
}
for(t in 2:nt){
Dnew[i,t] <- x[i,t-1] + mu ##PROCESS
x[i,t]~dnorm(Dnew[i,t],tau_add)
}
x[i,1] ~ dnorm(x_ic,tau_ic)
}
tau_dbh ~ dgamma(a_dbh,r_dbh)
tau_inc ~ dgamma(a_inc,r_inc)
tau_add ~ dgamma(a_add,r_add)
mu ~ dnorm(0.5,0.5)
}"
Pformula <- NULL
if (!is.null(random)) {
Rpriors <- NULL
Reffects <- NULL
r_vars <- gsub(" ","",unlist(strsplit(random,"+",fixed=TRUE)))
for(i in seq_along(r_vars)){
if(r_vars[i] == "i"){
r_var <- "i"
counter <- ""
index <- "i"
nr <- nrow(cov.data)
} else if(r_vars[i] == "t"){
r_var <- "t"
counter <- ""
index <- "t"
nr <- ncol(cov.data)
} else {
index <- counter <- nr <- NA
r_var <- gsub("(","",gsub(")","",r_vars[i],fixed = TRUE),fixed = TRUE)
r_var <- strsplit(r_var,"|",fixed=TRUE)[[1]]
fix <- r_var[1]
r_var <- strsplit(gsub("\\",":",r_var[2],fixed=TRUE),":",fixed = TRUE)[[1]]
for(j in seq_along(r_var)){
if(j>1)print("WARNING: not actually nesting random effects at this time")
j_var <- strsplit(r_var[j],"[",fixed = TRUE)[[1]]
index[j] <- gsub("]","",j_var[2],fixed=TRUE)
counter[j] <- j_var[1]
r_var[j] <- j_var[1]
if(!(r_var[j] %in% names(data))){
data[[length(data)+1]] <- as.numeric(as.factor(as.character(cov.data[,r_var[j]])))
names(data)[length(data)] <- r_var[j]
}
check.dup.data(data,"r_var")
nr[j] <- max(as.numeric(data[[r_var[j]]]))
}
index <- paste0("[",index,"]")
}
Pformula <- paste(Pformula,
paste0("+ alpha_", r_var,"[",counter,index,"]"))
for(j in seq_along(nr)){
Reffects <- paste(Reffects,
paste0("for(k in 1:",nr[j],"){\n"),
paste0(" alpha_",r_var[j],"[k] ~ dnorm(0,tau_",r_var[j],")\n}\n"))
}
Rpriors <- paste(Rpriors,paste0("tau_",r_var," ~ dgamma(1,0.1)\n",collapse = " "))
burnin.variables <- c(burnin.variables, paste0("tau_", r_var))
out.variables <- c(out.variables, paste0("tau_", r_var), paste0("alpha_",r_var))
}
## original pattern strings were truncated in this copy; the marker names
## below are assumed placeholders matching those restored in the template
TreeDataFusionMV <- sub(pattern = "## RANDOM EFFECT TAUS", Rpriors, TreeDataFusionMV)
TreeDataFusionMV <- gsub(pattern = "## RANDOM EFFECTS", Reffects, TreeDataFusionMV)
}
if(FALSE){
fixed <- "X + X^3 + X*bob + bob + dia + X*Tmin[t]"
}
if (is.null(fixed)) {
Xf <- NULL
} else {
if (is.null(cov.data)) {
PEcAn.logger::logger.error("formula provided but covariate data is absent:", fixed)
} else {
cov.data <- as.data.frame(cov.data)
}
if (length(grep("~", fixed)) == 0) {
fixed <- paste("~", fixed)
}
fixedX <- sub("~","",fixed, fixed=TRUE)
lm.terms <- gsub("[[:space:]]", "", strsplit(fixedX,split = "+",fixed=TRUE)[[1]])
X.terms <- strsplit(lm.terms,split = c("^"),fixed = TRUE)
X.terms <- sapply(X.terms,function(str){unlist(strsplit(str,split="*",fixed=TRUE))})
X.terms <- which(sapply(X.terms,function(x){any(toupper(x) == "X")}))
if(length(X.terms) > 0){
fixed <- paste("~",paste(lm.terms[-X.terms],collapse = " + "))
X.terms <- lm.terms[X.terms]
Xpriors <- NULL
for(i in seq_along(X.terms)){
myBeta <- NULL
Xformula <- NULL
if(length(grep("*",X.terms[i],fixed = TRUE)) == 1){
myIndex <- "[i]"
covX <- strsplit(X.terms[i],"*",fixed=TRUE)[[1]]
covX <- covX[-which(toupper(covX)=="X")]
tvar <- length(grep("[t]",covX,fixed=TRUE)) > 0
if(tvar){
covX <- sub("[t]","",covX,fixed = TRUE)
if(!(covX %in% names(data))){
data[[covX]] <- time_data[[covX]]
}
check.dup.data(data,"covX")
myIndex <- "[i,t]"
} else {
if(covX %in% colnames(cov.data)){
if(!(covX %in% names(data))){
data[[covX]] <- cov.data[,covX]
}
check.dup.data(data,"covX2")
} else {
PEcAn.logger::logger.error("covariate absent from covariate data:", covX)
}
}
myBeta <- paste0("betaX_",covX)
Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,myIndex)
} else if(length(grep("^",X.terms[i],fixed=TRUE))==1){
powX <- strsplit(X.terms[i],"^",fixed=TRUE)[[1]]
powX <- powX[-which(toupper(powX)=="X")]
myBeta <- paste0("betaX",powX)
Xformula <- paste0(myBeta,"*x[i,t-1]^",powX)
} else {
myBeta <- "betaX"
Xformula <- paste0(myBeta,"*x[i,t-1]")
}
Pformula <- paste(Pformula,"+",Xformula)
Xpriors <- paste(Xpriors," ",myBeta,"~dnorm(0,0.001)\n")
out.variables <- c(out.variables, myBeta)
}
TreeDataFusionMV <- sub(pattern = "## ENDOGENOUS BETAS", Xpriors, TreeDataFusionMV)  # assumed marker (original pattern truncated)
}
Xf <- with(cov.data, model.matrix(formula(fixed)))
Xf.cols <- colnames(Xf)
Xf.cols <- sub(":","_",Xf.cols)
colnames(Xf) <- Xf.cols
Xf.cols <- Xf.cols[Xf.cols != "(Intercept)"]
Xf <- as.matrix(Xf[, Xf.cols])
colnames(Xf) <- Xf.cols
Xf.center <- apply(Xf, 2, mean, na.rm = TRUE)
Xf <- t(t(Xf) - Xf.center)
}
if (!is.null(Xf)) {
Xf.names <- gsub(" ", "_", colnames(Xf))
Pformula <- paste(Pformula,
paste0("+ beta", Xf.names, "*Xf[rep[i],", seq_along(Xf.names), "]", collapse = " "))
if(is.null(data$rep)){
data$rep <- seq_len(nrow(Xf))
}
Xf.priors <- paste0(" beta", Xf.names, "~dnorm(0,0.001)", collapse = "\n")
TreeDataFusionMV <- sub(pattern = "## FIXED EFFECTS BETAS", Xf.priors, TreeDataFusionMV)  # assumed marker (original pattern truncated)
data[["Xf"]] <- Xf
out.variables <- c(out.variables, paste0("beta", Xf.names))
}
check.dup.data(data,"Xf")
if(FALSE){
time_varying <- "tmax_Jun + ppt_Dec + tmax_Jun*ppt_Dec"
time_data <- list(TminJuly = matrix(0,4,4),PrecipDec = matrix(1,4,4))
}
if(!is.null(time_varying)){
if (is.null(time_data)) {
PEcAn.logger::logger.error("time_varying formula provided but time_data is absent:", time_varying)
}
Xt.priors <- ""
t_vars <- gsub(" ","",unlist(strsplit(time_varying,"+",fixed=TRUE)))
it_vars <- t_vars[grep(pattern = "*",x=t_vars,fixed = TRUE)]
if(length(it_vars) > 0){
t_vars <- t_vars[!(t_vars %in% it_vars)]
}
for(i in seq_along(it_vars)){
covX <- strsplit(it_vars[i],"*",fixed=TRUE)[[1]]
tvar <- length(grep("[t]",covX[1],fixed=TRUE)) > 0
tvar[2] <- length(grep("[t]",covX[2],fixed=TRUE)) > 0
myBeta <- "beta"
for(j in 1:2){
if(j == 2) myBeta <- paste0(myBeta,"_")
if(tvar[j]){
covX[j] <- sub("[t]","",covX[j],fixed = TRUE)
if(!(covX[j] %in% names(data))){
data[[covX[j]]] <- time_data[[covX[j]]]
}
myBeta <- paste0(myBeta,covX[j])
covX[j] <- paste0(covX[j],"[i,t]")
} else {
if(!(covX[j] %in% names(data))){
data[[covX[j]]] <- cov.data[,covX[j]]
}
myBeta <- paste0(myBeta,covX[j])
covX[j] <- paste0(covX[j],"[i]")
}
}
Pformula <- paste(Pformula,
paste0(" + ",myBeta,"*",covX[1],"*",covX[2]))
Xt.priors <- paste0(Xt.priors,
" ",myBeta,"~dnorm(0,0.001)\n")
out.variables <- c(out.variables, myBeta)
}
for(j in seq_along(t_vars)){
tvar <- t_vars[j]
if(!(tvar %in% names(data))){
data[[tvar]] <- time_data[[tvar]]
}
check.dup.data(data,"tvar")
Pformula <- paste(Pformula,
paste0("+ beta", tvar, "*",tvar,"[i,t]"))
out.variables <- c(out.variables, paste0("beta", tvar))
}
Xt.priors <- paste0(Xt.priors,
paste0(" beta", t_vars, "~dnorm(0,0.001)", collapse = "\n")
)
TreeDataFusionMV <- sub(pattern = "## TIME VARYING BETAS", Xt.priors, TreeDataFusionMV)  # assumed marker (original pattern truncated)
}
if (!is.null(Pformula)) {
TreeDataFusionMV <- sub(pattern = "##PROCESS", Pformula, TreeDataFusionMV)  # assumed marker (original pattern truncated)
}
if(!is.null(save.jags)){
cat(TreeDataFusionMV,file=save.jags)
}
if(is.null(z0)){
z0 <- t(apply(data$y, 1, function(y) {
-rev(cumsum(rev(y)))
})) + data$z[, ncol(data$z)]
}
init <- list()
if(is.mcmc.list(restart)){
init <- mcmc.list2init(restart)
nchain <- length(init)
} else {
nchain <- 3
for (i in seq_len(nchain)) {
y.samp <- sample(data$y, length(data$y), replace = TRUE)
init[[i]] <- list(x = z0,
tau_add = runif(1, 1, 5) / var(diff(y.samp), na.rm = TRUE),
tau_dbh = 1,
tau_inc = 1500,
tau_ind = 50,
tau_yr = 100,
betaX2 = 0,
ind = rep(0, data$ni),
year = rep(0, data$nt))
}
}
PEcAn.logger::logger.info("COMPILE JAGS MODEL")
j.model <- rjags::jags.model(file = textConnection(TreeDataFusionMV), data = data, inits = init, n.chains = nchain)
if(n.burn > 0){
PEcAn.logger::logger.info("BURN IN")
jags.out <- rjags::coda.samples(model = j.model,
variable.names = burnin.variables,
n.iter = n.burn)
if (burnin_plot) {
plot(jags.out)
}
}
PEcAn.logger::logger.info("RUN MCMC")
rjags::load.module("dic")
for(k in avail.chunks){
if(as.logical(save.state) & k%%as.numeric(save.state) == 0){
vnames <- c("x",out.variables)
} else {
vnames <- out.variables
}
jags.out <- rjags::coda.samples(model = j.model, variable.names = vnames, n.iter = n.chunk)
ofile <- paste("IGF",model,k,"RData",sep=".")
print(ofile)
save(jags.out,file=ofile)
if(!is.null(restart) && ((is.logical(restart) && restart) || is.mcmc.list(restart))){
ofile <- paste("IGF",model,"RESTART.RData",sep=".")
jags.final <- rjags::coda.samples(model = j.model, variable.names = c("x",out.variables), n.iter = 1)
k_restart = k + 1
save(jags.final,k_restart,file=ofile)
}
D <- as.mcmc.list(lapply(jags.out,function(x){x[,'deviance']}))
gbr <- coda::gelman.diag(D)$psrf[1,1]
trend <- mean(sapply(D,function(x){coef(lm(x~seq_len(n.chunk)))[2]}))
if(gbr < 1.005 & abs(trend) < 0.5) break
}
return(jags.out)
}
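## --- Hedged call sketch (not part of the original source) ---
## 'data' must carry the z (DBH) and y (increment) matrices plus the prior
## hyperparameters (a_dbh, r_dbh, a_inc, r_inc, a_add, r_add, x_ic, tau_ic)
## named in the JAGS template; formula strings follow the parsers above.
## out <- InventoryGrowthFusion(data, cov.data = cov.df, time_data = clim,
##                              fixed = "~ dia", random = "i",
##                              n.iter = 5000)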
|
library(tidyquant)
library(cranlogs)
# daily downloads for a set of tidyverse-adjacent packages, grouped so that
# tq_mutate() operates per package (defined before first use)
pkgs <- c(
"tidyr", "lubridate", "dplyr",
"broom", "tidyquant", "ggplot2", "purrr",
"stringr", "knitr"
)
tidyverse_downloads <- cran_downloads(
packages = pkgs,
from = "2017-01-01",
to = "2017-06-30") %>%
tibble::as_tibble() %>%
group_by(package)
# rolling mean/sd with +/- 2-sd bands over a 30-day right-aligned window
custom_stat_fun_2 <- function(x, na.rm = TRUE) {
m <- mean(x, na.rm = na.rm)
s <- sd(x, na.rm = na.rm)
hi <- m + 2*s
lo <- m - 2*s
ret <- c(mean = m, stdev = s, hi.95 = hi, lo.95 = lo)
return(ret)
}
tidyverse_downloads_rollstats <- tidyverse_downloads %>%
tq_mutate(
select = count,
mutate_fun = rollapply,
width = 30,
align = "right",
by.column = FALSE,
FUN = custom_stat_fun_2,
na.rm = TRUE
)
class(tidyverse_downloads)
tidyverse_downloads_rollstats
print(tbl_df(tidyverse_downloads_rollstats), n=40)
tq_mutate_fun_options() %>% str()
tidyverse_downloads %>%
ggplot(aes(x = date, y = count, color = package)) +
geom_point(alpha = 0.5) +
facet_wrap(~ package, ncol = 3, scale = "free_y") +
labs(title = "tidyverse packages: Daily downloads", x = "",
subtitle = "2017-01-01 through 2017-06-30",
caption = "Downloads data courtesy of cranlogs package") +
scale_color_tq() +
theme_tq() +
theme(legend.position="none")
# exploratory residue: 'df' is assumed to be an external dataset with a
# 'timestamp2' index column and a numeric 'value' column (xts is loaded
# via tidyquant)
head(df)
names(df)
df2 = xts(df[1:4], order.by=df$timestamp2)
names(df2)
custom_stat_fun_3 <- function(x, na.rm = TRUE) {
m <- mean(x, na.rm = na.rm)
s <- sd(x, na.rm = na.rm)
hi <- m + 1*s  # one-sd bands here (names 'hi.95'/'lo.95' kept from the 2-sd version)
lo <- m - 1*s
ret <- c(mean = m, stdev = s, hi.95 = hi, lo.95 = lo)
return(ret)
}
class(df2)
df2_rollstats <- df %>%
tq_mutate(
select = value,
mutate_fun = rollapply,
width = 30,
align = "right",
by.column = FALSE,
FUN = custom_stat_fun_3,
na.rm = TRUE
)
|
.testChecksum <- function(file, target, algo="sha1", ..., verbose=FALSE) {
.msg(verbose, "Calculating ", algo, "-sum for ", sQuote(file), ": ",
appendLF=FALSE)
fileChecksum <- tolower(digest::digest(file, algo=algo, file=TRUE, ...))
target <- tolower(target)
.msg(verbose, fileChecksum)
if (fileChecksum != target) {
warning("Stored and calculated ", algo, " sums do not match ",
"(stored: ", sQuote(target), ", calculated: ",
sQuote(fileChecksum), ")!")
return(FALSE)
}
TRUE
}
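## --- Hedged usage sketch (not part of the original source) ---
## .msg() is a package-internal logger; a stub is defined here so the
## example is self-contained.
.msg <- function(verbose, ..., appendLF = TRUE) {
  if (verbose) message(..., appendLF = appendLF)
}
tmp <- tempfile()
writeLines("checksum me", tmp)
good <- digest::digest(tmp, algo = "sha1", file = TRUE)
.testChecksum(tmp, good)        # TRUE
.testChecksum(tmp, "deadbeef")  # FALSE, with a warning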
|
rsfitterem<-function(data,b,maxiter,ratetable,tol,bwin,p,cause,Nie){
pr.time<-proc.time()[3]
if (maxiter<1) stop("There must be at least one iteration run")
n<-nrow(data)
m <- p
dtimes <- which(data$stat==1)
td <- data$Y[dtimes]
ntd <- length(td)
utimes <- which(c(1,diff(td))!=0)
utd <- td[utimes]
nutd <- length(utd)
udtimes <- dtimes[utimes]
razteg <- function(x){
n <- length(x)
repu <- rep(1,n)
repu[x==1] <- 0
repu <- rev(cumsum(rev(repu)))
repu <- repu[x==1]
repu <- -diff(c(repu,0))+1
if(sum(repu)!=n)repu <- c(n-sum(repu),repu)
repu
}
rutd <- rep(0,ntd)
rutd[utimes] <- 1
rutd <- razteg(rutd)
rtd <- razteg(data$stat)
a <- data$a[data$stat==1]
if(bwin[1]!=0){
nt4 <- c(1,ceiling(c(nutd*.25,nutd/2,nutd*.75,nutd)))
if(missing(bwin))bwin <- rep(1,4)
else bwin <- rep(bwin,4)
for(it in 1:4){
bwin[it] <- bwin[it]*max(diff(utd[nt4[it]:nt4[it+1]]))
}
while(utd[nt4[2]]<bwin[1]){
nt4 <- nt4[-2]
if(length(nt4)==1)break
}
krn <- kernerleftch(utd,bwin,nt4)
}
if(p>0){
whtemp <- data$stat==1&cause==2
dataded <- data[data$stat==1&cause==2,]
datacens <- data[data$stat==0|cause<2,]
datacens$cause <- cause[data$stat==0|cause<2]*data$stat[data$stat==0|cause<2]
databig <- lapply(dataded, rep, 2)
databig <- do.call("data.frame", databig)
databig$cause <- rep(2,nrow(databig))
nded <- nrow(databig)
databig$cens <- c(rep(1,nded/2),rep(0,nded/2))
datacens$cens <- rep(0,nrow(datacens))
datacens$cens[datacens$cause<2] <- datacens$cause[datacens$cause<2]
names(datacens) <- names(databig)
databig <- rbind(databig,datacens)
cause <- cause[data$stat==1]
fk <- (attributes(ratetable)$factor != 1)
nfk <- length(fk)
varstart <- 3+nfk+1
varstop <- 3+nfk+m
xmat <- as.matrix(data[,varstart:varstop])
ebx <- as.vector(exp(xmat%*%b))
modmat <- as.matrix(databig[,varstart:varstop])
varnames <- names(data)[varstart:varstop]
}
else{
cause <- cause[data$stat==1]
ebx <- rep(1,n)
}
starter <- sort(data$start)
starter1<-c(starter[1],starter[-length(starter)])
index <- c(TRUE,(starter!=starter1)[-1])
starter <- starter[index]
val1 <- apply(matrix(starter,ncol=1),1,function(x,Y)sum(x>=Y),data$Y)
val1 <- c(val1[1],diff(val1),length(data$Y)-val1[length(val1)])
eb <- ebx[data$stat==1]
s0 <- cumsum((ebx)[n:1])[n:1]
ebx.st <- ebx[order(data$start)]
s0.st <- ((cumsum(ebx.st[n:1]))[n:1])[index]
s0.st <- rep(c(s0.st,0),val1)
s0 <- s0 - s0.st
s0 <- s0[udtimes]
start <- data$start
if(any(start!=0)){
wstart <- rep(NA,n)
ustart <- unique(start[start!=0])
for(its in ustart){
wstart[start==its] <- min(which(data$Y==its))
}
}
difft <- c(data$Y[data$stat==1][1],diff(td))
difft <- difftu <- difft[difft!=0]
difft <- rep(difft,rutd)
a0 <- a*difft
if(sum(Nie==.5)!=0)maxit0 <- maxiter
else maxit0<- maxiter - 3
for(i in 1:maxit0){
nietemp <- rep(1:nutd,rutd)
Nies <- as.vector(by(Nie,nietemp,sum))
lam0u <- lam0 <- Nies/s0
if(bwin[1]!=0)lam0s <- krn%*%lam0
else lam0s <- lam0/difftu
lam0s <- rep(lam0s,rutd)
Nie[cause==2] <- as.vector(lam0s*eb/(a+lam0s*eb))[cause==2]
}
if(maxit0!=maxiter & i==maxit0) i <- maxiter
Lam0 <- cumsum(lam0)
Lam0 <- rep(Lam0,rutd)
if(data$stat[1]==0) Lam0 <- c(0,Lam0)
Lam0 <- rep(Lam0,rtd)
if(any(start!=0))Lam0[start!=0] <- Lam0[start!=0] - Lam0[wstart[start!=0]]
lam0 <- rep(lam0,rutd)
likely0 <- sum(log(a0 + lam0*eb)) - sum(data$ds + Lam0*ebx)
likely <- likely0
tempind <- Nie<=0|Nie>=1
if(any(tempind)){
if(any(Nie<=0))Nie[Nie<=0] <- tol
if(any(Nie>=1))Nie[Nie>=1] <- 1-tol
}
if(p>0)databig$wei <- c(Nie[cause==2],1-Nie[cause==2],rep(1,nrow(datacens)))
if(maxiter>=1&p!=0){
for(i in 1:maxiter){
if(p>0){
b00<-b
if(i==1)fit <- coxph(Surv(start,Y,cens)~modmat,data=databig,weights=databig$wei,init=b00,x=TRUE,iter.max=maxiter)
else fit <- coxph(Surv(start,Y,cens)~modmat,data=databig,weights=databig$wei,x=TRUE,iter.max=maxiter)
if(any(is.na(fit$coeff))) stop("X matrix deemed to be singular, variable ",which(is.na(fit$coeff)))
b <- fit$coeff
ebx <- as.vector(exp(xmat%*%b))
}
else ebx <- rep(1,n)
eb <- ebx[data$stat==1]
s0 <- cumsum((ebx)[n:1])[n:1]
ebx.st <- ebx[order(data$start)]
s0.st <- ((cumsum(ebx.st[n:1]))[n:1])[index]
s0.st <- rep(c(s0.st,0),val1)
s0 <- s0 - s0.st
nietemp <- rep(1:nutd,rutd)
Nies <- as.vector(by(Nie,nietemp,sum))
s0 <- s0[udtimes]
lam0u <- lam0 <- Nies/s0
Lam0 <- cumsum(lam0)
Lam0 <- rep(Lam0,rutd)
if(data$stat[1]==0) Lam0 <- c(0,Lam0)
Lam0 <- rep(Lam0,rtd)
if(any(start!=0))Lam0[start!=0] <- Lam0[start!=0] - Lam0[wstart[start!=0]]
if(bwin[1]!=0)lam0s <- krn%*%lam0
else lam0s <- lam0/difft
lam0s <- rep(lam0s,rutd)
Nie[cause==2] <- as.vector(lam0s*eb/(a+lam0s*eb))[cause==2]
lam0 <- rep(lam0,rutd)
likely <- sum(log(a0 + lam0*eb)) - sum(data$ds + Lam0*ebx)
if(p>0){
tempind <- Nie<=0|Nie>=1
if(any(tempind)){
if(any(Nie<=0))Nie[Nie<=0] <- tol
if(any(Nie>=1))Nie[Nie>=1] <- 1-tol
}
if(nded==0) break()
databig$wei[1:nded] <- c(Nie[cause==2],1-Nie[cause==2])
bd <- abs(b-b00)
if(max(bd)< tol) break()
}
}
}
iter <- i
if(p>0){
if(nded!=0){
resi <- resid(fit,type="schoenfeld")
if(!is.null(dim(resi)))resi <- resi[1:(nded/2),]
else resi <- resi[1:(nded/2)]
swei <- fit$weights[1:(nded/2)]
if(is.null(dim(resi))) fishem <- sum((resi^2*swei*(1-swei)))
else {
fishem <- apply(resi,1,function(x)outer(x,x))
fishem <- t(t(fishem)*swei*(1-swei))
fishem <- matrix(apply(fishem,1,sum),ncol=m)
}
}
else fishem <- 0
fishcox <- solve(fit$var)
fisher <- fishcox - fishem
fit$var <- solve(fisher)
names(fit$coefficients)<-varnames
fit$lambda0 <- lam0s
}
else fit <- list(lambda0 = lam0s)
fit$lambda0 <- fit$lambda0[utimes]
fit$Lambda0 <- Lam0[udtimes]
fit$times <- utd
fit$Nie <- Nie
fit$bwin <- bwin
fit$iter <- i
class(fit) <- c("rsadd",class(fit))
fit$loglik <- c(likely0,likely)
fit$lam0.ns <- lam0u
fit
}
em <- function (rform, init, control, bwin)
{
data <- rform$data
n <- nrow(data)
p <- rform$m
id <- order(data$Y)
rform$cause <- rform$cause[id]
data <- data[id, ]
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
nev <- length(data$Y[data$stat == 1])
data$a <- rep(NA, n)
xx <- exp.prep(data[, 4:(nfk + 3),drop=FALSE], data$Y - data$start, rform$ratetable)
data$ds <- -log(xx)
data1 <- data
data1[, 4:(nfk + 3)] <- data[, 4:(nfk + 3)] + data$Y %*% t(fk)
xx <- exp.prep(data1[data1$stat == 1, 4:(nfk + 3),drop=FALSE], 1, rform$ratetable)
data$a[data$stat == 1] <- -log(xx)
if (p > 0) {
if (!missing(init) && !is.null(init)) {
if (length(init) != p)
stop("Wrong length for initial values")
}
else init <- rep(0, p)
beta <- matrix(init, p, 1)
}
pr.time<-proc.time()[3]
Nie <- rep(.5,sum(data$stat==1))
Nie[rform$cause[data$stat==1]<2] <- rform$cause[data$stat==1][rform$cause[data$stat==1]<2]
varstart <- 3+nfk+1
varstop <- 3+nfk+p
if(missing(bwin))bwin <- -1
if(bwin<0){
if(p>0)data1 <- data[,-c(varstart:varstop)]
else data1 <- data
nfk <- length(attributes(rform$ratetable)$dimid)
names(data)[4:(3+nfk)] <- attributes(rform$ratetable)$dimid
expe <- rs.surv(Surv(Y,stat)~1,data,ratetable=rform$ratetable,method="ederer2")
esurv <- -log(expe$surv[expe$n.event!=0])
if(esurv[length(esurv)]==Inf)esurv[length(esurv)] <- esurv[length(esurv)-1]
x <- seq(.1,3,length=5)
dif <- rep(NA,5)
options(warn=-1)
diter <- max(round(max(data$Y)/365.241),3)  # 365.241 days/year as used elsewhere; original had 356.24, apparently a typo
for(it in 1:5){
fit <- rsfitterem(data1,NULL,diter,rform$ratetable,control$epsilon,x[it],0,rform$cause,Nie)
dif[it] <- sum((esurv-fit$Lambda0)^2)
}
wh <- which.min(dif)
if(wh==1)x <- seq(x[wh],x[wh+1]-.1,length=5)
else if(wh==5)x <- c(x, max(data$Y)/ max(diff(data$Y)))
if(wh!=1)
x <- seq(x[wh-1]+.1,x[wh+1]-.1,length=5)
dif <- rep(NA,5)
for(it in 1:5){
fit <- rsfitterem(data1,NULL,diter,rform$ratetable,control$epsilon,x[it],0,rform$cause,Nie)
dif[it] <- sum((esurv-fit$Lambda0)^2)
}
options(warn=0)
Nie <- fit$Nie
bwin <- x[which.min(dif)]
}
fit <- rsfitterem(data, beta, control$maxit, rform$ratetable,
control$epsilon, bwin, p, rform$cause,Nie)
Nie <- rep(0,nrow(data))
Nie[data$stat==1] <- fit$Nie
fit$Nie <- Nie[order(id)]
fit$bwin <- list(bwin=fit$bwin,bwinfac=bwin)
fit
}
rsadd <- function (formula = formula(data), data = parent.frame(), ratetable = relsurv::slopop,
int, na.action, method = "max.lik", init, bwin, centered = FALSE,
cause, control, rmap, ...)
{
call <- match.call()
if (missing(control))
control <- glm.control(...)
if(!missing(cause)){
if (length(cause) != nrow(data))
stop("Length of cause does not match data dimensions")
data$cause <- cause
rform <- rformulate(formula, data, ratetable, na.action,
int, centered, cause)
}
else{
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
rform <- rformulate(formula,data, ratetable, na.action, rmap, int, centered)
}
if (method == "EM") {
if (!missing(int)) {
if (length(int) > 1 | any(int <= 0))
stop("Invalid value of 'int'")
}
}
else {
if (missing(int))
int <- c(0,ceiling(max(rform$Y/365.241)))
if (length(int) == 1) {
if (int <= 0)
stop("The value of 'int' must be positive ")
int <- 0:int
}
else if (int[1] != 0)
stop("The first interval in 'int' must start with 0")
}
method <- match.arg(method,c("glm.bin","glm.poi","max.lik","EM"))
if (method == "glm.bin" | method == "glm.poi")
fit <- glmxp(rform = rform, interval = int, method = method,
control = control)
else if (method == "max.lik")
fit <- maxlik(rform = rform, interval = int, init = init,
control = control)
else if (method == "EM")
fit <- em(rform, init, control, bwin)
fit$call <- call
fit$formula <- formula
fit$data <- rform$data
fit$ratetable <- rform$ratetable
fit$n <- nrow(rform$data)
if (length(rform$na.action))
fit$na.action <- rform$na.action
fit$y <- rform$Y.surv
fit$method <- method
if (method == "EM") {
if (!missing(int))
fit$int <- int
else fit$int <- ceiling(max(rform$Y[rform$status == 1])/365.241)
fit$terms <- rform$Terms
if(centered)fit$mvalue <- rform$mvalue
}
if (method == "max.lik") {
fit$terms <- rform$Terms
}
if (rform$m > 0)
fit$linear.predictors <- as.matrix(rform$X) %*% fit$coef[1:ncol(rform$X)]
fit
}
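# Illustrative call (a sketch, not run; 'rdata' is the example data set
# assumed to ship with the package, 'slopop' the Slovene population
# ratetable, with age recorded in years):
# fit <- rsadd(Surv(time, cens) ~ sex + age,
#              rmap = list(age = age * 365.241),
#              data = rdata, ratetable = slopop, int = 5)
# summary(fit)
# maxlik: internal workhorse for method = "max.lik": splits the follow-up
# into the 'int' intervals and maximises the joint likelihood of covariate
# effects and interval-specific baseline excess hazards.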
maxlik <- function (rform, interval, subset, init, control)
{
data <- rform$data
max.time <- max(data$Y)/365.241
if (max.time < max(interval))
interval <- interval[1:(sum(max.time > interval) + 1)]
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
data <- cbind(data, offset = rform$offset)
data <- survsplit(data, cut = interval[-1] * 365.241, end = "Y",
event = "stat", start = "start", episode = "epi", interval = interval)
del <- which(data$start==data$Y)
if(length(del)) data <- data[-del,]
offset <- data$offset
data$offset <- NULL
d.int <- diff(interval)
data[, 4:(nfk + 3)] <- data[, 4:(nfk + 3)] + data$start %*%
t(fk)
data$lambda <- rep(0, nrow(data))
nsk <- nrow(data[data$stat == 1, ])
xx <- exp.prep(data[data$stat == 1, 4:(nfk + 3),drop=FALSE] + (data[data$stat ==
1, ]$Y - data[data$stat == 1, ]$start) %*% t(fk), 1, rform$ratetable)
data$lambda[data$stat == 1] <- -log(xx) * 365.241
xx <- exp.prep(data[, 4:(nfk + 3),drop=FALSE], data$Y - data$start, rform$ratetable)
data$epi <- NULL
data$ds <- -log(xx)
data$Y <- data$Y/365.241
data$start <- data$start/365.241
data <- data[, -(4:(3 + nfk))]
intn <- length(interval[-1])
m <- rform$m
p <- m + intn
if (!missing(init) && !is.null(init)) {
if (length(init) != p)
stop("Wrong length for inital values")
}
else init <- rep(0, p)
if(m>0){
init0 <- init[-(1:m)]
data1 <- data[,-(4:(3+m))]
}
else{
init0 <- init
data1 <- data
}
fit0 <- lik.fit(data1, 0, intn, init0, control, offset)
if(m>0){
init[-(1:m)] <- fit0$coef
fit <- lik.fit(data, m, intn, init, control, offset)
}
else fit <- fit0
fit$int <- interval
class(fit) <- "rsadd"
fit$times <- fit$int*365.241
fit$Lambda0 <- cumsum(c(0, exp(fit$coef[(m+1):p])*diff(fit$int) ))
fit
}
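# lik.fit: maximum likelihood estimation for the additive model with a
# piecewise constant baseline excess hazard; the Newton-Raphson iterations
# are delegated to mlfit, and non-convergence triggers a warning that points
# at intervals or covariate values with few events.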
lik.fit <- function (data, m, intn, init, control, offset)
{
n <- dim(data)[1]
varpos <- 4:(3 + m + intn)
x <- data[, varpos]
varnames <- names(data)[varpos]
lbs <- names(x)
x <- as.matrix(x)
p <- length(varpos)
d <- data$stat
ds <- data$ds
h <- data$lambda
y <- data$Y - data$start
maxiter <- control$maxit
if (!missing(init) && !is.null(init)) {
if (length(init) != p)
stop("Wrong length for inital values")
}
else init <- rep(0, p)
b <- matrix(init, p, 1)
b0 <- b
fit <- mlfit(b, p, x, offset, d, h, ds, y, maxiter, control$epsilon)
if (maxiter > 1 & fit$nit >= maxiter) {
values <- apply(data[data$stat==1,varpos,drop=FALSE],2,sum)
problem <- which.min(values)
outmes <- "Ran out of iterations and did not converge"
if(values[problem]==0)tzero <- ""
else tzero <- "only "
if(values[problem]<5){
if(!is.na(strsplit(names(values)[problem],"fu")[[1]][2]))outmes <- paste(outmes, "\n This may be due to the fact that there are ",tzero, values[problem], " events on interval",strsplit(names(values)[problem],"fu")[[1]][2],"\n You can use the 'int' argument to change the follow-up intervals in which the baseline excess hazard is assumed constant",sep="")
else outmes <- paste(outmes, "\n This may be due to the fact that there are ",tzero, values[problem], " events for covariate value ",names(values)[problem],sep="")
}
warning(outmes)
}
b <- as.vector(fit$b)
names(b) <- varnames
fit <- list(coefficients = b, var = -solve(fit$sd), iter = fit$nit,
loglik = fit$loglik)
fit
}
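# survsplit: split each observation into one row per follow-up interval at
# the given cut points, updating the start/stop times and the event
# indicator and appending 'fu' indicator columns for the follow-up intervals.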
survsplit <- function (data, cut, end, event, start, id = NULL, zero = 0,
episode = NULL, interval = NULL)
{
ntimes <- length(cut)
n <- nrow(data)
p <- ncol(data)
if (length(interval) > 0) {
ntimes <- ntimes - 1
sttime <- c(rep(0, n), rep(cut[-length(cut)], each = n))
endtime <- rep(cut, each = n)
}
else {
endtime <- rep(c(cut, Inf), each = n)
sttime <- c(rep(0, n), rep(cut, each = n))
}
newdata <- lapply(data, rep, ntimes + 1)
eventtime <- newdata[[end]]
if (start %in% names(data))
starttime <- newdata[[start]]
else starttime <- rep(zero, length = (ntimes + 1) * n)
starttime <- pmax(sttime, starttime)
epi <- rep(0:ntimes, each = n)
if (length(interval) > 0)
status <- ifelse(eventtime <= endtime & eventtime >=
starttime, newdata[[event]], 0)
else status <- ifelse(eventtime <= endtime & eventtime >
starttime, newdata[[event]], 0)
endtime <- pmin(endtime, eventtime)
if (length(interval) > 0)
drop <- (starttime > endtime) | (starttime == endtime &
status == 0)
else drop <- starttime >= endtime
newdata <- do.call("data.frame", newdata)
newdata <- newdata[!drop, ]
newdata[, start] <- starttime[!drop]
newdata[, end] <- endtime[!drop]
newdata[, event] <- status[!drop]
if (!is.null(id))
newdata[, id] <- rep(rownames(data), ntimes + 1)[!drop]
fu <- NULL
if (length(interval) > 2) {
for (it in 1:length(interval[-1])) {
drop1 <- sum(!drop[1:(it * n - n)])
drop2 <- sum(!drop[(it * n - n + 1):(it * n)])
drop3 <- sum(!drop[(it * n + 1):(length(interval[-1]) *
n)])
if (it == 1)
fu <- cbind(fu, c(rep(1, drop2), rep(0, drop3)))
else if (it == length(interval[-1]))
fu <- cbind(fu, c(rep(0, drop1), rep(1, drop2)))
else fu <- cbind(fu, c(rep(0, drop1), rep(1, drop2),
rep(0, drop3)))
}
fu <- as.data.frame(fu)
names(fu) <- c(paste("fu [", interval[-length(interval)],
",", interval[-1], ")", sep = ""))
newdata <- cbind(newdata, fu)
}
else if (length(interval) == 2) {
fu <- rep(1, sum(!drop))
newdata <- cbind(newdata, fu)
names(newdata)[ncol(newdata)] <- paste("fu [", interval[1],
",", interval[2], "]", sep = "")
}
if (!is.null(episode))
newdata[, episode] <- epi[!drop]
newdata
}
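# glmxp: grouped-data GLM fits. Subjects are grouped by covariate values and
# follow-up intervals; "glm.bin" fits the Hakulinen-Tenkanen binomial model
# through a modified cloglog-type link, "glm.poi" a Poisson model for the
# excess number of deaths with log person-years as offset.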
glmxp <- function (rform, data, interval, method, control)
{
if (rform$m == 1)
g <- as.integer(as.factor(rform$X[[1]]))
else if (rform$m > 1) {
gvar <- NULL
for (i in 1:rform$m) {
gvar <- append(gvar, rform$X[i])
}
tabgr <- as.data.frame(table(gvar))
tabgr <- tabgr[, 1:rform$m]
n.groups <- dim(tabgr)[1]
mat <- do.call("data.frame", gvar)
names(mat) <- names(tabgr)
tabgr <- cbind(tabgr, g = as.numeric(row.names(tabgr)))
mat <- cbind(mat, id = 1:rform$n)
mrg <- merge(tabgr, mat)
g <- mrg[order(mrg$id), rform$m + 1]
}
else g <- rep(1, rform$n)
vg <- function(X) {
n <- dim(X)[1]
w <- sum((X$event == 0) & (X$fin == 1) & (X$y != 1))
nd <- sum((X$event == 1) & (X$fin == 1))
ps <- exp.prep(X[, 4:(nfk + 3),drop=FALSE], t.int, rform$ratetable)
ld <- n - w/2
lny <- log(sum(X$y))
k <- t.int/365.241
dstar <- sum(-log(ps)/k * X$y)
ps <- mean(ps)
if (rform$m == 0)
data.rest <- X[1, 7 + nfk + rform$m, drop = FALSE]
else data.rest <- X[1, c((3 + nfk + 1):(3 + nfk + rform$m),
7 + nfk + rform$m)]
cbind(nd = nd, ld = ld, ps = ps, lny = lny, dstar = dstar,
k = k, data.rest)
}
nint <- length(interval)
if (nint < 2)
stop("Illegal interval value")
meje <- interval
my.fun <- function(x) {
if (x > 1) {
x.t <- rep(1, floor(x))
if (x - floor(x) > 0)
x.t <- c(x.t, x - floor(x))
x.t
}
else x
}
int <- apply(matrix(diff(interval), ncol = 1), 1, my.fun)
if (is.list(int))
int <- c(0, cumsum(do.call("c", int)))
else int <- c(0, cumsum(int))
int <- int * 365.241
nint <- length(int)
X <- cbind(rform$data, grupa = g)
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
Z <- X[X$start >= int[2], ]
nz <- dim(Z)[1]
Z$fin <- rep(0, nz)
Z$event <- rep(0, nz)
Z$fu <- rep(0, nz)
Z$y <- rep(0, nz)
Z$origstart <- Z$start
Z$xind <- rep(0, nz)
if (nrow(Z) > 0)
Z[, 4:(nfk + 3)] <- Z[, 4:(nfk + 3)] + matrix(Z$start,
ncol = nfk, byrow = FALSE, nrow = nrow(Z)) * matrix(fk,
ncol = nfk, byrow = TRUE, nrow = nrow(Z))
X <- X[X$start < int[2], ]
X$fin <- (X$Y <= int[2])
X$event <- X$fin * X$stat
ford <- eval(substitute(paste("[", a, ",", b, "]", sep = ""),
list(a = meje[1], b = meje[2])))
X$fu <- rep(ford, rform$n - nz)
t.int <- int[2] - int[1]
X$y <- (pmin(X$Y, int[2]) - X$start)/365.241
X$origstart <- X$start
X$xind <- rep(1, nrow(X))
gr1 <- by(X, X$grupa, vg)
grm1 <- do.call("rbind", gr1)
X <- X[X$fin == 0, ]
X$start <- rep(int[2], dim(X)[1])
X <- rbind(X, Z[Z$start < int[3], ])
Z <- Z[Z$start >= int[3], ]
temp <- 0
if (nint > 2) {
for (i in 3:nint) {
ni <- dim(X)[1]
if (ni == 0) {
temp <- 1
break
}
X$fin <- X$Y <= int[i]
X$event <- X$fin * X$stat
l <- sum(int[i - 1] >= meje * 365.241)
if(l==1)
ftemp <- eval(substitute(paste("[", a, ",", b, "]", sep = ""),
list(a = meje[l], b = meje[l + 1])))
else
ftemp <- eval(substitute(paste("(", a, ",", b, "]", sep = ""),
list(a = meje[l], b = meje[l + 1])))
ford <- c(ford, ftemp)
X$fu <- rep(ford[i - 1], ni)
t.int <- int[i] - int[i - 1]
index <- X$origstart < int[i - 1]
index1 <- as.logical(X$xind)
if (sum(index) > 0)
X[index, 4:(nfk + 3)] <- X[index, 4:(nfk + 3)] +
matrix(fk * t.int, ncol = nfk, byrow = TRUE,
nrow = sum(index))
X$xind <- rep(1, nrow(X))
X$y <- (pmin(X$Y, int[i]) - X$start)/365.241
gr1 <- by(X, X$grupa, vg)
grm1 <- rbind(grm1, do.call("rbind", gr1))
X <- X[X$fin == 0, ]
X$start <- rep(int[i], dim(X)[1])
if (i == nint)
break
X <- rbind(X, Z[Z$start < int[i + 1], ])
X <- X[X$start != X$Y, ]
Z <- Z[Z$start >= int[i + 1], ]
}
l <- sum(int[i - temp] > meje * 365.241)
interval <- meje[1:(l + 1)]
}
else interval <- meje[1:2]
grm1$fu <- factor(grm1$fu, levels = unique(ford))
if (method == "glm.bin") {
ht <- binomial(link = cloglog)
ht$link <- "Hakulinen-Tenkanen relative survival model"
ht$linkfun <- function(mu) log(-log((1 - mu)/ps))
ht$linkinv <- function(eta) 1 - exp(-exp(eta)) * ps
ht$mu.eta <- function(eta) exp(eta) * exp(-exp(eta)) *
ps
.ps <- ps <- grm1$ps
if (any(grm1$ld - grm1$nd > grm1$ps * grm1$ld)) {
n <- sum(grm1$ld - grm1$nd > grm1$ps * grm1$ld)
g <- dim(grm1)[1]
warnme <- paste("Observed number of deaths is smaller than the expected in ",
n, "/", g, " groups of patients", sep = "")
}
else warnme <- ""
if (length(interval) == 2 & rform$m == 0)
stop("No groups can be formed")
if (length(interval) == 1 | length(table(grm1$fu)) ==
1)
grm1$fu <- as.integer(grm1$fu)
y <- ifelse(grm1$ld == 0, 0, grm1$nd/grm1$ld)
mustart <- (grm1$ld * y + 0.01)/(grm1$ld + 0.02)
mustart[(1 - mustart)/grm1$ps >= 1] <- grm1$ps[(1 - mustart)/grm1$ps >=
1] * 0.9
if (!length(rform$X))
local.ht <- glm(cbind(nd, ld - nd) ~ -1 + fu + offset(log(k)),
data = grm1, family = ht,mustart=mustart)
else {
xmat <- as.matrix(grm1[, 7:(ncol(grm1) - 1)])
local.ht <- glm(cbind(nd, ld - nd) ~ -1 + xmat +
fu + offset(log(k)), data = grm1, family = ht,mustart=mustart)
}
names(local.ht[[1]]) <- c(names(rform$X), paste("fu",
levels(grm1$fu)))
}
else if (method == "glm.poi") {
pot <- poisson()
pot$link <- "glm relative survival model with Poisson error"
pot$linkfun <- function(mu) log(mu - dstar)
pot$linkinv <- function(eta) dstar + exp(eta)
if (any(grm1$nd - grm1$dstar < 0)) {
pot$initialize <- expression({
if (any(y < 0)) stop(paste("Negative values not allowed for",
"the Poisson family"))
n <- rep.int(1, nobs)
})
}
if (any(grm1$nd - grm1$dstar < 0)) {
n <- sum(grm1$nd - grm1$dstar < 0)
g <- dim(grm1)[1]
warnme <- paste("Observed number of deaths is smaller than the expected in ",
n, "/", g, " groups of patients", sep = "")
}
else warnme <- ""
dstar <- grm1$dstar
if (length(interval) == 2 & rform$m == 0)
stop("No groups can be formed")
if (length(interval) == 1 | length(table(grm1$fu)) ==
1)
grm1$fu <- as.integer(grm1$fu)
mustart <- pmax(grm1$nd, grm1$dstar) + 0.1
if (!length(rform$X))
local.ht <- glm(nd ~ -1 + fu, data = grm1, family = pot,
offset = grm1$lny,mustart=mustart)
else {
xmat <- as.matrix(grm1[, 7:(ncol(grm1) - 1)])
local.ht <- glm(nd ~ -1 + xmat + fu, data = grm1,
family = pot, offset = grm1$lny,mustart=mustart)
}
names(local.ht[[1]]) <- c(names(rform$X), paste("fu",
levels(grm1$fu)))
}
else stop(paste("Method '", method, "' not a valid method",
sep = ""))
class(local.ht) <- c("rsadd", class(local.ht))
local.ht$warnme <- warnme
local.ht$int <- interval
local.ht$groups <- local.ht$data
return(local.ht)
}
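# residuals.rsadd: Schoenfeld-type residuals of a fitted additive relative
# survival model, plus the variance arrays consumed by the goodness-of-fit
# routines rs.br and rs.zph.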
residuals.rsadd <- function (object, type = "schoenfeld", ...)
{
data <- object$data[order(object$data$Y), ]
ratetable <- object$ratetable
beta <- object$coef
start <- data[, 1]
stop <- data[, 2]
event <- data[, 3]
fk <- (attributes(ratetable)$factor != 1)
nfk <- length(fk)
n <- nrow(data)
scale <- 1
if (object$method == "EM")
scale <- 365.241
m <- ncol(data)
rem <- m - nfk - 3
interval <- object$int
int <- ceiling(max(interval))
R <- data[, 4:(nfk + 3)]
lp <- matrix(-log(exp.prep(as.matrix(R), 365.241, object$ratetable))/scale, ncol = 1)
fu <- NULL
if (object$method == "EM") {
death.time <- stop[event == 1]
for (it in 1:int) {
fu <- as.data.frame(cbind(fu, as.numeric(death.time/365.241 <
it & (death.time/365.241) >= (it - 1))))
}
if(length(death.time)!=length(unique(death.time))){
utimes <- which(c(1,diff(death.time))!=0)
razteg <- function(x){
n <- length(x)
repu <- rep(1,n)
repu[x==1] <- 0
repu <- rev(cumsum(rev(repu)))
repu <- repu[x==1]
repu <- -diff(c(repu,0))+1
if(sum(repu)!=n)repu <- c(n-sum(repu),repu)
repu
}
rutd <- rep(0,length(death.time))
rutd[utimes] <- 1
rutd <- razteg(rutd)
}
else rutd <- rep(1,length(death.time))
lambda0 <- rep(object$lambda0,rutd)
}
else {
pon <- NULL
for (i in 1:(length(interval) - 1)) {
width <- ceiling(interval[i + 1]) - floor(interval[i])
lo <- interval[i]
hi <- min(interval[i + 1], floor(interval[i]) + 1)
for (j in 1:width) {
fu <- as.data.frame(cbind(fu, as.numeric(stop/365.241 <
hi & stop/365.241 >= lo)))
names(fu)[ncol(fu)] <- paste("fu", lo, "-", hi,
sep = "")
if (j == width) {
pon <- c(pon, sum(fu[event == 1, (ncol(fu) -
width + 1):ncol(fu)]))
break
}
else {
lo <- hi
hi <- min(interval[i + 1], floor(interval[i]) +
1 + j)
}
}
}
m <- ncol(data)
data <- cbind(data, fu)
rem <- m - nfk - 3
lambda0 <- rep(exp(beta[rem + 1:(length(interval) - 1)]),
pon)
fu <- fu[event == 1, , drop = FALSE]
beta <- beta[1:rem]
}
if (int >= 2) {
for (j in 2:int) {
R <- R + matrix(fk * 365.241, ncol = ncol(R), byrow = TRUE,
nrow = n)
xx <- exp.prep(R, 365.241, object$ratetable)
lp <- cbind(lp, -log(xx)/scale)
}
}
z <- as.matrix(data[, (4 + nfk):m])
out <- resid.com(start, stop, event, z, beta, lp, lambda0,
fu, n, rem, int, type)
out
}
resid.com <- function (start, stop, event, z, beta, lp, lambda0, fup, n, rem,
int, type)
{
le <- exp(z %*% beta)
olp <- if (int > 1)
apply(lp[n:1, ], 2, cumsum)[n:1, ]
else matrix(cumsum(lp[n:1])[n:1], ncol = 1)
ole <- cumsum(le[n:1])[n:1]
lp.st <- lp[order(start), , drop = FALSE]
le.st <- le[order(start), , drop = FALSE]
starter <- sort(start)
starter1 <- c(starter[1], starter[-length(starter)])
index <- c(TRUE, (starter != starter1)[-1])
starter <- starter[index]
val1 <- apply(matrix(starter, ncol = 1), 1, function(x, Y) sum(x >=
Y), stop)
val1 <- c(val1[1], diff(val1), length(stop) - val1[length(val1)])
olp.st <- (apply(lp.st[n:1, , drop = FALSE], 2, cumsum)[n:1,
, drop = FALSE])[index, , drop = FALSE]
olp.st <- apply(olp.st, 2, function(x) rep(c(x, 0), val1))
olp <- olp - olp.st
olp <- olp[event == 1, ]
olp <- apply(fup * olp, 1, sum)
ole.st <- cumsum(le.st[n:1])[n:1][index]
ole.st <- rep(c(ole.st, 0), val1)
ole <- ole - ole.st
ole <- ole[event == 1] * lambda0
s0 <- ole + olp
sc <- NULL
zb <- NULL
kzb <- NULL
f1 <- function(x) rep(mean(x), length(x))
f2 <- function(x) apply(x, 2, f1)
f3 <- function(x) apply(x, 1:2, f1)
ties <- length(unique(stop[event == 1])) != length(stop[event ==
1])
for (k in 1:rem) {
zlp <- apply((z[, k] * lp)[n:1, , drop = FALSE], 2, cumsum)[n:1,
, drop = FALSE]
zlp.st <- (apply((z[, k] * lp.st)[n:1, , drop = FALSE],
2, cumsum)[n:1, , drop = FALSE])[index, , drop = FALSE]
zlp.st <- apply(zlp.st, 2, function(x) rep(c(x, 0), val1))
zlp <- zlp - zlp.st
zlp <- zlp[event == 1, , drop = FALSE]
zlp <- apply(fup * zlp, 1, sum)
zle <- cumsum((z[, k] * le)[n:1])[n:1]
zle.st <- cumsum((z[, k] * le.st)[n:1])[n:1][index]
zle.st <- rep(c(zle.st, 0), val1)
zle <- zle - zle.st
zle <- zle[event == 1]
zle <- zle * lambda0
s1 <- zle + zlp
zb <- cbind(zb, s1/s0)
kzb <- cbind(kzb, zle/s0)
}
s1ties <- cbind(zb, kzb)
if (ties) {
s1ties <- by(s1ties, stop[event == 1], f2)
s1ties <- do.call("rbind", s1ties)
}
zb <- s1ties[, 1:rem, drop = FALSE]
kzb <- s1ties[, -(1:rem), drop = FALSE]
sc <- z[event == 1, , drop = FALSE] - zb
row.names(sc) <- stop[event == 1]
out.temp <- function(x) outer(x, x, FUN = "*")
krez <- rez <- array(matrix(NA, ncol = rem, nrow = rem),
dim = c(rem, rem, sum(event == 1)))
for (a in 1:rem) {
for (b in a:rem) {
zzlp <- apply((z[, a] * z[, b] * lp)[n:1, , drop = FALSE],
2, cumsum)[n:1, , drop = FALSE]
zzlp.st <- (apply((z[, a] * z[, b] * lp.st)[n:1,
, drop = FALSE], 2, cumsum)[n:1, , drop = FALSE])[index,
, drop = FALSE]
zzlp.st <- apply(zzlp.st, 2, function(x) rep(c(x,
0), val1))
zzlp <- zzlp - zzlp.st
zzlp <- zzlp[event == 1, , drop = FALSE]
zzlp <- apply(fup * zzlp, 1, sum)
zzle <- cumsum((z[, a] * z[, b] * le)[n:1])[n:1]
zzle.st <- cumsum((z[, a] * z[, b] * le.st)[n:1])[n:1][index]
zzle.st <- rep(c(zzle.st, 0), val1)
zzle <- zzle - zzle.st
zzle <- zzle[event == 1]
zzle <- zzle * lambda0
s2 <- zzlp + zzle
s20 <- s2/s0
ks20 <- zzle/s0
s2ties <- cbind(s20, ks20)
if (ties) {
s2ties <- by(s2ties, stop[event == 1], f2)
s2ties <- do.call("rbind", s2ties)
}
rez[a, b, ] <- rez[b, a, ] <- s2ties[, 1]
krez[a, b, ] <- krez[b, a, ] <- s2ties[, 2]
}
}
juhu <- apply(zb, 1, out.temp)
if (is.null(dim(juhu)))
juhu1 <- array(data = matrix(juhu, ncol = a), dim = c(a,
a, length(zb[, 1])))
else juhu1 <- array(data = apply(juhu, 2, matrix, ncol = a),
dim = c(a, a, length(zb[, 1])))
varr <- rez - juhu1
kjuhu <- apply(cbind(zb, kzb), 1, function(x) outer(x[1:rem],
x[-(1:rem)], FUN = "*"))
if (is.null(dim(kjuhu)))
kjuhu1 <- array(data = matrix(kjuhu, ncol = rem), dim = c(rem,
rem, length(zb[, 1])))
else kjuhu1 <- array(data = apply(kjuhu, 2, matrix, ncol = rem),
dim = c(rem, rem, length(zb[, 1])))
kvarr <- krez - kjuhu1
for (i in 1:dim(varr)[1]) varr[i, i, which(varr[i, i, ] <
0)] <- 0
for (i in 1:dim(kvarr)[1]) kvarr[i, i, which(kvarr[i, i,
] < 0)] <- 0
varr1 <- apply(varr, 1:2, sum)
kvarr1 <- apply(kvarr, 1:2, sum)
if (type == "schoenfeld")
out <- list(res = sc, varr1 = varr1, varr = varr, kvarr = kvarr,
kvarr1 = kvarr1)
out
}
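# rs.br: Brownian bridge based goodness-of-fit test for an rsadd (or coxph)
# fit, per covariate and optionally global; 'test' is either the maximal
# absolute deviation ("max") or a Cramer-von Mises type statistic ("cvm").
# Typical use (a sketch, not run): rs.br(fit) on a previously fitted model.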
rs.br <- function (fit, sc, rho = 0, test = "max", global = TRUE)
{
test <- match.arg(test,c("max","cvm"))
if (inherits(fit, "rsadd")) {
if (missing(sc))
sc <- resid(fit, "schoenfeld")
sresid <- sc$res
varr <- sc$varr
sresid <- as.matrix(sresid)
}
else {
coef <- fit$coef
options(warn = -1)
sc <- coxph.detail(fit)
options(warn = 0)
sresid <- sc$score
varr <- sc$imat
if (is.null(dim(varr)))
varr <- array(varr, dim = c(1, 1, length(varr)))
sresid <- as.matrix(sresid)
}
if (inherits(fit, "coxph")) {
if(is.null(fit$data)){
temp <- fit$y
class(temp) <- "matrix"
if(ncol(fit$y)==2)temp <- data.frame(rep(0,nrow(fit$y)),temp)
if(is.null(fit$x))stop("The coxph model should be called with x=TRUE argument")
fit$data <- data.frame(temp,fit$x)
names(fit$data)[1:3] <- c("start","Y","stat")
}
}
data <- fit$data[order(fit$data$Y), ]
time <- data$Y[data$stat == 1]
ties <- (length(unique(time)) != length(time))
keep <- 1:(ncol(sresid))
options(warn = -1)
scaled <- NULL
varnova <- NULL
if (ncol(sresid) == 1) {
varr <- varr[1, 1, ]
scaled <- sresid/sqrt(varr)
}
else { for (i in 1:ncol(sresid)) varnova <- cbind(varnova,varr[i,i,])
scaled <- sresid/sqrt(varnova)
}
options(warn = 0)
nvar <- ncol(sresid)
survfit <- getFromNamespace("survfit", "survival")
temp <- survfit(fit$y~1, type = "kaplan-meier")
n.risk <- temp$n.risk
n.time <- temp$time
if (temp$type == "right") {
cji <- matrix(fit$y, ncol = 2)
n.risk <- n.risk[match(cji[cji[, 2] == 1, 1], n.time)]
}
else {
cji <- matrix(fit$y, ncol = 3)
n.risk <- n.risk[match(cji[cji[, 3] == 1, 2], n.time)]
}
n.risk <- sort(n.risk, decreasing = TRUE)
varnames <- names(fit$coef)[keep]
u2 <- function(bb) {
n <- length(bb)
1/n * (sum(bb^2) - sum(bb)^2/n)
}
wc <- function(x, k = 1000) {
a <- 1
for (i in 1:k) a <- a + 2 * (-1)^i * exp(-2 * i^2 * pi^2 *
x)
a
}
brp <- function(x, n = 1000) {
a <- 1
for (i in 1:n) a <- a - 2 * (-1)^(i - 1) * exp(-2 * i^2 *
x^2)
a
}
global <- as.numeric(global & ncol(sresid) > 1)
table <- NULL
bbt <- as.list(1:(nvar + global))
for (i in 1:nvar) {
if (nvar != 1)
usable <- which(varr[i, i, ] > 1e-12)
else usable <- which(varr > 1e-12)
w <- (n.risk[usable])^rho
w <- w/sum(w)
if (nvar != 1) {
sci <- scaled[usable, i]
}
else sci <- scaled[usable]
if (ties) {
if (inherits(fit, "rsadd")) {
sci <- as.vector(by(sci, time[usable], function(x) sum(x)/sqrt(length(x))))
w <- as.vector(by(w, time[usable], sum))
}
else {
w <- w * as.vector(table(time))[usable]
w <- w/sum(w)
}
}
sci <- sci * sqrt(w)
timescale <- cumsum(w)
bm <- cumsum(sci)
bb <- bm - timescale * bm[length(bm)]
if (test == "max")
table <- rbind(table, c(max(abs(bb)), 1 - brp(max(abs(bb)))))
else if (test == "cvm")
table <- rbind(table, c(u2(bb), 1 - wc(u2(bb))))
bbt[[i]] <- cbind(timescale, bb)
}
if (inherits(fit, "rsadd")) {
beta <- fit$coef[1:(length(fit$coef) - length(fit$int) + 1)]
}
else beta <- fit$coef
if (global) {
qform <- function(matrix, vector) t(vector) %*% matrix %*%
vector
diagonal <- apply(varr, 3, diag)
sumdiag <- apply(diagonal, 2, sum)
usable <- which(sumdiag > 1e-12)
score <- t(beta) %*% t(sresid[usable, ])
varr <- varr[, , usable]
qf <- apply(varr, 3, qform, vector = beta)
w <- (n.risk[usable])^rho
w <- w/sum(w)
sci <- score/(qf)^0.5
if (ties) {
if (inherits(fit, "rsadd")) {
sci <- as.vector(by(t(sci), time[usable], function(x) sum(x)/sqrt(length(x))))
w <- as.vector(by(w, time[usable], sum))
}
else {
w <- w * as.vector(table(time))
w <- w/sum(w)
}
}
sci <- sci * sqrt(w)
timescale <- cumsum(w)
bm <- cumsum(sci)
bb <- bm - timescale * bm[length(bm)]
if (test == "max")
table <- rbind(table, c(max(abs(bb)), 1 - brp(max(abs(bb)))))
else if (test == "cvm")
table <- rbind(table, c(u2(bb), 1 - wc(u2(bb))))
bbt[[nvar + 1]] <- cbind(timescale, bb)
varnames <- c(varnames, "GLOBAL")
}
dimnames(table) <- list(varnames, c(test, "p"))
out <- list(table = table, bbt = bbt, rho = rho)
class(out) <- "rs.br"
out
}
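# rs.zph: scaled-residual diagnostic for time-varying covariate effects,
# analogous to survival::cox.zph; the returned points can be smoothed and
# plotted (see plot.rs.zph) to spot departures from a constant effect.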
rs.zph <- function (fit, sc, transform = "identity", var.type = "sum")
{
if (inherits(fit, "rsadd")) {
if (missing(sc))
sc <- resid(fit, "schoenfeld")
sresid <- sc$res
varr <- sc$kvarr
fvar <- solve(sc$kvarr1)
sresid <- as.matrix(sresid)
}
else {
coef <- fit$coef
options(warn = -1)
sc <- coxph.detail(fit)
options(warn = 0)
sresid <- as.matrix(resid(fit, "schoenfeld"))
varr <- sc$imat
fvar <- fit$var
}
data <- fit$data[order(fit$data$Y), ]
time <- data$Y
stat <- data$stat
if (!inherits(fit, "rsadd")) {
ties <- as.vector(table(time[stat==1]))
if(is.null(dim(varr))) varr <- rep(varr/ties,ties)
else{
varr <- apply(varr,1:2,function(x)rep(x/ties,ties))
varr <- aperm(varr,c(2,3,1))
}
}
if (inherits(fit, "rsadd"))
keep <- 1:(length(fit$coef) - length(fit$int) + 1)
else keep <- 1:length(fit$coef)
varnames <- names(fit$coef)[keep]
nvar <- length(varnames)
ndead <- length(sresid)/nvar
if (inherits(fit, "rsadd"))
times <- time[stat == 1]
else times <- sc$time
if (is.character(transform)) {
tname <- transform
ttimes <- switch(transform, identity = times, rank = rank(times),
log = log(times), km = {
fity <- Surv(time, stat)
temp <- survfit(fity~1)
t1 <- temp$surv[temp$n.event > 0]
t2 <- temp$n.event[temp$n.event > 0]
km <- rep(c(1, t1), c(t2, 0))
if (is.null(attr(sresid, "strata")))
1 - km
else (1 - km[sort.list(sort.list(times))])
}, stop("Unrecognized transform"))
}
else {
tname <- deparse(substitute(transform))
ttimes <- transform(times)
}
if (var.type == "each") {
invV <- apply(varr, 3, function(x) try(solve(x), silent = TRUE))
if (length(invV) == length(varr)){
if(!is.numeric(invV)){
usable <- rep(FALSE, dim(varr)[3])
options(warn=-1)
invV <- as.numeric(invV)
usable[1:(min(which(is.na(invV)))-1)] <- TRUE
invV <- invV[usable]
sresid <- sresid[usable,,drop=FALSE]
options(warn=0)
}
else usable <- rep(TRUE, dim(varr)[3])
}
else {
usable <- unlist(lapply(invV, is.matrix))
if (!any(usable))
stop("All the matrices are singular")
invV <- invV[usable]
sresid <- sresid[usable, , drop = FALSE]
}
di1 <- dim(varr)[1]
di3 <- sum(usable)
u <- array(data = matrix(unlist(invV), ncol = di1), dim = c(di1,
di1, di3))
uv <- cbind(matrix(u, ncol = di1, byrow = TRUE), as.vector(t(sresid)))
uv <- array(as.vector(t(uv)), dim = c(di1 + 1, di1, di3))
r2 <- t(apply(uv, 3, function(x) x[1:di1, ] %*% x[di1 +
1, ]))
r2 <- matrix(r2, ncol = di1)
whr2 <- apply(r2 < 100, 1, all)
usable <- as.logical(usable*whr2)
r2 <- r2[usable,,drop=FALSE]
u <- u[,,usable]
dimnames(r2) <- list(times[usable], varnames)
temp <- list(x = ttimes[usable], y = r2 + outer(rep(1,
sum(usable)), fit$coef[keep]), var = u,
transform = tname)
}
else if (var.type == "sum") {
xx <- ttimes - mean(ttimes)
r2 <- t(fvar %*% t(sresid) * ndead)
r2 <- as.matrix(r2)
dimnames(r2) <- list(times, varnames)
temp <- list(x = ttimes, y = r2 + outer(rep(1, ndead),
fit$coef[keep]), var = fvar, transform = tname)
}
else stop("Unknown 'var.type'")
class(temp) <- "rs.zph"
temp
}
plot.rs.zph <- function (x,resid = TRUE, df = 4, nsmo = 40, var, cex = 1, add = FALSE, col = 1,
lty = 1, xlab, ylab, xscale = 1, ...)
{
xx <- x$x
if(x$transform=="identity")xx <- xx/xscale
yy <- x$y
d <- nrow(yy)
df <- max(df)
nvar <- ncol(yy)
pred.x <- seq(from = min(xx), to = max(xx), length = nsmo)
temp <- c(pred.x, xx)
lmat <- splines::ns(temp, df = df, intercept = TRUE)
pmat <- lmat[1:nsmo, ]
xmat <- lmat[-(1:nsmo), ]
qmat <- qr(xmat)
if (missing(ylab))
ylab <- paste("Beta(t) for", dimnames(yy)[[2]])
if (missing(xlab))
xlab <- "Time"
if (missing(var))
var <- 1:nvar
else {
if (is.character(var))
var <- match(var, dimnames(yy)[[2]])
if (any(is.na(var)) || max(var) > nvar || min(var) <
1)
stop("Invalid variable requested")
}
if (x$transform == "log") {
xx <- exp(xx)
pred.x <- exp(pred.x)
}
else if (x$transform != "identity") {
xtime <- as.numeric(dimnames(yy)[[1]])/xscale
apr1 <- approx(xx, xtime, seq(min(xx), max(xx), length = 17)[2 *
(1:8)])
temp <- signif(apr1$y, 2)
apr2 <- approx(xtime, xx, temp)
xaxisval <- apr2$y
xaxislab <- rep("", 8)
for (i in 1:8) xaxislab[i] <- format(temp[i])
}
for (i in var) {
y <- yy[, i]
yhat <- pmat %*% qr.coef(qmat, y)
yr <- range(yhat, y)
if (!add) {
if (x$transform == "identity")
plot(range(xx), yr, type = "n", xlab = xlab, ylab = ylab[i],...)
else if (x$transform == "log")
plot(range(xx), yr, type = "n", xlab = xlab, ylab = ylab[i],log = "x", ...)
else {
plot(range(xx), yr, type = "n", xlab = xlab, ylab = ylab[i],axes = FALSE, ...)
axis(1, xaxisval, xaxislab)
axis(2)
box()
}
}
if (resid)
points(xx, y, cex = cex, col = col)
lines(pred.x, yhat, col = col, lty = lty)
}
}
plot.rs.br <- function (x, var, ylim = c(-2, 2), xlab, ylab, ...)
{
bbt <- x$bbt
par(ask = TRUE)
if (missing(var))
var <- 1:nrow(x$table)
ychange <- FALSE
if (missing(ylab))
ylab <- paste("Brownian bridge for", row.names(x$table))
else {
if (length(ylab) == 1 & nrow(x$table) > 1)
ylab <- rep(ylab, nrow(x$table))
}
if (missing(xlab))
xlab <- "Time"
for (i in var) {
timescale <- bbt[[i]][, 1]
bb <- bbt[[i]][, 2]
plot(c(0, timescale), c(0, bb), type = "l", ylim = ylim,
xlab = xlab, ylab = ylab[i], ...)
abline(h = 1.36, col = 2)
abline(h = 1.63, col = 2)
abline(h = -1.36, col = 2)
abline(h = -1.63, col = 2)
}
par(ask = FALSE)
}
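# Kernmatch: one-sided Epanechnikov-type kernel weight matrix used to smooth
# the baseline excess hazard, with a separate bandwidth for each block of
# times delimited by nt4.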
Kernmatch <- function (t, tv, b, tD, nt4)
{
kmat <- NULL
for (it in 1:(length(nt4) - 1)) {
kmat1 <- (outer(t[(nt4[it] + 1):nt4[it + 1]], tv, "-")/b[it])
kmat1 <- kmat1^(kmat1 >= 0)
kmat <- rbind(kmat, pmax(1 - kmat1^2, 0) * (1.5/b[it]))
}
kmat
}
kernerleftch <- function (td, b, nt4)
{
n <- length(td)
ttemp <- td[td >= b[1]]
ntemp <- length(ttemp)
if (ntemp == n)
nt4 <- c(0, nt4[-1])
else {
nfirst <- n - ntemp
nt4 <- c(0, 1:nfirst, nt4[-1])
b <- c(td[1:nfirst], b)
}
krn <- Kernmatch(td, td, b, max(td), nt4)
krn
}
invtime <- function (y = 0.1, age = 23011, sex = "male", year = 9497, scale = 1,
ratetable = relsurv::slopop, lower, upper)
{
if (!is.numeric(age))
stop("\"age\" must be numeric", call. = FALSE)
if (!is.numeric(y))
stop("\"y\" must be numeric", call. = FALSE)
if (!is.numeric(scale))
stop("\"scale\" must be numeric", call. = FALSE)
temp <- data.frame(age = age, sex = I(sex), year = year)
if (missing(lower)) {
if (!missing(upper))
stop("Argument \"lower\" is missing, with no default",
call. = FALSE)
nyears <- round((110 - age/365.241))
tab <- data.frame(age = rep(age, nyears), sex = I(rep(sex,
nyears)), year = rep(year, nyears))
vred <- 1 - survexp(c(0, 1:(nyears - 1)) * 365.241 ~ ratetable(age = age,
sex = sex, year = year), ratetable = ratetable, data = tab,
cohort = FALSE)
place <- sum(vred <= y)
if (place == 0)
lower <- 0
else lower <- floor((place - 1) * 365.241 - place)
upper <- ceiling(place * 365.241 + place)
}
else {
if (missing(upper))
stop("Argument \"upper\" is missing, with no default",
call. = FALSE)
if (!is.integer(lower))
lower <- floor(lower)
if (!is.integer(upper))
upper <- ceiling(upper)
if (upper <= lower)
stop("'upper' must be higher than 'lower'", call. = FALSE)
}
lower <- max(0, lower)
tab <- data.frame(age = rep(age, upper - lower + 1), sex = I(rep(sex,
upper - lower + 1)), year = rep(year, upper - lower +
1))
vred <- 1 - survexp((lower:upper) ~ ratetable(age = age,
sex = sex, year = year), ratetable = ratetable, data = tab,
cohort = FALSE)
place <- sum(vred <= y)
if (place == 0)
warning(paste("The event happened on or before day",
lower), call. = FALSE)
if (place == length(vred))
warning(paste("The event happened on or after day", upper),
call. = FALSE)
t <- (place + lower - 1)/scale
age <- round(age/365.241, 2)
return(list(age, sex, year, Y = y, T = t))
}
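# rsmul: fit the multiplicative (Andersen et al.) relative survival model as
# a coxph fit with the log population hazard as offset; method "mul" updates
# the demographic variables once a year, "mul1" at every ratetable cutpoint.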
rsmul <- function (formula = formula(data), data = parent.frame(), ratetable = relsurv::slopop,
int, na.action, init, method = "mul", control,rmap, ...)
{
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
rform <- rformulate(formula,data, ratetable, na.action,rmap,int)
U <- rform$data
if (missing(int))
int <- ceiling(max(rform$Y/365.241))
if(length(int)!=1)int <- max(int)
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
if (method == "mul") {
U <- survsplit(U, cut = (1:int) * 365.241, end = "Y",
event = "stat", start = "start", episode = "epi")
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
U[, 4:(nfk + 3)] <- U[, 4:(nfk + 3)] + 365.241 * (U$epi) %*%
t(fk)
nsk <- dim(U)[1]
xx <- exp.prep(U[, 4:(nfk + 3),drop=FALSE], 365.241, rform$ratetable)
lambda <- -log(xx)/365.241
}
else if (method == "mul1") {
U$id <- 1:dim(U)[1]
my.fun <- function(x, attcut, nfk, fk) {
intr <- NULL
for (i in 1:nfk) {
if (fk[i]) {
n1 <- max(findInterval(as.numeric(x[3 + i]) +
as.numeric(x[1]), attcut[[i]]) + 1, 2)
n2 <- findInterval(as.numeric(x[3 + i]) + as.numeric(x[2]),
attcut[[i]])
if (n2 > n1 & length(attcut[[i]]) > 1) {
if (n2 > length(attcut[[i]]))
n2 <- length(attcut[[i]])
intr <- c(intr, as.numeric(attcut[[i]][n1:n2]) -
as.numeric(x[3 + i]))
}
}
}
intr <- sort(unique(c(intr, as.numeric(x[2]))))
intr
}
attcut <- attributes(rform$ratetable)$cutpoints
intr <- apply(U[, 1:(3 + nfk)], 1, my.fun, attcut, nfk,
fk)
dolg <- unlist(lapply(intr, length))
newdata <- lapply(U, rep, dolg)
stoptime <- unlist(intr)
starttime <- c(-1, stoptime[-length(stoptime)])
first <- newdata$id != c(-1, newdata$id[-length(newdata$id)])
starttime[first] <- newdata$start[first]
last <- newdata$id != c(newdata$id[-1], -1)
event <- rep(0, length(newdata$id))
event[last] <- newdata$stat[last]
U <- do.call("data.frame", newdata)
U$start <- starttime
U$Y <- stoptime
U$stat <- event
U[, 4:(nfk + 3)] <- U[, 4:(nfk + 3)] + (U$start) %*%
t(fk)
nsk <- dim(U)[1]
xx <- exp.prep(U[, 4:(nfk + 3),drop=FALSE], 1, rform$ratetable)
lambda <- -log(xx)/1
}
else stop("'method' must be one of 'mul' or 'mul1'")
U$lambda <- log(lambda)
if (rform$m == 0)
fit <- coxph(Surv(start, Y, stat) ~ 1 + offset(lambda),
data = U, init = init, control = control, x = TRUE,
...)
else {
xmat <- as.matrix(U[, (3 + nfk + 1):(ncol(U) - 2)])
fit <- coxph(Surv(start, Y, stat) ~ xmat + offset(lambda),
data = U, init = init, control = control, x = TRUE,
...)
names(fit[[1]]) <- names(U)[(3 + nfk + 1):(ncol(U) -
2)]
}
class(fit) <- c("rsmul",class(fit))
fit$basehaz <- basehaz(fit)
fit$data <- rform$data
fit$call <- match.call()
fit$int <- int
if (length(rform$na.action))
fit$na.action <- rform$na.action
fit
}
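# rstrans: fit the transformation-based relative survival model: observed
# times are mapped through the expected (population) cumulative distribution
# and a Cox model is fitted on the transformed scale.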
rstrans <- function (formula = formula(data), data = parent.frame(), ratetable = relsurv::slopop,
int, na.action, init, control,rmap, ...)
{
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
rform <- rformulate(formula, data, ratetable, na.action, rmap, int)
if (missing(int))
int <- ceiling(max(rform$Y/365.241))
fk <- (attributes(rform$ratetable)$factor != 1)
nfk <- length(fk)
if (rform$type == "counting") {
start <- 1 - exp.prep(rform$R, rform$start, rform$ratetable)
}
else start <- rep(0, rform$n)
stop <- 1 - exp.prep(rform$R, rform$Y, rform$ratetable)
if(any(stop==0&rform$Y!=0))stop[stop==0&rform$Y!=0] <- .Machine$double.eps
if(length(int)!=1)int <- max(int)
data <- rform$data
stat <- rform$status
if (rform$m == 0) {
if (rform$type == "counting")
fit <- coxph(Surv(start, stop, stat) ~ 1,
init = init, control = control, x = TRUE, ...)
else fit <- coxph(Surv(stop, stat) ~ 1,
init = init, control = control, x = TRUE, ...)
}
else {
xmat <- as.matrix(data[, (4 + nfk):ncol(data)])
fit <- coxph(Surv(start, stop, stat) ~ xmat,
init = init, control = control, x = TRUE, ...)
names(fit[[1]]) <- names(rform$X)
}
fit$call <- match.call()
if (length(rform$na.action))
fit$na.action <- rform$na.action
data$start <- start
data$Y <- stop
fit$data <- data
fit$int <- int
return(fit)
}
transrate <- function (men, women, yearlim, int.length = 1)
{
if (any(dim(men) != dim(women)))
stop("The men and women matrices must be of the same size. \n In case of missing values at the end carry the last value forward")
if ((yearlim[2] - yearlim[1])/int.length + 1 != dim(men)[2])
stop("'yearlim' cannot be divided into intervals of equal length")
if (!is.matrix(men) | !is.matrix(women))
stop("input tables must be of class matrix")
dimi <- dim(men)
temp <- array(c(men, women), dim = c(dimi, 2))
temp <- -log(temp)/365.241
temp <- aperm(temp, c(1, 3, 2))
cp <- as.date(apply(matrix(yearlim[1] + int.length * (0:(dimi[2] -
1)), ncol = 1), 1, function(x) {
paste("1jan", x, sep = "")
}))
attributes(temp) <- list(dim = c(dimi[1], 2, dimi[2]), dimnames = list(age=as.character(0:(dimi[1] -
1)), sex=c("male", "female"), year=as.character(yearlim[1] + int.length *
(0:(dimi[2] - 1)))), dimid = c("age", "sex", "year"),
factor = c(0, 1, 0),type=c(2,1,3), cutpoints = list((0:(dimi[1] - 1)) *
(365.241), NULL, cp), class = "ratetable")
attributes(temp)$summary <- function (R)
{
x <- c(format(round(min(R[, 1])/365.241, 1)), format(round(max(R[,
1])/365.241, 1)), sum(R[, 2] == 1), sum(R[, 2] == 2))
x2 <- as.character(as.Date(c(min(R[, 3]), max(R[, 3])), origin=as.Date('1970-01-01')))
paste(" age ranges from", x[1], "to", x[2], "years\n", " male:",
x[3], " female:", x[4], "\n", " date of entry from",
x2[1], "to", x2[2], "\n")
}
temp
}
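# A sketch of a transrate call, not run ('males' and 'females' being
# hypothetical matrices of yearly death probabilities, one row per age and
# one column per 5-year period):
# rt <- transrate(males, females, yearlim = c(1990, 2010), int.length = 5)
# transrate.hld: build a ratetable from Human Life-Table Database csv files
# (one file per population), optionally split by race.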
transrate.hld <- function(file, cut.year,race){
nfiles <- length(file)
data <- NULL
for(it in 1:nfiles){
tdata <- read.table(file[it],sep=",",header=TRUE)
if(!any(tdata$TypeLT==1)) stop("Currently only TypeLT 1 is implemented")
names(tdata) <- gsub(".","",names(tdata),fixed=TRUE)
tdata <- tdata[,c("Country","Year1","Year2","TypeLT","Sex","Age","AgeInt","qx")]
tdata <- tdata[tdata$TypeLT==1,]
tdata <- tdata[!is.na(tdata$AgeInt),]
if(!missing(race))tdata$race <- rep(race[it],nrow(tdata))
data <- rbind(data,tdata)
}
if(length(unique(data$Country))>1)warning("The data belongs to different countries")
data <- data[order(data$Year1,data$Age),]
data$qx <- as.character(data$qx)
options(warn = -1)
data$qx[data$qx=="."] <- NA
data$qx <- as.numeric(data$qx)
options(warn = 0)
if(missing(cut.year)){
y1 <- unique(data$Year1)
y2 <- unique(data$Year2)
if(any(apply(cbind(y1[-1],y2[-length(y2)]),1,diff)!=-1))warning("Data are not given for all the years between the minimum and the maximum; use argument 'cut.year'")
}
else
y1 <- cut.year
if(length(y1)!=length(unique(data$Year1)))stop("Length of 'cut.year' must match the number of unique values of Year1")
cp <- as.date(apply(matrix(y1,ncol=1),1,function(x){paste("1jan",x,sep="")}))
dn2 <- as.character(y1)
amax <- max(data$Age)
a.fun <- function(data,amax){
mdata <- data[data$Sex==1,]
wdata <- data[data$Sex==2,]
men <-NULL
women <- NULL
k <- sum(mdata$Age==0)
mind <- c(which(mdata$Age[-nrow(mdata)] != mdata$Age[-1]-1),nrow(mdata))
wind <- c(which(wdata$Age[-nrow(wdata)] != wdata$Age[-1]-1),nrow(wdata))
mst <- wst <- 1
for(it in 1:k){
qx <- mdata[mst:mind[it],]$qx
lqx <- length(qx)
if(lqx!=amax+1){
nmiss <- amax + 1 - lqx
qx <- c(qx,rep(qx[lqx],nmiss))
}
naqx <- max(which(!is.na(qx)))
if(naqx!=amax+1) qx[(naqx+1):(amax+1)] <- qx[naqx]
men <- cbind(men,qx)
mst <- mind[it]+1
qx <- wdata[wst:wind[it],]$qx
lqx <- length(qx)
if(lqx!=amax+1){
nmiss <- amax + 1 - lqx
qx <- c(qx,rep(qx[lqx],nmiss))
}
naqx <- max(which(!is.na(qx)))
if(naqx!=amax+1) qx[(naqx+1):(amax+1)] <- qx[naqx]
women <- cbind(women,qx)
wst <- wind[it]+1
}
men<- -log(1-men)/365.241
women<- -log(1-women)/365.241
dims <- c(dim(men),2)
array(c(men,women),dim=dims)
}
if(missing(race)){
out <- a.fun(data,amax)
dims <- dim(out)
attributes(out)<-list(
dim=dims,
dimnames=list(as.character(0:amax),as.character(y1),c("male","female")),
dimid=c("age","year","sex"),
factor=c(0,0,1),type=c(2,3,1),
cutpoints=list((0:amax)*(365.241),cp,NULL),
class="ratetable"
)
}
else{
race.val <- unique(race)
if(length(race)!=length(file))stop("Length of 'race' must match the number of files")
for(it in 1:length(race.val)){
if(it==1){
out <- a.fun(data[data$race==race.val[it],],amax)
dims <- dim(out)
out <- array(out,dim=c(dims,1))
}
else{
out1 <- array(a.fun(data[data$race==race.val[it],],amax),dim=c(dims,1))
out <- array(c(out,out1),dim=c(dims,it))
}
}
attributes(out)<-list(
dim=c(dims,it),
dimnames=list(age=as.character(0:amax),year=as.character(y1),sex=c("male","female"),race=race.val),
dimid=c("age","year","sex","race"),
factor=c(0,0,1,1),type=c(2,3,1,1),
cutpoints=list((0:amax)*(365.241),cp,NULL,NULL),
class="ratetable"
)
}
attributes(out)$summary <- function (R)
{
x <- c(format(round(min(R[, 1])/365.241, 1)), format(round(max(R[,
1])/365.241, 1)), sum(R[, 3] == 1), sum(R[, 3] == 2))
x2 <- as.character(as.Date(c(min(R[, 2]), max(R[, 2])), origin=as.Date('1970-01-01')))
paste(" age ranges from", x[1], "to", x[2], "years\n", " male:",
x[3], " female:", x[4], "\n", " date of entry from",
x2[1], "to", x2[2], "\n")
}
out
}
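# transrate.hmd: build a ratetable from Human Mortality Database period life
# tables, one file per sex, with ages 0-110 for each year.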
transrate.hmd <- function(male,female){
nfiles <- 2
men <- try(read.table(male,sep="",header=TRUE),silent=TRUE)
if(class(men)=="try-error")men <- read.table(male,sep="",header=TRUE,skip=1)
men <- men[,c("Year","Age","qx")]
y1 <- sort(unique(men$Year))
ndata <- nrow(men)/111
if(round(ndata)!=ndata)stop("Each year must contain ages from 0 to 110")
men <- matrix(men$qx, ncol=ndata)
men <- matrix(as.numeric(men),ncol=ndata)
women <- try(read.table(female,sep="",header=TRUE),silent=TRUE)
if(class(women)=="try-error")women <- read.table(female,sep="",header=TRUE,skip=1)
women <- women[,"qx"]
if(length(women)!=length(men))stop("Number of rows in the table must be equal for both sexes")
women <- matrix(women, ncol=ndata)
women <- matrix(as.numeric(women),ncol=ndata)
cp <- as.date(apply(matrix(y1,ncol=1),1,function(x){paste("1jan",x,sep="")}))
dn2 <- as.character(y1)
tfun <- function(vec){
ind <- which(vec == 1 | is.na(vec))
if(length(ind)>0)vec[min(ind):length(vec)] <- 0.999
vec
}
men <- apply(men,2,tfun)
women <- apply(women,2,tfun)
men<- -log(1-men)/365.241
women<- -log(1-women)/365.241
nr <- nrow(men)-1
dims <- c(dim(men),2)
out <- array(c(men,women),dim=dims)
attributes(out)<-list(
dim=dims,
dimnames=list(age=as.character(0:nr),year=as.character(y1),sex=c("male","female")),
dimid=c("age","year","sex"),
factor=c(0,0,1),type=c(2,3,1),
cutpoints=list((0:nr)*(365.241),cp,NULL),
class="ratetable"
)
attributes(out)$summary <- function (R)
{
x <- c(format(round(min(R[, 1])/365.241, 1)), format(round(max(R[,
1])/365.241, 1)), sum(R[, 3] == 1), sum(R[, 3] == 2))
x2 <- as.character(as.Date(c(min(R[, 2]), max(R[, 2])), origin=as.Date('1970-01-01')))
paste(" age ranges from", x[1], "to", x[2], "years\n", " male:",
x[3], " female:", x[4], "\n", " date of entry from",
x2[1], "to", x2[2], "\n")
}
out
}
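# joinrate: combine several ratetables that may differ in their cutpoints
# into one ratetable with an additional dimension (named by 'dim.name',
# default "country"), keeping only the cutpoints common to all tables.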
joinrate <- function(tables,dim.name="country"){
nfiles <- length(tables)
if(is.null(names(tables))) names(tables) <- paste("D",1:nfiles,sep="")
if(any(!unlist(lapply(tables,is.ratetable))))stop("Tables must be in ratetable format")
if(length(attributes(tables[[1]])$dim)!=3)stop("Currently implemented only for ratetables with 3 dimensions")
if(is.null(attr(tables[[1]],"dimid")))attr(tables[[1]],"dimid") <- names((attr(tables[[1]],"dimnames")))
for(it in 2:nfiles){
if(is.null(attr(tables[[it]],"dimid")))attr(tables[[it]],"dimid") <- names((attr(tables[[it]],"dimnames")))
if(length(attributes(tables[[it]])$dimid)!=3)stop("Each ratetable must have 3 dimensions: age, year and sex")
mc <- match(attributes(tables[[it]])$dimid,attributes(tables[[1]])$dimid,nomatch=0)
if(any(mc==0)) stop("Each ratetable must have 3 dimensions: age, year and sex")
if(any(mc!=1:3)){
atts <- attributes(tables[[it]])
tables[[it]] <- aperm(tables[[it]],mc)
atts$dimid <- atts$dimid[mc]
atts$dimnames <- atts$dimnames[mc]
atts$cutpoints <- atts$cutpoints[mc]
atts$factor <- atts$factor[mc]
atts$type <- atts$type[mc]
atts$dim <- atts$dim[mc]
attributes(tables[[it]]) <- atts
}
}
list.eq <- function(l1,l2){
n <- length(l1)
rez <- rep(TRUE,n)
for(it in 1:n){
if(length(l1[[it]])!=length(l2[[it]]))rez[it] <- FALSE
else if(any(l1[[it]]!=l2[[it]]))rez[it] <- FALSE
}
rez
}
equal <- rep(TRUE,3)
for(it in 2:nfiles){
equal <- equal*list.eq(attributes(tables[[1]])$cutpoints,attributes(tables[[it]])$cutpoints)
}
kir <- which(!equal)
newat <- attributes(tables[[1]])
imena <- list(d1=NULL,d2=NULL,d3=NULL)
for(jt in kir){
listy <- NULL
for(it in 1:nfiles){
listy <- c(listy,attributes(tables[[it]])$cutpoints[[jt]])
}
imena[[jt]] <- names(table(listy)[table(listy) == nfiles])
if(!length(imena[[jt]]))stop(paste("There are no common cutpoints for dimension", attributes(tables[[1]])$dimid[jt]))
}
for(it in 1:nfiles){
keep <- lapply(dim(tables[[it]]),function(x)1:x)
for(jt in kir){
meci <- which(match(attributes(tables[[it]])$cutpoints[[jt]],imena[[jt]],nomatch=0)!=0)
if(it==1){
newat$dimnames[[jt]] <- attributes(tables[[it]])$dimnames[[jt]][meci]
newat$dim[[jt]] <- length(imena[[jt]])
newat$cutpoints[[jt]] <- attributes(tables[[it]])$cutpoints[[jt]][meci]
}
if(length(meci)>1){if(any(diff(meci)!=1))warning(paste("The cutpoints for ",attributes(tables[[1]])$dimid[jt]," are not equally spaced",sep=""))}
keep[[jt]] <- meci
}
tables[[it]] <- tables[[it]][keep[[1]],keep[[2]],keep[[3]]]
}
dims <- newat$dim
out <- array(tables[[1]],dim=c(dims,1))
for(it in 2:nfiles){
out1 <- array(tables[[it]],dim=c(dims,1))
out <- array(c(out,out1),dim=c(dims,it))
}
mc <- 1:4
if(any(newat$factor>1)){
wh <- which(newat$factor>1)
mc <- c(mc[-wh],wh)
out <- aperm(out,mc)
}
newat$dim <- c(dims,nfiles)[mc]
newat$dimid <- c(newat$dimid,dim.name)[mc]
newat$cutpoints <- list(newat$cutpoints[[1]],newat$cutpoints[[2]],newat$cutpoints[[3]],NULL)[mc]
newat$factor <- c(newat$factor,1)[mc]
newat$type <- c(newat$type,1)[mc]
newat$dimnames <- list(newat$dimnames[[1]],newat$dimnames[[2]],newat$dimnames[[3]],names(tables))[mc]
names(newat$dimnames) <- newat$dimid
attributes(out) <- newat
out
}
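# mlfit: Newton-Raphson maximisation of the additive-model likelihood;
# returns the estimates, the matrix of second derivatives, the number of
# iterations and the initial and final log-likelihood.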
mlfit <- function (b, p, x, offset, d, h, ds, y, maxiter, tol)
{
for (nit in 1:maxiter) {
b0 <- b
fd <- matrix(0, p, 1)
sd <- matrix(0, p, p)
if (nit == 1) {
ebx <- exp(x %*% b) * exp(offset)
l0 <- sum(d * log(h + ebx) - ds - y * ebx)
}
for (it in 1:p) {
fd[it, 1] <- sum((d/(h + ebx) - y) * x[, it] * ebx)
for (jt in 1:p) sd[it, jt] = sum((d/(h + ebx) - d *
ebx/(h + ebx)^2 - y) * x[, it] * x[, jt] * ebx)
}
b <- b - solve(sd) %*% fd
ebx <- exp(x %*% b) * exp(offset)
l <- sum(d * log(h + ebx) - ds - y * ebx)
bd <- abs(b - b0)
if (max(bd) < tol)
break
}
out <- list(b = b, sd = sd, nit = nit, loglik = c(l0, l))
out
}
print.rs.br <- function (x, digits = max(options()$digits - 4, 3), ...)
{
invisible(print(x$table, digits = digits))
if (x$rho != 0)
invisible(cat("Weighted Brownian bridge with rho=", x$rho,
"\n"))
}
print.rsadd <- function (x, digits = max(3, getOption("digits") - 3), ...)
{
cat("\nCall: ", paste(deparse(x$call), sep = "\n", collapse = "\n"),
"\n\n", sep = "", "\n")
if (length(coef(x))) {
cat("Coefficients")
cat(":\n")
print.default(format(x$coefficients, digits = digits),
print.gap = 2, quote = FALSE)
}
else cat("No coefficients\n\n")
if(x$method=="EM")
cat("\n", "Expected number of disease specific deaths: ",format(round(sum(x$Nie),2))," = ",format(round(100*sum(x$Nie)/sum(x$data$stat),1)),"% \n" ,sep="")
if(x$method=="EM"|x$method=="max.lik"){
chi <- 2*max((x$loglik[2]-x$loglik[1]),0)
if(x$method=="EM")df <- length(x$coef)
else df <- length(x$coef)-length(x$int)+1
if(df>0){
p.val <- 1- pchisq(chi,df)
if(x$method=="max.lik")cat("\n")
cat("Likelihood ratio test=",format(round(chi,2)),", on ",df," df, p=",format(p.val),"\n",sep="")
}
else cat("\n")
}
cat("n=",nrow(x$data),sep="")
if(length(x$na.action))cat(" (",length(x$na.action)," observations deleted due to missing)",sep="")
cat("\n")
if (length(x$warnme))
cat("\n", x$warnme, "\n\n")
else cat("\n")
invisible(x)
}
summary.rsadd <- function (object, correlation = FALSE, symbolic.cor = FALSE,
...)
{
if (inherits(object, "glm")) {
p <- object$rank
if (p > 0) {
p1 <- 1:p
Qr <- object$qr
aliased <- is.na(coef(object))
coef.p <- object$coefficients[Qr$pivot[p1]]
covmat <- chol2inv(Qr$qr[p1, p1, drop = FALSE])
dimnames(covmat) <- list(names(coef.p), names(coef.p))
var.cf <- diag(covmat)
s.err <- sqrt(var.cf)
tvalue <- coef.p/s.err
dn <- c("Estimate", "Std. Error")
pvalue <- 2 * pnorm(-abs(tvalue))
coef.table <- cbind(coef.p, s.err, tvalue, pvalue)
dimnames(coef.table) <- list(names(coef.p), c(dn,
"z value", "Pr(>|z|)"))
df.f <- NCOL(Qr$qr)
}
else {
coef.table <- matrix(, 0, 4)
dimnames(coef.table) <- list(NULL, c("Estimate",
"Std. Error", "t value", "Pr(>|t|)"))
covmat.unscaled <- covmat <- matrix(, 0, 0)
aliased <- is.na(coef(object))
df.f <- length(aliased)
}
ans <- c(object[c("call", "terms", "family", "iter",
"warnme")], list(coefficients = coef.table, var = covmat,
aliased = aliased))
if (correlation && p > 0) {
dd <- s.err
ans$correlation <- covmat/outer(dd, dd)
ans$symbolic.cor <- symbolic.cor
}
class(ans) <- "summary.rsadd"
}
else if (inherits(object, "rsadd")) {
aliased <- is.na(coef(object))
coef.p <- object$coef
var.cf <- diag(object$var)
s.err <- sqrt(var.cf)
tvalue <- coef.p/s.err
dn <- c("Estimate", "Std. Error")
pvalue <- 2 * pnorm(-abs(tvalue))
coef.table <- cbind(coef.p, s.err, tvalue, pvalue)
dimnames(coef.table) <- list(names(coef.p), c(dn, "z value",
"Pr(>|z|)"))
ans <- c(object[c("call", "terms", "iter", "var")], list(coefficients = coef.table,
aliased = aliased))
if (correlation && sum(aliased) != length(aliased)) {
dd <- s.err
ans$correlation <- object$var/outer(dd, dd)
ans$symbolic.cor <- symbolic.cor
}
class(ans) <- "summary.rsadd"
}
else ans <- object
return(ans)
}
print.summary.rsadd <- function (x, digits = max(3, getOption("digits") - 3), symbolic.cor = x$symbolic.cor,
signif.stars = getOption("show.signif.stars"), ...)
{
cat("\nCall:\n")
cat(paste(deparse(x$call), sep = "\n", collapse = "\n"),
"\n\n", sep = "")
if (length(x$aliased) == 0) {
cat("\nNo Coefficients\n")
}
else {
cat("\nCoefficients:\n")
coefs <- x$coefficients
if (!is.null(aliased <- x$aliased) && any(aliased)) {
cn <- names(aliased)
coefs <- matrix(NA, length(aliased), 4, dimnames = list(cn,
colnames(coefs)))
coefs[!aliased, ] <- x$coefficients
}
printCoefmat(coefs, digits = digits, signif.stars = signif.stars,
na.print = "NA", ...)
}
if (length(x$warnme))
cat("\n", x$warnme, "\n")
correl <- x$correlation
if (!is.null(correl)) {
p <- NCOL(correl)
if (p > 1) {
cat("\nCorrelation of Coefficients:\n")
if (is.logical(symbolic.cor) && symbolic.cor) {
print(symnum(correl, abbr.colnames = NULL))
}
else {
correl <- format(round(correl, 2), nsmall = 2,
digits = digits)
correl[!lower.tri(correl)] <- ""
print(correl[-1, -p, drop = FALSE], quote = FALSE)
}
}
}
cat("\n")
invisible(x)
}
epa <- function(fit,bwin,times,n.bwin=16,left=FALSE){
utd <- fit$times
if(missing(times))times <- seq(1,max(utd),length=100)
if(max(times)>max(utd)){
warning("Cannot extrapolate beyond max event time")
times <- pmin(times,max(utd))
}
nutd <- length(utd)
nt4 <- c(1,ceiling(nutd*(1:n.bwin)/n.bwin))
if(missing(bwin))bwin <- rep(length(fit$times)/100,n.bwin)
else bwin <- rep(bwin*length(fit$times)/100,n.bwin)
for(it in 1:n.bwin){
bwin[it] <- bwin[it]*max(diff(utd[nt4[it]:nt4[it+1]]))
}
while(utd[nt4[2]]<bwin[1]){
nt4 <- nt4[-2]
if(length(nt4)==1)break
}
if(left) krn <- kernerleftch(utd,bwin,nt4)
else krn <- kern(times,utd,bwin,nt4)
lams <- pmax(krn%*%fit$lam0.ns,0)
list(lambda=lams,times=times)
}
Kern <- function (t, tv, b, tD, nt4)
{
Rb <- max(tv)
kmat <- NULL
tvs <- tv
tv <- tv[-1]
kt <- function(q,t)12*(t+1)/(1+q)^4*( (1-2*q)*t + (3*q^2-2*q+1)/2 )
totcajti <- NULL
for (it in 1:(length(nt4) - 1)) {
cajti <- t[t>tvs[nt4[it]] & t<=tvs[nt4[it + 1]]]
if(length(cajti)){
q <- min( cajti/b[it],1,(Rb-cajti)/b[it])
if(q<1 & length(cajti)>1){
jc <- 1
while(jc <=length(cajti)){
qd <- pmin( cajti[jc:length(cajti)]/b[it],1,(Rb-cajti[jc:length(cajti)])/b[it])
q <- qd[1]
if(q==1){
casi <- cajti[jc:length(cajti)][qd==1]
q <- 1
jc <- sum(qd==1)+jc
}
else{
casi <- cajti[jc]
jc <- jc+1
}
kmat1 <- outer(casi, tv, "-")/b[it]
if(q<1){
if(casi>b[it]) kmat1 <- -kmat1
vr <- kt(q,kmat1)*(kmat1>=-1 & kmat1 <= q)
}
else vr <- pmax((1 - kmat1^2) * .75,0)
kmat <- rbind(kmat, vr/b[it])
totcajti <- c(totcajti,casi)
}
}
else{
kmat1 <- outer(cajti, tv, "-")/b[it]
q <- min( cajti/b[it],1)
if(q<1)vr <- kt(q,kmat1)*(kmat1>=-1 & kmat1 <= q)
else vr <- pmax((1 - kmat1^2) * .75,0)
kmat <- rbind(kmat, vr/b[it])
totcajti <- c(totcajti,cajti)
}
}
}
kmat
}
kern <- function (times,td, b, nt4)
{
n <- length(td)
ttemp <- td[td >= b[1]]
ntemp <- length(ttemp)
if (ntemp == n)
nt4 <- c(0, nt4[-1])
td <- c(0,td)
nt4 <- c(1,nt4+1)
b <- c(b[1],b)
krn <- Kern(times, td, b, max(td), nt4)
krn
}
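# exp.prep: interface to the compiled routines of the package. With 'status'
# missing it returns expected survival probabilities for the given covariate
# combinations; with 'status' supplied it returns the at-risk and event
# quantities needed by the nonparametric net survival estimators.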
exp.prep <- function (x, y, ratetable, status, times, fast=FALSE, ys, prec, cmp=FALSE, netweiDM=FALSE) {
x <- as.matrix(x)
if (ncol(x) != length(dim(ratetable)))
stop("x matrix does not match the rate table")
atts <- attributes(ratetable)
cuts <- atts$cutpoints
if (is.null(atts$type)) {
rfac <- atts$factor
us.special <- (rfac > 1)
}
else {
rfac <- 1 * (atts$type == 1)
us.special <- (atts$type == 4)
}
if (length(rfac) != ncol(x))
stop("Wrong length for rfac")
if (any(us.special)) {
if (sum(us.special) > 1)
stop("Two columns marked for special handling as a US rate table")
cols <- match(c("age", "year"), atts$dimid)
if (any(is.na(cols)))
stop("Ratetable does not have expected shape")
if (exists("as.Date")) {
bdate <- as.Date("1960/1/1") + (x[, cols[2]] - x[,
cols[1]])
byear <- format(bdate, "%Y")
offset <- as.numeric(bdate - as.Date(paste(byear,
"01/01", sep = "/")))
}
else if (exists("date.mdy")) {
bdate <- as.date(x[, cols[2]] - x[, cols[1]])
byear <- date.mdy(bdate)$year
offset <- bdate - mdy.date(1, 1, byear)
}
else stop("Can't find an appropriate date class\n")
x[, cols[2]] <- x[, cols[2]] - offset
if (any(rfac > 1)) {
temp <- which(us.special)
nyear <- length(cuts[[temp]])
nint <- rfac[temp]
cuts[[temp]] <- round(approx(nint * (1:nyear), cuts[[temp]],
nint:(nint * nyear))$y - 1e-04)
}
}
if(!missing(status)){
if(length(status)!=nrow(x)) stop("Wrong length for status")
if(missing(times)) times <- sort(unique(y))
if (any(times < 0))
stop("Negative time point requested")
ntime <- length(times)
if(missing(ys)) ys <- rep(0,length(y))
if(cmp) temp <- .Call("cmpfast", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else if(fast&!missing(prec)) temp <- .Call("netfastpinter2", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,prec,PACKAGE="relsurv")
else if(fast&missing(prec)) temp <- .Call("netfastpinter", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else if(netweiDM==TRUE) temp <- .Call("netweiDM", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else temp <- .Call("netwei", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, as.integer(status), times,PACKAGE="relsurv")
}
else{
if(length(y)==1)y <- rep(y,nrow(x))
if(length(y)!=nrow(x)) stop("Wrong length for status")
temp <- .Call("expc", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y,PACKAGE="relsurv")
temp <- temp$surv
}
temp
}
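# rs.surv: nonparametric net/relative survival curves; 'method' selects
# "pohar-perme" (default), "ederer2", "hakulinen" or "ederer1", and the
# result prints and plots like a survfit object.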
rs.surv <- function (formula = formula(data), data = parent.frame(),ratetable = relsurv::slopop,
na.action, fin.date, method = "pohar-perme", conf.type = "log",
conf.int = 0.95,type="kaplan-meier",add.times,precision=1,rmap)
{
call <- match.call()
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
rform <- rformulate(formula,data, ratetable, na.action,rmap)
data <- rform$data
type <- match.arg(type, c("kaplan-meier", "fleming-harrington"))
type <- match(type, c("kaplan-meier", "fleming-harrington"))
method <- match.arg(method,c("pohar-perme", "ederer2", "hakulinen","ederer1"))
method <- match(method,c("pohar-perme", "ederer2", "hakulinen","ederer1"))
conf.type <- match.arg(conf.type,c("plain","log","log-log"))
if (method == 3) {
R <- rform$R
coll <- match("year", attributes(ratetable)$dimid)
year <- R[, coll]
if (missing(fin.date))
fin.date <- max(rform$Y + year)
Y2 <- rform$Y
if (length(fin.date) == 1)
Y2[rform$status == 1] <- fin.date - year[rform$status == 1]
else if (length(fin.date) == nrow(rform$R))
Y2[rform$status == 1] <- fin.date[rform$status ==
1] - year[rform$status == 1]
else stop("fin.date must be either one value or a vector of the same length as the data")
status2 <- rep(0, nrow(rform$X))
}
p <- rform$m
if (p > 0)
data$Xs <- strata(rform$X[, ,drop=FALSE ])
else data$Xs <- rep(1, nrow(data))
se.fac <- sqrt(qchisq(conf.int, 1))
out <- NULL
out$n <- table(data$Xs)
out$time <- out$n.risk <- out$n.event <- out$n.censor <- out$surv <- out$std.err <- out$strata <- NULL
for (kt in 1:length(out$n)) {
inx <- which(data$Xs == names(out$n)[kt])
tis <- sort(unique(rform$Y[inx]))
if (method == 1 & !missing(add.times)){
add.times <- pmin(as.numeric(add.times),max(rform$Y[inx]))
tis <- sort(union(rform$Y[inx],as.numeric(add.times)))
}
if(method==3)tis <- sort(unique(pmin(max(tis),c(tis,Y2[inx]))))
temp <- exp.prep(rform$R[inx,,drop=FALSE],rform$Y[inx],rform$ratetable,rform$status[inx],times=tis,fast=(method<3),prec=precision)
out$time <- c(out$time, tis)
out$n.risk <- c(out$n.risk, temp$yi)
out$n.event <- c(out$n.event, temp$dni)
out$n.censor <- c(out$n.censor, c(-diff(temp$yi),temp$yi[length(temp$yi)]) - temp$dni)
if(method==1){
approximate <- temp$yidlisiw
haz <- temp$dnisi/temp$yisi - approximate
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dnisisq/(temp$yisi)^2)))
}
else if(method==2){
haz <- temp$dni/temp$yi - temp$yidli/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
else if(method==3){
temp2 <- exp.prep(rform$R[inx,,drop=FALSE],Y2[inx],ratetable,status2[inx],times=tis)
popsur <- exp(-cumsum(temp2$yisidli/temp2$yisis))
haz <- temp$dni/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
else if(method==4){
popsur <- temp$sis/length(inx)
haz <- temp$dni/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
if(type==2)survtemp <- exp(-cumsum(haz))
else survtemp <- cumprod(1-haz)
if(method>2){
survtemp <- survtemp/popsur
}
out$surv <- c(out$surv,survtemp)
out$strata <- c(out$strata, length(tis))
}
if (conf.type == "plain") {
out$lower <- as.vector(out$surv - out$std.err * se.fac *
out$surv)
out$upper <- as.vector(out$surv + out$std.err * se.fac *
out$surv)
}
else if (conf.type == "log") {
out$lower <- exp(as.vector(log(out$surv) - out$std.err *
se.fac))
out$upper <- exp(as.vector(log(out$surv) + out$std.err *
se.fac))
}
else if (conf.type == "log-log") {
out$lower <- exp(-exp(as.vector(log(-log(out$surv)) -
out$std.err * se.fac/log(out$surv))))
out$upper <- exp(-exp(as.vector(log(-log(out$surv)) +
out$std.err * se.fac/log(out$surv))))
}
names(out$strata) <- names(out$n)
if (p == 0){
out$strata <- NULL
}
out$n <- as.vector(out$n)
out$conf.type <- conf.type
out$conf.int <- conf.int
out$method <- method
out$call <- call
out$type <- "right"
class(out) <- c("survfit", "rs.surv")
out
}
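# Usage sketch (editorial, not part of the original file): rs.surv() is
# typically called with a survival formula, a population rate table and an
# rmap that converts the data to rate-table units. The dataset `rdata` and
# the variable names below are illustrative assumptions.
# library(relsurv)
# data(rdata)
# fit <- rs.surv(Surv(time, cens) ~ sex, data = rdata,
#                ratetable = relsurv::slopop,
#                rmap = list(age = age * 365.241),
#                method = "pohar-perme")
# plot(fit)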
nessie <- function (formula = formula(data), data = parent.frame(), ratetable = relsurv::slopop,times,rmap)
{
call <- match.call()
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
na.action <- NA
rform <- rformulate(formula, data, ratetable,na.action, rmap)
templab <- attr(rform$Terms,"term.labels")
if(!is.null(attr(rform$Terms,"specials")$ratetable))templab <- templab[-length(templab)]
nameslist <- vector("list",length(templab))
for(it in 1:length(nameslist)){
valuetab <- table(data[,match(templab[it],names(data))])
nameslist[[it]] <- paste(templab[it],names(valuetab),sep="")
}
names(nameslist) <- templab
data <- rform$data
p <- rform$m
if (p > 0) {
data$Xs <- my.strata(rform$X[,,drop=F],nameslist=nameslist)
}
else data$Xs <- rep(1, nrow(data))
if(!missing(times)) tis <- times
else tis <- unique(sort(floor(rform$Y/365.241)))
tis <- unique(c(0,tis))
tisd <- tis*365.241
out <- NULL
out$n <- table(data$Xs)
out$sp <- out$strata <- NULL
for (kt in order(names(table(data$Xs)))) {
inx <- which(data$Xs == names(out$n)[kt])
temp <- exp.prep(rform$R[inx,,drop=FALSE],rform$Y[inx],rform$ratetable,rform$status[inx],times=tisd,fast=FALSE)
out$time <- c(out$time, tisd)
out$sp <- c(out$sp, temp$sis)
out$strata <- c(out$strata, length(tis))
temp <- exp.prep(rform$R[inx,,drop=FALSE],rform$Y[inx],rform$ratetable,rform$status[inx],times=(seq(0,100,by=.5)*365.241)[-1],fast=FALSE)
out$povp <- c(out$povp,mean(temp$sit/365.241))
}
names(out$strata) <- names(out$n)[order(names(table(data$Xs)))]
if (p == 0) out$strata <- NULL
mata <- matrix(out$sp,ncol=length(tis),byrow=TRUE)
mata <- data.frame(mata)
mata <- cbind(mata,out$povp)
row.names(mata) <- names(out$n)[order(names(table(data$Xs)))]
names(mata) <- c(tis,"c.exp.surv")
cat("\n")
print(round(mata,1))
cat("\n")
out$mata <- mata
out$n <- as.vector(out$n)
class(out) <- "nessie"
invisible(out)
}
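# Usage sketch (editorial assumption): nessie() prints a table of expected
# numbers by strata; `times` is given in years (it is multiplied by 365.241
# internally). Variable names mirror the rs.surv sketch above.
# data(rdata)
# nessie(Surv(time, cens) ~ sex, data = rdata,
#        ratetable = relsurv::slopop,
#        rmap = list(age = age * 365.241),
#        times = c(1, 3, 5, 10, 15))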
rs.period <- function (formula = formula(data), data = parent.frame(), ratetable = relsurv::slopop,
na.action, fin.date, method = "pohar-perme", conf.type = "log",
conf.int = 0.95,type="kaplan-meier",winst,winfin,diag.date,rmap)
{
call <- match.call()
if (!missing(rmap)) {
rmap <- substitute(rmap)
}
rform <- rformulate(formula, data, ratetable, na.action,rmap)
data <- rform$data
type <- match.arg(type, c("kaplan-meier", "fleming-harrington"))
type <- match(type, c("kaplan-meier", "fleming-harrington"))
method <- match.arg(method,c("pohar-perme", "ederer2", "hakulinen","ederer1"))
method <- match(method,c("pohar-perme", "ederer2", "hakulinen","ederer1"))
conf.type <- match.arg(conf.type,c("plain","log","log-log"))
R <- rform$R
coll <- match("year", attributes(ratetable)$dimid)
year <- R[, coll]
ys <- as.numeric(winst - year)
yf <- as.numeric(winfin - year)
relv <- which(ys <= rform$Y & yf>0)
centhem <- which(yf < rform$Y)
rform$status[centhem] <- 0
rform$Y[centhem] <- yf[centhem]
rform$Y <- rform$Y[relv]
rform$X <- rform$X[relv,,drop=F]
rform$R <- rform$R[relv,,drop=F]
rform$status <- rform$status[relv]
data <- data[relv,,drop=F]
ys <- ys[relv]
yf <- yf[relv]
year <- year[relv]
if (method == 3) {
if (missing(fin.date))
fin.date <- max(rform$Y + year)
Y2 <- rform$Y
if (length(fin.date) == 1)
Y2[rform$status == 1] <- fin.date - year[rform$status == 1]
else if (length(fin.date[relv]) == nrow(rform$R)) {
fin.date <- fin.date[relv]
Y2[rform$status == 1] <- fin.date[rform$status ==
1] - year[rform$status == 1]
}
else stop("fin.date must be either one value of a vector of the same length as the data")
status2 <- rep(0, nrow(rform$X))
}
p <- rform$m
if (p > 0)
data$Xs <- strata(rform$X[, ,drop=FALSE ])
else data$Xs <- rep(1, nrow(data))
se.fac <- sqrt(qchisq(conf.int, 1))
out <- NULL
out$n <- table(data$Xs)
out$time <- out$n.risk <- out$n.event <- out$n.censor <- out$surv <- out$std.err <- out$strata <- NULL
for (kt in 1:length(out$n)) {
inx <- which(data$Xs == names(out$n)[kt])
tis <- sort(unique(rform$Y[inx]))
if(method==3)tis <- sort(unique(pmin(max(tis),c(tis,Y2[inx]))))
ys <- pmax(ys,0)
tis <- sort(unique(c(tis,ys[ys>0])))
tis <- sort(unique(c(tis,tis-1,tis+1)))
tis <- tis[-length(tis)]
temp <- exp.prep(rform$R[inx,,drop=FALSE],rform$Y[inx],rform$ratetable,rform$status[inx],times=tis,fast=(method<3),ys=ys)
out$time <- c(out$time, tis)
out$n.risk <- c(out$n.risk, temp$yi)
out$n.event <- c(out$n.event, temp$dni)
out$n.censor <- c(out$n.censor, c(-diff(temp$yi),temp$yi[length(temp$yi)]) - temp$dni)
if(method==1){
haz <- temp$dnisi/temp$yisi - temp$yidlisi/temp$yisi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dnisisq/(temp$yisi)^2)))
}
else if(method==2){
haz <- temp$dni/temp$yi - temp$yidli/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
else if(method==3){
temp2 <- exp.prep(rform$R[inx,,drop=FALSE],Y2[inx],rform$ratetable,status2[inx],times=tis,ys=ys)
popsur <- exp(-cumsum(temp2$yisidli/temp2$yisis))
haz <- temp$dni/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
else if(method==4){
popsur <- temp$sis/length(inx)
haz <- temp$dni/temp$yi
out$std.err <- c(out$std.err, sqrt(cumsum(temp$dni/(temp$yi)^2)))
}
if(type==2)survtemp <- exp(-cumsum(haz))
else survtemp <- cumprod(1-haz)
if(method>2){
survtemp <- survtemp/popsur
}
out$surv <- c(out$surv,survtemp)
out$strata <- c(out$strata, length(tis))
}
if (conf.type == "plain") {
out$lower <- as.vector(out$surv - out$std.err * se.fac *
out$surv)
out$upper <- as.vector(out$surv + out$std.err * se.fac *
out$surv)
}
else if (conf.type == "log") {
out$lower <- exp(as.vector(log(out$surv) - out$std.err *
se.fac))
out$upper <- exp(as.vector(log(out$surv) + out$std.err *
se.fac))
}
else if (conf.type == "log-log") {
out$lower <- exp(-exp(as.vector(log(-log(out$surv)) -
out$std.err * se.fac/log(out$surv))))
out$upper <- exp(-exp(as.vector(log(-log(out$surv)) +
out$std.err * se.fac/log(out$surv))))
}
names(out$strata) <- names(out$n)
if (p == 0) out$strata <- NULL
out$n <- as.vector(out$n)
out$conf.type <- conf.type
out$conf.int <- conf.int
out$method <- method
out$call <- call
out$type <- "right"
class(out) <- c("survfit", "rs.surv")
out
}
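# Usage sketch (editorial assumption): rs.period() restricts the analysis to
# a calendar window bounded by `winst` and `winfin` (dates on the rate-table
# scale, here built with as.date() from the date package). The window bounds
# below are hypothetical.
# fit.p <- rs.period(Surv(time, cens) ~ sex, data = rdata,
#                    ratetable = relsurv::slopop,
#                    rmap = list(age = age * 365.241),
#                    winst = as.date("1Jan2000"),
#                    winfin = as.date("31Dec2004"))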
expprep2 <- function (x, y,ratetable,status,times,fast=FALSE,ys,prec,cmp=F,netweiDM=FALSE) {
x <- as.matrix(x)
if (ncol(x) != length(dim(ratetable)))
stop("x matrix does not match the rate table")
atts <- attributes(ratetable)
cuts <- atts$cutpoints
if (is.null(atts$type)) {
rfac <- atts$factor
us.special <- (rfac > 1)
}
else {
rfac <- 1 * (atts$type == 1)
us.special <- (atts$type == 4)
}
if (length(rfac) != ncol(x))
stop("Wrong length for rfac")
if (any(us.special)) {
if (sum(us.special) > 1)
stop("Two columns marked for special handling as a US rate table")
cols <- match(c("age", "year"), atts$dimid)
if (any(is.na(cols)))
stop("Ratetable does not have expected shape")
if (exists("as.Date")) {
bdate <- as.Date("1960/1/1") + (x[, cols[2]] - x[,
cols[1]])
byear <- format(bdate, "%Y")
offset <- as.numeric(bdate - as.Date(paste(byear,
"01/01", sep = "/")))
}
else if (exists("date.mdy")) {
bdate <- as.date(x[, cols[2]] - x[, cols[1]])
byear <- date.mdy(bdate)$year
offset <- bdate - mdy.date(1, 1, byear)
}
else stop("Can't find an appropriate date class\n")
x[, cols[2]] <- x[, cols[2]] - offset
if (any(rfac > 1)) {
temp <- which(us.special)
nyear <- length(cuts[[temp]])
nint <- rfac[temp]
cuts[[temp]] <- round(approx(nint * (1:nyear), cuts[[temp]],
nint:(nint * nyear))$y - 1e-04)
}
}
if(!missing(status)){
if(length(status)!=nrow(x)) stop("Wrong length for status")
if(missing(times)) times <- sort(unique(y))
if (any(times < 0))
stop("Negative time point requested")
ntime <- length(times)
if(missing(ys)) ys <- rep(0,length(y))
if(cmp) temp <- .Call("cmpfast", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else if(fast&!missing(prec)) temp <- .Call("netfastpinter2", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,prec,PACKAGE="relsurv")
else if(fast&missing(prec)) temp <- .Call("netfastpinter", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else if(netweiDM==TRUE) temp <- .Call("netweiDM", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, ys,as.integer(status), times,PACKAGE="relsurv")
else temp <- .Call("netwei", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y, as.integer(status), times,PACKAGE="relsurv")
}
else{
if(length(y)==1)y <- rep(y,nrow(x))
if(length(y)!=nrow(x)) stop("Wrong length for y")
temp <- .Call("expc", as.integer(rfac),
as.integer(atts$dim), as.double(unlist(cuts)), ratetable,
x, y,PACKAGE="relsurv")
temp <- temp$surv
}
temp
}
|
build.log.output <- function(log.results,
include.full.call.stack = getOption("tryCatchLog.include.full.call.stack", TRUE),
include.compact.call.stack = getOption("tryCatchLog.include.compact.call.stack", TRUE),
include.severity = TRUE,
include.timestamp = FALSE,
use.platform.newline = FALSE) {
stopifnot("data.frame" %in% class(log.results))
res <- ""
i <- 1
while (i <= NROW(log.results)) {
res <- paste0(res,
if (include.timestamp)
format(log.results$timestamp[i], "%Y-%m-%d %H:%M:%S "),
if (include.severity)
paste0("[", log.results$severity[i], "] "),
log.results$msg.text[i],
if (!is.na(log.results$execution.context.msg[i]) && !log.results$execution.context.msg[i] == "")
paste0(" {execution.context.msg: ", log.results$execution.context.msg[i], "}"),
"\n\n",
if (nchar(log.results$dump.file.name[i]) > 0)
paste0("Created dump file: ", log.results$dump.file.name[i], "\n\n"),
if (include.compact.call.stack) {
paste0("Compact call stack:",
"\n",
log.results$compact.stack.trace[i],
"\n\n")
},
if (include.full.call.stack) {
paste0("Full call stack:",
"\n",
log.results$full.stack.trace[i],
"\n\n")
}
)
i <- i + 1
}
if (use.platform.newline)
res <- gsub("\n", platform.NewLine(), res, fixed = TRUE)
return(res)
}
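# Usage sketch (editorial, not part of the original file): after tryCatchLog
# has logged a condition, the collected log data.frame can be rendered into
# a single message string.
# log.df <- tryCatchLog::last.tryCatchLog.result()  # the logged conditions
# cat(build.log.output(log.df, include.timestamp = TRUE))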
|
summary.ma.allunid <- function(object, ...){
cat("\nUnidentified Model Summaries")
cat("\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n")
species.name <- names(object)
for(sp in seq(along = species.name)){
summary(object[[sp]], species = species.name[sp])
}
invisible(object)
}
|
oneDay_migrationIn_Patch <- function(maleIn, femaleIn){
private$popMale[] = maleIn
private$popFemale[] = femaleIn
}
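# Note (editorial): this is a method body that assumes an enclosing R6 class
# with `private$popMale` and `private$popFemale` fields; the `[` assignment
# replaces the vectors' contents in place while preserving their attributes.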
|
testthat::context("FindHashtagPipe")
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
testthat::expect_silent(FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags))
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize propertyName type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- NULL
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
testthat::expect_error(FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags),
"[FindHashtagPipe][initialize][FATAL] Checking the type of the 'propertyName' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize alwaysBeforeDeps type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- NULL
notAfterDeps <- list()
removeHashtags <- TRUE
testthat::expect_error(FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags),
"[FindHashtagPipe][initialize][FATAL] Checking the type of the 'alwaysBeforeDeps' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize notAfterDeps type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- NULL
removeHashtags <- TRUE
testthat::expect_error(FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags),
"[FindHashtagPipe][initialize][FATAL] Checking the type of the 'notAfterDeps' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize removeHashtags type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- NULL
testthat::expect_error(FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags),
"[FindHashtagPipe][initialize][FATAL] Checking the type of the 'removeHashtags' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe removeHashtags <- TRUE",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
path <- file.path("testFiles",
"testFindHashtagPipe",
"testFile.tsms")
instance <- ExtractorSms$new(path)
instance$setData("Hey I am
instance <- pipe$pipe(instance)
testthat::expect_equal(instance$getSpecificProperty("hashtag"),
"
testthat::expect_equal(instance$getData(),
"Hey I am")
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe removeHashtags <- FALSE",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- FALSE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
path <- file.path("testFiles",
"testFindHashtagPipe",
"testFile.tsms")
instance <- ExtractorSms$new(path)
instance$setData("Hey I am
instance <- pipe$pipe(instance)
testthat::expect_equal(instance$getSpecificProperty("hashtag"),
"
testthat::expect_equal(instance$getData(),
"Hey I am
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe instance type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
instance <- NULL
testthat::expect_error(pipe$pipe(instance),
"[FindHashtagPipe][pipe][FATAL] Checking the type of the 'instance' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe empty data",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
path <- file.path("testFiles",
"testFindHashtagPipe",
"testFile.tsms")
instance <- ExtractorSms$new(path)
instance$setData("
expect_warning(pipe$pipe(instance),
"\\[FindHashtagPipe\\]\\[pipe\\]\\[WARN\\] The file: [\\\\\\:[:alnum:]\\/_.-]*testFiles\\/testFindHashtagPipe\\/testFile\\.tsms has data empty on pipe Hashtag")
  testthat::expect_equal(instance$getSpecificProperty("hashtag"),
                         "#example")
testthat::expect_equal(instance$getData(),
"")
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("findUserName",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
data <- "
testthat::expect_equal(pipe$findHashtag(data),
"
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("findHashtag data type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
data <- NULL
testthat::expect_error(pipe$findHashtag(data),
"[FindHashtagPipe][findHashtag][FATAL] Checking the type of the 'data' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("removeHashtag",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
data <- "
testthat::expect_equal(pipe$removeHashtag(data),
" ")
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("removeHashtag data type error",{
testthat::skip_if_not_installed("rex")
testthat::skip_if_not_installed("stringr")
propertyName <- "hashtag"
alwaysBeforeDeps <- list()
notAfterDeps <- list()
removeHashtags <- TRUE
pipe <- FindHashtagPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
removeHashtags)
data <- NULL
testthat::expect_error(pipe$removeHashtag(data),
"[FindHashtagPipe][removeHashtag][FATAL] Checking the type of the 'data' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
|
require(ggplot2)
require(nlraa)
require(nlme)
require(mgcv)
if(Sys.info()[["user"]] == "fernandomiguez"){
y <- c(12, 14, 33, 50, 67, 74, 123, 141, 165, 204, 253, 246, 240)
t <- 1:13
dat <- data.frame(y = y, t = t)
ggplot(data = dat, aes(x = t, y = y)) + geom_point()
m1 <- gam(y ~ t + I(t^2), data = dat, family = poisson)
ggplot(data = dat, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = fitted(m1)))
m1.sim <- simulate_gam(m1, nsim = 1e3)
m1.sims <- summary_simulate(m1.sim)
datA <- cbind(dat, m1.sims)
ggplot(data = datA, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = Estimate)) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3) +
ggtitle("Simulate + summary_simulate method")
m1.prd <- predict(m1, se.fit = TRUE, type = "response")
m1.prdd <- data.frame(dat, prd = m1.prd$fit,
lwr = m1.prd$fit - 1.96 * m1.prd$se.fit,
upr = m1.prd$fit + 1.96 * m1.prd$se.fit)
ggplot(data = m1.prdd, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = prd)) +
geom_ribbon(aes(ymin = lwr, ymax = upr), fill = "purple", alpha = 0.3) +
ggtitle("Built-in predict.gam method")
m1.simP <- simulate_gam(m1, nsim = 1e3, psim = 2)
m1.simPs <- summary_simulate(m1.simP)
datAP <- cbind(dat, m1.simPs)
ggplot(data = datAP, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = Estimate)) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3)
m1.simP2 <- simulate_gam(m1, nsim = 1e3, psim = 2, resid.type = "resample")
m1.simP2s <- summary_simulate(m1.simP2)
datAP2 <- cbind(dat, m1.simP2s)
ggplot(data = datAP2, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = Estimate)) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3)
m1.simPW <- simulate_gam(m1, nsim = 1e3, psim = 2, resid.type = "wild")
m1.simPWs <- summary_simulate(m1.simPW)
datAPW <- cbind(dat, m1.simPWs)
ggplot(data = datAPW, aes(x = t, y = y)) +
geom_point() +
geom_line(aes(y = Estimate)) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3)
data(Soybean)
fms.G <- gam(weight ~ Time + s(Time), data = Soybean)
fms.C <- lm(weight ~ Time + I(Time^2) + I(Time^3), data = Soybean)
fms.B <- nls(weight ~ SSbgrp(Time, w.max, lt.e, ldt), data = Soybean)
IC_tab(fms.G, fms.C, fms.B, criteria = "AIC")
IC_tab(fms.G, fms.C, fms.B, criteria = "BIC")
ggplot(data = Soybean, aes(x = Time, y = weight)) +
geom_point() +
geom_line(aes(y = fitted(fms.C), color = "Cubic")) +
geom_line(aes(y = fitted(fms.G), color = "GAM")) +
geom_line(aes(y = fitted(fms.B), color = "Beta"))
prd <- predict_gam(fms.G, interval = "confidence")
SoybeanAG <- cbind(Soybean, prd)
ggplot(data = SoybeanAG, aes(x = Time, y = weight)) +
geom_point() +
geom_line(aes(y = fitted(fms.G), color = "GAM")) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3)
prdc <- predict_nls(fms.B, interval = "confidence")
prdp <- predict_nls(fms.B, interval = "prediction")
colnames(prdp) <- paste0("p", c("Estimate", "Est.Error", "Q2.5", "Q97.5"))
SoybeanAB <- cbind(Soybean, prdc, prdp)
ggplot(data = SoybeanAB, aes(x = Time, y = weight)) +
geom_point() +
geom_line(aes(y = fitted(fms.B), color = "beta")) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.3) +
geom_ribbon(aes(ymin = pQ2.5, ymax = pQ97.5), fill = "purple", alpha = 0.2)
fms.Bg <- gnls(weight ~ SSbgrp(Time, w.max, lt.e, ldt), data = Soybean,
weights = varPower())
IC_tab(fms.G, fms.C, fms.B, fms.Bg, criteria = "AIC")
prdc.bg <- predict_nlme(fms.Bg, interval = "confidence")
prdp.bg <- predict_nlme(fms.Bg, interval = "prediction")
colnames(prdp.bg) <- paste0("p", c("Estimate", "Est.Error", "Q2.5", "Q97.5"))
SoybeanBG <- cbind(Soybean, prdc.bg, prdp.bg)
ggplot(data = SoybeanBG, aes(x = Time, y = weight)) +
geom_point() +
geom_line(aes(y = fitted(fms.Bg), color = "mean")) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5, color = "confidence"), fill = "purple", alpha = 0.3) +
geom_ribbon(aes(ymin = pQ2.5, ymax = pQ97.5, color = "prediction"), fill = "purple", alpha = 0.2) +
ggtitle("Beta growth with increasing variance is the best model")
data(barley)
fgb0 <- gam(yield ~ s(NF, k = 5), data = barley)
fgb0p <- predict_gam(fgb0, interval = "conf")
barleyA <- cbind(barley, fgb0p)
prd <- predict(fgb0, se = TRUE)
barleyG <- barley
barleyG$fit <- prd$fit
barleyG$lwr <- prd$fit - 1.96 * prd$se.fit
barleyG$upr <- prd$fit + 1.96 * prd$se.fit
barleyAG <- merge(barleyA, barleyG)
ggplot(data = barleyAG, aes(x = NF, y = yield)) +
geom_point() +
geom_line(aes(y = fit)) +
geom_line(aes(y = Estimate), linetype = 2) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "purple", alpha = 0.2) +
geom_ribbon(aes(ymin = lwr, ymax = upr), fill = "yellow", alpha = 0.1)
barley$year.f <- as.factor(barley$year)
fgb1 <- gam(yield ~ s(NF, k = 5) + s(year.f, bs = "re"), data = barley)
fgb1p <- predict_gam(fgb1, interval = "conf")
barleyA <- cbind(barley, fgb1p)
prd <- predict(fgb1, se = TRUE)
barleyG <- barley
barleyG$fit <- prd$fit
barleyG$lwr <- prd$fit - 1.96 * prd$se.fit
barleyG$upr <- prd$fit + 1.96 * prd$se.fit
barleyAG <- merge(barleyA, barleyG)
ggplot(data = barleyAG, aes(x = NF, y = yield)) +
facet_wrap(~year.f) +
geom_point() +
geom_line(aes(y = fit)) +
geom_line(aes(y = Estimate), linetype = 2) +
geom_ribbon(aes(ymin = Q2.5, ymax = Q97.5), fill = "blue", alpha = 0.3) +
geom_ribbon(aes(ymin = lwr, ymax = upr), fill = "white", alpha = 0.3)
f1 <- function(){
dat <- data.frame(x = rnorm(10), y = rnorm(10))
fm00 <- mgcv::gam(y ~ x, data = dat)
ans <- simulate_gam(fm00)
ans
}
res1 <- f1()
}
|
test_convert <- function(con, type, val) {
val_comp <- val
if (is.factor(val)) {
val_comp <- as.character(val)
}
q <- dbSendQuery(con, sprintf("SELECT CAST(? AS %s) a", type))
dbBind(q, list(val))
res1 <- dbFetch(q)
dbBind(q, list(NA))
res2 <- dbFetch(q)
dbClearResult(q)
expect_equal(res1[[1]][1], val_comp)
expect_true(is.na(res2[[1]][1]))
dbExecute(con, "DROP TABLE IF EXISTS bind_test")
dbExecute(con, sprintf("CREATE TEMPORARY TABLE bind_test(i INTEGER, a %s)", type))
q <- dbSendStatement(con, "INSERT INTO bind_test VALUES ($1, $2)")
dbBind(q, list(1, val))
dbBind(q, list(2, NA))
dbClearResult(q)
res3 <- dbGetQuery(con, "SELECT a FROM bind_test ORDER BY i")
dbExecute(con, "DROP TABLE bind_test")
expect_equal(res3[[1]][1], val_comp)
expect_true(is.na(res3[[1]][2]))
}
test_that("dbBind() works as expected for all types", {
con <- dbConnect(duckdb::duckdb())
on.exit(dbDisconnect(con, shutdown = TRUE))
test_convert(con, "BOOLEAN", TRUE)
test_convert(con, "BOOLEAN", FALSE)
test_convert(con, "INTEGER", 42L)
test_convert(con, "INTEGER", 42)
test_convert(con, "HUGEINT", 39218390)
test_convert(con, "DOUBLE", 42L)
test_convert(con, "DOUBLE", 42.2)
test_convert(con, "STRING", "Hello, World")
test_convert(con, "DATE", as.Date("2019-11-26"))
test_convert(con, "TIMESTAMP", as.POSIXct("2019-11-26 21:11Z", "UTC"))
})
test_that("dbBind() is called from dbGetQuery and dbExecute", {
con <- dbConnect(duckdb::duckdb())
on.exit(dbDisconnect(con, shutdown = TRUE))
res <- dbGetQuery(con, "SELECT CAST (? AS INTEGER), CAST(? AS STRING)", params = list(42, "Hello"))
expect_equal(res[[1]][1], 42L)
expect_equal(res[[2]][1], "Hello")
res <- dbGetQuery(con, "SELECT CAST (? AS INTEGER), CAST(? AS STRING)", params = list(42, "Hello"))
expect_equal(res[[1]][1], 42L)
expect_equal(res[[2]][1], "Hello")
q <- dbSendQuery(con, "SELECT CAST (? AS INTEGER), CAST(? AS STRING)", params = list(42, "Hello"))
res <- dbFetch(q)
expect_equal(res[[1]][1], 42L)
expect_equal(res[[2]][1], "Hello")
dbBind(q, list(43, "Holla"))
res <- dbFetch(q)
expect_equal(res[[1]][1], 43L)
expect_equal(res[[2]][1], "Holla")
dbClearResult(q)
})
test_that("test blobs", {
con <- dbConnect(duckdb::duckdb())
on.exit(dbDisconnect(con, shutdown = TRUE))
res <- dbGetQuery(con, "SELECT BLOB 'hello'")
expect_equal(res[[1]][[1]], charToRaw("hello"))
})
test_that("various error cases for dbBind()", {
con <- dbConnect(duckdb::duckdb())
on.exit(dbDisconnect(con, shutdown = TRUE))
q <- dbSendQuery(con, "SELECT CAST (? AS INTEGER)")
expect_error(dbFetch(q))
expect_error(dbBind(q, list()))
expect_error(dbBind(q, list(1, 2)))
expect_error(dbBind(q, list("asdf")))
expect_error(dbBind(q, list("asdf", "asdf")))
expect_error(dbBind(q))
expect_error(dbBind(q, "asdf"))
dbClearResult(q)
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", "asdf"))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", "asdf", "asdf"))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)"))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", list()))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", list(1, 2)))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", list("asdf")))
expect_error(dbGetQuery(con, "SELECT CAST (? AS INTEGER)", list("asdf", "asdf")))
q <- dbSendQuery(con, "SELECT CAST (42 AS INTEGER)")
res <- dbFetch(q)
expect_equal(res[[1]][1], 42L)
expect_error(dbBind(q, list()))
expect_error(dbBind(q, list(1)))
expect_error(dbBind(q, list("asdf")))
expect_error(dbBind(q))
expect_error(dbBind(q, 1))
expect_error(dbBind(q, "asdf"))
dbClearResult(q)
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", 1))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", 1, 2))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", "asdf"))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", "asdf", "asdf"))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", list(1)))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", list(1, 2)))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", list("asdf")))
expect_error(dbGetQuery(con, "SELECT CAST (42 AS INTEGER)", list("asdf", "asdf")))
})
|
fit.simulation<-function(model, PEmethod="ML", dataList="Data_List.dat", f.loc){
data.names<-read.table(paste(f.loc, "/", dataList,sep=""), header = FALSE)
fit.names<-c("rep
"baseline.chisq", "baseline.df", "baseline.pvalue", "cfi","tli","srmr", "rmsea", "rmsea.ci.lower",
"rmsea.ci.upper", "rmsea.pvalue", "logl","aic", "bic")
par.names1<-c("est","se","pvalue")
par.names2<-c("std.est","std.se","pvalue")
veri<-read.table(paste(f.loc,"/", data.names[1,],sep=""))
colnames(veri)<-c("ID", paste("x",seq(1:(dim(veri)[2]-1)),sep=""))
veri<-veri[,-1]
sonuc<-cfa(model,veri, estimator= PEmethod )
tum.sonuc<-matrix(NA,dim(data.names)[1],(length(fit.names)+(dim(parameterEstimates(sonuc))[1])*6))
for (i in 1:dim(data.names)[1]){
P.Est<-parameterEstimates(sonuc)
Sp.Est<-standardizedSolution(sonuc)
veri<-read.table(paste(f.loc,"/",data.names[i,], sep = ""))
colnames(veri)<-c("ID", paste("x",seq(1:(dim(veri)[2]-1)),sep=""))
veri<-veri[,-1]
sonuc<-cfa(model,veri, estimator =PEmethod)
tum.sonuc[i,1]<-i
if(lavTech(sonuc, "converged")==TRUE){
tum.sonuc[i,2:length(fit.names)]<-round(fitmeasures(sonuc)[fit.names[-1]],3)
for(k in 1:(dim(P.Est)[1])){
tum.sonuc[i,length(fit.names)+6*k-5]<-round(P.Est[k,"est"],3)
tum.sonuc[i,length(fit.names)+6*k-4]<-round(P.Est[k,"se"],3)
tum.sonuc[i,length(fit.names)+6*k-3]<-round(P.Est[k,"pvalue"],3)
tum.sonuc[i,length(fit.names)+6*k-2]<-round(Sp.Est[k,"est.std"],3)
tum.sonuc[i,length(fit.names)+6*k-1]<-round(Sp.Est[k,"se"],3)
tum.sonuc[i,length(fit.names)+6*k]<-round(Sp.Est[k,"pvalue"],3)
}
print(paste(round(100*i/dim(data.names)[1],2),"% has completed...", sep=""))
if(lavInspect(sonuc, "post.check")==TRUE){tum.sonuc[i,2]<-c("CONVERGED")}
if(lavInspect(sonuc, "post.check")==FALSE){tum.sonuc[i,2]<-c("WARNING")}
}
if(lavTech(sonuc, "converged")==FALSE){
for(k in 1:(dim(P.Est)[1])){
tum.sonuc[i,]<-NA
tum.sonuc[i,1]<-i
}
print(paste(round(100*i/dim(data.names)[1],2),"% completed...", sep=""))
tum.sonuc[i,2]<-c("NOT_CONVERGED")
}
}
print("All Done !!!")
colnames(tum.sonuc)<-c(paste("x",seq(1:(length(fit.names)+(dim(P.Est)[1])*6)),sep = ""))
colnames(tum.sonuc)[1:length(fit.names)]<-c(fit.names)
for(k in 1:(dim(P.Est)[1])){
eft<-paste(P.Est[k,c("lhs","op","rhs")],sep = "", collapse="")
colnames(tum.sonuc)[length(fit.names)+6*k-5]<-eft
colnames(tum.sonuc)[length(fit.names)+6*k-4]<-par.names1[2]
colnames(tum.sonuc)[length(fit.names)+6*k-3]<-par.names1[3]
colnames(tum.sonuc)[length(fit.names)+6*k-2]<-par.names2[1]
colnames(tum.sonuc)[length(fit.names)+6*k-1]<-par.names2[2]
colnames(tum.sonuc)[length(fit.names)+6*k]<-par.names2[3]
}
write.csv(tum.sonuc, file= paste(f.loc,"/All_Results.csv", sep = ""), row.names = FALSE)
}
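# Usage sketch (editorial assumption): `model` is a lavaan model string and
# the data-list file holds one replicate file name per line; the model and
# path below are hypothetical.
# model <- "f1 =~ x1 + x2 + x3 + x4"
# fit.simulation(model, PEmethod = "ML", dataList = "Data_List.dat",
#                f.loc = "C:/simulation/output")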
|
cog_desc <- function(x, desc = NULL) {
assert_scalar(x, na.ok = TRUE)
assert_character(desc, len = 1, any.missing = FALSE)
attr(x, "desc") <- desc
x
}
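# Usage sketch (editorial assumption): attach a human-readable description to
# a scalar cognostic value, e.g. inside a cognostics function.
# cog_desc(mean(iris$Sepal.Length), desc = "mean sepal length")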
|
predict.GauPro <- function(object, XX, se.fit=F, covmat=F, split_speed=T, ...) {
object$predict(XX=XX, se.fit=se.fit, covmat=covmat, split_speed=split_speed)
}
plot.GauPro <- function(x, ...) {
if (x$D == 1) {
x$cool1Dplot(...)
} else if (x$D == 2) {
x$plot2D(...)
} else {
stop("No plot method for higher than 2 dimension")
}
}
'+.GauPro_kernel' <- function(k1, k2) {
kernel_sum$new(k1=k1, k2=k2)
}
'*.GauPro_kernel' <- function(k1, k2) {
kernel_product$new(k1=k1, k2=k2)
}
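# Usage sketch (editorial assumption): with two GauPro kernel objects the
# overloaded operators build composite kernels. The constructor calls below
# are assumptions about the package's kernel classes.
# k1 <- GauPro::Gaussian$new(D = 1)   # constructor signature is an assumption
# k2 <- GauPro::Matern52$new(D = 1)
# ksum  <- k1 + k2   # dispatches to kernel_sum$new(k1, k2)
# kprod <- k1 * k2   # dispatches to kernel_product$new(k1, k2)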
|
counthaplotype <- function(seq){
n <- dim(seq)[1]
m <- dim(seq)[2]
if(n==1){
numHap<-1
sizHap<-1
seqHap <-seq
return(list(numHap=numHap,sizHap=sizHap,seqHap=seqHap))
}
seq <- sortmatrix(seq)
seq <- as.matrix(seq)
seqHap<-seq[1, ]
curseq<-seq[1, ]
sizHap<-matrix(0,1,n)
numHap <-1
for(i in 1:n){
if(sum(seq[i,]==curseq)!=m){
seqHap <-rbind(seqHap,seq[i,])
numHap <- numHap+1
curseq <- seq[i,]
}
sizHap[numHap]<-sizHap[numHap]+1
}
sizHap<-sizHap[1:numHap]
v <- sort(-sizHap,index.return=TRUE)
sizHap <- v$x
sizHap <- -sizHap
idx <- v$ix
seqHap <- as.matrix(seqHap)
seqHap <- seqHap[idx, ]
seqHap <- as.matrix(seqHap)
return(list(numHap=numHap,sizHap=sizHap,seqHap=seqHap))
}
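# Usage sketch (editorial assumption): `seq` is a matrix with one sequence
# per row; counthaplotype() relies on a helper sortmatrix() defined elsewhere
# in this package to group identical rows before counting.
# h <- counthaplotype(rbind(c(0, 1), c(0, 1), c(1, 1)))
# h$numHap  # 2 distinct haplotypes
# h$sizHap  # sizes sorted decreasingly: 2 1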
|
NULL
run_imagefluency <- function() {
appDir <- system.file("imagefluencyApp", package = "imagefluency")
if (appDir == "") {
stop("Could not find shiny app directory. Try re-installing `imagefluency`.", call. = FALSE)
}
if (requireNamespace("shiny", quietly = TRUE)) {
shiny::runApp(appDir, display.mode = "normal")
} else {
stop("Package 'shiny' is required but not installed on your system.", call. = FALSE)
}
}
|
mm <- function(ob, min.pr, max.pr){
res <- ifelse(ob > max.pr, max.pr, ob)
res <- ifelse(res < min.pr, min.pr, res)
res
}
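# Usage sketch (editorial): mm() clamps values into [min.pr, max.pr], e.g. to
# keep predicted probabilities away from exact 0 and 1.
# mm(c(-0.2, 0.5, 1.4), min.pr = 1e-6, max.pr = 1 - 1e-6)
# # -> 1e-06 0.5 0.999999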
|
interval_score <- function(true_values,
lower,
upper,
interval_range,
weigh = TRUE,
separate_results = FALSE) {
present <- c(methods::hasArg("true_values"), methods::hasArg("lower"),
methods::hasArg("upper"), methods::hasArg("interval_range"))
if (!all(present)) {
stop("need all arguments 'true_values', 'lower', 'upper' and 'interval_range' in function 'interval_score()'")
}
check_not_null(true_values = true_values, lower = lower, upper = upper,
interval_range = interval_range)
check_equal_length(true_values, lower, interval_range, upper)
alpha <- (100 - interval_range) / 100
sharpness <- (upper - lower)
overprediction <- 2/alpha * (lower - true_values) * (true_values < lower)
underprediction <- 2/alpha * (true_values - upper) * (true_values > upper)
if (weigh) {
sharpness <- sharpness * alpha / 2
underprediction <- underprediction * alpha / 2
overprediction <- overprediction * alpha / 2
}
score <- sharpness + underprediction + overprediction
if (separate_results) {
return(list(interval_score = score,
sharpness = sharpness,
underprediction = underprediction,
overprediction = overprediction))
} else {
return(score)
}
}
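# Usage sketch (editorial): score two observations against a single 90%
# interval; with weigh = TRUE each component is multiplied by alpha/2.
# interval_score(true_values = c(1, 5), lower = c(0, 0), upper = c(2, 4),
#                interval_range = 90)
# # first value lies inside the interval (sharpness only), the second lies
# # above it and also incurs an underprediction penalty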
|