# Resolver for SQL-backed resources: a resource is handled by this class when
# a DBI connector can be found for it and no file format is set.
SQLResourceResolver <- R6::R6Class(
  "SQLResourceResolver",
  inherit = ResourceResolver,
  public = list(
    isFor = function(x) {
      if (super$isFor(x)) {
        !is.null(findDBIResourceConnector(x)) && is.null(x$format)
      } else {
        FALSE
      }
    },
    newClient = function(x) {
      if (self$isFor(x)) {
        SQLResourceClient$new(x)
      } else {
        NULL
      }
    }
  )
)
nhl_url_standings <- function(
  seasons = NULL,
  standingsTypes = NULL,
  expand = NULL
) {
  params <- list(
    season = seasons,
    standingsType = standingsTypes,
    expand = expand
  )
  nhl_url(endPoint = "standings", params = params)
}

nhl_standings <- function(
  seasons = NULL,
  standingsTypes = NULL,
  expand = NULL
) {
  x <- nhl_url_standings(
    seasons = seasons,
    standingsTypes = standingsTypes,
    expand = expand
  )
  x <- nhl_get_data(x)
  x <- util_remove_get_data_errors(x)
  x <- nhl_process_results(x, elName = "records")
  x
}
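
# Usage sketch (hypothetical values; hits the live NHL API via nhl_get_data(),
# so it is left commented out here):
# nhl_standings(seasons = 2019, expand = "standings.record")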
NNS.TSD <- function(x, y, plot = TRUE){
  # Coerce tibbles to plain vectors
  if(any(class(x) == "tbl")) x <- as.vector(unlist(x))
  if(any(class(y) == "tbl")) y <- as.vector(unlist(y))
  Combined_sort <- sort(c(x, y), decreasing = FALSE)
  # Lower partial moments of each sample evaluated over the combined support
  LPM_x_sort <- LPM(1, Combined_sort, x)
  LPM_y_sort <- LPM(1, Combined_sort, y)
  x.tsd.y <- any(LPM_x_sort > LPM_y_sort)
  y.tsd.x <- any(LPM_y_sort > LPM_x_sort)
  # Honour the `plot` argument (it was previously accepted but ignored)
  if(plot){
    plot(LPM_x_sort, type = "l", lwd = 3, col = "red", main = "TSD",
         ylab = "Area of Cumulative Distribution",
         ylim = c(min(c(LPM_y_sort, LPM_x_sort)), max(c(LPM_y_sort, LPM_x_sort))))
    lines(LPM_y_sort, type = "l", lwd = 3, col = "blue")
    legend("topleft", c("X", "Y"), lwd = 10, col = c("red", "blue"))
  }
  # TSD verdict: no crossing of the LPM curves, plus conditions on minima and means
  ifelse(!x.tsd.y && min(x) >= min(y) && mean(x) >= mean(y) && !identical(LPM_x_sort, LPM_y_sort),
         "X TSD Y",
         ifelse(!y.tsd.x && min(y) >= min(x) && mean(y) >= mean(x) && !identical(LPM_x_sort, LPM_y_sort),
                "Y TSD X",
                "NO TSD EXISTS"))
}
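
# Minimal sketch on simulated data (assumes LPM() from the NNS package, as
# used inside NNS.TSD() above):
set.seed(123)
x <- rnorm(100)
y <- rnorm(100, mean = -0.5)
NNS.TSD(x, y)  # returns "X TSD Y", "Y TSD X", or "NO TSD EXISTS"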
LASheader <- function(data = list()) { return(new("LASheader", data)) }

as.list.LASheader <- function(x, ...)
{
  PHB <- x@PHB
  VLR <- list(`Variable Length Records` = x@VLR)
  EVLR <- list(`Extended Variable Length Records` = x@EVLR)
  return(c(PHB, VLR, EVLR))
}

# `$` and `[[` look a name up in the PHB, then the VLR, then the EVLR section.
setMethod("$", "LASheader", function(x, name) { return(x[[name]]) })
setMethod("$<-", "LASheader", function(x, name, value) { x[[name]] <- value; return(x) })
setMethod("[[", c("LASheader", "ANY", "missing"), function(x, i, j, ...) {
assert_is_a_string(i)
if (i %in% names(x@PHB))
return(x@PHB[[i]])
if (i %in% names(x@VLR))
return(x@VLR[[i]])
if (i %in% names(x@EVLR))
return(x@EVLR[[i]])
return(NULL)
})
setMethod("[[<-", c("LASheader", "character", "missing"), function(x, i, value) {
assert_is_a_string(i)
if (i %in% names(x@PHB))
x@PHB[[i]] <- value
if (i %in% names(x@VLR))
x@VLR[[i]] <- value
if (i %in% names(x@EVLR))
x@EVLR[[i]] <- value
return(x)
})
GKtau <- function(x, y, dgts = 3, includeNA = "ifany"){
  xName <- deparse(substitute(x))
  yName <- deparse(substitute(y))
  # Joint frequency table and cell probabilities
  Nij <- table(as.character(x), as.character(y), useNA = includeNA)
  PIij <- Nij/sum(Nij)
  PIiPlus <- rowSums(PIij)
  PIPlusj <- colSums(PIij)
  # Marginal Gini variations of x and y
  vx <- 1 - sum(PIiPlus^2)
  vy <- 1 - sum(PIPlusj^2)
  # Expected conditional variations
  xyTerm <- apply(PIij^2, MARGIN = 1, sum)
  vyBarx <- 1 - sum(xyTerm/PIiPlus)
  yxTerm <- apply(PIij^2, MARGIN = 2, sum)
  vxBary <- 1 - sum(yxTerm/PIPlusj)
  # Goodman-Kruskal tau in both directions (the measure is asymmetric)
  tauxy <- (vy - vyBarx)/vy
  tauyx <- (vx - vxBary)/vx
  sumFrame <- data.frame(xName = xName, yName = yName,
                         Nx = nrow(Nij), Ny = ncol(Nij),
                         tauxy = round(tauxy, digits = dgts),
                         tauyx = round(tauyx, digits = dgts),
                         stringsAsFactors = FALSE)
  return(sumFrame)
}
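
# Example on a built-in data set: Goodman-Kruskal tau in both directions for
# two categorical variables from mtcars:
GKtau(mtcars$cyl, mtcars$gear)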
setGeneric("kmmd",function(x,...) standardGeneric("kmmd"))
setMethod("kmmd", signature(x = "matrix"),
function(x, y, kernel="rbfdot",kpar="automatic", alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...)
{
x <- as.matrix(x)
y <- as.matrix(y)
res <- new("kmmd")
if(is.character(kernel)){
kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix"))
    ## for a precomputed kernel matrix, pass the user's settings through
    ## rather than hard-coded defaults
    if(kernel == "matrix")
      if(dim(x)[1]==dim(x)[2])
        return(kmmd(x = as.kernelMatrix(x), y = y, Kxy = as.kernelMatrix(x) %*% y,
                    alpha = alpha, asymptotic = asymptotic, replace = replace,
                    ntimes = ntimes, frac = frac, ...))
      else
        stop("kernel matrix not square!")
if(is.character(kpar))
if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" )
{
cat (" Setting default kernel parameters ","\n")
kpar <- list()
}
}
if (!is.function(kernel))
if (!is.list(kpar)&&is.character(kpar)&&(kernel == "laplacedot"|| kernel=="rbfdot")){
kp <- match.arg(kpar,"automatic")
if(kp=="automatic")
kpar <- list(sigma=sigest(rbind(x,y),scaled=FALSE)[2])
cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n")
}
if(!is(kernel,"kernel"))
{
if(is(kernel,"function")) kernel <- deparse(substitute(kernel))
kernel <- do.call(kernel, kpar)
}
if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'")
m <- dim(x)[1]
n <- dim(y)[1]
N <- max(m,n)
M <- min(m,n)
Kxx <- kernelMatrix(kernel,x)
Kyy <- kernelMatrix(kernel,y)
Kxy <- kernelMatrix(kernel,x,y)
resmmd <- .submmd(Kxx, Kyy, Kxy, alpha)
H0(res) <- (resmmd$mmd1 > resmmd$D1)
Radbound(res) <- resmmd$D1
Asymbound(res) <- 0
mmdstats(res)[1] <- resmmd$mmd1
mmdstats(res)[2] <- resmmd$mmd3
if(asymptotic){
boundA <- .submmd3bound(Kxx, Kyy, Kxy, alpha, frac, ntimes, replace)
AsympH0(res) <- (resmmd$mmd3 > boundA)
Asymbound(res) <- boundA
}
kernelf(res) <- kernel
return(res)
})
setMethod("kmmd",signature(x="list"),
function(x, y, kernel="stringdot",kpar=list(type="spectrum",length=4), alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...)
{
if(!is(kernel,"kernel"))
{
if(is(kernel,"function")) kernel <- deparse(substitute(kernel))
kernel <- do.call(kernel, kpar)
}
if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'")
Kxx <- kernelMatrix(kernel,x)
Kyy <- kernelMatrix(kernel,y)
Kxy <- kernelMatrix(kernel,x,y)
ret <- kmmd(x=Kxx,y = Kyy,Kxy=Kxy, alpha=alpha, asymptotic= asymptotic, replace = replace, ntimes = ntimes, frac= frac)
kernelf(ret) <- kernel
return(ret)
})
setMethod("kmmd",signature(x="kernelMatrix"), function (x, y, Kxy, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...)
{
res <- new("kmmd")
resmmd <- .submmd(x, y, Kxy, alpha)
H0(res) <- (resmmd$mmd1 > resmmd$D1)
Radbound(res) <- resmmd$D1
Asymbound(res) <- 0
mmdstats(res)[1] <- resmmd$mmd1
mmdstats(res)[2] <- resmmd$mmd3
if(asymptotic){
boundA <- .submmd3bound(x, y, Kxy, alpha, frac, ntimes, replace)
        ## the bootstrap bound is computed for the third-order statistic
        AsympH0(res) <- (resmmd$mmd3 > boundA)
Asymbound(res) <- boundA
}
kernelf(res) <- " Kernel matrix used as input."
return(res)
})
.submmd <- function(Kxx, Kyy, Kxy, alpha)
{
## first- and third-order MMD statistics and a Rademacher-type deviation bound
m <- dim(Kxx)[1]
n <- dim(Kyy)[1]
N <- max(m,n)
M <- min(m,n)
sumKxx <- sum(Kxx)
if(m!=n)
sumKxxM <- sum(Kxx[1:M,1:M])
else
sumKxxM <- sumKxx
dgxx <- diag(Kxx)
sumKxxnd <- sumKxx - sum(dgxx)
R <- max(dgxx)
RM <- max(dgxx[1:M])
hu <- colSums(Kxx[1:M,1:M]) - dgxx[1:M]
sumKyy <- sum(Kyy)
if(m!=n)
sumKyyM <- sum(Kyy[1:M,1:M])
else
sumKyyM <- sumKyy
dgyy <- diag(Kyy)
sumKyynd <- sum(Kyy) - sum(dgyy)
R <- max(R,dgyy)
RM <- max(RM,dgyy[1:M])
hu <- hu + colSums(Kyy[1:M,1:M]) - dgyy[1:M]
sumKxy <- sum(Kxy)
if (m!=n)
sumKxyM <- sum(Kxy[1:M,1:M])
else
sumKxyM <- sumKxy
dg <- diag(Kxy)
hu <- hu - colSums(Kxy[1:M,1:M]) - colSums(t(Kxy[1:M,1:M])) + 2*dg
mmd1 <- sqrt(max(0,sumKxx/(m*m) + sumKyy/(n*n) - 2/m/n* sumKxy))
mmd3 <- sum(hu)/M/(M-1)
D1 <- 2*sqrt(RM/M)+sqrt(log(1/alpha)*4*RM/M)
return(list(mmd1=mmd1,mmd3=mmd3,D1=D1))
}
.submmd3bound <- function(Kxx,Kyy, Kxy, alpha, frac, ntimes, replace)
{
m <- dim(Kxx)[1]
n <- dim(Kyy)[1]
M <- min(m,n)
N <- max(m,n)
poslabels <- 1:m
neglabels <- (m+1):(m+n)
bootmmd3 <- rep(0,ntimes)
for (i in 1:ntimes)
{
nsamples <- ceiling(frac*min(m,n))
xinds <- sample(1:m,nsamples,replace=replace)
yinds <- sample(1:n,nsamples,replace=replace)
newlab <- c(poslabels[xinds],neglabels[yinds])
samplenew <- sample(newlab, length(newlab), replace=FALSE)
xinds <- samplenew[1:nsamples]
yinds <- samplenew[(nsamples+1):length(samplenew)]
newm <- length(xinds)
newn <- length(yinds)
newM <- min(newm,newn)
xind1 <- xinds[xinds<=m]
xind2 <- xinds[xinds>m]- m
yind1 <- yinds[yinds<=m]
yind2 <- yinds[yinds>m]-m
nKxx <- rbind(cbind(Kxx[xind1,xind1],Kxy[xind1,xind2]), cbind(t(Kxy[xind1,xind2]),Kyy[xind2,xind2]))
dgxx <- diag(nKxx)
hu <- colSums(nKxx[1:newM,1:newM]) - dgxx[1:newM]
rm(nKxx)
nKyy <- rbind(cbind(Kxx[yind1,yind1],Kxy[yind1,yind2]), cbind(t(Kxy[yind1,yind2]), Kyy[yind2,yind2]))
dgyy <- diag(nKyy)
hu <- hu + colSums(nKyy[1:newM,1:newM]) - dgyy[1:newM]
rm(nKyy)
nKxy <- rbind(cbind(Kxx[yind1,xind1],Kxy[yind1,xind2]), cbind(t(Kxy[xind1,yind2]),Kyy[yind2,xind2]))
dg <- diag(nKxy)
hu <- hu - colSums(nKxy[1:newM,1:newM]) - colSums(t(nKxy[1:newM,1:newM])) + 2*dg
rm(nKxy)
bootmmd3[i] <- sum(hu)/newM/(newM-1)
}
bootmmd3 <- sort(bootmmd3, decreasing=TRUE);
aind <- floor(alpha*ntimes)
bound <- sum(bootmmd3[c(aind,aind+1)])/2;
return(bound)
}
setMethod("show","kmmd",
function(object){
cat("Kernel Maximum Mean Discrepancy object of class \"kmmd\"","\n","\n")
show(kernelf(object))
if(is.logical(object@H0)){
cat("\n")
cat("\n","H0 Hypothesis rejected : ", paste(H0(object)))
cat("\n","Rademacher bound : ", paste(Radbound(object)))
}
cat("\n")
if(Asymbound(object)!=0){
cat("\n","H0 Hypothesis rejected (based on Asymptotic bound): ", paste(AsympH0(object)))
cat("\n","Asymptotic bound : ", paste(Asymbound(object)))
}
cat("\n","1st and 3rd order MMD Statistics : ", paste( mmdstats(object)))
cat("\n")
})
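
# Two-sample test sketch (assumes kernlab's kernelMatrix(), sigest(), and the
# accessor generics used above are available):
set.seed(1)
x <- matrix(rnorm(300), ncol = 3)            # sample from N(0, 1)
y <- matrix(rnorm(300, mean = 1), ncol = 3)  # mean-shifted sample
kmmd(x, y, kernel = "rbfdot", kpar = "automatic")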
new_range_spec <- function(...) {
l <- list2(...)
structure(
list(
sheet_name = l$sheet_name %||% NULL,
named_range = l$named_range %||% NULL,
cell_range = l$cell_range %||% NULL,
cell_limits = l$cell_limits %||% NULL,
shim = FALSE,
sheets_df = l$sheets_df %||% NULL,
nr_df = l$nr_df %||% NULL
),
class = "range_spec"
)
}
as_range_spec <- function(x, ...) {
UseMethod("as_range_spec")
}
as_range_spec.default <- function(x, ...) {
gs4_abort(c(
"Can't make a range suitable for the Sheets API from the supplied \\
{.arg range}.",
x = "{.arg range} has class {.cls {class(x)}}.",
i = "{.arg range} must be {.code NULL}, a string, or \\
a {.cls cell_limits} object."
))
}
as_range_spec.character <- function(x,
...,
sheet = NULL,
skip = 0,
sheets_df = NULL,
nr_df = NULL) {
check_length_one(x)
out <- new_range_spec(
sheets_df = sheets_df, nr_df = nr_df,
.input = list(
sheet = sheet, range = x, skip = skip
)
)
m <- rematch2::re_match(x, compound_rx)
if (notNA(m[[".match"]])) {
out$sheet_name <- lookup_sheet_name(m$sheet, sheets_df)
out$cell_range <- m$cell_range
out$shim <- TRUE
return(out)
}
m <- match(x, nr_df$name)
if (notNA(m)) {
out$named_range <- x
return(out)
}
m <- match(x, sheets_df$name)
if (notNA(m)) {
return(as_range_spec(NULL, sheet = x, skip = skip, sheets_df = sheets_df))
}
m <- grepl(A1_rx, strsplit(x, split = ":")[[1]])
if (!all(m)) {
gs4_abort(c(
"{.arg range} doesn't appear to be a range in A1 notation, a named \\
range, or a sheet name:",
x = "{.range {x}}"
))
}
out$cell_range <- x
if (!is.null(sheet)) {
out$sheet_name <- lookup_sheet_name(sheet, sheets_df)
}
out$shim <- TRUE
out
}
as_range_spec.NULL <- function(x,
...,
sheet = NULL,
skip = 0,
sheets_df = NULL) {
out <- new_range_spec(
sheets_df = sheets_df,
.input = list(sheet = sheet, skip = skip)
)
if (skip < 1) {
if (!is.null(sheet)) {
out$sheet_name <- lookup_sheet_name(sheet, sheets_df)
}
return(out)
}
as_range_spec(
cell_rows(c(skip + 1, NA)),
sheet = sheet, sheets_df = sheets_df,
shim = FALSE
)
}
as_range_spec.cell_limits <- function(x,
...,
shim = TRUE,
sheet = NULL,
sheets_df = NULL) {
out <- new_range_spec(
sheets_df = sheets_df,
.input = list(sheet = sheet, range = x, shim = shim)
)
out$cell_limits <- x
if (!is.null(sheet)) {
out$sheet_name <- lookup_sheet_name(sheet, sheets_df)
}
out$shim <- shim
out
}
format.range_spec <- function(x, ...) {
is_df <- names(x) %in% c("sheets_df", "nr_df")
x[is_df & !map_lgl(x, is.null)] <- "<provided>"
glue("{fr(names(x))}: {x}")
}
print.range_spec <- function(x, ...) {
cat(format(x), sep = "\n")
invisible(x)
}
as_A1_range <- function(x) {
stopifnot(inherits(x, "range_spec"))
if (!is.null(x$named_range)) {
return(x$named_range)
}
if (!is.null(x$cell_limits)) {
x$cell_range <- as_sheets_range(x$cell_limits)
}
qualified_A1(x$sheet_name, x$cell_range)
}
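
# Construction sketch (assumes googlesheets4 internals such as qualified_A1()
# and rlang's list2() are available, as used above):
spec <- new_range_spec(sheet_name = "Sheet1", cell_range = "A1:B10")
as_A1_range(spec)  # a qualified A1 range, e.g. "Sheet1!A1:B10"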
sw_depth <- function (p = P - 1.013253, P = 1.013253, lat = 0) {
  P <- p * 10  # convert pressure from bar to decibar
  denom <- gravity(lat) + 1.092e-6 * P
  # UNESCO polynomial for depth as a function of pressure
  num <- (9.72659 + (-2.2512e-5 + (2.279e-10 - 1.82e-15 * P) * P) * P) * P
  return(num / denom)
}
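
# Worked example: depth in metres for a pressure of 100 bar at 30 degrees
# latitude (assumes gravity() from the same package is available):
sw_depth(p = 100, lat = 30)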
context("contradicted")
test_that("is_contradicted_by works", {
rules <- validator( r1 = x > 1
, r2 = x < 0
, r3 = x > 2
)
rules_cd <- is_contradicted_by(rules, "r2")
expect_equal(rules_cd, c("r1", "r3"))
})
test_that("is_contradicted_by works for non-contradicting rule", {
rules <- validator( r1 = x > 1
, r2 = y > 2
)
rules_cd <- is_contradicted_by(rules, "r2")
expect_equal(rules_cd, character())
})
test_that("is_contradicted_by works for multiple rules (IIS)", {
rules <- validator( r1 = x > 0
, r2 = y > 0
, r3 = x + y == -1
)
rules_cd <- is_contradicted_by(rules,"r3")
expect_equal(rules_cd, c("r2","r1"))
})
test_that("is_contradicted_by works for empty rule", {
rules <- validator( r1 = x > 1
, r2 = y > 2
)
rules_cd <- is_contradicted_by(rules, NULL)
expect_equal(rules_cd, character())
})
test_that("is_contradicted_by works on wrong rule", {
rules <- validator( r1 = x > 1
, r2 = y > 2
)
expect_warning(rules_cd <- is_contradicted_by(rules, "r3"))
expect_equal(rules_cd, character())
})
test_that("margin type: csr, portrait", {
expect_equal(
set_margin("csr", "portrait"),
c(1.25, 1.00, 1.50, 1.00, 0.50, 0.50)
)
})
test_that("margin type: csr, landscape", {
expect_equal(set_margin("csr", "landscape"),
c(0.500000, 0.500000, 1.279861, 1.250000, 1.250000, 1.000000),
tolerance = 0.00001
)
})
test_that("margin type: wma, portrait", {
expect_equal(
set_margin("wma", "portrait"),
c(1.25000, 1.00000, 1.75000, 1.25000, 1.75000, 1.00625)
)
})
test_that("margin type: wma, landscape", {
expect_equal(
set_margin("wma", "landscape"),
c(1.00, 1.00, 2.00, 1.25, 1.25, 1.25)
)
})
test_that("margin type: wmm, portrait", {
expect_equal(
set_margin("wmm", "portrait"),
c(1.25000, 1.00000, 1.00000, 1.00000, 1.75000, 1.00625)
)
})
test_that("margin type: wmm, landscape", {
expect_equal(
set_margin("wmm", "landscape"),
c(0.50, 0.50, 1.25, 1.00, 1.25, 1.25)
)
})
test_that("margin type: narrow, portrait", {
expect_equal(
set_margin("narrow", "portrait"),
c(0.5, 0.5, 0.5, 0.5, 0.5, 0.5)
)
})
test_that("margin type: narrow, landscape", {
expect_equal(
set_margin("narrow", "landscape"),
c(0.5, 0.5, 0.5, 0.5, 0.5, 0.5)
)
})
test_that("doctype not in value list", {
expect_error(set_margin("csR", "landscape"))
})
test_that("orientation not in value list", {
expect_error(set_margin("csr", "landscapes"))
})
common.shared <- function (id, ...)
{
  UseMethod("common.shared")
}
common.shared.pedigreeList <- function (id, ...)
{
  # Build a block-diagonal matrix over families: within a family every pair
  # shares the common environment (value 1), across families the value is 0.
  famlist <- unique(id$famid)
  nfam <- length(famlist)
  matlist <- vector("list", nfam)
  for (i in seq_along(famlist)) {
    family <- id[i]
    temp <- common.shared(family)
    matlist[[i]] <- as(Matrix::forceSymmetric(temp), "dsCMatrix")
  }
  result <- Matrix::bdiag(matlist)
  if (any(duplicated(id$id))) {
    # ids repeat across families: qualify the labels as "famid/id"
    dimnames(result) <- list(NULL, paste(id$famid, id$id, sep = "/"))
  } else {
    dimnames(result) <- list(id$id, id$id)
  }
  result
}
common.shared.pedigree <- function (id, ...)
{
  n <- length(id$id)
  if (n == 1)
    return(matrix(1, 1, 1, dimnames = list(id$id, id$id)))
  if (any(duplicated(id$id)))
    stop("All id values must be unique")
  temp <- matrix(1, n, n)
  dimnames(temp) <- list(id$id, id$id)
  temp
}
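
# Illustration of the single-pedigree method, bypassing S3 dispatch with a
# minimal list standing in for a pedigree object (real use passes a kinship2
# pedigree):
common.shared.pedigree(list(id = c("A", "B", "C")))  # 3 x 3 matrix of ones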
# Clean the FiveThirtyEight flying-etiquette survey: read the raw csv, rename
# the columns, then recode each question into ordered factors.
library(tidyverse)
library(forcats)
library(curl)
fly_ett <- read_csv(curl("https://raw.githubusercontent.com/fivethirtyeight/data/master/flying-etiquette-survey/flying-etiquette.csv"))
fly <- fly_ett
names(fly) <- c("id", "flight_freq", "do_you_recline", "height", "has_child_under_18",
"three_seats_two_arms", "two_seats_one_arm", "window_shade",
"rude_to_move_to_unsold_seat", "rude_to_talk_to_neighbor",
"six_hr_flight_leave_seat", "reclining_obligation_to_behind",
"rude_to_recline", "eliminate_reclining", "rude_to_switch_seats_friends",
"rude_to_switch_seats_family", "rude_to_wake_neighbor_bathroom",
"rude_to_wake_neighbor_walk", "rude_to_bring_baby",
"rude_to_bring_unruly_child", "use_electronics_takeoff",
"smoked_inflight", "gender", "age", "household_income", "education", "region")
fly <- fly %>%
mutate_if(is.character, as.factor)
fly %>% group_by(flight_freq) %>% tally()
fly <- fly %>%
mutate(flight_freq = fct_collapse(flight_freq, `more than once a month` = c("Every day", "A few times per week", "A few times per month"),
`once a month or less` = c("Once a month or less"),
`once a year or less` = c("Once a year or less"),
`never` = c("Never"))) %>%
mutate(flight_freq = fct_relevel(flight_freq, rev(c("more than once a month", "once a month or less", "once a year or less", "never"))))
fly %>% group_by(do_you_recline) %>% tally()
fly <- fly %>%
mutate(do_you_recline = tolower(do_you_recline)) %>%
mutate(do_you_recline = fct_relevel(do_you_recline, rev(c("always", "usually", "about half the time", "once in a while", "never"))))
fly %>% group_by(height) %>% tally() %>% arrange(n)
fly <- fly %>%
mutate(height = tolower(height)) %>%
mutate(height = fct_relevel(height, c("under 5 ft.", "5'0\"", "5'1\"", "5'2\"", "5'3\"", "5'4\"", "5'5\"",
"5'6\"", "5'7\"", "5'8\"", "5'9\"", "5'10\"", "5'11\"",
"6'0\"", "6'1\"", "6'2\"", "6'3\"", "6'4\"", "6'5\"", "6'6\" and above")))
fly %>% group_by(has_child_under_18) %>% tally()
fly <- fly %>%
mutate(has_child_under_18 = tolower(has_child_under_18)) %>%
mutate(has_child_under_18 = fct_relevel(has_child_under_18, c("no")))
levels(fly$has_child_under_18)
fly %>% group_by(three_seats_two_arms) %>% tally()
fly <- fly %>%
mutate(three_seats_two_arms =fct_recode(three_seats_two_arms, shared = "The arm rests should be shared", middle_seat = "The person in the middle seat gets both arm rests",
window_and_aisle = "The people in the aisle and window seats get both arm rests", first_to_use_it = "Whoever puts their arm on the arm rest first",
other = "Other (please specify)" )) %>%
mutate(three_seats_two_arms =fct_relevel(three_seats_two_arms, c("shared", "middle_seat", "window_and_aisle", "first_to_use_it", "other")))
levels(fly$three_seats_two_arms)
fly %>% group_by(two_seats_one_arm) %>% tally()
fly <- fly %>%
mutate(two_seats_one_arm =fct_recode(two_seats_one_arm, shared = "The arm rests should be shared", window_seat = "The person by the window",
aisle_seat = "The person in aisle", first_to_use_it = "Whoever puts their arm on the arm rest first",
other = "Other (please specify)" )) %>%
mutate(two_seats_one_arm =fct_relevel(two_seats_one_arm, c("shared", "window_seat", "aisle_seat", "first_to_use_it", "other")))
levels(fly$two_seats_one_arm)
fly %>% group_by(window_shade) %>% tally()
fly <- fly %>%
mutate(window_shade =fct_recode(window_shade, everyone = "Everyone in the row should have some say", window = "The person in the window seat should have exclusive control")) %>%
mutate(window_shade =fct_relevel(window_shade, c("everyone")))
levels(fly$window_shade)
fly %>% group_by(rude_to_move_to_unsold_seat) %>% tally()
fly <- fly %>%
mutate(rude_to_move_to_unsold_seat =fct_recode(rude_to_move_to_unsold_seat, no = "No, not rude at all", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_move_to_unsold_seat =fct_relevel(rude_to_move_to_unsold_seat, c("no", "somewhat")))
levels(fly$rude_to_move_to_unsold_seat)
fly %>% group_by(rude_to_talk_to_neighbor) %>% tally()
fly <- fly %>%
mutate(rude_to_talk_to_neighbor =fct_recode(rude_to_talk_to_neighbor, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_talk_to_neighbor =fct_relevel(rude_to_talk_to_neighbor, c("no", "somewhat")))
levels(fly$rude_to_talk_to_neighbor)
fly %>% group_by(six_hr_flight_leave_seat) %>% tally()
fly <- fly %>%
mutate(six_hr_flight_leave_seat =fct_recode(six_hr_flight_leave_seat, not_okay = "It is not okay to get up during flight", four_or_more = "More than five times times",
once = "Once", twice = "Twice", three_times = "Three times", four_or_more = "Four times")) %>%
mutate(six_hr_flight_leave_seat =fct_relevel(six_hr_flight_leave_seat, c("not_okay", "once", "twice", "three_times", "four_or_more")))
levels(fly$six_hr_flight_leave_seat)
fly %>% group_by(reclining_obligation_to_behind) %>% tally()
fly <- fly %>%
mutate(reclining_obligation_to_behind = fct_recode(reclining_obligation_to_behind, no = "No, the person on the flight has no obligation to the person behind them",
yes = "Yes, they should not recline their chair if the person behind them asks them not to")) %>%
mutate(reclining_obligation_to_behind = fct_relevel(reclining_obligation_to_behind, c("no")))
levels(fly$reclining_obligation_to_behind)
fly %>% group_by(rude_to_recline) %>% tally()
fly <- fly %>%
mutate(rude_to_recline = fct_recode(rude_to_recline, no = "No, not rude at all", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_recline = fct_relevel(rude_to_recline, c("no", "somewhat")))
levels(fly$rude_to_recline)
fly %>% group_by(eliminate_reclining) %>% tally()
fly <- fly %>%
mutate(eliminate_reclining = fct_recode(eliminate_reclining, no = "No", yes = "Yes")) %>%
mutate(eliminate_reclining = fct_relevel(eliminate_reclining, "no"))
levels(fly$eliminate_reclining)
fly %>% group_by(rude_to_switch_seats_friends) %>% tally()
fly <- fly %>%
mutate(rude_to_switch_seats_friends = fct_recode(rude_to_switch_seats_friends, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_switch_seats_friends = fct_relevel(rude_to_switch_seats_friends, c("no", "somewhat")))
levels(fly$rude_to_switch_seats_friends)
fly %>% group_by(rude_to_switch_seats_family) %>% tally()
fly <- fly %>%
mutate(rude_to_switch_seats_family = fct_recode(rude_to_switch_seats_family, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_switch_seats_family = fct_relevel(rude_to_switch_seats_family, c("no", "somewhat")))
levels(fly$rude_to_switch_seats_family)
fly %>% group_by(rude_to_wake_neighbor_bathroom) %>% tally()
fly <- fly %>%
mutate(rude_to_wake_neighbor_bathroom = fct_recode(rude_to_wake_neighbor_bathroom, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_wake_neighbor_bathroom = fct_relevel(rude_to_wake_neighbor_bathroom, c("no", "somewhat")))
fly %>% group_by(rude_to_wake_neighbor_walk) %>% tally()
fly <- fly %>%
mutate(rude_to_wake_neighbor_walk = fct_recode(rude_to_wake_neighbor_walk, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_wake_neighbor_walk = fct_relevel(rude_to_wake_neighbor_walk, c("no", "somewhat")))
fly %>% group_by(rude_to_bring_baby) %>% tally()
fly <- fly %>%
mutate(rude_to_bring_baby = fct_recode(rude_to_bring_baby, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_bring_baby = fct_relevel(rude_to_bring_baby, c("no", "somewhat")))
fly %>% group_by(rude_to_bring_unruly_child) %>% tally()
fly <- fly %>%
mutate(rude_to_bring_unruly_child = fct_recode(rude_to_bring_unruly_child, no = "No, not at all rude", somewhat = "Yes, somewhat rude", yes = "Yes, very rude")) %>%
mutate(rude_to_bring_unruly_child = fct_relevel(rude_to_bring_unruly_child, c("no", "somewhat")))
fly %>% group_by(use_electronics_takeoff) %>% tally()
fly <- fly %>%
mutate(use_electronics_takeoff = fct_recode(use_electronics_takeoff, no = "No", yes = "Yes")) %>%
mutate(use_electronics_takeoff = fct_relevel(use_electronics_takeoff, c("no")))
fly %>% group_by(smoked_inflight) %>% tally()
fly <- fly %>%
mutate(smoked_inflight = fct_recode(smoked_inflight, no = "No", yes = "Yes")) %>%
mutate(smoked_inflight = fct_relevel(smoked_inflight, c("no")))
fly %>% group_by(gender) %>% tally()
fly <- fly %>%
mutate(gender = fct_recode(gender, female = "Female", male = "Male"))
fly %>% group_by(age) %>% tally()
fly <- fly %>%
mutate(age = fct_recode(age, `60+` = "> 60")) %>%
mutate(age = fct_relevel(age, c("18-29", "30-44", "45-60", "60+")))
fly %>% group_by(household_income) %>% tally()
fly <- fly %>%
mutate(household_income = fct_recode(household_income, `$150,000+` = "150000")) %>%
mutate(household_income = fct_relevel(household_income, c("$0 - $24,999", "$25,000 - $49,999", "$50,000 - $99,999", "$100,000 - $149,999")))
levels(fly$household_income)
fly %>% group_by(education) %>% tally()
fly <- fly %>%
mutate(education = tolower(education)) %>%
mutate(education = fct_relevel(education, c("less than high school degree", "high school degree", "some college or associate degree")))
levels(fly$education)
fly %>% group_by(region) %>% tally()
fly <- fly %>%
mutate(region = tolower(region)) %>%
mutate(region = fct_collapse(region, midwest = c("east north central", "west north central"), northeast = c("new england", "middle atlantic"),
south = c("west south central", "east south central", "south atlantic"))) %>%
mutate(region = fct_relevel(region, c("pacific", "mountain", "midwest", "northeast", "south")))
levels(fly$region)
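
# Quick sanity check that the recoding above produced the intended levels:
fly %>% count(flight_freq, do_you_recline)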
barplot.enrichResult <- function(height, x="Count", color='p.adjust',
showCategory=8, font.size=12, title="",
label_format=30, ...) {
object <- height
colorBy <- match.arg(color, c("pvalue", "p.adjust", "qvalue"))
if (x == "geneRatio" || x == "GeneRatio") {
x <- "GeneRatio"
}
else if (x == "count" || x == "Count") {
x <- "Count"
}
df <- fortify(object, showCategory=showCategory, by=x, ...)
if(colorBy %in% colnames(df)) {
p <- ggplot(df, aes_string(x = x, y = "Description", fill = colorBy)) +
theme_dose(font.size) +
scale_fill_continuous(low="red", high="blue", name = color,
guide=guide_colorbar(reverse=TRUE))
} else {
p <- ggplot(df, aes_string(x = x, y = "Description",
fill = "Description")) +
theme_dose(font.size) +
theme(legend.position="none")
}
label_func <- default_labeller(label_format)
if(is.function(label_format)) {
label_func <- label_format
}
p + geom_col() +
scale_y_discrete(labels = label_func) +
ggtitle(title) + ylab(NULL)
}
barplot.compareClusterResult <- function(height, color="p.adjust",
showCategory=5, by="geneRatio",
includeAll=TRUE, font.size=12,
title="", ...) {
df <- fortify(height, showCategory=showCategory, by=by,
includeAll=includeAll)
plotting.clusterProfile(df, type="bar", colorBy=color, by=by, title=title,
font.size=font.size)
}
pro4sail <- function(param) {
  # Calls the compiled `pro4sail` Fortran routine; the last four outputs are
  # the four reflectance streams, returned as spectra over 400-2500 nm.
  plist <- as.list(param)
  nw <- 2101
plist$rddt <- numeric(nw)
plist$rsdt <- numeric(nw)
plist$rdot <- numeric(nw)
plist$rsot <- numeric(nw)
inlist <- c("pro4sail", plist)
outlist <- do.call(.Fortran, inlist)
lo <- length(outlist)
refl <- do.call(cbind, outlist[(lo - 3):lo])
reflspec <- spectra(refl, 400:2500)
colnames(reflspec) <- c("bi-hemispherical", "hemispherical_directional",
"directional_hemispherical", "bi-directional")
reflspec
}
pro4saild <- function(param) {
  # Same as pro4sail() but calls the `pro4saild` variant of the Fortran routine.
  plist <- as.list(param)
  nw <- 2101
plist$rddt <- numeric(nw)
plist$rsdt <- numeric(nw)
plist$rdot <- numeric(nw)
plist$rsot <- numeric(nw)
inlist <- c("pro4saild", plist)
outlist <- do.call(.Fortran, inlist)
lo <- length(outlist)
refl <- do.call(cbind, outlist[(lo - 3):lo])
reflspec <- spectra(refl, 400:2500)
colnames(reflspec) <- c("bi-hemispherical", "hemispherical_directional",
"directional_hemispherical", "bi-directional")
reflspec
}
context("Testing find_read_match()")
uid <- as.character(random_hash())
data_product1 <- file.path("data_product", "read", "wildcard", uid, "1")
data_product2 <- file.path("data_product", "read", "wildcard", uid, "1", "2")
coderun_description <- "Register a file in the pipeline"
dataproduct_description <- "A csv file"
namespace1 <- "username"
endpoint <- Sys.getenv("FDP_endpoint")
config_file <- file.path(tempdir(), "config_files", "inputglobbing",
paste0("config_", uid, ".yaml"))
create_config(path = config_file,
description = coderun_description,
input_namespace = namespace1,
output_namespace = namespace1)
add_write(path = config_file,
data_product = data_product1,
description = dataproduct_description,
file_type = "csv")
add_write(path = config_file,
data_product = data_product2,
description = dataproduct_description,
file_type = "csv")
fair_run(path = config_file, skip = TRUE)
config <- file.path(Sys.getenv("FDP_CONFIG_DIR"), "config.yaml")
script <- file.path(Sys.getenv("FDP_CONFIG_DIR"), "script.sh")
handle <- initialise(config, script)
path1 <- link_write(handle, data_product1)
uid1 <- paste0(uid, "_1")
df1 <- data.frame(a = uid1, b = uid1)
write.csv(df1, path1)
path2 <- link_write(handle, data_product2)
uid2 <- paste0(uid, "_2")
df2 <- data.frame(a = uid2, b = uid2)
write.csv(df2, path2)
finalise(handle)
data_product3 <- file.path("data_product", "read", "wildcard", uid, "*")
config_file <- file.path(tempdir(), "config_files", "inputglobbing",
paste0("config2_", uid, ".yaml"))
create_config(path = config_file,
description = coderun_description,
input_namespace = namespace1,
output_namespace = namespace1)
add_read(path = config_file,
data_product = data_product3)
fair_run(path = config_file, skip = TRUE)
config <- file.path(Sys.getenv("FDP_CONFIG_DIR"), "config.yaml")
script <- file.path(Sys.getenv("FDP_CONFIG_DIR"), "script.sh")
handle <- initialise(config, script)
test_that("data products recorded in working config", {
reads <- handle$yaml$read
testthat::expect_equal(reads[[1]]$data_product, data_product2)
testthat::expect_equal(reads[[2]]$data_product, data_product1)
testthat::expect_equal(reads[[1]]$use$version, "0.0.1")
testthat::expect_equal(reads[[2]]$use$version, "0.0.1")
aliases <- find_read_match(handle, data_product3)
testthat::expect_true(all(aliases %in% c(data_product1, data_product2)))
path <- link_read(handle, aliases[1])
path <- link_read(handle, aliases[2])
})
clm.fit.NR <-
function(rho, control = list())
### Newton-Raphson optimization with step halving; the Hessian diagonal is
### inflated when its Cholesky factorization fails.
{
control <- do.call(clm.control, control)
stepFactor <- 1
innerIter <- modif.iter <- abs.iter <- 0L
conv <- 2L
nll <- rho$clm.nll(rho)
if(!is.finite(nll))
stop("Non-finite log-likelihood at starting value")
for(i in 1:(control$maxIter + 1L)) {
gradient <- rho$clm.grad(rho)
maxGrad <- max(abs(gradient))
if(control$trace > 0) {
Trace(iter=i+innerIter-1, stepFactor, nll, maxGrad,
rho$par, first=(i==1))
if(control$trace > 1 && i > 1) {
cat("\tgrad: ")
cat(paste(formatC(gradient, digits=3, format="e")))
cat("\n\tstep: ")
cat(paste(formatC(-step, digits=3, format="e")))
cat("\n\teigen: ")
cat(paste(formatC(eigen(hessian, symmetric=TRUE,
only.values=TRUE)$values, digits=3,
format="e")))
cat("\n")
}
}
abs.conv <- (maxGrad < control$gradTol)
if(abs.conv) abs.iter <- abs.iter + 1L
hessian <- rho$clm.hess(rho)
ch <- try(chol(hessian), silent=TRUE)
if(inherits(ch, "try-error")) {
if(abs.conv) {
conv <- 1L
break
}
min.ev <- min(eigen(hessian, symmetric=TRUE,
only.values=TRUE)$values)
inflation.factor <- 1
inflate <- abs(min.ev) + inflation.factor
hessian <- hessian + diag(inflate, nrow(hessian))
if(control$trace > 0)
cat(paste("Hessian is singular at iteration", i-1, "inflating diagonal with",
formatC(inflate, digits=5, format="f"), "\n"))
ch <- try(chol(hessian), silent=TRUE)
if(inherits(ch, "try-error"))
stop(gettextf("Cannot compute Newton step at iteration %d",
i-1), call.=FALSE)
modif.iter <- modif.iter + 1L
} else
modif.iter <- 0L
if(modif.iter >= control$maxModIter) {
conv <- 4L
break
}
step <- c(backsolve(ch, backsolve(ch, gradient, transpose=TRUE)))
rel.conv <- (max(abs(step)) < control$relTol)
if(abs.conv && rel.conv) {
conv <- 0L
break
}
rho$par <- rho$par - stepFactor * step
nllTry <- rho$clm.nll(rho)
lineIter <- 0
stephalf <- (nllTry > nll)
if(stephalf && abs(nll - nllTry) < 1e-10)
stephalf <- maxGrad < max(abs(rho$clm.grad(rho)))
if(abs.conv && stephalf) {
conv <- 1L
rho$par <- rho$par + stepFactor * step
rho$clm.nll(rho)
break
}
if(abs.conv && abs.iter >= 5L) {
conv <- 1L
break
}
while(stephalf) {
stepFactor <- stepFactor/2
rho$par <- rho$par + stepFactor * step
nllTry <- rho$clm.nll(rho)
lineIter <- lineIter + 1
if(control$trace > 0) {
cat("step halving:\n")
cat("nll reduction: ", formatC(nll - nllTry, digits=5, format="e"), "\n")
Trace(i+innerIter-1, stepFactor, nll, maxGrad,
rho$par, first = FALSE)
}
if(lineIter > control$maxLineIter){
conv <- 3L
break
}
innerIter <- innerIter + 1
stephalf <- (nllTry > nll)
if(stephalf && abs(nll - nllTry) < 1e-10)
stephalf <- (maxGrad < max(abs(rho$clm.grad(rho))))
}
if(conv == 3L) break
if(control$trace > 0)
cat("nll reduction: ", formatC(nll - nllTry, digits=5, format="e"), "\n")
nll <- nllTry
stepFactor <- min(1, 2 * stepFactor)
}
message <- switch(as.character(conv),
"0" = "Absolute and relative convergence criteria were met",
"1" = "Absolute convergence criterion was met, but relative criterion was not met",
"2" = "iteration limit reached",
"3" = "step factor reduced below minimum",
"4" = "maximum number of consecutive Newton modifications reached")
if(conv <= 1L && control$trace > 0) {
cat("\nOptimizer converged! ", message, fill = TRUE)
}
if(conv > 1 && control$trace > 0) {
cat("\nOptimization failed ", message, fill = TRUE)
}
gradient <- c(rho$clm.grad(rho))
res <- list(par = rho$par,
gradient = gradient,
Hessian = rho$clm.hess(rho),
logLik = -nll,
convergence = conv,
message = message,
maxGradient = max(abs(gradient)),
niter = c(outer = i-1, inner = innerIter),
fitted = rho$fitted)
return(res)
}
clm.fit.optim <-
function(rho, method = c("ucminf", "nlminb", "optim"), control=list())
{
method <- match.arg(method)
optRes <-
switch(method,
"nlminb" = nlminb(rho$par,
function(par) clm.nll(rho, par),
function(par) clm.grad_direct(rho, par),
control=control),
"ucminf" = ucminf(rho$par,
function(par) clm.nll(rho, par),
function(par) clm.grad_direct(rho, par),
control=control),
"optim" = optim(rho$par,
function(par) clm.nll(rho, par),
function(par) clm.grad_direct(rho, par),
method="BFGS",
control=control)
)
rho$par <- optRes[[1]]
res <- list(par = rho$par,
logLik = -clm.nll(rho),
gradient = clm.grad(rho),
Hessian = clm.hess(rho),
fitted = rho$fitted)
res$maxGradient = max(abs(res$gradient))
res$optRes <- optRes
res$niter <- switch(method, "nlminb" = optRes$evaluations,
"ucminf" = c(optRes$info["neval"], 0),
"optim" = optRes$counts)
res$convergence <-
switch(method, "nlminb" = optRes$convergence,
"ucminf" = optRes$convergence,
"optim" = optRes$convergence)
return(res)
}
clm.fit.flex <- function(rho, control=list()) {
lwr <- if(rho$link == "Aranda-Ordaz")
c(rep(-Inf, length(rho$par) - 1), 1e-5) else rep(-Inf, length(rho$par))
optRes <- nlminb(rho$par, function(par, rho) clm.nll.flex(rho, par),
lower=lwr, rho=rho)
rho$par <- optRes$par
res <- list(par = rho$par,
lambda = setNames(rho$par[length(rho$par)], "lambda"),
logLik = -clm.nll.flex(rho),
gradient = numDeriv::grad(func=function(par, rho) clm.nll.flex(rho, par),
x = rho$par, rho=rho),
Hessian = numDeriv::hessian(func=function(par, rho) clm.nll.flex(rho, par),
x = rho$par, rho=rho),
fitted = rho$fitted)
res$maxGradient = max(abs(res$gradient))
res$optRes <- optRes
res$niter <- optRes$evaluations
res$convergence <- optRes$convergence
return(res)
}
clm.nll.flex <- function(rho, par) {
if(!missing(par)) rho$par <- par
with(rho, {
if(k > 0)
sigma <- Soff * exp(drop(S %*% par[n.psi + 1:k]))
eta1 <- (drop(B1 %*% par[1:n.psi]) + o1)/sigma
eta2 <- (drop(B2 %*% par[1:n.psi]) + o2)/sigma
fitted <- pfun(eta1, par[length(par)]) - pfun(eta2, par[length(par)])
})
if(all(is.finite(rho$fitted)) && all(rho$fitted > 0))
-sum(rho$wts * log(rho$fitted))
else Inf
}
clm.nll <- function(rho, par) {
if(!missing(par)) rho$par <- par
with(rho, {
if(k > 0)
sigma <- Soff * exp(drop(S %*% par[n.psi + 1:k]))
eta1 <- (drop(B1 %*% par[1:n.psi]) + o1)/sigma
eta2 <- (drop(B2 %*% par[1:n.psi]) + o2)/sigma
})
rho$fitted <- getFittedC(rho$eta1, rho$eta2, rho$link, rho$par[length(rho$par)])
if(all(is.finite(rho$fitted)) && all(rho$fitted > 0))
-sum(rho$wts * log(rho$fitted))
else Inf
}
clm.grad <- function(rho) {
with(rho, {
p1 <- if(!nlambda) dfun(eta1) else dfun(eta1, lambda)
p2 <- if(!nlambda) dfun(eta2) else dfun(eta2, lambda)
wtpr <- wts/fitted
C2 <- B1*p1/sigma - B2*p2/sigma
if(k <= 0) return(-crossprod(C2, wtpr))
C3 <- -(eta1 * p1 - eta2 * p2) * S
return(-crossprod(cbind(C2, C3), wtpr))
})
}
clm.grad_direct <- function(rho, par) {
clm.nll(rho, par)
clm.grad(rho)
}
clm.hess <- function(rho) {
with(rho, {
g1 <- if(!nlambda) gfun(eta1) else gfun(eta1, lambda)
g2 <- if(!nlambda) gfun(eta2) else gfun(eta2, lambda)
wtprpr <- wtpr/fitted
dg.psi <- crossprod(B1 * g1 * wtpr / sigma^2, B1) -
crossprod(B2 * g2 * wtpr / sigma^2, B2)
D <- dg.psi - crossprod(C2, (C2 * wtprpr))
if(k <= 0) return(-D)
wtprsig <- wtpr/sigma
epg1 <- p1 + g1*eta1
epg2 <- p2 + g2*eta2
Et <- crossprod(B1, -wtprsig * epg1 * S) -
crossprod(B2, -wtprsig * epg2 * S) -
crossprod(C2, wtprpr * C3)
F <- -crossprod(S, wtpr * ((eta1*p1 - eta2*p2)^2 / fitted -
(eta1*epg1 - eta2*epg2)) * S)
H <- rbind(cbind(D , Et),
cbind(t(Et), F))
return(-H)
})
}
harris <- function(fun, x, options = c(1, 100, 1e-4, 10), tr = FALSE, ...){
  # Gradient descent with one adaptive step size per coordinate: steps grow by
  # `a` while the gradient sign is stable and shrink by `b` (or by `c` after a
  # failed step) when it flips.
  pas <- rep(0.1, length(x))  # per-coordinate step sizes
  a <- 1.2
  b <- 0.8
  c <- 0.5
  ovf <- 1e4   # upper bound on step sizes
  unf <- 1e-6  # lower bound on step sizes
  alpha <- 0.9 # smoothing factor for the gain (recent objective decrease)
  it <- 0
  gain <- 1
  fungrad <- fun(x, ...)  # fun() must return list(fun = value, grad = gradient)
  yp <- fungrad$fun
  gp <- fungrad$grad
  xp <- x
  y <- yp  # ensure `y` is defined even if the loop below never runs
  x <- xp - pas * gp
  if(tr){
    Trace <- list(time = matrix(0, options[2] + 1, 3), fct = rep(0, options[2] + 1))
    Trace$time[1, ] <- c(0, 0, 0)
    Trace$fct[1] <- yp
    ptm <- proc.time()[1:3]
  } else Trace <- NULL
  while((gain/abs(yp) >= options[3]) & (it < options[2])){
    it <- it + 1
    fungrad <- fun(x, ...)
    y <- fungrad$fun
    g <- fungrad$grad
    if(tr){
      Trace$time[it + 1, ] <- proc.time()[1:3] - ptm
      Trace$fct[it + 1] <- y
    }
    if(options[1] > 0){
      if(it %% options[4] == 1) print(c(it, y, gain/abs(yp)))
    }
    if(y > yp){
      # Objective increased: revert to the previous point and shrink all steps
      x <- xp
      g <- gp
      pas <- pas * c
      x <- x - pas * g
    }
    else{
      gain <- alpha*gain + (1 - alpha)*abs(yp - y)
      xp <- x
      # Grow steps where the gradient kept its sign, shrink where it flipped
      test <- as.integer((g * gp) >= 0)
      pas <- ((test * a) + ((1 - test) * b)) * pas
      pas <- pmin(pas, ovf)  # clamp step sizes to [unf, ovf]
      pas <- pmax(pas, unf)
      gp <- g
      x <- x - pas * g
      yp <- y
    }
  }
  return(list(par = x, value = y, trace = Trace))
}
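
# Minimal sketch: minimise a quadratic. `fun` must return a list with elements
# $fun (objective value) and $grad (gradient), as assumed above:
quad <- function(x) list(fun = sum((x - 3)^2), grad = 2 * (x - 3))
harris(quad, x = c(0, 0))$par  # approaches c(3, 3)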
osf_find_file <- function(x, type, pattern) {
type <- match.arg(type, c("file", "folder"))
matches <- osf_ls_files(x, type = type, pattern = pattern)
matches[matches$name == pattern, ]
}
span(
h4("Step 4: The Bivariate mixed-effects model."),
p(HTML("<b>Sub-goal:</b> Understanding the difference between a univariate and a multivariate mixed-effects model,
and how level-specific correlations are modelled.")),
p(HTML(paste0("<b>Introduction:</b> In the previous modules (",Module_titles$mod1,", ",Module_titles$mod3,", and ",
Module_titles$mod6,") we have considered univariate mixed-effects models. As you have seen,
the univariate mixed-effects model enabled us to estimate the variance attributable to variation within-
and among-individuals in a single trait (",NOT$trait.1,") with the following equation:"))),
p(paste0("$$",NOT$trait.1,"_{",NOT$time,NOT$ind,"} =
(",EQ3$mean0," + ",NOT$devI,"_",NOT$ind,") +
",NOT$error,"_{",NOT$time,NOT$ind,"}$$")),
p(HTML(paste0("The variation in intercepts ($V_",NOT$devI,"$) among individuals was assumed to be normally distributed (N)
with a mean of zero and a variance ($\\Omega_{",NOT$devI,"}$) and is called the <i>among-individual variance</i>
(estimated as $V_",NOT$devI,"$: the variance across random intercepts of individuals):"))),
p(paste0("$$[",NOT$devI,"_",NOT$ind,"] \\sim N(0, \\Omega_{",NOT$devI,"}): \\Omega_{",NOT$devI,"} = [V_",NOT$devI,"]$$")),
p(HTML(paste0("A residual error ($",NOT$error,"_{",NOT$time,NOT$ind,"}$) was also assumed to be normally distributed, with zero mean
and a variance ($\\Omega_{",NOT$error,"}$) representing the <i>within-individual variance</i>:"))),
p(paste0("$$[",NOT$error,"_{",NOT$time,NOT$ind,"}] \\sim N(0, \\Omega_{",NOT$error,"}): \\Omega_{",NOT$error,"} = [V_",NOT$error,"]$$")),
p(HTML(paste0("In the bivariate mixed-effects models, we are estimating these parameters simultaneously for two traits.
That is, the model can be formulated as a set of two phenotypic equations (one for $",NOT$trait.1,"$ and one for $",NOT$trait.2,"$):"))),
p(paste0("$$",
NOT$trait.1,"_{",NOT$time,NOT$ind,"} =
(",EQ$mean0.1," + ",EQ$dev0.1,") +
",NOT$error,"_{",NOT$trait.1,NOT$time,NOT$ind,"}$$
$$",
NOT$trait.2,"_{",NOT$time,NOT$ind,"} =
(",EQ$mean0.2," + ",EQ$dev0.2,") +
",NOT$error,"_{",NOT$trait.2,NOT$time,NOT$ind,"}
$$")),
p(paste0("As was the case for univariate models, the random intercepts ($",NOT$devI,"_",NOT$ind,"$)
and the within-individual contributions ($",NOT$error,"_{",NOT$time,NOT$ind,"}$) to ",NOT$trait.1," and ",NOT$trait.2," are modelled as
having means of zero. However, in this bivariate case, neither the random
intercepts nor the residual errors are independent. Instead, the random intercepts
are now distributed assuming a multivariate normal distribution with a variance-covariance structure
($\\Omega_{",NOT$devI,"}$) specifying the among-individual variances ($V_{",NOT$devI,"_",NOT$trait.1,"}$ and $V_{",NOT$devI,"_",NOT$trait.2,"}$)
and the among-individual covariance between the two attributes ($Cov_{",NOT$devI,"_",NOT$trait.1,",",NOT$devI,"_",NOT$trait.2,"}$): ")),
p(paste0(
"$$ \\Omega_{",NOT$devI,"}=
\\begin{pmatrix}
V_{",NOT$devI,"_",NOT$trait.1,"} & Cov_{",NOT$devI,"_",NOT$trait.1,",",NOT$devI,"_",NOT$trait.2,"} \\\\
Cov_{",NOT$devI,"_",NOT$trait.1,",",NOT$devI,"_",NOT$trait.2,"} & V_{", NOT$devI,"_",NOT$trait.2,"}\\\\
\\end{pmatrix}
$$")),
p("The residual errors ($",NOT$error,"_{",NOT$time,NOT$ind,"}$) are likewise assumed to be drawn from a multivariate normal distribution,
with means of zero, within-individual variances ($V_{",NOT$error,"_",NOT$trait.1,"}$ and $V_{",NOT$error,"_",NOT$trait.2,"}$), and within-individual
covariances ($Cov_{",NOT$error,"_{",NOT$trait.1,"},",NOT$error,"_{",NOT$trait.2,"}}$):"),
p(paste0(
"$$ \\Omega_{",NOT$error,"}=
\\begin{pmatrix}
V_{",NOT$error,"_",NOT$trait.1,"} & Cov_{",NOT$error,"_",NOT$trait.1,",",NOT$error,"_",NOT$trait.2,"} \\\\
Cov_{",NOT$error,"_",NOT$trait.1,",",NOT$error,"_",NOT$trait.2,"} & V_{", NOT$error,"_",NOT$trait.2,"} \\\\
\\end{pmatrix}
$$")),
p("From these estimated matrices, we can calculate the phenotypic variances for
each trait by adding up the variances estimated at each level:"),
p(paste0("$$V_{",NOT$total,"_",NOT$trait.1,"} = V_{",NOT$devI,"_",NOT$trait.1,"} + V_{",NOT$error,"_",NOT$trait.1,"}$$
$$V_{",NOT$total,"_",NOT$trait.2,"} = V_{",NOT$devI,"_",NOT$trait.2,"} + V_{",NOT$error,"_",NOT$trait.2,"}$$")),
p("In the same fashion, we can calculate the phenotypic covariance between the
two traits by adding up the covariances estimated at each level:"),
p(paste0("$$Cov_{",NOT$total,"_",NOT$trait.1,", ",NOT$total,"_",NOT$trait.2,"} =
Cov_{",NOT$devI,"_",NOT$trait.1,",",NOT$devI,"_",NOT$trait.2,"} +
Cov_{",NOT$error,"_",NOT$trait.1,",",NOT$error,"_",NOT$trait.2,"}$$")),
p("With this information in hand, we can now calculate the overall phenotypic correlation in the data."),
p(paste0("$$r_{",NOT$total,"_",NOT$trait.1,", ",NOT$total,"_",NOT$trait.2,"} =
\\frac{Cov_{",NOT$total,"_",NOT$trait.1,",",NOT$total,"_",NOT$trait.2,"}}
{\\sqrt{V_{",NOT$total,"_",NOT$trait.1,"}V_{",NOT$total,"_",NOT$trait.2,"}}}$$")),
strong("Conclusion: "),
p("Bivariate mixed-effects models differ distinctly from univariate mixed-effects
models as the former assumes multivariate normality while the latter assumes univariate normality.
Bivariate mixed-effects models estimate variances and covariances within and among each specified
level from which overall phenotypic variances and covariances, as well as correlation,
can be subsequently derived."),
div(class = "line"),
actionLink("Mod4Step4GotoStep3", label = "<< Previous Step (3)", class = "linkToModuleSteps"),
span(Modules_VAR$StepLink$sep, class = "step-Link"),
actionLink("Mod4Step4GotoStep5", label = "Next Step (5) >>", class = "linkToModuleSteps")
)
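
# Numeric sketch of the variance-partitioning algebra described above
# (hypothetical variance components, not output of a fitted model):
V_id   <- c(y = 0.6, z = 0.5)  # among-individual variances for traits y and z
V_e    <- c(y = 0.4, z = 0.5)  # within-individual (residual) variances
cov_id <- 0.30                 # among-individual covariance
cov_e  <- 0.05                 # within-individual covariance
V_p    <- V_id + V_e           # phenotypic variance per trait
r_p    <- (cov_id + cov_e) / sqrt(V_p["y"] * V_p["z"])  # phenotypic correlation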
mri_convert = function(
file,
outfile,
opts = ""){
res = fs_cmd(
func = "mri_convert",
file = file,
outfile = outfile,
frontopts = opts,
retimg = FALSE,
samefile = FALSE,
add_ext = FALSE)
return(res)
}
mri_convert.help = function(){
fs_help(func_name = "mri_convert")
}
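
# Usage sketch (requires a local FreeSurfer installation; the file names are
# hypothetical):
# mri_convert(file = "T1.mgz", outfile = "T1.nii.gz")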
NULL
setClass(
"adapted_replicate",
slots=c(
log_wm_times_wp_avg="array",
log_wp_avg="array",
Np="integer",
tol="numeric"
),
prototype=prototype(
log_wm_times_wp_avg=array(data=numeric(0),dim=c(0,0)),
log_wp_avg=array(data=numeric(0),dim=c(0,0)),
Np=as.integer(NA),
tol=as.double(NA)
)
)
setClass(
"abfd_spatPomp",
contains="spatPomp",
slots=c(
Nrep="integer",
nbhd="function",
Np="integer",
tol="numeric",
cond_loglik="array",
loglik="numeric"
),
prototype=prototype(
Nrep=as.integer(NA),
Np=as.integer(NA),
tol=as.double(NA),
cond_loglik=array(data=numeric(0),dim=c(0,0)),
loglik=as.double(NA)
)
)
abf_internal <- function (object, Np, nbhd, tol, ..., verbose, .gnsi = TRUE) {
ep <- paste0("in ",sQuote("abf"),": ")
p_object <- pomp(object,...,verbose=verbose)
object <- new("spatPomp",p_object,
unit_covarnames = object@unit_covarnames,
shared_covarnames = object@shared_covarnames,
runit_measure = object@runit_measure,
dunit_measure = object@dunit_measure,
eunit_measure = object@eunit_measure,
munit_measure = object@munit_measure,
vunit_measure = object@vunit_measure,
unit_names=object@unit_names,
unitname=object@unitname,
unit_statenames=object@unit_statenames,
unit_obsnames = object@unit_obsnames,
unit_accumvars = object@unit_accumvars)
params <- coef(object)
  # load quietly, but keep the user's `verbose` setting for later progress output
  pompLoad(object, verbose = FALSE)
gnsi <- as.logical(.gnsi)
if (length(params)==0)
stop(ep,sQuote("params")," must be specified",call.=FALSE)
if (missing(tol))
stop(ep,sQuote("tol")," must be specified",call.=FALSE)
times <- time(object,t0=TRUE)
ntimes <- length(times)-1
nunits <- length(unit_names(object))
if (missing(Np)) {
if (is.matrix(params)) {
Np <- ncol(params)
} else {
stop(ep,sQuote("Np")," must be specified",call.=FALSE)
}
}
if (is.function(Np)) {
Np <- tryCatch(
vapply(seq.int(from=0,to=ntimes,by=1),Np,numeric(1)),
error = function (e) {
stop(ep,"if ",sQuote("Np")," is a function, ",
"it must return a single positive integer",call.=FALSE)
}
)
}
if (length(Np)==1)
Np <- rep(Np,times=ntimes+1)
else if (length(Np)!=(ntimes+1))
stop(ep,sQuote("Np")," must have length 1 or length ",ntimes+1,call.=FALSE)
if (any(Np<=0))
stop(ep,"number of particles, ",sQuote("Np"),", must always be positive",call.=FALSE)
if (!is.numeric(Np))
stop(ep,sQuote("Np")," must be a number, a vector of numbers, or a function",call.=FALSE)
Np <- as.integer(Np)
if (is.matrix(params)) {
if (!all(Np==ncol(params)))
stop(ep,"when ",sQuote("params")," is provided as a matrix, do not specify ",
sQuote("Np"),"!",call.=FALSE)
}
if (NCOL(params)==1) {
coef(object) <- params
params <- as.matrix(params)
}
paramnames <- rownames(params)
if (is.null(paramnames))
stop(ep,sQuote("params")," must have rownames",call.=FALSE)
init.x <- rinit(object,params=params,nsim=Np[1L],.gnsi=gnsi)
x <- init.x
log_cond_densities <- array(data = numeric(0), dim=c(nunits,Np[1L],ntimes))
dimnames(log_cond_densities) <- list(unit = 1:nunits, rep = 1:Np[1L], time = 1:ntimes)
for (nt in seq_len(ntimes)) {
X <- tryCatch(
rprocess(
object,
x0=x,
t0=times[nt],
times=times[nt+1],
params=params,
.gnsi=gnsi
),
error = function (e) {
stop(ep,"process simulation error: ",
conditionMessage(e),call.=FALSE)
}
)
log_weights <- tryCatch(
vec_dmeasure(
object,
y=object@data[,nt,drop=FALSE],
x=X,
times=times[nt+1],
params=params,
log=TRUE,
.gnsi=gnsi
),
error = function (e) {
stop(ep,"error in calculation of weights: ",
conditionMessage(e),call.=FALSE)
}
)
log_cond_densities[,,nt] <- log_weights[,,1]
log_resamp_weights <- apply(log_weights[,,1,drop=FALSE], 2, function(x) sum(x))
max_log_resamp_weights <- max(log_resamp_weights)
if(all(is.infinite(log_resamp_weights))) log_resamp_weights <- rep(log(tol), Np[1L])
else log_resamp_weights <- log_resamp_weights - max_log_resamp_weights
resamp_weights <- exp(log_resamp_weights)
gnsi <- FALSE
xx <- tryCatch(
.Call(
abf_computations,
x=X,
params=params,
Np=Np[nt+1],
trackancestry=FALSE,
weights=resamp_weights
),
error = function (e) {
stop(ep,conditionMessage(e),call.=FALSE)
}
)
x <- xx$states
params <- xx$params
if (verbose && (nt%%5==0))
cat("abf timestep",nt,"of",ntimes,"finished\n")
}
log_loc_comb_pred_weights <- array(data = numeric(0), dim=c(nunits,Np[1L], ntimes))
log_wm_times_wp_avg <- array(data = numeric(0), dim = c(nunits, ntimes))
log_wp_avg <- array(data = numeric(0), dim = c(nunits, ntimes))
for (nt in seq_len(ntimes)){
for (unit in seq_len(nunits)){
full_nbhd <- nbhd(object, time = nt, unit = unit)
log_prod_cond_dens_nt <- rep(0, Np[1])
if(length(full_nbhd) > 0) log_prod_cond_dens_not_nt <- matrix(0, Np[1], max(1,nt-min(sapply(full_nbhd,'[[',2))))
else log_prod_cond_dens_not_nt <- matrix(0,Np[1],0)
for (neighbor in full_nbhd){
neighbor_u <- neighbor[1]
neighbor_n <- neighbor[2]
if (neighbor_n == nt)
log_prod_cond_dens_nt <- log_prod_cond_dens_nt + log_cond_densities[neighbor_u, ,neighbor_n]
else
log_prod_cond_dens_not_nt[, nt-neighbor_n] <- log_prod_cond_dens_not_nt[, nt-neighbor_n] + log_cond_densities[neighbor_u, ,neighbor_n]
}
log_loc_comb_pred_weights[unit, ,nt] <- sum(apply(log_prod_cond_dens_not_nt, 2, logmeanexp)) + log_prod_cond_dens_nt
}
}
log_wm_times_wp_avg <- apply(log_loc_comb_pred_weights + log_cond_densities, c(1,3), FUN = logmeanexp)
log_wp_avg <- apply(log_loc_comb_pred_weights, c(1,3), FUN = logmeanexp)
pompUnload(object,verbose=verbose)
new(
"adapted_replicate",
log_wm_times_wp_avg = log_wm_times_wp_avg,
log_wp_avg = log_wp_avg,
Np=as.integer(Np),
tol=tol
)
}
setGeneric("abf",function(object,...)standardGeneric("abf"))
setMethod(
"abf",
signature=signature(object="spatPomp"),
function (object, Nrep, Np, nbhd,
tol = 1e-300,
..., verbose=getOption("verbose",FALSE)) {
if(missing(nbhd)){
nbhd <- function(object, unit, time){
nbhd_list <- list()
if(time>1) nbhd_list <- c(nbhd_list, list(c(unit, time-1)))
if(unit>1) nbhd_list <- c(nbhd_list, list(c(unit-1, time)))
return(nbhd_list)
}
}
mult_rep_output <- list()
i <- 1
mcopts <- list(set.seed=TRUE)
mult_rep_output <- foreach::foreach(i=1:Nrep,
.packages=c("pomp","spatPomp"),
.options.multicore=mcopts) %dopar% spatPomp:::abf_internal(
object=object,
Np=Np,
nbhd=nbhd,
tol=tol,
...,
verbose=verbose
)
ntimes <- length(time(object))
nunits <- length(unit_names(object))
cond_loglik <- foreach::foreach(i=seq_len(nunits),
.combine = 'rbind',
.packages=c("pomp", "spatPomp"),
.options.multicore=mcopts) %dopar%
{
cond_loglik_u <- array(data = numeric(0), dim=c(ntimes))
for (n in seq_len(ntimes)){
log_mp_sum <- logmeanexp(vapply(mult_rep_output,
FUN = function(rep_output) return(rep_output@log_wm_times_wp_avg[i,n]),
FUN.VALUE = 1.0))
log_p_sum <- logmeanexp(vapply(mult_rep_output,
FUN = function(rep_output) return(rep_output@log_wp_avg[i,n]),
FUN.VALUE = 1.0))
cond_loglik_u[n] <- log_mp_sum - log_p_sum
}
cond_loglik_u
}
new(
"abfd_spatPomp",
object,
Np=as.integer(Np),
tol=tol,
cond_loglik=cond_loglik,
loglik=sum(cond_loglik)
)
}
)
setMethod(
"abf",
signature=signature(object="abfd_spatPomp"),
function (object, Nrep, Np, nbhd,
tol=1e-300,
...,
verbose = getOption("verbose", FALSE)) {
if (missing(Np)) Np <- object@Np
if (missing(tol)) tol <- object@tol
if (missing(Nrep)) Nrep <- object@Nrep
if (missing(nbhd)) nbhd <- object@nbhd
abf(as(object,"spatPomp"),
Np=Np,
Nrep=Nrep,
nbhd=nbhd,
tol=tol,
...)
}
)
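
# Usage sketch (assumes spatPomp's bm() toy model constructor and a parallel
# backend registered for %dopar%):
# doParallel::registerDoParallel(2)
# b <- bm(U = 2, N = 5)
# abf(b, Nrep = 2, Np = 10)@loglik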
NULL
plot.thornthwaite<-function(x, save_dir=NULL, format=NULL, variables=c("Precipitation","Et0","Storage","Prec. - Evap.","Deficit","Surplus"), title=TRUE, trace_grid=TRUE, st_name=NULL, u_y_scale_magn=0.2, l_y_scale_magn=0, leg_pos="topleft", ...)
{
month_names<-c("Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec")
q_list <- x
for(v in variables)
{
d.f.<-as.data.frame(q_list[names(q_list) %in% v])
y_scale_limits<- c(min(d.f.)-(max(d.f.)-min(d.f.))*l_y_scale_magn ,min(d.f.)+(max(d.f.)-min(d.f.))*(1+u_y_scale_magn))
    legend_text <- substr(row.names(d.f.), 1, (nchar(row.names(d.f.)) - 1))
    legend_text[legend_text == "0"] <- "min"
    legend_text[legend_text == "100"] <- "max"
chart_title<-NULL
if(!is.null(st_name)) chart_title=paste(st_name, "-", "Monthly", v)
par_l<-list(...)
if(length(par_l) > 0)
indices_par_gr<-which(unlist(lapply(X=par_l, FUN= function(x, len){length(x)==len}, len=nrow(d.f.))))
if(!is.null(format))
{
if(format=="png") png(filename=paste(save_dir,"/",st_name,"_",v,"_","quant.png", sep="")) else
if(format=="jpeg") jpeg(filename=paste(save_dir,"/",st_name,"_",v,"_","quant.jpg", sep="")) else
if(format=="tiff") tiff(filename=paste(save_dir,"/",st_name,"_",v,"_","quant.tif", sep="")) else
if(format=="bmp") bmp(filename=paste(save_dir,"/",st_name,"_",v,"_","quant.bmp", sep="")) else
print("Incorrect graphic format", quote=FALSE)
}
args<-list(x=as.vector(t(d.f.[1,])), main=chart_title, xlab="", ylab="[mm]", ylim=y_scale_limits, type="b", lab=c(12,5,7), xaxt="n")
par_l_temp<-par_l
if(length(par_l) > 0)
par_l_temp[indices_par_gr]<-lapply(X=par_l_temp[indices_par_gr], FUN=function(x, ii){x[ii]}, ii=1)
    oldwarn <- options(warn = -1)  # silence warnings from unmatched graphical pars
    on.exit(options(oldwarn), add = TRUE)
do.call(what=plot, args=c(args, par_l_temp))
axis(side=1, labels=month_names, at=1:12)
for(i in 2:(nrow(d.f.)))
{
args<-list(x=as.vector(t(d.f.[i,])), type="b")
par_l_temp<-par_l
if(length(par_l) > 0)
par_l_temp[indices_par_gr]<-lapply(X=par_l_temp[indices_par_gr], FUN=function(x, ii){x[ii]}, ii=i)
do.call(what=lines, args=c(args, par_l_temp))
}
if(trace_grid) grid()
if(!is.null(leg_pos))
{
par_l_legend<-par_l[names(par_l) %in% names(formals(legend))]
args_legend<-par_l_legend
do.call(what=legend, args=c(x=leg_pos, legend= list(legend_text),args_legend ))
}
if(!is.null(format)) dev.off()
}
}
.Ordlcmm <-
function(fixed,mixture,random,subject,classmb,ng,idiag,nwg,data,B,convB,convL,convG,prior,maxiter,zitr,ide,call,Ydiscrete,subset=subset,na.action,posfix,partialH,verbose,returndata,var.time)
{
ptm<-proc.time()
if(verbose==TRUE) cat("Be patient, lcmm is running ... \n")
cl <- match.call()
args <- as.list(match.call(.Ordlcmm))[-1]
nom.subject <- as.character(subject)
  if(!missing(mixture) && ng == 1) stop("No mixture can be specified with ng=1")
  if(missing(mixture) && ng > 1) stop("The argument mixture has to be specified for ng > 1")
  if(!missing(classmb) && ng == 1) stop("No classmb can be specified with ng=1")
  if(missing(random)) random <- ~-1
  if(missing(fixed)) stop("The argument fixed must be specified in any model")
  if(missing(classmb)) classmb <- ~-1
  if(missing(mixture)) mixture <- ~-1
  if(ng == 1 && nwg == TRUE) stop("The argument nwg should be FALSE for ng=1")
  if(!inherits(fixed, "formula")) stop("The argument fixed must be a formula")
  if(!inherits(mixture, "formula")) stop("The argument mixture must be a formula")
  if(!inherits(random, "formula")) stop("The argument random must be a formula")
  if(!inherits(classmb, "formula")) stop("The argument classmb must be a formula")
if(missing(data)){ stop("The argument data should be specified and defined as a data.frame")}
if(missing(subject)){ stop("The argument subject must be specified in any model even without random-effects")}
if(!is.numeric(data[[subject]])) stop("The argument subject must be numeric")
X0.names2 <- c("intercept")
int.fixed <- 0
int.mixture <- 0
int.random <- 0
int.classmb <- 0
if(returndata==TRUE)
{
datareturn <- data
}
else
{
datareturn <- NULL
}
newdata <- data
if(!isTRUE(all.equal(as.character(cl$subset),character(0))))
{
cc <- cl
cc <- cc[c(1,which(names(cl)=="subset"))]
cc[[1]] <- as.name("model.frame")
cc$formula <- formula(paste("~",paste(colnames(data),collapse="+")))
cc$data <- data
cc$na.action <- na.pass
newdata <- eval(cc)
}
if(!is.null(na.action)){
newdata <- newdata[-na.action,]
}
attributes(newdata)$terms <- NULL
X0.names2 <- unique(c(X0.names2,colnames(get_all_vars(formula(terms(fixed)),data=newdata))[-1]))
if(mixture[[2]] != "-1")X0.names2 <- unique(c(X0.names2,colnames(get_all_vars(formula(terms(mixture)),data=newdata))))
if(random[[2]] != "-1")X0.names2 <- unique(c(X0.names2,colnames(get_all_vars(formula(terms(random)),data=newdata))))
if(classmb[[2]] != "-1")X0.names2 <- unique(c(X0.names2,colnames(get_all_vars(formula(terms(classmb)),data=newdata))))
X_fixed <- model.matrix(fixed,data=newdata)
if(colnames(X_fixed)[1]=="(Intercept)"){
colnames(X_fixed)[1] <- "intercept"
int.fixed <- 1
}else{
stop ("Only models with an intercept can be estimated using lcmm. This is required for identifiability purposes")
}
nom.fixed <- colnames(X_fixed)
if(int.fixed>0)inddepvar.fixed <- inddepvar.fixed.nom <- nom.fixed[-1]
if(mixture[[2]] != "-1"){
X_mixture <- model.matrix(mixture,data=newdata)
if(colnames(X_mixture)[1]=="(Intercept)"){
colnames(X_mixture)[1] <- "intercept"
int.mixture <- 1
}
nom.mixture <- inddepvar.mixture <- inddepvar.mixture.nom <- colnames(X_mixture)
if(int.mixture>0)inddepvar.mixture <- inddepvar.mixture[-1]
id.X_mixture <- 1
}else{
inddepvar.mixture <- nom.mixture <- inddepvar.mixture.nom <- NULL
id.X_mixture <- 0
}
if(random[[2]] != "-1"){
X_random <- model.matrix(random,data=newdata)
if(colnames(X_random)[1]=="(Intercept)"){
colnames(X_random)[1] <- "intercept"
int.random <- 1
}
inddepvar.random <- inddepvar.random.nom <- colnames(X_random)
if(int.random>0) inddepvar.random <- inddepvar.random[-1]
id.X_random <- 1
}else{
inddepvar.random <- inddepvar.random.nom <- NULL
id.X_random <- 0
}
if(classmb[[2]] != "-1"){
X_classmb <- model.matrix(classmb,data=newdata)
colnames(X_classmb)[1] <- "intercept"
id.X_classmb <- 1
inddepvar.classmb <- colnames(X_classmb)[-1]
inddepvar.classmb.nom <- colnames(X_classmb)
}else{
inddepvar.classmb <- inddepvar.classmb.nom <- "intercept"
id.X_classmb <- 0
}
var.exp <- NULL
var.exp <- c(var.exp,colnames(X_fixed))
if(id.X_mixture == 1) var.exp <- c(var.exp,colnames(X_mixture))
if(id.X_random == 1)var.exp <- c(var.exp,colnames(X_random))
if(id.X_classmb == 1)var.exp <- c(var.exp,colnames(X_classmb))
var.exp <- unique(var.exp)
timeobs <- rep(0, nrow(newdata))
if(!is.null(var.time))
{
timeobs <- newdata[,var.time]
}
z.fixed <- strsplit(nom.fixed,split=":",fixed=TRUE)
z.fixed <- lapply(z.fixed,sort)
if(id.X_mixture==1)
{
z.mixture <- strsplit(nom.mixture,split=":",fixed=TRUE)
z.mixture <- lapply(z.mixture,sort)
}
else z.mixture <- list()
if(!all(z.mixture %in% z.fixed)) stop("The covariates in mixture should also be included in the argument fixed")
Y.name <- as.character(attributes(terms(fixed))$variables[2])
Y0 <- newdata[,Y.name]
X0 <- X_fixed
oldnames <- colnames(X0)
z.X0 <- strsplit(colnames(X0),split=":",fixed=TRUE)
z.X0 <- lapply(z.X0,sort)
if(id.X_mixture == 1)
{
z.mixture <- strsplit(colnames(X_mixture),split=":",fixed=TRUE)
z.mixture <- lapply(z.mixture,sort)
for(i in 1:length(colnames(X_mixture)))
{
if(!isTRUE(z.mixture[i] %in% z.X0))
{
X0 <- cbind(X0,X_mixture[,i])
colnames(X0) <- c(oldnames, colnames(X_mixture)[i])
oldnames <- colnames(X0)
z.X0 <- strsplit(colnames(X0),split=":",fixed=TRUE)
z.X0 <- lapply(z.X0,sort)
}
}
}
else
{
z.mixture <- list()
}
if(id.X_random == 1)
{
z.random <- strsplit(colnames(X_random),split=":",fixed=TRUE)
z.random <- lapply(z.random,sort)
for(i in 1:length(colnames(X_random)))
{
if(!isTRUE(z.random[i] %in% z.X0))
{
X0 <- cbind(X0,X_random[,i])
colnames(X0) <- c(oldnames, colnames(X_random)[i])
oldnames <- colnames(X0)
z.X0 <- strsplit(colnames(X0),split=":",fixed=TRUE)
z.X0 <- lapply(z.X0,sort)
}
}
}
else
{
z.random <- list()
}
if(id.X_classmb == 1)
{
z.classmb <- strsplit(colnames(X_classmb),split=":",fixed=TRUE)
z.classmb <- lapply(z.classmb,sort)
for(i in 1:length(colnames(X_classmb)))
{
if(!isTRUE(z.classmb[i] %in% z.X0))
{
X0 <- cbind(X0,X_classmb[,i])
colnames(X0) <- c(oldnames, colnames(X_classmb)[i])
oldnames <- colnames(X0)
z.X0 <- strsplit(colnames(X0),split=":",fixed=TRUE)
z.X0 <- lapply(z.X0,sort)
}
}
}
else
{
z.classmb <- list()
}
if((any(is.na(X0))==TRUE)|(any(is.na(Y0))==TRUE))stop("The data should not contain any missing value")
n <- dim(data)[1]
if (!((int.fixed+int.random)>0)) X0 <- as.data.frame(X0[,-which(colnames(X0)=="intercept")])
nom.X0 <- colnames(X0)
nvar.exp <- length(nom.X0)
IND <- newdata[,nom.subject]
if(missing(prior)){ PRIOR <- seq(0,length=length(IND))}
if(!missing(prior)){
PRIOR <- newdata[,prior]
PRIOR[(is.na(PRIOR))] <- 0
}
ng0 <- ng
idiag0 <- as.integer(idiag)
nwg0 <- as.integer(nwg)
idea0 <- rep(0,nvar.exp)
idprob0 <- rep(0,nvar.exp)
idg0 <- rep(0,nvar.exp)
z.X0 <- strsplit(nom.X0,split=":",fixed=TRUE)
z.X0 <- lapply(z.X0,sort)
for (i in 1:nvar.exp)
{
idea0[i] <- z.X0[i] %in% z.random
idprob0[i] <- z.X0[i] %in% z.classmb
if((z.X0[i] %in% z.fixed) & !(z.X0[i] %in% z.mixture)) idg0[i] <- 1
if((z.X0[i] %in% z.fixed) & (z.X0[i] %in% z.mixture)) idg0[i] <- 2
}
if((int.fixed+int.random)>0) idprob0[1] <- 0
matYX <- cbind(IND,PRIOR,Y0,X0)
matYXord <- matYX[sort.list(matYX[,1]),]
Y0 <- as.numeric(matYXord[,3] )
X0 <- apply(matYXord[,-c(1,2,3),drop=FALSE],2,as.numeric)
IND <- matYXord[,1]
PRIOR <- as.numeric(matYXord[,2])
PRIOR <-as.integer(as.vector(PRIOR))
X0<-as.numeric(as.matrix(X0))
Y0<-as.numeric(as.matrix(Y0))
nmes0<-as.vector(table(IND))
ns0<-length(nmes0)
prior2 <- as.integer(rep(0,ns0))
prior0 <- prior2
if(!missing(prior)){
prior0 <- PRIOR[cumsum(nmes0)]
}
INDuniq <- IND[cumsum(nmes0)]
seqnG <- 0:ng0
if (!(all(prior0 %in% seqnG))) stop ("The argument prior should contain integers between 0 and ng")
loglik <- as.double(0)
rlindiv <- rep(0,ns0)
UACV <- as.double(0)
ni <- 0
istop <- 0
gconv <-rep(0,3)
ppi0 <- rep(0,ns0*ng0)
nv0<-nvar.exp
nobs0<-length(Y0)
resid_m <- rep(0,nobs0)
resid_ss <- rep(0,nobs0)
pred_m_g <- rep(0,nobs0*ng0)
pred_ss_g <- rep(0,nobs0*ng0)
nea0 <- sum(idea0==1)
predRE <- rep(0,nea0*ns0)
ntrtot0 <- sum(ide==1)
minY <- zitr[1]
maxY <- zitr[2]
ncor0 <- 0
b<-NULL
b1 <- NULL
NPROB <- 0
if(ng0==1| missing(B)){
NEF<-sum(idg0!=0)-1
b1[1:NEF]<-0
NVC <- 0
if(idiag0==1&nea0>0){
NVC<-sum(idea0==1)
b1[(NEF+1):(NEF+NVC)]<-1}
if(idiag0==0&nea0>0){
kk<-sum(idea0==1)
NVC<-(kk*(kk+1))/2
indice<-cumsum(1:kk)
bidiag<-rep(0,NVC)
bidiag[indice]<-1
b1[(NEF+1):(NEF+NVC)]<-bidiag
}
if (ntrtot0==1) {
b1[(NEF+NVC+1):(NEF+NVC+ntrtot0)] <- 0
}else{
b1[(NEF+NVC+1)] <- 2*qnorm(0.98)*(-median(Y0)+minY+1)/(ntrtot0-1)
b1[(NEF+NVC+2):(NEF+NVC+ntrtot0)] <- sqrt(2*qnorm(0.98)/(ntrtot0-1))
}
NPM <- length(b1)
NW <- 0
V <- rep(0,NPM*(NPM+1)/2)
}
if(ng0>1){
NPROB <- (sum(idprob0==1)+1)*(ng0-1)
NEF <- sum(idg0==1)+(sum(idg0==2))*ng0-1
NVC <- 0
if(idiag0==1&nea0>0) NVC <- sum(idea0==1)
if(idiag0==0&nea0>0){
kk <- sum(idea0==1)
NVC <- (kk*(kk+1))/2}
NW <- nwg0*(ng0-1)
NPM <- NPROB+NEF+NVC+NW+ntrtot0
b <- rep(0,NPM)
V <- rep(0,NPM*(NPM+1)/2)
}
fix0 <- rep(0,NPM)
if(length(posfix))
{
if(any(!(posfix %in% 1:NPM))) stop("Indexes in posfix are not correct")
fix0[posfix] <- 1
}
if(length(posfix)==NPM) stop("No parameter to estimate")
Hr0 <- as.numeric(partialH)
pbH0 <- rep(0,NPM)
if(is.logical(partialH))
{
if(partialH) pbH0 <- rep(1,NPM)
pbH0[posfix] <- 0
if(sum(pbH0)==0 & Hr0==1) stop("No partial Hessian matrix can be defined")
}
else
{
if(!all(Hr0 %in% 1:NPM)) stop("Indexes in partialH are not correct")
pbH0[Hr0] <- 1
pbH0[posfix] <- 0
}
if(missing(B)){
if(ng0>1){
idea2 <- idea0
idprob2 <- rep(0,nv0)
idg2 <- rep(0,nv0)
idg2[idg0!=0] <- 1
NEF2<-sum(idg2==1)-1
NPM2<-NEF2+NVC+ntrtot0
nwg2<-0
ng2<-1
ppi2<- rep(0,ns0)
pred_m_g2 <- rep(0,nobs0)
pred_ss_g2 <- rep(0,nobs0)
V2 <- rep(0,NPM2*(NPM2+1)/2)
marker <- rep(0,(maxY-minY+1)*2)
transfY <- rep(0,(maxY-minY+1)*2)
init <- .Fortran(C_hetmixord,
as.double(Y0),
as.double(X0),
as.integer(prior2),
as.integer(idprob2),
as.integer(idea2),
as.integer(idg2),
as.integer(ns0),
as.integer(ng2),
as.integer(nv0),
as.integer(nobs0),
as.integer(nea0),
as.integer(nmes0),
as.integer(idiag0),
as.integer(nwg2),
as.integer(NPM2),
best=as.double(b1),
V=as.double(V2),
as.double(loglik),
niter=as.integer(ni),
conv=as.integer(istop),
as.double(gconv),
as.double(ppi2),
as.double(resid_m),
as.double(resid_ss),
as.double(pred_m_g2),
as.double(pred_ss_g2),
predRE=as.double(predRE),
as.double(convB),
as.double(convL),
as.double(convG),
as.integer(maxiter),
as.integer(minY),
as.integer(maxY),
as.integer(ide),
as.double(marker),
as.double(transfY),
as.double(UACV),
as.double(rlindiv),
as.integer(fix0))
k <- NPROB
l <- 0
t<- 0
for (i in 1:nvar.exp) {
if(idg0[i]==1 & i>1){
l <- l+1
t <- t+1
b[k+t] <- init$best[l]
}
if(idg0[i]==2){
if (i==1){
for (g in 2:ng){
t <- t+1
b[k+t] <- - 0.5*(g-1)
}
}
if (i>1){
l <- l+1
for (g in 1:ng){
t <- t+1
if(init$conv==1) b[k+t] <- init$best[l]+(g-(ng+1)/2)*sqrt(init$V[l*(l+1)/2])
else b[k+t] <- init$best[l]+(g-(ng+1)/2)*init$best[l]
}
}
}
}
b[(NPROB+NEF+1):(NPROB+NEF+NVC)] <-init$best[(NEF2+1):(NEF2+NVC)]
b[(NPM-ntrtot0+1):NPM] <-init$best[(NPM2-ntrtot0+1):NPM2]
}
if(ng0==1 ){
b <- b1
}
}
else
{
if(is.vector(B))
{
if(length(B)!=NPM){
stop("The length of the vector B is not correct")
}
else
{
b <-B
if(NVC>0)
{
if(idiag==1)
{
b[NPROB+NEF+1:NVC] <- sqrt(b[NPROB+NEF+1:NVC])
}
else
{
varcov <- matrix(0,nrow=nea0,ncol=nea0)
varcov[upper.tri(varcov,diag=TRUE)] <- b[NPROB+NEF+1:NVC]
varcov <- t(varcov)
varcov[upper.tri(varcov,diag=TRUE)] <- b[NPROB+NEF+1:NVC]
ch <- chol(varcov)
b[NPROB+NEF+1:NVC] <- ch[upper.tri(ch,diag=TRUE)]
}
}
}
}
else
{
if(!inherits(B, "lcmm")) stop("B should be either a vector or an object of class lcmm")
if(ng>1 & B$ng==1)
{
NEF2 <- sum(idg0!=0)-1
NPM2 <- NEF2+NVC+ntrtot0
if(length(B$best)!=NPM2) stop("B is not correct")
if(!length(B$Brandom))
{
k <- NPROB
l <- 0
t<- 0
for (i in 1:nvar.exp) {
if(idg0[i]==1 & i>1){
l <- l+1
t <- t+1
b[k+t] <- B$best[l]
}
if(idg0[i]==2){
if (i==1){
for (g in 2:ng){
t <- t+1
b[k+t] <- - 0.5*(g-1)
}
}
if (i>1){
l <- l+1
for (g in 1:ng){
t <- t+1
if(B$conv==1) b[k+t] <- B$best[l]+(g-(ng+1)/2)*sqrt(B$V[l*(l+1)/2])
else b[k+t] <- B$best[l]+(g-(ng+1)/2)*B$best[l]
}
}
}
}
if(NVC>0)
{
if(idiag==TRUE)
{
b[(NPROB+NEF+1):(NPROB+NEF+NVC)] <-B$cholesky[(1:nea0)*(2:(nea0+1))/2]
}
else
{
b[(NPROB+NEF+1):(NPROB+NEF+NVC)] <-B$cholesky
}
}
b[(NPROB+NEF+NVC+NW+1):NPM] <- B$best[(NEF2+NVC+1):NPM2]
}
else
{
bb <- rep(0,NPM-NPROB-NW)
vbb <- matrix(0,NPM-NPROB-NW,NPM-NPROB-NW)
VB <- matrix(0,NPM2,NPM2)
VB[upper.tri(VB,diag=TRUE)] <- B$V
VB <- t(VB)
VB[upper.tri(VB,diag=TRUE)] <- B$V
nbg <- idg0[which(idg0!=0)]
nbg[which(nbg==2)] <- ng
nbgnef <- unlist(sapply(nbg,function(k) if(k>1) rep(2,k) else k))
nbgnef <- nbgnef[-1]
nbg <- nbg[-1]
vbb[which(nbgnef==1),setdiff(1:ncol(vbb),which(nbgnef!=1))] <- VB[which(nbg==1),setdiff(1:ncol(VB),which(nbg!=1))]
vbb[(NEF+1):nrow(vbb),(NEF+1):ncol(vbb)] <- VB[(NEF2+1):nrow(VB),(NEF2+1):ncol(VB)]
t <- 0
l <- 0
for (i in 1:nvar.exp)
{
if(idg0[i]==1)
{
if(i==1) next
l <- l+1
t <- t+1
bb[t] <- B$best[l]
}
if(idg0[i]==2)
{
if(i==1)
{
t <- t+ng-1
next
}
l <- l+1
for (g in 1:ng)
{
t <- t+1
bb[t] <- B$best[l]
vbb[t,t] <- VB[l,l]
}
}
}
if(NVC>0)
{
if(idiag==TRUE)
{
bb[NEF+1:NVC] <- B$cholesky[(1:nea0)*(2:(nea0+1))/2]
}
else
{
bb[NEF+1:NVC] <- B$cholesky
}
}
bb[NEF+NVC+1:ntrtot0] <- B$best[NEF2+NVC+1:ntrtot0]
if(idg0[1]>1)
{
bb <- bb[-(1:(ng-1))]
vbb <- vbb[-(1:(ng-1)),-(1:(ng-1))]
}
up <- vbb[upper.tri(vbb,diag=TRUE)]
vbb <- t(vbb)
vbb[upper.tri(vbb,diag=TRUE)] <- up
Chol <- chol(vbb)
Chol <- t(Chol)
if(idg0[1]>1)
{
b[c((NPROB+ng):(NPROB+NEF+NVC),(NPROB+NEF+NVC+NW+1):NPM)] <- bb + Chol %*% rnorm(length(bb))
b[NPROB+1:(ng-1)] <- 0
}
else
{
b[c((NPROB+1):(NPROB+NEF+NVC),(NPROB+NEF+NVC+NW+1):NPM)] <- bb + Chol %*% rnorm(length(bb))
}
b[1:NPROB] <- 0
if(NW>0) b[NPROB+NEF+NVC+1:NW] <- 1
}
}
}
}
if(ng0>=2){
nom <-rep(c("intercept",nom.X0[idprob0!=0]),each=ng0-1)
nom1 <- paste(nom," class",c(1:(ng0-1)),sep="")
names(b)[1:NPROB]<-nom1
}
if(ng0==1) names(b)[1:(NEF)] <- nom.X0[-1][idg0[-1]!=0]
if(ng0>1){
nom1<- NULL
for (i in 1:nvar.exp) {
if(idg0[i]==2){
if (i==1){
nom <- paste(nom.X0[i]," class",c(2:ng0),sep="")
nom1 <- cbind(nom1,t(nom))
}
if (i>1){
nom <- paste(nom.X0[i]," class",c(1:ng0),sep="")
nom1 <- cbind(nom1,t(nom))
}
}
if(idg0[i]==1 & i>1) nom1 <- cbind(nom1,nom.X0[i])
}
names(b)[(NPROB+1):(NPROB+NEF)]<- nom1
}
names(b)[(NPM-ntrtot0+1):NPM]<- paste("thresh. parm",c(1:ntrtot0),sep="")
if(NVC!=0)names(b)[(NPROB+NEF+1):(NPROB+NEF+NVC)] <- paste("varcov",c(1:(NVC)))
if(NW!=0)names(b)[(NPROB+NEF+NVC+1):(NPROB+NEF+NVC+NW)] <- paste("varprop class",c(1:(ng0-1)))
N <- NULL
N[1] <- NPROB
N[2] <- NEF
N[3] <- NVC
N[4] <- NW
N[5] <- nobs0
N[6] <- 0
idiag <- as.integer(idiag0)
idea <- as.integer(idea0)
nv <- as.integer(nv0)
marker <- rep(0,(maxY-minY+1)*2)
transfY <- rep(0,(maxY-minY+1)*2)
out <- .Fortran(C_hetmixord,
as.double(Y0),
as.double(X0),
as.integer(prior0),
as.integer(idprob0),
as.integer(idea0),
as.integer(idg0),
as.integer(ns0),
as.integer(ng0),
as.integer(nv0),
as.integer(nobs0),
as.integer(nea0),
as.integer(nmes0),
as.integer(idiag0),
as.integer(nwg0),
as.integer(NPM),
best=as.double(b),
V=as.double(V),
loglik=as.double(loglik),
niter=as.integer(ni),
conv=as.integer(istop),
gconv=as.double(gconv),
ppi2=as.double(ppi0),
resid_m=as.double(resid_m),
resid_ss=as.double(resid_ss),
pred_m_g=as.double(pred_m_g),
pred_ss_g=as.double(pred_ss_g),
predRE=as.double(predRE),
as.double(convB),
as.double(convL),
as.double(convG),
as.integer(maxiter),
as.integer(minY),
as.integer(maxY),
as.integer(ide),
marker=as.double(marker),
transfY=as.double(transfY),
UACV=as.double(UACV),
rlindiv=as.double(rlindiv),
as.integer(fix0))
if(length(posfix))
{
mr <- NPM-length(posfix)
Vr <- matrix(0,mr,mr)
Vr[upper.tri(Vr,diag=TRUE)] <- out$V[1:(mr*(mr+1)/2)]
Vr <- t(Vr)
Vr[upper.tri(Vr,diag=TRUE)] <- out$V[1:(mr*(mr+1)/2)]
V <- matrix(0,NPM,NPM)
V[setdiff(1:NPM,posfix),setdiff(1:NPM,posfix)] <- Vr
V <- V[upper.tri(V,diag=TRUE)]
}
else
{
V <- out$V
}
Cholesky <- rep(0,(nea0*(nea0+1)/2))
if(idiag0==0 & NVC>0){
Cholesky[1:NVC] <- out$best[(NPROB+NEF+1):(NPROB+NEF+NVC)]
U <- matrix(0,nrow=nea0,ncol=nea0)
U[upper.tri(U,diag=TRUE)] <- Cholesky[1:NVC]
z <- t(U) %*% U
out$best[(NPROB+NEF+1):(NPROB+NEF+NVC)] <- z[upper.tri(z,diag=TRUE)]
}
if(idiag0==1 & NVC>0){
id <- 1:nea0
indice <- rep(id+id*(id-1)/2)
Cholesky[indice] <- out$best[(NPROB+NEF+1):(NPROB+NEF+nea0)]
out$best[(NPROB+NEF+1):(NPROB+NEF+NVC)] <- out$best[(NPROB+NEF+1):(NPROB+NEF+NVC)]**2
}
if(ng0>1) {
ppi<- matrix(out$ppi2,ncol=ng0,byrow=TRUE)
}
else {
ppi <- matrix(rep(1,ns0),ncol=ng0)
}
chooseClass <- function(ppi)
{
res <- which.max(ppi)
if(!length(res)) res <- NA
return(res)
}
classif<-apply(ppi,1,chooseClass)
ppi<-data.frame(INDuniq,classif,ppi)
temp<-paste("prob",1:ng0,sep="")
colnames(ppi) <- c(nom.subject,"class",temp)
rownames(ppi) <- 1:ns0
names(out$best)<-names(b)
estimlink <- cbind(out$marker,out$transfY)
colnames(estimlink) <- c("Y","transfY")
if (!("intercept" %in% nom.X0)) X0.names2 <- X0.names2[-1]
res <-list(ns=ns0,ng=ng0,idea0=idea0,idprob0=idprob0,idg0=idg0,idcor0=rep(0,nvar.exp),loglik=out$loglik,best=out$best,V=V,gconv=out$gconv,conv=out$conv,call=call,niter=out$niter,N=N,idiag=idiag0,pprob=ppi,Xnames=nom.X0,Xnames2=X0.names2,cholesky=Cholesky,estimlink=estimlink,linktype=3,linknodes=zitr,ide=ide,Ydiscrete=Ydiscrete,discrete_loglik=out$loglik,UACV=out$UACV,IndivContrib=out$rlindiv,na.action=na.action,AIC=2*(length(out$best)-length(posfix)-out$loglik),BIC=(length(out$best)-length(posfix))*log(ns0)-2*out$loglik,data=datareturn, var.time=var.time)
class(res) <-c("lcmm")
cost<-proc.time()-ptm
if(verbose==TRUE) cat("The program took", round(cost[3],2), "seconds \n")
res
} |
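# .Ordlcmm is not meant to be called directly; in the lcmm package it is
# reached through lcmm() when link = "thresholds" is requested for an
# ordinal outcome. A minimal sketch of such a call, assuming a data frame
# `dat` with a numeric subject identifier ID, ordinal outcome Y and time t
# (all names here are placeholders):
if (FALSE) {
  m2 <- lcmm(Y ~ t, mixture = ~ t, random = ~ t, subject = "ID",
             ng = 2, link = "thresholds", data = dat)
  summary(m2)
}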
`BIC.fmo` <-
function(object, ...)AIC(object$full, k = log(length(object$full$res))) |
CST_ProxiesAttractor <- function(data, quanti, ncores = NULL){
if (!inherits(data, 's2dv_cube')) {
stop("Parameter 'data' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
}
if (is.null(quanti)) {
stop("Parameter 'quanti' cannot be NULL.")
}
data$data <- ProxiesAttractor(data = data$data, quanti = quanti, ncores = ncores)
return(data)
}
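# A hedged usage sketch for CST_ProxiesAttractor, assuming an 's2dv_cube'
# object `exp` as produced by CSTools::CST_Load (the object name is a
# placeholder). quanti is the quantile used to define the extremes of the
# log-distance distribution:
if (FALSE) {
  attractor <- CST_ProxiesAttractor(data = exp, quanti = 0.90)
  str(attractor$data)  # local dimension 'dim' and persistence 'theta'
}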
ProxiesAttractor <- function(data, quanti, ncores = NULL){
if (is.null(data)) {
stop("Parameter 'data' cannot be NULL.")
}
if (is.null(quanti)) {
stop("Parameter 'quanti' is mandatory")
}
if (any(names(dim(data)) %in% 'sdate')) {
if (any(names(dim(data)) %in% 'ftime')) {
data <- MergeDims(data, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
if (!(any(names(dim(data)) %in% 'time'))){
stop("Parameter 'data' must have a temporal dimension named 'time'.")
}
if (any(names(dim(data)) %in% 'lat')) {
if (any(names(dim(data)) %in% 'lon')) {
data <- MergeDims(data, merge_dims = c('lon', 'lat'),
rename_dim = 'grid')
}
}
if (any(names(dim(data)) %in% 'latitude')) {
if (any(names(dim(data)) %in% 'longitude')) {
data <- MergeDims(data, merge_dims = c('longitude', 'latitude'),
rename_dim = 'grid')
}
}
if(!(any(names(dim(data)) %in% 'grid'))){
stop("Parameter 'data' must have a spatial dimension named 'grid'.")
}
attractor <- Apply(data, target_dims = c('time', 'grid'),
fun = .proxiesattractor,
quanti = quanti , ncores = ncores)
attractor <- lapply(attractor,
FUN = function(x, dimname){
names(dim(x))[dimname] <- 'time'
return(x)},
dimname = which(names(dim(attractor[[1]])) == 'dim2'))
return(list(dim = attractor$dim, theta = attractor$theta))
}
.proxiesattractor <- function(data, quanti) {
logdista <- Apply(data, target_dims = 'grid',
fun = function(x, y){
-log(colMeans((y - as.vector(x))^2))},
y = t(data))[[1]]
Theta <- function(logdista, quanti){
thresh <- quantile(logdista, quanti, na.rm = TRUE)
logdista[is.infinite(logdista)] <- NaN
Li <- which(as.vector(logdista) > as.numeric(thresh))
Ti <- diff(Li)
N <- length(Ti)
q <- 1 - quanti
Si <- Ti - 1
Nc <- length(which(Si > 0))
N <- length(Ti)
theta <- (sum(q * Si) + N + Nc - sqrt(((sum(q * Si) + N + Nc)^2) -
8 * Nc * sum(q * Si))) / (2 * sum(q * Si))
logdista <- sort(logdista)
findidx <- which(as.vector(logdista) > as.numeric(thresh))
if(length(findidx) < 1) {
stop("Parameter 'quanti' is too high for the length of the data provided.")
}
logextr <- logdista[findidx[[1]]:(length(logdista) - 1)]
dim <- 1 /mean(as.numeric(logextr) - as.numeric(thresh))
return(list(dim = dim, theta = theta))
}
names(dim(logdista)) <- c('dim1', 'dim2')
proxies <- Apply(data = list(logdista = logdista),
target_dims = list('dim1'), fun = Theta, quanti = quanti)
return(list(dim = proxies$dim, theta = proxies$theta))
} |
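# ProxiesAttractor itself works on a plain named array; the sketch below
# builds a random field with the 'time' and 'grid' dimensions the function
# expects and computes the two dynamical proxies for each time step
# (assumes multiApply, used internally via Apply, is available):
if (FALSE) {
  set.seed(1)
  field <- array(rnorm(100 * 50), dim = c(time = 100, grid = 50))
  att <- ProxiesAttractor(data = field, quanti = 0.60)
  str(att)  # list with elements 'dim' and 'theta', one value per time step
}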
tubeData <- local(get(load("tube_data.rda"))) |
NULL
fboot_assemblages <- function(fres, opt.nbMax = fres$nbOpt,
opt.R2 = FALSE, opt.plot = FALSE,
nbIter = 1, seed = NULL, rm.number = 0) {
lAffectElt <- vector(mode = "list", length = fres$nbElt)
for (nb in seq_len(fres$nbElt))
lAffectElt[[nb]] <- cut_ftree(fres$tree.I, nb)
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
cols <- fcolours()[cut_ftree(fres$tree.II, fres$nbOpt)]
plot_ftree(tree = fres$tree.II, cols = cols)
graphics::text(x = 1, y = 0, labels = "Reference tree", pos = 4)
}
indCrit <- c(1:2,4:5,8:14)
critNames <- clusterCrit::getCriteriaNames(FALSE)[indCrit]
vCriteria <- numeric(length(critNames))
names(vCriteria) <- c(critNames)
mCrit <- array(0, dim = c(fres$nbElt, nbIter, length(critNames)))
dimnames(mCrit) <- list(seq_len(fres$nbElt), seq_len(nbIter), critNames)
if (opt.R2 == TRUE) {
mStats <- array(0, dim = c(fres$nbElt, nbIter, 2))
dimnames(mStats) <- list(seq_len(fres$nbElt),
seq_len(nbIter), c("R2", "E"))
}
mIndAss <- build_random_matrix(nbIter, seed, rm.number,
rm.number.max = fres$nbAss)
setAss <- unique(rownames(fres$mOccur))
for (iter in seq_len(nbIter)) {
indAss <- mIndAss[iter, ]
index <- which(rownames(fres$mOccur) %in% setdiff(setAss, setAss[indAss]))
main <- paste0("The assemblages ", list_in_quote(setAss[indAss]),
" are removed")
if (getOption("verbose") == TRUE) cat(main, "\n")
tree.I <- fit_ftree(fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index], fres$affectElt,
fres$opt.method, fres$opt.mean, fres$opt.model,
opt.nbMax )
for (nb in seq_len(opt.nbMax)) {
vCriteria[] <- as.vector(unlist(clusterCrit::extCriteria(
as.integer(cut_ftree(tree.I, nb) ),
as.integer(lAffectElt[[nb]]),
crit = "all")))[indCrit]
mCrit[nb, iter, ] <- vCriteria
}
if (opt.R2 == TRUE) {
res <- validate_ftree(tree.I,
fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index],
fres$opt.method, fres$opt.mean, fres$opt.model,
fres$opt.jack, fres$jack,
opt.nbMax )
mStats[ , iter, "R2"] <- res$tStats[ , "R2cal"]
mStats[ , iter, "E"] <- res$tStats[ , "R2prd"]
}
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
if (opt.R2 == TRUE) {
plot_ftree(res$tree.II, cols = cols)
} else {
plot_ftree(tree.I, cols = cols)
}
graphics::text(x = 1, y = 0, labels = main, pos = 4)
}
}
rboot <- vector(mode = "list", length = length(critNames))
names(rboot) <- critNames
for (ind in seq_along(critNames))
rboot[[ind]] <- as.matrix(mCrit[ , , critNames[ind]])
if (opt.R2 == TRUE) {
mStats[fres$nbElt, , "R2"] <- mStats[fres$nbElt - 1, , "R2"]
mStats[fres$nbElt, , "E"] <- mStats[fres$nbElt - 1, , "E"]
rboot$R2 <- as.matrix(mStats[ , , "R2"])
rboot$E <- as.matrix(mStats[ , , "E"])
}
return(rboot)
}
fboot_performances <- function(fres, opt.nbMax = fres$nbOpt,
opt.R2 = FALSE, opt.plot = FALSE,
nbIter = 1, seed = NULL, rm.number = 0) {
lAffectElt <- vector(mode = "list", length = fres$nbElt)
for (nb in seq_len(fres$nbElt))
lAffectElt[[nb]] <- cut_ftree(fres$tree.I, nb)
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
cols <- fcolours()[cut_ftree(fres$tree.II, fres$nbOpt)]
plot_ftree(tree = fres$tree.II, cols = cols)
graphics::text(x = 1, y = 0, labels = "Reference tree", pos = 4)
}
indCrit <- c(1:2,4:5,8:14)
critNames <- clusterCrit::getCriteriaNames(FALSE)[indCrit]
vCriteria <- numeric(length(critNames))
names(vCriteria) <- c(critNames)
mCrit <- array(0, dim = c(fres$nbElt, nbIter, length(critNames)))
dimnames(mCrit) <- list(seq_len(fres$nbElt), seq_len(nbIter), critNames)
if (opt.R2 == TRUE) {
mStats <- array(0, dim = c(fres$nbElt, nbIter, 2))
dimnames(mStats) <- list(seq_len(fres$nbElt),
seq_len(nbIter), c("R2", "E"))
}
mIndXpr <- build_random_matrix(nbIter, seed, rm.number,
rm.number.max = fres$nbXpr)
setXpr <- unique(names(fres$xpr))
for (iter in seq_len(nbIter)) {
indXpr <- mIndXpr[iter, ]
index <- which(names(fres$xpr) %in% setdiff(setXpr, setXpr[indXpr]))
main <- paste0("The performances ", list_in_quote(setXpr[indXpr]),
" are removed")
if (getOption("verbose") == TRUE) cat(main, "\n")
tree.I <- fit_ftree(fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index], fres$affectElt,
fres$opt.method, fres$opt.mean, fres$opt.model,
opt.nbMax )
for (nb in seq_len(opt.nbMax)) {
vCriteria[] <- as.vector(unlist(clusterCrit::extCriteria(
as.integer(cut_ftree(tree.I, nb) ),
as.integer(lAffectElt[[nb]]),
crit = "all")))[indCrit]
mCrit[nb, iter, ] <- vCriteria
}
if (opt.R2 == TRUE) {
res <- validate_ftree(tree.I,
fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index],
fres$opt.method, fres$opt.mean, fres$opt.model,
fres$opt.jack, fres$jack,
opt.nbMax )
mStats[seq_len(fres$nbElt - 1), iter, "R2"] <- res$tStats[ , "R2cal"]
mStats[seq_len(fres$nbElt - 1), iter, "E"] <- res$tStats[ , "R2prd"]
}
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
if (opt.R2 == TRUE) {
plot_ftree(res$tree.II, cols = cols)
} else {
plot_ftree(tree.I, cols = cols)
}
graphics::text(x = 1, y = 0, labels = main, pos = 4)
}
}
rboot <- vector(mode = "list", length = length(critNames))
names(rboot) <- critNames
for (ind in seq_along(critNames))
rboot[[ind]] <- as.matrix(mCrit[ , , critNames[ind]])
if (opt.R2 == TRUE) {
mStats[fres$nbElt, , "R2"] <- mStats[fres$nbElt - 1, , "R2"]
mStats[fres$nbElt, , "E"] <- mStats[fres$nbElt - 1, , "E"]
rboot$R2 <- as.matrix(mStats[ , , "R2"])
rboot$E <- as.matrix(mStats[ , , "E"])
}
return(rboot)
}
fboot_write_one_point <- function(fres, rboot, filename) {
utils::write.table(
x = cbind(rownames(rboot$random.matrix), rboot$random.matrix),
file = paste0(delstr_end(filename, 3), "random.matrix.csv"),
append = FALSE, col.names = TRUE, row.names = FALSE,
sep = ",")
rboot$random.matrix <- NULL
col1 <- NULL
for (i in seq_along(names(rboot)))
col1 <- c(col1, rep(names(rboot)[i], fres$nbElt))
col2 <- rep(seq_len(fres$nbElt), length(names(rboot)))
col3 <- NULL
for (i in seq_along(names(rboot))) col3 <- rbind(col3, rboot[[i]])
tmp <- cbind(col1, col2, col3)
colnames(tmp) <- c("mat", "nbClu", seq_len(dim(rboot[[1]])[2]))
rownames(tmp) <- as.character(seq_len(length(col1)))
utils::write.table(x = tmp,
file = filename,
append = FALSE, col.names = TRUE, row.names = FALSE,
sep = ",")
}
fboot_read_one_point <- function(filename) {
file <- paste0(delstr_end(filename, 3), "random.matrix.csv")
ttt <- utils::read.table(file, header = TRUE, sep = ",",
check.names = FALSE, stringsAsFactors = FALSE)
ttt <- as.matrix(ttt[ , c(-1), drop = FALSE])
rownames(ttt) <- seq_len(dim(ttt)[1])
colnames(ttt) <- seq_len(dim(ttt)[2])
tmp <- utils::read.table(file = filename, header = TRUE, sep = ",",
check.names = FALSE, stringsAsFactors = FALSE)
lnames <- unique(tmp[ , 1])
index <- c(1:2)
rboot <- vector(mode = "list", length = 1 + length(lnames))
names(rboot) <- c("random.matrix", lnames)
rboot$random.matrix <- ttt
for (i in seq_along(lnames)) {
rboot[[1 + i]] <- as.matrix(tmp[tmp$mat == lnames[i], -index])
rownames(rboot[[1 + i]]) <- seq_len(dim(rboot[[1 + i]])[1])
colnames(rboot[[1 + i]]) <- seq_len(dim(rboot[[1 + i]])[2])
}
return(rboot)
}
build_random_matrix <- function(nbIter = 1, seed = NULL,
rm.number = 0, rm.number.max = 0) {
if (!is.null(seed)) set.seed(seed)
mIndex <- matrix(0L, nrow = nbIter, ncol = rm.number,
dimnames = list(seq_len(nbIter), seq_len(rm.number)))
mIndex[1, ] <- sort.int(sample.int(n = rm.number.max, size = rm.number))
if (nbIter > 1) for (iter in 2:nbIter) {
all.different <- TRUE
while (all.different) {
tmp <- sort.int(sample.int(n = rm.number.max, size = rm.number))
names(tmp) <- colnames(mIndex)
i <- 1
while (!isTRUE(all.equal(tmp, mIndex[i, ])) & (i < iter)) i <- i + 1
if (i == iter) all.different <- FALSE
}
mIndex[iter, ] <- tmp
}
return(mIndex)
}
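# build_random_matrix draws, for each iteration, a sorted random subset of
# indices, avoiding duplicated rows across iterations. A small reproducible
# illustration:
if (FALSE) {
  build_random_matrix(nbIter = 3, seed = 42, rm.number = 2, rm.number.max = 5)
  # one row per iteration, each row a distinct sorted pair drawn from 1:5
}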
fboot_one_point <- function(fres,
opt.var = c("assemblages", "performances"),
nbIter = 1, rm.number = 0, seed = NULL,
opt.nbMax = fres$nbOpt, opt.R2 = FALSE,
opt.plot = FALSE ) {
opt.var <- check_foption(opt.var, c("assemblages", "performances"))
if (nbIter < 1) stop("'nbIter' should be at least 1")
if (opt.var == "assemblages")
if ( (rm.number < 1) | (rm.number > fres$nbAss - 1) ) {
stop("'nbIter' should be > 0 and < number of Assemblages")
} else {
namesTest <- rownames(fres$mOccur)
}
if (opt.var == "performances")
if ( (rm.number < 1) | (rm.number > fres$nbXpr - 1) ) {
stop("'nbIter' should be > 0 and < number of Performances")
} else {
namesTest <- names(fres$xpr)
}
lAffectElt <- vector(mode = "list", length = fres$nbElt)
for (nb in seq_len(fres$nbElt))
lAffectElt[[nb]] <- cut_ftree(fres$tree.I, nb)
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
cols <- fcolours()[cut_ftree(fres$tree.II, fres$nbOpt)]
plot_ftree(tree = fres$tree.II, cols = cols)
graphics::text(x = 1, y = 0, labels = "Reference tree", pos = 4)
}
indCrit <- c(1:2,4:5,8:14)
critNames <- clusterCrit::getCriteriaNames(FALSE)[indCrit]
vCriteria <- numeric(length(critNames))
names(vCriteria) <- c(critNames)
mCrit <- array(0, dim = c(fres$nbElt, nbIter, length(critNames)))
dimnames(mCrit) <- list(seq_len(fres$nbElt), seq_len(nbIter), critNames)
if (opt.R2 == TRUE) {
mStats <- array(0, dim = c(fres$nbElt, nbIter, 2))
dimnames(mStats) <- list(seq_len(fres$nbElt),
seq_len(nbIter), c("R2", "E"))
}
setTest <- unique(namesTest)
nbTest <- length(setTest)
rm.max <- choose(nbTest, rm.number)
if (nbIter >= rm.max) nbIter <- rm.max
mIndex <- build_random_matrix(nbIter, seed, rm.number,
rm.number.max = nbTest)
for (iter in seq_len(nbIter)) {
indSet <- mIndex[iter, ]
main <- paste("The", opt.var, list_in_quote(setTest[indSet]),
"are removed", sep = " ")
if (getOption("verbose") == TRUE) cat(main, "\n")
index <- which(namesTest %in% setdiff(setTest, setTest[indSet]))
tree.I <- fit_ftree(fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index], fres$affectElt,
fres$opt.method, fres$opt.mean,
fres$opt.model,
opt.nbMax )
for (nb in seq_len(opt.nbMax)) {
vCriteria[] <- as.vector(unlist(clusterCrit::extCriteria(
as.integer(cut_ftree(tree.I, nb) ),
as.integer(lAffectElt[[nb]]),
crit = "all")))[indCrit]
mCrit[nb, iter, ] <- vCriteria
}
if (opt.R2 == TRUE) {
res <- validate_ftree(tree.I,
fres$fobs[index], fres$mOccur[index, ],
fres$xpr[index],
fres$opt.method, fres$opt.mean, fres$opt.model,
fres$opt.jack, fres$jack,
opt.nbMax )
mStats[ , iter, "R2"] <- res$tStats[ , "R2cal"]
mStats[ , iter, "E"] <- res$tStats[ , "R2prd"]
}
if ((getOption("verbose") == TRUE) && (opt.plot == TRUE)) {
if (opt.R2 == TRUE) {
plot_ftree(res$tree.II, cols = cols)
} else {
plot_ftree(tree.I, cols = cols)
}
graphics::text(x = 1, y = 0, labels = main, pos = 4)
}
}
if (opt.R2 == TRUE) {
mStats[fres$nbElt, , "R2"] <- mStats[fres$nbElt - 1, , "R2"]
mStats[fres$nbElt, , "E"] <- mStats[fres$nbElt - 1, , "E"]
rboot <- vector(mode = "list", length = 3 + length(critNames))
names(rboot) <- c("random.matrix", "R2", "E", critNames)
rboot$random.matrix <- as.matrix(mIndex)
rboot$R2 <- as.matrix(mStats[ , , "R2"])
rboot$E <- as.matrix(mStats[ , , "E"])
for (ind in seq_along(critNames))
rboot[[3 + ind]] <- as.matrix(mCrit[ , , critNames[ind]])
} else {
rboot <- vector(mode = "list", length = 1 + length(critNames))
names(rboot) <- c("random.matrix", critNames)
rboot$random.matrix <- as.matrix(mIndex)
for (ind in seq_along(critNames))
rboot[[1 + ind]] <- as.matrix(mCrit[ , , critNames[ind]])
}
return(rboot)
} |
Ilda <- function(Conf,p,nk,prior,means,W,B,egvtol,limlnk2)
{
N <- sum(nk)
k <- length(nk)
if (k==1) {
stop("The data belongs to one single group. A partition into at least two different groups is required\n")
}
if (prior[1]=="proportions") { prior <- nk/N }
names(prior) <- rownames(means)
Wi <- Safepdsolve(W,maxlnk2=limlnk2,scale=TRUE)
if (is.null(Wi)) {
warning("Ilda function received a singular matrix in the W argument\n")
return(NULL)
}
WiBdecp <- eigen(Wi%*%B)
if (Conf!=5)
{
if (Conf==1) {
r <- min(p,k-1)
} else {
WiBegval <- Re(WiBdecp$values)
posWiBegval <- WiBegval[WiBegval>egvtol]
r <- length(posWiBegval)
}
eigvct <- Re(WiBdecp$vectors[,1:r])
if (r==1) { dim(eigvct) <- c(p,1) }
sclvar <- apply(eigvct,2,function(v) v%*%W%*%v)
if (r>1) {
scaling <- scale(eigvct,center=FALSE,scale=sqrt(sclvar))
dimnames(scaling) <- list(rownames(W),paste("LD",1:r,sep=""))
attr(scaling,"scaled:scale") <- NULL
} else {
scaling <- matrix(eigvct/sqrt(sclvar),ncol=1,dimnames=list(rownames(W),"LD1"))
}
} else {
scaling <- diag(1/sqrt(diag(W)))
dimnames(scaling) <- list(rownames(W),paste("LD",1:p,sep=""))
}
new("Idtlda",prior=prior,means=means,scaling=scaling,N=N,CovCase=Conf)
}
setMethod("lda",
signature(x = "IdtMxtNDE"),
function(x,prior="proportions",selmodel=BestModel(x),egvtol=1.0e-10,silent=FALSE,k2max=1e6,...)
{
limlnk2 <- log(k2max)
if (!x@Hmcdt)
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a linear discriminant function from an estimate of a heteroscedastic mixture\n") }
}
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==x@ModelNames)) }
if (!is.finite(x@logLiks[selmodel]))
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a linear discriminant function from a model with non-finite log-likelihood\n") }
}
grouping <- factor(x@grouping,exclude=NULL)
nk <- as.numeric(table(grouping))
n <- sum(nk)
p <- 2*x@NIVar
k <- length(nk)
grpmeans <- coef(x)$mu
glbmeans <- colSums(nk*grpmeans)
mugdev <- scale(grpmeans,center=glbmeans,scale=FALSE)
vnames <- unlist(dimnames(grpmeans)[2])
B <- matrix(0.,nrow=p,ncol=p,dimnames=list(vnames,vnames))
for (g in 1:k) B <- B + (nk[g]/n)*outer(mugdev[g,],mugdev[g,])
Ilda(Conf=selmodel,p=p,nk=nk,prior=prior,means=grpmeans,W=coef(x,selmodel)$Sigma,B=B,egvtol=egvtol,limlnk2=limlnk2,...)
}
)
setMethod("lda",
signature(x = "IdtClMANOVA"),
function(x,prior="proportions",selmodel=BestModel(H1res(x)),egvtol=1.0e-10,silent=FALSE,k2max=1e6,...)
{
limlnk2 <- log(k2max)
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==x@H0res@ModelNames)) }
if (!is.finite(x@H0res@logLiks[selmodel]) || !is.finite(x@H1res@logLiks[selmodel]))
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a linear discriminant function from a model with non-finite log-likelihood\n") }
}
W <- coef(H1res(x),selmodel)$Sigma
Ilda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(x@grouping)),prior=prior,
means=coef(H1res(x))$mu,W=W,B=coef(H0res(x),selmodel)$Sigma-W,egvtol=egvtol,limlnk2=limlnk2)
}
)
setMethod("lda",
signature(x = "IdtLocNSNMANOVA"),
function(x,prior="proportions",selmodel=BestModel(H1res(x)@NMod),egvtol=1.0e-10,silent=FALSE,k2max=1e6,...)
{
limlnk2 <- log(k2max)
H0res <- H0res(x)@NMod
H1res <- H1res(x)@NMod
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==H0res@ModelNames)) }
if ( !is.finite(H0res@logLiks[selmodel]) || !is.finite(H1res@logLiks[selmodel]) )
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a linear discriminant function from a model with non-finite log-likelihood\n") }
}
W <- coef(H1res(x),selmodel)$Sigma
Ilda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(x@grouping)),prior=prior,
means=coef(H1res)$mu,W=W,B=coef(H0res(x),selmodel)$Sigma-W,egvtol=egvtol,limlnk2=limlnk2)
}
)
setMethod("lda",
signature(x = "IData"),
function(x, grouping, prior="proportions", CVtol=1.0e-5, egvtol=1.0e-10, subset=1:nrow(x), CovCase=1:4,
SelCrit=c("BIC","AIC"), silent=FALSE, k2max=1e6, ...)
{
limlnk2 <- log(k2max)
Config <- getConfig(...)
if (is.null(Config))
{
Config <- ifelse(CovCase==1,1,CovCase+1)
CovCaseArg <- TRUE
} else {
CovCaseArg <- FALSE
}
if (x@NIVar==1) {
CovCase <- q1CovCase(CovCase)
Config <- q1Config(Config)
}
SelCrit <- match.arg(SelCrit)
if (length(subset) < x@NObs)
{
x <- x[subset,]
grouping <- grouping[subset]
}
grouping <- factor(grouping,exclude=NULL)
grplvls <- levels(grouping)
n <- x@NObs
p <- 2*x@NIVar
k <- length(grplvls)
if (k==1) {
errmsg <- "The data belongs to one single group. A partition into at least two different groups is required\n"
if (silent==FALSE) {
stop(errmsg)
} else {
warning(errmsg)
return(NULL)
}
}
nk <- as.numeric(table(grouping))
if (sum(nk)!=n) { stop("Dimensions of the x and grouping arguments do not agree with each other\n") }
MxtDEst <- IdtNmle(x,grouping,Type="HomMxt",CVtol=CVtol,CovCaseArg=CovCaseArg,Config=Config,SelCrit=SelCrit,...)
glbmeans <- colMeans(cbind(x@MidP,x@LogR))
grpmeans <- coef(MxtDEst)$mu
mugdev <- scale(grpmeans,center=glbmeans,scale=FALSE)
vnames <- unlist(dimnames(grpmeans)[2])
B <- matrix(0.,nrow=p,ncol=p,dimnames=list(vnames,vnames))
for (g in 1:k) B <- B + (nk[g]/n) * outer(mugdev[g,],mugdev[g,])
selmodel <- BestModel(MxtDEst)
Ilda(Conf=selmodel,p=p,nk=nk,prior=prior,means=grpmeans,W=coef(MxtDEst,selmodel)$Sigma,B=B,egvtol=egvtol,limlnk2=limlnk2,...)
}
)
setMethod("predict",
signature(object = "Idtlda"),
function(object,newdata,prior=object@prior,...)
{
if (is(newdata,"IData")) newdata <- as.matrix(cbind(newdata@MidP,newdata@LogR))
if (is(newdata,"data.frame")) newdata <- as.matrix(newdata)
n <- nrow(newdata)
k <- length(prior)
if (k==1) {
stop("The data belongs to one single group. A partition into at least two different groups is required\n")
}
sphdata <- newdata %*% object@scaling
sphmeans <- object@means %*% object@scaling
Mahdistovertwo <- apply(sphdata, 1, function(x) apply(sphmeans, 1, function(mu) (sum((mu-x)^2)/2)))
minhlfMD2 <- apply(Mahdistovertwo,2,min)
wghtdensities <- sweep(exp(sweep(-Mahdistovertwo,2,minhlfMD2,"+")),1,prior,"*")
ncnst <- apply(wghtdensities,2,sum)
posterior <- sweep(wghtdensities,2,STATS=ncnst,FUN="/")
clres <- apply(posterior, 2, function(pst) return(dimnames(sphmeans)[[1]][which.max(pst)]))
list(class=factor(clres,levels=dimnames(object@means)[[1]]),posterior=t(posterior))
}
)
setMethod("show",
signature(object = "Idtlda"),
function(object)
{
cat("Prior probabilities of groups:\n") ; print(object@prior) ; cat("\n")
cat("Group means:\n") ; print(object@means) ; cat("\n")
cat("Coefficients of linear discriminants:\n") ; print(object@scaling) ; cat("\n")
}
)
Iqda <- function(Conf,p,nk,lev,prior,means,Wg,limlnk2)
{
N <- sum(nk)
k <- length(nk)
vnames <- colnames(means)
scaling <- array(dim=c(p,p,k),dimnames=list(vnames,paste("LD",1:p,sep=""),lev))
ldet <- numeric(k)
if (prior[1]=="proportions") prior <- nk/N
names(prior) <- lev
Ip <- diag(p)
for (g in 1:k)
{
if (Conf != 5)
{
scalingg <- Safepdsolve(Wg[,,g],maxlnk2=limlnk2,scale=TRUE)
if (is.null(scalingg)) {
warning("Found a non positive definite within group matrix\n")
return(NULL)
}
scaling[,,g] <- scalingg
ldet[g] <- as.numeric(determinant(Wg[,,g],logarithm=TRUE)$modulus)/2
} else {
Wd <- diag(Wg[,,g])
scaling[,,g] <- diag(1/sqrt(Wd))
ldet[g] <- sum(log(Wd))/2
}
}
new("Idtqda",prior=prior,means=means,scaling=scaling,ldet=ldet,lev=lev,CovCase=Conf)
}
setMethod("qda",
signature(x = "IdtMxtNDE"),
function(x,prior="proportions",selmodel=BestModel(x),silent=FALSE,k2max=1e6,...)
{
limlnk2 <- log(k2max)
if (x@Hmcdt)
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a quadratic discriminant function from an estimate of a homoscedastic mixture\n") }
}
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==x@ModelNames)) }
if (!is.finite(x@logLiks[selmodel]))
{
if (silent) {
return(NULL)
} else { stop("Trying to compute a quadratic discriminant function from a model with non-finite log-likelihood\n") }
}
Iqda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(x@grouping)),lev=levels(x@grouping),
prior=prior,means=coef(x)$mu,Wg=coef(x,selmodel)$Sigma,limlnk2=limlnk2)
}
)
setMethod("qda",
signature(x = "IdtHetNMANOVA"),
function(x, prior="proportions", selmodel=BestModel(H1res(x)),silent=FALSE,k2max=1e6, ...)
{
limlnk2 <- log(k2max)
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==x@H1res@ModelNames)) }
if ( !is.finite(x@H0res@logLiks[selmodel]) || !is.finite(x@H1res@logLiks[selmodel]) )
{
if (silent) {
return(NULL)
} else {
stop("Trying to compute a quadratic discriminant function from a model with non-finite log-likelihood\n")
}
}
Iqda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(x@grouping)),lev=levels(x@grouping),
prior=prior,means=coef(H1res(x))$mu,Wg=coef(H1res(x),selmodel)$Sigma,limlnk2=limlnk2)
}
)
setMethod("qda",
signature(x = "IdtGenNSNMANOVA"),
function(x, prior="proportions", selmodel=BestModel(H1res(x)@NMod),silent=FALSE,k2max=1e6, ...)
{
limlnk2 <- log(k2max)
H0res <- H0res(x)@NMod
H1res <- H1res(x)@NMod
if (is.character(selmodel)) { selmodel <- sapply(selmodel,function(mod) which(mod==H1res@ModelNames)) }
if (!is.finite(H0res@logLiks[selmodel]) || !is.finite(H1res@logLiks[selmodel]))
{
if (silent) {
return(NULL)
} else {
stop("Trying to compute a quadratic discriminant function from a model with non-finite log-likelihood\n")
}
}
Iqda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(x@grouping)),lev=levels(x@grouping),
prior=prior,means=coef(H1res)$mu,Wg=coef(H1res,selmodel)$Sigma,limlnk2=limlnk2)
}
)
setMethod("qda",
signature(x = "IData"),
function(x, grouping, prior="proportions", CVtol=1.0e-5, subset=1:nrow(x),
CovCase=1:4, SelCrit=c("BIC","AIC"), silent=FALSE, k2max=1e6, ...)
{
limlnk2 <- log(k2max)
SelCrit <- match.arg(SelCrit)
if (x@NIVar==2) CovCase <- q1CovCase(CovCase)
Config <- getConfig(...)
if (is.null(Config))
{
Config <- ifelse(CovCase==1,1,CovCase+1)
CovCaseArg <- TRUE
} else {
CovCaseArg <- FALSE
}
if (x@NIVar==1) {
CovCase <- q1CovCase(CovCase)
Config <- q1Config(Config)
}
if (length(subset) < x@NObs)
{
x <- x[subset,]
grouping <- grouping[subset]
}
grouping <- factor(grouping,exclude=NULL)
grplvls <- levels(grouping)
if (length(grplvls)==1)
{
errmsg <- "The data belongs to one single group. A partition into at least two different groups is required\n"
if (silent==FALSE) {
stop(errmsg)
} else {
warning(errmsg)
return(NULL)
}
}
MxtDEst <- IdtHetMxtNmle(x,grouping,CVtol=CVtol,CovCaseArg=CovCaseArg,Config=Config,SelCrit=SelCrit,...)
selmodel <- BestModel(MxtDEst)
Iqda(Conf=selmodel,p=2*x@NIVar,nk=as.numeric(table(grouping)),lev=grplvls,
prior=prior,means=coef(MxtDEst)$mu,Wg=coef(MxtDEst,selmodel)$Sigma,limlnk2=limlnk2)
}
)
setMethod("predict",
signature(object = "Idtqda"),
function(object,newdata,prior=object@prior,...)
{
if (is(newdata,"IData")) { newdata <- as.matrix(cbind(newdata@MidP,newdata@LogR)) }
if (is(newdata,"data.frame")) { newdata <- as.matrix(newdata) }
n <- nrow(newdata)
p <- ncol(newdata)
k <- length(prior)
grpnames <- dimnames(object@means)[[1]]
Mahdistovertwo <- matrix(nrow=k,ncol=n,dimnames=list(grpnames,rownames(newdata)))
for (g in 1:k)
{
sphdata <- newdata %*% object@scaling[,,g]
sphmeang <- object@means[g,] %*% object@scaling[,,g]
Mahdistovertwo[g,] <- apply(sphdata, 1, function(x) sum((x-sphmeang)^2)/2)
}
minhlfMD2 <- apply(Mahdistovertwo,2,min)
nrmhlfMD2 <- sweep(Mahdistovertwo,2,minhlfMD2)
wghtdensities <- sweep(exp(sweep(-nrmhlfMD2,1,object@ldet)),1,prior,"*")
ncnst <- apply(wghtdensities,2,sum)
posterior <- sweep(wghtdensities,2,STATS=ncnst,FUN="/")
NAind <- which(apply(posterior,2,function(x) any(!is.finite(x))))
if (length(NAind)>0) {
NAMDover2 <- Mahdistovertwo[,NAind,drop=FALSE]
minNAMDover2 <- apply(NAMDover2,2,min)
normNAMDover2 <- scale(NAMDover2,center=minNAMDover2,scale=FALSE)
NAwghtdensities <- sweep(exp(sweep(-normNAMDover2,1,STATS=object@ldet,FUN="-")),1,STATS=prior,FUN="*")
NAncnst <- apply(NAwghtdensities,2,sum)
posterior[,NAind] <- sweep(NAwghtdensities,2,STATS=NAncnst,FUN="/")
}
clres <- apply(posterior, 2, function(pst) return(grpnames[which.max(pst)]))
list(class=factor(clres,levels=grpnames),posterior=t(posterior))
}
)
setMethod("show",
signature(object = "Idtqda"),
function(object)
{
cat("Prior probabilities of groups:\n") ; print(object@prior) ; cat("\n")
cat("Group means:\n") ; print(object@means) ; cat("\n")
}
)
setMethod("CovCase",
signature(object = "Idtlda"),
function(object)
{
object@CovCase
}
)
setMethod("CovCase",
signature(object = "Idtqda"),
function(object)
{
object@CovCase
}
) |
library(hamcrest)
library(methods)
setClass("AA", representation(a="numeric"))
a <- new("AA")
x <- 2L
setMethod("[", signature(x="AA"), function(x,i,j,...) ifelse(i>j,10,20) )
test.arg.eval.01 = function() {
assertThat(a[x+1,x+2], identicalTo(20))
}
test.arg.eval.02 = function() {
assertThat(a[x+2L, x+1L], identicalTo(10))
} |
test_that("can pivot all cols to long", {
pv <- memdb_frame(x = 1:2, y = 3:4) %>%
tidyr::pivot_longer(x:y)
expect_equal(
pv %>% collect(),
tibble(
name = c("x", "x", "y", "y"),
value = 1:4
)
)
expect_snapshot(
lazy_frame(x = 1:2, y = 3:4) %>%
tidyr::pivot_longer(x:y)
)
})
test_that("can add multiple columns from spec", {
sp <- tibble(
.name = c("x", "y"),
.value = "v",
a = 11:12,
b = 13:14
)
pv <- lazy_frame(x = 1:2, y = 3:4) %>%
    dbplyr_pivot_longer_spec(spec = sp)
expect_equal(colnames(pv), c("a", "b", "v"))
expect_snapshot(pv)
})
test_that("preserves original keys", {
pv <- lazy_frame(x = 1:2, y = 2, z = 1:2) %>%
tidyr::pivot_longer(y:z)
expect_equal(colnames(pv), c("x", "name", "value"))
expect_snapshot(pv)
})
test_that("can drop missing values", {
expect_snapshot(
lazy_frame(x = c(1, NA), y = c(NA, 2)) %>%
tidyr::pivot_longer(x:y, values_drop_na = TRUE)
)
})
test_that("can handle missing combinations", {
df <- tibble::tribble(
~id, ~x_1, ~x_2, ~y_2,
"A", 1, 2, "a",
"B", 3, 4, "b",
)
df_db <- memdb_frame(!!!df)
pv_db <- tidyr::pivot_longer(
df_db,
-id,
names_to = c(".value", "n"),
names_sep = "_"
)
pv <- pv_db %>% collect()
expect_named(pv, c("id", "n", "x", "y"))
expect_equal(pv$x, c(1, 3, 2, 4))
expect_equal(pv$y, c(NA, NA, "a", "b"))
sql <- tidyr::pivot_longer(
lazy_frame(!!!df),
-id,
names_to = c(".value", "n"),
names_sep = "_"
)
expect_snapshot(sql)
})
test_that("can override default output column type", {
expect_snapshot(
lazy_frame(x = 1) %>%
tidyr::pivot_longer(x, values_transform = list(value = as.character))
)
})
test_that("can pivot to multiple measure cols", {
df <- lazy_frame(x = "x", y = 1)
sp <- tibble::tribble(
~.name, ~.value, ~row,
"x", "X", 1,
"y", "Y", 1,
)
pv <- dbplyr_pivot_longer_spec(df, sp)
expect_equal(colnames(pv), c("row", "X", "Y"))
expect_snapshot(pv)
})
test_that("original col order is preserved", {
df <- tibble::tribble(
~id, ~z_1, ~y_1, ~x_1, ~z_2, ~y_2, ~x_2,
"A", 1, 2, 3, 4, 5, 6,
"B", 7, 8, 9, 10, 11, 12,
) %>%
tbl_lazy()
expect_equal(
df %>%
tidyr::pivot_longer(-id, names_to = c(".value", "n"), names_sep = "_") %>%
colnames(),
c("id", "n", "z", "y", "x")
)
})
test_that(".value can be at any position in `names_to`", {
samp <- tibble(
i = 1:4,
y_t1 = rnorm(4),
y_t2 = rnorm(4),
z_t1 = rep(3, 4),
z_t2 = rep(-2, 4),
)
value_first <- tidyr::pivot_longer(
lazy_frame(!!!samp), -i,
names_to = c(".value", "time"), names_sep = "_"
)
expect_snapshot(value_first)
samp2 <- dplyr::rename(samp, t1_y = y_t1,
t2_y = y_t2,
t1_z = z_t1,
t2_z = z_t2)
value_second <- tidyr::pivot_longer(
lazy_frame(!!!samp2), -i,
names_to = c("time", ".value"), names_sep = "_"
)
expect_snapshot(value_second)
cols <- c("i", "time", "y", "z")
expect_equal(colnames(value_first), cols)
expect_equal(colnames(value_second), cols)
})
test_that("grouping is preserved", {
df <- memdb_frame(g = 1, x1 = 1, x2 = 2)
out <- df %>%
group_by(g) %>%
tidyr::pivot_longer(x1:x2, names_to = "x", values_to = "v")
expect_equal(group_vars(out), "g")
}) |
Estimates<- setRefClass("Estimates",
fields = list(
ci = 'ANY',
overall.best = 'ANY',
iteration.best = 'ANY',
visited.space = 'ANY'
),
methods = list(
initialize = function() {
overall.best<<- Inf
iteration.best<<- c()
visited.space<<- c()
},
setBest = function(v) {
overall.best<<- v
},
getBest = function() {
overall.best
},
addIterationBest = function(iteration, solution) {
if(length(iteration.best) == 0) {
iteration.best<<- c(setNames(iteration,"iteration"),solution)
} else {
iteration.best<<- rbindlist(list(iteration.best, c(setNames(iteration,"iteration"),solution)), use.names=TRUE)
}
},
getIterationBest = function() {
data.frame(iteration.best, stringsAsFactors=FALSE)
},
addVisitedSpace = function(solution) {
m<- nrow(solution)
if(m > 1) {
for(i in 1:m) {
visited.space<<- rbindlist(list(visited.space,solution[i,]), use.names=TRUE)
}
} else {
visited.space<<- rbindlist(list(visited.space, solution), use.names=TRUE)
}
},
getVisitedSpace = function() {
data.frame(visited.space, stringsAsFactors=FALSE)
}
)
) |
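# A short usage sketch for the Estimates reference class; note that
# addIterationBest() and addVisitedSpace() rely on data.table::rbindlist(),
# so data.table is assumed to be attached:
if (FALSE) {
  est <- Estimates$new()
  est$setBest(0.42)
  est$getBest()                        # 0.42
  est$addIterationBest(1, list(x = 1, y = 2))
  est$getIterationBest()
}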
startUSGSsession<-function(username,
password,
cookies.file=NULL,
verbose=FALSE){
c.handle = new_handle()
handle_setopt(c.handle,
referer=getRGISToolsOpt("USGS.url"),
cookiejar = cookies.file,
useragent = getRGISToolsOpt("USERAGENT"),
followlocation = TRUE ,
autoreferer = TRUE )
req <- curl(getRGISToolsOpt("USGS.login"), handle = c.handle)
html<-readLines(req)
html<-paste(html,collapse = "\n ")
html<-read_html(html)
csrf<-html %>% html_nodes(xpath = '//*[@name="csrf_token"]') %>% xml_attr("value")
if(grepl("ncforminfo",html)){
nc<-html %>% html_nodes(xpath = '//*[@name="__ncforminfo"]') %>% xml_attr("value")
handle_setform(c.handle,
'username' = username,
'password' = password,
"csrf_token"=csrf,
"__ncforminfo"=nc
)
}else{
handle_setform(c.handle,
'username' = username,
'password' = password,
"csrf_token"=csrf)
}
req <- curl_fetch_memory(getRGISToolsOpt("USGS.login"), handle = c.handle)
if(verbose){
message(paste(parse_headers(req$headers),collapse="\n"))
}
return(c.handle)
} |
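# Hedged usage sketch: startUSGSsession() returns a curl handle carrying an
# authenticated USGS session that can be reused for subsequent requests.
# The credentials below are placeholders:
if (FALSE) {
  h <- startUSGSsession(username = "my_user", password = "my_pass",
                        verbose = TRUE)
  req <- curl::curl_fetch_memory(getRGISToolsOpt("USGS.url"), handle = h)
}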
ruv_residuals <-
function (fit, type = c("residuals", "adjusted.Y"), subset_and_sort = TRUE)
{
if (fit$misc$method == "RUVinv")
return(FALSE)
WZX = cbind(fit$misc$W, fit$misc$Z, fit$misc$X)
kq = ncol(WZX) - ncol(fit$misc$X)
Y1 = RUV1(fit$Y, fit$misc$eta, fit$misc$ctl, include.intercept = fit$misc$include.intercept)
agb = solve(t(WZX) %*% WZX) %*% t(WZX) %*% Y1
if (type[1] == "residuals")
Y1 = Y1 - WZX %*% agb
if (type[1] == "adjusted.Y")
Y1 = Y1 - WZX[, 1:kq, drop = FALSE] %*% agb[1:kq, , drop = FALSE]
if (subset_and_sort)
Y1 = (Y1[, fit$misc$colsubset])[, fit$misc$colorder]
return(Y1)
} |
check_col_types <- function(col_types,col_names){
cols_new <- col_types$cols
cols_new <- names(cols_new) %>%
de_regex() %>%
name_to_regex() %>%
    str_remove_all('^ +') %>%
str_c('((?<=^)|(?<= ))',.,'((?=$)|(?= ))') %>%
map_lgl(~any(str_detect(col_names,.))) %>%
{cols_new[.]}
if(length(cols_new) > 0){
names(cols_new) <- names(cols_new) %>%
de_regex() %>%
name_to_regex() %>%
      str_remove_all('^ +') %>%
str_c('((?<=^)|(?<= ))',.,'((?=$)|(?= ))') %>%
map_chr(~str_subset(col_names,.))
col_types$cols <- cols_new
}else{
col_types <- cols()
}
return(col_types)
} |
top_terms <-function(x,w,y, alpha,lambda,k,wordcloud,max.words,scale,rot.per,family){
if(missing(family)){
family="gaussian"
}
if(missing(w)) {
trms <- tv_dictionary(x=x,y=y,alpha=alpha,lambda=lambda,newx=x,family=family)
betas=trms[[2]]
betas=as.matrix(betas[2:nrow(betas)])
} else {
y=as.vector(y)
x=as.matrix(x)
w=as.matrix(w)
nw=ncol(as.matrix(w))
nx=ncol(as.matrix(x))
z=cbind(w,x)
nz=ncol(z)
trms <- tv_dictionary(x=x,y=y,w=w,alpha=alpha,lambda=lambda,newx=x,family = family)
betas=trms[[2]]
betas=as.matrix(betas[(nw+2):nrow(betas)])
}
II_rank=as.matrix(rank(-abs(betas),ties.method = "random"))
betas_names=colnames(as.data.frame(x))
betas_abs=abs(betas)
II=betas!=0
III=sum(II)
if(III<k){
k=III
}
II_rank_k=II_rank[1:k]
top_coefs <- vector(length=k)
for (p in 1:k){
gg = which(II_rank==p)
top_coefs[p]=betas_names[gg]
}
if(wordcloud==TRUE) {
freqs=vector(length = k)
for(p in 1:k){
gg = which(II_rank==p)
freqs[p]=betas_abs[gg]
}
pal2 <- RColorBrewer::brewer.pal(8,"Dark2")
wordcloud(top_coefs,freqs, colors = pal2,random.order=FALSE,
max.words=max.words,rot.per = rot.per ,scale=scale )
}
return(top_coefs)
} |
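# Hedged usage sketch for top_terms(): x is a document-term matrix and y the
# response; the function fits a penalized regression via tv_dictionary() (an
# internal helper) and returns the k terms with the largest absolute
# coefficients. The objects `dtm` and `y` are placeholders:
if (FALSE) {
  key_terms <- top_terms(x = dtm, y = y, alpha = 1, lambda = 0.01, k = 10,
                         wordcloud = FALSE)
}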
SK.formula <- function(formula,
data = NULL,
which = NULL,
fl1 = NULL,
fl2 = NULL,
error = NULL,
sig.level = .05,
round = 2,
...)
{
aux <- regexpr("Error",
as.character(formula),
perl=TRUE)
aux_err <- regmatches(as.character(formula),
aux)
cl <- match.call()
if(length(aux_err) == 0){
model <- lm(formula,
data = data)
res <- SK(x = model,
which = which,
fl1 = fl1,
fl2 = fl2,
error = error,
sig.level = sig.level,
round = round,
...)
} else {
basee <- aov(formula,
data = data)
oc <- attr(basee,
"call")
Terms <- attr(basee,
"terms")
indError <- attr(Terms,
"specials")$Error
errorterm <- attr(Terms,
"variables")[[1 + indError]]
form <- update.formula(Terms,
paste(". ~ .-",
deparse(errorterm,
width.cutoff = 500L, backtick = TRUE), "+", deparse(errorterm[[2L]],
width.cutoff = 500L, backtick = TRUE)))
model <- lm(form,
data = data)
res <- SK(x = model,
which = which,
fl1 = fl1,
fl2 = fl2,
error = error,
sig.level = sig.level,
round = round,
...)
}
res$call <- cl
class(res) <- c('SK.formula',class(res))
return(res)
} |
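# Hedged usage sketch for SK.formula(): it accepts either a plain model
# formula or one containing an Error() stratum, fits the corresponding
# lm/aov, and forwards it to SK() for the Scott-Knott clustering of means.
# Dataset and factor names below are placeholders:
if (FALSE) {
  res <- SK.formula(yield ~ block + treatment, data = crops,
                    which = "treatment", sig.level = 0.05)
  summary(res)
}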
yaiRFsummary = function(object, nTop=0)
{
if (class(object) != "yai") stop ("arg must be of class yai")
if (object$method != "randomForest") stop ("method must be randomForest")
if (!requireNamespace ("randomForest")) stop("install randomForest and try again")
scaledImportance = yaiVarImp(object, nTop, plot=FALSE)
error = vector(mode="numeric",length=length(names(object$ranForest)))
errtag = vector(mode="character",length=length(names(object$ranForest)))
levels = vector(mode="integer",length=length(names(object$ranForest)))
ntree = vector(mode="integer",length=length(names(object$ranForest)))
type = vector(mode="character",length=length(names(object$ranForest)))
i = 0
for (Rf in object$ranForest)
{
i = i+1
type[i] = Rf$type
if(Rf$type == "regression")
{
error [i] = round(100*Rf$rsq[length(Rf$rsq)], digits=2)
errtag[i] = "%var explained"
levels[i] = NA
}
else if(Rf$type == "classification")
{
error [i] = Rf$err.rate[Rf$ntree,"OOB"]
errtag[i] = "OOB error rate"
levels[i] = nrow(Rf$confusion)
}
else
{
error [i] = NA
errtag[i] = "N/A"
levels[i] = NA
}
ntree [i] = Rf$ntree
}
forestAttributes=data.frame(ntree,error,errtag,levels,type)
rownames(forestAttributes)=names(object$ranForest)
list(forestAttributes=forestAttributes,scaledImportance=scaledImportance)
} |
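# Hedged usage sketch for yaiRFsummary(), assuming a yai object fit with
# method = "randomForest" via the yaImpute package (the data objects are
# placeholders):
if (FALSE) {
  fit <- yaImpute::yai(x = xvars, y = yvars, method = "randomForest")
  yaiRFsummary(fit, nTop = 10)
}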
flash_date_parse <- function(text) {
Date_String <-
paste(paste0(month.abb, "\\s\\d{1,2}"), collapse = "|")
event_date <- stringr::str_extract_all(text, Date_String)
event_date <-
event_date[!is.na(event_date)]
event_date <-
event_date[lapply(event_date, length) > 0]
if (length(event_date) == 0) {
event_date <- NA
}
event_date <- utils::tail(unlist(event_date), 1)
return(event_date)
} |
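# flash_date_parse() extracts the last "Mon d"-style date (abbreviated month
# followed by a 1-2 digit day) found in the text, e.g.:
if (FALSE) {
  flash_date_parse("Meet held Jun 4 and Jun 12, 2021")  # "Jun 12"
  flash_date_parse("no date here")                      # NA
}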
sample.to.species.freq <- function(x, min.f = NULL) {
x.df <- as.data.frame(table(x), stringsAsFactors = FALSE)
x.df[, 1] <- as.numeric(as.character(x.df[, 1]))
x.df <- x.df[x.df[, 1] > 0, ]
rownames(x.df) <- NULL
f <- expand.freqs(x.df)
if(!is.null(min.f)) c(f, rep(0, max(0, min.f - length(f)))) else f
} |
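# sample.to.species.freq() tabulates a vector of per-species counts into a
# species frequency vector (number of species seen 1, 2, ... times), padded
# with zeros up to length min.f; expand.freqs() is an internal helper. A
# sketch with a toy abundance vector:
if (FALSE) {
  abund <- c(1, 1, 2, 5)            # four species with these counts
  sample.to.species.freq(abund, min.f = 6)
}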
print.gofCOP <- function(x, ...) {
for (j in seq_along(x)) {
cat(rep("-", getOption("width")), sep = "")
cat("\n")
cat(strwrap(x[[j]]$method), sep = "\n")
cat("\n")
out <- character()
if (!is.null(x[[j]]$theta)) {
for (i in seq_along(x[[j]]$theta)) {
out <- c(out, paste0("theta.", i, " = ", x[[j]]$theta[i]))
}
}
if (!is.null(x[[j]]$df)) {
out <- c(out, paste("df =", x[[j]]$df))
}
cat("Parameters:")
cat("\n")
cat(paste(out, collapse = "\n"))
cat("\n")
cat("\n")
if (!is.null(x[[j]]$df)) {
if (length(x[[j]]$df) > 1 & !any(x[[j]]$df == 60) |
length(x[[j]]$df) == 3) {
      cat("For the CvM and KS test the df have to be an integer and for this reason were transformed.")
cat("\n")
cat("\n")
}
if (any(x[[j]]$df == 60)) {
      cat("For the PIOSTn test the estimated df were too high and for computational reasons were fixed to 60.")
cat("\n")
cat("\n")
}
}
if (!is.null(x[[j]]$res.tests)) {
cat("Tests results:")
cat("\n")
print(x[[j]]$res.tests, max = 50)
if (dim(x[[j]]$res.tests)[1] != 1 & j == length(x)) {
cat("\n")
      cat("Please use the functions gofGetHybrid() and gofOutputHybrid() for display of subsets of Hybrid tests. To access the results, please obtain them from the structure of the gofCOP object.")
}
if (any(is.na(x[[j]]$res.tests[, 1]))) {
cat("\n")
      cat(bold("At least one p-value was returned as 'NA'. Typically this is due to the copula parameter being estimated close to the boundaries. Consider adjusting the possible parameter space via 'lower' and 'upper'."))
}
}
cat("\n")
cat("\n")
if (is.element("White", rownames(x[[j]]$res.tests)) &
x[[j]]$copula == "t") {
      cat("The test gofWhite may be unstable for t-copula. Please handle the results carefully.")
cat("\n")
cat("\n")
}
}
invisible(x)
} |
pltr.glm <- function(data, Y.name, X.names, G.names, family = 'binomial', args.rpart = list(cp=0, minbucket=20, maxdepth=10), epsi = 1e-3, iterMax = 5, iterMin = 3, verbose = TRUE)
{
time1 <- Sys.time()
logistic.split <- list(eval = .logistic.eval, split = .logistic.split, init = .logistic.init)
if(iterMin > iterMax) stop("iterMax not greater than iterMin !")
if(family == "gaussian") method = "anova"
if(family == "binomial") method = logistic.split
fit0 <- glm(as.formula(paste(Y.name, "~ -1+", paste(X.names, collapse=" + "))), data = data, family = family)
hat_gamma <- fit0$coef
diff_norm_gamma <- 10
nber_iter <- 1
if(length(X.names) > 1) product <- "%*%"
else product <- "*"
if(verbose) cat("Iteration process...\n\n")
while((diff_norm_gamma>0) & ((nber_iter<=iterMin) | (diff_norm_gamma>=epsi)) & (nber_iter<=iterMax))
{
fit_tree <- rpart(as.formula(paste(Y.name, " ~ ", paste(G.names, collapse=" + "), paste("+ offset(offsetX)"))), data = eval(parse(text = paste("data.frame(data, offsetX = as.matrix(data[,X.names])", product, "hat_gamma)"))), method = method, control = args.rpart)
indicators_tree <- sapply(tree2indicators(fit_tree), function(u) return(paste("as.integer(", u, ")")))
nber_indicators <- length(indicators_tree)
fit_lm <- glm(as.formula(paste(Y.name, "~ ", paste(indicators_tree[-nber_indicators], collapse = "+"), paste("+ offset(offsetX)"))), eval(parse(text = paste("data.frame(data, offsetX = as.matrix(data[,X.names])", product, "hat_gamma)"))), family = family)
hat_beta <- fit_lm$coef
offsetZ <- with(data, eval(parse(text = paste(hat_beta, c(1, indicators_tree[-nber_indicators]), collapse = " + ", sep="*"))))
fit_lm_update <- glm(as.formula(paste(Y.name, "~ -1 + ", paste(X.names, collapse = " + "), paste("+ offset(offsetZ)"))), data = data.frame(data, offsetZ = offsetZ), family = family)
hat_gamma_update <- fit_lm_update$coef
diff_norm_gamma <- .norm2(hat_gamma_update - hat_gamma)
hat_gamma <- hat_gamma_update
if(verbose) cat("Iteration ", nber_iter, "in PLTR; Diff_norm_gamma = ", diff_norm_gamma, "\n")
nber_iter <- nber_iter + 1
}
if(verbose)
{
cat("End of iteration process\n")
cat("Number of iterations: ", nber_iter - 1, "\n\n")
}
time2 <- Sys.time()
Timediff <- difftime(time2, time1)
return(list(fit = fit_lm_update, tree = fit_tree, nber_iter = nber_iter - 1, Timediff = Timediff))
} |
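# Hedged usage sketch for pltr.glm(), the partially linear tree-based
# regression: the X.names covariates enter linearly while the G.names
# covariates are candidate split variables for the rpart tree. Data frame
# and column names are placeholders:
if (FALSE) {
  fit <- pltr.glm(data = d, Y.name = "y",
                  X.names = c("age", "sex"),
                  G.names = c("g1", "g2", "g3"),
                  family = "binomial", iterMax = 8)
  fit$tree
}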
AaregME <- function(formula, data, subset, weights, offset, na.action = na.omit,
silent = TRUE, resid = FALSE, do_update = FALSE,
estinit = TRUE, initpar = NULL,
fixed = NULL, nofit = FALSE,
control = optim_control(),
...) {
cl <- match.call()
fc <- cl
fc[[1L]] <- quote(tramME_model)
fc$tram <- "Aareg"
mod <- eval(fc, parent.frame())
if (is.null(mod$ranef) || nofit || !is.null(initpar)) {
estinit <- FALSE
}
if (missing(data) || !inherits(data, "tramME_data")) {
fc <- cl
m <- match(c("formula", "data", "subset", "na.action", "weights", "offset"),
names(fc), 0L)
fc <- fc[c(1L, m)]
fc$formula <- .combine_formulas(mod$formula)
fc[[1L]] <- quote(tram::tram_data)
out <- eval(fc, parent.frame())
dat <- out$mf
class(dat) <- c("tramME_data", class(dat))
} else {
dat <- data
}
cf <- coef(mod$ctm)
nm <- names(cf)
nm <- nm[grep("Bs1", nm)]
fix <- numeric(length(nm))
names(fix) <- nm
fixed <- c(fixed, fix)
mp <- list()
idx <- which(names(cf) %in% names(fixed))
bb <- rep(NA, length(cf))
bb[-idx] <- seq_along(bb[-idx])
mp <- list(beta = as.factor(bb))
mmlt <- mlt::mlt(mod$ctm, data = dat, offset = model.offset(dat),
weights = model.weights(dat),
fixed = fixed, dofit = estinit)
fe <- fe_terms(mmlt)
re <- re_terms(mod$ranef, dat, mod$negative)
inp <- tramTMB_inputs(mod, fe, re, dat, param = initpar)
cf[names(fixed)] <- fixed
obj <- tramTMB(inp$data, inp$parameters, inp$constraint, inp$negative,
map = mp, resid = resid, do_update = do_update, silent = silent)
if (!nofit) {
if (is.null(initpar) && !estinit) {
par <- .optim_start(obj, resp = dat[[1]])
} else par <- NULL
opt <- optim_tramTMB(obj, par = par,
method = control$method, control = control$control,
trace = control$trace, ntry = control$ntry,
scale = control$scale)
parm <- .get_par(obj)
} else {
opt <- NULL
parm <- list(beta = cf, theta = rep(NA, length(.get_par(obj)$theta)))
}
param <- .gen_param(parm, fe = list(names = names(cf)),
re = list(names = re$names, blocksize = re$blocksize,
levels = re$levels, termsize = re$termsize),
varnames = names(dat))
structure(list(call = cl, model = mod, data = dat, tmb_obj = obj, opt = opt,
param = param),
class = c("AaregME", "tramME"))
} |
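# Usage sketch (hypothetical call): AaregME() fits a mixed-effects Aalen
# additive hazards model; the fixed-effects formula interface is assumed to
# follow tram::Aareg(), with an lme4-style random-effects term added.
if (FALSE) {
  library(survival)
  fit <- AaregME(Surv(time, status) | 0 + age ~ 1 + (1 | inst),
                 data = lung)
  summary(fit)
}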
RelativeWaterContent <- function(data,
fresh.weight = "fresh.weight",
dry.weight = "dry.weight",
fresh.weight.saturated = "fresh.weight.saturated") {
data_in <-
ValidityCheck(
data,
fresh.weight = fresh.weight,
dry.weight = dry.weight,
fresh.weight.saturated = fresh.weight.saturated
)
RWC <- ((data_in[[fresh.weight]] - data_in[[dry.weight]]) /
(data_in[[fresh.weight.saturated]] - data_in[[dry.weight]])) * 100
return(data.frame(data, RWC))
} |
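# Usage sketch (made-up weights, in grams); ValidityCheck() is an internal
# helper assumed to return the checked data unchanged.
if (FALSE) {
  leaf <- data.frame(fresh.weight = 1.2,
                     dry.weight = 0.4,
                     fresh.weight.saturated = 1.5)
  RelativeWaterContent(leaf)
  # RWC = (1.2 - 0.4) / (1.5 - 0.4) * 100, i.e. about 72.7
}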
vcovCR.robu <- function(obj, cluster, type, target, inverse_var, form = "sandwich", ...) {
if (missing(cluster)) cluster <- obj$study_orig_id
if (missing(target)) target <- NULL
  if (missing(inverse_var)) inverse_var <- is.null(target) && !obj$user_weighting
vcov_CR(obj, cluster = cluster, type = type,
target = target, inverse_var = inverse_var, form = form)
}
coef_CS.robu <- function(obj) {
beta <- as.vector(obj$b.r)
labs <- obj$reg_table$labels
if (is.factor(labs)) labs <- levels(labs)[labs]
names(beta) <- labs
beta
}
residuals_CS.robu <- function(obj) {
ord <- order(order(obj$study_orig_id))
resid <- obj$data.full$e.r[ord]
if (obj$user_weighting) {
pos_wts <- obj$data.full$userweights[ord] > 0
if (!all(pos_wts)) resid <- resid[pos_wts]
}
return(resid)
}
model_matrix.robu <- function(obj) {
ord <- order(order(obj$study_orig_id))
model_matrix <- obj$Xreg[ord,,drop=FALSE]
if (obj$user_weighting) {
pos_wts <- obj$data.full$userweights[ord] > 0
if (!all(pos_wts)) model_matrix <- model_matrix[pos_wts,,drop=FALSE]
}
return(model_matrix)
}
targetVariance.robu <- function(obj, cluster) {
ord <- order(order(obj$study_orig_id))
if (obj$user_weighting) {
pos_wts <- obj$data.full$userweights[ord] > 0
V <- obj$data.full$avg.var.eff.size[ord][pos_wts]
} else {
V <- mean(obj$data.full$r.weights) / obj$data.full$r.weights[ord]
}
matrix_list(V, cluster, "both")
}
weights.robu <- function(object, ...) {
ord <- order(order(object$study_orig_id))
if (object$user_weighting) {
object$data.full$userweights[ord]
} else{
NULL
}
}
weightMatrix.robu <- function(obj, cluster) {
ord <- order(order(obj$study_orig_id))
if (obj$user_weighting) {
W <- obj$data.full$userweights[ord]
W <- W[W > 0]
} else{
W <- obj$data.full$r.weights[ord]
}
w_scale <- mean(W)
W <- W / w_scale
W_list <- matrix_list(W, cluster, "both")
attr(W_list, "w_scale") <- w_scale
W_list
}
bread.robu <- function(x, ...) {
if (x$user_weighting) {
W <- x$data.full$userweights
} else{
W <- x$data.full$r.weights
}
x$N * chol2inv(chol(crossprod(x$Xreg, W * x$Xreg)))
}
v_scale.robu <- function(obj) {
obj$N
} |
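# Usage sketch: these methods let clubSandwich work with robumeta fits; the
# call below follows the clubSandwich documentation for vcovCR.robu (the
# 'corrdat' example data ships with robumeta).
if (FALSE) {
  library(robumeta)
  rfit <- robu(effectsize ~ males + college + binge, data = corrdat,
               modelweights = "CORR", studynum = studyid,
               var.eff.size = var)
  vcovCR(rfit, type = "CR2")
}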
`sumSet` <-
function(g,n,p,y,set) {
z<-array(0.0,c(n,p))
for (j in set) {
gg<-g[,j]; ii<-which(!is.na(gg))
z[ii,]<-z[ii,]+y[[j]][gg[ii],]
}
return(z)
} |
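# Usage sketch (hypothetical inputs): g holds 1-based category codes per
# column (NA = missing), y is a list with one rows-by-p matrix per column of
# g; sumSet() accumulates the rows picked out by g over the columns in 'set'.
if (FALSE) {
  g <- matrix(c(1, 2, NA, 1), nrow = 2)
  y <- list(matrix(1:4, 2, 2), matrix(5:8, 2, 2))
  sumSet(g, n = 2, p = 2, y = y, set = 1:2)
}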
context("rbind_fst_genoprob")
test_that("rbind_fst_genoprob works", {
library(qtl2)
grav2 <- read_cross2(system.file("extdata", "grav2.zip", package="qtl2"))
map <- insert_pseudomarkers(grav2$gmap, step=1)
probsA <- calc_genoprob(grav2[1:5,], map, error_prob=0.002)
probsB <- calc_genoprob(grav2[6:12,], map, error_prob=0.002)
dir <- tempdir()
fprobsA <- fst_genoprob(probsA, "exampleAr", dir)
fprobsB <- fst_genoprob(probsB, "exampleBr", dir)
fprobs <- rbind(fprobsA, fprobsB, fbase = "exampleABr")
expect_equal(rbind(probsA, probsB), fst_extract(fprobs))
unlink( fst_files(fprobsA) )
unlink( fst_files(fprobsB) )
unlink( fst_files(fprobs) )
})
test_that("rbind_fst_genoprob works in an intercross", {
library(qtl2)
iron <- read_cross2(system.file("extdata", "iron.zip", package="qtl2"))
map <- insert_pseudomarkers(iron$gmap, step=1)
probsA <- calc_genoprob(iron[7:12, c("3","X")], map, error_prob=0.002)
probsB <- calc_genoprob(iron[13:16,c("3","X")], map, error_prob=0.002)
dir <- tempdir()
fprobsA <- fst_genoprob(probsA, "exampleAc", dir)
fprobsB <- fst_genoprob(probsB, "exampleBc", dir)
fprobs <- rbind(fprobsA, fprobsB, fbase = "exampleABc")
expect_equal(rbind(probsA, probsB), fst_extract(fprobs))
expect_equal(fprobs, readRDS(fst_files(fprobs)[1]))
expect_equal(probsA, fst_extract(fprobsA))
expect_equal(fprobsA, readRDS(fst_files(fprobsA)[1]))
expect_equal(probsB, fst_extract(fprobsB))
expect_equal(fprobsB, readRDS(fst_files(fprobsB)[1]))
unlink( fst_files(fprobsA) )
unlink( fst_files(fprobsB) )
unlink( fst_files(fprobs) )
}) |
simulate.nonLifeRisk <- function(object, nsim, seed = NULL, ...) {
if (!is.numeric(nsim) || (!is.null(seed) && !is.numeric(seed))) {
stop("Invalid types, see ?simulate.nonLifeRisk.")
}
if ((length(nsim) != 1) || (!is.null(seed) && (length(seed) != 1))) {
stop("Invalid dimensions, see ?simulate.nonLifeRisk.")
}
if (any(sapply(list(nsim, seed), function(x) !is.null(x) && is.na(x)))) {
stop("Missing values, see ?simulate.nonLifeRisk.")
}
if (nsim < 0 || (!is.null(seed) && (seed < 0))) {
stop("nsim and seed should be positive, ?simulate.nonLifeRisk.")
}
if (!is.integer(nsim)) {
nsim <- as.integer(nsim)
}
if (!is.null(seed) && !is.integer(seed)) {
seed <- as.integer(seed)
}
if (!is.null(seed)) {
set.seed(seed)
}
  if (object$type == "simulations") {
    n <- length(object$simulation)
    # sample without replacement when enough stored simulations are
    # available, with replacement otherwise
    return(sample(x = object$simulation,
                  replace = nsim > n,
                  size = nsim))
  } else if (object$type == "log-normal") {
    return(stats::rnorm(n = nsim,
                        mean = object$mu,
                        sd = object$sigma))
  } else if (object$type == "cdf") {
    return(sample(x = object$x,
                  size = nsim,
                  prob = diff(c(0, object$cdf)),
                  replace = TRUE))
  }
}
compute.nonLifeRisk <- function(object, nsim, seed = NULL, market.risk, ...) {
if (!is.marketRisk(market.risk)) {
stop("Invalid types, see ?compute.nonLifeRisk.")
}
if (!is.numeric(nsim) || (!is.null(seed) && !is.numeric(seed))) {
stop("Invalid types, see ?compute.nonLifeRisk.")
}
if ((length(nsim) != 1) || (!is.null(seed) && (length(seed) != 1))) {
stop("Invalid dimensions, see ?compute.nonLifeRisk.")
}
if (any(sapply(list(nsim, seed), function(x) !is.null(x) && is.na(x)))) {
stop("Missing values, see ?compute.nonLifeRisk.")
}
if (nsim < 0 || (!is.null(seed) && (seed < 0))) {
stop("nsim and seed should be positive, ?compute.nonLifeRisk.")
}
if (!is.integer(nsim)) {
nsim <- as.integer(nsim)
}
if (!is.null(seed) && !is.integer(seed)) {
seed <- as.integer(seed)
}
if (!check(object = object, market.risk = market.risk)) {
stop("Inconsistent market.risk and nonLifeRisk.")
}
if (object$currency == market.risk$base.currency) {
l <- simulate(object, nsim = nsim, seed = seed)
} else {
l <- simulate(object, nsim = nsim, seed = seed) *
getInitialFX(market.risk,
from = object$currency,
to = market.risk$base.currency)
warning("Beware initial FX was used to transform simulations into the base.currency.")
}
if (object$type == "log-normal") {
l <- -(exp(l) - exp(object$mu + (object$sigma^2)/2))
}
return(data.table::data.table(nonLifeRisk = l))
} |
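# Usage sketch (hypothetical objects): 'nl' would be a nonLifeRisk built
# with the package's constructor and 'mr' a marketRisk with a matching base
# currency; the generic compute() is assumed to dispatch to the method above.
if (FALSE) {
  sims <- compute(nl, nsim = 1000L, seed = 42L, market.risk = mr)
  head(sims$nonLifeRisk)
}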
Wald_CI_CC_2x2 <- function(n, alpha=0.05, printresults=TRUE) {
n1p <- n[1, 1] + n[1, 2]
n2p <- n[2, 1] + n[2, 2]
pi1hat <- n[1, 1] / n1p
pi2hat <- n[2, 1] / n2p
estimate <- pi1hat - pi2hat
SE <- sqrt(pi1hat * (1-pi1hat) / n1p + pi2hat * (1-pi2hat) / n2p)
CC <- 0.5 * (1 / n1p + 1 / n2p)
z <- qnorm(1-alpha / 2, 0, 1)
L <- estimate - z * SE - CC
U <- estimate + z * SE + CC
L <- max(-1, L)
U <- min(U, 1)
if (printresults) {
print(sprintf('The Wald CI with continuity correction: estimate = %6.4f (%g%% CI %6.4f to %6.4f)',
estimate, 100 * (1 - alpha), L, U), quote=FALSE)
}
res <- data.frame(lower=L, upper=U, estimate=estimate)
invisible(res)
} |
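# Minimal, self-contained example: made-up counts for two groups (rows) by
# outcome (columns). Prints the continuity-corrected Wald interval for
# pi1 - pi2 and invisibly returns a data frame with 'lower', 'upper' and
# 'estimate'.
if (FALSE) {
  n_example <- matrix(c(15, 35, 10, 40), nrow = 2, byrow = TRUE)
  Wald_CI_CC_2x2(n_example, alpha = 0.05)
}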
op <- options()
options(prompt = "R> ", show.signif.stars = FALSE, warn = -1, continue = "+ ")
library("equate")
act.x <- as.freqtab(ACTmath[, 1:2])
act.y <- as.freqtab(ACTmath[, c(1, 3)])
head(act.x)
rbind(x = summary(act.x), y = summary(act.y))
neat.x <- freqtab(KBneat$x, scales = list(0:36, 0:12))
neat.y <- freqtab(KBneat$y, scales = list(0:36, 0:12))
attach(PISA)
r3items <- paste(items$itemid[items$clusterid == "r3a"])
r6items <- paste(items$itemid[items$clusterid == "r6"])
r5items <- paste(items$itemid[items$clusterid == "r5"])
r7items <- paste(items$itemid[items$clusterid == "r7"])
pisa <- freqtab(students[students$book == 6, ],
items = list(c(r3items, r6items), c(r5items, r7items)),
scales = list(0:31, 0:29), design = "sg")
round(data.frame(summary(pisa),
row.names = c("r3r6", "r5r7")), 2)
plot(x = act.x, lwd = 2, xlab = "Score", ylab = "Count")
plot(neat.x)
neat.xs <- presmoothing(neat.x, smooth = "log", degrees = list(3, 1))
neat.xsmat <- presmoothing(neat.x, smooth = "loglinear",
degrees = list(3, 1), stepup = TRUE)
plot(neat.xs)
plot(neat.x, neat.xsmat, ylty = 1:4)
round(rbind(x = summary(neat.x), xs = summary(neat.xs)), 2)
plot(neat.xs)
plot(neat.x, neat.xsmat, ylty = 1:5)
presmoothing(neat.x, smooth = "loglinear",
degrees = list(c(3, 3), c(1, 1)), compare = TRUE)
equate(act.x, act.y, type = "mean")
neat.ef <- equate(neat.x, neat.y, type = "equip",
method = "frequency estimation", smoothmethod = "log")
summary(neat.ef)
cbind(newx = c(3, 29, 8, 7, 13),
yx = equate(c(3, 29, 8, 7, 13), y = neat.ef))
head(neat.ef$concordance)
neat.i <- equate(neat.x, neat.y, type = "ident")
neat.lt <- equate(neat.x, neat.y, type = "linear",
method = "tucker")
neat.comp <- composite(list(neat.i, neat.lt), wc = .5,
symmetric = TRUE)
plot(neat.comp, addident = FALSE)
pisa.i <- equate(pisa, type = "ident", lowp = c(3.5, 2))
pisa.m <- equate(pisa, type = "mean", lowp = c(3.5, 2))
pisa.l <- equate(pisa, type = "linear", lowp = c(3.5, 2))
pisa.c <- equate(pisa, type = "circ", lowp = c(3.5, 2))
pisa.e <- equate(pisa, type = "equip", smooth = "log",
lowp = c(3.5, 2))
plot(pisa.i, pisa.m, pisa.l, pisa.c, pisa.e, addident = FALSE,
xpoints = pisa, morepars = list(ylim = c(0, 31)))
pisa.x <- freqtab(totals$b4[1:200, c("r3a", "r2", "s2")],
scales = list(0:15, 0:17, 0:18))
pisa.y <- freqtab(totals$b4[201:400, c("r4a", "r2", "s2")],
scales = list(0:16, 0:17, 0:18))
pisa.mnom <- equate(pisa.x, pisa.y, type = "mean",
method = "nom")
pisa.mtuck <- equate(pisa.x, pisa.y, type = "linear",
method = "tuck")
pisa.mfreq <- equate(pisa.x, pisa.y, type = "equip",
method = "freq", smooth = "loglin")
pisa.snom <- equate(margin(pisa.x, 1:2), margin(pisa.y, 1:2),
type = "mean", method = "nom")
pisa.stuck <- equate(margin(pisa.x, 1:2), margin(pisa.y, 1:2),
type = "linear", method = "tuck")
pisa.sfreq <- equate(margin(pisa.x, 1:2), margin(pisa.y, 1:2),
type = "equip", method = "freq", smooth = "loglin")
plot(pisa.snom, pisa.stuck, pisa.sfreq,
pisa.mnom, pisa.mtuck, pisa.mfreq,
col = rep(rainbow(3), 2), lty = rep(1:2, each = 3))
neat.xp <- presmoothing(neat.x, "loglinear", degrees = list(4, 2))
neat.xpmat <- presmoothing(neat.x, "loglinear", degrees = list(4, 2),
stepup = TRUE)
neat.yp <- presmoothing(neat.y, "loglinear", degrees = list(4, 2))
neat.ypmat <- presmoothing(neat.y, "loglinear", degrees = list(4, 2),
stepup = TRUE)
plot(neat.x, neat.xpmat)
plot(neat.y, neat.ypmat)
set.seed(131031)
reps <- 100
xn <- 100
yn <- 100
crit <- equate(neat.xp, neat.yp, "e", "c")$conc$yx
neat.args <- list(i = list(type = "i"),
mt = list(type = "mean", method = "t"),
mc = list(type = "mean", method = "c"),
lt = list(type = "lin", method = "t"),
lc = list(type = "lin", method = "c"),
ef = list(type = "equip", method = "f", smooth = "log"),
ec = list(type = "equip", method = "c", smooth = "log"),
ct = list(type = "circ", method = "t"),
cc = list(type = "circ", method = "c", chainmidp = "lin"))
bootout <- bootstrap(x = neat.xp, y = neat.yp, xn = xn, yn = yn,
reps = reps, crit = crit, args = neat.args)
plot(bootout, addident = FALSE, col = c(1, rainbow(8)))
plot(bootout, out = "se", addident = FALSE,
col = c(1, rainbow(8)), legendplace = "top")
plot(bootout, out = "bias", addident = FALSE, legendplace = "top",
col = c(1, rainbow(8)), morepars = list(ylim = c(-.9, 3)))
plot(bootout, out = "rmse", addident = FALSE, legendplace = "top",
col = c(1, rainbow(8)), morepars = list(ylim = c(0, 3)))
round(summary(bootout), 2)
detach(PISA)
options(op) |
"HSCT" |
context("Test Scarcity")
valid_mat = matrix(c(1, 0, 0, 0,
rep(1, 3), 0,
0, rep(1, 3),
0, 1, 0, 1),
ncol = 4)
dimnames(valid_mat) = list("site" = paste0("s", 1:4), "species" = letters[1:4])
log_mat = (valid_mat == 1)
suppressWarnings({
com_df = lapply(rownames(log_mat), function(x) {
species = colnames(valid_mat)[log_mat[x, ]]
data.frame(site = rep(x, length(species)), species = species,
stringsAsFactors = FALSE)
})
com_df = do.call(rbind.data.frame, com_df)
})
trait_df = data.frame(tr1 = c("A", "A", "B", "B"), tr2 = c(rep(0, 3), 1),
tr3 = seq(4, 16, 4), stringsAsFactors = TRUE)
rownames(trait_df) = letters[1:4]
dist_mat = compute_dist_matrix(trait_df)
correct_dist = data.frame(
site = c("s1", "s1", "s2", "s2", "s2", "s3", "s3", "s4", "s4"),
species = c("a", "b", "b", "c", "d","b", "c", "c", "d"),
Di = c(1/9, 1/9, 6/9, 4/9, 6/9, 4/9, 4/9, 4/9, 4/9),
stringsAsFactors = FALSE
)
correct_dist_mat = table(correct_dist$site, correct_dist$species)
correct_dist_mat[which(correct_dist_mat == 0)] = NA_real_
correct_dist_mat[which(correct_dist_mat == 1)] = correct_dist$Di
correct_dist_mat[2, 3] = 4/9
correct_dist_mat[2, 4] = 6/9
names(dimnames(correct_dist_mat)) = c("site", "species")
correct_dist_ab = correct_dist
small_mat = matrix(c(1, 0, 0, 1), nrow = 2)
colnames(small_mat) = letters[1:2]
rownames(small_mat) = c("s1", "s2")
small_df = matrix_to_tidy(small_mat)
undef_dist = data.frame(site = c("s1", "s2"), species = c("a", "b"),
Di = rep(NaN, 2), stringsAsFactors = FALSE)
undef_dist_mat = table(undef_dist$site, undef_dist$species)
undef_dist_mat[which(undef_dist_mat == 0)] = NA_real_
undef_dist_mat[which(undef_dist_mat == 1)] = undef_dist$Di
suppressWarnings({
suppressMessages({
undef_test = distinctiveness(small_mat, dist_mat)
})
})
com_df_ex = cbind(com_df, data.frame(abund = c(0.3, 0.7, 0.2, 0.6,
0.2, 0.5, 0.5, 0.2,
0.8)))
abund_mat = valid_mat
abund_mat[abund_mat == 1] = com_df_ex[order(com_df_ex$species), "abund"]
scarcity_mat = apply(abund_mat, 1, function(x) {
ifelse(x != 0, exp(-sum(x != 0)*log(2)*x), NA)
})
scarcity_mat = t(scarcity_mat)
com_scarcity = aggregate(species ~ site, data = com_df_ex,
function(x) sum(x != 0))
colnames(com_scarcity)[2] = "N_sp"
com_scarcity = merge(com_df_ex, com_scarcity, by = "site")
com_scarcity$Si = exp(-com_scarcity$N_sp*log(2)*com_scarcity$abund)
com_scarcity = com_scarcity[, c(1:3, 5)]
rownames(com_scarcity) = NULL
abund_com = matrix_to_stack(abund_mat, value_col = "abund", row_to_col = "site",
col_to_col = "species")
abund_com = subset(abund_com, abund > 0 & site == "s3")
abund_com$Di = c(4/9, 4/9)
test_that("Correct Scarcity computation", {
expect_equal(subset(com_scarcity, site == "s1"),
scarcity_com(subset(com_df_ex, site == "s1"),
"species", "abund"))
expect_equal(com_scarcity, scarcity_stack(com_df_ex,
"species", "site", "abund"))
expect_equal(scarcity_mat, scarcity(abund_mat))
})
test_that("Scarcity errors with bad input", {
expect_error(scarcity_stack(com_df_ex,
"species", "SITE_NOT_IN_TABLE", "abund"),
regexp = "'SITE_NOT_IN_TABLE' column not in provided data.frame")
expect_error(
scarcity_stack(
com_df_ex, "SPECIES_NOT_IN_TABLE", "site", "abund"),
regexp = paste0("'SPECIES_NOT_IN_TABLE' column not in ",
"provided data.frame"))
expect_error(scarcity_stack(com_df_ex, "species", "site", NULL),
regexp = "No relative abundance provided")
com_df_ab = com_df_ex
com_df_ab$abund = as.character(com_df_ex$abund)
expect_error(scarcity_stack(com_df_ab, "species", "site", "abund"),
regexp = "Provided abundances are not numeric")
}) |
caSegmentation<-function(y,x,c=2)
{
options(contrasts=c("contr.sum","contr.poly"))
# remember the user's OutDec option and restore it on exit; the computation
# itself runs with OutDec set to ","
outdec<-options(OutDec="."); on.exit(options(outdec))
options(OutDec=",")
y<-m2v(y)
Usi<-caTotalUtilities(y,x)
set.seed(123)
seg<-kmeans(Usi,c)
segment<-list(segm=seg,util=Usi,sclu=seg$cluster)
return(segment)
} |
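# Usage sketch (hypothetical objects): 'prefs' is a respondents-by-profiles
# preference matrix and 'profs' the profile design matrix; m2v() and
# caTotalUtilities() are internal helpers assumed to be available.
if (FALSE) {
  seg <- caSegmentation(y = prefs, x = profs, c = 3)
  table(seg$sclu)  # cluster sizes of the k-means segmentation
}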
LinRegLA_adapt<- function(model, particles = 1000, resampTol = 0.5, tempTol = 0.9) {
if (model ==1){
Data <- cbind(RcppSMC::radiata$y,RcppSMC::radiata$x1)
} else if (model == 2){
Data <- cbind(RcppSMC::radiata$y,RcppSMC::radiata$x2)
} else{
stop("Please choose a valid model (1 or 2).")
}
res <- LinRegLA_adapt_impl(as.matrix(Data), particles, resampTol, tempTol)
invisible(res)
} |
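# Usage sketch: runs the adaptive annealed SMC sampler on the radiata pine
# data bundled with RcppSMC; LinRegLA_adapt_impl() is the compiled backend
# assumed to be registered by the package.
if (FALSE) {
  res <- LinRegLA_adapt(model = 1, particles = 500)
  str(res)
}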
geom_textbox <- function(mapping = NULL, data = NULL,
stat = "identity", position = "identity",
...,
nudge_x = 0,
nudge_y = 0,
box.padding = unit(c(5.5, 5.5, 5.5, 5.5), "pt"),
box.margin = unit(c(0, 0, 0, 0), "pt"),
box.r = unit(5.5, "pt"),
width = unit(2, "inch"), minwidth = NULL, maxwidth = NULL,
height = NULL, minheight = NULL, maxheight = NULL,
na.rm = FALSE,
show.legend = NA,
inherit.aes = TRUE)
{
if (!missing(nudge_x) || !missing(nudge_y)) {
if (!missing(position)) {
stop("You must specify either `position` or `nudge_x`/`nudge_y` but not both.", call. = FALSE)
}
position <- position_nudge(nudge_x, nudge_y)
}
layer(
data = data,
mapping = mapping,
stat = stat,
geom = GeomTextBox,
position = position,
show.legend = show.legend,
inherit.aes = inherit.aes,
params = list(
box.padding = box.padding,
box.margin = box.margin,
box.r = box.r,
width = width, minwidth = minwidth, maxwidth = maxwidth,
height = height, minheight = minheight, maxheight = maxheight,
na.rm = na.rm,
...
)
)
}
GeomTextBox <- ggproto("GeomTextBox", Geom,
required_aes = c("x", "y", "label"),
default_aes = aes(
colour = "black", fill = "white", size = 3.88, hjust = 0.5,
vjust = 0.5, halign = 0, valign = 1, alpha = NA, family = "",
fontface = 1, lineheight = 1.2, text.colour = NULL, box.colour = NULL,
box.size = 0.25, orientation = "upright"
),
draw_panel = function(data, panel_params, coord,
box.padding = unit(c(5.5, 5.5, 5.5, 5.5), "pt"),
box.margin = unit(c(0, 0, 0, 0), "pt"),
box.r = unit(5.5, "pt"),
width = unit(2, "inch"), minwidth = NULL, maxwidth = NULL,
height = NULL, minheight = NULL, maxheight = NULL,
na.rm = FALSE) {
data <- coord$transform(data, panel_params)
rows <- split(data, seq(nrow(data)))
names(rows) <- NULL
grobs <- mapply(
make_textbox_grob,
rows,
list(box.padding),
list(box.margin),
list(box.r),
list(width),
list(minwidth),
list(maxwidth),
list(height),
list(minheight),
list(maxheight),
SIMPLIFY = FALSE
)
do.call(grobTree, grobs)
},
draw_key = draw_key_text
)
make_textbox_grob <- function(data,
box.padding = unit(c(5.5, 5.5, 5.5, 5.5), "pt"),
box.margin = unit(c(0, 0, 0, 0), "pt"),
box.r = unit(5.5, "pt"),
width = unit(2, "inch"), minwidth = NULL, maxwidth = NULL,
height = NULL, minheight = NULL, maxheight = NULL) {
textbox_grob(
data$label,
data$x, data$y, default.units = "native",
hjust = data$hjust, vjust = data$vjust,
halign = data$halign, valign = data$valign,
orientation = data$orientation,
padding = box.padding,
margin = box.margin,
width = width, minwidth = minwidth, maxwidth = maxwidth,
height = height, minheight = minheight, maxheight = maxheight,
gp = gpar(
col = scales::alpha(data$text.colour %||% data$colour, data$alpha),
fontsize = data$size * .pt,
fontfamily = data$family,
fontface = data$fontface,
lineheight = data$lineheight
),
box_gp = gpar(
col = scales::alpha(data$box.colour %||% data$colour, data$alpha),
fill = scales::alpha(data$fill, data$alpha),
lwd = data$box.size * .pt
),
r = box.r
)
} |
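# Usage sketch (hypothetical data): geom_textbox() draws self-wrapping text
# boxes; it assumes textbox_grob() (as in gridtext) is available and that
# ggplot2 is attached.
if (FALSE) {
  library(ggplot2)
  df <- data.frame(x = 0.5, y = 0.5,
                   label = "A longer piece of text that wraps inside the box.")
  ggplot(df, aes(x, y, label = label)) +
    geom_textbox(width = grid::unit(15, "lines"))
}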
yaml_delimiter_indices <- function(x) {
return(grep("^\\s*\\-\\-\\-\\s*$", x));
} |
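# Minimal, self-contained example: returns the indices of lines consisting
# solely of '---' (YAML front matter delimiters) in a character vector such
# as the result of readLines().
if (FALSE) {
  doc <- c("---", "title: Example", "---", "Body text")
  yaml_delimiter_indices(doc)  # 1 3
}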
context("misc tests")
test_all <- identical (Sys.getenv ("MPADGE_LOCAL"), "true")
test_that ("sequential structure", {
n <- 1e2
x <- cbind (-180 + 360 * runif (n), -90 + 180 * runif (n))
y <- cbind (-180 + 360 * runif (2 * n), -90 + 180 * runif (2 * n))
colnames (x) <- colnames (y) <- c ("x", "y")
d1 <- geodist (x, sequential = TRUE, measure = "haversine")
expect_equal (length (d1), nrow (x) - 1)
d2 <- geodist (x, sequential = TRUE, pad = TRUE,
measure = "haversine")
expect_equal (length (d2), nrow (x))
dmat <- geodist (x, measure = "haversine")
indx <- row (dmat) - col (dmat)
dmat1 <- split (dmat, indx)["1"][[1]]
expect_identical (d1, dmat1)
expect_message (d3 <- geodist (x, y, sequential = TRUE),
"Sequential distances calculated along values")
})
havdist <- function (x, y)
{
if (missing (y))
y <- x
x1mat <- array (x [, 1], dim = c (nrow (x), nrow (y)))
x2mat <- array (x [, 2], dim = c (nrow (x), nrow (y)))
y1mat <- t (array (y [, 1], dim = c (nrow (y), nrow (x))))
y2mat <- t (array (y [, 2], dim = c (nrow (y), nrow (x))))
xd <- (x1mat - y1mat) * pi / 180
yd <- (x2mat - y2mat) * pi / 180
sxd <- sin (xd / 2)
syd <- sin (yd / 2)
earth <- 6378137
d <- syd * syd +
cos (x2mat * pi / 180) * cos (y2mat * pi / 180) * sxd * sxd
2 * earth * asin (sqrt (d));
}
test_that("matrix structure for x only", {
n <- 100
x <- cbind (-180 + 360 * runif (n), -90 + 180 * runif (n))
colnames (x) <- c ("x", "y")
d1 <- geodist (x, measure = "haversine")
d2 <- havdist (x)
if (test_all)
expect_identical (d1, d2)
})
test_that("matrix structure for x y", {
n <- 100
x <- cbind (-180 + 360 * runif (n), -90 + 180 * runif (n))
y <- cbind (-180 + 360 * runif (2 * n), -90 + 180 * runif (2 * n))
colnames (x) <- colnames (y) <- c ("x", "y")
d1 <- geodist (x, y, measure = "haversine")
d2 <- havdist (x, y)
if (test_all)
expect_identical (d1, d2)
})
test_that("geodesic extreme cases", {
x <- rbind (c (0, 0),
c (0, 1))
colnames (x) <- c ("x", "y")
d <- geodist (x, measure = "geodesic")
expect_true (sum (diag (d)) == 0)
x <- rbind (c (0, 0),
c (0, 90))
colnames (x) <- c ("x", "y")
d <- geodist (x, measure = "geodesic")
expect_true (sum (diag (d)) == 0)
x <- rbind (c (0, 0),
c (1, 0))
colnames (x) <- c ("x", "y")
d <- geodist (x, measure = "geodesic")
expect_true (sum (diag (d)) == 0)
x <- rbind (c (0, 0),
c (180, 0))
colnames (x) <- c ("x", "y")
d <- geodist (x, measure = "geodesic")
expect_true (sum (diag (d)) == 0)
m <- 20003930
expect_true (abs (d [1, 2] - m) < 2)
expect_true (abs (d [2, 1] - m) < 2)
})
test_that ("geodist_benchmark", {
d <- geodist_benchmark (lat = 1, d = 100, n = 100)
expect_is (d, "matrix")
expect_equal (nrow (d), 2)
expect_equal (ncol (d), 3)
expect_equal (rownames (d), c ("absolute", "relative"))
expect_equal (colnames (d),
c ("haversine", "vincenty", "cheap"))
})
test_that ("geodist paired", {
n <- 1e2
x <- cbind (-180 + 360 * runif (n), -90 + 180 * runif (n))
y <- cbind (-180 + 360 * runif (2 * n), -90 + 180 * runif (2 * n))
colnames (x) <- colnames (y) <- c ("x", "y")
expect_error (d1 <- geodist (x, y, paired = TRUE),
"x and y must have the same number of rows")
y <- cbind (-180 + 360 * runif (n), -90 + 180 * runif (n))
colnames (y) <- c ("x", "y")
if (test_all)
expect_message (d1 <- geodist (x, y, paired = TRUE),
"Maximum distance is > 100km")
else
d1 <- geodist (x, y, paired = TRUE)
expect_is (d1, "numeric")
expect_equal (length (d1), n)
expect_silent (d2 <- geodist (x, y, paired = TRUE,
measure = "haversine"))
expect_silent (d3 <- geodist (x, y, paired = TRUE,
measure = "vincenty"))
expect_silent (d4 <- geodist (x, y, paired = TRUE,
measure = "geodesic"))
expect_true (cor (d2, d3) > 0.99)
expect_true (cor (d2, d4) > 0.99)
})
test_that ("geodist_vec", {
n <- 1e2
x1 <- runif (n, -0.1, 0.1)
y1 <- runif (n, -0.1, 0.1)
x2 <- runif (n, -0.1, 0.1)
y2 <- runif (n, -0.1, 0.1)
x <- cbind ("x" = x1, "y" = y1)
y <- cbind ("x" = x2, "y" = y2)
expect_message (d <- geodist_vec (x1, y1, x2, y2,
sequential = TRUE),
paste0 ("Sequential distances calculated along ",
"values of 'x' only"))
expect_error (d <- geodist_vec (),
"x1 and y1 must be provided")
expect_error (d <- geodist_vec (x1 = x, y1 = y),
"geodist_vec only accepts vector inputs")
expect_error (d <- geodist_vec (x1, y1 [1:10]),
"x1 and y1 must have the same length")
measures <- c ("cheap", "haversine", "vincenty", "geodesic")
for (m in measures)
{
d1 <- geodist (x, y, paired = TRUE, measure = m)
d2 <- geodist_vec (x1, y1, x2, y2, paired = TRUE, measure = m)
expect_identical (d1, d2)
d1 <- geodist (x, sequential = TRUE, measure = m)
d2 <- geodist_vec (x1, y1, sequential = TRUE, measure = m)
expect_identical (d1, d2)
d1 <- geodist (x, measure = m)
d2 <- geodist_vec (x1, y1, measure = m)
expect_identical (d1, d2)
d1 <- geodist (x, y, measure = m)
d2 <- geodist_vec (x1, y1, x2, y2, measure = m)
expect_identical (d1, d2)
}
}) |
createTanimotoBaseline <-
function (neuroepso, neuroesso, neuroepi, dneuromaxk) {
neurobase <- unlist(strsplit(stringr::str_replace_all((dneuromaxk$venntable$objects$EPILONT_EpSO_ESSO), "\\*", ""), ", "))
topk <- length(neurobase)
jbasenepso <- calcJaccard(neurobase, neuroepso[1:topk])
jbasenesso <- calcJaccard(neurobase, neuroesso[1:topk])
jbasenepi <- calcJaccard(neurobase, neuroepi[1:topk])
djbase <- data.frame (
Elements = 1:topk,
EpSO = jbasenepso[1:topk],
ESSO = jbasenesso[1:topk],
EPILONT = jbasenepi[1:topk]
)
  # NOTE: the original hex colour codes were lost during extraction
  # (everything after '#' was dropped as a comment); standard colour names
  # are used here as placeholders.
  cols <-
    c("EpSO" = "darkgreen",
      "ESSO" = "blue",
      "EPILONT" = "red")
tanimotobase <- ggplot2::ggplot(data = djbase,
ggplot2::aes_string(
x = "Elements",
y = "EpSO",
colour = shQuote("EpSO")
)) +
ggplot2::theme_minimal () +
ggplot2::theme(
panel.grid.major = ggplot2::element_line(colour = "gray"),
panel.grid.minor.y = ggplot2::element_line(colour = "gray"),
legend.text = ggplot2::element_text(size =
11),
legend.position = c(0, 1),
legend.justification = c(0, 0),
legend.direction = "horizontal",
legend.title = ggplot2::element_blank(),
plot.title = ggplot2::element_text(size = 11, face = "bold"),
axis.title.x = ggplot2::element_text(size = 11, face = "bold"),
axis.title.y = ggplot2::element_text(size = 11, face = "bold"),
axis.text.x = ggplot2::element_text(size = 11)
) +
ggplot2::labs (
y = "Tanimoto",
x = "K",
title = "Tanimoto Similarity between DrugBank vectors of Epilepsy ontologies vs. Aggregated Baseline",
subtitle = ""
) +
ggplot2::geom_step(size = 1) +
ggplot2::geom_step(
data = djbase,
ggplot2::aes_string(
x = "Elements",
y = "ESSO",
colour = shQuote("ESSO")
),
size = 1
) +
ggplot2::geom_step(
data = djbase,
ggplot2::aes_string(
x = "Elements",
y = "EPILONT",
colour = shQuote("EPILONT")
),
size = 1
) +
ggplot2::coord_trans(xlim = c(0, topk), ylim = c(0, 1)) +
ggplot2::scale_x_continuous(breaks = c(0, 5, 10, 15, 20, 25, topk)) +
ggplot2::scale_y_continuous(breaks = c(0.25, 0.5, 0.75, 1)) +
ggplot2::scale_colour_manual(name = "Dictionary", values = cols) +
ggplot2::scale_size_manual()
return (tanimotobase)
}
createJaccardPlotDBMeSH <-
function (jmeshepso, jmeshesso, jmeshepi) {
djmesh <- data.frame (
Elements = 1:250,
EpSO = jmeshepso[1:250],
ESSO = jmeshesso[1:250],
EPILONT = jmeshepi[1:250]
)
  # NOTE: original hex colour codes lost in extraction; placeholders used.
  cols <-
    c("EpSO" = "darkgreen",
      "ESSO" = "blue",
      "EPILONT" = "red")
jaccarddbmesh <- ggplot2::ggplot(data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EpSO",
colour = shQuote("EpSO")
)) +
ggplot2::theme_minimal () +
ggplot2::theme(
panel.grid.major = ggplot2::element_line(colour = "gray"),
panel.grid.minor.y = ggplot2::element_line(colour = "gray"),
legend.text = ggplot2::element_text(size =
11),
legend.position = c(0, 1),
legend.justification = c(0, 0),
legend.direction = "horizontal",
legend.title = ggplot2::element_blank(),
plot.title = ggplot2::element_text(size = 11, face = "bold"),
axis.title.x = ggplot2::element_text(size = 11, face = "bold"),
axis.title.y = ggplot2::element_text(size = 11, face = "bold"),
axis.text.x = ggplot2::element_text(size = 11)
) +
ggplot2::labs (
y = "Jaccard",
x = "Length",
title = "Jaccard Similarity between DrugBank vectors of Epilepsy ontologies versus MeSH derived DrugBank vector",
subtitle = ""
) +
ggplot2::geom_step(size = 1) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "ESSO",
colour = shQuote("ESSO")
),
size = 1
) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EPILONT",
colour = shQuote("EPILONT")
),
size = 1
) +
ggplot2::coord_trans(xlim = c(0, 250), ylim = c(0, 1)) +
ggplot2::scale_x_continuous(breaks = c(0, 25, 50, 75, 100, 125, 150, 175, 200, 225, 250)) +
ggplot2::scale_y_continuous(breaks = c(0.25, 0.5, 0.75, 1)) +
ggplot2::scale_colour_manual(name = "Dictionary", values = cols) +
ggplot2::scale_size_manual()
return (jaccarddbmesh)
}
createJaccardPlotMeSHFive <-
function (jmeshepso, jmeshesso, jmeshepi, jmeshepilepsyand, jmeshepilepsyor) {
djmesh <- data.frame (
Elements = 1:962,
EpSO = jmeshepso[1:962],
ESSO = jmeshesso[1:962],
EPILONT = jmeshepi[1:962],
EPAND = jmeshepilepsyand[1:962],
EPOR = jmeshepilepsyor[1:962]
)
  # NOTE: original hex colour codes lost in extraction; placeholders used.
  cols <-
    c("EpSO" = "darkgreen",
      "ESSO" = "blue",
      "EPILONT" = "red",
      "EPAND" = "orange",
      "EPOR" = "purple"
    )
jaccarddbmesh <- ggplot2::ggplot(data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EpSO",
colour = shQuote("EpSO")
)) +
ggplot2::theme_minimal () +
ggplot2::theme(
panel.grid.major = ggplot2::element_line(colour = "gray"),
panel.grid.minor.y = ggplot2::element_line(colour = "gray"),
legend.text = ggplot2::element_text(size =
11),
legend.position = c(0, 1),
legend.justification = c(0, 0),
legend.direction = "horizontal",
legend.title = ggplot2::element_blank(),
plot.title = ggplot2::element_text(size = 11, face = "bold"),
axis.title.x = ggplot2::element_text(size = 11, face = "bold"),
axis.title.y = ggplot2::element_text(size = 11, face = "bold"),
axis.text.x = ggplot2::element_text(size = 11)
) +
ggplot2::labs (
y = "Jaccard",
x = "Length",
title = "Jaccard Similarity between DrugBank vectors of Epilepsy ontologies versus MeSH derived DrugBank vector",
subtitle = ""
) +
ggplot2::geom_step(size = 1) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "ESSO",
colour = shQuote("ESSO")
),
size = 1
) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EPILONT",
colour = shQuote("EPILONT")
),
size = 1
) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EPAND",
colour = shQuote("EPAND")
),
size = 1
) +
ggplot2::geom_step(
data = djmesh,
ggplot2::aes_string(
x = "Elements",
y = "EPOR",
colour = shQuote("EPOR")
),
size = 1
) +
ggplot2::coord_trans(xlim = c(0, 100), ylim = c(0, 1)) +
ggplot2::scale_x_continuous(breaks = c(0, 25, 50, 75, 100)) +
ggplot2::scale_y_continuous(breaks = c(0.25, 0.5, 0.75, 1)) +
ggplot2::scale_colour_manual(name = "Dictionary", values = cols) +
ggplot2::scale_size_manual()
return (jaccarddbmesh)
}
tanimotoPlot <- function(neurospace, neuromesh, k) {
neurospace <- as.character(neurospace)
jneuromeshneurospace <- calcJaccard(neurospace, neuromesh[1:k])
dj <- data.frame(Elements = c(1:k), TopKSpace=1-jneuromeshneurospace)
  # NOTE: original hex colour code lost in extraction; placeholder used.
  cols <-
    c("TopKSpace" = "steelblue"
    )
tanimoto <- ggplot2::ggplot(data = dj,
ggplot2::aes_string(
x = "Elements",
y = "TopKSpace",
colour = shQuote("TopKSpace")
)) +
ggplot2::theme_minimal () +
ggplot2::theme(
panel.grid.major.y = ggplot2::element_line(colour = "black"),
panel.grid.major.x = ggplot2::element_line(colour = "grey"),
panel.grid.minor.x = ggplot2::element_line(colour = "black"),
panel.grid.minor.y = ggplot2::element_line(colour = "grey"),
legend.text = ggplot2::element_text(size = 11),
legend.position = c(0, 1),
legend.justification = c(0, 0),
legend.direction = "horizontal",
legend.title = ggplot2::element_blank(),
plot.title = ggplot2::element_text(size = 11, face = "bold"),
axis.title.x = ggplot2::element_text(size = 11, face = "bold"),
axis.title.y = ggplot2::element_text(size = 11, face = "bold"),
axis.text.x = ggplot2::element_text(size = 11)
) +
ggplot2::labs (
y = "Tanimoto",
x = "K",
title = "Tanimoto Similarity between the TopKSpace of Epilepsy Ontologies and the MeSH-derived Vector",
subtitle = ""
) +
ggplot2::geom_step(size = 2) +
ggplot2::coord_trans(xlim = c(1, k), ylim = c(0, 0.5)) +
ggplot2::scale_x_continuous(breaks = c(1, 5, 10, 15, 20, 25, 30, 35, k)) +
ggplot2::scale_y_continuous(breaks = c(0.1, 0.2, 0.3, 0.4, 0.5)) +
ggplot2::scale_colour_manual(name = "Dictionary", values = cols) +
    ggplot2::scale_size_manual()
  # return the plot object explicitly, as the other plotting helpers do
  return(tanimoto)
}
context("Multivariate Normal Semi Conjugate Tests")
pp <- list(nu0 = 2,
phi0 = diag(2),
mu0 = matrix(c(0, 0),ncol=2),
sigma0 = diag(2))
test_that("Mixture Object Create", {
mdobj <- Mvnormal2Create(pp)
expect_s3_class(mdobj, c("list", "MixingDistribution", "mvnormal", "nonconjugate"))
})
test_that("Multivariate Normal Likelihood", {
mdobj <- Mvnormal2Create(pp)
test_theta <- list(mu=array(c(0,0), c(1,2,1)), sig=array(diag(2), c(2,2,1)))
lik_test <- Likelihood(mdobj, c(0,0), test_theta)
expect_equal(lik_test, 1/sqrt(4*pi^2))
test_theta_multi <- list(mu=array(c(0,0), c(1,2,2)), sig=array(diag(2), c(2,2,2)))
lik_test_multi <- Likelihood(mdobj, c(0,0), test_theta_multi)
expect_equal(lik_test_multi, rep.int(1/sqrt(4*pi^2), 2))
})
test_that("Multivariate Normal Prior Draw", {
mdobj <- Mvnormal2Create(pp)
PriorDraw_test_single <- PriorDraw(mdobj, 1)
expect_is(PriorDraw_test_single, "list")
PriorDraw_test_multiple <- PriorDraw(mdobj, 10)
expect_is(PriorDraw_test_multiple, "list")
expect_equal(dim(PriorDraw_test_multiple$mu), c(1,2,10))
expect_equal(dim(PriorDraw_test_multiple$sig), c(2,2,10))
})
test_that("Multivariate Normal Posterior Draw", {
test_data <- mvtnorm::rmvnorm(10, c(0,0), diag(2))
mdobj <- Mvnormal2Create(pp)
post_draws_single <- PosteriorDraw(mdobj, test_data, 1)
expect_is(post_draws_single, "list")
expect_equal(length(post_draws_single), 2)
expect_equal(dim(post_draws_single$mu), c(1,2,1))
expect_equal(dim(post_draws_single$sig), c(2,2,1))
post_draws_multi <- PosteriorDraw(mdobj, test_data[1,], 10)
expect_equal(length(post_draws_multi), 2)
expect_equal(dim(post_draws_multi$mu), c(1,2,10))
expect_equal(dim(post_draws_multi$sig), c(2,2,10))
expect_is(post_draws_multi, "list")
})
test_that("DP Object", {
test_data <- mvtnorm::rmvnorm(10, c(0,0), diag(2))
dp <- DirichletProcessMvnormal2(test_data)
expect_is(dp, c("list", "dirichletprocess", "mvnormal2", "nonconjugate"))
})
test_that("DP Object Fit", {
test_data <- mvtnorm::rmvnorm(10, c(0,0), diag(2))
dp <- DirichletProcessMvnormal2(test_data)
dp <- Fit(dp, 2)
expect_is(dp, c("list", "dirichletprocess", "mvnormal2", "nonconjugate"))
expect_length(dp$likelihoodChain, 2)
expect_length(dp$alphaChain, 2)
}) |
generate_data <- function(n,true.theta=c(1, 1, 1)/sqrt(3),family="gaussian",ncopy=1){
sigma = 0.1
c1 = 0.3912
c2 = 1.3409
rho = 0.3
X = matrix(stats::runif(length(true.theta)*n), ncol=length(true.theta))
U = X%*%true.theta
fU = sin( (U-c1)*pi/(c2 -c1) )
Z <- 1 - c(1:n)%%2
q = fU + rho*Z
if(family=="gaussian"){
ylist <- lapply(vector(mode = "list", length = ncopy),function(x){q + rnorm(length(q),0,sigma)})
}else if(family=="binomial"){
py = exp(q)/(1+exp(q))
ylist <- lapply(vector(mode = "list", length = ncopy),function(x){rbinom(length(q), size=1, prob=py)})
}else if(family=="poisson"){
py = exp(q)
ylist <- lapply(vector(mode = "list", length = ncopy),function(x){rpois(length(q),py)})
}
if(ncopy==1){ylist = ylist[[1]]}
return(list("X" = X, "Y" = ylist,"Z"=Z,"single_index_values"=fU))
}
si <- function(alpha,y,x,z,opt=TRUE,k=13,smooth_selection,fam,bs="ps", fx=FALSE,scale=scale) {
theta <- sign(alpha[1])*alpha/sqrt(sum(alpha^2))
a <- x%*%theta
if(is.null(z)){
b <- mgcv::gam(y~s(a,bs=bs,fx=fx,m=2,k=k),family=fam,method= smooth_selection,scale=scale )
}else{
b <- mgcv::gam(y~s(a,bs=bs,fx=fx,m=2,k=k)+z,family=fam,method= smooth_selection,scale=scale )
b$gamma <- b$coefficients[c(2:(1+NCOL(z)))]
}
if (opt) return(b$deviance) else {
b$theta <- theta
class(b) <- c("gplsim","gam","glm","lm")
return(b)
}
}
gplsim <- function(Y=Y,X=X,Z=Z,family=gaussian(),penalty=TRUE,penalty_type = "L2", scale = -1, smooth_selection = "GCV.Cp",profile = TRUE, bs="ps", user.init=NULL,k=13){
p <- dim(X)[2]
  if(p<2){stop("SIM predictors must be no fewer than two")}
if(!is.null(user.init)){
if(length(user.init)!=(p)){
stop("user.init length must be p")
}else{
init.alpha <- user.init
}
}else{
if(is.null(Z)){
linear <- glm(Y~X-1,family=family)
} else{
temp_XZ <- cbind(X,Z)
linear <- glm(Y~temp_XZ-1,family=family)
}
init.alpha <- linear$coefficients[1:(NCOL(X))]
init.alpha <- sign(init.alpha[1])*init.alpha/sqrt(sum(init.alpha^2))
}
if(profile==FALSE){
non.profile.fit <- plsiml(x=X,y=Y ,z=Z,degree=3,nknots=13,maxiter = 2,alpha0 = NULL)
a <- X%*%non.profile.fit$alpha
    if(is.null(Z)){
      # the response is the function argument Y (a lowercase y does not
      # exist in this scope)
      b <- mgcv::gam(Y~s(a,bs=bs,fx=!penalty,m=2,k=13),family=family,method= smooth_selection,scale=scale )
    }else{
      b <- mgcv::gam(Y~s(a,bs=bs,fx=!penalty,m=2,k=13)+Z,family=family,method= smooth_selection,scale=scale )
      b$gamma <- b$coefficients[c(2:(1+NCOL(Z)))]
}
b$theta <- non.profile.fit$alpha
class(b) <- c("gplsim","gam","glm","lm")
}else{
er <- suppressWarnings(optim(init.alpha,si,y=Y,x=X,z=Z,fam=family, smooth_selection=smooth_selection, hessian=TRUE,fx=!penalty,k=k,bs=bs,scale=scale))
b <- si(er$par,y=Y,X,Z, fam=family, smooth_selection=smooth_selection, opt=FALSE,k=k,bs=bs,scale=scale)
}
b$Xnames <- if(is.null(colnames(X))) paste0("X.",seq(1,NCOL(X),by = 1)) else colnames(X)
b$Znames <- if(is.null(colnames(Z)) && !is.null(Z)) paste0("Z.",seq(1,NCOL(Z),by = 1)) else colnames(Z)
return(b)
}
plot_si <- function(x,family=gaussian(),ylab="mean",yscale=NULL,plot_data=FALSE){
if(is.null(x$Znames)){
offset_fit <- family$linkinv((x$linear.predictors))
}else{
offset_fit <- family$linkinv((x$linear.predictors-x$gamma%*%t(x$model$z)))
}
UQ = cbind(x$model$a,matrix(offset_fit,ncol=1))
if(is.null(yscale)){ylim = range(offset_fit)}else{ylim = yscale}
plot(UQ[order(UQ[,1]),],col="blue",type="l",ylim=ylim,lty=1,xlab = "single index",ylab = ylab)
if(plot_data){graphics::points(UQ[order(UQ[,1]),1],(x$model$y)[order(UQ[,1])],pch=20)}
}
summary.gplsim <- function(object,...){
gplsim_obj <- mgcv::summary.gam(object)
p.table.sim <- matrix(object$theta,ncol=1)
dimnames(p.table.sim) <- list(object$Xnames, c("Estimate"))
gplsim_obj$p.coeff.sim <- p.table.sim
row.names(gplsim_obj$p.table) <- c("Intercept",object$Znames)
class(gplsim_obj) <- "summary.gplsim"
return(gplsim_obj)
}
print.summary.gplsim <- function(x, digits = max(5, getOption("digits") - 3),
signif.stars = getOption("show.signif.stars"), ...)
{ print(x$family)
cat("Formula:\n")
if (is.list(x$formula)) for (i in 1:length(x$formula)) print(x$formula[[i]]) else
print(x$formula)
if (length(x$p.coeff)>0)
{ cat("\npartial linear coefficients:\n")
printCoefmat(x$p.table, digits = digits, signif.stars = signif.stars, na.print = "NA", ...)
}
cat("\n")
if (length(x$p.coeff.sim)>0)
{ cat("\nsingle index coefficients:\n")
printCoefmat(x$p.coeff.sim, digits = digits, signif.stars = signif.stars, na.print = "NA", ...)
}
cat("\n")
if(x$m>0)
{ cat("Approximate significance of smooth terms:\n")
printCoefmat(x$s.table, digits = digits, signif.stars = signif.stars, has.Pvalue = TRUE, na.print = "NA",cs.ind=1, ...)
}
cat("\n")
if (!is.null(x$rank) && x$rank< x$np) cat("Rank: ",x$rank,"/",x$np,"\n",sep="")
if (!is.null(x$r.sq)) cat("R-sq.(adj) = ",formatC(x$r.sq,digits=3,width=5)," ")
if (length(x$dev.expl)>0) cat("Deviance explained = ",formatC(x$dev.expl*100,digits=3,width=4),"%",sep="")
cat("\n")
if (!is.null(x$method)&&!(x$method%in%c("PQL","lme.ML","lme.REML")))
cat(x$method," = ",formatC(x$sp.criterion,digits=5),sep="")
cat(" Scale est. = ",formatC(x$scale,digits=5,width=8,flag="-")," n = ",x$n,"\n",sep="")
invisible(x)
}
add_sim_bound <- function(data,family = gaussian(),M=200,n=1000,true.theta=c(1, 1, 1)/sqrt(3)){
offset_fit_matrix <- matrix(0, M, n)
for (i in 1:M){
y=(data$Y)[[i]]
X=data$X
Z=data$Z
model_obj <- gplsim(y,X,Z,user.init=NULL,family = family)
offset_fit <- family$linkinv(model_obj$linear.predictors-model_obj$gamma%*%t(model_obj$model$z))
offset_fit_matrix[i,] <- offset_fit
}
quan2.5=apply(offset_fit_matrix, 2, stats::quantile, prob=0.025)
quan97.5=apply(offset_fit_matrix, 2, stats::quantile,prob=0.975)
fit_mean=apply(offset_fit_matrix, 2, mean)
U = data$X%*%true.theta
graphics::lines(U[order(U)],quan2.5[order(U)],type="l",lty=2)
graphics::lines(U[order(U)],quan97.5[order(U)],type="l",lty=2)
graphics::lines(U[order(U)],fit_mean[order(U)],type="l",lty=1,col="blue")
}
smooth.construct.tr.smooth.spec<-function(object,data,knots)
{ requireNamespace("splines")
m <- object$p.order[1]
if (is.na(m)) m <- 2
if (m<1) stop("silly m supplied")
if (object$bs.dim<0) object$bs.dim <- 10
nk<-object$bs.dim-m-1
if (nk<=0) stop("k too small for m")
x <- data[[object$term]]
x.shift <- mean(x)
k <- knots[[object$term]]
if (is.null(k))
{ n<-length(x)
k<-quantile(x[2:(n-1)],seq(0,1,length=nk+2))[2:(nk+1)]
}
if (length(k)!=nk)
stop(paste("there should be ",nk," supplied knots"))
x <- x - x.shift
k <- k - x.shift
X<-matrix(0,length(x),object$bs.dim)
for (i in 1:(m+1)) X[,i] <- x^(i-1)
for (i in 1:nk) X[,i+m+1]<-(x-k[i])^m*as.numeric(x>k[i])
object$X<-X
if (!object$fixed)
{ object$S[[1]]<-diag(c(rep(0,m+1),rep(1,nk)))
}
object$rank<-nk
object$null.space.dim <- m+1
object$knots<-k;object$m<-m;object$x.shift <- x.shift
object$df<-ncol(object$X)
class(object)<-"tr.smooth"
object
}
Predict.matrix.tr.smooth<-function(object,data)
{ requireNamespace("splines")
x <- data[[object$term]]
x <- x - object$x.shift
m <- object$m;
k<-object$knots
nk<-length(k)
X<-matrix(0,length(x),object$bs.dim)
for (i in 1:(m+1)) X[,i] <- x^(i-1)
for (i in 1:nk) X[,i+m+1] <- (x-k[i])^m*as.numeric(x>k[i])
X
} |
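# Usage sketch: with the constructor and predict methods above defined, the
# truncated power basis is available to mgcv as a custom smooth class via
# bs = "tr" (this mirrors the example in ?mgcv::smooth.construct).
if (FALSE) {
  set.seed(1)
  dat <- mgcv::gamSim(1, n = 200, scale = 2)
  b <- mgcv::gam(y ~ s(x0, bs = "tr", m = 2, k = 15), data = dat)
  plot(b, shade = TRUE)
}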
uniclogit <- function(g, Y, w, n, group){
  # Newton-Raphson search for the univariate conditional-logit parameter;
  # score() and seconddev() are the first and second derivatives of the
  # log-likelihood, assumed to be defined elsewhere in the package.
  theta <- rep(0.1, 1000)  # pre-allocated history of iterates
  k <- 2
  firstdev <- 1
  while (abs(firstdev) > 1e-5) {
    firstdev <- score(g, Y, w, n, group, theta[k - 1])
    theta[k] <- theta[k - 1] - firstdev / seconddev(g, Y, w, n, group, theta[k - 1])
    k <- k + 1
  }
  # last updated iterate; slots still holding the 0.1 initial value were
  # never reached
  return(theta[max(which(theta != 0.1))])
}
partimat <- function(x, ...)
UseMethod("partimat")
partimat.default <- function(x, grouping, method = "lda", prec = 100,
nplots.vert, nplots.hor, main = "Partition Plot", name, mar,
plot.matrix = FALSE, plot.control = list(), ...){
nvar <- ncol(x)
if(nvar < 2) stop("at least 2 variables required")
if(nlevels(grouping) < 2) stop("at least two classes required")
nobs <- nrow(x)
if(missing(name)) name <- colnames(x)
if(plot.matrix){
plot.new()
if(missing(mar)) mar <- rep(0, 4)
opar <- par(mfrow = c(nvar, nvar), mar = mar, oma = rep(3, 4), xpd = NA)
on.exit(par(opar))
for (i in 2:nvar)
for (j in 1:(i-1))
{
par(mfg = c(i, j))
drawparti(grouping, x[,j], x[,i], method = method,
prec = prec, legend.err = plot.matrix, xlab="", ylab="",
plot.control = c(xaxt="n", yaxt="n", plot.control), ...)
if(j == 1) axis(2)
if(i == nvar) axis(1)
par(mfg = c(j, i))
drawparti(grouping, x[,i], x[,j], method = method,
prec = prec, legend.err = plot.matrix, xlab="", ylab="",
plot.control = c(xaxt="n", yaxt="n", plot.control), ...)
if(j == 1) axis(3)
if(i == nvar) axis(4)
}
for (i in 1:nvar)
{
par(mfg = c(i, i))
plot(x[,i], x[,i], type = "n", xaxt="n", yaxt="n", xlab="", ylab="")
if(i == 1){
axis(2); axis(3)
}
else if(i == nvar){
axis(1); axis(4)
}
mxi <- mean(range(x[,i]))
do.call("text", c(list(mxi, mxi, name[i]), plot.control))
}
}
else{
ncomb <- round(0.5 * nvar * (nvar-1))
if (missing(nplots.hor) && missing(nplots.vert)){
nplots.hor<-ceiling(sqrt(ncomb))
nplots.vert<-floor(sqrt(ncomb))
}
else if (missing(nplots.hor)) nplots.hor<-ceiling(ncomb/nplots.vert)
else if (missing(nplots.vert)) nplots.vert<-ceiling(ncomb/nplots.hor)
vars <- matrix(ncol=ncomb,nrow=2*nobs)
varname <- matrix(ncol=ncomb,nrow=2)
k <- 1
for (i in 2:nvar)
for (j in 1:(i-1))
{
vars[,k] <- c(x[,i], x[,j])
varname[,k] <- c(name[i], name[j])
k <- k + 1
}
if(missing(mar)) mar <- c(5.1, 4.1, 2.1, 1.1)
opar <- par(mfrow = c(nplots.vert, nplots.hor), mar = mar,
oma = c(0, 0, !is.null(main), 0))
on.exit(par(opar))
sapply(1:ncomb, function(k)
drawparti(grouping = grouping, x = vars[(1:nobs), k],
y = vars[(nobs+1):(2*nobs), k], method = method,
xlab = varname[1,k], ylab = varname[2,k], prec = prec,
legend.err = plot.matrix, plot.control = plot.control, ...)
)
par(mfrow=c(1,1))
title(main = main, outer = TRUE)
}
invisible()
}
partimat.formula <- function(formula, data = NULL, ..., subset, na.action = na.fail)
{
m <- match.call(expand.dots = FALSE)
if (is.matrix(eval.parent(m$data)))
m$data <- as.data.frame(data)
m$... <- NULL
m[[1]] <- as.name("model.frame")
m <- eval.parent(m)
Terms <- attr(m, "terms")
grouping <- model.response(m)
x <- model.matrix(Terms, m)
xvars <- as.character(attr(Terms, "variables"))[-1]
if ((yvar <- attr(Terms, "response")) > 0)
xvars <- xvars[-yvar]
xlev <- if (length(xvars) > 0) {
xlev <- lapply(m[xvars], levels)
xlev[!sapply(xlev, is.null)]
}
xint <- match("(Intercept)", colnames(x), nomatch = 0)
if (xint > 0)
x <- x[, -xint, drop = FALSE]
res <- partimat.default(x, grouping, ...)
res$terms <- Terms
cl <- match.call()
cl[[1]] <- as.name("partimat")
res$call <- cl
res$contrasts <- attr(x, "contrasts")
res$xlevels <- xlev
attr(res, "na.message") <- attr(m, "na.message")
if (!is.null(attr(m, "na.action")))
res$na.action <- attr(m, "na.action")
    invisible(res)
}
partimat.matrix<-function (x, grouping, ..., subset, na.action = na.fail)
{
if (!missing(subset)) {
x <- x[subset, , drop = FALSE]
grouping <- grouping[subset]
}
if (!missing(na.action)) {
dfr <- na.action(structure(list(g = grouping, x = x),
class = "data.frame"))
grouping <- dfr$g
x <- dfr$x
}
res <- partimat.default(x, grouping, ...)
cl <- match.call()
cl[[1]] <- as.name("partimat")
res$call <- cl
    invisible(res)
}
partimat.data.frame<-function (x, ...)
{
res <- partimat.matrix(structure(data.matrix(x), class = "matrix"),
...)
cl <- match.call()
cl[[1]] <- as.name("partimat")
res$call <- cl
    invisible(res)
}
drawparti <- function(grouping, x, y, method = "lda", prec = 100,
xlab=NULL, ylab=NULL, col.correct = "black", col.wrong = "red",
col.mean = "black", col.contour = "darkgrey", gs = as.character(grouping),
pch.mean = 19, cex.mean = 1.3, print.err = 0.7, legend.err = FALSE,
legend.bg = "white", imageplot = TRUE, image.colors = cm.colors(nc),
plot.control = list(), ...){
success <- switch(method,
rpart = requireNamespace("rpart") ,
naiveBayes = requireNamespace("e1071"))
if(!is.null(success) && !success){
message("For method 'rpart' the 'rpart' package is required, for method 'naiveBayes' the package 'e1071'.")
return(NULL)
}
z <- switch(method,
lda = lda(grouping ~ x + y,...),
qda = qda(grouping ~ x + y,...),
svmlight = svmlight(grouping ~ x + y,...),
rda = rda(grouping~ x + y,
data = cbind.data.frame("grouping" = grouping, "x" = x, "y" = y), ...),
sknn = sknn(grouping ~ x + y,...),
rpart = rpart::rpart(grouping~ x + y,...),
naiveBayes = e1071::naiveBayes(grouping~ x + y,
data = cbind.data.frame("grouping" = grouping, "x" = x, "y" = y), ...),
stop("method not yet supported"))
xg <- seq(min(x), max(x), length = prec)
yg <- seq(min(y), max(y), length = prec)
grd <- expand.grid(x = xg, y = yg)
temp <- switch(method,
lda = predict(z, grd,...)$post,
qda = predict(z, grd,...)$post,
svmlight = e.scal(predict(z, grd,...)$post)$sv,
rda = predict(z, grd, posterior=TRUE, aslist=TRUE)$post,
rpart = predict(z, grd, ...),
sknn = predict(z, grd, ...)$post,
naiveBayes = predict(z, grd , type="raw", ...),
stop("method not yet supported"))
khead <- switch(method,
lda = predict(z, data.frame(cbind(x,y)),...)$class,
qda = predict(z, data.frame(cbind(x,y)),...)$class,
svmlight = predict(z, data.frame(cbind(x,y)),...)$class,
rda = predict(z, data.frame(cbind(x,y)), posterior=TRUE, aslist=TRUE)$class,
rpart = predict(z, data.frame(cbind(x,y)), type="class", ...),
sknn = predict(z, data.frame(cbind(x,y)),...)$class,
naiveBayes = predict(z, data.frame(cbind(x,y)), ...),
stop("method not yet supported"))
colorw <- grouping != khead
err <- round(mean(colorw), 3)
color <- ifelse(colorw, col.wrong, col.correct)
if(is.character(gs) || is.factor(gs)) gs <- substr(gs, 1, 1)
nc <- ncol(temp)
if(imageplot){
do.call("image", c(list(xg, yg, matrix(apply(temp, 1, which.max), ncol = prec),
main = NULL, col = image.colors, breaks = (0:nc) + .5,
xlab = xlab, ylab = ylab), plot.control))
do.call("points", c(list(x, y, pch = gs, col = color), plot.control))
box()
}
else
do.call("plot", c(list(x, y, pch = gs, col = color, main = NULL, xlab = xlab, ylab = ylab), plot.control))
if((method=="lda") || (method=="qda"))
points(z$means, pch = pch.mean, cex = cex.mean, col = col.mean)
if(!imageplot)
for(i in 1:ncol(temp)){
dummy <- temp[,i] - apply(temp[ , -i, drop = FALSE], 1, max)
contour(xg, yg, matrix(dummy, ncol = prec), levels = 0,
add = TRUE, drawlabels = FALSE, col = col.contour)
}
if(print.err){
if(legend.err)
legend(par("usr")[1], par("usr")[4],
legend = paste("Error:", err), bg = legend.bg, cex = print.err)
else
mtext(paste("app. error rate:", err), 3, cex = print.err)
}
} |
faux_options(plot = FALSE)
test_that("errors", {
expect_error(check_design(n = -1), "All n must be >= 0")
expect_warning(check_design(n = 0), "Some cell Ns are 0. Make sure this is intentional.")
expect_warning(check_design(n = 10.3), "Some cell Ns are not integers. They have been rounded up to the nearest integer.")
})
test_that("n as vector", {
expect_silent(design <- check_design(within = 2, between = 2, n = c(10, 20)))
expect_equal(design$n, list(B1a = 10, B1b = 20))
n <- list(B1a_B2a = 10, B1a_B2b = 20, B1b_B2a = 30, B1b_B2b = 40)
design <- check_design(within = 2, between = c(2, 2), n = c(10, 20, 30, 40))
expect_equal(design$n, n)
design <- check_design(within = c(2, 2), between = c(2, 2), n = c(10, 20, 30, 40))
expect_equal(design$n, n)
design <- check_design(within = 2, between = 2, n = c(B1b = 10, B1a = 20))
expect_equal(design$n, list(B1a = 20, B1b = 10))
})
test_that("params", {
expect_silent(check_design(between = 2, n = list("B1a" = 10, "B1b" = 20)))
expect_silent(check_design(between = 2, n = list("B1a" = 10, "B1b" = "20")))
expect_error(
check_design(between = 2, n = list("B1a" = 10, "B1b" = "B")),
"All n must be numbers"
)
expect_silent(check_design(between = 2, mu = list("B1a" = 10, "B1b" = 20)))
expect_silent(check_design(between = 2, mu = list("B1a" = 10, "B1b" = "20")))
expect_error(
check_design(between = 2, mu = list("B1a" = 10, "B1b" = "B")),
"All mu must be numbers"
)
expect_silent(check_design(between = 2, sd = list("B1a" = 10, "B1b" = 20)))
expect_silent(check_design(between = 2, sd = list("B1a" = 10, "B1b" = "20")))
expect_error(
check_design(between = 2, sd = list("B1a" = 10, "B1b" = "B")),
"All sd must be numbers", fixed = TRUE
)
expect_error(check_design(sd = -1), "All sd must be >= 0", fixed = TRUE)
err <- "You have duplicate levels for factor(s): A"
expect_error(check_design(list(A = c("A1", "A1"))), err, fixed = TRUE)
err <- "You have duplicate levels for factor(s): A, B"
expect_error(check_design(list(A = c("A1", "A1"), B = c("B1", "B1"))), err, fixed = TRUE)
err <- "You have multiple factors with the same name (A). Please give all factors unique names."
expect_error(check_design(list(A = c("A1", "A2"), A = c("B1", "B2"))), err, fixed = TRUE)
expect_error(check_design(list(A = c("A1", "A2")),
list(A = c("B1", "B2"))),
err, fixed = TRUE)
})
test_that("no factors", {
design <- check_design()
expect_equal(design$within, list())
expect_equal(design$between, list())
expect_equal(design$dv, list(y = "value"))
})
test_that("2w", {
within <- list(time = c("night", "day"))
between <- list()
design <- check_design(within, between, n = 10)
cell_n <- list(y = 10)
cell_mu <- list(y = list(night = 0, day = 0))
cell_sd <- list(y = list(night = 1, day = 1))
expect_equal(design$within, list(time = list(night = "night", day = "day")))
expect_equal(design$between, list())
expect_equal(design$n, cell_n)
expect_equal(design$mu, cell_mu)
expect_equal(design$sd, cell_sd)
expect_equal(design$dv, list(y = "value"))
expect_equal(design$id, list(id = "id"))
expect_true("design" %in% class(design))
})
test_that("2b", {
within <- list()
between <- list(time = c("night", "day"))
design <- check_design(within, between, n = 10)
cell_n <- list(night = 10, day = 10)
cell_mu <- list(night = list(y=0), day = list(y=0))
cell_sd <- list(night = list(y=1), day = list(y=1))
expect_equal(design$within, list())
expect_equal(design$between, list(time = list(night = "night", day = "day")))
expect_equal(design$n, cell_n)
expect_equal(design$mu, cell_mu)
expect_equal(design$sd, cell_sd)
expect_equal(design$dv, list(y = "value"))
expect_equal(design$id, list(id = "id"))
})
test_that("2w*2b", {
within <- list(time = c("night", "day"))
between <- list(pet = c("dog", "cat"))
design <- check_design(within, between, n = 10)
cell_n <- list(dog = 10, cat = 10)
cell_mu <- list(dog = list(night = 0, day = 0),
cat = list(night = 0, day = 0))
cell_sd <- list(dog = list(night = 1, day = 1),
cat = list(night = 1, day = 1))
expect_equal(design$within, list(time = list(night = "night", day = "day")))
expect_equal(design$between, list(pet = list(dog = "dog", cat = "cat")))
expect_equal(design$n, cell_n)
expect_equal(design$mu, cell_mu)
expect_equal(design$sd, cell_sd)
expect_equal(design$dv, list(y = "value"))
expect_equal(design$id, list(id = "id"))
})
test_that("2w*2w*2b*2b", {
within <- list(
time = c(night = "night time", day = "day time"),
condition = c(A = "condition A", B = "condition B")
)
between <- list(
pet = c(dog = "has dogs", cat = "has cats"),
age = c(old = "older", young = "younger")
)
design <- check_design(within, between)
cells_w <- c("night_A", "night_B", "day_A", "day_B")
cells_b <- c("dog_old", "dog_young", "cat_old", "cat_young")
cell_n <- list(dog_old = 100, dog_young = 100, cat_old = 100, cat_young = 100)
mu_list <- list(night_A = 0, night_B = 0, day_A = 0, day_B = 0)
cell_mu <- list(
dog_old = mu_list,
dog_young = mu_list,
cat_old = mu_list,
cat_young = mu_list
)
sd_list <- list(night_A = 1, night_B = 1, day_A = 1, day_B = 1)
cell_sd <- list(
dog_old = sd_list,
dog_young = sd_list,
cat_old = sd_list,
cat_young = sd_list
)
expect_equal(design$n, cell_n)
expect_equal(design$mu, cell_mu)
expect_equal(design$sd, cell_sd)
expect_equal(design$dv, list(y = "value"))
expect_equal(design$id, list(id = "id"))
})
test_that("design spec", {
between <- list(
"B" = c("B1", "B2")
)
within <- list(
"W" = c("W1", "W2")
)
n <- list(
"B1" = 60,
"B2" = 40
)
mu <- list(
"B1" = c(10, 20),
"B2" = c(10, 30)
)
sd <- list(
"B1" = c(3, 4),
"B2" = c(5, 6)
)
r <- list(
"B1" = .2,
"B2" = .5
)
dv <- list(dv = "DV")
id <- list(sub_id = "id")
design <- check_design(within, between, n, mu, sd, r, dv, id)
design_elements <- c("within", "between", "dv", "id", "vardesc", "n", "mu", "sd", "r", "sep", "params")
expect_equal(names(design), design_elements)
expect_equal(design$dv, dv)
expect_equal(design$id, id)
})
test_that("interactions", {
faux_options(sep = "_")
n <- list(
B1a_B2a = 10,
B1a_B2b = 20,
B1b_B2a = 30,
B1b_B2b = 40
)
design <- check_design(2, c(2,2), n = n, plot = FALSE)
expect_equal(design$n, n)
})
test_that("anon factors", {
design <- check_design(c(2, 4), c(2, 2))
w <- list(
W1 = list(W1a="W1a", W1b="W1b"),
W2 = list(W2a="W2a", W2b="W2b", W2c="W2c", W2d="W2d")
)
b <- list(
B1 = list(B1a="B1a",B1b="B1b"),
B2 = list(B2a="B2a", B2b="B2b")
)
expect_equal(design$within, w)
expect_equal(design$between, b)
})
test_that("wierd factor names", {
within <- list("A" = c("A_1", "A 2"),
"B" = c("B~1", "B'2"))
expect_error(check_design(within))
})
test_that("make_id", {
expect_equal(make_id(10), c("S01", "S02", "S03", "S04", "S05",
"S06", "S07", "S08", "S09", "S10"))
expect_equal(make_id(10, "SUB"), c("SUB01", "SUB02", "SUB03", "SUB04", "SUB05",
"SUB06", "SUB07", "SUB08", "SUB09", "SUB10"))
expect_equal(make_id(100)[[1]], "S001")
expect_equal(make_id(1000)[[1]], "S0001")
expect_equal(make_id(1000, "pokemon_")[[1]], "pokemon_0001")
expect_equal(make_id(100, digits = 4)[[1]], "S0001")
expect_equal(make_id(n = 100, prefix = "A", digits = 4)[[1]], "A0001")
expect_equal(make_id(digits = 4, prefix = "A", n = 100)[[1]], "A0001")
expect_equal(make_id(2:4), c("S2", "S3", "S4"))
expect_equal(make_id(100:200)[[1]], "S100")
})
test_that("params table", {
des <- check_design()
params <- data.frame(y = "value", n = 100, mu = 0, sd = 1)
expect_equal(des$params, params)
within <- list(
time = c("morning" = "am", "night" = "pm"),
condition = c("A" = "cond 1", "B" = "cond 2", "C" = "cond 3")
)
between <- list(
pet = c("dog" = "Dogs", "cat" = "Cats"),
x = c("X1" = "First", "X2" = "Second"))
n <- list(
dog_X1 = 100,
dog_X2 = 200,
cat_X1 = 300,
cat_X2 = 400
)
r <- list(
dog_X1 = seq(.1, by = .025, length.out = 15),
dog_X2 = seq(.2, by = .025, length.out = 15),
cat_X1 = seq(.3, by = .025, length.out = 15),
cat_X2 = seq(.4, by = .025, length.out = 15)
)
des <- check_design(within, between, n = n, mu = 1:24,
sd = 1:24, r = r, id = c(id = "ID"))
nm <- c("pet", "x", "time", "condition", "morning_A",
"morning_B", "morning_C", "night_A", "night_B",
"night_C", "n", "mu", "sd")
expect_true(des$params %>% nrow() == 24)
expect_true(all(des$params %>% names() == nm))
expected <- c(
"* [DV] y: value ",
"* [ID] id: ID ",
"* Within-subject variables:",
" * time: ",
" * morning: am",
" * night: pm",
" * condition: ",
" * A: cond 1",
" * B: cond 2",
" * C: cond 3",
"* Between-subject variables:",
" * pet: ",
" * dog: Dogs",
" * cat: Cats",
" * x: ",
" * X1: First",
" * X2: Second"
)
op <- capture.output(des)
expect_equal(op[1:length(expected)], expected)
})
test_that("sep", {
faux_options(sep = ".")
design <- check_design(
within = list(
A = c("A_1", "A_2"),
B = c("B_1", "B_2")
),
n = 5,
plot = FALSE
)
wide <- sim_data(design = design)
expect_equal(names(wide), c("id", "A_1.B_1", "A_1.B_2", "A_2.B_1", "A_2.B_2"))
long <- sim_data(design = design, long = TRUE)
expect_equal(unique(long$A), factor(c("A_1", "A_2")))
expect_equal(unique(long$B), factor(c("B_1", "B_2")))
faux_options(sep = "_")
})
test_that("vardesc", {
between <- list(
B = c(B1 = "Level 1B", B2 = "Level 2B")
)
within <- list(
W = c(W1 = "Level 1W", W2 = "Level 2W")
)
vardesc <- list(B = "Between-Subject Factor",
W = "Within-Subject Factor")
expect_silent(design <- check_design(within, between, vardesc = vardesc))
expect_mapequal(design$vardesc, vardesc)
op <- capture.output(design)
expect_equal(op[4], " * W: Within-Subject Factor: ")
expect_equal(op[8], " * B: Between-Subject Factor: ")
design <- check_design(within, between)
expect_mapequal(design$vardesc, list(W = "W", B = "B"))
op <- capture.output(design)
expect_equal(op[4], " * W: ")
expect_equal(op[8], " * B: ")
vardesc_missing <- list(B = "Between-Subject Factor")
expect_warning(design <- check_design(within, between, vardesc = vardesc_missing))
expect_equal(design$vardesc$W, "W")
vardesc_vec <- c(B = "Between-Subject Factor",
W = "Within-Subject Factor")
expect_silent(design <- check_design(within, between, vardesc = vardesc_vec))
expect_mapequal(design$vardesc, vardesc)
})
test_that("get_design", {
data <- sim_design(2, 2)
design <- get_design(data)
expect_equal(design, attributes(data)$design)
expect_equal(design$id, list(id = "id"))
})
test_that("set_design", {
design <- check_design()
data <- data.frame(id = 1:100, y = rnorm(100))
data_design <- set_design(data, design)
expect_equal(design, get_design(data_design))
expect_equal(class(data_design), c("faux", "data.frame"))
})
faux_options(plot = TRUE)
faux_options(sep = "_") |
convertUSCensusStates <- function(
nameOnly = FALSE,
simplify = TRUE
) {
dataDir <- getSpatialDataDir()
datasetName <- 'USCensusStates'
if (nameOnly)
return(datasetName)
url <- 'https://www2.census.gov/geo/tiger/GENZ2019/shp/cb_2019_us_state_500k.zip'
filePath <- file.path(dataDir, basename(url))
utils::download.file(url, filePath)
utils::unzip(filePath, exdir = file.path(dataDir, 'states'))
dsnPath <- file.path(dataDir, 'states')
shpName <- 'cb_2019_us_state_500k'
SPDF <- convertLayer(
dsn = dsnPath,
layerName = shpName,
encoding = 'UTF-8'
)
SPDF <- subset(SPDF, SPDF@data$STUSPS %in% US_52)
SPDF@data$ALAND <- as.numeric(SPDF@data$ALAND)
SPDF@data$AWATER <- as.numeric(SPDF@data$AWATER)
SPDF@data$countryCode <- "US"
SPDF@data <-
dplyr::select(
.data = SPDF@data,
countryCode = .data$countryCode,
stateCode = .data$STUSPS,
stateFIPS = .data$STATEFP,
stateName = .data$NAME,
landArea = .data$ALAND,
waterArea = .data$AWATER,
AFFGEOID = .data$AFFGEOID
)
SPDF <- organizePolygons(
SPDF,
uniqueID = 'stateFIPS',
sumColumns = c('landArea', 'waterArea')
)
if ( !cleangeo::clgeo_IsValid(SPDF) ) {
SPDF <- cleangeo::clgeo_Clean(SPDF, verbose = TRUE)
}
message("Saving full resolution version...\n")
assign(datasetName, SPDF)
save(list = c(datasetName), file = paste0(dataDir, '/', datasetName, '.rda'))
rm(list = datasetName)
if ( simplify ) {
message("Simplifying to 5%...\n")
SPDF_05 <- rmapshaper::ms_simplify(SPDF, 0.05)
SPDF_05@data$rmapshaperid <- NULL
if ( !cleangeo::clgeo_IsValid(SPDF_05) ) {
SPDF_05 <- cleangeo::clgeo_Clean(SPDF_05)
}
datasetName_05 <- paste0(datasetName, "_05")
message("Saving 5% version...\n")
assign(datasetName_05, SPDF_05)
save(list = datasetName_05, file = paste0(dataDir,"/", datasetName_05, '.rda'))
rm(list = c("SPDF_05",datasetName_05))
message("Simplifying to 2%...\n")
SPDF_02 <- rmapshaper::ms_simplify(SPDF, 0.02)
SPDF_02@data$rmapshaperid <- NULL
if ( !cleangeo::clgeo_IsValid(SPDF_02) ) {
SPDF_02 <- cleangeo::clgeo_Clean(SPDF_02)
}
datasetName_02 <- paste0(datasetName, "_02")
message("Saving 2% version...\n")
assign(datasetName_02, SPDF_02)
save(list = datasetName_02, file = paste0(dataDir,"/", datasetName_02, '.rda'))
rm(list = c("SPDF_02",datasetName_02))
message("Simplifying to 1%...\n")
SPDF_01 <- rmapshaper::ms_simplify(SPDF, 0.01)
SPDF_01@data$rmapshaperid <- NULL
if ( !cleangeo::clgeo_IsValid(SPDF_01) ) {
SPDF_01 <- cleangeo::clgeo_Clean(SPDF_01)
}
datasetName_01 <- paste0(datasetName, "_01")
message("Saving 1% version...\n")
assign(datasetName_01, SPDF_01)
save(list = datasetName_01, file = paste0(dataDir,"/", datasetName_01, '.rda'))
rm(list = c("SPDF_01",datasetName_01))
}
unlink(filePath, force = TRUE)
unlink(dsnPath, recursive = TRUE, force = TRUE)
return(invisible(datasetName))
} |
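# Typical use (hedged): convertUSCensusStates() reads getSpatialDataDir(),
# so a writable spatial data directory must be set first; the calls below
# are illustrative and download the 2019 cartographic boundary shapefile.
# setSpatialDataDir("~/Data/Spatial")
# convertUSCensusStates()   # writes USCensusStates plus _05/_02/_01 .rda files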
erodibilityRisk=function(x){
erodClass=ifelse(x>0.6,5,ifelse(x>0.3,4,ifelse(x>0.15,3,ifelse(x>0.075,2,1))))
return(erodClass)
} |
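# Illustration (made-up K-factor values spanning all five classes):
# erodibilityRisk() maps each value to a risk class, 1 (low) to 5 (very high).
if (interactive()) local({
  k_factors <- c(0.05, 0.10, 0.20, 0.40, 0.70)
  stopifnot(identical(erodibilityRisk(k_factors), c(1, 2, 3, 4, 5)))
})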
sim.multilevel <- function(nvar=9,ngroups=4,ncases=16,rwg,rbg,eta) {
e.wg <- eigen(rwg)
v.wg <- pmax(e.wg$values,0)
etabg <- sqrt(1-eta^2)
e.bg <- eigen(rbg)
v.bg <- pmax(e.bg$values,0)
wg<- matrix(rnorm(nvar*ncases),ncases)
wg <- scale(wg)
wg <- t(e.wg$vectors %*% sqrt(diag(v.wg)) %*% t(wg))
bg <- matrix(rnorm(nvar*ngroups),ngroups)
bg <- scale(bg)
bg <- e.bg$vectors %*% sqrt(diag(v.bg)) %*% t(bg)
bg <- matrix(rep(bg, (ncases/ngroups)),nrow=ncases,byrow=TRUE)
gr <- rep((1:ngroups),(ncases/ngroups))
XY <- wg %*% diag(eta^2) + bg %*% diag(etabg^2)
XY <- cbind(gr,XY)
colnames(XY) <- c("Group",paste("V",1:nvar,sep=""))
result <- list(wg=wg,bg=bg,xy=XY)
}
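# Worked example (all inputs illustrative): 16 cases in 4 groups, identity
# within-group structure, exchangeable between-group structure, and an equal
# within/between variance split (eta^2 = 1 - eta^2 = 0.5 per variable).
if (interactive()) local({
  set.seed(42)
  rwg <- diag(9)
  rbg <- matrix(0.3, 9, 9); diag(rbg) <- 1
  ml <- sim.multilevel(nvar = 9, ngroups = 4, ncases = 16,
                       rwg = rwg, rbg = rbg, eta = rep(sqrt(0.5), 9))
  str(ml$xy)   # "Group" column plus V1..V9
})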
"sim.multi" <-
function(n.obs=4,nvar = 2,nfact=2, ntrials=96,days=16,mu=0,sigma=1,fact=NULL,loading=.9,phi=0,phi.i = NULL,beta.i=0,mu.i=0,sigma.i = 1,sin.i=0,cos.i=0,AR1=0,f.i=NULL,plot=TRUE) {
if(missing(n.obs)) n.obs=4
X <- list()
Xjk <- matrix(NA,ncol=nvar +nfact,nrow=ntrials)
if(missing(mu) ) mu <- rep(0,nvar)
if(missing(sigma)) sigma <- rep(1,nvar)
if(missing(beta.i)) { beta.i <- matrix(0,ncol=nvar,nrow=n.obs) } else {
if(length(beta.i) < n.obs) {beta.i <- matrix(beta.i,ncol=nvar,nrow=n.obs,byrow=TRUE) } }
if(missing(mu.i)) mu.i <- matrix(0,ncol=nvar,nrow=n.obs)
if(missing(sigma.i)) {sigma.i <- matrix(1,ncol=nvar,nrow=n.obs)} else {
if(length(sigma.i) < n.obs) {sigma.i <- matrix(sigma.i,ncol =nvar,nrow=n.obs,byrow=TRUE)}
}
if(missing(sin.i)) {sin.i <- matrix(0,ncol=nvar,nrow=n.obs)} else {
 if (length(sin.i) < n.obs) {sin.i <- matrix(sin.i,ncol=nvar,nrow=n.obs,byrow=TRUE) }
 }
if(missing(cos.i)) {cos.i <- matrix(0,ncol=nvar,nrow=n.obs) } else {
 if (length(cos.i) < n.obs) {cos.i <- matrix(cos.i,ncol=nvar,nrow=n.obs,byrow=TRUE) }
 }
if(missing(AR1)) {AR1 <- matrix(0,ncol=nvar,nrow=n.obs) } else {
 if (length(AR1) < n.obs) {AR1 <- matrix(AR1,ncol=nfact,nrow=n.obs,byrow=TRUE) }
 }
if(is.null(phi)) {phi <-diag(1,nfact) } else {phi <- matrix(phi,ncol=nfact,nrow=nfact)
diag(phi) <- 1}
if(!is.null(phi.i)) {if(length(phi.i) < n.obs) {phi.i <- rep(phi.i,n.obs/length(phi.i))} }
if(nfact > 1) {
if(is.null(fact)) {
fact <- matrix(0,nrow=nvar,ncol=nfact)
fact[((round(row(fact)/nvar))+1) == col(fact)] <- loading
fact<- (fact %*% phi )}} else { fact <- matrix(loading,ncol=1,nrow=nvar) }
if(is.null(f.i)) { f.i <- list()
for (i in 1:n.obs) {
f.i[[i]] <- fact
}
}
trials.day <- ntrials/days
hours <- 24/trials.day
time <- seq(hours,days * trials.day*hours,hours)
t.radian <- time * pi /12
for (i in 1:n.obs) {
xij <- rnorm((nvar + nfact),mu,sigma)
for(j in 1:nfact) {
error <- rnorm(ntrials,mu.i[i,j],sigma.i[i,j])
lagerror <- c(0, error[1:(ntrials-1)])
Xjk[,j] <- xij[j] + mu[j] +beta.i[i,j] *time/ntrials + sin(t.radian)*sin.i[i,j] + cos(t.radian)*cos.i[i,j] + error + AR1[i,j] * lagerror
}
if(is.null(phi.i)) {phi.ind <- diag(1,nfact) } else {phi.ind <- matrix(phi.i[i],nfact,nfact)
diag(phi.ind) <- 1}
Xjk[,1:nfact] <- Xjk [,1:nfact] %*% phi.ind
for(k in 1:nvar) {
uniq <- sqrt(1 - sum(f.i[[i]][k,]^2))
uniq.err <- rnorm(ntrials,0,uniq)
score <- 0
for (j in 1:nfact) {
score <- score + Xjk[,j] * f.i[[i]][k,j]
}
Xjk[,nfact + k ] <- score + uniq.err
}
X[[i]] <- Xjk
}
DV <- unlist(X)
dv.a <- array(DV,dim=c(ntrials,nvar+ nfact,n.obs))
dv.m <-NULL
for(i in 1:(nvar+nfact)) { dv.m <- cbind(dv.m,as.vector(dv.a[,i,])) }
dv.df <- data.frame(dv.m,time = rep(time,n.obs),id=rep(1:n.obs,each=ntrials))
colnames(dv.df)[1:(nfact+nvar)] <- c(paste0("F",1:nfact),paste0("V",1:nvar))
if(plot) {
IV <- NULL
vars <- c(paste0("F",1:nfact),paste0("V",1:nvar))
select <- rep(NA,nvar * ntrials * n.obs)
kount <- ntrials*nfact
for(i in 1:n.obs) {
select[(1:(ntrials*nvar)+(i-1) * ntrials *nvar)] <- kount + 1:(ntrials*nvar)
kount <- kount + ntrials *( nvar+nfact)
}
vars <- paste0("V",1:nvar)
X.df <- data.frame(DV = DV[select], time=rep(time,(n.obs*(nvar))),id = rep(1:n.obs,each=(ntrials*(nvar))),IV = rep(rep(vars,each=ntrials),n.obs) )
plot1<- xyplot(DV ~ time | id, group=IV, data=X.df, type = "b",as.table=TRUE,strip=strip.custom(strip.names=TRUE,strip.levels=TRUE),col=c("blue","red","black","grey"))
print(plot1) }
invisible(dv.df)
}
|
.gaplessCircular <- function(th) {
md <- atan2(sum(sin(th)), sum(cos(th)))
((th - md + pi) %% (2*pi)) - pi + md
}
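# Illustration: angles clustered around +/-pi are re-centred on their
# circular mean, removing the artificial gap at the branch cut.
if (interactive()) local({
  th <- c(-3.1, 3.1, 3.0, -3.0)   # radians, straddling +/-pi
  .gaplessCircular(th)            # all values now lie close to +pi
})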
.transform2Real <- function(theta, lb, ub,
theta_types = rep("real", ncol(theta))) {
theta_t <- theta
transTypes <- character(ncol(theta))
cn <- colnames(theta)
names(theta_types) <- names(transTypes) <- cn
is_simplex_theta <- theta_types == "simplex"
if (any(is_simplex_theta)) {
simplex_theta <- theta[, is_simplex_theta, drop = FALSE]
simdim <- ncol(simplex_theta)
cs <- cbind(0L, t(apply(simplex_theta, 1L, cumsum))[, -simdim, drop = FALSE])
z_k <- (simplex_theta / (1L - cs))
y_k <- log(z_k) - log(1L - z_k) + matrix(log(simdim:1L),
nrow(theta), simdim, byrow = TRUE)
theta_t[, is_simplex_theta] <- y_k
transTypes[is_simplex_theta] <- "simplex"
}
for (i in seq_len(ncol(theta))) {
p <- cn[i]
if (theta_types[[p]] == "circular") {
transTypes[[p]] <- "circular"
theta_t[,i] <- .gaplessCircular(theta[,i])
} else if (theta_types[[p]] == "real") {
if (any(theta[,i] < lb[[p]])) {
stop("Parameter values (samples) cannot be smaller than lb: ", p,
call. = FALSE)
}
if (any(theta[,i] > ub[[p]])) {
stop("Parameter values (samples) cannot be larger than ub: ", p,
call. = FALSE)
}
if (lb[[p]] < ub[[p]] && is.infinite(lb[[p]]) && is.infinite(ub[[p]])) {
transTypes[[p]] <- "unbounded"
theta_t[,i] <- theta[,i]
} else if (lb[[p]] < ub[[p]] && is.finite(lb[[p]]) && is.infinite(ub[[p]])) {
transTypes[[p]] <- "lower-bounded"
theta_t[,i] <- log(theta[,i] - lb[[p]])
} else if (lb[[p]] < ub[[p]] && is.infinite(lb[[p]]) && is.finite(ub[[p]])) {
transTypes[[p]] <- "upper-bounded"
theta_t[,i] <- log(ub[[p]] - theta[,i])
} else if (lb[[p]] < ub[[p]] && is.finite(lb[[p]]) && is.finite(ub[[p]])) {
transTypes[[p]] <- "double-bounded"
theta_t[,i] <- qnorm( (theta[,i] - lb[[p]])/(ub[[p]] - lb[[p]]) )
} else if (theta_types[p] != "simplex")
stop(paste("Could not transform parameters, possibly due to invalid",
"lower and/or upper prior bounds."))
}
}
colnames(theta_t) <- paste0("trans_", colnames(theta))
return(list(theta_t = theta_t, transTypes = transTypes))
}
.invTransform2Real <- function(theta_t, lb, ub,
theta_types = rep("real", ncol(theta))) {
theta <- theta_t
colnames(theta) <- stringr::str_sub(colnames(theta), 7)
cn <- colnames(theta)
names(theta_types) <- cn
is_simplex_theta <- theta_types == "simplex"
if (any(is_simplex_theta)) {
simplex_theta <- theta_t[, is_simplex_theta, drop = FALSE]
simdim <- ncol(simplex_theta)
logitz <- simplex_theta - matrix(log(simdim:1L),
nrow(theta), simdim, byrow = TRUE)
z_k <- exp(logitz) / (1 + exp(logitz))
x_k <- z_k
if (simdim > 1) {
for (k in 2:simdim) {
x_k[, k] <- (1 - rowSums(x_k[, 1:(k - 1), drop = FALSE])) * z_k[, k]
}
}
theta[, is_simplex_theta] <- x_k
}
for (i in seq_len(ncol(theta_t))) {
p <- cn[i]
if (theta_types[[p]] == "real") {
if (lb[[p]] < ub[[p]] && is.infinite(lb[[p]]) && is.infinite(ub[[p]])) {
theta[,i] <- theta_t[,i]
} else if (lb[[p]] < ub[[p]] && is.finite(lb[[p]]) && is.infinite(ub[[p]])) {
theta[,i] <- exp(theta_t[,i]) + lb[[p]]
} else if (lb[[p]] < ub[[p]] && is.infinite(lb[[p]]) && is.finite(ub[[p]])) {
theta[,i] <- ub[[p]] - exp(theta_t[,i])
} else if (lb[[p]] < ub[[p]] && is.finite(lb[[p]]) && is.finite(ub[[p]])) {
theta[,i] <- pnorm(theta_t[,i])*(ub[[p]] - lb[[p]]) + lb[[p]]
} else {
stop("Could not transform parameters, possibly due to invalid lower and/or upper
prior bounds.")
}
}
}
return(theta)
}
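# Round-trip sanity check (toy bounds; the parameter name "p" is made up,
# and stringr must be installed because .invTransform2Real() uses it): a
# double-bounded value is mapped to the real line via qnorm() and recovered
# via pnorm().
if (interactive() && requireNamespace("stringr", quietly = TRUE)) local({
  theta <- cbind(p = runif(5, 0.2, 0.8))
  lb <- c(p = 0); ub <- c(p = 1)
  tt <- .transform2Real(theta, lb, ub)
  stopifnot(isTRUE(all.equal(unname(theta),
                             unname(.invTransform2Real(tt$theta_t, lb, ub)))))
})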
.logJacobian <- function(theta_t, transTypes, lb, ub) {
logJ <- matrix(nrow = nrow(theta_t), ncol = ncol(theta_t))
cn <- stringr::str_sub(colnames(theta_t), 7)
is_simplex_theta <- transTypes == "simplex"
if (any(is_simplex_theta)) {
simplex_theta <- theta_t[, is_simplex_theta, drop = FALSE]
simdim <- ncol(simplex_theta)
logitz <- simplex_theta - matrix(log(simdim:1L),
nrow(theta_t), simdim, byrow = TRUE)
z_k <- exp(logitz) / (1 + exp(logitz))
x_k <- z_k
sum_x_k <- matrix(nrow = nrow(theta_t), ncol = simdim)
sum_x_k[, 1] <- 1
if (simdim > 1) {
for (k in 2:simdim) {
rsx <- rowSums(x_k[, 1:(k - 1), drop = FALSE])
x_k[, k] <- (1 - rsx) * z_k[, k]
sum_x_k[, k] <- (1 - rsx)
}
}
logJ[, is_simplex_theta] <- log(z_k) + log(1 - z_k) + log(sum_x_k)
}
for (i in seq_len( ncol(theta_t) )) {
p <- cn[i]
if (transTypes[[p]] == "unbounded") {
logJ[,i] <- 0
} else if (transTypes[[p]] == "lower-bounded") {
logJ[,i] <- theta_t[,i]
} else if (transTypes[[p]] == "upper-bounded") {
logJ[,i] <- theta_t[,i]
} else if (transTypes[[p]] == "double-bounded") {
logJ[,i] <- log(ub[[p]] - lb[[p]]) + dnorm(theta_t[,i], log = TRUE)
} else if (transTypes[[p]] == "circular") {
logJ[,i] <- 0
}
}
return(.rowSums(logJ, m = nrow(logJ), n = ncol(logJ)))
}
.split_matrix <- function(matrix, cores) {
out <- vector("list", cores)
borders <- ceiling(seq(from = 0, to = nrow(matrix), length.out = cores + 1))
for (i in seq_len(cores)) {
out[[i]] <- matrix[(borders[i] + 1):borders[i + 1], , drop = FALSE]
}
out
}
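# Illustration: ten rows split into three contiguous, near-equal chunks,
# as used to farm samples out to parallel workers.
if (interactive()) local({
  chunks <- .split_matrix(matrix(1:20, nrow = 10), cores = 3)
  vapply(chunks, nrow, integer(1))   # 4 3 3
})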
.run.iterative.scheme <- function(q11, q12, q21, q22, r0, tol, L,
method, maxiter, silent,
criterion, neff) {
if (method == "normal") {
l1 <- q11 - q12
l2 <- q21 - q22
} else if (method == "warp3") {
l1 <- -log(2) + determinant(L)$modulus + (q11 - q12)
l2 <- -log(2) + determinant(L)$modulus + (q21 - q22)
}
lstar <- median(l1)
n.1 <- length(l1)
n.2 <- length(l2)
s1 <- neff/(neff + n.2)
s2 <- n.2/(neff + n.2)
r <- r0
r_vals <- r
logml <- log(r) + lstar
logml_vals <- logml
criterion_val <- 1 + tol
e <- as.brob( exp(1) )
i <- 1
while (i <= maxiter && criterion_val > tol) {
if (! silent)
cat(paste0("Iteration: ", i, "\n"))
rold <- r
logmlold <- logml
numi <- e^(l2 - lstar)/(s1 * e^(l2 - lstar) + s2 * r)
deni <- 1/(s1 * e^(l1 - lstar) + s2 * r)
if (any(is.infinite(as.numeric(numi))) ||
any(is.infinite(as.numeric((deni))))) {
warning("Infinite value in iterative scheme, returning NA.\n Try rerunning with more samples.", call. = FALSE)
return(list(logml = NA, niter = i))
}
r <- (n.1/n.2) * sum(numi)/sum(deni)
r_vals <- c(r_vals, r)
logml <- log(r) + lstar
logml_vals <- c(logml_vals, logml)
criterion_val <- switch(criterion, "r" = abs((r - rold)/r),
"logml" = abs((logml - logmlold)/logml))
i <- i + 1
}
if (i >= maxiter) {
return(list(logml = NA, niter = i-1, r_vals = r_vals))
}
return(list(logml = logml, niter = i-1))
} |
context("Unit tests for exception handling if fit() or bootstrap() was not run yet; uses PLR")
library("mlr3learners")
lgr::get_logger("mlr3")$set_threshold("warn")
on_cran = !identical(Sys.getenv("NOT_CRAN"), "true")
if (on_cran) {
test_cases = expand.grid(
learner = "regr.lm",
dml_procedure = "dml1",
score = "IV-type",
set_params = FALSE,
n_folds = c(4),
n_rep = c(1),
apply_cross_fitting = c(TRUE),
stringsAsFactors = FALSE)
} else {
test_cases = expand.grid(
learner = "regr.cv_glmnet",
dml_procedure = "dml1",
score = c("IV-type", "partialling out"),
set_params = c(TRUE, FALSE),
n_folds = c(1, 5),
n_rep = c(1, 2),
apply_cross_fitting = c(TRUE, FALSE),
stringsAsFactors = FALSE)
}
test_cases[".test_name"] = apply(test_cases, 1, paste, collapse = "_")
patrick::with_parameters_test_that("Unit tests for exception handling of PLR:",
.cases = test_cases, {
learner_pars = get_default_mlmethod_plr(learner)
n_rep_boot = 498
data_ml = double_ml_data_from_data_frame(data_plr$df,
y_col = "y",
d_cols = c("d", "X1"))
msg = "DoubleML is an abstract class that can't be initialized."
expect_error(DoubleML$new(), regexp = msg)
if ((n_folds > 1 & !apply_cross_fitting) |
(n_rep > 1 & !apply_cross_fitting) |
(n_rep > 1 & n_folds == 1 & apply_cross_fitting)) {
if (n_folds > 2 & !apply_cross_fitting) {
msg = "Estimation without cross-fitting not supported for n_folds > 2."
} else {
msg = "Assertion on 'i' failed: Element 1 is not <= 1."
}
expect_error(DoubleMLPLR$new(
data = data_ml,
ml_g = learner_pars$mlmethod$mlmethod_g,
ml_m = mlr3::lrn(learner_pars$mlmethod$mlmethod_m),
dml_procedure = dml_procedure,
n_folds = n_folds,
n_rep = n_rep,
score = score,
apply_cross_fitting = apply_cross_fitting),
regexp = msg)
} else {
double_mlplr_obj = DoubleMLPLR$new(
data = data_ml,
ml_g = learner_pars$mlmethod$mlmethod_g,
ml_m = mlr3::lrn(learner_pars$mlmethod$mlmethod_m),
dml_procedure = dml_procedure,
n_folds = n_folds,
n_rep = n_rep,
score = score,
apply_cross_fitting = apply_cross_fitting)
if (set_params) {
double_mlplr_obj$set_ml_nuisance_params(
learner = "ml_m",
treat_var = "d",
params = learner_pars$params$params_m)
double_mlplr_obj$set_ml_nuisance_params(
learner = "ml_g",
treat_var = "d",
params = learner_pars$params$params_g)
}
utils::capture.output(double_mlplr_obj$summary(), file = NULL)
msg = "Apply fit\\(\\) before bootstrap\\(\\)."
expect_error(double_mlplr_obj$bootstrap(method = "normal", n_rep_boot = n_rep_boot),
regexp = msg)
double_mlplr_obj$fit()
utils::capture.output(double_mlplr_obj$print(), file = NULL)
utils::capture.output(expect_is(double_mlplr_obj$summary(), "matrix"), file = NULL)
msg = "'level' must be > 0 and < 1."
expect_error(double_mlplr_obj$confint(level = 1.2),
regexp = msg)
msg = "Multiplier bootstrap has not yet been performed. First call bootstrap\\(\\) and then try confint\\(\\) again."
expect_error(double_mlplr_obj$confint(joint = TRUE, level = 0.95),
regexp = msg)
}
}
) |
`dsconf` <-
function(x,conf,confconf=NULL){
myconf<-function(x,conf){
down=cbind(x[,1],x[,3]);
down<-down[order(down[,1]),];
down[,2]=cumsum(down[,2]);
up=cbind(x[,2],x[,3]);
up<-up[order(up[,1]),];
up[,2]=cumsum(up[,2]);
y=numeric();
y[1]=down[min(which(down[,2]>=conf-1E-12)),1];
y[2]=up[min(which(up[,2]>=conf-1E-12)),1];
dsconf=y;
}
calcwilks<-function(level,n,confidence){
na <- floor(n*level)
tmpup <- cumsum(dbinom(0:(n-na),n,1-level))
tmpdown <- cumsum(dbinom(0:(na-1),n,level))
indxup <- length(tmpup [tmpup <=(1-confidence)])
indxdown <- length(tmpdown [tmpdown <=(1-confidence)])
if(indxup == 0 || indxdown == 0){
print("Warning, too little samples for calculating upper conf");
return(c(1/n,1))
}
rup <- (1+n-indxup )/n
rdown<- indxdown /n
r=c(rdown, rup)
}
if (is.matrix(x)==FALSE){
x=matrix(x,ncol=3);
}
y=myconf(x,conf)
if(!is.null(confconf)){
n=dim(x)[1];
confwilks=calcwilks(conf,n,confconf);
confdown=myconf(x,confwilks[1]);
confup=myconf(x,confwilks[2]);
y=rbind(y,confup,confdown)
}
y
} |
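# Toy Dempster-Shafer structure (illustrative): each row is an interval
# [lower, upper] with its mass, and the masses sum to 1. dsconf() returns
# the bounds at which the cumulative mass first reaches `conf`.
if (interactive()) local({
  bodies <- matrix(c(0, 1, 0.5,
                     1, 3, 0.3,
                     2, 4, 0.2), ncol = 3, byrow = TRUE)
  dsconf(bodies, conf = 0.9)   # c(2, 4)
})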
multRegimeJointMCMC <- function(X_BM, X_Mk, phy, start, prior, start_Q, start_mapped.edge, prior_Mk, par_prior_Mk, Mk_model, root_Mk, smap_limit, gen, v, w_sd, w_mu, w_q, prop, dir, outname, IDlen, regimes, traits, save.handle, continue=NULL, add.gen=NULL, ID=NULL, post_seq){
p <- length( start[[2]] )
if( length( v ) > 1 ){
        if( !length( v ) == p ) stop( "Length of v needs to be 1 or equal to the number of regimes." )
}
if( length( v ) == 1){
v <- rep(v, times=p)
}
X_BM <- t(X_BM)
k <- nrow(X_BM)
mcmc.par <- list()
mcmc.par$v <- v
mcmc.par$w_sd <- w_sd
mcmc.par$w_mu <- w_mu
mcmc.par$w_q <- w_q
mcmc.par$prop <- prop
if( !is.null(continue) ){
mcmc_file_name <- file.path(dir, paste(outname,".", ID, ".mcmc",sep=""))
Q_mcmc_file_name <- file.path(dir, paste(outname,".",ID,".Q_mcmc",sep=""))
log_file_name <- file.path(dir, paste(outname,".", ID, ".log",sep=""))
write_header <- 0
gen <- add.gen
} else{
new.ID <- paste( sample(x=1:9, size=IDlen, replace=TRUE), collapse="")
mcmc_file_name <- file.path(dir, paste(outname,".",new.ID,".mcmc",sep=""))
Q_mcmc_file_name <- file.path(dir, paste(outname,".",new.ID,".Q_mcmc",sep=""))
log_file_name <- file.path(dir, paste(outname,".",new.ID,".log",sep=""))
write_header <- 1
}
mapped.edge <- start_mapped.edge
anc <- phy$edge[,1]
des <- phy$edge[,2]
edge_mat <- phy$edge
nodes <- unique(anc)
node.to.tip <- which( tabulate( anc[which(des <= length(phy$tip.label))] ) == 2 )
node.to.node <- which( tabulate( anc[which(des > length(phy$tip.label))] ) == 2 )
node.to.tip.node <- unique( anc )[!unique( anc ) %in% c(node.to.node, node.to.tip)]
names(anc) <- rep(1, times=length(anc))
names(anc)[which(anc %in% node.to.node)] <- 2
names(anc)[which(anc %in% node.to.tip.node)] <- 3
names_anc <- as.numeric( names(anc) )
if( is.null(continue) ){
cat( paste("Start MCMC run ", outname, ".", new.ID, " with ", gen, " generations.\n", sep="") )
} else{
if( continue == "continue" ){
cat( paste("Continue previous MCMC run ", outname, ".", ID, " for ", add.gen, " generations for a total of ", gen, " generations.\n", sep="") )
gen <- add.gen
new.ID <- ID
}
if( continue == "add.gen" ){
cat( paste("Adding ", add.gen, " generations to previous MCMC run ", outname, ".", ID, "\n", sep="") )
gen <- add.gen
new.ID <- ID
}
}
if( save.handle ){
out <- list(k = k, p = p, ID = new.ID, dir = dir, outname = outname, trait.names = traits
, regime.names = regimes, data = t(X_BM), data_Mk= t(X_Mk), phy = phy, prior = prior
, start = start, start_Q = start_Q, prior_Mk = prior_Mk, par_prior_Mk = par_prior_Mk
, gen = gen, mcmc.par = mcmc.par)
class( out ) <- "ratematrix_multi_mcmc"
saveRDS(out, file = file.path(dir, paste(outname,".",new.ID,".mcmc.handle.rds",sep="")) )
}
startR.list <- lapply(1:p, function(x) rebuild.cov(r=cov2cor(start$matrix[[x]]), v=start$sd[[x]]^2) )
startR <- array(dim=c(k, k, p))
startCorr <- array(dim=c(k, k, p))
startvar <- matrix(nrow=k, ncol=p)
for( i in 1:p){
startR[,,i] <- startR.list[[i]]
startCorr[,,i] <- start$matrix[[i]]
startvar[,i] <- start$sd[[i]]^2
}
den_mu <- prior$pars$den.mu
par_mu <- prior$pars$par.mu
den_sd <- prior$pars$den.sd
par_sd <- prior$pars$par.sd
if( prior$pars$unif.corr ){
sigma.mat <- diag(nrow=k)
sigma_array <- array(dim=c(k, k, p))
for( i in 1:p){
sigma_array[,,i] <- sigma.mat
}
nu <- rep(k+1, times=p)
} else{
        if( length(prior$pars$Sigma) !=p ) stop( "Length of Sigma needs to be equal to the number of regimes." )
sigma_array <- sapply(prior$pars$Sigma, identity, simplify="array")
nu <- prior$pars$nu
        if( length(nu) !=p ) stop( "Length of nu needs to be equal to the number of regimes." )
}
runRatematrixMCMC_jointMk_C(X=X_BM, datMk=X_Mk, k=k, p=p, nodes=nodes, des=des
, anc=anc, names_anc=names_anc, n_tips=Ntip(phy)
, mapped_edge=start_mapped.edge, edge_mat=edge_mat
, n_nodes=Nnode(phy), Q=start_Q, w_Q=w_q
, model_Q=Mk_model, root_type=root_Mk, den_Q = prior_Mk
, par_prior_Q=par_prior_Mk, R=startR, mu=start$root
, sd=sqrt(startvar), Rcorr=startCorr, w_mu=w_mu
, par_prior_mu=par_mu, den_mu=den_mu, w_sd=w_sd
, par_prior_sd=par_sd, den_sd=den_sd
, nu=nu, sigma=sigma_array, v=v, log_file=log_file_name
, mcmc_file=mcmc_file_name, Q_mcmc_file=Q_mcmc_file_name
, par_prob = prop, gen = gen, post_seq = post_seq
, write_header = write_header, sims_limit = smap_limit)
cat( paste("Finished MCMC run ", outname, ".", new.ID, "\n", sep="") )
out <- list(k = k, p = p, ID = new.ID, dir = dir, outname = outname, trait.names = traits
, regime.names = regimes, data = t(X_BM), data_Mk = t(X_Mk), model_Mk = Mk_model, root_Mk = root_Mk
, phy = phy, prior = prior, start = start, start_Q = start_Q, prior_Mk = prior_Mk
, par_prior_Mk = par_prior_Mk, gen = gen, mcmc.par = mcmc.par)
class( out ) <- "ratematrix_multi_mcmc"
return( out )
} |
plotbg <-
function (map = "kola.background", which.map = c(1, 2, 3, 4),
map.col = c(5, 1, 3, 4), map.lwd = c(2, 1, 2, 1), add.plot = FALSE, ...)
{
all = get(map)
xrange = c(min(all[[1]][, 1], na.rm = TRUE), max(all[[1]][,
1], na.rm = TRUE))
yrange = c(min(all[[1]][, 2], na.rm = TRUE), max(all[[1]][,
2], na.rm = TRUE))
if (!add.plot) {
plot(1, 1, xlim = xrange, ylim = yrange, ...)
}
for (i in 1:length(which.map)) {
lines(all[[which.map[i]]], col = map.col[i], lwd = map.lwd[i])
}
} |
mapMutationTypeToMutationClass <- function(mutation.type.vec,
mutation.type.to.class.df = NA){
  # is.na() on a data frame returns a matrix, which breaks if(); test for the scalar NA default explicitly
  if(length(mutation.type.to.class.df) == 1 && is.na(mutation.type.to.class.df)){
Map.df <- mutation.table.df
} else {
Map.df <- mutation.type.to.class.df
}
mutation.type.vec <- as.character(mutation.type.vec)
  unknown.vc <- unique(mutation.type.vec[!mutation.type.vec %in% Map.df[, "Mutation_Type"]])
  if(length(unknown.vc) > 0){
    warning("Unknown variant classification: ", paste(unknown.vc, collapse =", "))
}
mutation.class.vec <- rep(NA, length(mutation.type.vec))
mutation.class.vec <- Map.df[match(mutation.type.vec, Map.df$Mutation_Type), "Mutation_Class"]
mutation.class.vec
} |
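# Usage sketch with a hypothetical mapping table (when none is supplied,
# the package-internal mutation.table.df is used). Types missing from the
# table map to NA with a warning.
if (interactive()) local({
  map.df <- data.frame(
    Mutation_Type  = c("Missense_Mutation", "Silent"),
    Mutation_Class = c("nonsynonymous", "synonymous"),
    stringsAsFactors = FALSE
  )
  mapMutationTypeToMutationClass(c("Silent", "Missense_Mutation"),
                                 mutation.type.to.class.df = map.df)
  # -> "synonymous" "nonsynonymous"
})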
output$FCE_ECDF_PER_TARGET <- renderPlotly({
render_ecdf_per_target()
})
get_data_FV_ECDF_Single <- reactive({
req(input$FCEECDF.Single.Target)
ftargets <- as.numeric(format_FV(input$FCEECDF.Single.Target))
data <- subset(DATA(), ID %in% input$FCEECDF.Single.Algs)
generate_data.ECDF(data, ftargets, input$FCEECDF.Single.Logx, which = 'by_FV')
})
render_ecdf_per_target <- reactive({
withProgress({
plot_general_data(get_data_FV_ECDF_Single(), 'x', 'mean', 'line',
x_title = "Target Value",
y_title = "Proportion of runs", scale.xlog = input$FCEECDF.Single.Logx,
show.legend = T,
scale.reverse = !attr(DATA()[[1]], 'maximization'))
},
message = "Creating plot")
})
output$FCE_RT_GRID <- renderPrint({
req(input$FCEECDF.Mult.Min, input$FCEECDF.Mult.Max, input$FCEECDF.Mult.Step, length(DATA()) > 0)
rt_min <- input$FCEECDF.Mult.Min %>% as.numeric
rt_max <- input$FCEECDF.Mult.Max %>% as.numeric
rt_step <- input$FCEECDF.Mult.Step %>% as.numeric
req(rt_min <= rt_max, rt_step <= rt_max - rt_min)
data <- DATA()
rt <- get_runtimes(data)
seq_RT(rt, from = rt_min, to = rt_max, by = rt_step) %>% cat
})
get_data_FV_ECDF_AGGR <- reactive({
req(input$FCEECDF.Mult.Min, input$FCEECDF.Mult.Max, input$FCEECDF.Mult.Step, length(DATA()) > 0)
fstart <- format_FV(input$FCEECDF.Mult.Min) %>% as.numeric
fstop <- format_FV(input$FCEECDF.Mult.Max) %>% as.numeric
fstep <- format_FV(input$FCEECDF.Mult.Step) %>% as.numeric
data <- subset(DATA(), ID %in% input$FCEECDF.Mult.Algs)
targets <- seq_RT(get_funvals(data), fstart, fstop, fstep)
generate_data.ECDF(data, targets, input$FCEECDF.Mult.Logx, which = 'by_FV')
})
render_FV_ECDF_AGGR <- reactive({
withProgress({
plot_general_data(get_data_FV_ECDF_AGGR(), 'x', 'mean', 'line',
x_title = "Target Value",
y_title = "Proportion of (run, target) pairs",
scale.xlog = input$FCEECDF.Mult.Logx,
scale.reverse = !attr(DATA()[[1]], 'maximization'), show.legend = T)
},
message = "Creating plot")
})
output$FCEECDF.Mult.Download <- downloadHandler(
filename = function() {
eval(FIG_NAME_FV_ECDF_AGGR)
},
content = function(file) {
save_plotly(render_FV_ECDF_AGGR(), file)
},
contentType = paste0('image/', input$FCEECDF.Mult.Format)
)
output$FCE_ECDF_AGGR <- renderPlotly({
render_FV_ECDF_AGGR()
})
get_data_FV_AUC <- reactive({
req(input$FCEECDF.AUC.Min, input$FCEECDF.AUC.Max, input$FCEECDF.AUC.Step, length(DATA()) > 0)
rt_min <- input$FCEECDF.AUC.Min %>% as.numeric
rt_max <- input$FCEECDF.AUC.Max %>% as.numeric
rt_step <- input$FCEECDF.AUC.Step %>% as.numeric
data <- subset(DATA(), ID %in% input$FCEECDF.AUC.Algs)
targets <- seq_RT(get_runtimes(data), rt_min, rt_max, rt_step, length.out = 10)
generate_data.AUC(data, targets, which = 'by_FV')
})
render_FV_AUC <- reactive({
withProgress({
plot_general_data(get_data_FV_AUC(), 'x', 'AUC', 'radar')
},
message = "Creating plot")
})
output$FCEECDF.AUC.Download <- downloadHandler(
filename = function() {
eval(FIG_NAME_FV_AUC)
},
content = function(file) {
save_plotly(render_FV_AUC(), file)
},
contentType = paste0('image/', input$FCEECDF.AUC.Format)
)
output$FCE_AUC <- renderPlotly({
render_FV_AUC()
}) |
"alerce_data" |
V.spc <- function (obj, ...)
{
if (! inherits(obj, "spc")) stop("argument must be object of class 'spc'")
attr(obj, "V")
} |
gMLE.nn <-
function(value, se, fixed = FALSE, method = c("DL","SJ","REML","MoM")){
method <- match.arg(method)
out <- list()
nn1 <- length(value)
var.v <- se^2
fe.weight <- 1/var.v
pop.est.fe <- (fe.weight %*% value) / sum( fe.weight )
c.value <- sum(fe.weight) - (sum (fe.weight^2 ) / sum(fe.weight) )
Q.value <- sum(fe.weight * (value - as.vector(pop.est.fe))^2 )
tau.sq.DL <- max(0,( Q.value - (length(value) - 1) ) / c.value )
if(fixed == TRUE){
out$mu.hat <- (fe.weight %*% value) / sum (fe.weight)
} else {
switch(method,
"DL" = { out$tau.sq <- tau.sq.DL
hv.weight <- 1/ (out$tau.sq + var.v)
out$mu.hat <- (hv.weight %*% value) / sum (hv.weight)
out$estimate <- c(out$mu.hat, out$tau.sq)
out$method <- method
},
"SJ" = { tau.not <- mean( (value - mean(value))^2)
r.hat.i <- var.v / tau.not
v.hat.i <- r.hat.i + 1
theta.hat.v.hat <- sum(value/v.hat.i)/ sum(1/v.hat.i)
out$tau.sq <- sum((value - theta.hat.v.hat)^2/v.hat.i)/(nn1 - 1)
hv.weight <- 1/ (out$tau.sq + var.v)
out$mu.hat <- (hv.weight %*% value) / sum (hv.weight)
out$estimate <- c(out$mu.hat, out$tau.sq)
out$method <- method
},
"REML" = {
nloop = 0
absch = 1
while ( absch > 10^(-5) ) {
nloop = nloop + 1
if ( nloop > 10^5 ) {
stop("tauREML2 via REML method does not converge.")
} else {
tauR2O <- tau.sq.DL
wR <- 1/(var.v + tauR2O)
thetaR <- sum(wR*value) / sum(wR)
tau.sq.DL <- sum(wR^2*(nn1/(nn1-1)*(value-thetaR)^2 - var.v)) / sum(wR^2)
absch <- abs(tau.sq.DL - tauR2O)
}
}
out$tau.sq <- max(0, tau.sq.DL)
hv.weight <- 1/ (out$tau.sq + var.v)
out$mu.hat <- (hv.weight %*% value) / sum (hv.weight)
out$estimate <- c(out$mu.hat, out$tau.sq)
out$method <- "REML"
},
"MoM" = {
hv.weight <- 1/ (tau.sq.DL + var.v)
mu.brn <- (hv.weight %*% value) / sum (hv.weight)
tau.sq.brn <- (sum((value-mu.brn)^2)-((length(value)-1)/length(value))*sum(var.v))/(length(value)-1)
hv.2 <- 1/(tau.sq.brn + var.v)
out$mu.hat <- (hv.2%*%value)/sum(hv.2)
out$tau.sq <- max(0,(sum((value-out$mu.hat)^2)-((length(value)-1)/length(value))*sum(var.v))/(length(value)-1))
out$estimate <- c(out$mu.hat, out$tau.sq)
out$method <- "MoM"
})
}
return(out)
} |
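# Worked example (simulated meta-analysis): eight study effects with known
# standard errors and true between-study variance tau^2 = 0.1; compare the
# DerSimonian-Laird and REML estimates of (mu, tau^2).
if (interactive()) local({
  set.seed(1)
  se    <- runif(8, 0.2, 0.5)
  value <- rnorm(8, mean = 1, sd = sqrt(se^2 + 0.1))
  list(DL   = gMLE.nn(value, se, method = "DL")$estimate,
       REML = gMLE.nn(value, se, method = "REML")$estimate)
})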
stsm_check_y = function(y){
if(!(is.data.frame(y) | is.data.table(y) | stats::is.ts(y))){
stop("y must be a data frame or data table")
}else if(is.data.frame(y) | is.data.table(y)){
y = as.data.table(y)
col_classes = unique(unlist(lapply(colnames(y), function(x){class(y[, c(x), with = FALSE][[1]])})))
if(ncol(y) != 2){
stop("y must have two columns")
}else if(length(col_classes) < 2){
stop("y must habe a date and numeric column")
}else if(!all(col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt", "numeric"))){
stop("y must have a date and numeric column")
}
}
}
stsm_check_exo = function(exo, y){
if(!is.null(exo)){
if(!(is.data.frame(exo) | is.data.table(exo))){
stop("exo must be a data frame or data table")
}else{
exo = as.data.table(exo)
col_classes = unique(unlist(lapply(colnames(exo), function(x){class(exo[, c(x), with = FALSE][[1]])})))
if(nrow(exo) != nrow(y)){
stop("exo and y must have the same number of rows")
}else if(length(col_classes) < 2){
stop("exo must habe a date and numeric column")
}else if(!all(col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt", "numeric"))){
stop("exo must have a date and numeric column")
}
}
}
}
stsm_check_exo_fc = function(exo.fc, n.ahead){
if(!is.null(exo.fc)){
if(!(is.data.frame(exo.fc) | is.data.table(exo.fc))){
stop("exo must be a data frame or data table")
}else{
exo.fc = as.data.table(exo.fc)
if(nrow(exo.fc) != n.ahead){
stop("exo.fc must have the same number of rows as n.ahead")
}else if(!all(unlist(lapply(colnames(exo.fc), function(x){class(exo.fc[, c(x), with = FALSE][[1]])})) %in%
c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt", "numeric", "integer"))){
stop("exo.fc must have a date and numeric column")
}
}
}
}
stsm_format_exo = function(exo_obs, exo_state, dates, range){
if(!is.null(exo_obs)){
col_classes = unlist(lapply(colnames(exo_obs), function(x){class(exo_obs[, c(x), with = FALSE][[1]])}))
date_obs = colnames(exo_obs)[col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt")]
colnames(exo_obs)[colnames(exo_obs) != date_obs] = paste0("obs.", colnames(exo_obs)[colnames(exo_obs) != date_obs])
}
if(!is.null(exo_state)){
col_classes = unlist(lapply(colnames(exo_state), function(x){class(exo_state[, c(x), with = FALSE][[1]])}))
date_state = colnames(exo_state)[col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt")]
colnames(exo_state)[colnames(exo_state) != date_state] = paste0("state.", colnames(exo_state)[colnames(exo_state) != date_state])
}
if(!is.null(exo_obs) & !is.null(exo_state)){
col_classes = unlist(lapply(colnames(exo_obs), function(x){class(exo_obs[, c(x), with = FALSE][[1]])}))
date_obs = colnames(exo_obs)[col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt")]
col_classes = unlist(lapply(colnames(exo_state), function(x){class(exo_state[, c(x), with = FALSE][[1]])}))
date_state = colnames(exo_state)[col_classes %in% c("Date", "yearmon", "POSIXct", "POSIXt", "POSIXlt")]
exo = merge(exo_obs, exo_state, by.x = date_obs, by.y = date_state, all = TRUE)
}else if(!is.null(exo_obs)){
exo = exo_obs
}else if(!is.null(exo_state)){
exo = exo_state
}else{
exo = NULL
}
if(!is.null(exo)){
exo = as.data.table(exo)
exo = merge.data.table(exo, data.table(date = dates), all = TRUE)
exo = exo[, names(which(unlist(exo[, lapply(.SD, is.numeric)]))), with = FALSE]
}
return(exo)
} |
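# Quick check (illustrative; data.table must be attached, matching the
# unqualified is.data.table()/as.data.table() calls above): a two-column
# date/value input passes silently, anything else errors.
if (interactive() && requireNamespace("data.table", quietly = TRUE)) local({
  library(data.table)
  y_ok <- data.frame(date  = seq(as.Date("2020-01-01"), by = "month",
                                 length.out = 12),
                     value = rnorm(12))
  stsm_check_y(y_ok)                    # no error
  # stsm_check_y(data.frame(x = 1:5))   # error: "y must have two columns"
})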
ggplot_scan1 <-
function(x, map, lodcolumn=1, chr=NULL, gap=25,
bgcolor="gray90", altbgcolor="gray85", ...)
{
if(is.data.frame(map) && "index" %in% names(map)) {
return(ggplot_snpasso(x, snpinfo=map,
lodcolumn=lodcolumn, gap=gap, bgcolor=bgcolor,
altbgcolor=altbgcolor, ...))
}
if(!is.list(map)) map <- list(" " = map)
if(!is.null(chr)) {
chri <- match(chr, names(map))
if(any(is.na(chri)))
stop("Chromosomes ", paste(chr[is.na(chri)], collapse=", "), " not found")
x <- qtl2::subset_scan1(x, map, chr)
map <- map[chri]
}
tmp <- qtl2::align_scan1_map(x, map)
x <- tmp$scan1
map <- tmp$map
if(nrow(x) != length(unlist(map)))
stop("nrow(x) [", nrow(x), "] != number of positions in map [",
length(unlist(map)), "]")
if(length(lodcolumn)==0) stop("lodcolumn has length 0")
if(is.character(lodcolumn)) {
tmp <- match(lodcolumn, colnames(x))
if(any(is.na(tmp)))
stop('lodcolumn "', lodcolumn, '" not found')
lodcolumn <- tmp
}
if(any(lodcolumn < 1) || any(lodcolumn > ncol(x)))
stop("lodcolumn [", lodcolumn, "] out of range (should be in 1, ..., ", ncol(x), ")")
lod <- unclass(x)[,lodcolumn, drop = FALSE]
if(!is.null(chr)) {
chri <- match(chr, names(map))
if(any(is.na(chri)))
stop("Chromosomes ", paste(chr[is.na(chri)], collapse=", "), " not found")
map <- map[chri]
lod <- lod[unlist(lapply(map, names)),, drop = FALSE]
}
ggplot_scan1_internal(map=map, lod=lod, gap=gap,
bgcolor=bgcolor, altbgcolor=altbgcolor,
...)
}
map_to_xpos <-
function(map, gap)
{
if(length(map)==1) return(map[[1]])
chr_range <- vapply(map, range, c(0,1), na.rm=TRUE)
result <- map[[1]]-chr_range[1,1] + gap/2
for(i in 2:length(map)) {
result <- c(result,
map[[i]] - chr_range[1,i] + gap + max(result, na.rm=TRUE))
}
result
}
map_to_boundaries <-
function(map, gap)
{
if(length(map)==1)
return(cbind(range(map[[1]], na.rm=TRUE)))
chr_range <- lapply(map, range, na.rm=TRUE)
startend <- matrix(map_to_xpos(chr_range, gap), nrow=2)
startend[1,] <- startend[1,] - gap/2
startend[2,] <- startend[2,] + gap/2
startend
}
autoplot.scan1 <-
function(x, ...)
{
ggplot_scan1(x, ...)
}
map_to_index <-
function(map)
{
if(length(map)==1) {
map[[1]] <- seq(along=map[[1]])
return(map)
}
lengths <- vapply(map, length, 0)
split(1:sum(lengths), rep(seq(along=map), lengths))
} |
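# Toy two-chromosome map (positions in cM, illustrative): map_to_xpos()
# lays the chromosomes side by side, offset and padded by `gap`.
if (interactive()) local({
  map <- list("1" = c(m1 = 0, m2 = 10, m3 = 20),
              "2" = c(m4 = 0, m5 = 15))
  map_to_xpos(map, gap = 25)   # 12.5 22.5 32.5 57.5 72.5
})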
context("AAD tokens")
test_that("normalize_cluster handles clusters with location",
{
cluster_name <- normalize_cluster("help", normalize_location("westus"))
expect_equal(cluster_name, "help.westus")
})
test_that("normalize_cluster handles clusters with location inside the cluster name",
{
cluster_name <- normalize_cluster("help.westus")
expect_equal(cluster_name, "help.westus")
})
test_that("normalize_cluster handles clusters without location",
{
cluster_name <- normalize_cluster("help")
expect_equal(cluster_name, "help")
})
tenant <- Sys.getenv("AZ_TEST_TENANT_ID")
app <- Sys.getenv("AZ_TEST_APP_ID")
password <- Sys.getenv("AZ_TEST_PASSWORD")
if(tenant == "" || app == "" || password == "")
skip("Tests skipped: ARM credentials not set")
srvname <- Sys.getenv("AZ_TEST_KUSTO_SERVER")
srvloc <- Sys.getenv("AZ_TEST_KUSTO_SERVER_LOCATION")
if(srvname == "" || srvloc == "")
skip("Token acquisition tests skipped: server info not set")
if(!interactive())
skip("Token acquisition tests skipped: must be an interactive session")
lapply(AzureAuth::list_azure_tokens(), function(token)
{
hash <- token$hash()
name_suffix <- "kusto\\.windows\\.net"
if((!is.null(token$resource) && grepl(name_suffix, token$resource)) ||
(!is.null(token$scope) && grepl(name_suffix, token$scope)) ||
(!is.null(token$credentials$resource) && grepl(name_suffix, token$credentials$resource)))
file.remove(file.path(AzureAuth::AzureR_dir(), hash))
})
test_that("Obtaining token from KustoClient works",
{
tok1 <- get_kusto_token(clustername=srvname, location=srvloc, tenant=tenant)
expect_true(AzureAuth::is_azure_token(tok1))
srvuri <- sprintf("https://%s.%s.kusto.windows.net", srvname, srvloc)
tok2 <- get_kusto_token(srvuri, tenant=tenant)
expect_true(AzureAuth::is_azure_token(tok2))
expect_identical(tok1$hash(), tok2$hash())
Sys.setenv(AZ_TEST_KUSTO_TOKEN_HASH=tok1$hash())
expire <- as.numeric(tok1$credentials$expires_on)
Sys.sleep(5)
tok1$refresh()
expect_true(as.numeric(tok1$credentials$expires_on) > expire)
toks <- list_kusto_tokens()
expect_true(is.list(toks) && all(sapply(toks, AzureAuth::is_azure_token)))
})
test_that("Obtaining token from own app works",
{
tok1 <- get_kusto_token(clustername=srvname, location=srvloc, tenant=tenant, app=app, password=password)
expect_true(AzureAuth::is_azure_token(tok1))
srvuri <- sprintf("https://%s.%s.kusto.windows.net", srvname, srvloc)
tok2 <- get_kusto_token(srvuri, tenant=tenant, app=app, password=password)
expect_true(AzureAuth::is_azure_token(tok2))
expect_identical(tok1$hash(), tok2$hash())
})
test_that("Obtaining token for common tenant works",
{
srvuri <- sprintf("https://%s.%s.kusto.windows.net", srvname, srvloc)
tok <- get_kusto_token(srvuri)
expect_true(AzureAuth::is_azure_token(tok))
})
Sys.unsetenv("AZ_TEST_KUSTO_TOKEN_HASH") |
dqcontinuous <- function(data) {
if(class(data)[1] != 'data.frame' && class(data)[1] != 'data.table') {
stop('Invalid input: data should be either data.frame or data.table')
} else {
numoutliers <- function(vector) {
if(class(vector) != 'integer' && class(vector) != 'numeric') {
stop('Invalid input: vector should be either integer or numeric')
} else {
p25 <- quantile(vector, c(0.25), na.rm = TRUE)
p75 <- quantile(vector, c(0.75), na.rm = TRUE)
iqr <- p75 - p25
uplim <- p75 + 1.5*iqr
lowlim <- p25 - 1.5*iqr
numOutliers <- sum(vector < lowlim, na.rm = TRUE) + sum(vector > uplim, na.rm = TRUE)
return(numOutliers)
}
}
varNames <- names(data)
classVar <- sapply(data, class)
conVars <- varNames[classVar == 'numeric' | classVar == 'integer']
len <- length(conVars)
if(len > 0) {
if(class(data)[1] == 'data.frame') {
dataConVar <- data[conVars]
} else if (class(data)[1] == 'data.table'){
dataConVar <- data[, conVars, with = FALSE]
}
variable <- names(dataConVar)
nonMissingValues <- sapply(dataConVar, function(var) sum(!is.na(var)))
missingValues <- sapply(dataConVar, function(var) sum(is.na(var)))
missingPercentage <- sapply(missingValues, function(value) round(value/nrow(data)*100, digits = 2))
mean <- sapply(dataConVar, function(var) mean(var, na.rm = TRUE))
maximum <- sapply(dataConVar, function(var) max(var, na.rm = TRUE))
minimum <- sapply(dataConVar, function(var) min(var, na.rm = TRUE))
stdDeviation <- sapply(dataConVar, function(var) sd(var, na.rm = TRUE))
variance <- sapply(dataConVar, function(var) var(var, na.rm = TRUE))
percentiles <- t(sapply(dataConVar, function(var)
quantile(var, c(.01, .05, 0.10, 0.25, 0.50, 0.75, 0.90, 0.95, .99), na.rm = TRUE)))
numOutliers <- sapply(dataConVar, numoutliers)
conVarSummary <- data.frame(variable,
nonMissingValues,
missingValues,
missingPercentage,
minimum,
mean,
maximum,
stdDeviation,
variance,
percentiles,
numOutliers,
row.names = NULL)
names(conVarSummary)[10:18] <- c('p01', 'p05', 'p10', 'p25', 'p50', 'p75', 'p90', 'p95', 'p99')
return(conVarSummary)
} else {
stop('There are no variables of class integer or numeric in the data')
}
}
} |
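# Quick usage on a built-in data set: one summary row per numeric column,
# with missing-value counts, moments, percentiles, and a Tukey-fence
# outlier count.
if (interactive()) local({
  head(dqcontinuous(mtcars))
})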
geo_select_aeq.sf = function (shp) {
coords <- sf::st_coordinates(shp)
coords_mat <- matrix(coords[, 1:2], ncol = 2)
midpoint <- apply(coords_mat, 2, mean)
aeqd <- sprintf("+proj=aeqd +lat_0=%s +lon_0=%s +x_0=0 +y_0=0",
midpoint[2], midpoint[1])
sf::st_crs(aeqd)
}
geo_select_aeq.sfc = function (shp) {
coords <- sf::st_coordinates(shp)
coords_mat <- matrix(coords[, 1:2], ncol = 2)
midpoint <- apply(coords_mat, 2, mean)
aeqd <- sprintf("+proj=aeqd +lat_0=%s +lon_0=%s +x_0=0 +y_0=0",
midpoint[2], midpoint[1])
sf::st_crs(aeqd)
}
geo_select_aeq = function (shp) {
UseMethod("geo_select_aeq")
}
geo_project = function(shp) {
crs = geo_select_aeq(shp)
st_transform(shp, crs = crs)
} |
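# Sketch (requires sf; geo_project() calls st_transform() unqualified, so
# attach sf first): project lon/lat points to an azimuthal equidistant CRS
# centred on their midpoint.
if (interactive() && requireNamespace("sf", quietly = TRUE)) local({
  library(sf)
  pts <- sf::st_sfc(sf::st_point(c(0, 51)), sf::st_point(c(2, 49)),
                    crs = 4326)
  geo_project(pts)   # dispatches to geo_select_aeq.sfc()
})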
PSTNPss_RNA<-function(seqs,pos,neg,label=c()){
if(length(seqs)==1&&file.exists(seqs)){
seqs<-fa.read(seqs,alphabet="rna")
seqs_Lab<-alphabetCheck(seqs,alphabet = "rna",label)
seqs<-seqs_Lab[[1]]
label<-seqs_Lab[[2]]
}
else if(is.vector(seqs)){
seqs<-sapply(seqs,toupper)
seqs_Lab<-alphabetCheck(seqs,alphabet = "rna",label)
seqs<-seqs_Lab[[1]]
label<-seqs_Lab[[2]]
}else {
stop("ERROR, input sequence is not in a correct type. It should be a FASTA file or a string vector.")
}
lenSeqs<-sapply(seqs,nchar)
lens<-sapply(seqs,nchar)
lenSeq<-unique(lens)
if(length(lenSeq)>1){
stop("Error sequences should be in the same length")
}
if(length(pos)==1&&file.exists(pos)){
posSeqs<-fa.read(pos,alphabet="rna")
posSeqs<-alphabetCheck(posSeqs,alphabet = "rna")
posSeqs<-posSeqs[[1]]
}
else if(is.vector(pos)){
posSeqs<-sapply(pos,toupper)
posSeqs<-alphabetCheck(posSeqs,alphabet = "rna")
posSeqs<-posSeqs[[1]]
}else {
stop("ERROR, positive sequences is not in a correct type. It should be a FASTA file or a string vector.")
}
lenPosSeqs<-sapply(posSeqs,nchar)
lenPos<-unique(lenPosSeqs)
if(length(lenPos)>1){
stop("Error positive sequences should be in the same length")
}
if(lenPos!=lenSeq){
stop("Posetive sequences and sample sequences should be in the same length")
}
if(length(neg)==1&&file.exists(neg)){
negSeqs<-fa.read(neg,alphabet="rna")
negSeqs<-alphabetCheck(negSeqs,alphabet = "rna")
negSeqs<-negSeqs[[1]]
}
else if(is.vector(neg)){
negSeqs<-sapply(neg,toupper)
negSeqs<-alphabetCheck(negSeqs,alphabet = "rna")
negSeqs<-negSeqs[[1]]
}else {
stop("ERROR, negative sequences is not in a correct type. It should be a FASTA file or a string vector.")
}
lenNegSeqs<-sapply(negSeqs,nchar)
lenNeg<-unique(lenNegSeqs)
if(length(lenNeg)>1){
stop("Error negative sequences should be in the same length")
}
if(lenNeg!=lenSeq){
stop("Error negative sequences and sample sequences should be in the same length")
}
tripletPos<-sapply(posSeqs, function(x) {temp<-unlist(strsplit(x,split = ""))
len=length(temp)
temp1<-temp[1:(len-2)]
temp2<-temp[2:(len-1)]
temp3<-temp[3:len]
paste(temp1,temp2,temp3,sep = "")
})
tripletPos<-t(tripletPos)
tabPos<-apply(tripletPos, 2, table)
tripletNeg<-sapply(negSeqs, function(x) {temp<-unlist(strsplit(x,split = ""))
len=length(temp)
temp1<-temp[1:(len-2)]
temp2<-temp[2:(len-1)]
temp3<-temp[3:len]
paste(temp1,temp2,temp3,sep = "")
})
tripletNeg<-t(tripletNeg)
tabNeg<-apply(tripletNeg, 2, table)
posMat<-matrix(0,ncol = (lenSeq-2),nrow = 64)
negMat<-matrix(0,ncol = (lenSeq-2),nrow = 64)
rNams<-nameKmer(k=3,type="rna")
row.names(posMat)<-rNams
row.names(negMat)<-rNams
for(i in 1:(lenSeq-2)){
if(length(posSeqs)>1){
np=names(tabPos[[i]])
} else{
np=tripletPos[i]
}
posMat[np,i]=tabPos[[i]]
if(length(posSeqs)>1){
nn=names(tabNeg[[i]])
} else{
nn=tripletNeg[i]
}
negMat[nn,i]=tabNeg[[i]]
}
z<- posMat-negMat
tripletSamples<-lapply(seqs, function(x) {temp<-unlist(strsplit(x,split = ""))
len=length(temp)
temp1<-temp[1:(len-2)]
temp2<-temp[2:(len-1)]
temp3<-temp[3:len]
paste(temp1,temp2,temp3,sep = "")
})
outPutMat<-matrix(0,nrow = length(seqs),ncol=(lenSeq-2))
row.names(outPutMat)<-names(seqs)
for(n in 1:length(seqs)){
outPutMat[n,]<-diag(z[tripletSamples[[n]],1:(lenSeq-2)])
}
return(outPutMat)
} |
tidyselect_locs <- function(.df, ...) {
eval_select(expr(c(...)), .df)
}
tidyselect_names <- function(.df, ...) {
names(tidyselect_locs(.df, ...))
}
tidyselect_syms <- function(.df, ...) {
syms(tidyselect_names(.df, ...))
} |
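# Usage sketch (tidyselect and rlang must be attached, matching the
# unqualified eval_select()/expr()/syms() calls above):
if (interactive() && requireNamespace("tidyselect", quietly = TRUE) &&
    requireNamespace("rlang", quietly = TRUE)) local({
  library(rlang); library(tidyselect)
  tidyselect_locs(mtcars, mpg:disp)            # named positions 1:3
  tidyselect_names(mtcars, starts_with("d"))   # "disp" "drat"
})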
netie=function(input_one_patient,sigma_square,alpha,beta,sigma_p_sqr,sigma_a_sqr=NULL,max_iter,multi_sample=FALSE){
if(all(input_one_patient$neo_load[!is.na(input_one_patient$cluster_id)]==0)){
return(NA)
}
input_one_patient=input_one_patient[!is.na(input_one_patient$cluster_id),]
if(multi_sample==T){
mutations=unlist(sapply(input_one_patient$mutation_id,function(x) paste(strsplit(x,' ')[[1]][2],
strsplit(x,' ')[[1]][3])))
input_one_patient$neo_load=unlist(sapply(mutations,function(x) max(input_one_patient[mutations==x,'neo_load'])))
phi='1'
clones=list()
clones[[id='1']]=mutations[paste(input_one_patient$sample_id,input_one_patient$cluster_id)==
paste(input_one_patient$sample_id,input_one_patient$cluster_id)[1]]
for(each_clone in unique(paste(input_one_patient$sample_id,input_one_patient$cluster_id))[-1]){
mutations_one_clone=mutations[paste(input_one_patient$sample_id,input_one_patient$cluster_id)==each_clone]
phi_tmp=unlist(sapply(1:length(clones),function(x) {uniq_clone=clones[[x]]
shared_mutations=intersect(uniq_clone,mutations_one_clone)
if(length(shared_mutations)/length(uniq_clone)>0.5 &
length(shared_mutations)/length(mutations_one_clone)>0.5){
return(names(clones)[x])
}
}),use.names = F)
if(!is.null(phi_tmp)){
phi=c(phi,phi_tmp)
}else{
phi_tmp=max(as.numeric(names(clones)))+1
phi=c(phi,phi_tmp)
clones[[id=as.character(phi_tmp)]]=mutations_one_clone
}
}
names(phi)=unique(paste(input_one_patient$sample_id,input_one_patient$cluster_id))
}
if(length(unique(input_one_patient$cluster_id))>1){
if(is.null(sigma_a_sqr)){
non_zero_neo_avg=sapply(unique(input_one_patient$cluster_id),function(x)
mean(input_one_patient[input_one_patient$cluster_id==x
& input_one_patient$neo_load!=0,'neo_load']))
non_zero_neo_avg[is.nan(non_zero_neo_avg)]=0
sigma_a_sqr=sd(log(non_zero_neo_avg+1))^2*10
if(sigma_a_sqr==0){
sigma_a_sqr=1
}
}}else{
sigma_a_sqr=1
}
if(sigma_square<100*sigma_a_sqr){
print("sigma square should be much more larger than sigma a square!")
stop()
}
if(alpha<=beta){
print("alpha should be larger than beta!")
stop()
}
if(multi_sample==T){
max_vaf=unlist(sapply(mutations,function(x) max(input_one_patient[mutations==x,'variant_allele_frequency'])))
input_one_patient=input_one_patient[max_vaf>0.05,]
}else{
input_one_patient=input_one_patient[input_one_patient$variant_allele_frequency>0.05,]
}
tmp=table(input_one_patient$cluster_id[input_one_patient$neo_load>0])
tmp=names(tmp[tmp>=1])
input_one_patient=input_one_patient[input_one_patient$cluster_id %in% tmp,]
tmp=table(input_one_patient$cluster_id)
tmp=names(tmp[tmp>=2])
input_one_patient=input_one_patient[input_one_patient$cluster_id %in% tmp,]
if (dim(input_one_patient)[1]==0) {return(NA)}
if(multi_sample==T){
input_one_patient$phi=as.numeric(phi[paste(input_one_patient$sample_id,input_one_patient$cluster_id)])
input_one_patient$cluster_id=as.numeric(factor(paste(input_one_patient$sample_id,
input_one_patient$cluster_id)))
phi_cluster=input_one_patient[,c('cluster_id','phi')]
phi_cluster=phi_cluster[!duplicated(phi_cluster$cluster_id),]
rownames(phi_cluster)=as.character(phi_cluster$cluster_id)
phi_cluster=phi_cluster[as.character(unique(input_one_patient$cluster_id)),]
}else{
input_one_patient$cluster_id=
as.numeric(factor(input_one_patient$cluster_id))
}
input_one_patient[input_one_patient$neo_load>150,'neo_load']=150
ac=bc=rep(0,length(unique(input_one_patient$cluster_id)))
pi=0.5
a=0
zck_list=list()
ac_list=list()
bc_list=list()
acp_rate_ac_list=list()
acp_rate_bc_list=list()
a_all=c()
pi_all=c()
for (iter in 1:max_iter)
{
if(iter/1000==round(iter/1000)){
cat(paste("Iteration",iter,"\n"))
}
acp_rate_ac=rep(FALSE,length(unique(input_one_patient$cluster_id)))
acp_rate_bc=rep(FALSE,length(unique(input_one_patient$cluster_id)))
zck_df=input_one_patient[,c('mutation_id','cluster_id')];zck_df$zck=1
if(multi_sample==T){
for(p in 1:length(unique(input_one_patient$phi))){
input_each_phi=input_one_patient[input_one_patient$phi==unique(input_one_patient$phi)[p],]
for(c in unique(input_each_phi$cluster_id)){
input_each_clone=input_each_phi[input_each_phi$cluster_id==c,]
vck=input_each_clone$variant_allele_frequency
lambda=exp(ac[c]*vck+bc[c])
nck=input_each_clone$neo_load
r_tmp=pi*(nck==0)/(pi*(nck==0)+(1-pi)*dpois(nck,lambda,log=F))
r_tmp_deno=pi*(nck==0)+(1-pi)*dpois(nck,lambda,log=F)
r_tmp[r_tmp_deno==0]=0
zck=1*(runif(length(nck),0,1)>r_tmp)
names(zck)=input_each_clone$mutation_id
zck_df$zck[zck_df$mutation_id %in% names(zck)]=zck
bc_prim=rnorm(1,bc[c],sqrt(sigma_p_sqr))
lambda_prim_b=exp(ac[c]*vck+bc_prim)
lambda=exp(ac[c]*vck+bc[c])
tmp_prim=sum((zck==1)*dpois(nck,lambda_prim_b,log = T))
tmp=sum((zck==1)*dpois(nck,lambda,log = T))
llhr_b=exp(tmp_prim-bc_prim^2/(2*sigma_square)-tmp+bc[c]^2/(2*sigma_square))
acceptance_function_b=min(1,llhr_b)
u=runif(1,0,1)
if(u<=acceptance_function_b){
bc[c]=bc_prim
acp_rate_bc[c]=TRUE
}
}
input_each_phi$bc=bc[input_each_phi$cluster_id]
input_each_phi$ac=ac[c]
vck_phi=input_each_phi$variant_allele_frequency
lambda_phi=exp(input_each_phi$ac*vck_phi+input_each_phi$bc)
nck_phi=input_each_phi$neo_load
zck_phi=zck_df[input_each_phi$mutation_id,'zck']
ac_prim=rnorm(1,ac[c],sqrt(sigma_p_sqr))
lambda_prim_a=exp(ac_prim*vck_phi+input_each_phi$bc)
tmp_prim=sum((zck_phi==1)*dpois(nck_phi,lambda_prim_a,log = T))
tmp=sum((zck_phi==1)*dpois(nck_phi,lambda_phi,log = T))
if(length(table(input_one_patient$cluster_id))==1){
llhr_a=exp(tmp_prim-ac_prim^2/(2*sigma_square)-tmp+ac[c]^2/(2*sigma_square))
}else{
llhr_a=exp(tmp_prim-(ac_prim-a)^2/(2*sigma_a_sqr)-tmp+(ac[c]-a)^2/(2*sigma_a_sqr))
}
acceptance_function_a=min(1,llhr_a)
u=runif(1,0,1)
if(u<=acceptance_function_a){
ac[phi_cluster$phi==unique(input_each_clone$phi)]=ac_prim
acp_rate_ac[c]=TRUE
}
}
pi=rbeta(1,alpha+sum((zck_df$zck==0)*(input_one_patient$neo_load==0)),beta+sum(zck_df$zck==1))
A=1/sigma_square+length(unique(input_one_patient$phi))/sigma_a_sqr
B=sum(ac[!duplicated(phi_cluster$phi)])/sigma_a_sqr
a=rnorm(1,B/A,sqrt(1/A))
ac_list[[iter]]=ac
bc_list[[iter]]=bc
zck_list[[iter]]=zck_df$zck
acp_rate_ac_list[[iter]]=acp_rate_ac
acp_rate_bc_list[[iter]]=acp_rate_bc
a_all=c(a_all,a)
pi_all=c(pi_all,pi)
}else{
for(c in 1:length(unique(input_one_patient$cluster_id))){
input_each_clone=input_one_patient[input_one_patient$cluster_id==unique(input_one_patient$cluster_id)[c],]
vck=input_each_clone$variant_allele_frequency
lambda=exp(ac[c]*vck+bc[c])
nck=input_each_clone$neo_load
r_tmp=pi*(nck==0)/(pi*(nck==0)+(1-pi)*dpois(nck,lambda,log=F))
r_tmp_deno=pi*(nck==0)+(1-pi)*dpois(nck,lambda,log=F)
r_tmp[r_tmp_deno==0]=0
zck=1*(runif(length(nck),0,1)>r_tmp)
names(zck)=input_each_clone$mutation_id
zck_df$zck[zck_df$mutation_id %in% names(zck)]=zck
ac_prim=rnorm(1,ac[c],sqrt(sigma_p_sqr))
lambda_prim_a=exp(ac_prim*vck+bc[c])
tmp_prim=sum((zck==1)*dpois(nck,lambda_prim_a,log = T))
tmp=sum((zck==1)*dpois(nck,lambda,log = T))
if(length(table(input_one_patient$cluster_id))==1){
llhr_a=exp(tmp_prim-ac_prim^2/(2*sigma_square)-tmp+ac[c]^2/(2*sigma_square))
}else{
llhr_a=exp(tmp_prim-(ac_prim-a)^2/(2*sigma_a_sqr)-tmp+(ac[c]-a)^2/(2*sigma_a_sqr))
}
acceptance_function_a=min(1,llhr_a)
u=runif(1,0,1)
if(u<=acceptance_function_a){
ac[c]=ac_prim
acp_rate_ac[c]=TRUE
}
bc_prim=rnorm(1,bc[c],sqrt(sigma_p_sqr))
lambda_prim_b=exp(ac[c]*vck+bc_prim)
lambda=exp(ac[c]*vck+bc[c])
tmp_prim=sum((zck==1)*dpois(nck,lambda_prim_b,log = T))
tmp=sum((zck==1)*dpois(nck,lambda,log = T))
llhr_b=exp(tmp_prim-bc_prim^2/(2*sigma_square)-tmp+bc[c]^2/(2*sigma_square))
acceptance_function_b=min(1,llhr_b)
u=runif(1,0,1)
if(u<=acceptance_function_b){
bc[c]=bc_prim
acp_rate_bc[c]=TRUE
}
}
pi=rbeta(1,alpha+sum((zck_df$zck==0)*(input_one_patient$neo_load==0)),beta+sum(zck_df$zck==1))
A=1/sigma_square+length(unique(input_one_patient$cluster_id))/sigma_a_sqr
B=sum(ac)/sigma_a_sqr
a=rnorm(1,B/A,sqrt(1/A))
ac_list[[iter]]=ac
bc_list[[iter]]=bc
zck_list[[iter]]=zck_df$zck
acp_rate_ac_list[[iter]]=acp_rate_ac
acp_rate_bc_list[[iter]]=acp_rate_bc
a_all=c(a_all,a)
pi_all=c(pi_all,pi)
}
}
keep=round(max_iter/2):max_iter
ac_final=Reduce("+",ac_list[keep])/length(keep)
bc_final=Reduce("+",bc_list[keep])/length(keep)
zck_df_final=round(Reduce("+",zck_list[keep])/length(keep))
names(zck_df_final)=zck_df$mutation_id
ac_rate=Reduce("+",acp_rate_ac_list[keep])/length(keep)
bc_rate=Reduce("+",acp_rate_bc_list[keep])/length(keep)
a_final=mean(a_all[keep])
pi_final=mean(pi_all[keep])
if(multi_sample==TRUE){
final_parameters=list(ac=cbind(phi_cluster,ac_final),a=a_final)
}else{
final_parameters=list(ac=ac_final,a=a_final)
}
result=list('final_parameters'=final_parameters)
return(result)
} |
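# Hedged call sketch (single-sample mode; the data frame pat_df and all
# tuning values are illustrative). Required columns, per the code above:
# mutation_id, cluster_id, neo_load, variant_allele_frequency, plus
# sample_id when multi_sample = TRUE.
# result <- netie(input_one_patient = pat_df, sigma_square = 100000,
#                 alpha = 10, beta = 2, sigma_p_sqr = 0.1, sigma_a_sqr = NULL,
#                 max_iter = 10000, multi_sample = FALSE)
# result$final_parameters$a   # pooled immune-selection strength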
apval_Bai1996 <- function(sam1, sam2){
n1 <- dim(sam1)[1]
n2 <- dim(sam2)[1]
tau <- (n1*n2)/(n1 + n2)
n <- n1 + n2 - 2
p <- dim(sam1)[2]
diff <- colMeans(sam1) - colMeans(sam2)
XX <- sum(diff^2)
sam.cov <- ((n1 - 1)*cov(sam1) + (n2 - 1)*cov(sam2))/n
trS <- sum(diag(sam.cov))
tr.cov2 <- n^2/((n + 2)*(n - 1))*(sum(sam.cov^2) - trS^2/n)
test.stat <- (tau*XX - trS)/sqrt(2*(n + 1)/n*tr.cov2)
test.stat <- as.numeric(test.stat)
pval <- 1 - pnorm(test.stat)
names(pval) <- "Bai1996"
out <- NULL
out$sam.info <- c("n1" = n1, "n2" = n2, "p" = p)
out$cov.assumption <- "the two groups have same covariance"
out$method <- "asymptotic distribution"
out$pval <- pval
return(out)
} |
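# Simulated two-sample check (illustrative): equal means and a shared
# covariance, so the Bai-Saranadasa p-value should be roughly uniform.
if (interactive()) local({
  set.seed(7)
  sam1 <- matrix(rnorm(30 * 50), nrow = 30)   # n1 = 30, p = 50
  sam2 <- matrix(rnorm(25 * 50), nrow = 25)   # n2 = 25
  apval_Bai1996(sam1, sam2)$pval
})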
input2<-function(){
message("The input of this function must be a non-zero column matrix of dimension 2x2",'\n')
A<-matrix(0,nrow=2,ncol=2)
A[1,1]<-as.numeric(readline("People have been exposed to the factor and they present the disease\n"))
A[1,2]<-as.numeric(readline("People have been exposed to the factor and they do not present the disease\n"))
A[2,1]<-as.numeric(readline("People have not been exposed to the factor and they present the disease\n"))
A[2,2]<-as.numeric(readline("People have not been exposed to the factor and they do not present the disease\n"))
return(A)
} |
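## Usage sketch: input2() is interactive, prompting for the four cells of a
## 2x2 exposure-by-disease table and returning them as a matrix:
## A <- input2()  # answer the four prompts, then use A downstream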
MC_CRUDE<-function(lsf,lDistr,cov_user=0.025,n_batch=100,n_max=1e7,use_threads = 64,dataRecord=TRUE,debug.level=0){
debug.TAG <- "MC_Crude"
debug.print(debug.level,debug.TAG,c(TRUE), msg="Crude Monte-Carlo Simulation started...")
tic<-Sys.time()
pf<-1
I_n <- 0
var <- NA # defined up front so the output list is valid even if no failures are observed
n_sim<-0
n_vars<-length(lDistr)
v<-matrix(nrow=n_batch,ncol=n_vars)
cov_mc <- +Inf
k <- 0
data.pf <- vector("numeric",n_max)
data.var <- vector("numeric",n_max)
data.cov <- vector("numeric",n_max)
data.n_sim <- vector("numeric",n_max)
data.time <- vector("numeric",n_max)
mc_local <- function(x){
for(i in 1:n_vars){
v[,i]<-lDistr[[i]][[1]]$r(n_batch)
}
I<-sum(as.numeric(apply(v,1,lsf)<0))
return(I)
}
while(1){
if(use_threads>1){
I_n <- I_n + sum(unlist(parallel::mclapply(seq(1,use_threads),mc_local)))
n_sim <- n_sim + use_threads*n_batch
}else{
for(i in 1:n_vars){
v[,i]<-lDistr[[i]][[1]]$r(n_batch)
}
I<-as.numeric(apply(v,1,lsf)<0)
n_sim<-n_sim+n_batch
I_n <- I_n+sum(I)
}
k <- k+1
if(dataRecord){
data.n_sim[k] <- n_sim
data.time[k] <- Sys.time()-tic
}
debug.print(debug.level,debug.TAG, I_n, "I_n: ")
if(I_n>0){
pf <- I_n/n_sim
var <- 1/(n_sim-1)*((1/n_sim)*I_n-pf^2)
cov_mc <- sqrt(var)/pf;
if(dataRecord){
data.pf[k] <- pf
data.var[k] <- var
data.cov[k] <- cov_mc
}
info.print(debug.TAG,debug.level,c("I_n","pf", "cov", "nsim"),c(I_n, pf,cov_mc,n_sim))
debug.print(debug.level,debug.TAG, pf, "Pf: ")
debug.print(debug.level,debug.TAG, var, "var: ")
debug.print(debug.level,debug.TAG, cov_mc, "CoV_mc: ")
debug.print(debug.level,debug.TAG, cov_user, "cov_user: ")
debug.print(debug.level,debug.TAG, n_sim, "n_sim: ")
debug.print(debug.level,debug.TAG, n_max, "n_max: ")
}else{
info.print(debug.TAG,debug.level,c("I_n","pf", "cov", "nsim"),c(I_n, "NA","NA",n_sim))
}
if(cov_mc<cov_user){break;}
if(n_sim>n_max){break;}
}
cat("\n")
duration<-Sys.time()-tic
if(dataRecord){
range <- 1:k
df <- data.frame(
"n_sim"=data.n_sim[range],
"pf"=data.pf[range],
"var"=data.var[range],
"cov"=data.cov[range],
"time"=data.time[range]
)
}else{
df <- data.frame()
}
output<-list(
"method"="MCC",
"beta"=-stats::qnorm(pf),
"pf"=pf,
"var"=var,
"cov_mc"=cov_mc,
"cov_user"=cov_user,
"n_mc"=n_sim,
"n_max"=n_max,
"n_batch"=n_batch,
"n_threads"=use_threads,
"data"=df,
"runtime"=duration
)
debug.print(debug.level,debug.TAG,c(duration), msg="Crude Monte-Carlo Simulation finished in [s]: ")
return(output)
} |
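## Usage sketch (the structure of 'lDistr' is inferred from the sampling calls
## above: element i is a list whose first component exposes a random number
## generator $r; the limit state function and distributions are illustrative):
## lsf <- function(x) 6 - x[1] - x[2]  # failure whenever lsf(x) < 0
## lDistr <- list(
##   list(list(r = function(n) rnorm(n, mean = 2, sd = 1))),
##   list(list(r = function(n) rnorm(n, mean = 2, sd = 1)))
## )
## res <- MC_CRUDE(lsf, lDistr, cov_user = 0.05, use_threads = 1)
## res$pf; res$beta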
CVFolds <- function(N, id, Y, cvControl){
if(!is.null(cvControl$validRows)) {
return(cvControl$validRows)
}
stratifyCV <- cvControl$stratifyCV
shuffle <- cvControl$shuffle
V <- cvControl$V
if(!stratifyCV) {
if(shuffle) {
if(is.null(id)) {
validRows <- split(sample(1:N), rep(1:V, length=N))
} else {
n.id <- length(unique(id))
id.split <- split(sample(1:n.id), rep(1:V, length=n.id))
validRows <- vector("list", V)
for(v in seq(V)) {
validRows[[v]] <- which(id %in% unique(id)[id.split[[v]]])
}
}
} else {
if(is.null(id)) {
validRows <- split(1:N, rep(1:V, length=N))
} else {
n.id <- length(unique(id))
id.split <- split(1:n.id, rep(1:V, length=n.id))
validRows <- vector("list", V)
for(v in seq(V)) {
validRows[[v]] <- which(id %in% unique(id)[id.split[[v]]])
}
}
}
} else {
if(length(unique(Y)) != 2) {
stop("stratifyCV only implemented for binary Y")
}
if(sum(Y) < V | sum(!Y) < V) {
stop("number of (Y=1) or (Y=0) is less than the number of folds")
}
if(shuffle) {
if(is.null(id)) {
wiY0 <- which(Y == 0)
wiY1 <- which(Y == 1)
rowsY0 <- split(sample(wiY0), rep(1:V, length=length(wiY0)))
rowsY1 <- split(sample(wiY1), rep(1:V, length=length(wiY1)))
validRows <- vector("list", length = V)
names(validRows) <- paste(seq(V))
for(vv in seq(V)) {
validRows[[vv]] <- c(rowsY0[[vv]], rowsY1[[vv]])
}
} else {
stop("stratified sampling with id not currently implemented")
}
} else {
if(is.null(id)) {
within.split <- suppressWarnings(tapply(1:N, INDEX = Y, FUN = split, rep(1:V)))
validRows <- vector("list", length = V)
names(validRows) <- paste(seq(V))
for(vv in seq(V)) {
validRows[[vv]] <- c(within.split[[1]][[vv]], within.split[[2]][[vv]])
}
} else {
stop("stratified sampling with id not currently implemented")
}
}
}
return(validRows)
} |
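## Usage sketch ('cvControl' is assumed to be a plain list carrying the fields
## read above, as produced e.g. by SuperLearner.CV.control()):
## set.seed(1)
## Y <- rbinom(100, 1, 0.4)
## folds <- CVFolds(N = 100, id = NULL, Y = Y,
##                  cvControl = list(V = 10, stratifyCV = TRUE, shuffle = TRUE,
##                                   validRows = NULL))
## lengths(folds)  # ten validation sets partitioning 1:100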
require(OpenMx)
IndManExo <- 1:8
IndManEnd <- 9:12
data(latentMultipleRegExample1)
rawlisdat <- latentMultipleRegExample1[, c(IndManEnd, IndManExo)]
covlisdat <- cov(rawlisdat)
mealisdat <- colMeans(rawlisdat)
numLatExo <- 2
numLatEnd <- 1
numManExo <- 8
numManEnd <- 4
LatExo <- paste('xi', 1:numLatExo, sep='')
LatEnd <- paste('eta', 1:numLatEnd, sep='')
ManExo <- names(rawlisdat)[(numManEnd+1):(numManEnd+numManExo)]
ManEnd <- names(rawlisdat)[1:numManEnd]
lx <- mxMatrix("Full", numManExo, numLatExo,
free=c(F,T,T,T,F,F,F,F,F,F,F,F,F,T,T,T),
values=c(1, .2, .2, .2, 0, 0, 0, 0, 0, 0, 0, 0, 1, .2, .2, .2),
labels=c( paste('l', 1, 1:4, sep=''), rep(NA, 8), paste('l', 2, 5:8, sep='')),
name='LX',
dimnames=list(ManExo, LatExo)
)
ly <- mxMatrix("Full", numManEnd, numLatEnd,
free=c(F,T,T,T),
values=c(1, .2, .2, .2),
labels= paste('l', 3, 9:12, sep=''),
name='LY',
dimnames=list(ManEnd, LatEnd)
)
be <- mxMatrix("Zero", numLatEnd, numLatEnd, name='BE', dimnames=list(LatEnd, LatEnd))
ga <- mxMatrix("Full", numLatEnd, numLatExo,
free=T,
values=.2,
labels=c('b13', 'b23'),
name='GA',
dimnames=list(LatEnd, LatExo)
)
ph <- mxMatrix("Symm", numLatExo, numLatExo,
free=c(T,T,T),
values=c(.8, .3, .8),
labels=c('varF1', 'covF1F2', 'varF2'),
name='PH',
dimnames=list(LatExo, LatExo)
)
ps <- mxMatrix("Symm", numLatEnd, numLatEnd,
free=T,
values=.8,
labels='varF3',
name='PS',
dimnames=list(LatEnd, LatEnd)
)
td <- mxMatrix("Diag", numManExo, numManExo,
free=T,
values=.8,
labels=paste('d', 1:8, sep=''),
name='TD',
dimnames=list(ManExo, ManExo)
)
te <- mxMatrix("Diag", numManEnd, numManEnd,
free=T,
values=.8,
labels=paste('e', 9:12, sep=''),
name='TE',
dimnames=list(ManEnd, ManEnd)
)
th <- mxMatrix("Zero", numManExo, numManEnd, name='TH', dimnames=list(ManExo, ManEnd))
tx <- mxMatrix("Full", numManExo, 1,
free=T,
values=.1,
labels=paste('m', 1:8, sep=''),
name='TX',
dimnames=list(ManExo, "TXMeans")
)
ty <- mxMatrix("Full", numManEnd, 1,
free=T,
values=.1,
labels=paste('m', 9:12, sep=''),
name='TY',
dimnames=list(ManEnd, "TYMeans")
)
ka <- mxMatrix("Zero", numLatExo, 1, name='KA', dimnames=list(LatExo, "KAMeans"))
al <- mxMatrix("Zero", numLatEnd, 1, name='AL', dimnames=list(LatEnd, "ALMeans"))
lmod <- mxModel(
name='LISREL Factor Regression Model with Means',
mxData(observed=covlisdat, type='cov', means=mealisdat, numObs=nrow(rawlisdat)),
lx, ly, be, ga, ph, ps, td, te, th, tx, ty, ka, al,
mxExpectationLISREL(LX=lx$name, LY=ly$name, BE=be$name,
GA=ga$name, PH=ph$name, PS=ps$name, TD=td$name,
TE=te$name, TH=th$name, TX=tx$name, TY=ty$name,
KA=ka$name, AL=al$name),
mxFitFunctionML()
)
lmodRun <- mxRun(lmod)
summary(lmodRun)
expectedParam <- c(0.80902, 1.14834, 1.30536, 0.80529, 1.23204,
1.1897, 0.87486, 0.78661, 0.70182, 0.99078, 0.45669, 1.92903, 0.1534,
1.31687, 0.7706, 1.1335, 1.06743, 1.06387, 1.05366, 0.84761, 0.76179,
1.18635, 1.00994, 0.94248, 0.97102, 0.88031, 0.95606, 0.06082,
0.03738, -0.04867, -0.01318, 0.19335, 0.22001, 0.25679, 0.1719,
0.1662, 0.23484, 0.17303, 0.15762)
omxCheckCloseEnough(lmodRun$output$estimate, expectedParam, epsilon=0.001) |
test_that("safe_unit returns expected output", {
expect_null(safe_unit(1, NULL))
expect_null(safe_unit(NULL, "inches"))
expect_null(safe_unit(NULL, NULL))
valid_unit <- safe_unit(1, "inches")
expect_is(valid_unit, "call")
expect_identical(
eval(valid_unit),
grid::unit(1, "inches")
)
})
test_that("safe_arrow returns expected output", {
expect_null(safe_arrow(30, NULL))
expect_null(safe_arrow(NULL, 0.1))
expect_null(safe_arrow(NULL, NULL))
valid_arrow <- safe_arrow(30, 0.1)
expect_is(valid_arrow, "call")
expect_identical(
eval(valid_arrow),
arrow(
angle = 30, length = unit(0.1, "inches"),
"last", "closed"
)
)
}) |
ncyc2meco <- function(abund_table, sample_data = NULL, match_table = NULL){
abund_raw <- read.delim(abund_table, row.names = 1, comment.char = "#")
seq_num <- readLines(abund_table)[1]
seq_num <- as.numeric(gsub(".*\\s(\\d+)$", "\\1", seq_num))
unclassified <- seq_num - apply(abund_raw, 2, sum)
abund_new <- rbind.data.frame(abund_raw, unclassified = unclassified)
if(!is.null(match_table)){
abund_new <- check_match_table(match_table = match_table, abund_new = abund_new)
}
if(!is.null(sample_data)){
sample_data <- check_sample_table(sample_data = sample_data)
}
data("ncyc_map", envir=environment())
dataset <- microtable$new(otu_table = abund_new, tax_table = ncyc_map, sample_table = sample_data)
dataset
} |
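## Usage sketch (file names hypothetical; the abundance table is raw NCycDB
## output whose first line carries the total sequence count parsed above):
## meco_ncyc <- ncyc2meco("ncyc_abund.tsv", sample_data = "sample_info.tsv")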
WT <- function(d, alpha, a, timing, tol = 0.000001, r = 18) {
b <- timing^(d - .5)
if (length(a) == 1) {
c0 <- 0
}
else {
i <- 0
while (WTdiff(i, alpha, a, b, timing, r) >= 0.) {
i <- i - 1
}
c0 <- i
}
i <- 2
while (WTdiff(i, alpha, a, b, timing, r) <= 0.) {
i <- i + 1
}
c1 <- i
stats::uniroot(WTdiff, lower = c0, upper = c1, alpha = alpha, a = a, b = b, timing = timing, tol = tol, r = r)$root
}
WTdiff <- function(c, alpha, a, b, timing, r) {
if (length(a) == 1) {
a <- -c * b
}
x <- gsProbability(k = length(b), theta = 0., n.I = timing, a = a, b = c * b, r = r)
alpha - sum(x$upper$prob)
} |
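## Usage sketch: boundary constant for a Wang-Tsiatis design with three equally
## spaced analyses; d = 0 gives an O'Brien-Fleming-like shape, d = 0.5 a
## Pocock-like shape (argument roles inferred from the code above):
## WT(d = 0.25, alpha = 0.025, a = 1, timing = c(1/3, 2/3, 1))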
coef.georob <- function(
object, what = c("trend", "variogram"), ...
)
{
f.aux <- function(x){
tmp <- x[["param"]]
if( !x[["isotropic"]] ){
tmp <- c(tmp, x[["aniso"]])
}
attr( tmp, "variogram.model" ) <- x[["variogram.model"]]
tmp
}
what <- match.arg( what )
res <- switch(
what,
trend = object[["coefficients"]],
variogram = lapply(object[["variogram.object"]], f.aux)
)
if( is.list(res) ){
if( identical( length( res ), 1L ) ){
res <- res[[1]]
} else {
names( res ) <- sapply( object[["variogram.object"]], function(x) x[["variogram.model"]] )
}
}
class( res ) <- "coef.georob"
res
}
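## Usage sketch (assumes 'fit' is a fitted georob object):
## coef(fit)                      # fixed-effects (trend) coefficients
## coef(fit, what = "variogram")  # variogram parameters, incl. anisotropy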
print.coef.georob <- function(
x, ...
)
{
xx <- unclass( x )
if( is.list( xx ) ){
lapply(
xx,
function(x){
cat( "Variogram: ", attr(x, "variogram.model"), "\n" )
attr( x, "variogram.model" ) <- NULL
print( x )
cat( "\n" )
}
)
} else {
attr( xx, "variogram.model" ) <- NULL
print( xx )
}
invisible(x)
}
model.frame.georob <-
function(
formula, ...
)
{
class( formula ) <- "lm"
model.frame( formula, ... )
}
model.matrix.georob <-
function(
object, ...
)
{
class( object ) <- "lm"
model.matrix( object, ... )
}
nobs.georob <-
function(
object, ...
)
{
class( object ) <- "lm"
nobs( object, ... )
}
print.georob <- function(
x, digits = max(3, getOption("digits") - 3), ...
)
{
cat("\nTuning constant: ", x[["tuning.psi"]], "\n" )
cat("\nFixed effects coefficients:\n")
print(
format( coef(x), digits = digits ), print.gap = 2L,
quote = FALSE
)
cat("\n")
lapply(
x[["variogram.object"]],
function(x){
cat( "Variogram: ", x[["variogram.model"]], "\n" )
param <- x[["param"]]
names( param ) <- ifelse(
x[["fit.param"]],
names( param ),
paste( names( param ), "(fixed)", sep = "" )
)
print(
format( param, digits = digits,
width = 16L, justify = "right" ), print.gap = 2L,
quote = FALSE
)
cat("\n")
if( !x[["isotropic"]] ){
aniso <- x[["aniso"]]
names( aniso ) <- ifelse(
x[["fit.aniso"]],
names( aniso ),
paste( names( aniso ), "(fixed)", sep = "" )
)
print(
format( aniso, digits = digits,
width = 16L, justify = "right" ), print.gap = 2L,
quote = FALSE
)
cat("\n")
}
}
)
invisible( x )
}
ranef.georob <-
function(
object,
standard = FALSE,
...
)
{
stopifnot(identical(length(standard), 1L) && is.logical(standard))
object.na <- object[["na.action"]]
if( identical( class( object[["na.action"]] ), "exclude" ) ){
class( object[["na.action"]] ) <- "omit"
}
Valphaxi.objects <- expand( object[["Valphaxi.objects"]] )
zhat.objects <- expand( object[["zhat.objects"]] )
covmat <- expand( object[["cov"]] )
bhat <- object[["bhat"]]
if( standard ){
if( is.null( covmat[["cov.bhat"]] ) ){
X <- model.matrix(
terms( object ),
model.frame( object )
)[!duplicated( object[["Tmat"]] ), , drop = FALSE]
r.cov <- covariances.fixed.random.effects(
Valphaxi.objects = Valphaxi.objects[c("Valphaxi", "Valphaxi.inverse")],
Aalphaxi = zhat.objects[["Aalphaxi"]],
Palphaxi = zhat.objects[["Palphaxi"]],
Valphaxi.inverse.Palphaxi = zhat.objects[["Valphaxi.inverse.Palphaxi"]],
rweights = object[["rweights"]],
XX = X, TT = object[["Tmat"]], TtT = as.vector( table( object[["Tmat"]] ) ),
names.yy = rownames( model.frame( object ) ),
nugget = object[["param"]]["nugget"],
eta = sum( object[["param"]][c( "variance", "snugget")] ) / object[["param"]]["nugget"],
expectations = object[["expectations"]], family = "gaussian",
cov.bhat = TRUE, full.cov.bhat = FALSE,
cov.betahat = FALSE,
cov.bhat.betahat = FALSE,
cov.delta.bhat = FALSE, full.cov.delta.bhat = FALSE,
cov.delta.bhat.betahat = FALSE,
cov.ehat = FALSE, full.cov.ehat = FALSE,
cov.ehat.p.bhat = FALSE, full.cov.ehat.p.bhat = FALSE,
aux.cov.pred.target = FALSE,
control.pcmp = control.pcmp(),
verbose = 0.
)
if( r.cov[["error"]] ) stop(
"an error occurred when computing the variances of the random effects"
)
se <- sqrt( r.cov[["cov.bhat"]] )
} else {
if( is.matrix( covmat[["cov.bhat"]] ) ){
se <- sqrt( diag( covmat[["cov.bhat"]] ) )
} else {
se <- sqrt( covmat[["cov.bhat"]] )
}
}
bhat <- bhat / se
}
naresid( object.na, bhat )
}
random.effects.georob <- ranef.georob
fixef.georob <-
function(
object,
...
)
{
object[["coefficients"]]
}
fixed.effects.georob <- fixef.georob
resid.georob <-
function(
object,
type = c("working", "response", "deviance", "pearson", "partial" ),
terms = NULL,
level = 1,
...
)
{
type <- match.arg( type )
if( !level %in% 1:0 ) stop( "wrong level: must be either 1 or 0" )
object.na <- object[["na.action"]]
if( identical( class( object[["na.action"]] ), "exclude" ) ){
class( object[["na.action"]] ) <- "omit"
}
r <- object[["residuals"]]
res <- switch(
type,
working = ,
response = r,
deviance = ,
pearson = if( is.null(object[["weights"]]) ) r else r * sqrt(object[["weights"]]),
partial = r
)
if( level == 0L && !identical( type, "pearson" ) ){
res <- res + ranef( object, standard = FALSE )[object[["Tmat"]]]
}
if( type == "partial" )
res <- res + predict( object, type = "terms", terms = terms )[["fit"]]
res <- drop( res )
naresid( object.na, res )
}
residuals.georob <- resid.georob
rstandard.georob <-
function( model, level = 1, ... )
{
model.na <- model[["na.action"]]
if( identical( class( model[["na.action"]] ), "exclude" ) ){
class( model[["na.action"]] ) <- "omit"
}
Valphaxi.objects <- expand( model[["Valphaxi.objects"]] )
zhat.objects <- expand( model[["zhat.objects"]] )
covmat <- expand( model[["cov"]] )
if( !level %in% 1:0 ) stop( "wrong level: must be either 1 or 0" )
if(
( is.null( covmat[["cov.ehat"]] ) & level == 1L ) ||
( is.null( covmat[["cov.ehat.p.bhat"]] ) & level == 0L )
){
X <- model.matrix(
terms( model),
model.frame( model )
)[!duplicated( model[["Tmat"]] ), , drop = FALSE]
r.cov <- covariances.fixed.random.effects(
Valphaxi.objects = Valphaxi.objects,
Aalphaxi = zhat.objects[["Aalphaxi"]],
Palphaxi = zhat.objects[["Palphaxi"]],
Valphaxi.inverse.Palphaxi = zhat.objects[["Valphaxi.inverse.Palphaxi"]],
rweights = model[["rweights"]],
XX = X, TT = model[["Tmat"]], TtT = as.vector( table( model[["Tmat"]] ) ),
names.yy = rownames( model.frame( model ) ),
nugget = model[["variogram.object"]][[1L]][["param"]]["nugget"],
eta = f.reparam.fwd( model[["variogram.object"]] )[[1L]][["param"]]["nugget"],
expectations = model[["expectations"]], family = "long.tailed",
cov.bhat = FALSE, full.cov.bhat = FALSE,
cov.betahat = FALSE,
cov.bhat.betahat = FALSE,
cov.delta.bhat = FALSE, full.cov.delta.bhat = FALSE,
cov.delta.bhat.betahat = FALSE,
cov.ehat = level == 1L, full.cov.ehat = FALSE,
cov.ehat.p.bhat = level == 0L, full.cov.ehat.p.bhat = FALSE,
aux.cov.pred.target = FALSE,
control.pcmp = control.pcmp(),
verbose = 0.
)
if( r.cov[["error"]] ) stop(
"an error occurred when computing the variance of the residuals"
)
if( level == 1L ){
covmat[["cov.ehat"]] <- r.cov[["cov.ehat"]]
} else {
covmat[["cov.ehat.p.bhat"]] <- r.cov[["cov.ehat.p.bhat"]]
}
}
if( level == 1L ){
se <- covmat[["cov.ehat"]]
} else {
se <- covmat[["cov.ehat.p.bhat"]]
}
if( is.matrix( se ) ){
se <- sqrt( diag( se ) )
} else {
se <- sqrt( se )
}
naresid( model.na, residuals( model, level = level ) / se )
}
summary.georob <- function (
object, correlation = FALSE,
signif = 0.95,
...
)
{
stopifnot(identical(length(correlation), 1L) && is.logical(correlation))
stopifnot(identical(length(signif), 1L) && is.numeric(signif) && signif > 0 && signif < 1)
d2r <- pi / 180.
covmat <- expand( object[["cov"]] )
ans <- object[c(
"call", "residuals", "bhat", "rweights", "converged", "convergence.code",
"iter", "loglik", "variogram.object", "gradient",
"tuning.psi", "df.residual", "control", "terms"
)]
ans[["scale"]] <- sqrt(object[["variogram.object"]][[1L]][["param"]]["nugget"])
ans[["control"]][["method"]] <- "TODO: PRINT GLSROB CONTROL PARAMETERS HERE"
se <- sqrt(diag(covmat[["cov.betahat"]]))
est <- object[["coefficients"]]
tval <- est/se
ans[["coefficients"]] <- cbind(
est, se, tval, 2. * pt( abs(tval), object[["df.residual"]], lower.tail = FALSE )
)
dimnames( ans[["coefficients"]] ) <- list(
names(est), c("Estimate", "Std. Error", "t value", "Pr(>|t|)")
)
if( correlation ){
ans[["correlation"]] <- cov2cor( covmat[["cov.betahat"]] )
}
ans[["param.aniso"]] <- lapply(
ans[["variogram.object"]],
function(object){
res <- as.matrix( object[["param"]], ncol = 1L )
nme.res <- names( object[["param"]] )
fit.res <- object[["fit.param"]]
if( !object[["isotropic"]] ){
res <- rbind( res, as.matrix( object[["aniso"]], ncol = 1L ) )
nme.res <- c( nme.res, names( object[["aniso"]] ) )
fit.res <- c( fit.res, object[["fit.aniso"]] )
}
colnames( res ) <- "Estimate"
nme.res[!fit.res] <- paste( nme.res[!fit.res], "(fixed)", sep="" )
rownames( res ) <- nme.res
res
}
)
if( !is.null( object[["hessian.tfpa"]] ) ){
cor.tf.param <- cov.tf.param <- matrix(
NA, nrow = NROW( object[["hessian.tfpa"]] ), ncol = NROW( object[["hessian.tfpa"]] ),
dimnames = dimnames( object[["hessian.tfpa"]] )
)
sr <- !apply( object[["hessian.tfpa"]], 1L, function( x ) all( is.na( x ) ) )
if( sum( sr ) > 0L ){
t.chol <- try( chol( object[["hessian.tfpa"]][sr, sr, drop = FALSE] ), silent = TRUE )
if( !identical( class( t.chol ), "try-error" ) ){
cov.tf.param <- chol2inv( t.chol )
dimnames( cov.tf.param ) <- dimnames( t.chol )
if( correlation ){
ans[["cor.tf.param"]] <- cov2cor( cov.tf.param )
colnames( ans[["cor.tf.param"]] ) <- rownames( ans[["cor.tf.param"]] ) <-
gsub( ".__...__.", ".", colnames( ans[["cor.tf.param"]] ), fixed = TRUE )
}
se <- sqrt( diag( cov.tf.param ) )
tmp <- f.aux.tf.param.fwd(
ans[["variogram.object"]], object[["control"]][["param.tf"]],
object[["control"]][["fwd.tf"]]
)
param <- tmp[["transformed.param.aniso"]][tmp[["fit.param.aniso"]]]
param <- param[names(param) %in% names(se)]
param.tf <- tmp[["tf.param.aniso"]][names(param)]
se <- se[match( names(se), names(param))]
ci <- t( sapply(
1L:length(se),
function( i, m, se, signif ){
m[i] + c(-1., 1.) * se[i] * qnorm( (1.-signif)/2., lower.tail = FALSE )
}, m = param, se = se, signif = signif
))
colnames( ci ) <- c("Lower", "Upper")
rownames( ci ) <- names( se )
ci <- apply(
ci, 2L,
function( x, nme, bwd.tf, param.tf ){
sapply( nme,
function( x, bwd.tf, param.tf, param ) bwd.tf[[param.tf[x]]]( param[x] ),
bwd.tf = bwd.tf, param.tf = param.tf, param = x
)
}, nme = names(se), bwd.tf = object[["control"]][["bwd.tf"]],
param.tf = param.tf
)
if(is.vector(ci)) ci <- matrix(ci, nrow = 1)
colnames( ci ) <- c("Lower", "Upper")
rownames( ci ) <- names( se )
tmp <- strsplit( rownames(ci), ".__...__.", fixed = TRUE )
name.tmp <- rownames(ci) <- sapply( tmp, function(x) x[1L] )
cmp <- sapply( tmp, function(x) x[2L] )
ci <- tapply(
1L:NROW(ci), factor( cmp ),
function( i, ci ){
ci[i, , drop = FALSE]
}, ci = ci, simplify = FALSE
)
ans[["param.aniso"]] <- lapply(
1L:length(ans[["param.aniso"]]),
function( i, pa, ci ){
pa <- pa[[i]]
if( i <= length(ci) ){
ci <- ci[[i]]
} else {
ci <- matrix( rep( NA_real_, 2L * NROW( pa ) ), ncol = 2L )
rownames( ci ) <- rownames(pa)
}
pa <- cbind( pa, Lower = rep( NA_real_, NROW(pa) ),
Upper = rep( NA_real_, NROW(pa) ) )
i <- match( rownames( ci ), rownames( pa ) )
pa[i, 2L:3L] <- ci
pa
}, pa = ans[["param.aniso"]], ci = ci
)
} else {
warning(
"Hessian not positive definite:",
"\nconfidence intervals of variogram parameters cannot be computed"
)
}
}
}
ans[["se.residuals"]] <- if( !is.null( covmat[["cov.ehat"]] ) ){
if( is.matrix( covmat[["cov.ehat"]] ) ){
sqrt( diag( covmat[["cov.ehat"]] ) )
} else {
sqrt( covmat[["cov.ehat"]] )
}
} else NULL
class( ans ) <- c( "summary.georob" )
ans
}
print.summary.georob <- function (
x, digits = max(3, getOption("digits") - 3),
signif.stars = getOption("show.signif.stars"),
...
)
{
cat("\nCall:")
cat( paste( deparse(x[["call"]]), sep = "\n", collapse = "\n"), "\n", sep = "" )
cat("\nTuning constant: ", x[["tuning.psi"]], "\n" )
if( is.na( x[["converged"]] ) ){
cat( "\nEstimation with fixed variogram parameters\n" )
} else {
if(!(x[["converged"]])) {
cat(
"\nAlgorithm did not converge, diagnostic code: ",
x[["convergence.code"]], "\n"
)
} else {
cat(
"\nConvergence in", x[["iter"]][1L], "function and",
x[["iter"]][2L], "Jacobian/gradient evaluations\n"
)
}
attr( x[["gradient"]], "eeq.emp" ) <- NULL
attr( x[["gradient"]], "eeq.exp" ) <- NULL
cat( "\nEstimating equations (gradient)\n")
f.aux.print.gradient( x[["gradient"]],
reparam = x[["control"]][["reparam"]] &&
x[["tuning.psi"]] >= x[["control"]][["tuning.psi.nr"]]
)
}
if( x[["tuning.psi"]] >= x[["control"]][["tuning.psi.nr"]] ){
switch(
x[["control"]][["ml.method"]],
ML = cat( "\nMaximized log-likelihood:", x[["loglik"]], "\n" ),
REML = cat( "\nMaximized restricted log-likelihood:", x[["loglik"]], "\n" )
)
}
df <- x[["df.residual"]]
bhat <- x[["bhat"]]
cat( "\nPredicted latent variable (B):\n")
if(df > 5){
nam <- c("Min", "1Q", "Median", "3Q", "Max")
rq <- structure( quantile(bhat), names = nam )
print( rq, digits = digits, ...)
}
else print( bhat, digits = digits, ...)
resid <- residuals( x )
cat( "\nResiduals (epsilon):\n")
if(df > 5){
nam <- c("Min", "1Q", "Median", "3Q", "Max")
rq <- structure( quantile(resid), names = nam )
print( rq, digits = digits, ...)
}
else print( resid, digits = digits, ...)
if( !is.null( x[["se.residuals"]] ) ){
resid <- residuals( x, type = "working" ) / x[["se.residuals"]]
cat( "\nStandardized residuals:\n")
if(df > 5){
nam <- c("Min", "1Q", "Median", "3Q", "Max")
rq <- structure( quantile(resid), names = nam )
print( rq, digits = digits, ...)
}
else print( resid, digits = digits, ...)
}
cat(
if( x[["tuning.psi"]] >= x[["control"]][["tuning.psi.nr"]] ){
switch(
x[["control"]][["ml.method"]],
ML = "\n\nGaussian ML estimates\n",
REML ="\n\nGaussian REML estimates\n"
)
} else {
"\n\nRobust REML estimates\n"
}
)
lapply(
1L:length(x[["param.aniso"]]),
function(i, x, vo){
x <- x[[i]]
tmp <- x[ ,1L]
tmp <- tmp[!is.na(tmp) & tmp > 0. ]
t.digits <- -floor( log10( min( tmp ) ) )
cat( "\nVariogram: ", vo[[i]][["variogram.model"]], "\n" )
printCoefmat(
x, digits = max( digits, t.digits) , signif.stars = FALSE, ...
)
}, x = x[["param.aniso"]], vo = x[["variogram.object"]]
)
if( !is.null( x[["cor.tf.param"]] ) ){
correl <- x[["cor.tf.param"]]
p <- NCOL(correl)
if( p > 1L ){
cat("\nCorrelation of (transformed) variogram parameters:\n")
correl <- format(round(correl, 2L), nsmall = 2L,
digits = digits)
correl[!lower.tri(correl)] <- ""
print(correl[-1L, -p, drop = FALSE], quote = FALSE)
}
}
cat( "\n\nFixed effects coefficients:\n" )
printCoefmat(
x[["coefficients"]], digits = digits, signif.stars = signif.stars, ...
)
cat(
"\nResidual standard error (sqrt(nugget)):",
format(signif(x[["scale"]], digits)), "\n"
)
correl <- x[["correlation"]]
if( !is.null(correl) ){
p <- NCOL(correl)
if( p > 1L ){
cat("\nCorrelation of fixed effects coefficients:\n")
correl <- format(round(correl, 2L), nsmall = 2L,
digits = digits)
correl[!lower.tri(correl)] <- ""
print(correl[-1L, -p, drop = FALSE], quote = FALSE)
}
}
cat("\n")
summarizeRobWeights(x[["rweights"]], digits = digits, ... )
invisible( x )
}
vcov.georob <-
function( object, ... )
{
result <- expand( object[["cov"]][["cov.betahat"]] )
attr( result, "struc" ) <- NULL
result
}
waldtest.georob <-
function(
object, ..., vcov = NULL, test = c("F", "Chisq"), name = NULL
)
{
stopifnot(is.null(vcov) || is.function(vcov))
stopifnot(is.null(name) || is.function(name))
test <- match.arg( test )
cl <- object[["call"]]
cl <- f.call.set_allfitxxx_to_false( cl )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "hessian", FALSE )
cl <- f.call.set_x_to_value( cl, "verbose", 0. )
object[["call"]] <- cl
waldtest.default(
object = object, ..., vcov = vcov, test = test, name = name
)
}
logLik.georob <-
function( object, warn = TRUE, REML = FALSE, ... )
{
stopifnot(identical(length(warn), 1L) && is.logical(warn))
stopifnot(identical(length(REML), 1L) && is.logical(REML))
if(
object[["tuning.psi"]] >= object[["control"]][["tuning.psi.nr"]] &&
(
(identical( object[["control"]][["ml.method"]], "REML" ) && REML) ||
(identical( object[["control"]][["ml.method"]], "ML" ) && !REML)
)
){
val <- object[["loglik"]]
} else {
D <- deviance( object, warn = warn, REML = REML, ... )
if( REML ){
val <- -0.5 * (
D + attr( D, "log.det.covmat" ) + attr( D, "log.det.xticovmatx" ) +
(length( object[["residuals"]] ) - length( object[["coefficients"]]) ) * log( 2. * pi )
)
} else {
val <- -0.5 * (
D + attr( D, "log.det.covmat" ) + length( object[["residuals"]] ) * log( 2. * pi )
)
}
}
attr(val, "nall") <- length(object[["residuals"]])
attr(val, "nobs") <- object[["df.residual"]]
tmp <- unlist(lapply(
object[["variogram.object"]],
function(x) c( x[["fit.param"]], x[["fit.aniso"]] )
))
attr(val, "df") <- length(object[["coefficients"]]) + sum( tmp )
class(val) <- "logLik"
val
}
deviance.georob <-
function( object, warn = TRUE, REML = FALSE, ... )
{
stopifnot(identical(length(warn), 1L) && is.logical(warn))
stopifnot(identical(length(REML), 1L) && is.logical(REML))
if( identical( class( object[["na.action"]] ), "exclude" ) ){
class( object[["na.action"]] ) <- "omit"
}
if( object[["tuning.psi"]] < object[["control"]][["tuning.psi.nr"]] ){
if( warn ) warning(
"deviance for robustly fitted model approximated by deviance of\n",
" equivalent Gaussian model with heteroscedastic nugget"
)
w <- object[["rweights"]]
} else {
w <- 1.
}
Valphaxi.objects <- expand( object[["Valphaxi.objects"]] )
var.signal <- attr( f.reparam.fwd( object[["variogram.object"]] ), "var.signal" )
G <- var.signal * Valphaxi.objects[["Valphaxi"]]
G <- G[object[["Tmat"]], object[["Tmat"]]]
diag( G ) <- diag( G ) + object[["variogram.object"]][[1L]][["param"]]["nugget"] / w
iucf <- try(
backsolve(
chol( G ),
diag( length( object[["Tmat"]] ) ),
k = length( object[["Tmat"]] )
),
silent = TRUE
)
if( identical( class( iucf ), "try-error" ) ) {
stop( "(generalized) covariance matrix of observations not positive definite" )
} else {
result <- sum( colSums( residuals( object, level = 0L ) * iucf )^2 )
attr( result, "log.det.covmat" ) <- -2. * sum( log( diag( iucf ) ) )
if( REML ){
if( !is.null( object[["x"]] ) ){
XX <- object[["x"]]
} else {
if( is.null( object[["model"]] ) ) stop(
"'model' component missing in 'object', update 'object' with argument 'model=TRUE"
)
XX <- model.matrix( object[["terms"]], object[["model"]] )
}
tmp <- t( iucf ) %*% XX
tmp <- determinant( crossprod( tmp ) )
attr( result, "log.det.xticovmatx" ) <- tmp[["modulus"]] * tmp[["sign"]]
}
}
result
}
extractAIC.georob <- function( fit, scale = 0, k = 2, ... )
{
stopifnot(identical(length(k), 1L) && is.numeric(k) && k > 0)
tmp <- unlist(lapply(
fit[["variogram.object"]],
function(x) c( x[["fit.param"]], x[["fit.aniso"]] )
))
edf <- sum( !is.na( fit[["coefficients"]] ) ) + sum(tmp)
loglik <- logLik( fit, ... )
c(edf, -2. * loglik + k * edf)
}
safe_pchisq <- function(q, df, ...)
{
df[df <= 0] <- NA
pchisq(q=q, df=df, ...)
}
add1.georob <- function( object, scope, scale = 0, test=c("none", "Chisq"),
k = 2, trace = FALSE, fixed = TRUE, use.fitted.param = TRUE, verbose = 0,
ncores = 1, ... )
{
test <- match.arg(test)
stopifnot(identical(length(trace), 1L) && is.logical(trace))
stopifnot(identical(length(fixed), 1L) && is.logical(fixed))
stopifnot(identical(length(use.fitted.param), 1L) && is.logical(use.fitted.param))
stopifnot(identical(length(k), 1L) && is.numeric(k) && k > 0)
stopifnot(identical(length(verbose), 1L) && is.numeric(verbose) && verbose >= 0)
stopifnot(identical(length(ncores), 1L) && is.numeric(ncores) && ncores >= 1)
f.aux.add1.drop1 <- function( tt, change, scale, trace, ... ){
if(trace > 1.) {
cat( paste( "\ntrying ", change, sep="" ), tt, "\n", sep = "")
utils::flush.console()
}
nfit <- update(
object, as.formula(paste("~ .", change, tt)),
verbose = verbose, object. = object
)
converged <- TRUE
if( !is.na(!nfit[["converged"]]) && !nfit[["converged"]] ){
converged <- FALSE
warning( "there were errors: call function with argument 'verbose' > 1" )
if( verbose > 0. ) cat(
"lack of convergence when fitting model", paste("~ .", change, tt),
"\nconvergence code:", nfit$convergence.code, "\n"
)
}
nnew <- nobs( nfit, use.fallback = TRUE )
if( all(is.finite(c(n0, nnew))) && nnew != n0 ) stop(
"number of rows in use has changed: remove missing values?"
)
df.aic <- c( extractAIC( nfit, scale, k = k, REML = FALSE, ... ), as.numeric(converged))
names( df.aic ) <- c("df", "AIC", "converged")
df.aic
}
if( missing(scope ) || is.null( scope ) ) stop("no terms in scope")
if( !is.character( scope ) ) scope <- add.scope( object, update.formula(object, scope) )
if( !length(scope)) stop( "no terms in scope for adding to object" )
cl <- object[["call"]]
cl <- f.call.set_x_to_value( cl, "verbose", verbose )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "hessian", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat.betahat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat.p.bhat", FALSE )
object[["call"]] <- cl
if(
object[["tuning.psi"]] >= object[["control"]][["tuning.psi.nr"]] &&
identical( object[["control"]][["ml.method"]], "REML" )
){
warning( "'object' was estmated by Gaussian REML which cannot be used for ",
"adding/dropping single terms\n=> 'object' is re-fitted by Gaussian ML ",
"before adding/deleting terms"
)
cl <- object[["call"]]
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "ml.method", "ML" )
object[["call"]] <- cl
object <- update( object )
}
if( fixed || use.fitted.param ){
cl <- f.call.set_allxxx_to_fitted_values( object )
if( fixed ) cl <- f.call.set_allfitxxx_to_false( cl )
object[["call"]] <- cl
object <- update( object )
}
ns <- length( scope )
ans <- matrix(
nrow = ns + 1L, ncol = 3L,
dimnames = list(c("<none>", scope), c("df", "AIC", "converged"))
)
ans[1L, ] <- c(
extractAIC( object, scale, k = k, REML = FALSE, ... ),
object[["converged"]]
)
n0 <- nobs( object, use.fallback = TRUE )
env <- environment( formula(object) )
if( ncores > 1L ){
ncores <- min( ncores, ns, detectCores() )
trace <- 0
verbose <- 0.
}
if( is.null( object[["control"]][["pcmp"]][["fork"]] ) ){
object[["control"]][["pcmp"]][["fork"]] <- !identical( .Platform[["OS.type"]], "windows" )
}
if( ncores > 1L && !object[["control"]][["pcmp"]][["fork"]] ){
clstr <- makeCluster( ncores, type = "SOCK" )
save( clstr, file = "SOCKcluster.RData" )
options( error = f.stop.cluster )
junk <- clusterEvalQ( clstr, require( georob, quietly = TRUE ) )
junk <- clusterExport(
clstr, c( "object", "k", "n0", "verbose" ), envir = environment()
)
result <- parLapply(
clstr,
scope,
f.aux.add1.drop1,
change = "+", scale = scale, trace = trace,
...
)
f.stop.cluster( clstr )
} else {
result <- mclapply(
scope, f.aux.add1.drop1,
change = "+", scale = scale, trace = trace,
mc.cores = ncores,
mc.allow.recursive = object[["control"]][["pcmp"]][["allow.recursive"]],
...
)
}
result <- t( simplify2array( result ) )
ans[2L:NROW(ans), ] <- result
if(!all( as.logical(result[, "converged"]) ) ) warning(
"lack of convergence when fitting models"
)
dfs <- ans[, 1L] - ans[1L, 1L]
dfs[1L] <- NA
aod <- data.frame( Df = dfs, AIC = ans[, 2L], Converged = as.logical( ans[, 3L] ) )
if(test == "Chisq") {
dev <- ans[, 2L] - k*ans[, 1L]
dev <- dev[1L] - dev; dev[1L] <- NA
nas <- !is.na(dev)
P <- dev
P[nas] <- safe_pchisq(dev[nas], dfs[nas], lower.tail=FALSE)
aod[, c("LRT", "Pr(>Chi)")] <- list(dev, P)
aod <- aod[, c( "Df", "AIC", "LRT", "Pr(>Chi)", "Converged" )]
}
head <- c(
"Single term additions", "\nModel:", deparse(formula(object)),
if(scale > 0.) paste("\nscale: ", format(scale), "\n")
)
class(aod) <- c("anova", "data.frame")
attr( aod, "heading") <- head
aod
}
drop1.georob <- function( object, scope, scale = 0, test=c( "none", "Chisq" ),
k = 2, trace = FALSE, fixed = TRUE, use.fitted.param = TRUE, verbose = 0,
ncores = 1, ... )
{
test <- match.arg(test)
stopifnot(identical(length(trace), 1L) && is.logical(trace))
stopifnot(identical(length(fixed), 1L) && is.logical(fixed))
stopifnot(identical(length(use.fitted.param), 1L) && is.logical(use.fitted.param))
stopifnot(identical(length(k), 1L) && is.numeric(k) && k > 0)
stopifnot(identical(length(verbose), 1L) && is.numeric(verbose) && verbose >= 0)
stopifnot(identical(length(ncores), 1L) && is.numeric(ncores) && ncores >= 1)
f.aux.add1.drop1 <- function( tt, change, scale, trace, ... ){
if(trace > 1.) {
cat( paste( "\ntrying ", change, sep="" ), tt, "\n", sep = "")
utils::flush.console()
}
nfit <- update(
object, as.formula(paste("~ .", change, tt)),
verbose = verbose, object. = object
)
converged <- TRUE
if( !is.na(!nfit[["converged"]]) && !nfit[["converged"]] ){
converged <- FALSE
warning( "there were errors: call function with argument 'verbose' > 1" )
if( verbose > 0. ) cat(
"lack of convergence when fitting model", paste("~ .", change, tt),
"\nconvergence code:", nfit$convergence.code, "\n"
)
}
nnew <- nobs( nfit, use.fallback = TRUE )
if( all(is.finite(c(n0, nnew))) && nnew != n0 ) stop(
"number of rows in use has changed: remove missing values?"
)
df.aic <- c( extractAIC( nfit, scale, k = k, REML = FALSE, ... ), as.numeric(converged))
names( df.aic ) <- c("df", "AIC", "converged")
df.aic
}
cl <- object[["call"]]
cl <- f.call.set_x_to_value( cl, "verbose", verbose )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "hessian", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat.betahat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat.p.bhat", FALSE )
object[["call"]] <- cl
if(
object[["tuning.psi"]] >= object[["control"]][["tuning.psi.nr"]] &&
identical( object[["control"]][["ml.method"]], "REML" )
){
warning( "'object' was estmated by Gaussian REML which cannot be used for ",
"adding/dropping single terms\n=> 'object' is re-fitted by Gaussian ML ",
"before adding/deleting terms"
)
cl <- object[["call"]]
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "ml.method", "ML" )
object[["call"]] <- cl
object <- update( object )
}
if( fixed || use.fitted.param ){
cl <- f.call.set_allxxx_to_fitted_values( object )
if( fixed ) cl <- f.call.set_allfitxxx_to_false( cl )
object[["call"]] <- cl
object <- update( object )
}
tl <- attr( terms(object), "term.labels" )
if( missing(scope) ) {
scope <- drop.scope( object )
} else {
if( !is.character(scope) ) scope <- attr(
terms(update.formula(object, scope)), "term.labels"
)
if( !all( match(scope, tl, 0L) > 0L) ) stop("scope is not a subset of term labels")
}
ns <- length( scope )
ans <- matrix(
nrow = ns + 1L, ncol = 3L,
dimnames = list(c("<none>", scope), c("df", "AIC", "converged"))
)
ans[1L, ] <- c(
extractAIC( object, scale, k = k, REML = FALSE, ... ),
object[["converged"]]
)
n0 <- nobs( object, use.fallback = TRUE )
env <- environment( formula(object) )
if( ncores > 1L ){
ncores <- min( ncores, ns, detectCores() )
trace <- 0
verbose <- 0.
}
if( is.null( object[["control"]][["pcmp"]][["fork"]] ) ){
object[["control"]][["pcmp"]][["fork"]] <- !identical( .Platform[["OS.type"]], "windows" )
}
if( ncores > 1L && !object[["control"]][["pcmp"]][["fork"]] ){
clstr <- makeCluster( ncores, type = "SOCK" )
save( clstr, file = "SOCKcluster.RData" )
options( error = f.stop.cluster )
junk <- clusterEvalQ( clstr, require( georob, quietly = TRUE ) )
junk <- clusterExport(
clstr, c( "object", "k", "n0", "verbose" ), envir = environment()
)
result <- parLapply(
clstr,
scope,
f.aux.add1.drop1,
change = "-", scale = scale, trace = trace,
...
)
f.stop.cluster( clstr )
} else {
result <- mclapply(
scope, f.aux.add1.drop1,
change = "-", scale = scale, trace = trace,
mc.cores = ncores,
mc.allow.recursive = object[["control"]][["pcmp"]][["allow.recursive"]],
...
)
}
result <- t( simplify2array( result ) )
ans[2L:NROW(ans), ] <- result
if(!all( as.logical(result[, "converged"]) ) ) warning(
"lack of convergence when fitting models"
)
dfs <- ans[1L , 1L] - ans[, 1L]
dfs[1L] <- NA
aod <- data.frame( Df = dfs, AIC = ans[, 2L], Converged = as.logical( ans[, 3L] ) )
if(test == "Chisq") {
dev <- ans[, 2L] - k*ans[, 1L]
dev <- dev - dev[1L] ; dev[1L] <- NA
nas <- !is.na(dev)
P <- dev
P[nas] <- safe_pchisq(dev[nas], dfs[nas], lower.tail = FALSE)
aod[, c("LRT", "Pr(>Chi)")] <- list(dev, P)
aod <- aod[, c( "Df", "AIC", "LRT", "Pr(>Chi)", "Converged" )]
}
head <- c(
"Single term deletions", "\nModel:", deparse(formula(object)),
if(scale > 0.) paste("\nscale: ", format(scale), "\n"))
class(aod) <- c("anova", "data.frame")
attr( aod, "heading") <- head
aod
}
step <- function( object, ... ) UseMethod( "step" )
step.default <- stats::step
step.georob <- function( object, scope, scale = 0,
direction = c( "both", "backward", "forward" ), trace = 1, keep = NULL, steps = 1000,
k = 2, fixed.add1.drop1 = TRUE, fixed.step = fixed.add1.drop1,
use.fitted.param = TRUE, verbose = 0, ncores = 1, ... )
{
direction <- match.arg(direction)
stopifnot(identical(length(use.fitted.param), 1L) && is.logical(use.fitted.param))
stopifnot(identical(length(fixed.add1.drop1), 1L) && is.logical(fixed.add1.drop1))
stopifnot(identical(length(fixed.step), 1L) && is.logical(fixed.step))
stopifnot(identical(length(trace), 1L) && is.numeric(trace) && trace >= 0)
stopifnot(identical(length(k), 1L) && is.numeric(k) && k > 0)
stopifnot(identical(length(verbose), 1L) && is.numeric(verbose) && verbose >= 0)
stopifnot(identical(length(ncores), 1L) && is.numeric(ncores) && ncores >= 1)
stopifnot(identical(length(steps), 1L) && is.numeric(steps) && steps > 0)
mydeviance <- function(x, ...){
dev <- deviance(x, REML = FALSE, ...)
if(!is.null(dev)) dev else extractAIC(x, k=0., REML = FALSE, ...)[2L]
}
cut.string <- function(string){
if(length(string) > 1L)
string[-1L] <- paste0("\n", string[-1L])
string
}
re.arrange <- function(keep){
namr <- names(k1 <- keep[[1L]])
namc <- names(keep)
nc <- length(keep)
nr <- length(k1)
array(unlist(keep, recursive = FALSE), c(nr, nc), list(namr, namc))
}
step.results <- function(models, fit, object, usingCp=FALSE){
change <- sapply(models, "[[", "change")
rd <- sapply(models, "[[", "deviance")
dd <- c(NA, abs(diff(rd)))
rdf <- sapply(models, "[[", "df.resid")
ddf <- c(NA, diff(rdf))
AIC <- sapply(models, "[[", "AIC")
heading <- c("Stepwise Model Path \nAnalysis of Deviance Table",
"\nInitial Model:", deparse(formula(object)),
"\nFinal Model:", deparse(formula(fit)),
"\n")
aod <- data.frame(Step = I(change), Df = ddf, Deviance = dd,
"Resid. Df" = rdf, "Resid. Dev" = rd, AIC = AIC,
check.names = FALSE)
if(usingCp) {
cn <- colnames(aod)
cn[cn == "AIC"] <- "Cp"
colnames(aod) <- cn
}
attr(aod, "heading") <- heading
fit$anova <- aod
fit
}
Terms <- terms(object)
object$call$formula <- object$formula <- Terms
md <- missing(direction)
backward <- direction == "both" | direction == "backward"
forward <- direction == "both" | direction == "forward"
if(missing(scope)) {
fdrop <- numeric()
fadd <- attr(Terms, "factors")
if(md) forward <- FALSE
}
else {
if(is.list(scope)) {
fdrop <- if(!is.null(fdrop <- scope$lower))
attr(terms(update.formula(object, fdrop)), "factors")
else numeric()
fadd <- if(!is.null(fadd <- scope$upper))
attr(terms(update.formula(object, fadd)), "factors")
}
else {
fadd <- if(!is.null(fadd <- scope))
attr(terms(update.formula(object, scope)), "factors")
fdrop <- numeric()
}
}
models <- vector("list", steps)
if(!is.null(keep)) keep.list <- vector("list", steps)
n <- nobs(object, use.fallback = TRUE)
n.fitted.param.aniso <- sum(
unlist(
lapply(
object[["variogram.object"]],
function(x) c( x[["fit.param"]], x[["fit.aniso"]] )
)))
if( !fixed.add1.drop1 && fixed.step ){
cat( "\n'fixed.step' set equal to FALSE because fixed.add1.drop1 == FALSE\n\n" )
fixed.step <- FALSE
}
cl <- object[["call"]]
cl <- f.call.set_x_to_value( cl, "verbose", verbose )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "hessian", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.delta.bhat.betahat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat", FALSE )
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "cov.ehat.p.bhat", FALSE )
object[["call"]] <- cl
if(
object[["tuning.psi"]] >= object[["control"]][["tuning.psi.nr"]] &&
identical( object[["control"]][["ml.method"]], "REML" )
){
warning( "'object' was estmated by Gaussian REML which cannot be used for ",
"adding/dropping single terms\n=> 'object' is re-fitted by Gaussian ML ",
"before adding/deleting terms"
)
cl <- object[["call"]]
cl <- f.call.set_x_to_value_in_fun( cl, "control", "control.georob", "ml.method", "ML" )
object[["call"]] <- cl
object <- update( object )
}
if( fixed.step || use.fitted.param ){
cl <- f.call.set_allxxx_to_fitted_values( object )
if( fixed.step ) cl <- f.call.set_allfitxxx_to_false( cl )
object[["call"]] <- cl
object <- update( object )
}
fit <- object
bAIC <- extractAIC(fit, scale, k = k, REML = FALSE, ...)
edf <- bAIC[1L]
bAIC <- bAIC[2L]
if( fixed.add1.drop1 && !fixed.step ) bAIC <- bAIC - k * n.fitted.param.aniso
if(is.na(bAIC)) stop("AIC is not defined for this model, so 'step' cannot proceed")
if(bAIC == -Inf) stop("AIC is -infinity for this model, so 'step' cannot proceed")
nm <- 1L
if(trace) {
cat("Start: AIC=", format(round(bAIC, 2)), "\n",
cut.string(deparse(formula(fit))), "\n\n", sep = "")
utils::flush.console()
}
models[[nm]] <- list(deviance = mydeviance(fit, ...), df.resid = n - edf,
change = "", AIC = bAIC)
if(!is.null(keep)) keep.list[[nm]] <- keep(fit, bAIC)
usingCp <- FALSE
while(steps > 0L) {
steps <- steps - 1L
AIC <- bAIC
ffac <- attr(Terms, "factors")
scope <- factor.scope(ffac, list(add = fadd, drop = fdrop))
aod <- NULL
change <- NULL
if(backward && length(scope$drop)) {
aod <- drop1(
fit, scope$drop, scale = scale,
k = k, trace = trace >= 1, fixed = fixed.add1.drop1,
use.fitted.param = use.fitted.param, verbose = verbose,
ncores = ncores, ...
)
rn <- row.names(aod)
row.names(aod) <- c(rn[1L], paste("-", rn[-1L], sep=" "))
if(any(aod$Df == 0, na.rm=TRUE)) {
zdf <- aod$Df == 0 & !is.na(aod$Df)
change <- rev(rownames(aod)[zdf])[1L]
}
}
if(is.null(change)) {
if(forward && length(scope$add)) {
aodf <- add1(
fit, scope$add, scale = scale,
k = k, trace = trace >= 1, fixed = fixed.add1.drop1,
use.fitted.param = use.fitted.param, verbose = verbose,
ncores = ncores, ...
)
rn <- row.names(aodf)
row.names(aodf) <- c(rn[1L], paste("+", rn[-1L], sep=" "))
aod <-
if(is.null(aod)) aodf
else rbind(aod, aodf[-1L, , drop = FALSE])
}
attr(aod, "heading") <- NULL
nzdf <- if(!is.null(aod$Df))
aod$Df != 0 | is.na(aod$Df)
aod <- aod[nzdf, ]
if(is.null(aod) || NCOL(aod) == 0L) break
nc <- match(c("Cp", "AIC"), names(aod))
nc <- nc[!is.na(nc)][1L]
o <- order(aod[, nc])
if(trace) print(aod[o, ])
if(o[1L] == 1) break
change <- rownames(aod)[o[1L]]
}
usingCp <- match("Cp", names(aod), 0L) > 0L
fit <- update(
fit, paste("~ .", change), verbose = verbose, object. = fit
)
cl <- object[["call"]]
cl["formula"] <- fit[["call"]]["formula"]
fit[["call"]] <- cl
nnew <- nobs(fit, use.fallback = TRUE)
if(all(is.finite(c(n, nnew))) && nnew != n)
stop("number of rows in use has changed: remove missing values?")
Terms <- terms(fit)
bAIC <- extractAIC(fit, scale, k = k, REML = FALSE, ...)
edf <- bAIC[1L]
bAIC <- bAIC[2L]
if( fixed.add1.drop1 && !fixed.step ) bAIC <- bAIC - k * n.fitted.param.aniso
if(trace) {
cat("\nStep: AIC=", format(round(bAIC, 2)), "\n",
cut.string(deparse(formula(fit))), "\n\n", sep = "")
utils::flush.console()
}
if(bAIC >= AIC + 1.e-7) break
nm <- nm + 1L
models[[nm]] <-
list(deviance = mydeviance(fit, ...), df.resid = n - edf,
change = change, AIC = bAIC)
if(!is.null(keep)) keep.list[[nm]] <- keep(fit, bAIC)
}
if(!is.null(keep)) fit$keep <- re.arrange(keep.list[seq(nm)])
step.results(models = models[seq(nm)], fit, object, usingCp)
} |
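## Usage sketch (assumes 'fit' is a fitted georob object; by default the
## variogram parameters are held fixed during the search, cf. fixed.step):
## fit.step <- step(fit, scope = list(lower = ~ 1), direction = "both",
##                  trace = 1, ncores = 1)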
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
fig.width = 7,
fig.height = 5,
fig.align = "center"
)
library(dampack)
run_sick_sicker_model <- function(l_params, verbose = FALSE) {
with(as.list(l_params), {
v_names_states <- c("H", "S1", "S2", "D")
n_states <- length(v_names_states)
v_dw <- 1 / ((1 + r_disc) ^ (0:n_cycles))
v_state_cost <- c("H" = c_H, "S1" = c_S1, "S2" = c_S2, "D" = c_D)
v_state_utility <- c("H" = u_H, "S1" = u_S1, "S2" = u_S2, "D" = u_D)
r_S1D <- hr_S1D * r_HD
r_S2D <- hr_S2D * r_HD
p_S1D <- 1 - exp(-r_S1D)
p_S2D <- 1 - exp(-r_S2D)
p_HD <- 1 - exp(-r_HD)
m_P <- matrix(0,
nrow = n_states, ncol = n_states,
dimnames = list(v_names_states, v_names_states))
m_P["H", "H"] <- (1 - p_HD) * (1 - p_HS1)
m_P["H", "S1"] <- (1 - p_HD) * p_HS1
m_P["H", "D"] <- p_HD
m_P["S1", "H"] <- (1 - p_S1D) * p_S1H
m_P["S1", "S1"] <- (1 - p_S1D) * (1 - (p_S1H + p_S1S2))
m_P["S1", "S2"] <- (1 - p_S1D) * p_S1S2
m_P["S1", "D"] <- p_S1D
m_P["S2", "S2"] <- 1 - p_S2D
m_P["S2", "D"] <- p_S2D
m_P["D", "D"] <- 1
if(!all(m_P <= 1 & m_P >= 0)){
stop("This is not a valid transition matrix (entries are not between 0 and 1)")
} else
if (!isTRUE(all.equal(as.numeric(rowSums(m_P)), rep(1, n_states)))){
stop("This is not a valid transition matrix (rows do not sum to 1)")
}
m_Trace <- matrix(NA, nrow = n_cycles + 1 ,
ncol = n_states,
dimnames = list(0:n_cycles, v_names_states))
v_C <- v_Q <- numeric(length = n_cycles + 1)
m_Trace[1, ] <- v_s_init
v_C[1] <- 0
v_Q[1] <- 0
for (t in 1:n_cycles){
m_Trace[t + 1, ] <- m_Trace[t, ] %*% m_P
v_C[t + 1] <- m_Trace[t + 1, ] %*% v_state_cost
v_Q[t + 1] <- m_Trace[t + 1, ] %*% v_state_utility
}
n_tot_cost <- t(v_C) %*% v_dw
n_tot_qaly <- t(v_Q) %*% v_dw
n_tot_ly <- t(m_Trace %*% c(1, 1, 1, 0)) %*% v_dw
out <- list(m_Trace = m_Trace,
m_P = m_P,
l_params,
n_tot_cost = n_tot_cost,
n_tot_qaly = n_tot_qaly,
n_tot_ly = n_tot_ly)
return(out)
}
)
}
simulate_strategies <- function(l_params, wtp = 100000){
with(as.list(l_params), {
v_names_strat <- c("No_Treatment", "Treatment_A", "Treatment_B")
n_strat <- length(v_names_strat)
u_S1_trtA <- u_trtA
c_S1_trtA <- c_S1 + c_trtA
c_S2_trtA <- c_S2 + c_trtA
r_S1S2_trtB <- -log(1 - p_S1S2) * hr_S1S2_trtB
p_S1S2_trtB <- 1 - exp(-r_S1S2_trtB)
c_S1_trtB <- c_S1 + c_trtB
c_S2_trtB <- c_S2 + c_trtB
df_ce <- data.frame(Strategy = v_names_strat,
Cost = numeric(n_strat),
QALY = numeric(n_strat),
LY = numeric(n_strat),
stringsAsFactors = FALSE)
for (i in 1:n_strat){
l_params_markov <- list(n_cycles = n_cycles, r_disc = r_disc, v_s_init = v_s_init,
c_H = c_H, c_S1 = c_S1, c_S2 = c_S2, c_D = c_D,
u_H = u_H, u_S1 = u_S1, u_S2 = u_S2, u_D = u_D,
r_HD = r_HD, hr_S1D = hr_S1D, hr_S2D = hr_S2D,
p_HS1 = p_HS1, p_S1H = p_S1H, p_S1S2 = p_S1S2)
if (v_names_strat[i] == "Treatment_A"){
l_params_markov$u_S1 <- u_S1_trtA
l_params_markov$c_S1 <- c_S1_trtA
l_params_markov$c_S2 <- c_S2_trtA
} else if(v_names_strat[i] == "Treatment_B"){
l_params_markov$p_S1S2 <- p_S1S2_trtB
l_params_markov$c_S1 <- c_S1_trtB
l_params_markov$c_S2 <- c_S2_trtB
}
l_result <- run_sick_sicker_model(l_params_markov)
df_ce[i, c("Cost", "QALY", "LY")] <- c(l_result$n_tot_cost,
l_result$n_tot_qaly,
l_result$n_tot_ly)
df_ce[i, "NMB"] <- l_result$n_tot_qaly * wtp - l_result$n_tot_cost
}
return(df_ce)
})
}
my_params <- c(
"p_HS1",
"p_S1H",
"p_S1S2",
"hr_S1",
"hr_S2",
"hr_S1S2_trtB",
"c_H",
"c_S1",
"c_S2",
"c_trtA",
"c_trtB",
"u_H",
"u_S1",
"u_S2",
"u_TrtA")
my_dists <- c(
"beta",
"beta",
"beta",
"log-normal",
"log-normal",
"log-normal",
"gamma",
"gamma",
"gamma",
"gamma",
"gamma",
"truncated-normal",
"truncated-normal",
"truncated-normal",
"truncated-normal")
my_parameterization_types <- c(
"a, b",
"a, b",
"a, b",
"mean, sd",
"mean, sd",
"mean, sd",
"shape, scale",
"shape, scale",
"shape, scale",
"shape, scale",
"shape, scale",
"mean, sd, ll, ul",
"mean, sd, ll, ul",
"mean, sd, ll, ul",
"mean, sd, ll, ul")
my_dists_params <- list(
c(7.5, 42.5),
c(12, 12),
c(15, 133),
c(3, 0.5),
c(10, 0.5),
c(0.6, .01),
c(44.5, 45),
c(178, 22.5),
c(900, 16.67),
c(576, 21),
c(676, 19),
c(1, 0.01, NA, 1),
c(0.75, 0.02, NA, 1),
c(0.5, 0.03, NA, 1),
c(0.95, 0.02, NA, 1))
my_psa_params <- gen_psa_samp(params = my_params,
dists = my_dists,
parameterization_types = my_parameterization_types,
dists_params = my_dists_params,
n = 100)
my_params_basecase <- list(p_HS1 = 0.15,
p_S1H = 0.5,
p_S1S2 = 0.105,
r_HD = 0.002,
hr_S1D = 3,
hr_S2D = 10,
hr_S1S2_trtB = 0.6,
c_H = 2000,
c_S1 = 4000,
c_S2 = 15000,
c_D = 0,
c_trtA = 12000,
c_trtB = 13000,
u_H = 1,
u_S1 = 0.75,
u_S2 = 0.5,
u_D = 0,
u_trtA = 0.95,
n_cycles = 75,
v_s_init = c(1, 0, 0, 0),
r_disc = 0.03)
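## Base-case sanity check (sketch): the Markov model defined above can be run
## directly on this list; the extra treatment entries are simply unused:
## l_base <- run_sick_sicker_model(my_params_basecase)
## c(cost = l_base$n_tot_cost, qaly = l_base$n_tot_qaly, ly = l_base$n_tot_ly)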
psa_output <- run_psa(psa_samp = my_psa_params,
params_basecase = my_params_basecase,
FUN = simulate_strategies,
outcomes = c("Cost", "QALY", "LY", "NMB"),
strategies = c("No_Treatment", "Treatment_A", "Treatment_B"),
progress = FALSE)
cea_psa <- make_psa_obj(cost = psa_output$Cost$other_outcome,
effect = psa_output$QALY$other_outcome,
parameters = psa_output$Cost$parameters,
strategies = psa_output$Cost$strategies,
currency = "$") |
tw_get_single <- function(id,
language = tidywikidatar::tw_get_language(),
cache = NULL,
overwrite_cache = FALSE,
read_cache = TRUE,
cache_connection = NULL,
disconnect_db = TRUE,
wait = 0) {
if (is.data.frame(id) == TRUE) {
id <- id$id
}
id <- tw_check_qid(id)
if (length(id) > 1) {
usethis::ui_stop("`tw_get_single()` requires `id` of length 1. Consider using `tw_get()`.")
}
if (isTRUE(tw_check_cache(cache))) {
db <- tw_connect_to_cache(
connection = cache_connection,
language = language,
cache = cache
)
}
if (tw_check_cache(cache) == TRUE & overwrite_cache == FALSE & read_cache == TRUE) {
db_result <- tw_get_cached_item(
id = id,
language = language,
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db
)
if (is.data.frame(db_result) & nrow(db_result) > 0) {
return(db_result %>%
tibble::as_tibble())
}
}
Sys.sleep(time = wait)
item <- tryCatch(WikidataR::get_item(id = id),
error = function(e) {
as.character(e[[1]])
}
)
if (is.character(item)) {
usethis::ui_oops(item)
output <- tidywikidatar::tw_empty_item
attr(output, "warning") <- item
return(output)
}
if (is.element(
el = "redirect",
set = item %>%
purrr::pluck(1) %>%
names()
)) {
id <- item %>%
purrr::pluck(1, "redirect")
return(
tw_get(
id = id,
language = language,
cache = cache,
overwrite_cache = overwrite_cache,
cache_connection = db,
disconnect_db = disconnect_db,
wait = wait
)
)
}
everything_df <- tw_extract_single(
w = item,
language = language
)
if (tw_check_cache(cache) == TRUE) {
tw_write_item_to_cache(
item_df = everything_df,
language = language,
cache = cache,
overwrite_cache = overwrite_cache,
cache_connection = db,
disconnect_db = disconnect_db
)
}
tw_disconnect_from_cache(
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db
)
everything_df
}
tw_get <- function(id,
language = tidywikidatar::tw_get_language(),
cache = NULL,
overwrite_cache = FALSE,
cache_connection = NULL,
disconnect_db = TRUE,
wait = 0) {
if (length(id) == 0) {
usethis::ui_stop("`tw_get()` requires `id` of length 1 or more.")
}
if (is.data.frame(id) == TRUE) {
id <- id$id
}
unique_id <- tw_check_qid(id)
db <- tw_connect_to_cache(
connection = cache_connection,
language = language,
cache = cache
)
if (length(unique_id) == 1) {
return(
dplyr::left_join(
x = tibble::tibble(id = id),
y = tw_get_single(
id = unique_id,
language = language,
cache = cache,
overwrite_cache = overwrite_cache,
cache_connection = db,
disconnect_db = disconnect_db,
wait = wait
),
by = "id"
)
)
} else if (length(unique_id) > 1) {
if (overwrite_cache == TRUE | tw_check_cache(cache) == FALSE) {
pb <- progress::progress_bar$new(total = length(unique_id))
item_df <- purrr::map_dfr(
.x = unique_id,
.f = function(x) {
pb$tick()
tw_get_single(
id = x,
language = language,
cache = cache,
overwrite_cache = overwrite_cache,
cache_connection = db,
disconnect_db = FALSE,
wait = wait
)
}
)
tw_disconnect_from_cache(
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db
)
return(
dplyr::left_join(
x = tibble::tibble(id = id),
y = item_df,
by = "id"
)
)
}
if (overwrite_cache == FALSE & tw_check_cache(cache) == TRUE) {
items_from_cache_df <- tw_get_cached_item(
id = unique_id,
language = language,
cache = cache,
cache_connection = db,
disconnect_db = FALSE
)
id_items_not_in_cache <- unique_id[!is.element(unique_id, items_from_cache_df$id)]
if (length(id_items_not_in_cache) == 0) {
tw_disconnect_from_cache(
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db
)
return(
dplyr::left_join(
x = tibble::tibble(id = id),
y = items_from_cache_df,
by = "id"
)
)
} else if (length(id_items_not_in_cache) > 0) {
pb <- progress::progress_bar$new(total = length(id_items_not_in_cache))
items_not_in_cache_df <- purrr::map_dfr(
.x = id_items_not_in_cache,
.f = function(x) {
pb$tick()
tw_get_single(
id = x,
language = language,
cache = cache,
overwrite_cache = overwrite_cache,
cache_connection = db,
read_cache = FALSE,
disconnect_db = FALSE,
wait = wait
)
}
)
tw_disconnect_from_cache(
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db
)
dplyr::left_join(
x = tibble::tibble(id = id),
y = dplyr::bind_rows(
items_from_cache_df,
items_not_in_cache_df
),
by = "id"
)
}
}
}
}
tw_reset_item_cache <- function(language = tidywikidatar::tw_get_language(),
cache = NULL,
cache_connection = NULL,
disconnect_db = TRUE,
ask = TRUE) {
db <- tw_connect_to_cache(
connection = cache_connection,
language = language,
cache = cache
)
table_name <- tw_get_cache_table_name(
type = "item",
language = language
)
if (pool::dbExistsTable(conn = db, name = table_name) == FALSE) {
} else if (isFALSE(ask)) {
pool::dbRemoveTable(conn = db, name = table_name)
usethis::ui_info(paste0("Item cache reset for language ", sQuote(language), " completed"))
} else if (usethis::ui_yeah(x = paste0("Are you sure you want to remove from cache the items table for language: ", sQuote(language), "?"))) {
pool::dbRemoveTable(conn = db, name = table_name)
usethis::ui_info(paste0("Items cache reset for language ", sQuote(language), " completed"))
}
tw_disconnect_from_cache(
cache = cache,
cache_connection = db,
disconnect_db = disconnect_db,
language = language
)
} |
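## Usage sketch (Q42 is Douglas Adams on Wikidata; cache = FALSE queries the
## live API without touching the local cache):
## tw_get(id = "Q42", language = "en", cache = FALSE)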
"bdparData" |
if(!startsWith(R.version$os, "darwin")) {
.install.macbinary <-
function(pkgs, lib, repos = getOption("repos"),
contriburl = contrib.url(repos, type="mac.binary"),
method, available = NULL, destdir = NULL,
dependencies = FALSE,
lock = getOption("install.lock", FALSE), quiet = FALSE,
...)
{}
} else {
.install.macbinary <-
function(pkgs, lib, repos = getOption("repos"),
contriburl = contrib.url(repos, type="mac.binary"),
method, available = NULL, destdir = NULL,
dependencies = FALSE,
lock = getOption("install.lock", FALSE), quiet = FALSE,
...)
{
untar <- function(what, where)
{
xcode <- system(paste0("tar zxf \"", path.expand(what), "\" -C \"",
path.expand(where), "\""), intern=FALSE)
if (xcode)
warning(gettextf("'tar' returned non-zero exit code %d", xcode),
domain = NA, call. = FALSE)
}
unpackPkg <- function(pkg, pkgname, lib, lock = FALSE)
{
tmpDir <- tempfile(, lib)
if (!dir.create(tmpDir))
stop(gettextf("unable to create temporary directory %s",
sQuote(tmpDir)),
domain = NA, call. = FALSE)
cDir <- getwd()
on.exit(setwd(cDir), add = TRUE)
res <- untar(pkg, tmpDir)
setwd(tmpDir)
if (!file.exists(file <- file.path(pkgname, "Meta", "package.rds")))
stop(gettextf("file %s is not a macOS binary package", sQuote(pkg)),
domain = NA, call. = FALSE)
desc <- readRDS(file)$DESCRIPTION
if (length(desc) < 1L)
stop(gettextf("file %s is not a macOS binary package", sQuote(pkg)),
domain = NA, call. = FALSE)
desc <- as.list(desc)
if (is.null(desc$Built))
stop(gettextf("file %s is not a macOS binary package", sQuote(pkg)),
domain = NA, call. = FALSE)
res <- tools::checkMD5sums(pkgname, file.path(tmpDir, pkgname))
if(!quiet && !is.na(res) && res) {
cat(gettextf("package %s successfully unpacked and MD5 sums checked\n",
sQuote(pkgname)))
flush.console()
}
instPath <- file.path(lib, pkgname)
if(identical(lock, "pkglock") || isTRUE(lock)) {
lockdir <- if(identical(lock, "pkglock"))
file.path(lib, paste0("00LOCK-", pkgname))
else file.path(lib, "00LOCK")
if (file.exists(lockdir)) {
stop(gettextf("ERROR: failed to lock directory %s for modifying\nTry removing %s",
sQuote(lib), sQuote(lockdir)), domain = NA)
}
dir.create(lockdir, recursive = TRUE)
if (!dir.exists(lockdir))
stop(gettextf("ERROR: failed to create lock directory %s",
sQuote(lockdir)), domain = NA)
if (file.exists(instPath)) {
file.copy(instPath, lockdir, recursive = TRUE)
on.exit({
if (restorePrevious) {
try(unlink(instPath, recursive = TRUE))
savedcopy <- file.path(lockdir, pkgname)
file.copy(savedcopy, lib, recursive = TRUE)
warning(gettextf("restored %s", sQuote(pkgname)),
domain = NA, call. = FALSE, immediate. = TRUE)
}
}, add=TRUE)
restorePrevious <- FALSE
}
on.exit(unlink(lockdir, recursive = TRUE), add=TRUE)
}
ret <- unlink(instPath, recursive=TRUE)
if (ret == 0L) {
ret <- file.rename(file.path(tmpDir, pkgname), instPath)
if(!ret) {
warning(gettextf("unable to move temporary installation %s to %s",
sQuote(file.path(tmpDir, pkgname)),
sQuote(instPath)),
domain = NA, call. = FALSE)
restorePrevious <- TRUE
}
} else
stop(gettextf("cannot remove prior installation of package %s",
sQuote(pkgname)), call. = FALSE, domain = NA)
setwd(cDir)
unlink(tmpDir, recursive=TRUE)
}
if(!length(pkgs)) return(invisible())
if(is.null(contriburl)) {
pkgnames <- basename(pkgs)
pkgnames <- sub("\\.tgz$", "", pkgnames)
pkgnames <- sub("\\.tar\\.gz$", "", pkgnames)
pkgnames <- sub("_.*$", "", pkgnames)
for(i in seq_along(pkgs)) {
if(is.na(pkgs[i])) next
unpackPkg(pkgs[i], pkgnames[i], lib, lock = lock)
}
return(invisible())
}
tmpd <- destdir
nonlocalcran <- length(grep("^file:", contriburl)) < length(contriburl)
if(is.null(destdir) && nonlocalcran) {
tmpd <- file.path(tempdir(), "downloaded_packages")
if (!file.exists(tmpd) && !dir.create(tmpd))
stop(gettextf("unable to create temporary directory %s",
sQuote(tmpd)),
domain = NA)
}
if(is.null(available))
available <- available.packages(contriburl = contriburl,
method = method, ...)
pkgs <- getDependencies(pkgs, dependencies, available, lib, binary = TRUE)
foundpkgs <- download.packages(pkgs, destdir = tmpd, available = available,
contriburl = contriburl, method = method,
type = "mac.binary", quiet = quiet, ...)
if(length(foundpkgs)) {
update <- unique(cbind(pkgs, lib))
colnames(update) <- c("Package", "LibPath")
for(lib in unique(update[,"LibPath"])) {
oklib <- lib==update[,"LibPath"]
for(p in update[oklib, "Package"])
{
okp <- p == foundpkgs[, 1L]
if(any(okp))
unpackPkg(foundpkgs[okp, 2L], foundpkgs[okp, 1L], lib,
lock = lock)
}
}
if(!quiet && !is.null(tmpd) && is.null(destdir))
cat("\n", gettextf("The downloaded binary packages are in\n\t%s", tmpd),
"\n", sep = "")
} else if(!is.null(tmpd) && is.null(destdir)) unlink(tmpd, recursive = TRUE)
invisible()
}
} |
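# Illustrative usage sketch (assumption: run on macOS, where the full
# implementation above is defined; elsewhere the stub is a no-op):
# .install.macbinary("ggplot2", lib = .libPaths()[1],
#                    repos = "https://cloud.r-project.org")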
setMethod("is.na", signature(x='Raster'),
function(x) {
if (nlayers(x) > 1) {
r <- brick(x, values=FALSE)
} else {
r <- raster(x)
}
if (canProcessInMemory(r, 3)) {
dataType(r) <- 'LOG1S'
return( setValues(r, is.na(getValues(x))) )
} else {
tr <- blockSize(x)
pb <- pbCreate(tr$n, label='is.na')
r <- writeStart(r, filename=rasterTmpFile(), datatype='LOG1S', format=.filetype(), overwrite=TRUE )
for (i in 1:tr$n) {
v <- is.na( getValuesBlock(x, row=tr$row[i], nrows=tr$nrows[i]) )
r <- writeValues(r, v, tr$row[i])
pbStep(pb, i)
}
r <- writeStop(r)
pbClose(pb)
return(r)
}
}
)
setMethod("is.nan", signature(x='Raster'),
function(x) {
if (nlayers(x) > 1) {
r <- brick(x, values=FALSE)
} else {
r <- raster(x)
}
if (canProcessInMemory(r, 3)) {
dataType(r) <- 'LOG1S'
return( setValues(r, is.nan(getValues(x))) )
} else {
tr <- blockSize(x)
		pb <- pbCreate(tr$n, label='is.nan')
r <- writeStart(r, filename=rasterTmpFile(), datatype='LOG1S', format=.filetype(), overwrite=TRUE )
for (i in 1:tr$n) {
v <- is.nan( getValuesBlock(x, row=tr$row[i], nrows=tr$nrows[i]) )
r <- writeValues(r, v, tr$row[i])
pbStep(pb, i)
}
r <- writeStop(r)
pbClose(pb)
return(r)
}
}
)
setMethod("is.finite", signature(x='Raster'),
function(x) {
if (nlayers(x) > 1) {
r <- brick(x, values=FALSE)
} else {
r <- raster(x)
}
if (canProcessInMemory(r, 3)) {
dataType(r) <- 'LOG1S'
return( setValues(r, is.finite(getValues(x))) )
} else {
tr <- blockSize(x)
		pb <- pbCreate(tr$n, label='is.finite')
r <- writeStart(r, filename=rasterTmpFile(), datatype='LOG1S', format=.filetype(), overwrite=TRUE )
for (i in 1:tr$n) {
v <- is.finite( getValuesBlock(x, row=tr$row[i], nrows=tr$nrows[i]) )
r <- writeValues(r, v, tr$row[i])
pbStep(pb, i)
}
r <- writeStop(r)
pbClose(pb)
return(r)
}
}
)
setMethod("is.infinite", signature(x='Raster'),
function(x) {
if (nlayers(x) > 1) {
r <- brick(x, values=FALSE)
} else {
r <- raster(x)
}
if (canProcessInMemory(r, 3)) {
dataType(r) <- 'LOG1S'
return( setValues(r, is.infinite(getValues(x))) )
} else {
tr <- blockSize(x)
		pb <- pbCreate(tr$n, label='is.infinite')
r <- writeStart(r, filename=rasterTmpFile(), datatype='LOG1S', format=.filetype(), overwrite=TRUE )
for (i in 1:tr$n) {
v <- is.infinite( getValuesBlock(x, row=tr$row[i], nrows=tr$nrows[i]) )
r <- writeValues(r, v, tr$row[i])
pbStep(pb, i)
}
r <- writeStop(r)
pbClose(pb)
return(r)
}
}
) |
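# Illustrative usage sketch for the four Raster methods above; each returns
# a LOG1S raster and falls back to block-wise processing when the object
# does not fit in memory (assumes the raster package is attached):
# r <- raster(nrows = 10, ncols = 10)
# values(r) <- c(NA, NaN, Inf, seq_len(97))
# is.na(r); is.nan(r); is.finite(r); is.infinite(r)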
generate_mexican_hat_function <- function(dimensions)
soo_function(name="Mexican hat",
id=sprintf("mexican-hat-%id", dimensions),
fun=function(x, ...) .Call(do_f_mexican_hat, x),
dimensions=dimensions,
lower_bounds=rep(-5, dimensions),
upper_bounds=rep(5, dimensions),
best_par=rep(0, dimensions),
best_value=-1)
class(generate_mexican_hat_function) <- c("soo_function_generator", "function")
attr(generate_mexican_hat_function, "id") <- "mexican-hat"
attr(generate_mexican_hat_function, "name") <- "Mexican hat test function" |
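# Illustrative usage sketch: instantiate a 2-D Mexican hat function and
# evaluate it at its optimum (assumes the compiled do_f_mexican_hat
# routine is available, as in the soobench package):
# f <- generate_mexican_hat_function(2)
# f(c(0, 0))  # should equal the best_value of -1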
getPValue <- function(X,
block_boundaries = NULL,
block_labels = NULL,
largeP = FALSE,
largeN = FALSE,
nruns = 5000,
p = 2) {
assertthat::assert_that(is.matrix(X),
msg = "X must be a 2D matrix, see ?getPValue for examples. Try as.matrix(X).")
if (all(X == 0 | X == 1)) {
col_sums <- colSums(X)
    if (any(col_sums == 0) || any(col_sums == dim(X)[1])) {
      warning("Binary X contains columns that are all zeros or all ones.")
    }
}
assertthat::assert_that(is.logical(largeP),
msg = "largeP must be either TRUE or FALSE.")
assertthat::assert_that(is.logical(largeN),
msg = "largeN must be either TRUE or FALSE.")
  assertthat::assert_that(is.numeric(nruns) && nruns >= 1 &&
                            isTRUE(all.equal(nruns, as.integer(nruns))),
                          msg = "nruns must be a positive integer.")
  assertthat::assert_that(largeP || !largeN,
                          msg = "Large N asymptotics without large P are not implemented. Set largeP = TRUE or largeN = FALSE.")
assertthat::assert_that(is.null(block_boundaries) || is.null(block_labels),
msg = "Block boundaries and block labels are specified simultaneously. Specify at most one.")
  if (is.null(block_boundaries) && is.null(block_labels)) {
    block_boundaries <- 1:dim(X)[2]
  }
  if (!is.null(block_boundaries) && is.null(block_labels)) {
assertthat::assert_that(max(block_boundaries) <= dim(X)[2],
msg = "Block boundaries exceed number of columns. Check block boundaries.")
assertthat::assert_that(all(block_boundaries == cummax(block_boundaries)),
msg = "Block boundaries not monotone increasing. Check block boundaries.")
assertthat::assert_that(!any(diff(block_boundaries) == 0),
msg = "Block boundaries have repeats. Check block boundaries.")
if (block_boundaries[1] != 1) {
block_boundaries <- c(1, block_boundaries)
}
}
if (!is.null(block_labels)) {
assertthat::assert_that(max(block_labels) <= dim(X)[2],
msg = "Number of blocks exceeds number of columns. Check block labels.")
assertthat::assert_that(max(block_labels) == length(unique(block_labels)),
msg = "Block labels are not from 1 to B. Please relabel blocks using this convention.")
if (max(block_labels) == 1) {
cat("All blocks are labeled 1, i.e., no independent sets of features detected, so samples are assumed exchangeable.\n")
return(1)
}
}
  # Each column is its own block exactly when the boundaries or labels run 1..P.
  one_col_blocks <- identical(block_boundaries, 1:dim(X)[2]) ||
    identical(block_labels, 1:dim(X)[2])
  if (largeP && largeN) {
    if (one_col_blocks) {
      return(indGaussian(X, p))
    }
    return(blockGaussian(X, block_boundaries, block_labels, p))
  }
  if (largeP && !largeN) {
    if (one_col_blocks) {
      return(indLargeP(X, p))
    }
    return(blockLargeP(X, block_boundaries, block_labels, p))
  }
  if (!largeP && !largeN) {
    return(blockPermute(X, block_boundaries, block_labels, nruns, p))
  }
}
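# Illustrative usage sketch for getPValue(): an exact permutation test
# (largeP = FALSE, largeN = FALSE) on a small binary matrix whose columns
# are treated as singleton blocks by default:
# set.seed(1)
# X <- matrix(rbinom(50 * 10, size = 1, prob = 0.5), nrow = 50, ncol = 10)
# getPValue(X, nruns = 1000)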
distDataPValue <- function(dist_list,
largeP = FALSE,
nruns = 1000) {
  assertthat::assert_that(is.list(dist_list),
                          msg = "Input is not a valid list of distance matrices.")
  assertthat::assert_that(all(sapply(dist_list, is.matrix)),
                          msg = "Not all elements of dist_list are matrices. Check the list provided.")
  assertthat::assert_that(length(unique(sapply(dist_list, function(x) dim(x)[1]))) == 1,
                          msg = "Not all matrices have the same dimension. Check the distance matrices provided.")
  assertthat::assert_that(all(sapply(dist_list, function(x) dim(x)[1] == dim(x)[2])),
                          msg = "Not all matrices are square. Check the distance matrices provided.")
  if (length(dist_list) < 50 && largeP) {
    stop("Too few independent distance matrices for the chi-square approximation to be valid; please set largeP = FALSE.")
  }
if (largeP) {
return(distDataLargeP(dist_list))
} else {
return(distDataPermute(dist_list, nruns))
}
} |
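# Illustrative usage sketch for distDataPValue(): a list of independent
# pairwise-distance matrices over the same samples (hypothetical data):
# n <- 20
# dist_list <- lapply(1:5, function(b) as.matrix(dist(rnorm(n))))
# distDataPValue(dist_list, largeP = FALSE, nruns = 500)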
mostFrequentGOs <- function(df, GOterm_field) {
  # For each unique GO term, count how many features it annotates and
  # collect those feature names as a semicolon-separated string.
  GO_list <- unique(df[, GOterm_field])
  x_freq <- lapply(seq_len(length(GO_list)), function(i) {
    x <- df$feature[which(df[, GOterm_field] == GO_list[[i]])]
    data.frame(GO = GO_list[[i]], freq = length(x),
               features = paste(x, collapse = ";"))
  })
  x_freq <- do.call(rbind, x_freq)
  # Order GO terms from most to least frequent.
  x_freq[order(x_freq$freq, decreasing = TRUE), ]
}
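# Illustrative usage sketch for mostFrequentGOs() on a hypothetical
# annotation table with a `feature` column and a GO-term column:
# df <- data.frame(feature = c("g1", "g2", "g3", "g1"),
#                  GO = c("GO:0008150", "GO:0008150", "GO:0003674", "GO:0003674"))
# mostFrequentGOs(df, GOterm_field = "GO")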