"se.indirect2" <-
function (a,sa,b,sb)
{
sqrt(a^2*sb^2 + b^2*sa^2)
} |
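# Hedged usage sketch (not part of the original source): se.indirect2() returns the
# delta-method (Sobel) standard error of an indirect effect a*b from the two path
# coefficients and their standard errors. The numbers below are made up for illustration.
a <- 0.40; sa <- 0.10   # hypothetical path X -> M and its SE
b <- 0.25; sb <- 0.08   # hypothetical path M -> Y and its SE
se.ab <- se.indirect2(a, sa, b, sb)
(a * b) / se.ab         # approximate z statistic for the indirect effect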
maxent.jar.name <- "maxent.jar"
maxent.jar.fun <- dismo::maxent
maxent.jar.errors <- function(occs, envs, bg, tune.args, partitions, algorithm,
partition.settings, other.settings,
categoricals, doClamp, clamp.directions) {
if(!("rm" %in% names(tune.args)) | !("fc" %in% names(tune.args))) {
stop("Maxent settings must include 'rm' (regularization multiplier) and 'fc' (feature class) settings. See ?tune.args for details.")
}else{
if(!is.numeric(tune.args[["rm"]])) {
stop("Please input numeric values for 'rm' settings for Maxent.")
}
all.fc <- unlist(sapply(1:5, function(x) apply(combn(c("L","Q","H","P","T"), x), 2, function(y) paste(y, collapse = ""))))
if(any(!tune.args[["fc"]] %in% all.fc)) {
stop("Please input accepted values for 'fc' settings for Maxent.")
}
}
if(is.null(getOption('dismo_rJavaLoaded'))) {
Sys.setenv(NOAWT=TRUE)
if ( requireNamespace('rJava') ) {
rJava::.jpackage('dismo')
options(dismo_rJavaLoaded=TRUE)
} else {
stop('rJava cannot be loaded.')
}
}
}
maxent.jar.msgs <- function(tune.args, other.settings) {
mxe <- rJava::.jnew("meversion")
v <- try(rJava::.jcall(mxe, "S", "meversion"))
msg <- paste0("maxent.jar v", v, " from dismo package v", packageVersion('dismo'))
return(msg)
}
maxent.jar.args <- function(occs.z, bg.z, tune.tbl.i, other.settings) {
out <- list()
out$x <- rbind(occs.z, bg.z)
out$p <- c(rep(1, nrow(occs.z)), rep(0, nrow(bg.z)))
out$args <- c("noremoveDuplicates", "noautofeature")
if(!grepl("L", tune.tbl.i$fc)) out$args <- c(out$args, "nolinear")
if(!grepl("Q", tune.tbl.i$fc)) out$args <- c(out$args, "noquadratic")
if(!grepl("H", tune.tbl.i$fc)) out$args <- c(out$args, "nohinge")
if(!grepl("P", tune.tbl.i$fc)) out$args <- c(out$args, "noproduct")
if(!grepl("T", tune.tbl.i$fc)) out$args <- c(out$args, "nothreshold") else out$args <- c(out$args, "threshold")
  out$args <- c(out$args, paste0("betamultiplier=", tune.tbl.i$rm))
out <- c(out, other.settings$other.args)
return(out)
}
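# Hedged usage sketch (not part of the original source): maxent.jar.args() assembles the
# argument list handed to dismo::maxent() for one tuning setting. The toy inputs below are
# hypothetical; occs.z and bg.z hold predictor values at occurrence and background points.
toy.occs  <- data.frame(bio1 = c(10, 12, 11))
toy.bg    <- data.frame(bio1 = c(8, 9, 14, 15, 13))
toy.tune  <- data.frame(fc = "LQ", rm = 1)
toy.other <- list(other.args = NULL)
maxent.jar.args(toy.occs, toy.bg, toy.tune, toy.other)
# $x stacks occurrences over background, $p marks presence (1) vs background (0), and
# $args turns off every feature class not named in fc and sets the betamultiplier.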
maxent.jar.predict <- function(mod, envs, tune.tbl.i, other.settings) {
output.format <- paste0("outputformat=", other.settings$pred.type)
pred <- dismo::predict(mod, envs, args = c(output.format, "doclamp=false"), na.rm = TRUE)
return(pred)
}
maxent.jar.ncoefs <- function(mod) {
lambdas <- mod@lambdas[1:(length(mod@lambdas)-4)]
countNonZeroParams <- function(x) if(strsplit(x, split=", ")[[1]][2] != '0.0') 1
np <- sum(unlist(sapply(lambdas, countNonZeroParams)))
return(np)
}
maxent.jar.varimp <- function(mod) {
res <- mod@results
pc <- res[grepl('contribution', rownames(res)),]
pi <- res[grepl('permutation', rownames(res)),]
varnames <- sapply(strsplit(names(pc), '.contribution'), function(x) x[1])
df <- data.frame(variable=varnames, percent.contribution=pc, permutation.importance=pi, row.names=NULL)
return(df)
}
enm.maxent.jar <- ENMdetails(name = maxent.jar.name, fun = maxent.jar.fun, errors = maxent.jar.errors,
msgs = maxent.jar.msgs, args = maxent.jar.args,
predict = maxent.jar.predict, ncoefs = maxent.jar.ncoefs, varimp = maxent.jar.varimp) |
"dist.neig" <- function (neig) {
if (!inherits(neig, "neig"))
stop("Object of class 'neig' expected")
res <- neig.util.LtoG(neig)
n <- nrow(res)
auxi1 <- res
auxi2 <- res
for (itour in 2:n) {
auxi2 <- auxi2 %*% auxi1
auxi2[res != 0] <- 0
diag(auxi2) <- 0
auxi2 <- (auxi2 > 0) * itour
if (sum(auxi2) == 0)
break
res <- res + auxi2
}
return(as.dist(res))
} |
CSWGetRecordById <- R6Class("CSWGetRecordById",
inherit = OWSHttpRequest,
private = list(
xmlElement = "GetRecordById",
xmlNamespacePrefix = "CSW",
defaultAttrs = list(
service = "CSW",
version = "2.0.2",
outputSchema= "http://www.opengis.net/cat/csw"
)
),
public = list(
Id = NA,
ElementSetName = "full",
initialize = function(capabilities, op, url, serviceVersion = "2.0.2",
                        user = NULL, pwd = NULL, token = NULL, headers = c(),
id, elementSetName = "full", logger = NULL, ...) {
self$Id = id
allowedElementSetNames <- c("full", "brief", "summary")
if(!(elementSetName %in% allowedElementSetNames)){
stop(sprintf("elementSetName value should be among following values: [%s]",
paste(allowedElementSetNames, collapse=",")))
}
self$ElementSetName = elementSetName
nsVersion <- ifelse(serviceVersion=="3.0.0", "3.0", serviceVersion)
private$xmlNamespacePrefix = paste(private$xmlNamespacePrefix, gsub("\\.", "_", nsVersion), sep="_")
super$initialize(element = private$xmlElement, namespacePrefix = private$xmlNamespacePrefix,
capabilities, op, "POST", url, request = "GetRecordById",
user = user, pwd = pwd, token = token, headers = headers,
contentType = "text/xml", mimeType = "text/xml",
logger = logger, ...)
self$attrs <- private$defaultAttrs
self$attrs$version = serviceVersion
self$attrs$outputSchema = paste(self$attrs$outputSchema, nsVersion, sep="/")
outputSchema <- list(...)$outputSchema
if(!is.null(outputSchema)){
self$attrs$outputSchema = outputSchema
}
self$execute()
ns <- getOWSNamespace(private$xmlNamespacePrefix)
outputSchema <- self$attrs$outputSchema
isoSchemas <- c("http://www.isotc211.org/2005/gmd","http://www.isotc211.org/2005/gfc")
if(outputSchema %in% isoSchemas){
xmltxt <- as(private$response, "character")
isMetadata <- regexpr("MD_Metadata", xmltxt)>0
isFeatureCatalogue <- regexpr("FC_FeatureCatalogue", xmltxt)>0
if(isMetadata && outputSchema == isoSchemas[2]){
outputSchema <- isoSchemas[1]
message(sprintf("Metadata detected! Switch to schema '%s'!", outputSchema))
}
if(isFeatureCatalogue && outputSchema == isoSchemas[1]){
outputSchema <- isoSchemas[2]
message(sprintf("FeatureCatalogue detected! Switch to schema '%s'!", outputSchema))
}
}
private$response <- switch(outputSchema,
"http://www.isotc211.org/2005/gmd" = {
out <- NULL
xmlObjs <- getNodeSet(private$response, "//ns:MD_Metadata", c(ns = outputSchema))
if(length(xmlObjs)>0){
xmlObj <- xmlObjs[[1]]
out <- geometa::ISOMetadata$new()
out$decode(xml = xmlObj)
}
out
},
"http://www.isotc211.org/2005/gfc" = {
out <- NULL
xmlObjs <- getNodeSet(private$response, "//ns:FC_FeatureCatalogue", c(ns = outputSchema))
if(length(xmlObjs)>0){
xmlObj <- xmlObjs[[1]]
out <- geometa::ISOFeatureCatalogue$new()
out$decode(xml = xmlObj)
}
out
},
"http://www.opengis.net/cat/csw/2.0.2" = {
out <- NULL
warnMsg <- sprintf("R Dublin Core binding not yet supported for '%s'", outputSchema)
      warning(warnMsg)
self$WARN(warnMsg)
self$WARN("Dublin Core returned as R list...")
recordsXML <- getNodeSet(private$response, "//csw:Record", unlist(ns$getDefinition()))
if(length(recordsXML)>0){
recordXML <- recordsXML[[1]]
children <- xmlChildren(recordXML)
out <- lapply(children, xmlValue)
names(out) <- names(children)
}
out
},
"http://www.opengis.net/cat/csw/3.0" = {
out <- NULL
warnMsg <- sprintf("R Dublin Core binding not yet supported for '%s'", outputSchema)
      warning(warnMsg)
self$WARN(warnMsg)
self$WARN("Dublin Core returned as R list...")
recordsXML <- getNodeSet(private$response, "//csw30:Record", unlist(ns$getDefinition()))
if(length(recordsXML)>0){
recordXML <- recordsXML[[1]]
children <- xmlChildren(recordXML)
out <- lapply(children, xmlValue)
names(out) <- names(children)
}
out
},
"http://www.w3.org/ns/dcat
warnings(sprintf("R binding not yet supported for '%s'", outputSchema))
private$response
}
)
}
)
) |
olsHALL.b <- function(y, x)
{
n <- length(x)
x <- cbind(1, y[-n], x[-n])
y <- y[-1]
p <- ncol(x)
ny <- NCOL(y)
tol <- 1e-07
n <- nrow(x)
z <- lm.fit(x, y)
z$coefficients
} |
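# Hedged usage sketch (not part of the original source): olsHALL.b() regresses y_t on an
# intercept, y_{t-1} and x_{t-1} by OLS and returns the three coefficients. The simulated
# series below is purely illustrative.
set.seed(1)
x <- rnorm(100)
y <- numeric(100)
for (t in 2:100) y[t] <- 0.5 * y[t - 1] + 0.3 * x[t - 1] + rnorm(1, sd = 0.1)
olsHALL.b(y, x)   # coefficients should be close to (0, 0.5, 0.3)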
context("print.dust")
test_that("printing to console succeeds with defaults",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit)
expect_silent(x)
})
test_that("printing to console succeeds with sprinkles",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit) %>%
sprinkle(rows = 2:4,
cols = 2:4,
bg = "black",
bold = TRUE,
border_collapse = "collapse",
border = c("left", "right"),
border_thickness = 2,
border_units = "px",
border_style = "solid",
border_color = "purple",
halign = "left",
height = 7,
height_units = "px",
fn = quote(value * -1),
font_color = "orchid",
font_size = 14,
font_size_units = "px",
italic = TRUE,
pad = 8,
round = 3,
rotate_degree = -45,
valign = "bottom",
width = 15,
width_units = "%")
expect_silent(x)
})
test_that("printing to console succeeds with sprinkles",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit) %>%
sprinkle(rows = 2:4,
cols = 2:4,
bg = "black",
bold = TRUE,
border_collapse = "collapse",
border = c("left", "right"),
border_thickness = 2,
border_units = "px",
border_style = "solid",
border_color = "purple",
halign = "left",
height = 7,
height_units = "px",
fn = quote(value * -1),
font_color = "orchid",
font_size = 14,
font_size_units = "px",
italic = TRUE,
pad = 8,
round = 3,
rotate_degree = -45,
valign = "bottom",
width = 15,
width_units = "%") %>%
sprinkle_print_method("markdown")
expect_silent(x)
})
test_that("printing to console succeeds with sprinkles",
{
fit <- lm(mpg ~ qsec + factor(am) + wt + factor(gear), data = mtcars)
x <- dust(fit) %>%
sprinkle(rows = 2:4,
cols = 2:4,
bg = "black",
bold = TRUE,
border_collapse = "inherit",
border = c("left", "right"),
border_thickness = 2,
border_units = "px",
border_style = "solid",
border_color = "purple",
halign = "left",
height = 7,
height_units = "px",
fn = quote(value * -1),
font_color = "orchid",
font_size = 14,
font_size_units = "px",
italic = TRUE,
pad = 8,
round = 3,
rotate_degree = -45,
valign = "bottom",
width = 15,
width_units = "%") %>%
sprinkle_print_method("html")
expect_silent(x)
}) |
`weighted_richclub_tm` <-
function(net, NR=1000, seed=NULL, projection.method="Newman", nbins=30){
if(is.null(attributes(net)$tnet)) net <- as.tnet(net, type="binary two-mode tnet")
if(attributes(net)$tnet!="binary two-mode tnet") stop("Network not loaded properly")
if(!is.null(seed))
set.seed(as.integer(seed))
`phi` <- function(net){
output <- cbind(x=xlevels,num=NaN,den=NaN,y=NaN)
net <- cbind(net, rc.i=prominence[net[,1],"r"], rc.j=prominence[net[,2],"r"])
net <- cbind(net, rc=pmin.int(net[,"rc.i"],net[,"rc.j"]))
Er <- sapply(output[,"x"], function(a) which(net[,"rc"]>a))
output[,"num"] <- unlist(lapply(Er, function(a) sum(net[a,"w"])))
net <- net[order(-net[,"w"]),]
output[,"den"] <- unlist(lapply(Er, function(a) sum(net[1:length(a),"w"])))
output <- output[,"num"]/output[,"den"]
return(output)
}
dimnames(net)[[2]] <- c("i","p")
net.2mode <- net;
net.2mode.list <- split(net.2mode[,2], net.2mode[,1])
net.1mode <- projecting_tm(net.2mode, method=projection.method)
prominence <- unique(net.2mode[,1])
prominence <- cbind(node=prominence, r=unlist(lapply(net.2mode.list, length)))
tmp1 <- prominence[,2]
tmp1 <- tmp1[order(tmp1)]
tmp1 <- tmp1[1:(length(tmp1)-1)]
xlevels <- vector()
xlevels[1] <- tmp1[1]-0.00001
xlevels[nbins] <- tmp1[length(tmp1)]-0.00001
tmp2 <- (log(xlevels[nbins])-log(xlevels[1]))/(nbins-1)
for(i in 2:(nbins-1))
xlevels[i] <- exp(log(xlevels[i-1])+tmp2)
ophi <- data.frame(x=xlevels, y=phi(net.1mode))
rphi <- matrix(data=0, nrow=nrow(ophi), ncol=NR)
for(i in 1:NR) {
if(i/10 == round(i/10) )
cat(paste("Random network ", i, "/", NR, " @ ", date(), "\n", sep=""))
rdm.2mode <- rg_reshuffling_tm(net.2mode)
rdm.1mode <- projecting_tm(rdm.2mode)
rphi[,i] <- phi(rdm.1mode)
}
rho <- data.frame(x=ophi[,"x"], y=0, l99=0, l95=0, h95=0, h99=0)
rho[,"y"] <- ophi[,"y"]/rowMeans(rphi)
rho <- rho[!is.na(rho[,"y"]),]
if(NR>100) {
for(i in 1:nrow(rho)) {
rphi[i,] <- rphi[i,order(rphi[i,])]
rho[i,"l99"] <- rphi[i,round(NR/100*00.5)]/mean(rphi[i,])
rho[i,"l95"] <- rphi[i,round(NR/100*02.5)]/mean(rphi[i,])
rho[i,"h95"] <- rphi[i,round(NR/100*97.5)]/mean(rphi[i,])
rho[i,"h99"] <- rphi[i,round(NR/100*99.5)]/mean(rphi[i,])
}
}
return(rho)
} |
ap_adj <- function(data, ap = NULL){
AP = NULL
rm(list = c("AP"))
if(is.character(ap)){
if(toupper(ap) %in% colnames(data) == FALSE){
      warning('Could not find user-defined AP argument name in dataset. \ne.g., if the user improperly defines ap = "art_pres" but that column name does not exist in the dataset, \nthen there will be no matches for "art_pres". \nCheck spelling of AP argument.\n')
if(length(grep(paste("\\bAP\\b", sep = ""), names(data))) == 1){
stop('Fix user-defined argument name for AP. \nNote: A column in the dataset DOES match the name "AP": \nif this is the correct column, indicate as such in function argument. \ni.e. ap = "AP" \n ')
}
}else{
col_idx <- grep(paste("\\b",toupper(ap),"\\b", sep = ""), names(data) )
data <- data[, c(col_idx, (1:ncol(data))[-col_idx])]
if(colnames(data)[1] != "AP"){
colnames(data)[1] <- "AP"
data$AP <- as.numeric(data$AP)
}
}
} else {
stop('User-defined AP name must be character.\n')
}
return(data)
}
sbp_adj <- function(data, sbp = NULL, data_screen, SUL, SLL){
SBP = NULL
rm(list = c("SBP"))
if(is.character(sbp)){
if(toupper(sbp) %in% colnames(data) == FALSE){
      warning('Could not find user-defined SBP argument name in dataset. \ne.g., if the user improperly defines sbp = "syst" but that column name does not exist in the dataset, \nthen there will be no matches for "syst". \nCheck spelling of SBP argument.\n')
if(length(grep(paste("\\bSBP\\b", sep = ""), names(data))) == 1){
stop('Fix user-defined argument name for SBP. \nNote: A column in the dataset DOES match the name "SBP": \nif this is the correct column, indicate as such in function argument. \ni.e. sbp = "SBP" \n ')
}
}else{
col_idx <- grep(paste("\\b",toupper(sbp),"\\b", sep = ""), names(data) )
data <- data[, c(col_idx, (1:ncol(data))[-col_idx])]
if(colnames(data)[1] != "SBP"){
colnames(data)[1] <- "SBP"
data$SBP <- as.numeric(data$SBP)
}
if(data_screen == TRUE){
if(SLL > SUL){
stop('Systolic Lower Limit (SLL) cannot exceed Systolic Upper Limit (SUL) \nSLL > SUL is invalid.')
}
if( as.integer( dplyr::tally(data, SBP > SUL | SBP < SLL) ) > 0 ){
message(
paste( as.integer( dplyr::tally(data, SBP > SUL | SBP < SLL) ), ' values that exceeded the SUL or SLL thresholds were coerced to NA.', sep = "" )
)
data$SBP[which(data$SBP > SUL | data$SBP < SLL)] <- NA
}
}
}
} else {
stop('User-defined SBP name must be character.\n')
}
return(data)
}
dbp_adj <- function(data, dbp = NULL, data_screen, DUL, DLL){
DBP = NULL
rm(list = c("DBP"))
if(is.character(dbp)){
if(toupper(dbp) %in% colnames(data) == FALSE){
      warning('User-defined DBP name does not match column name of supplied dataset. \ne.g., if the user improperly defines dbp = "diast" but that column name does not exist in the dataset, \nthen there will be no matches for "diast". \nCheck spelling of DBP argument.\n')
if(length(grep(paste("\\bDBP\\b", sep = ""), names(data))) == 1){
        stop('Fix user-defined argument name for DBP. \nNote: A column in the dataset DOES match the name "DBP": \nif this is the correct column, indicate as such in function argument. \ni.e. dbp = "DBP" \n ')
}
}else{
col_idx <- grep(paste("\\b",toupper(dbp),"\\b", sep = ""), names(data) )
data <- data[, c(1, col_idx, (2:ncol(data))[-col_idx+1])]
if(colnames(data)[2] != "DBP"){
colnames(data)[2] <- "DBP"
data$DBP <- as.numeric(data$DBP)
}
if(data_screen == TRUE){
if(DLL > DUL){
stop('Diastolic Lower Limit (DLL) cannot exceed Diastolic Upper Limit (DUL) \ni.e. DLL > DUL is invalid.')
}
if( as.integer( dplyr::tally(data, DBP > DUL | DBP < DLL) ) > 0 ){
message(
paste( as.integer( dplyr::tally(data, DBP > DUL | DBP < DLL) ), ' values that exceeded the DUL or DLL thresholds were coerced to NA.', sep = "" )
)
data$DBP[which(data$DBP > DUL | data$DBP < DLL)] <- NA
}
}
}
} else {
stop('User-defined DBP name must be character.\n')
}
return(data)
}
pp_adj <- function(data, pp = NULL){
DBP = PP = PP_OLD = NULL
rm(list = c("DBP", "PP", "PP_OLD"))
if(!is.null(pp)){
if(!is.character(pp)){
stop('User-defined PP name must be character.\n')
}
if(toupper(pp) %in% colnames(data) == FALSE){
stop('User-defined PP name does not match column name of supplied dataset.\n')
}else{
col_idx <- grep(paste("\\b",toupper(pp),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "PP"
missing_calc <- which(is.na(data$SBP - data$DBP))
missing_pp <- which(is.na(data$PP))
if( (length(missing_pp) != length(missing_calc)) |
all(missing_pp %in% missing_calc) == FALSE |
all(missing_calc %in% missing_pp) == FALSE |
(sum( (data$PP)[-which(is.na(data$SBP - data$DBP) | is.na(data$PP))] -
(data$SBP - data$DBP)[-which(is.na(data$SBP - data$DBP) | is.na(data$PP))] ) != 0) ){
data$PP_OLD <- data$PP
data$PP <- data$SBP - data$DBP
}
}
}else{
if("PP" %in% colnames(data) == FALSE){
data$PP <- data$SBP - data$DBP
data$PP <- as.numeric(data$PP)
message('No PP column found or specified. Automatically generated from SBP and DBP columns.\n')
}else{
missing_calc <- which(is.na(data$SBP - data$DBP))
missing_pp <- which(is.na(data$PP))
if( (length(missing_pp) != length(missing_calc)) |
all(missing_pp %in% missing_calc) == FALSE |
all(missing_calc %in% missing_pp) == FALSE |
(sum( (data$PP)[-which(is.na(data$SBP - data$DBP) | is.na(data$PP))] -
(data$SBP - data$DBP)[-which(is.na(data$SBP - data$DBP) | is.na(data$PP))] ) != 0) ){
data$PP_OLD <- data$PP
data$PP <- data$SBP - data$DBP
}
}
}
data <- data %>% dplyr::relocate(PP, .after = DBP)
data$PP <- as.numeric(data$PP)
if("PP_OLD" %in% colnames(data)){
data <- data %>% dplyr::relocate(PP_OLD, .after = PP)
}
return(data)
}
hr_adj <- function(data, hr = NULL, data_screen, HRUL, HRLL){
HR = DBP = NULL
rm(list = c("HR", "DBP"))
if(is.null(hr)){
if(length(grep(paste("\\bHR\\b", sep = ""), names(data))) == 1){
warning('HR column found in data. \nIf this column corresponds to Heart Rate, \nuse hr = "HR" in the function argument.\n')
if(data_screen == TRUE){
if(HRLL > HRUL){
stop('Heart Rate Lower Limit (HRLL) cannot exceed Heart Rate Upper Limit (HRUL) \nHRLL > HRUL is invalid.')
}
if( as.integer( dplyr::tally(data, HR > HRUL | HR < HRLL) ) > 0 ){
message(
paste( as.integer( dplyr::tally(data, HR > HRUL | HR < HRLL) ), ' heart rate values that exceeded the HRUL or HRLL thresholds were coerced to NA.', sep = "" )
)
data$HR[which(data$HR > HRUL | data$HR < HRLL)] <- NA
}
}
data <- data %>% dplyr::relocate(HR, .after = DBP)
}
} else if(is.character(hr)){
if(toupper(hr) %in% colnames(data) == FALSE){
stop('User-defined HR name does not match column name of supplied dataset\n')
}else{
col_idx <- grep(paste("\\b",toupper(hr),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "HR"
data <- data %>% dplyr::relocate(HR, .after = DBP)
data$HR <- as.numeric(data$HR)
if(data_screen == TRUE){
if( as.integer( dplyr::tally(data, HR > HRUL | HR < HRLL) ) > 0 ){
message(
paste( as.integer( dplyr::tally(data, HR > HRUL | HR < HRLL) ), ' heart rate values exceeded the HRUL or HRLL thresholds and were coerced to NA.', sep = "" )
)
data$HR[which(data$HR > HRUL | data$HR < HRLL)] <- NA
}
}
}
} else {
stop('User-defined HR name must be character.\n')
}
return(data)
}
rpp_adj <- function(data, rpp = NULL){
DBP = RPP = RPP_OLD = PP = NULL
rm(list = c("DBP", "RPP", "RPP_OLD", "PP"))
if(!is.null(rpp)){
if(!is.character(rpp)){
stop('User-defined RPP name must be character.\n')
}
if(toupper(rpp) %in% colnames(data) == FALSE){
stop('User-defined RPP name does not match column name of supplied dataset.\n')
}else{
col_idx <- grep(paste("\\b",toupper(rpp),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "RPP"
if("HR" %in% colnames(data)){
missing_calc <- which(is.na(data$SBP * data$HR))
missing_rpp <- which(is.na(data$RPP))
if( (length(missing_rpp) != length(missing_calc)) |
all(missing_rpp %in% missing_calc) == FALSE |
all(missing_calc %in% missing_rpp) == FALSE |
(sum( (data$RPP)[-which(is.na(data$SBP * data$HR) | is.na(data$RPP))] -
(data$SBP * data$HR)[-which(is.na(data$SBP * data$HR) | is.na(data$RPP))] ) != 0) ){
data$RPP_OLD <- data$RPP
data$RPP <- data$SBP * data$HR
}
}else{
message('No HR column found to check RPP for accuracy.')
}
}
}else{
if("RPP" %in% colnames(data) == FALSE){
if("HR" %in% colnames(data)){
data$RPP <- data$SBP * data$HR
data$RPP <- as.numeric(data$RPP)
message('No RPP column found or specified. Automatically generated from SBP and HR columns.\n')
}
}else{
if("HR" %in% colnames(data)){
missing_calc <- which(is.na(data$SBP * data$HR))
missing_rpp <- which(is.na(data$RPP))
if( (length(missing_rpp) != length(missing_calc)) |
all(missing_rpp %in% missing_calc) == FALSE |
all(missing_calc %in% missing_rpp) == FALSE |
(sum( (data$RPP)[-which(is.na(data$SBP * data$HR) | is.na(data$RPP))] -
(data$SBP * data$HR)[-which(is.na(data$SBP * data$HR) | is.na(data$RPP))] ) != 0) ){
data$RPP_OLD <- data$RPP
data$RPP <- data$SBP * data$HR
}
}
}
}
if("RPP" %in% colnames(data)){
data <- data %>% dplyr::relocate(RPP, .before = PP)
data$RPP <- as.numeric(data$RPP)
if("RPP_OLD" %in% colnames(data)){
data <- data %>% dplyr::relocate(RPP_OLD, .after = RPP)
}
}
return(data)
}
map_adj <- function(data, map = NULL){
DBP = MAP = MAP_OLD = NULL
rm(list = c("DBP", "MAP", "MAP_OLD"))
if(!is.null(map)){
if(!is.character(map)){
stop('User-defined MAP name must be character.\n')
}
if(toupper(map) %in% colnames(data) == FALSE){
stop('User-defined MAP name does not match column name of supplied dataset.\n')
}else{
col_idx <- grep(paste("\\b",toupper(map),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "MAP"
missing_calc <- which(is.na((1/3) * data$SBP + (2/3) * data$DBP))
missing_map <- which(is.na(data$MAP))
if( (length(missing_map) != length(missing_calc)) |
all(missing_map %in% missing_calc) == FALSE |
all(missing_calc %in% missing_map) == FALSE |
(sum( (data$MAP)[-which(is.na((1/3) * data$SBP + (2/3) * data$DBP) | is.na(data$MAP))] -
((1/3) * data$SBP + (2/3) * data$DBP)[-which(is.na((1/3) * data$SBP + (2/3) * data$DBP) | is.na(data$MAP))] ) != 0) ){
data$MAP_OLD <- data$MAP
data$MAP <- (1/3) * data$SBP + (2/3) * data$DBP
}
}
}else{
if("MAP" %in% colnames(data) == FALSE){
data$MAP <- (1/3) * data$SBP + (2/3) * data$DBP
data$MAP <- as.numeric(data$MAP)
message('No MAP column found or specified. Automatically generated from SBP and DBP columns.\n')
}else{
missing_calc <- which(is.na((1/3) * data$SBP + (2/3) * data$DBP))
missing_map <- which(is.na(data$MAP))
if( (length(missing_map) != length(missing_calc)) |
all(missing_map %in% missing_calc) == FALSE |
all(missing_calc %in% missing_map) == FALSE |
(sum( (data$MAP)[-which(is.na((1/3) * data$SBP + (2/3) * data$DBP) | is.na(data$MAP))] -
((1/3) * data$SBP + (2/3) * data$DBP)[-which(is.na((1/3) * data$SBP + (2/3) * data$DBP) | is.na(data$MAP))] ) != 0) ){
data$MAP_OLD <- data$MAP
data$MAP <- (1/3) * data$SBP + (2/3) * data$DBP
}
}
}
data <- data %>% dplyr::relocate(MAP, .after = DBP)
data$MAP <- as.numeric(data$MAP)
if("MAP_OLD" %in% colnames(data)){
data <- data %>% dplyr::relocate(MAP_OLD, .after = MAP)
}
return(data)
}
wake_adj <- function(data, wake = NULL, bp_type){
WAKE = DBP = TIME_OF_DAY = NULL
rm(list = c("WAKE", "DBP", "TIME_OF_DAY"))
if(!is.null(wake)){
if(toupper(wake) %in% colnames(data) == FALSE){
stop('User-defined WAKE name does not match column name of supplied dataset.\n')
}
col_idx <- grep(paste("\\b",toupper(wake),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "WAKE"
if( any(is.na(data$WAKE)) == FALSE){
if(length(unique(data$WAKE)) > 2){
stop('Wake column must only contain 2 unique values corresponding to awake or asleep status. \nTypically, these are denoted as 1 for Awake and 0 for Asleep.\n')
}else{
data$WAKE <- as.integer(data$WAKE)
data <- data %>% dplyr::relocate(WAKE, .after = DBP)
}
}else{
num_NA <- length(data[ is.na(data$WAKE) == TRUE, ]$WAKE)
data[ is.na(data$WAKE) == TRUE, ]$WAKE <- dplyr::if_else( data[ is.na(data$WAKE) == TRUE, ]$TIME_OF_DAY == 'Night', 0, 1)
message( paste(num_NA, ' WAKE NA values were coerced to either 0 or 1 based on TIME_OF_DAY column.', sep = "") )
if(length(unique(data$WAKE)) > 2){
stop('Wake column must only contain 2 unique values corresponding to awake or asleep status. \nTypically, these are denoted as 1 for Awake and 0 for Asleep.\n')
}else{
data$WAKE <- as.integer(data$WAKE)
data <- data %>% dplyr::relocate(WAKE, .after = DBP)
}
}
data$WAKE <- as.factor(data$WAKE)
}else if (("TIME_OF_DAY" %in% colnames(data)) & (toupper(bp_type) == "ABPM")){
message("Absent wake column. Allocating night as sleep.")
data <- data %>%
dplyr::mutate(WAKE = ifelse(TIME_OF_DAY == "Night", 0, 1))
data <- data %>% dplyr::relocate(WAKE, .after = DBP)
data$WAKE <- as.factor(data$WAKE)
}
return(data)
}
visit_adj <- function(data, visit = NULL){
VISIT = DBP = NULL
rm(list = c("VISIT", "DBP"))
if(!is.null(visit)){
if(toupper(visit) %in% colnames(data) == FALSE){
stop('User-defined VISIT name does not match column name of supplied dataset.\n')
} else {
col_idx <- grep(paste("\\b",toupper(visit),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "VISIT"
data$VISIT <- as.integer(data$VISIT)
data <- data %>% dplyr::relocate(VISIT, .after = DBP)
}
data$VISIT <- as.factor(data$VISIT)
}
return(data)
}
ToD_int_check <- function(ToD_int){
if (!is.numeric(ToD_int)){
stop("ToD_int must be an integer vector of length 4.")
}
if(!is.vector(ToD_int)){
warning('ToD_int must be a vector, coerced input to vector.')
ToD_int <- as.vector(ToD_int)
}
if(length(ToD_int) != 4){
stop('ToD_int must be an integer vector of length 4.')
}
if (any(!(ToD_int %in% c(0:24)))){
stop('ToD_int must contain integer values corresponding to hours of the day from 0 to 23.')
}
if (any(ToD_int == 24)){
warning('One of the supplied hours is 24, which is treated as midnight and coerced to 0.')
ToD_int[ToD_int == 24] = 0
}
if( any( duplicated( ToD_int ) ) == TRUE ){
stop('Cannot have overlapping / duplicate values within the ToD interval.')
}
if (ToD_int[4] == 0){
ToD_int[4] = 24
}
if ( any(ToD_int != sort(ToD_int))){
warning('The supplied ToD_int hours are not in chronological order, and are automatically resorted.')
ToD_int = sort(ToD_int)
}
ToD_int
}
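# Hedged usage sketch (not part of the original source): ToD_int_check() validates the four
# cut points that split the day into Night / Morning / Afternoon / Evening, recoding a
# trailing 0 as midnight (24), mapping a supplied 24 back to 0, and sorting unordered hours.
ToD_int_check(c(6, 12, 18, 0))    # returns c(6, 12, 18, 24)
ToD_int_check(c(18, 6, 12, 24))   # warns twice, then returns c(6, 12, 18, 24)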
date_time_adj <- function(data, date_time = NULL, dt_fmt = "ymd HMS", ToD_int = NULL, chron_order = FALSE, tz = "UTC"){
TIME_OF_DAY = DATE = HOUR = DATE_TIME = ID = GROUP = YEAR = MONTH = DAY = SBP = DBP = NULL
rm(list = c("TIME_OF_DAY", "DATE", "HOUR", "DATE_TIME", "ID", "GROUP", "YEAR", "MONTH", "DAY", "SBP", "DBP"))
if(!is.null(date_time)){
if(toupper(date_time) %in% colnames(data) == FALSE){
stop('User-defined date_time name does not match column name within supplied dataset.\n')
}
col_idx <- grep(paste("\\b",toupper(date_time),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "DATE_TIME"
data <- data[, c(col_idx, (1:ncol(data))[-col_idx])]
data$DATE_TIME <- lubridate::parse_date_time(data$DATE_TIME, orders = dt_fmt, tz = tz)
data$YEAR <- lubridate::year(data$DATE_TIME)
data$MONTH <- lubridate::month(data$DATE_TIME)
data$DAY <- lubridate::day(data$DATE_TIME)
data$HOUR <- lubridate::hour(data$DATE_TIME)
grps = c("ID", "VISIT", "GROUP")
grps = grps[which(grps %in% colnames(data) == TRUE)]
if(chron_order == TRUE){
data <- data %>%
dplyr::group_by_at(dplyr::vars(grps) ) %>%
dplyr::arrange(DATE_TIME, .by_group = TRUE)
}else{
data <- data %>%
dplyr::group_by_at(dplyr::vars(grps) ) %>%
dplyr::arrange(dplyr::desc(DATE_TIME), .by_group = TRUE)
}
if(is.null(ToD_int)){
data <- data %>% dplyr::mutate(TIME_OF_DAY =
dplyr::case_when(HOUR >= 0 & HOUR < 6 ~ "Night",
HOUR >= 6 & HOUR < 12 ~ "Morning",
HOUR >= 12 & HOUR < 18 ~ "Afternoon",
                                             HOUR >= 18 & HOUR < 24 ~ "Evening"))
}else {
ToD_int = ToD_int_check(ToD_int)
data <- data %>% dplyr::mutate(TIME_OF_DAY =
dplyr::case_when(HOUR >= ToD_int[4] | HOUR < ToD_int[1] ~ "Night",
HOUR >= ToD_int[1] & HOUR < ToD_int[2] ~ "Morning",
HOUR >= ToD_int[2] & HOUR < ToD_int[3] ~ "Afternoon",
HOUR >= ToD_int[3] & HOUR < ToD_int[4] ~ "Evening"))
}
data$TIME_OF_DAY <- factor(data$TIME_OF_DAY, levels = c("Morning", "Afternoon", "Evening", "Night"))
data <- data %>% dplyr::relocate(ID, GROUP, DATE_TIME, YEAR, MONTH, DAY, HOUR, TIME_OF_DAY, SBP, DBP)
}
if("DATE" %in% colnames(data)){
if(inherits(data$DATE, "Date") == FALSE){
data$DATE <- as.Date( data$DATE )
warning("Original DATE column is not of the type as.Date. Coerced to proper format.")
}
data <- data %>% dplyr::relocate(DATE, .after = DBP)
if("DATE_TIME" %in% colnames(data)){
if( !all(data$DATE == as.Date(data$DATE_TIME)) ){
data$DATE_OLD <- data$DATE
data$DATE <- as.Date( data$DATE_TIME )
warning('User-supplied DATE column does not align with DATE_TIME values.\nCreated additional column DATE_OLD in place of DATE.\nMismatches between rows among DATE_OLD and DATE_TIME columns.\n')
}
data <- data %>% dplyr::relocate(DATE, .after = DATE_TIME)
}
}else if("DATE_TIME" %in% colnames(data)){
data$DATE <- as.Date( data$DATE_TIME )
data <- data %>% dplyr::relocate(DATE, .after = DATE_TIME)
message('DATE column created from DATE_TIME column.')
}
data <- as.data.frame(data)
return(data)
}
agg_adj <- function(data, bp_type, agg = TRUE, agg_thresh = 3, collap = FALSE, collapse_df = FALSE){
DATE_TIME = TIME_DIFF = collap2 = collap3 = DATE = HOUR = collap_fin = ID = GROUP = DAY_OF_WEEK = YEAR = MONTH = DAY = TIME_OF_DAY = SBP = DBP = date_first = date_time_first = NULL
rm(list = c('DATE_TIME', 'TIME_DIFF', 'collap2', 'collap3', 'DATE', 'HOUR', 'collap_fin', 'ID', 'GROUP', 'DAY_OF_WEEK', 'YEAR', 'MONTH', 'DAY', 'TIME_OF_DAY', 'SBP', 'DBP', 'date_first', 'date_time_first'))
if(!"DATE_TIME" %in% colnames(data)){
stop('Cannot aggregate data. No DATE_TIME column found. Make sure to specify in process_data function.')
}
if(bp_type == "AP"){
stop('The aggregation feature does not currently support Arterial Pressure (AP) data.')
}
grps = c("ID", "VISIT", "GROUP")
grps = grps[which(grps %in% colnames(data) == TRUE)]
inc_vars <- c("SBP", "DBP", "MAP", "RPP", "HR", "PP", "AP")
data <- data %>%
dplyr::group_by_at(dplyr::vars(grps) ) %>%
dplyr::mutate(TIME_DIFF = abs(DATE_TIME - dplyr::lead(DATE_TIME)) ) %>%
dplyr::relocate(TIME_DIFF, .after = DATE_TIME) %>%
dplyr::mutate(TIME_DIFF = ifelse(dplyr::row_number() == dplyr::n(), 0, TIME_DIFF) ) %>%
dplyr::mutate(collap = ifelse(TIME_DIFF < agg_thresh, 1, 0),
collap2 = ifelse(dplyr::lag(collap) == 1, 1, 0),
collap3 = ifelse(collap == 0 & collap2 == 1, 1, 0) ) %>%
dplyr::relocate(collap, collap2, collap3, .after = TIME_DIFF) %>%
dplyr::group_by(DATE, HOUR) %>%
dplyr::mutate(collap_fin = ifelse(collap == 1 | collap2 == 1 | collap3 == 1, 1, 0) ) %>%
dplyr::ungroup() %>%
dplyr::mutate(collap_fin = ifelse(collap_fin == 0, dplyr::row_number(), collap_fin) ) %>%
dplyr::group_by(DATE, HOUR, collap_fin) %>%
dplyr::mutate(agg = dplyr::cur_group_id() ) %>%
dplyr::relocate(collap_fin, agg, .after = collap3) %>%
dplyr::ungroup() %>%
dplyr::group_by(agg) %>%
dplyr::mutate(date_first = dplyr::first(DATE),
date_time_first = dplyr::first(DATE_TIME)) %>%
dplyr::select(-c("collap", "collap2", "collap3", "collap_fin")) %>%
dplyr::mutate(dplyr::across(!c(TIME_DIFF) & inc_vars[inc_vars %in% colnames(data)], mean)) %>%
dplyr::mutate(dplyr::across(!c(TIME_DIFF) & inc_vars[inc_vars %in% colnames(data)], as.integer)) %>%
dplyr::relocate(ID, GROUP, DATE_TIME, TIME_DIFF, DATE, DAY_OF_WEEK, YEAR, MONTH, DAY, HOUR, TIME_OF_DAY, SBP, DBP)
if(collapse_df == TRUE){
data <- data[which(data$DATE_TIME %in% data$date_time_first),]
}
data <- data %>%
dplyr::ungroup() %>%
    dplyr::select(-c(agg, date_first, date_time_first))
  return(data)
}
eod_adj <- function(data, eod){
DATE = DATE_TIME = NULL
rm(list = c("DATE", "DATE_TIME"))
if ("DATE_TIME" %in% colnames(data) == FALSE){
warning("The supplied eod argument is ignored as no DATE_TIME column is found.")
return(data)
}
if (!is.character(eod)){
    stop('eod must be a character (string) with four characters that represent 24-hour time format. \n\ni.e. 0130 implies 1:30 AM and 2230 implies 10:30 PM')
}
if (nchar(eod) != 4){
    stop('eod must be a character (string) with four characters that represent 24-hour time format. \n\ni.e. 0130 implies 1:30 AM and 2230 implies 10:30 PM')
}
hour_input = as.numeric(substr(eod, 1, 2))
min_input = as.numeric(substr(eod, 3, 4))
if(!(hour_input %in% c(0:23)) | !(min_input %in% c(0:59))){
stop('eod hour argument must be an integer between 0 and 23, eod minutes argument must be an integer between 0 and 59.')
}
data <- data %>%
dplyr::mutate(DATE = dplyr::case_when(
hour_input < 12 ~ {dplyr::case_when(
lubridate::hour(DATE_TIME) == hour_input & lubridate::minute(DATE_TIME) < min_input ~ as.Date( DATE_TIME - lubridate::days(1) ),
lubridate::hour(DATE_TIME) < hour_input ~ as.Date( DATE_TIME - lubridate::days(1) ),
TRUE ~ as.Date(DATE_TIME)
)},
hour_input >= 12 ~ {dplyr::case_when(
lubridate::hour(DATE_TIME) == hour_input & lubridate::minute(DATE_TIME) >= min_input ~ as.Date( DATE_TIME + lubridate::days(1) ),
lubridate::hour(DATE_TIME) > hour_input ~ as.Date( DATE_TIME + lubridate::days(1) ),
TRUE ~ as.Date(DATE_TIME)
)},
TRUE ~ as.Date(DATE_TIME)
)) %>%
dplyr::relocate(DATE, .after = DATE_TIME)
return(data)
}
dow_adj <- function(data, DoW = NULL){
DAY_OF_WEEK = DATE = DATE_TIME = NULL
rm(list = c("DAY_OF_WEEK", "DATE", "DATE_TIME"))
if(!is.null(DoW)){
if(toupper(DoW) %in% colnames(data) == FALSE){
stop('User-defined day of week column name, DoW, does not match column name within supplied dataset.\n')
}
col_idx <- grep(paste("\\b",toupper(DoW),"\\b", sep = ""), names(data))
colnames(data)[col_idx] <- "DAY_OF_WEEK"
if( !all( toupper(unique(data$DAY_OF_WEEK)) %in% toupper(c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))) ){
if( !("DATE_TIME" %in% colnames(data)) & !("DATE" %in% colnames(data)) ){
stop('Not all unique values from DoW column are valid. (i.e. "Tues" instead of "Tue").
\nNo DATE_TIME or DATE column found. Remove DoW argument and re-process dataset.')
}else{
warning('Not all unique values from DoW column are valid.
\nRenamed user-supplied DoW column to "DAY_OF_WEEK_OLD" and created new column from DATE/DATE_TIME column if available.')
if( !("DATE_TIME" %in% colnames(data)) ){
data$DAY_OF_WEEK_OLD <- data$DAY_OF_WEEK
data$DAY_OF_WEEK <- ordered(weekdays(as.Date(data$DATE), abbreviate = TRUE),
levels = c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))
}else{
data$DAY_OF_WEEK_OLD <- data$DAY_OF_WEEK
data$DAY_OF_WEEK = ordered(weekdays(as.Date(data$DATE_TIME), abbreviate = TRUE),
levels = c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))
}
}
}
data$DAY_OF_WEEK = ordered(data$DAY_OF_WEEK,
levels = c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))
}else{
if( "DATE" %in% colnames(data) ){
data$DAY_OF_WEEK <- ordered(weekdays(as.Date(data$DATE), abbreviate = TRUE),
levels = c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))
data <- data %>% dplyr::relocate(DAY_OF_WEEK, .after = DATE)
}else if( "DATE_TIME" %in% colnames(data) ){
data$DAY_OF_WEEK <- ordered(weekdays(as.Date(data$DATE_TIME), abbreviate = TRUE),
levels = c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"))
data <- data %>% dplyr::relocate(DAY_OF_WEEK, .after = DATE_TIME)
}
}
return(data)
}
time_adj <- function(data, time_elap = NULL){
TIME_ELAPSED = NULL
rm(list = c("TIME_ELAPSED"))
if(!is.null(time_elap)){
if(toupper(time_elap) %in% colnames(data) == FALSE){
stop('User-defined time_elap name does not match column name of supplied dataset.\n')
} else {
col_idx <- grep(paste("\\b",toupper(time_elap),"\\b", sep = ""), names(data) )
colnames(data)[col_idx] <- "TIME_ELAPSED"
data <- data[, c(col_idx, (1:ncol(data))[-col_idx])]
}
}
return(data)
}
group_adj <- function(data, group = NULL){
GROUP = ID = NULL
rm(list = c("GROUP", "ID"))
if(!is.null(group)){
if(toupper(group) %in% colnames(data) == FALSE){
stop('User-defined Group name does not match column name of supplied dataset.\n')
} else {
col_idx <- grep(paste("\\b",toupper(group),"\\b", sep = ""), names(data) )
colnames(data)[col_idx] <- "GROUP"
}
}else{
if(!("GROUP" %in% colnames(data))){
data <- data %>% dplyr::mutate(GROUP = 1)
}
}
data <- data %>% dplyr::relocate(GROUP, .after = ID)
data$GROUP <- as.factor(data$GROUP)
return(data)
}
id_adj <- function(data, id = NULL){
ID = NULL
rm(list = c("ID"))
if(!is.null(id)){
if(toupper(id) %in% colnames(data) == FALSE){
stop('User-defined ID name does not match column name of supplied dataset.\n')
} else {
col_idx <- grep(paste("\\b",toupper(id),"\\b", sep = ""), names(data) )
colnames(data)[col_idx] <- "ID"
}
}else{
if(!("ID" %in% colnames(data) )){
data <- data %>% dplyr::mutate(ID = 1)
}
}
data <- data %>% dplyr::relocate(ID)
data$ID <- as.factor(data$ID)
return(data)
} |
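# Hedged usage sketch (not part of the original source): id_adj() and group_adj() normalise
# the subject and grouping columns, creating constant ID / GROUP factor columns when none
# are supplied. dplyr must be attached for the pipe; the toy data are hypothetical.
library(dplyr)
toy <- data.frame(SBP = c(120, 118, 131), DBP = c(80, 76, 85))
toy <- id_adj(toy)      # adds ID = 1 and moves it to the first column
toy <- group_adj(toy)   # adds GROUP = 1 immediately after ID
str(toy)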
knitr::opts_chunk$set(echo=TRUE, comment=NA)
library(beezdemand)
knitr::kable(apt[c(1:8, 17:24), ])
wide <- tidyr::spread(apt, id, y)
knitr::kable(wide)
descr <- GetDescriptives(apt)
knitr::kable(descr)
knitr::kable(head(CheckUnsystematic(apt, deltaq = 0.025, bounce = 0.1, reversals = 0, ncons0 = 2), 5))
knitr::kable(head(GetEmpirical(apt), 5))
fc <- FitCurves(apt, "hs")
hs1 <- head(fc, 5)[ , 1:6]
hs2 <- head(fc, 5)[ , 7:11]
hs3 <- head(fc, 5)[ , 12:20]
hs4 <- head(fc, 5)[ , 21:24]
knitr::kable(hs1, caption = "Empirical Measures")
knitr::kable(hs2, caption = "Fitted Measures")
knitr::kable(hs3, caption = "Uncertainty and Model Information")
knitr::kable(hs4, caption = "Derived Measures")
fc <- FitCurves(apt, "koff")
koff1 <- head(fc, 5)[ , 1:6]
koff2 <- head(fc, 5)[ , 7:11]
koff3 <- head(fc, 5)[ , 12:20]
koff4 <- head(fc, 5)[ , 21:24]
knitr::kable(koff1, caption = "Empirical Measures")
knitr::kable(koff2, caption = "Fitted Measures")
knitr::kable(koff3, caption = "Uncertainty and Model Information")
knitr::kable(koff4, caption = "Derived Measures")
mn <- FitCurves(apt, "hs", agg = "Mean")
mn1 <- head(mn)[ , 1:6]
mn2 <- head(mn)[ , 7:11]
mn3 <- head(mn)[ , 12:20]
mn4 <- head(mn)[ , 21:24]
knitr::kable(mn1, caption = "Empirical Measures")
knitr::kable(mn2, caption = "Fitted Measures")
knitr::kable(mn3, caption = "Uncertainty and Model Information")
knitr::kable(mn4, caption = "Derived Measures")
pl <- FitCurves(apt, "hs", agg = "Pooled")
pl1 <- head(pl)[ , 1:6]
pl2 <- head(pl)[ , 7:11]
pl3 <- head(pl)[ , 12:20]
pl4 <- head(pl)[ , 21:24]
knitr::kable(pl1, caption = "Empirical Measures")
knitr::kable(pl2, caption = "Fitted Measures")
knitr::kable(pl3, caption = "Uncertainty and Model Information")
knitr::kable(pl4, caption = "Derived Measures")
df <- FitCurves(apt, "hs", k = "share")
knitr::kable(head(df, 5)[ , 1:6], caption = "Empirical Measures")
knitr::kable(head(df, 5)[ , 7:11], caption = "Fitted Measures")
knitr::kable(head(df, 5)[ , 12:20], caption = "Uncertainty and Model Information")
knitr::kable(head(df, 5)[ , 21:24], caption = "Derived Measures") |
linking.haberman <- function( itempars, personpars=NULL,
estimation="OLS", a_trim=Inf, b_trim=Inf, lts_prop=.5,
a_log=TRUE, conv=.00001, maxiter=1000, progress=TRUE,
adjust_main_effects=TRUE, vcov=TRUE)
{
CALL <- match.call()
s1 <- Sys.time()
res <- linking_proc_itempars(itempars=itempars)
itempars <- res$itempars
NS <- res$NS
NI <- res$NI
items <- res$items
studies <- res$studies
wgtM <- res$wgtM
aM <- res$aM
bM <- res$bM
est_pars <- res$est_pars
weights_exist <- res$weights_exist
a.orig <- aM
b.orig <- bM
if (a_log){
logaM <- log(aM)
} else {
logaM <- aM
}
est_type <- "A (slopes)"
resA <- linking_haberman_als(logaM=logaM, wgtM=wgtM, maxiter=maxiter, conv=conv,
progress=progress, est.type=est_type, cutoff=a_trim,
reference_value=1-a_log, adjust_main_effects=adjust_main_effects,
estimation=estimation, lts_prop=lts_prop, vcov=vcov)
if (a_log){
aj <- exp(resA$logaj)
At <- exp(resA$logaAt)
} else {
aj <- resA$logaj
At <- resA$logaAt
}
aj_resid <- resA$loga_resid
aj_wgt_adj <- resA$loga_wgt_adj
aj_wgtM <- resA$loga_wgt
aj_vcov <- resA$vcov
aj_se <- resA$se
aj_item_stat <- resA$item_stat
H1 <- diag2( At[-1] )
res <- linking_haberman_vcov_transformation( H1=H1, aj_vcov=aj_vcov )
aj_vcov <- res$vcov
aj_se <- c( NA, res$se )
est_type <- "B (intercepts)"
At_m <- sirt_matrix2( At, nrow=NI)
bMadj <- bM * At_m
resB <- linking_haberman_als(logaM=bMadj, wgtM=wgtM, maxiter=maxiter,
conv=conv, progress=progress, est.type=est_type,
cutoff=b_trim, reference_value=0, adjust_main_effects=adjust_main_effects,
estimation=estimation, lts_prop=lts_prop, vcov=vcov)
Bj <- resB$logaj
Bt <- resB$logaAt
Bj_resid <- resB$loga_resid
Bj_wgt_adj <- resB$loga_wgt_adj
Bj_wgtM <- resB$loga_wgt
Bj_vcov <- resB$vcov
Bj_se <- resB$se
Bj_item_stat <- resB$item_stat
transf.pars <- data.frame( study=studies, At=At, se_At=aj_se, Bt=Bt, se_Bt=Bj_se )
rownames(transf.pars) <- NULL
transf.itempars <- data.frame( study=studies, At=1/At, se_At=NA, At2=At,
se_At2=transf.pars$se_At, Bt=Bt, se_Bt=transf.pars$se_Bt )
rownames(transf.itempars) <- NULL
H1 <- diag2( - 1 / ( At[-1] )^2 )
res <- linking_haberman_vcov_transformation( H1=H1, aj_vcov=aj_vcov )
transf.itempars$se_At <- c( NA, res$se )
transf.personpars <- transf.itempars[,c("study","At","se_At2","Bt", "se_Bt")]
transf.personpars$At <- transf.pars$At
transf.personpars$Bt <- -transf.pars$Bt
colnames(transf.personpars) <- c("study", "A_theta",
"se_A_theta", "B_theta", "se_B_theta" )
colnames(transf.itempars) <- c("study", "A_a", "se_A_a",
"A_b", "se_A_b", "B_b", "se_B_b" )
joint.itempars <- data.frame("item"=items, "aj"=aj, "bj"=Bj )
AtM <- sirt_matrix2( At, nrow=NI)
BtM <- sirt_matrix2( Bt, nrow=NI)
aM <- aM / AtM
bM <- bM * AtM - BtM
if ( ! is.null( personpars) ){
for (ll in 1:NS){
pp0 <- pp1 <- personpars[[ll]]
pp1 <- transf.personpars$A_theta[ll] * pp1 + transf.personpars$B_theta[ll]
ind <- which( substring( colnames(pp0),1,2) %in% c("se", "SE") )
if ( length(ind) > 0 ){
pp1[,ind] <- transf.personpars$A_theta[ll] * pp0[,ind]
}
ind <- which( substring( colnames(pp0),1,3) %in% c("pid") )
if ( length(ind) > 0 ){
pp1[,ind] <- pp0[,ind]
}
personpars[[ll]] <- pp1
}
}
selitems <- which( rowSums( 1 - is.na( a.orig ) ) > 1 )
Rsquared.partial.invariance <- Rsquared.invariance <- c(NA,NA)
names(Rsquared.invariance) <- c("slopes", "intercepts" )
names(Rsquared.partial.invariance) <- c("slopes", "intercepts" )
aj1 <- aj * AtM
a.res <- a.orig - aj1
Rsquared.invariance["slopes"] <- 1 - sirt_sum( a.res[ selitems,]^2 ) / sirt_sum( a.orig[ selitems, ]^2 )
Rsquared.partial.invariance["slopes"] <- 1 -
sirt_sum( a.res[ selitems,]^2 * aj_wgtM[selitems,] ) /
sirt_sum( a.orig[ selitems, ]^2 * aj_wgtM[selitems, ] )
bj1 <- 1 / AtM *( Bj + BtM )
b.res <- b.orig - bj1
Rsquared.partial.invariance["intercepts"] <- 1 -
sirt_sum( b.res[ selitems,]^2 * Bj_wgtM[selitems,] ) /
sirt_sum( b.orig[ selitems, ]^2 * Bj_wgtM[selitems,] )
Rsquared.invariance["intercepts"] <- 1 -
sirt_sum( b.res[ selitems,]^2 ) / sirt_sum( b.orig[ selitems, ]^2 )
es.invariance <- rbind( Rsquared.invariance,
sqrt( 1- Rsquared.invariance ) )
rownames(es.invariance) <- c("R2", "sqrtU2")
es.partial.invariance <- rbind( Rsquared.partial.invariance,
sqrt( 1- Rsquared.partial.invariance ) )
rownames(es.partial.invariance) <- c("R2", "sqrtU2")
linking_slopes <- stats::sd( transf.pars$At ) < 1E-10
s2 <- Sys.time()
time <- list(s1=s1, s2=s2)
res <- list( transf.pars=transf.pars, transf.itempars=transf.itempars,
transf.personpars=transf.personpars, joint.itempars=joint.itempars,
a.trans=aM, b.trans=bM, a.orig=a.orig, b.orig=b.orig,
a.resid=aj_resid, b.resid=Bj_resid, personpars=personpars,
es.invariance=es.invariance, es.robust=es.partial.invariance,
selitems=selitems, a_trim=a_trim, b_trim=b_trim,
a.wgt=aj_wgtM, b.wgt=Bj_wgtM, a.wgt.adj=aj_wgt_adj, b.wgt.adj=Bj_wgt_adj,
a.vcov=aj_vcov, b.vcov=Bj_vcov, a.item_stat=aj_item_stat,
b.item_stat=Bj_item_stat, linking_slopes=linking_slopes,
description='Linking according to Haberman (2009)',
res_opt_slopes=resA, res_opt_intercepts=resB, CALL=CALL, time=time )
class(res) <- "linking.haberman"
return(res)
} |
library(fda)
data(smoothed_arctic)
NBASIS <- 300
NORDER <- 4
y <- t(as.matrix(smoothed_arctic[, -1]))
splinebasis <- create.bspline.basis(rangeval = c(1, 365),
nbasis = NBASIS,
norder = NORDER)
fdParobj <- fdPar(fdobj = splinebasis,
Lfdobj = 2,
lambda = .000001)
yfd <- smooth.basis(argvals = 1:365, y = y, fdParobj = fdParobj)
Jan <- c(1, 31); Feb <- c(31, 59); Mar <- c(59, 90)
Apr <- c(90, 120); May <- c(120, 151); Jun <- c(151, 181)
Jul <- c(181, 212); Aug <- c(212, 243); Sep <- c(243, 273)
Oct <- c(273, 304); Nov <- c(304, 334); Dec <- c(334, 365)
intervals <-
rbind(Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec)
test_that("calling puls wrongly creates correct errors", {
testthat::expect_error({
PULS()
},
"\"toclust.fd\" must be a functional data object (fda::is.fd).",
fixed = TRUE)
testthat::expect_error({
PULS("hello")
},
"\"toclust.fd\" must be a functional data object (fda::is.fd).",
fixed = TRUE)
testthat::expect_error({
PULS(yfd$fd, minbucket = 5, minsplit = 2)
},
"\"minbucket\" must be less than \"minsplit\".",
fixed = TRUE)
})
test_that("calling puls correctly in a popular case", {
skip_on_cran()
testthat::expect_s3_class({
PULS4_pam <- PULS(toclust.fd = yfd$fd, intervals = intervals,
nclusters = 4, method = "pam")
},
"PULS")
})
test_that("calling puls when intervals don't have names", {
skip_on_cran()
testthat::expect_output({
rownames(intervals) <- NULL
PULS4_pam <- PULS(toclust.fd = yfd$fd, intervals = intervals,
nclusters = 4, method = "pam")
print(PULS4_pam)
},
"2) 7 15 885.3640 0.8431711")
})
test_that("calling puls with ward method", {
skip_on_cran()
testthat::expect_output({
PULS4_pam <- PULS(toclust.fd = yfd$fd, intervals = intervals,
nclusters = 4, method = "ward")
print(PULS4_pam)
},
"2) Jul 15 885.3640 0.8431711 ")
}) |
getPotentialSires <- function(ids, minAge = 1, ped) {
ped <- ped[!is.na(ped$birth), ]
ped$id[ped$id %in% ids & ped$sex == "M" & getCurrentAge(ped$birth) >= minAge &
!is.na(ped$birth)]
} |
summary.drdid <- function(object, ...){
drdid.obj <- object
print(drdid.obj)
} |
ttt_human <- function(name = "no name")
{
getmove <- function(game, prompt = "choose move (e.g. A1) > ", ...)
{
while (TRUE)
{
ans <- readline(prompt)
return(ans)
}
}
self <- environment()
class(self) <- c("ttt_human", "ttt_player")
return(self)
}
print.ttt_human <- function(x, ...)
cat("Human Tic-Tac-Toe Player:", x$name, "\n") |
getTitle <- function(filename) {
splitName <- strsplit(x = filename, split = "\\.")
if(length(splitName[[1]])==2){
tableName <- splitName[[1]][1]
title <- paste0("NEON ", tableName, " data from multiple sites and/or months")
}
if(length(splitName[[1]]) > 2){
site <- splitName[[1]][3]
dpID <- substr(filename, 15, 27)
hor <- splitName[[1]][7]
ver <- splitName[[1]][8]
time <- paste(splitName[[1]][9], " minute resolution")
ym <- splitName[[1]][11]
ym <- as.Date(x = paste0(ym, "-01"), format = "%Y-%m-%d")
y <- format(ym, "%Y")
m <- format(ym, "%B")
uri <- paste0("http://data.neonscience.org/api/v0/products/", dpID)
data_info <- jsonlite::fromJSON(txt = uri)
dpName <- data_info$data$productName
title <- paste("NEON", dpName, "data from", site, m, y, "at horizontal level", hor, "and vertical position", ver, sep = " " )
}
return(title)
} |
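# Hedged usage sketch (not part of the original source): for a stacked file name containing a
# single ".", getTitle() builds a generic multi-site title; fully qualified NEON file names are
# parsed field by field and the product name is looked up from the NEON API. The file name
# below is hypothetical.
getTitle("brd_countdata.csv")
# "NEON brd_countdata data from multiple sites and/or months"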
source("common.R")
load("try.3.RData")
dbparms.psql <- list(driver="PostgreSQL" , user = "bety", dbname = "bety", password='bety', host='psql-pecan.bu.edu')
con.psql <- db.open(dbparms.psql)
bety.species.dt <- data.table(db.query("SELECT DISTINCT id as bety_species_id, scientificname as bety_species FROM species", con.psql))
bety.species.dt[, bety.species.lower := tolower(bety_species)]
db.close(con.psql)
try.species.unique <- try.dat[, unique(AccSpeciesName)]
try.species.dt <- data.table(try.species = try.species.unique,
try.species.lower = tolower(encodeString(try.species.unique)))
try.species.dt[, try.species.lower := gsub(" sp$", " spp.", try.species.lower)]
setkey(try.species.dt, try.species.lower)
setkey(bety.species.dt, bety.species.lower)
match.species.dt <- bety.species.dt[try.species.dt]
try.unmatched <- match.species.dt[is.na(bety_species_id)]
n.unmatched <- nrow(try.unmatched)
bety.index.match <- list()
message("Partial pattern match using grep...")
pb <- txtProgressBar(0, n.unmatched, style=3)
for(i in 1:n.unmatched){
match.ind <- grep(sprintf(".*%s.*", try.unmatched[i,bety.species.lower]),
bety.species.dt[,bety_species], ignore.case=TRUE, perl=TRUE)
if(length(match.ind) != 0){
bety.index.match[[i]] <- match.ind
}
setTxtProgressBar(pb, i)
}
bety.index.nmatches <- sapply(bety.index.match, length)
single.match <- which(bety.index.nmatches == 1)
single.match.inds <- unlist(bety.index.match[single.match])
setkey(match.species.dt, bety.species.lower)
match.species.dt[try.unmatched[single.match,bety.species.lower],
c("bety_species_id", "bety_species") :=
bety.species.dt[single.match.inds, list(bety_species_id,
bety.species.lower)]]
multiple.matches <- which(bety.index.nmatches > 1)
for(i in seq_along(multiple.matches)){
m <- multiple.matches[i]
try.sp <- try.unmatched[m, bety.species.lower]
bety.sp <- bety.species.dt[bety.index.match[[m]], bety.species.lower]
print(paste("TRY species:", try.sp))
print(paste("BETY species:", paste(seq_along(bety.sp), bety.sp, collapse="; ", sep=" ")))
user.choice <- readline("Select a species number (or enter 'n' for 'neither'):")
if(user.choice == 'n'){
next
} else {
user.choice <- as.numeric(user.choice)
bety.index <- bety.index.match[[m]][user.choice]
}
match.species.dt[try.unmatched[bety.index, bety.species.lower],
c("bety_species_id", "bety_species") :=
bety.species.dt[bety.index, list(bety_species_id, bety.species.lower)]]
}
try.unmatched.final <- match.species.dt[is.na(bety_species_id)]
sp.insert.query <- "INSERT INTO species(scientificname, notes) VALUES('%s', 'TRY_SPECIES') RETURNING id"
message("Looping over unmatched species and adding to BETY")
pb <- txtProgressBar(0, nrow(try.unmatched.final), style=3)
for(i in 1:nrow(try.unmatched.final)) {
sp <- try.unmatched.final[i, encodeString(try.species)]
sp <- fixquote(sp)
sp.bety.id <- db.query(sprintf(sp.insert.query, sp), con)$id
match.species.dt[try.species == sp, bety.species.id := as.character(sp.bety.id)]
setTxtProgressBar(pb, i)
}
setkey(match.species.dt, try.species)
setkey(try.dat, AccSpeciesName)
try.dat <- match.species.dt[try.dat]
try.dat[, try.species.lower := NULL]
setnames(try.dat, "try.species", "AccSpeciesName")
save(try.dat, file = "try.4.RData", compress=TRUE) |
named <- function(x) {
set_names(x, names2(x))
}
named_list <- function(...) {
named(list(...))
}
quos_list <- function(...) {
structure(named_list(...), class = c("quosures", "list"))
}
expect_error_ <- function(object, ...) {
expect_error(object, ...)
}
expect_warning_ <- function(object, ...) {
expect_warning(object, ...)
}
expect_identical_ <- function(object, expected, ...) {
expect_identical(object, expected, ...)
}
expect_equal_ <- function(object, expected, ...) {
expect_equal(object, expected, ...)
}
expect_no_warning <- function(object, ...) {
expect_warning(!!enquo(object), NA, ...)
}
expect_no_warning_ <- function(object, ...) {
expect_warning(object, NA, ...)
}
expect_no_error <- function(object, ...) {
expect_error(!!enquo(object), NA, ...)
}
expect_no_error_ <- function(object, ...) {
expect_error(object, NA, ...)
}
expect_null_ <- function(object, ...) {
expect_null(object, ...)
} |
do.nnp <- function(X, ndim=2, preprocess=c("null","center","scale","cscale","whiten","decorrelate")){
aux.typecheck(X)
n = nrow(X)
p = ncol(X)
ndim = as.integer(ndim)
  if (!check_ndim(ndim,p)){stop("* do.nnp : 'ndim' is a positive integer in [1,#(covariates)).")}
algpreprocess = match.arg(preprocess)
tmplist = aux.preprocess.hidden(X,type=algpreprocess,algtype="nonlinear")
trfinfo = tmplist$info
pX = tmplist$pX
D = as.matrix(dist(pX))
included = rep(FALSE, n)
pY = array(0,c(n,ndim))
initid = aux.findmaxidx(D)
initQ = as.integer(initid[1])
initR = as.integer(initid[2])
included[initid] = TRUE
pY[initQ,] = c(D[initQ,initR]/2, rep(0,(ndim-1)))
pY[initR,] = c(-D[initQ,initR]/2, rep(0,(ndim-1)))
toberun = setdiff(1:n, initid)
minfunc <- function(z,center1,center2,rad1,rad2){
return(abs(sqrt(sum((z-center1)^2))-rad1)+abs(sqrt(sum((z-center2)^2))-rad2))
}
Dctrl = RcppDE::DEoptim.control(trace=FALSE)
bdlower = rep(-D[initQ,initR], ndim)
bdupper = rep(D[initQ,initR], ndim)
for (i in 1:length(toberun)){
idx = toberun[i]
currentincluded = which(included)
if (length(currentincluded)==2){
idq = currentincluded[1]
idr = currentincluded[2]
} else {
currentdist = D[idx,currentincluded]
bottom2 = currentdist[order(currentdist)[1:3]]
idq = currentincluded[(currentdist==bottom2[2])]
idr = currentincluded[(currentdist==bottom2[3])]
if (length(idq)>1){
idq = as.integer(idq[1])
}
if (length(idr)>1){
idr = as.integer(idr[1])
}
}
dxq = as.double(D[idx,idq])
dxr = as.double(D[idx,idr])
dxsum = (dxq+dxr)
dqrlow = sqrt(sum(as.vector(pY[idq,]-pY[idr,])^2))
q = as.vector(pY[idq,])
r = as.vector(pY[idr,])
if (dxsum==dqrlow){
pY[idx,] = ((r-q)*dxq/(dxq+dxr))+q
} else if (dxsum>dqrlow){
runDEoptim = RcppDE::DEoptim(minfunc, lower=bdlower, upper=bdupper, control=Dctrl, center1=q, center2=r, rad1=dxq, rad2=dxr)
pY[idx,] = as.vector((runDEoptim$optim$bestmem))
} else {
if (dxq<dxr){
pY[idx,] = q+((r-q)*0.5)
} else {
pY[idx,] = r+((q-r)*0.5)
}
}
included[idx] = TRUE
}
result = list()
result$Y = pY
trfinfo$algtype = "nonlinear"
result$trfinfo = trfinfo
return(result)
} |
addBAMMshifts = function(ephy, index = 1, method = 'phylogram', cex=1, pch=21, col=1, bg=2, msp = NULL, shiftnodes = NULL, par.reset=TRUE) {
if (!inherits(ephy, 'bammdata')) stop("Object ephy must be of class bammdata");
lastPP <- get("last_plot.phylo", envir = .PlotPhyloEnv);
if (par.reset){
op <- par(no.readonly = TRUE);
par(lastPP$pp);
}
if (length(ephy$eventData) == 1){
index <- 1;
}
if (is.null(shiftnodes))
shiftnodes <- getShiftNodesFromIndex(ephy, index)
isShift <- ephy$eventData[[index]]$node %in% shiftnodes;
times <- ephy$eventData[[index]]$time[isShift];
if (!is.null(msp)) {
cex <- 0.75 + 5 * msp$edge.length[msp$edge[,2] %in% shiftnodes];
}
if (method == 'phylogram') {
XX <- times;
YY <- lastPP$yy[shiftnodes];
} else if (method == 'polar') {
rb <- lastPP$rb;
XX <- (rb+times/max(branching.times(as.phylo.bammdata(ephy)))) * cos(lastPP$theta[shiftnodes]);
YY <- (rb+times/max(branching.times(as.phylo.bammdata(ephy)))) * sin(lastPP$theta[shiftnodes]);
}
points(XX,YY,pch=pch,cex=cex,col=col,bg=bg);
if (par.reset) {
par(op);
}
} |
context("normalizeWeights")
test_that("normalizeWeights: use", {
skip_on_cran()
A <- rep(1,5)
v <- normalizeWeights(A)
expect_equal(
sum(v),1
)
expect_equal(v[1],0.2)
A <- c(A,1e6)
v <- normalizeWeights(A)
expect_equal(sum(v),1)
expect_equal(v[6],1,tolerance=1e-4)
}) |
magmap<-
function(data, locut=0, hicut=1, flip=FALSE, range=c(0,2/3), type='quan', stretch='lin', stretchscale=1, bad=NA, clip=''){
if(stretchscale=='auto'){
good=is.na(data)==FALSE & is.nan(data)==FALSE & is.infinite(data)==FALSE & is.null(data)==FALSE
if(length(which(good))==0){stop('There is no numeric data!')}
absdata=abs(data[good]-median(data[good],na.rm=TRUE))
stretchscale=1/median(absdata, na.rm=TRUE)
if(!is.finite(stretchscale)){stretchscale=1}
}
if(stretch=='log' | stretch=='sqrt'){
good=is.na(data)==FALSE & is.nan(data)==FALSE & is.infinite(data)==FALSE & is.null(data)==FALSE & data>0
if(length(which(good))==0){stop('There is no numeric data with a value greater than 0!')}
}else{
good=is.na(data)==FALSE & is.nan(data)==FALSE & is.infinite(data)==FALSE & is.null(data)==FALSE
if(length(which(good))==0){stop('There is no numeric data!')}
}
if(type=='quan'){
locut=quantile(data[good],locut)
hicut=quantile(data[good],hicut)
}else if(type=='num'){
locut=locut
hicut=hicut
}else if(type=='sig'){
locut=quantile(data[good],pnorm(locut))
hicut=quantile(data[good],pnorm(hicut))
}else if(type=='rank'){
locut=1
hicut=length(data[good])
data[good][order(data[good])]=locut:hicut
}else{
stop(type,' is not a valid type option!')
}
loreturn=locut
hireturn=hicut
if(stretch=='log' & locut<=0){stop('locut <= 0 and stretch=\'log\'- this is not allowed!')}
if(stretch=='log' & hicut<=0){stop('hicut <=0 and stretch=\'log\'- this is not allowed!')}
if(locut>hicut){stop('locut > hicut is not allowed')}
if(locut==hicut){data[good]=(range[2]+range[1])/2}
if(locut<hicut){
if(stretch=='lin'){
}else if(stretch=='log'){
locut=log10(locut)
hicut=log10(hicut)
data=suppressWarnings(log10(data))
}else if(stretch=='atan'){
locut=atan(locut*stretchscale)
hicut=atan(hicut*stretchscale)
data=atan(data*stretchscale)
}else if(stretch=='asinh'){
locut=asinh(locut*stretchscale)
hicut=asinh(hicut*stretchscale)
data=asinh(data*stretchscale)
}else if(stretch=='sqrt'){
locut=sqrt(locut)
hicut=sqrt(hicut)
data=suppressWarnings(sqrt(data))
}else if(stretch=='cdf'){
cdf=ecdf(data[good])
locut=cdf(locut)
hicut=cdf(hicut)
data[good]=cdf(data[good])
}else{
stop(stretch,' is not a valid stretch option!')
}
losel=data<locut & good; hisel=data>hicut & good
data[losel]=locut; data[hisel]=hicut
data[good]=data[good]-locut
data[good]=range[1]+(data[good]*(range[2]-range[1])/(hicut-locut))
if(flip){data[good]=range[2]-data[good]+range[1]}
if(clip=='NA'){data[losel]=NA;data[hisel]=NA}
}
data[! good]=bad
  return(list(map=data, datalim=c(loreturn,hireturn), maplim=range,
              loclip=length(which(data[good]==range[1]))/length(data[good]),
              hiclip=length(which(data[good]==range[2]))/length(data[good])))
} |
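# Illustrative usage (not part of the original source): map 1000 standard
# normal draws onto the default output range with a linear stretch, and again
# with an asinh stretch and automatic stretch scaling. Only base R is needed.
local({
  set.seed(42)
  m <- magmap(rnorm(1000))
  print(range(m$map))                 # stays within maplim = c(0, 2/3)
  m2 <- magmap(rnorm(1000), stretch = 'asinh', stretchscale = 'auto')
  print(m2$datalim)
})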
logRegLoglik <- function(beta, Y, Z){
  ## Log-likelihood of a logistic regression model with 0/1 responses Y,
  ## design matrix Z and coefficient vector beta:
  ##   sum_i [ y_i * (z_i' beta) - log(1 + exp(z_i' beta)) ]
  n <- length(Y)
  pro <- Z %*% beta
  L <- sum(pro * Y - log(1 + exp(pro)))
  return(list("L" = L))
}
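# Illustrative check (not part of the original source): on simulated 0/1 data
# the value of logRegLoglik() at the glm() estimate should agree with
# logLik(fit). The simulated design matrix Z carries an explicit intercept.
local({
  set.seed(1)
  Z <- cbind(1, rnorm(200))
  Y <- rbinom(200, 1, plogis(Z %*% c(-0.5, 1)))
  fit <- glm(Y ~ Z - 1, family = binomial())
  print(c(logRegLoglik = logRegLoglik(coef(fit), Y, Z)$L,
          glm = as.numeric(logLik(fit))))
})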
anova.ergmlist <- function(object, ..., eval.loglik = FALSE) {
objects <- list(object, ...)
responses <- as.character(lapply(objects, function(x) deparse(x$formula[[2]])))
sameresp <- responses == responses[1]
if (!all(sameresp)) {
objects <- objects[sameresp]
warning("Models with response ", deparse(responses[!sameresp]),
" removed because response differs from ", "model 1")
}
nmodels <- length(objects)
if (nmodels == 1)
return(anova.ergm(object))
logl <- df <- Rdf <- rep(0, nmodels)
logl.null <- if(is.null(objects[[1]][["null.lik"]])) 0 else objects[[1]][["null.lik"]]
for (i in 1:nmodels) {
logli <- logLik(objects[[i]], eval.loglik = eval.loglik)
n <- nobs(logli)
df[i] <- nparam(objects[[i]], offset = FALSE)
Rdf[i] <- n - df[i]
logl[i] <- logli
}
df <- c(0, df)
Rdf <- c(n, Rdf)
logl <- c(logl.null, logl)
pv <- pchisq(abs(2 * diff(logl)), abs(diff(df)), lower.tail = FALSE)
table <- data.frame(c(NA, -diff(Rdf)), c(NA, diff(2 * logl)),
Rdf, -2 * logl, c(NA, pv))
variables <- lapply(objects, function(x) paste(deparse(formula(x)),
collapse = "\n"))
colnames(table) <- c("Df","Deviance", "Resid. Df",
"Resid. Dev", "Pr(>|Chisq|)")
rownames(table) <- c("NULL", 1:nmodels)
title <- "Analysis of Variance Table\n"
topnote <- paste("Model ", format(1:nmodels), ": ", variables,
sep = "", collapse = "\n")
structure(table, heading = c(title, topnote), class = c("anova",
"data.frame"))
} |
cleanup_tempdir() |
vault_client_auth_userpass <- R6::R6Class(
"vault_client_auth_userpass",
inherit = vault_client_object,
cloneable = FALSE,
private = list(
api_client = NULL,
mount = NULL
),
public = list(
initialize = function(api_client, mount) {
super$initialize("Interact and configure vault's userpass support")
assert_scalar_character(mount)
private$mount <- sub("^/", "", mount)
private$api_client <- api_client
},
custom_mount = function(mount) {
vault_client_auth_userpass$new(private$api_client, mount)
},
write = function(username, password = NULL, policies = NULL, ttl = NULL,
max_ttl = NULL, bound_cidrs = NULL) {
username <- assert_scalar_character(username)
body <- list(
password = assert_scalar_character_or_null(password),
policies = policies %&&%
paste(assert_character(policies), collapse = ","),
ttl = ttl %&&% assert_is_duration(ttl),
max_ttl = max_ttl %&&% assert_is_duration(max_ttl),
bound_cidrs = bound_cidrs %&&% I(assert_character(bound_cidrs)))
path <- sprintf("/auth/%s/users/%s", private$mount, username)
private$api_client$POST(path, body = drop_null(body))
invisible(NULL)
},
read = function(username) {
assert_scalar_character(username)
path <- sprintf("/auth/%s/users/%s", private$mount, username)
ret <- private$api_client$GET(path)$data
ret$policies <- list_to_character(ret$policies)
ret
},
delete = function(username) {
assert_scalar_character(username)
path <- sprintf("/auth/%s/users/%s", private$mount, username)
private$api_client$DELETE(path)
invisible(NULL)
},
update_password = function(username, password) {
assert_scalar_character(username)
body <- list(password = assert_scalar_character(password))
path <- sprintf("/auth/%s/users/%s/password", private$mount, username)
private$api_client$POST(path, body = drop_null(body))
invisible(NULL)
},
update_policies = function(username, policies) {
assert_scalar_character(username)
body <- list(policies = paste(assert_character(policies),
collapse = ","))
path <- sprintf("/auth/%s/users/%s/policies", private$mount, username)
private$api_client$POST(path, body = drop_null(body))
invisible(NULL)
},
list = function() {
path <- sprintf("/auth/%s/users", private$mount)
tryCatch(
list_to_character(private$api_client$LIST(path)$data$keys),
vault_invalid_path = function(e) character(0))
},
login = function(username, password = NULL) {
data <- userpass_data(username, password)
path <- sprintf("/auth/%s/login/%s", private$mount, username)
body <- list(password = data$password)
res <- private$api_client$POST(path, body = body,
allow_missing_token = TRUE)
res$auth
}
))
userpass_data <- function(username, password) {
assert_scalar_character(username, "username")
if (is.null(password)) {
msg <- sprintf("Password for '%s': ", username)
password <- read_password(msg)
}
assert_scalar_character(password, "password")
list(username = username, password = password)
} |
makeOMLRunParameter = function(name, value, component = NA_character_) {
assertString(name)
assertString(component, na.ok = TRUE)
if (length(value) > 1)
stopf("length of parameter '%s' is more than one", name)
makeS3Obj("OMLRunParameter",
name = name,
value = value,
component = component
)
}
print.OMLRunParameter = function(x, ...) {
s = ""
if (!is.na(x$component))
s = sprintf(" (parameter of component %s)", x$component)
val = try(as.character(x$value))
catf("%s %s = %s", s, x$name, ifelse(is.error(val), "can't print this data type", x$value))
} |
winse<-function(x, tr = .2, ...){
x=elimna(x)
n=length(x)
h=n-2*floor(tr*n)
top=(n-1)*sqrt(winvar(x,tr=tr))
bot=(h-1)*sqrt(n)
se=top/bot
se
} |
ThreeComp_Volume_Clearance_HalfLife<-function(V1,Vd,Cl1,t_alpha,t_beta,t_gamma,
V1.sd=NA,Vd.sd=NA,Cl1.sd=NA,t_alpha.sd=NA,t_beta.sd=NA,t_gamma.sd=NA,
covar=c(V1Vd=NA,V1Cl1=NA,V1talpha=NA,V1tbeta=NA,V1tgamma=NA,VdCl1=NA,
Vdtalpha=NA,Vdtbeta=NA,Vdtgamma=NA,Cl1talpha=NA,Cl1tbeta=NA,
Cl1tgamma=NA,talphatbeta=NA,talphatgamma=NA,tbetatgamma=NA),...){
if(is.na(covar[1])) covar<-rep(0,15)
V1.var = (V1.sd)^2; Vd.var = (Vd.sd)^2
Cl1.var = (Cl1.sd)^2; t_alpha.var = (t_alpha.sd)^2;
t_beta.var = (t_beta.sd)^2; t_gamma.var = (t_gamma.sd)^2
f.V2<-quote(quote((V1)*((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-
(Cl1/V1)-((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-((((Vd/V1-1)*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-((log(2)/t_alpha)+
(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1)))*((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4))))/((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)))
V2<-eval(eval(f.V2))
ff.V2<-stats::as.formula(paste("~",as.character(f.V2[2],"")))
f.V3<-quote(quote((V1)*((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/
((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)))
V3<-eval(eval(f.V3))
ff.V3<-stats::as.formula(paste("~",as.character(f.V3[2],"")))
V2_deriv<-as.matrix(attr(eval(stats::deriv(ff.V2,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
V3_deriv<-as.matrix(attr(eval(stats::deriv(ff.V3,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
f.Vdss<-quote(quote(((V1)*((log(2)/t_alpha)+
(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))
-((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4))))/
((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*
t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/(t_alpha*t_beta))+
((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))+
(V1*((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*
t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))+(V1)))
Vdss<-eval(eval(f.Vdss))
ff.Vdss<-stats::as.formula(paste("~",as.character(f.Vdss[2],"")))
Vdss_deriv<-as.matrix(attr(eval(stats::deriv(ff.Vdss,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
sigma6<-matrix(as.numeric(c(V1.var,covar[1],covar[2],covar[3],covar[4],
covar[5],covar[1],Vd.var,covar[6],covar[7],covar[8],covar[9],
covar[2],covar[6],Cl1.var,covar[10],covar[11],covar[12],
covar[3],covar[7],covar[10],t_alpha.var,covar[13],covar[14],
covar[4],covar[8],covar[11],covar[13],t_beta.var,covar[15],
covar[5],covar[9],covar[12],covar[14],covar[15],t_gamma.var)),
6,6,byrow=T)
V2.sd<-sqrt(V2_deriv %*% sigma6 %*% t(V2_deriv))
V3.sd<-sqrt(V3_deriv %*% sigma6 %*% t(V3_deriv))
Vdss.sd<-sqrt(Vdss_deriv %*% sigma6 %*% t(Vdss_deriv))
f.Cl2<-quote(quote(V1*((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-
(Cl1/V1)-((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-((((Vd/V1-1)*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-((log(2)/t_alpha)+
(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*
((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4))))))
Cl2<-eval(eval(f.Cl2))
ff.Cl2<-stats::as.formula(paste("~",as.character(f.Cl2[2],"")))
f.Cl3<-quote(quote(V1*((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))))
Cl3<-eval(eval(f.Cl3))
ff.Cl3<-stats::as.formula(paste("~",as.character(f.Cl3[2],"")))
Cl2_deriv<-as.matrix(attr(eval(stats::deriv(ff.Cl2,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
Cl2.sd<-sqrt(Cl2_deriv %*% sigma6 %*% t(Cl2_deriv))
Cl3_deriv<-as.matrix(attr(eval(stats::deriv(ff.Cl3,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
Cl3.sd<-sqrt(Cl3_deriv %*% sigma6 %*% t(Cl3_deriv))
k10<-Cl1/V1
f.k12<-quote(quote((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))-(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))))
k12<-eval(eval(f.k12))
ff.k12<-stats::as.formula(paste("~",as.character(f.k12[2],"")))
f.k13<-quote(quote((((Vd/V1-1)*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-((log(2)/t_alpha)+(log(2)/t_beta)+(log(2)/t_gamma)-(Cl1/V1)-
((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1)))*((((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))/(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4))))
k13<-eval(eval(f.k13))
ff.k13<-stats::as.formula(paste("~",as.character(f.k13[2],"")))
f.k21<-quote(quote((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))
k21<-eval(eval(f.k21))
ff.k21<-stats::as.formula(paste("~",as.character(f.k21[2],"")))
f.k31<-quote(quote((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2))
k31<-eval(eval(f.k31))
ff.k31<-stats::as.formula(paste("~",as.character(f.k31[2],"")))
sigma2<-matrix(as.numeric(c(V1.var,covar[2],covar[2],Cl1.var)),2,2,byrow=T)
k10_deriv<-as.matrix(attr(eval(stats::deriv(~Cl1/V1,c("V1","Cl1"))),
"gradient"))
k12_deriv<-as.matrix(attr(eval(stats::deriv(ff.k12,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
k13_deriv<-as.matrix(attr(eval(stats::deriv(ff.k13,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
k21_deriv<-as.matrix(attr(eval(stats::deriv(ff.k21,
  c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
k31_deriv<-as.matrix(attr(eval(stats::deriv(ff.k31,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
k10.sd<-sqrt(k10_deriv %*% sigma2 %*% t(k10_deriv))
k12.sd<-sqrt(k12_deriv %*% sigma6 %*% t(k12_deriv))
k13.sd<-sqrt(k13_deriv %*% sigma6 %*% t(k13_deriv))
k21.sd<-sqrt(k21_deriv %*% sigma6 %*% t(k21_deriv))
k31.sd<-sqrt(k31_deriv %*% sigma6 %*% t(k31_deriv))
f.true_A<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_alpha))*(((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_alpha))/((log(2)/t_alpha)-(log(2)/t_beta))/
((log(2)/t_alpha)-(log(2)/t_gamma))/V1))
true_A<-eval(eval(f.true_A))
ff.true_A<-stats::as.formula(paste("~",as.character(f.true_A[2],"")))
f.true_B<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_beta))*(((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_beta))/((log(2)/t_beta)-(log(2)/t_alpha))/((log(2)/t_beta)-
(log(2)/t_gamma))/V1))
true_B<-eval(eval(f.true_B))
ff.true_B<-stats::as.formula(paste("~",as.character(f.true_B[2],"")))
f.true_C<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_gamma))*(((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-((Vd-V1)/V1*
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-(((log(2))^3)/
(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-(sqrt(((((log(2)^2)/
(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/
(t_beta*t_gamma))-((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/
(Cl1/V1)))-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/
(Cl1/V1))^2-(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_gamma))/((log(2)/t_gamma)-(log(2)/t_beta))/((log(2)/t_gamma)-
(log(2)/t_alpha))/V1))
true_C<-eval(eval(f.true_C))
ff.true_C<-stats::as.formula(paste("~",as.character(f.true_C[2],"")))
true_A_deriv<-as.matrix(attr(eval(stats::deriv(ff.true_A,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
true_B_deriv<-as.matrix(attr(eval(stats::deriv(ff.true_B,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
true_C_deriv<-as.matrix(attr(eval(stats::deriv(ff.true_C,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
true_A.sd<-sqrt(true_A_deriv %*% sigma6 %*% t(true_A_deriv))
true_B.sd<-sqrt(true_B_deriv %*% sigma6 %*% t(true_B_deriv))
true_C.sd<-sqrt(true_C_deriv %*% sigma6 %*% t(true_C_deriv))
f.frac_A<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_alpha))*(((((((log(2)^2)/(t_alpha*t_beta))+
((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_alpha))/((log(2)/t_alpha)-(log(2)/t_beta))/((log(2)/t_alpha)-
(log(2)/t_gamma))))
frac_A<-eval(eval(f.frac_A))
ff.frac_A<-stats::as.formula(paste("~",as.character(f.frac_A[2],"")))
f.frac_B<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_beta))*(((((((log(2)^2)/(t_alpha*t_beta))+
((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_beta))/((log(2)/t_beta)-(log(2)/t_alpha))/((log(2)/t_beta)-
(log(2)/t_gamma))))
frac_B<-eval(eval(f.frac_B))
ff.frac_B<-stats::as.formula(paste("~",as.character(f.frac_B[2],"")))
f.frac_C<-quote(quote((((((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/
(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))+
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_gamma))*(((((((log(2)^2)/(t_alpha*t_beta))+
((log(2)^2)/(t_alpha*t_gamma))+((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))-
(sqrt(((((log(2)^2)/(t_alpha*t_beta))+((log(2)^2)/(t_alpha*t_gamma))+
((log(2)^2)/(t_beta*t_gamma))-
((Vd-V1)/V1*(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1)))/(Cl1/V1))^2-
(((log(2))^3)/(t_alpha*t_beta*t_gamma)/(Cl1/V1))*4)))/2)-
(log(2)/t_gamma))/((log(2)/t_gamma)-(log(2)/t_beta))/((log(2)/t_gamma)-
(log(2)/t_alpha))))
frac_C<-eval(eval(f.frac_C))
ff.frac_C<-stats::as.formula(paste("~",as.character(f.frac_C[2],"")))
frac_A_deriv<-as.matrix(attr(eval(stats::deriv(ff.frac_A,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
frac_B_deriv<-as.matrix(attr(eval(stats::deriv(ff.frac_B,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
frac_C_deriv<-as.matrix(attr(eval(stats::deriv(ff.frac_C,
c("V1","Vd","Cl1","t_alpha","t_beta","t_gamma"))),"gradient"))
frac_A.sd<-sqrt(frac_A_deriv %*% sigma6 %*% t(frac_A_deriv))
frac_B.sd<-sqrt(frac_B_deriv %*% sigma6 %*% t(frac_B_deriv))
frac_C.sd<-sqrt(frac_C_deriv %*% sigma6 %*% t(frac_C_deriv))
alpha<-log(2)/t_alpha; beta<-log(2)/t_beta; gamma<-log(2)/t_gamma
alpha_deriv<-as.matrix(attr(eval(stats::deriv(~log(2)/t_alpha,"t_alpha")),
"gradient"))
beta_deriv<-as.matrix(attr(eval(stats::deriv(~log(2)/t_beta,"t_beta")),
"gradient"))
gamma_deriv<-as.matrix(attr(eval(stats::deriv(~log(2)/t_gamma,"t_gamma")),
"gradient"))
alpha.sd<-sqrt(alpha_deriv * t_alpha.var * alpha_deriv)
beta.sd<-sqrt(beta_deriv * t_beta.var * beta_deriv)
gamma.sd<-sqrt(gamma_deriv * t_gamma.var * gamma_deriv)
if(is.na(V1[1])){
param = rep(NA,24)
sd = rep(NA,24)
} else{
param = c(V1,Vdss,Cl1,t_alpha,t_beta,t_gamma,V2,V3,Cl2,Cl3,k10,k12,k13,
k21,k31,true_A,true_B,true_C,frac_A,frac_B,frac_C,alpha,beta,gamma)
sd = c(V1.sd,Vdss.sd,Cl1.sd,t_alpha.sd,t_beta.sd,t_gamma.sd,V2.sd,V3.sd,
Cl2.sd,Cl3.sd,k10.sd,k12.sd,k13.sd,k21.sd,k31.sd,true_A.sd,true_B.sd,
true_C.sd,frac_A.sd,frac_B.sd,frac_C.sd,alpha.sd,beta.sd,gamma.sd)
}
result = data.frame(Parameter=c("V1","Vdss","Cl1","t_alpha","t_beta",
"t_gamma","V2","V3","Cl2","Cl3","k10","k12","k13",
"k21","k31","True_A","True_B","True_C","Frac_A",
"Frac_B","Frac_C","alpha","beta","gamma"),
Estimate=param, Std.err=sd)
row.names(result) <- c("V1","Vdss","Cl1","t_alpha","t_beta","t_gamma","V2",
"V3","Cl2","Cl3","k10","k12","k13","k21","k31","True_A","True_B","True_C",
"Frac_A","Frac_B","Frac_C","alpha","beta","gamma")
result<-result[c("Vdss","V1","V2","V3","Cl1","Cl2","Cl3",
"k10","k12","k21","k13","k31","alpha","beta","gamma",
"t_alpha","t_beta","t_gamma","True_A","True_B","True_C",
"Frac_A","Frac_B","Frac_C"),]
return(result)
} |
ROAD <- function(x, y, standardize = FALSE, lambda = NULL, eps = 1e-07){
obj <- dsda(x, y=y, standardize=standardize, lambda=lambda, alpha=1, eps=eps)
p <- dim(x)[2]
n <- length(y)
nlambda <- length(obj$lambda)
beta <- obj$beta[2:(p+1),]
lambda <- obj$lambda
newbeta <- beta
newlambda <- lambda
mu1 <- apply(x[y==1,],2,mean)
mu2 <- apply(x[y==2,],2,mean)
w <- mu2-mu1
for (i in 1:nlambda){
newlambda[i] <- lambda[i]*2/sum(beta[,i]*w)/n
}
beta <- as.matrix(beta)
newbeta <- as.matrix(newbeta)
newbeta <- sweep(newbeta, 2, t(beta)%*%w, FUN="/")
outlist <- list(beta=newbeta, lambda=newlambda)
class(outlist) <- c("ROAD")
outlist
} |
sim.fmri2COVAR <- function(hrf, beta.Var1, beta.Var2){
require(Matrix)
if(any(is.na(hrf)))
stop("\nNAs in hr function.\n")
p <- dim(hrf)[2]
if(p!=2)
stop("Haemodynamic response function needs to be a matrix with column dimension of 2.")
I <- 400
Z <- as.matrix(hrf)
T <- dim(Z)[1]
Z.Var1 <- as.matrix(Z[,1])
Z.Var2 <- as.matrix(Z[,2])
IZ.Var1 <- kronecker(as(diag(1, nrow=I, ncol=I), "sparseMatrix"), Z.Var1)
IZ.Var2 <- kronecker(as(diag(1, nrow=I, ncol=I), "sparseMatrix"), Z.Var2)
sigma.sq <- numeric(I)
for(i in 1:I)
sigma.sq[i] <- 25 + 2*rnorm(1)
eps <- matrix(nrow=T, ncol=I)
for(i in 1:I)
eps[,i] <- rnorm(T, mean=0, sd=sqrt(sigma.sq[i]))
eps <- as.vector(eps)
beta.sim.Var1 <- matrix(0, nrow=I/20, ncol=I/20)
beta.sim.Var1[8:14,8:14] <- beta.Var1
beta.sim.Var1[8,8] <- 0
beta.sim.Var1[9,8] <- 0
beta.sim.Var1[8,9] <- 0
beta.sim.Var1[13,8] <- 0
beta.sim.Var1[14,9] <- 0
beta.sim.Var1[14,8] <- 0
beta.sim.Var1[9,14] <- 0
beta.sim.Var1[8,13] <- 0
beta.sim.Var1[8,14] <- 0
beta.sim.Var1[14,13] <- 0
beta.sim.Var1[13,14] <- 0
beta.sim.Var1[14,14] <- 0
beta.sim.Var1 <- as.vector(beta.sim.Var1)
beta.sim.Var2 <- matrix(0, nrow=I/20, ncol=I/20)
beta.sim.Var2[8:14,8:14] <- beta.Var2
beta.sim.Var2[8,8] <- 0
beta.sim.Var2[9,8] <- 0
beta.sim.Var2[8,9] <- 0
beta.sim.Var2[13,8] <- 0
beta.sim.Var2[14,9] <- 0
beta.sim.Var2[14,8] <- 0
beta.sim.Var2[9,14] <- 0
beta.sim.Var2[8,13] <- 0
beta.sim.Var2[8,14] <- 0
beta.sim.Var2[14,13] <- 0
beta.sim.Var2[13,14] <- 0
beta.sim.Var2[14,14] <- 0
beta.sim.Var2 <- as.vector(beta.sim.Var2)
y <- numeric(T*I)
y <- IZ.Var1%*%beta.sim.Var1+IZ.Var2%*%beta.sim.Var2+eps
y <- t(matrix(nrow=T, ncol=I, data=y))
y <- array(y, dim=c(20,20,T))
y <- aperm(y, c(2,1,3))
eps <- t(matrix(nrow=T, ncol=I, data=eps))
eps <- array(eps, dim=c(20,20,T))
eps <- aperm(eps, c(2,1,3))
return(list("fmri"=y, "hrf"=Z, "coeff1"=beta.sim.Var1, "coeff2"=beta.sim.Var2,
"resid"=eps, "sigma"=sigma.sq))
} |
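# Illustrative simulation (not part of the original source): a 50-scan,
# two-regressor haemodynamic design whose activated 7 x 7 block receives
# effect sizes 2 and 1. Requires the Matrix package, as the function does.
if (requireNamespace("Matrix", quietly = TRUE)) {
  local({
    hrf <- cbind(sin(seq(0, 4 * pi, length.out = 50)),
                 cos(seq(0, 4 * pi, length.out = 50)))
    sim <- sim.fmri2COVAR(hrf, beta.Var1 = 2, beta.Var2 = 1)
    print(dim(sim$fmri))   # 20 x 20 x 50 array of simulated voxel time series
  })
}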
ormidp.test <- function(a1, a0, b1, b0, or = 1){
x <- matrix(c(a1,a0,b1,b0),2,2, byrow=TRUE)
lteqtoa1 <- fisher.test(x,or=or,alternative="l")$p.val
gteqtoa1 <- fisher.test(x,or=or,alternative="g")$p.val
pval1 <- 0.5*(lteqtoa1-gteqtoa1+1)
one.sided <- min(pval1, 1-pval1)
two.sided <- 2*one.sided
data.frame(one.sided=one.sided, two.sided=two.sided)
} |
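# Illustrative call (not part of the original source): mid-p test for a 2 x 2
# table filled row-wise with the counts 10, 90, 5, 95, against the null odds
# ratio of 1. Only stats::fisher.test() is required.
ormidp.test(a1 = 10, a0 = 90, b1 = 5, b0 = 95)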
`evolAR` <-
function(a, dt=0, numf=1024, pord = 100, Ns=0, Nov=0, fl=0, fh=10 )
{
if(missing(dt)) { dt=1;}
if(missing(numf)) { numf=1024}
if(missing(Ns)) { Ns=250;}
if(missing(Nov)) { Nov=240;}
if(missing(fl)) { fl=0;}
if(missing(fh)) { fh=1/(2*dt);}
if(missing(pord)) { pord = 100;}
Ns = floor(Ns)
NT = length(a);
nyquistf = 1/(2*dt)
Nfft = numf
if(Nov<1)
{
Nov = floor(Ns - 0.1*Ns);
}
kcol =floor( (NT-floor(Nov) )/(Ns-floor(Nov)))
if(kcol<Ns)
{
Ns = kcol
Nov = floor(Ns-0.1*Ns)
kcol =floor( (NT-floor(Nov) )/(Ns-floor(Nov)))
}
min1 = Nfft%%2;
if(min1 == 0)
{
krow = (Nfft/2);
} else {
krow = (Nfft+1)/2;
}
skiplen = Ns - Nov;
df = 1.0/(Nfft*dt);
krow = numf
numfreqs=numf
if(kcol<1)
{
print(paste(sep=' ', "error in evolfft kcol=", kcol, "krow=", krow, "NT", NT, "Ns", Ns, "Nov", Nov))
return()
}
DMAT = matrix(rep(0,krow*kcol), ncol=kcol, nrow=krow)
m = 1:(kcol)
ibeg=((m-1)*skiplen)+1;
iend = ibeg+Ns-1;
for( i in m)
{
tem = a[ibeg[i]:iend[i]]
tem = tem-mean(tem, na.rm=TRUE)
aem =autoreg(tem, numf=numf , pord = pord, PLOT=FALSE, f1=fl, f2=fh)
DMAT[,i] = aem$amp
}
DFFT = DMAT
DSPEC = DMAT
x = (ibeg+Ns/2)*dt
freqs = aem$freq
y = aem$freq*(1/dt)
RET = list(sig=a, dt=dt, numfreqs=numfreqs, wpars=list(Nfft=numfreqs, Ns=Ns, Nov=Nov, fl=fl, fh=fh), DSPEC=DSPEC, freqs=y, tims=x, pord=pord)
invisible(RET)
} |
shiftvector <-
function(g,X,x=c(1,0),verbose=FALSE) {
costheta <- ((t(g)%*%x))/(sqrt(t(g)%*%g))
theta <- acos(costheta)
sintheta <- sin(theta)
ang <- rad2degree(theta)
if(verbose) cat("angle vector g -- x-axis:",theta," rad",ang," degrees\n")
if(g[2]>=0)
H <- matrix(c(costheta, sintheta, -sintheta, costheta), ncol = 2)
else
H <- matrix(c(costheta, -sintheta, sintheta, costheta), ncol = 2)
Xr <- X%*%H
above <- Xr[,2] > 0
below <- Xr[,2] < 0
mg <- matrix(rep(1, nrow(X)), ncol = 1)%*%g
Dp <- diag(as.vector((X%*%g)/(sum(g*g))))
dmat <- X-Dp%*%mg
vecdn <- diag(dmat%*%t(dmat) )
if(sum(above)>0) {
vecdna <- vecdn[above]
dmata <- dmat[above,]
if(is.vector(dmata)) dl <- dmata
else {
i <- which.max(vecdna)
dl <- dmata[i,]
}
} else dl <- c(0,0)
if(sum(below)>0) {
vecdnb <- vecdn[below]
dmatb <- dmat[below,]
if(is.vector(dmatb)) dr <- dmatb
else {
i <- which.max(vecdnb)
dr <- dmatb[i,]
}
} else dr <- c(0,0)
if(verbose) {
cat("Left shift vector: ",dl,"\n")
cat("Right shift vector: ",dr,"\n")
}
return(list(dr=dr,dl=dl))
} |
NvLgNAuto <- function(x){
x<-data.frame(x)
nomiss <- function(x) { x[ !is.na(x) ] }
NvLgN_results<-matrix(,nrow=length(x),ncol=3)
NvLgN_results<-data.frame(NvLgN_results)
pb <- utils::txtProgressBar(min = 0, max = length(x), style = 3)
print("<<Normal v. lognormal per sample initiated>>")
for (i in 1:length(x)) {
tryCatch(
NvLgN_results[i,] <- NvLgN( nomiss(x[[i]])[nomiss(x[[i]])>0], min(nomiss(x[[i]])[nomiss(x[[i]])>0]) ),
error=function(e) { print(paste("Sample",i,": Problem with Normal v. lognormal")) }
)
print(paste("Normal v. lognormal:","Sample",i,"completed on",Sys.time() ))
utils::setTxtProgressBar(pb, i)
}
close(pb)
colnames(NvLgN_results) <- c("NvLgN.rawLR","NvLgN.normLR","NvLgN.p")
return(NvLgN_results)
} |
apa_ndiv <- function(apa_list,
dis_trait_column,
dis_method = "gowdis",
dis_transform = sqrt,
scope = "global",
edge_correction = "none",
pdiv = TRUE){
apa_config <- attr(apa_list, "apa_config")
if(is.null(dis_trait_column)){
stop("A `dis_trait_column` has to be specified for the calculation of `ndiv`.")
}
if(is.null(edge_correction)){
edge_correction <- apa_config$edge_correction
}
if(!is.na(apa_config$edge_correction) && apa_config$edge_correction != edge_correction){
stop("There can only be a single edge correction method for one 'apa_list' object.")
}
if(!"apa_size" %in% apa_config$apa_properties){
apa_list <- apa_size(apa_list, edge_correction = edge_correction)
apa_config <- attr(apa_list, "apa_config")
}
if(any(grepl("ndiv", apa_config$apa_properties))){
stop("`apa_ndiv` has already been calculated. Please use `apa_drop_properties()` first to recalculate.")
}
if(! scope %in% c("global", "local")){
stop("`scope` has to be either `global` or `local`.")
}
apa_map_tree_layer <-
lapply(apa_list$plot_dat$apa_map, raster::subset, apa_config$tree_id_column)
apa_map_tree_levels <-
lapply(apa_map_tree_layer, raster::levels)
apa_map_tree_count <- c(lapply_deep(apa_map_tree_levels, nrow), recursive = TRUE)
if(any(apa_map_tree_count == 1)){
}
if(!is.list(dis_trait_column)){
dis_trait_column <- list(dis_trait_column)
}
if(is.null(names(dis_trait_column))){
dist_trait_lengths <- sapply(dis_trait_column, length)
multi_trait_names <- dis_trait_column[dist_trait_lengths > 1]
multi_trait_names <- lapply(multi_trait_names, substr, 1, 1)
multi_trait_names <- lapply(multi_trait_names, paste, collapse = "")
trait_names <- dis_trait_column
trait_names[dist_trait_lengths > 1] <- multi_trait_names
names(dis_trait_column) <- trait_names
}
if(any(duplicated(names(dis_trait_column)))){
stop("Please choose unique names for `dis_trait_column`.")
}
dis_trait_missing <-
lapply(dis_trait_column, setdiff, y = names(apa_list$tree_dat))
dis_trait_missing <- c(dis_trait_missing, recursive = TRUE)
if(length(dis_trait_missing) > 0){
stop("Columns `", paste(dis_trait_missing, collapse = "`, `"), "` are missing in `tree_dat`.")
}
apa_config$dis_trait_column <- dis_trait_column
add_to_apa_properties <- paste0(names(dis_trait_column), "_ndiv")
if(pdiv){
add_to_apa_properties <-
c(add_to_apa_properties,
paste0(names(dis_trait_column), "_pdiv"))
}
apa_config$apa_properties <-
as.vector(stats::na.omit(unique(c(apa_config$apa_properties, add_to_apa_properties))))
apa_config$dis_method <- dis_method
apa_config$dis_transform <- dis_transform
apa_config$scope <- scope
apa_map <- apa_list$plot_dat$apa_map
trait_dat <- sf::st_drop_geometry(apa_list$tree_dat)
trait_dat <- lapply(lapply(apa_config$dis_trait_column, c, apa_config$tree_id_column),
drop_columns_except,
x = trait_dat)
if(apa_config$core_column == apa_config$buffer_column){
top_level_plot <- NULL
top_level_id_column <- NULL
}else{
top_level_plot <- apa_list$plot_dat
sf::st_geometry(top_level_plot) <- apa_config$core_column
top_level_plot <- top_level_plot[, apa_config$plot_id_column, drop = FALSE]
top_level_id_column <- c(plot_dat = apa_config$plot_id_column)
}
border_tree_id <-
stats::na.omit(apa_list$tree_dat[[apa_config$tree_id_column]][apa_list$tree_dat$border_tree])
output <- NULL
for(trait_i in names(trait_dat)){
message(paste0("\nAggregating `", trait_i, "` neighborhood diversity:"), appendLF = FALSE)
output_i <- apa_ndiv_calc(unclass(apa_list), apa_config, apa_map,
trait_dat = trait_dat[[trait_i]], subplot = top_level_plot,
subplot_id_column = top_level_id_column, pdiv = pdiv,
edge_correction = edge_correction, border_tree_id = border_tree_id)
subplot_list <- apa_list$subplot_dat
for(subplot_i in names(apa_config$subplot_id_column)){
message(paste0("`", subplot_i, "` - aggregating neighborhood diversity: "), appendLF = FALSE)
subplot_dat_i <- apa_list$subplot_dat[[subplot_i]]
subplot_list[[subplot_i]] <-
apa_ndiv_calc(subplot_dat_i, apa_config, apa_map,
trait_dat = trait_dat[[trait_i]],
subplot = subplot_dat_i[[subplot_i]],
subplot_id_column = apa_config$subplot_id_column[subplot_i],
pdiv = pdiv, edge_correction = edge_correction,
border_tree_id = border_tree_id)
}
message("")
output_i$subplot_dat <- subplot_list
output_i <-
lapply_deep(output_i, .what = "data.frame",
function(x){
names(x)[names(x) == "ndiv"] <- paste0(trait_i, "_", "ndiv")
names(x)[names(x) == "pdiv"] <- paste0(trait_i, "_", "pdiv")
x
})
if(is.null(output)){
output <- output_i
}else{
output <- mapply_deep(list(output, output_i), .what = "data.frame",
.f = add_col, col = paste0(trait_i, "_ndiv"))
output <- mapply_deep(list(output, output_i), .what = "data.frame",
.f = add_col, col = paste0(trait_i, "_pdiv"))
}
}
output <- new_apa_list(output, apa_config = apa_config)
output
}
apa_ndiv_calc <-
function(dat_list, apa_config, apa_map, trait_dat, subplot = NULL,
subplot_id_column = NULL, pdiv = TRUE, prefix = NULL,
edge_correction = "none", border_tree_id = NULL){
if(!is.na(apa_config$edge_correction) && apa_config$edge_correction != edge_correction){
stop("There can only be a single edge correction method for one 'dat_list' object.")
}
if(!edge_correction[1] %in% c("none", "critical", "border_tree_exclusion")){
stop("`edge_correction` has to be one of `none`, `critical` or `border_tree_exclusion`.")
}
if(edge_correction == "critical"){
critical_layer <- "critical"
}else{
critical_layer <- NULL
}
if(inherits(trait_dat, "sf")){
trait_dat <- sf::st_drop_geometry(trait_dat)
}
if(is.null(subplot)){
agg_column <- apa_config$plot_id_column
}else{
agg_column <- subplot_id_column
}
if(edge_correction != "border_tree_exclusion" | is.null(subplot)){
boundary_length_dat <-
boundary_length(rst = apa_map,
subplot = subplot,
plot_id_column = apa_config$plot_id_column,
critical_layer = critical_layer,
remove_na = FALSE)
}else{
boundary_length_dat_values <-
boundary_length(rst = apa_map,
subplot = NULL,
plot_id_column = apa_config$plot_id_column,
critical_layer = critical_layer,
remove_na = FALSE)
boundary_length_dat <- sf::st_join(subplot, dat_list$tree_dat[, apa_config$tree_id_column])
boundary_length_dat <- sf::st_drop_geometry(boundary_length_dat)
sfc_idx <- sapply(boundary_length_dat, inherits, "sfc")
boundary_length_dat <- boundary_length_dat[!sfc_idx]
boundary_length_dat <- merge(boundary_length_dat, boundary_length_dat_values,
by = c(apa_config$plot_id_column, apa_config$tree_id_column))
}
if(edge_correction == "critical"){
boundary_length_dat$boundary_length <- boundary_length_dat$boundary_length * (1- boundary_length_dat[[critical_layer]])
} else if(edge_correction == "border_tree_exclusion"){
border_tree_idx <- boundary_length_dat[[apa_config$tree_id_column]] %in% border_tree_id
border_tree_neighbor_idx <-
boundary_length_dat[[paste0(apa_config$tree_id_column, "_bc")]] %in% border_tree_id
boundary_length_dat$border_tree <- border_tree_idx | border_tree_neighbor_idx
if(any(is.na(boundary_length_dat[[paste0(apa_config$tree_id_column, "_bc")]]) & !boundary_length_dat$border_tree)){
stop("BUUUH!")
}
boundary_length_dat$boundary_length <- boundary_length_dat$boundary_length * (1- boundary_length_dat$border_tree)
}
trait_dat_trees <- trait_dat[[apa_config$tree_id_column]]
boundary_length_dat_trees <- boundary_length_dat[, paste0(apa_config$tree_id_column, c("", "_bc"))]
boundary_length_dat_trees <- unique(c(boundary_length_dat_trees, recursive = TRUE))
missing_trees <- stats::na.omit(setdiff(boundary_length_dat_trees, trait_dat_trees))
if(length(missing_trees) > 0){
stop("Observations '", paste(missing_trees, collapse = "', '"), "' are missing in 'trait_dat'.")
}
tree_id <- trait_dat[[apa_config$tree_id_column]]
if(length(unique(tree_id)) != length(tree_id) | any(is.na(tree_id))){
stop("'tree_id_column' has to point to a unique identifier without 'NA' values.")
}
trait_val <- as.data.frame(trait_dat)[setdiff(names(trait_dat), apa_config$tree_id_column)]
fac_trait_idx <- sapply(trait_val, class) == "factor"
trait_val[fac_trait_idx] <- lapply(trait_val[fac_trait_idx], as.character)
rownames(trait_val) <- trait_dat[[apa_config$tree_id_column]]
if(apa_config$scope == "global"){
minmax_dummies <-
rbind(as.data.frame(lapply(trait_val, min)),
as.data.frame(lapply(trait_val, max)))
rownames(minmax_dummies) <- c(".min_dummy", ".max_dummy")
}
if(is.null(subplot)){
relations <- apa_config$plot_id_values
}else{
relations <- subplot[[agg_column]]
}
boundary_length_list <- split(boundary_length_dat, boundary_length_dat[[agg_column]])
dis_list <- names_to_list(relations)
if(apa_config$dis_method == "gowdis"){
dis_fun <- function(...){FD::gowdis(...)}
}else{
dis_fun <- apa_config$dis_method
}
dis_transform <- apa_config$dis_transform
ndiv_i <- list()
for(relation_i in relations){
ndiv_i[[relation_i]] <- dat_list$tree_dat
if(inherits(ndiv_i[[relation_i]], "sf")){
ndiv_i[[relation_i]] <- sf::st_drop_geometry(ndiv_i[[relation_i]])
}
ndiv_i[[relation_i]] <-
subset(ndiv_i[[relation_i]],
ndiv_i[[relation_i]][[agg_column]] == relation_i)[, c(agg_column, apa_config$tree_id_column, "apa_size_prop")]
if(nrow(ndiv_i[[relation_i]]) == 0){
ndiv_i[[relation_i]][1, ] <- c(list(relation_i), list(NA_character_), list(NA_real_))
}
boundary_i <- boundary_length_list[[relation_i]]
boundary_i_trees <- boundary_i[, paste0(apa_config$tree_id_column, c("", "_bc"))]
trees_present <-
stats::na.omit(unique(c(boundary_i_trees[[1]], boundary_i_trees[[2]],
ndiv_i[[relation_i]][[2]])))
trait_val_i <- trait_val[trees_present, , drop = FALSE]
if(apa_config$scope == "global"){
trait_val_i <- rbind(trait_val_i, minmax_dummies)
}
tree_dis <- as.matrix(dis_transform(dis_fun(trait_val_i)))
if(all(is.na(boundary_i[[paste0(apa_config$tree_id_column, "_bc")]]))){
ndiv_i[[relation_i]]$ndiv <- NA_real_
}else{
dis_i <- boundary_i
boundary_i_trees <- subset(boundary_i_trees, !is.na(dis_i[[paste0(apa_config$tree_id_column, "_bc")]]))
dis_i <- subset(dis_i, !is.na(dis_i[[paste0(apa_config$tree_id_column, "_bc")]]))
dis_i$.dis <- tree_dis[as.matrix(boundary_i_trees)]
dis_i_split <- split(dis_i, dis_i[[apa_config$tree_id_column]])
ndiv_i_list <-
lapply(dis_i_split,
FUN = function(x){
data.frame(relation = relation_i,
tree_id = x[[apa_config$tree_id_column]][1],
ndiv_i = sum(x$boundary_length*x$.dis) / sum(x$boundary_length))
})
ndiv_i_join <- do.call(rbind, ndiv_i_list)
names(ndiv_i_join) <- c(agg_column, apa_config$tree_id_column, "ndiv")
match_idx <- match_by(ndiv_i[[relation_i]], ndiv_i_join, by = c(agg_column, apa_config$tree_id_column))
ndiv_i[[relation_i]]$ndiv <- ndiv_i_join$ndiv[match_idx]
}
if(pdiv){
pdiv_proc <-
subset(ndiv_i[[relation_i]],
!is.na(ndiv_i[[relation_i]]$apa_size_prop))
pdiv_tree_id <- pdiv_proc[[apa_config$tree_id_column]]
tree_dis_pdiv <-
tree_dis[pdiv_tree_id, pdiv_tree_id, drop = FALSE]
pdiv_values <- (pdiv_proc$apa_size_prop %*% tree_dis_pdiv)[1, ]
ndiv_i[[relation_i]]$pdiv <- ndiv_i[[relation_i]]$apa_size_prop
if(length(pdiv_values) > 0){
ndiv_i[[relation_i]]$pdiv[!is.na(ndiv_i[[relation_i]]$apa_size_prop)] <- pdiv_values
}
}
}
ndiv_i <- do.call(rbind, ndiv_i)
match_idx <- match_by(dat_list$tree_dat, ndiv_i, by = c(agg_column, apa_config$tree_id_column))
dat_list$tree_dat$ndiv <- ndiv_i[match_idx, ][["ndiv"]]
dat_list$tree_dat$pdiv <- ndiv_i[match_idx, ][["pdiv"]]
proc_col <-
stats::na.omit(c(agg_column, apa_config$agg_class_column, "apa_size",
"apa_size_prop", "ndiv", "pdiv"[pdiv]))
tree_dat_proc <- dat_list$tree_dat[, proc_col]
if(inherits(tree_dat_proc, "sf")){
tree_dat_proc <- sf::st_drop_geometry(tree_dat_proc)
}
agg_classes <-
stats::setNames(c(agg_column, apa_config$agg_class_column),
c(names(dat_list)[[1]], apa_config$agg_class_column))
agg_classes <- stats::na.omit(agg_classes)
dat_list[[1]]$apa_size_prop <- 1
for(agg_class_i in names(agg_classes)){
tree_dat_proc_split <-
split(tree_dat_proc, paste(tree_dat_proc[[agg_column]], tree_dat_proc[[agg_classes[agg_class_i]]]))
if(pdiv){
agg_i_dat <-
lapply(tree_dat_proc_split,
FUN = function(x){
cbind(x[1, unique(c(agg_column, agg_classes[agg_class_i])), drop = FALSE],
ndiv = stats::weighted.mean(x$ndiv, x$apa_size, na.rm = TRUE),
pdiv = stats::weighted.mean(x$pdiv, x$apa_size, na.rm = TRUE))})
}else{
agg_i_dat <-
lapply(tree_dat_proc_split,
FUN = function(x){
cbind(x[1, unique(c(agg_column, agg_classes[agg_class_i])), drop = FALSE],
ndiv = stats::weighted.mean(x$ndiv, x$apa_size, na.rm = TRUE))})
}
agg_i_dat <- do.call(rbind, agg_i_dat)
match_columns <- unique(c(agg_column, agg_classes[agg_class_i]))
match_idx <- match_by(dat_list[[agg_class_i]], agg_i_dat, match_columns)
dat_list[[agg_class_i]]$ndiv <- agg_i_dat$ndiv[match_idx]
if(pdiv){
dat_list[[agg_class_i]]$pdiv <- agg_i_dat$pdiv[match_idx]
}
}
dat_list
} |
errCWD<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if ((class(c) != "integer") & (class(c) != "numeric") || length(c) >1 || c<0 ) stop("'c' has to be positive")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
x=0:n
k=n+1
pCW=0
qCW=0
seCW=0
LCW=0
UCW=0
cv=qnorm(1-(alp/2), mean = 0, sd = 1)
for(i in 1:k)
{
pCW[i]=x[i]/n
qCW[i]=1-pCW[i]
seCW[i]=sqrt(pCW[i]*qCW[i]/n)
LCW[i]=max(pCW[i]-((cv*seCW[i])+c),0)
UCW[i]=min(pCW[i]+((cv*seCW[i])+c),1)
}
alpstarCW=0
thetactr=0
for(m in 1:k)
{
if(phi > UCW[m] || phi<LCW[m])
{
thetactr=thetactr+1
alpstarCW[m]=dbinom(x[m],n,phi)
} else alpstarCW[m] = 0
}
delalpCW=round((alp-sum(alpstarCW))*100,2)
theta=round(100*thetactr/(n+1),2)
if(delalpCW<f)
Fail_Pass="failure" else Fail_Pass="success"
data.frame(delalp=delalpCW,theta,Fail_Pass)
}
errCSC<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if (c<=0 || c>(1/(2*n))) stop("'c' has to be positive and less than or equal to 1/(2*n)")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
x=0:n
k=n+1
pCS=0
qCS=0
seCS_L=0
seCS_U=0
LCS=0
UCS=0
cv=qnorm(1-(alp/2), mean = 0, sd = 1)
cv1=(cv^2)/(2*n)
cv2= cv/(2*n)
for(i in 1:k)
{
pCS[i]=x[i]/n
qCS[i]=1-pCS[i]
seCS_L[i]=sqrt((cv^2)-(4*n*(c+c^2))+(4*n*pCS[i]*(1-pCS[i]+(2*c))))
seCS_U[i]=sqrt((cv^2)+(4*n*(c-c^2))+(4*n*pCS[i]*(1-pCS[i]-(2*c))))
LCS[i]=max((n/(n+(cv)^2))*((pCS[i]-c+cv1)-(cv2*seCS_L[i])),0)
UCS[i]=min((n/(n+(cv)^2))*((pCS[i]+c+cv1)+(cv2*seCS_U[i])),1)
}
alpstarCS=0
thetactr=0
for(m1 in 1:k)
{
if(phi > UCS[m1] || phi<LCS[m1])
{
thetactr=thetactr+1
alpstarCS[m1]=dbinom(x[m1],n,phi)
} else alpstarCS[m1] = 0
}
delalpCS=round((alp-sum(alpstarCS))*100,2)
theta=round(100*thetactr/(n+1),2)
if(delalpCS<f)
Fail_Pass="failure" else Fail_Pass="success"
return(data.frame(delalp=delalpCS,theta,Fail_Pass))
}
errCAS<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if ((class(c) != "integer") & (class(c) != "numeric") || length(c) >1 || c<0 ) stop("'c' has to be positive")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
x=0:n
k=n+1
pCA=0
qCA=0
seCA=0
LCA=0
UCA=0
cv=qnorm(1-(alp/2), mean = 0, sd = 1)
for(i in 1:k)
{
pCA[i]=x[i]/n
qCA[i]=1-pCA[i]
seCA[i]=cv/sqrt(4*n)
LCA[i]=max((sin(asin(sqrt(pCA[i]))-seCA[i]-c))^2,0)
UCA[i]=min((sin(asin(sqrt(pCA[i]))+seCA[i]+c))^2,1)
}
alpstarCA=0
thetactr=0
for(m1 in 1:k)
{
if(phi > UCA[m1] || phi<LCA[m1])
{
thetactr=thetactr+1
alpstarCA[m1]=dbinom(x[m1],n,phi)
} else alpstarCA[m1] = 0
}
delalpCA=round((alp-sum(alpstarCA))*100,2)
theta=round(100*thetactr/(n+1),2)
if(delalpCA<f)
Fail_Pass="failure" else Fail_Pass="success"
return(data.frame(delalp=delalpCA,theta,Fail_Pass))
}
errCLT<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if ((class(c) != "integer") & (class(c) != "numeric") || length(c) >1 || c<0 ) stop("'c' has to be positive")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
x=0:n
k=n+1
pCLT=0
qCLT=0
seCLT=0
lgit=0
LCLT=0
UCLT=0
cv=qnorm(1-(alp/2), mean = 0, sd = 1)
pCLT[1]=0
qCLT[1]=1
LCLT[1] = 0
UCLT[1] = 1-((alp/2)^(1/n))
pCLT[k]=1
qCLT[k]=0
LCLT[k]= (alp/2)^(1/n)
UCLT[k]=1
lgiti=function(t) exp(t)/(1+exp(t))
for(j in 1:(k-2))
{
pCLT[j+1]=x[j+1]/n
qCLT[j+1]=1-pCLT[j+1]
lgit[j+1]=log(pCLT[j+1]/qCLT[j+1])
seCLT[j+1]=sqrt(pCLT[j+1]*qCLT[j+1]*n)
LCLT[j+1]=max(lgiti(lgit[j+1]-(cv/seCLT[j+1])-c),0)
UCLT[j+1]=min(lgiti(lgit[j+1]+(cv/seCLT[j+1])+c),1)
}
alpstarCLT=0
thetactr=0
for(m in 1:k)
{
if(phi > UCLT[m] || phi<LCLT[m])
{
thetactr=thetactr+1
alpstarCLT[m]=dbinom(x[m],n,phi)
} else alpstarCLT[m] = 0
}
delalpCLT=round((alp-sum(alpstarCLT))*100,2)
theta=round(100*thetactr/(n+1),2)
if(delalpCLT<f)
Fail_Pass="failure" else Fail_Pass="success"
return(data.frame(delalp=delalpCLT,theta,Fail_Pass))
}
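# errCTW: the same analysis for the continuity-corrected Wald-T interval (add-2/add-4 adjustment at the extremes,
# t quantile with moment-based degrees of freedom).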
errCTW<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if ((class(c) != "integer") & (class(c) != "numeric") || length(c) >1 || c<0 ) stop("'c' has to be positive")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
x=0:n
k=n+1
pCTW=0
qCTW=0
seCTW=0
DOF=0
cv=0
LCTW=0
UCTW=0
for(i in 1:k)
{
if(x[i]==0||x[i]==n)
{
pCTW[i]=(x[i]+2)/(n+4)
qCTW[i]=1-pCTW[i]
}else
{
pCTW[i]=x[i]/n
qCTW[i]=1-pCTW[i]
}
f1=function(p,n) p*(1-p)/n
f2=function(p,n) (p*(1-p)/(n^3))+(p+((6*n)-7)*(p^2)+(4*(n-1)*(n-3)*(p^3))-(2*(n-1)*((2*n)-3)*(p^4)))/(n^5)-(2*(p+((2*n)-3)*(p^2)-2*(n-1)*(p^3)))/(n^4)
DOF[i]=2*((f1(pCTW[i],n))^2)/f2(pCTW[i],n)
cv[i]=qt(1-(alp/2), df=DOF[i])
seCTW[i]=cv[i]*sqrt(f1(pCTW[i],n))
LCTW[i]=max(pCTW[i]-(seCTW[i]+c),0)
UCTW[i]=min(pCTW[i]+(seCTW[i]+c),1)
}
alpstarCTW=0
thetactr=0
for(m in 1:k)
{
if(phi > UCTW[m] || phi<LCTW[m])
{
thetactr=thetactr+1
alpstarCTW[m]=dbinom(x[m],n,phi)
} else alpstarCTW[m] = 0
}
delalpCTW=round((alp-sum(alpstarCTW))*100,2)
theta=round(100*thetactr/(n+1),2)
if(delalpCTW<f)
Fail_Pass="failure" else Fail_Pass="success"
return(data.frame(delalp=delalpCTW,theta,Fail_Pass))
}
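# errCAll: runs the five continuity-corrected methods (Wald, Score, ArcSine, Logit-Wald, Wald-T) on the same inputs
# and stacks the results with a method label.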
errCAll<-function(n,alp,phi,c,f)
{
if (missing(n)) stop("'n' is missing")
if (missing(alp)) stop("'alpha' is missing")
if (missing(phi)) stop("'phi' is missing")
if (missing(c)) stop("'c' is missing")
if (missing(f)) stop("'f' is missing")
if ((class(n) != "integer") & (class(n) != "numeric") || n<=0 ) stop("'n' has to be greater than 0")
if (alp>1 || alp<0 || length(alp)>1) stop("'alpha' has to be between 0 and 1")
if (phi>1 || phi<0) stop("Null hypothesis 'phi' has to be between 0 and 1")
if (c<=0 || c>(1/(2*n))) stop("'c' has to be positive and less than or equal to 1/(2*n)")
if ((class(f) != "integer") & (class(f) != "numeric")) stop("'f' has to be numeric value")
df.1 = errCWD(n,alp,phi,c,f)
df.2 = errCSC(n,alp,phi,c,f)
df.3 = errCAS(n,alp,phi,c,f)
df.4 = errCLT(n,alp,phi,c,f)
df.5 = errCTW(n,alp,phi,c,f)
df.1$method = as.factor("CC-Wald")
df.2$method = as.factor("CC-Score")
df.3$method = as.factor("CC-ArcSine")
df.4$method = as.factor("CC-Logit-Wald")
df.5$method = as.factor("CC-Wald-T")
df.new= rbind(df.1,df.2,df.3,df.4,df.5)
return(df.new)
} |
fit <- lm(Ozone ~ Solar.R + Wind + Temp + I(Wind^2) + I(Temp^2) + I(Wind*Temp)+I(Wind*Temp^2) + I(Temp*Wind^2) + I(Temp^2*Wind^2),data=airquality)
visreg2d(fit, x="Wind", y="Temp")
visreg2d(fit, x="Wind", y="Temp", plot.type="persp")
visreg2d(fit, x="Wind", y="Temp", plot.type="rgl")
visreg2d(fit, x="Wind", y="Temp", plot.type="gg")
visreg2d(fit, x="Wind", y="Temp", type="contrast")
visreg2d(fit, x="Wind", y="Temp", type="contrast", plot.type="persp", col="slateblue")
visreg2d(fit, x="Wind", y="Temp", type="contrast", plot.type="rgl")
visreg2d(fit, x="Wind", y="Temp", type="contrast", plot.type='gg')
visreg2d(fit, x="Wind", y="Temp", cond=list('Solar.R'=100))
visreg2d(fit, x="Wind", y="Temp", cond=list('Solar.R'=500))
fit <- lm(log(Ozone) ~ Solar.R + Wind + Temp, data=airquality)
visreg2d(fit, "Wind", "Temp")
visreg2d(fit, "Wind", "Temp", trans=exp)
visreg2d(fit, "Wind", "Temp", trans=exp, plot.type='persp')
visreg2d(fit, "Wind", "Temp", trans=exp, plot.type='gg')
visreg2d(fit, "Wind", "Temp", xlab="MyAxis", zlab="MyTitle")
visreg2d(fit, "Wind", "Temp", color.palette=topo.colors)
visreg2d(fit, "Wind", "Temp", color=c('green', 'blue'))
visreg2d(fit, "Wind", "Temp", color=c('green', 'blue'), plot.type='gg')
visreg2d(fit, "Wind", "Temp", plot.type='gg', xlab="MyLabel", zlab="MyTitle")
airquality$Heat <- cut(airquality$Temp,3,labels=c("Cool","Mild","Hot"))
fit <- lm(Ozone ~ Solar.R + Wind + Heat,data=airquality)
visreg2d(fit, x="Wind", y="Solar.R")
visreg2d(fit, x="Wind", y="Solar.R",cond=list('Heat'='Hot'))
visreg2d(fit, x="Wind", y="Solar.R",cond=list('Heat'='Cool'))
visreg2d(fit, x="Wind", y="Heat")
visreg2d(fit, x="Heat", y="Wind")
visreg2d(fit, x="Wind", y="Heat", plot.type='gg')
visreg2d(fit, x="Heat", y="Wind", plot.type='gg')
airquality$Windy <- cut(airquality$Wind,2,labels=c("Windy","NotWindy"))
fit <- lm(Ozone ~ Solar.R + Windy + Heat + Month,data=airquality)
visreg2d(fit, "Heat", "Windy")
visreg2d(fit, "Solar.R", "Windy")
visreg2d(fit, "Heat", "Solar.R")
visreg2d(fit, "Heat", "Windy", plot.type='gg')
visreg2d(fit, "Heat", "Windy", plot.type="persp")
visreg2d(fit, "Heat", "Windy", plot.type="rgl") |
RVinePIT <- function(data, RVM) {
args <- preproc(c(as.list(environment()), call = match.call()),
check_data,
fix_nas,
check_if_01,
check_RVMs,
prep_RVMs)
list2env(args, environment())
T <- dim(data)[1]
d <- dim(data)[2]
o <- diag(RVM$Matrix)
if (any(o != length(o):1)) {
oldRVM <- RVM
RVM <- normalizeRVineMatrix(RVM)
data <- data[, o[length(o):1]]
}
N <- T
n <- d
V <- list()
V$direct <- array(0, dim = c(n, n, N))
V$indirect <- array(0, dim = c(n, n, N))
if (is.vector(data)) {
V$direct[n, , ] <- data[n:1]
} else {
V$direct[n, , ] <- t(data[, n:1])
}
vv <- as.vector(V$direct)
vv2 <- as.vector(V$indirect)
calcup <- as.vector(matrix(1, dim(RVM), dim(RVM)))
w1 <- as.vector(RVM$family)
w1[is.na(w1)] <- 0
th <- as.vector(RVM$par)
th[is.na(th)] <- 0
th2 <- as.vector(RVM$par2)
th2[is.na(th2)] <- 0
condirect <- as.vector(as.numeric(RVM$CondDistr$direct))
conindirect <- as.vector(as.numeric(RVM$CondDistr$indirect))
maxmat <- as.vector(RVM$MaxMat)
matri <- as.vector(RVM$Matrix)
matri[is.na(matri)] <- 0
maxmat[is.na(maxmat)] <- 0
condirect[is.na(condirect)] <- 0
conindirect[is.na(conindirect)] <- 0
tmp <- .C("RvinePIT",
as.integer(T),
as.integer(d),
as.integer(w1),
as.integer(maxmat),
as.integer(matri),
as.integer(condirect),
as.integer(conindirect),
as.double(th),
as.double(th2),
as.double(data),
as.double(rep(0,T*d)),
as.double(vv),
as.double(vv2),
as.integer(calcup),
PACKAGE = 'VineCopula')[[11]]
U <- matrix(tmp, ncol = d)
U <- reset_nas(U, args)
U <- U[, sort(o[length(o):1], index.return = TRUE)$ix]
return(U)
} |
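# t_apa: formats a t-test result (an 'htest' object) as APA-style text with the requested effect size;
# output can be plain text, (r)markdown, HTML, LaTeX, docx or plotmath.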
t_apa <- function(x, es = "cohens_d", es_ci = FALSE,
format = c("text", "markdown", "rmarkdown", "html", "latex",
"latex_math", "docx", "plotmath"),
info = FALSE, print = TRUE)
{
format <- match.arg(format)
if (!inherits(x, "htest") || !grepl("t-test", x$method))
{
stop("'x' must be a call to `t_test` or `t.test`")
}
if (format == "docx")
{
return(apa_to_docx("t_apa", x, es = es))
}
if (es_ci && grepl("Two", x$method) && (es != "cohens_d" ||
grepl("Welch", x$method)))
{
warning(paste("Confidence intervals currently only supported for",
"'cohens_d' and non-Welch test. Will omit confidence",
"interval."))
es_ci <- FALSE
}
statistic <- fmt_stat(x$statistic)
df <- x$parameter
p <- fmt_pval(x$p.value)
d <- fmt_es(cohens_d(x, corr = if (es == "cohens_d") "none" else es))
d_ci <- if (es_ci) paste0(" ", cohens_d_ci(x)) else ""
if (grepl("Welch", x$method))
{
df <- fmt_stat(df, equal_sign = FALSE)
}
if (es != "cohens_d" && (grepl("One Sample|Paired", x$method)))
{
warning(paste0("'", es, "' not available for ", x$method, ",",
" 'cohens_d' will be reported instead."))
es <- "cohens_d"
}
if (info) message(x$method)
text <- paste0(fmt_symb("t", format), "(", df, ") ", statistic, ", ",
fmt_symb("p", format), " ", p, ", ", fmt_symb(es, format), " ",
d, d_ci)
if (format == "latex")
{
text <- fmt_latex(text)
}
else if (format == "latex_math")
{
text <- fmt_latex_math(text)
}
else if (format == "plotmath")
{
text <- fmt_plotmath(
text, "(\\([0-9]+\\.*[0-9]*\\) [<=] -?[0-9]+\\.[0-9]{2}, )",
"( [<=>] \\.[0-9]{3}, )", "( [<=] -?[0-9]+\\.[0-9]{2}$)"
)
print <- FALSE
}
if (print) cat(text) else text
} |
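# singleclusterplot: draws nested quantile bands between the 'fromto' probabilities (plus optional quartile lines)
# for the rows of a matrix or the elements of a list, at the x-positions given by 'at'; clusterplot() wraps it with
# axes, titles and optional per-cluster panels.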
singleclusterplot = function(input,at = NULL,fromto = c(0.05,0.95),colpal = "standardheat",simulate = FALSE,daltonize = FALSE,cvd = "p",nrcol = 25,outer.col = "lightgrey",rev = FALSE,alpha = NULL,quartiles.col = c("grey","black","grey"),add.quartiles = TRUE)
{
if (!is.matrix(input) & !is.list(input)) stop("'input' must be a matrix or a list !")
if (is.null(at)) if (is.matrix(input)){at=1:ncol(input)} else if (is.list(input)){at=1:length(input)}
probes = length(at)
drawline = function(y,col="black",lwd=1,lty=1){lines(at[1:length(y)],y,type="l",col=col,lwd=lwd,lty=lty)}
if (is.matrix(input)){if (outer.col!="none") apply(input,1,drawline,col=outer.col)}
colpal = colorpalette(colpal,nrcol,simulate = simulate,daltonize = daltonize,cvd = cvd,alpha = alpha,rev = rev)
colpal = c(rev(colpal),colpal)
if (is.matrix(input)){
qline = apply(input,2,quantile,probs=seq(fromto[1],fromto[2],length=(length(colpal)+1)),na.rm=TRUE)
} else if (is.list(input)){
qline = lapply(input,quantile,probs=seq(fromto[1],fromto[2],length=(length(colpal)+1)),na.rm=TRUE)
qline = sapply(qline,c)
}
for (j in 1:length(colpal)){polygon(at[c(1:probes,probes:1)],c(qline[j,],qline[j+1,probes:1]),col = colpal[j],lty=0)}
if (add.quartiles){
if (is.matrix(input)){
drawline(apply(input,2,quantile,probs=0.5,na.rm=TRUE),col=quartiles.col[2],lwd=2)
drawline(apply(input,2,quantile,probs=0.25,na.rm=TRUE),col=quartiles.col[1],lwd=2)
drawline(apply(input,2,quantile,probs=0.75,na.rm=TRUE),col=quartiles.col[3],lwd=2)
} else if (is.list(input)){
drawline(sapply(input,quantile,probs=0.5,na.rm=TRUE),col=quartiles.col[2],lwd=2)
drawline(sapply(input,quantile,probs=0.25,na.rm=TRUE),col=quartiles.col[1],lwd=2)
drawline(sapply(input,quantile,probs=0.75,na.rm=TRUE),col=quartiles.col[3],lwd=2)
}
}
}
LSD.singleclusterplot = singleclusterplot
clusterplot = function(input,label = NULL,at = NULL,main = NULL,xlim = NULL,ylim = NULL,xlabels = NULL,fromto = c(0.05,0.95),colpal = "standardheat",simulate = FALSE,daltonize = FALSE,cvd = "p",nrcol = 25,outer.col = "lightgrey",quartiles.col = c("grey","black","grey"),add.quartiles = TRUE,separate = TRUE,rev = FALSE,size = TRUE,alpha = NULL,axes = TRUE,...)
{
if (!is.matrix(input) & !is.list(input)) stop("'input' must be a matrix or a list !")
if (is.null(at)) if (is.matrix(input)){at=1:ncol(input)} else if (is.list(input)){at=1:length(input)}
probes = length(at)
if (is.null(xlim)){xlim=c(min(at),max(at))}
maxp = xlim[2]
minp = xlim[1]
if (is.null(ylim)) if (is.matrix(input)){ylim=c(min(input,na.rm=TRUE),max(input,na.rm=TRUE))} else if (is.list(input)){ylim=c(min(unlist(input),na.rm=TRUE),max(unlist(input),na.rm=TRUE))}
if (is.null(xlabels)) xlabels = 1:length(at)
if (is.null(label)){
plot.new()
plot.window(xlim = xlim,ylim = ylim,...)
if (size){
if (is.matrix(input)){
main = paste(main," (
} else if (is.list(input)){
input.length.range = range(as.numeric(summary(input)[,"Length"]))
main = paste(main," (
}
}
title(main)
if (axes){
axis(1,at=at,labels=xlabels,...)
axis(2)
box()
}
singleclusterplot(input=input,at=at,fromto=fromto,colpal=colpal,simulate=simulate,daltonize=daltonize,cvd=cvd,nrcol=nrcol,outer.col=outer.col,add.quartiles=add.quartiles,quartiles.col=quartiles.col,rev=rev,alpha=alpha)
}
if (!is.null(label)) {
clusternames = sort(unique(label))
nrclusters = length(clusternames)
if (!is.matrix(input)) stop("'input' must be a matrix, if 'label' is specified !")
clustersets = split(1:nrow(input), factor(label))
if (!is.list(colpal)) colpal = as.list(colpal)
if (length(colpal) < nrclusters) colpal = rep(colpal, nrclusters)
if (separate == FALSE){
plot.new()
plot.window(xlim = xlim,ylim = ylim,...)
if (size){main = paste(main," (",nrow(input),")",sep="")}  # reconstructed ending (original line truncated): append the number of rows
title(main)
if (axes){
axis(1,at=at,labels=xlabels,...)
axis(2)
box()
}
}
if (separate == TRUE) par(mfrow = windowxy(nrclusters))
for (j in seq(clusternames)){
if (separate == TRUE){
if (length(main) == length(clustersets[[j]])) clustermain = main[j] else clustermain = paste(main,clusternames[j])
plot.new()
plot.window(xlim = xlim,ylim = ylim,...)
if (size){clustermain = paste(clustermain," (",length(clustersets[[j]]),")",sep="")}  # reconstructed ending (original line truncated): append the cluster size
title(clustermain)
if (axes){
axis(1,at=at,labels=xlabels,...)
axis(2)
box()
}
}
singleclusterplot(input=input[clustersets[[j]],,drop = FALSE],at=at,fromto=fromto,colpal=colpal[[j]],simulate=simulate,daltonize=daltonize,cvd=cvd,nrcol=nrcol,outer.col=outer.col,add.quartiles=add.quartiles,quartiles.col=quartiles.col,rev=rev,alpha=alpha)
}
}
}
LSD.clusterplot = clusterplot |
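# .MKrgsGetmse / rgsOptIC.MK: evaluate the asymptotic MSE for a candidate gamma and minimize it over [ggLo, ggUp]
# to obtain the optimally robust influence curve for MK estimators in the regression-scale model.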
.MKrgsGetmse<- function(gg, A, K, r, a1, a3, B, bUp, delta, itmax){
b.a1.a3.B <- try(.MrgsGetba1a3B(r = r, K = K, A = A,
gg = gg, a1 = a1, a3 = a3, B = B, bUp=bUp,
delta = delta, itmax = itmax), silent = TRUE)
if(!is.list(b.a1.a3.B))
stop("Algorithm did not converge\n",
"=> increase 'itmax' or try various starting values")
b <- b.a1.a3.B$b; a1 <- b.a1.a3.B$a1; a3 <- b.a1.a3.B$a3
B <- b.a1.a3.B$B
var <- E(K, .Mrgsgetvar, A = A, gg = gg, b = b, a1 = a1, a3 = a3, B = B)
C1 <- E(K, .MrgsGetC1, gg = gg, A = A)
mse <- var + gg^2*C1 + r^2*b^2
cat("current gamma:\t", gg, "current mse:\t", mse, "\n")
return(mse)
}
rgsOptIC.MK <- function(r, K, ggLo = 0.5, ggUp = 1.0, a1.start = -0.25,
a3.start = 0.25, B.start, bUp = 1000, delta = 1e-6,
itmax = 1000, check = FALSE){
Reg2Mom <- .rgsDesignTest(K = K)
if(is.logical(Reg2Mom))
stop("second moment matrix of regressor distribution 'K'",
"is (numerically) not positive definite")
k <- dimension(img(K))
if(k > 1)
if(.rgsRegressorCheck(K))
stop("Regressor is a.e. K concentrated on a conic")
A <- distr::solve(Reg2Mom)
if(missing(B.start)) B.start <- A %*% A
res <- optimize(.MKrgsGetmse, lower = ggLo, upper = ggUp,
tol = .Machine$double.eps^0.3, A = A, K = K, r = r, a1 = a1.start,
a3 = a3.start, B = B.start, bUp = bUp, delta = delta, itmax = itmax)
gg <- res$minimum
b.a1.a3.B <- .MrgsGetba1a3B(r = r, K = K, A = A, gg = gg, a1 = a1.start,
a3 = a3.start, B = B.start, bUp = bUp, delta = delta,
itmax = itmax)
b <- b.a1.a3.B$b; a1 <- b.a1.a3.B$a1; a3 <- b.a1.a3.B$a3
B <- b.a1.a3.B$B
if(check){
C1 <- E(K, .MrgsGetC1, A = A, gg = gg)
C2 <- distr::solve(A) - gg^2*E(K, .MrgsGetC2, A = A, gg = gg)
C3 <- 1 + 1/gg - gg^2*E(K, .MrgsGetC3, A = A, gg = gg)
kont1 <- try(E(K, .MrgsGetch1, A = A, gg = gg, b = b, a1 = a1,
a3 = a3, B = B), silent = TRUE)
if(is.numeric(kont1))
cat("constraint (M2):\t", kont1 - C1, "\n")
else
cat("could not determine constraint (M2):\n", kont1, "\n")
kont2 <- try(E(K, .MrgsGetch2, A = A, gg = gg, b = b, a1 = a1,
a3 = a3, B = B), silent = TRUE)
if(is.numeric(kont2))
cat("constraint (M3):\t", kont2 - C2, "\n")
else
cat("could not determine constraint (M3):\n", kont2, "\n")
kont3 <- try(E(K, .MrgsGetch3, A = A, gg = gg, b = b, a1 = a1,
a3 = a3, B = B), silent = TRUE)
if(is.numeric(kont3)){
cat("constraint (M4):\t", kont3 - C3, "\n")
}else
cat("could not determine constraint (M4):\n", kont3, "\n")
rvgl <- try(.MrgsGetr(b = b, K = K, r = r, A = A, gg = gg, a1 = a1,
a3 = a3, B = B), silent = TRUE)
if(is.numeric(rvgl))
cat("MSE equation:\t", rvgl ,"\n")
else
cat("could not determine MSE equation:\n", rvgl ,"\n")
}
w <- .MrgsGetw
fct1 <- function(x){
B.mat <- matrix(B, ncol = k)
v <- as.vector(t(x[1:k]) %*% B.mat %*% x[1:k])
A.mat <- matrix(A, ncol = k)
z <- t(x[1:k]) %*% A.mat
z <- as.vector(z %*% t(z))
wfct <- w
w.vct <- wfct(u = x[k+1], v = v, z = z, gg = gg, b = b, a1 = a1, a3 = a3)
psi <- (((a1 + v)*x[k+1] + a3*x[k+1]^3)/(z + gg^2*x[(k+1)]^2)*w.vct
+ gg^2*x[k+1]/(z + gg^2*x[k+1]^2))
return(A.mat %*% x[1:k]*psi)
}
body(fct1) <- substitute({ B.mat <- matrix(B, ncol = k)
v <- as.vector(t(x[1:k]) %*% B.mat %*% x[1:k])
A.mat <- matrix(A, ncol = k)
z <- t(x[1:k]) %*% A.mat
z <- as.vector(z %*% t(z))
wfct <- w
w.vct <- wfct(u = x[k+1], v = v, z = z, gg = gg, b = b, a1 = a1, a3 = a3)
psi <- (((a1 + v)*x[k+1] + a3*x[k+1]^3)/(z + gg^2*x[(k+1)]^2)*w.vct
+ gg^2*x[k+1]/(z + gg^2*x[k+1]^2))
return(A.mat %*% x[1:k]*psi)},
list(w = w, b = b, a1 = a1, a3 = a3, B = B, A = A, gg = gg, k = k))
fct2 <- function(x){
B.mat <- matrix(B, ncol = k)
v <- as.vector(t(x[1:k]) %*% B.mat %*% x[1:k])
A.mat <- matrix(A, ncol = k)
z <- t(x[1:k]) %*% A.mat
z <- as.vector(z %*% t(z))
wfct <- w
w.vct <- wfct(u = x[k+1], v = v, z = z, gg = gg, b = b, a1 = a1, a3 = a3)
psi <- (((a1 + v)*x[k+1] + a3*x[k+1]^3)/(z + gg^2*x[(k+1)]^2)*w.vct
+ gg^2*x[k+1]/(z + gg^2*x[k+1]^2))
return(gg*(x[k+1]*psi - 1))
}
body(fct2) <- substitute({ B.mat <- matrix(B, ncol = k)
v <- as.vector(t(x[1:k]) %*% B.mat %*% x[1:k])
A.mat <- matrix(A, ncol = k)
z <- t(x[1:k]) %*% A.mat
z <- as.vector(z %*% t(z))
wfct <- w
w.vct <- wfct(u = x[k+1], v = v, z = z, gg = gg, b = b, a1 = a1, a3 = a3)
psi <- (((a1 + v)*x[k+1] + a3*x[k+1]^3)/(z + gg^2*x[(k+1)]^2)*w.vct
+ gg^2*x[k+1]/(z + gg^2*x[k+1]^2))
return(gg*(x[k+1]*psi - 1)) },
list(w = w, b = b, a1 = a1, a3 = a3, B = B, A = A, gg = gg, k = k))
return(IC(name = "IC of MK type",
Curve = EuclRandVarList(EuclRandVariable(Map = list(fct1),
Domain = EuclideanSpace(dimension = (trunc(k)+1)),
dimension = trunc(k)),
RealRandVariable(Map = list(fct2),
Domain = EuclideanSpace(dimension = (trunc(k)+1)))),
Risks = list(asMSE = res$objective, asBias = b, trAsCov = res$objective - r^2*b^2),
Infos = matrix(c("rgsOptIC.MK", "optimally robust IC for MK estimators and 'asMSE'",
"rgsOptIC.MK", paste("where a1 =", round(a1, 3), ", a3 =", round(a3, 3),
", b =", round(b, 3), "and gamma =", round(gg, 3))),
ncol=2, byrow = TRUE, dimnames=list(character(0), c("method", "message"))),
CallL2Fam = call("NormLinRegScaleFamily", theta = numeric(k),
RegDistr = K, Reg2Mom = Reg2Mom)))
} |
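# LearnTgs: end-to-end structure-learning pipeline. Reads time-series expression data (TSV or RData, optionally from a JSON parameter file),
# optionally discretizes it, builds a mutual-information network with the chosen CLR variant, learns an unrolled DBN, rolls it into a
# directed network, and evaluates against a true network if one is supplied; results and logs are written to 'output.dirname'.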
LearnTgs <- function(isfile = 0,
json.file = '',
input.dirname = '',
input.data.filename = '',
num.timepts = 2,
true.net.filename = '',
input.wt.data.filename = '',
is.discrete = TRUE,
num.discr.levels = 2,
discr.algo = '',
mi.estimator = 'mi.pca.cmi',
apply.aracne = FALSE,
clr.algo = 'CLR',
max.fanin = 14,
allow.self.loop = TRUE,
scoring.func = 'BIC',
output.dirname = '') {
if (isfile != 0) {
input.params <- rjson::fromJSON(file = json.file)
input.data.filename <- input.params$input.data.filename
num.timepts <- input.params$num.timepts
true.net.filename <- input.params$true.net.filename
input.wt.data.filename <- input.params$input.wt.data.filename
is.discrete <- input.params$is.discrete
num.discr.levels <- input.params$num.discr.levels
discr.algo <- input.params$discr.algo
mi.estimator <- input.params$mi.estimator
apply.aracne <- input.params$apply.aracne
clr.algo <- input.params$clr.algo
max.fanin <- input.params$max.fanin
allow.self.loop <- input.params$allow.self.loop
scoring.func <- input.params$scoring.func
input.dirname <- input.params$input.dirname
output.dirname <- input.params$output.dirname
rm(input.params)
}
if (input.dirname == '') {
input.dirname <- base::getwd()
}
if (output.dirname == '') {
output.dirname <- base::getwd()
} else if (!base::file.exists(output.dirname)) {
if (base::.Platform$OS.type == 'windows') {
output.dirname <-
base::normalizePath(output.dirname,
winslash = '\\',
mustWork = NA)
shell(
base::paste('mkdir ', output.dirname, sep = ''),
intern = TRUE,
mustWork = TRUE
)
} else if (base::.Platform$OS.type == 'unix') {
base::system(base::paste('mkdir ', output.dirname, sep = ''))
}
}
input.data.filename <-
base::paste(input.dirname, input.data.filename, sep = '/')
if (true.net.filename != '') {
true.net.filename <-
base::paste(input.dirname, true.net.filename, sep = '/')
}
if (input.wt.data.filename != '') {
input.wt.data.filename <-
base::paste(input.dirname, input.wt.data.filename, sep = '/')
}
base::print('The output directory name is:')
base::print(output.dirname)
base::print('')
output.filename <-
base::paste(output.dirname, 'output.txt', sep = '/')
output.file.conn <- base::file(output.filename, open = "wt")
base::sink(output.file.conn)
input.data.filename.ext <-
base::unlist(base::strsplit(input.data.filename, '[.]'))
input.data <- NULL
if (input.data.filename.ext[base::length(input.data.filename.ext)] == 'tsv') {
input.data <-
utils::read.table(input.data.filename, header = TRUE, sep = "\t")
timepts.names <- input.data[1:num.timepts, 1]
input.data <- input.data[,-1]
} else if (input.data.filename.ext[base::length(input.data.filename.ext)] == 'RData') {
base::load(input.data.filename)
timepts.names <- 1:num.timepts
}
orig.node.names <- base::colnames(input.data)
node.names <- base::c()
for (col.idx in 1:base::ncol(input.data))
{
new.node.name <-
base::paste('v', base::as.character(col.idx), sep = '')
node.names <- base::c(node.names, new.node.name)
}
base::rm(col.idx)
base::colnames(input.data) <- node.names
num.nodes <- base::ncol(input.data)
max.fanin <- base::min(num.nodes, 14)
num.samples.per.timept <- (base::nrow(input.data) / num.timepts)
if (is.discrete)
{
input.data.discr <- input.data
} else {
if (discr.algo == '') {
stop('Please specify the value of discr.algo.')
} else if (discr.algo == 'discretizeData.2L.wt.l') {
input.data.discr <-
discretizeData.2L.wt.l(input.data, input.wt.data.filename)
} else if (discr.algo == 'discretizeData.2L.Tesla') {
input.data.discr <-
discretizeData.2L.Tesla(input.data)
}
base::save(
input.data.discr,
file = base::paste(output.dirname, 'input.data.discr.RData', sep = '/')
)
}
input.data.discr.matrix <- base::data.matrix(input.data.discr)
input.data.discr.3D <-
base::array(
NA,
base::c(num.timepts, num.nodes, num.samples.per.timept),
dimnames = base::c(
base::list(timepts.names),
base::list(node.names),
base::list(1:num.samples.per.timept)
)
)
for (sample.idx in 1:num.samples.per.timept) {
start.row.idx <- (1 + (num.timepts * (sample.idx - 1)))
end.row.idx <- (num.timepts * sample.idx)
input.data.discr.3D[, , sample.idx] <-
input.data.discr.matrix[start.row.idx:end.row.idx,]
}
base::rm(sample.idx)
base::rm(input.data.discr.matrix)
start.time <- base::proc.time()
mi.net.adj.matrix <- NULL
mi.net.adj.matrix.list <- NULL
mut.info.matrix <- NULL
if (clr.algo == 'CLR') {
mut.info.matrix <-
base::matrix(
0,
nrow = num.nodes,
ncol = num.nodes,
dimnames = base::c(base::list(node.names), base::list(node.names))
)
if (mi.estimator == 'mi.pca.cmi') {
for (col.idx in 1:(num.nodes - 1)) {
for (col.idx.2 in (col.idx + 1):num.nodes) {
mut.info <-
ComputeCmiPcaCmi(input.data.discr[, col.idx],
input.data.discr[, col.idx.2])
mut.info.matrix[col.idx, col.idx.2] <- mut.info
mut.info.matrix[col.idx.2, col.idx] <- mut.info
}
base::rm(col.idx.2)
}
base::rm(col.idx)
} else if (mi.estimator == 'mi.empirical') {
mut.info.matrix <-
minet::build.mim(input.data.discr,
estimator = 'mi.empirical',
disc = 'none')
} else if (mi.estimator == 'mi.mm') {
mut.info.matrix <-
minet::build.mim(input.data.discr,
estimator = 'mi.mm',
disc = 'none')
}
if (apply.aracne == TRUE) {
mut.info.matrix.pre.aracne <- mut.info.matrix
mut.info.matrix <- minet::aracne(mut.info.matrix)
mut.info.matrix.post.aracne <- mut.info.matrix
elapsed.time <- (base::proc.time() - start.time)
base::writeLines('elapsed.time just after the ARACNE step= \n')
base::print(elapsed.time)
base::rm(elapsed.time)
base::save(
mut.info.matrix.pre.aracne,
file = base::paste(
output.dirname,
'mut.info.matrix.pre.aracne.RData',
sep = '/'
)
)
base::save(
mut.info.matrix.post.aracne,
file = base::paste(
output.dirname,
'mut.info.matrix.post.aracne.RData',
sep = '/'
)
)
base::rm(mut.info.matrix.pre.aracne,
mut.info.matrix.post.aracne)
} else {
base::save(
mut.info.matrix,
file = base::paste(output.dirname, 'mut.info.matrix.RData', sep = '/')
)
}
} else {
base::rm(mut.info.matrix)
}
if ((clr.algo == 'CLR') |
(clr.algo == 'CLR2') |
(clr.algo == 'CLR2.1') | (clr.algo == 'spearman')) {
base::rm(mi.net.adj.matrix.list)
mi.net.adj.matrix <-
base::matrix(
0,
nrow = num.nodes,
ncol = num.nodes,
dimnames = base::c(base::list(node.names), base::list(node.names))
)
} else if (clr.algo == 'CLR3') {
base::rm(mi.net.adj.matrix)
num.time.ivals <- (num.timepts - 1)
mi.net.adj.matrix.list <-
base::vector(mode = 'list', length = num.time.ivals)
base::rm(num.time.ivals)
}
mi.net.adj.matrix.list.filename <- NULL
if ((clr.algo == 'CLR') |
(clr.algo == 'CLR2') | (clr.algo == 'CLR2.1')) {
base::rm(mi.net.adj.matrix.list.filename)
}
if (clr.algo == 'CLR') {
mi.net.adj.matrix <-
LearnClrNetMfi(mut.info.matrix,
mi.net.adj.matrix,
num.nodes,
max.fanin,
output.dirname)
} else if (clr.algo == 'CLR2') {
mi.net.adj.matrix <-
LearnClr2NetMfi(
input.data.discr,
num.nodes,
node.names,
num.timepts,
max.fanin,
output.dirname,
mi.net.adj.matrix
)
} else if (clr.algo == 'CLR2.1') {
mi.net.adj.matrix <-
LearnClrNetMfiVer2.1(
input.data.discr,
num.nodes,
node.names,
num.timepts,
max.fanin,
output.dirname,
mi.net.adj.matrix
)
} else if (clr.algo == 'CLR3') {
mi.net.adj.matrix.list <-
LearnClr3NetMfi(
input.data.discr.3D,
num.nodes,
node.names,
num.timepts,
max.fanin,
mi.net.adj.matrix.list
)
mi.net.adj.matrix.list.filename <-
base::paste(output.dirname, 'mi.net.adj.matrix.list.RData', sep = '/')
base::save(mi.net.adj.matrix.list, file = mi.net.adj.matrix.list.filename)
base::rm(mi.net.adj.matrix.list)
}
elapsed.time <-
(base::proc.time() - start.time)
base::writeLines('elapsed.time just after the CLR step= \n')
base::print(elapsed.time)
base::rm(elapsed.time)
if (clr.algo == 'CLR') {
base::rm(mut.info.matrix)
}
if ((clr.algo == 'CLR') |
(clr.algo == 'CLR2') | (clr.algo == 'CLR2.1')) {
base::save(
mi.net.adj.matrix,
file = base::paste(output.dirname, 'mi.net.adj.matrix.RData', sep = '/')
)
}
unrolled.DBN.adj.matrix.list <- NULL
if ((clr.algo == 'CLR') |
(clr.algo == 'CLR2') | (clr.algo == 'CLR2.1')) {
unrolled.DBN.adj.matrix.list <-
learnDbnStructMo1Layer3dParDeg1_v2(
input.data.discr.3D,
mi.net.adj.matrix,
num.discr.levels,
num.nodes,
num.timepts,
max.fanin,
node.names,
clr.algo
)
base::rm(mi.net.adj.matrix)
} else if (clr.algo == 'CLR3') {
num.time.ivals <- (num.timepts - 1)
unrolled.DBN.adj.matrix.list <-
base::vector(mode = 'list', length = num.time.ivals)
time.ival.spec.dbn.adj.matrix <-
base::matrix(
0,
nrow = num.nodes,
ncol = num.nodes,
dimnames = base::c(base::list(node.names),
base::list(node.names))
)
for (time.ival.idx in 1:num.time.ivals) {
unrolled.DBN.adj.matrix.list[[time.ival.idx]] <-
time.ival.spec.dbn.adj.matrix
}
base::rm(time.ival.idx)
base::rm(num.time.ivals, time.ival.spec.dbn.adj.matrix)
unrolled.DBN.adj.matrix.list <-
LearnDbnStructMo1Clr3Ser(
input.data.discr.3D,
mi.net.adj.matrix.list.filename,
num.discr.levels,
num.nodes,
num.timepts,
max.fanin,
node.names,
unrolled.DBN.adj.matrix.list
)
base::rm(mi.net.adj.matrix.list.filename)
}
base::save(
unrolled.DBN.adj.matrix.list,
file = base::paste(output.dirname, 'unrolled.DBN.adj.matrix.list.RData', sep = '/')
)
base::rm(input.data.discr.3D)
if (!base::is.null(unrolled.DBN.adj.matrix.list)) {
rolled.DBN.adj.matrix <-
rollDbn_v2(
num.nodes,
node.names,
num.timepts,
unrolled.DBN.adj.matrix.list,
'any',
allow.self.loop
)
di.net.adj.matrix <- rolled.DBN.adj.matrix
base::rm(rolled.DBN.adj.matrix)
base::rownames(di.net.adj.matrix) <- orig.node.names
base::colnames(di.net.adj.matrix) <- orig.node.names
base::save(
di.net.adj.matrix,
file = base::paste(output.dirname, 'di.net.adj.matrix.RData', sep = '/')
)
adjmxToSif(di.net.adj.matrix, output.dirname)
if (true.net.filename != '') {
true.net.adj.matrix <- NULL
base::load(true.net.filename)
Result <- base::matrix(0, nrow = 1, ncol = 11)
base::colnames(Result) <-
base::list('TP',
'TN',
'FP',
'FN',
'TPR',
'FPR',
'FDR',
'PPV',
'ACC',
'MCC',
'F1')
if (base::is.matrix(true.net.adj.matrix)) {
predicted.net.adj.matrix <- di.net.adj.matrix
ResultVsTrue <-
calcPerfDiNet(predicted.net.adj.matrix,
true.net.adj.matrix,
Result,
num.nodes)
base::writeLines('Prediction vs Truth = \n')
base::print(ResultVsTrue)
base::rm(ResultVsTrue)
} else if (base::is.list(true.net.adj.matrix)) {
for (net.idx in 1:base::length(unrolled.DBN.adj.matrix.list)) {
predicted.net.adj.matrix <-
unrolled.DBN.adj.matrix.list[[net.idx]]
ResultVsTrue <-
calcPerfDiNet(predicted.net.adj.matrix,
true.net.adj.matrix[[net.idx]],
Result,
num.nodes)
Result <-
base::rbind(Result,
base::matrix(
ResultVsTrue[1,],
nrow = 1,
ncol = ncol(Result)
))
}
base::rm(net.idx)
ResultVsTrue <- base::colMeans(Result)
ResultVsTrue <-
base::matrix(colMeans(Result),
nrow = 1,
ncol = ncol(Result))
base::colnames(ResultVsTrue) <- base::colnames(Result)
base::writeLines('Prediction vs Truth = \n')
base::print(ResultVsTrue)
base::rm(ResultVsTrue)
}
base::save(Result,
file =
base::paste(output.dirname, 'Result.RData', sep = '/'))
base::rm(Result)
}
base::rm(di.net.adj.matrix)
}
base::rm(unrolled.DBN.adj.matrix.list)
elapsed.time <- (base::proc.time() - start.time)
base::writeLines('elapsed.time = \n')
base::print(elapsed.time)
base::rm(elapsed.time)
base::sink()
base::close(output.file.conn)
session.file <-
base::file(base::paste(output.dirname, 'sessionInfo.txt', sep = '/'),
open = "wt")
base::sink(session.file)
utils::sessionInfo()
base::sink()
base::close(session.file)
} |
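# Helpers for locating the user's .Renviron file and safely appending or replacing environment variables in it (used by set_renv()).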
.Renviron <- function() {
if (file.exists(".Renviron")) {
".Renviron"
} else {
file.path(home(), ".Renviron")
}
}
home <- function() {
if (!identical(Sys.getenv("HOME"), "")) {
file.path(Sys.getenv("HOME"))
} else {
file.path(normalizePath("~"))
}
}
is_named <- function(x) !is.null(names(x))
are_named <- function(x) is_named(x) && !"" %in% names(x)
has_name_ <- function(x, name) isTRUE(name %in% names(x))
define_args <- function(args, ...) {
dots <- list(...)
nms <- names(dots)
for (i in nms) {
if (!has_name_(args, i)) {
args[[i]] <- dots[[i]]
}
}
args
}
append_lines <- function(x, ...) {
args <- define_args(
c(x, list(...)),
append = TRUE,
fill = TRUE
)
do.call("cat", args)
}
is_incomplete <- function(x) {
con <- file(x)
x <- tryCatch(readLines(con), warning = function(w) return(TRUE))
close(con)
ifelse(isTRUE(x), TRUE, FALSE)
}
clean_renv <- function(var) {
x <- readLines(.Renviron())
x <- grep(sprintf("^%s=", var), x, invert = TRUE, value = TRUE)
writeLines(x, .Renviron())
}
check_renv <- function(var = NULL) {
if (!file.exists(.Renviron())) return(invisible())
if (is_incomplete(.Renviron())) {
append_lines("", file = .Renviron())
}
if (!is.null(var)) {
clean_renv(var)
}
invisible()
}
set_renv <- function(...) {
dots <- list(...)
stopifnot(are_named(dots))
vars <- names(dots)
x <- paste0(names(dots), "=", dots)
x <- paste(x, collapse = "\n")
for (var in vars) {
check_renv(var)
}
append_lines(x, file = .Renviron())
readRenviron(.Renviron())
} |
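# mapIdsToObfuscated: translates IDs into their obfuscated counterparts via the named vector 'map', stopping if any ID is missing from the map.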
mapIdsToObfuscated <- function(ids, map) {
if (!all(ids %in% names(map)))
stop("Some IDs are not in map.")
as.character(vapply(ids, function(id) {
map[names(map) == as.character(id)]}, character(1)))
} |
context("head tail tests")
file <- system.file(package = "fpeek", "datafiles", "cigfou-ISO-8859-1.txt")
file_utf8 <- tempfile()
peek_iconv(path = file, newfile = file_utf8, from = "ISO-8859-1", to = "UTF-8")
test_that("ckeck head output", {
expect_output(peek_head(file_utf8, n = 1), regexp = "La Cigale et la Fourmi")
expect_equal(peek_head(file_utf8, n = 1, intern = TRUE), "La Cigale et la Fourmi")
})
test_that("ckeck tail output", {
expect_output(peek_tail(file_utf8, n = 1), regexp = "Eh bien! dansez maintenant.")
expect_equal(peek_tail(file_utf8, n = 1, intern = TRUE), "Eh bien! dansez maintenant.")
}) |
ggdist::cut_cdf_qi |
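# Sample estimators for global minimum variance (GMV) portfolio quantities: the sample covariance matrix, the Q matrix,
# the relative gain s, GMV and benchmark portfolio returns/variances, and the shrinkage intensity alpha (with its
# asymptotic variance) under the concentration ratio c = p/n.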
Sigma_sample_estimator <- function(x) {
p <- nrow(x)
n <- ncol(x)
if (is.data.frame(x)) x <- as.matrix(x)
a <- .rowMeans(x, m=p, n=n, na.rm = TRUE)
a_x_size <- matrix(rep(a,n),nrow=p, ncol=n)
tcrossprod(x-a_x_size)/(ncol(x)-1)
}
Q_hat_n <- function(x){
SS <- Sigma_sample_estimator(x)
invSS <- solve(SS)
Ip <- rep.int(1, nrow(x))
invSS - (invSS %*% Ip %*% t(Ip) %*% invSS)/as.numeric(t(Ip) %*% invSS %*% Ip)
}
Q_hat_n_fast <- function(invSS, Ip, tIp){
invSS - (invSS %*% Ip %*% tIp %*% invSS)/as.numeric(tIp %*% invSS %*% Ip)
}
Q <- function(Sigma){
invSS <- solve(Sigma)
Ip <- rep.int(1, nrow(Sigma))
invSS - (invSS %*% Ip %*% t(Ip) %*% invSS)/as.numeric(t(Ip) %*% invSS %*% Ip)
}
s_hat <- function(x) {
a <- rowMeans(x, na.rm = TRUE)
as.numeric(t(a) %*% Q_hat_n(x) %*% a)
}
s <- function(mu, Sigma) {
as.numeric(t(mu) %*% Q(Sigma) %*% mu)
}
s_hat_c <- function(x) as.numeric((1-nrow(x)/ncol(x))*s_hat(x) - nrow(x)/ncol(x))
R_GMV <- function(mu, Sigma){
p <- length(mu)
invSS <- solve(Sigma)
Ip <- rep.int(1, p)
as.numeric((t(Ip) %*% invSS %*% mu)/(t(Ip) %*% invSS %*% Ip))
}
R_hat_GMV <- function(x){
a <- rowMeans(x, na.rm = TRUE)
SS <- Sigma_sample_estimator(x)
invSS <- solve(SS)
Ip <- rep.int(1, nrow(x))
as.numeric((t(Ip) %*% invSS %*% a)/(t(Ip) %*% invSS %*% Ip))
}
R_b <- function(mu, b) as.numeric(b %*% mu)
R_hat_b <- function(x, b) as.numeric(b %*% rowMeans(x, na.rm = TRUE))
V_b <- function(Sigma, b) as.numeric(t(b) %*% Sigma %*% b)
V_hat_b <- function(x, b) {
Sigma <- Sigma_sample_estimator(x)
as.numeric(t(b) %*% Sigma %*% b)
}
V_GMV <- function(Sigma){
as.numeric(1/(rep.int(1, nrow(Sigma)) %*% solve(Sigma) %*% rep.int(1, nrow(Sigma))))
}
V_hat_GMV <- function(x){
Sigma <- Sigma_sample_estimator(x)
as.numeric(1/(rep.int(1, nrow(Sigma)) %*% solve(Sigma) %*% rep.int(1, nrow(Sigma))))
}
V_hat_c <- function(x) {V_hat_GMV(x)/(1-nrow(x)/ncol(x))}
V_hat_c_fast <- function(ones, invSS, tones, c) {
V_hat_GMV <- as.numeric(1/(tones %*% invSS %*% ones))
V_hat_GMV/(1-c)
}
alpha_star <- function(gamma, mu, Sigma, b, c){
R_GMV <- R_GMV(mu, Sigma)
R_b <- R_b(mu, b)
V_GMV <- V_GMV(Sigma)
V_b <- V_b(Sigma, b)
s <- s(mu, Sigma)
Exp1 <- (R_GMV-R_b)*(1+1/(1-c))/gamma
Exp2 <- (V_b-V_GMV)
Exp3 <- s/(gamma^2)/(1-c)
numerator <- Exp1 + Exp2 + Exp3
Exp4 <- V_GMV/(1-c)
Exp5 <- -2*(V_GMV + (R_b - R_GMV)/(gamma*(1-c)))
Exp6 <- ((s+c)/(1-c)^3)/(gamma^2)
denomenator <- Exp4 + Exp5 + Exp6 + V_b
as.numeric(numerator/denomenator)
}
alpha_hat_star_c <- function(gamma, x, b){
R_GMV <- R_hat_GMV(x)
R_b <- R_hat_b(x, b)
V_GMV <- V_hat_GMV(x)
V_b <- V_hat_b(x, b)
c <- nrow(x)/ncol(x)
s <- s_hat_c(x)
V_c <- V_GMV/(1-c)
Exp1 <- (R_GMV-R_b)*(1+1/(1-c))/gamma
Exp2 <- (V_b-V_c)
Exp3 <- s/(gamma^2)/(1-c)
numerator <- Exp1 + Exp2 + Exp3
Exp4 <- V_c/(1-c)
Exp5 <- -2*(V_c + (R_b - R_GMV)/(gamma*(1-c)))
Exp6 <- ((s+c)/(1-c)^3)/gamma^2
denomenator <- Exp4 + Exp5 + Exp6 + V_b
as.numeric(numerator/denomenator)
}
alpha_hat_star_c_fast <- function(gamma, c, s, b, R_GMV, R_b, V_c, V_b){
Exp1 <- (R_GMV-R_b)*(1+1/(1-c))/gamma
Exp2 <- (V_b-V_c)
Exp3 <- s/(gamma^2)/(1-c)
numerator <- Exp1 + Exp2 + Exp3
Exp4 <- V_c/(1-c)
Exp5 <- -2*(V_c + (R_b - R_GMV)/(gamma*(1-c)))
Exp6 <- ((s+c)/(1-c)^3)/gamma^2
denomenator <- Exp4 + Exp5 + Exp6 + V_b
as.numeric(numerator/denomenator)
}
alpha_star_GMV <- function(Sigma, b, c){
V_GMV <- V_GMV(Sigma)
V_b <- V_b(Sigma, b)
numer <- (1-c)*(V_b-V_GMV)
as.numeric(numer/(numer + c*V_GMV))
}
alpha_hat_star_c_GMV <- function(x, b, c = nrow(x)/ncol(x)){
V_GMV <- V_hat_GMV(x)
V_b <- V_hat_b(x, b)
c <- nrow(x)/ncol(x)
V_c <- V_GMV/(1-c)
numer <- (1-c)*(V_b-V_c)
as.numeric(numer/(numer + c*V_c))
}
B_hat <- function(gamma, x, b){
R_GMV <- R_hat_GMV(x)
R_b <- R_hat_b(x, b)
V_GMV <- V_hat_GMV(x)
V_b <- V_hat_b(x, b)
c <- nrow(x)/ncol(x)
s <- s_hat_c(x)
V_c <- V_GMV/(1-c)
Exp4 <- V_c/(1-c)
Exp5 <- -2*(V_c + (R_b - R_GMV)/(gamma*(1-c)))
Exp6 <- ((s+c)/(1-c)^3)/gamma^2
denomenator <- Exp4 + Exp5 + Exp6 + V_b
as.numeric(denomenator)
}
Var_alpha_simple <- function(Sigma, b, mu, n){
c <- nrow(Sigma)/n
V_b <- V_b(Sigma, b)
V_GMV <- V_GMV(Sigma)
Lb <- V_b/V_GMV - 1
numer <- 2*(1-c)*c^2*(Lb+1)
denom <- ((1-c)*Lb+c)^4
multip<- (2-c)*Lb +c
numer / denom * multip
}
Omega.Lest <- function(s_hat_c, cc, gamma, V_hat_c, L, Q_n_hat, eta.est){
(((1-cc)/(s_hat_c+cc) + (s_hat_c+cc)/gamma)/gamma + V_hat_c)*(1-cc)*L%*%Q_n_hat%*%t(L)+
gamma^{-2}*(2*(1-cc)*cc^3/(s_hat_c+cc)^2+ 4*(1-cc)*cc*s_hat_c*(s_hat_c+2*cc)/(s_hat_c+cc)^2 +
2*(1-cc)*cc^2*(s_hat_c+cc)^2/(s_hat_c^2)-s_hat_c^2)*eta.est%*%t(eta.est)
} |
lalelu("foo", "bar")
.Transition <-
function(x,t){
c<-0;
while (c==0){
u<-rnorm(1,mean=x,sd=2+t^2); if (u>0 & u<10){c<-1}
}
u
}
.tstar <-
function(x){
10-x
}
.JumpRate <-
function(x,t){
1/(1+x)
}
.Transition.DC <-
function(x,t){
if (x==1){
if (t<2) {u<-1} else {v<-runif(1); if (v<1/2){u<-2} else {u<-3}}
}
else if (x==2){
v<-runif(1)
if (v<2/3){u<-1} else {u<-2}
}
else if (x==3){
v<-runif(1); w<-runif(1)
if ((v<1/2) & (w<1/2)) {u<-3} else {if (v<1/3){u<-1} else {u<-2}}
}
u
}
.tstar.DC <-
function(x){
if (x==1){z<-5} else if (x==2) {z<-6} else if (x==3) {z<-3.5}
z
}
.JumpRate.DC <-
function(x,t){
if (x==1){z<-t} else if (x==2) {z<-1} else if (x==3) {z<-t^2}
z
}
.InvYn <-
function(dat,t){
b=dat[dat>=t]
if (length(b)>0){
z<-1/length(b)
} else {z<-0}
z
}
.ker <-
function(x){
z<-c()
for (k in x){
if (abs(k)<1){
z<-c(z,(3/4)*(1-k^2))
}
else {z<-c(z,0)}
}
z
}
.Tri1 <-
function(dat , x){
N<-length(dat[1,])-1; m<-length(dat[,1]);
A<- ( dat[,1:N]==matrix(x,nrow=m,ncol=N,byrow=TRUE))
B<-A %*% as.matrix( rep(1,N)); C<-which(B==N);
dat[C,N+1]
}
.Tri2 <-
function(dat , x , y){
N<-length(dat[1,])-1; m<-length(dat[,1]);
A<- ( dat[1:(m-1),1:N]==matrix(x,nrow=(m-1),ncol=N,byrow=TRUE))
B<- ( dat[2:m,1:N]==matrix(y,nrow=(m-1),ncol=N,byrow=TRUE))
A2<-A %*% as.matrix( rep(1,N));
B2<-B %*% as.matrix( rep(1,N));
C<-which( (A2==N) & (B2==N) );
dat[C,N+1]
}
.CondSurv <-
function(dat , x , y , t){
A<-.Tri1(dat , x); B<-.Tri2(dat , x , y);
if (length(A)>0){
z<-length(B[B>t])/length(A)
} else {z<-0}
z
}
.InvYnBis <-
function(dat,t){
b<-dat[dat<=t]
z<-c()
if (length(b)>0){
for (k in 1:length(b)){
z<-c(z,.InvYn(dat,b[k]))
}
} else {z<-c()}
z
} |
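# multiply_shap: combines the SHAP decompositions of two models whose predictions are multiplied; cross terms are split
# evenly between the two factors, and the leftover (ex_1*ex_2 minus the mean product prediction) is redistributed in
# proportion to |contribution| so the new values sum to the product predictions. validate_shap() aligns and checks the inputs.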
multiply_shap <- function(
shap_1,
shap_2,
ex_1,
ex_2,
shap_1_names = NULL,
shap_2_names = NULL
) {
l <- validate_shap(shap_1, shap_2, ex_1, ex_2, shap_1_names, shap_2_names)
shap_1 <- l$shap_1
shap_2 <- l$shap_2
ex_1 <- l$ex_1
ex_2 <- l$ex_2
d <- purrr::map_dfc(
.x = 1:ncol(shap_1),
.f = ~{
(shap_1 %>% dplyr::pull(.x)) * c(ex_2) +
(shap_2 %>% dplyr::pull(.x)) * c(ex_1) +
((shap_1 %>% dplyr::pull(.x)) * (shap_2 %>% rowSums())) / 2 +
((shap_1 %>% rowSums()) * (shap_2 %>% dplyr::pull(.x))) / 2
}
) %>%
magrittr::set_colnames(colnames(shap_1)) %>%
suppressMessages()
preds_1 <- rowSums(shap_1 %>% dplyr::mutate(ex_val = ex_1))
preds_2 <- rowSums(shap_2 %>% dplyr::mutate(ex_val = ex_2))
preds_3 <- preds_1 * preds_2
expected_value <- mean(preds_3)
tot_s <- rowSums(abs(d))
shap_vals <- purrr::map_dfc(
.x = d,
.f = ~{
.x + (abs(.x) / tot_s) * (ex_1 * ex_2 - expected_value)
}
)
return(
list(
shap_vals = shap_vals,
expected_value = expected_value
)
)
}
validate_shap <- function(
shap_1,
shap_2,
ex_1,
ex_2,
shap_1_names,
shap_2_names
) {
if ("matrix" %in% class(shap_1)) {
shap_1 <- as.data.frame(shap_1)
}
if ("matrix" %in% class(shap_2)) {
shap_2 <- as.data.frame(shap_2)
}
shap_1_class <- purrr::map_chr(shap_1, class)
shap_2_class <- purrr::map_chr(shap_2, class)
if (min(c(shap_1_class, shap_2_class) %in% c("integer", "numeric")) == FALSE) {
stop("`shap1` and `shap2` must be only composed of numerical values")
}
if (!is.numeric(ex_1) | !is.numeric(ex_2)) {
stop("`ex_1` and `ex_2` must be numeric")
}
if (length(ex_1) > 1) {
warning("`ex1` has a length greater than 1, only using first element")
ex_1 <- ex_1[1]
}
if (length(ex_2) > 1) {
warning("`ex2` has a length greater than 1, only using first element")
ex_2 <- ex_2[1]
}
if ("array" %in% class(ex_1)) {
ex_1 <- c(ex_1)
}
if ("array" %in% class(ex_2)) {
ex_2 <- c(ex_2)
}
if (nrow(shap_1) != nrow(shap_2)) {
stop("`shap_1` and `shap_2` (or their elements) must have the same number of rows")
}
if (sum(c(is.null(shap_1_names), is.null(shap_2_names))) == 1) {
stop("You cannot specify only one of `shap_1_names` and `shap_2_names`. Please specify none or both.")
}
if ((is.null(shap_1_names) | is.null(shap_2_names)) & (min(dim(shap_1) == dim(shap_2)) == FALSE)) {
stop("`shap1` and `shap2` must have the same dimensions, or you must supply `shap_1_names` and `shap_2_names`")
} else if ((min(dim(shap_1) == dim(shap_2)) == FALSE) | !is.null(shap_1_names)) {
shap_2_missing_names <- setdiff(shap_1_names, shap_2_names)
shap_1_missing_names <- setdiff(shap_2_names, shap_1_names)
colnames(shap_1) <- shap_1_names
colnames(shap_2) <- shap_2_names
if (length(shap_2_missing_names) > 0) {
shap_2_missing <- matrix(0, nrow = nrow(shap_1), ncol = length(shap_2_missing_names)) %>%
as.data.frame() %>%
magrittr::set_colnames(shap_2_missing_names)
shap_2 <- shap_2 %>%
dplyr::bind_cols(shap_2_missing)
}
    if (length(shap_1_missing_names) > 0) {
shap_1_missing <- matrix(0, nrow = nrow(shap_2), ncol = length(shap_1_missing_names)) %>%
as.data.frame() %>%
magrittr::set_colnames(shap_1_missing_names)
shap_1 <- shap_1 %>%
dplyr::bind_cols(shap_1_missing)
}
shap_2 <- shap_2 %>%
dplyr::select(colnames(shap_1))
}
list(
shap_1 = shap_1,
shap_2 = shap_2,
ex_1 = ex_1,
ex_2 = ex_2
)
}
globalVariables(
c(
".",
"avg_value",
"bar_end",
"bar_start",
"covariate",
"desc",
"head",
"installed.packages",
"is_positive",
"setNames",
"shap_val",
"value",
"var_val",
"variable"
)
) |
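# slegendre.inner.products: inner products <P_k, P_k> = 1/(2k + 1), k = 0..n, of the shifted Legendre polynomials on [0, 1].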
slegendre.inner.products <- function( n )
{
if ( n < 0 )
stop( "negative highest polynomial order" )
if ( n != round( n ) )
stop( "highest polynomial order is not integer" )
inner.products <- rep( 0, n + 1 )
j <- 1
for ( k in 0:n ) {
inner.products[j] <- 1 / ( 2 * k + 1 )
j <- j + 1
}
return ( inner.products )
} |
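# Internal routines for probabilistic Fisher discriminant analysis (pFDA): BIC with per-model parameter counts, estimation of
# the discriminative subspace (F-step, optionally kernelized), the log-likelihood, and the M-step for the twelve covariance models.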
.pfda.bic <-
function(loglik,T,prms,n){
K = prms$K
p = prms$p
comp = switch(prms$model,
'DkBk' = (K-1) + K*p+ (K-1)*(p-K/2) + K^2*(K-1)/2 + K,
'DkB' = (K-1) + K*p+ (K-1)*(p-K/2) + K^2*(K-1)/2 + 1,
'DBk' = (K-1) + K*p+ (K-1)*(p-K/2) + K*(K-1)/2 + K,
'DB' = (K-1) + K*p+ (K-1)*(p-K/2) + K*(K-1)/2 + 1,
'AkjBk'= (K-1) + K*p + (K-1)*(p-K/2) + K^2,
'AkjB' = (K-1) + K*p + (K-1)*(p-K/2) + K*(K-1)+1,
'AkBk' = (K-1) + K*p + (K-1)*(p-K/2) + 2*K,
'AkB' = (K-1) + K*p + (K-1)*(p-K/2) + K+1,
'AjBk' = (K-1) + K*p + (K-1)*(p-K/2) + (K-1)+K,
'AjB' = (K-1) + K*p + (K-1)*(p-K/2) + (K-1)+1,
'ABk' = (K-1) + K*p + (K-1)*(p-K/2) + K+1,
'AB' = (K-1) + K*p + (K-1)*(p-K/2) + 2)
bic = loglik - 1/2 * comp * log(n)
}
.pfda.fstep <-
function(Y,T,kernel){
n = nrow(Y)
p = ncol(Y)
K = ncol(T)
m = colMeans(Y)
d = min(p-1,(K-1))
XX = as.matrix(Y - t(m*t(matrix(1,n,p))))
TT = t(apply(T,1,"/",sqrt(colSums(T))))
if (n>p & kernel==''){
S = t(XX) %*% XX /n
B = t(XX)%*%TT%*%t(TT)%*%XX / n
eig = svd(ginv(S)%*%B,nu=d,nv=0)
U = eig$u[,1:d]
}
else{
cat('Kernel mode!\n')
if (n<p | kernel=='linear') G = XX %*% t(XX)
if (kernel=='rbf') {sigma=1; G = as.matrix(exp(-dist(XX,diag=T)^2/(2*sigma^2)))}  # assumes the standard RBF kernel (negative exponent); the source omitted the minus sign
if (kernel=='sigmoid') {a=1;r=0.1;G = tanh(a * XX %*% t(XX) + r)}
lambda = 0
S = G %*% G + lambda*diag(n)
B = G %*% TT %*% t(TT) %*% G
H = svd(ginv(S)%*%B,nu=d,nv=0)$u[,1:d]
U = svd(t(Y) %*% H,nu=d,nv=0)$u[,1:d]
}
U
}
.pfda.loglikelihood <-
function(prms,Y,V,T){
p = prms$p
K = prms$K
prop = prms$prop
D = prms$D
d = ncol(V)
n = prop * nrow(Y)
ly = 0
for (k in 1:K){
bk = D[k,p,p]
if (d==1){
Dk = D[k,1,1]
ly = ly - 1/2 * n[k] * (log(Dk) + (p-d)*log(bk) + p*(1+log(2*pi)) - 2*log(prop[k]))
}
else {
Dk = D[k,(1:d),(1:d)]
ly = ly - 1/2 * n[k] * (log(det(Dk)) + (p-d)*log(bk) + p*(1+log(2*pi)) - 2*log(prop[k]))}
}
ly
}
.pfda.main <-
function(Y,cls,model='AkjBk',kernel='',graph=F){
n = nrow(Y)
p = ncol(Y)
g = as.numeric(cls)
K = length(unique(g))
T = matrix(0,n,K)
for (i in 1:n){T[i,g[i]] = 1}
V = .pfda.fstep(Y,T,kernel)
if (is.matrix(V)) for (i in 1:(K-1)) V[,i] = V[,i] / sqrt(sum(V[,i]^2))
else V = matrix(V / sqrt(sum(V^2)),ncol=1)
prms = .pfda.mstep(Y,V,T,model=model)
Lobs = .pfda.loglikelihood(prms,Y,V,T)
if (graph){
plot(as.data.frame(as.matrix(Y) %*% V[,1:2]),col=max.col(T),xlab='axis 1',ylab='axis 2',pch=20)
}
crit = .pfda.bic(Lobs,T,prms,n);
res = list(prms=prms,V=V,bic=crit,ll=Lobs,K=K)
class(res)='pfda'
res
}
.pfda.mstep <-
function(Y,U,T,model){
Y = as.matrix(Y)
n = nrow(Y)
p = ncol(Y)
K = ncol(T)
d = ncol(U)
mu = matrix(NA,K,K-1)
m = matrix(NA,K,p)
D = array(0,c(K,p,p))
X = Y %*% U
nk = colSums(T)
pk = nk/n
for (k in 1:K){
m[k,] = colMeans(Y[T[,k]==1,])
mu[k,] = m[k,] %*% U
YY = as.matrix(Y - .pfda.repmat(m[k,],n,1))
Ck = crossprod(t(.pfda.repmat(T[,k],p,1)) * YY, YY) / (nk[k]-1)
C = cov(Y)
if (model=='DkBk'){
D[k,(1:d),(1:d)] = crossprod(Ck%*%U,U)
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='DkB'){
D[k,(1:d),(1:d)] = crossprod(Ck%*%U,U)
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='DBk'){
D[k,(1:d),(1:d)] = crossprod(C%*%U,U)
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='DB'){
D[k,(1:d),(1:d)] = crossprod(C%*%U,U)
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AkjBk'){
if (d==1){D[k,1,1] = diag(crossprod(Ck%*%U,U))} else {
D[k,(1:d),(1:d)] = diag(diag(crossprod(Ck%*%U,U)))}
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AkjB'){
if (d==1){D[k,1,1] = diag(crossprod(Ck%*%U,U))} else {
D[k,(1:d),(1:d)] = diag(diag(crossprod(Ck%*%U,U)))}
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AkBk'){
if (d==1){D[k,1,1] = sum(diag(crossprod(Ck%*%U,U)))/d} else{
D[k,(1:d),(1:d)] = diag(rep(sum(diag(crossprod(Ck%*%U,U)))/d,d))}
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AkB'){
if (d==1){D[k,1,1] = sum(diag(crossprod(Ck%*%U,U)))/d} else{
D[k,(1:d),(1:d)] = diag(rep(sum(diag(crossprod(Ck%*%U,U)))/d,d))}
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AjBk'){
if (d==1){D[k,1,1] = diag(crossprod(C%*%U,U))} else {
D[k,(1:d),(1:d)] = diag(diag(crossprod(C%*%U,U)))}
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AjB'){
if (d==1){D[k,1,1] = diag(crossprod(C%*%U,U))} else{
D[k,(1:d),(1:d)] = diag(diag(crossprod(C%*%U,U)))}
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='ABk'){
if (d==1){D[k,1,1] = sum(diag(crossprod(C%*%U,U)))} else {
D[k,(1:d),(1:d)] = diag(rep(sum(diag(crossprod(C%*%U,U)))/d,d))}
bk = (.pfda.trace(Ck) - sum(diag(crossprod(Ck%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
if (model=='AB'){
if (d==1){D[k,1,1] = sum(diag(crossprod(C%*%U,U)))} else {
D[k,(1:d),(1:d)] = diag(rep(sum(diag(crossprod(C%*%U,U)))/d,d))}
bk = (.pfda.trace(C) - sum(diag(crossprod(C%*%U,U)))) / (p-d)
bk[bk<=0] = 1e-3
D[k,((d+1):p),((d+1):p)] = diag(rep(bk,p-d))
}
}
prms = list(K=K,p=p,mean=mu,my=m,prop=pk,D=D,model=model)
}
.pfda.repmat <-
function(v,n,p){
if (p==1) M = matrix(1,n,1) %*% v
else M = matrix(rep(v,n),n,(length(v)*p),byrow=TRUE)
M
}
.pfda.trace <-
function(t){
if (length(t)==1) T=t
else T=sum(diag(t))
T
}
.Random.seed <-
c(403L, 10L, -1989895324L, -1355772249L, -392693763L, 1763848692L,
-1594376486L, -1441251291L, 1310297407L, 1492304582L, -1407176880L,
-1963348109L, 1830973841L, 2008196032L, 621954462L, -691194007L,
1510060955L, 196754090L, 445699276L, -200299121L, 1824278981L,
851389820L, -2032527918L, -792235699L, -758303737L, 67142638L,
1374636744L, -1790827061L, -69599191L, -2144475528L, -1199692890L,
-472259135L, 337551955L, 1694738690L, 800268340L, -1411737161L,
1652411821L, -1121270076L, 25307882L, 1155353941L, -678788369L,
735551734L, 300875808L, 1222820323L, 817726081L, -2088817424L,
1798136718L, -1590911751L, -1035180405L, -1851697478L, 1570036156L,
-1282520321L, -1136808363L, -1904887444L, 409004482L, -126090723L,
-2045863273L, -1414188034L, -769060936L, -1992186981L, -1028593159L,
-1503512248L, 14189686L, -308293199L, -1231716381L, 547069106L,
494809348L, 728005191L, 1465761053L, -1024423020L, 1001520954L,
-314530939L, -818587553L, 1261152998L, -61271696L, 881490579L,
1331913521L, 790643168L, -2062125442L, -1861108471L, 1272778555L,
381830922L, -1708951828L, -1944067217L, -2074061659L, 1733412444L,
280727090L, 1007381805L, -1215833113L, -358376690L, -50766616L,
1532344043L, -1891428471L, 1067246360L, -1103153594L, -1020156255L,
158837043L, 520538274L, -665630828L, -1546705385L, -1884990963L,
-1442978012L, -1903910134L, 209825397L, 1387597263L, 762492630L,
2069052032L, 311551171L, -1204813215L, 1678154448L, 269843502L,
1082167769L, 2124467691L, 1917421658L, -1846900196L, -677958881L,
-1746165899L, 873088524L, 711860194L, 460563645L, 256566071L,
180784094L, 40913176L, -1646887813L, 1708335641L, 1400980136L,
620472406L, 309175505L, 285597827L, 1428779794L, -609229148L,
-501427609L, 1582336957L, -94329548L, -522133862L, -400249627L,
934022783L, 1938249734L, -1765958768L, 73655859L, -587336239L,
1465878400L, 1416256094L, -1217543767L, -1165610917L, -1523750806L,
2096056588L, 1640792911L, 1649205253L, -896418116L, 1253316498L,
-1278737267L, -1038294329L, -1498491986L, 350866952L, -2059214325L,
463565545L, -1936065096L, -1873773978L, -482785535L, -1780502125L,
-1964406206L, -1656627724L, 517974007L, -1489003539L, 368899204L,
897198762L, -2042249195L, 39872687L, -1730779466L, -280660768L,
867145763L, 917096257L, -1586451920L, -318949298L, -1506432839L,
-893854517L, -710057606L, 40246396L, -1125181249L, 1475669397L,
609529260L, -2005276798L, 1516957021L, 944317527L, 991396670L,
-1878856840L, 1475840091L, -2142859335L, 871632392L, 2120555702L,
470715377L, -749947357L, -988453262L, 1471265988L, -137414009L,
860271197L, -1603324844L, 2100600442L, -19866171L, -1175740641L,
-310097242L, 956394544L, -1721757997L, -892195599L, -233560032L,
1829162686L, -474191159L, 1376198779L, -1796399542L, 1082460844L,
1962196399L, 837799269L, -1374414052L, 1487978354L, -1125575699L,
-1656597465L, 1051561038L, -171954008L, -623976789L, -395412535L,
802346968L, -651510138L, 2046019425L, 405240563L, 2125079906L,
-1577126956L, 1143305943L, 87981261L, -1208001372L, 630950584L,
-1752042662L, -1163938416L, -1686929028L, 1826133268L, 143438146L,
-1746224384L, -488571972L, 601318224L, 755308834L, -1507263160L,
1728612364L, 1455194204L, 154995922L, -300521472L, 1265507156L,
1143597544L, 2097389562L, -519479600L, -1996950228L, 1905709300L,
-1537828974L, -661799840L, 1382231308L, 1815256800L, -546603678L,
1005351592L, -846722484L, 452667260L, 137476914L, -1284954512L,
1629842756L, 1869126616L, 507068538L, 802354736L, 1204187836L,
-933697708L, -1867460030L, -492342368L, 1628451004L, -1282954800L,
240825698L, 1318509544L, 1543405516L, 1436210940L, -842391566L,
1962577024L, 730536180L, 520131720L, -1563425478L, 369681232L,
190762092L, 1462284436L, -1339270830L, 321996512L, -1682988852L,
931033248L, -639438462L, 454840552L, 359637228L, 1636384828L,
-458153870L, -971454352L, 2093390372L, -1092787656L, 1328091674L,
715307216L, 1517565500L, 735401108L, -632085374L, -966447168L,
-1096190660L, -563681648L, 1605958690L, -265443320L, -1273908084L,
1662412828L, 1654080658L, -1372201856L, -2080506860L, 1512878888L,
-644658118L, -1831813936L, 592137324L, -250890572L, -722395118L,
1587748832L, -1836018740L, -1690389024L, -1752277982L, -1300490968L,
-1363853044L, 604200508L, 1542329586L, -141641488L, -163731644L,
1271070808L, -582534598L, 1983539440L, -139447620L, -641863148L,
831573186L, -1519630752L, 735270268L, -417583920L, -555705310L,
-162199384L, 1265745292L, -693212740L, 582285362L, -746251456L,
-1622301004L, 1800720712L, -151551302L, -1546348784L, 201894124L,
-1394517932L, 1022275346L, -312614496L, 995200972L, 2032547680L,
624702658L, -252666328L, 1323796908L, -2090863812L, 1830736754L,
-75688144L, -91551708L, 574299704L, 1317614298L, 1698867088L,
-1386451716L, 1992037780L, 1596461890L, -301241856L, -853096388L,
-1893275056L, -1955610462L, -865496248L, 1258045964L, 467325660L,
-1812592558L, 1704692864L, 655028052L, -1636524056L, 49830650L,
179224400L, 1014502444L, 1804407156L, -475375982L, 91085408L,
1920161932L, 1169779040L, 102893794L, 1545012392L, -1404543284L,
1206520316L, -420777038L, 1658657776L, -2130245180L, -1770435240L,
-1164363398L, -608396880L, 66060732L, 78114388L, 1642985922L,
1005726496L, -211256388L, -1012936624L, 692429794L, 1316210536L,
-742719668L, 665480956L, -766341390L, 548171008L, 2103698548L,
412069384L, 1828462010L, -673864240L, -818589716L, -432264300L,
1458770130L, -162202528L, -104671924L, -21935328L, -569312638L,
-829005336L, 139420652L, -2099268036L, -247538062L, 287798128L,
1878076708L, 1245362744L, 1507037594L, 1768048080L, 1576822204L,
374916756L, -1582988670L, 1059542464L, 2032193340L, 858493584L,
-1468463326L, 184702472L, 891486988L, 903682972L, 705274898L,
690484096L, 1810967316L, -24029912L, 1166766138L, 951374800L,
144972396L, 2011765812L, -50816878L, -334159520L, -557937332L,
29412832L, -1319472222L, -312915032L, -992637812L, -1944788036L,
1648843890L, -1596524560L, -1568615356L, -2034616232L, 198131770L,
1523232944L, 708751054L, 1075278843L, -363957155L, 1659349898L,
1203608488L, -1578346223L, -9424893L, 1547383556L, 2047561522L,
-67162745L, -62901295L, -1190916778L, 1545642804L, -2146886075L,
-654161265L, -787921912L, 459673638L, -125611821L, -422763979L,
2120540562L, 995411296L, 860482761L, -781873157L, -1505710132L,
1668325978L, 1471799119L, 1248169497L, 2122705966L, 412135996L,
-356266067L, -1150302121L, -840536672L, 1931624190L, -1870733877L,
855018445L, -1336574214L, -1078217416L, 387718881L, -1037962861L,
-1700963916L, -1198437694L, -88098089L, 1460923169L, -518157594L,
779299812L, 1301498645L, 664376127L, -1673546024L, 103428854L,
62983427L, 1709942341L, -1899815326L, 190772688L, 945357561L,
17805355L, 1192886172L, 216133578L, 2042579135L, 1600633353L,
-2123459810L, 251501260L, 1998066333L, -1086652633L, -1857908400L,
-885514258L, -606207845L, 2045831933L, -96890390L, 1583168136L,
2057833521L, -648135005L, 1949316772L, -1763240110L, -1649983833L,
-457047311L, 2005568054L, 937412308L, -373888411L, -1851234193L,
849103528L, 1845237766L, 1969460531L, -302802539L, -955406862L,
-628295104L, 181179305L, 614944283L, 872564716L, -235016198L,
1251644463L, -943856583L, 1629558350L, 520529564L, -1997628403L,
-1677614985L, 292120704L, 711110366L, 378799531L, 2036160173L,
1015819034L, -201485352L, -724971455L, -1159168269L, -1635245804L,
-472550238L, 1623437751L, -2074004095L, -1973419130L, -1240903228L,
-410904331L, -880084641L, 857191352L, 540726934L, 715104419L,
-1421571483L, -547643518L, -1811909392L, -841020263L, -2108054389L,
-1052725892L, 58678186L, -1931944993L, -48721559L, -315310722L,
-925557396L, -826505795L, -169572729L, 1821721200L, 412100878L,
-207653317L, 372106525L, 760393674L, 1732983016L, -1684399151L,
-1406755389L, 403257028L, -943719054L, 1837183047L, -1867025391L,
-2073873002L, 993282292L, 1576256133L, 983444431L, -162094392L,
-1537596570L, -394339949L, 2140705525L, -282572078L, 283309344L,
287636745L, 1080494651L, -840720500L, 464829978L, -1012267377L,
124600793L, 1319220846L, 2055583228L, 1692815213L, 1431353623L,
1217893600L, -1537779010L, 1033132171L, -53819635L, -1582381254L,
1514094456L, -321915615L, 905340371L, 1778926836L, -2027053598L
) |
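# getSunlightPosition: returns the sun's altitude and/or azimuth for the requested dates and coordinates,
# via a data.table pipeline around .getPosition().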
getSunlightPosition <- function(date = NULL, lat = NULL, lon = NULL, data = NULL,
keep = c("altitude", "azimuth")) {
data <- .buildData(date = date, lat = lat, lon = lon, data = data)
available_var <- c("altitude", "azimuth")
stopifnot(all(keep %in% available_var))
data$requestDate <- .buildRequestDate(data$date)
data <- data %>%
.[, (available_var) := .getPosition(date = requestDate, lat = lat, lng = lon)] %>%
.[, c("date", "lat", "lon", keep), with = FALSE] %>%
as.data.frame()
return(data)
} |
if (is.null(argv) | length(argv)!=2) {
cat("Usage: pace.r miles time\n")
q()
}
dig <- 5
rundist <- as.numeric(argv[1])
runtime <- as.numeric(argv[2])
cat("Miles : ", format(rundist, digits=dig), "\n")
cat("Time : ", format(runtime, digits=dig), "\n")
totalseconds <- floor(runtime)*60 + (runtime-floor(runtime))*100
totalsecondspermile <- totalseconds / rundist
minutespermile <- floor(totalsecondspermile/60)
secondspermile <- totalsecondspermile - minutespermile*60
pace <- minutespermile + secondspermile/100
cat("Pace : ",
format(minutespermile, digits=1), "min",
format(secondspermile, digits=dig), "sec\n")
cat("Mph : ",
format( (rundist * 3600)/totalseconds, digits=dig),"\n") |
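# Inf.Dorf.calc1: operating characteristics of informative two-stage hierarchical (Dorfman) group testing for one block:
# expected number of tests, pooling sensitivity/specificity and predictive values, for the given block size, pool sizes,
# and (ordered) individual risk probabilities.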
Inf.Dorf.calc1 <- function(p, Se, Sp, group.sz, pool.szs,
alpha = 2, a, trace = TRUE, print.time = TRUE, ...){
start.time <- proc.time()
N <- group.sz
if (length(p) == 1) {
p.vec <- expectOrderBeta(p = p, alpha = alpha, size = N, ...)
} else if (length(p) > 1) {
p.vec <- sort(p)
alpha <- NA
}
save.info <- inf.dorf.measures(prob = p.vec, se = Se, sp = Sp, N = N,
pool.sizes = pool.szs)
ET <- save.info$e
all.ind.testerror <- save.info$summary[,-1]
ind.testerror <- get.unique.index(all.ind.testerror[a, ],
which(colnames(all.ind.testerror) == "PSp"),
rowlabel = a)[,-1]
colnames(ind.testerror) <- c("PSe", "PSp", "PPPV", "PNPV", "individuals")
PSe.vec <- save.info$summary[,3]
PSp.vec <- save.info$summary[,4]
PSe <- sum(p.vec * PSe.vec) / sum(p.vec)
PSp <- sum((1 - p.vec) * (PSp.vec)) / sum(1 - p.vec)
PPPV <- sum(p.vec * PSe.vec) / sum(p.vec * PSe.vec +
(1 - p.vec) * (1 - PSp.vec))
PNPV <- sum((1 - p.vec) * PSp.vec) / sum((1 - p.vec) * PSp.vec +
p.vec * (1 - PSe.vec))
save.it <- c(alpha, N, ET, ET / N, PSe, PSp, PPPV, PNPV)
acc.ET <- matrix(data = save.it[5:8], nrow = 1, ncol = 4,
dimnames = list(NULL, c("PSe", "PSp", "PPPV", "PNPV")))
Se.display <- matrix(data = Se, nrow = 1, ncol = 2,
dimnames = list(NULL, "Stage" = 1:2))
Sp.display <- matrix(data = Sp, nrow = 1, ncol = 2,
dimnames = list(NULL, "Stage" = 1:2))
if (print.time) {
time.it(start.time)
}
list("algorithm" = "Informative two-stage hierarchical testing",
"prob" = list(p), "alpha" = alpha,
"Se" = Se.display, "Sp" = Sp.display,
"Config" = list("Block.sz" = save.it[2], "pool.szs" = pool.szs),
"p.vec" = p.vec, "ET" = save.it[3], "value" = save.it[4],
"Accuracy" = list("Individual" = ind.testerror, "Overall" = acc.ET))
} |
expected <- structure(14579, class = "Date")
test(id=0, code={
argv <- structure(list(e1 = structure(14580, class = "Date"), e2 = structure(1, units = "days", class = "difftime")), .Names = c("e1",
"e2"))
do.call('-.Date', argv);
}, o = expected);
|
rm(list=ls())
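# Downloads ONS weekly death registration data for England & Wales (2020 and
# 2021 workbooks), then plots the mean age of deaths involving COVID and the
# age distribution of those deaths over time.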
library(tidyverse)
library(curl)
library(readxl)
library(ragg)
library(lubridate)
library(extrafont)
library(ggtext)
library(ggstream)
library(paletteer)
library(scales)
theme_custom <- function() {
theme_classic() %+replace%
theme(plot.title.position="plot", plot.caption.position="plot",
strip.background=element_blank(), strip.text=element_text(face="bold", size=rel(1)),
plot.title=element_text(face="bold", size=rel(1.5), hjust=0,
margin=margin(0,0,5.5,0)),
text=element_text(family="Lato"))
}
url21 <- "https://www.ons.gov.uk/file?uri=%2fpeoplepopulationandcommunity%2fbirthsdeathsandmarriages%2fdeaths%2fdatasets%2fweeklyprovisionalfiguresondeathsregisteredinenglandandwales%2f2021/publishedweek3820211.xlsx"
temp <- tempfile()
temp <- curl_download(url=url21, destfile=temp, quiet=FALSE, mode="wb")
COVIDdeaths21 <- read_excel(temp, sheet=7, range="B12:AN31", col_names=FALSE) %>%
gather(week, COVID, c(2:ncol(.))) %>%
rename(age=`...1`) %>%
mutate(week=as.numeric(substr(week, 4, 5))-1,
date=as.Date("2021-01-02")+weeks(week-1))
url20 <- "https://www.ons.gov.uk/file?uri=%2fpeoplepopulationandcommunity%2fbirthsdeathsandmarriages%2fdeaths%2fdatasets%2fweeklyprovisionalfiguresondeathsregisteredinenglandandwales%2f2020/publishedweek532020.xlsx"
temp <- tempfile()
temp <- curl_download(url=url20, destfile=temp, quiet=FALSE, mode="wb")
COVIDdeaths20 <- read_excel(temp, sheet=6, range="B12:BC31", col_names=FALSE) %>%
gather(week, COVID, c(2:ncol(.))) %>%
rename(age=`...1`) %>%
mutate(week=as.numeric(substr(week, 4, 5))-1,
date=as.Date("2019-12-28")+weeks(week-1))
data <- bind_rows(COVIDdeaths20, COVIDdeaths21) %>%
mutate(age=factor(age, levels=c("<1", "1-4", "5-9", "10-14", "15-19", "20-24", "25-29", "30-34",
"35-39", "40-44", "45-49", "50-54", "55-59", "60-64", "65-69",
"70-74", "75-79", "80-84", "85-89", "90+")))
meanage <- data %>%
group_by(date) %>%
mutate(agemid=case_when(
age=="<1" ~ 0.5, age=="1-4" ~ 2.5, age=="5-9" ~ 7.5, age=="10-14" ~ 12.5,
age=="15-19" ~ 17.5, age=="20-24" ~ 22.5, age=="25-29" ~ 27.5, age=="30-34" ~ 32.5,
age=="35-39" ~ 37.5, age=="40-44" ~ 42.5, age=="45-49" ~ 47.5, age=="50-54" ~ 52.5,
age=="55-59" ~ 57.5, age=="60-64" ~ 62.5, age=="65-69" ~ 67.5, age=="70-74" ~ 72.5,
age=="75-79" ~ 77.5, age=="80-84" ~ 82.5, age=="85-89" ~ 87.5, age=="90+" ~ 92.5)) %>%
summarise(meanage=weighted.mean(agemid, COVID)) %>%
ungroup()
agg_tiff("Outputs/COVIDeathsMeanAge.tiff", units="in", width=9, height=6, res=500)
ggplot(meanage, aes(x=date, y=meanage))+
geom_line(colour="Red")+
scale_x_date(name="")+
scale_y_continuous(name="Average age of COVID death")+
theme_custom()+
labs(title="COVID vaccines have substantially reduced the average age of COVID deaths",
subtitle="Mean age of deaths involving COVID by week of registration in England & Wales",
caption="Data from ONS | Plot by @VictimOfMaths")
dev.off()
agg_tiff("Outputs/COVIDeathsEWxAgeProp.tiff", units="in", width=10, height=7, res=500)
ggplot(data %>% filter(date>=as.Date("2020-03-07")), aes(x=date, y=COVID, fill=age))+
geom_col(position="fill")+
scale_x_date(name="")+
scale_y_continuous(name="Proportion of COVID deaths", labels=label_percent(accuracy=1))+
scale_fill_paletteer_d("pals::stepped", name="Age")+
theme_custom()+
labs(title="The proportion of COVID deaths among older age groups fell as vaccines were rolled out",
subtitle="Deaths involving COVID by age and week of registration in England & Wales",
caption="Data from ONS | Plot by @VictimOfMaths")
dev.off()
agg_tiff("Outputs/COVIDeathsEWxAge.tiff", units="in", width=9, height=6, res=500)
ggplot(data %>% filter(date>=as.Date("2020-03-07")), aes(x=date, y=COVID, fill=age))+
geom_col()+
scale_x_date(name="")+
scale_y_continuous(name="Deaths involving COVID")+
scale_fill_paletteer_d("pals::stepped", name="Age")+
theme_custom()+
labs(title="COVID deaths in the delta wave are much lower than previous waves for all ages",
subtitle="Deaths involving COVID by age and week of registration in England & Wales",
caption="Data from ONS | Plot by @VictimOfMaths")
dev.off()
agg_tiff("Outputs/COVIDeathsEWxAgeStream.tiff", units="in", width=9, height=6, res=500)
ggplot(data %>% filter(date>=as.Date("2020-03-07")), aes(x=date, y=COVID, fill=age))+
geom_stream()+
scale_x_date(name="")+
scale_y_continuous(name="Deaths involving COVID")+
scale_fill_paletteer_d("pals::stepped", name="Age")+
theme_custom()+
labs(title="COVID deaths in the delta wave are much lower than previous waves for all ages",
subtitle="Mean age of deaths involving COVID by week of registration in England & Wales",
caption="Data from ONS | Plot by @VictimOfMaths")
dev.off() |
knitr::opts_chunk$set(
collapse = TRUE,
eval = FALSE,
comment = "
)
migrate_table <- data.frame(
  operation = c("Authorization", "Metadata request", "Report request"),
radwords = c("doAuth()", "reports(), metrics()", "statement() + getData()"),
rgoogleads = c("gads_auth_configure() + gads_auth()", "gads_get_metadata(), gads_get_fields()", "gads_get_report()")
)
DT::datatable(
migrate_table,
  colnames = c("Operation", "RAdwords", "rgoogleads"),
options = list(pageLength = 5, dom = 'tip'))
library(magrittr)
library(rvest)
reports <- read_html("https://developers.google.com/google-ads/api/docs/migration/mapping") %>%
html_element(css = ".responsive") %>%
html_table(header = TRUE)
DT::datatable(
reports,
  colnames = c("Report type in Google AdWords API", "Resource in Google Ads API"),
options = list(pageLength = 20)
) |
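# Instantiates a ResampleDesc on a concrete task (or a plain size), handling
# blocking, blocking.cv and stratification before delegating to
# instantiateResampleInstance().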
makeResampleInstance = function(desc, task, size, ...) {
assert(checkClass(desc, "ResampleDesc"), checkString(desc))
if (is.character(desc)) {
desc = makeResampleDesc(desc, ...)
}
if (!xor(missing(task), missing(size))) {
stop("One of 'size' or 'task' must be supplied")
}
if (!missing(task)) {
assertClass(task, classes = "Task")
size = getTaskSize(task)
blocking = task$blocking
} else {
task = NULL
blocking = factor()
}
if (!missing(size)) {
size = asCount(size)
}
if (length(blocking) && desc$stratify) {
stop("Blocking can currently not be mixed with stratification in resampling!")
}
fixed = desc$fixed
blocking.cv = desc$blocking.cv
if (fixed == FALSE) {
blocking.cv = desc$blocking.cv
}
if (length(blocking) > 0 && fixed && blocking.cv == FALSE) {
warningf("'Blocking' features in the task were detected but 'blocking.cv' was not set in 'resample()'.")
warningf("Setting 'blocking.cv' to TRUE to prevent undesired behavior. Set `blocking.cv' = TRUE` in `makeResampleDesc()` to silence this warning'.")
blocking.cv = TRUE
}
if (length(blocking) > 0 && !fixed && blocking.cv) {
if (is.null(task)) {
stop("Blocking always needs the task!")
}
levs = levels(blocking)
size2 = length(levs)
inst = instantiateResampleInstance(desc, size2, task)
inst$train.inds = lapply(inst$train.inds, function(i) sample(which(blocking %in% levs[i])))
ti = sample(size)
inst$test.inds = lapply(inst$train.inds, function(x) setdiff(ti, x))
inst$size = size
} else if (desc$stratify || !is.null(desc$stratify.cols)) {
if (is.null(task)) {
stop("Stratification always needs the task!")
}
if (desc$stratify) {
td = getTaskDesc(task)
stratify.cols = switch(td$type,
"classif" = getTaskTargetNames(task),
"surv" = getTaskTargetNames(task)[2L],
stopf("Stratification for tasks of type '%s' not supported", td$type))
} else {
stratify.cols = desc$stratify.cols
}
cn = c(getTaskFeatureNames(task), getTaskTargetNames(task))
i = which(stratify.cols %nin% cn)
if (length(i) > 0L) {
stopf("Columns specified for stratification, but not present in task: %s", collapse(stratify.cols[i]))
}
index = getTaskData(task, features = stratify.cols, target.extra = FALSE)[stratify.cols]
if (any(vlapply(index, is.double))) {
stop("Stratification on numeric double-precision variables not possible")
}
grp = tapply(seq_row(index), index, simplify = FALSE)
grp = unname(split(seq_row(index), grp))
train.inds = vector("list", length(grp))
test.inds = vector("list", length(grp))
for (i in seq_along(grp)) {
ci = grp[[i]]
if (length(ci)) {
inst = instantiateResampleInstance(desc, length(ci), task)
train.inds[[i]] = lapply(inst$train.inds, function(j) ci[j])
test.inds[[i]] = lapply(inst$test.inds, function(j) ci[j])
} else {
train.inds[[i]] = test.inds[[i]] = replicate(desc$iters, integer(0L), simplify = FALSE)
}
}
inst = instantiateResampleInstance(desc, size, task)
inst$train.inds = Reduce(function(i1, i2) Map(c, i1, i2), train.inds)
inst$test.inds = Reduce(function(i1, i2) Map(c, i1, i2), test.inds)
} else {
inst = instantiateResampleInstance(desc, size, task)
}
return(inst)
}
makeResampleInstanceInternal = function(desc, size, train.inds, test.inds, group = factor()) {
if (missing(test.inds) && !missing(train.inds)) {
test.inds = sample(size)
test.inds = lapply(train.inds, function(x) setdiff(test.inds, x))
}
if (!missing(test.inds) && missing(train.inds)) {
train.inds = sample(size)
train.inds = lapply(test.inds, function(x) setdiff(train.inds, x))
}
makeS3Obj("ResampleInstance",
desc = desc,
size = size,
train.inds = train.inds,
test.inds = test.inds,
group = group
)
}
print.ResampleInstance = function(x, ...) {
catf("Resample instance for %i cases.", x$size)
print(x$desc)
} |
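# Builds a daily case series from diagnosis records: keeps rows whose diagnosis
# code matches any ICD prefix (3-6 characters), optionally splits counts by up
# to five grouping variables, and returns one row per day between `start` and
# `end` with zero-filled counts. Requires the data.table package for the
# as.data.table() aggregation steps.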
case.series <-function(data,ICD=NULL,diagnosis,date="ADMDT",start="2001/1/1",end="2016/12/31",by1=NULL,by2=NULL,by3=NULL,by4=NULL,by5=NULL){
data$diag6=data$diag5=data$diag4=data$diag3=NA
data$diag3=substr(data[,diagnosis],1,3)
data$diag4=substr(data[,diagnosis],1,4)
data$diag5=substr(data[,diagnosis],1,5)
data$diag6=substr(data[,diagnosis],1,6)
data=data[data$diag3%in%ICD|data$diag4%in%ICD|data$diag5%in%ICD|data$diag6%in%ICD,]
test=as.character(data[,date])
test=test[test!=""&!is.na(test)]
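# Date-format heuristic: if the date field already contains "/" or "-" it is
# parsed directly; otherwise it is assumed to be an 8-digit yyyymmdd string.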
if(any(!is.na(grep("/",test[1])),!is.na(grep("-",test[1])))){
  data[,date]=as.Date(as.character(data[,date]))
} else {
  data[,date]=as.Date(paste(substr(data[,date],1,4),"/",substr(data[,date],5,6),"/",substr(data[,date],7,8),sep=""))
}
by=c(by1,by2,by3,by4,by5)
if(length(by)>0) by=by[!is.na(by)]
if(length(by)>0){
for(k in 1:length(by)){
a=paste(unique(data[,by[k]]))
b=paste("Level for", by[k],":",a[1])
for(t in 2:length(a)) b=paste(b,a[t])
print(b)
data$var.by=data[,by[k]]
names(data)[ncol(data)]=paste("var",k,sep="")
}
case=var1=var2=var3=var4=var5=date.test=NA
data$case=1
med=as.data.table(data)
if(length(by)==1) med=as.data.frame(med[,list(case=sum(case)),list(var1)])
if(length(by)==2) med=as.data.frame(med[,list(case=sum(case)),list(var1,var2)])
if(length(by)==3) med=as.data.frame(med[,list(case=sum(case)),list(var1,var2,var3)])
if(length(by)==4) med=as.data.frame(med[,list(case=sum(case)),list(var1,var2,var3,var4)])
if(length(by)==5) med=as.data.frame(med[,list(case=sum(case)),list(var1,var2,var3,var4,var5)])
names(med)[1:(ncol(med)-1)]=by
Output1=med
data$date.test=data[,date]
data=as.data.table(data)
if(length(by)==1) data=as.data.frame(data[,list(case=sum(case)),list(date.test,var1)])
if(length(by)==2) data=as.data.frame(data[,list(case=sum(case)),list(date.test,var1,var2)])
if(length(by)==3) data=as.data.frame(data[,list(case=sum(case)),list(date.test,var1,var2,var3)])
if(length(by)==4) data=as.data.frame(data[,list(case=sum(case)),list(date.test,var1,var2,var3,var4)])
if(length(by)==5) data=as.data.frame(data[,list(case=sum(case)),list(date.test,var1,var2,var3,var4,var5)])
data$code=data[,"date.test"]
for(t in 1:(ncol(Output1)-1)) data$code=paste(data$code,data[,paste("var",t,sep="")])
Output2=NULL
for(k in 1:nrow(Output1)){
med=data.frame(date=seq.Date(as.Date(start),as.Date(end),"1 day"))
for(t in 1:(ncol(Output1)-1)){
med$var=Output1[k,t]
names(med)[ncol(med)]=names(Output1)[t]
}
med$code=med$date
for(t in 1:(ncol(Output1)-1)) med$code=paste(med$code,med[,t+1])
med$case=data$case[match(med$code,data$code)]
med$case[is.na(med$case)]=0
med=med[,-which(names(med)=="code")]
Output2=rbind(Output2,med)
}
} else{
data$date.test=data[,date]
data$case=1
data=as.data.table(data)
data=as.data.frame(data[,list(case=sum(case)),list(date.test)])
med=data.frame(date=seq.Date(as.Date(start),as.Date(end),"1 day"))
med$case=data$case[match(med$date,data$date.test)]
med$case[is.na(med$case)]=0
Output1=NULL
Output2=med
}
print(Output1)
return(Output2)
} |
context("validate metadata")
root <- tempfile("git2rdata-is_git2rmeta")
dir.create(root)
test_that("is_git2rmeta checks root", {
expect_error(is_git2rmeta(file = "junk", root = 1),
"a 'root' of class numeric is not supported")
expect_error(is_git2rdata(file = "junk", root = 1),
"a 'root' of class numeric is not supported")
})
test_that("is_git2rmeta checks metadata", {
expect_false(is_git2rmeta(file = "junk", root = root))
expect_false(is_git2rdata(file = "junk", root = root))
expect_error(is_git2rmeta(file = "junk", root = root, message = "error"),
"`git2rdata` object not found.")
expect_warning(is_git2rmeta(file = "junk", root = root, message = "warning"),
"`git2rdata` object not found.")
expect_false(
suppressWarnings(
is_git2rmeta(file = "junk", root = root, message = "warning")
)
)
expect_warning(is_git2rdata(file = "junk", root = root, message = "warning"),
"`git2rdata` object not found.")
expect_false(
suppressWarnings(
is_git2rdata(file = "junk", root = root, message = "warning")
)
)
file <- basename(tempfile(tmpdir = root))
junk <- write_vc(test_data, file = file, root = root, sorting = "test_Date")
correct_yaml <- yaml::read_yaml(file.path(root, junk[2]))
file.remove(file.path(root, junk[2]))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Metadata file missing.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Metadata file missing.")
expect_false(is_git2rmeta(file = file, root = root))
junk_yaml <- correct_yaml
junk_yaml[["..generic"]] <- NULL
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"No '..generic' element.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"No '..generic' element.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
junk_yaml <- correct_yaml
junk_yaml[["..generic"]][["hash"]] <- NULL
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Corrupt metadata, no hash found.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Corrupt metadata, no hash found.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
junk_yaml <- correct_yaml
junk_yaml[["..generic"]][["git2rdata"]] <- NULL
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Data stored using an older version of `git2rdata`.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Data stored using an older version of `git2rdata`.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
junk_yaml <- correct_yaml
junk_yaml[["..generic"]][["git2rdata"]] <- "0.0.3"
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Data stored using an older version of `git2rdata`.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Data stored using an older version of `git2rdata`.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
junk_yaml <- correct_yaml
junk_yaml[["..generic"]][["data_hash"]] <- NULL
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Corrupt metadata, no data hash found.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Corrupt metadata, no data hash found.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
junk_yaml <- correct_yaml
junk_yaml[["..generic"]][["hash"]] <- "zzz"
yaml::write_yaml(junk_yaml, file.path(root, junk[2]))
expect_false(is_git2rmeta(file = file, root = root))
expect_error(is_git2rmeta(file = file, root = root, message = "error"),
"Corrupt metadata, mismatching hash.")
expect_warning(is_git2rmeta(file = file, root = root, message = "warning"),
"Corrupt metadata, mismatching hash.")
expect_false(
suppressWarnings(
is_git2rmeta(file = file, root = root, message = "warning")
)
)
})
test_that("is_git2rdata checks data", {
file <- basename(tempfile(tmpdir = root))
junk <- write_vc(test_data, file = file, root = root, sorting = "test_Date")
correct_yaml <- yaml::read_yaml(file.path(root, junk[2]))
yaml::write_yaml(correct_yaml, file.path(root, junk[2]))
correct_data <- readLines(file.path(root, junk[1]), encoding = "UTF-8")
junk_header <- correct_data
junk_header[1] <- "junk"
writeLines(junk_header, file.path(root, junk[1]))
expect_false(is_git2rdata(file = file, root = root))
expect_error(is_git2rdata(file = file, root = root, message = "error"),
"Corrupt data, incorrect header.")
expect_warning(is_git2rdata(file = file, root = root, message = "warning"),
"Corrupt data, incorrect header.")
expect_false(
suppressWarnings(
is_git2rdata(file = file, root = root, message = "warning")
)
)
file.remove(file.path(root, junk[1]))
expect_false(is_git2rdata(file = file, root = root))
expect_error(is_git2rdata(file = file, root = root, message = "error"),
"Data file missing.")
expect_warning(is_git2rdata(file = file, root = root, message = "warning"),
"Data file missing.")
expect_false(
suppressWarnings(
is_git2rdata(file = file, root = root, message = "warning")
)
)
})
root <- git2r::init(root)
git2r::config(root, user.name = "Alice", user.email = "[email protected]")
test_that("is_git2rmeta handle git repositories", {
file <- basename(tempfile(tmpdir = git2r::workdir(root)))
junk <- write_vc(test_data, file = file, root = root, sorting = "test_Date")
expect_true(is_git2rmeta(file = file, root = root))
expect_true(is_git2rdata(file = file, root = root))
})
file.remove(list.files(git2r::workdir(root), recursive = TRUE,
full.names = TRUE))
file.remove(list.files(git2r::workdir(root), recursive = TRUE,
include.dirs = TRUE, full.names = TRUE)) |
clean_pbp_dat <- function(play_df) {
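  # Vectors of play_type labels used below to classify plays (scoring plays,
  # defensive scores, turnovers, "normal" scrimmage plays, penalties, punts,
  # kickoffs and interceptions).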
scores_vec <- c(
"Blocked Punt Touchdown",
"Blocked Punt (Safety)",
"Punt (Safety)",
"Blocked Field Goal Touchdown",
"Missed Field Goal Return Touchdown",
"Fumble Recovery (Opponent) Touchdown",
"Fumble Return Touchdown",
"Interception Return Touchdown",
"Pass Interception Return Touchdown",
"Punt Touchdown",
"Punt Return Touchdown",
"Sack Touchdown",
"Uncategorized Touchdown",
"Defensive 2pt Conversion",
"Uncategorized",
"Two Point Rush",
"Safety",
"Penalty (Safety)",
"Punt Team Fumble Recovery Touchdown",
"Kickoff Team Fumble Recovery Touchdown",
"Kickoff (Safety)",
"Passing Touchdown",
"Rushing Touchdown",
"Field Goal Good",
"Pass Reception Touchdown",
"Fumble Recovery (Own) Touchdown"
)
defense_score_vec <- c(
"Blocked Punt Touchdown",
"Blocked Field Goal Touchdown",
"Missed Field Goal Return Touchdown",
"Punt Return Touchdown",
"Fumble Recovery (Opponent) Touchdown",
"Fumble Return Touchdown",
"Kickoff Return Touchdown",
"Defensive 2pt Conversion",
"Safety",
"Sack Touchdown",
"Interception Return Touchdown",
"Pass Interception Return Touchdown",
"Uncategorized Touchdown"
)
turnover_vec <- c(
"Blocked Field Goal",
"Blocked Field Goal Touchdown",
"Blocked Punt",
"Blocked Punt Touchdown",
"Field Goal Missed",
"Missed Field Goal Return",
"Missed Field Goal Return Touchdown",
"Fumble Recovery (Opponent)",
"Fumble Recovery (Opponent) Touchdown",
"Fumble Return Touchdown",
"Defensive 2pt Conversion",
"Interception",
"Interception Return",
"Interception Return Touchdown",
"Pass Interception Return",
"Pass Interception Return Touchdown",
"Kickoff Team Fumble Recovery",
"Kickoff Team Fumble Recovery Touchdown",
"Punt Touchdown",
"Punt Return Touchdown",
"Sack Touchdown",
"Uncategorized Touchdown"
)
normalplay <- c(
"Rush",
"Pass",
"Pass Reception",
"Pass Incompletion",
"Pass Completion",
"Sack",
"Fumble Recovery (Own)"
)
penalty <- c(
"Penalty",
"Penalty (Kickoff)",
"Penalty (Safety)"
)
offense_score_vec <- c(
"Passing Touchdown",
"Rushing Touchdown",
"Field Goal Good",
"Pass Reception Touchdown",
"Fumble Recovery (Own) Touchdown",
"Punt Touchdown",
"Punt Team Fumble Recovery Touchdown",
"Kickoff Touchdown",
"Kickoff Team Fumble Recovery Touchdown"
)
punt_vec <- c(
"Blocked Punt",
"Blocked Punt Touchdown",
"Blocked Punt (Safety)",
"Punt (Safety)",
"Punt",
"Punt Touchdown",
"Punt Team Fumble Recovery",
"Punt Team Fumble Recovery Touchdown",
"Punt Return Touchdown"
)
kickoff_vec <- c(
"Kickoff",
"Kickoff Return (Offense)",
"Kickoff Return Touchdown",
"Kickoff Touchdown",
"Kickoff Team Fumble Recovery",
"Kickoff Team Fumble Recovery Touchdown",
"Kickoff (Safety)",
"Penalty (Kickoff)"
)
int_vec <- c(
"Interception",
"Interception Return",
"Interception Return Touchdown",
"Pass Interception",
"Pass Interception Return",
"Pass Interception Return Touchdown"
)
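  # Derive indicator columns (scoring, touchdown, fumble, kickoff/punt detail,
  # rush/pass/sack flags, etc.) from play_type and play_text, then normalise
  # inconsistent play_type labels and patch a handful of known bad rows.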
play_df <- play_df %>%
dplyr::mutate(
scoring_play = ifelse(.data$play_type %in% scores_vec, 1, 0),
td_play = ifelse(stringr::str_detect(.data$play_text, regex("touchdown|for a TD", ignore_case = TRUE)) &
!is.na(.data$play_text), 1, 0),
touchdown = ifelse(stringr::str_detect(.data$play_type, regex("touchdown", ignore_case = TRUE)), 1, 0),
safety = ifelse(stringr::str_detect(.data$play_text, regex("safety", ignore_case = TRUE)), 1, 0),
fumble_vec = ifelse(stringr::str_detect(.data$play_text, "fumble") & !is.na(.data$play_text), 1, 0),
kickoff_play = ifelse(.data$play_type %in% kickoff_vec, 1, 0),
kickoff_tb = ifelse(stringr::str_detect(.data$play_text, regex("touchback", ignore_case = TRUE)) &
(.data$kickoff_play == 1) & !is.na(.data$play_text), 1, 0),
kickoff_onside = ifelse(stringr::str_detect(.data$play_text, regex("on-side|onside|on side", ignore_case = TRUE)) &
(.data$kickoff_play == 1) & !is.na(.data$play_text), 1, 0),
kickoff_oob = ifelse(stringr::str_detect(.data$play_text, regex("out-of-bounds|out of bounds", ignore_case = TRUE)) &
(.data$kickoff_play == 1) & !is.na(.data$play_text), 1, 0),
kickoff_fair_catch = ifelse(stringr::str_detect(.data$play_text, regex("fair catch|fair caught", ignore_case = TRUE)) &
(.data$kickoff_play == 1) & !is.na(.data$play_text), 1, 0),
kickoff_downed = ifelse(stringr::str_detect(.data$play_text, regex("downed", ignore_case = TRUE)) &
(.data$kickoff_play == 1) & !is.na(.data$play_text), 1, 0),
kick_play = ifelse(stringr::str_detect(.data$play_text, regex("kick|kickoff", ignore_case = TRUE)) &
!is.na(.data$play_text), 1, 0),
kickoff_safety = ifelse(!(.data$play_type %in% c("Blocked Punt", "Penalty")) & .data$safety == 1 &
stringr::str_detect(.data$play_text, regex("kickoff", ignore_case = TRUE)), 1, 0),
punt = ifelse(.data$play_type %in% punt_vec, 1, 0),
punt_play = ifelse(stringr::str_detect(.data$play_text, regex("punt", ignore_case = TRUE)) &
!is.na(.data$play_text), 1, 0),
punt_tb = ifelse(stringr::str_detect(.data$play_text, regex("touchback", ignore_case = TRUE)) &
(.data$punt == 1) & !is.na(.data$play_text), 1, 0),
punt_oob = ifelse(stringr::str_detect(.data$play_text, regex("out-of-bounds|out of bounds", ignore_case = TRUE)) &
(.data$punt == 1) & !is.na(.data$play_text), 1, 0),
punt_fair_catch = ifelse(stringr::str_detect(.data$play_text, regex("fair catch|fair caught", ignore_case = TRUE)) &
(.data$punt == 1) & !is.na(.data$play_text), 1, 0),
punt_downed = ifelse(stringr::str_detect(.data$play_text, regex("downed", ignore_case = TRUE)) &
(.data$punt == 1) & !is.na(.data$play_text), 1, 0),
punt_safety = ifelse((.data$play_type %in% c("Blocked Punt", "Punt")) & .data$safety == 1 &
stringr::str_detect(.data$play_text, regex("punt", ignore_case = TRUE)), 1, 0),
penalty_safety = ifelse((.data$play_type %in% c("Penalty")) & .data$safety == 1, 1, 0),
punt_blocked = ifelse(.data$punt == 1 & stringr::str_detect(.data$play_text, regex("blocked", ignore_case = TRUE)), 1, 0),
rush = ifelse(
(.data$play_type == "Rush" & !is.na(.data$play_text)) |
.data$play_type == "Rushing Touchdown" |
(.data$play_type == "Safety" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Opponent)" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Opponent) Touchdown" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Own)" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Own) Touchdown" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Return Touchdown" &
stringr::str_detect(.data$play_text, regex("run for", ignore_case = TRUE)) & !is.na(.data$play_text)), 1, 0
),
pass = if_else(
.data$play_type == "Pass Reception" |
.data$play_type == "Pass Completion" |
.data$play_type == "Passing Touchdown" |
.data$play_type == "Sack" |
.data$play_type == "Pass" |
.data$play_type == "Interception" |
.data$play_type == "Pass Interception Return" |
.data$play_type == "Interception Return Touchdown" |
(.data$play_type == "Pass Incompletion" & !is.na(.data$play_text)) |
.data$play_type == "Sack Touchdown" |
(.data$play_type == "Safety" &
stringr::str_detect(.data$play_text, regex("sacked",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Safety" &
stringr::str_detect(.data$play_text, regex("pass complete",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Own)" &
stringr::str_detect(.data$play_text, regex("pass complete|pass incomplete|pass intercepted",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Own)" &
stringr::str_detect(.data$play_text, regex("sacked",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Own) Touchdown" &
stringr::str_detect(.data$play_text, regex("pass complete|pass incomplete|pass intercepted",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Opponent)" &
stringr::str_detect(.data$play_text, regex("pass complete|pass incomplete|pass intercepted",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Opponent)" &
stringr::str_detect(.data$play_text, regex("sacked",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Recovery (Opponent) Touchdown" &
stringr::str_detect(.data$play_text, regex("pass complete|pass incomplete",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Return Touchdown" &
stringr::str_detect(.data$play_text, regex("pass complete|pass incomplete",
ignore_case = TRUE
)) & !is.na(.data$play_text)) |
(.data$play_type == "Fumble Return Touchdown" &
stringr::str_detect(.data$play_text, regex("sacked",
ignore_case = TRUE
)) & !is.na(.data$play_text)), 1, 0
),
sack_vec = ifelse(
((.data$play_type %in% c("Sack", "Sack Touchdown")) |
(.data$play_type %in% c(
"Fumble Recovery (Own)", "Fumble Recovery (Own) Touchdown",
"Fumble Recovery (Opponent)", "Fumble Recovery (Opponent) Touchdown",
"Fumble Return Touchdown"
) &
.data$pass == 1 & stringr::str_detect(.data$play_text, regex("sacked", ignore_case = TRUE))) |
(.data$play_type == "Safety" & stringr::str_detect(.data$play_text, regex("sacked", ignore_case = TRUE)))) &
!is.na(.data$play_text), 1, 0
),
play_type = ifelse(stringr::str_detect(.data$play_text, regex(" coin toss ", ignore_case = TRUE)),
"Coin Toss", .data$play_type
),
play_type = ifelse(.data$fumble_vec == 1 & .data$pass == 1 &
.data$change_of_poss == 1 & .data$td_play == 0 & .data$down != 4 &
!(.data$play_type %in% defense_score_vec),
"Fumble Recovery (Opponent)", .data$play_type
),
play_type = ifelse(.data$fumble_vec == 1 & .data$pass == 1 &
.data$change_of_poss == 1 & .data$td_play == 1,
"Fumble Recovery (Opponent) Touchdown", .data$play_type
),
play_type = ifelse(.data$fumble_vec == 1 & .data$rush == 1 &
.data$change_of_poss == 1 & .data$td_play == 0 &
!(.data$play_type %in% defense_score_vec),
"Fumble Recovery (Opponent)", .data$play_type
),
play_type = ifelse(.data$fumble_vec == 1 & .data$rush == 1 &
.data$change_of_poss == 1 & .data$td_play == 1,
"Fumble Recovery (Opponent) Touchdown", .data$play_type
),
td_check = ifelse(!str_detect(.data$play_type, "Touchdown"), 1, 0),
play_type = ifelse(.data$kickoff_play == 1 & .data$fumble_vec == 1 &
.data$td_play == 1 & .data$td_check == 1,
paste0(.data$play_type, " Touchdown"),
.data$play_type
),
play_type = ifelse(.data$punt_play == 1 & .data$td_play == 1 & .data$td_check == 1,
paste0(.data$play_type, " Touchdown"),
.data$play_type
),
play_type = ifelse(.data$kickoff_play == 1 & .data$fumble_vec == 0 &
.data$td_play == 1 & .data$td_check == 1,
"Kickoff Return Touchdown",
.data$play_type
),
play_type = ifelse(.data$td_play == 1 & .data$rush == 1 &
.data$fumble_vec == 0 & .data$td_check == 1,
"Rushing Touchdown",
.data$play_type
),
play_type = ifelse(.data$td_play == 1 & .data$pass == 1 & .data$td_check == 1 &
.data$fumble_vec == 0 & !(.data$play_type %in% int_vec),
"Passing Touchdown",
.data$play_type
),
play_type = ifelse(.data$pass == 1 & .data$play_type == "Pass Reception" &
.data$yards_gained == .data$yards_to_goal &
.data$fumble_vec == 0 & !(.data$play_type %in% int_vec),
"Passing Touchdown",
.data$play_type
),
play_type = ifelse(.data$play_type == "Blocked Field Goal" &
stringr::str_detect(.data$play_text, regex("for a TD", ignore_case = TRUE)),
"Blocked Field Goal Touchdown",
.data$play_type
),
play_type = ifelse(.data$play_type == "Punt Touchdown Touchdown", "Punt Touchdown", .data$play_type),
play_type = ifelse(.data$play_type == "Fumble Return Touchdown Touchdown", "Fumble Return Touchdown", .data$play_type),
play_type = ifelse(.data$play_type == "Rushing Touchdown Touchdown", "Rushing Touchdown", .data$play_type),
play_type = ifelse(.data$play_type == "Uncategorized Touchdown Touchdown", "Uncategorized Touchdown", .data$play_type),
play_type = ifelse(stringr::str_detect(.data$play_text, "pass intercepted for a TD") & !is.na(.data$play_text),
"Interception Return Touchdown", .data$play_type
),
play_type = ifelse(stringr::str_detect(.data$play_text, regex("sacked", ignore_case = TRUE)) &
stringr::str_detect(.data$play_text, regex("fumbled", ignore_case = TRUE)) &
stringr::str_detect(.data$play_text, regex("TD", ignore_case = TRUE)) &
!is.na(.data$play_text),
"Fumble Recovery (Opponent) Touchdown", .data$play_type
),
play_type = ifelse(.data$play_type == "Pass" & str_detect(.data$play_text, "pass complete"),
"Pass Completion", .data$play_type
),
play_type = ifelse(.data$play_type == "Pass" & str_detect(.data$play_text, "pass incomplete"),
"Pass Incompletion", .data$play_type
),
play_type = ifelse(.data$play_type == "Pass" & str_detect(.data$play_text, "pass intercepted"),
"Pass Interception", .data$play_type
),
play_type = ifelse(.data$play_type == "Pass" & str_detect(.data$play_text, "sacked"), "Sack", .data$play_type),
play_type = ifelse(.data$play_type == "Passing Touchdown" & str_detect(.data$play_text, "pass intercepted for a TD"),
"Interception Return Touchdown", .data$play_type
),
play_type = ifelse(.data$play_type == "Interception", "Interception Return", .data$play_type),
play_type = ifelse(.data$play_type == "Pass Interception", "Interception Return", .data$play_type),
play_type = ifelse(.data$play_type == "Pass Interception Return", "Interception Return", .data$play_type),
play_type = ifelse(.data$play_type == "Kickoff Touchdown" & .data$fumble_vec == 0,
"Kickoff Return Touchdown", .data$play_type
),
play_type = ifelse(.data$play_type %in% c("Kickoff", "Kickoff Return (Offense)") &
.data$fumble_vec == 1 & .data$change_of_pos_team == 1,
"Kickoff Team Fumble Recovery", .data$play_type
),
play_type = ifelse(.data$play_type == "Punt Touchdown" &
(.data$fumble_vec == 0 | (.data$fumble_vec == 1 & .data$game_id == 401112100)),
"Punt Return Touchdown", .data$play_type
),
play_type = ifelse(.data$play_type == "Punt" & .data$fumble_vec == 1 & .data$change_of_poss == 0,
"Punt Team Fumble Recovery", .data$play_type
),
play_type = ifelse(.data$play_type == "Punt Touchdown", "Punt Team Fumble Recovery Touchdown", .data$play_type),
play_type = ifelse(.data$play_type == "Kickoff Touchdown", "Kickoff Team Fumble Recovery Touchdown", .data$play_type),
play_type = ifelse(.data$play_type == "Fumble Return Touchdown" & (.data$pass == 1 | .data$rush == 1),
"Fumble Recovery (Opponent) Touchdown", .data$play_type
),
play_type = ifelse(.data$play_type %in% c("Pass Reception", "Rush", "Rushing Touchdown") &
(.data$pass == 1 | .data$rush == 1) & .data$safety == 1,
"Safety", .data$play_type
),
play_type = ifelse(.data$kickoff_safety == 1, "Kickoff (Safety)", .data$play_type),
play_type = ifelse(.data$punt_safety == 1, paste0(.data$play_type, " (Safety)"), .data$play_type),
play_type = ifelse(.data$penalty_safety == 1, paste0(.data$play_type, " (Safety)"), .data$play_type),
id_play = ifelse(.data$id_play == 400852742102997104 & .data$play_type == "Kickoff", 400852742102997106, .data$id_play),
id_play = ifelse(.data$id_play == 400852742102997106 & .data$play_type == "Defensive 2pt Conversion", 400852742102997104, .data$id_play),
sack = ifelse((.data$play_type %in% c("Sack") |
(.data$play_type %in% c(
"Fumble Recovery (Own)",
"Fumble Recovery (Own) Touchdown",
"Fumble Recovery (Opponent)",
"Fumble Recovery (Opponent) Touchdown"
) &
.data$pass == 1 & stringr::str_detect(.data$play_text, "sacked")) |
(.data$play_type == "Safety" & stringr::str_detect(.data$play_text, regex("sacked", ignore_case = TRUE)))) &
!is.na(.data$play_text), 1, 0),
int = ifelse(.data$play_type %in% c("Interception Return", "Interception Return Touchdown"), 1, 0),
int_td = ifelse(.data$play_type %in% c("Interception Return Touchdown"), 1, 0),
completion = ifelse(.data$play_type %in% c("Pass Reception", "Pass Completion", "Passing Touchdown") |
((.data$play_type %in% c(
"Fumble Recovery (Own)",
"Fumble Recovery (Own) Touchdown",
"Fumble Recovery (Opponent)",
"Fumble Recovery (Opponent) Touchdown"
) & .data$pass == 1 &
!stringr::str_detect(.data$play_text, "sacked"))), 1, 0),
pass_attempt = ifelse(.data$play_type %in% c(
"Pass Reception",
"Pass Completion",
"Passing Touchdown",
"Pass Incompletion",
"Interception Return",
"Interception Return Touchdown"
) |
((.data$play_type %in% c(
"Fumble Recovery (Own)",
"Fumble Recovery (Own) Touchdown",
"Fumble Recovery (Opponent)",
"Fumble Recovery (Opponent) Touchdown"
) & .data$pass == 1 &
!stringr::str_detect(.data$play_text, "sacked"))), 1, 0),
target = ifelse(.data$play_type %in% c(
"Pass Reception",
"Pass Completion",
"Passing Touchdown",
"Pass Incompletion"
) |
((.data$play_type %in% c(
"Fumble Recovery (Own)",
"Fumble Recovery (Own) Touchdown",
"Fumble Recovery (Opponent)",
"Fumble Recovery (Opponent) Touchdown"
) & .data$pass == 1 &
!stringr::str_detect(.data$play_text, "sacked"))), 1, 0),
pass_td = ifelse(.data$play_type %in% c("Passing Touchdown"), 1, 0),
rush_td = ifelse(.data$play_type %in% c("Rushing Touchdown"), 1, 0),
turnover_vec = ifelse(.data$play_type %in% turnover_vec, 1, 0),
offense_score_play = ifelse(.data$play_type %in% offense_score_vec, 1, 0),
defense_score_play = ifelse(.data$play_type %in% defense_score_vec, 1, 0),
downs_turnover = ifelse((.data$play_type %in% normalplay) &
(.data$yards_gained < .data$distance) & (.data$down == 4) &
!(.data$penalty_1st_conv), 1, 0),
scoring_play = ifelse(.data$play_type %in% scores_vec, 1, 0),
yds_punted = ifelse(.data$punt == 1, as.numeric(stringr::str_extract(
stringi::stri_extract_first_regex(.data$play_text, "(?<= punt for)[^,]+"),
"\\d+"
)), NA_real_),
yds_punt_gained = ifelse(.data$punt == 1, .data$yards_gained, NA_real_),
fg_inds = ifelse(stringr::str_detect(.data$play_type, "Field Goal"), 1, 0),
fg_made = ifelse(.data$play_type == "Field Goal Good", TRUE, FALSE),
yds_fg = ifelse(.data$fg_inds == 1, as.numeric(
stringr::str_remove(
stringr::str_extract(
.data$play_text,
regex("\\d{0,2} Yd FG|\\d{0,2} Yd Field|\\d{0,2} Yard Field", ignore_case = TRUE)
),
regex("yd FG|yd field|yard field", ignore_case = TRUE)
)
), NA),
yards_to_goal = ifelse(.data$fg_inds == 1 & !is.na(.data$yds_fg), .data$yds_fg - 17, .data$yards_to_goal),
yards_to_goal = ifelse(.data$id_play == "401112476101977728", 16, .data$yards_to_goal),
yards_to_goal = ifelse(.data$id_play == "401112476104999424", 36, .data$yards_to_goal),
pos_unit = dplyr::case_when(
.data$punt == 1 ~ "Punt Offense",
.data$kickoff_play == 1 ~ "Kickoff Return",
.data$fg_inds == 1 ~ "Field Goal Offense",
.data$play_type == "Defensive 2pt Conversion" ~ "Offense",
TRUE ~ "Offense"
),
def_pos_unit = dplyr::case_when(
.data$punt == 1 ~ "Punt Return",
.data$kickoff_play == 1 ~ "Kickoff Defense",
.data$fg_inds == 1 ~ "Field Goal Defense",
.data$play_type == "Defensive 2pt Conversion" ~ "Defense",
TRUE ~ "Defense"
),
lag_play_type3 = dplyr::lag(.data$play_type, 3),
lag_play_type2 = dplyr::lag(.data$play_type, 2),
lag_play_type = dplyr::lag(.data$play_type, 1),
lead_play_type = dplyr::lead(.data$play_type, 1),
lead_play_type2 = dplyr::lead(.data$play_type, 2),
lead_play_type3 = dplyr::lead(.data$play_type, 3)
)
return(play_df)
} |
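# Probability that a value from the given (optionally truncated) distribution
# falls in the interval (q - unitSize, q]; computed as a difference of the
# truncated CDF via ptrunc(), which is assumed to be defined elsewhere in the
# package.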
getDistanceProbability <- function(q,distribution,param1,param2=NA,tbound=c(-Inf,Inf),unitSize=1,...){
if(!is.numeric(unitSize)){
stop('argument unitSize must be numeric')
}
if(!(length(unitSize)==1 || length(unitSize)==length(q))){
stop('argument unitSize must either have length one or length equal to length(q)')
}
truncBound <- tbound
if(is.na(param2)){
prob2 <- ptrunc(q=q,distribution=distribution,tbound=truncBound,param1,lower.tail=TRUE,log.p=NULL)
prob1 <- ptrunc(q=q-abs(unitSize),distribution=distribution,tbound=truncBound,param1,lower.tail=TRUE,log.p=NULL)
}else{
prob2 <- ptrunc(q=q,distribution=distribution,tbound=truncBound,param1,param2,lower.tail=TRUE,log.p=NULL)
prob1 <- ptrunc(q=q-abs(unitSize),distribution=distribution,tbound=truncBound,param1,param2,lower.tail=TRUE,log.p=NULL)
}
out <- prob2-prob1
return(out)
} |
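# Thin R wrappers around compiled C++ routines; the .Call() pattern matches the
# RcppExports.R file that Rcpp generates for the collapse package.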
TRACpp <- function(x, xAG, g = 0L, ret = 1L) {
.Call(`_collapse_TRACpp`, x, xAG, g, ret)
}
TRAlCpp <- function(x, xAG, g = 0L, ret = 1L) {
.Call(`_collapse_TRAlCpp`, x, xAG, g, ret)
}
TRAmCpp <- function(x, xAG, g = 0L, ret = 1L) {
.Call(`_collapse_TRAmCpp`, x, xAG, g, ret)
}
fndistinctCpp <- function(x, ng = 0L, g = 0L, gs = NULL, narm = TRUE) {
.Call(`_collapse_fndistinctCpp`, x, ng, g, gs, narm)
}
fndistinctlCpp <- function(x, ng = 0L, g = 0L, gs = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fndistinctlCpp`, x, ng, g, gs, narm, drop)
}
fndistinctmCpp <- function(x, ng = 0L, g = 0L, gs = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fndistinctmCpp`, x, ng, g, gs, narm, drop)
}
BWCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, theta = 1, set_mean = 0, B = FALSE, fill = FALSE) {
.Call(`_collapse_BWCpp`, x, ng, g, gs, w, narm, theta, set_mean, B, fill)
}
BWmCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, theta = 1, set_mean = 0, B = FALSE, fill = FALSE) {
.Call(`_collapse_BWmCpp`, x, ng, g, gs, w, narm, theta, set_mean, B, fill)
}
BWlCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, theta = 1, set_mean = 0, B = FALSE, fill = FALSE) {
.Call(`_collapse_BWlCpp`, x, ng, g, gs, w, narm, theta, set_mean, B, fill)
}
fbstatsCpp <- function(x, ext = FALSE, ng = 0L, g = 0L, npg = 0L, pg = 0L, w = NULL, stable_algo = TRUE, array = TRUE, setn = TRUE, gn = NULL) {
.Call(`_collapse_fbstatsCpp`, x, ext, ng, g, npg, pg, w, stable_algo, array, setn, gn)
}
fbstatsmCpp <- function(x, ext = FALSE, ng = 0L, g = 0L, npg = 0L, pg = 0L, w = NULL, stable_algo = TRUE, array = TRUE, gn = NULL) {
.Call(`_collapse_fbstatsmCpp`, x, ext, ng, g, npg, pg, w, stable_algo, array, gn)
}
fbstatslCpp <- function(x, ext = FALSE, ng = 0L, g = 0L, npg = 0L, pg = 0L, w = NULL, stable_algo = TRUE, array = TRUE, gn = NULL) {
.Call(`_collapse_fbstatslCpp`, x, ext, ng, g, npg, pg, w, stable_algo, array, gn)
}
fdiffgrowthCpp <- function(x, n = 1L, diff = 1L, fill = NA_real_, ng = 0L, g = 0L, gs = NULL, t = NULL, ret = 1L, rho = 1, names = TRUE, power = 1) {
.Call(`_collapse_fdiffgrowthCpp`, x, n, diff, fill, ng, g, gs, t, ret, rho, names, power)
}
fdiffgrowthmCpp <- function(x, n = 1L, diff = 1L, fill = NA_real_, ng = 0L, g = 0L, gs = NULL, t = NULL, ret = 1L, rho = 1, names = TRUE, power = 1) {
.Call(`_collapse_fdiffgrowthmCpp`, x, n, diff, fill, ng, g, gs, t, ret, rho, names, power)
}
fdiffgrowthlCpp <- function(x, n = 1L, diff = 1L, fill = NA_real_, ng = 0L, g = 0L, gs = NULL, t = NULL, ret = 1L, rho = 1, names = TRUE, power = 1) {
.Call(`_collapse_fdiffgrowthlCpp`, x, n, diff, fill, ng, g, gs, t, ret, rho, names, power)
}
flagleadCpp <- function(x, n = 1L, fill = NULL, ng = 0L, g = 0L, t = NULL, names = TRUE) {
.Call(`_collapse_flagleadCpp`, x, n, fill, ng, g, t, names)
}
flagleadmCpp <- function(x, n = 1L, fill = NULL, ng = 0L, g = 0L, t = NULL, names = TRUE) {
.Call(`_collapse_flagleadmCpp`, x, n, fill, ng, g, t, names)
}
flagleadlCpp <- function(x, n = 1L, fill = NULL, ng = 0L, g = 0L, t = NULL, names = TRUE) {
.Call(`_collapse_flagleadlCpp`, x, n, fill, ng, g, t, names)
}
fmeanCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE) {
.Call(`_collapse_fmeanCpp`, x, ng, g, gs, w, narm)
}
fmeanmCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fmeanmCpp`, x, ng, g, gs, w, narm, drop)
}
fmeanlCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fmeanlCpp`, x, ng, g, gs, w, narm, drop)
}
fmodeCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, ret = 0L) {
.Call(`_collapse_fmodeCpp`, x, ng, g, gs, w, narm, ret)
}
fmodelCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, ret = 0L) {
.Call(`_collapse_fmodelCpp`, x, ng, g, gs, w, narm, ret)
}
fmodemCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, drop = TRUE, ret = 0L) {
.Call(`_collapse_fmodemCpp`, x, ng, g, gs, w, narm, drop, ret)
}
fnthCpp <- function(x, Q = 0.5, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, ret = 1L) {
.Call(`_collapse_fnthCpp`, x, Q, ng, g, gs, w, narm, ret)
}
fnthmCpp <- function(x, Q = 0.5, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, drop = TRUE, ret = 1L) {
.Call(`_collapse_fnthmCpp`, x, Q, ng, g, gs, w, narm, drop, ret)
}
fnthlCpp <- function(x, Q = 0.5, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, drop = TRUE, ret = 1L) {
.Call(`_collapse_fnthlCpp`, x, Q, ng, g, gs, w, narm, drop, ret)
}
fprodCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE) {
.Call(`_collapse_fprodCpp`, x, ng, g, w, narm)
}
fprodmCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fprodmCpp`, x, ng, g, w, narm, drop)
}
fprodlCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE, drop = TRUE) {
.Call(`_collapse_fprodlCpp`, x, ng, g, w, narm, drop)
}
fscaleCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE, set_mean = 0, set_sd = 1) {
.Call(`_collapse_fscaleCpp`, x, ng, g, w, narm, set_mean, set_sd)
}
fscalemCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE, set_mean = 0, set_sd = 1) {
.Call(`_collapse_fscalemCpp`, x, ng, g, w, narm, set_mean, set_sd)
}
fscalelCpp <- function(x, ng = 0L, g = 0L, w = NULL, narm = TRUE, set_mean = 0, set_sd = 1) {
.Call(`_collapse_fscalelCpp`, x, ng, g, w, narm, set_mean, set_sd)
}
fvarsdCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, stable_algo = TRUE, sd = TRUE) {
.Call(`_collapse_fvarsdCpp`, x, ng, g, gs, w, narm, stable_algo, sd)
}
fvarsdmCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, stable_algo = TRUE, sd = TRUE, drop = TRUE) {
.Call(`_collapse_fvarsdmCpp`, x, ng, g, gs, w, narm, stable_algo, sd, drop)
}
fvarsdlCpp <- function(x, ng = 0L, g = 0L, gs = NULL, w = NULL, narm = TRUE, stable_algo = TRUE, sd = TRUE, drop = TRUE) {
.Call(`_collapse_fvarsdlCpp`, x, ng, g, gs, w, narm, stable_algo, sd, drop)
}
mrtl <- function(X, names = FALSE, ret = 0L) {
.Call(`_collapse_mrtl`, X, names, ret)
}
mctl <- function(X, names = FALSE, ret = 0L) {
.Call(`_collapse_mctl`, X, names, ret)
}
psmatCpp <- function(x, g, t = NULL, transpose = FALSE) {
.Call(`_collapse_psmatCpp`, x, g, t, transpose)
}
pwnobsmCpp <- function(x) {
.Call(`_collapse_pwnobsmCpp`, x)
}
qFCpp <- function(x, ordered = TRUE, na_exclude = TRUE, keep_attr = TRUE, ret = 1L) {
.Call(`_collapse_qFCpp`, x, ordered, na_exclude, keep_attr, ret)
}
funiqueCpp <- function(x, sort = TRUE) {
.Call(`_collapse_funiqueCpp`, x, sort)
}
fdroplevelsCpp <- function(x, check_NA = TRUE) {
.Call(`_collapse_fdroplevelsCpp`, x, check_NA)
}
seqid <- function(x, o = NULL, del = 1L, start = 1L, na_skip = FALSE, skip_seq = FALSE, check_o = TRUE) {
.Call(`_collapse_seqid`, x, o, del, start, na_skip, skip_seq, check_o)
}
groupid <- function(x, o = NULL, start = 1L, na_skip = FALSE, check_o = TRUE) {
.Call(`_collapse_groupid`, x, o, start, na_skip, check_o)
}
varyingCpp <- function(x, ng = 0L, g = 0L, any_group = TRUE) {
.Call(`_collapse_varyingCpp`, x, ng, g, any_group)
}
varyingmCpp <- function(x, ng = 0L, g = 0L, any_group = TRUE, drop = TRUE) {
.Call(`_collapse_varyingmCpp`, x, ng, g, any_group, drop)
}
varyinglCpp <- function(x, ng = 0L, g = 0L, any_group = TRUE, drop = TRUE) {
.Call(`_collapse_varyinglCpp`, x, ng, g, any_group, drop)
} |
"pmcode_84148" |
gg_pipe <- function(data, ggObj) {
if (!is.data.frame(data)) {
data <- as.data.frame(data)
}
subs <- substitute(ggObj)
dep <- deparse(subs)
if (any(grepl("ggplot", dep))) {
ggplot_id <- which(grepl("ggplot", dep))
if (length(ggplot_id) > 1) {
stop("only one ggplot model can be handled", call. = FALSE)
} else {
dep1 <- dep[ggplot_id]
dep1_split <- strsplit(dep1, split = "")[[1]]
len.dep1_split <- length(dep1_split)
first_left_bracket <- which(dep1_split %in% "(" == TRUE)[1]
new_dep1 <- paste0(c(dep1_split[1:first_left_bracket], "data, ",
dep1_split[(first_left_bracket + 1):len.dep1_split]),
collapse = "")
dep[ggplot_id] <- new_dep1
parse_dep <- parse(text = paste0(dep, collapse = ""))
eval(parse_dep)
}
} else stop("ggplot object cannot be found", call. = FALSE)
} |
context("Charlson and comorbidity counting")
test_that("github issue
mydf <- data.frame(
visit_id = c("a", "b", "c", "a", "b", "d"),
icd9 = c("441", "412.93", "042", "250.0", "250.0", "250.0"),
stringsAsFactors = TRUE
)
expect_warning(res <- charlson(mydf, return_df = TRUE), NA)
expect_equal(res$Charlson, c(2, 2, 6, 1))
})
test_that("github issue
mydf <- data.frame(visit_id = "a", icd9 = "250.0")
comorbids <- icd9_comorbid_quan_deyo(mydf, short_code = FALSE, return_df = TRUE)
set.seed(123)
use_ncol_cmb <- ncol(comorbids) - 1
comorbids <- rbind(comorbids, data.frame(
visit_id = letters[2:10],
matrix(runif(use_ncol_cmb * 9) > 0.7,
ncol = 17, dimnames = list(character(0), names(comorbids[2:18]))
)
))
c2.inv <- cbind(
t(comorbids[2, 2:18]),
c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 6, 6)
)
expect_equivalent(
charlson_from_comorbid(comorbids, hierarchy = TRUE)[2],
sum(apply(c2.inv, 1, prod))
)
})
test_that("only matrix or data.frame accepted", {
expect_error(charlson(c(1, 2)))
expect_error(charlson(c(1, 2),
visit_id = "roam", return_df = TRUE,
stringsAsFactors = TRUE
))
expect_error(charlson(list(1, 2)))
expect_error(charlson(list(1, 2),
visit_id = "roam", return_df = TRUE,
stringsAsFactors = TRUE
))
})
test_that("Charlson score", {
mydf <- data.frame(
visit_id = c("a", "b", "c"),
icd9 = c("441", "412.93", "042"),
stringsAsFactors = TRUE
)
expect_equal(
charlson_from_comorbid(
icd9_comorbid_quan_deyo(mydf,
short_code = FALSE,
hierarchy = TRUE,
return_df = TRUE
)
),
charlson(mydf, short_code = FALSE, return_df = FALSE)
)
expect_equivalent(
charlson(mydf,
return_df = TRUE,
stringsAsFactors = TRUE,
short_code = FALSE
),
structure(list(
visit_id = structure(1:3,
.Label = c("a", "b", "c"),
class = "factor"
),
Charlson = c(1, 1, 6)
),
.Names = c("visit_id", "Charlson"),
row.names = c(NA, -3L),
class = "data.frame"
)
)
mydff <- data.frame(
visit_id = c("a", "b", "c"),
icd9 = c("441", "412.93", "042"),
stringsAsFactors = FALSE
)
expect_identical(
charlson(mydff,
return_df = TRUE,
stringsAsFactors = FALSE,
short_code = FALSE
),
structure(list(
visit_id = c("a", "b", "c"),
Charlson = c(1, 1, 6)
),
.Names = c("visit_id", "Charlson"),
row.names = c(NA, -3L),
class = "data.frame"
)
)
expect_identical(
charlson(mydff,
return_df = TRUE,
stringsAsFactors = TRUE,
short_code = FALSE
),
structure(list(
visit_id = factor(c("a", "b", "c")),
Charlson = c(1, 1, 6)
),
.Names = c("visit_id", "Charlson"),
row.names = c(NA, -3L),
class = "data.frame"
)
)
mydfff <- mydff
names(mydfff)[1] <- "v"
expect_equivalent(
charlson(mydfff,
return_df = TRUE,
stringsAsFactors = FALSE,
short_code = FALSE
),
structure(list(
v = c("a", "b", "c"),
Charlson = c(1, 1, 6)
),
.Names = c("v", "Charlson"),
row.names = c(NA, -3L),
class = "data.frame"
)
)
mydffff <- cbind(mydfff, data.frame(v2 = mydfff$v, stringsAsFactors = FALSE))
mydffff$v <- NULL
expect_identical(get_icd_name(mydffff, NULL), "icd9")
expect_equivalent(
charlson(mydffff,
visit_name = "v2",
return_df = TRUE,
stringsAsFactors = FALSE,
short_code = FALSE
),
structure(list(
v2 = c("a", "b", "c"),
Charlson = c(1, 1, 6)
),
.Names = c("v2", "Charlson"),
row.names = c(NA, -3L),
class = "data.frame"
)
)
})
test_that("Charlson - errors?", {
baddf <- data.frame(
visit_id = c("d", "d"),
icd9 = c("2500", "25042"),
stringsAsFactors = TRUE
)
cmb <- icd9_comorbid_quan_deyo(baddf, hierarchy = FALSE, short_code = TRUE)
expect_error(charlson_from_comorbid(cmb, hierarchy = FALSE))
baddf <- data.frame(
visit_id = c("d", "d"),
icd9 = c("57224", "57345"),
stringsAsFactors = TRUE
)
cmb <- icd9_comorbid_quan_deyo(baddf, hierarchy = FALSE, short_code = TRUE)
expect_error(charlson_from_comorbid(cmb, hierarchy = FALSE))
})
test_that("count icd9 codes", {
mydf <- data.frame(
visit_name = c("r", "r", "s"),
icd9 = c("441", "412.93", "042")
)
expect_equal(
count_codes(mydf, return_df = TRUE),
data.frame(
visit_name = c("r", "s"),
icd_count = c(2, 1)
)
)
expect_equal(count_codes(mydf), c(2, 1))
cmb <- icd9_comorbid_quan_deyo(mydf, short_code = FALSE, return_df = TRUE)
expect_equivalent(count_comorbid(cmb), count_codes(mydf))
wide <- data.frame(
visit_id = c("r", "s", "t"),
icd9_1 = c("0011", "441", "456"),
icd9_2 = c(NA, "442", NA),
icd9_3 = c(NA, NA, "510")
)
expect_equal(
count_codes_wide(wide),
c("r" = 1, "s" = 2, "t" = 2)
)
widezero <- data.frame(
visit_id = c("j"),
icd9_a = NA,
icd9_b = NA
)
expect_equal(
count_codes_wide(widezero),
c("j" = 0)
)
widezero2 <- data.frame(
visit_id = c("j"),
icd9_a = NA
)
expect_equal(
count_codes_wide(widezero2),
c("j" = 0)
)
widezero3 <- data.frame(
visit_id = c("j", "j"),
icd9_a = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero3, aggr = TRUE),
c("j" = 0)
)
widezero4 <- data.frame(
visit_id = c("j", "j"),
icd9_a = c(NA, NA),
icd9_b = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero4, aggr = TRUE),
c("j" = 0)
)
widezero3b <- data.frame(
visit_id = c("j", "j"),
icd9_a = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero3b, aggr = FALSE),
c("j" = 0, "j" = 0)
)
widezero4b <- data.frame(
visit_id = c("j", "j"),
icd9_a = c(NA, NA),
icd9_b = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero4b, aggr = FALSE),
c("j" = 0, "j" = 0)
)
widezero5 <- data.frame(
visit_id = c("j", "k"),
icd9_a = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero5),
c("j" = 0, "k" = 0)
)
widezero6 <- data.frame(
visit_id = c("j", "k"),
icd9_a = c(NA, NA),
icd9_b = c(NA, NA)
)
expect_equal(
count_codes_wide(widezero6),
c("j" = 0, "k" = 0)
)
})
test_that("count wide directly (old func) same as reshape count", {
widedf <- data.frame(
visit_id = c("a", "b", "c"),
icd9_01 = c("441", "4424", "441"),
icd9_02 = c(NA, "443", NA)
)
expect_equivalent(
count_codes_wide(widedf),
count_codes(wide_to_long(widedf))
)
})
test_that("van_walraven_from_comorbid score calculation", {
comorbids <- icd9_comorbid_quan_elix(
mydf <- data.frame(visit_id = "a", icd9 = "250.0"),
return_df = TRUE
)
set.seed(123)
use_ncol_cmb <- ncol(comorbids) - 1
comorbids <- rbind(
comorbids,
data.frame(
visit_id = letters[2:10],
matrix(stats::runif(use_ncol_cmb * 9) > 0.7,
ncol = use_ncol_cmb,
dimnames = list(character(0), names(comorbids[2:31]))
)
)
)
c2.inv <- cbind(
t(comorbids[2, -1]),
c(
7, 5, -1, 4, 2, 0, 7, 6, 3, 0, 0, 0, 5, 11, 0, 0,
9, 12, 4, 0, 3, -4, 6, 5, -2, -2, 0, -7, 0, -3
)
)
expect_equivalent(
van_walraven_from_comorbid(comorbids, hierarchy = TRUE)[2],
sum(apply(c2.inv, 1, prod))
)
})
test_that("van_walraven comorbidity index and score", {
mydf <- data.frame(
id = factor(c(rep(1, 20), rep(2, 20), rep(3, 18))),
value =
c(
"324.1", "285.9", "599.70", "038.9", "278.00", "38.97",
"V88.01", "112.0", "427.89", "790.4", "401.9", "53.51", "584.9",
"415.12", "995.91", "996.69", "83.39", "V46.2", "V58.61", "276.69",
"515", "V14.6", "784.0", "V85.1", "427.31", "V85.44", "300",
"86.28", "569.81", "041.49", "486", "45.62", "V15.82", "496",
"261", "280.9", "275.2", "96.59", "V49.86", "V10.42", "276.8",
"710.4", "311", "041.12", "276.0", "790.92", "518.84", "552.21",
"V85.41", "278.01", "V15.82", "96.72", "070.70", "285.29", "276.3",
"V66.7", "272.4", "790.92"
)
)
expect_equivalent(
van_walraven(mydf, visit_name = "id", icd_name = "value"),
van_walraven_from_comorbid(
icd9_comorbid_quan_elix(mydf, visit_name = "id", icd_name = "value")
)
)
expect_equivalent(
van_walraven(mydf, visit_name = "id", icd_name = "value", return_df = TRUE),
if (getOption("stringsAsFactors")) {
data.frame(
id = factor(c("1", "2", "3")),
vanWalraven = c(10, 12, -2)
)
} else {
data.frame(
id = c("1", "2", "3"),
vanWalraven = c(10, 12, -2)
)
}
)
expect_equal(
van_walraven(mydf, icd_name = "value"),
structure(c(10, 12, -2), names = c("1", "2", "3"))
)
})
test_that("github issue
mydf <- data.frame(visit_id = "a", icd9 = "250.0")
comorbids <- comorbid_quan_deyo(mydf, short_code = FALSE, return_df = TRUE)
expect_equivalent(
charlson_from_comorbid(comorbids, scoring_system = "original"), 1
)
expect_equivalent(
charlson_from_comorbid(comorbids, scoring_system = "charlson"), 1
)
expect_equivalent(
charlson_from_comorbid(comorbids, scoring_system = "quan"), 0
)
expect_equivalent(charlson(mydf, scoring_system = "o"), 1)
expect_equivalent(charlson(mydf, scoring_system = "c"), 1)
expect_equivalent(charlson(mydf, scoring_system = "q"), 0)
set.seed(456)
use_ncol_cmb <- ncol(comorbids) - 1
comorbids <- rbind(comorbids, data.frame(
visit_id = letters[2:10],
matrix(runif(use_ncol_cmb * 9) > 0.7,
ncol = 17, dimnames = list(
character(0),
names(comorbids[2:18])
)
)
))
comorbids[, "DM"] <- comorbids[, "DM"] & !comorbids[, "DMcx"]
comorbids[, "LiverMild"] <-
comorbids[, "LiverMild"] & !comorbids[, "LiverSevere"]
comorbids[, "Cancer"] <- comorbids[, "Cancer"] & !comorbids[, "Mets"]
original_weights <- c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 6, 6)
quan_weights <- c(0, 2, 0, 0, 2, 1, 1, 0, 2, 0, 1, 2, 1, 2, 4, 6, 4)
expect_equivalent(
charlson_from_comorbid(comorbids, hierarchy = TRUE)[2],
sum(apply(cbind(t(comorbids[2, 2:18]), original_weights), 1, prod))
)
expect_equivalent(
charlson_from_comorbid(
comorbids,
hierarchy = TRUE, scoring_system = "original"
)[3],
sum(apply(cbind(t(comorbids[3, 2:18]), original_weights), 1, prod))
)
expect_equivalent(
charlson_from_comorbid(
comorbids,
hierarchy = TRUE, scoring_system = "charlson"
)[3],
sum(apply(cbind(t(comorbids[3, 2:18]), original_weights), 1, prod))
)
expect_equivalent(
charlson_from_comorbid(
comorbids,
hierarchy = TRUE, scoring_system = "quan"
)[4],
sum(apply(cbind(t(comorbids[4, 2:18]), quan_weights), 1, prod))
)
expect_equivalent(
charlson_from_comorbid(
comorbids,
hierarchy = TRUE, scoring_system = "o"
)[5],
sum(apply(cbind(t(comorbids[5, 2:18]), original_weights), 1, prod))
)
expect_equivalent(
charlson_from_comorbid(
comorbids,
hierarchy = TRUE, scoring_system = "q"
)[6],
sum(apply(cbind(t(comorbids[6, 2:18]), quan_weights), 1, prod))
)
expect_error(charlson_from_comorbid(comorbids,
hierarchy = TRUE, scoring_system = "z"
))
}) |
skip_on_ci()
skip_on_cran()
if (!(file.exists(system.file("enumpart/enumpart", package="redist")) ||
file.exists(system.file('enumpart/enumpart.exe', package = 'redist')))) {
redist.init.enumpart()
}
dir <- withr::local_tempdir()
test_that('enumpart preparation runs correctly', {
expected <- c(
21, 23, 3, 23, 3, 21, 6, 23, 3, 6, 4, 21, 3, 4, 1, 3, 1, 6, 1,
4, 2, 21, 2, 4, 1, 2, 18, 21, 2, 18, 6, 9, 1, 9, 1, 14, 9, 14,
2, 16, 16, 18, 1, 15, 14, 15, 1, 13, 13, 15, 2, 17, 16, 17, 1,
12, 12, 13, 1, 11, 11, 12, 2, 19, 17, 19, 1, 10, 10, 11, 1, 8,
8, 10, 2, 20, 19, 20, 1, 7, 7, 8, 1, 5, 5, 7, 2, 22, 20, 22, 5,
22, 5, 25, 22, 25, 5, 24, 22, 24, 24, 25
)
capture.output(
redist.prep.enumpart(adj = adj,
unordered_path = file.path(dir, 'unordered'),
ordered_path = file.path(dir, 'ordered'))
)
expect_equal(scan(file.path(dir, 'ordered.dat')), expected)
})
test_that('enumpart can sample without constraints', {
sample_path <- file.path(dir, 'sample.dat')
if (file.exists(sample_path)) {
file.remove(sample_path)
}
capture.output(
redist.run.enumpart(
ordered_path = file.path(dir, 'ordered'),
out_path = file.path(dir, 'sample'),
ndists = 3, all = F, n = 10)
)
m <- matrix(scan(file.path(dir, 'sample.dat')), nrow = 25)
expect_equal(dim(m), c(25, 10))
expect_equal(range(m), c(0, 2))
})
test_that('enumpart can read in data', {
capture.output(
full <- redist.read.enumpart(out_path = file.path(dir, 'sample'))
)
expect_equal(nrow(full), 25)
expect_equal(ncol(full), 10)
expect_equal(range(full), c(1, 3))
})
test_that('enumpart can read in partial data', {
full <- redist.read.enumpart(out_path = file.path(dir, 'sample'))
samp <- redist.read.enumpart(out_path = file.path(dir, 'sample'),
skip = 5)
expect_equal(nrow(samp), 25)
expect_equal(ncol(samp), 5)
expect_equal(range(samp), c(1, 3))
expect_true(all(full[, 6:10] == samp))
samp <- redist.read.enumpart(out_path = file.path(dir, 'sample'),
n_max = 8)
expect_equal(nrow(samp), 25)
expect_equal(ncol(samp), 8)
expect_equal(range(samp), c(1, 3))
expect_true(all(full[, 1:8] == samp))
})
test_that('enumpart can sample with unit count constraints', {
sample_path <- file.path(dir, 'sample.dat')
if (file.exists(sample_path)) {
file.remove(sample_path)
}
capture.output(
redist.run.enumpart(file.path(dir, 'ordered'), file.path(dir, 'sample'),
ndists = 3,
all = F, n = 100, lower = 4, upper = 16)
)
m <- matrix(scan(sample_path), nrow = 25)
expect_equal(dim(m), c(25, 100))
expect_equal(range(m), c(0, 2))
range_sizes <- range(apply(m, 2, function(x) range(table(x))))
expect_equal(range_sizes, c(4, 16))
})
test_that('enumpart can sample with population constraints', {
sample_path <- file.path(dir, 'sample.dat')
if (file.exists(sample_path)) file.remove(sample_path)
write(pop, file.path(dir, 'pop.dat'), ncolumns = length(pop))
target <- sum(pop) / 3
capture.output(
redist.run.enumpart(file.path(dir, 'ordered'), file.path(dir, 'sample'),
ndists = 3, all = T, lower = round(target * 0.9), upper = round(target * 1.1),
weight_path = file.path(dir, 'pop'))
)
m <- matrix(scan(sample_path), nrow = 25)
expect_equal(dim(m), c(25, 927))
expect_equal(range(m), c(0, 2))
dev <- redist.parity(m, pop)
expect_true(max(dev) <= 0.1)
})
withr::deferred_clear() |
gal_uvw = function(distance, lsr=F, ra,dec, pmra, pmdec,
vrad, plx) {
nra = length(ra)
if(missing(ra) || missing(dec))
stop('the ra, dec (j2000) position keywords must be supplied (degrees)')
if(!missing(distance)) {
if(any(distance<=0))
stop('all distances must be > 0')
plx = 1e3/distance
}
else {
if(missing(plx))
stop('either a parallax or distance must be specified')
if(any(plx<=0)) stop('parallaxes must be > 0')
}
radeg = 180/pi
cosd = cos(dec/radeg)
sind = sin(dec/radeg)
cosa = cos(ra/radeg)
sina = sin(ra/radeg)
k = 4.74047
a_g = cbind(c( 0.0548755604, +0.4941094279, -0.8676661490),
c(0.8734370902, -0.4448296300, -0.1980763734),
c(0.4838350155, 0.7469822445, +0.4559837762))
vec1 = vrad
vec2 = k*pmra/plx
vec3 = k*pmdec/plx
u = ( a_g[1,1]*cosa*cosd+a_g[1,2]*sina*cosd+a_g[1,3]*sind)*vec1+
(-a_g[1,1]*sina +a_g[1,2]*cosa)*vec2+
(-a_g[1,1]*cosa*sind-a_g[1,2]*sina*sind+a_g[1,3]*cosd)*vec3
v = ( a_g[2,1]*cosa*cosd+a_g[2,2]*sina*cosd+a_g[2,3]*sind)*vec1+
(-a_g[2,1]*sina +a_g[2,2]*cosa)*vec2+
(-a_g[2,1]*cosa*sind-a_g[2,2]*sina*sind+a_g[2,3]*cosd)*vec3
w = ( a_g[3,1]*cosa*cosd+a_g[3,2]*sina*cosd+a_g[3,3]*sind)*vec1+
(-a_g[3,1]*sina +a_g[3,2]*cosa)*vec2+
(-a_g[3,1]*cosa*sind-a_g[3,2]*sina*sind+a_g[3,3]*cosd)*vec3
lsr_vel=c(-8.5,13.38,6.49)
if(lsr) {
u = u+lsr_vel[1]
v = v+lsr_vel[2]
w = w+lsr_vel[3]
}
return(list(u=u,v=v,w=w))
} |
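# Illustrative usage sketch (added; not part of the original source). The input
# values below are made up: J2000 position in degrees, proper motions in mas/yr,
# radial velocity in km/s and parallax in mas. Wrapped in if (FALSE) so that
# sourcing this file is unaffected.
if (FALSE) {
  gal_uvw(ra = 201.7, dec = -11.2, pmra = -108.3, pmdec = -72.4,
          vrad = 21.8, plx = 54.7, lsr = TRUE)
}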
context("lmvar extractors")
test_that("logLik works correctly", {
n = nobs(fit)
mu = fitted( fit, sigma=FALSE)
sigma = as.numeric(exp(fit$X_sigma %*% coef(fit, mu=FALSE)))
res = (fit$y - mu) / sigma
logL = -0.5 * n * log(2 * pi) - sum(log(sigma)) - 0.5 * sum(res * res)
expect_equal( logLik(fit)[1], logL)
expect_identical( names(attributes(logLik(fit))), c( "df", "class"))
expect_equal( attr(logLik(fit), "df"), 7)
})
test_that("coef works correctly", {
expect_equivalent( coef(fit), c( fit$coefficients_mu, fit$coefficients_sigma))
expect_identical( coef( fit, mu = FALSE), fit$coefficients_sigma)
expect_identical( coef( fit, sigma = FALSE), fit$coefficients_mu)
expect_equivalent( coef( fit, mu = FALSE, sigma = FALSE), numeric())
})
test_that("dfree works correctly", {
expect_identical( dfree(fit), ncol(fit$X_mu) + ncol(fit$X_sigma))
expect_identical( dfree( fit, sigma = FALSE), ncol(fit$X_mu))
expect_identical( dfree( fit, mu = FALSE), ncol(fit$X_sigma))
no_fit = lmvar_no_fit( fit$y, fit$X_mu[,-1], fit$X_sigma[,-1])
expect_identical( dfree(fit), dfree(no_fit))
}) |
print.fdt_cat.multiple <- function (x,
columns=1:6,
round=2,
row.names=FALSE,
right=TRUE, ...)
{
tnames <- names(x)
for (i in 1:length(tnames)) {
res <- x[tnames[i]][[tnames[i]]]
if(is.list(res)){
res <- cbind(res[[1]][, 1],
round(res[[1]][, 2:6],
round))[columns]
}
cat(tnames[i], '\n')
names(res) <- c('Category',
'f',
'rf',
'rf(%)',
'cf',
'cf(%)')[columns]
print.data.frame(res,
row.names=row.names,
right=right, ...)
cat('\n')
}
} |
.rec.main <-
function(x, x.name, new.var, old, new, ivar, n.obs, dname, quiet) {
n.values <- length(old)
miss_old <- FALSE
if (!is.null(old)) if (old[1] == "missing") miss_old <- TRUE
miss_new <- FALSE
if (!is.null(new)) {
if (new[1] != "missing") {
if (n.values != length(new)) {
cat("\n"); stop(call.=FALSE, "\n","------\n",
"The same number of values must be specified for both\n",
"old and new values_\n\n")
}
}
else {
for (i in 1:n.values) new[i] <- "missing"
miss_new <- TRUE
}
}
if (!quiet) {
if (ivar == 1) {
cat("\nRecoding Specification\n")
.dash(22)
for (i in 1:n.values) cat(" ", old[i], "-->", new[i], "\n")
cat("\n")
if (miss_new)
cat("\nR represents missing data with a NA for 'not assigned'.\n\n")
cat("Number of cases (rows) to recode:", n.obs, "\n")
if (is.null(new.var))
cat("\nReplace existing values of each specified variable",
", no value for option: new.var\n", sep="")
}
cat("\n")
old.unique <- sort(unique(x))
n.unique <- length(old.unique)
cat("--- Recode:", x.name, "---------------------------------\n")
if ("numeric" %in% class(x))
cat("Unique values of", x.name, "in the data:", old.unique, "\n")
else if ("factor" %in% class(x))
cat("Unique values of", x.name, "in the data:", levels(x), "\n")
cat("Number of unique values of", x.name, "in the data:", n.unique, "\n")
for (i in 1:n.values) {
is.in <- FALSE
for (j in 1:n.unique)
if (old[i] == old.unique[j]) is.in <- TRUE
if (!is.in) {
cat(">>> Note: A value specified to recode, ", old[i],
", is not in the data.\n\n", sep="")
}
}
cat("Number of values of", x.name, "to recode:", n.values, "\n")
if (!is.null(new.var)) cat("Recode to variable:", new.var, "\n")
}
if ("factor" %in% class(x)) x <- as.character(x)
new.x <- x
for (i in 1:n.values) {
for (j in 1:n.obs) {
if (!miss_old) {
if (!is.na(new.x[j]))
if (x[j] == old[i]) new.x[j] <- ifelse (!miss_new, new[i], NA)
}
else
if (is.na(new.x[j])) new.x[j] <- new[1]
}
}
new.x <- type.convert(new.x)
return(new.x)
} |
lv4 <- function() {
r <- NULL
a <- NULL
initial <- function(t = 0, pars = NULL) {
r <<- pars[["r"]]
a <<- pars[["a"]]
pars[["y0"]]
}
derivs <- function(t, y, .) {
list(vapply(seq_along(y), function(i)
r[i] * y[i] * (1 - sum(a[i, ] * y)), numeric(1)))
}
list(derivs = derivs, initial = initial, t = c(0, 100))
} |
rasterPCA <- function(img, nSamples = NULL, nComp = nlayers(img), spca = FALSE, maskCheck = TRUE, ...){
if(nlayers(img) <= 1) stop("Need at least two layers to calculate PCA.")
ellip <- list(...)
if("norm" %in% names(ellip)) {
warning("Argument 'norm' has been deprecated. Use argument 'spca' instead.\nFormer 'norm=TRUE' corresponds to 'spca=TRUE'.", call. = FALSE)
ellip[["norm"]] <- NULL
}
if(nComp > nlayers(img)) nComp <- nlayers(img)
if(!is.null(nSamples)){
trainData <- sampleRandom(img, size = nSamples, na.rm = TRUE)
if(nrow(trainData) < nlayers(img)) stop("nSamples too small or img contains a layer with NAs only")
model <- princomp(trainData, scores = FALSE, cor = spca)
} else {
if(maskCheck) {
totalMask <- !sum(calc(img, is.na))
if(cellStats(totalMask, sum) == 0) stop("img contains either a layer with NAs only or no single pixel with valid values across all layers")
img <- mask(img, totalMask , maskvalue = 0)
}
covMat <- layerStats(img, stat = "cov", na.rm = TRUE)
model <- princomp(covmat = covMat[[1]], cor=spca)
model$center <- covMat$mean
model$n.obs <- ncell(img)
if(spca) {
S <- diag(covMat$covariance)
model$scale <- sqrt(S * (model$n.obs-1)/model$n.obs)
}
}
out <- .paraRasterFun(img, rasterFun=raster::predict, args = list(model = model, na.rm = TRUE, index = 1:nComp), wrArgs = ellip)
names(out) <- paste0("PC", 1:nComp)
structure(list(call = match.call(), model = model, map = out), class = c("rasterPCA", "RStoolbox"))
} |
ddgamma <- function(x, shape, rate = 1, scale = 1/rate, log = FALSE) {
if (!missing(rate) && !missing(scale)) {
if (abs(rate * scale - 1) < 1e-15)
warning("specify 'rate' or 'scale' but not both")
else stop("specify 'rate' or 'scale' but not both")
}
cpp_ddgamma(x, shape, scale, log[1L])
}
pdgamma <- function(q, shape, rate = 1, scale = 1/rate, lower.tail = TRUE, log.p = FALSE) {
pgamma(floor(q)+1, shape, scale = scale, lower.tail = lower.tail[1L], log.p = log.p[1L])
}
rdgamma <- function(n, shape, rate = 1, scale = 1/rate) {
floor(rgamma(n, shape, scale = scale))
} |
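# Minimal sketch (added for illustration; assumes the package providing
# cpp_ddgamma is loaded). The discretised gamma is just floor() of a continuous
# gamma draw, so pdgamma(q) evaluates the continuous CDF at floor(q) + 1.
if (FALSE) {
  set.seed(1)
  x <- rdgamma(5, shape = 2, rate = 1)   # discretised draws
  ddgamma(0:5, shape = 2, rate = 1)      # probability mass at 0..5
  pdgamma(x, shape = 2, rate = 1)        # distribution function at the draws
}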
generate_test_data <- function(n, P, split = 0.8){
if( !is.vector(n) | length(n) != 1 | n < 1 ){ cat("Error: n has to be a positive integer \n") ; return(NULL) }
if( !is.vector(P) | length(P) != 1 | P < 1 ){ cat("Error: P has to be a positive integer \n") ; return(NULL) }
if( !is.vector(split) | length(split) != 1 | split < 0 | split > 1 ){ cat("Error: split has to be between 0 and 1 \n") ; return(NULL) }
n <- round(n)
P <- round(P)
X <- sapply(1:P, function(p) stats::runif(n,0,1))
t <- sapply( 1:n, function(i) -16*( (sum(X[i,]^2) / length(X[i,])) -0.5) )
fx <- 1 / ( 1 + exp(t) )
y <- fx + stats::rnorm(n, 0, 0.1)
index <- sample(1:n, floor(n*split), replace = F)
X_train <- X[index,]
X_test <- X[-index,]
y_train <- y[index]
y_test <- y[-index]
return( list("X_train" = X_train, "y_train" = y_train, "X_test" = X_test, "y_test" = y_test) )
} |
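# Usage sketch (added; not in the original file): simulate 100 observations on
# 5 uniform predictors and check the 80/20 train/test split sizes.
if (FALSE) {
  dat <- generate_test_data(n = 100, P = 5, split = 0.8)
  dim(dat$X_train)    # 80 x 5
  length(dat$y_test)  # 20
}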
`item.tau` <-
function(LC,ud,class,steps,model, maxchange=2, maxrange=c(-100,100)){
Pxji <- P.xji(LC,ud,steps)
der <- d.tau(LC$i.stat$Tx, LC$i.stat$S.ih, LC$i.stat$steps,
(ud$n.x*class),ud$resp,ud$n.unique, Pxji, model)
der$d1d2 <- ifelse(abs(der$d1d2) > maxchange, sign(der$d1d2)*maxchange, der$d1d2)
LC$item.par$tau <- LC$item.par$tau - der$d1d2
LC$item.par$tau[t(t(LC$item.par$tau) - colMeans(LC$item.par$tau, na.rm=TRUE)) > maxrange[2]] <- maxrange[2]
LC$item.par$tau[t(t(LC$item.par$tau) - colMeans(LC$item.par$tau, na.rm=TRUE)) < maxrange[1]] <- maxrange[1]
LC$item.par$tau <- t(t(LC$item.par$tau) - colMeans(LC$item.par$tau,
na.rm=TRUE))
LC$item.par$delta <- t(apply(LC$item.par$tau,1,
function(XXX) XXX + LC$item.par$delta.i))
LC
} |
MertonSpread <- function(leverage,tau,sigmaV)
{
dt1 <- (-log(leverage) + 0.5*tau*sigmaV^2)/(sigmaV*sqrt(tau))
dt2 <- dt1-sigmaV*sqrt(tau)
-log(pnorm(dt2)+pnorm(-dt1)/leverage)/tau
}
leverage = 0.6
tau = 2
sigmaV = seq(from=0.01, to = 0.5, length=50)
cta = MertonSpread(leverage,tau,sigmaV)
plot(sigmaV,cta,type="l")
leverage = 0.6
sigmaV = 0.25
tau = seq(from = 0.01, to = 5, length = 50)
ctb = MertonSpread(leverage,tau,sigmaV)
plot(tau,ctb,type="l")
leverage = seq(from=0.01, to = 0.99, length = 50)
sigmaV = 0.25
tau = 2
ctc = MertonSpread(leverage,tau,sigmaV)
plot(leverage,ctc,type="l")
sigmaV = 0.5
tau = 2
ctc = MertonSpread(leverage,tau,sigmaV)
plot(leverage,ctc,type="l")
sigmaV = 0.5
tau = 4
ctc = MertonSpread(leverage,tau,sigmaV)
plot(leverage,ctc,type="l") |
testthat::context("TeeCSVPipe")
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize",{
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
path <- file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv")
bdpar.Options$set(key = "teeCSVPipe.output.path",
value = path)
testthat::expect_silent(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath))
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize propertyName type error",{
propertyName <- NULL
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'propertyName' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize alwaysBeforeDeps type error",{
propertyName <- ""
alwaysBeforeDeps <- NULL
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'alwaysBeforeDeps' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize notAfterDeps type error",{
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- NULL
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'notAfterDeps' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize withData type error",{
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- NULL
withSource <- TRUE
outputPath <- NULL
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'withData' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize withSource type error",{
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- NULL
outputPath <- NULL
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'withSource' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("initialize resourcesAbbreviationsPath type error",{
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
bdpar.Options$set(key = "teeCSVPipe.output.path",
value = NULL)
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Path of TeeCSVPipe output is neither defined in initialize or in bdpar.Options",
fixed = TRUE)
outputPath <- 1
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the type of the 'outputPath' variable: numeric",
fixed = TRUE)
outputPath <- "example.json"
testthat::expect_error(TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath),
"[TeeCSVPipe][initialize][FATAL] Checking the extension of the file: json",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe",{
testthat::skip_if_not_installed("rjson")
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
pathOutput <- file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv")
bdpar.Options$set(key = "teeCSVPipe.output.path",
value = pathOutput)
pipe <- TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath)
Bdpar$new()
path <- file.path("testFiles",
"testTeeCSVPipe",
"testFile.tsms")
instance <- ExtractorSms$new(path)
testthat::expect_equal(pipe$pipe(instance),
instance)
testthat::expect_equal(file.exists(pathOutput),
TRUE)
testthat::expect_equal(pipe$pipe(instance),
instance)
testthat::expect_equal(file.exists(pathOutput),
TRUE)
file.remove(file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv"))
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
if (file.exists(file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv"))) {
file.remove(file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv"))
}
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe instance invalid",{
testthat::skip_if_not_installed("rjson")
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
pathOutput <- file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv")
bdpar.Options$set(key = "teeCSVPipe.output.path",
value = pathOutput)
pipe <- TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath)
Bdpar$new()
path <- file.path("testFiles",
"testTeeCSVPipe",
"testFile.tsms")
instance <- ExtractorSms$new(path)
instance$invalidate()
testthat::expect_equal(pipe$pipe(instance),
instance)
testthat::expect_equal(file.exists(pathOutput),
FALSE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
if (file.exists(file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv"))) {
file.remove(file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv"))
}
})
testthat::setup({
bdpar.Options$reset()
bdpar.Options$configureLog()
})
testthat::test_that("pipe instance type error",{
testthat::skip_if_not_installed("rjson")
propertyName <- ""
alwaysBeforeDeps <- list()
notAfterDeps <- list()
withData <- TRUE
withSource <- TRUE
outputPath <- NULL
pathOutput <- file.path("testFiles",
"testTeeCSVPipe",
"output_tsms.csv")
bdpar.Options$set(key = "teeCSVPipe.output.path",
value = pathOutput)
pipe <- TeeCSVPipe$new(propertyName,
alwaysBeforeDeps,
notAfterDeps,
withData,
withSource,
outputPath)
Bdpar$new()
instance <- NULL
testthat::expect_error(pipe$pipe(instance),
"[TeeCSVPipe][pipe][FATAL] Checking the type of the 'instance' variable: NULL",
fixed = TRUE)
})
testthat::teardown({
bdpar.Options$reset()
bdpar.Options$configureLog()
}) |
melt_canonical <- function(dat,canonical="",genus="",species="",subspecies=""){
newdat <- as.data.frame(dat)
if(genus==""){
return(NULL)
} else {
newdat <- rename_column(newdat,genus,"genus",silent=TRUE)
newdat$genus <- NA
}
if(species==""){
return(NULL)
} else {
newdat <- rename_column(newdat,species,"species",silent=TRUE)
newdat$species <- NA
}
if(subspecies!=""){
newdat <- rename_column(newdat,subspecies,"subspecies",silent=TRUE)
newdat$subspecies <- NA
}
if(canonical!=""){
newdat <- rename_column(newdat,canonical,"canonical")
}
pb = txtProgressBar(min = 0, max = nrow(newdat), initial = 0)
for(i in 1:nrow(newdat)){
if(!is.empty(newdat$canonical[i])){
tl <- guess_taxo_rank(newdat$canonical[i])
newdat$genus[i] <- toproper(strsplit(newdat$canonical[i]," ")[[1]][1])
if(tl=="Species" | tl=="Subspecies"){
newdat$species[i] <- tolower(strsplit(newdat$canonical[i]," ")[[1]][2])
}
if(tl=="Subspecies" & subspecies!=""){
newdat$subspecies[i] <- tolower(strsplit(newdat$canonical[i]," ")[[1]][3])
}
}
setTxtProgressBar(pb,i)
}
newdat <- rename_column(newdat,"genus",genus)
newdat <- rename_column(newdat,"species",species)
if(subspecies!=""){
newdat <- rename_column(newdat,"subspecies",subspecies)
}
newdat <- rename_column(newdat,"canonical",canonical)
return(newdat)
} |
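# Hedged usage sketch (added): split a column of canonical names into genus,
# species and subspecies columns. The data and column names are illustrative;
# the call relies on the package's own helpers (rename_column, guess_taxo_rank).
if (FALSE) {
  df <- data.frame(canonical = c("Abrothrix longipilis", "Felis catus catus"),
                   stringsAsFactors = FALSE)
  melt_canonical(df, canonical = "canonical", genus = "genus",
                 species = "species", subspecies = "subspecies")
}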
updateGeneTable <-
function(genetable){
chromosomerows <- grep("CHR", genetable[,1], ignore.case=T, value=F)
chromosomeheaders <- genetable[chromosomerows,]
totalrows <- nrow(genetable)
resultslist <- list()
for(chr in 1:length(chromosomerows)){
if(chr == length(chromosomerows)){
goto <- totalrows
}
else{
goto <- chromosomerows[(chr+1)]-1
}
chrlength <- genetable[chromosomerows[chr],5]
this_genes <- genetable[(chromosomerows[chr]+1):goto,]
new_this_genes <- this_genes
k <- 1
if(!this_genes$Left[1]==1){
new_this_genes <- insertRow(new_this_genes,newrow=c("IG","",1,1,this_genes$Left[1]-1,this_genes$Left[1]-1),index=1)
k <- k + 1
}
for(i in 1:nrow(this_genes)){
if(i == nrow(this_genes)) break
if(this_genes$Left[i+1] - this_genes$Right[i] > 1){
k <- k+1
new_this_genes <- insertRow(new_this_genes,newrow=c("IG","",1,this_genes$Right[i]+1,this_genes$Left[(i+1)]-1, this_genes$Left[(i+1)]-this_genes$Right[i]-1),index=k)
}
k <- k+1
}
if(!this_genes$Right[nrow(this_genes)]==chrlength){
new_this_genes <- insertRow(new_this_genes,newrow=c("IG","",1,this_genes$Right[nrow(this_genes)]+1, chrlength, chrlength-this_genes$Right[nrow(this_genes)]),index=k+1)
}
rownames(new_this_genes) <- 1:nrow(new_this_genes)
}
new_this_genes[,3:6] <- sapply(new_this_genes[,3:6],function(x) (as.numeric(x)))
return(new_this_genes)
} |
app_sys <- function(...){
system.file(..., package = "ReviewR")
}
get_golem_config <- function(
value,
config = Sys.getenv("R_CONFIG_ACTIVE", "default"),
use_parent = TRUE
){
config::get(
value = value,
config = config,
file = app_sys("golem-config.yml"),
use_parent = use_parent
)
} |
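# Illustrative call (added): read a key from the package's golem-config.yml for
# the active configuration; "app_prod" is a conventional golem key and is used
# here only as an example.
if (FALSE) {
  get_golem_config("app_prod")
}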
fixedLogitLassoInf=function(x,y,beta,lambda,alpha=.1, type=c("partial,full"), tol.beta=1e-5, tol.kkt=0.1,
gridrange=c(-100,100), bits=NULL, verbose=FALSE,
linesearch.try=10, this.call=NULL){
type = match.arg(type)
checkargs.xy(x,y)
if (missing(beta) || is.null(beta)) stop("Must supply the solution beta")
if (missing(lambda) || is.null(lambda)) stop("Must supply the tuning parameter value lambda")
checkargs.misc(beta=beta,lambda=lambda,alpha=alpha,
gridrange=gridrange,tol.beta=tol.beta,tol.kkt=tol.kkt)
if (!is.null(bits) && !requireNamespace("Rmpfr",quietly=TRUE)) {
warning("Package Rmpfr is not installed, reverting to standard precision")
bits = NULL
}
n=length(y)
p=ncol(x)
if(length(beta)!=p+1) stop("Since family='binomial', beta must be of length ncol(x)+1, that is, it should include an intercept")
vars = which(abs(beta[-1]) > tol.beta / sqrt(colSums(x^2)))
nvar=length(vars)
pv=vlo=vup=sd=rep(NA, nvar)
ci=tailarea=matrix(NA,nvar,2)
bhat=c(beta[1],beta[-1][vars])
s2=sign(bhat)
xm=cbind(1,x[,vars])
xnotm=x[,-vars]
etahat = xm %*% bhat
prhat = as.vector(exp(etahat) / (1 + exp(etahat)))
ww=prhat*(1-prhat)
w=diag(ww)
z=etahat+(y-prhat)/ww
g=scale(t(x),FALSE,1/ww)%*%(z-etahat)/lambda
if (any(abs(g) > 1+tol.kkt) )
warning(paste("Solution beta does not satisfy the KKT conditions",
"(to within specified tolerances)"))
if(length(vars)==0){
cat("Empty model",fill=T)
return()
}
if (any(sign(g[vars]) != sign(beta[-1][vars])))
warning(paste("Solution beta does not satisfy the KKT conditions",
"(to within specified tolerances). You might try rerunning",
"glmnet with a lower setting of the",
"'thresh' parameter, for a more accurate convergence."))
MM=solve(scale(t(xm),F,1/ww)%*%xm)
gm = c(0,-g[vars]*lambda)
dbeta = MM %*% gm
MM = MM*n
bbar = (bhat - dbeta)*sqrt(n)
A1= matrix(-(mydiag(s2))[-1,],nrow=length(s2)-1)
b1= ((s2 * dbeta)[-1])*sqrt(n)
V = (diag(length(bbar))[-1,])/sqrt(n)
null_value = rep(0,nvar)
if (type=='full') {
is_wide = n < (2 * p)
if (!is_wide) {
M = debiasingMatrix(1/n*(scale(t(x),FALSE,1/ww)%*%x), is_wide, n, vars, verbose=FALSE, max_try=linesearch.try, warn_kkt=TRUE)
} else {
M = debiasingMatrix(t(scale(t(x),1/sqrt(ww))), is_wide, n, vars, verbose=FALSE, max_try=linesearch.try, warn_kkt=TRUE)
}
I <- matrix(diag(dim(xm)[2])[-1,],nrow=dim(xm)[2]-1)
if (is.null(dim(M))) {
M_notE <- M[-vars]
} else {
M_notE <- M[,-vars]
}
M_notE = matrix(M_notE,nrow=nvar)
V <- matrix(cbind(I/sqrt(n),M_notE[,-1]/n),nrow=dim(xm)[2]-1)
xnotm_w = scale(t(xnotm),FALSE,1/ww)
xnotm_w_xm = xnotm_w%*%xm
c <- matrix(c(gm[-1],xnotm_w_xm%*%(-dbeta)),ncol=1)
d <- -dbeta[-1]
null_value = -(M[,-1]%*%c/n - d)
A0 = matrix(0,ncol(xnotm),length(bbar))
A0 = cbind(A0,diag(nrow(A0)))
fill = matrix(0,nrow(A1),ncol(xnotm))
A1 = cbind(A1,fill)
A1 = rbind(A1,A0,-A0)
b1 = matrix(c(b1,rep(lambda,2*nrow(A0))),ncol=1)
MMbr = (xnotm_w%*%xnotm - xnotm_w_xm%*%(MM/n)%*%t(xnotm_w_xm))*n
MM = cbind(MM,matrix(0,nrow(MM),ncol(MMbr)))
MMbr = cbind(matrix(0,nrow(MMbr),nrow(MM)),MMbr)
MM = rbind(MM,MMbr)
etahat_bbar = xm %*% (bbar/sqrt(n))
gnotm = (scale(t(xnotm),FALSE,1/ww)%*%(z-etahat_bbar))*sqrt(n)
bbar = matrix(c(bbar,gnotm),ncol=1)
}
if (is.null(dim(V))) V=matrix(V,nrow=1)
tol.poly = 0.01
if (max((A1 %*% bbar) - b1) > tol.poly)
stop(paste("Polyhedral constraints not satisfied; you must recompute beta",
"more accurately. With glmnet, make sure to use exact=TRUE in coef(),",
"and check whether the specified value of lambda is too small",
"(beyond the grid of values visited by glmnet).",
"You might also try rerunning glmnet with a lower setting of the",
"'thresh' parameter, for a more accurate convergence."))
sign=numeric(nvar)
coef0=numeric(nvar)
for(j in 1:nvar){
if (verbose) cat(sprintf("Inference for variable %i ...\n",vars[j]))
if (is.null(dim(V))) vj = V
else vj = matrix(V[j,],nrow=1)
coef0[j] = vj%*%bbar
sign[j] = sign(coef0[j])
vj = vj * sign[j]
limits.info = TG.limits(bbar, A1, b1, vj, Sigma=MM)
a = TG.pvalue.base(limits.info, null_value=null_value[j], bits=bits)
pv[j] = a$pv
if (is.na(s2[j])) {
pv[j] = 2 * min(pv[j], 1 - pv[j])
}
vlo[j] = a$vlo
vup[j] = a$vup
sd[j] = a$sd
if (type=='full') {
vlo[j] = vlo[j]/sqrt(n)
vup[j] = vup[j]/sqrt(n)
sd[j] = sd[j]/sqrt(n)
}
a = TG.interval.base(limits.info,
alpha=alpha,
gridrange=gridrange,
flip=(sign[j]==-1),
bits=bits)
ci[j,] = (a$int-null_value[j])
tailarea[j,] = a$tailarea
}
se0 = sqrt(diag(V%*%MM%*%t(V)))
zscore0 = (coef0+null_value)/se0
out = list(type=type,lambda=lambda,pv=pv,ci=ci,
tailarea=tailarea,vlo=vlo,vup=vup,sd=sd,
vars=vars,alpha=alpha,coef0=coef0,zscore0=zscore0,
call=this.call,
info.matrix=MM)
class(out) = "fixedLogitLassoInf"
return(out)
}
print.fixedLogitLassoInf <- function(x, tailarea=TRUE, ...) {
cat("\nCall:\n")
dput(x$call)
cat(sprintf("\nStandard deviation of noise (specified or estimated) sigma = %0.3f\n",
x$sigma))
cat(sprintf("\nTesting results at lambda = %0.3f, with alpha = %0.3f\n",x$lambda,x$alpha))
cat("",fill=T)
tab = cbind(x$vars,
round(x$coef0,3),
round(x$zscore0,3),
round(x$pv,3),round(x$ci,3))
colnames(tab) = c("Var", "Coef", "Z-score", "P-value", "LowConfPt", "UpConfPt")
if (tailarea) {
tab = cbind(tab,round(x$tailarea,3))
colnames(tab)[(ncol(tab)-1):ncol(tab)] = c("LowTailArea","UpTailArea")
}
rownames(tab) = rep("",nrow(tab))
print(tab)
cat(sprintf("\nNote: coefficients shown are %s regression coefficients\n",
ifelse(x$type=="partial","partial","full")))
invisible()
} |
init_wd <- getwd()
knitr::opts_chunk$set(echo = TRUE, comment = "#>")
knitr::opts_knit$set(root.dir = tempdir(check = TRUE))
pacman::p_load(testthat, exampletestr, stringr)
usethis::create_package("tempkg", open = FALSE)
fs::file_copy(
system.file("extdata", c("detect.R", "match.R"), package = "exampletestr"),
"tempkg/R/"
)
knitr::opts_knit$set(root.dir = paste0(tempdir(), "/", "tempkg"))
usethis::proj_set(".")
knitr::opts_knit$set(root.dir = tempdir(check = TRUE))
if (fs::dir_exists("tempkg")) fs::dir_delete("tempkg")
knitr::opts_knit$set(root.dir = init_wd) |
transformdata.back <- function(i.data, i.name = "rates", i.cutoff.original = NA, i.range.x.final = NA, i.fun = mean) {
if (is.na(i.cutoff.original)) i.cutoff.original <- min(as.numeric(rownames(i.data)[1:(min(3, NROW(i.data)))]))
if (i.cutoff.original < 1) i.cutoff.original <- 1
if (i.cutoff.original > 53) i.cutoff.original <- 53
if (any(is.na(i.range.x.final)) | !is.numeric(i.range.x.final) | length(i.range.x.final) != 2) i.range.x.final <- c(min(as.numeric(rownames(i.data)[1:(min(3, NROW(i.data)))])), max(as.numeric(rownames(i.data)[(max(1, NROW(i.data) - 2)):NROW(i.data)])))
if (i.range.x.final[1] < 1) i.range.x.final[1] <- 1
if (i.range.x.final[1] > 53) i.range.x.final[1] <- 53
if (i.range.x.final[2] < 1) i.range.x.final[2] <- 1
if (i.range.x.final[2] > 53) i.range.x.final[2] <- 53
if (i.range.x.final[1] == i.range.x.final[2]) i.range.x.final[2] <- i.range.x.final[2] - 1
if (i.range.x.final[2] == 0) i.range.x.final[2] <- 53
n.seasons <- NCOL(i.data)
column <- NULL
seasons <- data.frame(column = names(i.data), stringsAsFactors = F) %>%
extract(column, into = c("anioi", "aniof", "aniow"), regex = "^[^\\d]*(\\d{4})(?:[^\\d]*(\\d{4}))?(?:[^\\d]*(\\d{1,}))?[^\\d]*$", remove = F)
seasons[is.na(seasons)] <- ""
seasons$aniof[seasons$aniof == ""] <- seasons$anioi[seasons$aniof == ""]
seasonsname <- seasons$anioi
seasonsname[seasons$aniof != ""] <- paste(seasonsname[seasons$aniof != ""], seasons$aniof[seasons$aniof != ""], sep = "/")
seasonsname[seasons$aniow != ""] <- paste(seasonsname[seasons$aniow != ""], "(", seasons$aniow[seasons$aniow != ""], ")", sep = "")
seasons$season <- seasonsname
rm("seasonsname")
names(i.data) <- seasons$season
i.data$week <- as.numeric(row.names(i.data))
season <- data <- week <- NULL
data.out <- i.data %>%
gather(season, data, -week, na.rm = T)
data.out$year <- NA
data.out$year[data.out$week < i.cutoff.original] <- as.numeric(substr(data.out$season, 6, 9))[data.out$week < i.cutoff.original]
data.out$year[data.out$week >= i.cutoff.original] <- as.numeric(substr(data.out$season, 1, 4))[data.out$week >= i.cutoff.original]
data.out$season <- NULL
year <- week <- NULL
data.out <- data.out %>%
filter(!is.na(year) & !is.na(week)) %>%
group_by(year, week) %>%
summarise(data = i.fun(data, na.rm = T)) %>%
arrange(year, week)
week.f <- i.range.x.final[1]
week.l <- i.range.x.final[2]
if (week.f > week.l) {
i.range.x.values.52 <- data.frame(week = c(week.f:52, 1:week.l), week.no = 1:(52 - week.f + 1 + week.l))
i.range.x.values.53 <- data.frame(week = c(week.f:53, 1:week.l), week.no = 1:(53 - week.f + 1 + week.l))
data.out$season <- ""
data.out$season[data.out$week < week.f] <- paste(data.out$year - 1, data.out$year, sep = "/")[data.out$week < week.f]
data.out$season[data.out$week >= week.f] <- paste(data.out$year, data.out$year + 1, sep = "/")[data.out$week >= week.f]
seasons.all <- unique(data.out$season)
seasons.53 <- unique(subset(data.out, data.out$week == 53 & !is.na(data.out$data))$season)
seasons.52 <- seasons.all[!(seasons.all %in% seasons.53)]
data.scheme <- rbind(
merge(data.frame(season = seasons.52, stringsAsFactors = F), i.range.x.values.52, stringsAsFactors = F),
merge(data.frame(season = seasons.53, stringsAsFactors = F), i.range.x.values.53, stringsAsFactors = F)
)
data.scheme$year <- NA
data.scheme$year[data.scheme$week < week.f] <- as.numeric(substr(data.scheme$season, 6, 9))[data.scheme$week < week.f]
data.scheme$year[data.scheme$week >= week.f] <- as.numeric(substr(data.scheme$season, 1, 4))[data.scheme$week >= week.f]
} else {
i.range.x.values.52 <- data.frame(week = week.f:min(52, week.l), week.no = 1:(min(52, week.l) - week.f + 1))
i.range.x.values.53 <- data.frame(week = week.f:week.l, week.no = 1:(week.l - week.f + 1))
data.out$season <- ""
data.out$season <- paste(data.out$year, data.out$year, sep = "/")
seasons.all <- unique(data.out$season)
seasons.53 <- unique(subset(data.out, data.out$week == 53 & !is.na(data.out$data))$season)
seasons.52 <- seasons.all[!(seasons.all %in% seasons.53)]
data.scheme <- rbind(
merge(data.frame(season = seasons.52, stringsAsFactors = F), i.range.x.values.52, stringsAsFactors = F),
merge(data.frame(season = seasons.53, stringsAsFactors = F), i.range.x.values.53, stringsAsFactors = F)
)
data.scheme$year <- NA
data.scheme$year <- as.numeric(substr(data.scheme$season, 1, 4))
}
data.final <- merge(data.scheme, data.out, by = c("season", "year", "week"), all.x = T)
data.final$yrweek <- data.final$year * 100 + data.final$week
data.final$week.no <- NULL
data.final <- data.final[order(data.final$yrweek), ]
names(data.final)[names(data.final) == "data"] <- i.name
transformdata.back.output <- list(data = data.final)
transformdata.back.output$call <- match.call()
return(transformdata.back.output)
} |
kpPlotRegions <- function(karyoplot, data, data.panel=1, r0=NULL, r1=NULL,
col="black",
border=NULL, avoid.overlapping=TRUE, num.layers=NULL,
layer.margin=0.05, clipping=TRUE, ...) {
if(missing(karyoplot)) stop("The parameter 'karyoplot' is required")
if(!methods::is(karyoplot, "KaryoPlot")) stop("'karyoplot' must be a valid 'KaryoPlot' object")
if(missing(data)) stop("The parameter 'data' is required")
data <- toGRanges(data)
if(!methods::is(data, "GRanges")) stop("'data' must be a GRanges object or something accepted by toGRanges")
if(length(data)==0) {
return(invisible(karyoplot))
}
if(is.null(border)) border <- col
chr <- as.character(seqnames(data))
x0 <- start(data)
x1 <- end(data)
if(avoid.overlapping==TRUE) {
bins <- disjointBins(data)
if(is.null(num.layers)) num.layers <- max(bins)
layer.height <- (1-((num.layers-1)*layer.margin))/num.layers
y0 <- (layer.height+layer.margin) * (bins-1)
y1 <- layer.height * (bins) + layer.margin * (bins-1)
} else {
y0 <- 0
y1 <- 1
}
kpRect(karyoplot=karyoplot, chr=chr, x0=x0, x1=x1, y0=y0, y1=y1, ymin=0, ymax=1,
r0=r0, r1=r1, data.panel=data.panel, col=col,
border=border, clipping=clipping, ... )
invisible(karyoplot)
} |
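# Usage sketch (added; not part of the original source): plot two regions on
# chromosome 1 of hg19. Assumes karyoploteR (and its toGRanges re-export from
# regioneR) is loaded.
if (FALSE) {
  regs <- toGRanges(data.frame(chr = "chr1",
                               start = c(1e6, 3e7),
                               end = c(2e7, 5e7)))
  kp <- plotKaryotype(genome = "hg19", chromosomes = "chr1")
  kpPlotRegions(kp, data = regs, col = "#AACCFF")
}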
test_that("plot succeeds even if some computation fails", {
df <- data_frame(x = 1:2, y = 1)
p1 <- ggplot(df, aes(x, y)) + geom_point()
b1 <- ggplot_build(p1)
expect_equal(length(b1$data), 1)
p2 <- p1 + geom_smooth()
suppressWarnings(
expect_warning(b2 <- ggplot_build(p2), "Computation failed")
)
expect_equal(length(b2$data), 2)
})
test_that("error message is thrown when aesthetics are missing", {
p <- ggplot(mtcars) + stat_sum()
expect_error(ggplot_build(p), "x and y$")
}) |
context("Combining two graph objects into a single graph object")
test_that("graphs can be combined", {
nodes_1 <- create_node_df(n = 10)
edges_1 <-
create_edge_df(
from = 1:9,
to = 2:10)
graph_1 <-
create_graph(
nodes_df = nodes_1,
edges_df = edges_1)
nodes_2 <- create_node_df(n = 10)
edges_2 <-
create_edge_df(
from = 1:9,
to = 2:10)
graph_2 <-
create_graph(
nodes_df = nodes_2,
edges_df = edges_2)
combined_graph_1 <-
combine_graphs(graph_1, graph_2)
expect_is(
combined_graph_1, "dgr_graph")
expect_true(
all(
names(combined_graph_1) ==
c(
"graph_info",
"nodes_df",
"edges_df",
"global_attrs",
"directed",
"last_node",
"last_edge",
"node_selection",
"edge_selection",
"cache",
"graph_actions",
"graph_log")))
expect_true(
!is.null(combined_graph_1$global_attrs))
expect_true(
is_graph_directed(combined_graph_1))
expect_true(
ncol(combined_graph_1$nodes_df) == 3)
expect_true(
nrow(combined_graph_1$nodes_df) == 20)
expect_true(
ncol(combined_graph_1$edges_df) == 4)
expect_true(
nrow(combined_graph_1$edges_df) == 18)
}) |
stopifnot(require("testthat"),
require("glmmTMB"),
require("lme4"))
source(system.file("test_data/glmmTMB-test-funs.R",
package="glmmTMB", mustWork=TRUE))
data(sleepstudy, cbpp, package = "lme4")
context("variance structures")
test_that("diag", {
expect_equal(logLik(fm_diag1),logLik(fm_diag2_lmer))
expect_equal(logLik(fm_diag1),logLik(fm_diag2))
})
test_that("cs_us", {
expect_equal(logLik(fm_us1),logLik(fm_cs1))
expect_equal(logLik(fm_us1),logLik(fm_us1_lmer))
})
test_that("cs_homog", {
expect_equal(logLik(fm_nest),logLik(fm_nest_lmer))
})
test_that("basic ar1", {
vv <- VarCorr(fm_ar1)[["cond"]]
cc <- cov2cor(vv[[2]])
expect_equal(cc[1,],cc[,1])
expect_equal(unname(cc[1,]),
cc[1,2]^(0:(nrow(cc)-1)))
})
test_that("print ar1 (>1 RE)", {
cco <- gsub(" +"," ",
trimws(capture.output(print(summary(fm_ar1),digits=1))))
expect_equal(cco[12:14],
c("Subject (Intercept) 4e-01 0.6",
"Subject.1 row1 4e+03 60.8 0.87 (ar1)",
"Residual 8e+01 8.9"))
})
test_that("ar1 requires factor time", {
skip_on_cran()
expect_error(glmmTMB(Reaction ~ 1 +
(1|Subject) + ar1(as.numeric(row)+0| Subject), fsleepstudy),
"expects a single")
expect_is(glmmTMB(Reaction ~ 1 +
(1|Subject) + ar1(relevel(factor(row),"2")+0| Subject),
fsleepstudy),
"glmmTMB")
})
get_vcout <- function(x,g="\\bSubject\\b") {
cc <- capture.output(print(VarCorr(x)))
cc1 <- grep(g,cc,value=TRUE,perl=TRUE)
ss <- strsplit(cc1,"[^[:alnum:][:punct:]]+")[[1]]
return(ss[nchar(ss)>0])
}
test_that("varcorr_print", {
skip_on_cran()
ss <- get_vcout(fm_cs1)
expect_equal(length(ss),5)
expect_equal(ss[4:5],c("0.081","(cs)"))
ss2 <- get_vcout(fm_ar1,g="\\bSubject.1\\b")
expect_equal(length(ss2),5)
expect_equal(ss2[4:5],c("0.873","(ar1)"))
set.seed(101)
dd <- data.frame(y=rnorm(1000),c=factor(rep(1:2,500)),
w=factor(rep(1:10,each=100)),
s=factor(rep(1:10,100)))
m1 <- suppressWarnings(glmmTMB(y~c+(c|w)+(1|s),data=dd,
family=gaussian))
cc <- squash_white(capture.output(print(VarCorr(m1),digits=2)))
expect_equal(cc,
c("Conditional model:", "Groups Name Std.Dev. Corr",
"w (Intercept) 3.1e-05",
"c2 4.9e-06 0.98",
"s (Intercept) 3.4e-05",
"Residual 9.6e-01"))
})
test_that("cov_struct_order", {
skip_on_cran()
ff <- system.file("test_data","cov_struct_order.rds",package="glmmTMB")
if (nchar(ff)>0) {
dat <- readRDS(ff)
} else {
set.seed(101)
nb <- 100
ns <- nb*3
nt <- 100
cor <- .7
dat <- data.frame(Block = factor(rep(1:nb, each = ns/nb*nt)),
Stand = factor(rep(1:ns, each = nt)),
Time = rep(1:nt, times = ns),
blockeff = rep(rnorm(nb, 0, .5), each = ns/nb*nt),
standeff = rep(rnorm(ns, 0, .8), each = nt),
resid = c(t(MASS::mvrnorm(ns, mu = rep(0, nt),
Sigma = 1.2*cor^abs(outer(0:(nt-1),0:(nt-1),"-"))))))
dat$y <- with(dat, 5 + blockeff + standeff + resid)+rnorm(nrow(dat), 0, .1)
dat$Time <- factor(dat$Time)
}
fit1 <- glmmTMB(y ~ (1|Block) + (1|Stand)+ ar1(Time +0|Stand), data = dat)
expect_equal(unname(fit1$fit$par),
c(4.98852432, -4.22220615, -0.76452645, -0.24762133, 0.08879302, 1.00022657), tol=1e-3)
}) |
ps_plothts <- function(files){
stdin()
tzo <- conf <- mapalette <- NULL
load(file = system.file("extdata/settings.RData",package="htsr"))
nbst <- length (files)
ser <- as.character(NA)[1:nbst]
for(i in 1:nbst) {
ser[i] <- paste0("ser_id",i)
}
myfil <- tibble::tibble(filename = files, series_id = ser, color = "black",
lineshape = 1, linewidth = 0.2, plotpoint = FALSE, pointshape = 20,
pointsize = 8)
fil <- myfil
save(nbst, fil, conf, mapalette, tzo, file = system.file("extdata/settings.RData",package="htsr"))
runApp(system.file("extdata/app_plothts", package="htsr"))
} |
SHASHo <- function (mu.link = "identity", sigma.link = "log",
nu.link = "identity", tau.link = "log")
{
mstats <- checklink("mu.link", "Sinh-Arcsinh", substitute(mu.link),
c("inverse", "log", "identity", "own"))
dstats <- checklink("sigma.link", "Sinh-Arcsinh", substitute(sigma.link),
c("inverse", "log", "identity", "own"))
vstats <- checklink("nu.link", "Sinh-Arcsinh", substitute(nu.link),
c("inverse", "log", "identity", "own"))
tstats <- checklink("tau.link", "Sinh-Arcsinh", substitute(tau.link),
c("inverse", "log", "identity", "own"))
structure(list(family = c("SHASHo", "Sinh-Arcsinh"),
parameters = list(mu = TRUE, sigma = TRUE, nu = TRUE, tau = TRUE),
nopar = 4, type = "Continuous",
mu.link = as.character(substitute(mu.link)),
sigma.link = as.character(substitute(sigma.link)),
nu.link = as.character(substitute(nu.link)),
tau.link = as.character(substitute(tau.link)),
mu.linkfun = mstats$linkfun, sigma.linkfun = dstats$linkfun,
nu.linkfun = vstats$linkfun, tau.linkfun = tstats$linkfun,
mu.linkinv = mstats$linkinv, sigma.linkinv = dstats$linkinv,
nu.linkinv = vstats$linkinv, tau.linkinv = tstats$linkinv,
mu.dr = mstats$mu.eta, sigma.dr = dstats$mu.eta,
nu.dr = vstats$mu.eta, tau.dr = tstats$mu.eta,
dldm = function(y, mu, sigma, nu, tau)
{
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldm <- (1/(sigma*(1+z^2)^(1/2)))*(r*tau*c - ((tau*sinh(x))/c) +
z/(1+z^2)^(1/2))
dldm
},
d2ldm2 = function(y, mu, sigma, nu, tau)
{
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldm <- (1/(sigma*(1+z^2)^(1/2)))*(r*tau*c - ((tau*sinh(x))/c) +
z/(1+z^2)^(1/2))
d2ldm2 <- -dldm*dldm
d2ldm2
},
dldd = function(y, mu, sigma, nu, tau)
{
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldd <- ((z)/(sigma*(1+z^2)^(1/2)))*(r*tau*c-((tau*sinh(x))/c)+
z/((1+z^2)^(1/2)))-(1/sigma)
dldd
},
d2ldd2 = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldd <- ((z)/(sigma*(1+z^2)^(1/2)))*(r*tau*c-((tau*sinh(x))/c)+
z/((1+z^2)^(1/2)))-(1/sigma)
d2ldd2 <- -dldd*dldd
d2ldd2
},
dldv = function(y, mu, sigma, nu, tau)
{
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
dldv<- r*cosh(tau*asinh(z)-nu)-(1/c)*sinh(tau*asinh(z)-nu)
dldv
},
d2ldv2 = function(y, mu, sigma, nu, tau)
{
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
dldv<- r*cosh(tau*asinh(z)-nu)-(1/c)*sinh(tau*asinh(z)-nu)
d2ldv2 <- -dldv*dldv
d2ldv2
},
dldt = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
dldt <- (-r*cosh(tau*asinh(z)-nu)+(1/c)*sinh(tau*asinh(z)-nu))*
(asinh(z)) + 1/tau
dldt
},
d2ldt2 = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
dldt <- (-r*cosh(tau*asinh(z)-nu)+(1/c)*sinh(tau*asinh(z)-nu))*
(asinh(z)) + 1/tau
d2ldt2 <- -dldt*dldt
d2ldt2
},
d2ldmdd = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldm <- (1/(sigma*(1+z^2)^(1/2)))*(r*tau*c - ((tau*sinh(x))/c) +
z/(1+z^2)^(1/2))
dldd <- ((z)/(sigma*(1+z^2)^(1/2)))*(r*tau*c-((tau*sinh(x))/c)+
z/((1+z^2)^(1/2)))-(1/sigma)
d2ldmdd <- -dldd*dldm
d2ldmdd
},
d2ldmdv = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldm <- (1/(sigma*(1+z^2)^(1/2)))*(r*tau*c - ((tau*sinh(x))/c) +
z/(1+z^2)^(1/2))
dldv<- r*cosh(tau*asinh(z)-nu)-(1/c)*sinh(tau*asinh(z)-nu)
d2ldmdv <- -dldm*dldv
d2ldmdv
},
d2ldmdt = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldm <- (1/(sigma*(1+z^2)^(1/2)))*(r*tau*c - ((tau*sinh(x))/c) +
z/(1+z^2)^(1/2))
dldt <- (-r*cosh(tau*asinh(z)-nu)+(1/c)*sinh(tau*asinh(z)-nu))*
(asinh(z)) + 1/tau
d2ldmdt <- -dldm*dldt
d2ldmdt
},
d2ldddv = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldd <- ((z)/(sigma*(1+z^2)^(1/2)))*(r*tau*c-((tau*sinh(x))/c)+
z/((1+z^2)^(1/2)))-(1/sigma)
dldv<- r*cosh(tau*asinh(z)-nu)-(1/c)*sinh(tau*asinh(z)-nu)
d2ldddv <- -dldd*dldv
d2ldddv
},
d2ldddt = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
x <- tau*asinh(z)-nu
dldd <- ((z)/(sigma*(1+z^2)^(1/2)))*(r*tau*c-((tau*sinh(x))/c)+
z/((1+z^2)^(1/2)))-(1/sigma)
dldt <- (-r*cosh(tau*asinh(z)-nu)+(1/c)*sinh(tau*asinh(z)-nu))*
(asinh(z)) + 1/tau
d2ldddt <- -dldd*dldt
d2ldddt
},
d2ldvdt = function(y, mu, sigma, nu, tau) {
z <- (y-mu)/sigma
r <- (1/2) * (exp(tau * asinh(z) - nu) - exp(-tau * asinh(z) + nu))
c <- (1/2)*(exp(tau*asinh(z)-nu)+exp(-(tau*asinh(z)-nu)))
dldv<- r*cosh(tau*asinh(z)-nu)-(1/c)*sinh(tau*asinh(z)-nu)
dldt <- (-r*cosh(tau*asinh(z)-nu)+(1/c)*sinh(tau*asinh(z)-nu))*
(asinh(z)) + 1/tau
d2ldvdt <- -dldv*dldt
d2ldvdt
},
G.dev.incr = function(y, mu, sigma, nu, tau, ...) -2 *
dSHASHo(y, mu, sigma, nu, tau, log = TRUE),
rqres = expression(rqres(pfun = "pSHASHo",
type = "Continuous", y = y, mu = mu, sigma = sigma, nu = nu, tau = tau)),
mu.initial = expression(mu <- (y + mean(y))/2),
sigma.initial = expression(sigma <- rep(sd(y)/5, length(y))),
nu.initial = expression(nu <- rep(0.5, length(y))),
tau.initial = expression(tau <- rep(0.5, length(y))),
mu.valid = function(mu) TRUE,
sigma.valid = function(sigma) all(sigma > 0),
nu.valid = function(nu) TRUE,
tau.valid = function(tau) all(tau > 0),
y.valid = function(y) TRUE,
mean = function(mu, sigma, nu, tau) {
q <- 1 / tau
K1 <- besselK(0.25,(q+1) / 2)
K2 <- besselK(0.25,(q-1) / 2)
P <- exp(1/4) / (8*pi)^(1/2) * (K1 + K2)
return( mu + sigma * sinh(nu/tau) * P)
},
variance = function(mu, sigma, nu, tau) {
q1 <- 1 / tau
K1 <- besselK(0.25, (q1+1) / 2)
K2 <- besselK(0.25, (q1-1) / 2)
P1 <- exp(1/4) / (8*pi)^(1/2) * (K1 + K2)
q2 <- 2 / tau
K3 <- besselK(0.25, (q2+1) / 2)
K4 <- besselK(0.25, (q2-1) / 2)
P2 <- exp(1/4) / (8*pi)^(1/2) * (K3 + K4)
return( sigma^2 / 2 * (cosh(2*nu/tau) * P2 -1) - sigma^2 * (sinh(nu/tau) * P1)^2)
}
),
class = c("gamlss.family", "family"))
}
dSHASHo <- function(x, mu=0, sigma=1, nu=0, tau=1, log = FALSE)
{
if (any(sigma < 0))
stop(paste("sigma must be positive", "\n", ""))
if (any(tau < 0))
stop(paste("tau must be positive", "\n", ""))
z <- (x-mu)/sigma
c <- cosh(tau*asinh(z)-nu)
r <- sinh(tau*asinh(z)-nu)
loglik <- -log(sigma) + log(tau) -0.5*log(2*pi) -0.5*log(1+(z^2)) +log(c) -0.5*(r^2)
if (log == FALSE)
fy <- exp(loglik)
else fy <- loglik
fy
}
pSHASHo <- function(q, mu=0, sigma=1, nu=0, tau=1,
lower.tail = TRUE, log.p = FALSE){
if (any(sigma < 0))
stop(paste("sigma must be positive", "\n", ""))
if (any(tau < 0))
stop(paste("tau must be positive", "\n", ""))
z <- (q-mu)/sigma
r <- sinh(tau * asinh(z) - nu)
p <- pNO(r)
if (lower.tail == TRUE)
p <- p
else p <- 1 - p
if (log.p == FALSE)
p <- p
else p <- log(p)
p
}
qSHASHo <- function(p, mu=0, sigma=1, nu=0, tau=1, lower.tail = TRUE,
log.p = FALSE)
{
if (log.p==TRUE) p <- exp(p) else p <- p
if (any(p <= 0)|any(p >= 1)) stop(paste("p must be between 0 and 1", "\n", ""))
if (lower.tail==TRUE) p <- p else p <- 1-p
y <- mu + sigma*sinh((1/tau)*asinh(qnorm(p))+(nu/tau))
y
}
rSHASHo <- function(n, mu=0, sigma=1, nu=0, tau=1){
if (any(n <= 0))
stop(paste("n must be a positive integer", "\n", ""))
n <- ceiling(n)
p <- runif(n)
r <- qSHASHo(p, mu = mu, sigma = sigma, nu=nu, tau=tau)
r
} |
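# Quick self-consistency sketch (added, not in the original file): qSHASHo and
# pSHASHo should be inverses for any admissible parameter values. Assumes the
# gamlss.dist helpers (e.g. pNO) are available.
if (FALSE) {
  p <- c(0.1, 0.5, 0.9)
  q <- qSHASHo(p, mu = 1, sigma = 2, nu = 0.3, tau = 1.5)
  pSHASHo(q, mu = 1, sigma = 2, nu = 0.3, tau = 1.5)  # ~ 0.1 0.5 0.9
}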
zunpooled_TX <-
function(data, Ns, delta) {
N <- sum(Ns)
if (!is.null(data)) {
x <- data[1,1]
y <- data[1,2]
} else {
x <- rep(0:Ns[1], each=(Ns[2]+1))
y <- rep.int(0:Ns[2], Ns[1]+1)
}
p1 <- x/Ns[1]
p2 <- y/Ns[2]
numerator <- p1 - p2 - delta
denominator <- sqrt(p2*(1-p2)/Ns[2]+(p1)*(1-p1)/Ns[1])
TX <- numerator / denominator
TX[numerator == 0 & denominator == 0] <- 0
return(cbind(x, y, TX, deparse.level=0))
} |
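# Illustrative call (added): unpooled z statistic for a single 2x2 table with
# 7/10 successes in group 1 and 3/12 in group 2, testing a null difference of 0.
if (FALSE) {
  zunpooled_TX(data = matrix(c(7, 3), nrow = 1), Ns = c(10, 12), delta = 0)
}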
library(kappalab)
mu <- game(c(0,runif(31)))
stopifnot(abs(mu@data - conjugate(conjugate(mu))@data) < 1e-6)
mu <- capacity(c(0,rep(1,15)))
stopifnot(abs(mu@data - conjugate(conjugate(mu))@data) < 1e-6)
mu <- card.game(c(0,rnorm(17)))
stopifnot(abs(mu@data - conjugate(conjugate(mu))@data) < 1e-6) |
print.tepDICA <-
function (x,...) {
res.tepDICA <- x
if (!inherits(res.tepDICA, "tepDICA")) stop ("no convenient data")
cat("**Results for Discriminant Correspondence Analysis**\n")
cat ("The analysis was performed on ", nrow(res.tepDICA$fi),
"individuals, described by", nrow(res.tepDICA$fj), "variables\n")
cat("*The results are available in the following objects:\n\n")
res <- array("", c(23, 2), list(1:23, c("name", "description")))
res[1,] <- c("$fi","Factor scores of the groups")
res[2,] <- c("$di","Squared distances of the groups")
res[3,] <- c("$ci","Contributions of the groups")
res[4,] <- c("$ri", "Cosines of the groups")
res[5,] <- c("$fj","Factor scores of the columns")
res[6,] <- c("$dj","square distances of the columns")
res[7,] <- c("$cj","Contributions for the columns")
res[8,] <- c("$rj", "Cosines of the columns")
res[9,] <- c("$lx", "Latent variables of X (DATA)")
res[10,] <- c("$ly", "Latent variables of Y (DESIGN)")
res[11,] <- c("$t","Explained Variance")
res[12,] <- c("$eigs","Eigenvalues")
res[13,] <- c("$M","masses")
res[14,] <- c("$W","weights")
res[15,] <- c("$c","center")
res[16,] <- c("$pdq","GSVD data")
res[17,] <- c("$X","X matrix to decompose")
res[18,] <- c("$hellinger","a boolean. TRUE if Hellinger distance was used")
res[19,] <- c("$symmetric","a boolean. TRUE if symmetric scores used for biplot.")
res[20,] <- c("$fii","Factor scores of the individuals")
res[21,] <- c("$dii","Squared distances of the individuals")
res[22,] <- c("$rii", "Cosines of the individuals")
res[23,] <- c("$assign","Information for assignment of individuals to groups")
print(res)
} |
osita<-function(n,wv,seed){
set.seed(seed)
koko<-floor(n/wv)
tulos<-matrix(0,koko+1,wv)
arpavec<-matrix(1,n,1)
i<-1
while (i<=n){
arpavec[i]<-i
i<-i+1
}
i<-1
while (i<=koko){
j<-1
while (j<=wv){
uusidim<-n-((i-1)*wv+j)+1
arpa<-unidis(uusidim)
tulos[i,j]<-arpavec[arpa]
if (arpa==1){
arpavec<-arpavec[2:uusidim]
}
else{
if (arpa==uusidim){
arpavec<-arpavec[1:(uusidim-1)]
}
else{
arpavecnew<-matrix(0,uusidim-1,1)
arpavecnew[1:(arpa-1)]<-arpavec[1:(arpa-1)]
arpavecnew[arpa:(uusidim-1)]<-arpavec[(arpa+1):uusidim]
arpavec<-arpavecnew
}
}
j<-j+1
}
i<-i+1
}
ylipitlkm<-n-wv*koko
j<-1
while (j<=ylipitlkm){
uusidim<-n-(koko*wv+j)+1
arpa<-unidis(uusidim)
tulos[koko+1,j]<-arpavec[arpa]
if (arpa==1){
arpavec=arpavec[2:uusidim]
}
else{
if (arpa==uusidim){
arpavec=arpavec[1:(uusidim-1)]
}
else{
arpavecnew<-matrix(0,uusidim-1,1)
arpavecnew[1:(arpa-1)]<-arpavec[1:(arpa-1)]
arpavecnew[arpa:(uusidim-1)]<-arpavec[(arpa+1):uusidim]
arpavec<-arpavecnew
}
}
j<-j+1
}
j<-ylipitlkm+1
while (j<=wv){
tulos[koko+1,j]<-NA
j<-j+1
}
return(tulos)
} |
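# Usage sketch (added): randomly partition indices 1..n into wv groups of
# (nearly) equal size; requires the package's unidis() helper. With n = 10 and
# wv = 3 the result is a 4 x 3 matrix whose last row is padded with NA.
if (FALSE) {
  osita(n = 10, wv = 3, seed = 42)
}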
context("uspr.R")
library('TreeTools')
test_that("Bad tree input is handled correctly", {
expect_error(USPRDist(PectinateTree(1:8), PectinateTree(2:9)))
expect_error(USPRDist(PectinateTree(1:8), PectinateTree(1:9)))
expect_error(USPRDist(PectinateTree(1:9), PectinateTree(1:8)))
list2 <- list(PectinateTree(1:8), BalancedTree(1:8))
list3 <- list(PectinateTree(1:8), BalancedTree(1:8), BalancedTree(1:8))
expect_error(USPRDist(list2, list3))
expect_error(USPRDist(list2, list3, checks = FALSE))
nwk2 <- as.Newick(list2)
nwk3 <- as.Newick(list3)
expect_error(replug_dist(nwk2, nwk3))
expect_error(tbr_dist(nwk2, nwk3, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE))
expect_error(uspr_dist(nwk2, nwk3, FALSE, FALSE, FALSE))
})
test_that("TBR options", {
bal8 <- BalancedTree(8)
pec8 <- PectinateTree(8)
expect_warning(TBRDist(bal8, pec8, maf = TRUE, exact = FALSE))
expect_warning(
expect_equal(list(tbr_exact = 0L, n_maf = 1L,
maf_1 = "(((t1,t2),(t3,t4)),(t7,t8),(t5,t6));",
maf_2 = "(((t1,t2),(t3,t4)),(t7,t8),(t5,t6));"),
TBRDist(bal8, maf = TRUE, countMafs = TRUE)))
expect_warning(expect_equal(0L, TBRDist(bal8, exact = TRUE)))
expect_equal(list(tbr_min = 1, tbr_max = 3, n_maf = 13),
TBRDist(bal8, pec8, optimize = FALSE, countMafs = TRUE))
})
test_that("SPR distances are calculated correctly", {
tree1 <- BalancedTree(10)
tree2 <- PectinateTree(10)
treeR <- ape::read.tree(text="(t1, (((t5, t7), (t9, (t3, t2))), (t4, ((t6, t8), t10))));")
list1 <- list(one = tree1, oneAgain = tree1, two = tree2, three = treeR)
list2 <- list(tree1, tree2)
expect_equivalent(2L, USPRDist(tree1, tree2))
expect_equivalent(c(0, 2L), USPRDist(list(tree1, tree2), tree1))
expect_equivalent(c(0, 2L), USPRDist(tree1, list(tree1, tree2)))
goodRet <- structure(c(0, 2, 4, 2, 4, 4),
Size = 4L,
Labels = names(list1),
Diag = FALSE,
Upper = FALSE,
class = 'dist')
expect_equal(goodRet, USPRDist(list1))
expect_equal(goodRet, ReplugDist(list1))
first <- ReplugDist(list1, list1[[1]], maf = TRUE)
each <- ReplugDist(list1, maf = TRUE)
expect_equivalent(first[[1]], as.matrix(each[[1]])[, 1])
expect_equivalent(first[[2]][-1], each[[2]][-1, 1])
expect_equivalent(first[[3]][-1], each[[3]][-1, 1])
expect_equivalent(c(0, 1, 4, 1, 4, 4),
as.integer(TBRDist(list1, exact = TRUE)))
expect_equivalent(c(0, 1, 3, 1, 3, 3),
as.integer(TBRDist(list1, exact = FALSE)$tbr_min))
first <- TBRDist(list1, list1[[1]], maf = TRUE)
each <- TBRDist(list1, maf = TRUE)
expect_equivalent(first[[1]], as.matrix(each[[1]])[1, ])
expect_equivalent(first[[2]][-1], as.matrix(each[[2]])[-1, 1])
expect_equal(matrix(c(0, 2, 0, 2, 2, 0, 4, 4), 4, 2, byrow = TRUE,
dimnames = list(names(list1), names(list2))),
ReplugDist(list1, list2, allPairs = TRUE))
expect_equivalent(ReplugDist(list1, tree2, maf = TRUE)$maf_2,
ReplugDist(list1, list2, allPairs = TRUE, maf = TRUE)$maf_2[, 2])
Test <- function (tree1, tree2) {
td <- TBRDist(tree1, tree2, exact = TRUE, approximate = TRUE)
expect_true(USPRDist(tree1, tree2) >= TBRDist(tree1, tree2, exact = TRUE))
expect_true(td$tbr_exact >= td$tbr_min)
expect_true(td$tbr_exact <= td$tbr_max)
td4 <- TBRDist(list(tree1, tree1, tree2, tree2),
list(tree1, tree2, tree1, tree2), exact = TRUE)
expect_equal(0L, td4[[1]])
expect_equal(td4[[2]], td4[[3]])
expect_equal(0L, td4[[4]])
sd4 <- USPRDist(list(tree1, tree1, tree2, tree2),
list(tree1, tree2, tree1, tree2))
expect_equal(0L, sd4[[1]])
expect_equal(sd4[[2]], sd4[[3]])
expect_equal(0L, sd4[[4]])
rd4 <- ReplugDist(list(tree1, tree1, tree2, tree2),
list(tree1, tree2, tree1, tree2))
expect_equal(0L, rd4[[1]])
expect_equal(rd4[[2]], rd4[[3]])
expect_equal(0L, rd4[[4]])
}
Test(tree1, tree2)
Test(PectinateTree(13), BalancedTree(13))
expect_warning(
expect_equal(invisible(),
TBRDist(tree1, tree2, exact = FALSE, approximate = FALSE))
)
})
test_that("MAF info is calculated", {
tree1 <- BalancedTree(8)
tree2 <- PectinateTree(8)
expect_equal(LnUnrooted(8) / log(2), MAFInfo(tree1, tree1))
expect_lt(MAFInfo(tree1, tree2), LnUnrooted(8) / log(2))
}) |
rsolveQP <-
function(objective, lower=0, upper=1, linCons,
control=list(solver="quadprog", invoke=c("R", "AMPL", "NEOS")))
{
solver <- control$solver
invoke <- control$invoke[1]
    if (invoke == "R") {
        # Dispatch to the in-R solver interface, e.g. rquadprogQP() or ripopQP()
        rfooQP <- match.fun(paste("r", solver, "QP", sep=""))
        ans <- rfooQP(objective, lower, upper, linCons, control)
    }
if (invoke == "AMPL" ) {
ans <- ramplQP(objective, lower, upper, linCons, control)
}
if (invoke == "NEOS" ) {
ans <- rneosQP(objective, lower, upper, linCons, control)
}
ans$solver <- paste(invoke, ans$solver)
ans
}
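# A minimal, hedged sketch of calling rsolveQP() directly with the in-R
# "quadprog" backend. The 'objective' and 'linCons' structures mirror the demo
# below: list(dvec, Dmat) and list(mat, lower, upper). The tiny two-variable
# problem and the name '.rsolveQP.tiny.demo' are illustrative assumptions, not
# part of any package.
.rsolveQP.tiny.demo <-
    function()
{
    # Minimise x' D x / 2 with D = 2*I (spread the weight evenly),
    # subject to 0 <= x <= 1 and the single equality constraint sum(x) == 1.
    objective <- list(dvec = c(0, 0), Dmat = diag(2, 2))
    linCons <- list(matrix(1, nrow = 1, ncol = 2), 1, 1)
    rsolveQP(objective, lower = 0, upper = 1, linCons,
        control = list(solver = "quadprog", invoke = "R"))
}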
.solveQP.MV.demo <-
function()
{
dataSet <- data("LPP2005REC", package="timeSeries", envir=environment())
LPP2005REC <- get(dataSet, envir=environment())
nAssets <- 6
data <- 100 * LPP2005REC[, 1:nAssets]
objective <- list(dvec=rep(0, nAssets), Dmat=cov(data))
lower <- 0
upper <- 1
mat <- rbind(
budget = rep(1, times=nAssets),
returns = colMeans(data))
    matLower <- c(
        budget = 1,
        returns = mean(data))
matUpper <- matLower
linCons <- list(mat, matLower, matUpper)
control <- list()
rquadprogQP(objective, lower, upper, linCons)
ripopQP(objective, lower, upper, linCons)
ampl <- ramplQP(objective, lower, upper, linCons)
ampl
for (solver in c(
"cplex", "donlp2", "loqo", "lpsolve", "minos", "snopt",
"ipopt", "bonmin", "couenne")) {
ans <- ramplQP(objective, lower, upper, linCons,
control=list(solver=solver))
print(ans)
}
neos <- rneosQP(objective, lower, upper, linCons,
control=list(solver="ipopt", category="nco"))
neos
for (solver in c(
"conopt", "filter", "knitro", "lancelot", "loqo", "minos", "mosek",
"pennon", "snopt"))
{
ans <- rneosQP(objective, lower, upper, linCons,
control=list(solver=solver, category="nco"))
print(ans)
}
kestrel <- rkestrelQP(objective, lower, upper, linCons,
control=list(solver="loqo"))
kestrel
} |
kpBars <- function(karyoplot, data=NULL, chr=NULL, x0=NULL, x1=x0, y1=NULL, y0=NULL,
ymin=NULL, ymax=NULL, data.panel=1, r0=NULL, r1=NULL,
clipping=TRUE, ...) {
  if(!methods::is(karyoplot, "KaryoPlot")) stop("In kpBars: 'karyoplot' must be a valid 'KaryoPlot' object")
  # If y0 is not given explicitly and cannot be taken from a 'y0' column in
  # 'data', default it to the data panel minimum so that bars start at the
  # bottom of the panel.
  if(is.null(y0)) {
    if(is.null(data)) {
      y0 <- karyoplot$plot.params[[paste0("data", data.panel, "min")]]
    } else {
      if(!("y0" %in% names(mcols(data)))) {
        y0 <- karyoplot$plot.params[[paste0("data", data.panel, "min")]]
      }
    }
  }
invisible(kpRect(karyoplot=karyoplot, data=data, chr=chr, x0=x0, x1=x1, y0=y0, y1=y1,
ymin=ymin, ymax=ymax, r0=r0, r1=r1,
data.panel=data.panel, clipping=clipping, ...))
} |
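# A hedged usage sketch for kpBars(). It assumes the karyoploteR package is
# installed and attached (providing plotKaryotype() and kpRect()); the demo
# function name '.kpBars.demo' and the coordinates/colours below are
# illustrative only.
.kpBars.demo <- function() {
  library(karyoploteR)
  kp <- plotKaryotype(genome = "hg19", chromosomes = "chr1")
  # Two bars on chr1; y values are read on the default 0-1 scale of data panel 1,
  # and y0 falls back to the panel minimum when omitted (see the default above).
  kpBars(kp, chr = c("chr1", "chr1"),
         x0 = c(10e6, 60e6), x1 = c(30e6, 90e6),
         y1 = c(0.4, 0.8), col = "#AACCFF")
}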
NumTiles <- structure(function
( lonR,
latR,
zoom =13,
CheckExistingFiles = TRUE,
tileExt = ".png",
tileDir= "~/mapTiles/OSM/",
verbose=0
){
nTiles=c(0,0)
if (!missing(lonR) & !missing(latR)) {
XYmin = LatLon2XY(lat=latR[1], lon=lonR[1],zoom=zoom)
XYmax = LatLon2XY(lat=latR[2], lon=lonR[2],zoom=zoom)
nTiles[1] = abs(XYmax$Tile[1,1]-XYmin$Tile[1,1])+1
nTiles[2] = abs(XYmax$Tile[1,2]-XYmin$Tile[1,2])+1
center = c(lat=mean(latR),lon=mean(lonR))
}
  if (CheckExistingFiles) {
    # Locate the tile containing the centre of the requested region; step to the
    # neighbouring tile when the within-tile pixel coordinate exceeds 256.
    XY = LatLon2XY(lat=center["lat"], lon=center["lon"],zoom=zoom)
    tileXY = XY$Tile + as.numeric(XY$Coords > 256)
if (nTiles[1] %% 2 == 0) {
X = (tileXY[1,1]-nTiles[1]/2):(tileXY[1,1]+nTiles[1]/2-1);
} else {
X = (tileXY[1,1]-(nTiles[1]-1)/2):(tileXY[1,1]+(nTiles[1]-1)/2);
}
if (nTiles[2] %% 2 == 0) {
Y = (tileXY[1,2]-nTiles[2]/2):(tileXY[1,2]+nTiles[2]/2-1);
} else {
Y = (tileXY[1,2]-(nTiles[2]-1)/2):(tileXY[1,2]+(nTiles[2]-1)/2);
}
fNameServer = function(x,y) paste0(paste(zoom, x, y, sep="_"),tileExt)
fList = as.vector(outer(X,Y,FUN = fNameServer))
ExistingFiles=list.files(path=tileDir)
fExist = sum(fList %in% ExistingFiles)
cat("still need to download", prod(nTiles)-fExist,"tiles from ", prod(nTiles), "requested files.\n")
}
return(nTiles)
}, ex = function(){
  if (FALSE) {
for (zoom in 4:15) {
cat("OSM, zoom =", zoom, "\n")
NumTiles(lonR=c(-135,-66), latR=c(25,54) , zoom=zoom)
}
for (zoom in 4:15) {
cat("Google, zoom =", zoom, "\n")
NumTiles(lonR=c(-135,-66), latR=c(25,54) , zoom=zoom, tileDir= "~/mapTiles/Google/")
}
}
}) |
safely_transform_data <- function(safe_extractor, data, verbose = TRUE) {
  if (!inherits(safe_extractor, "safe_extractor")) {
    stop(paste0("No applicable method for 'safely_transform_data' applied to an object of class '", class(safe_extractor)[1], "'."))
  }
if (is.null(data)) {
stop("No data provided!")
}
row_ind <- data.frame(row_ind = 1:nrow(data))
data <- cbind(row_ind, data)
term_names <- names(safe_extractor$variables_info)
term_names <- intersect(term_names, colnames(data))
if (verbose == TRUE) {
pb <- txtProgressBar(min = 0, max = length(term_names), style = 3)
}
for (var_temp in term_names) {
temp_info <- safe_extractor$variables_info[[var_temp]]
if (is.null(temp_info$new_levels)) {
if (verbose == TRUE) {
setTxtProgressBar(pb, which(term_names == var_temp))
}
next
}
new_var_name <- paste0(var_temp, "_new")
if (temp_info$type == "categorical") {
data <- merge(data, temp_info$new_levels, by = var_temp)
data[,new_var_name] <- as.factor(data[,new_var_name])
levels(data[,new_var_name]) <- c(levels(data[,new_var_name]),
setdiff(unique(temp_info$new_levels[,new_var_name]),
levels(data[,new_var_name])))
} else {
data[,new_var_name] <- sapply(data[,var_temp],
function(x) which.max(x<=c(temp_info$break_points, Inf)))
data[,new_var_name] <- sapply(data[,new_var_name],
function(x) temp_info$new_levels[x])
data[,new_var_name] <- as.factor(data[,new_var_name])
levels(data[,new_var_name]) <- c(levels(data[,new_var_name]),
setdiff(temp_info$new_levels, levels(data[,new_var_name])))
}
if (verbose == TRUE) {
setTxtProgressBar(pb, which(term_names == var_temp))
}
}
if (verbose == TRUE) {
close(pb)
}
data <- data[order(data$row_ind), colnames(data) != "row_ind"]
rownames(data) <- 1:nrow(data)
interaction_effects <- safe_extractor$interaction_effects
if (! is.null(interaction_effects)) {
for (i in 1:nrow(interaction_effects)) {
var1 <- interaction_effects$variable1[i]
var2 <- interaction_effects$variable2[i]
if (all(c(paste0(var1, "_new"), paste0(var2, "_new")) %in% colnames(data))) {
data[, paste0("interaction_", var1, "_", var2)] <-
interaction(data[, c(paste0(var1, "_new"), paste0(var2, "_new"))])
}
}
}
return(data)
} |
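# A hedged usage sketch for safely_transform_data(). It assumes the rSAFE and
# DALEX packages are available, with rSAFE::safe_extraction() building the
# 'safe_extractor' object from a DALEX explainer; the lm model on the DALEX
# 'apartments' data and the name '.safely_transform_data.demo' are illustrative
# assumptions only.
.safely_transform_data.demo <- function() {
  library(DALEX)
  library(rSAFE)
  model <- lm(m2.price ~ ., data = apartments)
  explainer <- explain(model, data = apartments[, -1], y = apartments$m2.price,
                       verbose = FALSE)
  safe_extractor <- safe_extraction(explainer, verbose = FALSE)
  # Adds the discretised/regrouped "_new" columns alongside the original variables.
  head(safely_transform_data(safe_extractor, apartments, verbose = FALSE))
}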