calc_edges <- function() {
vect <- execGRASS("g.list",
parameters = list(
type = "vector"
),
intern = TRUE)
if (!"streams_v" %in% vect)
stop("Missing data. Did you run derive_streams()?")
cnames<-execGRASS("db.columns",
parameters = list(
table = "streams_v"
), intern=T)
if("prev_str03" %in% cnames){
stop("There are complex confluences in the stream network. Please run correct_compl_confluences() for correction.")
}
temp_dir <- tempdir()
execGRASS("g.copy",
flags = c("overwrite", "quiet"),
parameters = list(
vector = "streams_v,edges"))
execGRASS("v.to.rast", flags = c("overwrite", "quiet"),
parameters = list(
input = "streams_v",
type = "line",
output = "streams_r",
use = "attr",
attribute_column = "stream"
))
message("Calculating reach contributing area (RCA) ...")
execGRASS("r.stream.basins",
flags = c("overwrite", "quiet"),
parameters = list(direction = "dirs",
stream_rast = "streams_r",
basins = "rca"))
message("Calculating upstream catchment areas ...")
areas <- do.call(rbind,
strsplit(execGRASS("r.stats",
flags = c("a", "quiet"),
parameters = list(input = "rca"),
intern = TRUE),
split = ' '))
areas <- as.data.frame(areas[-nrow(areas), ], stringsAsFactors = FALSE)
setDT(areas)
setnames(areas, names(areas),c("stream","area"))
areas[, names(areas) := lapply(.SD, as.numeric)]
dt.streams <- do.call(rbind,strsplit(
execGRASS("db.select",
parameters = list(
sql = "select stream, next_str, prev_str01,prev_str02 from edges"
),intern = T),
split = '\\|'))
colnames(dt.streams) <- dt.streams[1,]
dt.streams <- data.table(dt.streams[-1,, drop = FALSE], "total_area" = 0, "netID" = -1)
dt.streams[, names(dt.streams) := lapply(.SD, as.numeric)]
dt.streams<-merge(dt.streams, areas, by = "stream", all = T)
setkey(dt.streams, stream)
dt.streams[is.na(area), area := 0 ]
outlets <- dt.streams[next_str == -1, stream]
netID <- 1
for(i in outlets){
calcCatchmArea_assignNetID(dt.streams, id=i, netID)
netID <- netID + 1
}
dt.streams[, area := round(area / 1000000, 6)]
dt.streams[, total_area := round(total_area / 1000000, 6)]
dt.streams[, rid := seq_len(nrow(dt.streams)) - 1]
dt.streams[, OBJECTID := stream]
dt.streams[, ":=" (next_str = NULL, prev_str01 = NULL, prev_str02 = NULL)]
utils::write.csv(dt.streams, file.path(temp_dir, "stream_network.csv"), row.names = F)
dtype <- t(gsub("numeric", "Integer", sapply(dt.streams, class)))
dtype[,c("total_area","area")] <- c("Real", "Real")
write.table(dtype, file.path(temp_dir, "stream_network.csvt"), quote = T, sep = ",", row.names = F, col.names = F)
execGRASS("db.in.ogr", flags = c("overwrite","quiet"),
parameters = list(
input = file.path(temp_dir, "stream_network.csv"),
output = "stream_network"
),ignore.stderr = T)
execGRASS("v.db.join", flags = "quiet",
parameters = list(
map = "edges",
column = "stream",
other_table = "stream_network",
other_column = "stream"
))
execGRASS("v.db.renamecolumn",
parameters = list(
map = "edges",
column = "length,segLength"
))
execGRASS("v.db.renamecolumn",
parameters = list(
map = "edges",
column = "segLength,Length"
))
execGRASS("v.db.update", flags = c("quiet"),
parameters = list(
map = "edges",
column = "Length",
value = "round(Length, 2)"
))
execGRASS("v.db.dropcolumn", flags = "quiet",
parameters = list(
map = "edges",
columns = "cum_length"
))
execGRASS("v.db.renamecolumn", flags = "quiet",
parameters = list(
map = "edges",
column = "out_dist,upDist"
))
execGRASS("v.db.update", flags = c("quiet"),
parameters = list(
map = "edges",
column = "upDist",
value = "round(upDist, 2)"
))
execGRASS("v.db.renamecolumn", flags = "quiet",
parameters = list(
map = "edges",
column = "total_area,H2OArea"
))
execGRASS("v.db.renamecolumn", flags = "quiet",
parameters = list(
map = "edges",
column = "area,rcaArea"
))
execGRASS("db.droptable", flags = c("quiet","f"),
parameters = list(
table = "stream_network"
))
}
calcCatchmArea_assignNetID <- function(dt, id, net_ID){
if(dt[stream == id, prev_str01] == 0){
dt[stream == id, total_area := area]
dt[stream == id, netID := net_ID]
} else {
a1 <- calcCatchmArea_assignNetID(dt, dt[stream == id, prev_str01], net_ID)
a2 <- calcCatchmArea_assignNetID(dt, dt[stream == id, prev_str02], net_ID)
dt[stream == id, total_area := a1 + a2 + dt[stream == id, area]]
dt[stream == id, netID := net_ID]
}
return(dt[stream == id, total_area])
}
get_cats_edges_in_catchment<-function(dt, str_id){
if(dt[stream == str_id, prev_str01] == 0){
return(dt[stream == str_id, cat])
} else {
a1 <- get_cats_edges_in_catchment(dt = dt, dt[stream == str_id, prev_str01])
a2 <- get_cats_edges_in_catchment(dt = dt, dt[stream == str_id, prev_str02])
return(c(dt[stream == str_id, cat], a1, a2))
}
}
get_streams_edges_in_catchment<-function(dt, str_id){
if(dt[stream == str_id, prev_str01] == 0){
return(dt[stream == str_id, stream])
} else {
a1 <- get_streams_edges_in_catchment(dt = dt, dt[stream == str_id, prev_str01])
a2 <- get_streams_edges_in_catchment(dt = dt, dt[stream == str_id, prev_str02])
return(c(dt[stream == str_id, stream], a1, a2))
}
}
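# --- Illustrative usage of the recursive catchment helpers (not part of the
# --- original source; the toy network below is hypothetical) ---
# Segments 1 and 2 are headwaters that both drain into segment 3 (the outlet);
# calcCatchmArea_assignNetID() accumulates the upstream area by reference.
library(data.table)
toy <- data.table(stream     = 1:3,
                  next_str   = c(3L, 3L, -1L),
                  prev_str01 = c(0L, 0L, 1L),
                  prev_str02 = c(0L, 0L, 2L),
                  area       = c(2, 3, 1),
                  total_area = 0,
                  netID      = -1)
setkey(toy, stream)
calcCatchmArea_assignNetID(toy, id = 3, net_ID = 1)
# toy$total_area is now c(2, 3, 6) and toy$netID is c(1, 1, 1): the outlet's
# catchment equals its own RCA plus both tributaries.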
library(testthat)
library(EnvStats)
q <- qnormMix(.95,mean1=-1,sd1=1, mean2=4,sd2=1, p.mix=.2)
expect_identical(round(q,2),4.67)
q <- qnormMix(.95,mean1=-1.5,sd1=1, mean2=6,sd2=1, p.mix=.2)
expect_identical(round(q,2),6.67)
q <- qnormMix(.95,mean1=-5,sd1=1, mean2=5,sd2=1, p.mix=.5)
expect_identical(round(q,2),6.28)
q <- qnormMix(0.5, 10, 2, 20, 2, 0.1)
expect_identical(round(q,2),10.28)
get.duplicated <- function(x) {
c(apply(x, 2, function(xx) {
1 * !all(xx == xx[1])
}))
}
get.tdc.cov <- function(dta) {
x <- dta[, !(colnames(dta) %in% c("id", "start", "stop", "event")), drop = FALSE]
id <- dta$id
id.unq <- sort(unique(id))
tdcm <- do.call(rbind, mclapply(id.unq, function(ii) {
get.duplicated(x[id == ii,, drop = FALSE])
}))
c(apply(tdcm, 2, function(xx) {
1 * !all(xx == 0)
}))
}
get.tdc.subj.time <- function(dta) {
if (sum(get.tdc.cov(dta)) == 0) {
return(rep(0, nrow(dta)))
}
x <- dta[, !(colnames(dta) %in% c("id", "start", "stop", "event")), drop = FALSE]
id <- dta$id
id.unq <- unique(id)
tdcm <- do.call(rbind, mclapply(id.unq, function(ii) {
get.duplicated(x[id == ii,, drop = FALSE])
}))
c(apply(tdcm, 1, function(xx) {
1 * any(xx != 0)
}))
}
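# --- Illustrative usage (hypothetical counting-process data, not from the
# --- original source) ---
# get.tdc.cov() flags covariates that change within a subject across rows;
# get.tdc.subj.time() flags, per unique id, whether any covariate does so.
# mclapply() comes from the parallel package; on Windows set
# options(mc.cores = 1L) because forking is unavailable there.
library(parallel)
dta <- data.frame(id    = c(1, 1, 2, 2),
                  start = c(0, 5, 0, 3),
                  stop  = c(5, 9, 3, 8),
                  event = c(0, 1, 0, 0),
                  age   = c(60, 60, 55, 55),   # constant within each subject
                  dose  = c(10, 20, 15, 15))   # changes for subject 1 only
get.tdc.cov(dta)        # expected: age = 0, dose = 1
get.tdc.subj.time(dta)  # expected: c(1, 0), one flag per id (1 then 2)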
trimSpace <- function(string) {
stringTrim <- sapply(string, function(x) {
x <- sub("^\\s*", "", x, perl=TRUE)
x <- sub("\\s*$","", x, perl=TRUE)
return(x)
}, USE.NAMES=FALSE)
return(stringTrim)
}
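# --- Illustrative usage (not part of the original source) ---
# trimSpace() strips leading and trailing whitespace from each element.
trimSpace(c("  f1 BY x1 x2 ", "\tx3  "))
# expected: c("f1 BY x1 x2", "x3")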
joinRegexExpand <- function(cmd, argExpand, matches, iterator, matchLength="match.length") {
if (iterator == 1 && matches[iterator] > 1) {
pre <- substr(cmd, 1, matches[iterator] - 1)
} else pre <- ""
post.end <- ifelse(iterator < length(matches), matches[iterator+1] - 1, nchar(cmd))
post <- substr(cmd, matches[iterator] + attr(matches, matchLength)[iterator], post.end)
cmd.expand <- paste(pre, argExpand, post, sep="")
return(cmd.expand)
}
expandCmd <- function(cmd, alphaStart=TRUE) {
hyphens <- gregexpr("(?!<(\\*|\\.))\\w+(?!(\\*|\\.))\\s*-\\s*(?!<(\\*|\\.))\\w+(?!(\\*|\\.))(@[\\d\\.-]+)?", cmd, perl=TRUE)[[1]]
if (hyphens[1L] > 0) {
cmd.expand <- c()
ep <- 1
for (v in 1:length(hyphens)) {
argsplit <- strsplit(substr(cmd, hyphens[v], hyphens[v] + attr(hyphens, "match.length")[v] - 1), "\\s*-\\s*", perl=TRUE)[[1]]
v_pre <- argsplit[1]
v_post <- argsplit[2]
v_post.suffix <- sub("^([^@]+)(@[\\d\\-.]+)?$", "\\2", v_post, perl=TRUE)
v_post <- sub("@[\\d\\-.]+$", "", v_post, perl=TRUE)
v_pre.alpha <- sub("\\d+$", "", v_pre, perl=TRUE)
v_post.alpha <- sub("\\d+$", "", v_post, perl=TRUE)
if (length(v_pre.alpha) > 0L && length(v_post.alpha) > 0L) {
if (v_pre.alpha != v_post.alpha) { return(cmd) }
}
v_pre.num <- as.integer(sub("\\w*(?<=[A-Za-z_])(\\d+)$", "\\1", v_pre, perl=TRUE))
v_post.match <- regexpr("^(?:\\w*(?<=[A-Za-z_])(\\d+)|(\\d+))$", v_post, perl=TRUE)
stopifnot(v_post.match[1L] > 0)
whichCapture <- which(attr(v_post.match, "capture.start") > 0)
v_post.num <- as.integer(substr(v_post, attr(v_post.match, "capture.start")[whichCapture], attr(v_post.match, "capture.start")[whichCapture] + attr(v_post.match, "capture.length")[whichCapture] - 1))
v_post.prefix <- substr(v_post, 1, attr(v_post.match, "capture.start")[whichCapture] - 1)
if (is.na(v_pre.num) || is.na(v_post.num)) stop("Cannot expand variables: ", v_pre, ", ", v_post)
v_expand <- paste(v_post.prefix, v_pre.num:v_post.num, v_post.suffix, sep="", collapse=" ")
cmd.expand[ep] <- joinRegexExpand(cmd, v_expand, hyphens, v)
ep <- ep + 1
}
return(paste(cmd.expand, collapse=""))
} else {
return(cmd)
}
}
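# --- Illustrative usage (not part of the original source) ---
# expandCmd() expands Mplus-style hyphenated variable ranges into explicit
# variable lists; the model fragment below is hypothetical.
expandCmd("f1 BY y1-y4")
# expected: "f1 BY y1 y2 y3 y4"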
parseFixStart <- function(cmd) {
cmd.parse <- c()
ep <- 1L
cmd <- gsub("([A-z]+\\w*)\\s*\\*(?=\\s+[A-z]+|\\s*$)", "\\1*1", cmd, perl=TRUE)
if ((fixed.starts <- gregexpr("[\\w\\.-]+\\s*([@*])\\s*[\\w\\.-]+", cmd, perl=TRUE)[[1]])[1L] > 0) {
for (f in 1:length(fixed.starts)) {
opchar <- substr(cmd, attr(fixed.starts, "capture.start")[f], attr(fixed.starts, "capture.start")[f] + attr(fixed.starts, "capture.length")[f] - 1)
argsplit <- strsplit(substr(cmd, fixed.starts[f], fixed.starts[f] + attr(fixed.starts, "match.length")[f] - 1), paste0("\\s*", ifelse(opchar=="*", "\\*", opchar), "\\s*"), perl=TRUE)[[1]]
v_pre <- argsplit[1]
v_post <- argsplit[2]
if (suppressWarnings(is.na(as.numeric(v_pre)))) {
var <- v_pre
val <- v_post
} else if (suppressWarnings(is.na(as.numeric(v_post)))) {
var <- v_post
val <- v_pre
} else stop("Cannot parse Mplus fixed/starts values specification: ", v_pre, v_post)
if (opchar == "@") {
cmd.parse[ep] <- joinRegexExpand(cmd, paste0(val, "*", var, sep=""), fixed.starts, f)
ep <- ep + 1L
} else {
cmd.parse[ep] <- joinRegexExpand(cmd, paste0("start(", val, ")*", var, sep=""), fixed.starts, f)
ep <- ep + 1L
}
}
return(paste(cmd.parse, collapse=""))
} else {
return(cmd)
}
}
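# --- Illustrative usage (not part of the original source) ---
# parseFixStart() rewrites Mplus fixed values (@) and starting values (*)
# as lavaan-style modifiers on a hypothetical loading statement.
parseFixStart("f1 BY x1@1 x2*0.5")
# expected: "f1 BY 1*x1 start(0.5)*x2"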
parseConstraints <- function(cmd) {
cmd.split <- strsplit(cmd, "\n")[[1]]
cmd.split <- if(length(emptyPos <- which(cmd.split == "")) > 0L) { cmd.split[-1*emptyPos] } else { cmd.split }
cmd.nomodifiers <- paste0(gsub("(start\\([^\\)]+\\)\\*|[\\d\\-\\.]+\\*)", "", cmd.split, perl=TRUE), collapse=" ")
cmd.nomodifiers <- gsub("\\([^\\)]+\\)", "", cmd.nomodifiers, perl=TRUE)
cmd.tojoin <- c()
for (n in 1:length(cmd.split)) {
if ((parens <- gregexpr("(?<!start)\\(([^\\)]+)\\)", cmd.split[n], perl=TRUE)[[1L]])[1L] > 0) {
cmd.expand <- c()
for (p in 1:length(parens)) {
constraints <- substr(cmd.split[n], attr(parens, "capture.start")[p], attr(parens, "capture.start")[p] + attr(parens, "capture.length")[p] - 1)
con.split <- strsplit(trimSpace(constraints), "\\s+", perl=TRUE)[[1]]
con.split <- sapply(con.split, function(x) {
if (! suppressWarnings(is.na(as.numeric(x)))) {
make.names(paste0(".con", x))
} else { x }
})
prestrStart <- ifelse(p > 1, attr(parens, "capture.start")[p-1] + attr(parens, "capture.length")[p-1] + 1, 1)
precmd.split <- strsplit(trimSpace(substr(cmd.split[n], prestrStart, parens[p] - 1)), "\\s+", perl=TRUE)[[1]]
precmdLHSOp <- which(tolower(precmd.split) %in% c("by", "with", "on"))
if (any(precmdLHSOp)) {
lhsop <- paste0(precmd.split[1:precmdLHSOp[1L]], " ", collapse=" ")
rhs <- precmd.split[(precmdLHSOp+1):length(precmd.split)]
} else {
lhsop <- ""
rhs <- precmd.split
}
if (length(con.split) > 1L) {
rhs.backmatch <- rhs[(length(rhs)-length(con.split)+1):length(rhs)]
rhs.expand <- c()
if ((preMark.match <- regexpr("^\\s*[\\[\\{]", rhs.backmatch[1L], perl=TRUE))[1L] > 0) {
preMark <- substr(rhs.backmatch[1L], preMark.match[1L], preMark.match[1L] + attr(preMark.match, "match.length")[1L] - 1)
rhs.backmatch[1L] <- substr(rhs.backmatch[1L], preMark.match[1L] + attr(preMark.match, "match.length")[1L], nchar(rhs.backmatch[1L]))
} else { preMark <- "" }
if ((postMark.match <- regexpr("[\\]\\}]\\s*$", rhs.backmatch[length(rhs.backmatch)], perl=TRUE))[1L] > 0) {
postMark <- substr(rhs.backmatch[length(rhs.backmatch)], postMark.match[1L], nchar(rhs.backmatch[length(rhs.backmatch)]))
rhs.backmatch[length(rhs.backmatch)] <- substr(rhs.backmatch[length(rhs.backmatch)], 1, postMark.match[1L] - 1)
} else { postMark <- "" }
for (i in 1:length(rhs.backmatch)) {
rhs.expand[i] <- paste0(con.split[i], "*", rhs.backmatch[i])
}
rhs.expand <- paste0(preMark, paste(rhs.expand, collapse=" "), postMark)
if (length(rhs) - length(con.split) > 0L) {
cmd.expand <- c(cmd.expand, paste(lhsop, paste(rhs[1:(length(rhs)-length(con.split))], collapse=" "), rhs.expand))
} else {
cmd.expand <- c(cmd.expand, paste0(lhsop, rhs.expand))
}
} else {
if ((preMark.match <- regexpr("^\\s*[\\[\\{]", rhs[1L], perl=TRUE))[1L] > 0) {
preMark <- substr(rhs[1L], preMark.match[1L], preMark.match[1L] + attr(preMark.match, "match.length")[1L] - 1)
rhs[1L] <- substr(rhs[1L], preMark.match[1L] + attr(preMark.match, "match.length")[1L], nchar(rhs[1L]))
} else { preMark <- "" }
if ((postMark.match <- regexpr("[\\]\\}]\\s*$", rhs[length(rhs)], perl=TRUE))[1L] > 0) {
postMark <- substr(rhs[length(rhs)], postMark.match[1L], nchar(rhs[length(rhs)]))
rhs[length(rhs)] <- substr(rhs[length(rhs)], 1, postMark.match[1L] - 1)
} else { postMark <- "" }
rhs.expand <- c()
for (i in 1:length(rhs)) {
rhs.expand[i] <- paste0(con.split[1L], "*", rhs[i])
}
rhs.expand <- paste0(preMark, paste(rhs.expand, collapse=" "), postMark)
cmd.expand <- c(cmd.expand, paste0(lhsop, rhs.expand))
}
}
cmd.tojoin[n] <- paste(cmd.expand, collapse=" ")
} else { cmd.tojoin[n] <- cmd.split[n] }
}
toReturn <- paste(cmd.tojoin, collapse=" ")
attr(toReturn, "noModifiers") <- cmd.nomodifiers
return(toReturn)
}
expandGrowthCmd <- function(cmd) {
if (any(tolower(strsplit(cmd, "\\s+", perl=TRUE)[[1]]) %in% c("on", "at"))) {
stop("lavaan does not support random slopes or individually varying growth model time scores")
}
cmd.split <- strsplit(cmd, "\\s*\\|\\s*", perl=TRUE)[[1]]
if (!length(cmd.split) == 2) stop("Unknown growth syntax: ", cmd)
lhs <- cmd.split[1]
lhs.split <- strsplit(lhs, "\\s+", perl=TRUE)[[1]]
rhs <- cmd.split[2]
rhs.split <- strsplit(rhs, "(\\*|\\s+)", perl=TRUE)[[1]]
if (length(rhs.split) %% 2 != 0) stop("Number of variables and number of tscores does not match: ", rhs)
tscores <- as.numeric(rhs.split[1:length(rhs.split) %% 2 != 0])
vars <- rhs.split[1:length(rhs.split) %% 2 == 0]
cmd.expand <- c()
for (p in 0:(length(lhs.split)-1)) {
if (p == 0) {
cmd.expand <- c(cmd.expand, paste(lhs.split[(p+1)], "=~", paste("1*", vars, sep="", collapse=" + ")))
} else {
cmd.expand <- c(cmd.expand, paste(lhs.split[(p+1)], "=~", paste(tscores^p, "*", vars, sep="", collapse=" + ")))
}
}
return(cmd.expand)
}
wrapAfterPlus <- function(cmd, width=90, exdent=5) {
result <- lapply(cmd, function(line) {
if (nchar(line) > width) {
split <- c()
spos <- 1L
plusMatch <- gregexpr("+", line, fixed=TRUE)[[1]]
mpos <- 1L
if (plusMatch[1L] > 0L) {
charsRemain <- nchar(line)
while(charsRemain > 0L) {
toProcess <- substr(line, nchar(line) - charsRemain + 1, nchar(line))
offset <- nchar(line) - charsRemain + 1
if (nchar(remainder <- substr(line, offset, nchar(line))) <= (width - exdent)) {
split[spos] <- remainder
charsRemain <- 0
} else {
wrapAt <- which(plusMatch < (width + offset - exdent))
wrapAt <- wrapAt[length(wrapAt)]
split[spos] <- substr(line, offset, plusMatch[wrapAt])
charsRemain <- charsRemain - nchar(split[spos])
spos <- spos + 1
}
}
split <- trimSpace(split)
split <- sapply(1:length(split), function(x) {
if (x > 1) paste0(paste(rep(" ", exdent), collapse=""), split[x])
else split[x]
})
return(split)
} else {
return(strwrap(line, width=width, exdent=exdent))
}
} else {
return(line)
}
})
return(unname(do.call(c, result)))
}
mplus2lavaan.constraintSyntax <- function(syntax) {
syntax <- paste(lapply(trimSpace(strsplit(syntax, "\n")), function(x) { if (length(x) == 0L && is.character(x)) "" else x}), collapse="\n")
syntax <- gsub("(\\s*)!(.+)\n", "\\1
syntax.split <- gsub("(^\n|\n$)", "", unlist( strsplit(syntax, ";") ), perl=TRUE)
constraint.out <- c()
new.parameters <- c()
if (length(new.con.lines <- grep("^\\s*NEW\\s*\\([^\\)]+\\)", syntax.split, perl=TRUE, ignore.case=TRUE)) > 0L) {
for (cmd in syntax.split[new.con.lines]) {
new.con <- regexpr("^\\s*NEW\\s*\\(([^\\)]+)\\)", cmd, perl=TRUE, ignore.case=TRUE)
if (new.con[1L] == -1) stop("Unable to parse names of new constraints")
new.con <- substr(cmd, attr(new.con, "capture.start"), attr(new.con, "capture.start") + attr(new.con, "capture.length") - 1L)
new.con <- expandCmd(new.con)
new.parameters <- c(new.parameters, strsplit(trimSpace(new.con), "\\s+", perl=TRUE)[[1L]])
}
syntax.split <- syntax.split[-1L * new.con.lines]
parameters.undefined <- new.parameters
}
for (cmd in syntax.split) {
if (grepl("^\\s*
constraint.out <- c(constraint.out , gsub("\n", "", cmd, fixed=TRUE))
} else if (grepl("^\\s+$", cmd, perl=TRUE)) {
} else {
cmd <- gsub("**", "^", cmd, fixed=TRUE)
maths <- gregexpr("(SQRT|LOG|LOG10|EXP|ABS|SIN|COS|TAN|ASIN|ACOS|ATAN)\\s*\\(", cmd, perl=TRUE)[[1L]]
if (maths[1L] > 0) {
maths.replace <- c()
ep <- 1
for (i in 1:length(maths)) {
operator <- tolower(substr(cmd, attr(maths, "capture.start")[i], attr(maths, "capture.start")[i] + attr(maths, "capture.length")[i] - 1))
maths.replace[ep] <- joinRegexExpand(cmd, operator, maths, i, matchLength="capture.length")
ep <- ep + 1
}
cmd <- paste(maths.replace, collapse="")
}
if ((equals <- regexpr("=", cmd, fixed=TRUE))[1L] > 0) {
lhs <- trimSpace(substr(cmd, 1, equals - 1))
rhs <- trimSpace(substr(cmd, equals + attr(equals, "match.length"), nchar(cmd)))
if (regexpr("\\s+", lhs, perl=TRUE)[1L] > 0L) {
def <- rhs
body <- lhs
} else if (regexpr("\\s+", rhs, perl=TRUE)[1L] > 0L) {
def <- lhs
body <- rhs
} else {
def <- lhs
body <- rhs
}
if (def %in% new.parameters && def %in% parameters.undefined) {
constraint.out <- c(constraint.out, paste(def, ":=", body))
parameters.undefined <- parameters.undefined[!parameters.undefined==def]
} else {
constraint.out <- c(constraint.out, paste(def, "==", body))
}
} else {
constraint.out <- c(constraint.out, cmd)
}
}
}
wrap <- paste(wrapAfterPlus(constraint.out, width=90, exdent=5), collapse="\n")
return(wrap)
}
mplus2lavaan.modelSyntax <- function(syntax) {
if (is.character(syntax)) {
if (length(syntax) > 1L) { syntax <- paste(syntax, collapse="\n") }
} else {
stop("mplus2lavaan.modelSyntax accepts a single character string or character vector containing all model syntax")
}
by_line <- strsplit(syntax, "\r?\n", perl=TRUE)[[1]]
inputHeaders <- grep("^\\s*(title:|data.*:|variable:|define:|analysis:|model.*:|output:|savedata:|plot:|montecarlo:)", by_line, ignore.case=TRUE, perl=TRUE)
con_syntax <- c()
if (length(inputHeaders) > 0L) {
parsed_syntax <- divideInputIntoSections(by_line, "local")
if ("model.constraint" %in% names(parsed_syntax)) {
con_syntax <- strsplit(mplus2lavaan.constraintSyntax(parsed_syntax$model.constraint), "\n")[[1]]
}
syntax <- parsed_syntax$model
}
syntax <- paste(lapply(trimSpace(strsplit(syntax, "\n")), function(x) { if (length(x) == 0L && is.character(x)) "" else x}), collapse="\n")
syntax <- gsub("(\\s*)!(.+)\n*", "\\1
syntax.split <- trimSpace(unlist( strsplit(syntax, ";") ))
lavaan.out <- c()
for (cmd in syntax.split) {
if (grepl("^\\s*
lavaan.out <- c(lavaan.out, gsub("\n", "", cmd, fixed=TRUE))
} else if (grepl("^\\s*$", cmd, perl=TRUE)) {
} else {
cmd <- expandCmd(cmd)
cmd <- parseFixStart(cmd)
cmd <- parseConstraints(cmd)
if ((op <- regexpr("\\s+(by|on|with|pwith)\\s+", cmd, ignore.case=TRUE, perl=TRUE))[1L] > 0) {
lhs <- substr(cmd, 1, op - 1)
rhs <- substr(cmd, op + attr(op, "match.length"), nchar(cmd))
operator <- tolower(substr(cmd, attr(op, "capture.start"), attr(op, "capture.start") + attr(op, "capture.length") - 1))
if (operator == "by") { lav.operator <- "=~"
} else if (operator == "with" || operator == "pwith") { lav.operator <- "~~"
} else if (operator == "on") { lav.operator <- "~" }
lhs.split <- strsplit(lhs, "\\s+")[[1]]
if (operator == "pwith") {
rhs.split <- strsplit(rhs, "\\s+")[[1]]
if (length(lhs.split) != length(rhs.split)) { stop("PWITH command does not have the same number of arguments on the left and right sides.") }
cmd <- sapply(1:length(lhs.split), function(i) paste(lhs.split[i], lav.operator, rhs.split[i]))
} else {
rhs <- gsub("(?<!\\+)\\s+(?!\\+)", " + ", rhs, perl=TRUE)
if (length(lhs.split) > 1L) {
cmd <- sapply(lhs.split, function(larg) {
pair <- paste(larg, lav.operator, rhs)
return(pair)
})
} else {
cmd <- paste(lhs, lav.operator, rhs)
}
}
} else if ((means.scales <- regexpr("^\\s*([\\[\\{])([^\\]\\}]+)[\\]\\}]\\s*$", cmd, ignore.case=TRUE, perl=TRUE))[1L] > 0) {
operator <- substr(cmd, attr(means.scales, "capture.start")[1L], attr(means.scales, "capture.start")[1L] + attr(means.scales, "capture.length")[1L] - 1)
params <- substr(cmd, attr(means.scales, "capture.start")[2L], attr(means.scales, "capture.start")[2L] + attr(means.scales, "capture.length")[2L] - 1)
params.noModifiers <- sub("^\\s*[\\[\\{]([^\\]\\}]+)[\\]\\}]\\s*$", "\\1", attr(cmd, "noModifiers"), perl=TRUE)
means.scales.split <- strsplit(params, "\\s+")[[1]]
means.scales.noModifiers.split <- strsplit(params.noModifiers, "\\s+")[[1]]
if (operator == "[") {
cmd <- sapply(means.scales.split, function(v) {
if ((premult <- regexpr("([^\\*]+\\*[^\\*]+)\\*([^\\*]+)", v, perl=TRUE))[1L] > 0) {
modifier <- substr(v, attr(premult, "capture.start")[1L], attr(premult, "capture.start")[1L] + attr(premult, "capture.length")[1L] - 1)
paramName <- substr(v, attr(premult, "capture.start")[2L], attr(premult, "capture.start")[2L] + attr(premult, "capture.length")[2L] - 1)
paste0(paramName, " ~ ", modifier, "*1")
} else if ((premult <- regexpr("([^\\*]+)\\*([^\\*]+)", v, perl=TRUE))[1L] > 0) {
modifier <- substr(v, attr(premult, "capture.start")[1L], attr(premult, "capture.start")[1L] + attr(premult, "capture.length")[1L] - 1)
paramName <- substr(v, attr(premult, "capture.start")[2L], attr(premult, "capture.start")[2L] + attr(premult, "capture.length")[2L] - 1)
paste0(paramName, " ~ ", modifier, "*1")
} else {
paste(v, "~ 1")
}
})
} else if (operator == "{"){
cmd <- sapply(1:length(means.scales.split), function(v) paste(means.scales.noModifiers.split[v], "~*~", means.scales.split[v]))
} else { stop("What's the operator?!") }
} else if (grepl("|", cmd, fixed=TRUE)) {
cmd <- expandGrowthCmd(cmd)
} else {
vars.lhs <- strsplit(attr(cmd, "noModifiers"), "\\s+")[[1]]
vars.rhs <- strsplit(cmd, "\\s+")[[1]]
cmd <- sapply(1:length(vars.lhs), function(v) paste(vars.lhs[v], "~~", vars.rhs[v]))
}
cmd <- gsub("$", "|", cmd, fixed=TRUE)
double_asterisks <- grepl("\\s*[\\w\\(\\)\\.]+\\*[\\w\\(\\)\\.]+\\*[\\w\\(\\)\\.]+", cmd, perl=TRUE)
if (isTRUE(double_asterisks[1])) {
ss <- strsplit(cmd, "*", fixed=TRUE)[[1]]
if(length(ss) != 3) {
warning("problem interpreting double asterisk syntax: ", cmd)
} else {
cmd <- paste0(ss[1], "*", ss[3], " + ", ss[2], "*", ss[3])
}
}
lavaan.out <- c(lavaan.out, cmd)
}
}
lavaan.out <- c(lavaan.out, con_syntax)
wrap <- paste(wrapAfterPlus(lavaan.out, width=90, exdent=5), collapse="\n")
return(wrap)
}
mplus2lavaan <- function(inpfile, run=TRUE) {
stopifnot(length(inpfile) == 1L)
stopifnot(grepl("\\.inp$", inpfile, ignore.case=TRUE))
if (!file.exists(inpfile)) { stop("Could not find file: ", inpfile) }
inpfile.text <- scan(inpfile, what="character", sep="\n", strip.white=FALSE, blank.lines.skip=FALSE, quiet=TRUE)
sections <- divideInputIntoSections(inpfile.text, inpfile)
mplus.inp <- list()
mplus.inp$title <- trimSpace(paste(sections$title, collapse=" "))
mplus.inp$data <- divideIntoFields(sections$data, required="file")
mplus.inp$variable <- divideIntoFields(sections$variable, required="names")
mplus.inp$analysis <- divideIntoFields(sections$analysis)
meanstructure <- "default"
if(!is.null(mplus.inp$analysis$model)) {
if (tolower(mplus.inp$analysis$model) == "nomeanstructure") { meanstructure=FALSE }
}
information <- "default"
if(!is.null(mplus.inp$analysis$information)) {
information <- tolower(mplus.inp$analysis$information)
}
estimator <- "default"
if (!is.null(est <- mplus.inp$analysis$estimator)) {
if (toupper(est) == "MUML") warning("Mplus does not support MUML estimator. Using default instead.")
estimator <- est
if (!is.null(mplus.inp$variable$categorical) && toupper(substr(mplus.inp$analysis$estimator, 1, 2)) == "ML") {
warning("Lavaan does not yet support ML-based estimation for categorical data. Reverting to WLSMV")
estimator <- "WLSMV"
}
}
mplus.inp$variable$names <- strsplit(expandCmd(mplus.inp$variable$names), "\\s+", perl=TRUE)[[1]]
if (!is.null(mplus.inp$variable$categorical)) mplus.inp$variable$categorical <- strsplit(expandCmd(mplus.inp$variable$categorical), "\\s+", perl=TRUE)[[1]]
mplus.inp$model <- mplus2lavaan.modelSyntax(sections$model)
if ("model.constraint" %in% names(sections)) {
mplus.inp$model.constraint <- mplus2lavaan.constraintSyntax(sections$model.constraint)
mplus.inp$model <- paste(mplus.inp$model, mplus.inp$model.constraint, sep="\n")
}
mplus.inp$data <- readMplusInputData(mplus.inp, inpfile)
se="default"
bootstrap <- 1000L
test <- "default"
if (!is.null(mplus.inp$analysis$bootstrap)) {
boot.type <- "standard"
if ((boot.match <- regexpr("\\((\\w+)\\)", mplus.inp$analysis$bootstrap, perl=TRUE)) > 0L) {
boot.type <- tolower(substr(mplus.inp$analysis$bootstrap, attr(boot.match, "capture.start"), attr(boot.match, "capture.start") + attr(boot.match, "capture.length") - 1L))
}
if (boot.type == "residual") test <- "Bollen.Stine"
se <- "bootstrap"
if ((nboot.match <- regexpr("^\\s*(\\d+)", mplus.inp$analysis$bootstrap, perl=TRUE)) > 0L) {
bootstrap <- as.numeric(substr(mplus.inp$analysis$bootstrap, attr(nboot.match, "capture.start"), attr(nboot.match, "capture.start") + attr(nboot.match, "capture.length") - 1L))
}
}
if (run) {
fit <- sem(mplus.inp$model, data=mplus.inp$data, meanstructure=meanstructure, mimic="Mplus", estimator=estimator, test=test, se=se, bootstrap=bootstrap, information=information)
fit@external <- list(mplus.inp=mplus.inp)
} else {
fit <- mplus.inp
}
return(fit)
}
divideIntoFields <- function(section.text, required) {
if (is.null(section.text)) { return(NULL) }
section.text <- gsub("\\s*!.*$", "", section.text, perl=TRUE)
section.split <- strsplit(paste(section.text, collapse=" "), ";", fixed=TRUE)[[1]]
section.divide <- list()
for (cmd in section.split) {
if (grepl("^\\s*!.*", cmd, perl=TRUE)) next
if (grepl("^\\s+$", cmd, perl=TRUE)) next
if ( (leadingEquals <- regexpr("^\\s*[A-Za-z]+[A-Za-z_-]*\\s*(=)", cmd[1L], perl=TRUE))[1L] > 0) {
cmdName <- trimSpace(substr(cmd[1L], 1, attr(leadingEquals, "capture.start") - 1))
cmdArgs <- trimSpace(substr(cmd[1L], attr(leadingEquals, "capture.start") + 1, nchar(cmd[1L])))
} else {
cmd.spacesplit <- strsplit(trimSpace(cmd[1L]), "\\s+", perl=TRUE)[[1L]]
if (length(cmd.spacesplit) < 2L) {
} else {
cmdName <- trimSpace(cmd.spacesplit[1L])
if (length(cmd.spacesplit) > 2L && tolower(cmd.spacesplit[2L]) %in% c("is", "are")) {
cmdArgs <- paste(cmd.spacesplit[3L:length(cmd.spacesplit)], collapse=" ")
} else {
cmdArgs <- paste(cmd.spacesplit[2L:length(cmd.spacesplit)], collapse=" ")
}
}
}
section.divide[[make.names(tolower(cmdName))]] <- cmdArgs
}
if (!missing(required)) { stopifnot(all(required %in% names(section.divide))) }
return(section.divide)
}
splitFilePath <- function(abspath) {
if (!is.character(abspath)) stop("Path not a character string")
if (nchar(abspath) < 1 || is.na(abspath)) stop("Path is missing or of zero length")
components <- strsplit(abspath, split="[\\/]")[[1]]
lcom <- length(components)
stopifnot(lcom > 0)
relFilename <- components[lcom]
absolute <- FALSE
if (lcom == 1) {
dirpart <- NA_character_
}
else if (lcom > 1) {
components <- components[-lcom]
dirpart <- do.call("file.path", as.list(components))
if (grepl("^([A-Z]{1}:|/|//|\\\\)+.*$", dirpart, perl=TRUE)) absolute <- TRUE
}
return(list(directory=dirpart, filename=relFilename, absolute=absolute))
}
readMplusInputData <- function(mplus.inp, inpfile) {
inpfile.split <- splitFilePath(inpfile)
datfile.split <- splitFilePath(mplus.inp$data$file)
if (!is.na(datfile.split$directory) && datfile.split$absolute)
datFile <- mplus.inp$data$file
else if (is.na(inpfile.split$directory))
datFile <- mplus.inp$data$file
else
datFile <- file.path(inpfile.split$directory, mplus.inp$data$file)
if (!file.exists(datFile)) {
warning("Cannot find data file: ", datFile)
return(NULL)
}
missList <- NULL
if (!is.null(missSpec <- mplus.inp$variable$missing)) {
expandMissVec <- function(missStr) {
missSplit <- strsplit(missStr, "\\s+")[[1L]]
missVals <- c()
for (f in missSplit) {
if ((hyphenPos <- regexpr("\\d+(-)\\d+", f, perl=TRUE))[1L] > -1L) {
preHyphen <- substr(f, 1, attr(hyphenPos, "capture.start") - 1)
postHyphen <- substr(f, attr(hyphenPos, "capture.start") + 1, nchar(f))
missVals <- c(missVals, as.character(seq(preHyphen, postHyphen)))
} else {
missVals <- c(missVals, f)
}
}
return(as.numeric(missVals))
}
if (missSpec == "." || missSpec=="*") {
na.strings <- missSpec
} else if ((allMatch <- regexpr("\\s*ALL\\s*\\(([^\\)]+)\\)", missSpec, perl=TRUE))[1L] > -1L) {
missStr <- trimSpace(substr(missSpec, attr(allMatch, "capture.start"), attr(allMatch, "capture.start") + attr(allMatch, "capture.length") - 1L))
na.strings <- expandMissVec(missStr)
} else {
missBlocks <- gregexpr("(?:(\\w+)\\s+\\(([^\\)]+)\\))+", missSpec, perl=TRUE)[[1]]
missList <- list()
if (missBlocks[1L] > -1L) {
for (i in 1:length(missBlocks)) {
vname <- substr(missSpec, attr(missBlocks, "capture.start")[i,1L], attr(missBlocks, "capture.start")[i,1L] + attr(missBlocks, "capture.length")[i,1L] - 1L)
vmiss <- substr(missSpec, attr(missBlocks, "capture.start")[i,2L], attr(missBlocks, "capture.start")[i,2L] + attr(missBlocks, "capture.length")[i,2L] - 1L)
vnameHyphen <- regexpr("(\\w+)-(\\w+)", vname, perl=TRUE)[1L]
if (vnameHyphen > -1L) {
vstart <- which(mplus.inp$variable$names == substr(vname, attr(vnameHyphen, "capture.start")[1L], attr(vnameHyphen, "capture.start")[1L] + attr(vnameHyphen, "capture.length")[1L] - 1L))
vend <- which(mplus.inp$variable$names == substr(vname, attr(vnameHyphen, "capture.start")[2L], attr(vnameHyphen, "capture.start")[2L] + attr(vnameHyphen, "capture.length")[2L] - 1L))
if (length(vstart) == 0L || length(vend) == 0L) { stop("Unable to lookup missing variable list: ", vname) }
if (vstart > vend) { vstart.orig <- vstart; vstart <- vend; vend <- vstart.orig }
vname <- mplus.inp$variable$names[vstart:vend]
}
missVals <- expandMissVec(vmiss)
for (j in 1:length(vname)) {
missList[[ vname[j] ]] <- missVals
}
}
} else { stop("I don't understand this missing specification: ", missSpec) }
}
} else { na.strings <- "NA" }
if (!is.null(missList)) {
dat <- read.table(datFile, header=FALSE, col.names=mplus.inp$variable$names, colClasses="numeric")
dat[,names(missList)] <- lapply(names(missList), function(vmiss) {
dat[which(dat[,vmiss] %in% missList[[vmiss]]), vmiss] <- NA
return(dat[,vmiss])
})
names(dat) <- mplus.inp$variable$names
} else {
dat <- read.table(datFile, header=FALSE, col.names=mplus.inp$variable$names, na.strings=na.strings, colClasses="numeric")
}
if (!is.null(mplus.inp$variable$categorical)) {
dat[,c(mplus.inp$variable$categorical)] <- lapply(dat[,c(mplus.inp$variable$categorical), drop=FALSE], ordered)
}
return(dat)
}
divideInputIntoSections <- function(inpfile.text, filename) {
inputHeaders <- grep("^\\s*(title:|data.*:|variable:|define:|analysis:|model.*:|output:|savedata:|plot:|montecarlo:)", inpfile.text, ignore.case=TRUE, perl=TRUE)
stopifnot(length(inputHeaders) > 0L)
mplus.sections <- list()
for (h in 1:length(inputHeaders)) {
sectionEnd <- ifelse(h < length(inputHeaders), inputHeaders[h+1] - 1, length(inpfile.text))
section <- inpfile.text[inputHeaders[h]:sectionEnd]
sectionName <- trimSpace(sub("^([^:]+):.*$", "\\1", section[1L], perl=TRUE))
section[1L] <- sub("^[^:]+:(.*)$", "\\1", section[1L], perl=TRUE)
mplus.sections[[make.names(tolower(sectionName))]] <- section
}
return(mplus.sections)
}
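# --- Illustrative usage (not part of the original source) ---
# mplus2lavaan.modelSyntax() translates an Mplus MODEL block into lavaan model
# syntax; the two-line Mplus model below is hypothetical.
cat(mplus2lavaan.modelSyntax("f1 BY y1-y3;\nf1 ON x1 x2;"))
# expected output:
# f1 =~ y1 + y2 + y3
# f1 ~ x1 + x2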
validate(need(!(input[["state_first"]] == input[["state_second"]]), "Please select two different states."))
if (input[["theory"]]) {
if (input[["calc_type"]] == "relative") {
wp <- differential_plot_theo()
} else {
wp <- differential_plot_theo_abs()
}
} else {
if (input[["calc_type"]] == "relative") {
wp <- differential_plot_exp()
} else {
wp <- differential_plot_exp_abs()
}
}
wp <- wp + coord_cartesian(xlim = c(input[["plot_x_range"]][[1]], input[["plot_x_range"]][[2]]),
ylim = c(input[["woods_plot_y_range"]][[1]], input[["woods_plot_y_range"]][[2]])) +
labs(title = input[["woods_plot_title"]],
x = input[["woods_plot_x_label"]],
y = input[["woods_plot_y_label"]]) |
CreateCalendarVariables <- function(data,
DateCols = NULL,
AsFactor = FALSE,
TimeUnits = "wday") {
data.table::setDTthreads(threads = max(1L, parallel::detectCores() - 2L))
if(!data.table::is.data.table(data)) data.table::setDT(data)
if(!is.logical(AsFactor)) {
print("AsFactor needs to be TRUE or FALSE")
return(data)
}
if(!(any(tolower(TimeUnits) %chin% c("second","minute","hour","wday","mday","yday","week","isoweek","wom","month","quarter","year")))) {
print("TimeUnits needs to be one of 'second', 'minute', 'hour', 'wday','mday', 'yday','week','wom','month', 'quarter', 'year'")
return(data)
}
for(i in seq_along(DateCols)) if(!is.character(DateCols[i])) DateCols[i] <- names(data)[DateCols[i]]
x <- 0L
TimeList <- list()
Cols <- c()
for(i in seq_len(length(DateCols))) {
if(any(TimeUnits %chin% c("second", "minute", "hour"))) {
if(min(data.table::as.ITime(data[[eval(DateCols[i])]])) - max(data.table::as.ITime(data[[eval(DateCols[i])]])) == 0L) {
TimeList[[i]] <- TimeUnits[!(tolower(TimeUnits) %chin% c("second", "minute", "hour"))]
Cols[i] <- length(TimeList[[i]])
} else {
TimeList[[i]] <- TimeUnits
Cols[i] <- length(TimeList[[i]])
}
} else {
TimeList[[i]] <- TimeUnits
Cols[i] <- length(TimeList[[i]])
}
}
NumCols <- ncol(data.table::copy(data))
data.table::alloc.col(DT = data, ncol(data) + sum(Cols))
for(i in seq_len(length(DateCols))) {
if(length(TimeList) != 0L) {
if(any(tolower(TimeList[[i]]) %chin% c("second", "minute", "hour"))) data.table::set(data, j = paste0("TIME_", eval(DateCols[i])), value = data.table::as.ITime(data[[eval(DateCols[i])]]))
if(any(tolower(TimeList[[i]]) %chin% c("wday","mday","yday","week","isoweek","wom","month","quarter","year"))) data.table::set(data, j = paste0("DATE_", eval(DateCols[i])), value = data.table::as.IDate(data[[eval(DateCols[i])]]))
}
}
for(i in seq_len(length(DateCols))) {
DateColRef <- DateCols[i]
if(any(tolower(TimeList[[i]]) %chin% c("second", "minute", "hour"))) {
DataCompute <- unique(data[, .SD, .SDcols = c(paste0("TIME_", DateColRef), paste0("DATE_", DateColRef))])
} else {
DataCompute <- unique(data[, .SD, .SDcols = c(paste0("DATE_", DateColRef))])
}
for(j in TimeList[[i]]) {
if(tolower(j) == "second") {
if(AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::second(DataCompute[[eval(paste0("TIME_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::second(DataCompute[[eval(paste0("TIME_", DateColRef))]]))
}
} else if(tolower(j) == "minute") {
if(AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::minute(DataCompute[[eval(paste0("TIME_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::minute(DataCompute[[paste0("TIME_", DateColRef)]]))
}
} else if(tolower(j) == "hour") {
if(AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::hour(DataCompute[[eval(paste0("TIME_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::hour(DataCompute[[eval(paste0("TIME_", DateColRef))]]))
}
} else if(tolower(j) == "wday") {
if(AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::wday(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::wday(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
} else if(tolower(j) == "mday") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.integer(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
}
} else if(tolower(j) == "yday") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::yday(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::yday(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
} else if(tolower(j) == "week") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::week(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::week(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
} else if(tolower(j) == "isoweek") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::isoweek(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::isoweek(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
} else if(tolower(j) == "month") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::month(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::month(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
} else if(tolower(j) == "wom") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::fifelse(ceiling(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])/7) == 5, 4, ceiling(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])/7))))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::fifelse(ceiling(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])/7) == 5, 4, ceiling(data.table::mday(DataCompute[[eval(paste0("DATE_", DateColRef))]])/7)))
}
} else if(tolower(j) == "quarter") {
if (AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::quarter(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.integer(data.table::quarter(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
}
} else if(tolower(j) == "year") {
if(AsFactor) {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = as.factor(data.table::year(DataCompute[[eval(paste0("DATE_", DateColRef))]])))
} else {
data.table::set(DataCompute, j = paste0(DateColRef, "_", j), value = data.table::year(DataCompute[[eval(paste0("DATE_", DateColRef))]]))
}
}
}
if(any(tolower(TimeList[[i]]) %chin% c("second", "minute", "hour"))) {
data <- merge(data, DataCompute, by = c(paste0("TIME_", DateColRef), paste0("DATE_", DateColRef)), all = FALSE)
} else {
data <- merge(data, DataCompute, by = c(paste0("DATE_", DateColRef)), all = FALSE)
}
if(any(tolower(TimeList[[i]]) %chin% c("second", "minute", "hour"))) data.table::set(data, j = paste0("TIME_", DateColRef), value = NULL)
if(any(tolower(TimeList[[i]]) %chin% c("wday","mday","yday","week","isoweek","wom","month","quarter","year"))) data.table::set(data, j = paste0("DATE_", DateColRef), value = NULL)
}
return(data)
}
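# --- Illustrative usage (hypothetical data, not from the original source) ---
# CreateCalendarVariables() appends calendar features derived from a date
# column; here Date_wday and Date_month are added to a small data.table.
library(data.table)
dt <- data.table(Date  = seq(as.Date("2020-01-01"), by = "day", length.out = 10),
                 Sales = rnorm(10))
dt <- CreateCalendarVariables(dt,
                              DateCols  = "Date",
                              AsFactor  = FALSE,
                              TimeUnits = c("wday", "month"))
head(dt)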
CreateHolidayVariables <- function(data,
DateCols = NULL,
LookbackDays = NULL,
HolidayGroups = c('USPublicHolidays','EasterGroup','ChristmasGroup','OtherEcclesticalFeasts'),
Holidays = NULL,
Print = FALSE) {
data.table::setDTthreads(threads = max(1L, parallel::detectCores()-2L))
if(!data.table::is.data.table(data)) data.table::setDT(data)
for(dat in DateCols) {
if(any(class(data[[dat]]) %chin% c("IDate"))) {
data[, eval(dat) := as.Date(get(dat))]
} else if(any(class(data[[dat]]) %chin% c("IDateTime"))) {
data[, eval(dat) := as.POSIXct(get(dat))]
}
}
requireNamespace("timeDate", quietly = TRUE)
HolidayCountsInRange <- function(Start, End, Values) return(as.integer(length(which(x = Values %in% seq(as.Date(Start), as.Date(End), by = "days")))))
Holidays <- c()
if(!is.null(HolidayGroups)) {
for(counter in seq_len(length(HolidayGroups))) {
if(tolower(HolidayGroups[counter]) == "eastergroup") {
Holidays <- c(Holidays,"Septuagesima","Quinquagesima","PalmSunday","GoodFriday","EasterSunday","Easter","EasterMonday","RogationSunday",
"Ascension","Pentecost","PentecostMonday","TrinitySunday","CorpusChristi","AshWednesday")
}
if(tolower(HolidayGroups[counter]) == "christmasgroup") {
Holidays <- c(Holidays,"ChristTheKing","Advent1st","Advent1st","Advent3rd","Advent4th","ChristmasEve","ChristmasDay","BoxingDay","NewYearsDay")
}
if(tolower(HolidayGroups[counter]) == "otherecclesticalfeasts") {
Holidays <- c(Holidays,"SolemnityOfMary","Epiphany","PresentationOfLord",
"Annunciation","TransfigurationOfLord","AssumptionOfMary",
"AssumptionOfMary","BirthOfVirginMary","CelebrationOfHolyCross",
"MassOfArchangels","AllSaints","AllSouls")
}
if(tolower(HolidayGroups[counter]) == "uspublicholidays") {
Holidays <- c(Holidays,"USNewYearsDay","USInaugurationDay","USMLKingsBirthday","USLincolnsBirthday","USWashingtonsBirthday","USCPulaskisBirthday","USGoodFriday",
"USMemorialDay","USIndependenceDay","USLaborDay","USColumbusDay","USElectionDay","USVeteransDay","USThanksgivingDay","USChristmasDay")
}
}
}
for(i in seq_along(DateCols)) if(!is.character(DateCols[i])) DateCols[i] <- names(data)[DateCols[i]]
data.table::alloc.col(DT = data, ncol(data) + 1L)
MinDate <- data[, min(get(DateCols[1L]), na.rm = TRUE)]
library(timeDate)
HolidayVals <- sort(unique(as.Date(timeDate::holiday(year = unique(lubridate::year(data[[DateCols[1L]]])), Holiday = Holidays))))
for(i in seq_along(DateCols)) {
if(!is.null(LookbackDays)) {
x <- LookbackDays
} else {
x <- data[, quantile(x = (data[[eval(DateCols[i])]] - data[[(paste0("Lag1_",eval(DateCols[i])))]]), probs = 0.99)]
}
data[, eval(paste0("Lag1_", DateCols[i])) := get(DateCols[i]) - lubridate::days(x)]
data.table::setkeyv(x = data, cols = c(DateCols[i], paste0("Lag1_", eval(DateCols[i]))))
data.table::set(data, i = which(data[[eval(DateCols[i])]] == MinDate), j = eval(paste0("Lag1_",DateCols[i])), value = MinDate - x)
temp <- unique(data[, .SD, .SDcols = c(DateCols[i], paste0("Lag1_", eval(DateCols[i])))])
temp[, HolidayCounts := 0L]
NumRows <- seq_len(temp[,.N])
if(Print) {
for(Rows in NumRows) {
print(Rows)
data.table::set(x = temp, i = Rows, j = "HolidayCounts", value = sum(HolidayCountsInRange(Start = temp[[paste0("Lag1_", DateCols[i])]][[Rows]], End = temp[[DateCols[i]]][[Rows]], Values = HolidayVals)))
}
} else {
for(Rows in NumRows) {
data.table::set(x = temp, i = Rows, j = "HolidayCounts", value = sum(HolidayCountsInRange(Start = temp[[paste0("Lag1_", DateCols[i])]][[Rows]], End = temp[[DateCols[i]]][[Rows]], Values = HolidayVals)))
}
}
data[temp, on = c(eval(DateCols[i]), paste0("Lag1_", DateCols[i])), HolidayCounts := i.HolidayCounts]
if(length(DateCols) > 1L) data.table::setnames(data, "HolidayCounts", paste0(DateCols[i], "_HolidayCounts"))
data.table::set(data, j = eval(paste0("Lag1_", DateCols[i])), value = NULL)
}
return(data)
}
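# --- Illustrative usage (hypothetical data, not from the original source) ---
# CreateHolidayVariables() counts holidays falling in a lookback window that
# ends at each date; it needs the timeDate and lubridate packages installed.
library(data.table)
dt <- data.table(Date  = seq(as.Date("2020-12-20"), by = "day", length.out = 10),
                 Sales = rnorm(10))
dt <- CreateHolidayVariables(dt,
                             DateCols      = "Date",
                             LookbackDays  = 7,
                             HolidayGroups = "ChristmasGroup",
                             Holidays      = NULL,
                             Print         = FALSE)
head(dt)  # the HolidayCounts column holds the holiday count per lookback window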
CalendarVariables <- function(data = NULL,
RunMode = "train",
ArgsList = NULL,
SkipCols = NULL) {
Start <- Sys.time()
tempnames <- names(data.table::copy(data))
if(tolower(RunMode) == "train") {
for(dat in ArgsList$Data$DateVariables) {
data <- RemixAutoML::CreateCalendarVariables(
data = data,
DateCols = dat,
AsFactor = FALSE,
TimeUnits = ArgsList$FE_Args$Calendar$CalendarVariables)
}
if(!is.null(SkipCols)) {
temp <- SkipCols[!SkipCols %chin% names(data)]
data[, (temp) := NULL]
}
if(!is.null(ArgsList)) {
ArgsList$CalendarVariables$DateCols <- ArgsList$Data$DateVariables
ArgsList$CalendarVariables$TimeUnits <- ArgsList$FE_Args$Calendar$CalendarVariables
ArgsList$CalendarVariables$AsFactor <- FALSE
ArgsList$FE_Columns$CalendarVariables_Training <- setdiff(names(data), tempnames)
End <- Sys.time()
ArgsList$RunTime$CalendarVariables_Training <- difftime(End, Start, units = "mins")
}
} else {
for(dat in ArgsList$CalendarVariables$DateCols) {
data <- RemixAutoML::CreateCalendarVariables(
data = data,
DateCols = ArgsList$CalendarVariables$DateCols,
AsFactor = ArgsList$CalendarVariables$AsFactor,
TimeUnits = ArgsList$CalendarVariables$TimeUnits)
}
if(!is.null(SkipCols)) {
temp <- SkipCols[!SkipCols %chin% names(data)]
data[, (temp) := NULL]
}
if(!is.null(ArgsList)) {
End <- Sys.time()
ArgsList$RunTime$CalendarVariables_Scoring <- difftime(End, Start, units = "mins")
}
}
return(list(data = data, ArgsList = ArgsList))
}
HolidayVariables <- function(data = NULL,
RunMode = "train",
ArgsList = ArgsList,
SkipCols = NULL) {
Start <- Sys.time()
if(tolower(RunMode) == "train") {
tempnames <- names(data.table::copy(data))
for(dat in ArgsList$Data$DateVariables) {
for(i in seq_along(ArgsList$FE_Args$Holiday_Variables$HolidayVariables)) {
data <- RemixAutoML::CreateHolidayVariables(
data = data,
DateCols = dat,
LookbackDays = ArgsList$FE_Args$Holiday_Variables$LookBackDays,
HolidayGroups = ArgsList$FE_Args$Holiday_Variables$HolidayVariables[i],
Holidays = NULL,
Print = FALSE)
data.table::setnames(data, "HolidayCounts", paste0(dat, "_", ArgsList$FE_Args$Holiday$HolidayVariables[i], "_HolidayCounts"))
}
}
if(!is.null(SkipCols)) {
temp <- SkipCols[!SkipCols %chin% names(data)]
data[, (temp) := NULL]
}
ArgsList$HolidayVariables$DateCols <- ArgsList$Data$DateVariables
ArgsList$HolidayVariables$LookbackDays <- ArgsList$FE_Args$Holiday_Variables$LookBackDays
ArgsList$HolidayVariables$AsFactor <- FALSE
ArgsList$HolidayVariables$HolidaySets <- ArgsList$FE_Args$Holiday_Variables$HolidayVariables
ArgsList$FE_Columns$HolidayVariables_Training <- setdiff(names(data), tempnames)
End <- Sys.time()
ArgsList$RunTime$HolidayVariables_Training <- difftime(End, Start, units = "mins")
} else {
for(dat in ArgsList$HolidayVariables$DateCols) {
for(i in seq_along(ArgsList$HolidayVariables$HolidaySets)) {
data <- RemixAutoML::CreateHolidayVariables(
data = data,
DateCols = ArgsList$HolidayVariables$DateCols,
LookbackDays = ArgsList$HolidayVariables$LookbackDays,
HolidayGroups = ArgsList$HolidayVariables$HolidaySets[i],
Holidays = NULL,
Print = FALSE)
data.table::setnames(data, "HolidayCounts", paste0(dat, "_", ArgsList$HolidayVariables$HolidaySets[i], "_HolidayCounts"))
}
}
if(!is.null(SkipCols)) {
temp <- SkipCols[!SkipCols %chin% names(data)]
data[, (temp) := NULL]
}
End <- Sys.time()
ArgsList$RunTime$HolidayVariables_Scoring <- difftime(End, Start, units = "mins")
}
return(list(data = data, ArgsList = ArgsList))
}
add.indicator <- function( strategy
, name
, arguments
, parameters=NULL
, label=NULL
, ...
, enabled=TRUE
, indexnum=NULL
, store=FALSE
)
{
if (!is.strategy(strategy)) {
strategy<-try(getStrategy(strategy))
if(inherits(strategy,"try-error"))
stop ("You must supply an object or the name of an object of type 'strategy'.")
store=TRUE
}
tmp_indicator<-list()
tmp_indicator$name<-name
if(is.null(label)) {
label <- paste(name,"ind",sep='.')
gl <- grep(label, names(strategy$indicators))
if (!identical(integer(0), gl)) label <- paste(label, length(gl)+1, sep=".")
}
tmp_indicator$label<-label
tmp_indicator$enabled=enabled
if (!is.list(arguments)) stop("arguments must be passed as a named list")
tmp_indicator$arguments<-arguments
if(!is.null(parameters)) tmp_indicator$parameters = parameters
if(length(list(...))) tmp_indicator<-c(tmp_indicator,list(...))
indexnum <- if (!is.null(indexnum)) {indexnum} else label
tmp_indicator$call<-match.call()
class(tmp_indicator)<-'strat_indicator'
strategy$indicators[[indexnum]]<-tmp_indicator
strategy$trials <- strategy$trials+1
if (store) assign(strategy$name,strategy,envir=as.environment(.strategy))
else return(strategy)
strategy$name
}
applyIndicators <- function(strategy, mktdata, parameters=NULL, ...) {
if(any(diff(.index(mktdata)) == 0)) {
warning("'mktdata' index contains duplicates; calling 'make.index.unique'")
mktdata <- make.index.unique(mktdata)
}
if (!is.strategy(strategy)) {
strategy<-try(getStrategy(strategy))
if(inherits(strategy,"try-error"))
stop ("You must supply an object of type 'strategy'.")
}
ret <- NULL
omit <- unique(do.call(c, lapply(names(strategy$indicators), grep, colnames(mktdata))))
cidx <- 1:NCOL(mktdata)
keep <- cidx[!cidx %in% omit]
mktdata <- mktdata[, keep]
for (indicator in strategy$indicators){
if(is.function(indicator$name)) {
indFun <- indicator$name
} else {
if(exists(indicator$name, mode="function")) {
indFun <- get(indicator$name, mode="function")
} else {
ind.name <- paste("ind", indicator$name, sep=".")
if(exists(ind.name, mode="function")) {
indFun <- get(ind.name, mode="function")
indicator$name <- ind.name
} else {
message("Skipping indicator ", indicator$name,
" because there is no function by that name to call")
next
}
}
}
if(!isTRUE(indicator$enabled)) next()
.formals <- formals(indicator$name)
.formals <- modify.args(.formals, indicator$arguments, dots=TRUE)
.formals <- modify.args(.formals, parameters, dots=TRUE)
.formals <- modify.args(.formals, NULL, ..., dots=TRUE)
.formals$`...` <- NULL
tmp_val <- do.call(indFun, .formals)
if(is.null(colnames(tmp_val)))
colnames(tmp_val) <- seq(ncol(tmp_val))
if(!identical(colnames(tmp_val),indicator$label))
colnames(tmp_val) <- paste(colnames(tmp_val),indicator$label,sep='.')
if (nrow(mktdata)==nrow(tmp_val) | length(mktdata)==length(tmp_val)) {
mktdata<-cbind(mktdata,tmp_val)
} else {
if(is.null(ret)) ret<-list()
ret[[indicator$name]]<-tmp_val
}
}
mktdata<<-mktdata
if(is.null(ret)) {
return(mktdata)
}
else return(ret)
}
fullscreenDependency <- function() {
list(
html_dep_prod("lfx-fullscreen", "1.0.2", has_style = TRUE)
)
}
addFullscreenControl <- function(
map, position = "topleft", pseudoFullscreen = FALSE) {
map$dependencies <- c(map$dependencies, fullscreenDependency())
if (is.null(map$x$options))
map$x$options <- list()
map$x$options["fullscreenControl"] <-
list(list(position = position, pseudoFullscreen = pseudoFullscreen))
map
}
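# --- Illustrative usage (a sketch, not from the original source) ---
# addFullscreenControl() registers the fullscreen plugin on a leaflet map.
# This assumes the surrounding package is loaded, since fullscreenDependency()
# relies on the internal html_dep_prod() helper defined elsewhere in that
# package; "topright" is just an example position.
library(leaflet)
leaflet() %>%
  addTiles() %>%
  addFullscreenControl(position = "topright", pseudoFullscreen = FALSE)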
leafsfirst.new<-function(pcf=NULL, lev=NULL, refe=NULL, type="lst",
levmet="radius", ordmet="etaisrec", ngrid=NULL,
dendat=NULL, rho=0, propor=NULL, dist.type="euclid")
{
if ((!is.null(lev)) || (!is.null(propor))) type<-"shape"
if (!is.null(dendat)) type<-"tail"
if (type=="tail")
lst<-leafsfirst.tail(dendat=dendat, rho=rho, refe=refe, dist.type=dist.type)
return(lst)
}
.simplicity <- function(q, Q, j, lmin, lmax, lstep) {
eps <- .Machine$double.eps * 100
n <- length(Q)
i <- match(q, Q)[1]
v <- ifelse( (lmin %% lstep < eps ||
lstep - (lmin %% lstep) < eps) &&
lmin <= 0 && lmax >= 0, 1, 0)
1 - (i - 1) / (n - 1) - j + v
}
.simplicity.max <- function(q, Q, j) {
n <- length(Q)
i <- match(q, Q)[1]
v <- 1
1 - (i - 1) / (n - 1) - j + v
}
.coverage <- function(dmin, dmax, lmin, lmax) {
range <- dmax - dmin
1 - 0.5 * ( (dmax - lmax) ^ 2 + (dmin - lmin) ^ 2) / ( (0.1 * range) ^ 2)
}
.coverage.max <- function(dmin, dmax, span) {
range <- dmax - dmin
if (span > range) {
half <- (span - range) / 2
1 - 0.5 * (half ^ 2 + half ^ 2) / ( (0.1 * range) ^ 2)
}
else {
1
}
}
.density <- function(k, m, dmin, dmax, lmin, lmax) {
r <- (k - 1) / (lmax - lmin)
rt <- (m - 1) / (max(lmax, dmax) - min(dmin, lmin))
2 - max( r / rt, rt / r )
}
.density.max <- function(k, m) {
if (k >= m) {
2 - (k - 1) / (m - 1)
} else {
1
}
}
.legibility <- function(lmin, lmax, lstep) {
1
}
extended_range_breaks_ <- function(dmin, dmax, n = 5,
Q = c(1, 5, 2, 2.5, 4, 3),
w = c(0.25, 0.2, 0.5, 0.05)) {
eps <- .Machine$double.eps * 100
if (dmin > dmax) {
temp <- dmin
dmin <- dmax
dmax <- temp
}
if (dmax - dmin < eps) {
return(seq(from = dmin, to = dmax, length.out = n))
}
n <- length(Q)
best <- list()
best$score <- -2
j <- 1
while (j < Inf) {
for (q in Q) {
sm <- .simplicity.max(q, Q, j)
if ( (w[1] * sm + w[2] + w[3] + w[4]) < best$score) {
j <- Inf
break
}
k <- 2
while (k < Inf) {
dm <- .density.max(k, n)
if ( (w[1] * sm + w[2] + w[3] * dm + w[4]) < best$score)
break
delta <- (dmax - dmin) / (k + 1) / j / q
z <- ceiling(log(delta, base = 10))
while (z < Inf) {
step <- j * q * 10 ^ z
cm <- .coverage.max(dmin, dmax, step * (k - 1))
if ( (w[1] * sm + w[2] * cm + w[3] * dm + w[4]) < best$score)
break
min_start <- floor(dmax / (step)) * j - (k - 1) * j
max_start <- ceiling(dmin / (step)) * j
if (min_start > max_start) {
z <- z + 1
next
}
for (start in min_start:max_start) {
lmin <- start * (step / j)
lmax <- lmin + step * (k - 1)
lstep <- step
s <- .simplicity(q, Q, j, lmin, lmax, lstep)
c <- .coverage(dmin, dmax, lmin, lmax)
g <- .density(k, n, dmin, dmax, lmin, lmax)
l <- .legibility(lmin, lmax, lstep)
score <- w[1] * s + w[2] * c + w[3] * g + w[4] * l
if (score > best$score
&& lmin >= dmin
&& lmax <= dmax) {
best <- list(lmin = lmin,
lmax = lmax,
lstep = lstep,
score = score)
}
}
z <- z + 1
}
k <- k + 1
}
}
j <- j + 1
}
breaks <- seq(from = best$lmin, to = best$lmax, by = best$lstep)
if (length(breaks) >= 2) {
breaks[1] <- dmin
breaks[length(breaks)] <- dmax
}
breaks
}
extended_range_breaks <- function(n = 5, ...) {
function(x) {
extended_range_breaks_(min(x), max(x), n, ...)
}
}
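# --- Illustrative usage (not part of the original source) ---
# extended_range_breaks_() computes Talbot-style "extended" breaks and snaps
# the outermost breaks to the data range; extended_range_breaks() wraps it as
# a breaks function, e.g. for a ggplot2 scale (shown as a comment).
extended_range_breaks_(0.17, 9.83, n = 5)
# scale_y_continuous(breaks = extended_range_breaks(n = 5))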
zero_range <- function(x, tol = 1000 * .Machine$double.eps) {
if (length(x) == 1)
return(TRUE)
if (length(x) != 2)
stop("x must be length 1 or 2")
if (any(is.na(x)))
return(NA)
if (x[1] == x[2])
return(TRUE)
if (all(is.infinite(x)))
return(FALSE)
m <- min(abs(x))
if (m == 0)
return(FALSE)
abs( (x[1] - x[2]) / m) < tol
}
precision <- function(x) {
rng <- range(x, na.rm = TRUE)
span <- if (zero_range(rng))
abs(rng[1])
else diff(rng)
10 ^ floor(log10(span))
}
smart_digits <- function(x, ...) {
if (length(x) == 0)
return(character())
accuracy <- precision(x)
x <- round(x / accuracy) * accuracy
format(x, ...)
}
smart_digits_format <- function(x, ...) {
function(x) smart_digits(x, ...)
}
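# --- Illustrative usage (not part of the original source) ---
# smart_digits() rounds values to the precision implied by their span before
# formatting; smart_digits_format() returns a formatter with fixed options.
smart_digits(c(0.1234, 0.5678, 0.9012))
# span is about 0.78, so precision() gives 0.1 and values round to one decimal
fmt <- smart_digits_format()
fmt(c(1234.5, 2345.6))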
setCluster <- function(spec, ..., remove = FALSE){
if(missing(spec))
spec <- parallel::detectCores()
if(remove){
if(is.null(.faoutlierClusterEnv$CLUSTER)){
message('There is no visible CLUSTER() definition')
return(invisible())
}
parallel::stopCluster(.faoutlierClusterEnv$CLUSTER)
.faoutlierClusterEnv$CLUSTER <- NULL
.faoutlierClusterEnv$ncores <- 1L
return(invisible())
}
if(!is.null(.faoutlierClusterEnv$CLUSTER)){
message('CLUSTER() has already been defined')
return(invisible())
}
.faoutlierClusterEnv$CLUSTER <- parallel::makeCluster(spec)
.faoutlierClusterEnv$ncores <- length(.faoutlierClusterEnv$CLUSTER)
parSapply(.faoutlierClusterEnv$CLUSTER, 1L:.faoutlierClusterEnv$ncores*2L,
function(x) invisible())
return(invisible())
}
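# --- Illustrative usage (not part of the original source) ---
# setCluster() stores a parallel cluster in the package-internal environment
# .faoutlierClusterEnv so later computations can reuse it. For a standalone
# run of this snippet, that environment can be stubbed as below (inside the
# original package it already exists).
library(parallel)
.faoutlierClusterEnv <- new.env()
.faoutlierClusterEnv$CLUSTER <- NULL
.faoutlierClusterEnv$ncores <- 1L
setCluster(2)              # create and register a 2-node cluster
setCluster(remove = TRUE)  # stop the cluster and reset the core count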
context("Testing dual-host with continuous structure")
test_that("Both hosts move", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact = function(t){round(rnorm(1, 3, 1), 0)}
start.pos <- c(0,0)
set.seed(805)
test.nosoiA <- nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=1,
init.individuals.B=0,
init.structure.A=start.pos,
init.structure.B=NA,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=p_Move_fct,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=sdMove_fct,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=p_Move_fct,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=TRUE,
sdMove.B=sdMove_fct,
param.sdMove.B=NA,
attracted.by.raster.B=TRUE,
nContact.B=time_contact,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
diff.nContact.B=FALSE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V")
expect_output(print(test.nosoiA), "a dual host with a continuous structure")
full.results.nosoi <- rbindlist(list(test.nosoiA$host.info.A$table.hosts,test.nosoiA$host.info.B$table.hosts))
full.results.nosoi.state <- rbindlist(list(test.nosoiA$host.info.A$table.state,test.nosoiA$host.info.B$table.state))
g <- graph.data.frame(full.results.nosoi[inf.by != "NA-1",c(1,2)],directed=F)
expect_equal(transitivity(g, type="global"), 0)
expect_equal(clusters(g, "weak")$no, 1)
expect_equal(diameter(g, directed=F, weights=NA), 8)
expect_equal(all(grepl("H-", test.nosoiA$host.info.A$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.A$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.B$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("H-", test.nosoiA$host.info.B$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(test.nosoiA$total.time, 22)
expect_equal(test.nosoiA$host.info.A$N.infected, 326)
expect_equal(test.nosoiA$host.info.B$N.infected, 579)
expect_equal(test.nosoiA$type, "dual")
expect_equal(test.nosoiA$host.info.A$popStructure, "continuous")
expect_equal(test.nosoiA$host.info.B$popStructure, "continuous")
expect_equal(nrow(subset(full.results.nosoi.state, hosts.ID == "H-1")),2)
skip_if_not_installed("dplyr")
dynOld <- getDynamicOld(test.nosoiA)
dynNew <- getDynamic(test.nosoiA)
expect_equal(dynOld, dynNew)
r_0 <- getR0(test.nosoiA)
expect_equal(r_0$N.inactive.A,
ifelse(length(r_0$R0.hostA.dist) == 1 && is.na(r_0$R0.hostA.dist),
0, length(r_0$R0.hostA.dist)))
expect_equal(r_0$N.inactive.B,
ifelse(length(r_0$R0.hostB.dist) == 1 && is.na(r_0$R0.hostB.dist),
0, length(r_0$R0.hostB.dist)))
})
test_that("One host (A) moves", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact = function(t){round(rnorm(1, 3, 1), 0)}
start.pos <- c(0,0)
set.seed(805)
test.nosoiA <- nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=1,
init.individuals.B=0,
init.structure.A=start.pos,
init.structure.B=NA,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=p_Move_fct,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=sdMove_fct,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=NA,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=FALSE,
sdMove.B=NA,
param.sdMove.B=NA,
attracted.by.raster.B=FALSE,
nContact.B=time_contact,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
diff.nContact.B=FALSE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V")
full.results.nosoi <- rbindlist(list(test.nosoiA$host.info.A$table.hosts,test.nosoiA$host.info.B$table.hosts))
full.results.nosoi.state <- rbindlist(list(test.nosoiA$host.info.A$table.state,test.nosoiA$host.info.B$table.state))
g <- graph.data.frame(full.results.nosoi[inf.by != "NA-1",c(1,2)],directed=F)
expect_equal(transitivity(g, type="global"), 0)
expect_equal(clusters(g, "weak")$no, 1)
expect_equal(diameter(g, directed=F, weights=NA), 10)
expect_equal(all(grepl("H-", test.nosoiA$host.info.A$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.A$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.B$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("H-", test.nosoiA$host.info.B$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(nrow(test.nosoiA$host.info.B$table.hosts), nrow(test.nosoiA$host.info.B$table.state))
expect_equal(test.nosoiA$total.time, 24)
expect_equal(test.nosoiA$host.info.A$N.infected, 682)
expect_equal(test.nosoiA$host.info.B$N.infected, 606)
expect_equal(test.nosoiA$type, "dual")
expect_equal(test.nosoiA$host.info.A$popStructure, "continuous")
expect_equal(test.nosoiA$host.info.B$popStructure, "continuous")
H1_moves <- subset(full.results.nosoi.state, hosts.ID == "H-1")
expect_equal(nrow(H1_moves),5)
expect_equal(H1_moves$current.env.value[1] < H1_moves$current.env.value[5],TRUE)
skip_if_not_installed("dplyr")
dynOld <- getDynamicOld(test.nosoiA)
dynNew <- getDynamic(test.nosoiA)
expect_equal(dynOld, dynNew)
r_0 <- getR0(test.nosoiA)
expect_equal(r_0$N.inactive.A,
ifelse(length(r_0$R0.hostA.dist) == 1 && is.na(r_0$R0.hostA.dist),
0, length(r_0$R0.hostA.dist)))
expect_equal(r_0$N.inactive.B,
ifelse(length(r_0$R0.hostB.dist) == 1 && is.na(r_0$R0.hostB.dist),
0, length(r_0$R0.hostB.dist)))
})
test_that("One host (B) moves", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact = function(t){round(rnorm(1, 3, 1), 0)}
start.pos <- c(0,0)
set.seed(19)
test.nosoiA <- nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=1,
init.individuals.B=0,
init.structure.A=start.pos,
init.structure.B=NA,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=NA,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=NA,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=p_Move_fct,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=TRUE,
sdMove.B=sdMove_fct,
param.sdMove.B=NA,
attracted.by.raster.B=FALSE,
nContact.B=time_contact,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
diff.nContact.B=FALSE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V")
full.results.nosoi <- rbindlist(list(test.nosoiA$host.info.A$table.hosts,test.nosoiA$host.info.B$table.hosts))
full.results.nosoi.state <- rbindlist(list(test.nosoiA$host.info.A$table.state,test.nosoiA$host.info.B$table.state))
g <- graph.data.frame(full.results.nosoi[inf.by != "NA-1",c(1,2)],directed=F)
expect_equal(transitivity(g, type="global"), 0)
expect_equal(clusters(g, "weak")$no, 1)
expect_equal(diameter(g, directed=F, weights=NA), 10)
expect_equal(all(grepl("H-", test.nosoiA$host.info.A$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.A$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.B$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("H-", test.nosoiA$host.info.B$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(nrow(test.nosoiA$host.info.A$table.hosts), nrow(test.nosoiA$host.info.A$table.state))
expect_equal(test.nosoiA$total.time, 26)
expect_equal(test.nosoiA$host.info.A$N.infected, 627)
expect_equal(test.nosoiA$host.info.B$N.infected, 520)
expect_equal(test.nosoiA$type, "dual")
expect_equal(test.nosoiA$host.info.A$popStructure, "continuous")
expect_equal(test.nosoiA$host.info.B$popStructure, "continuous")
H1_moves <- subset(full.results.nosoi.state, hosts.ID == "V-1")
expect_equal(nrow(H1_moves),2)
expect_equal(H1_moves$current.env.value[1] < H1_moves$current.env.value[2],TRUE)
skip_if_not_installed("dplyr")
dynOld <- getDynamicOld(test.nosoiA)
dynNew <- getDynamic(test.nosoiA)
expect_equal(dynOld, dynNew)
r_0 <- getR0(test.nosoiA)
expect_equal(r_0$N.inactive.A,
ifelse(length(r_0$R0.hostA.dist) == 1 && is.na(r_0$R0.hostA.dist),
0, length(r_0$R0.hostA.dist)))
expect_equal(r_0$N.inactive.B,
ifelse(length(r_0$R0.hostB.dist) == 1 && is.na(r_0$R0.hostB.dist),
0, length(r_0$R0.hostB.dist)))
})
test_that("Epidemic dies out", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact = function(t){round(rnorm(1, 3, 1), 0)}
start.pos <- c(0,0)
set.seed(1000)
test.nosoiA <- nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=0,
init.individuals.B=1,
init.structure.A=NA,
init.structure.B=start.pos,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=NA,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=NA,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=p_Move_fct,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=TRUE,
sdMove.B=sdMove_fct,
param.sdMove.B=NA,
attracted.by.raster.B=FALSE,
nContact.B=time_contact,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
diff.nContact.B=FALSE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V")
full.results.nosoi <- rbindlist(list(test.nosoiA$host.info.A$table.hosts,test.nosoiA$host.info.B$table.hosts))
full.results.nosoi.state <- rbindlist(list(test.nosoiA$host.info.A$table.state,test.nosoiA$host.info.B$table.state))
expect_equal(nrow(test.nosoiA$host.info.A$table.hosts), nrow(test.nosoiA$host.info.A$table.state))
expect_equal(test.nosoiA$total.time, 4)
expect_equal(test.nosoiA$host.info.A$N.infected, 0)
expect_equal(test.nosoiA$host.info.B$N.infected, 1)
expect_equal(test.nosoiA$type, "dual")
expect_equal(test.nosoiA$host.info.A$popStructure, "continuous")
expect_equal(test.nosoiA$host.info.B$popStructure, "continuous")
H1_moves <- subset(full.results.nosoi.state, hosts.ID == "V-1")
expect_equal(nrow(H1_moves),2)
skip_if_not_installed("dplyr")
dynOld <- getDynamicOld(test.nosoiA)
dynNew <- getDynamic(test.nosoiA)
expect_equal(dynOld, dynNew)
r_0 <- getR0(test.nosoiA)
expect_equal(r_0$N.inactive.A,
ifelse(length(r_0$R0.hostA.dist) == 1 && is.na(r_0$R0.hostA.dist),
0, length(r_0$R0.hostA.dist)))
expect_equal(r_0$N.inactive.B,
ifelse(length(r_0$R0.hostB.dist) == 1 && is.na(r_0$R0.hostB.dist),
0, length(r_0$R0.hostB.dist)))
})
test_that("Error if no host move", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact = function(t){round(rnorm(1, 3, 1), 0)}
start.pos <- c(0,0)
set.seed(1000)
expect_error(
nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=0,
init.individuals.B=1,
init.structure.A=NA,
init.structure.B=start.pos,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=NA,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=NA,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=NA,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=TRUE,
sdMove.B=NA,
param.sdMove.B=NA,
attracted.by.raster.B=FALSE,
nContact.B=time_contact,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
diff.nContact.B=FALSE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V"),
"At least one host must move.")
})
test_that("One host (B) moves, host count", {
library(raster)
set.seed(860)
test.raster <- raster(nrows=100, ncols=100, xmn=-50, xmx=50, ymn=-50,ymx=50)
test.raster[] <- runif(10000, -80, 180)
test.raster <- focal(focal(test.raster, w=matrix(1, 5, 5), mean), w=matrix(1, 5, 5), mean)
skip_if_not_installed("igraph")
library(igraph)
t_incub_fct <- function(x){rnorm(x,mean = 5,sd=1)}
p_max_fct <- function(x){rbeta(x,shape1 = 5,shape2=2)}
p_Move_fct <- function(t){return(0.1)}
sdMove_fct = function(t,current.env.value){return(100/(current.env.value+1))}
p_Exit_fct <- function(t){return(0.08)}
proba <- function(t,p_max,t_incub){
if(t <= t_incub){p=0}
if(t >= t_incub){p=p_max}
return(p)
}
time_contact_A = function(t){round(rnorm(1, 3, 1), 0)}
time_contact_B <- function(t, current.env.value, host.count.A){
temp.val = round(((current.env.value-host.count.A)/current.env.value)*rnorm(1, 3, 1), 0)
if(length(temp.val) == 0 || temp.val <= 0) {
return(0)
}
if(temp.val >= 0) {
return(temp.val)
}
}
start.pos <- c(0,0)
set.seed(19)
test.nosoiA <- nosoiSim(type="dual", popStructure="continuous",
length.sim=200,
max.infected.A=500,
max.infected.B=500,
init.individuals.A=1,
init.individuals.B=0,
init.structure.A=start.pos,
init.structure.B=NA,
structure.raster.A=test.raster,
structure.raster.B=test.raster,
pExit.A=p_Exit_fct,
param.pExit.A=NA,
timeDep.pExit.A=FALSE,
diff.pExit.A=FALSE,
pMove.A=NA,
param.pMove.A=NA,
timeDep.pMove.A=FALSE,
diff.pMove.A=FALSE,
diff.sdMove.A=TRUE,
sdMove.A=NA,
param.sdMove.A=NA,
attracted.by.raster.A=TRUE,
nContact.A=time_contact_A,
param.nContact.A=NA,
timeDep.nContact.A=FALSE,
diff.nContact.A=FALSE,
pTrans.A=proba,
param.pTrans.A=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.A=FALSE,
diff.pTrans.A=FALSE,
prefix.host.A="H",
pExit.B=p_Exit_fct,
param.pExit.B=NA,
timeDep.pExit.B=FALSE,
diff.pExit.B=FALSE,
pMove.B=p_Move_fct,
param.pMove.B=NA,
timeDep.pMove.B=FALSE,
diff.pMove.B=FALSE,
diff.sdMove.B=TRUE,
sdMove.B=sdMove_fct,
param.sdMove.B=NA,
attracted.by.raster.B=FALSE,
nContact.B=time_contact_B,
param.nContact.B=NA,
timeDep.nContact.B=FALSE,
hostCount.nContact.B=TRUE,
diff.nContact.B=TRUE,
pTrans.B=proba,
param.pTrans.B=list(p_max=p_max_fct,
t_incub=t_incub_fct),
timeDep.pTrans.B=FALSE,
diff.pTrans.B=FALSE,
prefix.host.B="V")
full.results.nosoi <- rbindlist(list(test.nosoiA$host.info.A$table.hosts,test.nosoiA$host.info.B$table.hosts))
full.results.nosoi.state <- rbindlist(list(test.nosoiA$host.info.A$table.state,test.nosoiA$host.info.B$table.state))
g <- graph.data.frame(full.results.nosoi[inf.by != "NA-1",c(1,2)],directed=F)
expect_equal(transitivity(g, type="global"), 0)
expect_equal(clusters(g, "weak")$no, 1)
expect_equal(diameter(g, directed=F, weights=NA), 9)
expect_equal(all(grepl("H-", test.nosoiA$host.info.A$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.A$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(all(grepl("V-", test.nosoiA$host.info.B$table.hosts$inf.by) == FALSE),TRUE)
expect_equal(all(grepl("H-", test.nosoiA$host.info.B$table.hosts[-1]$inf.by) == TRUE),TRUE)
expect_equal(nrow(test.nosoiA$host.info.A$table.hosts), nrow(test.nosoiA$host.info.A$table.state))
expect_equal(test.nosoiA$total.time, 25)
expect_equal(test.nosoiA$host.info.A$N.infected, 280)
expect_equal(test.nosoiA$host.info.B$N.infected, 540)
expect_equal(test.nosoiA$type, "dual")
expect_equal(test.nosoiA$host.info.A$popStructure, "continuous")
expect_equal(test.nosoiA$host.info.B$popStructure, "continuous")
H1_moves <- subset(full.results.nosoi.state, hosts.ID == "V-1")
expect_equal(nrow(H1_moves),6)
expect_equal(H1_moves$current.env.value[1] < H1_moves$current.env.value[2],TRUE)
skip_if_not_installed("dplyr")
dynOld <- getDynamicOld(test.nosoiA)
dynNew <- getDynamic(test.nosoiA)
expect_equal(dynOld, dynNew)
r_0 <- getR0(test.nosoiA)
expect_equal(r_0$N.inactive.A,
ifelse(length(r_0$R0.hostA.dist) == 1 && is.na(r_0$R0.hostA.dist),
0, length(r_0$R0.hostA.dist)))
expect_equal(r_0$N.inactive.B,
ifelse(length(r_0$R0.hostB.dist) == 1 && is.na(r_0$R0.hostB.dist),
0, length(r_0$R0.hostB.dist)))
}) |
context("Base64")
test_that("some sample strings can be base64-encoded", {
expect_equal(renv_base64_encode("renv"), "cmVudg==")
original <- rawToChar(as.raw(1:255))
encoded <- renv_base64_encode(original)
decoded <- renv_base64_decode(encoded)
expect_equal(original, decoded)
})
test_that("some random base64 strings can be round-tripped", {
set.seed(123)
bytes <- 1:255
text <- replicate(1000, {
n <- sample(128L, 1L)
rawToChar(as.raw(sample(bytes, n, replace = TRUE)))
})
encoded <- lapply(text, renv_base64_encode)
decoded <- lapply(encoded, renv_base64_decode)
expect_true(all(text == decoded))
}) |
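# Determine the required number of levels of factor C by fixed-point iteration on the
# noncentrality parameter returned by ncp() for the given alpha, beta and delta,
# distinguishing the "maximin" and "minimin" cases.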
size_c.three_way_mixed_cxbina.model_5_b <- function(alpha, beta, delta, a, b, n, cases)
{
c <- 5
c.new <- 1000
  while (abs(c - c.new) > 1e-6)
{
c <- c.new
dfn <- a*(b-1)
dfd <- a*(b-1)*(c-1)
lambda <- ncp(dfn,dfd,alpha,beta)
if (cases == "maximin")
{
c.new <- 2*lambda/(n*delta*delta)
}
else if (cases == "minimin")
{
c.new <- 4*lambda/(a*b*n*delta*delta)
}
}
return(ceiling(c.new))
} |
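# Evaluate the density of the count distribution selected by 'family'; the distribution
# parameters (mu, size, ...) are passed through '...'.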
"densityfn" <-
function(x, family, ...){
args <- list(...)
switch( family,
"negbin"= dnbinom(x,size=exp(args$size), mu=args$mu) ,
"negbin.ncar"= dnbinom(x, size=exp(args$size[1]), mu=args$mu)
*exp( (length(x)>1)*(args$size[2]+x* args$size[3] )) /(1+exp( args$size[2]+ x*args$size[3])),
"poisson"= dpois(x, lambda=args$mu),
"geometric"=dgeom(x, prob= args$mu),
)
} |
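# Read a list of log files with 'readFunction' and row-bind the results; the shorthand
# "Apache" in 'columnList' expands to the standard Apache access-log column names.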
logFileListRead<- function (fileNameList, readFunction=logFileRead, columnList=NULL)
{
adjustedColumnList = list()
for(col in columnList)
{
if(col == "Apache")
{
col = c("userip", "ignored column 1", "username", "ApacheTimestamp", "url", "httpcode", "responsebytes")
}
adjustedColumnList = c(adjustedColumnList, col)
}
df = data.frame()
for(fileName in fileNameList)
{
df = rbind(df, readFunction(fileName, columnList=adjustedColumnList))
}
return(df)
} |
context("Conditional RA")
test_that("Conditional RA", {
N <- 100
declaration <-
randomizr::declare_ra(
N = N,
num_arms = 3,
simple = TRUE
)
Z <- randomizr::conduct_ra(declaration)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c("T1", "T2")
)
table(Z, Z2, useNA = "always")
declaration <- randomizr::declare_ra(N = N, num_arms = 3)
Z <- randomizr::conduct_ra(declaration)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c("T1", "T2")
)
table(Z)
table(Z2)
table(Z, Z2)
blocks <- rep(c("A", "B", "C"), times = c(50, 100, 200))
declaration <-
declare_ra(blocks = blocks, prob_each = c(.1, .8, .1))
Z <- conduct_ra(declaration)
table(blocks, Z)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c("T1", "T2")
)
table(Z, blocks)
table(Z2, blocks)
table(Z, Z2)
clusters <- rep(letters, times = 1:26)
declaration <- declare_ra(clusters = clusters, num_arms = 3)
Z <- conduct_ra(declaration)
table(Z, clusters)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c("T1", "T2")
)
table(Z, clusters)
table(Z2, clusters)
table(Z, Z2)
clusters <- rep(letters, times = 1:26)
blocks <- rep(NA, length(clusters))
blocks[clusters %in% letters[1:5]] <- "block_1"
blocks[clusters %in% letters[6:10]] <- "block_2"
blocks[clusters %in% letters[11:15]] <- "block_3"
blocks[clusters %in% letters[16:20]] <- "block_4"
blocks[clusters %in% letters[21:26]] <- "block_5"
declaration <-
declare_ra(
clusters = clusters,
blocks = blocks,
num_arms = 3
)
Z <- conduct_ra(declaration)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c("T1", "T2")
)
table(Z, clusters)
table(Z2, clusters)
table(Z, Z2)
expect_true(TRUE)
})
test_that("Conditional without conditions!", {
N <- 100
declaration <- declare_ra(N)
Z <- randomizr::conduct_ra(declaration)
Z2 <- conduct_conditional_ra(
declaration = declaration,
assignment_vec = Z,
conditions = c(0, 1)
)
expect_true(all(table(Z) == N/2))
table(Z2)
table(Z, Z2)
}) |
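# Wrap splinefun() so the fitted spline evaluates to 0 outside the observed x-range;
# the 'deriv' argument is accepted for signature compatibility but is ignored.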
mysplinefun<-function(x, y = NULL,
method = c("fmm", "periodic", "natural", "monoH.FC")[1],
ties = mean)
{
xmin<-min(x)
xmax<-max(x)
ff<-splinefun(x=x, y=y, method=method, ties=ties)
fff<-function(x, deriv)
{
sapply(x, function(X){
if(X<xmin |X>xmax)
return(0)
else
return(ff(X))
})
  }
  fff  # return the range-clipped spline function
}
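# Fit a normalised posterior marginal from log-likelihood ('logy') and log-prior ('logp')
# values: either spline interpolation rescaled to integrate to 1, or a moment-matched
# normal approximation when usenormal = TRUE.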
fitmarg<-function(x, logy, logp=0, usenormal=FALSE)
{
if(!usenormal)
{
logpost<-logy-max(logy)+logp-max(logp)
post<-exp(logpost)
post.func = splinefun(x, post)
xx = seq(min(x), max(x), len = 1000)
z=integrate(post.func, min(x), max(x))$value
post.func = mysplinefun(x=xx, y=(post.func(xx) / z) )
}
else
{
xx = seq(min(x), max(x), len = 1000)
meany=sum(x*exp(logp+logy))/sum(exp(logp+logy))
sdy=sqrt( sum(((x-meany)^2)*exp(logp+logy))/sum(exp(logp+logy)) )
post.func = function(x){dnorm(x, mean=meany, sd=sdy)}
}
return(post.func)
}
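# Weighted (BMA) average of a list of marginals, evaluated on a common grid of 'len' points.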
fitmargBMA<-function(margs, ws, len=100)
{
ws<-ws/sum(ws)
xmin <- quantile((unlist(lapply(margs, function(X){min(X[,1])}))), 0.25)
xmax <- quantile(unlist(lapply(margs, function(X){max(X[,1])})), 0.75)
xx<-seq(xmin, xmax, len=len)
margsws<-lapply(1:length(margs), function(i){
func<-fitmarg(margs[[i]][,1], log(margs[[i]][,2]))
ws[i]*func(xx)
})
margsws<-do.call(cbind, margsws)
d<-data.frame(x=xx, y=apply(margsws, 1, sum))
names(d)<-c("x", "y")
return(d)
}
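# Weighted averages of model components: fitmatrixBMA for matrix/data.frame summaries,
# fitlistBMA for list-valued elements such as dic and cpo.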
fitmatrixBMA<-function(models, ws, item)
{
lmatrix<-lapply(models, function(X){X[[item]]})
auxbma<-ws[1]*lmatrix[[1]]
for(i in 2:length(lmatrix)){auxbma<-auxbma+ws[i]*lmatrix[[i]]}
return(auxbma)
}
fitlistBMA<-function(models, ws, item)
{
nlist<-names(models[[1]][[item]])
auxlist<-as.list(rep(NA, length(nlist)))
names(auxlist)<-nlist
for(ele in nlist)
{
lmatrix<-lapply(models, function(X){X[[item]][[ele]]})
auxbma<-ws[1]*lmatrix[[1]]
for(i in 2:length(lmatrix)){auxbma<-auxbma+ws[i]*lmatrix[[i]]}
auxlist[[ele]]<-auxbma
}
return(auxlist)
}
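# Weighted average of lists of marginals (two-column x/y matrices): each marginal is
# re-evaluated by spline on a common grid before averaging.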
fitmargBMA2<-function(models, ws, item)
{
if(is.null(models[[1]][[item]]))
return(NULL)
nlist<-names(models[[1]][[item]])
auxlist<-as.list(rep(NA, length(nlist)))
names(auxlist)<-nlist
for(ele in nlist)
{
lmatrix<-lapply(models, function(X){X[[item]][[ele]]})
xxr<-c(NA, NA)
xxr[1]<-sum(ws*unlist(lapply(lmatrix,function(X){min(X[,1])})))
xxr[2]<-sum(ws*unlist(lapply(lmatrix,function(X){max(X[,1])})))
xx<-seq(xxr[1], xxr[2], length.out=81)
auxbma<-rep(0, length(xx))
for(i in 1:length(lmatrix)){
auxspl<- mysplinefun(x=lmatrix[[i]][,1], y=lmatrix[[i]][,2])
auxbma<-auxbma+ws[i]*auxspl(xx)
}
auxlist[[ele]]<-cbind(xx, auxbma)
}
return(auxlist)
}
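# BMA of fitted values across models fitted at different values of rho, with weights
# proportional to the marginal posterior of rho.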
BMArho<-function(models, rho, logrhoprior=rep(1, length(rho)) )
{
mlik<-unlist(lapply(models, function(X){X$mlik[1]}))
post.func <- fitmarg(rho, mlik, logrhoprior)
ws<-(post.func(rho))
ws<-ws/sum(ws)
fvalues<-mclapply(1:length(models), function(X){ws[X]*models[[X]]$summary.fitted.values[,1]})
fvalues<-data.frame(fvalues)
fvalues<-apply(fvalues, 1, sum)
return(fvalues)
}
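# Bayesian model averaging of a list of INLA models indexed by rho: averages the summary
# matrices, dic/cpo, and posterior marginals, and optionally the impact summaries.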
INLABMA<-function(models, rho, logrhoprior=rep(1, length(rho)), impacts=FALSE, usenormal=FALSE )
{
mlik<-unlist(lapply(models, function(X){X$mlik[1]}))
post.func <- fitmarg(rho, mlik, logrhoprior, usenormal)
ws<-(post.func(rho))
ws<-ws/sum(ws)
mfit<-list(rho=list())
mfit$rho$marginal<-data.frame(x=seq(min(rho), max(rho), len=100))
mfit$rho$marginal$y<-post.func(mfit$rho$marginal$x)
mfit$rho$marginal<-as.matrix(mfit$rho$marginal)
margsum <- INLA::inla.zmarginal(mfit$rho$marginal, TRUE)
mfit$rho$mean<-margsum$mean
mfit$rho$sd<-margsum$sd
mfit$rho$quantiles<-unlist(margsum[-c(1:2)])
mateff<-c("summary.fixed", "summary.lincomb",
"summary.linear.predictor", "summary.fitted.values")
lmat<-mclapply(mateff, function(X){fitmatrixBMA(models, ws, X)})
names(lmat)<-mateff
mfit<-c(mfit, lmat)
listeff<-c("dic", "cpo")
leff<-mclapply(listeff, function(X){fitlistBMA(models, ws, X)})
names(leff)<-listeff
mfit<-c(mfit, leff)
listmarg<-c("marginals.fixed", "marginals.lincomb",
"marginals.lincomb.derived",
"marginals.linear.predictor",
"marginals.hyperpar", "marginals.spde2.blc")
margeff<-mclapply(listmarg, function(X){fitmargBMA2(models, ws, X)})
names(margeff)<-listmarg
mfit<-c(mfit, margeff)
mfit$impacts<-FALSE
if(impacts)
{
mfit$impacts<-TRUE
summimp<-c("summary.total.impacts", "summary.direct.impacts",
"summary.indirect.impacts")
matsummimp<-mclapply(summimp, function(X){fitmatrixBMA(models, ws, X)})
names(matsummimp)<-summimp
mfit<-c(mfit, matsummimp)
margimp<-c("marginals.total.impacts","marginals.direct.impacts",
"marginals.indirect.impacts")
lmargimp<-mclapply(margimp, function(X){fitmargBMA2(models, ws, X)})
names(lmargimp)<-margimp
mfit<-c(mfit, lmargimp)
mfit<-recompute.impacts(mfit)
}
return(mfit)
} |
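# Retrieve metadata records linked to records of 'target_modelName' by tracing the model
# linkage path(s) through the project template, then merge the metadata onto the
# target-record identifier map.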
retrieveMetadata <- function(
target,
projectName,
meta_modelName,
meta_attributeNames = "all",
target_modelName,
target_recordNames = "all",
...) {
temp <- retrieveTemplate(target, projectName)
paths <- .obtain_linkage_paths(target_modelName, meta_modelName, temp)
separate_branches <- length(paths) == 2
target_id_map <- .map_identifiers_by_path(
target = target,
path = paths$target_path,
projectName = projectName,
...)
if (!identical(target_recordNames, "all")) {
target_id_map <- target_id_map[target_id_map[,1] %in% target_recordNames,]
}
if (separate_branches) {
meta_id_map <- .map_identifiers_by_path(
target = target,
path = paths$meta_path,
projectName = projectName,
...)
meta_id_map <- meta_id_map[
meta_id_map[,ncol(meta_id_map)] %in%
target_id_map[,ncol(target_id_map)],
]
meta_record_names <- meta_id_map[,1, drop = TRUE]
} else {
meta_record_names <- target_id_map[,ncol(target_id_map), drop = TRUE]
meta_record_names <- meta_record_names[!is.na(meta_record_names)]
}
meta_raw <- retrieve(
target = target, projectName = projectName, modelName = meta_modelName,
recordNames = meta_record_names,
attributeNames = meta_attributeNames)
meta_id_col_name <- temp$models[[meta_modelName]]$template$identifier
id_col_index <- match(meta_id_col_name, colnames(meta_raw))
meta <- if (separate_branches) {
.expand_metadata_to_have_1row_per_id(meta_raw, meta_id_map, id_col_index)
} else {
meta_raw
}
if (separate_branches) {
meta <- .trim_duplicated_columns(meta, meta_id_map, meta_id_col_name)
        meta <- merge(
meta_id_map, meta,
by.x = colnames(meta_id_map)[1],
all.y = TRUE,
by.y = meta_id_col_name)
meta_id_col_name <- colnames(meta_id_map)[ncol(meta_id_map)]
}
meta <- .trim_duplicated_columns(meta, target_id_map, meta_id_col_name)
output <- merge(
target_id_map, meta,
all.x = TRUE,
by.x = colnames(target_id_map)[ncol(target_id_map)],
by.y = meta_id_col_name)
output
}
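# Walk parent links from a model up to the top-level "project" model, returning the path.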
.trace_model_to_proj <- function(
target_modelName,
template) {
path <- target_modelName
while (tail(path,1)!="project") {
path <- c(path, template$models[[tail(path,1)]]$template$parent)
}
path
}
.obtain_linkage_paths <- function(
target_modelName,
meta_modelName,
template) {
if (target_modelName == meta_modelName) {
return(NA)
}
target_path <- .trace_model_to_proj(target_modelName, template)
if (meta_modelName %in% target_path) {
ind <- match(meta_modelName, target_path)
return(list(
target_path = target_path[seq_len(ind)]
))
} else {
meta_path <- .trace_model_to_proj(meta_modelName, template)
target_ind <- min(match(meta_path, target_path), na.rm = TRUE)
meta_ind <- match(target_path[target_ind], meta_path)
return(list(
target_path = target_path[seq_len(target_ind)],
meta_path = meta_path[seq_len(meta_ind)]
))
}
}
.map_identifiers_by_path <- function(
target, path, projectName, ...) {
ids <- query(
target = target,
projectName = projectName,
queryTerms =
list(path[1],
'::all',
path[2],
'::identifier'),
format = "df",
...)
ind <- 2
while (ind < length(path)) {
new_id_map <- query(
target = target,
projectName = projectName,
queryTerms =
list(path[ind],
'::all',
path[ind+1],
'::identifier'),
format = "df",
...)
        # each record should map upwards to exactly one parent identifier
        if (any(duplicated(new_id_map[,1]))) {
            stop("Algorithm issue: mappings going upwards not unique")
        }
order <- match(ids[,2], new_id_map[,1])
order <- order[!is.na(order)]
new_ids <- array(NA, dim = nrow(ids))
new_ids[ids[,2] %in% new_id_map[,1]] <- new_id_map[order ,2]
ids <- cbind(ids, new_ids)
ind <- ind + 1
}
colnames(ids) <- path
ids
}
.expand_metadata_to_have_1row_per_id <- function(meta_raw, meta_id_map, id_col_index) {
linker_ids <- meta_id_map[,ncol(meta_id_map)]
if (any(duplicated(linker_ids))) {
inds <- match(unique(linker_ids), linker_ids)
meta <- meta_raw[inds,]
meta_left <- meta_raw[-inds,]
linker_ids_left <- linker_ids[-inds]
for (i in 2:max(table(linker_ids))) {
inds <- match(unique(linker_ids_left), linker_ids_left)
next_meta <- meta_left[inds,]
colnames(next_meta) <- paste(colnames(next_meta), i, sep="_")
colnames(next_meta)[id_col_index] <- colnames(meta)[id_col_index]
meta <- merge(meta, next_meta, all.x = TRUE, by = colnames(meta)[id_col_index])
meta_left <- meta_left[-inds,]
linker_ids_left <- linker_ids_left[-inds]
}
} else {
meta <- meta_raw
}
meta
}
.trim_duplicated_columns <- function(meta, target_id_map, meta_id_col_name) {
potential_columns <- c(colnames(meta), colnames(target_id_map))
if (any(duplicated(potential_columns))) {
ind <- which(duplicated(potential_columns, fromLast = TRUE))
ind <- setdiff(ind, grep(meta_id_col_name, colnames(meta)))
if (length(ind)>0) {
meta <- meta[,-ind]
}
}
meta
} |
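# Thin R wrappers around the Python 'upit' CycleGAN API (accessed through upit());
# count-like arguments are coerced with as.integer() before being handed to Python.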
convT_norm_relu <- function(ch_in, ch_out, norm_layer, ks = 3, stride = 2, bias = TRUE) {
args = list(
ch_in = as.integer(ch_in),
ch_out = as.integer(ch_out),
norm_layer = norm_layer,
ks = as.integer(ks),
stride = as.integer(stride),
bias = bias
)
do.call(upit()$models$cyclegan$convT_norm_relu, args)
}
pad_conv_norm_relu <- function(ch_in, ch_out, pad_mode, norm_layer, ks = 3,
bias = TRUE, pad = 1, stride = 1, activ = TRUE,
init = nn()$init$kaiming_normal_, init_gain = 0.02) {
args <- list(
ch_in = as.integer(ch_in),
ch_out = as.integer(ch_out),
pad_mode = pad_mode,
norm_layer = norm_layer,
ks = as.integer(ks),
bias = bias,
pad = as.integer(pad),
stride = as.integer(stride),
activ = activ,
init = init,
init_gain = init_gain
)
do.call(upit()$models$cyclegan$pad_conv_norm_relu, args)
}
ResnetBlock <- function(dim, pad_mode = "reflection", norm_layer = NULL, dropout = 0.0, bias = TRUE) {
if(missing( dim) ) {
upit()$models$cyclegan$ResnetBlock
} else {
args <- list(
dim = as.integer(dim),
pad_mode = pad_mode,
norm_layer = norm_layer,
dropout = dropout,
bias = bias
)
do.call(upit()$models$cyclegan$ResnetBlock, args)
}
}
resnet_generator <- function(ch_in, ch_out, n_ftrs = 64, norm_layer = NULL,
dropout = 0.0, n_blocks = 9, pad_mode = "reflection") {
args <- list(
ch_in = as.integer(ch_in),
ch_out = as.integer(ch_out),
n_ftrs = as.integer(n_ftrs),
norm_layer = norm_layer,
dropout = dropout,
n_blocks = as.integer(n_blocks),
pad_mode = pad_mode
)
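  # assigning NULL drops the entry from 'args', so the Python-side default norm_layer is used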
if(is.null(args$norm_layer))
args$norm_layer <- NULL
do.call(upit()$models$cyclegan$resnet_generator, args)
}
conv_norm_lr <- function(ch_in, ch_out, norm_layer = NULL, ks = 3, bias = TRUE,
pad = 1, stride = 1, activ = TRUE, slope = 0.2,
init = nn()$init$normal_, init_gain = 0.02) {
args <- list(
ch_in = as.integer(ch_in),
ch_out = as.integer(ch_out),
norm_layer = norm_layer,
ks = as.integer(ks),
bias = bias,
pad = as.integer(pad),
stride = as.integer(stride),
activ = activ,
slope = slope,
init = init,
init_gain = init_gain
)
if(is.null(args$norm_layer))
args$norm_layer <- NULL
do.call(upit()$models$cyclegan$conv_norm_lr, args)
}
discriminator <- function(ch_in, n_ftrs = 64, n_layers = 3, norm_layer = NULL, sigmoid = FALSE) {
args = list(
ch_in = as.integer(ch_in),
n_ftrs = as.integer(n_ftrs),
n_layers = as.integer(n_layers),
norm_layer = norm_layer,
sigmoid = sigmoid
)
if(is.null(args$norm_layer))
args$norm_layer <- NULL
do.call(upit()$models$cyclegan$discriminator,args)
}
CycleGAN <- function(ch_in = 3, ch_out = 3, n_features = 64, disc_layers = 3,
gen_blocks = 9, lsgan = TRUE, drop = 0.0, norm_layer = NULL) {
args <- list(
ch_in = as.integer(ch_in),
ch_out = as.integer(ch_out),
n_features = as.integer(n_features),
disc_layers = as.integer(disc_layers),
gen_blocks = as.integer(gen_blocks),
lsgan = lsgan,
drop = drop,
norm_layer = norm_layer
)
if(is.null(args$norm_layer))
args$norm_layer <- NULL
do.call(upit()$models$cyclegan$CycleGAN, args)
}
RandPair <- function(itemsB) {
upit()$data$unpaired$RandPair(
itemsB = itemsB
)
}
get_dls <- function(pathA, pathB, num_A = NULL, num_B = NULL,
load_size = 512, crop_size = 256, bs = 4,
num_workers = 2) {
args <- list(
pathA = pathA,
pathB = pathB,
num_A = num_A,
num_B = num_B,
load_size = as.integer(load_size),
crop_size = as.integer(crop_size),
bs = as.integer(bs),
num_workers = as.integer(num_workers)
)
if(!is.null(args[['num_A']]))
args[['num_A']] = as.integer(args[['num_A']])
else
args[['num_A']] <- NULL
if(!is.null(args[['num_B']]))
args[['num_B']] = as.integer(args[['num_B']])
else
args[['num_B']] <- NULL
do.call(upit()$data$unpaired$get_dls, args)
}
CycleGANLoss <- function(cgan, l_A = 10.0, l_B = 10, l_idt = 0.5, lsgan = TRUE) {
upit()$train$cyclegan$CycleGANLoss(
cgan = cgan,
l_A = l_A,
l_B = l_B,
l_idt = l_idt,
lsgan = lsgan
)
}
CycleGANTrainer = function(...) {
args = list(...)
do.call(upit()$train$cyclegan$CycleGANTrainer, args)
}
ShowCycleGANImgsCallback <- function(imgA = FALSE, imgB = TRUE, show_img_interval = 10) {
upit()$train$cyclegan$ShowCycleGANImgsCallback(
imgA = imgA,
imgB = imgB,
show_img_interval = as.integer(show_img_interval)
)
}
combined_flat_anneal <- function(pct, start_lr, end_lr = 0, curve_type = "linear") {
upit()$train$cyclegan$combined_flat_anneal(
pct = pct,
start_lr = start_lr,
end_lr = end_lr,
curve_type = curve_type
)
}
cycle_learner <- function(dls, m, opt_func = Adam(), show_imgs = TRUE,
imgA = TRUE, imgB = TRUE, show_img_interval = 10,
...) {
args <- list(
dls = dls,
m = m,
opt_func = opt_func,
show_imgs = show_imgs,
imgA = imgA,
imgB = imgB,
show_img_interval = as.integer( show_img_interval),
...
)
do.call(upit()$train$cyclegan$cycle_learner, args)
}
URLs_HORSE_2_ZEBRA <- function(filename = 'horse2zebra', unzip = TRUE) {
download.file('https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets/horse2zebra.zip',
destfile = paste(filename,'.zip',sep = ''))
if(unzip)
unzip(paste(filename,'.zip',sep = ''))
}
FolderDataset <- function(path, transforms = NULL) {
args = list(
path = path,
transforms = transforms
)
if(is.null(args$transforms))
args$transforms <- NULL
do.call(upit()$inference$cyclegan$FolderDataset, args)
}
load_dataset <- function(test_path, bs = 4, num_workers = 4) {
upit()$inference$cyclegan$load_dataset(
test_path = test_path,
bs = as.integer(bs),
num_workers = as.integer(num_workers)
)
}
get_preds_cyclegan <- function(learn, test_path, pred_path, bs = 4, num_workers = 4, suffix = "tif") {
upit()$inference$cyclegan$get_preds_cyclegan(
learn = learn,
test_path = test_path,
pred_path = pred_path,
bs = as.integer(bs),
num_workers = as.integer(num_workers),
suffix = suffix
)
}
export_generator <- function(learn, generator_name = "generator", path = '.', convert_to = "B") {
upit()$inference$cyclegan$export_generator(
learn = learn,
generator_name = generator_name,
path = path,
convert_to = convert_to
)
} |
context("powerSignificance")
test_that("numeric test for powerSignificance(): 1", {
expect_equal(object = powerSignificance(zo = qnorm(p = 1 - 0.05/2),
c = 1, level = 0.05,
alternative = "two.sided"),
expected = 0.5,
tol = 0.0001)
expect_equal(object = powerSignificance(zo = qnorm(p = 1 - 0.0056/2),
c = 1, level = 0.05,
alternative = "two.sided"),
expected = 0.791,
tol = 0.001)
})
test_that("numeric test for powerSignificance(): 2", {
zo <- seq(-1, 1, 1)
apply_grid <- expand.grid(priors = c("conditional", "predictive", "EB"),
c = c(0.5, 2),
h = c(0, 1),
alt = c("two.sided", "one.sided"),
shrinkage = c(0, 0.5),
stringsAsFactors = FALSE)
out <- lapply(X=seq_len(nrow(apply_grid)), FUN=function(i){
powerSignificance(zo = zo,
c = apply_grid$c[i],
level = 0.05,
designPrior = apply_grid$priors[i],
alternative = apply_grid$alt[i],
h = apply_grid$h[i],
shrinkage = apply_grid$shrinkage[i])
})
expect_equal(out,
list(c(0.105128839609805, 0.025, 0.105128839609805),
c(0.153164887002923, 0.0547655919261876,
0.153164887002923),
c(0.025, 0.025, 0.025),
c(0.29261875345421, 0.025, 0.29261875345421),
c(0.376346981956041, 0.128904320268035, 0.376346981956041),
c(0.025, 0.025, 0.025),
c(0.105128839609805, 0.025, 0.105128839609805),
c(0.214070597849871, 0.107563528680747, 0.214070597849871),
c(0.0547655919261876, 0.0547655919261876, 0.0547655919261876),
c(0.29261875345421, 0.025, 0.29261875345421),
c(0.418288337676037, 0.229408341649887, 0.418288337676037),
c(0.128904320268035, 0.128904320268035, 0.128904320268035),
c(0.174187261617932, 0.05, 0.174187261617932),
c(0.221937205879032, 0.08963317592404, 0.221937205879032),
c(0.05, 0.05, 0.05),
c(0.40879721979387, 0.05, 0.40879721979387),
c(0.447033381345704, 0.171143362995444, 0.447033381345704),
c(0.05, 0.05, 0.05),
c(0.174187261617932, 0.05, 0.174187261617932),
c(0.276562743130718, 0.149101020851907, 0.276562743130718),
c(0.08963317592404, 0.08963317592404, 0.08963317592404),
c(0.40879721979387, 0.05, 0.40879721979387),
c(0.465266702002727, 0.26707081412311, 0.465266702002727),
c(0.171143362995444, 0.171143362995444, 0.171143362995444),
c(0.0540918623618536, 0.025, 0.0540918623618536),
c(0.0948227125608762, 0.0547655919261876, 0.0948227125608762),
c(0.025, 0.025, 0.025),
c(0.105128839609805, 0.025, 0.105128839609805),
c(0.234736296915959, 0.128904320268035, 0.234736296915959),
c(0.025, 0.025, 0.025),
c(0.0540918623618536, 0.025, 0.0540918623618536),
c(0.154818677461375, 0.107563528680747, 0.154818677461375),
c(0.0547655919261876, 0.0547655919261876,
0.0547655919261876),
c(0.105128839609805, 0.025, 0.105128839609805),
c(0.317915585520223, 0.229408341649887, 0.317915585520223),
c(0.128904320268035, 0.128904320268035, 0.128904320268035),
c(0.0982997943579869, 0.05, 0.0982997943579869),
c(0.145863130961703, 0.08963317592404, 0.145863130961703),
c(0.05, 0.05, 0.05),
c(0.174187261617932, 0.05, 0.174187261617932),
c(0.294113061454605, 0.171143362995444, 0.294113061454605),
c(0.05, 0.05, 0.05),
c(0.0982997943579869, 0.05, 0.0982997943579869),
c(0.207052810036395, 0.149101020851907, 0.207052810036395),
c(0.08963317592404, 0.08963317592404, 0.08963317592404),
c(0.174187261617932, 0.05, 0.174187261617932),
c(0.361506457143659, 0.26707081412311, 0.361506457143659),
c(0.171143362995444, 0.171143362995444, 0.171143362995444)))
}) |
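# Draw a randomly perturbed copy of the model parameters held in 'datastore', using the
# standard deviations supplied in 'annealing.parms'; groups of prey-preference parameters
# are renormalised to sum to 1 after perturbation.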
perturb_parameters <- function(datastore, annealing.parms, toppredlock=FALSE) {
Prefsd <- elt(annealing.parms, "Prefsd")
u_sd <- elt(annealing.parms, "u_sd")
h_sd <- elt(annealing.parms, "h_sd")
biogeo_sd <- elt(annealing.parms, "biogeo_sd")
mort_sd <- elt(annealing.parms, "mort_sd")
ressd <- elt(annealing.parms, "ressd")
PREF_NIT_kelp <- elt(datastore, "PREF_NIT_kelp")
PREF_AMM_kelp <- elt(datastore, "PREF_AMM_kelp")
PREF_NIT_phyt <- elt(datastore, "PREF_NIT_phyt")
PREF_AMM_phyt <- elt(datastore, "PREF_AMM_phyt")
PREF_phyt_omni <- elt(datastore, "PREF_phyt_omni")
PREF_det_omni <- elt(datastore, "PREF_det_omni")
PREF_benthslar_omni <- elt(datastore, "PREF_benthslar_omni")
PREF_benthclar_omni <- elt(datastore, "PREF_benthclar_omni")
PREF_omni_carn <- elt(datastore, "PREF_omni_carn")
PREF_benthslar_carn <- elt(datastore, "PREF_benthslar_carn")
PREF_benthclar_carn <- elt(datastore, "PREF_benthclar_carn")
PREF_fishplar_carn <- elt(datastore, "PREF_fishplar_carn")
PREF_fishdlar_carn <- elt(datastore, "PREF_fishdlar_carn")
PREF_omni_fishplar <- elt(datastore, "PREF_omni_fishplar")
PREF_benthslar_fishplar <- elt(datastore, "PREF_benthslar_fishplar")
PREF_benthclar_fishplar <- elt(datastore, "PREF_benthclar_fishplar")
PREF_omni_fishp <- elt(datastore, "PREF_omni_fishp")
PREF_carn_fishp <- elt(datastore, "PREF_carn_fishp")
PREF_benthslar_fishp <- elt(datastore, "PREF_benthslar_fishp")
PREF_benthclar_fishp <- elt(datastore, "PREF_benthclar_fishp")
PREF_fishdlar_fishp <- elt(datastore, "PREF_fishdlar_fishp")
PREF_fishplar_fishp <- elt(datastore, "PREF_fishplar_fishp")
PREF_omni_fishm <- elt(datastore, "PREF_omni_fishm")
PREF_carn_fishm <- elt(datastore, "PREF_carn_fishm")
PREF_benthslar_fishm <- elt(datastore, "PREF_benthslar_fishm")
PREF_benthclar_fishm <- elt(datastore, "PREF_benthclar_fishm")
PREF_fishdlar_fishm <- elt(datastore, "PREF_fishdlar_fishm")
PREF_fishplar_fishm <- elt(datastore, "PREF_fishplar_fishm")
PREF_omni_fishdlar <- elt(datastore, "PREF_omni_fishdlar")
PREF_benthslar_fishdlar <- elt(datastore, "PREF_benthslar_fishdlar")
PREF_benthclar_fishdlar <- elt(datastore, "PREF_benthclar_fishdlar")
PREF_carn_fishd <- elt(datastore, "PREF_carn_fishd")
PREF_benths_fishd <- elt(datastore, "PREF_benths_fishd")
PREF_benthc_fishd <- elt(datastore, "PREF_benthc_fishd")
PREF_fishplar_fishd <- elt(datastore, "PREF_fishplar_fishd")
PREF_fishdlar_fishd <- elt(datastore, "PREF_fishdlar_fishd")
PREF_fishp_fishd <- elt(datastore, "PREF_fishp_fishd")
PREF_fishm_fishd <- elt(datastore, "PREF_fishm_fishd")
PREF_fishd_fishd <- elt(datastore, "PREF_fishd_fishd")
PREF_disc_fishd <- elt(datastore, "PREF_disc_fishd")
PREF_corp_fishd <- elt(datastore, "PREF_corp_fishd")
PREF_phyt_benthslar <- elt(datastore, "PREF_phyt_benthslar")
PREF_phyt_benthclar <- elt(datastore, "PREF_phyt_benthclar")
PREF_det_benthslar <- elt(datastore, "PREF_det_benthslar")
PREF_det_benthclar <- elt(datastore, "PREF_det_benthclar")
PREF_phyt_benths <- elt(datastore, "PREF_phyt_benths")
PREF_det_benths <- elt(datastore, "PREF_det_benths")
PREF_sed_benths <- elt(datastore, "PREF_sed_benths")
PREF_kelp_benthc <- elt(datastore, "PREF_kelp_benthc")
PREF_kelpdebris_benthc <- elt(datastore, "PREF_kelpdebris_benthc")
PREF_benths_benthc <- elt(datastore, "PREF_benths_benthc")
PREF_corp_benthc <- elt(datastore, "PREF_corp_benthc")
PREF_carn_bird <- elt(datastore, "PREF_carn_bird")
PREF_benths_bird <- elt(datastore, "PREF_benths_bird")
PREF_benthc_bird <- elt(datastore, "PREF_benthc_bird")
PREF_fishp_bird <- elt(datastore, "PREF_fishp_bird")
PREF_fishm_bird <- elt(datastore, "PREF_fishm_bird")
PREF_fishd_bird <- elt(datastore, "PREF_fishd_bird")
PREF_disc_bird <- elt(datastore, "PREF_disc_bird")
PREF_corp_bird <- elt(datastore, "PREF_corp_bird")
PREF_carn_seal <- elt(datastore, "PREF_carn_seal")
PREF_benths_seal <- elt(datastore, "PREF_benths_seal")
PREF_benthc_seal <- elt(datastore, "PREF_benthc_seal")
PREF_fishp_seal <- elt(datastore, "PREF_fishp_seal")
PREF_fishm_seal <- elt(datastore, "PREF_fishm_seal")
PREF_fishd_seal <- elt(datastore, "PREF_fishd_seal")
PREF_bird_seal <- elt(datastore, "PREF_bird_seal")
PREF_disc_seal <- elt(datastore, "PREF_disc_seal")
PREF_corp_seal <- elt(datastore, "PREF_corp_seal")
PREF_omni_ceta <- elt(datastore, "PREF_omni_ceta")
PREF_carn_ceta <- elt(datastore, "PREF_carn_ceta")
PREF_benths_ceta <- elt(datastore, "PREF_benths_ceta")
PREF_benthc_ceta <- elt(datastore, "PREF_benthc_ceta")
PREF_fishp_ceta <- elt(datastore, "PREF_fishp_ceta")
PREF_fishm_ceta <- elt(datastore, "PREF_fishm_ceta")
PREF_fishd_ceta <- elt(datastore, "PREF_fishd_ceta")
PREF_bird_ceta <- elt(datastore, "PREF_bird_ceta")
PREF_seal_ceta <- elt(datastore, "PREF_seal_ceta")
PREF_disc_ceta <- elt(datastore, "PREF_disc_ceta")
uC_kelp <- elt(datastore, "uC_kelp")
ddexudC_kelp <- elt(datastore, "ddexudC_kelp")
u_kelp <- elt(datastore, "u_kelp")
u_phyt <- elt(datastore, "u_phyt")
u_omni <- elt(datastore, "u_omni")
u_carn <- elt(datastore, "u_carn")
u_fishplar <- elt(datastore, "u_fishplar")
u_fishp <- elt(datastore, "u_fishp")
u_fishm <- elt(datastore, "u_fishm")
u_fishdlar <- elt(datastore, "u_fishdlar")
u_fishd <- elt(datastore, "u_fishd")
u_benthslar <- elt(datastore, "u_benthslar")
u_benthclar <- elt(datastore, "u_benthclar")
u_benths <- elt(datastore, "u_benths")
u_benthc <- elt(datastore, "u_benthc")
u_bird <- elt(datastore, "u_bird")
u_seal <- elt(datastore, "u_seal")
u_ceta <- elt(datastore, "u_ceta")
h_kelp <- elt(datastore, "h_kelp")
h_phyt <- elt(datastore, "h_phyt")
h_omni <- elt(datastore, "h_omni")
h_carn <- elt(datastore, "h_carn")
h_fishplar <- elt(datastore, "h_fishplar")
h_fishp <- elt(datastore, "h_fishp")
h_fishm <- elt(datastore, "h_fishm")
h_fishdlar <- elt(datastore, "h_fishdlar")
h_fishd <- elt(datastore, "h_fishd")
h_benthslar <- elt(datastore, "h_benthslar")
h_benthclar <- elt(datastore, "h_benthclar")
h_benths <- elt(datastore, "h_benths")
h_benthc <- elt(datastore, "h_benthc")
h_bird <- elt(datastore, "h_bird")
h_seal <- elt(datastore, "h_seal")
h_ceta <- elt(datastore, "h_ceta")
bda_par_bird <- elt(datastore, "bda_par_bird")
bda_par_seal <- elt(datastore, "bda_par_seal")
bda_par_ceta <- elt(datastore, "bda_par_ceta")
xmt <- elt(datastore, "xmt")
xnst <- elt(datastore, "xnst")
xdst <- elt(datastore, "xdst")
xndt <- elt(datastore, "xndt")
xddt <- elt(datastore, "xddt")
xqs_p1 <- elt(datastore, "xqs_p1")
xqs_p2 <- elt(datastore, "xqs_p2")
xqs_p3 <- elt(datastore, "xqs_p3")
xmsedt <- elt(datastore, "xmsedt")
xmsens <- elt(datastore, "xmsens")
xnsedt <- elt(datastore, "xnsedt")
xnsens <- elt(datastore, "xnsens")
xdsedt <- elt(datastore, "xdsedt")
xdsens <- elt(datastore, "xdsens")
xxwave_kelp <- elt(datastore, "xxwave_kelp")
xxst <- elt(datastore, "xxst")
xxdt <- elt(datastore, "xxdt")
xxomni <- elt(datastore, "xxomni")
xxcarn <- elt(datastore, "xxcarn")
xxbenthslar <- elt(datastore, "xxbenthslar")
xxbenthclar <- elt(datastore, "xxbenthclar")
xxbenths <- elt(datastore, "xxbenths")
xxbenthc <- elt(datastore, "xxbenthc")
xxpfishlar <- elt(datastore, "xxpfishlar")
xxdfishlar <- elt(datastore, "xxdfishlar")
xxpfish <- elt(datastore, "xxpfish")
xxmfish <- elt(datastore, "xxmfish")
xxdfish <- elt(datastore, "xxdfish")
xxbird <- elt(datastore, "xxbird")
xxseal <- elt(datastore, "xxseal")
xxceta <- elt(datastore, "xxceta")
xxcorp_det <- elt(datastore, "xxcorp_det")
xdisc_corp <- elt(datastore, "xdisc_corp")
xkelpdebris_det <- elt(datastore, "xkelpdebris_det")
xdsink_s <- elt(datastore, "xdsink_s")
xdsink_d <- elt(datastore, "xdsink_d")
xkelpshade <- elt(datastore, "xkelpshade")
xwave_kelpdebris <- elt(datastore, "xwave_kelpdebris")
xdfdp <- elt(datastore, "xdfdp")
xpfish_migcoef <- elt(datastore, "xpfish_migcoef")
xmfish_migcoef <- elt(datastore, "xmfish_migcoef")
xdfish_migcoef <- elt(datastore, "xdfish_migcoef")
xbird_migcoef <- elt(datastore, "xbird_migcoef")
xseal_migcoef <- elt(datastore, "xseal_migcoef")
xceta_migcoef <- elt(datastore, "xceta_migcoef")
xmax_exploitable_f_KP <- elt(datastore, "xmax_exploitable_f_KP")
xmax_exploitable_f_PF <- elt(datastore, "xmax_exploitable_f_PF")
xmax_exploitable_f_DF <- elt(datastore, "xmax_exploitable_f_DF")
xmax_exploitable_f_MF <- elt(datastore, "xmax_exploitable_f_MF")
xmax_exploitable_f_SB <- elt(datastore, "xmax_exploitable_f_SB")
xmax_exploitable_f_CB <- elt(datastore, "xmax_exploitable_f_CB")
xmax_exploitable_f_CZ <- elt(datastore, "xmax_exploitable_f_CZ")
xmax_exploitable_f_BD <- elt(datastore, "xmax_exploitable_f_BD")
xmax_exploitable_f_SL <- elt(datastore, "xmax_exploitable_f_SL")
xmax_exploitable_f_CT <- elt(datastore, "xmax_exploitable_f_CT")
annual_obj <- elt(datastore, "annual_obj")
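# When Prefsd > 0, perturb each consumer's preference parameters and renormalise each
# preference set so it sums to 1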
if(Prefsd>0){
PREF_NIT_kelpx<-max(0,rnorm(1,PREF_NIT_kelp,Prefsd*PREF_NIT_kelp))
PREF_AMM_kelpx<-max(0,rnorm(1,PREF_AMM_kelp,Prefsd*PREF_AMM_kelp))
prefsum<-PREF_NIT_kelpx+PREF_AMM_kelpx
PREF_NIT_kelp<-PREF_NIT_kelpx/prefsum
PREF_AMM_kelp<-PREF_AMM_kelpx/prefsum
PREF_NIT_phytx<-max(0,rnorm(1,PREF_NIT_phyt,Prefsd*PREF_NIT_phyt))
PREF_AMM_phytx<-max(0,rnorm(1,PREF_AMM_phyt,Prefsd*PREF_AMM_phyt))
prefsum<-PREF_NIT_phytx+PREF_AMM_phytx
PREF_NIT_phyt<-PREF_NIT_phytx/prefsum
PREF_AMM_phyt<-PREF_AMM_phytx/prefsum
PREF_phyt_omnix<-max(0,rnorm(1,PREF_phyt_omni,Prefsd*PREF_phyt_omni))
PREF_det_omnix<-max(0,rnorm(1,PREF_det_omni,Prefsd*PREF_det_omni))
PREF_benthslar_omnix<-max(0,rnorm(1,PREF_benthslar_omni,Prefsd*PREF_benthslar_omni))
PREF_benthclar_omnix<-max(0,rnorm(1,PREF_benthclar_omni,Prefsd*PREF_benthclar_omni))
prefsum<-PREF_phyt_omnix+PREF_det_omnix+PREF_benthslar_omnix+PREF_benthclar_omnix
PREF_phyt_omni<-PREF_phyt_omnix/prefsum
PREF_det_omni<-PREF_det_omnix/prefsum
PREF_benthslar_omni<-PREF_benthslar_omnix/prefsum
PREF_benthclar_omni<-PREF_benthclar_omnix/prefsum
PREF_omni_carnx<-max(0,rnorm(1,PREF_omni_carn,Prefsd*PREF_omni_carn))
PREF_fishplar_carnx<-max(0,rnorm(1,PREF_fishplar_carn,Prefsd*PREF_fishplar_carn))
PREF_fishdlar_carnx<-max(0,rnorm(1,PREF_fishdlar_carn,Prefsd*PREF_fishdlar_carn))
PREF_benthslar_carnx<-max(0,rnorm(1,PREF_benthslar_carn,Prefsd*PREF_benthslar_carn))
PREF_benthclar_carnx<-max(0,rnorm(1,PREF_benthclar_carn,Prefsd*PREF_benthclar_carn))
prefsum<-PREF_omni_carnx+PREF_fishplar_carnx+PREF_fishdlar_carnx+PREF_benthslar_carnx+PREF_benthclar_carnx
PREF_omni_carn<-PREF_omni_carnx/prefsum
PREF_fishplar_carn<-PREF_fishplar_carnx/prefsum
PREF_fishdlar_carn<-PREF_fishdlar_carnx/prefsum
PREF_benthslar_carn<-PREF_benthslar_carnx/prefsum
PREF_benthclar_carn<-PREF_benthclar_carnx/prefsum
PREF_omni_fishplarx<-max(0,rnorm(1,PREF_omni_fishplar,Prefsd*PREF_omni_fishplar))
PREF_benthslar_fishplarx<-max(0,rnorm(1,PREF_benthslar_fishplar,Prefsd*PREF_benthslar_fishplar))
PREF_benthclar_fishplarx<-max(0,rnorm(1,PREF_benthclar_fishplar,Prefsd*PREF_benthclar_fishplar))
prefsum<-PREF_omni_fishplarx+PREF_benthslar_fishplarx+PREF_benthclar_fishplarx
PREF_omni_fishplar<-PREF_omni_fishplarx/prefsum
PREF_benthslar_fishplar<-PREF_benthslar_fishplarx/prefsum
PREF_benthclar_fishplar<-PREF_benthclar_fishplarx/prefsum
PREF_omni_fishpx<-max(0,rnorm(1,PREF_omni_fishp,Prefsd*PREF_omni_fishp))
PREF_carn_fishpx<-max(0,rnorm(1,PREF_carn_fishp,Prefsd*PREF_carn_fishp))
PREF_fishdlar_fishpx<-max(0,rnorm(1,PREF_fishdlar_fishp,Prefsd*PREF_fishdlar_fishp))
PREF_fishplar_fishpx<-max(0,rnorm(1,PREF_fishplar_fishp,Prefsd*PREF_fishplar_fishp))
PREF_benthslar_fishpx<-max(0,rnorm(1,PREF_benthslar_fishp,Prefsd*PREF_benthslar_fishp))
PREF_benthclar_fishpx<-max(0,rnorm(1,PREF_benthclar_fishp,Prefsd*PREF_benthclar_fishp))
prefsum<-PREF_omni_fishpx+PREF_carn_fishpx+PREF_fishdlar_fishpx+PREF_fishplar_fishpx+PREF_benthslar_fishpx+PREF_benthclar_fishpx
PREF_omni_fishp<-PREF_omni_fishpx/prefsum
PREF_carn_fishp<-PREF_carn_fishpx/prefsum
PREF_fishdlar_fishp<-PREF_fishdlar_fishpx/prefsum
PREF_fishplar_fishp<-PREF_fishplar_fishpx/prefsum
PREF_benthslar_fishp<-PREF_benthslar_fishpx/prefsum
PREF_benthclar_fishp<-PREF_benthclar_fishpx/prefsum
PREF_omni_fishmx<-max(0,rnorm(1,PREF_omni_fishm,Prefsd*PREF_omni_fishm))
PREF_carn_fishmx<-max(0,rnorm(1,PREF_carn_fishm,Prefsd*PREF_carn_fishm))
PREF_fishdlar_fishmx<-max(0,rnorm(1,PREF_fishdlar_fishm,Prefsd*PREF_fishdlar_fishm))
PREF_fishplar_fishmx<-max(0,rnorm(1,PREF_fishplar_fishm,Prefsd*PREF_fishplar_fishm))
PREF_benthslar_fishmx<-max(0,rnorm(1,PREF_benthslar_fishm,Prefsd*PREF_benthslar_fishm))
PREF_benthclar_fishmx<-max(0,rnorm(1,PREF_benthclar_fishm,Prefsd*PREF_benthclar_fishm))
prefsum<-PREF_omni_fishmx+PREF_carn_fishmx+PREF_benthslar_fishmx+PREF_benthclar_fishmx+PREF_fishdlar_fishmx+PREF_fishplar_fishmx
PREF_omni_fishm<-PREF_omni_fishmx/prefsum
PREF_carn_fishm<-PREF_carn_fishmx/prefsum
PREF_fishdlar_fishm<-PREF_fishdlar_fishmx/prefsum
PREF_fishplar_fishm<-PREF_fishplar_fishmx/prefsum
PREF_benthslar_fishm<-PREF_benthslar_fishmx/prefsum
PREF_benthclar_fishm<-PREF_benthclar_fishmx/prefsum
PREF_omni_fishdlarx<-max(0,rnorm(1,PREF_omni_fishdlar,Prefsd*PREF_omni_fishdlar))
PREF_benthslar_fishdlarx<-max(0,rnorm(1,PREF_benthslar_fishdlar,Prefsd*PREF_benthslar_fishdlar))
PREF_benthclar_fishdlarx<-max(0,rnorm(1,PREF_benthclar_fishdlar,Prefsd*PREF_benthclar_fishdlar))
prefsum<-PREF_omni_fishdlarx+PREF_benthslar_fishdlarx+PREF_benthclar_fishdlarx
PREF_omni_fishdlar<-PREF_omni_fishdlarx/prefsum
PREF_benthslar_fishdlar<-PREF_benthslar_fishdlarx/prefsum
PREF_benthclar_fishdlar<-PREF_benthclar_fishdlarx/prefsum
PREF_carn_fishdx<-max(0,rnorm(1,PREF_carn_fishd,Prefsd*PREF_carn_fishd))
PREF_benths_fishdx<-max(0,rnorm(1,PREF_benths_fishd,Prefsd*PREF_benths_fishd))
PREF_benthc_fishdx<-max(0,rnorm(1,PREF_benthc_fishd,Prefsd*PREF_benthc_fishd))
PREF_fishplar_fishdx<-max(0,rnorm(1,PREF_fishplar_fishd,Prefsd*PREF_fishplar_fishd))
PREF_fishdlar_fishdx<-max(0,rnorm(1,PREF_fishdlar_fishd,Prefsd*PREF_fishdlar_fishd))
PREF_fishp_fishdx<-max(0,rnorm(1,PREF_fishp_fishd,Prefsd*PREF_fishp_fishd))
PREF_fishm_fishdx<-max(0,rnorm(1,PREF_fishm_fishd,Prefsd*PREF_fishm_fishd))
PREF_fishd_fishdx<-max(0,rnorm(1,PREF_fishd_fishd,Prefsd*PREF_fishd_fishd))
PREF_disc_fishdx<-max(0,rnorm(1,PREF_disc_fishd,Prefsd*PREF_disc_fishd))
PREF_corp_fishdx<-max(0,rnorm(1,PREF_corp_fishd,Prefsd*PREF_corp_fishd))
prefsum<-PREF_carn_fishdx+PREF_benths_fishdx+PREF_benthc_fishdx+PREF_fishplar_fishdx+PREF_fishdlar_fishdx+PREF_fishp_fishdx+PREF_fishm_fishdx+PREF_fishd_fishdx+PREF_disc_fishdx+PREF_corp_fishdx
PREF_carn_fishd<-PREF_carn_fishdx/prefsum
PREF_benths_fishd<-PREF_benths_fishdx/prefsum
PREF_benthc_fishd<-PREF_benthc_fishdx/prefsum
PREF_fishplar_fishd<-PREF_fishplar_fishdx/prefsum
PREF_fishdlar_fishd<-PREF_fishdlar_fishdx/prefsum
PREF_fishp_fishd<-PREF_fishp_fishdx/prefsum
PREF_fishm_fishd<-PREF_fishm_fishdx/prefsum
PREF_fishd_fishd<-PREF_fishd_fishdx/prefsum
PREF_disc_fishd<-PREF_disc_fishdx/prefsum
PREF_corp_fishd<-PREF_corp_fishdx/prefsum
PREF_phyt_benthslarx<-max(0,rnorm(1,PREF_phyt_benthslar,Prefsd*PREF_phyt_benthslar))
PREF_det_benthslarx<-max(0,rnorm(1,PREF_det_benthslar,Prefsd*PREF_det_benthslar))
prefsum<-PREF_phyt_benthslarx+PREF_det_benthslarx
PREF_phyt_benthslar<-PREF_phyt_benthslarx/prefsum
PREF_det_benthslar<-PREF_det_benthslarx/prefsum
PREF_phyt_benthclarx<-max(0,rnorm(1,PREF_phyt_benthclar,Prefsd*PREF_phyt_benthclar))
PREF_det_benthclarx<-max(0,rnorm(1,PREF_det_benthclar,Prefsd*PREF_det_benthclar))
prefsum<-PREF_phyt_benthclarx+PREF_det_benthclarx
PREF_phyt_benthclar<-PREF_phyt_benthclarx/prefsum
PREF_det_benthclar<-PREF_det_benthclarx/prefsum
PREF_phyt_benthsx<-max(0,rnorm(1,PREF_phyt_benths,Prefsd*PREF_phyt_benths))
PREF_det_benthsx<-max(0,rnorm(1,PREF_det_benths,Prefsd*PREF_det_benths))
PREF_sed_benthsx<-max(0,rnorm(1,PREF_sed_benths,Prefsd*PREF_sed_benths))
prefsum<-PREF_phyt_benthsx+PREF_det_benthsx+PREF_sed_benthsx
PREF_phyt_benths<-PREF_phyt_benthsx/prefsum
PREF_det_benths<-PREF_det_benthsx/prefsum
PREF_sed_benths<-PREF_sed_benthsx/prefsum
PREF_phyt_benths_lim<-0.25
if(PREF_phyt_benths>PREF_phyt_benths_lim) {
PREFdif<-PREF_phyt_benths - PREF_phyt_benths_lim
PREF_phyt_benths<-PREF_phyt_benths_lim
PREF_det_benths_t <- PREF_det_benths + (PREFdif * PREF_det_benths/(PREF_det_benths+PREF_sed_benths))
PREF_sed_benths_t <- PREF_sed_benths + (PREFdif * PREF_sed_benths/(PREF_det_benths+PREF_sed_benths))
PREF_det_benths<-PREF_det_benths_t
PREF_sed_benths<-PREF_sed_benths_t}
PREF_kelp_benthcx<-max(0,rnorm(1,PREF_kelp_benthc,Prefsd*PREF_kelp_benthc))
PREF_kelpdebris_benthcx<-max(0,rnorm(1,PREF_kelpdebris_benthc,Prefsd*PREF_kelpdebris_benthc))
PREF_benths_benthcx<-max(0,rnorm(1,PREF_benths_benthc,Prefsd*PREF_benths_benthc))
PREF_corp_benthcx<-max(0,rnorm(1,PREF_corp_benthc,Prefsd*PREF_corp_benthc))
prefsum<-PREF_benths_benthcx+PREF_corp_benthcx+PREF_kelp_benthcx+PREF_kelpdebris_benthcx
PREF_kelp_benthc<-PREF_kelp_benthcx/prefsum
PREF_kelpdebris_benthc<-PREF_kelpdebris_benthcx/prefsum
PREF_benths_benthc<-PREF_benths_benthcx/prefsum
PREF_corp_benthc<-PREF_corp_benthcx/prefsum
PREF_kelp_benthc_lim<-0.05
PREF_kelpdebris_benthc_lim<-0.01
if(PREF_kelp_benthc>PREF_kelp_benthc_lim || PREF_kelpdebris_benthc>PREF_kelpdebris_benthc_lim) {
PREFdif<-0
if(PREF_kelp_benthc>PREF_kelp_benthc_lim){
PREFdif<-PREFdif + (PREF_kelp_benthc - PREF_kelp_benthc_lim)
PREF_kelp_benthc<-PREF_kelp_benthc_lim }
if(PREF_kelpdebris_benthc>PREF_kelpdebris_benthc_lim){
PREFdif<-PREFdif + (PREF_kelpdebris_benthc - PREF_kelpdebris_benthc_lim)
PREF_kelpdebris_benthc<-PREF_kelpdebris_benthc_lim }
PREF_benths_benthc_t <- PREF_benths_benthc + (PREFdif * PREF_benths_benthc/(PREF_benths_benthc+PREF_corp_benthc+PREF_kelp_benthc+PREF_kelpdebris_benthc))
PREF_corp_benthc_t <- PREF_corp_benthc + (PREFdif * PREF_corp_benthc/(PREF_benths_benthc+PREF_corp_benthc+PREF_kelp_benthc+PREF_kelpdebris_benthc))
PREF_benths_benthc<-PREF_benths_benthc_t
PREF_corp_benthc<-PREF_corp_benthc_t}
if (toppredlock == FALSE) {
PREF_carn_birdx<-max(0,rnorm(1,PREF_carn_bird,Prefsd*PREF_carn_bird))
PREF_benths_birdx<-max(0,rnorm(1,PREF_benths_bird,Prefsd*PREF_benths_bird))
PREF_benthc_birdx<-max(0,rnorm(1,PREF_benthc_bird,Prefsd*PREF_benthc_bird))
PREF_fishp_birdx<-max(0,rnorm(1,PREF_fishp_bird,Prefsd*PREF_fishp_bird))
PREF_fishm_birdx<-max(0,rnorm(1,PREF_fishm_bird,Prefsd*PREF_fishm_bird))
PREF_fishd_birdx<-max(0,rnorm(1,PREF_fishd_bird,Prefsd*PREF_fishd_bird))
PREF_disc_birdx<-max(0,rnorm(1,PREF_disc_bird,Prefsd*PREF_disc_bird))
PREF_corp_birdx<-max(0,rnorm(1,PREF_corp_bird,Prefsd*PREF_corp_bird))
prefsum<-PREF_carn_birdx+PREF_benths_birdx+PREF_benthc_birdx+PREF_fishp_birdx+PREF_fishm_birdx+PREF_fishd_birdx+PREF_disc_birdx+PREF_corp_birdx
PREF_carn_bird<-PREF_carn_birdx/prefsum
PREF_benths_bird<-PREF_benths_birdx/prefsum
PREF_benthc_bird<-PREF_benthc_birdx/prefsum
PREF_fishp_bird<-PREF_fishp_birdx/prefsum
PREF_fishm_bird<-PREF_fishm_birdx/prefsum
PREF_fishd_bird<-PREF_fishd_birdx/prefsum
PREF_disc_bird<-PREF_disc_birdx/prefsum
PREF_corp_bird<-PREF_corp_birdx/prefsum
PREF_carn_sealx<-max(0,rnorm(1,PREF_carn_seal,Prefsd*PREF_carn_seal))
PREF_benths_sealx<-max(0,rnorm(1,PREF_benths_seal,Prefsd*PREF_benths_seal))
PREF_benthc_sealx<-max(0,rnorm(1,PREF_benthc_seal,Prefsd*PREF_benthc_seal))
PREF_fishp_sealx<-max(0,rnorm(1,PREF_fishp_seal,Prefsd*PREF_fishp_seal))
PREF_fishm_sealx<-max(0,rnorm(1,PREF_fishm_seal,Prefsd*PREF_fishm_seal))
PREF_fishd_sealx<-max(0,rnorm(1,PREF_fishd_seal,Prefsd*PREF_fishd_seal))
PREF_bird_sealx<-max(0,rnorm(1,PREF_bird_seal,Prefsd*PREF_bird_seal))
PREF_disc_sealx<-max(0,rnorm(1,PREF_disc_seal,Prefsd*PREF_disc_seal))
PREF_corp_sealx<-max(0,rnorm(1,PREF_corp_seal,Prefsd*PREF_corp_seal))
prefsum<-PREF_carn_sealx+PREF_benths_sealx+PREF_benthc_sealx+PREF_fishp_sealx+PREF_fishm_sealx+PREF_fishd_sealx+PREF_bird_sealx+PREF_disc_sealx+PREF_corp_sealx
PREF_carn_seal<-PREF_carn_sealx/prefsum
PREF_benths_seal<-PREF_benths_sealx/prefsum
PREF_benthc_seal<-PREF_benthc_sealx/prefsum
PREF_fishp_seal<-PREF_fishp_sealx/prefsum
PREF_fishm_seal<-PREF_fishm_sealx/prefsum
PREF_fishd_seal<-PREF_fishd_sealx/prefsum
PREF_bird_seal<-PREF_bird_sealx/prefsum
PREF_disc_seal<-PREF_disc_sealx/prefsum
PREF_corp_seal<-PREF_corp_sealx/prefsum
PREF_omni_cetax<-max(0,rnorm(1,PREF_omni_ceta,Prefsd*PREF_omni_ceta))
PREF_carn_cetax<-max(0,rnorm(1,PREF_carn_ceta,Prefsd*PREF_carn_ceta))
PREF_benths_cetax<-max(0,rnorm(1,PREF_benths_ceta,Prefsd*PREF_benths_ceta))
PREF_benthc_cetax<-max(0,rnorm(1,PREF_benthc_ceta,Prefsd*PREF_benthc_ceta))
PREF_fishp_cetax<-max(0,rnorm(1,PREF_fishp_ceta,Prefsd*PREF_fishp_ceta))
PREF_fishm_cetax<-max(0,rnorm(1,PREF_fishm_ceta,Prefsd*PREF_fishm_ceta))
PREF_fishd_cetax<-max(0,rnorm(1,PREF_fishd_ceta,Prefsd*PREF_fishd_ceta))
PREF_bird_cetax<-max(0,rnorm(1,PREF_bird_ceta,Prefsd*PREF_bird_ceta))
PREF_seal_cetax<-max(0,rnorm(1,PREF_seal_ceta,Prefsd*PREF_seal_ceta))
PREF_disc_cetax<-max(0,rnorm(1,PREF_disc_ceta,Prefsd*PREF_disc_ceta))
prefsum<-PREF_omni_cetax+PREF_carn_cetax+PREF_benths_cetax+PREF_benthc_cetax+PREF_fishp_cetax+PREF_fishm_cetax+PREF_fishd_cetax+PREF_bird_cetax+PREF_seal_cetax+PREF_disc_cetax
PREF_omni_ceta<-PREF_omni_cetax/prefsum
PREF_carn_ceta<-PREF_carn_cetax/prefsum
PREF_benths_ceta<-PREF_benths_cetax/prefsum
PREF_benthc_ceta<-PREF_benthc_cetax/prefsum
PREF_fishp_ceta<-PREF_fishp_cetax/prefsum
PREF_fishm_ceta<-PREF_fishm_cetax/prefsum
PREF_fishd_ceta<-PREF_fishd_cetax/prefsum
PREF_bird_ceta<-PREF_bird_cetax/prefsum
PREF_seal_ceta<-PREF_seal_cetax/prefsum
PREF_disc_ceta<-PREF_disc_cetax/prefsum
}
}
prefstore<-list(PREF_NIT_kelp,PREF_AMM_kelp,
PREF_NIT_phyt,PREF_AMM_phyt,PREF_phyt_omni,PREF_det_omni,PREF_benthslar_omni,PREF_benthclar_omni,PREF_omni_carn,PREF_benthslar_carn,PREF_benthclar_carn,PREF_fishplar_carn,PREF_fishdlar_carn,
PREF_omni_fishplar,PREF_benthslar_fishplar,PREF_benthclar_fishplar,
PREF_omni_fishp,PREF_carn_fishp,PREF_benthslar_fishp,PREF_benthclar_fishp,PREF_fishdlar_fishp,PREF_fishplar_fishp,
PREF_omni_fishm,PREF_carn_fishm,PREF_benthslar_fishm,PREF_benthclar_fishm,PREF_fishdlar_fishm,PREF_fishplar_fishm,
PREF_omni_fishdlar,PREF_benthslar_fishdlar,PREF_benthclar_fishdlar,
PREF_carn_fishd,PREF_benths_fishd,PREF_benthc_fishd,PREF_fishplar_fishd,PREF_fishdlar_fishd,PREF_fishp_fishd,PREF_fishm_fishd,PREF_fishd_fishd,PREF_disc_fishd,PREF_corp_fishd,
PREF_phyt_benthslar,PREF_phyt_benthclar,
PREF_det_benthslar,PREF_det_benthclar,
PREF_phyt_benths,PREF_det_benths,PREF_sed_benths,
PREF_kelp_benthc,PREF_kelpdebris_benthc,PREF_benths_benthc,PREF_corp_benthc,
PREF_carn_bird,PREF_benths_bird,PREF_benthc_bird,PREF_fishp_bird,PREF_fishm_bird,PREF_fishd_bird,PREF_disc_bird,PREF_corp_bird,
PREF_carn_seal,PREF_benths_seal,PREF_benthc_seal,PREF_fishp_seal,PREF_fishm_seal,PREF_fishd_seal,PREF_bird_seal,PREF_disc_seal,PREF_corp_seal,
PREF_omni_ceta,PREF_carn_ceta,PREF_benths_ceta,PREF_benthc_ceta,PREF_fishp_ceta,PREF_fishm_ceta,PREF_fishd_ceta,PREF_bird_ceta,PREF_seal_ceta,PREF_disc_ceta)
names(prefstore)<-c("PREF_NIT_kelp","PREF_AMM_kelp",
"PREF_NIT_phyt","PREF_AMM_phyt","PREF_phyt_omni","PREF_det_omni","PREF_benthslar_omni","PREF_benthclar_omni","PREF_omni_carn","PREF_benthslar_carn","PREF_benthclar_carn","PREF_fishplar_carn","PREF_fishdlar_carn",
"PREF_omni_fishplar","PREF_benthslar_fishplar","PREF_benthclar_fishplar",
"PREF_omni_fishp","PREF_carn_fishp","PREF_benthslar_fishp","PREF_benthclar_fishp","PREF_fishdlar_fishp","PREF_fishplar_fishp",
"PREF_omni_fishm","PREF_carn_fishm","PREF_benthslar_fishm","PREF_benthclar_fishm","PREF_fishdlar_fishm","PREF_fishplar_fishm",
"PREF_omni_fishdlar","PREF_benthslar_fishdlar","PREF_benthclar_fishdlar",
"PREF_carn_fishd","PREF_benths_fishd","PREF_benthc_fishd","PREF_fishplar_fishd","PREF_fishdlar_fishd","PREF_fishp_fishd","PREF_fishm_fishd","PREF_fishd_fishd","PREF_disc_fishd","PREF_corp_fishd",
"PREF_phyt_benthslar","PREF_phyt_benthclar",
"PREF_det_benthslar","PREF_det_benthclar",
"PREF_phyt_benths","PREF_det_benths","PREF_sed_benths",
"PREF_kelp_benthc","PREF_kelpdebris_benthc","PREF_benths_benthc","PREF_corp_benthc",
"PREF_carn_bird","PREF_benths_bird","PREF_benthc_bird","PREF_fishp_bird","PREF_fishm_bird","PREF_fishd_bird","PREF_disc_bird","PREF_corp_bird",
"PREF_carn_seal","PREF_benths_seal","PREF_benthc_seal","PREF_fishp_seal","PREF_fishm_seal","PREF_fishd_seal","PREF_bird_seal","PREF_disc_seal","PREF_corp_seal",
"PREF_omni_ceta","PREF_carn_ceta","PREF_benths_ceta","PREF_benthc_ceta","PREF_fishp_ceta","PREF_fishm_ceta","PREF_fishd_ceta","PREF_bird_ceta","PREF_seal_ceta","PREF_disc_ceta")
if(u_sd>0){
uC_kelp<-max(0,rnorm(1,uC_kelp,uC_kelp*u_sd))
ddexudC_kelp<-max(0,rnorm(1,ddexudC_kelp,ddexudC_kelp*u_sd))
u_kelp<-max(0,rnorm(1,u_kelp,u_kelp*u_sd))
u_phyt<-max(0,rnorm(1,u_phyt,u_phyt*u_sd))
u_omni<-max(0,rnorm(1,u_omni,u_omni*u_sd))
u_carn<-max(0,rnorm(1,u_carn,u_carn*u_sd))
u_fishplar<-max(0,rnorm(1,u_fishplar,u_fishplar*u_sd))
u_fishp<-max(0,rnorm(1,u_fishp,u_fishp*u_sd))
u_fishm<-max(0,rnorm(1,u_fishm,u_fishm*u_sd))
u_fishdlar<-max(0,rnorm(1,u_fishdlar,u_fishdlar*u_sd))
u_fishd<-max(0,rnorm(1,u_fishd,u_fishd*u_sd))
u_benthslar<-max(0,rnorm(1,u_benthslar,u_benthslar*u_sd))
u_benthclar<-max(0,rnorm(1,u_benthclar,u_benthclar*u_sd))
u_benths<-max(0,rnorm(1,u_benths,u_benths*u_sd))
u_benthc<-max(0,rnorm(1,u_benthc,u_benthc*u_sd))
if (toppredlock == FALSE) {
u_bird<-max(0,rnorm(1,u_bird,u_bird*u_sd))
u_seal<-max(0,rnorm(1,u_seal,u_seal*u_sd))
u_ceta<-max(0,rnorm(1,u_ceta,u_ceta*u_sd))
}
}
ustore<-list(uC_kelp,ddexudC_kelp,u_kelp,u_phyt,u_omni,u_carn,u_fishplar,u_fishp,u_fishm,u_fishdlar,u_fishd,u_benthslar,u_benthclar,u_benths,u_benthc,u_bird,u_seal,u_ceta)
names(ustore)<-c("uC_kelp","ddexudC_kelp","u_kelp","u_phyt","u_omni","u_carn","u_fishplar","u_fishp","u_fishm","u_fishdlar","u_fishd","u_benthslar","u_benthclar","u_benths","u_benthc","u_bird","u_seal","u_ceta")
if(h_sd>0){
h_kelp<-max(0,rnorm(1,h_kelp,h_kelp*h_sd))
h_phyt<-max(0,rnorm(1,h_phyt,h_phyt*h_sd))
h_omni<-max(0,rnorm(1,h_omni,h_omni*h_sd))
h_carn<-max(0,rnorm(1,h_carn,h_carn*h_sd))
h_fishplar<-max(0,rnorm(1,h_fishplar,h_fishplar*h_sd))
h_fishp<-max(0,rnorm(1,h_fishp,h_fishp*h_sd))
h_fishm<-max(0,rnorm(1,h_fishm,h_fishm*h_sd))
h_fishdlar<-max(0,rnorm(1,h_fishdlar,h_fishdlar*h_sd))
h_fishd<-max(0,rnorm(1,h_fishd,h_fishd*h_sd))
h_benthslar<-max(0,rnorm(1,h_benthslar,h_benthslar*h_sd))
h_benthclar<-max(0,rnorm(1,h_benthclar,h_benthclar*h_sd))
h_benths<-max(0,rnorm(1,h_benths,h_benths*h_sd))
h_benthc<-max(0,rnorm(1,h_benthc,h_benthc*h_sd))
if (toppredlock == FALSE) {
h_bird<-max(0,rnorm(1,h_bird,h_bird*h_sd))
h_seal<-max(0,rnorm(1,h_seal,h_seal*h_sd))
h_ceta<-max(0,rnorm(1,h_ceta,h_ceta*h_sd))
bda_par_bird<-max(0,rnorm(1,bda_par_bird,bda_par_bird*h_sd))
bda_par_seal<-max(0,rnorm(1,bda_par_seal,bda_par_seal*h_sd))
bda_par_ceta<-max(0,rnorm(1,bda_par_ceta,bda_par_ceta*h_sd))
}
}
hstore<-list(h_kelp,h_phyt,h_omni,h_carn,h_fishplar,h_fishp,h_fishm,h_fishdlar,h_fishd,h_benthslar,h_benthclar,h_benths,h_benthc,h_bird,h_seal,h_ceta,bda_par_bird,bda_par_seal,bda_par_ceta)
names(hstore)<-c("h_kelp","h_phyt","h_omni","h_carn","h_fishplar","h_fishp","h_fishm","h_fishdlar","h_fishd","h_benthslar","h_benthclar","h_benths","h_benthc","h_bird","h_seal","h_ceta","bda_par_bird","bda_par_seal","bda_par_ceta")
if(biogeo_sd>0){
xmt<-max(0,rnorm(1,xmt,xmt*biogeo_sd))
xnst<-max(0,rnorm(1,xnst,xnst*biogeo_sd))
xdst<-max(0,rnorm(1,xdst,xdst*biogeo_sd))
xndt<-max(0,rnorm(1,xndt,xndt*biogeo_sd))
xddt<-max(0,rnorm(1,xddt,xddt*biogeo_sd))
xqs_p1<-max(0,rnorm(1,xqs_p1,xqs_p1*biogeo_sd))
qsp1lim<-0.5
if(xqs_p1>qsp1lim) {xqs_p1<-qsp1lim}
if(xqs_p1<0) {xqs_p1<-0}
xqs_p2<-max(0,rnorm(1,xqs_p2,xqs_p2*biogeo_sd))
qsp2lim<-0.001
if(xqs_p2>qsp2lim) {xqs_p2<-qsp2lim}
if(xqs_p2<0) {xqs_p2<-0}
xqs_p3<-max(0,rnorm(1,xqs_p3,xqs_p3*biogeo_sd))
qsp3lim<-0.025
if(xqs_p3<qsp3lim) {xqs_p3<-qsp3lim}
if(xqs_p3<0) {xqs_p3<-0}
xmsedt<-max(0,rnorm(1,xmsedt,xmsedt*biogeo_sd))
if(xmsedt>0.015) {xmsedt<-0.015}
if((xqs_p1*xmsedt)>0.2) {xqs_p1 <- 0.2/xmsedt}
xmsens<--1*xmsens
xmsens<-max(0,rnorm(1,xmsens,xmsens*biogeo_sd))
xmsens<--1*xmsens
xnsedt<-max(0,rnorm(1,xnsedt,xnsedt*biogeo_sd))
xnsens<--1*xnsens
xnsens<-max(0,rnorm(1,xnsens,xnsens*biogeo_sd))
xnsens<--1*xnsens
xdsedt<-max(0,rnorm(1,xdsedt,xdsedt*biogeo_sd))
xdsens<-max(0,rnorm(1,xdsens,xdsens*biogeo_sd))
xdsink_s<-max(0,rnorm(1,xdsink_s,xdsink_s*biogeo_sd))
xdsink_d<-max(0,rnorm(1,xdsink_d,xdsink_d*biogeo_sd))
if(xdsink_s>1) xdsink_s<-1
if(xdsink_d<0) xdsink_d<-0
xkelpdebris_det<- max(0,rnorm(1,xkelpdebris_det,xkelpdebris_det*biogeo_sd))
xxcorp_det<-max(0,rnorm(1,xxcorp_det,xxcorp_det*biogeo_sd))
if(xxcorp_det>0.5) xxcorp_det<-0.5
xdisc_corp<-max(0,rnorm(1,xdisc_corp,xdisc_corp*biogeo_sd))
if(xdisc_corp>0.7) xdisc_corp<-0.7
}
biogeostore<-list(xmt,xnst,xdst,xndt,xddt,xqs_p1,xqs_p2,xqs_p3,xmsedt,xmsens,xnsedt,xnsens,xdsedt,xdsens,xdsink_s,xdsink_d,xkelpdebris_det,xxcorp_det,xdisc_corp)
names(biogeostore)<-c("xmt","xnst","xdst","xndt","xddt","xqs_p1","xqs_p2","xqs_p3","xmsedt","xmsens","xnsedt","xnsens","xdsedt","xdsens","xdsink_s","xdsink_d","xkelpdebris_det","xxcorp_det","xdisc_corp")
if(mort_sd>0){
xxwave_kelp<-max(0,rnorm(1,xxwave_kelp,xxwave_kelp*mort_sd))
xxst<-max(0,rnorm(1,xxst,xxst*mort_sd))
xxdt<-max(0,rnorm(1,xxdt,xxdt*mort_sd))
xxomni<-max(0,rnorm(1,xxomni,xxomni*mort_sd))
xxcarn<-max(0,rnorm(1,xxcarn,xxcarn*mort_sd))
xxbenthslar<-max(0,rnorm(1,xxbenthslar,xxbenthslar*mort_sd))
xxbenthclar<-max(0,rnorm(1,xxbenthclar,xxbenthclar*mort_sd))
xxbenths<-max(0,rnorm(1,xxbenths,xxbenths*mort_sd))
xxbenthc<-max(0,rnorm(1,xxbenthc,xxbenthc*mort_sd))
xxpfishlar<-max(0,rnorm(1,xxpfishlar,xxpfishlar*mort_sd))
xxdfishlar<-max(0,rnorm(1,xxdfishlar,xxdfishlar*mort_sd))
xxpfish<-max(0,rnorm(1,xxpfish,xxpfish*mort_sd))
xxmfish<-max(0,rnorm(1,xxmfish,xxmfish*mort_sd))
xxdfish<-max(0,rnorm(1,xxdfish,xxdfish*mort_sd))
if (toppredlock == FALSE) {
xxbird<-max(0,rnorm(1,xxbird,xxbird*mort_sd))
xxseal<-max(0,rnorm(1,xxseal,xxseal*mort_sd))
xxceta<-max(0,rnorm(1,xxceta,xxceta*mort_sd))
}
}
mortstore<-list(xxwave_kelp,xxst,xxdt,xxomni,xxcarn,xxbenthslar,xxbenthclar,xxbenths,xxbenthc,xxpfishlar,xxdfishlar,xxpfish,xxmfish,xxdfish,xxbird,xxseal,xxceta)
names(mortstore)<-c("xxwave_kelp","xxst","xxdt","xxomni","xxcarn","xxbenthslar","xxbenthclar","xxbenths","xxbenthc","xxpfishlar","xxdfishlar","xxpfish","xxmfish","xxdfish","xxbird","xxseal","xxceta")
if(ressd>0){
xkelpshade<- max(0,rnorm(1,xkelpshade,xkelpshade*ressd))
xwave_kelpdebris<- max(0,rnorm(1,xwave_kelpdebris,xwave_kelpdebris*ressd))
xdfdp<-max(0,rnorm(1,xdfdp,xdfdp*ressd))
xpfish_migcoef <- max(0,rnorm(1,xpfish_migcoef,xpfish_migcoef*ressd))
xmfish_migcoef <- max(0,rnorm(1,xmfish_migcoef,xmfish_migcoef*ressd))
xdfish_migcoef <- max(0,rnorm(1,xdfish_migcoef,xdfish_migcoef*ressd))
xbird_migcoef <- max(0,rnorm(1,xbird_migcoef,xbird_migcoef*ressd))
xseal_migcoef <- max(0,rnorm(1,xseal_migcoef,xseal_migcoef*ressd))
xceta_migcoef <- max(0,rnorm(1,xceta_migcoef,xceta_migcoef*ressd))
if(xpfish_migcoef>0.01) xpfish_migcoef<-0.01
if(xmfish_migcoef>0.01) xmfish_migcoef<-0.01
if(xdfish_migcoef>0.01) xdfish_migcoef<-0.01
if(xbird_migcoef >0.01) xbird_migcoef <-0.01
if(xseal_migcoef >0.01) xseal_migcoef <-0.01
if(xceta_migcoef >0.01) xceta_migcoef <-0.01
xmax_exploitable_f_KP <- max(0,rnorm(1,xmax_exploitable_f_KP,xmax_exploitable_f_KP*ressd))
if(xmax_exploitable_f_KP>0.5) xmax_exploitable_f_KP<-0.5
xmax_exploitable_f_PF <- max(0,rnorm(1,xmax_exploitable_f_PF,xmax_exploitable_f_PF*ressd))
if(xmax_exploitable_f_PF>1) xmax_exploitable_f_PF<-1
xmax_exploitable_f_DF <- max(0,rnorm(1,xmax_exploitable_f_DF,xmax_exploitable_f_DF*ressd))
if(xmax_exploitable_f_DF>1) xmax_exploitable_f_DF<-1
xmax_exploitable_f_MF <- max(0,rnorm(1,xmax_exploitable_f_MF,xmax_exploitable_f_MF*ressd))
if(xmax_exploitable_f_MF>1) xmax_exploitable_f_MF<-1
xmax_exploitable_f_SB <- max(0,rnorm(1,xmax_exploitable_f_SB,xmax_exploitable_f_SB*ressd))
if(xmax_exploitable_f_SB>0.5) xmax_exploitable_f_SB<-0.5
xmax_exploitable_f_CB <- max(0,rnorm(1,xmax_exploitable_f_CB,xmax_exploitable_f_CB*ressd))
if(xmax_exploitable_f_CB>0.5) xmax_exploitable_f_CB<-0.5
xmax_exploitable_f_CZ <- max(0,rnorm(1,xmax_exploitable_f_CZ,xmax_exploitable_f_CZ*ressd))
if(xmax_exploitable_f_CZ>0.5) xmax_exploitable_f_CZ<-0.5
xmax_exploitable_f_BD <- max(0,rnorm(1,xmax_exploitable_f_BD,xmax_exploitable_f_BD*ressd))
if(xmax_exploitable_f_BD>0.5) xmax_exploitable_f_BD<-0.5
xmax_exploitable_f_SL <- max(0,rnorm(1,xmax_exploitable_f_SL,xmax_exploitable_f_SL*ressd))
if(xmax_exploitable_f_SL>0.5) xmax_exploitable_f_SL<-0.5
xmax_exploitable_f_CT <- max(0,rnorm(1,xmax_exploitable_f_CT,xmax_exploitable_f_CT*ressd))
if(xmax_exploitable_f_CT>0.5) xmax_exploitable_f_CT<-0.5
}
reststore<-list(xkelpshade,xwave_kelpdebris,xdfdp,
xpfish_migcoef,
xmfish_migcoef,
xdfish_migcoef,
xbird_migcoef,
xseal_migcoef,
xceta_migcoef,
xmax_exploitable_f_KP,
xmax_exploitable_f_PF,
xmax_exploitable_f_DF,
xmax_exploitable_f_MF,
xmax_exploitable_f_SB,
xmax_exploitable_f_CB,
xmax_exploitable_f_CZ,
xmax_exploitable_f_BD,
xmax_exploitable_f_SL,
xmax_exploitable_f_CT)
names(reststore)<-c("xkelpshade","xwave_kelpdebris","xdfdp",
"xpfish_migcoef",
"xmfish_migcoef",
"xdfish_migcoef",
"xbird_migcoef",
"xseal_migcoef",
"xceta_migcoef",
"xmax_exploitable_f_KP",
"xmax_exploitable_f_PF",
"xmax_exploitable_f_DF",
"xmax_exploitable_f_MF",
"xmax_exploitable_f_SB",
"xmax_exploitable_f_CB",
"xmax_exploitable_f_CZ",
"xmax_exploitable_f_BD",
"xmax_exploitable_f_SL",
"xmax_exploitable_f_CT")
perturbed <- c(
prefstore,
ustore,
hstore,
biogeostore,
mortstore,
reststore,
"annual_obj" = annual_obj
)
}
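## Illustrative sketch, not part of the original source: every preference block
## above follows the same recipe -- jitter each preference with a normal draw
## whose sd is proportional to the baseline value, floor at zero, then
## renormalise so the predator's preferences sum to one. The helper below is a
## hypothetical stand-alone version of that pattern.
if (FALSE) {
  jitter_prefs <- function(prefs, sd_frac) {
    x <- vapply(prefs, function(p) max(0, rnorm(1, mean = p, sd = sd_frac * p)),
                numeric(1))
    x / sum(x)
  }
  set.seed(1)
  jitter_prefs(c(omni = 0.2, carn = 0.5, fishlar = 0.3), sd_frac = 0.1)
}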
regressionMetrics <- function(trues,preds,
metrics=NULL,
train.y=NULL)
{
if (!is.null(dim(preds))) stop("regressionMetrics:: expecting a vector as predictions.")
knownMetrics <- c('mae','mse','rmse','mape','nmse','nmae','theil')
if (is.null(metrics))
metrics <- if (is.null(train.y)) setdiff(knownMetrics,c("nmse","nmae")) else knownMetrics
if (any(c('nmse','nmae') %in% metrics) && is.null(train.y))
stop('regressionMetrics:: train.y parameter not specified.',call.=FALSE)
if (!all(metrics %in% knownMetrics))
stop("regressionMetrics:: don't know how to calculate -> ",call.=FALSE,
paste(metrics[which(!(metrics %in% knownMetrics))],collapse=','))
if (length(preds) != length(trues)) {
warning("regressionMetrics:: less predictions than test cases, filling with NAs.")
t <- trues
t[] <- NA
t[names(preds)] <- preds
preds <- t
}
N <- length(trues)
sae <- sum(abs(trues-preds))
sse <- sum((trues-preds)^2)
r <- c(mae=sae/N,mse=sse/N,rmse=sqrt(sse/N),mape=sum(abs((trues-preds)/trues))/N)
if (!is.null(train.y))
r <- c(r,c(nmse=sse/sum((trues-mean(train.y))^2),
theil=sum((trues-preds)^2)/sum((c(train.y[length(train.y)],trues[-length(trues)])-preds)^2),
nmae=sae/sum(abs(trues-mean(train.y)))))
return(r[metrics])
}
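## Illustrative usage sketch (assumption: the regressionMetrics() definition
## above is in scope; the numbers are toy values, not package data).
if (FALSE) {
  trues <- c(10, 12, 9, 14, 11)
  preds <- c(11, 11.5, 8.5, 15, 10)
  regressionMetrics(trues, preds, metrics = c("mae", "rmse", "mape"))
  ## supplying the training response unlocks the relative metrics
  regressionMetrics(trues, preds, metrics = c("nmse", "nmae"),
                    train.y = c(9, 13, 10, 12, 11, 14))
}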
classificationMetrics <- function(trues,preds,
metrics=NULL,
benMtrx=NULL,
allCls=unique(c(levels(as.factor(trues)),levels(as.factor(preds)))),
posClass=allCls[1],
beta=1
)
{
if (!is.null(dim(preds))) stop("classificationMetrics:: expecting a vector as predictions.")
twoClsMetrics <- c('fpr','fnr','tpr','tnr','rec','sens','spec',
'prec','rpp','lift','F','ppv','fdr','npv','for','plr','nlr','dor')
knownMetrics <- c(twoClsMetrics,c('acc','err','totU',
'microF','macroF',"macroRec","macroPrec"))
if (is.null(metrics)) {
metrics <- knownMetrics
if (length(allCls) > 2) metrics <- setdiff(metrics,twoClsMetrics)
if (is.null(benMtrx)) metrics <- setdiff(metrics,'totU')
}
if (any(twoClsMetrics %in% metrics) && length(allCls) > 2)
stop("classificationMetrics:: some of the metrics are only available for two class problems.",call.=FALSE)
if (any(c('totU') %in% metrics) && is.null(benMtrx))
stop('classificationMetrics:: benMtrx parameter not specified.',call.=FALSE)
if (!all(metrics %in% knownMetrics))
stop("classificationMetrics:: don't know how to calculate -> ",call.=FALSE,
paste(metrics[which(!(metrics %in% knownMetrics))],collapse=','))
r <- rep(NA,length(knownMetrics))
names(r) <- knownMetrics
if (length(preds) != length(trues)) {
warning("classificationMetrics:: less predictions than test cases, filling with NAs.")
t <- trues
t[] <- NA
t[names(preds)] <- preds
preds <- t
}
preds <- factor(preds,levels=allCls)
trues <- factor(trues,levels=allCls)
N <- length(trues)
cm <- as.matrix(table(preds,trues))
a <- sum(diag(cm))/N
r[c('acc','microF','err')] <- c(a,a,1-a)
if (length(allCls) == 2) {
negClass <- setdiff(allCls,posClass)
r[c('tpr','rec','sens')] <- cm[posClass,posClass]/sum(cm[,posClass])
r[c('spec','tnr')] <- cm[negClass,negClass]/sum(cm[,negClass])
r['fpr'] <- cm[posClass,negClass]/sum(cm[,negClass])
r['fnr'] <- cm[negClass,posClass]/sum(cm[,posClass])
r[c('prec','ppv')] <- cm[posClass,posClass]/sum(cm[posClass,])
r['npv'] <- cm[negClass,negClass]/sum(cm[negClass,])
r['fdr'] <- cm[posClass,negClass]/sum(cm[posClass,])
r['for'] <- cm[negClass,posClass]/sum(cm[negClass,])
r['plr'] <- r['tpr']/r['fpr']
r['nlr'] <- r['fnr']/r['tnr']
r['dor'] <- r['plr']/r['nlr']
r['rpp'] <- sum(cm[posClass,])/N
r['lift'] <- r['rec']/sum(cm[posClass,])
r['F'] <- (1+beta^2)*r['prec']*r['rec']/(beta^2*r['prec']+r['rec'])
}
if (any(c("macroF","macroRec","macroPrec") %in% metrics)) {
F <- R <- P <- 0
for(cl in allCls) {
pr <- cm[cl,cl]/sum(cm[cl,])
rc <- cm[cl,cl]/sum(cm[,cl])
F <- F+(1+beta^2)*pr*rc/(beta^2*pr+rc)
P <- P + pr
R <- R + rc
}
r["macroF"] <- F/length(allCls)
r["macroRec"] <- R/length(allCls)
r["macroPrec"] <- P/length(allCls)
}
if (!is.null(benMtrx))
if (!all(dim(cm)==dim(benMtrx)))
stop("classificationMetrics:: dimensions of confusion and cost/benefit matrices do not match",call.=FALSE)
else r['totU'] <- sum(cm*benMtrx)
return(r[metrics])
}
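## Illustrative usage sketch (assumption: the classificationMetrics() definition
## above is in scope; the labels below are toy data).
if (FALSE) {
  trues <- c("pos", "neg", "pos", "neg", "pos")
  preds <- c("pos", "neg", "neg", "neg", "pos")
  classificationMetrics(trues, preds,
                        metrics = c("acc", "rec", "prec", "F"),
                        posClass = "pos")
}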
json_array_agg <- function(x) {
UseMethod("json_array_agg")
}
json_array_agg.json2 <- function(x) {
new_json2(sprintf("[%s]", paste0(x, collapse = ",")))
}
json_array_agg.integer <- function(x) {
agg_array(x)
}
json_array_agg.double <- function(x) {
agg_array(x)
}
json_array_agg.logical <- function(x) {
agg_array(x)
}
json_array_agg.character <- function(x) {
agg_array(x)
}
json_array_agg.factor <- function(x) {
agg_array(x)
}
json_array_agg.POSIXct <- function(x) {
agg_array(x)
}
json_array_agg.POSIXlt <- function(x) {
agg_array(x)
}
json_array_agg.Date <- function(x) {
agg_array(x)
}
json_array_agg.complex <- function(x) {
agg_array(x)
}
agg_array <- function(x) {
new_json2(jsonlite::toJSON(x))
}
json_array_length <- function(x, wrap_scalars = FALSE) {
path <- "$"
write_json_tbl(x)
array_info_df <- exec_sqlite_json(
glue_sql("
SELECT
JSON_ARRAY_LENGTH(data, {path}) AS result,
JSON_TYPE(data, {path}) AS type
FROM my_tbl", .con = con)
)
if (is_true(wrap_scalars)) {
array_lengths <- array_info_df$result + !array_info_df$type %in% c("array", "null")
} else {
if (!all(array_info_df$type %in% c("array", "null") | is.na(array_info_df$type))) {
stop_jsontools(
c(
x = "`x` has scalar elements.",
i = "use `wrap_scalars = TRUE` to consider scalars as length 1 array."
)
)
}
array_lengths <- array_info_df$result
}
as.integer(array_lengths)
}
json_array_types <- function(x) {
json_each(x)$type
}
is_json_array <- function(x, null = TRUE, na = TRUE) {
x <- as.character(x)
(grepl("^\\s*\\[", x) & grepl("]\\s*$", x) & !is.na(x)) |
(null & x == "null" & !is.na(x)) |
(na & is.na(x))
}
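## Illustrative usage sketch (assumption): is_json_array() above only needs
## base R, so it can be exercised directly on a few hand-written strings.
if (FALSE) {
  is_json_array(c("[1, 2, 3]", "null", NA, "{\"a\": 1}"))
  ## expected (assumption): TRUE TRUE TRUE FALSE with the default null/na flags
}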
json_wrap_scalars <- function(x) {
write_json_tbl(x)
exec_sqlite_json(
"SELECT
CASE
WHEN JSON_VALID(my_tbl.data) THEN CASE
WHEN
JSON_TYPE(my_tbl.data) NOT IN ('array', 'object')
THEN
JSON_ARRAY(my_tbl.data)
ELSE
my_tbl.data
END
WHEN my_tbl.data IS NULL THEN 'null'
ELSE JSON_ARRAY(JSON_QUOTE(my_tbl.data))
END AS result
FROM my_tbl"
)$result %>% json2()
}
rnvmix_ <- function(n, rmix, qmix, groupings = rep(1, d), loc = rep(0, d),
scale = diag(2), factor = NULL,
method = c("PRNG", "sobol", "ghalton"), skip = 0,
which = c("nvmix", "maha2"), ...)
{
stopifnot(n >= 1)
method <- match.arg(method)
which <- match.arg(which)
if(!hasArg(qmix)) qmix <- NULL
if(!hasArg(rmix)) rmix <- NULL
if(is.null(factor)) {
factor <- chol(scale)
d <- nrow(factor)
k <- d
} else {
d <- nrow(factor <- as.matrix(factor))
k <- ncol(factor)
factor <- t(factor)
}
stopifnot(length(groupings) == d)
numgroups <- length(unique(groupings))
inversion <- FALSE
if(method != "PRNG") inversion <- TRUE
if(method == "PRNG" & is.null(rmix)) inversion <- TRUE
if(inversion & is.null(qmix))
stop("'qmix' needs to be provided for methods 'sobol' and 'ghalton'")
is.rmix.sample <- if(is.numeric(rmix)){
stopifnot(all(rmix > 0),
all.equal(dim(rmix <- cbind(rmix)), c(n, numgroups)))
TRUE
} else FALSE
if(!is.rmix.sample){
mix_list <- get_mix_(qmix = qmix, rmix = rmix, groupings = groupings,
callingfun = "rnvmix", ... )
mix_ <- mix_list[[1]]
special.mix <- mix_list[[2]]
use.q <- mix_list$use.q
inversion <- use.q
}
rtW <- if(inversion){
dim. <- if(which == "nvmix") k + 1 else 2
U <- switch(method,
"sobol" = {
qrng::sobol(n, d = dim., randomize = "digital.shift",
skip = skip)
},
"ghalton" = {
qrng::ghalton(n, d = dim., method = "generalized")
},
"PRNG" = {
matrix(runif(n* dim.), ncol = dim.)
})
sqrt(mix_(U[, 1]))
} else if(is.rmix.sample) sqrt(rmix) else sqrt(mix_(n))
if(!is.matrix(rtW)) rtW <- cbind(rtW)
if(which == "nvmix") {
Z <- if(!inversion) {
matrix(rnorm(n * k), ncol = k)
} else {
qnorm(U[, 2:(k+1)])
}
Y <- Z %*% factor
X <- if(numgroups == 1) as.vector(rtW) * Y else rtW[, groupings] * Y
sweep(X, 2, loc, "+")
} else {
Zsq <- if(!inversion) {
rgamma(n, shape = d/2, scale = 2)
} else {
qgamma(U[, 2], shape = d/2, scale = 2)
}
as.vector(rtW)^2 * Zsq
}
}
rnvmix <- function(n, rmix, qmix, loc = rep(0, d), scale = diag(2),
factor = NULL, method = c("PRNG", "sobol", "ghalton"),
skip = 0, ...)
{
d <- if(is.null(factor)) dim(scale)[1] else nrow(factor <- as.matrix(factor))
method <- match.arg(method)
rnvmix_(n, rmix = rmix, qmix = qmix, loc = loc, scale = scale,
factor = factor, method = method, skip = skip, which = "nvmix", ...)
}
rgnvmix <- function(n, qmix, groupings = 1:d, loc = rep(0, d),
scale = diag(2), factor = NULL,
method = c("PRNG", "sobol", "ghalton"), skip = 0, ...)
{
d <- if(is.null(factor)) dim(scale)[1] else nrow(factor <- as.matrix(factor))
method <- match.arg(method)
rnvmix_(n, qmix = qmix, groupings = groupings, loc = loc,
scale = scale, factor = factor, method = method, skip = skip,
which = "nvmix", ...)
}
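## Illustrative usage sketch (assumption): with the "PRNG" method, rnvmix()
## above also accepts a numeric vector of pre-simulated positive mixing
## realisations via `rmix`, so a toy sample can be drawn without get_mix_().
if (FALSE) {
  set.seed(1)
  n <- 500
  W <- rexp(n) + 0.5                       # hypothetical positive mixing draws
  X <- rnvmix(n, rmix = W, scale = diag(2))
  dim(X)                                   # 500 x 2
}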
clognormlike <- function(parm, nXvar, nmuZUvar, nuZUvar, nvZVvar,
muHvar, uHvar, vHvar, Yvar, Xvar, S, N, FiMat) {
beta <- parm[1:(nXvar)]
omega <- parm[(nXvar + 1):(nXvar + nmuZUvar)]
delta <- parm[(nXvar + nmuZUvar + 1):(nXvar + nmuZUvar + nuZUvar)]
phi <- parm[(nXvar + nmuZUvar + nuZUvar + 1):(nXvar + nmuZUvar +
nuZUvar + nvZVvar)]
mu <- as.numeric(crossprod(matrix(omega), t(muHvar)))
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- Yvar - as.numeric(crossprod(matrix(beta), t(Xvar)))
ll <- numeric(N)
for (i in 1:N) {
ur <- exp(mu[i] + exp(Wu[i]/2) * qnorm(FiMat[i, ]))
ll[i] <- log(mean(1/exp(Wv[i]/2) * dnorm((epsilon[i] +
S * ur)/exp(Wv[i]/2))))
}
return(ll)
}
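## Illustrative sketch (assumption, not from the original source): for one
## observation the loop above approximates the marginal density
##   f(e) = E_u[ dnorm((e + S*u)/sigma_v) / sigma_v ],  u ~ lognormal(mu, sigma_u),
## by averaging over the draws stored in one row of FiMat.
if (FALSE) {
  set.seed(42)
  Fi <- runif(100)                         # hypothetical row of FiMat
  mu <- 0; sigma_u <- 0.5; sigma_v <- 0.3; S <- 1; e <- -0.2
  u <- exp(mu + sigma_u * qnorm(Fi))       # lognormal inefficiency draws
  log(mean(dnorm((e + S * u) / sigma_v) / sigma_v))
}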
cstlognorm <- function(olsObj, epsiRes, S, nmuZUvar, nuZUvar, uHvar,
muHvar, nvZVvar, vHvar) {
m2 <- moment(epsiRes, order = 2)
m3 <- moment(epsiRes, order = 3)
varu <- tryCatch((nleqslv(x = 0.01, fn = function(x) -exp(9 *
x^2/2) + 3 * exp(5 * x^2/2) - 2 * exp(3 * x^2/2) - S *
m3, method = "Newton")$x)^2, error = function(e) e)
if (inherits(varu, "error"))
varu <- 0.01
varv <- if ((m2 - exp(varu) * (exp(varu) - 1)) < 0) {
abs(m2 - exp(varu) * (exp(varu) - 1))
} else {
(m2 - exp(varu) * (exp(varu) - 1))
}
dep_u <- 1/2 * log((log(1/2 + sqrt(4 * ((epsiRes^2 - varv)^2)^(1/2))/2))^2)
dep_v <- 1/2 * log((epsiRes^2 - exp(varu) * (exp(varu) -
1))^2)
reg_hetu <- if (nuZUvar == 1) {
lm(log(varu) ~ 1)
} else {
lm(dep_u ~ ., data = as.data.frame(uHvar[, 2:nuZUvar]))
}
if (any(is.na(reg_hetu$coefficients)))
stop("At least one of the OLS coefficients of 'uhet' is NA: ",
paste(colnames(uHvar)[is.na(reg_hetu$coefficients)],
collapse = ", "), ". This may be due to a singular matrix due to potential perfect multicollinearity",
call. = FALSE)
reg_hetv <- if (nvZVvar == 1) {
lm(log(varv) ~ 1)
} else {
lm(dep_v ~ ., data = as.data.frame(vHvar[, 2:nvZVvar]))
}
if (any(is.na(reg_hetv$coefficients)))
stop("at least one of the OLS coefficients of 'vhet' is NA: ",
paste(colnames(vHvar)[is.na(reg_hetv$coefficients)],
collapse = ", "), ". This may be due to a singular matrix due to potential perfect multicollinearity",
call. = FALSE)
reg_hetmu <- if (nmuZUvar == 1) {
lm(epsiRes ~ 1)
} else {
lm(epsiRes ~ ., data = as.data.frame(muHvar[, 2:nmuZUvar]))
}
if (any(is.na(reg_hetmu$coefficients)))
stop("at least one of the OLS coefficients of 'muhet' is NA: ",
paste(colnames(muHvar)[is.na(reg_hetmu$coefficients)],
collapse = ", "), ". This may be due to a singular matrix due to potential perfect multicollinearity",
call. = FALSE)
delta <- coefficients(reg_hetu)
names(delta) <- paste0("Zu_", colnames(uHvar))
phi <- coefficients(reg_hetv)
names(phi) <- paste0("Zv_", colnames(vHvar))
omega <- coefficients(reg_hetmu)
names(omega) <- paste0("Zmu_", colnames(muHvar))
if (names(olsObj)[1] == "(Intercept)") {
beta <- c(olsObj[1] + S * exp(varu/2), olsObj[-1])
} else {
beta <- olsObj
}
return(c(beta, omega, delta, phi))
}
cgradlognormlike <- function(parm, nXvar, nmuZUvar, nuZUvar, nvZVvar,
muHvar, uHvar, vHvar, Yvar, Xvar, S, N, FiMat) {
beta <- parm[1:(nXvar)]
omega <- parm[(nXvar + 1):(nXvar + nmuZUvar)]
delta <- parm[(nXvar + nmuZUvar + 1):(nXvar + nmuZUvar + nuZUvar)]
phi <- parm[(nXvar + nmuZUvar + nuZUvar + 1):(nXvar + nmuZUvar +
nuZUvar + nvZVvar)]
mu <- as.numeric(crossprod(matrix(omega), t(muHvar)))
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- Yvar - as.numeric(crossprod(matrix(beta), t(Xvar)))
qFimat <- qnorm(FiMat)
WuqFi <- sweep(qFimat, MARGIN = 1, STATS = exp(Wu/2), FUN = "*")
WumuqFi <- sweep(WuqFi, MARGIN = 1, STATS = mu, FUN = "+")
WumuqFiepsi <- sweep(S * exp(WumuqFi), MARGIN = 1, STATS = epsilon,
FUN = "+")
WuWvmuqFiepsi <- sweep(WumuqFiepsi, MARGIN = 1, STATS = exp(Wv/2),
FUN = "/")
dqFi <- dnorm(WuWvmuqFiepsi)
WvdqFi <- apply(sweep(dqFi, MARGIN = 1, STATS = exp(Wv/2),
FUN = "/"), 1, sum)
sigx1 <- sweep(dqFi * (WumuqFiepsi), MARGIN = 1, STATS = exp(3 *
Wv/2), FUN = "/")
sigx2 <- sweep(dqFi * exp(WumuqFi) * (WumuqFiepsi), MARGIN = 1,
STATS = exp(3 * Wv/2), FUN = "/")
sigx3 <- sweep(dqFi * exp(WumuqFi) * qFimat * (WumuqFiepsi),
MARGIN = 1, STATS = exp(Wu/2)/exp(3 * Wv/2), FUN = "*")
sigx4 <- sweep(0.5 * (dqFi * (WumuqFiepsi)^2), MARGIN = 1,
STATS = exp(Wv), FUN = "/")
sigx5 <- sweep((sigx4 - 0.5 * dqFi), MARGIN = 1, STATS = exp(Wv/2),
FUN = "/")
gx <- matrix(nrow = N, ncol = nXvar)
for (k in 1:nXvar) {
gx[, k] <- apply(sweep(sigx1, MARGIN = 1, STATS = Xvar[,
k], FUN = "*"), 1, sum)/WvdqFi
}
gmu <- matrix(nrow = N, ncol = nmuZUvar)
for (k in 1:nmuZUvar) {
gmu[, k] <- apply(sweep(sigx2, MARGIN = 1, STATS = -(S *
muHvar[, k]), FUN = "*"), 1, sum)/WvdqFi
}
gu <- matrix(nrow = N, ncol = nuZUvar)
for (k in 1:nuZUvar) {
gu[, k] <- apply(sweep(sigx3, MARGIN = 1, STATS = -(0.5 *
(S * uHvar[, k])), FUN = "*"), 1, sum)/WvdqFi
}
gv <- matrix(nrow = N, ncol = nvZVvar)
for (k in 1:nvZVvar) {
gv[, k] <- apply(sweep(sigx5, MARGIN = 1, STATS = vHvar[,
k], FUN = "*"), 1, sum)/WvdqFi
}
gradll <- cbind(gx, gmu, gu, gv)
return(gradll)
}
lognormAlgOpt <- function(start, olsParam, dataTable, S, nXvar,
muHvar, nmuZUvar, N, FiMat, uHvar, nuZUvar, vHvar, nvZVvar,
Yvar, Xvar, method, printInfo, itermax, stepmax, tol, gradtol,
hessianType, qac) {
startVal <- if (!is.null(start))
start else cstlognorm(olsObj = olsParam, epsiRes = dataTable[["olsResiduals"]],
S = S, uHvar = uHvar, nuZUvar = nuZUvar, vHvar = vHvar,
nvZVvar = nvZVvar, nmuZUvar = nmuZUvar, muHvar = muHvar)
startLoglik <- sum(clognormlike(startVal, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat))
if (method %in% c("bfgs", "bhhh", "nr", "nm")) {
maxRoutine <- switch(method, bfgs = function(...) maxBFGS(...),
bhhh = function(...) maxBHHH(...), nr = function(...) maxNR(...),
nm = function(...) maxNM(...))
method <- "maxLikAlgo"
}
mleObj <- switch(method, ucminf = ucminf(par = startVal,
fn = function(parm) -sum(clognormlike(parm, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat)), gr = function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), hessian = 0, control = list(trace = if (printInfo) 1 else 0,
maxeval = itermax, stepmax = stepmax, xtol = tol,
grtol = gradtol)), maxLikAlgo = maxRoutine(fn = clognormlike,
grad = cgradlognormlike, start = startVal, finalHessian = if (hessianType ==
2) "bhhh" else TRUE, control = list(printLevel = if (printInfo) 2 else 0,
iterlim = itermax, reltol = tol, tol = tol, qac = qac),
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat), sr1 = trust.optim(x = startVal,
fn = function(parm) -sum(clognormlike(parm, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat)), gr = function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), method = "SR1", control = list(maxit = itermax,
cgtol = gradtol, stop.trust.radius = tol, prec = tol,
report.level = if (printInfo) 2 else 0, report.precision = 1L)),
sparse = trust.optim(x = startVal, fn = function(parm) -sum(clognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), gr = function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), hs = function(parm) as(jacobian(function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), parm), "dgCMatrix"), method = "Sparse",
control = list(maxit = itermax, cgtol = gradtol,
stop.trust.radius = tol, prec = tol, report.level = if (printInfo) 2 else 0,
report.precision = 1L, preconditioner = 1L)),
mla = mla(b = startVal, fn = function(parm) -sum(clognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), gr = function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat)), print.info = printInfo, maxiter = itermax,
epsa = gradtol, epsb = gradtol), nlminb = nlminb(start = startVal,
objective = function(parm) -sum(clognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S,
N = N, FiMat = FiMat)), gradient = function(parm) -colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S,
N = N, FiMat = FiMat)), control = list(iter.max = itermax,
trace = if (printInfo) 1 else 0, eval.max = itermax, rel.tol = tol,
x.tol = tol)))
if (method %in% c("ucminf", "nlminb")) {
mleObj$gradient <- colSums(cgradlognormlike(mleObj$par,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S, N = N,
FiMat = FiMat))
}
mlParam <- if (method %in% c("ucminf", "nlminb")) {
mleObj$par
} else {
if (method == "maxLikAlgo") {
mleObj$estimate
} else {
if (method %in% c("sr1", "sparse")) {
names(mleObj$solution) <- names(startVal)
mleObj$solution
} else {
if (method == "mla") {
mleObj$b
}
}
}
}
if (hessianType != 2) {
if (method %in% c("ucminf", "nlminb"))
mleObj$hessian <- jacobian(function(parm) colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S,
N = N, FiMat = FiMat)), mleObj$par)
if (method == "sr1")
mleObj$hessian <- jacobian(function(parm) colSums(cgradlognormlike(parm,
nXvar = nXvar, nuZUvar = nuZUvar, nvZVvar = nvZVvar,
nmuZUvar = nmuZUvar, muHvar = muHvar, uHvar = uHvar,
vHvar = vHvar, Yvar = Yvar, Xvar = Xvar, S = S,
N = N, FiMat = FiMat)), mleObj$solution)
}
mleObj$logL_OBS <- clognormlike(parm = mlParam, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat)
mleObj$gradL_OBS <- cgradlognormlike(parm = mlParam, nXvar = nXvar,
nuZUvar = nuZUvar, nvZVvar = nvZVvar, nmuZUvar = nmuZUvar,
muHvar = muHvar, uHvar = uHvar, vHvar = vHvar, Yvar = Yvar,
Xvar = Xvar, S = S, N = N, FiMat = FiMat)
return(list(startVal = startVal, startLoglik = startLoglik,
mleObj = mleObj, mlParam = mlParam))
}
fnExpULogNorm <- function(u, sigma, mu) {
1/(u * sigma * sqrt(2 * pi)) * exp(-(log(u) - mu)^2/(2 *
sigma^2) - u)
}
fnCondEffLogNorm <- function(u, sigmaU, sigmaV, mu, epsilon,
S) {
1/(sigmaU * sigmaV) * dnorm((log(u) - mu)/sigmaU) * dnorm((epsilon +
S * u)/sigmaV)
}
clognormeff <- function(object, level) {
beta <- object$mlParam[1:(object$nXvar)]
omega <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nmuZUvar)]
delta <- object$mlParam[(object$nXvar + object$nmuZUvar +
1):(object$nXvar + object$nmuZUvar + object$nuZUvar)]
phi <- object$mlParam[(object$nXvar + object$nmuZUvar + object$nuZUvar +
1):(object$nXvar + object$nmuZUvar + object$nuZUvar + object$nvZVvar)]
Xvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 1)
muHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 3)
vHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 4)
mu <- as.numeric(crossprod(matrix(omega), t(muHvar)))
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
Wv <- as.numeric(crossprod(matrix(phi), t(vHvar)))
epsilon <- model.response(model.frame(object$formula, data = object$dataTable)) -
as.numeric(crossprod(matrix(beta), t(Xvar)))
u <- numeric(object$Nobs)
for (i in 1:object$Nobs) {
ur <- exp(mu[i] + exp(Wu[i]/2) * qnorm(object$FiMat[i,
]))
density_epsilon <- (mean(1/exp(Wv[i]/2) * dnorm((epsilon[i] +
object$S * ur)/exp(Wv[i]/2))))
u[i] <- integrate(f = fnCondEffLogNorm, lower = 0, upper = Inf,
sigmaU = exp(Wu[i]/2), sigmaV = exp(Wv[i]/2), mu = mu[i],
epsilon = epsilon[i], S = object$S)$value/density_epsilon
}
if (object$logDepVar == TRUE) {
teJLMS <- exp(-u)
res <- bind_cols(u = u, teJLMS = teJLMS)
} else {
res <- bind_cols(u = u)
}
return(res)
}
cmarglognorm_Eu <- function(object) {
omega <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nmuZUvar)]
delta <- object$mlParam[(object$nXvar + object$nmuZUvar +
1):(object$nXvar + object$nmuZUvar + object$nuZUvar)]
muHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 3)
mu <- as.numeric(crossprod(matrix(omega), t(muHvar)))
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
mu_mat <- kronecker(matrix(omega[2:object$nmuZUvar], nrow = 1),
matrix(exp(mu + exp(Wu)/2), ncol = 1))
Wu_mat <- kronecker(matrix(delta[2:object$nuZUvar], nrow = 1),
matrix(exp(mu + exp(Wu)/2 + Wu)/2, ncol = 1))
idTRUE_mu <- substring(names(omega)[-1], 5) %in% substring(names(delta)[-1],
4)
idTRUE_Wu <- substring(names(delta)[-1], 4) %in% substring(names(omega)[-1],
5)
margEff <- cbind(mu_mat[, idTRUE_mu] + Wu_mat[, idTRUE_Wu],
mu_mat[, !idTRUE_mu], Wu_mat[, !idTRUE_Wu])
colnames(margEff) <- paste0("Eu_", c(colnames(muHvar)[-1][idTRUE_mu],
colnames(muHvar)[-1][!idTRUE_mu], colnames(uHvar)[-1][!idTRUE_Wu]))
return(margEff)
}
cmarglognorm_Vu <- function(object) {
omega <- object$mlParam[(object$nXvar + 1):(object$nXvar +
object$nmuZUvar)]
delta <- object$mlParam[(object$nXvar + object$nmuZUvar +
1):(object$nXvar + object$nmuZUvar + object$nuZUvar)]
muHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 2)
uHvar <- model.matrix(object$formula, data = object$dataTable,
rhs = 3)
mu <- as.numeric(crossprod(matrix(omega), t(muHvar)))
Wu <- as.numeric(crossprod(matrix(delta), t(uHvar)))
mu_mat <- kronecker(matrix(omega[2:object$nmuZUvar], nrow = 1),
matrix(2 * (exp(Wu) - 1) * exp(2 * mu + exp(Wu)), ncol = 1))
Wu_mat <- kronecker(matrix(delta[2:object$nuZUvar], nrow = 1),
matrix(exp(Wu) * exp(2 * mu + exp(Wu) + Wu), ncol = 1))
idTRUE_mu <- substring(names(omega)[-1], 5) %in% substring(names(delta)[-1],
4)
idTRUE_Wu <- substring(names(delta)[-1], 4) %in% substring(names(omega)[-1],
5)
margEff <- cbind(mu_mat[, idTRUE_mu] + Wu_mat[, idTRUE_Wu],
mu_mat[, !idTRUE_mu], Wu_mat[, !idTRUE_Wu])
colnames(margEff) <- paste0("Vu_", c(colnames(muHvar)[-1][idTRUE_mu],
colnames(muHvar)[-1][!idTRUE_mu], colnames(uHvar)[-1][!idTRUE_Wu]))
return(margEff)
}
library(fs)
test_that("list_output works with nested and unnested files", {
skip_if_pandoc_not_installed()
skip_on_os("windows")
f <- new_factory(path = path_temp(), move_in = FALSE)
on.exit(dir_delete(f))
file_copy(
path("test_reports", "simple.Rmd"),
path(f, "report_sources")
)
nested_dir <- path(f, "report_sources", "nested")
dir_create(nested_dir)
file_copy(
path("test_reports", "parameterised.Rmd"),
nested_dir
)
file_delete(path(f, "report_sources", "example_report.Rmd"))
compile_reports(factory = f, timestamp = "test")
output_files <- list_outputs(f)
expected_files <- c(
file.path("simple", "test", "simple.Rmd"),
file.path("simple", "test", "simple.html"),
file.path("simple", "test", "simple.md"),
file.path("simple", "test", "simple_files", "figure-gfm", "pressure-1.png"),
file.path("nested", "parameterised", "test", "parameterised.Rmd"),
file.path("nested", "parameterised", "test", "parameterised.md")
)
expect_true(all(
mapply(
grepl,
pattern = sort(expected_files),
x = sort(output_files),
MoreArgs = list(fixed = TRUE)
)
))
output_files <- list_outputs(f, "simple")
expected_files <- c(
file.path("simple", "test", "simple.Rmd"),
file.path("simple", "test", "simple.html"),
file.path("simple", "test", "simple.md"),
file.path("simple", "test", "simple_files", "figure-gfm", "pressure-1.png")
)
expect_true(all(
mapply(
grepl,
pattern = sort(expected_files),
x = sort(output_files),
MoreArgs = list(fixed = TRUE)
)
))
})
test_that("list_output works, one file compiled", {
skip_if_pandoc_not_installed()
f <- new_factory(path = path_temp(), move_in = FALSE)
on.exit(dir_delete(f))
nested_dir <- path(f, "report_sources", "nested")
dir_create(nested_dir)
file_copy(
path("test_reports", "parameterised.Rmd"),
nested_dir
)
compile_reports(factory = f, "parameterised", timestamp = "test")
output_files <- list_outputs(f)
expected_files <- c(
file.path("nested", "parameterised", "test", "parameterised.Rmd"),
file.path("nested", "parameterised", "test", "parameterised.md")
)
expect_true(all(
mapply(
grepl,
pattern = sort(expected_files),
x = sort(output_files),
MoreArgs = list(fixed = TRUE)
)
))
})
test_that("list_output works, with subfolders", {
skip_if_pandoc_not_installed()
f <- new_factory(path = path_temp(), move_in = FALSE)
on.exit(dir_delete(f))
nested_dir <- path(f, "report_sources", "nested")
dir_create(nested_dir)
file_copy(
path("test_reports", "parameterised.Rmd"),
nested_dir
)
compile_reports(factory = f, "parameterised", timestamp = "test", subfolder = "bob")
output_files <- list_outputs(f)
expected_files <- c(
file.path("nested", "parameterised", "bob", "test", "parameterised.Rmd"),
file.path("nested", "parameterised", "bob", "test", "parameterised.md")
)
expect_true(all(
mapply(
grepl,
pattern = sort(expected_files),
x = sort(output_files),
MoreArgs = list(fixed = TRUE)
)
))
})
agent_returnKVoxels <-
function(xmlResultData)
{
return(as.numeric(xmlAttrs(xmlResultData[[1]][[1]])[[4]]))
}
RoxyTopic <- R6::R6Class("RoxyTopic", public = list(
sections = list(),
filename = "",
format = function(...) {
order <- c("backref", "docType", "encoding", "name", "alias", "title",
"format", "source", "usage", "param", "value", "description",
"details", "minidesc", "field", "slot", "rcmethods", "note",
"section", "examples", "references", "seealso", "author",
"concept", "keyword", "rawRd")
sections <- move_names_to_front(self$sections, order)
formatted <- lapply(sections, format, ...)
paste0(
made_by("%"),
paste0(unlist(formatted), collapse = "\n")
)
},
is_valid = function() {
all(self$has_section(c("title", "name")))
},
has_section = function(type) {
type %in% names(self$sections)
},
get_section = function(type) {
self$sections[[type]]
},
get_value = function(type) {
self$get_section(type)$value
},
get_rd = function(type) {
format(self$get_section(type))
},
get_name = function() {
self$get_value("name")
},
inherits_from = function(type) {
if (!self$has_section("inherit")) {
return(character())
}
inherit <- self$get_value("inherit")
inherits_field <- map_lgl(inherit$fields, function(x) type %in% x)
sources <- inherit$source[inherits_field]
if ("NULL" %in% sources)
return(character())
sources
},
inherits_section_from = function() {
if (!self$has_section("inherit_section")) {
return(character())
}
self$get_value("inherit_section")$source
},
add = function(x, overwrite = FALSE) {
if (inherits(x, "RoxyTopic")) {
self$add(x$sections, overwrite = overwrite)
} else if (inherits(x, "rd_section")) {
self$add_section(x, overwrite = overwrite)
} else if (is.list(x)) {
for (section in x) {
self$add_section(section, overwrite = overwrite)
}
} else if (is.null(x)) {
} else {
stop("Don't know how to add object of type ", class(x)[1])
}
invisible()
},
add_section = function(section, overwrite = FALSE) {
if (is.null(section)) return()
type <- section$type
if (self$has_section(type) && !overwrite) {
section <- merge(self$get_section(type), section)
}
self$sections[[type]] <- section
invisible()
}
))
move_names_to_front <- function(x, to_front) {
nms <- names(x)
x[union(intersect(to_front, nms), nms)]
}
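## Illustrative usage sketch (assumption): move_names_to_front() reorders a
## named list so the requested names lead, which is how format() above imposes
## the Rd section order.
if (FALSE) {
  x <- list(examples = 1, title = 2, name = 3)
  names(move_names_to_front(x, c("name", "title")))
  ## expected (assumption): "name" "title" "examples"
}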
sfcr_shock <- function(variables, start, end) {
if (!rlang::is_list(variables)) rlang::abort("Please define the variables in a `sfcr_shock()`.")
if (all(vapply(variables, rlang::is_formula, logical(1)) != 1)) rlang::abort("Please use R equations syntax to define the values of the variables.")
structure(
list(
variables = variables,
start = start,
end = end
),
class = c("sfcr_shock", "list")
)
}
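## Illustrative usage sketch (assumption): the shock variables are written as
## R formulas; the variable names below are toy model variables.
if (FALSE) {
  shock1 <- sfcr_shock(
    variables = list(G_d ~ 30, alpha1 ~ 0.7),
    start = 5,
    end = 50
  )
  class(shock1)   # "sfcr_shock" "list"
}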
processImage <- function(file_path = "", language = "English",
profile = c("documentConversion",
"documentArchiving",
"textExtraction",
"barcodeRecognition"),
textType = c("normal", "typewriter", "matrix",
"index", "ocrA", "ocrB", "e13b",
"cmc7", "gothic"),
imageSource = c("auto", "photo", "scanner"),
correctOrientation = c("true", "false"),
correctSkew = c("true", "false"),
readBarcodes = c("false", "true"),
exportFormat = c("txt", "txtUnstructured",
"rtf", "docx", "xlsx", "pptx",
"pdfSearchable", "pdfTextAndImages",
"pdfa", "xml",
"xmlForCorrectedImage", "alto"),
description = "", pdfPassword = "", ...) {
if (!file.exists(file_path)) {
stop("File Doesn't Exist. Please check the path.")
}
profile <- match.arg(profile)
textType <- match.arg(textType)
correctSkew <- match.arg(correctSkew)
imageSource <- match.arg(imageSource)
correctOrientation <- match.arg(correctOrientation)
readBarcodes <- match.arg(readBarcodes)
exportFormat <- match.arg(exportFormat)
querylist <- list(language = language,
profile = profile,
textType = textType,
imageSource = imageSource,
correctOrientation = correctOrientation,
correctSkew = correctSkew,
readBarcodes = readBarcodes,
exportFormat = exportFormat,
description = description,
pdfPassword = pdfPassword)
body <- upload_file(file_path)
process_details <- abbyy_POST("processImage",
query = querylist,
body = body, ...)
resdf <- ldply(process_details, rbind, .id = NULL)
row.names(resdf) <- NULL
resdf[] <- lapply(resdf, as.character)
cat("Status of the task: ", resdf$status, "\n")
cat("Task ID: ", resdf$id, "\n")
resdf
}
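## Illustrative usage sketch (assumption): requires valid Abbyy Cloud OCR
## credentials to be configured for the package and a real image on disk;
## the file name below is hypothetical.
if (FALSE) {
  res <- processImage(file_path = "scan.png",
                      language = "English",
                      exportFormat = "txt")
  res$id   # task id returned by the service
}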
dbDataType_DBIObject <- function(dbObj, obj, ...) {
dbiDataType(obj)
}
setMethod("dbDataType", signature("DBIObject"), dbDataType_DBIObject)
data_dir <- file.path("..", "testdata")
tempfile_nc <- function() {
tempfile_helper("yseasmax_")
}
file_out <- tempfile_nc()
yseasmax("SIS",
file.path(data_dir, "ex_yseas.nc"),
file_out)
file <- nc_open(file_out)
test_that("data is correct", {
actual <- ncvar_get(file, "SIS")
expected_data <- c(
seq(556, 568, by = 3),
seq(557, 569, by = 3),
seq(558, 570, by = 3),
seq(601, 613, by = 3),
seq(602, 614, by = 3),
seq(603, 615, by = 3),
631,
seq(619, 628, by = 3),
seq(617, 629, by = 3),
seq(618, 630, by = 3),
seq(511, 523, by = 3),
seq(512, 524, by = 3),
seq(513, 525, by = 3)
)
expected <- array(expected_data, c(3, 5, 4))
expect_equivalent(actual, expected)
})
test_that("attributes are correct", {
actual <- ncatt_get(file, "lon", "units")$value
expect_equal(actual, "degrees_east")
actual <- ncatt_get(file, "lon", "long_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "standard_name")$value
expect_equal(actual, "longitude")
actual <- ncatt_get(file, "lon", "axis")$value
expect_equal(actual, "X")
actual <- ncatt_get(file, "lat", "units")$value
expect_equal(actual, "degrees_north")
actual <- ncatt_get(file, "lat", "long_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "standard_name")$value
expect_equal(actual, "latitude")
actual <- ncatt_get(file, "lat", "axis")$value
expect_equal(actual, "Y")
actual <- ncatt_get(file, "time", "units")$value
expect_equal(actual, "hours since 1983-01-01 00:00:00")
actual <- ncatt_get(file, "time", "long_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "standard_name")$value
expect_equal(actual, "time")
actual <- ncatt_get(file, "time", "calendar")$value
expect_equal(actual, "standard")
actual <- ncatt_get(file, "SIS", "standard_name")$value
expect_equal(actual, "SIS_standard")
actual <- ncatt_get(file, "SIS", "long_name")$value
expect_equal(actual, "Surface Incoming Shortwave Radiation")
actual <- ncatt_get(file, "SIS", "units")$value
expect_equal(actual, "W m-2")
actual <- ncatt_get(file, "SIS", "_FillValue")$value
expect_equal(actual, -999)
actual <- ncatt_get(file, "SIS", "cmsaf_info")$value
expect_equal(actual, "cmsafops::yseasmax for variable SIS")
global_attr <- ncatt_get(file, 0)
expect_equal(length(global_attr), 1)
actual <- names(global_attr[1])
expect_equal(actual, "Info")
actual <- global_attr[[1]]
expect_equal(actual, "Created with the CM SAF R Toolbox.")
})
test_that("coordinates are correct", {
actual <- ncvar_get(file, "lon")
expect_identical(actual, array(seq(5, 6, by = 0.5)))
actual <- ncvar_get(file, "lat")
expect_identical(actual, array(seq(45, 47, by = 0.5)))
actual <- ncvar_get(file, "time")
expect_equal(actual, array(c(157056, 150456, 152664, 154872)))
})
nc_close(file)
test_that("no error is thrown if var does not exist", {
file_out <- tempfile_nc()
expect_warning(
yseasmax("someVariable",
file.path(data_dir, "ex_yseas.nc"),
file_out),
"Variable 'someVariable' not found. Variable 'SIS' will be used instead.")
})
test_that("no error is thrown if var is empty", {
file_out <- tempfile_nc()
expect_warning(
yseasmax("",
file.path(data_dir, "ex_yseas.nc"),
file_out),
"Variable '' not found. Variable 'SIS' will be used instead.")
})
test_that("error is thrown if var is NULL", {
file_out <- tempfile_nc()
expect_error(
yseasmax(NULL,
file.path(data_dir, "ex_yseas.nc"),
file_out),
"variable must not be NULL"
)
})
test_that("error is thrown if input file does not exist", {
file_out <- tempfile_nc()
expect_error(
yseasmax("SIS",
file.path(data_dir, "ex_doesNotExist.nc"),
file_out),
"Input file does not exist")
})
test_that("error is thrown if input file does not exist", {
file_out <- tempfile_nc()
expect_error(
yseasmax("SIS",
NULL,
file_out),
"Input filepath must be of length one and not NULL"
)
})
test_that("error is thrown if output file already exists", {
file_out <- tempfile_nc()
cat("test\n", file = file_out)
expect_error(
yseasmax("SIS",
file.path(data_dir, "ex_yseas.nc"),
file_out),
paste0("File '",
file_out,
"' already exists. Specify 'overwrite = TRUE' if you want to overwrite it."),
fixed = TRUE
)
expect_equal(readLines(con = file_out), "test")
})
test_that("no error is thrown if overwrite = TRUE", {
file_out <- tempfile_nc()
cat("test\n", file = file_out)
expect_error(
yseasmax("SIS",
file.path(data_dir, "ex_yseas.nc"),
file_out,
overwrite = TRUE),
NA)
})
test_that("no error is thrown if output file already exists", {
expect_error(
yseasmax("SIS",
file.path(data_dir, "ex_yseas.nc"),
NULL),
"Output filepath must be of length one and not NULL"
)
})
print.ABCSMC <- function(x, ...) {
if(!inherits(x, "ABCSMC")){
stop("'x' is not a ABCSMC object")
}
cat("An object of class: 'ABCSMC'\n")
cat(paste0("Consists of ", nrow(x$tols), " generations with ", nrow(x$priors), " parameters.\n"))
cat("\nData:\n")
print(x$data, row.names = FALSE)
temp <- x$tols %>%
as.data.frame() %>%
mutate(Generation = 1:n()) %>%
dplyr::select(Generation, everything()) %>%
mutate(ESS = do.call("c", x$ESS))
cat("\nTolerances:\n")
print(temp, row.names = FALSE)
temp <- x$priors %>%
mutate(p1 = as.character(signif(p1, 2))) %>%
mutate(p2 = as.character(signif(p2, 2))) %>%
mutate(temp = ifelse(dist == "unif", paste0("U(lower = ", p1, ", upper = ", p2, ")"), NA)) %>%
mutate(temp = ifelse(dist == "gamma", paste0("G(shape = ", p1, ", rate = ", p2, ")"), temp)) %>%
mutate(temp = ifelse(dist == "norm", paste0("N(mean = ", p1, ", sd = ", p2, ")"), temp)) %>%
mutate(temp = paste0(parnames, " ~ ", temp)) %>%
dplyr::select(temp)
colnames(temp) <- ""
cat("\nPriors:\n")
print(temp, row.names = FALSE, col.names = FALSE, quote = FALSE)
}
library(tinytest)
library(ggiraph)
library(ggplot2)
library(xml2)
source("setup.R")
{
eval(test_scale, envir = list(name = "scale_colour_continuous_interactive"))
eval(test_scale, envir = list(name = "scale_color_continuous_interactive"))
eval(test_scale, envir = list(name = "scale_fill_continuous_interactive"))
eval(test_scale, envir = list(name = "scale_colour_grey_interactive"))
eval(test_scale, envir = list(name = "scale_color_grey_interactive"))
eval(test_scale, envir = list(name = "scale_fill_grey_interactive"))
eval(test_scale, envir = list(name = "scale_colour_hue_interactive"))
eval(test_scale, envir = list(name = "scale_color_hue_interactive"))
eval(test_scale, envir = list(name = "scale_fill_hue_interactive"))
eval(test_scale, envir = list(name = "scale_colour_binned_interactive"))
eval(test_scale, envir = list(name = "scale_color_binned_interactive"))
eval(test_scale, envir = list(name = "scale_fill_binned_interactive"))
eval(test_scale, envir = list(name = "scale_colour_discrete_interactive"))
eval(test_scale, envir = list(name = "scale_color_discrete_interactive"))
eval(test_scale, envir = list(name = "scale_fill_discrete_interactive"))
eval(test_scale, envir = list(name = "scale_colour_date_interactive"))
eval(test_scale, envir = list(name = "scale_color_date_interactive"))
eval(test_scale, envir = list(name = "scale_fill_date_interactive"))
eval(test_scale, envir = list(name = "scale_colour_datetime_interactive"))
eval(test_scale, envir = list(name = "scale_color_datetime_interactive"))
eval(test_scale, envir = list(name = "scale_fill_datetime_interactive"))
}
".Last.projection"<-
local({
val <- list(projection = "", parameters = NULL, orientation = NULL)
function(new) if(!missing(new)) val <<- new else val
})
"mapproject"<-
function(x, y, projection = "", parameters = NULL, orientation = NULL)
{
r <- NULL
if (is.list(x)) {
r <- x$range[1:2]
y <- x$y
x <- x$x
}
if (length(x) != length(y))
stop("lengths of x and y must match")
if (is.null(r))
r <- range(x[!is.na(x)])
new.projection <- (projection != "")
if (new.projection) {
if (is.null(orientation)) orientation = c(90, 0, mean(r))
else if (length(orientation) != 3)
stop("orientation argument must have 3 elements")
}
else {
if (nchar(.Last.projection()$projection) == 0) {
return(list(x = x, y = y))
}
p <- .Last.projection()
projection <- p$projection
if (is.null(parameters)) parameters <- p$parameters
else if (length(parameters) != length(p$parameters))
stop(paste("expecting", length(p$parameters),
"parameters for", projection, "projection"))
if (is.null(orientation)) orientation <- p$orientation
else if (length(orientation) != 3)
stop("orientation argument must have 3 elements")
}
error <- .C(C_setproj,
as.character(projection),
as.double(parameters),
as.integer(length(parameters)),
as.double(orientation),
error = character(1))$error
if (error != "")
stop(error)
.Last.projection(list(projection = projection,
parameters = parameters,
orientation = orientation))
.C(C_doproj,
x = as.double(x),
y = as.double(y),
as.integer(length(x)),
range = double(4),
error = integer(1),
NAOK = TRUE)[c("x", "y", "range", "error")]
}
map.grid <-
function(lim, nx = 9, ny = 9, labels = TRUE, pretty = TRUE, cex = 1,
col = 4, lty = 2, font = 2, ...) {
pretty.range <-
function(lim, ...) {
x <- pretty(lim, ...)
if (abs(x[1]-lim[1]) > abs(x[2]-lim[1])) x <- x[-1]
n <- length(x)
if (abs(x[n]-lim[2]) > abs(x[n-1]-lim[2])) x <- x[-n]
x[1] <- lim[1]; x[length(x)] <- lim[2]
x
}
auto.format <-
function(x) {
for (digits in 0:6) {
s <- formatC(x, digits = digits, format = "f")
if (all(duplicated(s) == duplicated(x))) break
}
s
}
if (missing(lim)) lim = .map.range()
if (is.list(lim)) {
lim <- lim$range
}
if (lim[2]-lim[1] > 360) {
lim[2] <- lim[1] + 360
}
if (pretty) {
x <- pretty.range(lim[1:2], n = nx)
y <- pretty.range(lim[3:4], n = ny)
} else {
x <- seq(lim[1], lim[2], length.out = nx)
y <- seq(lim[3], lim[4], length.out = ny)
}
p <- mapproject(expand.grid(x = c(seq(lim[1], lim[2], length.out = 100), NA),
y = y))
p <- maps::map.wrap(p)
lines(p,
col = col, lty = lty, ...)
lines(mapproject(expand.grid(y = c(seq(lim[3], lim[4],
length.out = 100), NA), x = x)), col = col, lty = lty, ...)
if (labels) {
tx <- x[2]
xinc <- median(diff(x))
ty <- y[length(y)-2]
yinc <- median(diff(y))
text(mapproject(expand.grid(x = x + xinc*0.05,
y = ty + yinc*0.5)),
labels = auto.format(x), cex = cex, adj = c(0, 0), col = col,
font=font, ...)
text(mapproject(expand.grid(x = tx + xinc*0.5,
y = y + yinc*0.05)),
labels = auto.format(y), cex = cex, adj = c(0, 0), col = col,
font=font, ...)
}
} |
library("testthat")
context("forestplotRegrObj")
set.seed(1000)
n <- 1000
cov <- data.frame(
ftime = rexp(n),
fstatus = sample(0:2, n, replace = TRUE),
x1 = runif(n),
x2 = runif(n),
x3 = runif(n)
)
library(rms)
dd <<- datadist(cov)
options(datadist = "dd")
test_that("Basic test for coverage for forestplotRegrObj", {
fit1 <- cph(Surv(ftime, fstatus == 1) ~ x1 + x2 + x3, data = cov)
fit2 <- cph(Surv(ftime, fstatus == 2) ~ x1 + x2 + x3, data = cov)
forestplotRegrObj(regr.obj = fit1, new_page = TRUE)
library(forestplot)
forestplotRegrObj(
regr.obj = list(fit1, fit2),
legend = c("Status = 1", "Status = 2"),
legend_args = fpLegend(title = "Type of regression"),
new_page = TRUE
)
modifyNameFunction <- function(x) {
if (x == "x1") {
return("Covariate A")
}
if (x == "x2") {
return(expression(paste("My ", beta[2])))
}
return(x)
}
forestplotRegrObj(
regr.obj = list(fit1, fit2),
col = fpColors(box = c("darkblue", "darkred")),
variablesOfInterest.regexp = "(x2|x3)",
legend = c("First model", "Second model"),
legend_args = fpLegend(title = "Models"),
rowname.fn = modifyNameFunction, new_page = TRUE
)
forestplotRegrObj(
regr.obj = list(fit1, fit2),
col = fpColors(box = c("darkblue", "darkred")),
variablesOfInterest.regexp = "(x2|x3)",
order.regexps = c("x3", "x2"),
legend = c("First model", "Second model"),
legend_args = fpLegend(title = "Models"),
rowname.fn = modifyNameFunction, new_page = TRUE
)
}) |
kprod <- function(..., FUN = `*`) {
Reduce(
function(X, Y) kronecker(X, Y, FUN),
list(...)
)
} |
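## Usage sketch for kprod() above (illustrative only, not part of the original
## source; the matrices A, B, C are hypothetical). kprod() left-folds
## kronecker() over its arguments, so kprod(A, B, C) should equal
## kronecker(kronecker(A, B), C):
# A <- diag(2); B <- matrix(1:4, 2); C <- matrix(c(0, 1, 1, 0), 2)
# identical(kprod(A, B, C), kronecker(kronecker(A, B), C))  # expected TRUE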
summary.bess=function(object, ...){
beta = object$beta
if(object$method == "sequential"){
K.opt.aic = which.min(object$AIC)
K.opt.bic = which.min(object$BIC)
K.opt.ebic = which.min(object$EBIC)
predictors.aic = beta[,K.opt.aic]
predictors.bic = beta[,K.opt.bic]
predictors.ebic = beta[,K.opt.ebic]
if(sum(predictors.aic!=0)>1) predictor.a = "predictors" else predictor.a = "predictor"
if(sum(predictors.bic!=0)>1) predictor.b = "predictors" else predictor.b = "predictor"
if(sum(predictors.ebic!=0)>1) predictor.e = "predictors" else predictor.e = "predictor"
cat("-------------------------------------------------------------------------------\n")
cat(" Primal-dual active algorithm with tuning parameter determined by sequential method", "\n\n")
cat(" Best model determined by AIC includes" , sum(predictors.aic!=0), predictor.a, "with AIC =",
object$AIC[K.opt.aic], "\n\n")
cat(" Best model determined by BIC includes" , sum(predictors.bic!=0), predictor.b, "with BIC =",
object$BIC[K.opt.bic], "\n\n")
cat(" Best model determined by EBIC includes" , sum(predictors.ebic!=0), predictor.e, "with EBIC =",
object$EBIC[K.opt.ebic], "\n")
cat("-------------------------------------------------------------------------------\n")
} else {
cat("------------------------------------------------------------------------------\n")
cat(" Primal-dual active algorithm with tuning parameter determined by gsection method", "\n\n")
if(sum(beta[,ncol(beta)]!=0)>1) predictor = "predictors" else predictor = "predictor"
cat(" Best model includes", sum(beta[,ncol(beta)]!=0), predictor, "with", "\n\n")
cat(" log-likelihood: ", logLik(object)[length(logLik(object))], "\n")
cat(" deviance: ", deviance(object)[length(deviance(object))], "\n")
cat(" AIC: ", object$AIC[length(object$AIC)], "\n")
cat(" BIC: ", object$BIC[length(object$BIC)], "\n")
cat(" EBIC: ", object$EBIC[length(object$EBIC)], "\n")
cat("------------------------------------------------------------------------------\n")
}
} |
msc.length <- function(file, samples, groups) {
if (length(file)<1) stop("ERROR: Your input parameter is empty")
if (length(file)>1) stop("ERROR: Your input parameter is too long")
if (!file.exists(file)) stop("ERROR: File doesn't exist")
freq <- list()
sequences <- ape::read.dna(file, 'fasta')
samples <- sort(unique(gsub('_con.*','',attr(sequences, 'names'))))
freq$length <- as.numeric(gsub('.*_len|_cir.*','',attr(sequences, 'names')))
df <- data.frame(freq$length)
freq$plot <- ggplot(df, aes(x = freq.length)) +
    geom_histogram(bins = 50, alpha = 0.8, show.legend = FALSE, col = "black", fill = 'gray') +
    xlab("Minicircle sequence length") + theme_minimal() +
    theme(axis.title = element_text(face = "bold")) + ylab("Frequency of minicircle length")
return(freq)
} |
smoother.trackeRdata <- function(object,
session = NULL,
control = list(...),
...) {
operations <- attr(object, "operations")
if (!is.null(operations$smooth)) {
warning("'object' is already the result of smoother.")
return(object)
}
if (is.null(session)) {
session <- seq_len(length(object))
}
object <- object[session]
control$nsessions <- length(session)
control <- do.call("smoother_control.trackeRdata", control)
what <- match(unlist(control$what), names(object[[1]]))
if (any(is.na(what))) {
stop("At least one of 'what' is not available.")
}
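  # Each session is smoothed with zoo::rollapply() using a window of width
  # control$width and the summary function control$fun (e.g. "mean"); columns
  # not listed in 'what' are restored from the original data further below.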
smooth_fun <- function(j) {
zoo::rollapply(object[[j]], width = control$width, match.fun(control$fun))
}
foreach_object <- eval(as.call(c(list(quote(foreach::foreach),
j = seq.int(nsessions(object))))))
if (control$parallel) {
setup_parallel()
objectNew <- foreach::`%dopar%`(foreach_object, smooth_fun(j))
}
else {
objectNew <- foreach::`%do%`(foreach_object, smooth_fun(j))
}
for (k in seq_len(length(object))) {
inds <- index(objectNew[[k]])
objectNew[[k]][, -what] <- object[[k]][inds, -what]
}
class(objectNew) <- "trackeRdata"
operations$smooth <- control
attr(objectNew, "operations") <- operations
attr(objectNew, "units") <- getUnits(object)
attr(objectNew, "sport") <- get_sport(object)
attr(objectNew, "file") <- attr(object, "file")
return(objectNew)
}
smoother_control.trackeRdata <- function(fun = "mean",
width = 10,
parallel = FALSE,
what = c("speed", "heart_rate"),
nsessions = NA, ...) {
if (!is.character(fun)) {
stop("'fun' should be a character string")
} else {
match.fun(fun)
}
if (is.vector(what)) {
what <- list(what)
}
list(fun = fun, width = width, parallel = parallel, what = what, nsessions = nsessions)
} |
"survey_results" |
UFA_profile_visualizer <- function(PARAM_SA) {
print("Initiated producing mass spectra!")
input_path_hrms <- PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0010'), 2]
if (tolower(PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0011'), 2]) == "all") {
file_name_hrms <- dir(path = input_path_hrms)
file_name_hrms <- file_name_hrms[grep(paste0(".", tolower(PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0012'), 2]), "$"), file_name_hrms, ignore.case = TRUE)]
} else {
samples_string <- PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0011'), 2]
file_name_hrms <- strsplit(samples_string, ";")[[1]]
}
input_path_pl <- PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0013'), 2]
file_names_peaklist1 <- dir(path = input_path_pl, pattern = ".Rdata")
file_names_peaklist2 <- dir(path = input_path_pl, pattern = "peaklist_")
file_names_peaklist <- file_names_peaklist1[file_names_peaklist1%in%file_names_peaklist2]
file_names_peaklist_hrms1 <- gsub(".Rdata", "", file_names_peaklist)
file_names_peaklist_hrms2 <- gsub("peaklist_", "", file_names_peaklist_hrms1)
file_names_peaklist_hrms <- file_name_hrms%in%file_names_peaklist_hrms2
L_PL <- length(which(file_names_peaklist_hrms == TRUE))
if (length(file_name_hrms) != L_PL) {
stop("Error!!! peaklist files are not available for the entire selected HRMS files!")
}
output_path <- PARAM_SA[which(PARAM_SA[, 1] == 'PARAM0014'), 2]
if (!dir.exists(output_path)) {
dir.create(output_path)
print("Created output directory!")
}
output_path_spectra <- paste0(output_path, "/UFA_spectra/")
if (!dir.exists(output_path_spectra)) {
dir.create(output_path_spectra)
print("Created UFA_spectra directory!")
}
molecular_formula <- eval(parse(text = paste0("c(", PARAM_SA[which(PARAM_SA[, 1] == 'SA0001'), 2], ")")))
RT_target <- eval(parse(text = paste0("c(", PARAM_SA[which(PARAM_SA[, 1] == 'SA0002'), 2], ")")))
delta_rt <- as.numeric(PARAM_SA[which(PARAM_SA[, 1] == 'SA0003'), 2])
IonPathways <- eval(parse(text = paste0("c(", PARAM_SA[which(PARAM_SA[, 1] == 'SA0004'), 2], ")")))
peak_spacing <- as.numeric(PARAM_SA[which(PARAM_SA[, 1] == 'SA0005'), 2])
intensity_cutoff_str <- PARAM_SA[which(PARAM_SA[, 1] == 'SA0006'), 2]
UFA_IP_memory_variables <- eval(parse(text = paste0("c(", PARAM_SA[which(PARAM_SA[, 1] == "SA0007"), 2], ")")))
mass_accuracy <- as.numeric(PARAM_SA[which(PARAM_SA[, 1] == 'SA0008'), 2])
number_processing_threads <- as.numeric(PARAM_SA[which(PARAM_SA[, 1] == 'SA0009'), 2])
exportSpectra <- tolower(PARAM_SA[which(PARAM_SA[, 1] == 'SA0010'), 2]) == "yes"
exportedAnnotatedSpectraTable <- tolower(PARAM_SA[which(PARAM_SA[, 1] == 'SA0011'), 2]) == "yes"
if (exportSpectra || exportedAnnotatedSpectraTable) {
EL <- element_sorter()
Elements <- EL[[1]]
Elements_mass_abundance <- EL[[2]]
L_Elements <- length(Elements)
x_el_c <- which(Elements == "C")
x_el_b <- which(Elements == "B")
x_el_br <- which(Elements == "Br")
x_el_cl <- which(Elements == "Cl")
x_el_k <- which(Elements == "K")
x_el_s <- which(Elements == "S")
x_el_se <- which(Elements == "Se")
x_el_si <- which(Elements == "Si")
IonPW_DC <- ionization_pathway_deconvoluter(IonPathways, Elements)
L_PW <- length(IonPathways)
L_MolF <- length(molecular_formula)
RT_target_ion <- c()
MoleFormVecMat <- do.call(rbind, lapply(1:L_MolF, function (i_molf) {
FormulaVector <- formula_vector_generator(molecular_formula[i_molf], Elements, L_Elements)
rt1 <- RT_target[i_molf]
molf_deconvoluter_ipw <- do.call(rbind, lapply(1:L_PW, function (pathway) {
molv_ipw <- c()
IonPW <- IonPW_DC[[pathway]]
Ion_coeff <- IonPW[[1]]
Ion_adduct <- IonPW[[2]]
MoleFormVec <- Ion_coeff*FormulaVector + Ion_adduct
x_neg <- which(MoleFormVec < 0)
if (length(x_neg) == 0) {
RT_target_ion <<- c(RT_target_ion, rt1)
molv_ipw <- MoleFormVec
}
molv_ipw
}))
molf_deconvoluter_ipw
}))
L_MoleFormVecMat <- dim(MoleFormVecMat)[1]
molecular_formula_hill <- hill_molecular_formula_printer(Elements, MoleFormVecMat, number_processing_threads)
IP_calculator <- "IP_calculator <- function(i_mat) {
c <- MoleFormVecMat[i_mat, x_el_c]
b <- MoleFormVecMat[i_mat, x_el_b]
br <- MoleFormVecMat[i_mat, x_el_br]
cl <- MoleFormVecMat[i_mat, x_el_cl]
k <- MoleFormVecMat[i_mat, x_el_k]
s <- MoleFormVecMat[i_mat, x_el_s]
se <- MoleFormVecMat[i_mat, x_el_se]
si <- MoleFormVecMat[i_mat, x_el_si]
intensity_cutoff <- intensity_cutoff_str
isotopic_profile_calculator(MoleFormVecMat[i_mat, ], Elements_mass_abundance, peak_spacing, intensity_cutoff, UFA_IP_memory_variables)
}"
IP_calculator <- gsub("intensity_cutoff_str", intensity_cutoff_str, IP_calculator)
eval(parse(text = IP_calculator))
ip_db_function <- function(i) {
IPP <- IsotopicProfile_DataBase[[i]]
x_100 <- which.max(IPP[, 2])
L_IPP <- length(IPP[, 2])
r13c_ip <- 0
if (L_IPP > x_100) {
M13C <- abs(IPP[, 1] - IPP[x_100, 1] - 1.00335484)
M13C <- M13C[(x_100 + 1):L_IPP]
x_101 <- which.min(M13C)[1]
if (M13C[x_101] <= 0.015) {
x_101 <- x_101 + x_100
r13c_ip <- IPP[x_101, 2]/IPP[x_100, 2]*100
}
}
c(IPP[x_100, 1], r13c_ip, x_100, L_IPP)
}
SpectraAnalysis_call <- function (i_pl) {
peaklist <- loadRdata(paste0(input_path_pl, "/peaklist_", file_name_hrms[i_pl], ".Rdata"))
MassSpecFile <- paste0(input_path_hrms, "/", file_name_hrms[i_pl])
outputer003 <- MS_deconvoluter(MassSpecFile)
spectraList <- outputer003[[1]]
MS_polarity <- outputer003[[3]]
mzList.m <- do.call(rbind, lapply(1:L_MoleFormVecMat, function(j) {
Annotation <- c()
x_pl <- which(abs(peaklist[, 8] - mz_DataBase[j]) <= mass_accuracy &
abs(peaklist[, 3] - RT_target_ion[j]) <= delta_rt)
if (length(x_pl) > 0) {
if (length(x_pl) > 1) {
x_min <- which.min(abs(peaklist[x_pl, 8] - mz_DataBase[j]))
x_pl <- x_pl[x_min[1]]
}
R13C_PL <- peaklist[x_pl, 11]
RangeScan <- peaklist[x_pl, 1]:peaklist[x_pl, 2]
NumberScans <- length(RangeScan)
IsotopicProfile <- IsotopicProfile_DataBase[[j]]
size_IP <- SizeIP_IsotopicProfile_DataBase[j]
R13C_IP <- R13C_DataBase[j]
x_100 <- MAIso_IsotopicProfile_DataBase[j]
MW_exp <- matrix(rep(0, size_IP*NumberScans), ncol = NumberScans)
INT_exp <- MW_exp
for (sc in 1:NumberScans) {
PEAKS <- spectraList[[RangeScan[sc]]]
for (Iso in 1:size_IP) {
x_Iso <- which(abs(PEAKS[, 1] - IsotopicProfile[Iso, 1]) <= mass_accuracy)
if (length(x_Iso) > 0) {
if (length(x_Iso) > 1) {
x_Iso_min <- which.min(abs(PEAKS[x_Iso, 1] - IsotopicProfile[Iso, 1]))
x_Iso <- x_Iso[x_Iso_min[1]]
}
MW_exp[Iso, sc] <- PEAKS[x_Iso, 1]
INT_exp[Iso, sc] <- PEAKS[x_Iso, 2]
}
}
}
sum_INT_exp <- rowSums(INT_exp)
Ave_MW_exp <- rowSums(MW_exp*INT_exp)/sum_INT_exp
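# PCS below is the cosine similarity between the summed experimental
# intensities and the theoretical isotopic-profile intensities, scaled to
# per-mille; NEME is the root-mean-square deviation between the averaged
# experimental masses and the theoretical masses, reported in mDa.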
PCS <- sum(sum_INT_exp*IsotopicProfile[, 2])/sqrt(sum(sum_INT_exp^2)*sum(IsotopicProfile[, 2]^2))*1000
NEME <- sqrt(sum((Ave_MW_exp - IsotopicProfile[, 1])^2)/size_IP)*1000
MW_exp1 <- MW_exp
MW_exp1[which(MW_exp1 > 0)] <- 1
nd <- colSums(MW_exp1)
Int_100 <- INT_exp[x_100, ]
max_Int <- max(Int_100)
x_80 <- which(Int_100/max_Int > 0.2)
NDCS <- length(which(nd[x_80] == size_IP))
L_80 <- x_80[length(x_80)] - x_80[1] + 1
RCS <- NDCS/L_80*100
Annotation <- c(x_pl, j, size_IP, molecular_formula_hill[j],
round(IsotopicProfile[x_100, 1], 5),
round(peaklist[x_pl, 8], 5),
mass_accuracy,
peaklist[x_pl, 3],
round(max_Int, 0),
round(NEME, 2),
round(PCS, 2),
round(R13C_PL, 2),
round(R13C_IP, 2),
NDCS,
round(RCS, 2))
Annotation <- data.frame(Annotation)
rownames(Annotation) <- c("PeakID", "ID_IonFormula", "sizeIP", "IonFormula", "m/z theoretical", "m/z peaklist", "Mass accuracy (Da)", "RetentionTime(min)", "PeakHeight", "NEME (mDa)", "PCS (per-mille)", "R13C peaklist (%)", "R13C theoretical (%)", "NDCS @ 80%", "RCS (%) @ 80%")
if (exportSpectra == TRUE) {
exp_spectra <- data.frame(matrix(cbind(Ave_MW_exp, sum_INT_exp/sum_INT_exp[x_100]*100), ncol = 2))
colnames(exp_spectra) <- c("mz", "int")
theo_spectra <- data.frame(IsotopicProfile)
colnames(theo_spectra) <- c("mz", "int")
lablel_spectra <- data.frame(cbind(round(exp_spectra[, 1], 5), sapply(1:size_IP, function(la_i) {max(c(exp_spectra[la_i, 2], theo_spectra[la_i, 2]))+2})))
max_int <- 1.15*max(c(theo_spectra[, 2], exp_spectra[, 2]))
colnames(lablel_spectra) <- c("mz", "int")
spectra_figure <- ggplot(data=exp_spectra, aes(x=mz, y=int)) +
geom_segment(data=theo_spectra, aes(x=mz, xend=mz, y=0, yend=int, col = "Theoretical"), size = 6) +
geom_segment(data=exp_spectra, aes(x=mz, xend=mz, y=0, yend=int, col = "Experimental"), size = 2) +
scale_color_discrete(name = c()) +
xlab("m/z") + ylab("Intensity (%)") +
scale_x_continuous(limits = c(theo_spectra[1, 1]-1, theo_spectra[size_IP, 1]+1), expand = c(0, 0)) +
scale_y_continuous(limits = c(0, max_int), expand = c(0, 0)) +
geom_text(data=lablel_spectra, aes(x=mz, y=int, label = round(mz, 5)), cex = 5) +
annotate("text", x = (theo_spectra[1, 1]+theo_spectra[size_IP, 1])/2, y = max_int/1.05, label = paste0("[", molecular_formula_hill[j], "]", MS_polarity), size = 5) +
labs(title = file_name_hrms[i_pl]) +
theme_bw() + theme(legend.position = c(0.83, 0.95), legend.margin = margin(0, 0, 0, 0), panel.grid.major = element_blank(), panel.grid.minor = element_blank(),
text = element_text(size = 24), plot.title = element_text(size = 14))
my_table <- tableGrob(Annotation, theme = ttheme_default())
spectra_figure <- grid.arrange(spectra_figure, my_table, ncol = 2)
ggsave(filename=paste0("/UFA_spectra_", file_name_hrms[i_pl], "_", j, "_", molecular_formula_hill[j], "_",round(RT_target[j], 2), ".png"),
plot = spectra_figure,
device = "png",
path = output_path_spectra,
scale = 1,
width = 16,
height = 8,
units = "in",
dpi = 100)
}
Annotation <- t(rbind(file_name_hrms[i_pl], Annotation))
}
Annotation
}))
mzList.m
}
osType <- Sys.info()[['sysname']]
if (osType == "Windows") {
clust <- makeCluster(number_processing_threads)
registerDoSNOW(clust)
IsotopicProfile_DataBase <- foreach(counter = 1:L_MoleFormVecMat, .verbose = FALSE) %dopar% {
IP_calculator(counter)
}
ip_db_mat <- foreach(counter = 1:L_MoleFormVecMat, .combine = "rbind", .verbose = FALSE) %dopar% {
ip_db_function(counter)
}
mz_DataBase <- ip_db_mat[, 1]
R13C_DataBase <- ip_db_mat[, 2]
MAIso_IsotopicProfile_DataBase <- ip_db_mat[, 3]
SizeIP_IsotopicProfile_DataBase <- ip_db_mat[, 4]
AnnotatedSpectraTable <- foreach(counter = 1:L_PL, .combine = "rbind", .verbose = FALSE) %dopar% {
SpectraAnalysis_call(counter)
}
stopCluster(clust)
}
if (osType == "Linux") {
IsotopicProfile_DataBase <- mclapply(1:L_MoleFormVecMat, function (counter) {
IP_calculator(counter)
}, mc.cores = number_processing_threads)
ip_db_mat <- do.call(rbind, mclapply(1:L_MoleFormVecMat, function (counter) {
ip_db_function(counter)
}, mc.cores = number_processing_threads))
mz_DataBase <- ip_db_mat[, 1]
R13C_DataBase <- ip_db_mat[, 2]
MAIso_IsotopicProfile_DataBase <- ip_db_mat[, 3]
SizeIP_IsotopicProfile_DataBase <- ip_db_mat[, 4]
AnnotatedSpectraTable <- do.call(rbind, mclapply(1:L_PL, function (counter) {
SpectraAnalysis_call(counter)
}, mc.cores = number_processing_threads))
closeAllConnections()
}
if (exportedAnnotatedSpectraTable == TRUE) {
AnnotatedSpectraTable <- data.frame(AnnotatedSpectraTable)
rownames(AnnotatedSpectraTable) <- c()
colnames(AnnotatedSpectraTable) <- c("Filename", "PeakID", "ID_IonFormula", "sizeIP", "IonFormula", "m/z theoretical", "m/z peaklist", "Mass accuracy (Da)", "RetentionTime(min)", "PeakHeight", "NEME (mDa)", "PCS (per-mille)", "R13C peaklist (%)", "R13C theoretical (%)", "NDCS @ 80%", "RCS (%) @ 80%")
save(AnnotatedSpectraTable, file = paste0(output_path, "/AnnotatedSpectraTable.Rdata"))
write.csv(AnnotatedSpectraTable, file = paste0(output_path, "/AnnotatedSpectraTable.csv"))
}
print("Completed producing mass spectra!")
}
} |
print.summary.phreg <- function(x,
digits = max(getOption("digits") - 3, 3),
...){
class(x) <- c("summary.coxreg", class(x))
  print(x, digits = digits, ...)
} |
library(textreuse)
dir <- system.file("extdata/ats", package = "textreuse")
corpus <- TextReuseCorpus(dir = dir, tokenizer = tokenize_ngrams, n = 5,
progress = FALSE)
jaccard_similarity(corpus[["remember00palm"]],
corpus[["remembermeorholy00palm"]])
comparisons <- pairwise_compare(corpus, jaccard_similarity, progress = FALSE)
round(comparisons[1:3, 1:3], digits = 3)
candidates <- pairwise_candidates(comparisons)
candidates[candidates$score > 0.1, ] |
FitARz <-
function (z, p, demean = TRUE, MeanMLEQ = FALSE, lag.max = "default")
{
stopifnot(length(z) > 0, length(z) > max(p), length(p) >
0)
is.wholenumber <-
function(x, tol = .Machine$double.eps^0.5) abs(x - round(x)) < tol
stopifnot(is.wholenumber(p), p>0)
ztsp <- tsp(z)
if (lag.max == "default")
MaxLag <- min(300, ceiling(length(z)/5))
else MaxLag = lag.max
MaxIter <- 10
n <- length(z)
pvec <- sort(p)
pvec <- pvec[pvec > 0]
if (length(pvec) == 0)
pvec <- 0
if (length(p) == 1 && pvec != 0)
pvec <- 1:p
PMAX <- max(pvec)
SubQ <- length(pvec) < PMAX
indMeanQ <- demean || MeanMLEQ
if (indMeanQ)
mz <- mean(z)
else mz <- 0
y <- z - mz
ans <- GetFitARz(y, pvec)
LL <- ans$loglikelihood
etol <- 1
mu <- iter <- 0
if (MeanMLEQ && PMAX != 0)
while (etol > 1e-06 && iter < MaxIter) {
LLPrev <- LL
iter <- iter + 1
mu <- GetARMeanMLE(y, ans$phiHat)
ans <- GetFitAR(y - mu, pvec)
LL <- ans$loglikelihood
etol <- abs(LL - LLPrev)/LLPrev
if (ans$convergence != 0)
stop("GetARFit returned convergence = ", ans$convergence)
}
muHat <- mu + mz
zetaHat <- ans$zetaHat
phiHat <- ans$phiHat
if (PMAX != 0)
res <- BackcastResidualsAR(y, phiHat, Q = 100, demean = FALSE)
else res <- y
fits <- y - res
sigsq <- sum(res^2)/n
racf <- (acf(res, plot = FALSE, lag.max = MaxLag)$acf)[-1]
if (SubQ) {
varNames <- paste("zeta(", pvec, ")", sep = "")
covHat <- solve(InformationMatrixARz(zetaHat, pvec))/n
dimnames(covHat) <- list(varNames, varNames)
sdRacf <- sqrt(diag(VarianceRacfARz(zetaHat, pvec, MaxLag,
n)))
}
else {
if (PMAX > 0) {
varNames <- paste("phi(", 1:PMAX, ")", sep = "")
covHat <- SiddiquiMatrix(phiHat)/n
dimnames(covHat) <- list(varNames, varNames)
sdRacf <- sqrt(diag(VarianceRacfAR(phiHat, MaxLag,
n)))
}
else {
varNames <- character(0)
covHat <- numeric(0)
sdRacf <- rep(1/sqrt(n), MaxLag)
}
}
RacfMatrix <- matrix(c(racf, sdRacf), ncol = 2)
dimnames(RacfMatrix) <- list(1:MaxLag, c("ra", "Sd(ra)"))
LBQ <- LjungBoxTest(res, lag.max = MaxLag, k = length(zetaHat))
if (SubQ) {
m <- length(pvec)
if (m < 13) {
pVEC <- deparse(as.numeric(pvec), width.cutoff = 180)
pVEC <- substr(pVEC, 2, nchar(pVEC))
}
else {
pVECa <- deparse(as.numeric(pvec[1:4]), width.cutoff = 180)
pVECa <- substr(pVECa, 2, nchar(pVECa)-1)
pVECb <- deparse(as.numeric(pvec[(m-2):m]), width.cutoff = 180)
pVECb <- substr(pVECb, 3, nchar(pVECb))
pVEC <- paste(pVECa, ",...,", pVECb, ", m=",m)
}
ModelTitle <- paste("ARz", pVEC, sep = "")
ModelTitle <- gsub(" ", "", ModelTitle)
}
else ModelTitle <- paste("AR(", p, ")", sep = "")
ans <- list(loglikelihood = ans$loglikelihood, phiHat = phiHat,
sigsqHat = sigsq, muHat = muHat, covHat = covHat, zetaHat = zetaHat,
RacfMatrix = RacfMatrix, LjungBoxQ = LBQ, res = res,
fits = fits + mz, SubsetQ = SubQ, pvec = pvec, demean = demean,
FitMethod = "MLE", iterationCount = iter, convergence = ans$convergence,
MeanMLE = MeanMLEQ, tsp = ztsp, call = match.call(),
ARModel = "ARz", DataTitle = attr(z, "title"), ModelTitle = ModelTitle,
z = z)
class(ans) <- "FitAR"
ans
} |
context("read_urban_concentrations")
testthat::skip_on_cran()
skip_if(Sys.getenv("TEST_ONE") != "")
test_that("read_urban_concentrations", {
test_sf <- read_urban_concentrations()
expect_true(is(test_sf, "sf"))
})
test_that("read_urban_concentrations", {
expect_error(read_urban_concentrations(year=9999999))
}) |
ellecub <-
function(m,ordinal,assepai,assecsi){
prob<-assepai*(dbinom(0:(m-1),m-1,1-assecsi)-1/m)+1/m
pconi<-prob[ordinal]
return(sum(log(pconi)))
} |
in_case_list <- function(..., preserve = FALSE, default = NA) {
inputs <- in_case_setup(..., preserve = preserve, fn = "in_case_list()")
replace(
inputs$fs, inputs$x, default, preserve,
list = TRUE,
default_env = rlang::caller_env(),
current_env = rlang::current_env()
)
}
switch_case_list <- function(x, ..., preserve = FALSE, default = NA) {
fn_case_list(
x = x,
fn = `%in%`,
...,
preserve = preserve,
default = default
)
}
grep_case_list <- function(x, ..., preserve = FALSE, default = NA) {
fn_case_list(
x = x,
fn = function(x, pattern, ...) grepl(pattern, x, ...),
...,
preserve = preserve,
default = default
)
}
fn_case_list <- function(x, fn, ..., preserve = FALSE, default = NA) {
inputs <- fn_case_setup(...)
replace(
inputs$fs, x, default, preserve, fn, inputs$args, list = TRUE,
default_env = rlang::caller_env(),
current_env = rlang::current_env()
)
}
fn_switch_case_list <- function(x, fn, ..., preserve = FALSE, default = NA) {
inputs <- fn_switch_case_setup(
...,
fn = fn,
default_env = rlang::caller_env(),
current_env = rlang::current_env()
)
do.call(
switch_case_list,
c(
list(x = x), inputs$fs, inputs$args,
list(preserve = preserve, default = default)
)
)
} |
ergm.mma<-function(restricted.model,full.model,direct.effect,mediator){
tot.AME<-ergm.AME(restricted.model,direct.effect,return.dydx=TRUE)
tot.dydx<-tot.AME$dydx
tot.AME<-tot.AME$AME
p.AME<-ergm.AME(full.model,direct.effect,return.dydx=TRUE)
p.dydx<-p.AME$dydx
p.AME<-p.AME$AME
if(length(tot.dydx)!=length(p.dydx)){
if(length(tot.dydx)<length(p.dydx)){
p.dydx<-p.dydx[1:length(tot.dydx)]
}else{
tot.dydx<-tot.dydx[1:length(p.dydx)]
}
}
rownames(tot.AME)<-paste("total effect:",rownames(tot.AME))
rownames(p.AME)<-paste("partial effect:",rownames(p.AME))
mma.me<-tot.AME[1,1]-p.AME[1,1]
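# The indirect-effect standard error below combines the two AME standard
# errors and subtracts twice their covariance, approximated through the
# correlation of the unit-level marginal effects:
# sqrt(se.total^2 + se.partial^2 - 2*cor*se.total*se.partial).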
cov.ame<-2*stats::cor(p.dydx,tot.dydx)*tot.AME[,2]*p.AME[,2]
mma.se<-sqrt(tot.AME[,2]^2+p.AME[,2]^2-cov.ame)
mma.z<-mma.me/mma.se
p.mma<-2*stats::pnorm(-abs(mma.z))
ind<-matrix(signif(c(mma.me,mma.se,mma.z,p.mma),digits=5),nrow=1,ncol=4)
colnames(ind)<-colnames(tot.AME)
if(length(mediator)>1) {
mediator<-paste(mediator,collapse=", ")}
rownames(ind)<-paste("indirect effect:",direct.effect,"->",mediator)
out<-rbind(tot.AME,p.AME,ind)
proportion.mediated<-1-(p.AME[1]/tot.AME[1])
attr(out,"description")<-paste("proportion of",direct.effect,"mediated by",mediator," = ",round(proportion.mediated,digits=3))
return(out)
} |
angle.matrix<-function(RR,node,Y=NULL,select.axes=c("no","yes"),type=c("phenotypes","rates"),cova=NULL,clus=0.5)
{
if (!requireNamespace("smatr", quietly = TRUE)) {
stop("Package \"smatr\" needed for this function to work. Please install it.",
call. = FALSE)
}
if (!requireNamespace("rlist", quietly = TRUE)) {
stop("Package \"rlist\" needed for this function to work. Please install it.",
call. = FALSE)
}
unitV<-function(x){ sum(x^2)^.5 }
match.arg(select.axes)
node->n
RR$tree->tree
if(select.axes=="yes")
{
data.frame(species=unlist(lapply(strsplit(rownames(Y),"_"),"[[",1)),stage=unlist(lapply(strsplit(rownames(Y),"_"),"[[",2)))->df
data.frame(df,Y)->fulldata
as.numeric(fulldata[,2])->fulldata[,2]
fulldata[which(rownames(fulldata)%in%tips(multi2di(tree),node)),]->fd
which(apply(sapply(summary(lm(as.matrix(fd[,-c(1:2)])~fd[,1]:fd[,2])), function(x) x$coef[,4])[-1,],2,function(x) length(which(x<0.05)))==length(unique(fd$species)))->sel
if(length(sel)>1){
Y[,sel]->y.sel
print(paste("I am using", paste(colnames(y.sel),collapse=" & "),"as variables"))
}else{
Y->y.sel
print(paste("I am using the whole dataset"))
}
if(is.null(cova)){
RRphylo(tree,y.sel,clus=clus)->rr.sel
}else{
RRphylo(tree,cova)->RRcova
c(RRcova$aces,cova)->covari
names(covari)<-c(rownames(RRcova$aces),names(cova))
RRphylo(tree,y.sel,cov=covari,clus=clus)->rr.sel
}
match.arg(type)
if(type=="rates") {
rr.sel$multiple.rates->y.onto
evo.dir(rr.sel,angle.dimension = "rates",pair.type="node",node=n,random="no")->evo.p
}else{
y.sel->y.onto
evo.dir(rr.sel,y.type = "original",angle.dimension = "phenotypes",y=y.sel,pair.type="node",node=n,random="no")->evo.p
}
} else {
if(is.null(cova)){
RRphylo(tree,Y,clus=clus)->RR
}else{
RRphylo(tree,cova)->RRcova
c(RRcova$aces,cova)->covari
names(covari)<-c(rownames(RRcova$aces),names(cova))
RRphylo(tree,Y,cov=covari,clus=clus)->RR
}
if(type=="rates") {
RR$multiple.rates->y.onto
evo.dir(RR,angle.dimension = "rates",pair.type="node",node=n,random="no")->evo.p
}else{
Y->y.onto
evo.dir(RR,y.type = "original",angle.dimension = "phenotypes",y=Y,pair.type="node",node=n,random="no")->evo.p
}
}
deg2rad <- function(deg) {(deg * pi) / (180)}
rad2deg <- function(rad) {(rad * 180) / (pi)}
node->n
retrieve.angles(evo.p,wishlist="angles.between.species",focus="node",write2csv="no",node=n,random="no")->ret.ang
retrieve.angles(evo.p,wishlist="anglesMRCA",focus="node",write2csv="no",node=n,random="no")[,c(1,2,4)]->vecS
retrieve.angles(evo.p,wishlist="anglesMRCA",focus="node",write2csv="no",node=n,random="no")->angMRCA
ret.ang[,c(2,4,3)]->btwsp
data.frame(do.call(rbind,strsplit(as.character(btwsp[,1]), "/")),btwsp[,c(2,3)])->ansp
colnames(ansp)<-c("sp1","sp2","anglesBTWspecies","angleBTWspecies2MRCA")
ansp[order(ansp[,1],ansp[,2]),]->ansp
matrix(ncol=length(unique(ansp[,1])),nrow=length(unique(ansp[,2])))->anglesBTW.mat
matrix(ncol=length(unique(ansp[,1])),nrow=length(unique(ansp[,2])))->anglesBTW2MRCA.mat
colnames(anglesBTW.mat)<-unique(ansp[,1])
rownames(anglesBTW.mat)<-unique(ansp[,2])
colnames(anglesBTW2MRCA.mat)<-unique(ansp[,1])
rownames(anglesBTW2MRCA.mat)<-unique(ansp[,2])
for (i in 1:length(colnames(anglesBTW.mat))){
as.numeric(as.character(ansp[which(ansp[,1]%in%colnames(anglesBTW.mat)[i]),3]))->anglesBTW.mat[,i]
as.numeric(as.character(ansp[which(ansp[,1]%in%colnames(anglesBTW2MRCA.mat)[i]),4]))->anglesBTW2MRCA.mat[,i]
}
vecS[order(vecS[,2]),]->vecS
angMRCA[order(angMRCA[,2]),]->angMRCA
data.frame(angMRCA,"vector.size"=vecS[,3])->anglesMRCA
data.frame(species=anglesMRCA$species,angle=as.numeric(as.character(anglesMRCA$angle)),size=as.numeric(as.character(anglesMRCA$vector.size)))->VS
cos(deg2rad(VS$angle))*VS$size->xx
sin(deg2rad(VS$angle))*VS$size->yy
data.frame(VS,XX=xx,YY=yy)->VS
unique(unlist(lapply(strsplit(as.character(VS$species),split="_"),"[[",1)))->group
if(length(group)==1) stop("Check group names or attempt to compare single group")
gg<-list()
for (i in 1:length(group)){
VS[grep(group[i],VS$species),]->gg[[i]]
}
names(gg)<-group
lapply(gg,function(x) apply(x[,4:5],2,function(z) z[1]-sum(z[2:length(z)])))->x.and.y
lapply(x.and.y,function(x) rad2deg(atan(x[2]/x[1])))->vector.angle
lapply(x.and.y,function(x) (x[1]^2+x[2]^2)^.5)->vector.length
rbind(ontogenetic.angle=as.data.frame(vector.angle),ontogenetic.size=as.data.frame(vector.length),as.data.frame(x.and.y))->onto.vector
abs(apply(onto.vector,1,diff))[1:2]->onto.real
combn(colnames(onto.vector),2)->couples
onto.real<-matrix(ncol=2,nrow=dim(couples)[2])
for (i in 1 :dim(couples)[2]){
apply(onto.vector[1:2,which(colnames(onto.vector)%in%couples[,i])],1,diff)->onto.real[i,]
}
rownames(onto.real)<-apply(couples,2,function(x) paste(x,collapse = "/"))
colnames(onto.real)<-c("ontogenetic.angle.diff","ontogenetic.size.diff")
onto.random<-list()
for(w in 1:1000)
{
data.frame(species=VS$species,VS[sample(as.numeric(rownames(VS))),2:5])->VS.r
unique(unlist(lapply(strsplit(as.character(VS.r$species),split="_"),"[[",1)))->group
gg<-list()
for (i in 1:length(group)){
VS.r[grep(group[i],VS.r$species),]->gg[[i]]
}
names(gg)<-group
lapply(gg,function(x) apply(x[,4:5],2,function(z) z[1]-sum(z[2:length(z)])))->x.and.y
lapply(x.and.y,function(x) rad2deg(atan(x[2]/x[1])))->vector.angle
lapply(x.and.y,function(x) (x[1]^2+x[2]^2)^.5)->vector.length
rbind(ontogenetic.angle=as.data.frame(vector.angle),ontogenetic.size=as.data.frame(vector.length),as.data.frame(x.and.y))->onto.vectorR
onto.r<-matrix(ncol=2,nrow=dim(couples)[2])
for (i in 1 :dim(couples)[2]){
apply(onto.vectorR[1:2,which(colnames(onto.vectorR)%in%couples[,i])],1,diff)->onto.r[i,]
}
rownames(onto.r)<-apply(couples,2,function(x) paste(x,collapse = "/"))
colnames(onto.r)<-c("ontogenetic.angle.diff","ontogenetic.size.diff")
onto.r->onto.random[[w]]
}
p.ontogenetic.vector.angle<-array()
p.ontogenetic.vector.size<-array()
for(m in 1:dim(couples)[2]){
length(which(unlist(lapply(onto.random,function(x) x[m,1]))>onto.real[m,1]))/1000->p.ontogenetic.vector.angle[m]
length(which(unlist(lapply(onto.random,function(x) x[m,2]))>onto.real[m,2]))/1000->p.ontogenetic.vector.size[m]
}
names(p.ontogenetic.vector.angle)<-rownames(onto.real)
names(p.ontogenetic.vector.size)<-rownames(onto.real)
if(length(which(p.ontogenetic.vector.angle>0.950))>0) p.ontogenetic.vector.angle[which(p.ontogenetic.vector.angle>0.950)]<-1-p.ontogenetic.vector.angle[which(p.ontogenetic.vector.angle>0.950)]
if(length(which(p.ontogenetic.vector.size>0.950))>0) p.ontogenetic.vector.size[which(p.ontogenetic.vector.size>0.950)]<-1-p.ontogenetic.vector.size[which(p.ontogenetic.vector.size>0.950)]
cbind(onto.vector,"p"=rbind(p.ontogenetic.vector.angle,p.ontogenetic.vector.size,rep("",length(p.ontogenetic.vector.angle)),rep("",length(p.ontogenetic.vector.angle))))->onto.vector
if(length(which(p.ontogenetic.vector.angle<0.05))>0) print(paste("ontogenetic vectors to MRCA differ in angle for",paste(names(p.ontogenetic.vector.angle[which(p.ontogenetic.vector.angle<0.05)]),collapse=" and ")))
if(length(which(p.ontogenetic.vector.size<0.05))>0) print(paste("ontogenetic vectors to MRCA differ in size for",paste(names(p.ontogenetic.vector.size[which(p.ontogenetic.vector.size<0.05)]),collapse=" and ")))
list("angles between species"=anglesBTW.mat,"angles between species 2 MRCA"=anglesBTW2MRCA.mat)->matt
mats<-list()
for(f in 1:length(matt)){
matt[[f]]->aaa
unique(unlist(lapply(strsplit(colnames(aaa),"_"),"[[",1)))->sp.col
unique(unlist(lapply(strsplit(rownames(aaa),"_"),"[[",1)))->sp.row
colmat<-list()
for (i in 1:length(sp.col)){
aaa[,grep(sp.col[i],colnames(aaa))]->colmat[[i]]
}
couple.mat<-list()
for (i in 1:length(colmat)){
colmat[[i]]->bbb
rowmat<-list()
for (k in 1:length(sp.row)){
bbb[grep(sp.row[k],rownames(bbb)),]->rowmat[[k]]
}
for(w in 1:length(rowmat))
{
rlist::list.append(couple.mat,rowmat[[w]])->couple.mat
}
}
couple.mat->mats[[f]]
}
names(matt)->names(mats)
mats$`angles between species`->matte
t(sapply(matte,function(x) unique(sapply(strsplit(c(rownames(x),colnames(x)),"_"),"[[",1))))->pair.group
ontogenesis<-list()
ontogenesis.mats<-list()
for(g in 1:dim(pair.group)[1]){
pair.group[g,]->group
group.mats<-list()
for(i in 1:length(group)){
retrieve.angles(evo.p,wishlist="angles.between.species",focus="species",species=group[i],random="no",write2csv = "no")->g1
g1[grep(group[i],sapply(strsplit(as.character(g1[,2]),split="/"),"[[",1))[which(grep(group[i],sapply(strsplit(as.character(g1[,2]),split="/"),"[[",1))%in%grep(group[i],sapply(strsplit(as.character(g1[,2]),split="/"),"[[",2)))],]->g1
data.frame(do.call(rbind,strsplit(as.character(g1[,2]), "/")),g1[,c(3,4)])->g1
unique(c(as.character(g1[,1]),as.character(g1[,2])))[order(unique(c(as.character(g1[,1]),as.character(g1[,2]))))]->group1
g1[,c(2,1,3,4)]->g1.inv
colnames(g1.inv)<-colnames(g1)
rbind(g1,g1.inv)->g11
xtabs(as.numeric(as.character(anglesBTWspecies))~as.character(X1)+as.character(X2),data=g11)->group.mat
names(dimnames(group.mat))<-NULL
as.matrix(group.mat)->group.mat
group.mat->group.mats[[i]]
}
group.matt<-list()
which(sapply(strsplit(colnames(group.mats[[1]]),"_"),"[[",2)%in%sapply(strsplit(colnames(group.mats[[2]]),"_"),"[[",2))->in1
which(sapply(strsplit(colnames(group.mats[[2]]),"_"),"[[",2)%in%sapply(strsplit(colnames(group.mats[[1]]),"_"),"[[",2))->in2
group.mats[[1]][in1,in1]->s1
group.mats[[2]][in2,in2]->s2
s1->group.matt[[1]]
s2->group.matt[[2]]
s1[which(lower.tri(s1))]<-s2[which(lower.tri(s2))]
rownames(s2)->rownames(s1)
diag(s1) <- rep("", length(diag(s1)))
as.data.frame(rbind(s1,rownames(s1)))->s3
data.frame(s3,c(colnames(s3),""))->s3
colnames(s3)[length(colnames(s1))+1]<-""
s3->ontogenesis.mats[[g]]
PCgroup<-list()
ontog<-list()
for (i in 1:length(group.matt)){
group.matt[[i]]->group.mat
y.onto[match(rownames(group.mat),rownames(y.onto)),]->groupY
groupY->PCgroup[[i]]
as.matrix(groupY)->groupY
apply(groupY,1,unitV)->group.len
apply(groupY,2,function(x) x[1]-sum(x[2:length(x)]))->Y.resultant
rad2deg(acos((groupY[1,]%*%Y.resultant)/(unitV(groupY[1,])*unitV(Y.resultant))))->onto.angle
unitV(Y.resultant)->onto.vector.size
cbind(onto.angle,onto.vector.size)->onto.tot
colnames(onto.tot)<-c("angle","size")
rownames(onto.tot)<-group[i]
onto.tot->ontog[[i]]
}
do.call(rbind,ontog)->ontog
onto1.ran<-matrix(ncol=2,nrow=100)
for(h in 1:100){
seq(1,dim(PCgroup[[1]])[1],1)->ss
sample(c(rep(1,length(ss)),rep(2,length(ss))),length(ss))->sam
while(sum(sam)==length(ss)|sum(sam)==2*length(ss)) sample(c(rep(1,length(ss)),rep(2,length(ss))),length(ss))->sam
sam2<-array()
for(k in 1:length(sam)){ if (sam[k]==2) sam2[k]<-1 else sam2[k]<-2}
c(sam,sam2)->sam3
c(ss,ss)->ss
matss<-matrix(ncol=dim(PCgroup[[1]])[2],nrow = length(ss))
for(i in 1:length(sam3)){
PCgroup[[sam3[i]]][ss[i],]->tem
as.matrix(tem)->tem
tem->matss[i,]
}
unlist(lapply(PCgroup,rownames))->rownames(matss)
unlist(lapply(PCgroup,colnames)[[1]])->colnames(matss)
l<-list()
matss[1:(length(ss)/2),]->l[[1]]
matss[((length(ss)/2)+1):length(ss),]->l[[2]]
matrix(nrow=2,ncol=2)->ontog.ran
for (i in 1:2){
l[[i]]->groupY
as.matrix(groupY)->groupY
apply(groupY,1,unitV)->group.len
apply(groupY,2,function(x) x[1]-sum(x[2:length(x)]))->Y.resultant
rad2deg(acos((groupY[1,]%*%Y.resultant)/(unitV(groupY[1,])*unitV(Y.resultant))))->onto.angle
unitV(Y.resultant)->onto.vector.size
cbind(onto.angle,onto.vector.size)->onto.tot
colnames(onto.tot)<-c("angle","size")
rownames(onto.tot)<-group[i]
onto.tot->ontog.ran[i,]
}
apply(ontog.ran,2,diff)->onto1.ran[h,]
}
colnames(onto1.ran)<-c("angleDiff","sizeDiff")
apply(ontog,2,diff)->ontodiffs
length(which(onto1.ran[,1]>ontodiffs[1]))/100->p.onto1.angle
length(which(onto1.ran[,2]>ontodiffs[2]))/100->p.onto1.size
if(p.onto1.angle<0.05|p.onto1.angle>0.95) print(paste("ontogenetic vectors to 1st stage differ in angle for",paste(rownames(ontog),collapse=" and ")))
if(p.onto1.size<0.05|p.onto1.size>0.95) print(paste("ontogenetic vectors to 1st stage differ in size for",paste(rownames(ontog),collapse=" and ")))
rbind(ontog,p=c(p.onto1.angle,p.onto1.size))->ontogenesis[[g]]
}
apply(pair.group,1, function(x) paste(x[1],x[2],sep="/"))->names(ontogenesis.mats)
apply(pair.group,1, function(x) paste(x[1],x[2],sep="/"))->names(ontogenesis)
ontogenesis.list<-list()
ontogenesis.mats->ontogenesis.list[[1]]
ontogenesis->ontogenesis.list[[2]]
names(ontogenesis.list)<-c("matrices","vectors")
SMAT.res<-list()
for (h in 1:length(mats))
{
mats[[h]]->matta
smat.res<-list()
nam<-array()
for (i in 1:length(matta)){
matta[[i]]->mat
which(is.na(match(unlist(lapply(strsplit(colnames(mat),"_"),"[[",2)),unlist(lapply(strsplit(rownames(mat),"_"),"[[",2)))))->colout
which(is.na(match(unlist(lapply(strsplit(rownames(mat),"_"),"[[",2)),unlist(lapply(strsplit(colnames(mat),"_"),"[[",2)))))->rowout
if (length(colout)>0) mat[,-colout]->mat1 else mat->mat1
if (length(rowout)>0) mat1[-rowout,]->mat1
if(dim(mat1)[1]<3) {
list(matrix=mat,paedomorphosis.test=NULL,biogenetic.test=NULL)->smat.res[[i]]
c(colnames(mat),rownames(mat))->totnam
unlist(lapply(strsplit(totnam,split="_"),"[[",1))->group
paste(unique(group),collapse="/")->nam[i]
warning("too few ontogenetic stages for meaningful test for ",paste(nam[i]))
} else {
diag(mat1)->biogen.vector
as.data.frame(c(mat1[1,],mat1[,1]))->mat2
unlist(lapply(strsplit(rownames(mat2),split="_"),"[[",1))->group
unlist(lapply(strsplit(rownames(mat2),split="_"),"[[",2))->age
data.frame(group=group,age=age,angle=mat2[,1])->mat3
as.numeric(as.character(mat3$age))->mat3$age
if(inherits(try(smatr::sma(angle~age*group,data=mat3)->res.slope,silent=TRUE),"try-error")) {
summary(lm(angle~group/age-1,data=mat3))->a
t(a$coef[c(3,4),c(1,4)])->a1
confint(lm(angle~group/age-1,data=mat3))->a2
t(a2[3:4,])->a2
summary(lm(angle~age,data=subset(mat3,mat3$group==levels(mat3$group)[1])))$r.squared->r1
summary(lm(angle~age,data=subset(mat3,mat3$group==levels(mat3$group)[2])))$r.squared->r2
c(r1,r2)->rr
names(rr)<-colnames(a1)
rbind(a1[1,],a2,rr,a1[2,])->A
rownames(A)<-c("slope","lower.CI.lim","upper.CI.lim","R2","p")
unique(group)->colnames(A)
data.frame(LogL=logLik(lm(angle~group/age-1,data=mat3))[1],p=summary(lm(angle~group*age,data=mat3))$coef[4,4])->B
list("Results of comparing lines among groups by lm"=B,"Coefficients by group in variable 'group' by lm"=A)->sma.res
} else {
res.slope$commoncoef[c(1,2,7)]->a
rbind(a[[3]],rbind(t(res.slope$r2),t(res.slope$pval)))->b
rownames(b)[4:5]<-c("R2","p")
list(as.data.frame(a[1:2]),b)->sma.res
names(sma.res)<-c("Results of comparing lines among groups","Coefficients by group in variable 'group'")
}
data.frame(age=as.numeric(unique(age)),biogen.vector)->bg.data
smatr::sma(biogen.vector~age,data=bg.data)->bg.res
list(as.data.frame(bg.res$coef),data.frame(p=unlist(bg.res$p),R2=unlist(bg.res$r2)))->bg.test
names(bg.test)<-c("biogenetic law test coefficients","biogenetic law test significance")
list(matrix=mat,paedomorphosis.test=sma.res,biogenetic.test=bg.test)->smat.res[[i]]
paste(unique(group),collapse="/")->nam[i]
}
}
names(smat.res)<-nam
if(h==1){
for (i in 1:length(smat.res)){
if(length(smat.res[[i]]$paedomorphosis.test)>0){
if(as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][1,1])*as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][1,2])<0&smat.res[[i]]$paedomorphosis.test[[1]]$p<.05 | length(which(as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][5,])>.05))==1 &smat.res[[i]]$paedomorphosis.test[[1]]$p<.05){
print(paste("It looks there is paedomorphosis between",nam[i]))
}
if(smat.res[[i]]$biogenetic.test$`biogenetic law test coefficients`$coef.SMA.[2]>0&smat.res[[i]]$biogenetic.test$`biogenetic law test significance`$p<.05){
print(paste("Biogenetic law is confirmed for",nam[i]))
}
}
}
}else{
for (i in 1:length(smat.res)){
if(length(smat.res[[i]]$paedomorphosis.test)>0){
if(as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][1,1])*as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][1,2])<0&smat.res[[i]]$paedomorphosis.test[[1]]$p<.05 | length(which(as.data.frame(smat.res[[i]]$paedomorphosis.test[[2]][5,])>.05))==1 &smat.res[[i]]$paedomorphosis.test[[1]]$p<.05){
print(paste("It looks there is paedomorphosis between",nam[i],"2 MRCA"))
}
if(smat.res[[i]]$biogenetic.test$`biogenetic law test coefficients`$coef.SMA.[2]>0&smat.res[[i]]$biogenetic.test$`biogenetic law test significance`$p<.05){
print(paste("Biogenetic law is confirmed for",nam[i],"2 MRCA"))
}
}
}
}
smat.res->SMAT.res[[h]]
}
names(SMAT.res)<-names(matt)
return(list(regression.matrix=SMAT.res,angles.2.MRCA.and.vector.size=anglesMRCA,ontogenetic.vectors2MRCA=onto.vector,ontogenetic.vectors.to.1st.stage=ontogenesis.list))
} |
suppressMessages(library( LatticeKrig))
options( echo=FALSE)
test.for.zero.flag<- 1
data( ozone2)
x<-ozone2$lon.lat
y<- ozone2$y[16,]
good <- !is.na( y)
x<- x[good,]
y<- y[good]
x<- x[1:15,]
y<- y[1:15]
N<- length( y)
a.wght<- 5
lambda <- 1.5
obj<- LKrig( x,y,NC=16, lambda=lambda, a.wght=a.wght, alpha=1, nlevel=1, NtrA=5,iseed=122)
LKinfo<- obj$LKinfo
K<- LKrig.cov( x,x,LKinfo)
tempM<- K
diag(tempM) <- (lambda) + diag(tempM)
Mi<- solve( tempM)
T.matrix<- cbind( rep(1,N),x)
d.coef0 <- solve( t(T.matrix)%*%Mi%*%T.matrix, t(T.matrix)%*%Mi%*%y)
test.for.zero( obj$d.coef, d.coef0, tag="d from LKrig and by hand")
temp2<- chol( tempM)
c.coef0 <- forwardsolve(temp2, transpose = TRUE,
(y- T.matrix%*%d.coef0), upper.tri = TRUE)
c.coef0 <- backsolve(temp2, c.coef0)
obj0<- mKrig( x,y, lambda=lambda, m=2, cov.function="LKrig.cov",
cov.args=list(LKinfo=LKinfo),
NtrA=5, iseed=122)
test.for.zero( obj0$c, c.coef0, tag="c from mKrig and by hand" )
test.for.zero( obj0$fitted.values, obj$fitted.values)
test.for.zero( lambda*obj0$c, (y-obj$fitted.values),
tag="c from mKrig and from residuals of LatticeKrig (this is big!)" )
test.for.zero( obj$trA.info, obj0$trA.info, tag="Monte Carlo traces")
alpha<- c(1,.5,.2)
nlevel<-3
a.wght<- list(5,5,10)
lambda<- .1
obj<- LKrig( x,y,NC=5, lambda=lambda,
nlevel=nlevel, alpha=alpha,a.wght=a.wght, NtrA=5,iseed=122)
LKinfo<- obj$LKinfo
obj0<- mKrig( x,y, lambda=lambda, m=2, cov.function="LKrig.cov",
cov.args=list(LKinfo=LKinfo),
NtrA=5, iseed=122)
test.for.zero( obj0$fitted.values, obj$fitted.values)
test.for.zero( obj$d.coef, obj0$d, tag= "d from Lattice Krig and mKrig")
xTest<- rbind(x, c( -100,20) )
yTest<- c( y, 1000)
LKinfoTest<- LKrigSetup( x, NC=5, nlevel=3, nu=1, a.wght=5)
obj<- LKrig( xTest,yTest, LKinfo= LKinfoTest, lambda=.1)
xTest<- rbind(x, x)
yTest<- c( y, y)
LKinfoTest<- LKrigSetup( x, NC=5, nlevel=3, nu=1, a.wght=5)
obj<- LKrig( xTest,yTest, LKinfo= LKinfoTest, lambda=.1)
rm( obj, obj0)
test.for.zero.flag<- 1
alpha<- c(1,.5,.5)
nlevel<-3
a.wght<- list(5,5,10)
lnDet<- function(A){
sum( log( eigen( A, symmetric=TRUE)$values))}
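# lnDet() returns the log-determinant of a symmetric matrix from its
# eigenvalues; for a positive-definite A it matches
# determinant(A, logarithm = TRUE)$modulus.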
data( ozone2)
x<-ozone2$lon.lat
y<- ozone2$y[16,]
good <- !is.na( y)
x<- x[good,]
y<- y[good]
x<- x[1:6,]
y<- y[1:6]
N<- length( y)
lambda <- .8
obj<- LKrig( x,y,NC=3, NC.buffer=1, lambda=lambda,
nlevel=nlevel,alpha=alpha,a.wght=a.wght,
NtrA=5,iseed=122)
LKinfo<- obj$LKinfo
grid.info<- LKinfo$grid.info
PHI<- LKrig.basis( x,LKinfo)
Q <- LKrig.precision(LKinfo)
Q<- as.matrix(Q)
Mtest<- PHI%*% (solve( Q)) %*% t( PHI) + diag(lambda, N)
temp<- t(PHI)%*%PHI + lambda*Q
A<- Q*lambda
B1<- PHI%*% (solve( A)) %*% t( PHI) + diag(1, N)
B2<- t(PHI)%*%PHI + A
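# The checks below rely on the determinant identity
# |PHI A^{-1} PHI' + I| = |PHI' PHI + A| / |A| (here with A = lambda*Q),
# which lets the dense covariance determinant be computed from the sparse
# precision form.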
test.for.zero(lnDet( B1),lnDet( B2)- lnDet(A))
test.for.zero(
lnDet( PHI%*% (solve( Q*lambda)) %*% t( PHI) + diag(1, N)),
lnDet( t(PHI)%*%PHI + Q*lambda) - lnDet(Q*lambda) )
test.for.zero( lambda*B1, Mtest)
test.for.zero(lnDet( Mtest), lnDet(B2) - lnDet(lambda*Q) + N*log(lambda) )
test.for.zero(lnDet( Mtest), lnDet(B2) - lnDet(Q) + (-LKinfo$latticeInfo$m + N)*log(lambda) )
temp<- t(PHI)%*%PHI + lambda*Q
chol( temp)-> Mc
lnDetReg <- 2 * sum(log(diag(Mc)))
lnDetQ<- 2* sum( log( diag( chol(Q))))
lnDetCov<- lnDetReg - lnDetQ + (-LKinfo$latticeInfo$m + N)*log(lambda)
test.for.zero( lnDetCov, lnDet( Mtest))
test.for.zero( obj$lnDetCov, lnDet( Mtest), tag="LatticeKrig and direct test of lnDetCov")
set.seed(123)
weights<- runif(N)
W<- diag(weights)
lambda<- .5
PHI<- as.matrix(LKrig.basis( x,LKinfo))
Q <- as.matrix(LKrig.precision(LKinfo))
M1<- PHI%*%solve( Q)%*%t(PHI) + lambda*solve( W)
B1<- (t(PHI)%*%(W/lambda)%*%PHI + Q)
B2<- (1/lambda) * ( t(PHI)%*%(W)%*%PHI + lambda*Q)
B3<- t(PHI)%*%(W)%*%PHI + lambda*Q
N2<- nrow(Q)
hold<- lnDet( M1)
test.for.zero( lnDet( B1) - lnDet(Q) - lnDet( W/lambda), hold, tag="Det with weights1")
test.for.zero( lnDet( B2) - lnDet(Q) - lnDet( W/lambda), hold, tag="Det with weights1=2")
test.for.zero( lnDet( B3) - lnDet(Q) - sum( log( weights)) + (N-N2)*log(lambda), hold, tag="Det with weights3")
rm( obj)
data( ozone2)
x<-ozone2$lon.lat[1:10,]
y<- ozone2$y[16,1:10]
good <- !is.na( y)
x<- x[good,]
y<- y[good]
N<- length( y)
lambda <- .8
obj<- LKrig( x,y,NC=5, lambda=lambda,nlevel=nlevel,alpha=alpha,a.wght=a.wght,
NtrA=5,iseed=122)
obj0<- mKrig( x,y, lambda=lambda, m=2, cov.function="LKrig.cov",
cov.args=list(LKinfo=obj$LKinfo),
NtrA=5, iseed=122)
test.for.zero( obj$lnDetCov,obj0$lnDetCov, tag= "lnDetCov for mKrig and LatticeKrig")
test.for.zero( obj$quad.form, obj0$quad.form, tag= "quadratic forms for rho hat")
test.for.zero( obj0$lnProfileLike, obj$lnProfileLike,
tag="Profile Likelihood concentrated on lambda" )
rm( obj, obj0)
data( ozone2)
x<-ozone2$lon.lat[1:10,]
y<- ozone2$y[16,1:10]
good <- !is.na( y)
x<- x[good,]
y<- y[good]
N<- length( y)
alpha<- c(1,.5,.25)
nlevel<-3
a.wght<- list(5, 5, 4.5)
lambda <- .5
N<- length(y)
set.seed(243)
weights<- runif(N)*10 + 30
obj<- LKrig( x,y,weights,NC=5,
lambda=lambda,alpha=alpha,nlevel=nlevel, a.wght=a.wght, NtrA=5,iseed=122)
obj0<- mKrig( x,y,weights, lambda=lambda, m=2, cov.function="LKrig.cov",
cov.args=list(LKinfo=obj$LKinfo),
NtrA=5, iseed=122)
obj1<- Krig( x,y,weights=weights, lambda=lambda,GCV=TRUE, m=2,
cov.function="LKrig.cov", cov.args=list(LKinfo=obj$LKinfo))
test.for.zero( obj0$fitted.values, obj1$fitted.values)
test.for.zero( predict(obj0), predict(obj1), tag="predicted values mKrig/Krig w/weights")
test.for.zero( obj0$rhohat, obj1$rhohat,tag="compare rhohat for mKrig and Krig with weights")
test.for.zero( obj$fitted.values, obj0$fitted.values)
test.for.zero( obj$rho.MLE, obj0$rho.MLE)
test.for.zero( obj$lnDetCov, obj0$lnDetCov)
data( ozone2)
x<-ozone2$lon.lat[1:20,]
y<- ozone2$y[16,1:20]
good <- !is.na( y)
x<- x[good,]
y<- y[good]
N<- length(y)
set.seed(243)
weights<- runif(N)*10
N<- length( y)
alpha<- c(1,.5,.5)
nlevel<-3
a.wght<- list(4.2,4.5,4.5)
lambda <- .8
obj<- LKrig( x,y,weights=weights,NC=15, lambda=lambda,alpha=alpha,
nlevel=nlevel,a.wght=a.wght, return.cholesky=TRUE)
obj2<- LKrig( x,y,weights=weights,NC=15, lambda=2*lambda,alpha=alpha,
nlevel=nlevel,a.wght=a.wght, use.cholesky=obj$Mc)
obj3<- LKrig( x,y,weights=weights,NC=15, lambda=2*lambda,alpha=alpha,
nlevel=nlevel,a.wght=a.wght, return.cholesky=FALSE)
test.for.zero( obj3$c.coef, obj2$c.coef,
tag="reuse Mc test of LatticeKrig.coef c")
test.for.zero( obj3$d.coef, obj2$d.coef,
tag="reuse Mctest of LatticeKrig.coef d")
test.for.zero( obj2$lnProfileLike, obj3$lnProfileLike,
tag="reuse Mc test of lnProfileLike")
cat("Done testing LatticeKrig",fill=TRUE)
options( echo=FALSE) |
NULL
pkgplan__create_progress_bar <- function(what) {
bar <- new.env(parent = emptyenv())
bar$what <- what[, c("type", "filesize", "package", "cache_status")]
bar$what$idx <- seq_len(nrow(what))
bar$what$current <- 0L
bar$what$need <- bar$what$filesize
bar$what$status <- "todo"
bar$what$skip <-
what$type %in% c("installed", "deps") |
what$cache_status != "miss"
bar$what$status[bar$what$skip] <- "skip"
bar$what$cache_status[what$type %in% c("installed", "deps")] <- NA_character_
pkgplan__initial_pb_message(bar)
bar$chars <- progress_chars()
bar$chunks <- new.env(parent = emptyenv())
bar$start_at <- Sys.time()
bar$events <- list()
bar$lastmsg <- "Connecting..."
bar$show_time <- tolower(Sys.getenv("PKG_OMIT_TIMES")) != "true"
bar$show_size <- tolower(Sys.getenv("PKG_OMIT_SIZES")) != "true"
bar
}
pkgplan__init_progress_bar <- function(bar) {
bar$timer <- new_async_timer(
1/10,
function() pkgplan__show_progress_bar(bar)
)
bar$timer$listen_on("error", function(e) { stop(e) })
bar
}
pkgplan__initial_pb_message <- function(bar) {
num <- sum(bar$what$status == "todo")
unk <- sum(is.na(bar$what$filesize[bar$what$status == "todo"]))
bts <- sum(bar$what$filesize[bar$what$status == "todo"], na.rm = TRUE)
nch <- sum(bar$what$cache_status %in% "hit")
cbt <- sum(bar$what$filesize[bar$what$cache_status %in% "hit"], na.rm = TRUE)
if (num == 0) {
cli_alert_info(c(
"No downloads are needed",
if (nch > 0) ", {nch} pkg{?s} ",
if (cbt > 0) "{.size ({pretty_bytes(cbt)})} ",
if (nch > 0) "{qty(nch)}{?is/are} cached"
))
} else {
cli_alert_info(c(
"Getting",
if (bts > 0) " {num-unk} pkg{?s} {.size ({pretty_bytes(bts)})}",
if (bts > 0 && unk > 0) " and",
if (unk > 0) " {unk} pkg{?s} with unknown size{?s}",
if (nch > 0) ", {nch} ",
if (cbt > 0) "{.size ({pretty_bytes(cbt)})} ",
if (nch > 0) "cached"
))
}
if (should_show_progress_bar()) {
bar$status <- cli_status("", .auto_close = FALSE)
}
}
pkgplan__update_progress_bar <- function(bar, idx, event, data) {
time <- Sys.time()
sec <- as.character(floor(as.double(time - bar$start_at, units = "secs")))
if (event == "done") {
if (data$download_status == "Got") {
bar$what$status[idx] <- "got"
sz <- na.omit(file.size(c(data$fulltarget, data$fulltarget_tree)))[1]
if (!is.na(sz)) bar$what$filesize[idx] <- sz
cli_alert_success(c(
"Got {.pkg {data$package}} ",
"{.version {data$version}} ({data$platform})",
if (!is.na(sz) && bar$show_size) " {.size ({pretty_bytes(sz)})}"
))
if (!is.na(bar$what$filesize[idx])) {
bar$chunks[[sec]] <- (bar$chunks[[sec]] %||% 0) -
bar$what$current[idx] + bar$what$filesize[idx]
bar$what$current[idx] <- bar$what$filesize[idx]
}
} else if (grepl("^(Had|Current)", data$download_status)) {
bar$what$status[idx] <- "had"
bar$what$current[idx] <- 0L
bar$what$need[idx] <- 0L
if (identical(data$cache_status, "miss") && data$type != "deps") {
cli_alert_success(c(
"Cached copy of {.pkg {data$package}} ",
"{.version {data$version}} ({data$platform}) is the latest build"
))
}
} else if (data$download_status == "Failed") {
cli_alert_danger(c(
"Failed to download {.pkg {data$package}} ",
"{.version {data$version}} ({data$platform})"
))
bar$what$status[idx] <- "error"
bar$what$need[idx] <- bar$what$current[idx]
} else {
stop("Unknown download status, internal pkgdepends error :(")
}
return(TRUE)
}
if (event == "error") {
cli_alert_danger(c(
"Failed to download {.pkg {data$package}} ",
"{.version {data$version}} ({data$platform})"
))
bar$what$status[idx] <- "error"
bar$what$need[idx] <- bar$what$current[idx]
return(TRUE)
}
bar$what$status[idx] <- "data"
bar$events$data <- unique(c(bar$events$data, idx))
bar$chunks[[sec]] <- (bar$chunks[[sec]] %||% 0) -
bar$what$current[idx] + data$current
bar$what$current[idx] <- data$current
if (data$total > 0) bar$what$filesize[idx] <- bar$what$need[idx] <- data$total
TRUE
}
pkgplan__show_progress_bar <- function(bar) {
if (is.null(bar$status)) return()
if (sum(!bar$what$skip) == 0) return()
parts <- calculate_progress_parts(bar)
str <- paste0(
" {parts$rate} {parts$line} {parts$percent} ",
"| {parts$pkg_done}/{parts$pkg_total} pkg{?s} ",
if (!is.na(parts$bytes_total)) "| ETA {parts$eta} ",
"| {parts$msg}"
)
bar$events <- list()
cli_status_update(bar$status, str)
}
calculate_rate <- function(start, now, chunks) {
time_at <- as.double(now - start, units = "secs")
time_at_s <- as.integer(floor(time_at))
labels <- as.character(seq(time_at_s, time_at_s - 3L, by = -1L))
data <- unlist(mget(labels, envir = chunks, ifnotfound = 0L))
fact <- time_at - max(time_at_s - 3, 0)
rate <- sum(data) / fact
if (is.nan(rate)) rate <- 0
if (rate == 0 && time_at < 4) {
rstr <- strrep(" ", 8)
} else {
rstr <- paste0(pretty_bytes(rate, style = "6"), "/s")
}
list(rate = rate, rstr = rstr)
}
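# calculate_rate() sums the bytes recorded for (at most) the last four
# one-second chunks and divides by the elapsed time they cover, giving a
# smoothed download rate in bytes per second.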
calculate_eta <- function(total, current, rate) {
if (rate == 0) {
etas <- NA
estr <- "??s "
} else {
todo <- total - current
etas <- as.difftime(todo / rate, units = "secs")
if (etas < 1) {
estr <- "<1s "
} else {
estr <- format(pretty_dt(etas, compact = TRUE), width = 6)
}
}
list(etas = etas, estr = estr)
}
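# Illustrative arithmetic: with 75 bytes left to download at 5 bytes/second,
# calculate_eta() reports an ETA of 15 seconds; a zero rate yields the
# placeholder "??s".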
calculate_progress_parts <- function(bar) {
parts <- list()
whatx <- bar$what[! bar$what$skip, ]
now <- Sys.time()
pkg_done <- sum(whatx$status %in% c("got", "had", "error"))
pkg_total <- nrow(whatx)
parts$pkg_done <- format(c(pkg_done, pkg_total), justify = "right")[1]
parts$pkg_total <- as.character(pkg_total)
pkg_percent <- pkg_done / pkg_total
bytes_done <- sum(whatx$current, na.rm = TRUE)
bytes_total <- sum(whatx$need)
parts$bytes_total <- bytes_total
bytes_percent <- bytes_done / bytes_total
percent <- if (!is.na(bytes_percent)) bytes_percent else pkg_percent
if (round(percent * 100) == 100 && percent < 1) percent <- 0.99
parts$percent <- format(
paste0(round(100 * percent), "%"),
width = 4,
justify = "right"
)
rate <- calculate_rate(bar$start_at, now, bar$chunks)
parts$rate <- rate$rstr
parts$msg <- bar$lastmsg
if (length(bar$events$data) > 0) {
pkgs <- bar$what$package[bar$what$idx %in% bar$events$data]
parts$msg <- paste0(
"Getting ",
glue_collapse(pkgs, sep = ", ", last = " and ")
)
bar$lastmsg <- parts$msg
}
parts$line <- make_bar(bar$chars, percent, width = 15)
if (!is.na(bytes_total)) {
parts$eta <- calculate_eta(bytes_total, bytes_done, rate$rate)$estr
}
parts
}
pkgplan__done_progress_bar <- function(bar) {
if (is.null(bar$status)) return()
end_at <- Sys.time()
dt <- pretty_dt(Sys.time() - bar$start_at)
cli_status_clear(bar$status)
bar$status <- NULL
bts <- pretty_bytes(sum(bar$what$current))
dld <- sum(bar$what$status == "got")
cch <- sum(bar$what$status == "had")
err <- sum(bar$what$status == "error")
if (sum(!bar$what$skip) == 0) {
} else if (err == 0 && dld == 0) {
cli_alert_success("No downloads needed, all packages are cached")
} else if (err == 0) {
cli_alert_success(
paste0(
"Downloaded {dld} package{?s} {.size ({bts})}",
        if (bar$show_time) " in {.time {dt}}"
)
)
} else {
cli_alert_danger(
"Failed to download {err} package{?s}. "
)
}
} |
library(lpSolve)
m <- matrix(c(3, 2, 1, 9, 2, 3, 2, 9, 1, 9, 3, 9, 9, 1, 9, 9), 4, dimnames = list(c(1, 2, 3, "U"), c("A", "B", "C", "D")))
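# lp.assign() solves the linear assignment problem, minimizing the total cost
# of pairing each row with exactly one column; fm$solution below is a 0/1
# matrix, and multiplying it by the column indices recovers each row's
# assigned job.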
m
fm <- lp.assign(m)
fm$solution
student <- rownames(m)
ix <- round(fm$solution %*% seq_len(ncol(m)))
job <- colnames(m)[ifelse(ix == 0, NA, ix)]
data.frame(student, job) |
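# Hedged usage note (not part of the original script): the permutation matrix in
# fm$solution minimises total assignment cost, so summing the cost matrix over the
# chosen cells should reproduce the reported optimum in fm$objval.
sum(m * fm$solution)
fm$objval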
context("Estimator - horvitz_thompson")
test_that("Horvitz-Thompson matches d-i-m under certain conditions", {
n <- 4
dat <- data.frame(
y0 = rnorm(n),
z = rep(0:1, each = n / 2),
ps = rep(0.5, n)
)
dat$y1 <- dat$y0 + 0.43
dat$y <- ifelse(dat$z, dat$y1, dat$y0)
expect_equal(
coef(horvitz_thompson(
y ~ z,
condition_prs = ps,
data = dat
)),
coef(difference_in_means(
y ~ z,
data = dat
))
)
})
test_that("Horvitz-Thompson works in simple case", {
n <- 40
dat <- data.frame(
y = rnorm(n)
)
simp_decl <- randomizr::declare_ra(N = n, prob = 0.4, simple = T)
dat$z <- randomizr::conduct_ra(simp_decl)
ht_simp <- horvitz_thompson(
y ~ z,
data = dat,
ra_declaration = simp_decl,
return_condition_pr_mat = TRUE
)
ht_simp_no <- horvitz_thompson(
y ~ z,
data = dat,
ra_declaration = simp_decl,
return_condition_pr_mat = TRUE,
se_type = "none"
)
expect_equal(
ht_simp$coefficients,
ht_simp_no$coefficients
)
expect_equivalent(
as.numeric(tidy(ht_simp_no)[c("std.error", "p.value", "conf.low", "conf.high")]),
rep(NA_real_, 4)
)
ht_const <- horvitz_thompson(
y ~ z,
data = dat,
ra_declaration = simp_decl,
se_type = "constant"
)
ht_rev <- horvitz_thompson(
y ~ z,
data = dat,
condition1 = 1,
condition2 = 0,
ra_declaration = simp_decl,
return_condition_pr_mat = TRUE
)
dat$z[1] <- 2
expect_error(
horvitz_thompson(
y ~ z,
data = dat,
condition1 = 0,
condition2 = 2,
ra_declaration = simp_decl
)
)
expect_equal(
tidy(ht_simp)[, c("estimate", "std.error")],
tidy(ht_rev)[, c("estimate", "std.error")] * c(-1, 1)
)
expect_equal(
ht_simp$condition_pr_mat,
NULL
)
comp_decl <- randomizr::declare_ra(N = n, prob = 0.4, simple = FALSE)
dat$z_comp <- randomizr::conduct_ra(comp_decl)
dat$pr_comp <- 0.4
expect_equal(
ht_comp <- horvitz_thompson(y ~ z_comp, data = dat, simple = FALSE),
horvitz_thompson(y ~ z_comp, data = dat, ra_declaration = comp_decl)
)
expect_equal(
ht_comp,
horvitz_thompson(y ~ z_comp, data = dat, simple = FALSE, condition_prs = pr_comp)
)
ht_comp_no <- horvitz_thompson(y ~ z_comp, data = dat, simple = FALSE, se_type = "none")
expect_equal(
ht_comp$coefficients,
ht_comp_no$coefficients
)
expect_equivalent(
as.numeric(tidy(ht_comp_no)[c("std.error", "p.value", "conf.low", "conf.high")]),
rep(NA_real_, 4)
)
dat$pr_wrong <- dat$pr_comp
dat$pr_wrong[1] <- 0.5
expect_error(
horvitz_thompson(y ~ z_comp, data = dat, simple = FALSE, condition_prs = pr_wrong),
"Treatment probabilities must be fixed for complete randomized designs"
)
ht_with <- with(
dat,
horvitz_thompson(y ~ z_comp, simple = FALSE, condition_prs = pr_comp)
)
pr_comp <- dat$pr_comp
y <- dat$y
z_comp <- dat$z_comp
ht_glob <- horvitz_thompson(y ~ z_comp, simple = FALSE, condition_prs = pr_comp)
ht_rec <- horvitz_thompson(y ~ z_comp, simple = FALSE, condition_prs = 0.4)
expect_equal(
ht_with,
ht_glob
)
expect_equal(
ht_with,
ht_rec
)
ht_nod <- horvitz_thompson(y ~ z_comp, ra_declaration = comp_decl)
ht_d <- horvitz_thompson(y ~ z_comp, data = dat, ra_declaration = comp_decl)
expect_equal(
tidy(ht_nod),
tidy(ht_d)
)
})
test_that("Horvitz-Thompson works with clustered data", {
n <- 8
dat <- data.frame(
y = rnorm(n),
cl = rep(1:4, each = 2)
)
clust_crs_decl <- randomizr::declare_ra(N = nrow(dat), clusters = dat$cl, prob = 0.5)
dat$z <- randomizr::conduct_ra(clust_crs_decl)
ht_crs_decl <- horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_crs_decl)
expect_true(
!is.na(ht_crs_decl$coefficients)
)
expect_equivalent(
ht_crs_decl$df,
NA
)
ht_crs_decl_no <- horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_crs_decl, se_type = "none")
expect_equal(
ht_crs_decl$coefficients,
ht_crs_decl_no$coefficients
)
expect_equivalent(
as.numeric(tidy(ht_crs_decl_no)[c("std.error", "p.value", "conf.low", "conf.high")]),
rep(NA_real_, 4)
)
expect_message(
horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = rep(0.5, nrow(dat))),
"Assuming simple cluster randomization"
)
expect_message(
horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = rep(0.5, nrow(dat)), simple = FALSE),
NA
)
expect_equal(
ht_crs_decl,
horvitz_thompson(y ~ z, data = dat, clusters = cl, simple = FALSE)
)
expect_error(
horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_crs_decl, se_type = "constant"),
"`se_type` = 'constant' only supported for simple random"
)
clust_srs_decl <- randomizr::declare_ra(
N = nrow(dat),
clusters = dat$cl,
prob = 0.4,
simple = TRUE
)
ht_srs_decl <- horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_srs_decl)
ht_srs_decl_no <- horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_srs_decl, se_type = "none")
expect_equal(
ht_srs_decl$coefficients,
ht_srs_decl_no$coefficients
)
expect_equivalent(
as.numeric(tidy(ht_srs_decl_no)[c("std.error", "p.value", "conf.low", "conf.high")]),
rep(NA_real_, 4)
)
clust_srs_mat <- declaration_to_condition_pr_mat(clust_srs_decl)
ht_srs_nodecl <- horvitz_thompson(y ~ z, data = dat, condition_pr_mat = clust_srs_mat)
ht_srs_nodecl_no <- horvitz_thompson(y ~ z, data = dat, condition_pr_mat = clust_srs_mat, se_type = "none")
expect_equal(
ht_srs_nodecl$coefficients,
ht_srs_nodecl_no$coefficients
)
expect_identical(
ht_srs_decl,
ht_srs_cl <- horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_pr_mat = clust_srs_mat)
)
ht_srs_cl_no <- horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_pr_mat = clust_srs_mat, se_type = "none")
expect_equal(
ht_srs_cl$coefficients,
ht_srs_cl_no$coefficients
)
clbl_dat <- data.frame(
cl_new = cl_new <- c(1, 2, 3, 4, 5, 5, 6, 6, 7, 7, 8, 8),
bl = rep(1:3, each = 4),
y = rnorm(12)
)
blcl_ra <- randomizr::declare_ra(blocks = clbl_dat$bl, clusters = clbl_dat$cl_new, block_m = c(1, 2, 1))
clbl_dat$z_clbl <- randomizr::conduct_ra(blcl_ra)
expect_equivalent(
horvitz_thompson(y ~ z_clbl, data = clbl_dat, ra_declaration = blcl_ra),
horvitz_thompson(y ~ z_clbl, data = clbl_dat, blocks = bl, clusters = cl_new)
)
dat$ps <- 0.4
expect_identical(
ht_srs_decl,
ht_srs_prs <- horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = ps)
)
ht_srs_prs_no <- horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = ps, se_type = "none")
expect_equal(
ht_srs_prs$coefficients,
ht_srs_prs_no$coefficients
)
expect_error(
horvitz_thompson(y ~ z, data = dat, ra_declaration = clust_srs_decl, se_type = "constant"),
"`se_type` = 'constant' only supported for simple random designs at the moment"
)
dat$p_wrong <- dat$ps
dat$p_wrong[1] <- 0.545
expect_error(
horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = p_wrong),
"`condition_prs` must be constant within `cluster`"
)
dat$p_wrong[1] <- 1.5
expect_error(
horvitz_thompson(y ~ z, data = dat, clusters = cl, condition_prs = p_wrong),
"`condition_prs` must be a vector of positive values no greater than 1"
)
dat$z_wrong <- dat$z
dat$z_wrong[1:2] <- c(0, 1)
table(dat$z_wrong, dat$cl)
expect_error(
horvitz_thompson(y ~ z_wrong, data = dat, clusters = cl, condition_prs = ps),
"Treatment condition must be constant within `clusters`"
)
})
test_that("Horvitz-Thompson works with missingness", {
n <- 40
dat <- data.frame(
y = rnorm(n),
bl = rep(1:10, each = 4),
ps = 0.35
)
decl <- randomizr::declare_ra(n, prob = 0.35)
dat$z <- randomizr::conduct_ra(decl)
missing_dat <- dat
missing_dat$y[1] <- NA
expect_error(
ht_miss <- horvitz_thompson(y ~ z, data = missing_dat, ra_declaration = decl),
NA
)
expect_error(
ht_miss_pr <- horvitz_thompson(y ~ z, data = missing_dat, condition_prs = 0.35, simple = FALSE),
NA
)
expect_equal(ht_miss, ht_miss_pr)
expect_error(
horvitz_thompson(y ~ z, data = missing_dat, ra_declaration = decl),
NA
)
missing_dat$ps[2] <- NA
dat$drop_these <- c(1, 1, rep(0, times = n - 2))
expect_warning(
ht_miss <- horvitz_thompson(y ~ z, data = missing_dat, condition_prs = ps),
"missingness in the condition_pr"
)
expect_equal(
horvitz_thompson(y ~ z, data = dat, condition_prs = ps, subset = drop_these == 0),
ht_miss
)
})
test_that("Estimating Horvitz-Thompson can be done two ways with blocks", {
n <- 40
dat <- data.frame(
y = rnorm(n),
bl = rep(1:10, each = 4)
)
bl_ra <- randomizr::declare_ra(blocks = dat$bl)
dat$z <- randomizr::conduct_ra(bl_ra)
bl_pr_mat <- declaration_to_condition_pr_mat(bl_ra)
ht_declare_bl <- horvitz_thompson(y ~ z, data = dat, ra_declaration = bl_ra)
ht_condmat_bl <- horvitz_thompson(y ~ z, data = dat, condition_pr_mat = bl_pr_mat)
expect_equivalent(
tidy(ht_declare_bl),
tidy(ht_condmat_bl)
)
ht_declare_bl_no <- horvitz_thompson(y ~ z, data = dat, ra_declaration = bl_ra, se_type = "none")
ht_condmat_bl_no <- horvitz_thompson(y ~ z, data = dat, condition_pr_mat = bl_pr_mat, se_type = "none")
expect_equal(
ht_declare_bl$coefficients,
ht_declare_bl_no$coefficients
)
expect_equal(
ht_condmat_bl$coefficients,
ht_condmat_bl_no$coefficients
)
dat$mps <- rep(1:20, each = 2)
mp_ra <- randomizr::declare_ra(blocks = dat$mps)
dat$z <- randomizr::conduct_ra(mp_ra)
mp_pr_mat <- declaration_to_condition_pr_mat(mp_ra)
ht_declare_mp <- horvitz_thompson(y ~ z, data = dat, ra_declaration = mp_ra)
ht_condmat_mp <- horvitz_thompson(y ~ z, data = dat, condition_pr_mat = mp_pr_mat)
expect_equivalent(
tidy(ht_declare_mp),
tidy(ht_condmat_mp)
)
dat$p <- tapply(dat$z, dat$bl, mean)[dat$bl]
expect_message(
ht_declare_mp <- horvitz_thompson(y ~ z, data = dat, blocks = bl, condition_prs = p, simple = TRUE),
"Assuming complete random assignment of clusters within blocks."
)
expect_message(
ht_declare_mp <- horvitz_thompson(y ~ z, data = dat, blocks = bl, condition_prs = p, simple = FALSE),
NA
)
})
test_that("Horvitz-Thompson properly checks arguments and data", {
n <- 8
dat <- data.frame(
y = rnorm(n),
ps = 0.4,
z = sample(rep(0:1, each = n / 2)),
x = runif(n),
cl = rep(1:4, each = 2),
bl = rep(1:2, each = 4)
)
decl <- randomizr::declare_ra(N = n, prob = 0.4, simple = FALSE)
expect_identical(
horvitz_thompson(y ~ z, data = dat),
horvitz_thompson(y ~ z, data = dat, condition_prs = rep(0.5, times = nrow(dat)))
)
expect_error(
horvitz_thompson(y ~ z, data = dat, condition_prs = ps, ra_declaration = decl),
"Cannot use `ra_declaration` with any of"
)
expect_error(
horvitz_thompson(y ~ z, data = dat, condition_pr_mat = declaration_to_condition_pr_mat(decl), ra_declaration = decl),
"Cannot use `ra_declaration` with any of"
)
expect_error(
horvitz_thompson(y ~ z + x, data = dat, ra_declaration = decl),
"must have only one variable on the right-hand side"
)
expect_error(
horvitz_thompson(y ~ z, data = dat, ra_declaration = randomizr::declare_ra(N = n + 1, prob = 0.4)),
"variable lengths differ"
)
ht_o <- horvitz_thompson(y ~ z, data = dat, ci = FALSE)
expect_equivalent(
as.matrix(tidy(horvitz_thompson(y ~ z, data = dat, ci = FALSE))[, c("p.value", "conf.low", "conf.high")]),
matrix(NA, nrow = 1, ncol = 3)
)
expect_error(
horvitz_thompson(
y ~ z,
data = dat,
condition_pr_mat = matrix(rnorm(4), 2, 2)
),
"cleaning the data"
)
})
test_that("Works without variation in treatment", {
set.seed(1)
dat <- data.frame(
y = rnorm(20),
bl = 1:5,
ps = 0.4
)
dat$z_const <- 1
ht_const_1 <- horvitz_thompson(
y ~ z_const,
data = dat
)
ht_const_cond1 <- horvitz_thompson(
y ~ z_const,
data = dat,
condition2 = 1
)
expect_equivalent(
ht_const_1,
ht_const_cond1
)
expect_equivalent(coef(ht_const_1), mean(dat$y))
expect_equivalent(ht_const_1$std.error, 1 / (nrow(dat)) * sqrt(sum(dat$y ^ 2)))
expect_equivalent(
ht_const_1$df,
NA
)
ht_const <- horvitz_thompson(
y ~ z_const,
data = dat,
condition_prs = ps
)
expect_equivalent(coef(ht_const), mean(dat$y / dat$ps))
expect_equivalent(ht_const$std.error, 1 / (nrow(dat)) * sqrt(sum((dat$y / dat$ps) ^ 2)))
ht_block <- horvitz_thompson(
y ~ z_const,
data = dat,
blocks = bl,
condition_prs = ps,
return_condition_pr_mat = TRUE
)
expect_equivalent(coef(ht_block), mean(dat$y / dat$ps))
dat$z_diff <- as.numeric(dat$bl <= 2)
ht_block <- horvitz_thompson(
y ~ z_diff,
data = dat,
blocks = bl,
condition_prs = rep(0.4, nrow(dat))
)
ht_block
dat$z <- 0
ht_zero <- horvitz_thompson(
y ~ z,
data = dat,
blocks = bl,
condition_prs = rep(0.5, nrow(dat))
)
expect_identical(ht_zero$term, "z0")
ht_rev <- horvitz_thompson(
y ~ z,
data = dat,
blocks = bl,
condition1 = 0,
condition_prs = rep(0.5, nrow(dat))
)
expect_identical(ht_rev$term, "z")
expect_identical(
tidy(ht_zero)[c("estimate", "std.error")],
tidy(ht_rev)[c("estimate", "std.error")] * c(-1, 1)
)
cpm <- diag(0.5, nrow = 4, ncol = 4)
y <- rnorm(2)
t <- c(0, 1)
expect_error(
horvitz_thompson(y ~ t, condition_pr_mat = cpm),
NA
)
t <- c(1, 1)
expect_error(
horvitz_thompson(y ~ t, condition_pr_mat = cpm),
NA
)
})
test_that("multi-valued treatments not allowed in ra_declaration", {
dat <- data.frame(
y = rnorm(20),
ps = 0.4
)
decl_multi <- randomizr::declare_ra(N = 20, prob_each = c(0.4, 0.4, 0.2))
dat$z <- randomizr::conduct_ra(decl_multi)
expect_error(
horvitz_thompson(y ~ z, data = dat, ra_declaration = decl_multi),
"Cannot use horvitz_thompson\\(\\) with a `ra_declaration` with"
)
ht_condition <- horvitz_thompson(
y ~ z,
data = dat,
condition_prs = ps,
condition1 = "T1",
condition2 = "T2"
)
subdat <- dat[dat$z != "T3", ]
ht_subdat <- horvitz_thompson(
y ~ z,
data = subdat,
condition_prs = ps
)
ht_subset <- horvitz_thompson(
y ~ z,
data = dat,
subset = z != "T3",
condition_prs = ps
)
expect_equal(
ht_condition,
ht_subdat
)
expect_equal(
ht_condition,
ht_subset
)
}) |
library(shinytest)
expect_pass(testApp("../", suffix = osName())) |
plot.mlmc.test <- function(x, which="all", cols=NA, ...) {
if(length(which)==1 && which=="all") {
which <- c("var", "mean", "consis", "kurt", "Nl", "cost")
}
p <- list()
if("var" %in% which) {
p <- c(p, list(
ggplot(data.frame(l=rep(0:x$L, 2),
var=c(log2(x$var1), log2(x$var2)),
Method=c(rep("MLMC", x$L+1), rep("MC", x$L+1)))) +
geom_point(aes_string(x="l", y="var", colour="Method")) +
geom_line(aes_string(x="l", y="var", colour="Method", linetype="Method")) +
xlab("Level") +
ylab(expression(log[2](Variance)))
))
}
if("mean" %in% which) {
p <- c(p, list(
ggplot(data.frame(l=rep(0:x$L, 2),
mean=c(log2(x$del1), log2(x$del2)),
Method=c(rep("MLMC", x$L+1), rep("MC", x$L+1)))) +
geom_point(aes_string(x="l", y="mean", colour="Method")) +
geom_line(aes_string(x="l", y="mean", colour="Method", linetype="Method")) +
xlab("Level") +
ylab(expression(log[2](Mean)))
))
}
if("consis" %in% which) {
p <- c(p, list(
ggplot(data.frame(l=0:x$L,
consis=x$chk1)) +
geom_point(aes_string(x="l", y="consis")) +
geom_line(aes_string(x="l", y="consis")) +
xlab("Level") +
ylab("Consistency check")
))
}
if("kurt" %in% which) {
p <- c(p, list(
ggplot(data.frame(l=0:x$L,
kurt=x$kur1)) +
geom_point(aes_string(x="l", y="kurt")) +
geom_line(aes_string(x="l", y="kurt")) +
xlab("Level") +
ylab("Kurtosis")
))
}
if("Nl" %in% which) {
p <- c(p, list(
ggplot(data.frame(l=unlist(lapply(sapply(x$Nl, length), seq)),
Nl=log2(unlist(x$Nl)),
Epsilon=as.factor(rep(x$eps.v, times=sapply(x$Nl, length))))) +
geom_point(aes_string(x="l", y="Nl", colour="Epsilon")) +
geom_line(aes_string(x="l", y="Nl", colour="Epsilon", linetype="Epsilon")) +
xlab("Level") +
ylab(expression(log[2](N[l])))
))
}
if("cost" %in% which) {
p <- c(p, list(
ggplot(data.frame(eps=rep(x$eps.v, 2),
cost=c(x$eps.v^2*x$mlmc_cost, x$eps.v^2*x$std_cost),
Method=c(rep("MLMC", length(x$eps.v)), rep("MC", length(x$eps.v))))) +
geom_point(aes_string(x="eps", y="cost", colour="Method")) +
geom_line(aes_string(x="eps", y="cost", colour="Method", linetype="Method")) +
xlab(expression(log[10](epsilon))) +
ylab(expression(log[10](epsilon^2*Cost))) +
scale_x_log10() +
scale_y_log10() +
annotation_logticks()
))
}
if(is.na(cols)) {
if(length(p) <= 3)
cols <- length(p)
if(length(p) == 4)
cols <- 2
if(length(p) > 4)
cols <- 3
}
p <- c(p, list(cols=cols))
do.call(multiplot, p)
} |
rx_one_or_more <- function(.data = NULL, mode = "greedy") {
switch(
mode,
greedy = new_rx(paste0(.data, "+")),
lazy = new_rx(paste0(.data, "+?")),
stop("Please, provide valid 'mode' argument")
)
}
rx_none_or_more <- function(.data = NULL, mode = "greedy") {
switch(
mode,
greedy = new_rx(paste0(.data, "*")),
lazy = new_rx(paste0(.data, "*?")),
stop("Please, provide valid 'mode' argument")
)
}
rx_multiple <- function(.data = NULL, value = NULL, min = NULL, max = NULL) {
if(!is.null(value)) {
value <- new_rx(paste0("(", sanitize(value), ")"))
}
if(is.null(min) & is.null(max)) {
rep <- paste0("*")
} else if(!is.null(min) & is.null(max)) {
rep <- paste0("{", min, ",}")
} else if(!is.null(min) & !is.null(max)) {
rep <- paste0("{", min, ",", max, "}")
} else if(is.null(min) & !is.null(max)) {
rep <- paste0("{,", max, "}")
}
  paste0(.data, value, rep)
} |
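# Hedged usage sketch (illustration only): new_rx() and sanitize() are package
# internals that are not defined above, so simple stand-ins are supplied below
# only if they are missing, purely to show the patterns these builders produce.
if (!exists("new_rx"))   new_rx   <- function(x) x
if (!exists("sanitize")) sanitize <- function(x) gsub("([^A-Za-z0-9])", "\\\\\\1", x)
rx_one_or_more("a")                               # "a+"
rx_one_or_more("a", mode = "lazy")                # "a+?"
rx_multiple("x", value = "ab", min = 2, max = 4)  # "x(ab){2,4}"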
print.hdnom.nomogram <- function(x, ...) {
print(as_nomogram_raw(
fit = x$"nomogram", fun = x$"bhfun",
fun.at = x$"fun.at", funlabel = x$"funlabel",
lp = TRUE, vnames = "labels", ...
))
invisible(x)
}
plot.hdnom.nomogram <- function(x, ...) {
plot(as_nomogram_raw(
fit = x$"nomogram", fun = x$"bhfun",
fun.at = x$"fun.at", funlabel = x$"funlabel",
lp = TRUE, vnames = "labels", ...
))
invisible(x)
} |
knitr::opts_chunk$set(
collapse = TRUE,
  comment = "#>"
);
raaGraph <- DiagrammeR::create_graph();
raaGraph <- DiagrammeR::add_node(raaGraph, label="Gedrag");
raaGraph <- DiagrammeR::add_node(raaGraph, label="Intentie", to=1);
raaGraph <- DiagrammeR::add_node(raaGraph, label="Attitude", to=2);
raaGraph <- DiagrammeR::add_node(raaGraph, label="Waargenomen norm", to=2);
raaGraph <- DiagrammeR::add_node(raaGraph, label="Waargenomen gedragscontrole", to=2);
raaGraph <-
behaviorchange::apply_graph_theme(raaGraph,
c("layout", "dot", "graph"),
c("rankdir", "LR", "graph"),
c("outputorder", "nodesfirst", "graph"),
c("fixedsize", "false", "node"),
c("shape", "box", "node"),
c("style", "rounded,filled", "node"),
c("color", "
c("color", "
c("dir", "forward", "edge"),
c("fillcolor", "
if (knitr::is_latex_output()) {
dot_code <- DiagrammeR::generate_dot(raaGraph);
graphSvg <-
DiagrammeRsvg::export_svg(DiagrammeR::grViz(dot_code));
graphSvg <-
sub(".*\n<svg ", "<svg ", graphSvg);
graphSvg <- gsub('<svg width=\"[0-9]+pt\" height=\"[0-9]+pt\"\n viewBox=',
'<svg width="2000px" height="1000px" viewBox=',
graphSvg);
grid::grid.newpage();
grid::grid.raster(png::readPNG(rsvg::rsvg_png(charToRaw(graphSvg))));
} else if (knitr::is_html_output()) {
DiagrammeR::render_graph(raaGraph);
}
abcd_specs_dutch_xtc <- behaviorchange::abcd_specs_dutch_xtc;
names(abcd_specs_dutch_xtc) <-
c("Gedrags-veranderings-principes",
"Voorwaarden voor effectiviteit",
"Toepassingen",
"Sub-determinanten",
"Determinanten",
"Sub-gedragingen",
"Doelgedrag");
if (knitr::is_latex_output()) {
cat("\n
\\newpage\n
\\blandscape\n
\n
");
print(
kableExtra::kable_styling(kableExtra::column_spec(
knitr::kable(abcd_specs_dutch_xtc,
caption="Een voorbeeld van een ABCD matrix.",
booktabs = TRUE,
row.names = FALSE,
longtable = TRUE),
column = 1:7,
width = c("2.5cm", "5cm", "4cm",
"3.5cm", "2cm", "2.5cm", "1.8cm")
)));
cat("\n
\\elandscape\n
\\newpage\n
\n
");
} else {
knitr::kable(abcd_specs_dutch_xtc);
}
knitr::include_graphics("abcd_specs_dutch_xtc.png"); |
predict.GMDH <- function(object, x, type = "class", ...){
store<-NULL
i=1
store[[i]]<-list()
idn = 1:object$nvar
combinations = t(combn(order(idn), 2))
store[[i]][[1]]<-lapply(1:object$neurons[i], function(j) cbind(1, x[, combinations[j, ]]))
store[[i]][[2]]<-lapply(1:object$neurons[i], function(j) as.numeric(t(object$architecture[[i]][[2]][[j]]) %*% t(store[[i]][[1]][[j]])))
store[[i]][[3]]<-do.call("cbind", lapply(object$architecture[[i]][[7]], function(j) store[[i]][[2]][[j]]))
if (i != object$nlayer){
repeat{
i<-i+1
store[[i]]<-list()
idn = c(1:object$sneurons[i-1])
combinations = t(combn(order(idn), 2))
store[[i]][[1]]<-lapply(1:object$neurons[i], function(j) cbind(1, store[[i-1]][[3]][, combinations[j, ]]))
store[[i]][[2]]<-lapply(1:object$neurons[i], function(j) as.numeric(t(object$architecture[[i]][[2]][[j]]) %*% t(store[[i]][[1]][[j]])))
store[[i]][[3]]<-do.call("cbind", lapply(object$architecture[[i]][[7]], function(j) store[[i]][[2]][[j]]))
if (i == object$nlayer) break
}
}
prob <- as.numeric(store[[object$nlayer]][[2]][[object$architecture[[i]][[8]]]])
prob2<-ifelse(prob<0, 0, ifelse(prob>1, 1, prob))
if (type == "probability"){
out<-cbind(1-prob2,prob2)
colnames(out) <- object$levels
}else if (type == "class"){
out <- factor(ifelse(prob2>=0.5,object$levels[2],object$levels[1]))
}else {stop("Please correct type option.")}
return(out)
} |
knitr::opts_chunk$set(
collapse = TRUE,
  comment = "#>"
)
library(Repliscope)
knitr::kable(head(Repliscope::W303_G2, 5))
knitr::kable(head(W303,5)) |
randInitializeWeights <- function(L_in, L_out) {
W <- matrix(0,L_out, 1 + L_in)
epsilon_init <- 0.12
rnd <- runif(L_out * (1 + L_in))
rnd <- matrix(rnd,L_out,1 + L_in)
W <- rnd * 2 * epsilon_init - epsilon_init
W
} |
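# Hedged usage sketch: random initialisation for a layer mapping 3 inputs to 5
# units; the extra column holds the bias weights and all values lie in
# (-epsilon_init, epsilon_init) = (-0.12, 0.12).
set.seed(1)
Theta1 <- randInitializeWeights(L_in = 3, L_out = 5)
dim(Theta1)    # 5 x 4
range(Theta1)  # within c(-0.12, 0.12)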
pow.work <- function(t,pow="const",p){
nt = length(t)
const <- function(t,c) {y<- rep(c,length(t));return (y)}
linear <- function(t,c){y<- c*t; return (y)}
parab <- function(t,c){y<-(1/2)*c*t^2; return (y)}
if(pow=="const") model <- list(pow=const,wrk=linear)
if(pow=="linear") model <- list(pow=linear,wrk=parab)
p.w <- matrix(ncol=2,nrow=nt)
p.w[,1] <- model$pow(t,p)
p.w[,2] <- model$wrk(t,p)
return(list(t=t,p.w=p.w))
}
pow.work.plot <- function(x){
ymax <- 1.2*max(x$p.w[,1]); tmax <- max(x$t)
tlab <- expression(italic("t")*"[sec]")
plab <- expression(italic("p")*"("*italic(t)*")[W]")
wlab <- expression(italic("w")*"("*italic(t)*")[J]")
par(mar = c(5,5,2,5))
matplot(x$t,x$p.w[,1],type="l", ylim=c(0,ymax), xlab=tlab,
ylab=plab,lty=1,col=1)
par(new=T)
ymax <- 1.4*max(x$p.w[,2])
plot(x$t,x$p.w[,2], axes=F, type="l", ylim=c(0,ymax), xlab=NA,
ylab=NA,lty=2,col=1)
axis(side=4); mtext(side=4, line=3, wlab)
legend('topright',legend=c("Power","Work"),lty=1:2,col=1,bg='white',cex=0.7)
} |
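# Hedged usage sketch: constant power of 2 W over 10 s, so work grows linearly
# as w(t) = 2 t; the plot draws power on the left axis and work on the right.
tt <- seq(0, 10, by = 0.1)
pw <- pow.work(tt, pow = "const", p = 2)
pow.work.plot(pw)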
.fetchNASIS_components <- function(SS = TRUE,
rmHzErrors = TRUE,
nullFragsAreZero = TRUE,
fill = FALSE,
stringsAsFactors = default.stringsAsFactors(),
dsn = NULL) {
if(exists('component.hz.problems', envir=soilDB.env))
assign('component.hz.problems', value=character(0), envir=soilDB.env)
f.comp <- get_component_data_from_NASIS_db(SS = SS, stringsAsFactors = stringsAsFactors, dsn = dsn, nullFragsAreZero = nullFragsAreZero)
f.chorizon <- get_component_horizon_data_from_NASIS_db(SS = SS, fill = fill, dsn = dsn, nullFragsAreZero = nullFragsAreZero)
f.copm <- get_component_copm_data_from_NASIS_db(SS = SS, stringsAsFactors = stringsAsFactors, dsn = dsn)
f.cogeomorph <- get_component_cogeomorph_data_from_NASIS_db2(SS = SS, dsn = dsn)
f.otherveg <- get_component_otherveg_data_from_NASIS_db(SS = SS, dsn = dsn)
f.ecosite <- get_component_esd_data_from_NASIS_db(SS = SS, stringsAsFactors = stringsAsFactors, dsn = dsn)
f.diaghz <- get_component_diaghz_from_NASIS_db(SS = SS, dsn = dsn)
f.restrict <- get_component_restrictions_from_NASIS_db(SS = SS, dsn = dsn)
filled.ids <- character(0)
if(rmHzErrors & nrow(f.chorizon) > 0) {
f.chorizon.test <- aqp::checkHzDepthLogic(f.chorizon, c('hzdept_r', 'hzdepb_r'), idname = 'coiid', fast = TRUE)
filled.idx <- which(is.na(f.chorizon$chiid))
if(length(filled.idx) > 0) {
filled.ids <- as.character(f.chorizon$coiid[filled.idx])
}
good.ids <- as.character(f.chorizon.test$coiid[which(f.chorizon.test$valid)])
bad.ids <- as.character(f.chorizon.test$coiid[which(!f.chorizon.test$valid)])
if(length(filled.ids) > 0) {
good.ids <- unique(c(good.ids, filled.ids))
bad.ids <- unique(bad.ids[!bad.ids %in% filled.ids])
}
f.chorizon <- f.chorizon[which(f.chorizon$coiid %in% good.ids), ]
assign('component.hz.problems', value=bad.ids, envir=soilDB.env)
}
if(nrow(f.chorizon) > 0) {
depths(f.chorizon) <- coiid ~ hzdept_r + hzdepb_r
} else {
stop("No horizon data in NASIS component query result.", call.=FALSE)
}
site(f.chorizon) <- f.comp
.SD <- NULL
.BY <- NULL
pm <- data.table::data.table(f.copm)[, .formatParentMaterialString(.SD, uid = .BY$coiid, name.sep=' & '), by = "coiid"]
pm$siteiid <- NULL
if (nrow(pm) > 0)
site(f.chorizon) <- pm
lf <- data.table::data.table(f.cogeomorph)[, .formatLandformString(.SD, uid = .BY$coiid, name.sep=' & '), by = "coiid"]
lf$peiid <- NULL
if (nrow(lf) > 0)
site(f.chorizon) <- lf
es <- data.table::data.table(f.ecosite)[, .formatEcositeString(.SD, name.sep=' & '), by = "coiid", .SDcols = colnames(f.ecosite)]
es$coiid <- NULL
if (nrow(es) > 0)
site(f.chorizon) <- es
ov <- data.table::data.table(f.otherveg)[, .formatOtherVegString(.SD, name.sep=' & '), by = "coiid", .SDcols = colnames(f.otherveg)]
ov$coiid <- NULL
if (nrow(ov) > 0)
site(f.chorizon) <- ov
diagnostic_hz(f.chorizon) <- f.diaghz[which(f.diaghz$coiid %in% f.chorizon$coiid),]
restrictions(f.chorizon) <- f.restrict[which(f.restrict$coiid %in% f.chorizon$coiid),]
if(exists('component.hz.problems', envir=soilDB.env))
if(length(get("component.hz.problems", envir = soilDB.env)) > 0)
message("-> QC: horizon errors detected:\n\tUse `get('component.hz.problems', envir=soilDB.env)` for component record IDs (coiid)")
if(!fill & length(filled.ids) == 0) {
res <- try(hzidname(f.chorizon) <- 'chiid')
if(inherits(res, 'try-error')) {
if(!rmHzErrors) {
warning("cannot set `chiid` as unique component horizon key -- duplicate horizons present with rmHzErrors=FALSE")
} else {
warning("cannot set `chiid` as unique component horizon key -- defaulting to `hzID`")
}
}
} else {
warning("cannot set `chiid` as unique component horizon key - `NA` introduced by fill=TRUE", call.=F)
}
m <- metadata(f.chorizon)
m$origin <- 'NASIS components'
metadata(f.chorizon) <- m
hzdesgnname(f.chorizon) <- "hzname"
hztexclname(f.chorizon) <- "texture"
return(f.chorizon)
} |
prediction.bruto <-
function(model,
data = NULL,
at = NULL,
type = "fitted",
calculate_se = FALSE,
...) {
type <- match.arg(type)
data <- data
if (missing(data) || is.null(data)) {
pred <- predict(model, type = type, ...)
pred <- make_data_frame(fitted = pred[,1L], se.fitted = rep(NA_real_, length(pred)))
} else {
data <- build_datalist(data, at = at, as.data.frame = TRUE)
at_specification <- attr(data, "at_specification")
if (!is.matrix(data)) {
data <- as.matrix(data)
}
tmp <- predict(model, newdata = data, type = type, ...)
pred <- make_data_frame(data, fitted = tmp[,1L], se.fitted = rep(NA_real_, nrow(data)))
}
vc <- NA_real_
structure(pred,
class = c("prediction", "data.frame"),
at = if (is.null(at)) at else at_specification,
type = type,
call = if ("call" %in% names(model)) model[["call"]] else NULL,
model_class = class(model),
row.names = seq_len(nrow(pred)),
vcov = vc,
jacobian = NULL,
weighted = FALSE)
} |
mult.wge=function(fac1=0,fac2=0,fac3=0,fac4=0,fac5=0,fac6=0)
{
fac1=-fac1
fac2=-fac2
fac3=-fac3
fac4=-fac4
fac5=-fac5
fac6=-fac6
bfac1=c(1,fac1)
bfac2=c(1,fac2)
bfac3=c(1,fac3)
bfac4=c(1,fac4)
bfac5=c(1,fac5)
bfac6=c(1,fac6)
pfac1=PolynomF::polynom(bfac1)
pfac2=PolynomF::polynom(bfac2)
pfac3=PolynomF::polynom(bfac3)
pfac4=PolynomF::polynom(bfac4)
pfac5=PolynomF::polynom(bfac5)
pfac6=PolynomF::polynom(bfac6)
mfac=pfac1*pfac2*pfac3*pfac4*pfac5*pfac6
tmpcoef=coef(mfac)
model.coef=tmpcoef[-1]
model.coef=-model.coef
out1=list(char.poly=mfac,model.coef=model.coef)
return(out1)
} |
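# Hedged usage sketch: multiplying the AR(1) factors (1 - 0.5B) and (1 - 0.8B)
# gives the AR(2) operator 1 - 1.3B + 0.4B^2, i.e. model coefficients 1.3 and -0.4.
comb <- mult.wge(fac1 = 0.5, fac2 = 0.8)
comb$model.coef   # 1.3 -0.4
comb$char.poly    # 1 - 1.3*x + 0.4*x^2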
svc <- paws::rekognition()
test_that("describe_projects", {
expect_error(svc$describe_projects(), NA)
})
test_that("describe_projects", {
expect_error(svc$describe_projects(MaxResults = 20), NA)
})
test_that("list_collections", {
expect_error(svc$list_collections(), NA)
})
test_that("list_collections", {
expect_error(svc$list_collections(MaxResults = 20), NA)
})
test_that("list_stream_processors", {
expect_error(svc$list_stream_processors(), NA)
})
test_that("list_stream_processors", {
expect_error(svc$list_stream_processors(MaxResults = 20), NA)
}) |
b31gacl <- function(dep, maop, d, wth, depth, l){
checkmate::assert_double(dep, lower = 0, upper = 6e3, finite = TRUE, any.missing = FALSE, min.len = 1)
checkmate::assert_double(maop, lower = 25.4, upper = 1.27e5, finite = TRUE, any.missing = FALSE, min.len = 1)
checkmate::assert_double(d, lower = 3.93e-2, upper = 1.27e5, finite = TRUE, any.missing = FALSE, min.len = 1)
checkmate::assert_double(wth, lower = 0, upper = 1.275e4, finite = TRUE, any.missing = FALSE, min.len = 1)
checkmate::assert_double(depth, lower = 0, upper = 2.54e4, finite = TRUE, any.missing = FALSE, min.len = 1)
checkmate::assert_double(l, lower = 0, upper = 1.275e4, finite = TRUE, any.missing = FALSE, min.len = 1)
PS <- trunc(1.1*dep*(1 - depth/wth) + .5)
PS[PS > dep] <- dep[PS > dep]
J <- 2/3*depth/wth
AP <- rep(Inf, length(J))
x <- rep(0 , length(J))
cnd <- maop > PS
AP[cnd] <- 1e-3*trunc(
1e3*sqrt((J[cnd]/(1 - 1.1*dep[cnd]*(1 - J[cnd])/maop[cnd]))^2 - 1) + .5)
x[cnd] <- trunc(
1.1*dep[cnd]*(1 - J[cnd])/(1 - 2/3*depth[cnd]/sqrt(
wth[cnd]*AP[cnd]^2 + 1)) + .5)
cnd2 <- cnd & (x > dep)
x[cnd2] <- dep[cnd2]
AP[cnd & (x > maop | AP > 4)] <- 4
trunc(1000*sqrt(d*wth)*1.12*AP)*.001
} |
"pw.freq" |
prepost.test <- function(baseline, post, treatment, conf.level = 0.95, delta="estimate") {
Z <- NULL
if ("factor" %in% class(treatment)) {
treat <- !(treatment==levels(treatment)[1])
}
else {
treat <- !(treatment==0)
}
if (length(unique(treatment))!=2)
stop("Can only handle exactly two treatments")
if (!(class(treatment) %in% c("integer", "numeric", "logical"))) {
stop("treatment must be numeric (or logical)")
}
if (!missing(conf.level) && (length(conf.level) != 1 || !is.finite(conf.level) ||
conf.level < 0 || conf.level > 1))
stop("'conf.level' must be a single number between 0 and 1")
DF <- data.frame(Z = treatment,
R = !is.na(post),
Y1 = baseline,
Y2 = post)
DF <- DF[!is.na(DF$Y1),]
pi1 <- glm(R ~ Y1, family="binomial", data=DF, subset=(Z==1))
pi0 <- glm(R ~ Y1, family="binomial", data=DF, subset=(Z==0))
pihat1 <- predict(pi1, newdata=DF, type="response")
pihat0 <- predict(pi0, newdata=DF, type="response")
reg1 <- lm(Y2 ~ Y1, data=DF, subset=(Z==1))
reg0 <- lm(Y2 ~ Y1, data=DF, subset=(Z==0))
ehat1 <- predict(reg1, newdata=DF)
ehat0 <- predict(reg0, newdata=DF)
N <- nrow(DF)
N1 <- sum(DF$Z)
N0 <- N-N1
deltahat <- N1/N
if (is.numeric(delta) && delta>0 && delta <1) {
deltahat <- delta
}
DF$Y2[is.na(DF$Y2)] <- 0
  mu21 <- with(DF, sum(R*Z*Y2/pihat1 - (Z-deltahat)*ehat1 -(R-pihat1)*Z*ehat1/pihat1 )/ (deltahat*N))
  mu20 <- ifelse(N0==0, 0, with(DF, sum(R*(1-Z)*Y2/pihat0 + (Z-deltahat)*ehat0 - (R-pihat0)*(1-Z)*ehat1/pihat0)/((1-deltahat)*N)))
betahat <- mu21 - mu20
iFun1 <- with(DF, R*Z*(Y2-mu21)/(deltahat*pihat1) -
(Z-deltahat)*(ehat1 - mu21)/(deltahat) -
(R-pihat1)*Z*(ehat1 - mu21)/(deltahat*pihat1))
iFun2 <- with(DF, -(R*(1-Z)*(Y2-mu20))/((1-deltahat)*pihat0) -
((Z-deltahat)*(ehat0-mu20))/(1-deltahat) -
(R-pihat0)*(1-Z)*(ehat1-mu20)/((1-deltahat)*pihat0))
iFun <- iFun1 + iFun2
sebetahat <- sqrt(sum((iFun/N)^2))
method <- "Semiparametric Estimation of Treatment Effect in a Pretest-Posttest Study with Missing Data"
tstat <- betahat/sebetahat
alpha <- 1 - conf.level
cint <- betahat + sebetahat*qnorm(1 - alpha/2) * c(-1, 1)
names(tstat) <- "z"
names(betahat) <- c("estimated treatment effect")
attr(cint, "conf.level") <- conf.level
rval <- list(statistic = tstat, se=sebetahat, p.value = 1-pchisq(tstat^2, df=1),
conf.int = cint, estimate=betahat, method = method)
class(rval) <- "htest"
return(rval)
} |
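# Hedged usage sketch on simulated pretest/posttest data: a binary treatment,
# a true effect of 0.5, and some missing follow-up measurements.
set.seed(42)
n <- 200
baseline  <- rnorm(n)
treatment <- rbinom(n, 1, 0.5)
post      <- baseline + 0.5 * treatment + rnorm(n)
post[sample(n, 20)] <- NA   # drop-out at follow-up
prepost.test(baseline, post, treatment)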
Outline <- R6Class("Outline",
inherit = OutlineCommon,
public = list(
P=NULL,
scale=NULL,
units=NA,
gf=NULL,
gb=NULL,
h=NULL,
im=NULL,
initialize=function(P=NULL, scale=NA, im=NULL, units=NA) {
self$P <- matrix(0, 0, 2)
colnames(self$P) <- c("X", "Y")
self$im <- im
self$scale <- scale
      self$units <- sub("um", "\U00B5m", units)
if (!is.null(P)) {
fragment <- Fragment$new()
fragment$initializeFromPoints(P)
pids <- self$addPoints(fragment$P)
self$mapFragment(fragment, pids)
}
},
getImage = function() {
return(self$im)
},
replaceImage = function(im) {
self$im <- im
},
mapFragment = function(fragment, pids) {
if (length(fragment$gf) != length(pids)) {
stop("Number of fragment indices being mapped is not equal to number of pids supplied")
}
self$gf <- self$mapPids(fragment$gf, self$gf, pids)
self$gb <- self$mapPids(fragment$gb, self$gb, pids)
},
mapPids = function(x, y, pids) {
y[pids[which(is.na(x))]] <- NA
nna <- which(!is.na(x))
y[pids[nna]] <- pids[x[nna]]
return(y)
},
addPoints = function(P) {
if (!is.matrix(P)) {
if (length(P) == 2) {
P <- matrix(P, nrow=1)
} else {
stop("P must be a matrix or vector of length 2")
}
}
if (!(ncol(P) == 2)) {
stop("P must have 2 (X, Y) columns")
}
pids <- rep(NA, nrow(P))
for (i in (1:nrow(P))) {
if (nrow(self$P) == 0) {
self$P <- rbind(self$P, c(P[i,]))
pids[i] <- nrow(self$P)
self$h[1] <- 1
} else {
id <- which(apply(t(self$P) == P[i,], 2, all))
if (length(id) > 1) {
stop(paste("Point register has duplicates", self$P[id,], collapse=", "))
}
if (length(id) == 1) {
pids[i] <- id
}
if (length(id) == 0) {
self$P <- rbind(self$P, c(P[i,]))
pids[i] <- nrow(self$P)
self$h <- c(self$h, pids[i])
}
}
}
return(pids)
},
getPoints = function() {
return(self$P[,c("X", "Y")])
},
getPointsScaled = function() {
if (is.na(self$scale)) {
return(self$P[,c("X", "Y")])
}
return(cbind(self$scale*self$P[,c("X", "Y")]))
},
getRimSet = function() {
return(1:nrow(self$P))
},
getOutlineSet = function() {
return(which(!is.na(self$gf)))
},
getOutlineLengths = function() {
return(vecnorm(self$getPointsScaled()[self$getOutlineSet(),] -
self$getPointsScaled()[self$gf[self$getOutlineSet()],]))
},
addFeatureSet = function(fs) {
if (fs$type %in% self$getFeatureSetTypes()) {
stop(paste("There is already a", fs$type, "attached to this outline"))
}
self$featureSets <- c(self$featureSets, fs)
}
)
)
plot.Outline <- function(x, ...) {
  plot(x$P, ...)
}
flatplot.Outline <- function(x, axt="n",
xlim=NULL,
ylim=NULL,
add=FALSE,
image=TRUE,
scalebar=1,
rimset=FALSE,
pids=FALSE,
pid.joggle=0,
lwd.outline=1,
...) {
plot.image <- image
scalebar <- ifelse(is.numeric(scalebar) && x$scale, scalebar, FALSE)
s <- which(!is.na(x$gb))
d <- na.omit(x$gb)
if (!add) {
im <- x$getImage()
if (plot.image && !is.null(im)) {
xs <- 1:ncol(im)
ys <- 1:nrow(im)
if (is.null(xlim)) {
xlim <- c(0, max(xs))
}
if (is.null(ylim)) {
ylim <- c(0, max(ys))
}
plot(NA, NA, xlim=xlim, ylim=ylim, asp=1,
xaxt=axt, yaxt=axt, bty="n",
xlab="", ylab="")
rasterImage(im, 0, 0, ncol(im), nrow(im))
} else {
xs <- x$getPoints()[s,"X"]
ys <- x$getPoints()[s,"Y"]
plot(xs, ys, asp=1,
pch=".", xaxt=axt, yaxt=axt, xlab="", ylab="",
bty="n", xlim=xlim, ylim=ylim)
}
}
if (rimset) {
points(x$P[x$getRimSet(),], col=getOption("rimset.col"), pch=19)
}
segments(x$P[s,1], x$P[s,2], x$P[d,1], x$P[d,2],
col=getOption("outline.col"), lwd=lwd.outline)
if (pids) {
text(x$P[,"X"], x$P[,"Y"] + runif(nrow(x$P), -pid.joggle, pid.joggle), 1:nrow(x$P), ...)
}
if (!add && scalebar && !is.na(x$scale)) {
sby <- min(ys) - 0.02*(max(ys) - min(ys))
sblen <- 1000*scalebar/(x$scale)
lines(c(max(xs) - sblen, max(xs)),c(sby, sby), lwd=2)
}
}
simplifyOutline <- function(P, min.frac.length=0.001, plot=FALSE) {
N <- nrow(P)
Q <- rbind(P, P[1,])
v <- diff(Q)
l <- vecnorm(v)
e <- extprod3d(cbind(v[c(N, 1:(N-1)),], 0), cbind(v, 0))[,3]
S <- l/sum(l) < min.frac.length
i.rem <- which((e <= 0) & (S | (S[c(N, 1:(N-1))])))
if (plot) {
plot(P, col="white")
if (any(S)) {
segments(P[S,1], P[S,2], P[S,1]+v[S,1], P[S,2] + v[S,2], col="red")
}
if (any(!S)) {
segments(P[!S,1], P[!S,2], P[!S,1]+v[!S,1], P[!S,2] + v[!S,2], col="black")
}
points(P[e>0,1], P[e>0, 2], col="green")
points(P[e==0,1], P[e==0, 2], col="orange")
points(P[e<0,1], P[e<0, 2], col="blue")
points(P[i.rem,1], P[i.rem, 2], pch="X", col="brown")
}
if (length(i.rem) > 0) {
message(paste("simplifyOutline: Removing vertex", i.rem[1]))
return(simplifyOutline(P[-i.rem[1],],
min.frac.length=min.frac.length, plot=plot))
} else {
return(P)
}
} |
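# Hedged usage sketch: strip collinear midpoints from a square outline.
# vecnorm() is a package-internal row-norm helper and extprod3d() is assumed to
# come from the 'geometry' package; a vecnorm() stand-in is defined only if it
# is not already available.
if (!exists("vecnorm")) vecnorm <- function(X) sqrt(rowSums(X^2))
library(geometry)
P <- rbind(c(0, 0), c(0.5, 0), c(1, 0), c(1, 0.5),
           c(1, 1), c(0.5, 1), c(0, 1), c(0, 0.5))
P_simplified <- simplifyOutline(P, min.frac.length = 0.2)
nrow(P_simplified)   # 4: only the square's corners remain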
context("Product space construction")
library(dst)
test_that("productSpace", {
tt1 <- 1:3
specnb1 <- c(1,1,2)
infovar1 =matrix(c(5,7,2,2), ncol = 2, dimnames = list(NULL, c("varnb", "size")) )
expect_error(productSpace(tt=tt1, specnb=specnb1, infovar = infovar1), "tt parameter must be a matrix.")
tt1 <- matrix(c(1,0,1,0,0,1,0,1,1,1,1,1),nrow=3, byrow = TRUE, dimnames =list(NULL, c("foul", "fair", "foul", "fair")) )
specnb1 <- c(1,2)
infovar1 =matrix(c(5,7,2,2), ncol = 2, dimnames = list(NULL, c("varnb", "size")) )
expect_error(productSpace(tt=tt1, specnb=specnb1, infovar = infovar1), "specnb parameter must be a numeric vector of length nrow\\(tt\\)")
tt1 <- matrix(c(1,0,1,0,0,1,0,1,1,1,1,1),nrow=3, byrow = TRUE, dimnames =list(NULL, c("foul", "fair", "foul", "fair")) )
specnb1 <- c(1,1,2)
infovar1 =matrix(c(5,2), ncol = 2, dimnames = list(NULL, c("varnb", "size")) )
expect_error(productSpace(tt=tt1, specnb=specnb1, infovar = infovar1), "infovar parameter must be a 2 column matrix with sum of 2nd column = ncol\\(tt\\).")
tt1 <- matrix(c(1,0,1,0,0,1,0,1,1,1,1,1),nrow=3, byrow = TRUE, dimnames =list(NULL, c("foul", "fair", "foul", "fair")) )
specnb1 <- c(1,1,3)
infovar1 =matrix(c(5, 7, 2, 2), ncol = 2, dimnames = list(NULL, c("varnb", "size")) )
expect_error(productSpace(tt=tt1, specnb=specnb1, infovar = infovar1), "specnb values must be a sequence of numbers increasing by increments of 1 at most.")
}) |
collapsibleTree.Node <- function(df, hierarchy_attribute = "level",
root = df$name, inputId = NULL, attribute = "leafCount",
aggFun = sum, fill = "lightsteelblue",
linkLength = NULL, fontSize = 10, tooltip = FALSE,
tooltipHtml = NULL,nodeSize = NULL, collapsed = TRUE,
zoomable = TRUE, width = NULL, height = NULL, ...) {
nodeAttr <- c("leafCount", "count")
  if(!is(df, "Node")) stop("df must be a data tree object")
if(!is.character(fill)) stop("fill must be a either a color or column name")
if(!is.null(tooltipHtml)) if(!(tooltipHtml %in% df$fields)) stop("tooltipHtml column name is incorrect")
if(!is.null(nodeSize)) if(!(nodeSize %in% c(df$fields, nodeAttr))) stop("nodeSize column name is incorrect")
leftMargin <- nchar(root)
rightLabelVector <- df$Get("name", filterFun = function(x) x$level==df$height)
rightMargin <- max(sapply(rightLabelVector, nchar))
hierarchy <- unique(ToDataFrameTree(df, hierarchy_attribute)[[hierarchy_attribute]])
if(length(hierarchy) <= 1) stop("hierarchy vector must be greater than length 1")
options <- list(
hierarchy = hierarchy,
input = inputId,
attribute = attribute,
linkLength = linkLength,
fontSize = fontSize,
tooltip = tooltip,
collapsed = collapsed,
zoomable = zoomable,
margin = list(
top = 20,
bottom = 20,
left = (leftMargin * fontSize/2) + 25,
right = (rightMargin * fontSize/2) + 25
)
)
jsonFields <- NULL
if(fill %in% df$fields) {
df$Do(function(x) x$fill <- x[[fill]])
jsonFields <- c(jsonFields, "fill")
} else {
options$fill <- fill
}
if(tooltip & is.null(tooltipHtml)) {
t <- data.tree::Traverse(df, hierarchy_attribute)
    if(substitute(aggFun)=="identity") {
data.tree::Do(t, function(x) {
x$WeightOfNode <- x[[attribute]]
})
} else {
data.tree::Do(t, function(x) {
x$WeightOfNode <- data.tree::Aggregate(x, attribute, aggFun)
x$WeightOfNode <- prettyNum(
x$WeightOfNode, big.mark = ",", digits = 3, scientific = FALSE
)
})
}
jsonFields <- c(jsonFields, "WeightOfNode")
}
if(tooltip & !is.null(tooltipHtml)) {
df$Do(function(x) x$tooltip <- x[[tooltipHtml]])
jsonFields <- c(jsonFields, "tooltip")
}
if(!is.null(nodeSize)) {
scaleFactor <- 10/data.tree::Aggregate(df, nodeSize, stats::median)
t <- data.tree::Traverse(df, hierarchy_attribute)
data.tree::Do(t, function(x) {
x$SizeOfNode <- data.tree::Aggregate(x, nodeSize, aggFun)
x$SizeOfNode <- round(sqrt(x$SizeOfNode*scaleFactor)*pi, 2)
})
options$margin$left <- options$margin$left + df$SizeOfNode - 10
jsonFields <- c(jsonFields, "SizeOfNode")
}
if(is.null(jsonFields)) jsonFields <- NA
data <- data.tree::ToListExplicit(df, unname = TRUE, keepOnly = jsonFields)
x <- list(
data = data,
options = options
)
htmlwidgets::createWidget(
"collapsibleTree", x, width = width, height = height,
htmlwidgets::sizingPolicy(viewer.padding = 0)
)
} |
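# Hedged usage sketch: a minimal data.tree hierarchy passed directly to the
# method above (assumes the 'data.tree' and 'htmlwidgets' packages are
# available); the widget object is stored rather than rendered.
library(data.tree)
org <- Node$new("Company")
org$AddChild("Sales")$AddChild("EMEA")
org$AddChild("R&D")$AddChild("Platform")
widget <- collapsibleTree.Node(org)
class(widget)   # "collapsibleTree" "htmlwidget"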
linear_class <- setClass(
"linear_class",
slots = c(
extract = "list",
fit = "stanfit",
data = "list"
),
contains = "b_results"
)
setMethod(f="summary", signature(object="linear_class"), definition=function(object) {
alpha <- mean(object@extract$mu_a)
beta <- mean(object@extract$mu_b)
sigma <- mean(object@extract$mu_s)
alpha_hdi <- mcmc_hdi(object@extract$mu_a)
beta_hdi <- mcmc_hdi(object@extract$mu_b)
sigma_hdi <- mcmc_hdi(object@extract$mu_s)
cat(sprintf("intercept (alpha):\t%.2f +/- %.5f, 95%% HDI: [%.2f, %.2f]\n",
alpha, mcmcse::mcse(object@extract$mu_a)$se, alpha_hdi[1], alpha_hdi[2]))
cat(sprintf("slope (beta):\t\t%.2f +/- %.5f, 95%% HDI: [%.2f, %.2f]\n",
beta, mcmcse::mcse(object@extract$mu_b)$se, beta_hdi[1], beta_hdi[2]))
cat(sprintf("sigma:\t\t\t%.2f +/- %.5f, 95%% HDI: [%.2f, %.2f]\n",
sigma, mcmcse::mcse(object@extract$mu_s)$se, sigma_hdi[1], sigma_hdi[2]))
})
setMethod(f="show", signature(object="linear_class"), definition=function(object) {
show(object@fit)
})
setMethod(f="plot", signature(x="linear_class", y="missing"), definition=function(x, ...) {
return(plot_fit(object=x, ...))
})
setMethod(f="plot_fit", signature(object="linear_class"), definition=function(object, ...) {
slope <- intercept <- s <- x <- y <- NULL
arguments <- list(...)
subjects <- TRUE
if (!is.null(arguments$subjects)) {
subjects <- arguments$subjects
}
df_data <- data.frame(x=object@data$x, y=object@data$y, s=object@data$s)
n <- length(unique(df_data$s))
x_min <- floor(min(df_data$x))
y_min <- floor(min(df_data$y))
x_max <- ceiling(max(df_data$x))
y_max <- ceiling(max(df_data$y))
diff_x <- x_max - x_min
x_min <- x_min - 0.1*diff_x
x_max <- x_max + 0.1*diff_x
diff_y <- y_max - y_min
y_min <- y_min - 0.1*diff_y
y_max <- y_max + 0.1*diff_y
df_data <- df_data %>% group_by(s, x) %>% summarize(y=mean(y, na.rm=TRUE))
if (!subjects) {
m <- min(100, length(object@extract$mu_a))
df_fit <- data.frame(intercept=object@extract$mu_a,
slope=object@extract$mu_b)
df_fit <- sample_n(df_fit, m)
graph <- ggplot() +
      geom_point(data=df_data, aes(x=x, y=y), color="#3182bd") +  # colour values are placeholders
      geom_abline(data=df_fit, aes(slope=slope, intercept=intercept), color="#3182bd", alpha=0.1) +
xlim(x_min, x_max) +
ylim(y_min, y_max) +
ylab("response") +
xlab("question index")
} else {
m <- min(20, length(object@extract$mu_a))
df_fit <- NULL
for (i in 1:n) {
df <- data.frame(intercept=object@extract$alpha[,i],
slope=object@extract$beta[,i],
s=i)
df <- sample_n(df, m)
df_fit <- rbind(df_fit, df)
}
n_col <- ceiling(sqrt(n))
graph <- ggplot() +
      geom_point(data=df_data, aes(x=x, y=y), color="#3182bd") +  # colour values are placeholders
      geom_abline(data=df_fit, aes(slope=slope, intercept=intercept), color="#3182bd", alpha=0.2) +
facet_wrap(. ~ s, ncol=n_col) +
ylab("response") +
xlab("question index")
}
return(graph)
})
setMethod(f="plot_trace", signature(object="linear_class"), definition=function(object) {
rstan::traceplot(object@fit, pars=c("mu_a", "mu_b", "mu_s"), inc_warmup=TRUE)
})
setMethod(f="get_parameters", signature(object="linear_class"), definition=function(object) {
  df <- data.frame(intercept=object@extract$mu_a,
                   slope=object@extract$mu_b,
                   sigma=object@extract$mu_s)
return(df)
})
setMethod(f="get_subject_parameters", signature(object="linear_class"), definition=function(object) {
df <- data.frame(slope=numeric(), intercept=numeric(), sigma=numeric(), subject=numeric())
n <- length(unique(object@data$s))
for (i in 1:n) {
    df_subject <- data.frame(intercept = object@extract$alpha[,i],
                             slope = object@extract$beta[,i],
                             sigma = object@extract$sigma[,i],
                             subject = i)
df <- rbind(df, df_subject)
}
return(df)
})
setMethod(f="compare_means", signature(object="linear_class"), definition=function(object, ...) {
arguments <- list(...)
wrong_arguments <- "The provided arguments for the compare_means function are invalid, compare_means(linear_class, fit2=linear_class) is required! You can also provide the rope parameters, e.g. compare_means(linear_class, fit2=linear_class, rope_intercept=numeric, rope_slope=numeric)."
if (length(arguments) == 0) {
stop(wrong_arguments)
}
rope_intercept <- NULL
if (!is.null(arguments$rope_intercept)) {
rope_intercept <- arguments$rope_intercept
}
rope_intercept <- prepare_rope(rope_intercept)
rope_slope <- NULL
if (!is.null(arguments$rope_slope)) {
rope_slope <- arguments$rope_slope
}
rope_slope <- prepare_rope(rope_slope)
intercept1 <- object@extract$mu_a
slope1 <- object@extract$mu_b
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
intercept2 <- fit2@extract$mu_a
slope2 <- fit2@extract$mu_b
cat("---------- Intercept ----------\n")
intercept <- difference(y1=intercept1, y2=intercept2, rope=rope_intercept)
cat("\n---------- Slope ----------\n")
slope <- difference(y1=slope1, y2=slope2, rope=rope_slope)
cat("\n")
return(rbind(intercept, slope))
} else {
stop(wrong_arguments)
}
})
setMethod(f="plot_means_difference", signature(object="linear_class"), definition=function(object, ...) {
arguments <- list(...)
wrong_arguments <- "The provided arguments for the plot_means_difference function are invalid, plot_means_difference(linear_class, fit2=linear_class) is required! You can optionallly provide the rope and bins (number of bins in the histogram) parameters, e.g. plot_means_difference(linear_class, fit2=linear_class, rope_intercept=numeric, rope_slope=numeric, bins=numeric)."
if (length(arguments) == 0) {
stop(wrong_arguments)
}
par <- NULL
if (!is.null(arguments$par)) {
par <- arguments$par
if (!(par == "slope" || par == "intercept")) {
w <- sprintf("Parameter %s not recognized, parameters used in this model are slope and intercept! Using the default setting for comparison.", par)
warning(w)
par <- NULL
} else {
cat(sprintf("\n---------- Using only the %s parameter. ----------\n\n", par))
}
}
rope_intercept <- NULL
if (!is.null(arguments$rope_intercept)) {
rope_intercept <- arguments$rope_intercept
}
rope_intercept <- prepare_rope(rope_intercept)
rope_slope <- NULL
if (!is.null(arguments$rope_slope)) {
rope_slope <- arguments$rope_slope
}
rope_slope <- prepare_rope(rope_slope)
intercept1 <- object@extract$mu_a
slope1 <- object@extract$mu_b
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
intercept2 <- fit2@extract$mu_a
slope2 <- fit2@extract$mu_b
bins <- 30
if (!is.null(arguments$bins)) {
bins <- arguments$bins
}
graph_intercept <- plot_difference(y1=intercept1, y2=intercept2, rope=rope_intercept, bins=bins)
graph_intercept <- graph_intercept +
ggtitle("Intercept") +
theme(plot.title=element_text(hjust=0.5))
graph_slope <- plot_difference(y1=slope1, y2=slope2, rope=rope_slope, bins=bins)
graph_slope <- graph_slope +
ggtitle("Slope") +
theme(plot.title=element_text(hjust=0.5))
if (is.null(par)) {
graph <- cowplot::plot_grid(graph_intercept, graph_slope, ncol=2, nrow=1, scale=0.9)
} else if (par == "slope") {
graph <- graph_slope
} else if (par == "intercept") {
graph <- graph_intercept
}
return(graph)
} else {
stop(wrong_arguments)
}
})
setMethod(f="plot_means", signature(object="linear_class"), definition=function(object, ...) {
intercept <- slope <- NULL
arguments <- list(...)
par <- NULL
if (!is.null(arguments$par)) {
par <- arguments$par
if (!(par == "slope" || par == "intercept")) {
w <- sprintf("Parameter %s not recognized, parameters used in this model are slope and intercept! Using the default setting for comparison.", par)
warning(w)
par <- NULL
} else {
cat(sprintf("\n---------- Using only the %s parameter. ----------\n\n", par))
}
}
df1 <- data.frame(intercept=object@extract$mu_a, slope=object@extract$mu_b)
x_min_intercept <- min(df1$intercept)
x_max_intercept <- max(df1$intercept)
x_min_slope <- min(df1$slope)
x_max_slope <- max(df1$slope)
graph_intercept <- ggplot() +
    geom_density(data=df1, aes(x=intercept), fill="#3182bd", alpha=0.4)  # fill colour and alpha are placeholder values
graph_slope <- ggplot() +
    geom_density(data=df1, aes(x=slope), fill="#3182bd", alpha=0.4)
df2 <- NULL
if (length(arguments) > 0) {
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
df2 <- data.frame(intercept=fit2@extract$mu_a, slope=fit2@extract$mu_b)
x_min_intercept <- min(x_min_intercept, df2$intercept)
x_max_intercept <- max(x_max_intercept, df2$intercept)
x_min_slope <- min(x_min_slope, df2$slope)
x_max_slope <- max(x_max_slope, df2$slope)
graph_intercept <- graph_intercept +
        geom_density(data=df2, aes(x=intercept), fill="#ff4e3f", alpha=0.4)  # placeholder colour
graph_slope <- graph_slope +
        geom_density(data=df2, aes(x=slope), fill="#ff4e3f", alpha=0.4)
}
}
diff <- x_max_intercept - x_min_intercept
x_min_intercept <- x_min_intercept - 0.1*diff
x_max_intercept <- x_max_intercept + 0.1*diff
diff <- x_max_slope - x_min_slope
x_min_slope <- x_min_slope - 0.1*diff
x_max_slope <- x_max_slope + 0.1*diff
graph_intercept <- graph_intercept +
xlab("intercept") +
xlim(x_min_intercept, x_max_intercept)
graph_slope <- graph_slope +
xlab("slope") +
xlim(x_min_slope, x_max_slope)
if (is.null(par)) {
graph <- cowplot::plot_grid(graph_intercept, graph_slope, ncol=2, nrow=1, scale=0.9)
} else if (par == "slope") {
graph <- graph_slope
} else if (par == "intercept") {
graph <- graph_intercept
}
return(graph)
})
setMethod(f="compare_distributions", signature(object="linear_class"), definition=function(object, ...) {
arguments <- list(...)
wrong_arguments <- "The provided arguments for the compare_distributions function are invalid, compare_distributions(linear_class, fit2=linear_class) is required! You can also provide the rope parameter, e.g. compare_distributions(linear_class, fit2=linear_class, rope_intercept=numeric, rope_slope=numeric)."
if (length(arguments) == 0) {
stop(wrong_arguments)
}
rope_intercept <- NULL
if (!is.null(arguments$rope_intercept)) {
rope_intercept <- arguments$rope_intercept
}
rope_intercept <- prepare_rope(rope_intercept)
rope_slope <- NULL
if (!is.null(arguments$rope_slope)) {
rope_slope <- arguments$rope_slope
}
rope_slope <- prepare_rope(rope_slope)
n <- 100000
mu_intercept1 <- mean(object@extract$mu_a)
sigma_intercept1 <- mean(object@extract$sigma_a)
intercept1 <- stats::rnorm(n, mean=mu_intercept1, sd=sigma_intercept1)
mu_slope1 <- mean(object@extract$mu_b)
sigma_slope1 <- mean(object@extract$sigma_b)
slope1 <- stats::rnorm(n, mean=mu_slope1, sd=sigma_slope1)
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
mu_intercept2 <- mean(fit2@extract$mu_a)
sigma_intercept2 <- mean(fit2@extract$sigma_a)
intercept2 <- stats::rnorm(n, mean=mu_intercept2, sd=sigma_intercept2)
mu_slope2 <- mean(fit2@extract$mu_b)
sigma_slope2 <- mean(fit2@extract$sigma_b)
slope2 <- stats::rnorm(n, mean=mu_slope2, sd=sigma_slope2)
cat("---------- Intercept ----------\n")
intercept <- difference(y1=intercept1, y2=intercept2, rope=rope_intercept)
cat("\n---------- Slope ----------\n")
slope <- difference(y1=slope1, y2=slope2, rope=rope_slope)
cat("\n")
return(rbind(intercept, slope))
} else {
stop(wrong_arguments)
}
})
setMethod(f="plot_distributions", signature(object="linear_class"), definition=function(object, ...) {
slope <- intercept <- group <- y <- y_min <- NULL
df_mean <- data.frame(intercept=mean(object@extract$mu_a), slope=mean(object@extract$mu_b), group="1")
n <- min(100, length(object@extract$mu_a))
df <- data.frame(intercept=object@extract$mu_a, slope=object@extract$mu_b, group="1")
df <- sample_n(df, n)
x_min <- min(object@data$x)
x_max <- max(object@data$x)
y_min <- min(object@data$y)
y_max <- max(object@data$y)
graph <- ggplot()
arguments <- list(...)
if (length(arguments) > 0) {
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
df_mean <- rbind(df_mean, data.frame(intercept=mean(fit2@extract$mu_a), slope=mean(fit2@extract$mu_b), group="2"))
n <- min(100, length(fit2@extract$mu_a))
df2 <- data.frame(intercept=fit2@extract$mu_a, slope=fit2@extract$mu_b, group="2")
df2 <- sample_n(df2, n)
df <- rbind(df, df2)
x_min <- min(x_min, fit2@data$x)
x_max <- max(x_max, fit2@data$x)
y_min <- min(y_min, fit2@data$y)
y_max <- max(y_max, fit2@data$y)
}
}
diff_x <- x_max - x_min
x_min <- x_min - 0.1*diff_x
x_max <- x_max + 0.1*diff_x
diff_y <- y_max - y_min
y_min <- y_min - 0.1*diff_y
y_max <- y_max + 0.1*diff_y
graph <- ggplot() +
geom_abline(data=df, aes(slope=slope, intercept=intercept, color=group), alpha=0.1, size=1) +
geom_abline(data=df_mean, aes(slope=slope, intercept=intercept, color=group), size=1.5) +
    scale_color_manual(values=c("#3182bd", "#ff4e3f")) +  # placeholder colours
xlim(x_min, x_max) +
ylim(y_min, y_max) +
xlab("") +
ylab("")
return(graph)
})
setMethod(f="plot_distributions_difference", signature(object="linear_class"), definition=function(object, ...) {
arguments <- list(...)
wrong_arguments <- "The provided arguments for the plot_distributions_difference function are invalid, plot_distributions_difference(linear_class, fit2=linear_class) is required! You can also provide the rope and bins (number of bins in the histogram) parameters, e.g. plot_distributions_difference(linear_class, fit2=linear_class, rope_intercept=numeric, rope_slope=numeric, bins=numeric)."
if (length(arguments) == 0) {
stop(wrong_arguments)
}
par <- NULL
if (!is.null(arguments$par)) {
par <- arguments$par
if (!(par == "slope" || par == "intercept")) {
w <- sprintf("Parameter %s not recognized, parameters used in this model are slope and intercept! Using the default setting for comparison.", par)
warning(w)
par <- NULL
} else {
cat(sprintf("\n---------- Using only the %s parameter. ----------\n\n", par))
}
}
rope_intercept <- NULL
if (!is.null(arguments$rope_intercept)) {
rope_intercept <- arguments$rope_intercept
}
rope_intercept <- prepare_rope(rope_intercept)
rope_slope <- NULL
if (!is.null(arguments$rope_slope)) {
rope_slope <- arguments$rope_slope
}
rope_slope <- prepare_rope(rope_slope)
n <- 100000
mu_intercept1 <- mean(object@extract$mu_a)
sigma_intercept1 <- mean(object@extract$sigma_a)
intercept1 <- stats::rnorm(n, mean=mu_intercept1, sd=sigma_intercept1)
mu_slope1 <- mean(object@extract$mu_b)
sigma_slope1 <- mean(object@extract$sigma_b)
slope1 <- stats::rnorm(n, mean=mu_slope1, sd=sigma_slope1)
if (!is.null(arguments$fit2) || class(arguments[[1]])[1] == "linear_class") {
if (!is.null(arguments$fit2)) {
fit2 <- arguments$fit2
} else {
fit2 <- arguments[[1]]
}
mu_intercept2 <- mean(fit2@extract$mu_a)
sigma_intercept2 <- mean(fit2@extract$sigma_a)
intercept2 <- stats::rnorm(n, mean=mu_intercept2, sd=sigma_intercept2)
mu_slope2 <- mean(fit2@extract$mu_b)
sigma_slope2 <- mean(fit2@extract$sigma_b)
slope2 <- stats::rnorm(n, mean=mu_slope2, sd=sigma_slope2)
bins <- 30
if (!is.null(arguments$bins)) {
bins <- arguments$bins
}
graph_intercept <- plot_difference(y1=intercept1, y2=intercept2, rope=rope_intercept, bins=bins)
graph_intercept <- graph_intercept +
ggtitle("Intercept") +
theme(plot.title=element_text(hjust=0.5))
graph_slope <- plot_difference(y1=slope1, y2=slope2, rope=rope_slope, bins=bins)
graph_slope <- graph_slope +
ggtitle("Slope") +
theme(plot.title=element_text(hjust=0.5))
if (is.null(par)) {
graph <- cowplot::plot_grid(graph_intercept, graph_slope, ncol=2, nrow=1, scale=0.9)
} else if (par == "slope") {
graph <- graph_slope
} else if (par == "intercept") {
graph <- graph_intercept
}
return(graph)
} else {
stop(wrong_arguments)
}
}) |
tam_find_root <- function( x1, y1, prob.lvl, theta )
{
N <- length(y1)
dfr <- cbind( x1, y1 )
dfr <- dfr[ order( dfr[,1] ), ]
x1 <- dfr[,1]
y1 <- dfr[,2]
y2 <- y1 - prob.lvl
i0 <- which( y2 < 0 )
i1 <- which( y2 > 0 )
thetasol <- NA
if ( ( length(i1) > 0 ) & ( length(i0) > 0 ) ){
i0 <- max( i0 )
i1 <- min( i1 )
theta0 <- theta[i0]
theta1 <- theta[i1]
a0 <- y2[i0]
a1 <- y2[i1]
slo <- ( a1 - a0 ) / ( theta1 - theta0 )
thetasol <- theta0 - a0 / slo
}
return(thetasol)
} |
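# Hedged usage sketch: find the theta at which a logistic item characteristic
# curve with difficulty 0.7 crosses the probability level 0.5; linear
# interpolation on the grid recovers a value close to 0.7.
theta_grid <- seq(-3, 3, by = 0.1)
p_grid <- plogis(theta_grid - 0.7)
tam_find_root(x1 = theta_grid, y1 = p_grid, prob.lvl = 0.5, theta = theta_grid)  # ~ 0.7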
HImeanactabs <- function (data, catch="Food", hand="Hand", indiv = "Indiv", RightHand = "R", LeftHand = "L"
, col = 2:((length(levels(data[[catch]])))+1), ylab = "Mean handedness index"
, main="Hand preference regarding to the performed task", legend.text = FALSE, beside = TRUE
, ylim = c(-1,1), names.arg=levels(data[[catch]]), legendlocation=FALSE, standarderror=TRUE
, cex=1, pt.cex=2, pch=15, horiz = FALSE, savetable = FALSE, file = "HImeanPerActabs.csv")
{
for (i in 1:nlevels(data[[catch]])) {
seldata<- data[data[[catch]]==levels(data[[catch]])[i],]
Tab<- table(seldata[[indiv]], seldata[[hand]])
NewTab<-as.data.frame.matrix(Tab)
    if (is.null(NewTab[[RightHand]])) {
      HITab <- (-NewTab[[LeftHand]])/NewTab[[LeftHand]]
    } else if (is.null(NewTab[[LeftHand]])) {
      HITab <- NewTab[[RightHand]]/NewTab[[RightHand]]
    } else {
      HITab <- (NewTab[[RightHand]]-NewTab[[LeftHand]])/(NewTab[[RightHand]]+NewTab[[LeftHand]])
    }
    if (!("HIperActivity" %in% ls())) {HIperActivity <- c()}
HIperActivity<-cbind(HIperActivity,HITab)
}
HIperActivity<-t(HIperActivity)
colnames(HIperActivity)<-levels(data[[indiv]])
rownames(HIperActivity)<-levels(data[[catch]])
HIperActivity
HImeanPerActabs<-rowMeans(HIperActivity, na.rm=TRUE)
HImeanPerActabs[which(HImeanPerActabs<0)]<-HImeanPerActabs[which(HImeanPerActabs<0)]*-1
graph<-as.matrix(HImeanPerActabs)
graphHImean<-barplot(graph, beside = beside, ylab=ylab, main=main, legend.text = legend.text, col=col, ylim=ylim, names.arg=names.arg)
if (standarderror == TRUE) {
standarddeviations<-apply(HIperActivity,1,sd,na.rm=TRUE)
standarderror <- standarddeviations/sqrt(ncol(HIperActivity))
arrows(graphHImean, HImeanPerActabs + standarderror, graphHImean, HImeanPerActabs - standarderror, angle = 90, code=3, length=0.1)
} else {
}
if (legendlocation == TRUE) {
message("Click where you want to place the legend")
legendplace <- locator(1)
legend(legendplace$x,legendplace$y,as.vector(levels(data[[catch]])),col=col,bty="n",pch=pch, cex=cex, pt.cex=pt.cex, horiz=horiz)
} else {
}
HImeanactabs<-as.data.frame(HImeanPerActabs)
if (savetable == "csv") {write.csv(HImeanactabs, file = file)} else{}
if (savetable == "csv2") {write.csv2(HImeanactabs, file = file)} else {}
HImeanactabs
} |
print.rocJM <-
function (x, ...) {
cat("\nAreas under the time-dependent ROC curves\n\n")
cat("Estimation: Monte Carlo (", x$M, " samples)\n", sep = "")
if (x$diffType == "absolute") {
lx <- length(x$abs.diff)
ld <- paste(round(x$abs.diff, 2), collapse = ", ")
} else {
lx <- length(x$rel.diff)
ld <- paste(round(x$rel.diff, 2), collapse = ", ")
}
cat("Difference: ", x$diffType, ", lag = ", lx,
" (", ld,")\n", sep = "")
cat("Thresholds range: (", round(x$min.cc, 2), ", ",
round(x$max.cc, 2), ")\n\n", sep = "")
times <- x$times
aucs <- x$AUCs
for (i in seq_along(times)) {
cat("Case:", names(times)[i], "\n")
cat("Recorded time(s):", paste(round(x$times[[i]], 2),
collapse = ", "), "\n")
ac <- if (is.matrix(aucs)) round(aucs[, i], 4) else
round(aucs[[i]], 4)
thr <- round(x$optThr[[i]], 4)
m <- cbind(x$dt, round(x$dt +
tail(x$times[[i]], 1), 2), ac, thr)
colnames(m) <- if ((nc <- ncol(thr)) == 1) {
c("dt", "t + dt", "AUC", "Cut")
} else {
c("dt", "t + dt", "AUC",
paste("Cut.", 1:nc, sep = ""))
}
rownames(m) <- rep("", nrow(m))
print(m)
cat("\n")
}
invisible(x)
} |
dist2eu <- function(X,C) {
ndata = nrow(X)
ncentres = nrow(C)
res = matrix(0,ndata,ncentres)
for (i in 1:ndata)
{
for (j in 1:ncentres)
{
res[i,j] = sum((X[i,]-C[j,])^2)
}
}
return(res)
} |
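# Illustrative check (not part of the original source): dist2eu() returns *squared*
# Euclidean distances, so its square root should agree with stats::dist().
X <- matrix(c(0, 0, 3, 4), nrow = 2, byrow = TRUE)   # points (0, 0) and (3, 4)
dist2eu(X, X)         # off-diagonal entries are 25 (squared distance)
sqrt(dist2eu(X, X))   # off-diagonal entries are 5, matching dist(X)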
predPow <-
function( data, obs = "observed", pred = "predicted", obs_mean = NULL, nu = 0, round = 4, extOut = FALSE, extOutFile = NULL ){
mainfunc.stats( data, obs, pred, obs_mean, nu, round, "prediction", extOut, extOutFile, match.call() )
} |
extract.level <- function(var, infile, outfile, level = 1, nc34 = 4,
overwrite = FALSE, verbose = FALSE, nc = NULL) {
check_variable(var)
if (is.null(nc)) check_infile(infile)
check_outfile(outfile)
outfile <- correct_filename(outfile)
check_overwrite(outfile, overwrite)
check_nc_version(nc34)
calc_time_start <- Sys.time()
file_data <- read_file(infile, var, nc = nc)
if (!is.null(nc)) nc_in <- nc
else nc_in <- nc_open(infile)
  if (length(names(nc_in$dim)) == 4) {
start <- c(1, 1, 1, 1)
count <- c(-1, -1, -1, -1)
dummy <- match(names(nc_in$dim), c(TIME_NAMES$DEFAULT, LON_NAMES$DEFAULT,
LAT_NAMES$DEFAULT))
leveldim <- which(is.na(dummy))
levellen <- nc_in$dim[[leveldim]]$len
if (level != "all") {
if (level > levellen) {
stop(paste0("Dimension ", nc_in$dim[[leveldim]]$name, " has length: ",
levellen))
}
loop <- 1
start[leveldim] <- level
count[leveldim] <- 1
result1 <- ncvar_get(nc_in, file_data$variable$name, start = start,
count = count)
} else {
loop <- levellen
result1 <- ncvar_get(nc_in, file_data$variable$name, start = start,
count = count)
}
}
if (is.null(nc)) nc_close(nc_in)
nc_format <- get_nc_version(nc34)
cmsaf_info <- (paste0("cmsafops::extract.level for variable ",
file_data$variable$name,
" and level ",
level))
global_att_list <- names(file_data$global_att)
global_att_list <- global_att_list[toupper(global_att_list) %in% toupper(GLOBAL_ATT_DEFAULT)]
global_attributes <- file_data$global_att[global_att_list]
for (i in seq_len(loop)) {
if (level != "all") {
outfile1 <- outfile
data1 <- result1
} else {
      outfile1 <- paste0(strsplit(outfile, split = ".nc", fixed = TRUE), "_level", i, ".nc")
if (length(dim(result1)) == 3) {
data1 <- switch(leveldim,
result1[i, , ],
result1[, i, ],
result1[, , i]
)
}else if (length(dim(result1)) == 4) {
data1 <- switch(leveldim,
result1[i, , , ],
result1[, i, , ],
result1[, , i, ],
result1[, , , i]
)
}
}
data1[is.na(data1)] <- file_data$variable$attributes$missing_value
result <- data1
if (file_data$time_info$has_time_bnds) {
time_bnds <- get_time_bounds_from_file(infile, nc = nc)
vars_data <- list(result = result, time_bounds = time_bnds)
}else{
vars_data <- list(result = result)
}
dims <- define_dims(file_data$grid$is_regular,
file_data$dimension_data$x,
file_data$dimension_data$y,
file_data$dimension_data$t,
NB2,
file_data$time_info$units,
with_time_bnds = file_data$time_info$has_time_bnds)
vars <- define_vars(file_data$variable, dims, nc_format$compression, with_time_bnds = file_data$time_info$has_time_bnds)
write_output_file(
outfile1,
nc_format$force_v4,
vars,
vars_data,
file_data$variable$name,
file_data$grid$vars, file_data$grid$vars_data,
cmsaf_info,
file_data$time_info$calendar,
file_data$variable$attributes,
global_attributes,
with_time_bnds = file_data$time_info$has_time_bnds
)
}
calc_time_end <- Sys.time()
if (verbose) message(get_processing_time_string(calc_time_start, calc_time_end))
} |
Mobius.set.func <- function(object, n, k) {
msf <- Mobius.set.func.internal(object, n, k)
new("Mobius.set.func", data = msf$data, subsets = msf$subsets,
n = msf$n, k=msf$k)
}
setMethod("k.truncate.Mobius", signature(object = "set.func", k = "numeric"),
function(object, k, ...) {
msf <- k.truncate.Mobius.internal(object, k)
new("Mobius.set.func", data = msf$data, subsets = msf$subsets,
n = msf$n, k=msf$k)
}
)
setMethod("k.truncate.Mobius", signature(object = "Mobius.set.func",
k = "numeric"),
function(object, k, ...) {
if (!(k %in% 1:object@n))
stop("wrong arguments")
bs <- binom.sum(object@n, k)
new("Mobius.set.func", data = object@data[1:bs], n = object@n,
subsets = object@subsets[1:bs], k = k)
}
)
setMethod("Mobius", signature(object = "set.func"),
function(object, ...) {
msf <- k.truncate.Mobius.internal(object, object@n)
new("Mobius.set.func", data = msf$data, subsets = msf$subsets,
n = msf$n, k=msf$k)
}
)
setMethod("as.Mobius.set.func", signature(object = "set.func"),
function(object, ...) {
mu <- .C("binary2natural",
as.integer(object@n),
as.double(object@data),
as.integer(object@subsets),
mu = double(2^object@n),
PACKAGE="kappalab")$mu
new("Mobius.set.func", data = mu, subsets = object@subsets,
n = object@n, k = object@n)
}
)
setMethod("as.Mobius.set.func", signature(object = "card.set.func"),
function(object, ...) {
subsets <- .C("k_power_set",
as.integer(object@n),
as.integer(object@n),
subsets = integer(2^object@n),
PACKAGE="kappalab")$subsets
mu <- .C("cardinal2setfunction",
as.integer(object@n),
as.double(object@data),
mu = double(2^object@n),
PACKAGE="kappalab")$mu
mu <- .C("binary2natural",
as.integer(object@n),
as.double(mu),
as.integer(subsets),
mu = double(2^object@n),
PACKAGE="kappalab")$mu
new("Mobius.set.func", data = mu, subsets = subsets,
n = object@n, k = object@n)
}
)
setMethod("as.Mobius.set.func", signature(object = "Mobius.card.set.func"),
function(object, ...) {
subsets <- .C("k_power_set",
as.integer(object@n),
as.integer(object@n),
subsets = integer(2^object@n),
PACKAGE="kappalab")$subsets
mu <- .C("cardinal2setfunction",
as.integer(object@n),
as.double(object@data),
mu = double(2^object@n),
PACKAGE="kappalab")$mu
mu <- .C("binary2natural",
as.integer(object@n),
as.double(mu),
as.integer(subsets),
mu = double(2^object@n),
PACKAGE="kappalab")$mu
new("Mobius.set.func", data = mu, subsets = subsets,
n = object@n, k = object@n)
}
)
setMethod("is.monotone", signature(object = "Mobius.set.func"),
function(object, verbose = FALSE, epsilon = 1e-9, ...) {
if (!is.logical(verbose))
stop("wrong arguments")
.C("is_monotone_Mobius",
as.integer(object@n),
as.integer(object@k),
as.double(object@data),
as.integer(object@subsets),
as.integer(verbose),
as.double(epsilon),
result = integer(1),
PACKAGE="kappalab")$result[1] == 0
}
)
setMethod("is.cardinal", signature(object = "Mobius.set.func"),
function(object, ...) {
.C("is_kcardinal",
as.integer(object@n),
as.integer(object@k),
as.double(object@data),
result = integer(1),
PACKAGE="kappalab")$result[1] == 0
}
)
setMethod("is.kadditive", signature(object = "Mobius.set.func", k = "numeric"),
function(object, k, epsilon = 1e-9, ...) {
if (!(k %in% 1:object@n))
stop("wrong arguments")
.C("is_kadditive_Mobius",
as.integer(object@n),
as.integer(object@k),
as.integer(k),
as.double(object@data),
as.double(epsilon),
result = integer(1),
PACKAGE="kappalab")$result[1] == 0
}
)
setMethod("show", signature(object = "Mobius.set.func"),
function(object) {
cat(paste("\t\t",is(object)[1],"\n",sep=""))
.C("Rprint_setfunction",
as.integer(object@n),
as.integer(object@k),
as.double(object@data),
as.integer(object@subsets),
as.integer(1),
PACKAGE="kappalab")
cat("")
}
)
setMethod("to.data.frame", signature(object = "Mobius.set.func"),
function(object, ...) {
if (object@n < max.n.display) {
subsets <- .C("k_power_set_char",
as.integer(object@n),
as.integer(object@k),
as.integer(object@subsets),
subsets = character(binom.sum(object@n,object@k)),
PACKAGE="kappalab")$subsets
d <- data.frame(c(object@n,object@k,object@data),
row.names = c(" n", " k", subsets))
}
else
d <- data.frame(c(object@n,object@k,object@data),
row.names = c("n", "k", object@subsets))
names(d)[1] <- is(object)[1]
d
}
)
setMethod("Shapley.value", signature(object = "Mobius.set.func"),
function(object, ...) {
result <- .C("Shapley_value_Mobius",
as.integer(object@n),
as.integer(object@k),
as.double(object@data),
as.integer(object@subsets),
phi = double(object@n),
PACKAGE="kappalab")$phi
names(result) <- 1:object@n
result
}
)
setMethod("interaction.indices", signature(object = "Mobius.set.func"),
function(object, ...) {
phi <- .C("interaction_indices_Mobius",
as.integer(object@n),
as.integer(object@k),
as.double(object@data),
as.integer(object@subsets),
result = double(object@n*object@n),
PACKAGE="kappalab")$result
result <- matrix(phi,object@n,object@n)
diag(result) <- rep(NA,object@n)
dimnames(result) <- list(1:object@n,1:object@n)
result
}
) |
plot.bayesQR <- function(x, var=NULL, quantile=NULL, burnin=0, credint=c(.025,.975), plottype=NULL,
main=NULL, xlab=NULL, ylab=NULL, xlim=NULL, ylim=NULL,...){
pandterm <- function(message) {
stop(message, call. = FALSE)
}
if (length(plottype)!=1){
pandterm("Plottype should be 'quantile' or 'trace' or 'hist'")
} else if (!(plottype %in% c("trace","quantile","hist"))){
pandterm("Plottype should be 'quantile' or 'trace' or 'hist'")
}
nqr <- length(x)
nvar <- length(x[[1]]$names)
if (is.null(var)){
var <- 1:nvar
} else if (is.character(var)){
if (!all(var %in% x[[1]]$names)){
pandterm("Variable name does not exist in x")
}
var <- which(x[[1]]$names %in% var)
} else if (is.numeric(var)){
if (!all(var %in% c(1:nvar))){
pandterm("Incorrect variable index")
}
}
if (plottype=="quantile") {
if (nqr<3) {
pandterm("To few estimated quantiles to create a quantile plot")
}
QRsumobj <- summary(object=x, burnin=burnin, credint=credint)
z1 <- FALSE; z2 <- FALSE
for (i in 1:length(var)){
nbrcol <- ifelse(QRsumobj[[i]]$normal.approx,5,3)
plotdata <- matrix(sapply(QRsumobj,"[[","betadraw"),nrow=nvar)[var[i],]
plotdata <- cbind(sapply(QRsumobj,"[[","quantile"),matrix(plotdata,ncol=nbrcol,byrow=TRUE))
if (all(sapply(QRsumobj,"[[","normal.approx"))){
plotdata <- plotdata[,c(1,2,5,6)]
} else {
plotdata <- plotdata[,c(1:4)]
}
if (is.null(xlim)) xlim <- c(0,1)
      if (is.null(ylim)) { ylim <- c(min(plotdata[,2:4]), max(plotdata[,2:4])); z1 <- TRUE }
if (is.null(main)) main <- ""
if (is.null(xlab)) xlab <- "quantile"
if (is.null(ylab)){
ylab <- paste("Beta ",var[i],sep="")
z2 <- TRUE
}
plot(x=NULL, y=NULL, xlim=xlim, ylim=ylim, main=main, xlab=xlab, ylab=ylab, ...)
small <- min(plotdata[,2:4])-(max(plotdata[,2:4])-min(plotdata[,2:4]))
polygon(x=c(plotdata[1,1],plotdata[,1],plotdata[nqr,1]),
y=c(small,plotdata[,4],small),col="grey",border=FALSE)
polygon(x=c(plotdata[1,1],plotdata[,1],plotdata[nqr,1]),
y=c(small,plotdata[,3],small),col="white",border=FALSE)
points(x=plotdata[,1],y=plotdata[,2],typ="o",lty=2)
points(x=plotdata[,1],y=plotdata[,3],typ="l",col="darkgrey")
points(x=plotdata[,1],y=plotdata[,4],typ="l",col="darkgrey")
box(lwd=1.3,col="white")
box(lwd=1.3,col="black")
if (i < length(var)){
ans <- readline("Do you want to see the next plot (type 'y' or 'n'):\n")
while ((ans != "y") & (ans != "n")) ans <- readline("Incorrect input, type 'y' or 'n':\n")
if (ans == "n") break
}
if (z1) ylim <- NULL
if (z2) ylab <- NULL
}
}
if (plottype=="trace") {
if (!is.null(quantile)){
allquant <- sapply(x,"[[","quantile")
if(!all(quantile %in% allquant)){
pandterm("Specified quantile does not exist in x")
}
loopvec <- which(allquant %in% quantile)
} else {
loopvec <- 1:nqr
}
ans <- "n"
for (i in loopvec){
for (ii in var){
if (is.null(ylab)){
ylab <- paste("Beta ",ii,sep="")
}
if (is.null(main)){
main <- paste("Quantile: ", x[[i]]$quantile, " - Beta ", ii)
}
plotdata <- x[[i]]$betadraw[,ii]
plot(plotdata[(burnin+1):length(plotdata)], typ="l", xlab="iteration", ylab=ylab, main=main)
if (!((i==tail(loopvec,n=1))&(ii==tail(var,n=1)))){
ans <- readline("Do you want to see the next plot (type 'y' or 'n'):\n")
while ((ans != "y") & (ans != "n")) ans <- readline("Incorrect input, type 'y' or 'n':\n")
if (ans == "n") break
}
}
if (ans == "n") break
}
}
if (plottype=="hist") {
if (!is.null(quantile)){
allquant <- sapply(x,"[[","quantile")
if(!all(quantile %in% allquant)){
pandterm("Specified quantile does not exist in x")
}
loopvec <- which(allquant %in% quantile)
} else {
loopvec <- 1:nqr
}
ans <- "n"
for (i in loopvec){
for (ii in var){
plotdata <- x[[i]]$betadraw[(burnin+1):nrow(x[[i]]$betadraw),ii]
if(is.null(xlab)) xlab <- "beta"
if (is.null(main)){
main <- paste("Quantile: ", x[[i]]$quantile, " - Beta ", ii)
}
hist(plotdata, breaks=100, prob=TRUE, xlab=xlab, main=main)
if (x[[i]]$normal.approx){
xseq <- seq(min(plotdata),max(plotdata),.001)
sigma.normal <- sqrt(diag(matrix(x[[i]]$sigma.normal,nrow=sqrt(length(x[[i]]$sigma.normal)))))
points(xseq,dnorm(x=xseq,mean=mean(plotdata),sd=sigma.normal[ii]),typ="l",col="blue",lwd=2)
legend("topright","Normal approximation",lty=1,lwd=2,col="blue")
}
if (!((i==tail(loopvec,n=1))&(ii==tail(var,n=1)))){
ans <- readline("Do you want to see the next plot (type 'y' or 'n'):\n")
while ((ans != "y") & (ans != "n")) ans <- readline("Incorrect input, type 'y' or 'n':\n")
if (ans == "n") break
}
}
if (ans == "n") break
}
}
} |
library("testthat")
library("survival")
context("test-believedBroken.R")
test_that("Check asymptotic variance in Cox example with failure ties and strata", {
test <- read.table(header=T, sep = ",", text = "
start, length, event, x1, x2
0, 4, 1,0,0
0, 3, 1,2,0
0, 3, 0,0,1
0, 2, 1,0,1
0, 2, 1,1,1
0, 1, 0,1,0
0, 1, 1,1,0
")
gold <- coxph(Surv(length, event) ~ x1 + strata(x2), test, ties = "breslow")
dataPtr <- createCyclopsData(Surv(length, event) ~ x1 + strata(x2), data = test,
modelType = "cox")
cyclopsFit <- fitCyclopsModel(dataPtr)
tolerance <- 1E-4
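  # The original test stops here without an expectation; a hedged sketch of the intended
  # comparison, assuming coef() is available for both fits (a full check of the
  # asymptotic variance would additionally need the standard errors, omitted here):
  expect_equal(unname(coef(cyclopsFit)), unname(coef(gold)), tolerance = tolerance)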
}) |
do_four_factors_df <- function(df_games, teams) {
GameID <- Day <- Game <- Team <- Player.x <- FG <- FGA <- ThreeP <- FT <- FTA <- NULL
DRB <- ORB <- TOV <- Type <- EFGP <- TOVP <- ORBP <- FTRate <- NULL
df5 <- data.frame()
for (i in teams) {
team_Game <- unique(df_games$Game[df_games$Team == i])
df2 <- df_games %>%
filter(Game %in% team_Game) %>%
select(Day, Game, Team, Player.x, FG, FGA, ThreeP, FT, FTA, DRB, ORB, TOV) %>%
group_by(Team) %>%
mutate(Type = ifelse(Team == i, "Offense", "Defense")) %>%
ungroup()
df3 <- df2 %>%
group_by(Type) %>%
summarise(EFGP = (sum(FG) + 0.5 * sum(ThreeP)) / sum(FGA),
TOVP = sum(TOV) / (sum(FGA) + 0.44 * sum(FTA) + sum(TOV)),
ORB = sum(ORB),
DRB = sum(DRB),
ORBP = NA,
FTRate = sum(FT) / sum(FGA)) %>%
ungroup()
df3$ORBP[1] <- df3$ORB[1] / (df3$ORB[1] + df3$DRB[2])
df3$ORBP[2] <- df3$ORB[2] / (df3$ORB[2] + df3$DRB[1])
df4 <- df3 %>%
select(-ORB, -DRB) %>%
mutate(EFGP = round(EFGP * 100, 2),
TOVP = round(TOVP * 100, 2),
ORBP = round(ORBP * 100, 2),
FTRate = round(FTRate, 2)) %>%
mutate(Team = i) %>%
select(Team, everything())
df5 <- bind_rows(df5, df4)
}
df6 <- df5 %>%
filter(Type == "Defense") %>%
mutate(order_EFGP = Team[order(EFGP)]) %>%
mutate(order_TOVP = Team[order(TOVP, decreasing = TRUE)]) %>%
mutate(order_ORBP = Team[order(ORBP)]) %>%
mutate(order_FTRate = Team[order(FTRate)])
df6 <- as.data.frame(df6)
for (i in teams) {
orders_cols <- apply(df6[,7:10], 2, function(x){grep(i, x)})
df6[df6$Team == i, 3:6] <- paste(df6[df6$Team == i, 3:6],
" (", orders_cols, ")", sep = "")
}
df7 <- df5 %>%
filter(Type == "Offense") %>%
mutate(order_EFGP = Team[order(EFGP, decreasing = TRUE)]) %>%
mutate(order_TOVP = Team[order(TOVP)]) %>%
mutate(order_ORBP = Team[order(ORBP, decreasing = TRUE)]) %>%
mutate(order_FTRate = Team[order(FTRate, decreasing = TRUE)])
df7 <- as.data.frame(df7)
for (i in teams) {
orders_cols <- apply(df7[,7:10], 2, function(x){grep(i, x)})
df7[df7$Team == i, 3:6] <- paste(df7[df7$Team == i, 3:6],
" (", orders_cols, ")", sep = "")
}
df8 <- bind_rows(df6, df7) %>%
select(-contains("order")) %>%
arrange(rev(Team))
return(list(df_rank = df8, df_no_rank = df5))
} |
test_that("snippet generation works", {
local_test_setup(
git = FALSE, use_precommit = FALSE, package = TRUE, install_hooks = FALSE
)
usethis::use_package("R", "Depends", "3.6.0")
expect_error(
out <- capture_output(snippet_generate("additional-deps-roxygenize")),
NA,
)
expect_equal(out, "")
usethis::use_package("styler")
expect_error(
out <- capture_output(snippet_generate("additional-deps-roxygenize")),
NA,
)
expect_match(
out, " - id: roxygenize\n.* - styler\n$",
)
desc::desc_set("Remotes", "r-lib/styler")
expect_warning(
out <- capture_output(snippet_generate("additional-deps-roxygenize")),
"you have remote dependencies "
)
expect_match(
out, " - id: roxygenize\n.* - styler\n$",
)
})
test_that("snippet generation only includes hard dependencies", {
local_test_setup(
git = FALSE, use_precommit = FALSE, package = TRUE,
install_hooks = FALSE, open = FALSE
)
usethis::use_package("styler")
usethis::use_package("lintr", type = "Suggest")
expect_warning(
out <- capture_output(snippet_generate("additional-deps-roxygenize")),
NA
)
expect_match(
out, " - id: roxygenize\n.* - styler\n$",
)
})
test_that("GitHub Action CI setup works", {
local_test_setup(
git = FALSE, use_precommit = FALSE, package = TRUE, install_hooks = FALSE
)
use_precommit_config(
root = getwd(),
open = FALSE, verbose = FALSE
)
expect_error(use_ci("stuff"), "must be one of")
use_ci("gha", root = getwd())
expect_true(file_exists(".github/workflows/pre-commit.yaml"))
})
test_that("Pre-commit CI GitHub Action template is parsable", {
expect_error(
yaml::read_yaml(system.file("pre-commit-gha.yaml", package = "precommit")),
NA
)
})
test_that("Pre-commit CI setup works", {
local_test_setup(
git = FALSE, use_precommit = FALSE, package = TRUE, install_hooks = FALSE
)
use_precommit_config(
root = getwd(),
open = FALSE, verbose = FALSE
)
use_ci(root = getwd(), open = FALSE)
expect_false(file_exists(".github/workflows/pre-commit.yaml"))
})
test_that("Pre-commit CI setup works", {
local_test_setup(
git = FALSE, use_precommit = FALSE, package = TRUE, install_hooks = FALSE
)
expect_error(use_ci(root = getwd(), open = FALSE), "o `.pre-commit-config.yaml`")
})
test_that("Autoupdate is not conducted when renv present in incompatible setup", {
skip_on_cran()
mockery::stub(ensure_renv_precommit_compat, "version_precommit", "2.13.0")
local_test_setup(
git = TRUE, use_precommit = TRUE, install_hooks = FALSE, open = FALSE
)
initial <- rev_read() %>%
rev_as_pkg_version()
writeLines("", "renv.lock")
expect_warning(
ensure_renv_precommit_compat(
package_version_renv = package_version("0.13.0"), root = getwd()
),
"Autoupdate aborted"
)
downgraded <- rev_read() %>%
rev_as_pkg_version()
expect_true(downgraded < initial)
fs::file_delete("renv.lock")
expect_warning(
ensure_renv_precommit_compat(
package_version("0.13.0"),
root = getwd()
),
NA
)
}) |
calc_streak <- function(x) {
if (!is.atomic(x)) {
x <- x[, 1]
}
if (any(!x %in% c("H", "M"))) {
stop('Input should only contain hits ("H") and misses ("M")')
}
y <- rep(0, length(x))
y[x == "H"] <- 1
y <- c(0, y, 0)
wz <- which(y == 0)
streak <- diff(wz) - 1
return(data.frame(length = streak))
} |
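# Illustrative usage (not part of the original source): streaks of hits separated by misses.
calc_streak(c("H", "H", "M", "H"))   # data.frame with streak lengths 2 and 1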
l_dens2D <- function(type, n = c(50, 50), bw = NULL, tol = 1e-6, trans = sqrt, ...){
arg <- list(...)
match.arg(type, c("cond", "joint"))
arg$xtra <- list("type" = type, "n" = n, "bw" = bw,
"tol" = tol, "trans" = trans, "grad" = list())
o <- structure(list("fun" = "l_dens2D",
"arg" = arg),
class = "gamLayer")
return(o)
}
l_dens <- l_dens2D
l_dens2D.1D <- l_dens2D.Check1DNumeric <- l_dens2D.PtermNumeric <- function(a){
xtra <- a$xtra
a$xtra <- NULL
if( is.null(a$data$res$y) ){
message("l_dens2D(): Partial residuals are not available")
return( NULL )
}
dXY <- .fastKernDens(dat = a$data$res, xlimit = NULL, ylimit = NULL,
cond = (xtra$type == "cond"), bw = xtra$bw, ngr = xtra$n,
tol = xtra$tol)$dXY
a$data <- data.frame("d" = xtra$trans(as.numeric(t(dXY$fhat))),
"x" = rep(dXY$x1, each = xtra$n[1]),
"y" = rep(dXY$x2, xtra$n[2]))
a$mapping <- aes(x = x, y = y, fill = d)
a$inherit.aes <- FALSE
if( is.null(a$na.rm) ){ a$na.rm <- TRUE }
out <- list()
out[[1]] <- do.call("geom_raster", a)
out[[2]] <- scale_fill_gradientn(colours = viridis(50, begin = 0.2),
na.value = "white",
name=ifelse(xtra$type == "cond", "p(y|x)", "p(x,y)"))
class(out) <- "listOfLayers"
return( out )
}
l_dens2D.2D <- l_dens2D.Check2DNumericNumeric <- function(a){
return( l_dens2D.1D(a) )
} |
rd_model <- function(data, variable.pred, mode = 0, scale = TRUE){
if(!is.null(variable.pred) && !is.null(data)){
modelo.rd <- NULL
form <- formula(paste0(variable.pred,"~."))
if(mode == 0){
modelo.rd <- pcr(form, data = data, scale = scale, validation = 'CV')
}
else if(mode == 1){
modelo.rd <- plsr(form, data = data, scale = scale, validation = 'CV')
}
optimal.n <- which.min(RMSEP(modelo.rd)$val[1, 1, ]) - 1
names(optimal.n) <- NULL
modelo.rd$optimal.n.comp <- optimal.n
modelo.rd$call$formula <- form
modelo.rd$call$scale <- scale
return(modelo.rd)
}
else{
return(NULL)
}
}
rd_prediction <- function(model, test.data, ncomp = NULL) {
if(!is.null(test.data) && !is.null(model)){
ncomp <- ifelse(is.null(ncomp), model$optimal.n.comp, ncomp)
return(predict(model,test.data, ncomp = ncomp))
}
return(NULL)
}
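# Illustrative usage (not part of the original source), kept as a comment so it does not
# execute when this file is sourced; it assumes the 'pls' package (pcr, plsr, RMSEP) is
# available, as the functions above require:
#   modelo <- rd_model(mtcars, "mpg", mode = 0, scale = TRUE)
#   rd_prediction(modelo, mtcars)   # predicts with the CV-optimal number of components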
rd_type <- function(mode.rd = 0){
mode.rd <- ifelse(is.null(mode.rd), 0, mode.rd)
tipo <- "NA"
if(mode.rd == 0){ tipo <- "ACP" }
else if(mode.rd == 1){ tipo <- "MCP" }
return(tipo)
}
plot_RMSE <- function(model, n.comp, titles = c("RMSE Segun Numero de Componentes",
"Numero de Componente","RMSE")){
RMSE.CV <- pls::RMSEP(model)$val[1, 1, ]
df <- data.frame(Componentes = 0:(length(RMSE.CV) - 1), Error = RMSE.CV)
x_y.RMSE <- list()
for (i in 1:dim(df)[1]) {
x_y.RMSE[[i]] <- list(value = c(df[i,1],df[i,2]))
}
line.Values <- list()
maximo <- ceiling(max(df[,2]))
values <- 0:maximo
for (i in 1:length(values)) {
line.Values[[i]] <- list(value = c(n.comp,values[i]))
}
opts <- list(
xAxis = list(
type = "value",
name = titles[2],
nameTextStyle = list(fontSize = 13),
max = max(df[,1]),
interval = 2
),
yAxis = list(
type = "value",
name = titles[3],
nameTextStyle = list(fontSize = 13),
max = maximo
),
series = list(
list(
type = "line",
symbolSize = 6,
lineStyle = list(width = 2,type = 'solid'),
color = "
data = x_y.RMSE,
tooltip = list(formatter = e_JS(paste0(
"function(params){
return('<b>",titles[2],": </b>' + params.value[0] + '<br /><b>",titles[3],": </b>' + params.value[1].toFixed(4))
}
")))),
list(
type = "line",
symbol = "none",
lineStyle = list(width = 2, type = 'dashed'),
tooltip = list(show = F),
color = "blue",
data = line.Values
)
)
)
e_charts() |>
e_list(opts) |>
e_title(text = titles[1]) |>
e_tooltip() |>
e_datazoom(show = F) |>
e_show_loading()
}
plot_pred_rd <- function(model, n.comp, titles = c("Varianza Explicada en Predictores",
"Numero de Componentes","Porcentaje de Varianza Explicada")){
var.explicada <- cumsum(pls::explvar(model)) / 100
df <- data.frame(Componentes = 1:length(var.explicada), Varianza = var.explicada * 100)
x_y.Varianza <- list()
for (i in 1:dim(df)[1]) {
x_y.Varianza[[i]] <- list(value = c(df[i,1],df[i,2]))
}
line.Values <- list()
maximo <- ceiling(max(df[,2]))
values <- 0:maximo
for (i in 1:length(values)) {
line.Values[[i]] <- list(value = c(n.comp,values[i]))
}
opts <- list(
xAxis = list(
type = "value",
name = titles[2],
nameTextStyle = list(fontSize = 13),
max = max(df[,1]),
interval = 2
),
yAxis = list(
type = "value",
name = titles[3],
nameTextStyle = list(fontSize = 13),
axisLabel = list(formatter = '{value} %'),
max = maximo
),
series = list(
list(
type = "line",
symbolSize = 6,
lineStyle = list(width = 2,type = 'solid'),
color = "
data = x_y.Varianza,
tooltip = list(formatter = e_JS(paste0(
"function(params){
return('<b>",titles[2],": </b>' + params.value[0] + '<br /><b>",titles[3],": </b>' + params.value[1].toFixed(4))
}
")))),
list(
type = "line",
symbol = "none",
lineStyle = list(width = 2, type = 'dashed'),
tooltip = list(show = F),
color = "blue",
data = line.Values
)
)
)
e_charts() |>
e_list(opts) |>
e_title(text = titles[1]) |>
e_tooltip() |>
e_datazoom(show = F) |>
e_show_loading()
}
plot_var_pred_rd <- function(model, n.comp, titles = c("Varianza Explicada en Variable a Predecir",
"Numero de Componente","Porcentaje de Varianza Explicada")){
var.explicada <- drop(pls::R2(model, estimate = "train", intercept = FALSE)$val)
df <- data.frame(Componentes = 1:length(var.explicada), Varianza = var.explicada * 100)
x_y.Varianza <- list()
for (i in 1:dim(df)[1]) {
x_y.Varianza[[i]] <- list(value = c(df[i,1],df[i,2]))
}
line.Values <- list()
maximo <- ceiling(max(df[,2]))
values <- 0:maximo
for (i in 1:length(values)) {
line.Values[[i]] <- list(value = c(n.comp,values[i]))
}
opts <- list(
xAxis = list(
type = "value",
name = titles[2],
nameTextStyle = list(fontSize = 13),
max = max(df[,1]),
interval = 2
),
yAxis = list(
type = "value",
name = titles[3],
nameTextStyle = list(fontSize = 13),
axisLabel = list(formatter = '{value} %'),
max = maximo
),
series = list(
list(
type = "line",
symbolSize = 6,
lineStyle = list(width = 2,type = 'solid'),
color = "
data = x_y.Varianza,
tooltip = list(formatter = e_JS(paste0(
"function(params){
return('<b>",titles[2],": </b>' + params.value[0] + '<br /><b>",titles[3],": </b>' + params.value[1].toFixed(4))
}
")))),
list(
type = "line",
symbol = "none",
lineStyle = list(width = 2, type = 'dashed'),
tooltip = list(show = F),
color = "blue",
data = line.Values
)
)
)
e_charts() |>
e_list(opts) |>
e_title(text = titles[1]) |>
e_tooltip() |>
e_datazoom(show = F) |>
e_show_loading()
}
codeRd <- function(variable.predecir, mode, scale){
return(paste0("rd_model(data, '",variable.predecir,"', mode = ",mode, ", scale = ", scale, ")"))
}
codeRdPred <- function(nombreModelo, ncomp){
return(paste0("rd_prediction(model = ", nombreModelo, ", test.data, ncomp = ", ncomp, ")"))
}
codeRdIG <- function(variable.predecir){
return(paste0("general_indices(test.data[,'",variable.predecir,"'], prediccion.rd)"))
} |
library(readxl)
suppressPackageStartupMessages(library(dplyr))
library(ggplot2)
library(readr)
pop_xls <- read_excel("xls/gapdata003.xls")
pop_xls %>% str()
pop_xls %>% head()
pop_raw <- pop_xls %>%
select(country = Area, year = Year, pop = Population)
pop_raw %>% str()
summary(pop_raw$year)
year_freq <- pop_raw %>%
count(year)
(p <- ggplot(year_freq, aes(x = year, y = n)) +
geom_bar(stat = "identity"))
p + xlim(c(1800, 2010))
p + xlim(c(1945, 1955))
p + xlim(c(2000, 2015))
year_min <- 1950
year_max <- 2008
pop_raw <- pop_raw %>%
filter(year %>% between(year_min, year_max))
str(pop_raw)
pop_raw %>%
filter(country == "India")
pop_raw <- pop_raw %>%
mutate(pop = pop %>% as.integer())
write_tsv(pop_raw, "01_pop.tsv")
devtools::session_info() |
"A4.simu" <- function()
{
tclRequire("Tktable")
font0 <- tkfont.create(family="times",size=35,weight="bold",slant="italic")
font1<-tkfont.create(family="times",size=14,weight="bold")
font2<-tkfont.create(family="times",size=16,weight="bold",slant="italic")
font3<-tkfont.create(family="times",size=10,weight="bold")
font4<-tkfont.create(family="times",size=12)
fourAmod<- function(Mx,peak1,peak2,peak3,peak4)
{
expA <- c(Mx/2, Mx/2,Mx/2,(1-Mx)/2,(1-Mx)/2,(1-Mx)/2)
expB <- c(Mx/2, (1-Mx)/2,(1-Mx)/2,Mx/2,Mx/2,(1-Mx)/2)
expC<-c((1-Mx)/2,Mx/2,(1-Mx)/2,Mx/2,(1-Mx)/2,Mx/2)
expD<-c((1-Mx)/2,(1-Mx)/2,Mx/2,(1-Mx)/2,Mx/2,Mx/2)
obsA <- peak1/(peak1+peak2+peak3+peak4)
obsB <- peak2/(peak1+peak2+peak3+peak4)
obsC<-peak3/(peak1+peak2+peak3+peak4)
obsD<-peak4/(peak1+peak2+peak3+peak4)
resid <- (expA-obsA)^2+(expB-obsB)^2+(expC-obsC)^2+(expD-obsD)^2
genotypes<-c("AB,CD","AC,BD","AD,BC","BC,AD","BD,AC","CD,AB")
Mx.Conditioned<-abs(c((peak1+peak2)/(peak1+peak2+peak3+peak4),
(peak1+peak3)/(peak1+peak2+peak3+peak4),
(peak1+peak4)/(peak1+peak2+peak3+peak4),
(peak2+peak3)/(peak1+peak2+peak3+peak4),
(peak2+peak4)/(peak1+peak2+peak3+peak4),
(peak3+peak4)/(peak1+peak2+peak3+peak4)))
result<-data.frame(genotypes,expA,expB,expC,expD,obsA,obsB,obsC,obsD,resid,Mx.Conditioned)
return(result)
}
fourAmodG<-function(obsMx,peak1,peak2,peak3,peak4)
{
Mx<-seq(0.1,0.9,by=0.05)
res<-sapply(Mx, function(i) fourAmod(i,peak1,peak2,peak3,peak4)$resid)
par(mar = c(4,4,2,3)+0.1,oma = c(1,2,0.3,5))
plot(Mx,(res[1,]),type='n',lab=c(6,5,1),xlab="Mx (mixture proportion)",ylab="Residuals",cex.lab=1.2,las=1,ylim=c(min(res),max(res)))
title("Four-allele model simulations")
pchref<-c(18,15,17,4,8,16)
col<-c("lightgreen","magenta","purple","cyan","red","darkblue")
for(i in 1:6)
{
points(Mx,(res[i,]),pch=pchref[i],col=col[i])
lines(Mx,(res[i,]),col=col[i])
}
abline(v=obsMx,col="gray",lty=2)
legend(0.95,max(res),c("AB,CD","AC,BD","AD,BC","BC,AD","BD,AC","CD,AB",'Obs. Mx'),
,pch =c(pchref,32),lty=2,col=c(col,'gray'),bg='white',xpd=NA,cex=1.2,
box.lwd=2)
}
fourAmodT<-function(peak1,peak2,peak3,peak4)
{
Mx<-seq(0.1,0.9,by=0.05)
geno<-c("AB,CD","AC,BD","AD,BC","BC,AD","BD,AC","CD,AB")
res<-sapply(Mx, function(i) signif(fourAmod(i,peak1,peak2,peak3,peak4)$resid,digits=3))
row.names(res)<-c(geno)
colnames(res)<-paste('Mx=',Mx,sep='')
minX<-which(res==min(res),arr.ind=TRUE)
minC<-signif(Mx[minX[,2]],digits=2)
minY<-row.names(minX)
return(list(minY,minC,res))
}
tt <- tktoplevel()
tkwm.title(tt,"Four-allele model simulations")
frame1 <- tkframe(tt, relief="groove", borderwidth=2)
frame2 <- tkframe(tt, relief="groove", borderwidth=2)
xyframe <- tkframe(frame1, relief="groove", borderwidth=2)
labframe <- tkframe(frame1, relief="groove", borderwidth=2)
limframe <- tkframe(frame2, relief="groove", borderwidth=2)
posframe <- tkframe(frame2, relief="groove", borderwidth=2)
legframe <- tkframe(frame2, relief="groove", borderwidth=2)
xy1var <- tclVar(553)
xy2var <- tclVar(472)
xy3var<- tclVar(623)
xy4var<- tclVar(738)
mixvar <- tclVar(0.70)
TFrame <- tkframe(tt, relief="groove")
labh <- tklabel(TFrame)
tkgrid(tklabel(TFrame,text="Four-allele model", font=font2, foreground="red"), labh)
tkpack(TFrame)
xy1.entry <- tkentry(xyframe, textvariable=xy1var, width=8)
xy2.entry <- tkentry(xyframe, textvariable=xy2var, width=8)
xy3.entry <- tkentry(xyframe, textvariable=xy3var, width=8)
xy4.entry <- tkentry(xyframe, textvariable=xy4var, width=8)
peak1<-function()
{
p1<-as.numeric(tclvalue(xy1var))
	if(p1<0){tkmessageBox(message="Invalid value for the peak height of allele A",icon="error",type="ok")} # dialog text after "allele" truncated in the source; reconstructed following the Mx message pattern
	else{return(p1)}
}
peak2<-function()
{
p2<-as.numeric(tclvalue(xy2var))
	if(p2<0){tkmessageBox(message="Invalid value for the peak height of allele B",icon="error",type="ok")
}
else{return(p2)}
}
peak3<-function()
{
p3<-as.numeric(tclvalue(xy3var))
	if(p3<0){tkmessageBox(message="Invalid value for the peak height of allele C",icon="error",type="ok")
}
else{return(p3)}
}
peak4<-function()
{
p4<-as.numeric(tclvalue(xy4var))
	if(p4<0){tkmessageBox(message="Invalid value for the peak height of allele D",icon="error",type="ok")
}
else{return(p4)}
}
tkgrid(tklabel(xyframe, text="- Peak heights (rfu) -", font=font3,foreground="blue"), columnspan=5)
tkgrid(tklabel(xyframe,text="Allele
tkgrid(tklabel(xyframe,text="Allele
tkgrid(tklabel(xyframe,text="Allele
tkgrid(tklabel(xyframe,text="Allele
lab.entry <- tkentry(labframe, textvariable=mixvar, width=8)
prop<-function()
{
Mx<-as.numeric(tclvalue(mixvar))
if(Mx>1 || Mx<0){
tkmessageBox(message="Mx is the mixture proportion, it must be comprised in the interval [0,1]",icon="error",type="ok")
}
else{return(Mx)}
}
tkgrid(tklabel(labframe, text="- Mixture proportion -",font=font3, foreground="blue"), columnspan=3)
tkgrid(tklabel(labframe,text="Mx : "), lab.entry)
tkpack(xyframe, labframe, side="left")
tkpack(frame1)
RCSFrame <- tkframe(tt, relief="groove")
plotFunction<-function()
{
p1<-peak1()
p2<-peak2()
p3<-peak3()
p4<-peak4()
obsMx<-prop()
fourAmodG(obsMx,p1,p2,p3,p4)
}
propFunction<-function()
{
p1<-peak1()
p2<-peak2()
p3<-peak3()
p4<-peak4()
res<-fourAmodT(p1,p2,p3,p4)
resdata<-res[[3]]
resgeno<-res[[1]]
resmix<-res[[2]]
myRarray<- c("Genotype","Mx=0.1","Mx=0.15","Mx=0.20","Mx=0.25","Mx=0.30",
"Mx=0.35","Mx=0.40","Mx=0.45","Mx=0.50","Mx=0.55","Mx=0.60","Mx=0.65",
"Mx=0.70","Mx=0.75","Mx=0.80","Mx=0.85","Mx=0.90",
"AB,CD",resdata[1,],
"AC,BD",resdata[2,],
"AD,BC",resdata[3,],
"BC,AD",resdata[4,],
"BD,AC",resdata[5,],
"CD,AB",resdata[6,])
dim(myRarray) <- c(18,7)
tclarray <- tclArray()
for(i in 0:17)
{
for (j in (0:6))
{
tclarray[[i,j]] <- myRarray[i+1,j+1]
}
}
myRarray2<-resgeno
dim(myRarray2)<-c(1,length(resgeno))
myRarray3<-resmix
dim(myRarray3)<-c(1,length(resmix))
tclarray2 <- tclArray()
for(k in 0:(length(resgeno)-1))
{
tclarray2[[0,k]] <- myRarray2[1,k+1]
}
tclarray3 <- tclArray()
for(h in 0:(length(resmix)-1))
{
tclarray3[[0,h]] <- myRarray3[1,h+1]
}
save1<-function(filename1="simulation4.txt",filename2="likelihood4.txt")
{
write.table(myRarray,file=filename1,row.names=FALSE,col.names=FALSE)
loctab<-data.frame(resmix,resgeno)
colnames(loctab)<-c('Mixture proportion','Genotype combination')
write.table(loctab,file=filename2,row.names=FALSE)
}
saveFunction<-function()
{
ss<-tktoplevel()
SSframe <- tkframe(ss, relief="groove",width=35)
tkwm.title(ss,"")
filevar1 <- tclVar("simulation4.txt")
filevar1.entry <- tkentry(SSframe, textvariable=filevar1, width=12)
filevar2 <- tclVar("likelihood4.txt")
filevar2.entry <- tkentry(SSframe, textvariable=filevar2, width=12)
save1.butt<-tkbutton(ss, text="Enter", font=font3,command=function() save1(tclvalue(filevar1),tclvalue(filevar2)))
tkgrid(tklabel(SSframe,text="Simulations results",font=font4), filevar1.entry)
tkgrid(tklabel(SSframe,text="Maximum likelihood",font=font4), filevar2.entry)
tkgrid(filevar2.entry, save1.butt)
tkpack(SSframe)
}
tclRequire("Tktable")
tt<-tktoplevel()
tkwm.title(tt,"Most likely genotypes combination")
table1 <- tkwidget(tt,"table",variable=tclarray,rows=18,colwidth=8,cols=7,titlerows=1,background="white")
table2 <- tkwidget(tt,"table",variable=tclarray2,
cols=length(resgeno),selectmode="extended",colwidth=10,rows=1,background="lightblue")
table3 <- tkwidget(tt,"table",variable=tclarray3,
cols=length(resmix),selectmode="extended",colwidth=10,rows=1,background="lightblue")
tit1<-tkwidget(tt,"label",text="Matrix of the residuals",font=font1,foreground="blue")
tit2<-tkwidget(tt,"label",text="Maximum likelihood estimation results",font=font1,foreground="blue")
tit3<-tkwidget(tt,"label",text="Most likely genotype combinations",font=font1,foreground="blue")
tit4<-tkwidget(tt,"label",text="Corresponding mixture proportions",font=font1,foreground="blue")
filelab<-tkwidget(tt,"label",text="-Save the results-",font=font3,foreground="blue")
save.butt<-tkbutton(tt, text="Save", font=font3,command=saveFunction)
tkpack(tit1,table1,tit3,table2,tit4,table3,save.butt)
}
filterFunction<-function()
{
p1<-peak1()
p2<-peak2()
p3<-peak3()
p4<-peak4()
obsMx<-prop()
tmp1<-fourAmod(obsMx,p1,p2,p3,p4)
Mx.C<-signif(tmp1$Mx.Conditioned,digits=2)
genotypes<-as.character(tmp1$genotypes)
tab1<-c("Genotype",genotypes,"Mx conditioned",Mx.C)
dim(tab1)<-c(7,2)
tab1array <- tclArray()
for(m in 0:6){
for (n in (0:1)){
tab1array[[m,n]] <- tab1[m+1,n+1]}}
saveHH<-function(name1="filter4.txt")
{
write.table(tab1,name1,row.names=FALSE,col.names=FALSE)
}
saveFunction2<-function()
{
hh<-tktoplevel()
HHframe<- tkframe(hh, relief="groove")
tkwm.title(hh,"Filenames")
filtervar<- tclVar("filter4.txt")
filtervar.entry <- tkentry(HHframe, textvariable=filtervar, width=12)
saveHH.butt<-tkbutton(hh, text="Enter", font=font3,command=function() saveHH(tclvalue(filtervar)))
tkgrid(tklabel(HHframe,text="Genotypes filter",font=font4), filtervar.entry)
tkgrid(filtervar.entry, saveHH.butt)
tkpack(HHframe)
}
tclRequire("Tktable")
tt2<-tktoplevel()
tkwm.title(tt2,"Genotypes filter")
save2.butt<-tkbutton(tt2, text="Save", font=font3,command=saveFunction2)
tab1.tcl<-tkwidget(tt2,"table",variable=tab1array,rows=8,colwidth=18,cols=2,titlerows=1,background="white")
tkpack(tab1.tcl,save2.butt)
}
A1.but <- tkbutton(RCSFrame, text="Plot simulations",font=font3, command=plotFunction)
A2.but <- tkbutton(RCSFrame, text="Simulation details", font=font3,command=propFunction)
A3.but <- tkbutton(RCSFrame, text="Genotype filter", font=font3,command=filterFunction)
tkgrid(A1.but,A2.but,A3.but,ipadx=20)
tkpack(RCSFrame)
} |
Cons_check <- function(Costs_FS,Costs_MET,path=tempdir()) {
dir.create(file.path(path,"Consistency_checks"))
Costs_sum= aggregate(Costs_MET$value,by=list(Costs_MET$year, Costs_MET$Fleet_segment,Costs_MET$variable_name ),FUN="sum")
colnames(Costs_sum)=c("year","Fleet_segment","variable_name","Sum_costs_by_metier")
Merge=merge(Costs_sum,Costs_FS,by=c("year","Fleet_segment","variable_name") )[,c(1,2,3,4,7)]
colnames(Merge)=c("year","Fleet_segment","variable_name","Sum_costs_by_metier","Costs_by_fleet_segment")
Merge$DIFF= round((Merge$Sum_costs_by_metier - Merge$Costs_by_fleet_segment)/ Merge$Costs_by_fleet_segment*100,1)
write.table(Merge,file.path(path,"Consistency_checks","Consistency_checks.csv"),sep=";",row.names=F)
unlink(file.path(tempdir(),"Consistency_checks"),recursive=T)
} |
coord <- data.frame(X = c(0, 200, 0, 200), Y = c(0, 0, 200, 200)) + 5000
plot <- rep("plot1", 4)
context("number corner")
test_that("number corner", {
corner <- numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE)
expect_is(corner, "data.frame")
expect_equal(dim(corner), c(4, 4))
expect_equal(
numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE)$corner,
c(4, 3, 1, 2)
)
expect_equal(
numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = FALSE)$corner,
c(2, 3, 1, 4)
)
expect_equal(
numberCorner(projCoord = coord[c(1, 4, 3, 2), ], plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = FALSE)$corner,
c(2, 4, 1, 3)
)
expect_equal(
numberCorner(projCoord = coord[c(1, 4, 3, 2), ], plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE)$corner,
c(4, 2, 1, 3)
)
expect_equal(
numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, TRUE, FALSE, FALSE), clockWise = TRUE)$corner,
c(2, 1, 3, 4)
)
expect_equal(
numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, TRUE, FALSE, FALSE), clockWise = FALSE)$corner,
c(4, 1, 3, 2)
)
})
test_that("number corner error", {
expect_error(numberCorner())
expect_error(numberCorner(longlat = "fdz", projCoord = "fze"))
expect_error(numberCorner(longlat = coord[1:2, ], plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE))
expect_error(numberCorner(projCoord = coord[1:2, ], plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE))
expect_error(numberCorner(projCoord = coord, plot = plot[1:2], origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE))
expect_error(numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, TRUE, TRUE, FALSE), clockWise = TRUE))
plot[2] <- "plot2"
expect_error(numberCorner(projCoord = coord, plot = plot, origin = c(FALSE, FALSE, TRUE, FALSE), clockWise = TRUE))
}) |
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
do.call(knitr::read_chunk, list(path = "scripts/tutorial-ggplot.R"))
knitr::include_graphics("img/ggplot1.png")
knitr::include_graphics("img/ggplot2.png")
knitr::include_graphics("img/ggplot3.png")
knitr::include_graphics("img/ggplot4.png")
knitr::include_graphics("img/ggplot5.png") |
test_that("in POSIXct format and only changed date and dt", {
set.seed(1)
step<- rgamma(1000, c(1, 2.5, 10), c(1, 1, 1))
angle<- runif(1000, -pi, pi)
date<- seq(c(ISOdate(2020, 6, 17, tz = "UTC")), by = "hour", length.out = 1000)
date<- date + lubridate::seconds(runif(length(date), -15, 15))
dt<- as.numeric(diff(date)) * 60
dt<- c(dt, NA)
id<- rep(1:10, each = 100)
dat<- data.frame(id, date, dt, step, angle)
dat1<- round_track_time(dat = dat, id = "id", int = 3600, tol = 20, time.zone = "UTC",
units = "secs")
expect_is(dat1, "data.frame")
expect_is(dat1$date, "POSIXct")
expect_equal(dat1$dt[1], 3600)
expect_error(expect_identical(dat1$date, dat$date))
expect_error(expect_identical(dat1$dt, dat$dt))
expect_equal(dat1$step, dat$step)
}) |
AutoPipe_tSNE=function(me,perplexity=30,max_iter=500,groups_men){
set.seed(5000)
ana=Rtsne::Rtsne(t(me), check_duplicates = F, dim=3, perplexity=perplexity, max_iter=max_iter)
ana=data.frame(ana$Y)
rownames(ana)=colnames(me)
ana$cluster=groups_men[rownames(ana), ]$cluster
max=max(groups_men[rownames(ana), ]$cluster)
ana$col="red"
col<-RColorBrewer::brewer.pal(n=max,name = "Paired")
for(i in 1:max){
color=col[i]
ana[ana$cluster==i, ]$col=color
}
plot(ana[,1:2], col=ana$col, pch=19, bty="n", main="t-distributed stochastic neighbor embedding")
} |
NULL
connectparticipant <- function(config = list()) {
svc <- .connectparticipant$operations
svc <- set_config(svc, config)
return(svc)
}
.connectparticipant <- list()
.connectparticipant$operations <- list()
.connectparticipant$metadata <- list(
service_name = "connectparticipant",
endpoints = list("*" = list(endpoint = "participant.connect.{region}.amazonaws.com", global = FALSE), "cn-*" = list(endpoint = "participant.connect.{region}.amazonaws.com.cn", global = FALSE), "us-iso-*" = list(endpoint = "participant.connect.{region}.c2s.ic.gov", global = FALSE), "us-isob-*" = list(endpoint = "participant.connect.{region}.sc2s.sgov.gov", global = FALSE)),
service_id = "ConnectParticipant",
api_version = "2018-09-07",
signing_name = "execute-api",
json_version = "1.1",
target_prefix = ""
)
.connectparticipant$service <- function(config = list()) {
handlers <- new_handlers("restjson", "v4")
new_service(.connectparticipant$metadata, handlers, config)
} |
xshewhartrunsrules.crit <- function(L0, mu=0, type="12") {
if (type=="14" & L0>255) {
stop("L0 too large for type=\"14\"")
} else {
c1 <- 1
c2 <- 1.5
arl1 <- xshewhartrunsrules.arl(mu,c=c1,type=type)
arl2 <- xshewhartrunsrules.arl(mu,c=c2,type=type)
a.error <- 1; c.error <- 1
while ( a.error>1e-6 && c.error>1e-8 ) {
c3 <- c1 + (L0-arl1)/(arl2-arl1)*(c2-c1)
arl3 <- xshewhartrunsrules.arl(mu,c=c3,type=type)
c1 <- c2; c2 <- c3
arl1 <- arl2; arl2 <- arl3
a.error <- abs(arl2-L0); c.error <- abs(c2-c1)
}
}
c3
} |
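# Illustrative call (not part of the original source): assumes the companion
# xshewhartrunsrules.arl() referenced above is available; solves for the control
# limit c that gives an in-control ARL of L0 = 500 under rule set "12".
xshewhartrunsrules.crit(L0 = 500, type = "12")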
chk_nlist <- function(x, x_name = NULL) {
if (vld_nlist(x)) {
return(invisible())
}
if (is.null(x_name)) x_name <- deparse_backtick_chk(substitute(x))
chk_s3_class(x, "nlist", x_name = x_name)
chk_named(x, x_name = x_name)
x_name_names <- backtick_chk(p0("names(", unbacktick_chk(x_name), ")"))
chk_pars(names(x), x_name = x_name_names)
chk_not_any_na(names(x), x_name = x_name_names)
chk_unique(names(x), x_name = x_name_names)
chk_all(x, chk_numeric, x_name = x_name)
}
chk_nlists <- function(x, x_name = NULL) {
if (vld_nlists(x)) {
return(invisible())
}
if (is.null(x_name)) x_name <- deparse_backtick_chk(substitute(x))
chk_s3_class(x, "nlists", x_name = x_name)
chk_all(x, chk_nlist, x_name = x_name)
if (!vld_all_identical(lapply(x, names))) {
abort_chk("nlist elements of ", x_name, " must have matching names.", tidy = FALSE)
}
if (!vld_all_identical(lapply(x, lapply, dims))) {
abort_chk("nlist elements of ", x_name, " must have matching dimensions.", tidy = FALSE)
}
abort_chk("nlist elements of ", x_name, " must have matching types.", tidy = FALSE)
} |
"RS.data" |