test_that("Legacy widget html methods work", {
res <- widget_html("widgetA", "htmlwidgets", id = "id", style = NULL, class = NULL)
expect_identical(res$name, "canvas")
})
test_that("Legacy widget html methods are warned on unexpected output type", {
expect_warning(
res <- widget_html("widgetB", "htmlwidgets", id = "id", style = NULL, class = NULL),
"widgetB_html returned an object of class `logical` instead of a `shiny.tag`.",
fixed = TRUE
)
expect_identical(res, TRUE)
})
test_that("New-style widget html method works, and is preferred", {
res <- widget_html("widgetC", "htmlwidgets", id = "id", style = NULL, class = NULL)
expect_identical(
res,
widget_html.widgetC("widgetC", "htmlwidgets", id = "id", style = NULL, class = NULL))
})
test_that("New-style widget html methods do not trigger warning on non-tag output", {
expect_warning(
res <- widget_html("widgetD", "htmlwidgets", id = "id", style = NULL, class = NULL),
NA
)
expect_identical(res, TRUE)
})
test_that("Fallback logic still works", {
res <- widget_html("does_not_exist", "htmlwidgets", id = "id", style = NULL, class = NULL)
expect_identical(res, tags$div(id = "id"))
})
test_that("Legacy methods work with tagList() and HTML()", {
expect_warning({
widget_html("widgetE", "htmlwidgets", id = "id", style = NULL, class = NULL)
widget_html("widgetF", "htmlwidgets", id = "id", style = NULL, class = NULL)
}, NA)
})
|
context("utility functions")
a = 1:3
b = letters[2:9]
c = 9:20
set.seed(313)
expect_equal(length(recycle(a, b, c, prototype = "c")$b), length(c))
expect_equal(length(recycle(a, b, c, prototype = c)$a), length(c))
expect_equal(length(recycle(a, b, c, prototype = 5)$c), 5)
expect_equal(length(recycle(a, b, c, prototype = rnorm(100))$c), 100)
expect_equal(length(recycle(a, b, c)$c), max(length(a), length(b), length(c)))
expect_error(recycle(a, b, c, prototype = function() NULL))
expect_error(recycle(a, b, c, prototype = "haha"))
expect_error(recycle(a, b, c, prototype = -1))
x = list(a = 5,
b = 0,
c = "a",
d = NULL)
y = list(a = 3,
b = 7,
f = NA)
expect_equal(listmerge(x, y, type = "merge")$b, 7)
expect_null(listmerge(x, y, type = "template")$f)
expect_equal(listmerge(x, NULL, type = "merge"), x)
expect_equal(listmerge(x, list(h = "b"), type = "merge")$h, "b")
|
labkey.getModuleProperty <- function(baseUrl=NULL, folderPath, moduleName, propName)
{
baseUrl=labkey.getBaseUrl(baseUrl)
if (missing(folderPath)) stop (paste("A value must be specified for folderPath."))
if (missing(moduleName)) stop (paste("A value must be specified for moduleName."))
if (missing(propName)) stop (paste("A value must be specified for propName."))
folderPath <- encodeFolderPath(folderPath)
url <- paste(baseUrl, "project", folderPath, "getContainers.api", sep="")
params <- list(moduleProperties=c(moduleName))
response <- labkey.post(url, toJSON(params, auto_unbox=TRUE))
if (exists("response"))
{
result <- (fromJSON(response))
if (is.null(result$id))
{
return ("User does not have permission to perform this operation")
}
if (is.null(result$moduleProperties))
{
return (paste("Module property does not exist for", moduleName, "module in folder", folderPath))
}
moduleProperties <- result$moduleProperties
for(i in 1:nrow(moduleProperties))
{
row <- moduleProperties[i,]
if (tolower(row$module) == tolower(moduleName) && row$name == propName)
{
return (row$effectiveValue)
}
}
return (paste("Module property", propName, "does not exist for", moduleName, "module in folder", folderPath))
}
return (paste("Failed to get", moduleName, "module property for folder", folderPath))
}
labkey.setModuleProperty <- function(baseUrl=NULL, folderPath, moduleName, propName, propValue)
{
baseUrl=labkey.getBaseUrl(baseUrl)
if (missing(folderPath)) stop (paste("A value must be specified for folderPath."))
if (missing(moduleName)) stop (paste("A value must be specified for moduleName."))
if (missing(propName)) stop (paste("A value must be specified for propName."))
if (missing(propValue)) stop (paste("A value must be specified for propValue."))
folderPath <- encodeFolderPath(folderPath)
property <- list()
property$moduleName = moduleName
property$userId = 0
property$propName = propName
property$value = propValue
property$currentContainer = TRUE
params <- list(properties=list(property))
url <- paste(baseUrl, "core", folderPath, "saveModuleProperties.api", sep="")
response <- labkey.post(url, toJSON(params, auto_unbox=TRUE))
return (fromJSON(response, simplifyVector=FALSE, simplifyDataFrame=FALSE))
}
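# Hedged usage sketch (not part of the source above): both helpers need a reachable
# LabKey server and an authenticated session; the URL, folder, module, and property
# names below are placeholders.
# labkey.setModuleProperty(baseUrl="http://localhost:8080/labkey", folderPath="/home",
#                          moduleName="EHR", propName="EHRStudyContainer", propValue="/home")
# labkey.getModuleProperty(baseUrl="http://localhost:8080/labkey", folderPath="/home",
#                          moduleName="EHR", propName="EHRStudyContainer")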
|
print.grm <-
function (x, digits = max(3, getOption("digits") - 4), ...) {
if (!inherits(x, "grm"))
stop("Use only with 'grm' objects.\n")
cat("\nCall:\n", paste(deparse(x$call), sep = "\n", collapse = "\n"), "\n\n", sep = "")
coefs <- if (x$IRT.param) IRT.parm(x, digits.abbrv = x$control$digits.abbrv)$parms else x$coef
coefs <- lapply(coefs, round, digits = digits)
if (all(sapply(coefs, length) == length(coefs[[1]])))
coefs <- do.call(rbind, coefs)
cat("Coefficients:\n")
print(coefs, print.gap = 2, quote = FALSE)
cat("\nLog.Lik:", round(x$log.Lik, digits))
cat("\n\n")
invisible(x)
}
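# Hedged usage sketch (assumes a graded response model fitted elsewhere, e.g. with
# ltm::grm(); 'fit' is a placeholder object of class "grm"):
# print(fit, digits = 3)   # prints the call, rounded coefficients, and the log-likelihood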
|
symNodesI <- function(tree){
if(!inherits(tree,"phylo")) stop("The input tree must be in phylo-format.")
if(!is_binary(tree)) stop("The input tree must be binary.")
n <- length(tree$tip.label)
if(n==1){
warning("The function might not deliver accurate results for n=1.")
return(0)
}
Descs <- getDescMatrix(tree)
Ancs <- getAncVec(tree)
depthResults <- getNodesOfDepth(mat=Descs, root=n+1, n=n)
worklab <- matrix(rep(NA,2*(n+tree$Nnode)), ncol = 2)
inum <- rep(NA, n+tree$Nnode)
for(d in depthResults$maxdepth:0){
current_Nodes <- stats::na.omit(as.vector(depthResults$nodesOfDepth[d+1,]))
for(v in current_Nodes){
if(is.na(Descs[v,1])){
worklab[v,] <- c(0,0)
}else{
worklab[v,] <- sort(c(inum[Descs[v,]]), decreasing = FALSE)
}
}
inum[current_Nodes] <- symBucketLexicoSort(worklab[current_Nodes,])
}
numb_symNodes <- 0
for(i in (n+1):(n+tree$Nnode)){
if(worklab[i,1]!=0){
if(worklab[i,1]==worklab[i,2]){
numb_symNodes <- numb_symNodes + 1
}
}
}
return(n-1-numb_symNodes)
}
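# Hedged usage sketch (assumes the ape package for generating a random binary tree,
# and that the internal helpers getDescMatrix(), getAncVec(), getNodesOfDepth() and
# symBucketLexicoSort() are available):
# tree <- ape::rtree(8)
# symNodesI(tree)   # n - 1 minus the number of symmetry vertices among the interior nodes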
|
coerce_mat <- function(mat,cfun=as.simple_triplet_sym_matrix) if (is.null(dim(mat))) mat else cfun(mat)
coerce_blkmat <- function(Aij,cfun=as.simple_triplet_sym_matrix) lapply(Aij,coerce_mat,cfun=cfun)
coerce_const <- function(Ai,cfun=as.simple_triplet_sym_matrix) lapply(Ai,coerce_blkmat,cfun=cfun)
getdf_mat <- function(x)
{
if(!is.null(dim(x))) {
x <- as.simple_triplet_sym_matrix(x)
if (length(x$v) == 0)
return(NULL)
else
return(cbind(x$j,x$i,x$v))
}
else {
ind <- which(x != 0)
if (length(ind) == 0)
return(NULL)
else
return(cbind(ind,ind,x[ind]))
}
}
getdf_blkmat <- function(x)
{
do.call(rbind,lapply(1:length(x),function(j) {df <- getdf_mat(x[[j]]); if (is.null(df)) return(NULL) else return(cbind(j,df))}))
}
getdf_const <- function(x)
{
do.call(rbind,lapply(1:length(x), function(i) {df <- getdf_blkmat(x[[i]]); if (is.null(df)) return(NULL) else return(cbind(i,df))}))
}
readsdpa <- function(file="",verbose=FALSE)
{
if (file=="")
stop("'file' argument must be a non-empty string")
ret <- .Call(C_readsdpa,
as.character(file),
as.integer(verbose),
PACKAGE="Rcsdp")
names(ret) <- c("C","A","b","K");
names(ret$K) <- c("type","size");
ret$K$type <- c("s","l")[vector_csdp2R(ret$K$type)];
ret$K$size <- vector_csdp2R(ret$K$size);
m <- length(ret$b)-1;
prob.info <- get.prob.info(ret$K,m)
ret$C <- blkmatrix_csdp2R(ret$C,prob.info);
ret$A <- constraints_csdp2R(ret$A,prob.info);
ret$b <- vector_csdp2R(ret$b);
ret
}
readsdpa.sol <- function(K,C,m,file="")
{
if (file=="")
stop("'file' argument must be a non-empty string")
prob.info <- get.prob.info(K,m);
ret <- .Call(C_readsdpa_sol,
as.character(file),
as.integer(sum(prob.info$block.sizes)),
as.integer(m),
blkmatrix_R2csdp(C,prob.info),
PACKAGE="Rcsdp");
names(ret) <- c("X","y","Z");
ret$X <- blkmatrix_csdp2R(ret$X,prob.info);
ret$y <- vector_csdp2R(ret$y);
ret$Z <- blkmatrix_csdp2R(ret$Z,prob.info);
ret
}
writesdpa <- function(C,A,b,K,file="")
{
if (file=="")
stop("'file' argument must be a non-empty string")
prob.info <- get.prob.info(K,length(b));
validate.data(C,A,b,prob.info)
prob.data <- prepare.data(C,A,b,prob.info)
.Call(C_writesdpa,
as.character(file),
as.integer(sum(prob.info$block.sizes)),
as.integer(prob.info$nconstraints),
as.integer(prob.info$nblocks),
as.integer(c(0,prob.info$block.types)),
as.integer(c(0,prob.info$block.sizes)),
prob.data$C,
prob.data$A,
prob.data$b,
PACKAGE="Rcsdp")
}
writesdpa.sol <- function(X,Z,y,K,file="")
{
if (file=="")
stop("'file' argument must be a non-empty string")
prob.info <- get.prob.info(K,length(y));
.Call(C_writesdpa_sol,
as.character(file),
as.integer(sum(prob.info$block.sizes)),
as.integer(prob.info$nconstraints),
blkmatrix_R2csdp(X,prob.info),
vector_R2csdp(y),
blkmatrix_R2csdp(Z,prob.info),
PACKAGE="Rcsdp")
}
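# Hedged usage sketch (assumes an SDPA-format problem file on disk; "problem.dat-s"
# and "copy.dat-s" are placeholder paths):
# prob <- readsdpa(file = "problem.dat-s")
# str(prob$K)                                     # block structure: type ("s"/"l") and sizes
# writesdpa(prob$C, prob$A, prob$b, prob$K, file = "copy.dat-s")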
|
plot.pmc.ppc <- function(x, Style=NULL, Data=NULL, Rows=NULL, PDF=FALSE,
...)
{
if(missing(x)) stop("The x argument is required.")
if(!inherits(x, "pmc.ppc")) stop("x is not of class pmc.ppc.")
if(is.null(Style)) Style <- "Density"
if(is.null(Rows)) Rows <- 1:nrow(x[["yhat"]])
if(Style == "Covariates") {
if(PDF == TRUE) {
pdf("PPC.Plots.Covariates.pdf")
par(mfrow=c(3,3))}
else par(mfrow=c(3,3), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Covariates.")
if(is.null(Data[["X"]]) & is.null(Data[["x"]]))
stop("X or x is required in Data.")
if(is.null(Data[["X"]]))
co <- matrix(Data[["x"]], length(Data[["x"]]), 1)
else if(is.null(Data[["x"]])) co <- Data[["X"]]
temp <- summary(x, Quiet=TRUE)$Summary
mycol <- rgb(0, 100, 0, 50, maxColorValue=255)
for (i in 1:ncol(co)) {
plot(co[Rows,i], temp[Rows,5], col=mycol, pch=16, cex=0.75,
ylim=c(min(temp[Rows,c(1,4:6)]),max(temp[Rows,c(1,4:6)])),
xlab=paste("X[,",i,"]", sep=""),
ylab="yhat",
sub="Gray lines are yhat at 2.5% and 95%.")
panel.smooth(co[Rows,i], temp[Rows,5], col=mycol, pch=16,
cex=0.75)}}
if(Style == "Covariates, Categorical DV") {
if(PDF == TRUE) {
pdf("PPC.Plots.Covariates.Cat.pdf")
par(mfrow=c(3,3))}
else par(mfrow=c(3,3), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Covariates.")
if(is.null(Data[["X"]]) & is.null(Data[["x"]]))
stop("X or x is required in Data.")
if(is.null(Data[["X"]]))
co <- matrix(Data[["x"]], length(Data[["x"]]), 1)
else if(is.null(Data[["x"]])) co <- Data[["X"]]
temp <- summary(x, Categorical=TRUE, Quiet=TRUE)$Summary
ncat <- length(table(temp[,1]))
mycol <- rgb(0, 100, 0, 50, maxColorValue=255)
for (i in 1:ncol(co)) {for (j in 2:(ncat+1)) {
plot(co[Rows,i], temp[Rows,j], col=mycol, pch=16, cex=0.75,
xlab=paste("X[,",i,"]", sep=""),
ylab=colnames(temp)[j])
panel.smooth(co[Rows,i], temp[Rows,j], col=mycol, pch=16,
cex=0.75)}}}
if(Style == "Density") {
if(PDF == TRUE) {
pdf("PPC.Plots.Density.pdf")
par(mfrow=c(3,3))}
else par(mfrow=c(3,3), ask=TRUE)
for (j in 1:length(Rows)) {
plot(density(x[["yhat"]][Rows[j],]),
main=paste("Post. Pred. Plot of yhat[", Rows[j],
",]", sep=""), xlab="Value",
sub="Black=Density, Red=y")
polygon(density(x[["yhat"]][Rows[j],]), col="black",
border="black")
abline(v=x[["y"]][Rows[j]], col="red")}}
if(Style == "DW") {
if(PDF == TRUE) pdf("PPC.Plots.DW.pdf")
par(mfrow=c(1,1))
epsilon.obs <- x[["y"]] - x[["yhat"]]
N <- nrow(epsilon.obs)
S <- ncol(epsilon.obs)
epsilon.rep <- matrix(rnorm(N*S), N, S)
d.obs <- d.rep <- rep(0, S)
for (s in 1:S) {
d.obs[s] <- sum(c(0,diff(epsilon.obs[,s]))^2, na.rm=TRUE) /
sum(epsilon.obs[,s]^2, na.rm=TRUE)
d.rep[s] <- sum(c(0,diff(epsilon.rep[,s]))^2, na.rm=TRUE) /
sum(epsilon.rep[,s]^2, na.rm=TRUE)}
result <- "no"
if(mean(d.obs > d.rep, na.rm=TRUE) < 0.025) result <- "positive"
if(mean(d.obs > d.rep, na.rm=TRUE) > 0.975) result <- "negative"
d.d.obs <- density(d.obs, na.rm=TRUE)
d.d.rep <- density(d.rep, na.rm=TRUE)
plot(d.d.obs, xlim=c(0,4),
ylim=c(0, max(d.d.obs$y, d.d.rep$y)), col="white",
main="Durbin-Watson test",
xlab=paste("d.obs=", round(mean(d.obs, na.rm=TRUE),2), " (",
round(as.vector(quantile(d.obs, probs=0.025, na.rm=TRUE)),2),
", ", round(as.vector(quantile(d.obs, probs=0.975, na.rm=TRUE)),
2), "), p(d.obs > d.rep) = ", round(mean(d.obs > d.rep,
na.rm=TRUE),3), " = ", result, " autocorrelation", sep=""))
polygon(d.d.obs, col=rgb(0,0,0,50,maxColorValue=255), border=NA)
polygon(d.d.rep, col=rgb(255,0,0,50,maxColorValue=255), border=NA)
abline(v=2, col="red")}
if(Style == "DW, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.DW.M.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Fitted, Multivariate, C.")
if(is.null(Data[["Y"]])) stop("Y is required in Data.")
M <- nrow(Data[["Y"]])
J <- ncol(Data[["Y"]])
epsilon.obs <- x[["y"]] - x[["yhat"]]
N <- nrow(epsilon.obs)
S <- ncol(epsilon.obs)
epsilon.rep <- matrix(rnorm(N*S), N, S)
d.obs <- d.rep <- rep(0, S)
for (j in 1:J) {
for (s in 1:S) {
d.obs[s] <- sum(c(0,diff(epsilon.obs[((j-1)*M+1):(j*M),s]))^2, na.rm=TRUE) /
sum(epsilon.obs[((j-1)*M+1):(j*M),s]^2, na.rm=TRUE)
d.rep[s] <- sum(c(0,diff(epsilon.rep[((j-1)*M+1):(j*M),s]))^2, na.rm=TRUE) /
sum(epsilon.rep[((j-1)*M+1):(j*M),s]^2, na.rm=TRUE)}
result <- "no"
if(mean(d.obs > d.rep, na.rm=TRUE) < 0.025) result <- "positive"
if(mean(d.obs > d.rep, na.rm=TRUE) > 0.975) result <- "negative"
d.d.obs <- density(d.obs, na.rm=TRUE)
d.d.rep <- density(d.rep, na.rm=TRUE)
plot(d.d.obs, xlim=c(0,4),
ylim=c(0, max(d.d.obs$y, d.d.rep$y)), col="white",
main="Durbin-Watson test",
xlab=paste("d.obs=", round(mean(d.obs, na.rm=TRUE),2), " (",
round(as.vector(quantile(d.obs, probs=0.025, na.rm=TRUE)),2),
", ", round(as.vector(quantile(d.obs, probs=0.975, na.rm=TRUE)),
2), "), p(d.obs > d.rep) = ", round(mean(d.obs > d.rep,
na.rm=TRUE),3), " = ", result, " autocorrelation", sep=""),
sub=paste("Y[,",j,"]",sep=""))
polygon(d.d.obs, col=rgb(0,0,0,50,maxColorValue=255),
border=NA)
polygon(d.d.rep, col=rgb(255,0,0,50,maxColorValue=255),
border=NA)
abline(v=2, col="red")}}
if(Style == "ECDF") {
if(PDF == TRUE) pdf("PPC.Plots.ECDF.pdf")
par(mfrow=c(1,1))
plot(ecdf(x[["y"]][Rows]), verticals=TRUE, do.points=FALSE,
main="Cumulative Fit",
xlab="y (black) and yhat (red; gray)",
ylab="Cumulative Frequency")
lines(ecdf(apply(x[["yhat"]][Rows,], 1, quantile, probs=0.975)),
verticals=TRUE, do.points=FALSE, col="gray")
lines(ecdf(apply(x[["yhat"]][Rows,], 1, quantile, probs=0.025)),
verticals=TRUE, do.points=FALSE, col="gray")
lines(ecdf(apply(x[["yhat"]][Rows,], 1, quantile, probs=0.500)),
verticals=TRUE, do.points=FALSE, col="red")}
if(Style == "Fitted") {
if(PDF == TRUE) pdf("PPC.Plots.Fitted.pdf")
par(mfrow=c(1,1))
temp <- summary(x, Quiet=TRUE)$Summary
plot(temp[Rows,1], temp[Rows,5], pch=16, cex=0.75,
ylim=c(min(temp[Rows,4], na.rm=TRUE),
max(temp[Rows,6], na.rm=TRUE)),
xlab="y", ylab="yhat", main="Fitted")
for (i in seq_along(Rows)) {
lines(c(temp[Rows[i],1], temp[Rows[i],1]),
c(temp[Rows[i],4], temp[Rows[i],6]))}
panel.smooth(temp[Rows,1], temp[Rows,5], pch=16, cex=0.75)}
if(Style == "Fitted, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.Fitted.M.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Fitted, Multivariate, C.")
if(is.null(Data[["Y"]])) stop("Y is required in Data.")
temp <- summary(x, Quiet=TRUE)$Summary
for (i in 1:ncol(Data[["Y"]])) {
temp1 <- as.vector(matrix(temp[,1], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i])
temp2 <- as.vector(matrix(temp[,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i])
temp3 <- as.vector(matrix(temp[,5], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i])
temp4 <- as.vector(matrix(temp[,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i])
plot(temp1, temp3, pch=16, cex=0.75,
ylim=c(min(temp2, na.rm=TRUE),
max(temp4, na.rm=TRUE)),
xlab=paste("Y[,", i, "]", sep=""), ylab="yhat",
main="Fitted")
for (j in 1:nrow(Data[["Y"]])) {
lines(c(temp1[j], temp1[j]),
c(temp2[j], temp4[j]))}
panel.smooth(temp1, temp3, pch=16, cex=0.75)}}
if(Style == "Fitted, Multivariate, R") {
if(PDF == TRUE) {
pdf("PPC.Plots.Fitted.M.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Fitted, Multivariate, R.")
if(is.null(Data[["Y"]])) stop("Y is required in Data.")
temp <- summary(x, Quiet=TRUE)$Summary
for (i in 1:nrow(Data[["Y"]])) {
temp1 <- as.vector(matrix(temp[,1], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,])
temp2 <- as.vector(matrix(temp[,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,])
temp3 <- as.vector(matrix(temp[,5], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,])
temp4 <- as.vector(matrix(temp[,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,])
plot(temp1, temp3, pch=16, cex=0.75,
ylim=c(min(temp2, na.rm=TRUE),
max(temp4, na.rm=TRUE)),
xlab=paste("Y[,", i, "]", sep=""), ylab="yhat",
main="Fitted")
for (j in 1:ncol(Data[["Y"]])) {
lines(c(temp1[j], temp1[j]),
c(temp2[j], temp4[j]))}
panel.smooth(temp1, temp3, pch=16, cex=0.75)}}
if(Style == "Jarque-Bera") {
if(PDF == TRUE) pdf("PPC.Plots.Jarque.Bera.pdf")
par(mfrow=c(1,1))
epsilon.obs <- epsilon.rep <- x[["y"]][Rows] - x[["yhat"]][Rows,]
kurtosis <- function(x) {
m4 <- mean((x-mean(x, na.rm=TRUE))^4, na.rm=TRUE)
kurt <- m4/(sd(x, na.rm=TRUE)^4)-3
return(kurt)}
skewness <- function(x) {
m3 <- mean((x-mean(x, na.rm=TRUE))^3, na.rm=TRUE)
skew <- m3/(sd(x, na.rm=TRUE)^3)
return(skew)}
JB.obs <- JB.rep <- rep(0, ncol(epsilon.obs))
N <- nrow(epsilon.obs)
for (s in 1:ncol(epsilon.obs)) {
epsilon.rep[,s] <- rnorm(N, mean(epsilon.obs[,s],
na.rm=TRUE), sd(epsilon.obs[,s], na.rm=TRUE))
K.obs <- kurtosis(epsilon.obs[,s])
S.obs <- skewness(epsilon.obs[,s])
K.rep <- kurtosis(epsilon.rep[,s])
S.rep <- skewness(epsilon.rep[,s])
JB.obs[s] <- (N/6)*(S.obs^2 + ((K.obs-3)^2)/4)
JB.rep[s] <- (N/6)*(S.rep^2 + ((K.rep-3)^2)/4)}
p <- round(mean(JB.obs > JB.rep, na.rm=TRUE), 3)
result <- "Non-Normality"
if((p >= 0.025) & (p <= 0.975)) result <- "Normality"
d.obs <- density(JB.obs)
d.rep <- density(JB.rep)
plot(d.obs, xlim=c(min(d.obs$x,d.rep$x), max(d.obs$x,d.rep$x)),
ylim=c(0, max(d.obs$y, d.rep$y)), col="white",
main="Jarque-Bera Test",
xlab="JB", ylab="Density",
sub=paste("JB.obs=", round(mean(JB.obs, na.rm=TRUE),2),
" (", round(as.vector(quantile(JB.obs, probs=0.025,
na.rm=TRUE)),2), ",", round(as.vector(quantile(JB.obs,
probs=0.975, na.rm=TRUE)),2), "), p(JB.obs > JB.rep) = ",
p, " = ", result, sep=""))
polygon(d.obs, col=rgb(0,0,0,50,maxColorValue=255), border=NA)
polygon(d.rep, col=rgb(255,0,0,50,maxColorValue=255), border=NA)}
if(Style == "Jarque-Bera, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.Jarque.Bera.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Jarque-Bera, Multivariate, C.")
if(is.null(Data[["Y"]])) stop("Y is required in Data.")
M <- nrow(Data[["Y"]])
J <- ncol(Data[["Y"]])
epsilon.obs <- epsilon.rep <- x[["y"]] - x[["yhat"]]
kurtosis <- function(x) {
m4 <- mean((x-mean(x, na.rm=TRUE))^4, na.rm=TRUE)
kurt <- m4/(sd(x, na.rm=TRUE)^4)-3
return(kurt)}
skewness <- function(x) {
m3 <- mean((x-mean(x, na.rm=TRUE))^3, na.rm=TRUE)
skew <- m3/(sd(x, na.rm=TRUE)^3)
return(skew)}
JB.obs <- JB.rep <- rep(0, ncol(epsilon.obs))
N <- nrow(epsilon.obs)
for (j in 1:J) {
for (s in 1:ncol(epsilon.obs)) {
e.obs <- matrix(epsilon.obs[,s], M, J)
e.rep <- rnorm(M, mean(e.obs[,j], na.rm=TRUE),
sd(e.obs[,j], na.rm=TRUE))
K.obs <- kurtosis(e.obs[,j])
S.obs <- skewness(e.obs[,j])
K.rep <- kurtosis(e.rep)
S.rep <- skewness(e.rep)
JB.obs[s] <- (N/6)*(S.obs^2 + ((K.obs-3)^2)/4)
JB.rep[s] <- (N/6)*(S.rep^2 + ((K.rep-3)^2)/4)}
p <- round(mean(JB.obs > JB.rep, na.rm=TRUE), 3)
result <- "Non-Normality"
if((p >= 0.025) & (p <= 0.975)) result <- "Normality"
d.obs <- density(JB.obs)
d.rep <- density(JB.rep)
plot(d.obs, xlim=c(min(d.obs$x,d.rep$x), max(d.obs$x,d.rep$x)),
ylim=c(0, max(d.obs$y, d.rep$y)), col="white",
main="Jarque-Bera Test",
xlab=paste("JB for Y[,",j,"]", sep=""), ylab="Density",
sub=paste("JB.obs=", round(mean(JB.obs, na.rm=TRUE),2),
" (", round(as.vector(quantile(JB.obs, probs=0.025,
na.rm=TRUE)),2), ",", round(as.vector(quantile(JB.obs,
probs=0.975, na.rm=TRUE)),2), "), p(JB.obs > JB.rep) = ",
p, " = ", result, sep=""))
polygon(d.obs, col=rgb(0,0,0,50,maxColorValue=255),
border=NA)
polygon(d.rep, col=rgb(255,0,0,50,maxColorValue=255),
border=NA)}}
if(Style == "Mardia") {
if(PDF == TRUE) pdf("PPC.Plots.Mardia.pdf")
par(mfrow=c(2,1))
if(is.null(Data))
stop("Data is required for Style=Mardia, C.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required for Style=Mardia, C.")
epsilon.obs <- x[["y"]] - x[["yhat"]]
M <- nrow(Data[["Y"]])
J <- ncol(Data[["Y"]])
K3.obs <- K3.rep <- K4.obs <- K4.rep <- rep(0, ncol(epsilon.obs))
for (s in 1:ncol(epsilon.obs)) {
e.obs <- matrix(epsilon.obs[,s], M, J)
e.obs.mu <- colMeans(e.obs)
e.obs.mu.mat <- matrix(e.obs.mu, M, J, byrow=TRUE)
e.obs.stand <- e.obs - e.obs.mu.mat
S.obs <- var(e.obs)
A.obs <- t(chol(S.obs))
A.inv.obs <- solve(A.obs)
Z.obs <- t(A.inv.obs %*% t(e.obs.stand))
Dij.obs <- Z.obs %*% t(Z.obs)
D2.obs <- diag(Dij.obs)
K3.obs[s] <- mean(as.vector(Dij.obs)^3)
K4.obs[s] <- mean(D2.obs^2)
e.rep <- rmvn(M, e.obs.mu.mat, S.obs)
e.rep.mu <- colMeans(e.rep)
e.rep.mu.mat <- matrix(e.rep.mu, M, J, byrow=TRUE)
e.rep.stand <- e.rep - e.rep.mu.mat
S.rep <- var(e.rep)
A.rep <- t(chol(S.rep))
A.inv.rep <- solve(A.rep)
Z.rep <- t(A.inv.rep %*% t(e.rep.stand))
Dij.rep <- Z.rep %*% t(Z.rep)
D2.rep <- diag(Dij.rep)
K3.rep[s] <- mean(as.vector(Dij.rep)^3)
K4.rep[s] <- mean(D2.rep^2)}
p.K3 <- round(mean(K3.obs > K3.rep), 3)
p.K4 <- round(mean(K4.obs > K4.rep), 3)
K3.result <- K4.result <- "Non-Normality"
if((p.K3 >= 0.025) & (p.K3 <= 0.975)) K3.result <- "Normality"
if((p.K4 >= 0.025) & (p.K4 <= 0.975)) K4.result <- "Normality"
d.K3.obs <- density(K3.obs)
d.K3.rep <- density(K3.rep)
d.K4.obs <- density(K4.obs)
d.K4.rep <- density(K4.rep)
plot(d.K3.obs, xlim=c(min(d.K3.obs$x, d.K3.rep$x),
max(d.K3.obs$x, d.K3.rep$x)),
ylim=c(0, max(d.K3.obs$y, d.K3.rep$y)), col="white",
main="Mardia's Test of MVN Skewness",
xlab="Skewness Test Statistic (K3)", ylab="Density",
sub=paste("K3.obs=", round(mean(K3.obs, na.rm=TRUE), 2),
" (", round(quantile(K3.obs, probs=0.025, na.rm=TRUE),
2), ", ", round(quantile(K3.obs, probs=0.975,
na.rm=TRUE), 2), "), p(K3.obs > K3.rep) = ",
p.K3, " = ", K3.result, sep=""))
polygon(d.K3.obs, col=rgb(0,0,0,50,maxColorValue=255), border=NA)
polygon(d.K3.rep, col=rgb(255,0,0,50,maxColorValue=255), border=NA)
plot(d.K4.obs, xlim=c(min(d.K4.obs$x, d.K4.rep$x),
max(d.K4.obs$x, d.K4.rep$x)),
ylim=c(0, max(d.K4.obs$y, d.K4.rep$y)), col="white",
main="Mardia's Test of MVN Kurtosis",
xlab="Kurtosis Test Statistic (K4)", ylab="Density",
sub=paste("K4.obs=", round(mean(K4.obs, na.rm=TRUE), 2),
" (", round(quantile(K4.obs, probs=0.025, na.rm=TRUE),
2), ", ", round(quantile(K4.obs, probs=0.975,
na.rm=TRUE), 2), "), p(K4.obs > K4.rep) = ",
p.K4, " = ", K4.result, sep=""))
polygon(d.K4.obs, col=rgb(0,0,0,50,maxColorValue=255), border=NA)
polygon(d.K4.rep, col=rgb(255,0,0,50,maxColorValue=255), border=NA)}
if(Style == "Predictive Quantiles") {
if(PDF == TRUE) pdf("PPC.Plots.PQ.pdf")
par(mfrow=c(1,1))
temp <- summary(x, Quiet=TRUE)$Summary
mycol <- rgb(0, 100, 0, 50, maxColorValue=255)
plot(temp[Rows,1], temp[Rows,7], ylim=c(0,1), col=mycol,
pch=16, cex=0.75, xlab="y", ylab="PQ",
main="Predictive Quantiles")
panel.smooth(temp[Rows,1], temp[Rows,7], col=mycol, pch=16,
cex=0.75)
abline(h=0.025, col="gray")
abline(h=0.975, col="gray")}
if(Style == "Residual Density") {
if(PDF == TRUE) pdf("PPC.Plots.Residual.Density.pdf")
par(mfrow=c(1,1))
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
dens <- density(epsilon.summary[2,Rows], na.rm=TRUE)
plot(dens, col="black", main="Residual Density",
xlab=expression(epsilon), ylab="Density")
polygon(dens, col="black", border="black")
abline(v=0, col="red")}
if(Style == "Residual Density, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.Residual.Density.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Residual Density, Multivariate, C.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required for Style=Residual Density, Multivariate, C.")
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
epsilon.500 <- matrix(epsilon.summary[2,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
for (i in 1:ncol(Data[["Y"]])) {
dens <- density(epsilon.500[,i], na.rm=TRUE)
plot(dens, col="black", main="Residual Density",
xlab=paste("epsilon[,", i, "]", sep=""),
ylab="Density")
polygon(dens, col="black", border="black")
abline(v=0, col="red")}}
if(Style == "Residual Density, Multivariate, R") {
if(PDF == TRUE) {
pdf("PPC.Plots.Residual.Density.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Residual Density, Multivariate, R.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required for Style=Residual Density, Multivariate, R.")
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
epsilon.500 <- matrix(epsilon.summary[2,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
for (i in 1:nrow(Data[["Y"]])) {
dens <- density(epsilon.500[i,], na.rm=TRUE)
plot(dens, col="black", main="Residual Density",
xlab=paste("epsilon[", i, ",]", sep=""),
ylab="Density")
polygon(dens, col="black", border="black")
abline(v=0, col="red")}}
if(Style == "Residuals") {
if(PDF == TRUE) pdf("PPC.Plots.Residuals.pdf")
par(mfrow=c(1,1))
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
plot(epsilon.summary[2,Rows], pch=16, cex=0.75,
ylim=c(min(epsilon.summary[,Rows], na.rm=TRUE),
max(epsilon.summary[,Rows], na.rm=TRUE)),
xlab="y", ylab=expression(epsilon))
lines(rep(0, ncol(epsilon.summary[,Rows])), col="red")
for (i in seq_along(Rows)) {
lines(c(i,i), c(epsilon.summary[1,Rows[i]],
epsilon.summary[3,Rows[i]]), col="black")}}
if(Style == "Residuals, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.Residuals.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Residuals, Multivariate, C.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required for Style=Residuals, Multivariate, C.")
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
epsilon.025 <- matrix(epsilon.summary[1,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
epsilon.500 <- matrix(epsilon.summary[2,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
epsilon.975 <- matrix(epsilon.summary[3,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
for (i in 1:ncol(Data[["Y"]])) {
plot(epsilon.500[,i], pch=16, cex=0.75,
ylim=c(min(epsilon.025[,i], na.rm=TRUE),
max(epsilon.975[,i], na.rm=TRUE)),
xlab=paste("Y[,", i, "]", sep=""), ylab=expression(epsilon))
lines(rep(0, nrow(epsilon.500)), col="red")
for (j in 1:nrow(Data[["Y"]])) {
lines(c(j,j), c(epsilon.025[j,i],
epsilon.975[j,i]), col="black")}}}
if(Style == "Residuals, Multivariate, R") {
if(PDF == TRUE) {
pdf("PPC.Plots.Residuals.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Residuals, Multivariate, C.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required for Style=Residuals, Multivariate, C.")
epsilon <- x[["y"]] - x[["yhat"]]
epsilon.summary <- apply(epsilon, 1, quantile,
probs=c(0.025,0.500,0.975), na.rm=TRUE)
epsilon.025 <- matrix(epsilon.summary[1,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
epsilon.500 <- matrix(epsilon.summary[2,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
epsilon.975 <- matrix(epsilon.summary[3,], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))
for (i in 1:nrow(Data[["Y"]])) {
plot(epsilon.500[i,], pch=16, cex=0.75,
ylim=c(min(epsilon.025[i,], na.rm=TRUE),
max(epsilon.975[i,], na.rm=TRUE)),
xlab=paste("Y[", i, ",]", sep=""), ylab=expression(epsilon))
lines(rep(0, ncol(epsilon.500)), col="red")
for (j in 1:ncol(Data[["Y"]])) {
lines(c(j,j), c(epsilon.025[i,j],
epsilon.975[i,j]), col="black")}}}
if(Style == "Space-Time by Space") {
if(PDF == TRUE) {
pdf("PPC.Plots.SpaceTime.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Space-Time by Space.")
if(is.null(Data[["longitude"]]))
stop("Variable longitude is required in Data.")
if(is.null(Data[["latitude"]]))
stop("Variable latitude is required in Data.")
if(is.null(Data[["S"]])) stop("Variable S is required in Data.")
if(is.null(Data[["T"]])) stop("Variable T is required in Data.")
temp <- summary(x, Quiet=TRUE)$Summary
for (s in 1:Data[["S"]]) {
plot(matrix(temp[,1], Data[["S"]], Data[["T"]])[s,],
ylim=c(min(c(matrix(temp[,4], Data[["S"]], Data[["T"]])[s,],
matrix(temp[,1], Data[["S"]], Data[["T"]])[s,]), na.rm=TRUE),
max(c(matrix(temp[,6], Data[["S"]], Data[["T"]])[s,],
matrix(temp[,1], Data[["S"]], Data[["T"]])[s,]), na.rm=TRUE)),
type="l", xlab="Time", ylab="y",
main=paste("Space-Time at Space s=",s," of ",
Data[["S"]], sep=""),
sub="Actual=Black, Fit=Red, Interval=Transparent Red")
polygon(c(1:Data[["T"]],rev(1:Data[["T"]])),
c(matrix(temp[,4], Data[["S"]], Data[["T"]])[s,],
rev(matrix(temp[,6], Data[["S"]], Data[["T"]])[s,])),
col=rgb(255, 0, 0, 50, maxColorValue=255), border=FALSE)
lines(matrix(temp[,5], Data[["S"]], Data[["T"]])[s,], col="red")}}
if(Style == "Space-Time by Time") {
if(PDF == TRUE) {
pdf("PPC.Plots.SpaceTime.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Space-Time by Time.")
if(is.null(Data[["longitude"]]))
stop("Variable longitude is required in Data.")
if(is.null(Data[["latitude"]]))
stop("Variable latitude is required in Data.")
if(is.null(Data[["S"]])) stop("Variable S is required in Data.")
if(is.null(Data[["T"]])) stop("Variable T is required in Data.")
Heat <- (1-(x[["y"]]-min(x[["y"]], na.rm=TRUE)) /
max(x[["y"]]-min(x[["y"]], na.rm=TRUE), na.rm=TRUE)) * 99 + 1
Heat <- matrix(Heat, Data[["S"]], Data[["T"]])
for (t in 1:Data[["T"]]) {
plot(Data[["longitude"]], Data[["latitude"]],
col=heat.colors(120)[Heat[,t]],
pch=16, cex=0.75, xlab="Longitude", ylab="Latitude",
main=paste("Space-Time at t=",t," of ", Data[["T"]],
sep=""), sub="Red=High, Yellow=Low")}}
if(Style == "Spatial") {
if(PDF == TRUE) pdf("PPC.Plots.Spatial.pdf")
par(mfrow=c(1,1))
if(is.null(Data)) stop("Data is required for Style=Spatial.")
if(is.null(Data[["longitude"]]))
stop("Variable longitude is required in Data.")
if(is.null(Data[["latitude"]]))
stop("Variable latitude is required in Data.")
heat <- (1-(x[["y"]][Rows]-min(x[["y"]][Rows], na.rm=TRUE)) /
max(x[["y"]][Rows]-min(x[["y"]][Rows], na.rm=TRUE), na.rm=TRUE)) * 99 + 1
plot(Data[["longitude"]][Rows], Data[["latitude"]][Rows],
col=heat.colors(120)[heat],
pch=16, cex=0.75, xlab="Longitude", ylab="Latitude",
main="Spatial Plot", sub="Red=High, Yellow=Low")}
if(Style == "Spatial Uncertainty") {
if(PDF == TRUE) pdf("PPC.Plots.Spatial.Unc.pdf")
par(mfrow=c(1,1))
if(is.null(Data))
stop("Data is required for Style=Spatial Uncertainty.")
if(is.null(Data[["longitude"]]))
stop("Variable longitude is required in Data.")
if(is.null(Data[["latitude"]]))
stop("Variable latitude is required in Data.")
heat <- apply(x[["yhat"]], 1, quantile, probs=c(0.025,0.975))
heat <- heat[2,] - heat[1,]
heat <- (1-(heat[Rows]-min(heat[Rows])) /
max(heat[Rows]-min(heat[Rows]))) * 99 + 1
plot(Data[["longitude"]][Rows], Data[["latitude"]][Rows],
col=heat.colors(120)[heat],
pch=16, cex=0.75, xlab="Longitude", ylab="Latitude",
main="Spatial Uncertainty Plot",
sub="Red=High, Yellow=Low")}
if(Style == "Time-Series") {
if(PDF == TRUE) pdf("PPC.Plots.TimeSeries.pdf")
par(mfrow=c(1,1))
temp <- summary(x, Quiet=TRUE)$Summary
plot(Rows, temp[Rows,1],
ylim=c(min(temp[Rows,c(1,4)], na.rm=TRUE),
max(temp[Rows,c(1,6)], na.rm=TRUE)),
type="l", xlab="Time", ylab="y",
main="Plot of Fitted Time-Series",
sub="Actual=Black, Fit=Red, Interval=Transparent Red")
polygon(c(Rows,rev(Rows)),c(temp[Rows,4],rev(temp[Rows,6])),
col=rgb(255, 0, 0, 50, maxColorValue=255),
border=FALSE)
lines(Rows, temp[Rows,1])
lines(Rows, temp[Rows,5], col="red")}
if(Style == "Time-Series, Multivariate, C") {
if(PDF == TRUE) {
pdf("PPC.Plots.TimeSeries.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Time-Series, Multivariate.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required in Data.")
temp <- summary(x, Quiet=TRUE)$Summary
for (i in 1:ncol(Data[["Y"]])) {
tempy <- matrix(temp[Rows,1], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i]
qLB <- matrix(temp[Rows,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i]
qMed <- matrix(temp[Rows,5], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i]
qUB <- matrix(temp[Rows,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i]
plot(1:length(tempy), tempy,
ylim=c(min(Data[["Y"]][,i],
matrix(temp[Rows,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i], na.rm=TRUE),
max(Data[["Y"]][,i],
matrix(temp[Rows,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[,i], na.rm=TRUE)),
type="l", xlab="Time", ylab="y",
main=paste("Time-Series ", i, " of ", ncol(Data[["Y"]]), sep=""),
sub="Actual=Black, Fit=Red, Interval=Transparent Red")
polygon(c(1:length(tempy),rev(1:length(tempy))),c(qLB,rev(qUB)),
col=rgb(255, 0, 0, 50, maxColorValue=255),
border=FALSE)
lines(1:length(tempy), tempy)
lines(1:length(tempy), qMed, col="red")}}
if(Style == "Time-Series, Multivariate, R") {
if(PDF == TRUE) {
pdf("PPC.Plots.TimeSeries.pdf")
par(mfrow=c(1,1))}
else par(mfrow=c(1,1), ask=TRUE)
if(is.null(Data))
stop("Data is required for Style=Time-Series, Multivariate.")
if(is.null(Data[["Y"]]))
stop("Variable Y is required in Data.")
temp <- summary(x, Quiet=TRUE)$Summary
for (i in 1:nrow(Data[["Y"]])) {
tempy <- matrix(temp[Rows,1], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,]
qLB <- matrix(temp[Rows,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,]
qMed <- matrix(temp[Rows,5], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,]
qUB <- matrix(temp[Rows,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,]
plot(1:length(tempy), tempy,
ylim=c(min(Data[["Y"]][i,],
matrix(temp[Rows,4], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,], na.rm=TRUE),
max(Data[["Y"]][i,],
matrix(temp[Rows,6], nrow(Data[["Y"]]),
ncol(Data[["Y"]]))[i,], na.rm=TRUE)),
type="l", xlab="Time", ylab="y",
main=paste("Time-Series ", i, " of ", nrow(Data[["Y"]]), sep=""),
sub="Actual=Black, Fit=Red, Interval=Transparent Red")
polygon(c(1:length(tempy),rev(1:length(tempy))),c(qLB,rev(qUB)),
col=rgb(255, 0, 0, 50, maxColorValue=255),
border=FALSE)
lines(1:length(tempy), tempy)
lines(1:length(tempy), qMed, col="red")}}
if(PDF == TRUE) dev.off()
}
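# Hedged usage sketch ('ppc' is a placeholder object of class "pmc.ppc" holding y and
# yhat, e.g. from a posterior predictive check of a fitted model):
# plot(ppc, Style="Density", Rows=1:9)
# plot(ppc, Style="Fitted", PDF=TRUE)   # writes PPC.Plots.Fitted.pdf and closes the device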
|
update_record <-
function(updates = NULL,
dimension,
old,
values = vector()) {
UseMethod("update_record")
}
update_record.record_update_set <-
function(updates = NULL,
dimension,
old,
values = vector()) {
stopifnot(!is_role_playing_dimension(dimension))
dim_txt <- dimension
dim_txt[, -1] <- prepare_join(dim_txt[, -1])
old_values <- unlist(dim_txt[old,-1])
stopifnot(length(old_values) == length(values))
names(values) <- names(old_values)
dru <- new_record_update(
dimension = attr(dimension, "name"),
old = old_values,
new = values
)
class <- class(updates)
updates <- c(updates, list(dru))
class(updates) <- class
updates
}
|
radial <- galbraith <- function(x, ...)
UseMethod("radial")
|
library(vdiffr)
df <- data.frame(x = 1:10, y = 21:30,
a = rep(c("g1","g2"), 5),
b = rep(c("t1","t2"), each = 5))
p <- ggplot(df, aes(x, y)) +
geom_point()
test_that("base plot did not change",{
expect_doppelganger("base plot", p)
})
px <- p + geom_xsidecol()
py <- p + geom_ysidecol()
test_that("ggside x-axis plotting",{
expect_doppelganger("xside top", px)
pxb <- px + ggside(x.pos = "bottom")
expect_doppelganger("xside bottom", pxb)
expect_doppelganger("xside top-pos-top", px + scale_x_continuous(position = "top"))
expect_doppelganger("xside bot-pos-top", pxb + scale_x_continuous(position = "top"))
expect_doppelganger("xside top-pos-top-wrap", px + scale_x_continuous(position = "top") + facet_wrap(a~b))
expect_doppelganger("xside bot-pos-top-grid", pxb + scale_x_continuous(position = "top") + facet_grid(vars(a), vars(b)))
expect_doppelganger("xside top-noaxis", px + theme(axis.text.x = element_blank()))
})
test_that("ggside y-axis plotting",{
expect_doppelganger("yside right", py)
pyl <- py + ggside(y.pos = "left")
expect_doppelganger("yside left", pyl)
expect_doppelganger("yside right-pos-right", py + scale_y_continuous(position = "right"))
expect_doppelganger("yside left-pos-right", pyl + scale_y_continuous(position = "right"))
expect_doppelganger("yside right-pos-right-wrap", py + scale_y_continuous(position = "right") + facet_wrap(a~b))
expect_doppelganger("yside left-pos-right-grid", pyl + scale_y_continuous(position = "right") + facet_grid(vars(a), vars(b)))
expect_doppelganger("yside right-noaxis", py + theme(axis.text.y = element_blank()))
})
pxy <- p + geom_xsidecol() + geom_ysidecol()
test_that("ggside xy-axis plotting", {
expect_doppelganger("xyside", pxy)
pxy_l <- pxy + ggside(y.pos = "left")
pxy_b <- pxy + ggside(x.pos = "bottom")
expect_doppelganger("xyside yl", pxy_l)
expect_doppelganger("xyside yl-pos-right", pxy_l + scale_y_continuous(position = "right"))
expect_doppelganger("xyside xb", pxy_b)
expect_doppelganger("xyside xb-pos-top", pxy_b + scale_x_continuous(position = "top"))
pxy_lb <- pxy + ggside(y.pos = "left", x.pos = "bottom")
expect_doppelganger("xyside lb", pxy_lb)
expect_doppelganger("xyside lb-pos-swap", pxy_lb + scale_x_continuous(position = "top")+scale_y_continuous(position = "right"))
expect_doppelganger("xyside no-x-text", pxy + theme(axis.text.x = element_blank()))
expect_doppelganger("xyside no-y-text", pxy + theme(axis.text.y = element_blank()))
expect_doppelganger("xyside facet-Grid", pxy + facet_grid(vars(a), vars(b)))
})
|
library(shiny)
library(shinydashboard)
library(mapdeck)
library(sf)
ui <- dashboardPage(
dashboardHeader()
, dashboardSidebar()
, dashboardBody(
mapdeckOutput(
outputId = "map"
, height = "600"
)
, actionButton(
inputId = "btn"
, label = "button"
)
)
)
server <- function( input, output ) {
set_token( Sys.getenv("MAPBOX") )
output$map <- renderMapdeck({
mapdeck( style = mapdeck_style("dark"), location = c(145, -37.8), zoom = 12 )
})
observeEvent({input$btn},{
r <- sample(1:nrow(roads), size = nrow(roads))
df <- roads[r, ]
df$rand_colour <- rnorm(n = nrow(df))
df$rand_width <- sample(1:2, size = nrow(df), replace = T)
mapdeck_update(map_id = "map") %>%
add_path(
data = df
, stroke_colour = "rand_colour"
, stroke_width = "rand_width"
, transitions = list(
path = 2000
, stroke_colour = 1000
, stroke_width = 1000
)
, update_view = FALSE
)
})
}
shinyApp(ui, server)
|
FinScan <- function(index_clusters_temp, index, filtering_post, type_minimaxi_post, mini_post, maxi_post, nb_sites, matrix_clusters, radius, areas, centres, pvals, maximize = TRUE){
if(length(index_clusters_temp)>0){
if(maximize == TRUE){
ordre <- order(index[index_clusters_temp], decreasing = TRUE)
}else{
ordre <- order(index[index_clusters_temp], decreasing = FALSE)
}
index_clusters_temp <- index_clusters_temp[ordre]
if(filtering_post == TRUE){
if(type_minimaxi_post == "sites/indiv"){
index_clusters <- post_filt_nb_sites(mini_post, maxi_post, nb_sites, index_clusters_temp, matrix_clusters)
}
if(type_minimaxi_post == "radius"){
index_clusters <- post_filt_radius(mini_post, maxi_post, radius, index_clusters_temp)
}
if(type_minimaxi_post == "area"){
index_clusters <- post_filt_area(mini_post, maxi_post, areas, index_clusters_temp)
}
}else{
index_clusters <- index_clusters_temp
}
}else{
index_clusters <- index_clusters_temp
}
final_index <- non_overlap(index_clusters, matrix_clusters)
pval_clusters <- pvals[final_index]
sites_clusters <- lapply(final_index, function(j) which(matrix_clusters[,j]==1))
if(sum(is.na(centres)) == 0){
centres_clusters <- centres[final_index,,drop = FALSE]
}else{
centres_clusters <- NA
}
if(sum(is.na(radius)) == 0){
radius_clusters <- radius[final_index]
}else{
radius_clusters <- NA
}
if(sum(is.na(areas)) == 0){
areas_clusters <- areas[final_index]
}else{
areas_clusters <- NA
}
return(list(pval_clusters = pval_clusters, sites_clusters = sites_clusters,
centres_clusters = centres_clusters, radius_clusters = radius_clusters,
areas_clusters = areas_clusters))
}
|
agg2monthly <- function(df, col_name, fun, allow_na = 0){
date_format <- '%Y-%m'
date_agg <- format(df[ , 1], format = date_format)
date_unique <- unique(date_agg)
date_out <- as.Date( paste0(date_unique, '-01'), format = '%Y-%m-%d' )
n_it <- length(date_out)
n_col <- length(col_name)
mat_in <- as.matrix( df[ , col_name, drop = FALSE] )
mat_out <- matrix(NA_real_, nrow = n_it, ncol = n_col)
fun_string <- c('sum', 'max', 'min', 'mean', 'first', 'last')
col_group <- match(x = fun, table = fun_string)
group_unique <- unique(col_group)
n_group <- length(group_unique)
for(i in 1:n_it){
row_index <- which(date_agg == date_unique[i])
for(j in 1:n_group){
col_index <- which(fun == fun_string[ group_unique[j] ])
sub_matrix <- mat_in[row_index, col_index, drop = FALSE]
mat_out[i, col_index] <- col_stats_cpp(x = sub_matrix, stats = fun_string[ group_unique[j] ], allow_na = allow_na)
rm(col_index, sub_matrix, j)
}
rm(i, row_index)
}
return( data.frame(Date = date_out, mat_out) )
}
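# Hedged usage sketch (assumes a daily data frame whose first column is a Date and
# that the internal col_stats_cpp() helper is available):
# daily <- data.frame(Date = seq(as.Date("2020-01-01"), as.Date("2020-03-31"), by = "day"),
#                     prcp = runif(91), tmax = rnorm(91, 20))
# agg2monthly(daily, col_name = c("prcp", "tmax"), fun = c("sum", "mean"), allow_na = 2)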
|
require('dynr')
data("LinearOsc")
n = length(unique(LinearOsc$ID))
T = max(table(LinearOsc$ID))
out2 = matrix(LinearOsc$x,ncol=n,byrow=FALSE)
theTimes = LinearOsc$theTimes[1:T]
norder = 6
roughPenaltyMax = 4
lambdaLow = 1e-10
lambdaHi = 2
lambdaBy = .1
isPlot = 1
matt = plotGCV(theTimes,norder,roughPenaltyMax,out2,lambdaLow, lambdaHi,lambdaBy,isPlot)
sp = matt[matt[,"GCV"] == min(matt[,"GCV"]),"lambda"]
x = getdx(theTimes,norder,roughPenaltyMax,sp,out2,0)[[1]]
dx = getdx(theTimes,norder,roughPenaltyMax,sp,out2,1)[[1]]
d2x = getdx(theTimes,norder,roughPenaltyMax,sp,out2,2)[[1]]
dxall = data.frame(time = rep(theTimes,n),
x = matrix(x,ncol=1,byrow=FALSE),
dx = matrix(dx,ncol=1,byrow=FALSE),
d2x = matrix(d2x,ncol=1,byrow=FALSE))
g = lm(d2x~x+dx-1,data=dxall)
oldpar <- par(mgp=c(2.5,0.5,0))
car::crPlots(g,terms=~x,
main=c(""),layout=c(1,1),
cex.lab=1.3,cex.axis=1.2,
xlab=expression(hat(eta)[i](t)),
ylab=expression(paste("Component+Residuals ", " ",d^2,hat(eta)[i](t)/dt^2))
)
par(oldpar)
oldpar <- par(mgp=c(2.5,0.5,0))
car::crPlots(g,terms=~dx,
main=c(""),layout=c(1,1),
cex.lab=1.3,cex.axis=1.2,
xlab=expression(paste(d, hat(eta)[i](t)/dt)),
ylab=expression(paste("Component+Residuals ", " ",d^2,hat(eta)[i](t)/dt^2))
)
par(oldpar)
g2 = lm(d2x~x+dx+x:dx-1,data=dxall)
summary(g2)
theta_plot(g2, predictor = "x", moderator = "dx",
alpha = .05, jn = T, title0=" ",
predictorLab = "x", moderatorLab = "dx")
Osc <- function(t, y, parameters) {
dy <- numeric(2)
dy[1] <- y[2]
dy[2] <- parameters[1]*y[1]+parameters[2]*dy[1]
return(list(dy))
}
param <- coef(g)
dynr.flowField(Osc, xlim = c(-3, 3),
ylim = c(-3, 3),
xlab="x", ylab="dx/dt",
main=paste0("Oscillator model"),
cex.main=2,
parameters = param,
points = 15, add = FALSE,
col="blue",
arrow.type="proportional",
arrow.head=.05)
IC <- matrix(c(-2, -2), ncol = 2, byrow = TRUE)
dynr.trajectory(Osc, y0 = IC,
parameters = param,tlim=c(0,50))
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
knitr::include_graphics('./bloodfeeding.png')
knitr::include_graphics('./bloodfeeding_DWD.png')
|
NULL
lgl <- function(...) {
.Call(ffi_squash, dots_values(...), "logical", is_spliced_bare, 1L)
}
int <- function(...) {
.Call(ffi_squash, dots_values(...), "integer", is_spliced_bare, 1L)
}
dbl <- function(...) {
.Call(ffi_squash, dots_values(...), "double", is_spliced_bare, 1L)
}
cpl <- function(...) {
.Call(ffi_squash, dots_values(...), "complex", is_spliced_bare, 1L)
}
chr <- function(...) {
.Call(ffi_squash, dots_values(...), "character", is_spliced_bare, 1L)
}
bytes <- function(...) {
dots <- map(dots_values(...), function(dot) {
if (is_bare_list(dot) || is_spliced(dot)) {
map(dot, cast_raw)
} else {
cast_raw(dot)
}
})
.Call(ffi_squash, dots, "raw", is_spliced_bare, 1L)
}
NULL
new_logical <- function(n, names = NULL) {
set_names(rep_len(na_lgl, n), names)
}
new_integer <- function(n, names = NULL) {
set_names(rep_len(na_int, n), names)
}
new_double <- function(n, names = NULL) {
set_names(rep_len(na_dbl, n), names)
}
new_character <- function(n, names = NULL) {
set_names(rep_len(na_chr, n), names)
}
new_complex <- function(n, names = NULL) {
set_names(rep_len(na_cpl, n), names)
}
new_raw <- function(n, names = NULL) {
set_names(vector("raw", n), names)
}
new_list <- function(n, names = NULL) {
set_names(vector("list", n), names)
}
rep_along <- function(along, x) {
rep_len(x, length(along))
}
rep_named <- function(names, x) {
names <- names %||% chr()
check_character(names, what = "`NULL` or a character vector")
set_names(rep_len(x, length(names)), names)
}
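# Hedged usage sketch of the constructors above:
# new_integer(3, names = c("a", "b", "c"))   # named integer vector filled with NA
# rep_along(letters[1:4], 0L)                # 0L repeated to length 4
# rep_named(c("x", "y"), 0)                  # c(x = 0, y = 0)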
|
scan_project_files <- function(project_dir=".", scan_r_only=FALSE, scan_rnw_with_knitr=TRUE, scan_rprofile=TRUE)
{
if(!scan_r_only)
{
if(!requireNamespace("knitr", quietly=TRUE))
warning("The knitr package must be installed to scan Rmarkdown-based files", call.=FALSE)
}
r_pat <- if(scan_r_only)
"\\.r$"
else "\\.(r|rnw|rmd|rpres|rhtml)$"
r_files <- dir(project_dir, pattern=r_pat, recursive=TRUE, ignore.case=TRUE, full.names=TRUE)
if(scan_rprofile && file.exists("~/.Rprofile"))
r_files <- c(r_files, "~/.Rprofile")
pkgs <- character(0)
errors <- character(0)
for(f in r_files)
{
scan_deps <- switch(tolower(tools::file_ext(f)),
"r"=scan_r,
"rmd"=, "rpres"=, "rhtml"=scan_rmd,
"rnw"=if(scan_rnw_with_knitr) scan_rmd else scan_rnw,
scan_r
)
res <- try(scan_deps(f), silent=TRUE)
if(inherits(res, "try-error"))
errors <- c(errors, f)
else pkgs <- c(pkgs, res)
}
if(length(errors) > 0)
warning("Following files could not be scanned:\n", paste(errors, collapse="\n"), call.=FALSE)
mft <- read_manifest(project_dir)
exclude <- c(
"checkpoint",
"pkgdepends",
c("base", "compiler", "datasets", "graphics", "grDevices", "grid",
"methods", "parallel", "splines", "stats", "stats4", "tcltk",
"tools", "utils")
)
any_rmd <- any(grepl("\\.(rmd|rpres|rhtml)$", r_files, ignore.case=TRUE))
pkgs <- setdiff(unique(c(pkgs, mft$refs, if(any_rmd) "rmarkdown")), c(mft$exclude, exclude))
list(pkgs=pkgs, errors=errors)
}
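# Hedged usage sketch (assumes a project directory containing .R/.Rmd files):
# deps <- scan_project_files(project_dir = ".")
# deps$pkgs     # packages referenced via library()/require() or ::/::: calls
# deps$errors   # files that could not be parsed or knitted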
scan_rmd <- function(filename)
{
tempfile <- tempfile(fileext=".R")
on.exit(unlink(tempfile))
suppressWarnings(knitr::knit(filename, output=tempfile, tangle=TRUE, quiet=TRUE))
scan_r(tempfile)
}
scan_rnw <- function(filename)
{
tempfile <- tempfile(fileext=".R")
on.exit(unlink(tempfile))
suppressWarnings(utils::Stangle(filename, output=tempfile, quiet=TRUE))
scan_r(tempfile)
}
scan_r <- function(filename)
{
exprs <- parse(filename)
deps <- character(0)
for(e in exprs)
{
dep_e <- find_dependencies(e)
if(length(dep_e) > 0)
deps <- c(deps, dep_e)
}
unique(deps)
}
find_dependencies <- function(e)
{
if(is.atomic(e) || is.name(e))
return(character(0))
if(is.call(e))
{
fname <- as.character(e[[1]])
if(length(fname) == 1)
{
if(fname %in% c("library", "require"))
{
mc <- match.call(get(fname, baseenv()), e)
return(as.character(mc$package))
}
else return(unique(unlist(lapply(as.list(e[-1]), find_dependencies))))
}
else if(fname[1] %in% c("::", ":::"))
return(fname[2])
else return(character(0))
}
}
read_manifest <- function(project_dir)
{
mft_file <- file.path(project_dir, "checkpoint.yml")
if(!file.exists(mft_file))
mft_file <- file.path(project_dir, "checkpoint.yaml")
if(!file.exists(mft_file))
return(NULL)
mft <- yaml::read_yaml(mft_file)
mft[c("refs", "exclude")]
}
|
coef.fuzzylm <- function(object, complete = TRUE, ...){
xvars = colnames(object$x)
yvars = colnames(object$y)
if(is.null(yvars)) yvars = all.vars(object$call)[1]
n = length(xvars)
s = object$coef[,1]
l = object$coef[,2]
r = object$coef[,3]
res = matrix(c(s, s - l, s + r), ncol = 3,
dimnames = list(xvars, c("central tendency", "lower boundary", "upper boundary")), ...)
res
}
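# Hedged usage sketch (assumes a fuzzy linear model fitted elsewhere; 'fit' is a
# placeholder object of class "fuzzylm"):
# coef(fit)   # matrix with the central tendency and lower/upper boundary per coefficient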
|
context("point")
test_that("convert point works", {
empty <- point("empty")
expect_is(empty, "character")
expect_equal(empty, "POINT EMPTY")
pt_a <- point(-116.4, 45.2)
pt_b <- point(0, 1)
expect_is(pt_a, "character")
expect_is(pt_b, "character")
expect_match(pt_a, "POINT")
expect_match(pt_b, "POINT")
expect_equal(point(-116.4, 45.2), "POINT (-116.4000000000000057 45.2000000000000028)")
expect_equal(point(-116.4, 45.2, fmt = 1), "POINT (-116.4 45.2)")
df <- data.frame(lon = -116.4, lat = 45.2)
df_a <- point(df, fmt = 2)
expect_is(df_a, "character")
expect_is(df_a[1], "character")
expect_equal(df_a[1], "POINT (-116.40 45.20)")
df2 <- us_cities[1:5,c('lat','long')]
df_b <- point(df2)
expect_is(df_b, "character")
expect_is(df_b[1], "character")
expect_equal(df_b[1], "POINT (32.4500000000000028 -99.7399999999999949)")
ussmall <- us_cities[1:5, ]
df <- data.frame(long = ussmall$long, lat = ussmall$lat)
mat <- matrix(c(df$long, df$lat), ncol = 2)
ptmat <- point(mat, fmt = 0)
expect_is(ptmat[1], "character")
expect_is(ptmat[2], "character")
expect_equal(ptmat[1], "POINT (-99.74 32.45)")
ls_a <- point(list(c(100.0, 3.1)), fmt = 2)
expect_is(ls_a, "character")
expect_is(ls_a[1], "character")
expect_equal(ls_a[1], "POINT (100.00 3.10)")
})
test_that("point fails correctly", {
expect_error(point(-116.4), "POINT input should be of length 2")
expect_error(point(), "no applicable method")
expect_error(point(NA), "no applicable method")
expect_error(point("a", "Adf"), "The following strings are not WKT")
})
|
harmony <- function(.data,
ugran = "year",
lgran = NULL,
hierarchy_tbl = NULL,
filter_in = NULL,
filter_out = NULL,
facet_h = NULL, ...) {
set1 <- search_gran(.data,
lowest_unit = lgran,
highest_unit = ugran,
hierarchy_tbl,
filter_in,
filter_out,
...)
if (is.null(facet_h)) {
facet_h <- 31
}
if (length(set1) == 1) {
stop("Only one granularity ", set1, " can be formed. Function requires checking compatibility for bivariate granularities")
}
set1_merge <- merge(set1, set1) %>%
tibble::as_tibble(.name_repair = "minimal") %>%
dplyr::filter(x != y) %>%
purrr::map_dfr(as.character)
data_mutate <- .data
for (i in seq_along(set1)){
data_mutate <- data_mutate %>% create_gran(set1[i], hierarchy_tbl)
}
ilevel <- array()
for (i in seq_along(set1))
{
ilevel[i] <- data_mutate %>%
dplyr::distinct(.data[[set1[[i]]]]) %>%
nrow()
}
levels_tbl <- tibble::tibble(set1, ilevel, .name_repair = "minimal")
har_data <- array(0, nrow(set1_merge))
for (i in seq_len(nrow(set1_merge)))
{
har_data[i] <- is_harmony(.data, gran1 = set1_merge$x[i], gran2 = set1_merge$y[i], hierarchy_tbl, facet_h = facet_h)
}
return_output <- set1_merge %>%
dplyr::mutate(harmony = har_data) %>%
dplyr::filter(harmony == "TRUE") %>%
dplyr::rename(facet_variable = x, x_variable = y) %>%
dplyr::left_join(levels_tbl, by = c("facet_variable" = "set1")) %>%
dplyr::left_join(levels_tbl, by = c("x_variable" = "set1")) %>%
dplyr::rename("facet_levels" = "ilevel.x", "x_levels" = "ilevel.y") %>%
dplyr::select(-harmony)
return(return_output)
}
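# Hedged usage sketch ('df_ts' is a placeholder tsibble with a date-time index):
# harmony(df_ts, ugran = "month", lgran = "hour")
# # returns compatible (facet_variable, x_variable) granularity pairs with their level counts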
|
PKNCAconc <- function(data, ...)
UseMethod("PKNCAconc")
PKNCAconc.default <- function(data, ...)
PKNCAconc.data.frame(as.data.frame(data), ...)
PKNCAconc.tbl_df <- function(data, ...)
PKNCAconc.data.frame(as.data.frame(data), ...)
PKNCAconc.data.frame <- function(data, formula, subject,
time.nominal, exclude, duration, volume,
exclude_half.life, include_half.life, ...) {
if (nrow(data) == 0) {
stop("data must have at least one row.")
}
if (!all(all.vars(formula) %in% names(data))) {
stop("All of the variables in the formula must be in the data")
}
parsedForm <- parseFormula(formula, require.two.sided=TRUE)
if (length(all.vars(parsedForm$lhs)) != 1)
stop("The left hand side of the formula must have exactly one variable")
if (length(all.vars(parsedForm$rhs)) != 1)
stop("The right hand side of the formula (excluding groups) must have exactly one variable")
check.conc.time(
conc=data[[as.character(parsedForm$lhs)]],
time=data[[as.character(parsedForm$rhs)]],
monotonic.time=FALSE
)
key.cols <- c(all.vars(parsedForm$rhs),
all.vars(parsedForm$groupFormula))
if (any(mask.dup <- duplicated(data[,key.cols])))
stop("Rows that are not unique per group and time (column names: ",
paste(key.cols, collapse=", "),
") found within concentration data. Row numbers: ",
paste(seq_along(mask.dup)[mask.dup], collapse=", "))
if (missing(subject)) {
tmp.groups <- all.vars(parsedForm$groupFormula)
if (length(tmp.groups) == 1) {
subject <- tmp.groups
} else {
subject <- all.vars(findOperator(parsedForm$groupFormula,
"/",
side="left"))
if (length(subject) == 0) {
subject <- tmp.groups[length(tmp.groups)]
} else if (length(subject) == 1) {
} else {
stop("Unknown how to handle subject definition from the formula")
}
}
} else {
if (!is.character(subject))
stop("subject must be a character string")
if (!(length(subject) == 1))
stop("subject must be a scalar")
if (!(subject %in% names(data)))
stop("The subject parameter must map to a name in the data")
}
ret <- list(data=data,
formula=formula,
subject=subject)
class(ret) <- c("PKNCAconc", class(ret))
if (missing(exclude)) {
ret <- setExcludeColumn(ret)
} else {
ret <- setExcludeColumn(ret, exclude=exclude)
}
if (missing(volume)) {
ret <- setAttributeColumn(ret, attr_name="volume", default_value=NA_real_)
} else {
ret <- setAttributeColumn(ret, attr_name="volume", col_or_value=volume)
if (!is.numeric(getAttributeColumn(ret, attr_name="volume")[[1]])) {
stop("Volume must be numeric")
}
}
if (missing(duration)) {
ret <- setDuration.PKNCAconc(ret)
} else {
ret <- setDuration.PKNCAconc(ret, duration=duration)
}
if (!missing(time.nominal)) {
ret <-
setAttributeColumn(object=ret,
attr_name="time.nominal",
col_name=time.nominal)
}
if (!missing(exclude_half.life)) {
ret <-
setAttributeColumn(object=ret,
attr_name="exclude_half.life",
col_name=exclude_half.life)
}
if (!missing(include_half.life)) {
ret <-
setAttributeColumn(object=ret,
attr_name="include_half.life",
col_name=include_half.life)
}
ret
}
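# Hedged usage sketch (a minimal long-format concentration data set; the column
# names are placeholders):
# d_conc <- data.frame(conc = c(0, 2, 1), time = 0:2, subject = 1)
# PKNCAconc(d_conc, formula = conc ~ time | subject)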
formula.PKNCAconc <- function(x, ...)
x$formula
model.frame.PKNCAconc <- function(formula, ...)
formula$data[, all.vars(formula$formula), drop=FALSE]
getDepVar.PKNCAconc <- function(x, ...) {
x$data[, all.vars(parseFormula(x)$lhs)]
}
getIndepVar.PKNCAconc <- function(x, ...) {
x$data[, all.vars(parseFormula(x)$rhs)]
}
getGroups.PKNCAconc <- function(object, form=formula(object), level,
data=getData(object), sep) {
grpnames <- all.vars(parseFormula(form)$groups)
if (!missing(level))
if (is.factor(level) | is.character(level)) {
level <- as.character(level)
if (any(!(level %in% grpnames)))
stop("Not all levels are listed in the group names. Missing levels are: ",
paste(setdiff(level, grpnames), collapse=", "))
grpnames <- level
} else if (is.numeric(level)) {
if (length(level) == 1 &&
level > 0) {
grpnames <- grpnames[1:level]
} else {
grpnames <- grpnames[level]
}
}
data[, grpnames, drop=FALSE]
}
group_vars.PKNCAconc <- function(x) {
all.vars(parseFormula(as.formula(x))$groups)
}
getData.PKNCAconc <- function(object)
object$data
getDataName.PKNCAconc <- function(object)
"data"
setDuration.PKNCAconc <- function(object, duration, ...) {
if (missing(duration)) {
object <-
setAttributeColumn(object=object, attr_name="duration", default_value=0,
message_if_default="Assuming point rather than interval concentration measurement")
} else {
object <-
setAttributeColumn(object=object, attr_name="duration", col_or_value=duration)
}
duration.val <- getAttributeColumn(object=object, attr_name="duration")[[1]]
if (is.numeric(duration.val) &&
!any(is.na(duration.val)) &&
!any(is.infinite(duration.val)) &&
all(duration.val >= 0)) {
} else {
stop("duration must be numeric without missing (NA) or infinite values, and all values must be >= 0")
}
object
}
print.PKNCAconc <- function(x, n=6, summarize=FALSE, ...) {
cat(sprintf("Formula for concentration:\n "))
print(stats::formula(x), ...)
if (is.na(x$subject) || (length(x$subject) == 0)) {
cat("As a single-subject dataset.\n")
} else {
cat(sprintf("With %d subjects defined in the '%s' column.\n",
length(unique(x$data[,x$subject])),
x$subject))
}
if ("time.nominal" %in% names(x)) {
cat("Nominal time column is: ", x$time.nominal, "\n", sep="")
} else {
cat("Nominal time column is not specified.\n")
}
if (summarize) {
cat("\n")
grp <- getGroups(x)
if (ncol(grp) > 0) {
tmp.summary <- data.frame(Group.Name=names(grp),
Count=0)
for (i in 1:ncol(grp))
tmp.summary$Count[i] <- nrow(unique(grp[,1:i,drop=FALSE]))
cat("Group summary:\n")
names(tmp.summary) <- gsub("\\.", " ", names(tmp.summary))
print.data.frame(tmp.summary, row.names=FALSE)
} else {
cat("No groups.\n")
}
}
if (n != 0) {
if (n >= nrow(x$data)) {
cat("\nData for concentration:\n")
} else if (n < 0) {
cat(sprintf("\nFirst %d rows of concentration data:\n",
nrow(x$data)+n))
} else {
cat(sprintf("\nFirst %d rows of concentration data:\n",
n))
}
print.data.frame(utils::head(x$data, n=n), ..., row.names=FALSE)
}
}
summary.PKNCAconc <- function(object, n=0, summarize=TRUE, ...) {
print.PKNCAconc(object, n=n, summarize=summarize)
}
|
setMethodS3("getGenericS3", "default", function(name, envir=parent.frame(), inherits=TRUE, ...) {
fcn <- .findFunction(name, envir=envir, inherits=inherits)$fcn
if (is.null(fcn)) {
stop("No such function found: ", name)
} else if (!isGenericS3(fcn)) {
stop("The function found is not an S3 generic function: ", name)
}
fcn
})
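# Hedged usage sketch (assumes R.methodsS3 is attached): look up an existing S3
# generic by name; a non-generic name triggers the second error branch above.
if (FALSE) {
  getGenericS3("print")        # print() dispatches via UseMethod(), so this succeeds
  try(getGenericS3("paste"))   # paste() is not an S3 generic, so this errors
}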
|
fake_matched <- function(id_loan = NULL,
loan_size_outstanding = NULL,
loan_size_outstanding_currency = NULL,
loan_size_credit_limit = NULL,
loan_size_credit_limit_currency = NULL,
id_2dii = NULL,
level = NULL,
score = NULL,
sector = NULL,
name_ald = NULL,
sector_ald = NULL,
...) {
tibble(
id_loan = id_loan %||% "L162",
loan_size_outstanding = loan_size_outstanding %||% 1,
loan_size_outstanding_currency = loan_size_outstanding_currency %||% "EUR",
loan_size_credit_limit = loan_size_credit_limit %||% 2,
loan_size_credit_limit_currency = loan_size_credit_limit_currency %||% "EUR",
id_2dii = id_2dii %||% "UP1",
level = level %||% "ultimate_parent",
score = score %||% 1,
sector = sector %||% "automotive",
name_ald = name_ald %||% "shaanxi auto",
sector_ald = sector_ald %||% "automotive",
...
)
}
fake_ald <- function(name_company = NULL,
sector = NULL,
technology = NULL,
year = NULL,
production = NULL,
emission_factor = NULL,
plant_location = NULL,
is_ultimate_owner = NULL,
...) {
tibble(
name_company = name_company %||% "shaanxi auto",
sector = sector %||% "automotive",
technology = technology %||% "ice",
year = year %||% 2025,
production = production %||% 1,
emission_factor = emission_factor %||% 1,
plant_location = plant_location %||% "BF",
is_ultimate_owner = is_ultimate_owner %||% TRUE,
...
)
}
fake_scenario <- function(scenario = NULL,
sector = NULL,
technology = NULL,
region = NULL,
year = NULL,
tmsr = NULL,
smsp = NULL,
scenario_source = NULL,
...) {
tibble(
scenario = scenario %||% "sds",
sector = sector %||% "automotive",
technology = technology %||% "ice",
region = region %||% "global",
year = year %||% 2025,
tmsr = tmsr %||% 0.5,
smsp = smsp %||% -0.08,
scenario_source = scenario_source %||% "demo_2020",
...
)
}
fake_co2_scenario <- function(scenario = NULL,
sector = NULL,
region = NULL,
year = NULL,
emission_factor = NULL,
emission_factor_unit = NULL,
scenario_source = NULL,
...) {
tibble(
scenario = scenario %||% "b2ds",
sector = sector %||% "cement",
region = region %||% "global",
year = year %||% 2025,
emission_factor = emission_factor %||% 0.6,
emission_factor_unit = emission_factor_unit %||% "tons of CO2 per ton of cement",
scenario_source = scenario_source %||% "demo_2020",
...
)
}
fake_master <- function(id_loan = NULL,
loan_size_outstanding = NULL,
loan_size_outstanding_currency = NULL,
loan_size_credit_limit = NULL,
loan_size_credit_limit_currency = NULL,
sector_ald = NULL,
name_ald = NULL,
technology = NULL,
year = NULL,
production = NULL,
plant_location = NULL,
scenario = NULL,
region = NULL,
tmsr = NULL,
smsp = NULL,
...) {
tibble(
id_loan = id_loan %||% "L162",
loan_size_outstanding = loan_size_outstanding %||% 1,
loan_size_outstanding_currency = loan_size_outstanding_currency %||% "EUR",
loan_size_credit_limit = loan_size_credit_limit %||% 2,
loan_size_credit_limit_currency = loan_size_credit_limit_currency %||% "EUR",
sector_ald = sector_ald %||% "automotive",
name_ald = name_ald %||% "shaanxi auto",
technology = technology %||% "ice",
year = year %||% 2025,
production = production %||% 1,
plant_location = plant_location %||% "BF",
scenario = scenario %||% "sds",
region = region %||% "global",
tmsr = tmsr %||% 0.5,
smsp = smsp %||% -0.08,
...
)
}
|
NULL
cat_sim <- function(true, pool, ...){
if(!is.data.frame(pool)) pool <- as.data.frame(pool, stringsAsFactors=FALSE)
if(!all(c("a", "b", "c") %in% colnames(pool))) stop("cannot find a-, b-, or c-parameters in item pool")
opts <- list(...)
if(is.null(opts$min)) stop("minimum length is missing")
if(is.null(opts$max)) stop("maximum length is missing")
if(opts$min < 0 || opts$min > opts$max) stop("invalid min/max length values: ", opts$min, " -- ", opts$max)
if(nrow(pool) < opts$max) stop("insufficient items in item pool: ", nrow(pool))
theta <- ifelse(is.null(opts$theta), 0, opts$theta)
if(is.null(opts$D)) opts$D <- 1.702
if(is.null(opts$select_rule)) select_rule <- cat_select_maxinfo else select_rule <- opts$select_rule
if(is.null(opts$estimate_rule)) estimate_rule <- cat_estimate_mle else estimate_rule <- opts$estimate_rule
if(is.null(opts$stop_rule)) stop_rule <- cat_stop_default else stop_rule <- opts$stop_rule
len <- 0
stats <- matrix(nrow=opts$max, ncol=4, dimnames=list(NULL, c("u", "t", "se", "info")))
admin <- NULL
while(len < opts$max){
selection <- select_rule(len, theta, stats, admin, pool, opts)
item <- selection$item
item <- item[0:min(nrow(item), opts$max - len), ]
pool <- selection$pool
n <- nrow(item)
len <- len + n
admin <- rbind(admin, item)
p <- model_3pl_prob(true, item$a, item$b, item$c, opts$D)[1, ]
u <- as.integer(p > runif(n))
stats[1:n + (len - n), "u"] <- u
theta <- estimate_rule(len, theta, stats, admin, pool, opts)
info <- sum(model_3pl_info(theta, admin$a, admin$b, admin$c, opts$D))
se <- 1 / sqrt(info)
stats[1:n + (len - n), "t"] <- theta
stats[1:n + (len - n), "se"] <- se
stats[1:n + (len - n), "info"] <- info
if(stop_rule(len, theta, stats, admin, pool, opts)) break
}
admin <- cbind(stats[1:len, ], admin)
rs <- list(pool=pool, admin=admin, true=true, theta=theta)
class(rs) <- "cat"
rs
}
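# Hedged usage sketch (assumes the package's model_3pl_* helpers used by
# cat_sim() are available): simulate one adaptive administration from a randomly
# generated 3PL pool. The parameter-generating distributions are illustrative
# assumptions, not prescriptions.
if (FALSE) {
  set.seed(1)
  pool <- data.frame(a = rlnorm(200, meanlog = 0, sdlog = 0.2),
                     b = rnorm(200),
                     c = runif(200, 0, 0.25))
  x <- cat_sim(true = 0.5, pool, min = 10, max = 30, stop_se = 0.3)
  print(x)   # uses print.cat() defined below
  plot(x)    # uses plot.cat() defined below
}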
cat_estimate_mle <- function(len, theta, stats, admin, pool, opts){
u <- stats[1:len, "u"]
u <- matrix(rep(u, each=2), nrow=2)
if(is.null(opts$map_len)) opts$map_len <- 10
if(is.null(opts$map_prior)) opts$map_prior <- c(0, 1)
if (len < opts$map_len) priors <- list(t=opts$map_prior) else priors <- NULL
with(admin, model_3pl_estimate_jmle(u=u, a=a[1:len], b=b[1:len], c=c[1:len], D=opts$D, scale=NULL, priors=priors))$t[1]
}
cat_estimate_eap <- function(len, theta, stats, admin, pool, opts){
eap_mean <- ifelse(is.null(opts$eap_mean), 0, opts$eap_mean)
eap_sd <- ifelse(is.null(opts$eap_sd), 1, opts$eap_sd)
u <- stats[1:len, "u"]
u <- matrix(rep(u, each=2), nrow=2)
with(admin, model_3pl_eap_scoring(u=u, a=a[1:len], b=b[1:len], c=c[1:len], D=opts$D))$t[1]
}
cat_estimate_hybrid <- function(len, theta, stats, admin, pool, opts){
u <- stats[1:len, "u"]
if(all(u==0) || all(u==1)){
theta <- cat_estimate_eap(len, theta, stats, admin, pool, opts)
} else {
theta <- cat_estimate_mle(len, theta, stats, admin, pool, opts)
}
theta
}
cat_stop_default <- function(len, theta, stats, admin, pool, opts){
if(len < opts$min) return(FALSE)
if(len > opts$max) return(TRUE)
if(!is.null(opts$stop_se)){
se <- stats[len, "se"]
return(se <= opts$stop_se)
} else if(!is.null(opts$stop_mi)){
info <- model_3pl_info(theta, pool$a, pool$b, pool$c, opts$D)[1, ]
return(max(info) <= opts$stop_mi)
} else if(!is.null(opts$stop_cut)){
se <- stats[len, "se"]
ci_width <- ifelse(is.null(opts$ci_width), qnorm(.975), opts$ci_width)
lb <- theta - ci_width * se
ub <- theta + ci_width * se
return(lb > opts$stop_cut || ub < opts$stop_cut)
}
FALSE
}
cat_select_maxinfo <- function(len, theta, stats, admin, pool, opts){
if(is.null(opts$group)) group <- 1:nrow(pool) else group <- pool[, opts$group]
info <- model_3pl_info(theta, pool$a, pool$b, pool$c, opts$D)[1, ]
info <- aggregate(info, by=list(group), mean)
colnames(info) <- c("group", "info")
random <- min(ifelse(is.null(opts$info_random), 1, opts$info_random), nrow(info))
  index <- info$group[order(-info$info)[1:random]]
if(length(index) > 1) index <- sample(index, 1)
index <- group %in% index
list(item=pool[index,], pool=pool[!index,])
}
cat_select_ccat <- function(len, theta, stats, admin, pool, opts){
  if(is.null(opts$ccat_var)) stop("ccat_var is missing")
if(is.null(opts$ccat_perc)) stop("ccat_perc is missing")
initial_random <- ifelse(is.null(opts$ccat_random), 0, opts$ccat_random)
info <- data.frame(id=1:nrow(pool), domain=pool[,opts$ccat_var])
info$info <- with(pool, model_3pl_info(theta, a, b, c, opts$D))[1, ]
if(len == 0) curr_perc <- rep(0, length(opts$ccat_perc)) else curr_perc <- freq(admin[1:len, opts$ccat_var], names(opts$ccat_perc))$perc
if(len < initial_random) domain <- sample(names(opts$ccat_perc), 1) else domain <- names(opts$ccat_perc)[which.max(opts$ccat_perc - curr_perc)]
info <- info[info$domain == domain, ]
random <- min(nrow(info), ifelse(is.null(opts$info_random), 1, opts$info_random))
index <- info$id[order(-info$info)[1:random]]
if(length(index) > 1) index <- sample(index, 1)
list(item=pool[index, ], pool=pool[-index, ])
}
cat_select_shadow <- function(len, theta, stats, admin, pool, opts){
if(!"shadow_id" %in% colnames(pool)) pool$shadow_id <- 1:nrow(pool)
if(is.null(opts$constraints)) stop("constraints is missing in the options")
  if(!all(colnames(opts$constraints) %in% c("var", "level", "min", "max")))
    stop("constraints should be a data.frame with 4 columns: var, level, min, and max")
if(is.factor(opts$constraints$var)) opts$constraints$var <- levels(opts$constraints$var)[opts$constraints$var]
if(is.factor(opts$constraints$level)) opts$constraints$level <- levels(opts$constraints$level)[opts$constraints$level]
x <- ata(pool, 1, len=c(opts$min, opts$max), 1)
x <- ata_obj_relative(x, theta, "max")
for(i in 1:nrow(opts$constraints))
x <- with(opts$constraints[i,], ata_constraint(x, var, min=min, max=max, level=level))
if(!is.null(admin)) x <- ata_item_fixedvalue(x, match(admin$shadow_id, pool$shadow_id), min=1, forms=1)
x <- ata_solve(x, as.list=FALSE, details=F)
if(is.null(x$items)) stop("Failed to assemble a shadow test")
x$items <- x$items[!x$items$shadow_id %in% admin$shadow_id, ]
info <- data.frame(id=x$items$shadow_id, info=with(x$items, model_3pl_info(theta, a, b, c, opts$D))[1,])
random <- min(nrow(info), ifelse(is.null(opts$info_random), 1, opts$info_random))
index <- info$id[order(-info$info)[1:random]]
if(length(index) > 1) index <- sample(index, 1)
list(item=pool[index, ], pool=pool)
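# Hedged sketch of the content-constraint input expected by cat_select_shadow()
# (and by cat_stop_projection() below): a data.frame with exactly the columns
# var, level, min, and max, assuming the pool carries a matching categorical
# column. The pool and constraint values here are illustrative only.
if (FALSE) {
  pool <- data.frame(a = rlnorm(200, 0, 0.2), b = rnorm(200),
                     c = runif(200, 0, 0.25),
                     content = sample(c("alg", "geo"), 200, replace = TRUE),
                     stringsAsFactors = FALSE)
  constraints <- data.frame(var = "content", level = c("alg", "geo"),
                            min = c(5, 5), max = c(15, 15),
                            stringsAsFactors = FALSE)
  x <- cat_sim(true = 0, pool, min = 20, max = 30,
               select_rule = cat_select_shadow, constraints = constraints)
}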
}
print.cat <- function(x, ...){
  if(!inherits(x, "cat")) stop("Not a 'cat' object.")
len <- nrow(x$admin)
cat("true=", round(x$true, 2), ", est.=", round(x$theta, 2),
", se=", round(x$admin$se[len], 2), ", p=", round(mean(x$admin$u), 2),
", used ", len, " items (", sum(x$admin$u)," correct).\n", sep="")
  cat("Below is a history of the CAT:\n")
if(len <= 10) {
print(x$admin)
} else {
print(x$admin[1:5, ])
cat("...\n")
print(x$admin[1:5 + len - 5, ])
}
invisible(x)
}
plot.cat <- function(x, ...){
  if(!inherits(x, "cat")) stop("Not a 'cat' object.")
opts <- list(...)
  if(is.null(opts$ylim)) opts$ylim <- c(-3, 3)
len <- nrow(x$admin)
x$admin$lb <- x$admin$t - 1.96 * x$admin$se
x$admin$ub <- x$admin$t + 1.96 * x$admin$se
x$admin$pos <- 1:len
x$admin$Responses <- factor(x$admin$u, levels=c(0, 1), labels=c("Wrong", "Right"))
ggplot(data=x$admin, aes_string(x="pos", y="t", color="Responses")) +
geom_point(aes_string(size="se")) +
geom_linerange(aes_string(ymin="lb", ymax="ub"), linetype=3) +
geom_point(aes(x=len, y=x$true), color="coral", pch=4, size=3) +
coord_cartesian(ylim=opts$ylim) + scale_size_continuous(range=c(1, 3)) +
xlab("Position") + ylab(expression(paste("Est. ", theta))) +
guides(size=F, alpha=F) + theme_bw() + theme(legend.key=element_blank())
}
cat_stop_projection <- function(len, theta, stats, admin, pool, opts){
if(len < opts$min) return(FALSE)
if(len >= opts$max) return(TRUE)
method <- match.arg(opts$projection_method, c('info', 'diff'))
if(is.null(opts$stop_cut)) stop('stop_cut is missing in the options')
if(is.null(opts$constraints)) stop("constraints is missing in the options")
  if(!all(colnames(opts$constraints) %in% c("var", "level", "min", "max")))
    stop("constraints should be a data.frame with 4 columns: var, level, min, and max")
if(is.factor(opts$constraints$var)) opts$constraints$var <- levels(opts$constraints$var)[opts$constraints$var]
if(is.factor(opts$constraints$level)) opts$constraints$level <- levels(opts$constraints$level)[opts$constraints$level]
pool <- unique(rbind(pool, admin))
if(method == 'info'){
x <- ata(pool, 1, len=opts$max, 1)
x <- ata_obj_relative(x, theta, "max")
for(i in 1:nrow(opts$constraints))
x <- with(opts$constraints, ata_constraint(x, var[i], min=min[i], max=max[i], level=level[i]))
x <- ata_item_fixedvalue(x, admin$shadow_id, min=1, forms=1)
x <- ata_solve(x, as.list=FALSE, details=F)
if(is.null(x$items)) stop("Failed to assemble a projection test")
u <- c(stats[1:len, "u"], rep(1, opts$max - len))
u <- matrix(rep(u, each=2), nrow=2)
theta_ub <- with(x$items, model_3pl_estimate_jmle(u, a=a, b=b, c=c, D=opts$D, scale=NULL, priors=NULL))$t[1]
u <- c(stats[1:len, "u"], rep(0, opts$max - len))
u <- matrix(rep(u, each=2), nrow=2)
theta_lb <- with(x$items, model_3pl_estimate_jmle(u, a=a, b=b, c=c, D=opts$D, scale=NULL, priors=NULL))$t[1]
} else if(method == 'diff'){
if(is.null(opts$proj_width)) opts$proj_width <- 1.96
x <- ata(pool, 1, len=opts$max, 1)
x <- ata_obj_absolute(x, "b", (theta + opts$proj_width * stats[len, "se"]) * opts$max)
for(i in 1:nrow(opts$constraints))
x <- with(opts$constraints, ata_constraint(x, var[i], min=min[i], max=max[i], level=level[i]))
x <- ata_item_fixedvalue(x, admin$shadow_id, min=1, forms=1)
x <- ata_solve(x, as.list=FALSE, details=F)
if(is.null(x$items)) stop("Failed to assemble a projection test")
u <- c(stats[1:len, "u"], rep(1, opts$max - len))
u <- matrix(rep(u, each=2), nrow=2)
theta_ub <- with(x$items, model_3pl_estimate_jmle(u, a=a, b=b, c=c, D=opts$D, scale=NULL, priors=NULL))$t[1]
x <- ata(pool, 1, len=opts$max, 1)
x <- ata_obj_absolute(x, "b", (theta - opts$proj_width * stats[len, "se"]) * opts$max)
for(i in 1:nrow(opts$constraints))
x <- with(opts$constraints, ata_constraint(x, var[i], min=min[i], max=max[i], level=level[i]))
x <- ata_item_fixedvalue(x, admin$shadow_id, min=1, forms=1)
x <- ata_solve(x, as.list=FALSE, details=F)
if(is.null(x$items)) stop("Failed to assemble a projection test")
u <- c(stats[1:len, "u"], rep(0, opts$max - len))
u <- matrix(rep(u, each=2), nrow=2)
theta_lb <- with(x$items, model_3pl_estimate_jmle(u, a=a, b=b, c=c, D=opts$D, scale=NULL, priors=NULL))$t[1]
}
(theta_lb > opts$stop_cut || theta_ub < opts$stop_cut)
}
|
if (interactive()) pkgload::load_all(".")
test_is_not_false <- function() {
f <- function() {
a <- FALSE
return(is_not_false(a))
}
result <- f()
expectation <- FALSE
RUnit::checkIdentical(result, expectation)
a <- NULL
result <- is_not_false(a)
expectation <- FALSE
RUnit::checkIdentical(result, expectation)
result <- is_not_false(a, null_is_false = FALSE)
expectation <- TRUE
if (interactive())
RUnit::checkIdentical(result, expectation)
a <- "not_false"
f <- function() {
return(is_not_false(a))
}
result <- f()
expectation <- TRUE
if (interactive())
RUnit::checkIdentical(result, expectation)
f <- function() {
return(is_not_false(a, null_is_false = TRUE,
inherits = FALSE))
}
result <- f()
expectation <- FALSE
RUnit::checkIdentical(result, expectation)
}
if (interactive()) {
test_is_not_false()
}
test_is_false <- function() {
expectation <- TRUE
result <- is_false(FALSE)
RUnit::checkIdentical(result, expectation)
}
if (interactive()) {
test_is_false()
}
test_is_null_or_true <- function() {
expectation <- TRUE
result <- is_null_or_true(TRUE)
result <- is_null_or_true(NULL)
RUnit::checkIdentical(result, expectation)
expectation <- FALSE
result <- is_null_or_true(FALSE)
RUnit::checkIdentical(result, expectation)
result <- is_null_or_true("not true")
RUnit::checkIdentical(result, expectation)
}
if (interactive()) {
test_is_null_or_true()
}
|
LIML = function(ivmodel,beta0=0,alpha=0.05,manyweakSE=FALSE,heteroSE=FALSE,clusterID=NULL) {
  if(!inherits(ivmodel, "ivmodel")) {
print("LIML: You must supply an ivmodel class. See ivmodel function for details")
return(NULL)
}
Yadj = ivmodel$Yadj; Dadj = ivmodel$Dadj; Zadj = ivmodel$Zadj; ZadjQR = ivmodel$ZadjQR
LIMLMatrix1 = matrix(0,2,2)
LIMLMatrix1[1,1] = sum(Yadj^2)
LIMLMatrix1[1,2] = LIMLMatrix1[2,1] = sum(Dadj * Yadj)
LIMLMatrix1[2,2] = sum(Dadj^2)
LIMLMatrix2 = matrix(0,2,2); projYadj = qr.resid(ZadjQR,Yadj); projDadj = qr.resid(ZadjQR,Dadj)
LIMLMatrix2[1,1] = sum(projYadj^2)
LIMLMatrix2[1,2] = LIMLMatrix2[2,1] = sum(projDadj * projYadj)
LIMLMatrix2[2,2] = sum(projDadj^2)
kLIML = eigen(LIMLMatrix1 %*% invTwobyTwoSymMatrix(LIMLMatrix2))$values[2]
output = KClass(ivmodel,k=kLIML,beta0=beta0,alpha=alpha,manyweakSE=manyweakSE,heteroSE=heteroSE,clusterID=clusterID)
rownames(output$point.est) = rownames(output$std.err) = rownames(output$test.stat) = rownames(output$ci) = rownames(output$p.value) = NULL
return(c(output,list(k=kLIML)))
}
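# Hedged usage sketch (assumes the ivmodel package, which exports ivmodel() and
# KClass()): fit the LIML k-class estimator on simulated single-instrument data.
# The data-generating process below is illustrative only.
if (FALSE) {
  set.seed(1)
  n <- 500
  Z <- rnorm(n)
  D <- 0.5 * Z + rnorm(n)
  Y <- 1 + 0.8 * D + rnorm(n)
  fit <- ivmodel(Y = Y, D = D, Z = Z)
  LIML(fit)   # point estimate, std. error, CI, and the LIML value of k
}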
|
context("Install from git repo")
test_that("install_git with git2r", {
skip_on_cran()
skip_if_offline()
skip_if_not_installed("git2r")
Sys.unsetenv("R_TESTS")
lib <- tempfile()
on.exit(unlink(lib, recursive = TRUE), add = TRUE)
dir.create(lib)
url <- "https://github.com/gaborcsardi/pkgconfig.git"
install_git(url, lib = lib, git = "git2r", quiet = TRUE)
expect_silent(packageDescription("pkgconfig", lib.loc = lib))
expect_equal(
packageDescription("pkgconfig", lib.loc = lib)$RemoteUrl,
url
)
remote <- package2remote("pkgconfig", lib = lib)
expect_s3_class(remote, "remote")
expect_s3_class(remote, "git2r_remote")
expect_equal(format(remote), "Git")
expect_equal(remote$url, url)
expect_equal(remote$ref, NULL)
expect_equal(remote_sha(remote), remote$sha)
expect_true(!is.na(remote$sha) && nzchar(remote$sha))
})
test_that("install_git with git2r and ref", {
skip_on_cran()
skip_if_offline()
skip_if_over_rate_limit()
skip_if_not_installed("git2r")
Sys.unsetenv("R_TESTS")
lib <- tempfile()
on.exit(unlink(lib, recursive = TRUE), add = TRUE)
dir.create(lib)
url <- "https://github.com/gaborcsardi/pkgconfig.git"
install_git(url, lib = lib, ref = "travis", git = "git2r", quiet = TRUE)
expect_silent(packageDescription("pkgconfig", lib.loc = lib))
expect_equal(
packageDescription("pkgconfig", lib.loc = lib)$RemoteUrl,
url
)
remote <- package2remote("pkgconfig", lib = lib)
expect_s3_class(remote, "remote")
expect_s3_class(remote, "git2r_remote")
expect_equal(format(remote), "Git")
expect_equal(remote$url, url)
expect_equal(remote$ref, "travis")
expect_equal(remote_sha(remote), remote$sha)
expect_true(!is.na(remote$sha) && nzchar(remote$sha))
})
test_that("install_git with command line git", {
skip_on_cran()
skip_if_offline()
if (is.null(git_path())) skip("git is not installed")
Sys.unsetenv("R_TESTS")
lib <- tempfile()
on.exit(unlink(lib, recursive = TRUE), add = TRUE)
dir.create(lib)
url <- "https://github.com/cran/falsy.git"
install_git(url, git = "external", lib = lib, quiet = TRUE)
expect_silent(packageDescription("falsy", lib.loc = lib))
expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url)
remote <- package2remote("falsy", lib = lib)
expect_s3_class(remote, "remote")
expect_s3_class(remote, "xgit_remote")
expect_equal(format(remote), "Git")
expect_equal(remote$url, url)
expect_equal(remote$ref, NULL)
expect_true(!is.na(remote$sha) && nzchar(remote$sha))
})
test_that("install_git with command line git and tag ref", {
skip_on_cran()
skip_if_offline()
if (is.null(git_path())) skip("git is not installed")
Sys.unsetenv("R_TESTS")
lib <- tempfile()
on.exit(unlink(lib, recursive = TRUE), add = TRUE)
dir.create(lib)
url <- "https://github.com/cran/falsy.git"
install_git(url, ref = "1.0", git = "external", lib = lib, quiet = TRUE)
expect_silent(packageDescription("falsy", lib.loc = lib))
expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url)
remote <- package2remote("falsy", lib = lib)
expect_s3_class(remote, "remote")
expect_s3_class(remote, "xgit_remote")
expect_equal(format(remote), "Git")
expect_equal(remote$url, url)
expect_equal(remote$ref, "1.0")
expect_true(!is.na(remote$sha) && nzchar(remote$sha))
})
test_that("install_git with command line git and full SHA ref", {
skip_on_cran()
skip_if_offline()
if (is.null(git_path())) skip("git is not installed")
Sys.unsetenv("R_TESTS")
lib <- tempfile()
on.exit(unlink(lib, recursive = TRUE), add = TRUE)
dir.create(lib)
url <- "https://github.com/cran/falsy.git"
install_git(url, ref = "0f39d9eb735bf16909831c0bb129063dda388375", git = "external", lib = lib, quiet = TRUE)
expect_silent(packageDescription("falsy", lib.loc = lib))
expect_equal(packageDescription("falsy", lib.loc = lib)$RemoteUrl, url)
remote <- package2remote("falsy", lib = lib)
expect_s3_class(remote, "remote")
expect_s3_class(remote, "xgit_remote")
expect_equal(format(remote), "Git")
expect_equal(remote$url, url)
expect_equal(remote$ref, "0f39d9eb735bf16909831c0bb129063dda388375")
expect_true(!is.na(remote$sha) && nzchar(remote$sha))
})
test_that("git_remote returns the url", {
skip_on_cran()
url <- "https://github.com/cran/falsy.git"
remote <- git_remote(url)
expect_equal(remote$url, "https://github.com/cran/falsy.git")
url <- "https://github.com/cran/falsy.git@master"
remote <- git_remote(url)
expect_equal(remote$url, "https://github.com/cran/falsy.git")
expect_equal(remote$ref, "master")
url <- "[email protected]:cran/falsy.git"
remote <- git_remote(url)
expect_equal(remote$url, "[email protected]:cran/falsy.git")
url <- "[email protected]:cran/falsy.git@master"
remote <- git_remote(url)
expect_equal(remote$url, "[email protected]:cran/falsy.git")
expect_equal(remote$ref, "master")
url <- "ssh://[email protected]:7999/proj/name.git"
remote <- git_remote(url)
expect_equal(remote$url, "ssh://[email protected]:7999/proj/name.git")
url <- "ssh://[email protected]:7999/proj/name.git@fixup/issue"
remote <- git_remote(url)
expect_equal(remote$url, "ssh://[email protected]:7999/proj/name.git")
expect_equal(remote$ref, "fixup/issue")
url <- "https://[email protected]/someuser/MyProject/_git/MyPackage"
remote <- git_remote(url)
expect_equal(remote$url, "https://[email protected]/someuser/MyProject/_git/MyPackage")
})
test_that("remote_package_name.git2r_remote returns the package name if it exists", {
skip_on_cran()
skip_if_offline()
skip_if_not_installed("git2r")
url <- "https://github.com/cran/falsy.git"
remote <- git_remote(url, git = "git2r")
expect_equal(remote_package_name(remote), "falsy")
url <- "https://github.com/igraph/rigraph.git@46bfafd"
remote <- git_remote(url, git = "git2r")
expect_equal(remote_package_name(remote), "igraph")
url <- "https://github.com/igraph/rigraph.git@master"
remote <- git_remote(url, git = "git2r")
expect_equal(remote_package_name(remote), "igraph")
url <- "https://gitlab.com/r-lib-grp/test-pkg.git"
remote <- git_remote(url, git = "git2r")
expect_equal(remote_package_name(remote), "test123")
url <- "https://gitlab.com/r-lib-grp/fake-private-repo.git"
remote <- git_remote(url, git = "git2r")
err <- tryCatch(remote_sha(remote), error = function(e) e)
expect_error(
expect_equal(remote_package_name(remote), NA_character_),
class = class(err),
label = conditionMessage(err)
)
})
test_that("remote_package_name.xgit_remote returns the package name if it exists", {
skip_on_cran()
skip_if_offline()
if (is.null(git_path())) skip("git is not installed")
url <- "https://github.com/cran/falsy.git"
remote <- git_remote(url, git = "external")
expect_equal(remote_package_name(remote), "falsy")
url <- "https://github.com/igraph/rigraph.git@46bfafd"
remote <- git_remote(url, git = "external")
expect_equal(remote_package_name(remote), "igraph")
url <- "https://github.com/igraph/rigraph.git@master"
remote <- git_remote(url, git = "external")
expect_equal(remote_package_name(remote), "igraph")
})
test_that("remote_sha.xgit remote returns the SHA if it exists", {
skip_on_cran()
skip_if_offline()
if (is.null(git_path())) skip("git is not installed")
url <- "https://github.com/cran/falsy.git"
remote <- git_remote(url, ref = "1.0", git = "external")
expect_equal(remote_sha(remote), "0f39d9eb735bf16909831c0bb129063dda388375")
remote <- git_remote(url, ref = "26a36cf957a18569e311ef75b6f61f822de945ef", git = "external")
expect_equal(remote_sha(remote), "26a36cf957a18569e311ef75b6f61f822de945ef")
})
test_that("remote_metadata.xgit_remote", {
r <- remote_metadata.xgit_remote(
list(url = "foo", subdir = "foo2", ref = "foo3")
)
e <- list(
RemoteType = "xgit",
RemoteUrl = "foo",
RemoteSubdir = "foo2",
RemoteRef = "foo3",
RemoteSha = NULL,
RemoteArgs = NULL
)
expect_equal(r, e)
})
test_that("remote_metadata.git2r_remote", {
r <- remote_metadata.git2r_remote(
list(url = "foo", subdir = "foo2", ref = "foo3")
)
e <- list(
RemoteType = "git2r",
RemoteUrl = "foo",
RemoteSubdir = "foo2",
RemoteRef = "foo3",
RemoteSha = NULL
)
expect_equal(r, e)
})
|
aaStruct <- function (x,y, sigWeight = TRUE){
Peps <- as.character(x[,1])
Peps <- strsplit(Peps,"")
Peps <- do.call(rbind,Peps)
A <- Peps == "A"
R <- Peps == "R"
N <- Peps == "N"
D <- Peps == "D"
Q <- Peps == "Q"
E <- Peps == "E"
G <- Peps == "G"
H <- Peps == "H"
L <- Peps == "L"
K <- Peps == "K"
Ef <- Peps == "F"
P <- Peps == "P"
S <- Peps == "S"
W <- Peps == "W"
Y <- Peps == "Y"
V <- Peps == "V"
C <- Peps == "C"
I <- Peps == "I"
M <- Peps == "M"
Tee <- Peps == "T"
ProbTot <- length(x[,1])
if (sigWeight == FALSE){
cat("sigWeight = FALSE \n")
A2 <- (colSums(A)/ProbTot)
R2 <- (colSums(R)/ProbTot)
N2 <- (colSums(N)/ProbTot)
D2 <- (colSums(D)/ProbTot)
Q2 <- (colSums(Q)/ProbTot)
E2 <- (colSums(E)/ProbTot)
G2 <- (colSums(G)/ProbTot)
H2 <- (colSums(H)/ProbTot)
L2 <- (colSums(L)/ProbTot)
K2 <- (colSums(K)/ProbTot)
Ef2 <- (colSums(Ef)/ProbTot)
P2 <- (colSums(P)/ProbTot)
S2 <- (colSums(S)/ProbTot)
W2 <- (colSums(W)/ProbTot)
Y2 <- (colSums(Y)/ProbTot)
V2 <- (colSums(V)/ProbTot)
C2 <- (colSums(C)/ProbTot)
I2 <- (colSums(I)/ProbTot)
M2 <- (colSums(M)/ProbTot)
Tee2 <- (colSums(Tee)/ProbTot)
CPeps <- rbind(A2,R2,N2,D2,Q2,E2,G2,H2,L2,K2,Ef2,P2,S2,W2,Y2,V2,C2,I2,M2,Tee2)
return(CPeps)
} else if (sigWeight == TRUE){
cat("sigWeight default = TRUE \n")
    RawFile <- matrix(0, nrow(Peps), ncol(Peps))
    for (i in 1:ncol(RawFile)){
      RawFile[,i] <- y
    }
A1 <- A*RawFile
R1 <- R*RawFile
N1 <- N*RawFile
D1 <- D*RawFile
Q1 <- Q*RawFile
E1 <- E*RawFile
G1 <- G*RawFile
H1 <- H*RawFile
L1 <- L*RawFile
K1 <- K*RawFile
Ef1 <- Ef*RawFile
P1 <- P*RawFile
S1 <- S*RawFile
W1 <- W*RawFile
Y1 <- Y*RawFile
V1 <- V*RawFile
C1 <- C*RawFile
I1 <- I*RawFile
M1 <- M*RawFile
Tee1 <- Tee*RawFile
A2 <- (colSums(A)/ProbTot)*colSums(A1)/colSums(A)
R2 <- (colSums(R)/ProbTot)*colSums(R1)/colSums(R)
N2 <- (colSums(N)/ProbTot)*colSums(N1)/colSums(N)
D2 <- (colSums(D)/ProbTot)*colSums(D1)/colSums(D)
Q2 <- (colSums(Q)/ProbTot)*colSums(Q1)/colSums(Q)
E2 <- (colSums(E)/ProbTot)*colSums(E1)/colSums(E)
G2 <- (colSums(G)/ProbTot)*colSums(G1)/colSums(G)
H2 <- (colSums(H)/ProbTot)*colSums(H1)/colSums(H)
L2 <- (colSums(L)/ProbTot)*colSums(L1)/colSums(L)
K2 <- (colSums(K)/ProbTot)*colSums(K1)/colSums(K)
Ef2 <- (colSums(Ef)/ProbTot)*colSums(Ef1)/colSums(Ef)
P2 <- (colSums(P)/ProbTot)*colSums(P1)/colSums(P)
S2 <- (colSums(S)/ProbTot)*colSums(S1)/colSums(S)
W2 <- (colSums(W)/ProbTot)*colSums(W1)/colSums(W)
Y2 <- (colSums(Y)/ProbTot)*colSums(Y1)/colSums(Y)
V2 <- (colSums(V)/ProbTot)*colSums(V1)/colSums(V)
C2 <- (colSums(C)/ProbTot)*colSums(C1)/colSums(C)
I2 <- (colSums(I)/ProbTot)*colSums(I1)/colSums(I)
M2 <- (colSums(M)/ProbTot)*colSums(M1)/colSums(M)
Tee2 <- (colSums(Tee)/ProbTot)*colSums(Tee1)/colSums(Tee)
CPeps <- rbind(A2,R2,N2,D2,Q2,E2,G2,H2,L2,K2,Ef2,P2,S2,W2,Y2,V2,C2,I2,M2,Tee2)
CPepsNorm <- CPeps/(mean(y))
return(CPepsNorm)
  } else {
    stop("sigWeight must be either TRUE or FALSE")
  }
}
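# Hedged sketch of the inputs aaStruct() expects: x is a data.frame whose first
# column holds equal-length peptide sequences and y is a numeric signal vector
# of the same length (only used when sigWeight = TRUE). With such a tiny toy
# pool, residues that never occur at a position yield NaN entries, which the
# plotting wrappers below blank out via the alpha aesthetic.
if (FALSE) {
  peps <- data.frame(seq = c("ARND", "GHLK", "STWY"), stringsAsFactors = FALSE)
  signal <- c(1.2, 0.8, 2.5)
  aaStruct(peps, signal, sigWeight = TRUE)   # signal-weighted position matrix
  aaStruct(peps, sigWeight = FALSE)          # plain positional frequencies
}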
vMotif.lc <- function(Prot,ProtG,Length,Charge,SigCol,Kd = FALSE)
{
Peps <- vSep(Prot,Length,Charge)
if (Kd == TRUE){
a <- aaStruct(Peps,1/Peps[,SigCol])
}else if (Kd == FALSE){
a <- aaStruct(Peps,Peps[,SigCol])
}
Peps <- vSep(ProtG,Length,Charge)
if (Kd == TRUE){
b <- aaStruct(Peps,1/Peps[,SigCol])
}else if (Kd == FALSE){
b <- aaStruct(Peps,Peps[,SigCol])
}
Finalstruct <- (a/b)
Final <- data.frame(Finalstruct, row.names = c("A","R","N","D","Q","E","G","H","L","K",
"F","P","S","W","Y","V","C","I","M","T"))
colnames(Final) <- Length:1
aaMap <- melt(as.matrix(Final))
colnames(aaMap) <- c("AA","Position","Weight")
aaPlot <- ggplot(data = aaMap) + geom_raster(aes(x = aaMap[,2], y = aaMap[,1], fill = aaMap[,3],
alpha = ifelse(is.nan(aaMap[,3]) == TRUE, 0, 1)))+
scale_fill_distiller(palette = "Spectral")+
geom_tile(size = 0.5, fill = NA, colour = "black", aes(x = aaMap[,2], y = aaMap[,1]))+
scale_x_continuous(breaks = min(aaMap[,2]):max(aaMap[,2]))+
theme(panel.grid.major.x = element_blank(), panel.grid.major.y = element_blank(),
panel.background = element_rect(fill = "white"), axis.text = element_text(size=12),
axis.title = element_text(size = 14), plot.title = element_text(size = 17, face = "bold"))+
labs(title = paste("vMotif.lc Analysis Length",Length, "Charge",Charge), x = "Position", y = "AA", fill = "Weight")+
guides(alpha = FALSE)
print(aaPlot)
return(Final)
}
vMotif.l <- function(Prot,ProtG,Length,SigCol,Kd = FALSE)
{
Peps <- vSep(Prot,Length)
if (Kd == TRUE){
a <- aaStruct(Peps,1/Peps[,SigCol])
}else if (Kd == FALSE){
a <- aaStruct(Peps,Peps[,SigCol])
}
Peps <- vSep(ProtG,Length)
if (Kd == TRUE){
b <- aaStruct(Peps,1/Peps[,SigCol])
}else if (Kd == FALSE){
b <- aaStruct(Peps,Peps[,SigCol])
}
Finalstruct <- (a/b)
Final <- data.frame(Finalstruct, row.names = c("A","R","N","D","Q","E","G","H","L","K",
"F","P","S","W","Y","V","C","I","M","T"))
colnames(Final) <- Length:1
aaMap <- melt(as.matrix(Final))
colnames(aaMap) <- c("AA","Position","Weight")
aaPlot <- ggplot(data = aaMap) + geom_raster(aes(x = aaMap[,2], y = aaMap[,1], fill = aaMap[,3],
alpha = ifelse(is.nan(aaMap[,3]) == TRUE, 0, 1)))+
scale_fill_distiller(palette = "Spectral")+
geom_tile(size = 0.5, fill = NA, colour = "black", aes(x = aaMap[,2], y = aaMap[,1]))+
scale_x_continuous(breaks = min(aaMap[,2]):max(aaMap[,2]))+
theme(panel.grid.major.x = element_blank(), panel.grid.major.y = element_blank(),
panel.background = element_rect(fill = "white"), axis.text = element_text(size = 12),
axis.title = element_text(size = 14), plot.title = element_text(size = 17, face = "bold"))+
labs(title = paste("vMotif.l Analysis Length", Length), x = "Position", y = "AA", fill = "Weight")+
guides(alpha = FALSE)
print(aaPlot)
return(Final)
}
vComp.l <- function(Prot,ProtG,Length)
{
Peps <- vSep(Prot,Length)
a <- aaStruct(Peps,sigWeight = FALSE)
Peps <- vSep(ProtG,Length)
b <- aaStruct(Peps,sigWeight = FALSE)
Finalstruct <- (a/b)
Final <- data.frame(Finalstruct, row.names = c("A","R","N","D","Q","E","G","H","L","K",
"F","P","S","W","Y","V","C","I","M","T"))
colnames(Final) <- Length:1
aaMap <- melt(as.matrix(Final))
colnames(aaMap) <- c("AA","Position","Weight")
aaPlot <- ggplot(data = aaMap) + geom_raster(aes(x = aaMap[,2], y = aaMap[,1], fill = aaMap[,3],
alpha = ifelse(is.nan(aaMap[,3]) == TRUE, 0, 1)))+
scale_fill_distiller(palette = "Spectral")+
geom_tile(size = 0.5, fill = NA, colour = "black", aes(x = aaMap[,2], y = aaMap[,1]))+
scale_x_continuous(breaks = min(aaMap[,2]):max(aaMap[,2]))+
theme(panel.grid.major.x = element_blank(), panel.grid.major.y = element_blank(),
panel.background = element_rect(fill = "white"), axis.text = element_text(size=12),
axis.title = element_text(size=14), plot.title = element_text(size = 17, face = "bold"))+
labs(title = paste("vComp.l Analysis Length", Length), x = "Position", y = "AA", fill = "Weight")+
guides(alpha = FALSE)
print(aaPlot)
return(Final)
}
vComp.lc <- function(Prot,ProtG,Length,Charge)
{
Peps <- vSep(Prot,Length,Charge)
a <- aaStruct(Peps,sigWeight = FALSE)
Peps <- vSep(ProtG,Length,Charge)
b <- aaStruct(Peps,sigWeight = FALSE)
Finalstruct <- (a/b)
Final <- data.frame(Finalstruct, row.names = c("A","R","N","D","Q","E","G","H","L","K",
"F","P","S","W","Y","V","C","I","M","T"))
colnames(Final) <- Length:1
aaMap <- melt(as.matrix(Final))
colnames(aaMap) <- c("AA","Position","Weight")
aaPlot <- ggplot(data = aaMap) + geom_raster(aes(x = aaMap[,2], y = aaMap[,1], fill = aaMap[,3],
alpha = ifelse(is.nan(aaMap[,3]) == TRUE, 0, 1)))+
scale_fill_distiller(palette = "Spectral")+
geom_tile(size = 0.5, fill = NA, colour = "black", aes(x = aaMap[,2], y = aaMap[,1]))+
scale_x_continuous(breaks = min(aaMap[,2]):max(aaMap[,2]))+
theme(panel.grid.major.x = element_blank(), panel.grid.major.y = element_blank(),
panel.background = element_rect(fill = "white"), axis.text = element_text(size=12),
axis.title = element_text(size=14), plot.title = element_text(size = 17, face = "bold"))+
labs(title =paste("vComp.lc Analysis Length", Length,"Charge", Charge), x = "Position", y = "AA", fill = "Weight")+
guides(alpha = FALSE)
print(aaPlot)
return(Final)
}
|
zadr <- function(y, x, xnew = NULL, tol = 1e-05) {
dm <- dim(y)
D <- dm[2] ; d <- D - 1
n <- dm[1]
x <- model.matrix(~., as.data.frame(x) )
runtime <- proc.time()
a1 <- which( Rfast::rowsums( y > 0 ) == D )
a2 <- which( Rfast::rowsums( y > 0 ) != D )
n1 <- length(a1)
n2 <- n - n1
za <- y[a2, , drop = FALSE]
za[za == 0] <- 1
za[ za < 1 ] <- 0
theta <- table( apply(za, 1, paste, collapse = ",") )
theta <- as.vector(theta)
con <- n1 * log(n1/n) + sum( theta * log(theta/n) )
y1 <- y[a1, , drop = FALSE]
ly1 <- log( y1 )
x1 <- x[a1, , drop = FALSE]
ly2 <- log( y[a2, , drop = FALSE] )
x2 <- x[a2, , drop = FALSE]
n1 <- nrow(y1) ; n2 <- n - n1
beta.ini <- .lm.fit(x1, ly1[, -1] - ly1[, 1])$coefficients
ini.phi <- sum( Compositional::diri.nr(y1, type = 2)$param )
ini.par <- c( log(ini.phi), as.vector( t( beta.ini) ) )
z <- list(ly1 = ly1, ly2 = ly2, x1 = x1, x2 = x2, a1 = a1, a2 = a2)
oop <- options(warn = -1)
on.exit( options(oop) )
qa <- nlm( mixreg, ini.par, z = z )
el1 <- -qa$minimum
qa <- nlm( mixreg, qa$estimate, z = z )
el2 <- - qa$minimum
while ( el2 - el1 > tol ) {
el1 <- el2
qa <- nlm( mixreg, qa$estimate, z = z )
el2 <- -qa$minimum
}
  qa <- optim( qa$estimate, mixreg, z = z, hessian = TRUE, control = list(maxit = 10000) )
sigma <- try( solve( qa$hessian ), silent = TRUE )
if ( !identical( class(sigma), "try-error") ) {
seb <- sqrt( diag(sigma) )
seb <- matrix(seb[-1], ncol = d)
colnames(seb) <- colnames( y[, -1] )
rownames(seb) <- colnames(x)
} else {
sigma <- NULL
seb <- NULL
}
phi <- exp( qa$par[1] )
be <- matrix( qa$par[-1], ncol = d )
colnames(be) <- colnames( y[, -1] )
rownames(be) <- colnames(x)
est <- NULL
if ( !is.null(xnew) ) {
xnew <- model.matrix(~., as.data.frame(xnew) )
ma <- cbind(1, exp( xnew %*% be ) )
est <- ma / Rfast::rowsums(ma)
colnames(est) <- colnames(y)
}
runtime <- proc.time() - runtime
list(runtime = runtime, loglik = -qa$value + con, phi = phi, be = be, seb = seb, sigma = sigma, est = est )
}
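# Hedged usage sketch (assumes the package's internal mixreg() objective plus
# the Rfast and Compositional dependencies): fit the zero-adjusted Dirichlet
# regression on simulated compositional responses that contain some zeros.
if (FALSE) {
  set.seed(1)
  n <- 200
  x <- rnorm(n)
  y <- matrix(rgamma(3 * n, shape = 2), ncol = 3)
  y[sample(length(y), 20)] <- 0   # introduce a handful of zero components
  y <- y / Rfast::rowsums(y)      # close the rows onto the simplex
  fit <- zadr(y, x)
  fit$be                          # regression coefficients
}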
|
restoreLibs <- function( md5hash, session_info = NULL, lib.loc = NULL) {
stopifnot( !is.null( md5hash ) | !is.null( session_info ) )
if (!requireNamespace("devtools", quietly = TRUE)) {
stop("devtools package required for restoreLibs function")
}
if (is.null(session_info)) {
session_info <- asession(md5hash)
}
if (!is.null(lib.loc)) {
oldPaths <- .libPaths()
.libPaths(c(lib.loc, .libPaths()))
on.exit(.libPaths(oldPaths))
}
pkgs <- session_info$packages
installed_pkgs <- installed.packages()[,c("Package", "Version")]
for (i in seq_along(pkgs$package)) {
    is_already_installed <- paste0(pkgs[i,"package"], pkgs[i,"version"]) %in%
      apply(installed_pkgs, 1, paste0, collapse="")
    if (pkgs[i,"package"] != "archivist" && !is_already_installed) {
if (pkgs[i,"*"] == "*") {
cat("Package", pkgs[i,"package"], "was installed from local file and will not be reinstalled.\n\n")
} else {
if (grepl(pkgs[i,"source"], pattern = "^CRAN")) {
try(devtools::install_version(pkgs[i,"package"],
version = pkgs[i,"version"],
type="source",
dependencies = FALSE, reload = FALSE), silent=TRUE)
} else {
pkg <- gsub(gsub(pkgs[i,"source"], pattern=".*\\(", replacement=""), pattern="\\)", replacement="")
try(devtools::install_github(pkg,
dependencies = FALSE, reload = FALSE), silent=TRUE)
}
}
}
}
}
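# Hedged usage sketch: the md5hash below is a placeholder, not a real archivist
# artifact, and restoring old package versions requires network access plus the
# devtools dependency checked for above.
if (FALSE) {
  lib <- file.path(tempdir(), "restored_libs")
  dir.create(lib, showWarnings = FALSE)
  restoreLibs(md5hash = "2166dfbd3a7a68a91a2f8e6df1a44111", lib.loc = lib)
}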
|
"nwts2ph"
|
NULL
budgets_create_budget <- function(AccountId, Budget, NotificationsWithSubscribers = NULL) {
op <- new_operation(
name = "CreateBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$create_budget_input(AccountId = AccountId, Budget = Budget, NotificationsWithSubscribers = NotificationsWithSubscribers)
output <- .budgets$create_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$create_budget <- budgets_create_budget
budgets_create_budget_action <- function(AccountId, BudgetName, NotificationType, ActionType, ActionThreshold, Definition, ExecutionRoleArn, ApprovalModel, Subscribers) {
op <- new_operation(
name = "CreateBudgetAction",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$create_budget_action_input(AccountId = AccountId, BudgetName = BudgetName, NotificationType = NotificationType, ActionType = ActionType, ActionThreshold = ActionThreshold, Definition = Definition, ExecutionRoleArn = ExecutionRoleArn, ApprovalModel = ApprovalModel, Subscribers = Subscribers)
output <- .budgets$create_budget_action_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$create_budget_action <- budgets_create_budget_action
budgets_create_notification <- function(AccountId, BudgetName, Notification, Subscribers) {
op <- new_operation(
name = "CreateNotification",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$create_notification_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification, Subscribers = Subscribers)
output <- .budgets$create_notification_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$create_notification <- budgets_create_notification
budgets_create_subscriber <- function(AccountId, BudgetName, Notification, Subscriber) {
op <- new_operation(
name = "CreateSubscriber",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$create_subscriber_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification, Subscriber = Subscriber)
output <- .budgets$create_subscriber_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$create_subscriber <- budgets_create_subscriber
budgets_delete_budget <- function(AccountId, BudgetName) {
op <- new_operation(
name = "DeleteBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$delete_budget_input(AccountId = AccountId, BudgetName = BudgetName)
output <- .budgets$delete_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$delete_budget <- budgets_delete_budget
budgets_delete_budget_action <- function(AccountId, BudgetName, ActionId) {
op <- new_operation(
name = "DeleteBudgetAction",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$delete_budget_action_input(AccountId = AccountId, BudgetName = BudgetName, ActionId = ActionId)
output <- .budgets$delete_budget_action_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$delete_budget_action <- budgets_delete_budget_action
budgets_delete_notification <- function(AccountId, BudgetName, Notification) {
op <- new_operation(
name = "DeleteNotification",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$delete_notification_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification)
output <- .budgets$delete_notification_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$delete_notification <- budgets_delete_notification
budgets_delete_subscriber <- function(AccountId, BudgetName, Notification, Subscriber) {
op <- new_operation(
name = "DeleteSubscriber",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$delete_subscriber_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification, Subscriber = Subscriber)
output <- .budgets$delete_subscriber_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$delete_subscriber <- budgets_delete_subscriber
budgets_describe_budget <- function(AccountId, BudgetName) {
op <- new_operation(
name = "DescribeBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_input(AccountId = AccountId, BudgetName = BudgetName)
output <- .budgets$describe_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget <- budgets_describe_budget
budgets_describe_budget_action <- function(AccountId, BudgetName, ActionId) {
op <- new_operation(
name = "DescribeBudgetAction",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_action_input(AccountId = AccountId, BudgetName = BudgetName, ActionId = ActionId)
output <- .budgets$describe_budget_action_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget_action <- budgets_describe_budget_action
budgets_describe_budget_action_histories <- function(AccountId, BudgetName, ActionId, TimePeriod = NULL, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeBudgetActionHistories",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_action_histories_input(AccountId = AccountId, BudgetName = BudgetName, ActionId = ActionId, TimePeriod = TimePeriod, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_budget_action_histories_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget_action_histories <- budgets_describe_budget_action_histories
budgets_describe_budget_actions_for_account <- function(AccountId, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeBudgetActionsForAccount",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_actions_for_account_input(AccountId = AccountId, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_budget_actions_for_account_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget_actions_for_account <- budgets_describe_budget_actions_for_account
budgets_describe_budget_actions_for_budget <- function(AccountId, BudgetName, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeBudgetActionsForBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_actions_for_budget_input(AccountId = AccountId, BudgetName = BudgetName, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_budget_actions_for_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget_actions_for_budget <- budgets_describe_budget_actions_for_budget
budgets_describe_budget_performance_history <- function(AccountId, BudgetName, TimePeriod = NULL, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeBudgetPerformanceHistory",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budget_performance_history_input(AccountId = AccountId, BudgetName = BudgetName, TimePeriod = TimePeriod, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_budget_performance_history_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budget_performance_history <- budgets_describe_budget_performance_history
budgets_describe_budgets <- function(AccountId, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeBudgets",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_budgets_input(AccountId = AccountId, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_budgets_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_budgets <- budgets_describe_budgets
budgets_describe_notifications_for_budget <- function(AccountId, BudgetName, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeNotificationsForBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_notifications_for_budget_input(AccountId = AccountId, BudgetName = BudgetName, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_notifications_for_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_notifications_for_budget <- budgets_describe_notifications_for_budget
budgets_describe_subscribers_for_notification <- function(AccountId, BudgetName, Notification, MaxResults = NULL, NextToken = NULL) {
op <- new_operation(
name = "DescribeSubscribersForNotification",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$describe_subscribers_for_notification_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification, MaxResults = MaxResults, NextToken = NextToken)
output <- .budgets$describe_subscribers_for_notification_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$describe_subscribers_for_notification <- budgets_describe_subscribers_for_notification
budgets_execute_budget_action <- function(AccountId, BudgetName, ActionId, ExecutionType) {
op <- new_operation(
name = "ExecuteBudgetAction",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$execute_budget_action_input(AccountId = AccountId, BudgetName = BudgetName, ActionId = ActionId, ExecutionType = ExecutionType)
output <- .budgets$execute_budget_action_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$execute_budget_action <- budgets_execute_budget_action
budgets_update_budget <- function(AccountId, NewBudget) {
op <- new_operation(
name = "UpdateBudget",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$update_budget_input(AccountId = AccountId, NewBudget = NewBudget)
output <- .budgets$update_budget_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$update_budget <- budgets_update_budget
budgets_update_budget_action <- function(AccountId, BudgetName, ActionId, NotificationType = NULL, ActionThreshold = NULL, Definition = NULL, ExecutionRoleArn = NULL, ApprovalModel = NULL, Subscribers = NULL) {
op <- new_operation(
name = "UpdateBudgetAction",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$update_budget_action_input(AccountId = AccountId, BudgetName = BudgetName, ActionId = ActionId, NotificationType = NotificationType, ActionThreshold = ActionThreshold, Definition = Definition, ExecutionRoleArn = ExecutionRoleArn, ApprovalModel = ApprovalModel, Subscribers = Subscribers)
output <- .budgets$update_budget_action_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$update_budget_action <- budgets_update_budget_action
budgets_update_notification <- function(AccountId, BudgetName, OldNotification, NewNotification) {
op <- new_operation(
name = "UpdateNotification",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$update_notification_input(AccountId = AccountId, BudgetName = BudgetName, OldNotification = OldNotification, NewNotification = NewNotification)
output <- .budgets$update_notification_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$update_notification <- budgets_update_notification
budgets_update_subscriber <- function(AccountId, BudgetName, Notification, OldSubscriber, NewSubscriber) {
op <- new_operation(
name = "UpdateSubscriber",
http_method = "POST",
http_path = "/",
paginator = list()
)
input <- .budgets$update_subscriber_input(AccountId = AccountId, BudgetName = BudgetName, Notification = Notification, OldSubscriber = OldSubscriber, NewSubscriber = NewSubscriber)
output <- .budgets$update_subscriber_output()
config <- get_config()
svc <- .budgets$service(config)
request <- new_request(svc, op, input, output)
response <- send_request(request)
return(response)
}
.budgets$operations$update_subscriber <- budgets_update_subscriber
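# Hedged usage sketch (placeholder account id and values; a real call needs AWS
# credentials and would create billable resources). The Budget list mirrors the
# required fields of the AWS Budgets CreateBudget API as far as is known here.
if (FALSE) {
  budgets_create_budget(
    AccountId = "123456789012",
    Budget = list(
      BudgetName = "monthly-cost-budget",
      BudgetLimit = list(Amount = "100", Unit = "USD"),
      TimeUnit = "MONTHLY",
      BudgetType = "COST"
    )
  )
  budgets_describe_budgets(AccountId = "123456789012", MaxResults = 10)
}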
|
NULL
cloudwatch <- function(config = list()) {
svc <- .cloudwatch$operations
svc <- set_config(svc, config)
return(svc)
}
.cloudwatch <- list()
.cloudwatch$operations <- list()
.cloudwatch$metadata <- list(
service_name = "monitoring",
endpoints = list("*" = list(endpoint = "monitoring.{region}.amazonaws.com", global = FALSE), "cn-*" = list(endpoint = "monitoring.{region}.amazonaws.com.cn", global = FALSE), "us-iso-*" = list(endpoint = "monitoring.{region}.c2s.ic.gov", global = FALSE), "us-isob-*" = list(endpoint = "monitoring.{region}.sc2s.sgov.gov", global = FALSE)),
service_id = "CloudWatch",
api_version = "2010-08-01",
signing_name = "monitoring",
json_version = "",
target_prefix = ""
)
.cloudwatch$service <- function(config = list()) {
handlers <- new_handlers("query", "v4")
new_service(.cloudwatch$metadata, handlers, config)
}
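# Hedged usage sketch: construct the CloudWatch client with an explicit region;
# the individual operation functions are attached to .cloudwatch$operations in
# the package's other source files, so none are listed in this file.
if (FALSE) {
  svc <- cloudwatch(config = list(region = "us-east-1"))
  names(svc)   # the available CloudWatch operations, once attached
}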
|
context("Overdispersion")
data("biomass")
biomass$olre <- 1:nrow(biomass)
biomass$YearC <- biomass$Year - mean(biomass$Year)
biomass$TemperatureS <- scale(biomass$Temperature)
biomass$PrecipitationS <- scale(biomass$Precipitation)
biomass$SpeciesDiversityC <- biomass$SpeciesDiversity - mean(biomass$SpeciesDiversity)
fit1 <- lme4::glmer(Extinction ~ YearC + TemperatureS + PrecipitationS +
SpeciesDiversityC + (1 | Population),
data = biomass, family = "poisson"
)
fit2 <- lme4::glmer(Extinction ~ YearC + TemperatureS + PrecipitationS +
SpeciesDiversityC + (1 | Population) + (1 | olre),
data = biomass, family = "poisson"
)
test_that("overdispersion is added", {
fit_olre <- model_overdisp(fit1, biomass, olre = TRUE)
expect_equal(fit_olre$mod@frame$overdisp, factor(1:nrow(biomass)))
expect_equal(fit_olre$dat$overdisp, factor(1:nrow(biomass)))
fit_noolre <- model_overdisp(fit1, biomass, olre = FALSE)
expect_equal(fit_noolre$mod@frame$overdisp, NULL)
})
test_that("overdispersion is not added when present", {
fit_olre <- model_overdisp(fit2, biomass, olre = TRUE)
expect_equal(fit_olre$mod@frame$overdisp, NULL)
})
|
iris2 <- iris %>%
mutate_all(as.character)
test_that("All columns must be character", {
expect_snapshot_error(add_column_headers(iris, "header_text"))
})
test_that("Nested headers are not allowed", {
header_string = "TEXT | TEXT {TEXT {TEXT} TEXT } | TEXT"
expect_snapshot_error(add_column_headers(iris2, header_string))
})
test_that("Header strings must have the same number of columns as the data frame", {
good_no_spanner <- "Sepal Length | Sepal Width | Petal Length | Petal Width | Species"
less_no_spanner <- "Sepal Length | Sepal Width | Petal Length | Petal Width"
more_no_spanner <- "Sepal Length | Sepal Width | Petal Length | Petal Width | Species | More"
good_spanner <- "Sepal {Length | Width} | Petal {Length | Width} | Species"
less_spanner <- "Sepal {Length | Width} | Petal {Length | Width}"
more_spanner <- "Sepal {Length | Width} | Petal {Length | Width} | Species | More"
nested_less <- "Sepal {Length | Width} | Petal {Length} | Species"
nested_more <- "Sepal {Length | Width} | Petal {Length | Width | More} | Species"
expect_silent(add_column_headers(iris2, good_no_spanner))
expect_silent(add_column_headers(iris2, good_spanner))
expect_snapshot_error(add_column_headers(iris2, less_no_spanner))
expect_snapshot_error(add_column_headers(iris2, more_no_spanner))
expect_snapshot_error(add_column_headers(iris2, less_spanner))
expect_snapshot_error(add_column_headers(iris2, more_spanner))
expect_snapshot_error(add_column_headers(iris2, nested_less))
expect_snapshot_error(add_column_headers(iris2, nested_more))
})
test_that("Unmatched spanner brackets", {
header_string = "TEXT | TEXT {TEXT {TEXT} TEXT | TEXT"
expect_snapshot_error(add_column_headers(iris2, header_string))
})
test_that("Spanning headers produce correctly", {
header_text_no_spanner <- "Sepal Length | Sepal Width | Petal Length | Petal Width | Species"
header_df_no_spanner <- data.frame(
Sepal.Length = c("Sepal Length"),
Sepal.Width = c("Sepal Width"),
Petal.Length = c("Petal Length"),
Petal.Width = c("Petal Width"),
Species = c("Species"),
stringsAsFactors = FALSE)
header_text_with_spanner <- "Sepal {Length | Width} | Petal {Length | Width} | Species"
header_df_with_spanner <- data.frame(
Sepal.Length = c("Sepal", "Length"),
Sepal.Width = c("", "Width"),
Petal.Length = c("Petal", "Length"),
Petal.Width = c("", "Width"),
Species = c("", "Species"),
stringsAsFactors = FALSE)
expect_true(all(add_column_headers(iris2, header_text_no_spanner) == bind_rows(header_df_no_spanner, iris2)))
expect_true(all(add_column_headers(iris2, header_text_with_spanner) == bind_rows(header_df_with_spanner, iris2)))
})
test_that("add_column_headers throws an error when you use a token and don't pass header_n", {
expect_snapshot_error({
mtcars2 <- mtcars %>%
mutate_all(as.character)
t <- tplyr_table(mtcars2, am) %>%
add_layer(
group_count(cyl)
)
b_t <- build(t) %>% mutate_all(as.character)
count_string <- "Rows | am0 **0** | am1 **1**"
add_column_headers(b_t, count_string)
})
})
test_that("add_column_headers returns the expected result when tokens are passed", {
mtcars2 <- mtcars %>%
mutate_all(as.character)
t <- tplyr_table(mtcars2, vs, cols = am) %>%
add_layer(
group_count(cyl)
)
b_t <- build(t) %>%
select(starts_with("row_label1") | starts_with("var1_"))
count_string <- "Rows | V N=**0** {auto N=**0_0** | man N=**0_1**} | S N=**1** {auto N=**1_0** | man N=**1_1**}"
tab <- add_column_headers(b_t, count_string, header_n(t))
expect_equal(tab, structure(list(row_label1 = c("", "Rows", "4", "6", "8"),
var1_0_0 = c("V N=18", "auto N=12", " 0 ( 0.0%)", " 0 ( 0.0%)", "12 (100.0%)"),
var1_0_1 = c("", "man N=6", " 1 ( 16.7%)", " 3 ( 50.0%)", " 2 ( 33.3%)"),
var1_1_0 = c("S N=14", "auto N=7", " 3 ( 42.9%)", " 4 ( 57.1%)", " 0 ( 0.0%)"),
var1_1_1 = c("", "man N=7", " 7 (100.0%)", " 0 ( 0.0%)", " 0 ( 0.0%)")),
row.names = c(NA, -5L),
class = c("tbl_df", "tbl", "data.frame")))
})
|
"CredIntRho" <-
function(x, aprox=FALSE, level=.95)
{
  r <- rhoCA(x)
  if (!aprox) {
    vr <- varRCA(x)
  } else {
    vr <- varRCA(x, TRUE)
  }
  zs <- -qnorm((1 - level)/2)
  sdr <- sqrt(vr)
  lcl <- r - zs * sdr
  ucl <- r + zs * sdr
  return(list(lcl, ucl))
}
|
.prepareP <- function(o, unconditional, residuals, resDen, se, se.mult,
n, n2, xlab, ylab, main, ylim, xlim, too.far,
seWithMean, nsim = 0, ...) {
Q <- .initialize(o = o, unconditional = unconditional, residuals = residuals,
se.mult = se.mult, resDen = resDen, se = se)
if( !is.null(Q$Vmat) ) { o$gObj$Vp <- Q$Vmat }
P <- .createP(sm = o$gObj$smooth[[ o$ism ]],
x = o$gObj,
partial.resids = Q$partial.resids,
se = Q$se,
n = n, n2 = n2,
xlab = xlab, ylab = ylab,
main = main,
ylim = ylim, xlim = xlim,
too.far = too.far,
se1.mult = Q$se.mult, se2.mult = Q$se.mult,
seWithMean = seWithMean,
fitSmooth = Q$fv.terms,
w.resid = Q$w.resid,
resDen = resDen,
nsim = nsim,
...)
P$doPlotResid <- Q$partial.resids
return( P )
}
|
get_graphab <- function(res = TRUE, return = FALSE){
if(Sys.which("java") == ""){
warning("Please install java if you want to use Graphab")
}
data_dir <- rappdirs::user_data_dir()
if(!("graphab-2.6.jar" %in% list.files(paste0(data_dir, "/graph4lg_jar")))){
if(!dir.exists(paths = paste0(data_dir, "/graph4lg_jar"))){
dir.create(path = paste0(data_dir, "/graph4lg_jar"))
}
url <- "https://thema.univ-fcomte.fr/productions/download.php?name=graphab&version=2.6&username=Graph4lg&institution=R"
destfile <- "/graph4lg_jar/graphab-2.6.jar"
utils::download.file(url, paste0(data_dir, "/", destfile),
method = "auto",
mode = "wb")
graphab <- 1
if(res){
message("Graphab has been downloaded")
}
} else {
graphab <- 0
if(res){
message("Graphab is already on your machine")
}
}
if(return){
return(graphab)
}
}
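# Example sketch: download the Graphab .jar into the user data directory,
# or confirm that it is already present. With return = TRUE the function
# returns 1 if the file was downloaded and 0 if it was already installed.
# status <- get_graphab(res = TRUE, return = TRUE)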
|
plot.le <- function(x, ylim = NULL, xlim = NULL, ci = TRUE, col = 1,
                    ylab = NULL, xlab = "Time", add = FALSE, ...){
object <- x
att <- attributes(object)
if(is.null(ylab)){
ylab <- switch(att$type,
ll = "Loss of lifetime",
mrl = "Mean residual lifetime")
}
ci <- ci & att$var.type == "ci"
if(length(col) == 1){
col <- rep(col, length(object))
}
if(is.null(ylim)){
if(ci){
ylim <- range(unlist(lapply(object, function(x) x[, c("lower.ci", "upper.ci")])))
}else{
ylim <- range(unlist(lapply(object, function(x) x[, "Estimate"])))
}
}
  for(i in seq_along(object)){
if(i == 1 & !add){
plot(Estimate ~ att$time, data = object[[i]], ylim = ylim, xlim = xlim,
type = "l", col = col[i], xlab = xlab, ylab = ylab, ...)
}else{
lines(Estimate ~ att$time, data = object[[i]], col = col[i], ...)
}
if(ci){
lines(lower.ci ~ att$time, data = object[[i]], lty = 2, col = col[i], ...)
lines(upper.ci ~ att$time, data = object[[i]], lty = 2, col = col[i], ...)
}
}
}
|
library(plm)
data("Grunfeld", package = "plm")
g_re <- plm(inv ~ value + capital, data = Grunfeld, model = "random")
g_fe <- plm(inv ~ value + capital, data = Grunfeld, model = "within")
g_pool <- plm(inv ~ value + capital, data = Grunfeld, model = "pooling")
g_pool_lm <- lm(inv ~ value + capital, data = Grunfeld)
g_fe_lm <- lm(inv ~ factor(firm) + value + capital, data = Grunfeld)
plm::pbgtest(inv ~ value + capital, order=1, data=Grunfeld)
plm::pbgtest(inv ~ value + capital, order=1, data=Grunfeld, model="pooling")
plm::pbgtest(g_pool, order=1)
plm::pbgtest(inv ~ value + capital, order=1, model="within", data=Grunfeld)
plm::pbgtest(g_fe, order=1)
plm::pbgtest(inv ~ value + capital, order=1, model="random", data=Grunfeld)
plm::pbgtest(g_re, order=1)
plm::pbgtest(g_pool, order = 1)
lmtest::bgtest(g_pool)
lmtest::bgtest(g_pool_lm)
plm::pbgtest(g_pool, order = 1, type="F")
lmtest::bgtest(g_pool, type="F")
lmtest::bgtest(g_pool_lm, type="F")
plm::pbgtest(inv ~ value + capital, data = Grunfeld, order=1)
lmtest::bgtest(inv ~ value + capital, data = Grunfeld, order=1)
plm::pbgtest( inv ~ value + capital, data = Grunfeld, order=1, type="F")
lmtest::bgtest(inv ~ value + capital, data = Grunfeld, order=1, type="F")
plm::pbgtest(g_pool, order = 1, order.by=g_pool$model$capital)
lmtest::bgtest(g_pool, order = 1, order.by=g_pool$model$capital)
lmtest::bgtest(g_pool_lm, order = 1, order.by=g_pool_lm$model$capital)
plm::pbgtest( inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool$model$capital)
lmtest::bgtest(inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool$model$capital)
lmtest::bgtest(inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool_lm$model$capital)
plm::pbgtest(g_pool, order = 1, order.by=g_pool$model$capital, type="F")
lmtest::bgtest(g_pool, order = 1, order.by=g_pool$model$capital, type="F")
lmtest::bgtest(g_pool_lm, order = 1, order.by=g_pool_lm$model$capital, type="F")
plm::pbgtest( inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool$model$capital, type="F")
lmtest::bgtest(inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool$model$capital, type="F")
lmtest::bgtest(inv ~ value + capital, data=Grunfeld, order = 1, order.by=g_pool_lm$model$capital, type="F")
plm::pbgtest(g_pool, order.by=~capital, order = 1, data=Grunfeld)
lmtest::bgtest(g_pool, order.by=~capital, data=Grunfeld)
lmtest::bgtest(g_pool_lm, order.by=~capital, data=Grunfeld)
plm::pbgtest( inv ~ value + capital, order.by=~capital, order = 1, data=Grunfeld, model="pooling")
plm::pbgtest( inv ~ value + capital, order.by=~capital, order = 1, data=Grunfeld)
lmtest::bgtest(inv ~ value + capital, order.by=~capital, data=Grunfeld)
lmtest::bgtest(inv ~ value + capital, order.by=~capital, data=Grunfeld)
plm::pbgtest(g_pool, order.by=~capital, order = 1, data=Grunfeld, type="F")
lmtest::bgtest(g_pool, order.by=~capital, data=Grunfeld, type="F")
lmtest::bgtest(g_pool_lm, order.by=~capital, data=Grunfeld, type="F")
plm::pbgtest( inv ~ value + capital, order.by=~capital, order = 1, data=Grunfeld, type="F", model="pooling")
plm::pbgtest( inv ~ value + capital, order.by=~capital, order = 1, data=Grunfeld, type="F")
lmtest::bgtest(inv ~ value + capital, order.by=~capital, data=Grunfeld, type="F")
lmtest::bgtest(inv ~ value + capital, order.by=~capital, data=Grunfeld, type="F")
plm::pbgtest(inv ~ value + capital, order=1, model="within", data=Grunfeld, order.by=~capital)
plm::pbgtest(g_fe, order=1, data=Grunfeld, order.by=~capital)
plm::pbgtest(inv ~ value + capital, order=1, model="within", data=Grunfeld, order.by=~g_fe$model$capital)
plm::pbgtest(g_fe, order=1, data=Grunfeld, order.by=~g_fe$model$capital)
plm::pbgtest(inv ~ value + capital, order=1, model="random", data=Grunfeld, order.by=~capital)
plm::pbgtest(g_re, order=1, data=Grunfeld, order.by=~capital)
plm::pbgtest(inv ~ value + capital, order=1, model="random", data=Grunfeld, order.by=g_re$model$capital)
plm::pbgtest(g_re, order=1, data=Grunfeld, order.by=g_re$model$capital)
g_re <- plm(inv ~ value, model = "random", data = Grunfeld)
g_re <- plm(inv ~ value + capital, model = "random", data = Grunfeld)
X <- model.matrix(g_re)
y <- pmodel.response(g_re)
df <- as.data.frame(cbind(y, X[,-1]))
lm.mod <- lm(y ~ X - 1)
lm.mod2 <- lm(df)
all.equal(lm.mod$residuals, g_re$residuals)
all.equal(lm.mod2$residuals, g_re$residuals)
lmtest::bgtest(lm.mod)
lmtest::bgtest(lm.mod2)
pbgtest(g_re, order = 1)
|
SinglefReject <-
function(nsim,bvec,lambdamu,lambdasd,nvec){
k <- length(bvec)
if (length(nvec) != k*(k + 1)/2) {
stop("length mismatch between bvec and nvec")
}
MLEres <- HWEmodelsMLE(nvec)
maxLL <- MLEres$fmaxloglik
psamp <- fsamp <- vector("list", nsim)
niter <- naccept <- count <- PrnH1 <- varterm1 <- 0
Lnorm <- lfactorial(sum(nvec)) - sum(lfactorial(nvec))
while (naccept < nsim){
M <- nsim - naccept
count <- count + M
samples <- SinglefPrior(nsim=M,alpha=bvec,lambdamu=lambdamu,
lambdasd=lambdasd)
LL <- if(M==1) {
MultLogLikP(samples$p, samples$f, nvec)
}
else {
apply(cbind(samples$f, samples$p), 1,
function(x) MultLogLikP(x[-1], x[1], nvec))
}
likterm <- LL + Lnorm
expterm <- exp(likterm)
PrnH1 <- PrnH1 + sum(expterm)
varterm1 <- varterm1 + sum(expterm^2)
if (any(LL > maxLL)) cat("Maximization is messed up\n")
accept <- log(runif(M)) < LL - maxLL
if (any(accept)) {
niter <- niter + 1
naccept <- naccept + sum(accept)
psamp[[niter]] <- samples$p[accept,,drop=FALSE]
fsamp[[niter]] <- samples$f[accept]
}
}
psamp <- do.call(rbind,psamp)
fsamp <- unlist(fsamp)
PrnH1 <- PrnH1/count
varest <- (varterm1/count - PrnH1^2)/count
cat("nsim norm constant (se) 95% interval: \n")
cat(nsim,PrnH1,"(",sqrt(varest),")",PrnH1-1.96*sqrt(varest),PrnH1+1.96*sqrt(varest),"\n")
accrate <- nsim/count
list(psamp=psamp,fsamp=fsamp,accrate=accrate,PrnH1=PrnH1,varest=varest)
}
|
.Tcl <- function(...)
structure(.External(.C_dotTcl, ...), class = "tclObj")
.Tcl.objv <- function(objv)
structure(.External(.C_dotTclObjv, objv), class = "tclObj")
.Tcl.callback <- function(...)
.External(.C_dotTclcallback, ...)
.Tcl.args <- function(...) {
pframe <- parent.frame(3)
name2opt <- function(x) if ( x != "") paste0("-", x) else ""
isCallback <- function(x)
is.function(x) || is.call(x) || is.expression(x)
makeAtomicCallback <- function(x, e) {
if (is.name(x))
x <- eval(x, e)
if (is.call(x)){
if(identical(x[[1L]], as.name("break")))
return("break")
if(identical(x[[1L]], as.name("function")))
x <- eval(x, e)
}
.Tcl.callback(x, e)
}
makeCallback <- function(x, e) {
if (is.expression(x))
paste(lapply(x, makeAtomicCallback, e), collapse = ";")
else
makeAtomicCallback(x, e)
}
val2string <- function(x) {
if (is.null(x)) return("")
if (is.tkwin(x)){ current.win <<- x ; return (.Tk.ID(x)) }
if (inherits(x,"tclVar")) return(names(unclass(x)$env))
if (isCallback(x)){
ref <- local({value <- x; envir <- pframe; environment()})
callback <- makeCallback(get("value", envir = ref),
get("envir", envir = ref))
callback <- paste("{", callback, "}")
assign(callback, ref, envir = current.win$env)
return(callback)
}
x <- gsub("\\", "\\\\", as.character(x), fixed=TRUE)
x <- gsub("\"", "\\\"", x, fixed=TRUE)
x <- gsub("[", "\\[", x, fixed=TRUE)
x <- gsub("$", "\\$", x, fixed=TRUE)
paste0("\"", x, "\"", collapse = " ")
}
val <- list(...)
nm <- names(val)
if (!length(val)) return("")
nm <- if (is.null(nm)) rep("", length(val)) else sapply(nm, name2opt)
current.win <-
if (exists("win", envir = parent.frame()))
get("win", envir = parent.frame())
else .TkRoot
val <- sapply(val, val2string)
paste(as.vector(rbind(nm, val)), collapse = " ")
}
.Tcl.args.objv <- function(...) {
pframe <- parent.frame(3)
isCallback <- function(x)
is.function(x) || is.call(x) || is.expression(x)
makeAtomicCallback <- function(x, e) {
if (is.name(x))
x <- eval(x, e)
if (is.call(x)){
if(identical(x[[1L]], as.name("break")))
return("break")
if(identical(x[[1L]], as.name("function")))
x <- eval(x, e)
}
.Tcl.callback(x, e)
}
makeCallback <- function(x, e) {
if (is.expression(x))
paste(lapply(x, makeAtomicCallback, e), collapse = ";")
else
makeAtomicCallback(x, e)
}
val2obj <- function(x) {
if (is.null(x)) return(NULL)
if (is.tkwin(x)){current.win <<- x ; return(as.tclObj(.Tk.ID(x)))}
if (inherits(x,"tclVar")) return(as.tclObj(names(unclass(x)$env)))
if (isCallback(x)){
ref <- local({value <- x; envir <- pframe; environment()})
callback <- makeCallback(get("value", envir = ref),
get("envir", envir = ref))
assign(callback, ref, envir = current.win$env)
return(as.tclObj(callback, drop = TRUE))
}
as.tclObj(x, drop = TRUE)
}
val <- list(...)
current.win <- .TkRoot
lapply(val, val2obj)
}
.Tk.ID <- function(win) win$ID
.Tk.newwin <- function(ID) {
win <- list(ID = ID, env = new.env(parent = emptyenv()))
win$env$num.subwin <- 0
class(win) <- "tkwin"
win
}
.Tk.subwin <- function(parent) {
ID <- paste(parent$ID, parent$env$num.subwin <- parent$env$num.subwin + 1,
sep = ".")
win <- .Tk.newwin(ID)
assign(ID, win, envir = parent$env)
assign("parent", parent, envir = win$env)
win
}
tkdestroy <- function(win) {
tcl("destroy", win)
ID <- .Tk.ID(win)
env <- get("parent", envir = win$env)$env
if (exists(ID, envir = env, inherits = FALSE))
rm(list = ID, envir = env)
}
is.tkwin <- function(x) inherits(x, "tkwin")
tclVar <- function(init = "") {
n <- .TkRoot$env$TclVarCount <- .TkRoot$env$TclVarCount + 1L
name <- paste0("::RTcl", n)
l <- list(env = new.env())
assign(name, NULL, envir = l$env)
reg.finalizer(l$env, function(env) tcl("unset", names(env)))
class(l) <- "tclVar"
tclvalue(l) <- init
l
}
tclObj <- function(x) UseMethod("tclObj")
"tclObj<-" <- function(x, value) UseMethod("tclObj<-")
tclObj.tclVar <- function(x){
z <- .External(.C_RTcl_ObjFromVar, names(x$env))
class(z) <- "tclObj"
z
}
"tclObj<-.tclVar" <- function(x, value){
value <- as.tclObj(value)
.External(.C_RTcl_AssignObjToVar, names(x$env), value)
x
}
tclvalue <- function(x) UseMethod("tclvalue")
"tclvalue<-" <- function(x, value) UseMethod("tclvalue<-")
tclvalue.tclVar <- function(x) tclvalue(tclObj(x))
tclvalue.tclObj <- function(x) .External(.C_RTcl_StringFromObj, x)
print.tclObj <- function(x,...) {
z <- tclvalue(x)
if (length(z)) cat("<Tcl>", z, "\n")
invisible(x)
}
"tclvalue<-.tclVar" <- function(x, value) {
name <- names(unclass(x)$env)
tcl("set", name, value)
x
}
tclvalue.default <- function(x) tclvalue(tcl("set", as.character(x)))
"tclvalue<-.default" <- function(x, value) {
name <- as.character(x)
tcl("set", name, value)
x
}
as.character.tclVar <- function(x, ...) names(unclass(x)$env)
as.character.tclObj <- function(x, ...)
.External(.C_RTcl_ObjAsCharVector, x)
as.double.tclObj <- function(x, ...)
.External(.C_RTcl_ObjAsDoubleVector, x)
as.integer.tclObj <- function(x, ...)
.External(.C_RTcl_ObjAsIntVector, x)
as.logical.tclObj <- function(x, ...)
as.logical(.External(.C_RTcl_ObjAsIntVector, x))
as.raw.tclObj <- function(x, ...)
.External(.C_RTcl_ObjAsRawVector, x)
is.tclObj <- function(x) inherits(x, "tclObj")
as.tclObj <- function(x, drop = FALSE) {
if (is.tclObj(x)) return(x)
z <- switch(storage.mode(x),
character = .External(.C_RTcl_ObjFromCharVector, x, drop),
double = .External(.C_RTcl_ObjFromDoubleVector, x,drop),
integer = .External(.C_RTcl_ObjFromIntVector, x, drop),
logical = .External(.C_RTcl_ObjFromIntVector, as.integer(x), drop),
raw = .External(.C_RTcl_ObjFromRawVector, x),
stop(gettextf("cannot handle object of mode '%s'",
storage.mode(x)), domain = NA)
)
class(z) <- "tclObj"
z
}
tclServiceMode <- function(on = NULL)
.External(.C_RTcl_ServiceMode, as.logical(on))
.TkRoot <- .Tk.newwin("")
tclvar <- structure(list(), class = "tclvar")
.TkRoot$env$TclVarCount <- 0
tkwidget <- function (parent, type, ...)
{
win <- .Tk.subwin(parent)
tcl(type, win, ...)
win
}
tkbutton <- function(parent, ...) tkwidget(parent, "button", ...)
tkcanvas <- function(parent, ...) tkwidget(parent, "canvas", ...)
tkcheckbutton <- function(parent, ...) tkwidget(parent, "checkbutton", ...)
tkentry <- function(parent, ...) tkwidget(parent, "entry", ...)
tkframe <- function(parent, ...) tkwidget(parent, "frame", ...)
tklabel <- function(parent, ...) tkwidget(parent, "label", ...)
tklistbox <- function(parent, ...) tkwidget(parent, "listbox", ...)
tkmenu <- function(parent, ...) tkwidget(parent, "menu", ...)
tkmenubutton <- function(parent, ...) tkwidget(parent, "menubutton", ...)
tkmessage <- function(parent, ...) tkwidget(parent, "message", ...)
tkradiobutton <- function(parent, ...) tkwidget(parent, "radiobutton", ...)
tkscale <- function(parent, ...) tkwidget(parent, "scale", ...)
tkscrollbar <- function(parent, ...) tkwidget(parent, "scrollbar", ...)
tktext <- function(parent, ...) tkwidget(parent, "text", ...)
ttkbutton <- function(parent, ...) tkwidget(parent, "ttk::button", ...)
ttkcheckbutton <- function(parent, ...) tkwidget(parent, "ttk::checkbutton", ...)
ttkcombobox <- function(parent, ...) tkwidget(parent, "ttk::combobox", ...)
ttkentry <- function(parent, ...) tkwidget(parent, "ttk::entry", ...)
ttkframe <- function(parent, ...) tkwidget(parent, "ttk::frame", ...)
ttklabel <- function(parent, ...) tkwidget(parent, "ttk::label", ...)
ttklabelframe <- function(parent, ...) tkwidget(parent, "ttk::labelframe", ...)
ttkmenubutton <- function(parent, ...) tkwidget(parent, "ttk::menubutton", ...)
ttknotebook <- function(parent, ...) tkwidget(parent, "ttk::notebook", ...)
ttkpanedwindow <- function(parent, ...) tkwidget(parent, "ttk::panedwindow", ...)
ttkprogressbar <- function(parent, ...) tkwidget(parent, "ttk::progressbar", ...)
ttkradiobutton <- function(parent, ...) tkwidget(parent, "ttk::radiobutton", ...)
ttkscale <- function(parent, ...) tkwidget(parent, "ttk::scale", ...)
ttkscrollbar <- function(parent, ...) tkwidget(parent, "ttk::scrollbar", ...)
ttkseparator <- function(parent, ...) tkwidget(parent, "ttk::separator", ...)
ttksizegrip <- function(parent, ...) tkwidget(parent, "ttk::sizegrip", ...)
ttkspinbox <- function(parent, ...) tkwidget(parent, "ttk::spinbox", ...)
ttktreeview <- function(parent, ...) tkwidget(parent, "ttk::treeview", ...)
tktoplevel <- function(parent = .TkRoot,...) {
w <- tkwidget(parent,"toplevel",...)
ID <- .Tk.ID(w)
tkbind(w, "<Destroy>",
function() {
if (exists(ID, envir = parent$env, inherits = FALSE))
rm(list = ID, envir = parent$env)
tkbind(w, "<Destroy>","")
})
utils::process.events()
w
}
tcl <- function(...) .Tcl.objv(.Tcl.args.objv(...))
tktitle <- function(x) tcl("wm", "title", x)
"tktitle<-" <- function(x, value) {
tcl("wm", "title", x, value)
x
}
tkbell <- function(...) tcl("bell", ...)
tkbind <- function(...) tcl("bind", ...)
tkbindtags <- function(...) tcl("bindtags", ...)
tkfocus <- function(...) tcl("focus", ...)
tklower <- function(...) tcl("lower", ...)
tkraise <- function(...) tcl("raise", ...)
tkclipboard.append <- function(...) tcl("clipboard", "append", ...)
tkclipboard.clear <- function(...) tcl("clipboard", "clear", ...)
tkevent.add <- function(...) tcl("event", "add", ...)
tkevent.delete <- function(...) tcl("event", "delete", ...)
tkevent.generate <- function(...) tcl("event", "generate", ...)
tkevent.info <- function(...) tcl("event", "info", ...)
tkfont.actual <- function(...) tcl("font", "actual", ...)
tkfont.configure <- function(...) tcl("font", "configure", ...)
tkfont.create <- function(...) tcl("font", "create", ...)
tkfont.delete <- function(...) tcl("font", "delete", ...)
tkfont.families <- function(...) tcl("font", "families", ...)
tkfont.measure <- function(...) tcl("font", "measure", ...)
tkfont.metrics <- function(...) tcl("font", "metrics", ...)
tkfont.names <- function(...) tcl("font", "names", ...)
tkgrab <- function(...) tcl("grab", ...)
tkgrab.current <- function(...) tcl("grab", "current", ...)
tkgrab.release <- function(...) tcl("grab", "release", ...)
tkgrab.set <- function(...) tcl("grab", "set", ...)
tkgrab.status <- function(...) tcl("grab", "status", ...)
tkimage.create <- function(...) tcl("image", "create", ...)
tkimage.delete <- function(...) tcl("image", "delete", ...)
tkimage.height <- function(...) tcl("image", "height", ...)
tkimage.inuse <- function(...) tcl("image", "inuse", ...)
tkimage.names <- function(...) tcl("image", "names", ...)
tkimage.type <- function(...) tcl("image", "type", ...)
tkimage.types <- function(...) tcl("image", "types", ...)
tkimage.width <- function(...) tcl("image", "width", ...)
tkXselection.clear <- function(...) tcl("selection", "clear", ...)
tkXselection.get <- function(...) tcl("selection", "get", ...)
tkXselection.handle <- function(...) tcl("selection", "handle", ...)
tkXselection.own <- function(...) tcl("selection", "own", ...)
tkwait.variable <- function(...) tcl("tkwait", "variable", ...)
tkwait.visibility <- function(...) tcl("tkwait", "visibility", ...)
tkwait.window <- function(...) tcl("tkwait", "window", ...)
tkgetOpenFile <- function(...) tcl("tk_getOpenFile", ...)
tkgetSaveFile <- function(...) tcl("tk_getSaveFile", ...)
tkchooseDirectory <- function(...) tcl("tk_chooseDirectory", ...)
tkmessageBox <- function(...) tcl("tk_messageBox", ...)
tkdialog <- function(...) tcl("tk_dialog", ...)
tkpopup <- function(...) tcl("tk_popup", ...)
tclfile.tail <- function(...) tcl("file", "tail", ...)
tclfile.dir <- function(...) tcl("file", "dir", ...)
tclopen <- function(...) tcl("open", ...)
tclclose <- function(...) tcl("close", ...)
tclputs <- function(...) tcl("puts", ...)
tclread <- function(...) tcl("read", ...)
tkwinfo <- function(...) tcl("winfo", ...)
tkwm.aspect <- function(...) tcl("wm", "aspect", ...)
tkwm.client <- function(...) tcl("wm", "client", ...)
tkwm.colormapwindows <- function(...) tcl("wm", "colormapwindows", ...)
tkwm.command <- function(...) tcl("wm", "command", ...)
tkwm.deiconify <- function(...) tcl("wm", "deiconify", ...)
tkwm.focusmodel <- function(...) tcl("wm", "focusmodel", ...)
tkwm.frame <- function(...) tcl("wm", "frame", ...)
tkwm.geometry <- function(...) tcl("wm", "geometry", ...)
tkwm.grid <- function(...) tcl("wm", "grid", ...)
tkwm.group <- function(...) tcl("wm", "group", ...)
tkwm.iconbitmap <- function(...) tcl("wm", "iconbitmap", ...)
tkwm.iconify <- function(...) tcl("wm", "iconify", ...)
tkwm.iconmask <- function(...) tcl("wm", "iconmask", ...)
tkwm.iconname <- function(...) tcl("wm", "iconname ", ...)
tkwm.iconposition <- function(...) tcl("wm", "iconposition", ...)
tkwm.iconwindow <- function(...) tcl("wm", "iconwindow ", ...)
tkwm.maxsize <- function(...) tcl("wm", "maxsize", ...)
tkwm.minsize <- function(...) tcl("wm", "minsize", ...)
tkwm.overrideredirect <- function(...) tcl("wm", "overrideredirect", ...)
tkwm.positionfrom <- function(...) tcl("wm", "positionfrom", ...)
tkwm.protocol <- function(...) tcl("wm", "protocol", ...)
tkwm.resizable <- function(...) tcl("wm", "resizable", ...)
tkwm.sizefrom <- function(...) tcl("wm", "sizefrom", ...)
tkwm.state <- function(...) tcl("wm", "state", ...)
tkwm.title <- function(...) tcl("wm", "title", ...)
tkwm.transient <- function(...) tcl("wm", "transient", ...)
tkwm.withdraw <- function(...) tcl("wm", "withdraw", ...)
tkgrid <- function(...) tcl("grid", ...)
tkgrid.bbox <- function(...) tcl("grid", "bbox", ...)
tkgrid.columnconfigure <- function(...) tcl("grid", "columnconfigure", ...)
tkgrid.configure <- function(...) tcl("grid", "configure", ...)
tkgrid.forget <- function(...) tcl("grid", "forget", ...)
tkgrid.info <- function(...) tcl("grid", "info", ...)
tkgrid.location <- function(...) tcl("grid", "location", ...)
tkgrid.propagate <- function(...) tcl("grid", "propagate", ...)
tkgrid.rowconfigure <- function(...) tcl("grid", "rowconfigure", ...)
tkgrid.remove <- function(...) tcl("grid", "remove", ...)
tkgrid.size <- function(...) tcl("grid", "size", ...)
tkgrid.slaves <- function(...) tcl("grid", "slaves", ...)
tkpack <- function(...) tcl("pack", ...)
tkpack.configure <- function(...) tcl("pack", "configure", ...)
tkpack.forget <- function(...) tcl("pack", "forget", ...)
tkpack.info <- function(...) tcl("pack", "info", ...)
tkpack.propagate <- function(...) tcl("pack", "propagate", ...)
tkpack.slaves <- function(...) tcl("pack", "slaves", ...)
tkplace <- function(...) tcl("place", ...)
tkplace.configure <- function(...) tcl("place", "configure", ...)
tkplace.forget <- function(...) tcl("place", "forget", ...)
tkplace.info <- function(...) tcl("place", "info", ...)
tkplace.slaves <- function(...) tcl("place", "slaves", ...)
tkactivate <- function(widget, ...) tcl(widget, "activate", ...)
tkadd <- function(widget, ...) tcl(widget, "add", ...)
tkaddtag <- function(widget, ...) tcl(widget, "addtag", ...)
tkbbox <- function(widget, ...) tcl(widget, "bbox", ...)
tkcanvasx <- function(widget, ...) tcl(widget, "canvasx", ...)
tkcanvasy <- function(widget, ...) tcl(widget, "canvasy", ...)
tkcget <- function(widget, ...) tcl(widget, "cget", ...)
tkcompare <- function(widget, ...) tcl(widget, "compare", ...)
tkconfigure <- function(widget, ...) tcl(widget, "configure", ...)
tkcoords <- function(widget, ...) tcl(widget, "coords", ...)
tkcreate <- function(widget, ...) tcl(widget, "create", ...)
tkcurselection <- function(widget, ...) tcl(widget, "curselection", ...)
tkdchars <- function(widget, ...) tcl(widget, "dchars", ...)
tkdebug <- function(widget, ...) tcl(widget, "debug", ...)
tkdelete <- function(widget, ...) tcl(widget, "delete", ...)
tkdelta <- function(widget, ...) tcl(widget, "delta", ...)
tkdeselect <- function(widget, ...) tcl(widget, "deselect", ...)
tkdlineinfo <- function(widget, ...) tcl(widget, "dlineinfo", ...)
tkdtag <- function(widget, ...) tcl(widget, "dtag", ...)
tkdump <- function(widget, ...) tcl(widget, "dump", ...)
tkentrycget <- function(widget, ...) tcl(widget, "entrycget", ...)
tkentryconfigure <- function(widget, ...) tcl(widget, "entryconfigure", ...)
tkfind <- function(widget, ...) tcl(widget, "find", ...)
tkflash <- function(widget, ...) tcl(widget, "flash", ...)
tkfraction <- function(widget, ...) tcl(widget, "fraction", ...)
tkget <- function(widget, ...) tcl(widget, "get", ...)
tkgettags <- function(widget, ...) tcl(widget, "gettags", ...)
tkicursor <- function(widget, ...) tcl(widget, "icursor", ...)
tkidentify <- function(widget, ...) tcl(widget, "identify", ...)
tkindex <- function(widget, ...) tcl(widget, "index", ...)
tkinsert <- function(widget, ...) tcl(widget, "insert", ...)
tkinvoke <- function(widget, ...) tcl(widget, "invoke", ...)
tkitembind <- function(widget, ...) tcl(widget, "bind", ...)
tkitemcget <- function(widget, ...) tcl(widget, "itemcget", ...)
tkitemconfigure <- function(widget, ...) tcl(widget, "itemconfigure", ...)
tkitemfocus <- function(widget, ...) tcl(widget, "focus", ...)
tkitemlower <- function(widget, ...) tcl(widget, "lower", ...)
tkitemraise <- function(widget, ...) tcl(widget, "raise", ...)
tkitemscale <- function(widget, ...) tcl(widget, "scale", ...)
tkmark.gravity <- function(widget, ...) tcl(widget, "mark", "gravity", ...)
tkmark.names <- function(widget, ...) tcl(widget, "mark", "names", ...)
tkmark.next <- function(widget, ...) tcl(widget, "mark", "next", ...)
tkmark.previous <- function(widget, ...) tcl(widget, "mark", "previous", ...)
tkmark.set <- function(widget, ...) tcl(widget, "mark", "set", ...)
tkmark.unset <- function(widget, ...) tcl(widget, "mark", "unset", ...)
tkmove <- function(widget, ...) tcl(widget, "move", ...)
tknearest <- function(widget, ...) tcl(widget, "nearest", ...)
tkpost <- function(widget, ...) tcl(widget, "post", ...)
tkpostcascade <- function(widget, ...) tcl(widget, "postcascade", ...)
tkpostscript <- function(widget, ...) tcl(widget, "postscript", ...)
tkscan.dragto <- function(widget, ...) tcl(widget, "scan", "dragto", ...)
tkscan.mark <- function(widget, ...) tcl(widget, "scan", "mark", ...)
tksearch <- function(widget, ...) tcl(widget, "search", ...)
tksee <- function(widget, ...) tcl(widget, "see", ...)
tkselect <- function(widget, ...) tcl(widget, "select", ...)
tkselection.adjust <- function(widget, ...)
tcl(widget, "selection", "adjust", ...)
tkselection.anchor <- function(widget, ...)
tcl(widget, "selection", "anchor", ...)
tkselection.clear <- function(widget, ...)
tcl(widget, "selection", "clear", ...)
tkselection.from <- function(widget, ...)
tcl(widget, "selection", "from", ...)
tkselection.includes <- function(widget, ...)
tcl(widget, "selection", "includes", ...)
tkselection.present <- function(widget, ...)
tcl(widget, "selection", "present", ...)
tkselection.range <- function(widget, ...)
tcl(widget, "selection", "range", ...)
tkselection.set <- function(widget, ...)
tcl(widget, "selection", "set", ...)
tkselection.to <- function(widget,...)
tcl(widget, "selection", "to", ...)
tkset <- function(widget, ...) tcl(widget, "set", ...)
tksize <- function(widget, ...) tcl(widget, "size", ...)
tktoggle <- function(widget, ...) tcl(widget, "toggle", ...)
tktag.add <- function(widget, ...) tcl(widget, "tag", "add", ...)
tktag.bind <- function(widget, ...) tcl(widget, "tag", "bind", ...)
tktag.cget <- function(widget, ...) tcl(widget, "tag", "cget", ...)
tktag.configure <- function(widget, ...) tcl(widget, "tag", "configure", ...)
tktag.delete <- function(widget, ...) tcl(widget, "tag", "delete", ...)
tktag.lower <- function(widget, ...) tcl(widget, "tag", "lower", ...)
tktag.names <- function(widget, ...) tcl(widget, "tag", "names", ...)
tktag.nextrange <- function(widget, ...) tcl(widget, "tag", "nextrange", ...)
tktag.prevrange <- function(widget, ...) tcl(widget, "tag", "prevrange", ...)
tktag.raise <- function(widget, ...) tcl(widget, "tag", "raise", ...)
tktag.ranges <- function(widget, ...) tcl(widget, "tag", "ranges", ...)
tktag.remove <- function(widget, ...) tcl(widget, "tag", "remove", ...)
tktype <- function(widget, ...) tcl(widget, "type", ...)
tkunpost <- function(widget, ...) tcl(widget, "unpost", ...)
tkwindow.cget <- function(widget, ...) tcl(widget, "window", "cget", ...)
tkwindow.configure <- function(widget, ...) tcl(widget,"window","configure",...)
tkwindow.create <- function(widget, ...) tcl(widget, "window", "create", ...)
tkwindow.names <- function(widget, ...) tcl(widget, "window", "names", ...)
tkxview <- function(widget, ...) tcl(widget, "xview", ...)
tkxview.moveto <- function(widget, ...) tcl(widget, "xview", "moveto", ...)
tkxview.scroll <- function(widget, ...) tcl(widget, "xview", "scroll", ...)
tkyposition <- function(widget, ...) tcl(widget, "ypositions", ...)
tkyview <- function(widget, ...) tcl(widget, "yview", ...)
tkyview.moveto <- function(widget, ...) tcl(widget, "yview", "moveto", ...)
tkyview.scroll <- function(widget, ...) tcl(widget, "yview", "scroll", ...)
tkpager <- function(file, header, title, delete.file)
{
title <- paste(title, header)
for ( i in seq_along(file) ) {
zfile <- file[[i]]
tt <- tktoplevel()
tkwm.title(tt,
if (length(title)) title[(i-1L) %% length(title)+1L] else "")
txt <- tktext(tt, bg = "grey90")
scr <- tkscrollbar(tt, repeatinterval = 5,
command = function(...) tkyview(txt,...))
tkconfigure(txt, yscrollcommand = function(...) tkset(scr,...))
tkpack(txt, side = "left", fill = "both", expand = TRUE)
tkpack(scr, side = "right", fill = "y")
chn <- tcl("open", zfile)
tkinsert(txt, "end", gsub("_\b","",tclvalue(tcl("read", chn))))
tcl("close", chn)
tkconfigure(txt, state = "disabled")
tkmark.set(txt, "insert", "0.0")
tkfocus(txt)
if (delete.file) tcl("file", "delete", zfile)
}
}
|
PLIsuperquantile_multivar = function(order,x,y,inputs,deltasvector,InputDistributions,samedelta=TRUE,percentage=TRUE,nboot=0,conf=0.95,bootsample=TRUE,bias=TRUE){
nmbredevariables=dim(x)[2]
nmbredepoints=dim(x)[1]
nmbrededeltas=length(deltasvector)
I <- J <- ICIinf <- ICIsup <- JCIinf <- JCIsup <- matrix(0,ncol=nmbrededeltas,nrow=nmbrededeltas)
simpson_v2 <- function(fun, a, b, n=100) {
if (a == -Inf & b == Inf) {
f <- function(t) (fun((1-t)/t) + fun((t-1)/t))/t^2
s <- simpson_v2(f, 0, 1, n)
} else if (a == -Inf & b != Inf) {
f <- function(t) fun(b-(1-t)/t)/t^2
s <- simpson_v2(f, 0, 1, n)
} else if (a != -Inf & b == Inf) {
f <- function(t) fun(a+(1-t)/t)/t^2
s <- simpson_v2(f, 0, 1, n)
} else {
h <- (b-a)/n
x <- seq(a, b, by=h)
y <- fun(x)
y[is.nan(y)]=0
s <- y[1] + y[n+1] + 2*sum(y[seq(2,n,by=2)]) + 4 *sum(y[seq(3,n-1, by=2)])
s <- s*h/3
}
return(s)
}
transinverse <- function(a,b,c) {
if (a > c) ans <- a / b - 1
else ans <- 1 - b / a
return(ans)
}
quantilehat <- quantile(y,order)
sqhat <- mean( y[y >= quantilehat] )
ys = sort(y,index.return=T)
xs = x[ys$ix,]
lqid=matrix(0,nrow=nmbrededeltas,ncol=nmbrededeltas)
i = inputs[1]
if (nboot > 0){
lqidb=array(0,dim=c(nmbrededeltas,nmbrededeltas,nboot))
sqhatb=NULL
}
Loi.Entree=InputDistributions[[i]]
if(!samedelta){
moy=Loi.Entree[[3]][1]
sigma=Loi.Entree[[3]][2]
vdd1=moy+deltasvector*sigma
} else {
vdd1=deltasvector
}
if ( Loi.Entree[[1]] =="norm"||Loi.Entree[[1]] =="lnorm"){
mu1=Loi.Entree[[2]][1]
sigma1=Loi.Entree[[2]][2]
phi1=function(tau){mu1*tau+(sigma1^2*tau^2)/2}
vlambda1=(vdd1-mu1)/sigma1^2
}
if ( Loi.Entree[[1]] =="unif"){
a1=Loi.Entree[[2]][1]
b1=Loi.Entree[[2]][2]
mu1=(a1+b1)/2
Mx1=function(tau){
if (tau==0){ 1 }
else {(exp(tau*b1)-exp(tau*a1) )/ ( tau * (b1-a1))}
}
phi1=function(tau){
if(tau==0){0}
else { log ( Mx1(tau))}
}
phit=function(tau){
if (tau==0){0}
else {log(expm1(tau*(b1-a1)) / (tau*(b1-a1)))}
}
gt=function(tau,delta){
phit(tau) -(delta-a1)*tau
}
vlambda1=c();
for (l in 1:nmbrededeltas){
tm=nlm(gt,0,vdd1[l])$estimate
vlambda1[l]=tm
}
}
if ( Loi.Entree[[1]] =="triangle"){
a1=Loi.Entree[[2]][1]
b1=Loi.Entree[[2]][2]
c1=Loi.Entree[[2]][3]
mu1=(a1+b1+c1)/3
Mx1=function(tau){
if (tau !=0){
dessus=(b1-c1)*exp(a1*tau)-(b1-a1)*exp(c1*tau)+(c1-a1)*exp(b1*tau)
dessous=(b1-c1)*(b1-a1)*(c1-a1)*tau^2
return ( 2*dessus/dessous)
} else {
return (1)
}
}
phi1=function(tau){return (log (Mx1(tau)))}
phit=function(tau){
if(tau!=0){
dessus=(a1-b1)*expm1((c1-a1)*tau)+(c1-a1)*expm1((b1-a1)*tau)
dessous=(b1-c1)*(b1-a1)*(c1-a1)*tau^2
return( log (2*dessus/dessous) )
} else { return (0)}
}
gt=function(tau,delta){
phit(tau)-(delta-a1)*tau
}
vlambda1=c();
for (l in 1:nmbrededeltas){
tm=nlm(gt,0,vdd1[l])$estimate
vlambda1[l]=tm
}
}
if ( Loi.Entree[[1]] =="tnorm"){
mu1=Loi.Entree[[2]][1]
sigma1=Loi.Entree[[2]][2]
min1=Loi.Entree[[2]][3]
phi1=function(tau){
mpls2=mu1+tau*sigma1^2
Fa1=pnorm(min1,mu1,sigma1)
Fia1=pnorm(min1,mpls2,sigma1)
lMx=mu1*tau+1/2*sigma1^2*tau^2 - (1-Fa1) + (1-Fia1)
return(lMx)
}
g=function(tau,delta){
if (tau == 0 ){ return(0)
} else { return(phi1(tau) -delta*tau)}
}
vlambda1=c();
for (l in 1:nmbrededeltas){
tm=nlm(g,0,vdd1[l])$estimate
vlambda1[l]=tm
}
}
i = inputs[2]
Loi.Entree=InputDistributions[[i]]
if(!samedelta){
moy=Loi.Entree[[3]][1]
sigma=Loi.Entree[[3]][2]
vdd2=moy+deltasvector*sigma
} else {
vdd2=deltasvector
}
if ( Loi.Entree[[1]] =="norm"||Loi.Entree[[1]] =="lnorm"){
mu2=Loi.Entree[[2]][1]
sigma2=Loi.Entree[[2]][2]
phi2=function(tau){mu2*tau+(sigma2^2*tau^2)/2}
vlambda2=(vdd2-mu2)/sigma2^2
}
if ( Loi.Entree[[1]] =="unif"){
a2=Loi.Entree[[2]][1]
b2=Loi.Entree[[2]][2]
mu2=(a2+b2)/2
Mx2=function(tau){
if (tau==0){ 1 }
else {(exp(tau*b2)-exp(tau*a2) )/ ( tau * (b2-a2))}
}
phi2=function(tau){
if(tau==0){0}
else { log ( Mx2(tau))}
}
phit=function(tau){
if (tau==0){0}
else {log(expm1(tau*(b2-a2)) / (tau*(b2-a2)))}
}
gt=function(tau,delta){
phit(tau) -(delta-a2)*tau
}
vlambda2=c();
for (l in 1:nmbrededeltas){
tm=nlm(gt,0,vdd2[l])$estimate
vlambda2[l]=tm
}
}
if ( Loi.Entree[[1]] =="triangle"){
a2=Loi.Entree[[2]][1]
b2=Loi.Entree[[2]][2]
c2=Loi.Entree[[2]][3]
mu2=(a2+b2+c2)/3
Mx2=function(tau){
if (tau !=0){
      dessus=(b2-c2)*exp(a2*tau)-(b2-a2)*exp(c2*tau)+(c2-a2)*exp(b2*tau)
dessous=(b2-c2)*(b2-a2)*(c2-a2)*tau^2
return ( 2*dessus/dessous)
} else {
return (1)
}
}
phi2=function(tau){return (log (Mx2(tau)))}
phit=function(tau){
if(tau!=0){
dessus=(a2-b2)*expm1((c2-a2)*tau)+(c2-a2)*expm1((b2-a2)*tau)
dessous=(b2-c2)*(b2-a2)*(c2-a2)*tau^2
return( log (2*dessus/dessous) )
} else { return (0)}
}
gt=function(tau,delta){
phit(tau)-(delta-a2)*tau
}
vlambda2=c();
for (l in 1:nmbrededeltas){
tm=nlm(gt,0,vdd2[l])$estimate
vlambda2[l]=tm
}
}
if ( Loi.Entree[[1]] =="tnorm"){
mu2=Loi.Entree[[2]][1]
sigma2=Loi.Entree[[2]][2]
min2=Loi.Entree[[2]][3]
phi2=function(tau){
mpls2=mu2+tau*sigma2^2
Fa2=pnorm(min2,mu2,sigma2)
Fia2=pnorm(min2,mpls2,sigma2)
lMx=mu2*tau+1/2*sigma2^2*tau^2 - (1-Fa2) + (1-Fia2)
return(lMx)
}
g=function(tau,delta){
if (tau == 0 ){ return(0)
} else { return(phi2(tau) -delta*tau)}
}
vlambda2=c();
for (l in 1:nmbrededeltas){
tm=nlm(g,0,vdd2[l])$estimate
vlambda2[l]=tm
}
}
pti1=rep(0,nmbrededeltas)
pti2=rep(0,nmbrededeltas)
for (K1 in 1:nmbrededeltas){
for (K2 in 1:nmbrededeltas){
if ((vdd1[K1]!=mu1) | (vdd2[K2]!=mu2)){
res=NULL ; respts=NULL
pti1[K1]=phi1(vlambda1[K1])
pti2[K2]=phi2(vlambda2[K2])
for (j in 1:nmbredepoints){
res[j]=exp(vlambda1[K1]*xs[j,inputs[1]]-pti1[K1] + vlambda2[K2]*xs[j,inputs[2]]-pti2[K2])
respts[j]=exp(vlambda1[K1]*x[j,inputs[1]]-pti1[K1] + vlambda2[K2]*x[j,inputs[2]]-pti2[K2])
}
sum_res = sum(res)
kid = 1
res1 = res[1]
res2 = res1/sum_res
while (res2 < order){
kid = kid + 1
res1 = res1 + res[kid]
res2 = res1/sum_res
}
if (bias){ lqid[K1,K2] = mean(y[y >= ys$x[kid]])
} else lqid[K1,K2] = mean(y * respts * ( y >= ys$x[kid] ) / (1-order))
} else lqid[K1,K2] = sqhat
}
}
if (nboot >0){
for (b in 1:nboot){
ib <- sample(1:length(y),replace=TRUE)
xb <- x[ib,]
yb <- y[ib]
quantilehatb <- quantile(yb,order)
sqhatb <- c(sqhatb, mean( yb * (yb >= quantilehatb) / ( 1 - order )))
ysb = sort(yb,index.return=T)
xsb = xb[ysb$ix,]
for (K1 in 1:nmbrededeltas){
for (K2 in 1:nmbrededeltas){
if ((vdd1[K1]!=mu1) | (vdd2[K2]!=mu2)){
res=NULL ; respts=NULL
for (j in 1:nmbredepoints){
res[j]=exp(vlambda1[K1]*xsb[j,inputs[1]]-pti1[K1] + vlambda2[K2]*xsb[j,inputs[2]]-pti2[K2])
respts[j]=exp(vlambda1[K1]*xb[j,inputs[1]]-pti1[K1] + vlambda2[K2]*xb[j,inputs[2]]-pti2[K2])
}
sum_res = sum(res)
kid = 1
res1 = res[1]
res2 = res1/sum_res
while (res2 < order){
kid = kid + 1
res1 = res1 + res[kid]
res2 = res1/sum_res
}
if (bias){ lqidb[K1,K2,b] = mean(yb[yb >= ysb$x[kid]])
} else lqidb[K1,K2,b] = mean(yb * respts * (yb >= ysb$x[kid] ) / (1-order))
} else lqidb[K1,K2,b] = sqhatb[b]
}
}
}
}
for (i in 1:nmbrededeltas){
for (j in 1:nmbrededeltas){
J[i,j]=lqid[i,j]
if (percentage==FALSE) I[i,j]=transinverse(lqid[i,j],sqhat,sqhat)
else I[i,j]=lqid[i,j]/sqhat-1
if (nboot > 0){
sqinf <- quantile(lqidb[i,j,],(1-conf)/2)
sqsup <- quantile(lqidb[i,j,],(1+conf)/2)
JCIinf[i,j]=sqinf
JCIsup[i,j]=sqsup
sqb <- mean(sqhatb)
if (percentage==FALSE){
if (bootsample){
ICIinf[i,j]=transinverse(sqinf,sqb,sqb)
ICIsup[i,j]=transinverse(sqsup,sqb,sqhat)
} else{
ICIinf[i,j]=quantile(transinverse(lqidb[i,j,],sqhatb,sqhatb),(1-conf)/2)
ICIsup[i,j]=quantile(transinverse(lqidb[i,j,],sqhatb,sqhatb),(1+conf)/2)
}
} else {
if (bootsample){
ICIinf[i,j]=sqinf/sqb-1
ICIsup[i,j]=sqsup/sqb-1
} else{
ICIinf[i,j]=quantile((lqidb[i,j,]/sqhatb-1),(1-conf)/2)
ICIsup[i,j]=quantile((lqidb[i,j,]/sqhatb-1),(1+conf)/2)
}
}
}
}
}
res <- list(PLI = I, PLICIinf = ICIinf, PLICIsup = ICIsup, quantile = J, quantileCIinf = JCIinf, quantileCIsup = JCIsup)
return(res)
}
|
context("Assemble dashboard")
test_that("Assembling dashboard is correct",{
temp <- tempfile(fileext = ".Rmd")
i2dashboard(datadir = "input-data") %>%
assemble(file = temp) -> dashboard
expect_s4_class(dashboard, "i2dashboard")
rmd <- readLines(temp)
new_rmd <- rmd[-31]
writeLines(new_rmd, temp)
ref_hash <- digest::digest(file = temp, serialize = FALSE, seed = 100)
expect_true(stringi::stri_detect_regex(ref_hash, "(1055ed29147446902eb224e7c17c52af|442bef06307bb6dea490435230f7e22e|11a5a0938212656d1fbb02496fb7cfa0|48152155fd455e7642da35e03f404dd5)"))
expect_warning(assemble(dashboard, file = temp, pages = "page1"), "i2dashboard dashboard does not contain a page named 'page1'")
})
|
summary.kppm <- function(object, ..., quick=FALSE) {
nama <- names(object)
result <- unclass(object)[!(nama %in% c("X", "po", "call", "callframe"))]
if(is.null(result$isPCP)) result$isPCP <- TRUE
Fit <- object$Fit
opt <- switch(Fit$method,
mincon = Fit$mcfit$opt,
clik =,
clik2 = Fit$clfit,
palm = Fit$clfit,
adapcl = Fit$cladapfit,
warning(paste("Unrecognised fitting method",
sQuote(Fit$method)))
)
if(Fit$method != "adapcl") {
result$optim.converged <- optimConverged(opt)
result$optim.status <- optimStatus(opt)
result$optim.nsteps <- optimNsteps(opt)
}
result$trend <- summary(as.ppm(object), ..., quick=quick)
if(isFALSE(quick)) {
theta <- coef(object)
if(length(theta) > 0) {
vc <- vcov(object, matrix.action="warn")
if(!is.null(vc)) {
se <- if(is.matrix(vc)) sqrt(diag(vc)) else
if(length(vc) == 1) sqrt(vc) else NULL
}
if(!is.null(se)) {
two <- qnorm(0.975)
lo <- theta - two * se
hi <- theta + two * se
zval <- theta/se
pval <- 2 * pnorm(abs(zval), lower.tail=FALSE)
psig <- cut(pval, c(0,0.001, 0.01, 0.05, 1),
labels=c("***", "**", "*", " "),
include.lowest=TRUE)
result$coefs.SE.CI <- data.frame(Estimate=theta, S.E.=se,
CI95.lo=lo, CI95.hi=hi,
Ztest=psig,
Zval=zval)
}
}
}
if(object$isPCP) result$psib <- mean(psib(object))
win <- as.owin(object, from="points")
vac <- varcount(object, B=win)
Lam <- integral(predict(object, window=win))
result$odi <- vac/Lam
class(result) <- "summary.kppm"
return(result)
}
coef.summary.kppm <- function(object, ...) {
return(object$coefs.SE.CI)
}
print.summary.kppm <- function(x, ...) {
terselevel <- spatstat.options('terse')
digits <- getOption('digits')
isPCP <- x$isPCP
splat(if(x$stationary) "Stationary" else "Inhomogeneous",
if(isPCP) "cluster" else "Cox",
"point process model")
if(waxlyrical('extras', terselevel) && nchar(x$Xname) < 20)
splat("Fitted to point pattern dataset", sQuote(x$Xname))
Fit <- x$Fit
if(waxlyrical('gory', terselevel)) {
switch(Fit$method,
mincon = {
splat("Fitted by minimum contrast")
splat("\tSummary statistic:", Fit$StatName)
print(Fit$mcfit)
},
clik =,
clik2 = {
splat("Fitted by maximum second order composite likelihood")
splat("\trmax =", Fit$rmax)
if(!is.null(wtf <- Fit$weightfun)) {
a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
splat("\tweight function:", a)
}
printStatus(x$optim.status)
},
adapcl = {
splat("Fitted by adaptive second order composite likelihood")
splat("\tepsilon =", x$Fit$epsilon)
if(!is.null(wtf <- x$Fit$weightfun)) {
a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
splat("\tweight function:", a)
}
},
palm = {
splat("Fitted by maximum Palm likelihood")
splat("\trmax =", Fit$rmax)
if(!is.null(wtf <- Fit$weightfun)) {
a <- attr(wtf, "selfprint") %orifnull% pasteFormula(wtf)
splat("\tweight function:", a)
}
printStatus(x$optim.status)
},
warning(paste("Unrecognised fitting method", sQuote(Fit$method)))
)
}
parbreak()
splat("----------- TREND MODEL -----")
print(x$trend, ...)
tableentry <- spatstatClusterModelInfo(x$clusters)
parbreak()
splat("-----------",
if(isPCP) "CLUSTER" else "COX",
"MODEL",
"-----------")
splat("Model:", tableentry$printmodelname(x))
parbreak()
cm <- x$covmodel
if(!isPCP) {
splat("\tCovariance model:", cm$model)
margs <- cm$margs
if(!is.null(margs)) {
nama <- names(margs)
tags <- ifelse(nzchar(nama), paste(nama, "="), "")
tagvalue <- paste(tags, margs)
splat("\tCovariance parameters:",
paste(tagvalue, collapse=", "))
}
}
pa <- x$clustpar
if (!is.null(pa)) {
splat("Fitted",
if(isPCP) "cluster" else "covariance",
"parameters:")
print(pa, digits=digits)
}
if(!is.null(mu <- x$mu)) {
if(isPCP) {
splat("Mean cluster size: ",
if(!is.im(mu)) paste(signif(mu, digits), "points") else "[pixel image]")
} else {
splat("Fitted mean of log of random intensity:",
if(!is.im(mu)) signif(mu, digits) else "[pixel image]")
}
}
if(!is.null(cose <- x$coefs.SE.CI)) {
parbreak()
splat("Final standard error and CI")
splat("(allowing for correlation of",
if(isPCP) "cluster" else "Cox",
"process):")
print(cose)
}
psi <- x$psib
odi <- x$odi
if(!is.null(psi) || !is.null(odi)) {
parbreak()
splat("----------- cluster strength indices ---------- ")
if(!is.null(psi)) {
psi <- signif(psi, digits)
if(isTRUE(x$stationary)) {
splat("Sibling probability", psi)
} else splat("Mean sibling probability", psi)
}
if(!is.null(odi))
splat("Count overdispersion index (on original window):",
signif(odi, digits))
}
invisible(NULL)
}
|
varcovgecub<-function(ordinal,Y,W,X,bet,gama,omega,shelter){
Y<-as.matrix(Y);W<-as.matrix(W); X<-as.matrix(X);
if (ncol(W)==1){
W<-as.numeric(W)
}
if (ncol(Y)==1){
Y<-as.numeric(Y)
}
if (ncol(X)==1){
X<-as.numeric(X)
}
probi<-probgecub(ordinal,Y,W,X,bet,gama,omega,shelter);
vvi<-1/probi; dicotom<-ifelse(ordinal==shelter,1,0)
paii<-logis(Y,bet);
csii<-logis(W,gama); deltai<-logis(X,omega);
m<-length(levels(factor(ordinal,ordered=TRUE)))
bierrei<-bitgama(m,ordinal,W,gama);
YY<-cbind(1,Y);
WW<-cbind(1,W); XX<-cbind(1,X);
np<-NCOL(YY)+NCOL(WW)+NCOL(XX);
mconi<-m-ordinal-(m-1)*csii;
vettAA<-paii*(1-paii)*(1-deltai)*(bierrei-1/m)*vvi
AA <- Hadprod(YY,vettAA)
vettBB<-paii*(1-deltai)*mconi*bierrei*vvi;
BB <- Hadprod(WW,vettBB)
vettCC<-deltai*(dicotom-probi)*vvi
CC <- Hadprod(XX,vettCC);
dconi<- paii*(1-paii)*(1-2*paii)*(1-deltai)*(bierrei-1/m)*vvi;
gconi<- paii*(1-deltai)*bierrei*(mconi^2-(m-1)*csii)*(1-csii)*vvi;
lconi <- deltai*(1-2*deltai)*(dicotom-probi)*vvi;
econi <- paii*(1-paii)*(1-deltai)*(bierrei)*mconi*vvi;
fconi<- (-paii)*(1-paii)*deltai*(1-deltai)*(bierrei-1/m)*vvi;
hconi<-(-paii)*deltai*(1-deltai)*bierrei*mconi*vvi;
inf11<-t(AA)%*%AA-t(YY)%*%Hadprod(YY,dconi);
inf22<-t(BB)%*%BB-t(WW)%*%Hadprod(WW,gconi);
inf33<-t(CC)%*%CC-t(XX)%*%Hadprod(XX,lconi);
inf21<-t(BB)%*%AA-t(WW)%*%Hadprod(YY,econi);
inf31<-t(CC)%*%AA-t(XX)%*%Hadprod(YY,fconi);
inf32<-t(CC)%*%BB-t(XX)%*%Hadprod(WW,hconi);
inf12<-t(inf21); inf13<-t(inf31); inf23<-t(inf32);
matinf<-rbind(cbind(inf11,inf12,inf13),cbind(inf21,inf22,inf23),cbind(inf31,inf32,inf33));
  if(any(is.na(matinf))){
warning("ATTENTION: NAs produced")
varmat<-matrix(NA,nrow=np,ncol=np)
} else {
if(det(matinf)<=0){
warning("ATTENTION: Variance-covariance matrix NOT positive definite")
varmat<-matrix(NA,nrow=np,ncol=np)
} else {
varmat<-solve(matinf)
}
}
return(varmat)
}
|
miss_ind <- function(data, prefix = "miss_") {
indicators <- sapply(data, FUN = function(col) as.numeric(is.na(col)))
colnames(indicators) <- paste0(prefix, colnames(data))
  indicators <- indicators[, !colMeans(indicators) %in% c(0, 1), drop = FALSE]
out <- cbind(data, indicators)
return(out)
}
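# Example sketch with a toy data frame (hypothetical data):
# df <- data.frame(a = c(1, NA, 3), b = c(NA, NA, NA), c = 1:3)
# miss_ind(df)
# Only 'miss_a' is appended: 'b' (all missing) and 'c' (none missing) are
# dropped because their indicator columns are constant.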
|
pb.trimfill <- function (x, side, estimator = "L0", maxiter = 100, verbose = FALSE, ...){
if (!is.element("rma.uni", class(x)))
stop("Argument 'x' must be an object of class \"rma.uni\".")
if (!x$int.only)
stop("Trim-and-fill method only applicable for models without moderators.")
if (missing(side))
side <- NULL
estimator <- match.arg(estimator, c("L0", "R0", "Q0"))
if (x$k == 1)
stop("Stopped because k = 1.")
yi <- x$yi
vi <- x$vi
weights <- x$weights
ni <- x$ni
if (is.null(side)) {
res <- rma(yi, vi, weights = weights, mods = sqrt(vi),
intercept = TRUE, method = x$method, weighted = x$weighted, ...)
if (is.na(res$b[2])) res$b[2] <- 0
if (res$b[2] < 0) {
side <- "right"
}
else {
side <- "left"
}
}
else {
side <- match.arg(side, c("left", "right"))
}
if (side == "right") {
yi <- -1 * yi
}
idix <- sort(yi, index.return = TRUE)$ix
yi <- yi[idix]
vi <- vi[idix]
weights <- weights[idix]
ni <- ni[idix]
k <- length(yi)
k0.sav <- -1
k0 <- 0
iter <- 0
while (abs(k0 - k0.sav) > 0 & iter <= maxiter) {
k0.sav <- k0
iter <- iter + 1
yi.t <- yi[1:(k - k0)]
vi.t <- vi[1:(k - k0)]
weights.t <- weights[1:(k - k0)]
res <- rma(yi.t, vi.t, weights = weights.t, intercept = TRUE,
method = x$method, weighted = x$weighted, ...)
b <- c(res$b)
yi.c <- yi - b
yi.c.r <- rank(abs(yi.c), ties.method = "first")
yi.c.r.s <- sign(yi.c) * yi.c.r
if (estimator == "R0") {
k0 <- (k - max(-1 * yi.c.r.s[yi.c.r.s < 0])) - 1
se.k0 <- sqrt(2 * max(0, k0) + 2)
}
if (estimator == "L0") {
Sr <- sum(yi.c.r.s[yi.c.r.s > 0])
k0 <- (4 * Sr - k * (k + 1))/(2 * k - 1)
varSr <- 1/24 * (k * (k + 1) * (2 * k + 1) + 10 *
k0^3 + 27 * k0^2 + 17 * k0 - 18 * k * k0^2 -
18 * k * k0 + 6 * k^2 * k0)
se.k0 <- 4 * sqrt(varSr)/(2 * k - 1)
}
if (estimator == "Q0") {
Sr <- sum(yi.c.r.s[yi.c.r.s > 0])
k0 <- k - 1/2 - sqrt(2 * k^2 - 4 * Sr + 1/4)
varSr <- 1/24 * (k * (k + 1) * (2 * k + 1) + 10 *
k0^3 + 27 * k0^2 + 17 * k0 - 18 * k * k0^2 -
18 * k * k0 + 6 * k^2 * k0)
se.k0 <- 2 * sqrt(varSr)/sqrt((k - 1/2)^2 - k0 *
(2 * k - k0 - 1))
}
k0 <- max(0, k0)
k0 <- round(k0)
se.k0 <- max(0, se.k0)
if (verbose)
cat("Iteration:", iter, "\tmissing =", k0, "\t b =",
ifelse(side == "right", -1 * b, b), "\n")
}
if (k0 > 0) {
if (side == "right") {
yi.c <- -1 * (yi.c - b)
}
else {
yi.c <- yi.c - b
}
yi.fill <- c(x$yi.f, -1 * yi.c[(k - k0 + 1):k])
vi.fill <- c(x$vi.f, vi[(k - k0 + 1):k])
weights.fill <- c(x$weights.f, weights[(k - k0 + 1):k])
ni.fill <- c(x$ni.f, ni[(k - k0 + 1):k])
attr(yi.fill, "measure") <- x$measure
res <- rma(yi.fill, vi.fill, weights = weights.fill,
ni = ni.fill, intercept = TRUE, method = x$method,
weighted = x$weighted, ...)
res$fill <- c(rep(FALSE, k), rep(TRUE, k0))
res$ids <- c(x$ids, (x$k.f + 1):(x$k.f + k0))
if (x$slab.null) {
res$slab <- c(paste("Study", x$ids), paste("Filled",
seq_len(k0)))
}
else {
res$slab <- c(x$slab, paste("Filled", seq_len(k0)))
}
res$slab.null <- FALSE
}
else {
res <- x
res$fill <- rep(FALSE, k)
}
res$k0 <- k0
res$se.k0 <- se.k0
res$side <- side
res$k0.est <- estimator
if (estimator == "R0") {
m <- -1:(k0 - 1)
res$pval <- 1 - sum(choose(0 + m + 1, m + 1) * 0.5^(0 + m + 2))
}
else {
res$pval <- NA
}
class(res) <- c("rma.uni.trimfill", class(res))
return(res)
}
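# Example sketch (assumes the metafor package and an intercept-only
# random-effects fit; 'dat' is a hypothetical data frame with yi/vi columns,
# e.g. from escalc()):
# fit <- metafor::rma(yi, vi, data = dat)
# pb.trimfill(fit, side = "left", estimator = "L0")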
|
expected <- eval(parse(text="TRUE"));
test(id=0, code={
argv <- eval(parse(text="list(structure(c(TRUE, TRUE, TRUE, TRUE, TRUE), .Names = c(\"1\", \"2\", \"3\", \"4\", \"5\"), .Dim = 5L, .Dimnames = list(c(\"1\", \"2\", \"3\", \"4\", \"5\"))))"));
do.call(`any`, argv);
}, o=expected);
|
rt_ticket_attachments <- function(ticket_id, ...) {
url <- rt_url("ticket", ticket_id, "attachments")
out <- rt_GET(url, ...)
stopforstatus(out)
location <- stringr::str_locate(out$body, "Attachments: ")
if (all(dim(location) != c(1, 2))) {
stop("Error while processing response from RT.")
}
rest <- stringr::str_sub(out$body, location[1, 2] + 1, nchar(out$body))
attachments <- parse_rt_properties(rest)
result <- lapply(attachments, function(attachment) {
props <- as.list(
stringr::str_match(
attachment,
"\\(?(.+)\\) \\((.+) \\/ (.+)\\)")[1, (2:4)]
)
names(props) <- c("Name", "Type", "Size")
props
})
if (length(result) == 0) {
return(try_tibble(data.frame()))
}
try_tibble(
cbind(data.frame(id = names(result), stringsAsFactors = FALSE),
do.call(rbind, lapply(result, function(x) {
data.frame(x, stringsAsFactors = FALSE)
}))
)
)
}
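# Example sketch (hypothetical ticket id; requires a configured RT session):
# rt_ticket_attachments(992)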
|
SpatialPointsTopography<-function(points, elevation, slope = NULL, aspect = NULL, proj4string = CRS(as.character(NA))) {
if(!(inherits(points,"SpatialPoints")|| inherits(points,"matrix"))) stop("'points' has to be of class 'matrix' or 'SpatialPoints'.")
if(inherits(points,"SpatialPoints")) {
npoints = nrow(points@coords)
proj4string =points@proj4string
coords = points@coords
bbox = points@bbox
} else {
coords = as.matrix(points)
npoints = nrow(coords)
bbox = bbox(SpatialPoints(coords))
}
if(length(elevation)!=npoints) stop("'elevation' has to be of length equal to the number of points")
if(is.null(slope))
slope = as.numeric(rep(NA, length(elevation)))
else if(length(slope)!=npoints)
stop("'slope' has to be of length equal to the number of points")
if(is.null(aspect))
aspect = as.numeric(rep(NA, length(elevation)))
else if(length(aspect)!=npoints)
stop("'aspect' has to be of length equal to the number of points")
data = data.frame(elevation = elevation, slope = slope, aspect = aspect,
row.names = row.names(coords))
lt = new("SpatialPointsTopography",
coords = coords,
bbox = bbox,
proj4string = proj4string,
data = data)
return(lt)
}
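# Example sketch (assumes the sp package is attached; coordinates and
# topography values are made up for illustration):
# cc <- cbind(long = c(1.0, 1.1), lat = c(41.0, 41.1))
# spt <- SpatialPointsTopography(cc, elevation = c(850, 920),
#                                slope = c(15, 20), aspect = c(180, 225))
# spt[1, ]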
setMethod("[", signature("SpatialPointsTopography"),definition =
function (x, i, j, ..., drop = TRUE)
{
missing.i = missing(i)
if (!missing(j))
warning("j index ignored")
if (missing.i) i = TRUE
if (is.matrix(i))
stop("matrix argument not supported in SpatialPointsTopography selection")
if (is(i, "Spatial"))
i = !is.na(over(x, geometry(i)))
if (is.character(i))
i <- match(i, row.names(x))
if (any(is.na(i)))
stop("NAs not permitted in row index")
sp = as(x,"SpatialPoints")[i, , drop=drop]
x@coords = sp@coords
x@bbox = sp@bbox
x@data = x@data[i, , ..., drop = FALSE]
x
}
)
print.SpatialPointsTopography = function(x, ..., digits = getOption("digits")) {
cat("Object of class SpatialPointsTopography\n")
cc = substring(paste(as.data.frame(
t(signif(coordinates(x), digits)))),2,999)
df = data.frame("coordinates" = cc, x@data)
row.names(df) = row.names(x@data)
print(df, ..., digits = digits)
}
setMethod("print", "SpatialPointsTopography", function(x, ..., digits = getOption("digits")) print.SpatialPointsTopography(x, ..., digits))
setMethod("show", "SpatialPointsTopography", function(object) print.SpatialPointsTopography(object))
head.SpatialPointsTopography <- function(x, n=6L, ...) {
n <- min(n, length(x))
ix <- sign(n)*seq(abs(n))
x[ ix , , drop=FALSE]
}
setMethod("head", "SpatialPointsTopography", function(x, n=6L, ...) head.SpatialPointsTopography(x,n,...))
tail.SpatialPointsTopography <- function(x, n=6L, ...) {
n <- min(n, length(x))
ix <- sign(n)*rev(seq(nrow(x), by=-1L, len=abs(n)))
x[ ix , , drop=FALSE]
}
setMethod("tail", "SpatialPointsTopography", function(x, n=6L, ...) tail.SpatialPointsTopography(x,n,...))
setMethod("spTransform", signature("SpatialPointsTopography", "CRS"),
function(x, CRSobj, ...) {
sp = spTransform(as(x,"SpatialPoints"), CRSobj)
new("SpatialPointsTopography",
coords = sp@coords,
bbox = sp@bbox,
proj4string = sp@proj4string,
data = x@data)
}
)
as.SpPtsTop.SpPixTop = function(from) {
spdf = as(from, "SpatialPixelsDataFrame")
new("SpatialPixelsTopography",
coords = spdf@coords,
      coords.nrs = spdf@coords.nrs,
bbox = spdf@bbox,
grid = spdf@grid,
      grid.index = spdf@grid.index,
proj4string = spdf@proj4string,
data = spdf@data)
}
setAs("SpatialPointsTopography", "SpatialPixelsTopography", as.SpPtsTop.SpPixTop)
as.SpPtsTop.SpGrdTop = function(from) {
as.SpPixTop.SpGrdTop(as.SpPtsTop.SpPixTop(from))
}
setAs("SpatialPointsTopography", "SpatialGridTopography", as.SpPtsTop.SpGrdTop)
|
"discrimin" <- function (dudi, fac, scannf = TRUE, nf = 2) {
if (!inherits(dudi, "dudi"))
stop("Object of class dudi expected")
if (!is.factor(fac))
stop("factor expected")
lig <- nrow(dudi$tab)
if (length(fac) != lig)
stop("Non convenient dimension")
rank <- dudi$rank
dudi <- redo.dudi(dudi, rank)
deminorm <- as.matrix(dudi$c1) * dudi$cw
deminorm <- t(t(deminorm)/sqrt(dudi$eig))
cla.w <- tapply(dudi$lw, fac, sum)
mean.w <- function(x) {
z <- x * dudi$lw
z <- tapply(z, fac, sum)/cla.w
return(z)
}
tabmoy <- apply(dudi$l1, 2, mean.w)
tabmoy <- data.frame(tabmoy)
row.names(tabmoy) <- levels(fac)
cla.w <- cla.w/sum(cla.w)
X <- as.dudi(tabmoy, rep(1, rank), as.vector(cla.w), scannf = scannf,
nf = nf, call = match.call(), type = "dis")
res <- list()
res$eig <- X$eig
res$nf <- X$nf
res$fa <- deminorm %*% as.matrix(X$c1)
res$li <- as.matrix(dudi$tab) %*% res$fa
w <- scalewt(dudi$tab, dudi$lw)
res$va <- t(as.matrix(w)) %*% (res$li * dudi$lw)
res$cp <- t(as.matrix(dudi$l1)) %*% (dudi$lw * res$li)
res$fa <- data.frame(res$fa)
row.names(res$fa) <- names(dudi$tab)
names(res$fa) <- paste("DS", 1:X$nf, sep = "")
res$li <- data.frame(res$li)
row.names(res$li) <- row.names(dudi$tab)
names(res$li) <- names(res$fa)
w <- apply(res$li, 2, mean.w)
res$gc <- data.frame(w)
row.names(res$gc) <- as.character(levels(fac))
names(res$gc) <- names(res$fa)
res$cp <- data.frame(res$cp)
row.names(res$cp) <- names(dudi$l1)
names(res$cp) <- names(res$fa)
res$call <- match.call()
class(res) <- "discrimin"
return(res)
}
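# Example sketch (standard ade4-style usage; assumes ade4 is attached):
# pca <- dudi.pca(iris[, 1:4], scannf = FALSE, nf = 4)
# dis <- discrimin(pca, iris$Species, scannf = FALSE, nf = 2)
# plot(dis)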
"plot.discrimin" <- function (x, xax = 1, yax = 2, ...) {
if (!inherits(x, "discrimin"))
stop("Use only with 'discrimin' objects")
if ((x$nf == 1) || (xax == yax)) {
if (inherits(x, "coadisc")) {
appel <- as.list(x$call)
df <- eval.parent(appel$df)
fac <- eval.parent(appel$fac)
lig <- nrow(df)
if (length(fac) != lig)
stop("Non convenient dimension")
lig.w <- apply(df, 1, sum)
lig.w <- lig.w/sum(lig.w)
cla.w <- as.vector(tapply(lig.w, fac, sum))
mean.w <- function(x) {
z <- x * lig.w
z <- tapply(z, fac, sum)/cla.w
return(z)
}
w <- apply(df, 2, mean.w)
w <- data.frame(t(w))
sco.distri(x$fa[, xax], w, clabel = 1, xlim = NULL,
grid = TRUE, cgrid = 1, include.origin = TRUE, origin = 0,
sub = NULL, csub = 1)
return(invisible())
}
appel <- as.list(x$call)
dudi <- eval.parent(appel$dudi)
fac <- eval.parent(appel$fac)
lig <- nrow(dudi$tab)
if (length(fac) != lig)
stop("Non convenient dimension")
sco.quant(x$li[, 1], dudi$tab, fac = fac)
return(invisible())
}
if (xax > x$nf)
stop("Non convenient xax")
if (yax > x$nf)
stop("Non convenient yax")
fac <- eval.parent(as.list(x$call)$fac)
def.par <- par(no.readonly = TRUE)
on.exit(par(def.par))
layout(matrix(c(1, 2, 3, 4, 4, 5, 4, 4, 6), 3, 3),
respect = TRUE)
par(mar = c(0.2, 0.2, 0.2, 0.2))
s.arrow(x$fa, xax = xax, yax = yax, sub = "Canonical weights",
csub = 2, clabel = 1.25)
s.corcircle(x$va, xax = xax, yax = yax, sub = "Cos(variates,canonical variates)",
csub = 2, cgrid = 0, clabel = 1.25)
scatterutil.eigen(x$eig, wsel = c(xax, yax))
s.class(x$li, fac, xax = xax, yax = yax, sub = "Scores and classes",
csub = 2, clabel = 1.5)
s.corcircle(x$cp, xax = xax, yax = yax, sub = "Cos(components,canonical variates)",
csub = 2, cgrid = 0, clabel = 1.25)
s.label(x$gc, xax = xax, yax = yax, sub = "Class scores",
csub = 2, clabel = 1.25)
}
"print.discrimin" <- function (x, ...) {
if (!inherits(x, "discrimin"))
stop("to be used with 'discrimin' object")
cat("Discriminant analysis\n")
cat("call: ")
print(x$call)
cat("class: ")
cat(class(x), "\n")
cat("\n$nf (axis saved) :", x$nf)
cat("\n\neigen values: ")
l0 <- length(x$eig)
cat(signif(x$eig, 4)[1:(min(5, l0))])
if (l0 > 5)
cat(" ...\n\n")
else cat("\n\n")
sumry <- array("", c(5, 4), list(1:5, c("data.frame", "nrow",
"ncol", "content")))
sumry[1, ] <- c("$fa", nrow(x$fa), ncol(x$fa), "loadings / canonical weights")
sumry[2, ] <- c("$li", nrow(x$li), ncol(x$li), "canonical scores")
sumry[3, ] <- c("$va", nrow(x$va), ncol(x$va), "cos(variables, canonical scores)")
sumry[4, ] <- c("$cp", nrow(x$cp), ncol(x$cp), "cos(components, canonical scores)")
sumry[5, ] <- c("$gc", nrow(x$gc), ncol(x$gc), "class scores")
print(sumry, quote = FALSE)
cat("\n")
}
|
write.code <- function (x, file = stdout(), evaluated = FALSE, simplify = !evaluated,
deparseCtrl = c("keepInteger", "showAttributes",
"useSource", "keepNA", "digits17"))
{
if (!evaluated)
x <- substitute(x)
x <- if (simplify && is.call(x) && is.symbol(x[[1]]) && x[[1]] == quote(`{`))
vapply(as.list(x[-1]), deparse1, collapse = "\n",
width.cutoff = 60L, backtick = TRUE, control = deparseCtrl,
FUN.VALUE = "")
else deparse1(x, collapse = "\n", width.cutoff = 60L,
backtick = TRUE, control = deparseCtrl)
if (is.null(file))
x
else {
writeLines(x, file, useBytes = TRUE)
invisible(x)
}
}
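# Example sketch: deparse an unevaluated block to standard output.
# write.code({
#   x <- rnorm(10)
#   mean(x)
# })
# Pass a path or connection as 'file' to write the deparsed code to a file.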
|
my.objectivePhiOne.nlogL.rocnsef <- function(phi, fitlist, reu13.list.g, y.g,
n.g){
ret <- 0
for(i.aa in 1:length(reu13.list.g)){
for(i.scodon in 1:length(reu13.list.g[[i.aa]])){
Pos <- reu13.list.g[[i.aa]][[i.scodon]]
if(length(Pos) > 0){
xm <- matrix(cbind(1, phi, phi * Pos), ncol = 3)
exponent <- xm %*% fitlist[[i.aa]]$coef.mat
id.infinite <- rowSums(!is.finite(exponent)) > 0
if(any(id.infinite)){
xm.tmp <- matrix(cbind(1, Pos[id.infinite]), ncol = 2)
coef.tmp <- matrix(fitlist[[i.aa]]$coef.mat[-1,], nrow = 2)
exponent[id.infinite,] <- xm.tmp %*% coef.tmp
}
lp.vec <- my.inverse.mlogit(exponent, log = TRUE)
ret <- ret + sum(lp.vec[, i.scodon])
}
}
}
-ret
}
my.objectivePhiOne.nlogL.roc <- function(phi, fitlist, reu13.list.g, y.g, n.g){
ret <- 0
for(i.aa in 1:length(y.g)){
if(n.g[[i.aa]] > 0){
xm <- matrix(cbind(1, phi), ncol = 2)
exponent <- xm %*% fitlist[[i.aa]]$coef.mat
id.infinite <- rowSums(!is.finite(exponent)) > 0
if(any(id.infinite)){
xm.tmp <- matrix(1, nrow = sum(id.infinite), ncol = 1)
coef.tmp <- matrix(fitlist[[i.aa]]$coef.mat[-1,], nrow = 1)
exponent[id.infinite,] <- xm.tmp %*% coef.tmp
}
lp.vec <- my.inverse.mlogit(exponent, log = TRUE)
ret <- ret + sum((y.g[[i.aa]] * lp.vec)[y.g[[i.aa]] != 0])
}
}
-ret
}
my.objectivePhiOne.nlogL.nsef <- function(phi, fitlist, reu13.list.g, y.g, n.g){
ret <- 0
for(i.aa in 1:length(reu13.list.g)){
for(i.scodon in 1:length(reu13.list.g[[i.aa]])){
Pos <- reu13.list.g[[i.aa]][[i.scodon]]
if(length(Pos) > 0){
xm <- matrix(cbind(1, phi * Pos), ncol = 2)
exponent <- xm %*% fitlist[[i.aa]]$coef.mat
id.infinite <- rowSums(!is.finite(exponent)) > 0
if(any(id.infinite)){
xm.tmp <- matrix(Pos[id.infinite], ncol = 1)
coef.tmp <- matrix(fitlist[[i.aa]]$coef.mat[-1,], nrow = 1)
exponent[id.infinite,] <- xm.tmp %*% coef.tmp
}
lp.vec <- my.inverse.mlogit(exponent, log = TRUE)
ret <- ret + sum(lp.vec[, i.scodon])
}
}
}
-ret
}
|
pip_version <- function(python) {
if (!file.exists(python))
return(numeric_version("0.0"))
command <- "import sys; import pip; sys.stdout.write(pip.__version__)"
version <- system2(python, c("-c", shQuote(command)), stdout = TRUE, stderr = TRUE)
idx <- regexpr("[[:alpha:]]", version)
if (idx != -1)
version <- substring(version, 1, idx - 1)
numeric_version(version)
}
pip_install <- function(python,
packages,
pip_options = character(),
ignore_installed = FALSE,
conda = "auto",
envname = NULL)
{
args <- c("-m", "pip", "install", "--upgrade")
if (ignore_installed)
args <- c(args, "--ignore-installed")
args <- c(args, pip_options)
packages <- shQuote(gsub("[\"']", "", packages))
args <- c(args, packages)
if (conda == "auto") {
info <- python_info(python)
if (info$type != "conda" || numeric_conda_version(info$conda) < "4.9")
conda <- NULL
}
result <- if (is.null(conda) || identical(conda, FALSE))
system2t(python, args)
else
conda_run2(python, args, conda = conda, envname = envname)
if (result != 0L) {
pkglist <- paste(shQuote(packages), collapse = ", ")
msg <- paste("Error installing package(s):", pkglist)
stop(msg, call. = FALSE)
}
invisible(packages)
}
pip_uninstall <- function(python, packages) {
args <- c("-m", "pip", "uninstall", "--yes", packages)
result <- system2t(python, args)
if (result != 0L) {
pkglist <- paste(shQuote(packages), collapse = ", ")
msg <- paste("Error removing package(s):", pkglist)
stop(msg, call. = FALSE)
}
packages
}
pip_freeze <- function(python) {
args <- c("-m", "pip", "freeze")
output <- system2(python, args, stdout = TRUE)
matches <- strsplit(output, "(==|@)")
matches <- .mapply(c, list(matches, output), MoreArgs = NULL)
n <- vapply(matches, length, FUN.VALUE = numeric(1))
matches <- matches[n == 3]
packages <- vapply(matches, `[[`, 1L, FUN.VALUE = character(1))
versions <- vapply(matches, `[[`, 2L, FUN.VALUE = character(1))
requirement <- vapply(matches, `[[`, 3L, FUN.VALUE = character(1))
data.frame(
package = packages,
version = versions,
requirement = requirement,
stringsAsFactors = FALSE
)
}
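# Usage sketch (illustrative only): the interpreter path and package names are
# placeholders, and the conda helpers referenced above are assumed to come
# from the surrounding package.
if (FALSE) {
  py <- "/usr/bin/python3"
  pip_version(py)                          # numeric_version of pip, or 0.0
  pip_install(py, c("numpy", "requests"))  # install/upgrade via python -m pip
  pip_freeze(py)                           # data.frame: package, version, requirement
}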
|
`plot.crn` <- function(x, ...){ crn.plot(crn=x, ...) }
`crn.plot` <- function(crn, add.spline=FALSE, nyrs=NULL, f=0.5,
crn.line.col='grey50',
spline.line.col='red',
samp.depth.col='grey90',
samp.depth.border.col='grey80',
crn.lwd=1, spline.lwd=1.5,
abline.pos=1, abline.col='black',
abline.lty=1, abline.lwd=1,
xlab="Time",ylab="RWI",
...) {
if(!("crn" %in% class(crn))) stop("'crn' must be class crn")
op <- par(no.readonly=TRUE)
on.exit(par(op))
par(mar=c(3, 3, 3, 3), mgp=c(1.1, 0.1, 0),
tcl=0.5, xaxs='i')
yr.vec <- as.numeric(row.names(crn))
crn.names <- names(crn)
nCrn <- ncol(crn)
sd.exist <- crn.names[nCrn] == "samp.depth"
args0 <- list(...)
args1 <- args0
args1[["ylim2"]] <- NULL
args1[c("x", "y", "type", "axes", "xlab", "ylab")] <-
list(yr.vec, as.name("spl"), "n", FALSE, xlab, ylab)
args2 <- args1
args2[c("main", "sub", "xlab", "ylab")] <- list("", "", "", "")
if(sd.exist) {
samp.depth <- crn[[nCrn]]
nCrn <- nCrn-1
text.samp <- gettext("Sample Depth", domain="R-dplR")
sdargs <- args2
sdargs[["ylim"]] <- args0[["ylim2"]]
sdargs[["y"]] <- samp.depth
}
if(nCrn > 1) layout(matrix(seq_len(nCrn), nrow=nCrn, ncol=1))
nyrs2 <- nyrs
for(i in seq_len(nCrn)){
spl <- crn[[i]]
do.call("plot", args1)
if(sd.exist) {
par(new=TRUE)
do.call("plot", sdargs)
xx <- c(yr.vec,max(yr.vec,na.rm=TRUE),min(yr.vec,na.rm=TRUE))
yy <- c(samp.depth, 0, 0)
polygon(xx,yy,col=samp.depth.col,border=samp.depth.border.col)
axis(4, at=pretty(samp.depth))
mtext(text.samp, side=4, line=1.25)
}
par(new=TRUE)
do.call("plot", args2)
abline(h=abline.pos,lwd=abline.lwd,
lty=abline.lty,col=abline.col)
lines(yr.vec, spl, col=crn.line.col,lwd=crn.lwd)
tmp <- na.omit(spl)
if(add.spline) {
if(is.null(nyrs2)) nyrs2 <- length(tmp)*0.33
spl[!is.na(spl)] <- ffcsaps(y=tmp, x=seq_along(tmp), nyrs=nyrs2, f=f)
lines(yr.vec, spl, col=spline.line.col, lwd=spline.lwd)
}
axis(1)
axis(2)
axis(3)
if(!sd.exist) axis(4)
box()
}
}
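# Usage sketch (not from the original source), assuming dplR and its ca533
# example ring-width data set are available.
if (FALSE) {
  library(dplR)
  data(ca533)
  ca533.crn <- chron(detrend(ca533, method = "Spline"))
  crn.plot(ca533.crn, add.spline = TRUE, nyrs = 20)
}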
|
nuclear <-
function(B) {
sum(abs(svd(B)$d))
}
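# Usage sketch: the nuclear (trace) norm is the sum of singular values, which
# svd() already returns as non-negative, so abs() is only a safeguard.
if (FALSE) {
  B <- matrix(rnorm(12), nrow = 3)
  nuclear(B)   # identical to sum(svd(B)$d)
}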
|
.FilterEnsembleRegister = new.env()
makeFilterEnsemble = function(name, base.methods, desc, fun) {
assertString(name)
assertString(desc)
assertFunction(fun, c("task", "base.methods"))
obj = makeS3Obj("FilterEnsemble",
name = name,
desc = desc,
fun = fun
)
.FilterEnsembleRegister[[name]] = obj
obj
}
listFilterEnsembleMethods = function(desc = TRUE) {
tag2df = function(tags, prefix = "") {
unique.tags = sort(unique(unlist(tags)))
res = asMatrixRows(lapply(tags, "%in%", x = unique.tags))
colnames(res) = stri_paste(prefix, unique.tags)
rownames(res) = NULL
as.data.frame(res)
}
assertFlag(desc)
filters = as.list(.FilterEnsembleRegister)
df = data.frame(
id = names(filters)
)
description = extractSubList(filters, "desc")
if (desc) {
df$desc = description
}
res = setRowNames(sortByCol(df, "id"), NULL)
addClasses(res, "FilterMethodsList")
return(res)
}
print.FilterEnsembleMethodsList = function(x, len = 40, ...) {
if (!is.null(x$desc)) {
x$desc = clipString(x$desc, len = len)
}
NextMethod()
}
print.FilterEnsemble = function(x, ...) {
catf("Filter: '%s'", x$name)
}
makeFilterEnsemble(
name = "E-min",
desc = "Minimum ensemble filter. Takes the best minimum value across all base filter methods for each feature.",
base.methods = NULL,
fun = function(task, base.methods, nselect, more.args) {
fval.all.ranked = rankBaseFilters(task = task, method = base.methods,
nselect = nselect, more.args = more.args)
fval.ens = aggregate(fval.all.ranked$rank,
by = list(fval.all.ranked$name), FUN = min)
colnames(fval.ens) = c("name", "value")
fval.ens$type = fval.all.ranked$type[1:length(unique(fval.all.ranked$name))]
fval.ens$filter = "E-min"
fval.ens = mergeFilters(fval.all.ranked, fval.ens)
return(fval.ens)
}
)
makeFilterEnsemble(
name = "E-mean",
desc = "Mean ensemble filter. Takes the mean across all base filter methods for each feature.",
base.methods = NULL,
fun = function(task, base.methods, nselect, more.args) {
fval.all.ranked = rankBaseFilters(task = task, method = base.methods,
nselect = nselect, more.args = more.args)
fval.ens = aggregate(fval.all.ranked$rank,
by = list(fval.all.ranked$name), FUN = mean)
colnames(fval.ens) = c("name", "value")
fval.ens$type = fval.all.ranked$type[1:length(unique(fval.all.ranked$name))]
fval.ens$filter = "E-mean"
fval.ens = mergeFilters(fval.all.ranked, fval.ens)
return(fval.ens)
}
)
makeFilterEnsemble(
name = "E-max",
desc = "Maximum ensemble filter. Takes the best maximum value across all base filter methods for each feature.",
base.methods = NULL,
fun = function(task, base.methods, nselect, more.args) {
fval.all.ranked = rankBaseFilters(task = task, method = base.methods,
nselect = nselect, more.args = more.args)
fval.ens = aggregate(fval.all.ranked$rank,
by = list(fval.all.ranked$name), FUN = max)
colnames(fval.ens) = c("name", "value")
fval.ens$type = fval.all.ranked$type[1:length(unique(fval.all.ranked$name))]
fval.ens$filter = "E-max"
fval.ens = mergeFilters(fval.all.ranked, fval.ens)
return(fval.ens)
}
)
makeFilterEnsemble(
name = "E-median",
desc = "Median ensemble filter. Takes the median across all base filter methods for each feature.",
base.methods = NULL,
fun = function(task, base.methods, nselect, more.args) {
fval.all.ranked = rankBaseFilters(task = task, method = base.methods,
nselect = nselect, more.args = more.args)
fval.ens = aggregate(fval.all.ranked$rank,
by = list(fval.all.ranked$name), FUN = median)
colnames(fval.ens) = c("name", "value")
fval.ens$type = fval.all.ranked$type[1:length(unique(fval.all.ranked$name))]
fval.ens$filter = "E-median"
fval.ens = mergeFilters(fval.all.ranked, fval.ens)
return(fval.ens)
}
)
makeFilterEnsemble(
name = "E-Borda",
desc = "Borda ensemble filter. Takes the sum across all base filter methods for each feature.",
base.methods = NULL,
fun = function(task, base.methods, nselect, more.args) {
if (length(unique(base.methods)) == 1L) {
stopf("Sampling without replacement is currently not supported for simple filter methods. Please use `makeDiscreteParam()` instead of `makeDiscreteVectorParam()`.")
}
fval.all.ranked = rankBaseFilters(task = task, method = base.methods,
nselect = nselect, more.args = more.args)
fval.ens = aggregate(fval.all.ranked$rank,
by = list(fval.all.ranked$name), FUN = sum)
colnames(fval.ens) = c("name", "value")
fval.ens$type = fval.all.ranked$type[1:length(unique(fval.all.ranked$name))]
fval.ens$filter = "E-Borda"
fval.ens = mergeFilters(fval.all.ranked, fval.ens)
return(fval.ens)
}
)
rankBaseFilters = function(task, method = method,
nselect = nselect, more.args = more.args) {
fval.calc = generateFilterValuesData(task, method = method,
nselect = nselect, more.args = more.args)
value = NULL
fval.all.ranked = fval.calc$data[, rank := frank(value,
ties.method = "first"), by = filter]
setorderv(fval.all.ranked, c("filter", "rank"))
return(fval.all.ranked)
}
mergeFilters = function(simple_filters, ensemble_filters) {
simple_filters$rank = NULL
all.filters = rbind(simple_filters, ensemble_filters)
return(all.filters)
}
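# Usage sketch (assumed mlr-style API): list the registered ensembles and
# reproduce by hand what the "E-mean" ensemble does internally, using two
# standard base filters on mlr's built-in iris.task.
if (FALSE) {
  listFilterEnsembleMethods()
  ranked <- rankBaseFilters(task = iris.task,
                            method = c("variance", "anova.test"),
                            nselect = getTaskNFeats(iris.task),
                            more.args = list())
  aggregate(ranked$rank, by = list(ranked$name), FUN = mean)
}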
|
Even <- function(x){
  if(!is.numeric(x)){
    stop("Even only works on numbers")
  }
  # vectorised test; equivalent to the original element-wise
  # x[i]/2 == round(x[i]/2) loop
  result <- x %% 2 == 0
  return(result)
}
|
rbweibull <-
function(n, prob, scale, shape)
{
if(max(length(prob), length(scale), length(shape)) > 1)
stop("parameters must be of length 1")
p <- runif(n)
q <- rep(0, length(p))
cases <- p > (1-prob)
q[cases] <- rweibull(sum(cases), scale=scale, shape=shape)
q
}
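# Usage sketch: a zero-inflated Weibull sample in which roughly 70% of the
# draws are exact zeros and the remainder are Weibull(shape = 1.5, scale = 2).
if (FALSE) {
  set.seed(1)
  rbweibull(10, prob = 0.3, scale = 2, shape = 1.5)
}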
|
context("kgaps_imt")
u <- stats::quantile(newlyn, probs = c(0.85, 0.90, 0.95))
k_vals <- 1:4
all_res <- kgaps_imt(newlyn, u, k_vals)
all_IMT <- all_res$imt
all_p <- all_res$p
all_theta <- all_res$theta
ind_IMT <- ind_p <- ind_theta <- all_IMT
for (i in 1:length(u)) {
for (j in 1:length(k_vals)) {
temp <- kgaps_imt(newlyn, u = u[i], k = k_vals[j])
ind_IMT[i, j] <- temp$imt
ind_p[i, j] <- temp$p
ind_theta[i, j] <- temp$theta
}
}
my_tol <- 1e-5
test_that("IMT values agree", {
testthat::expect_equal(all_IMT, ind_IMT, tolerance = my_tol)
})
test_that("p-values agree", {
testthat::expect_equal(all_p, ind_p, tolerance = my_tol)
})
test_that("MLEs of theta values agree", {
testthat::expect_equal(all_theta, ind_theta, tolerance = my_tol)
})
|
.optimalDCAGE <- function(...,
dB,
yFinest,
ySource,
finestOnSource,
sourceAreas,
finestAreas,
gL,
gU,
cMethod,
alpha,
localCluster) {
message("determining clustering that minimizes CAGE/DCAGE")
centroids <- tryCatch(expr = sp::coordinates(obj = rgeos::gCentroid(spgeom = dB,
byid = TRUE)),
error = function(e) {
stop("unable to identify centroids of finest ",
"resolution spatial data\n", e$message,
call. = FALSE)
})
if (cMethod == 'kmeans') {
minResult <- .minCAGE(centroids = centroids,
yFinest = yFinest,
ySource = ySource,
finestOnSource = finestOnSource,
sourceAreas = sourceAreas,
finestAreas = finestAreas,
gL = gL,
gU = gU,
localCluster = localCluster)
} else {
minResult <- .minCAGESH(dB = dB,
centroids = centroids,
yFinest = yFinest,
ySource = ySource,
finestOnSource = finestOnSource,
sourceAreas = sourceAreas,
finestAreas = finestAreas,
gL = gL,
gU = gU,
alpha = alpha,
localCluster = localCluster)
}
YOpt <- .yOpt(idxit = minResult$cluster$cluster,
y = yFinest,
areas = finestAreas)
minResult[[ "yOpt" ]] <- YOpt
return( minResult )
}
|
merge_data_table_list <- function(
dt_list = NULL,
id = NULL,
silent = TRUE
) {
output <- Reduce(function(x, y) {
merge_data_tables(x, y, id = id, silent = silent)}, dt_list)
return(output)
}
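# Usage sketch (illustrative): dt1, dt2, dt3 are hypothetical data.tables that
# share an identifier column, and merge_data_tables() is assumed to come from
# the same package.
if (FALSE) {
  combined <- merge_data_table_list(list(dt1, dt2, dt3), id = "subject_id")
}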
|
source("ESEUR_config.r")
estimate=read.csv(paste0(ESEUR_dir, "developers/anchor-estimate.csv.xz"), as.is=TRUE)
brew_col=rainbow(3)
min_est=subset(estimate, QuestID == "MIN")
ml_est=subset(estimate, QuestID == "ML")
max_est=subset(estimate, QuestID == "MAX")
ml_ord=order(ml_est$Answer)
anc_mod=glm(Answer ~ Group, data=ml_est)
summary(anc_mod)
plot(max_est$Answer[ml_ord], col=brew_col[1])
points(min_est$Answer[ml_ord], col=brew_col[3])
points(ml_est$Answer[ml_ord], col=brew_col[2])
|
setMethod("initialize", "Zproject", function(.Object, root, debug=FALSE) {
if (!file.exists(root)) {
stop(paste0("Root folder ", root, " not found"))
} else {
.Object@root <- root
}
assign("debug", debug, envir = .options)
variants <- list()
bat.files <- list.files(root, ".bat$", full.names = TRUE, recursive = TRUE)
for (bat.file in bat.files) {
variant <- new("Zvariant", bat.file = bat.file)
suppressWarnings(variants[variant@name] <- variant)
}
.Object@variants <- variants
return(.Object)
})
setMethod("initialize", "Zresults", function(.Object, root) {
if (!file.exists(root)) {
warning("Results root path ", root, " does not exist")
return(.Object)
}
.Object@root = root
get_file <- function(output.folder, x) {
target <- list.files(output.folder, pattern = x, full.names = TRUE)
if (length(target) == 0) {
return(NA)
} else if (length(target) == 1) {
return(target)
} else {
warning(paste("More matches than 1 found for", x, "using only the first"))
return(target[1])
}
}
run.info.file <- get_file(root, "\\.run_info\\.txt")
if (!is.na(run.info.file)) {
[email protected] <- run.info.file
content <- readLines(run.info.file)
match_content <- content[grepl("^Finished at", content)]
if (length(match_content) == 1) {
date_string <- regmatches(match_content, gregexpr("(?<=\\().*?(?=\\))",
match_content,
perl = T))[[1]]
.Object@modified <- as.POSIXct(date_string)
if (.options$debug) {
message("Found run info file modified on: ", .Object@modified)
}
}
} else {
.Object@modified <- Sys.time()
if (.options$debug) {
message("Did not find run info file")
}
}
features.info.file <- get_file(root, "\\.features_info\\.txt$")
if (!is.na(features.info.file)) {
if (.options$debug) {
message("Reading in features info file ", features.info.file)
}
[email protected] <- read_features_info(features.info.file)
}
curve.file <- get_file(root, "\\.curves\\.txt")
if (!is.na(curve.file)) {
if (.options$debug) {
message("Reading in curve file ", curve.file)
}
.Object@curves <- read_curves(curve.file)
}
grp.curve.file <- get_file(root, "\\.grp_curves\\.txt")
if (!is.na(grp.curve.file)) {
if (.options$debug) {
message("Reading in groups curve file ", grp.curve.file)
}
[email protected] <- read_grp_curves(grp.curve.file)
}
  rank.raster.file <- get_file(root, "\\.rank\\..*(img|tif|asc)$")
if (!is.na(rank.raster.file)) {
if (.options$debug) {
message("Reading in rank raster file ", rank.raster.file)
}
.Object@rank <- raster::raster(rank.raster.file)
}
wrscr.raster.file <- get_file(root, "\\.wrscr\\.")
if (!is.na(wrscr.raster.file)) {
if (.options$debug) {
message("Reading in wrscr raster file ", wrscr.raster.file)
}
.Object@wrscr <- raster(wrscr.raster.file)
}
prop.raster.file <- get_file(root, "\\.prop\\.")
if (!is.na(prop.raster.file)) {
if (.options$debug) {
message("Reading in prop raster file ", prop.raster.file)
}
.Object@prop <- raster(prop.raster.file)
}
ppa.lsm.file <- get_file(root, ".*nwout\\.1.*")
if (!is.na(ppa.lsm.file)) {
if (.options$debug) {
message("Reading in ppa lsm file ", ppa.lsm.file)
}
ppa.lsm.data <- read_ppa_lsm(ppa.lsm.file)
ppa.lsm.data <- merge(ppa.lsm.data[[1]], ppa.lsm.data[[3]], by.x = "Unit",
by.y = "Unit_number")
[email protected] <- ppa.lsm.data[, !(names(ppa.lsm.data) %in% c("Area_cells"))]
}
invisible(validObject(.Object))
return(.Object)
})
setMethod("initialize", "Zvariant", function(.Object, name=NULL, bat.file) {
if (!file.exists(bat.file)) {
stop(paste0("Variant .bat-file does not exist: ", bat.file))
}
if (is.null(name)) {
.Object@name <- strsplit(basename(bat.file), "\\.")[[1]][1]
} else {
.Object@name <- name
}
[email protected] <- bat.file
if (.options$debug) {
message("Reading in bat file ", bat.file)
}
call.params <- read_bat(bat.file)
[email protected] <- call.params
invisible(validObject(.Object))
if (.options$debug) {
message("Reading in dat file ", call.params$dat.file)
}
[email protected] <- read_dat(call.params$dat.file)
if (.options$debug) {
message("Reading in spp file ", call.params$spp.file)
}
spp.data <- read_spp(call.params$spp.file)
spp.data$name <- basename(zonator::file_path_sans_ext(spp.data$filepath))
[email protected] <- spp.data
[email protected] <- call.params$output.folder
use_groups <- get_dat_param(.Object, "use groups", warn_missing = FALSE)
if (!is.na(use_groups) & use_groups == 1) {
groups_file <- get_dat_param(.Object, "groups file", warn_missing = FALSE)
groups_file <- check_path(groups_file, dirname(bat.file),
require.file = TRUE)
if (.options$debug) {
message("Reading in groups file ", groups_file)
}
.Object@groups <- read_groups(groups_file)
group.ids <- unique(.Object@groups$output.group)
group.names <- paste0("group", group.ids)
names(group.names) <- group.ids
groupnames(.Object) <- group.names
}
use_condition_layer <- get_dat_param(.Object, "use condition layer",
warn_missing = FALSE)
if (!is.na(use_condition_layer) & use_condition_layer == 1) {
condition_file <- get_dat_param(.Object, "condition file",
warn_missing = FALSE)
condition_file <- check_path(condition_file, dirname(bat.file),
require.file = TRUE)
if (.options$debug) {
message("Reading in condition file ", condition_file)
}
[email protected] <- read.table(condition_file,
col.names = c("group", "raster"))
}
.Object@results <- new("Zresults", root = [email protected]$output.folder)
if (use_groups == "1" & has_results(.Object)$grp.curves) {
.Object@[email protected] <- regroup_curves(curves(.Object),
sppweights(.Object),
groups(.Object))
}
featurenames(.Object) <- spp.data$name
.Object@results_dirty <- FALSE
return(.Object)
})
|
createPlotAndBoundsSimonReview <- function(des, xmax=NULL, ymax=NULL){
m <- Sm <- decision <- analysis <- NULL
des <- as.data.frame(t(des))
coords <- expand.grid(0:des$n, 1:des$n)
diag.df <- data.frame(Sm=as.numeric(coords[,1]),
m=as.numeric(coords[,2]),
decision=rep("Continue", nrow(coords)))
diag.df$decision <- as.character(diag.df$decision)
diag.df$decision[coords[,1]>coords[,2]] <- NA
fails.sm <- c(0:des$r1, (des$r1+1):des$r)
fails.m <- c(rep(des$n1, length(0:des$r1)),
rep(des$n, length((des$r1+1):des$r))
)
tp.fail <- data.frame(Sm=fails.sm,
m=fails.m)
tp.success <- data.frame(Sm=(des$r+1):des$n,
m=rep(des$n, length((des$r+1):des$n))
)
if("e1" %in% names(des)){
tp.success.s1 <- data.frame(Sm=(des$e1+1):des$n1,
m=rep(des$n1, length((des$e1+1):des$n1))
)
tp.success <- rbind(tp.success, tp.success.s1)
}
success.index <- apply(diag.df, 1, function(y) any(as.numeric(y[1])==tp.success$Sm & as.numeric(y[2])==tp.success$m))
diag.df$decision[success.index] <- "Go decision"
fail.index <- apply(diag.df, 1, function(y) any(as.numeric(y[1])==tp.fail$Sm & as.numeric(y[2])==tp.fail$m))
diag.df$decision[fail.index] <- "No go decision"
for(i in 1:nrow(tp.fail)){
not.poss.fail.index <- diag.df$Sm==tp.fail$Sm[i] & diag.df$m>tp.fail$m[i]
diag.df$decision[not.poss.fail.index] <- NA
}
for(i in 1:nrow(tp.success)){
not.poss.pass.index <- diag.df$m-diag.df$Sm==tp.success$m[i]-tp.success$Sm[i] & diag.df$m>tp.success$m[i]
diag.df$decision[not.poss.pass.index] <- NA
}
diag.df.subset <- diag.df[!is.na(diag.df$decision),]
diag.df.subset$analysis <- "No"
diag.df.subset$analysis[diag.df.subset$m %in% tp.fail$m] <- "Yes"
diagram <- pkgcond::suppress_warnings(ggplot2::ggplot(data=diag.df.subset, mapping = aes(x=m, y=Sm, fill=decision, alpha=analysis))+
scale_alpha_discrete(range=c(0.5, 1)),
"Using alpha for a discrete variable is not advised")
diagram <- diagram +
geom_tile(color="white")+
labs(fill="Decision",
alpha="Analysis",
x="Number of participants",
y="Number of responses"
)+
coord_cartesian(expand = 0)+
theme_minimal()
xbreaks <- c(des$n1, des$n)
if(!is.null(xmax)){
diagram <- diagram +
expand_limits(x=xmax)
xbreaks <- c(xbreaks, xmax)
}
if(!is.null(ymax)){
diagram <- diagram +
expand_limits(y=ymax)
}
diagram <- diagram +
scale_x_continuous(breaks=xbreaks)+
scale_y_continuous(breaks = function(x) unique(floor(pretty(seq(0, (max(x) + 1) * 1.1)))))
print(diagram)
stop.bounds <- data.frame(m=c(des$n1, des$n),
success=c(Inf, des$r+1),
fail=c(des$r1, des$r))
return(list(diagram=diagram,
bounds.mat=stop.bounds))
}
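# Usage sketch (illustrative): a hypothetical Simon two-stage design with
# n1 = 10, r1 = 1, n = 29, r = 5; assumes ggplot2 and pkgcond are available.
if (FALSE) {
  des <- c(n1 = 10, r1 = 1, n = 29, r = 5)
  out <- createPlotAndBoundsSimonReview(des)
  out$bounds.mat   # stopping boundaries at the interim and final analyses
}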
|
library("withr")
remove_gitconfig <- function(suffix) {
user_home <- workflowr:::get_home()
config_original <- file.path(user_home, ".gitconfig")
config_tmp <- paste0(config_original, suffix)
if (fs::file_exists(config_original)) {
file.rename(from = config_original, to = config_tmp)
}
return(config_tmp)
}
restore_gitconfig <- function(config_tmp) {
user_home <- workflowr:::get_home()
config_original <- file.path(user_home, ".gitconfig")
if (fs::file_exists(config_tmp)) {
file.rename(from = config_tmp, to = config_original)
} else if (fs::file_exists(config_original)) {
fs::file_delete(config_original)
}
}
local_no_gitconfig <- withr::local_(set = remove_gitconfig,
reset = restore_gitconfig)
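# Usage sketch (illustrative): inside a testthat test, hide the user-level
# ~/.gitconfig for the duration of the local scope; it is restored on exit.
# The suffix is an arbitrary temporary-file suffix.
if (FALSE) {
  test_that("git commands ignore the user's .gitconfig", {
    local_no_gitconfig(".test-backup")
    # ... assertions that must not see user-level Git settings ...
  })
}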
|
test_that("cleanDataset makes no noise for empties", {
expect_warning(cleanDataset(SummarizedExperiment()), NA)
expect_warning(cleanDataset(SingleCellExperiment()), NA)
})
test_that("cleanDataset works on row names", {
se <- SummarizedExperiment(list(counts=cbind(A=1:10, B=2:11)))
expect_warning(out <- cleanDataset(se), "rownames")
expect_identical(rownames(out), as.character(1:10))
rownames(se) <- rep("A", nrow(se))
expect_warning(out <- cleanDataset(se), "rownames")
expect_true(anyDuplicated(rownames(out))==0L)
rownames(se) <- LETTERS[1:10]
expect_warning(cleanDataset(se), NA)
out <- cleanDataset(SummarizedExperiment())
expect_identical(rownames(out), character(0))
})
test_that("cleanDataset works on column names", {
se <- SummarizedExperiment(list(counts=rbind(A=1:10, B=2:11)))
expect_warning(out <- cleanDataset(se), "colnames")
expect_identical(colnames(out), as.character(1:10))
colnames(se) <- rep("A", ncol(se))
expect_warning(out <- cleanDataset(se), "colnames")
expect_true(anyDuplicated(colnames(out))==0L)
colnames(se) <- LETTERS[1:10]
expect_warning(cleanDataset(se), NA)
out <- cleanDataset(SummarizedExperiment())
expect_identical(colnames(out), character(0))
})
test_that("cleanDataset works on colData names", {
se <- SummarizedExperiment(colData=DataFrame(A=1:2, A=3:4, row.names=LETTERS[1:2], check.names=FALSE))
expect_warning(out <- cleanDataset(se), "colnames(colData", fixed=TRUE)
expect_true(anyDuplicated(colnames(colData(out)))==0L)
colnames(colData(se)) <- letters[1:2]
expect_warning(cleanDataset(se), NA)
})
test_that("cleanDataset works on rowData names", {
se <- SummarizedExperiment(rowData=DataFrame(A=1:2, A=3:4, row.names=LETTERS[1:2], check.names=FALSE))
expect_warning(out <- cleanDataset(se), "colnames(rowData", fixed=TRUE)
expect_true(anyDuplicated(colnames(rowData(out)))==0L)
colnames(rowData(se)) <- letters[1:2]
expect_warning(cleanDataset(se), NA)
})
test_that("cleanDataset works on assay names", {
se <- SummarizedExperiment(list(cbind(A=1:10, B=2:11)))
rownames(se) <- letters[1:10]
expect_warning(out <- cleanDataset(se), "assayNames")
expect_identical(assayNames(out), "unnamed")
assays(se) <- list(assay(se), assay(se))
expect_warning(out <- cleanDataset(se), "assayNames")
expect_identical(assayNames(out), c("unnamed", "unnamed.1"))
assays(se) <- list(counts=assay(se), assay(se))
expect_warning(out <- cleanDataset(se), "assayNames")
expect_identical(assayNames(out), c("counts", "unnamed"))
assays(se) <- list(counts=assay(se), counts=assay(se))
expect_warning(out <- cleanDataset(se), "assayNames")
expect_identical(assayNames(out), c("counts", "counts.1"))
assays(se) <- list(counts=assay(se), logcounts=assay(se))
expect_warning(out <- cleanDataset(se), NA)
})
test_that("cleanDataset works on reducedDimNames", {
skip("waiting for a fix to the SCE itself")
reducedDims(sce) <- list(PCA=matrix(0,0,5), PCA=matrix(0,0,2))
expect_warning(out <- cleanDataset(sce), "reducedDimNames")
expect_identical(reducedDimNames(out), c("PCA", "PCA.1"))
reducedDims(sce) <- list(PCA=matrix(0,0,5), TSNE=matrix(0,0,2))
expect_warning(out <- cleanDataset(sce), NA)
})
|
noaa.datums <- function(station = 8467150) {
page <- XML::htmlParse(readLines(paste("https://tidesandcurrents.noaa.gov/datums.html?id=", station, sep = ""), warn=FALSE),
useInternalNodes = TRUE)
nodes <- XML::getNodeSet(page, "//td")
nodes.text <- XML::xmlSApply(nodes, XML::xmlValue)
datum <- as.factor(nodes.text[c(seq(from = 1, to = 46, by = 3), 52, 58, 64)])
description <- as.character(nodes.text[c(seq(from = 3, to = 48, by = 3), 54, 60, 66)])
m.STND <- as.numeric(nodes.text[c(2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 32, 35, 38,
41, 44, 47, 53, 59, 65)])
datetime <- as.POSIXct(rep(NA, times= 19), format = "%m/%d/%Y %H:%M")
datetime[16:19] <- as.POSIXct(nodes.text[c(50, 56, 62, 68)], format = "%m/%d/%Y %H:%M")
output.tmp <- data.frame(datum, description, m.STND, datetime)
names(output.tmp)[1] <- "datum"
names(output.tmp)[3] <- paste("m.STND", station, sep = "")
names(output.tmp)[4] <- paste("time.", station, sep = "")
invisible(output.tmp)
}
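# Usage sketch (requires internet access): station 8518750 is NOAA's
# "The Battery, NY" gauge; the default above is Bridgeport, CT (8467150).
if (FALSE) {
  battery <- noaa.datums(station = 8518750)
  head(battery)
}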
|
concordia.intersection.ludwig <- function(x,wetherill=TRUE,exterr=FALSE,
alpha=0.05,model=1,anchor=0){
fit <- ludwig(x,exterr=exterr,model=model,anchor=anchor)
out <- fit
out$format <- x$format
if (wetherill & !measured.disequilibrium(x$d)){
wfit <- twfit2wfit(fit,x)
out$par <- wfit$par
out$cov <- wfit$cov
names(out$par) <- c('t[l]','t[u]')
} else {
out$par <- fit$par
out$cov <- fit$cov
}
np <- length(out$par)
out$alpha <- alpha
if (inflate(out)){
out$err <- matrix(NA,3,np)
rownames(out$err) <- c('s','ci','disp')
out$err['disp',] <- ntfact(alpha,fit)*sqrt(diag(out$cov))
} else {
out$err <- matrix(NA,2,np)
rownames(out$err) <- c('s','ci')
}
out$err['s',] <- sqrt(diag(out$cov))
out$err['ci',] <- ntfact(alpha)*out$err['s',]
colnames(out$err) <- names(out$par)
out
}
concordia.intersection.ab <- function(a,b,covmat=matrix(0,2,2),
exterr=FALSE,wetherill=FALSE,d=diseq()){
l8 <- lambda('U238')[1]
ta <- get.Pb207Pb206.age(a,d=d)[1]
out <- c(1,a)
if (wetherill) names(out) <- c('t[l]','t[u]')
else names(out) <- c('t[l]','a0')
if (b<0) {
tb <- get.Pb206U238.age(-b/a,d=d)[1]
tlu <- recursive.search(tm=tb,tM=ta,a=a,b=b,d=d)
out['t[l]'] <- tlu[1]
if (wetherill) out['t[u]'] <- tlu[2]
} else {
search.range <- c(ta,2/l8)
out['t[l]'] <- stats::uniroot(intersection.misfit.york,
interval=search.range,a=a,b=b,d=d)$root
}
out
}
recursive.search <- function(tm,tM,a,b,d=diseq(),depth=1){
out <- c(NA,NA)
if (depth<3){
mid <- (tm+tM)/2
mfmin <- intersection.misfit.york(tm,a=a,b=b,d=d)
mfmid <- intersection.misfit.york(mid,a=a,b=b,d=d)
mfmax <- intersection.misfit.york(tM,a=a,b=b,d=d)
if (mfmin*mfmid<0){
out[1] <- stats::uniroot(intersection.misfit.york,
interval=c(tm,mid),a=a,b=b,d=d)$root
} else {
out <- recursive.search(tm=tm,tM=mid,a=a,b=b,d=d,depth=depth+1)
}
if (mfmax*mfmid<0){
out[2] <- stats::uniroot(intersection.misfit.york,
interval=c(mid,tM),a=a,b=b,d=d)$root
} else {
tlu <- recursive.search(tm=mid,tM=tM,a=a,b=b,d=d,depth=depth+1)
if (is.na(out[1])) out[1] <- tlu[1]
if (is.na(out[2])) out[2] <- tlu[2]
}
if (all(is.na(out))){
tlu <- stats::optimise(intersection.misfit.york,
interval=c(tm,tM),a=a,b=b,d=d)$minimum
out <- rep(tlu,2)
}
}
out
}
twfit2wfit <- function(fit,x){
tt <- fit$par['t']
buffer <- 1
l5 <- lambda('U235')[1]
l8 <- lambda('U238')[1]
U <- iratio('U238U235')[1]
E <- matrix(0,3,3)
J <- matrix(0,2,3)
if (x$format %in% c(1,2,3)){
a0 <- 1
b0 <- fit$par['a0']
E[c(1,3),c(1,3)] <- fit$cov[1:2,1:2]
} else {
a0 <- fit$par['a0']
b0 <- fit$par['b0']
E <- fit$cov[1:3,1:3]
}
md <- mediand(x$d)
D <- mclean(tt,d=md)
disc.slope <- a0/(b0*U)
conc.slope <- D$dPb206U238dt/D$dPb207U235dt
if (disc.slope < conc.slope){
search.range <- c(tt,get.Pb207Pb206.age(b0/a0,d=md)[1])+buffer
tl <- tt
tu <- stats::uniroot(intersection.misfit.ludwig,interval=search.range,
t2=tt,a0=a0,b0=b0,d=md)$root
} else {
search.range <- c(0,tt-buffer)
if (check.equilibrium(d=md)) search.range[1] <- -1000
tl <- tryCatch(
stats::uniroot(intersection.misfit.ludwig,
interval=search.range,
t2=tt,a0=a0,b0=b0,d=md)$root
, error=function(e){
stop("Can't find the lower intercept.",
"Try fitting the data in Tera-Wasserburg space.")
})
tu <- tt
}
du <- mclean(tt=tu,d=md)
dl <- mclean(tt=tl,d=md)
XX <- du$Pb207U235 - dl$Pb207U235
YY <- du$Pb206U238 - dl$Pb206U238
BB <- a0/(b0*U)
D <- (YY-BB*XX)^2
dXX.dtu <- du$dPb207U235dt
dXX.dtl <- -dl$dPb207U235dt
dYY.dtu <- du$dPb206U238dt
dYY.dtl <- -dl$dPb206U238dt
dBB.da0 <- 1/(b0*U)
dBB.db0 <- -BB/b0
dD.dtl <- 2*(YY-BB*XX)*(dYY.dtl-BB*dXX.dtl)
dD.dtu <- 2*(YY-BB*XX)*(dYY.dtu-BB*dXX.dtu)
dD.da0 <- 2*(YY-BB*XX)*(-dBB.da0*XX)
dD.db0 <- 2*(YY-BB*XX)*(-dBB.db0*XX)
if (conc.slope > disc.slope){
J[1,1] <- 1
J[2,1] <- -dD.dtl/dD.dtu
J[2,2] <- -dD.da0/dD.dtu
J[2,3] <- -dD.db0/dD.dtu
} else {
J[1,1] <- -dD.dtu/dD.dtl
J[1,2] <- -dD.da0/dD.dtl
J[1,3] <- -dD.db0/dD.dtl
J[2,1] <- 1
}
out <- list()
out$par <- c(tl,tu)
out$cov <- J %*% E %*% t(J)
out
}
intersection.misfit.ludwig <- function(t1,t2,a0,b0,d=diseq()){
tl <- min(t1,t2)
tu <- max(t1,t2)
l5 <- lambda('U235')[1]
l8 <- lambda('U238')[1]
U <- iratio('U238U235')[1]
du <- mclean(tt=tu,d=d)
dl <- mclean(tt=tl,d=d)
XX <- du$Pb207U235 - dl$Pb207U235
YY <- du$Pb206U238 - dl$Pb206U238
BB <- a0/(b0*U)
YY - BB*XX
}
intersection.misfit.york <- function(tt,a,b,d=diseq()){
D <- mclean(tt=tt,d=d)
(D$Pb207Pb206-a)*D$Pb206U238 - b
}
discordia.line <- function(fit,wetherill,d=diseq()){
X <- c(0,0)
Y <- c(0,0)
l5 <- lambda('U235')[1]
l8 <- lambda('U238')[1]
J <- matrix(0,1,2)
usr <- graphics::par('usr')
if (wetherill){
if (measured.disequilibrium(d)){
U85 <- iratio('U238U235')[1]
fit2d <- tw3d2d(fit)
xy1 <- age_to_wetherill_ratios(fit$par[1],d=d)
x1 <- xy1$x[1]
x2 <- usr[2]
y1 <- xy1$x[2]
dydx <- 1/(U85*fit$par[2])
y2 <- y1 + (x2-x1)*dydx
X <- c(x1,x2)
Y <- c(y1,y2)
cix <- NA
ciy <- NA
} else {
tl <- fit$par[1]
tu <- fit$par[2]
X <- age_to_Pb207U235_ratio(fit$par,d=d)[,'75']
Y <- age_to_Pb206U238_ratio(fit$par,d=d)[,'68']
x <- seq(from=max(0,usr[1],X[1]),to=min(usr[2],X[2]),length.out=50)
du <- mclean(tt=tu,d=d)
dl <- mclean(tt=tl,d=d)
aa <- du$Pb206U238 - dl$Pb206U238
bb <- x - dl$Pb207U235
cc <- du$Pb207U235 - dl$Pb207U235
dd <- dl$Pb206U238
y <- aa*bb/cc + dd
dadtl <- -dl$dPb206U238dt
dbdtl <- -dl$dPb207U235dt
dcdtl <- -dl$dPb207U235dt
dddtl <- dl$dPb206U238dt
dadtu <- du$dPb206U238dt
dbdtu <- 0
dcdtu <- du$dPb207U235dt
dddtu <- 0
J1 <- dadtl*bb/cc + dbdtl*aa/cc - dcdtl*aa*bb/cc^2 + dddtl
J2 <- dadtu*bb/cc + dbdtu*aa/cc - dcdtu*aa*bb/cc^2 + dddtu
E11 <- fit$cov[1,1]
E12 <- fit$cov[1,2]
E22 <- fit$cov[2,2]
sy <- errorprop1x2(J1,J2,fit$cov[1,1],fit$cov[2,2],fit$cov[1,2])
ul <- y + ntfact(fit$alpha)*sy
ll <- y - ntfact(fit$alpha)*sy
t75 <- get.Pb207U235.age(x,d=d)[,'t75']
yconc <- age_to_Pb206U238_ratio(t75,d=d)[,'68']
overshot <- ul>yconc
ul[overshot] <- yconc[overshot]
cix <- c(x,rev(x))
ciy <- c(ll,rev(ul))
}
} else {
fit2d <- tw3d2d(fit)
X[1] <- age_to_U238Pb206_ratio(fit2d$par['t'],d=d)[,'86']
Y[1] <- age_to_Pb207Pb206_ratio(fit2d$par['t'],d=d)[,'76']
r75 <- age_to_Pb207U235_ratio(fit2d$par['t'],d=d)[,'75']
r68 <- 1/X[1]
Y[2] <- fit2d$par['a0']
xl <- X[1]
yl <- Y[1]
y0 <- Y[2]
tl <- check.zero.UPb(fit2d$par['t'])
U <- settings('iratio','U238U235')[1]
nsteps <- 100
x <- seq(from=max(.Machine$double.xmin,usr[1]),to=usr[2],length.out=nsteps)
y <- yl + (y0-yl)*(1-x*r68)
D <- mclean(tt=tl,d=d)
d75dtl <- D$dPb207U235dt
d68dtl <- D$dPb206U238dt
dyldtl <- (d75dtl*r68 - r75*d68dtl)/(U*r68^2)
J1 <- dyldtl*x*r68 + yl*x*d68dtl - y0*x*d68dtl
J2 <- 1 - x*r68
sy <- errorprop1x2(J1,J2,fit2d$cov[1,1],fit2d$cov[2,2],fit2d$cov[1,2])
ul <- y + ntfact(fit2d$alpha)*sy
ll <- y - ntfact(fit2d$alpha)*sy
yconc <- rep(0,nsteps)
t68 <- get.Pb206U238.age(1/x,d=d)[,'t68']
yconc <- age_to_Pb207Pb206_ratio(t68,d=d)[,'76']
if (y0>yl){
overshot <- (ll<yconc & ll<y0/2)
ll[overshot] <- yconc[overshot]
overshot <- (ul<yconc & ul<y0/2)
ul[overshot] <- yconc[overshot]
} else {
overshot <- ul>yconc
ul[overshot] <- yconc[overshot]
overshot <- ll>yconc
ll[overshot] <- yconc[overshot]
}
cix <- c(x,rev(x))
ciy <- c(ll,rev(ul))
}
graphics::polygon(cix,ciy,col='gray80',border=NA)
graphics::lines(X,Y)
}
tw3d2d <- function(fit){
out <- list(par=fit$par,cov=fit$cov,alpha=fit$alpha)
if (fit$format > 3){
labels <- c('t','a0')
out$par <- c(fit$par['t'],fit$par[3]/fit$par[2])
J <- matrix(0,2,3)
J[1,1] <- 1
J[2,2] <- -fit$par[3]/fit$par[2]^2
J[2,3] <- 1/fit$par[2]
out$cov <- J %*% fit$cov[1:3,1:3] %*% t(J)
names(out$par) <- labels
colnames(out$cov) <- labels
}
out
}
discordia.title <- function(fit,wetherill,sigdig=2,...){
lower.age <- roundit(fit$par[1],fit$err[,1],sigdig=sigdig,text=TRUE)
if (inflate(fit)){
args1 <- quote(a%+-%b~'|'~c~'|'~d~u~'(n='*n*')')
args2 <- quote(a%+-%b~'|'~c~'|'~d~u)
} else {
args1 <- quote(a%+-%b~'|'~c~u~'(n='*n*')')
args2 <- quote(a%+-%b~'|'~c~u)
}
list1 <- list(a=lower.age[1],b=lower.age[2],
c=lower.age[3],u='Ma',n=fit$n)
if (wetherill){
upper.age <- roundit(fit$par[2],fit$err[,2],sigdig=sigdig,text=TRUE)
expr1 <- quote('lower intercept =')
expr2 <- quote('upper intercept =')
list2 <- list(a=upper.age[1],b=upper.age[2],c=upper.age[3],u='Ma')
if (inflate(fit)){
list1$d <- lower.age[4]
list2$d <- upper.age[4]
}
} else if (fit$format%in%c(1,2,3)){
i76 <- roundit(fit$par['a0'],fit$err[,'a0'],sigdig=sigdig,text=TRUE)
expr1 <- quote('age =')
expr2 <- quote('('^207*'Pb/'^206*'Pb)'[o]*'=')
list2 <- list(a=i76[1],b=i76[2],c=i76[3],u='')
if (inflate(fit)){
list1$d <- lower.age[4]
list2$d <- i76[4]
}
} else if (fit$format%in%c(4,5,6)){
i64 <- roundit(fit$par['a0'],fit$err[,'a0'],sigdig=sigdig,text=TRUE)
i74 <- roundit(fit$par['b0'],fit$err[,'b0'],sigdig=sigdig,text=TRUE)
expr1 <- quote('age =')
expr2 <- quote('('^206*'Pb/'^204*'Pb)'[o]*'=')
expr3 <- quote('('^207*'Pb/'^204*'Pb)'[o]*'=')
list2 <- list(a=i64[1],b=i64[2],c=i64[3],u='')
list3 <- list(a=i74[1],b=i74[2],c=i74[3],u='')
if (inflate(fit)){
list1$d <- lower.age[4]
list2$d <- i64[4]
list3$d <- i74[4]
}
call3 <- substitute(e~a,list(e=expr3,a=args2))
line3 <- do.call('substitute',list(call3,list3))
} else if (fit$format%in%c(7,8)){
i86 <- 1/fit$par['a0']
i87 <- 1/fit$par['b0']
i86err <- i86*fit$err[,'a0']/fit$par['a0']
i87err <- i87*fit$err[,'b0']/fit$par['b0']
ri86 <- roundit(i86,i86err,sigdig=sigdig,text=TRUE)
ri87 <- roundit(i87,i87err,sigdig=sigdig,text=TRUE)
expr1 <- quote('age =')
expr2 <- quote('('^208*'Pb/'^206*'Pb)'[o]*'=')
expr3 <- quote('('^208*'Pb/'^207*'Pb)'[o]*'=')
list2 <- list(a=ri86[1],b=ri86[2],c=ri86[3],u='')
list3 <- list(a=ri87[1],b=ri87[2],c=ri87[3],u='')
if (inflate(fit)){
list1$d <- lower.age[4]
list2$d <- ri86[4]
list3$d <- ri87[4]
}
call3 <- substitute(e~a,list(e=expr3,a=args2))
line3 <- do.call('substitute',list(call3,list3))
}
call1 <- substitute(e~a,list(e=expr1,a=args1))
call2 <- substitute(e~a,list(e=expr2,a=args2))
line1 <- do.call('substitute',list(call1,list1))
line2 <- do.call('substitute',list(call2,list2))
if (fit$model==1){
line4 <- substitute('MSWD ='~a*', p('*chi^2*') ='~b,
list(a=signif(fit$mswd,sigdig),
b=signif(fit$p.value,sigdig)))
} else if (fit$model==3){
ci <- ci_log2lin_lud(fit=fit)
rounded.disp <- roundit(ci[1],ci[2:3],sigdig=sigdig,text=TRUE)
line4 <- substitute('overdispersion ='~a+b/-c~'Ma',
list(a=rounded.disp[1],b=rounded.disp[3],
c=rounded.disp[2]))
}
extrarow <- fit$format>3 & !wetherill
if (fit$model==1 & extrarow){
mymtext(line1,line=3,...)
mymtext(line2,line=2,...)
mymtext(line3,line=1,...)
mymtext(line4,line=0,...)
} else if (fit$model==2 & extrarow){
mymtext(line1,line=2,...)
mymtext(line2,line=1,...)
mymtext(line3,line=0,...)
} else if (fit$model==3 & extrarow){
mymtext(line1,line=3,...)
mymtext(line2,line=2,...)
mymtext(line3,line=1,...)
mymtext(line4,line=0,...)
} else if (fit$model==1){
mymtext(line1,line=2,...)
mymtext(line2,line=1,...)
mymtext(line4,line=0,...)
} else if (fit$model==2){
mymtext(line1,line=1,...)
mymtext(line2,line=0,...)
} else if (fit$model==3){
mymtext(line1,line=2,...)
mymtext(line2,line=1,...)
mymtext(line4,line=0,...)
}
}
|
anova.eRm <- function(object, ...){
models <- c(list(object), list(...))
if(any(unlist(lapply(models, function(m){ "llra" %in% class(m) })))) stop("At least one model is an LLRA; comparison to other models not possible.")
for(i in seq_along(models)[-1L]){
if(!identical(unname(models[[1L]][["X"]]), unname(models[[i]][["X"]]))) stop("Models are not nested.")
}
models <- models[order(unlist(lapply(models, function(m){ m[["npar"]] })), decreasing = TRUE)]
calls <- unlist(lapply(models, function(m){ deparse(m[["call"]]) }))
LLs <- unlist(lapply(models, function(m){ m[["loglik"]] }))
npar <- unlist(lapply(models, function(m){ m[["npar"]] }))
dev <- -2*LLs
LR <- abs(c(NA, LLs[1L] - LLs[-1L]))
df <- c(NA, npar[1L] - npar[-1L])
p <- pchisq(LR, df, lower.tail = FALSE)
return(
structure(
list(
calls = calls,
statistics = data.frame(LLs=LLs, dev=dev, npar=npar, LR=LR, df=df, p=p)
),
class="eRm_anova")
)
}
print.eRm_anova <- function(x, ...){
if(interactive()) writeLines("")
writeLines("Analysis of Deviances Table\n")
for(i in seq_along(x[[1L]])){
writeLines(strwrap(paste0("Model ", i, ": ", x[[1L]][[i]]), width = getOption("width"), exdent = 4L))
}
writeLines("")
x_print <- as.matrix(x[[2L]])
rownames(x_print) <- paste0("Model ", seq_along(x[[1L]]))
colnames(x_print) <- c("cond. LL", "Deviance", "npar", "LR", "df", "p-value")
printCoefmat(as.matrix(x_print), cs.ind=c(1,2), tst.ind=4, has.Pvalue=TRUE, na.print = "")
writeLines("")
message("Note: The models appear to be nested, please check this assumption.")
if(interactive()) writeLines("")
invisible(x)
}
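# Usage sketch (illustrative): X and W_design are hypothetical objects --
# comparing an unrestricted Rasch model with a restricted LLTM fitted by eRm.
if (FALSE) {
  fit_rm   <- RM(X)
  fit_lltm <- LLTM(X, W = W_design)
  anova(fit_lltm, fit_rm)   # dispatches to anova.eRm()
}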
|
rowdiff<-function (df, direction = "forward",exclude=NULL,na.rm=FALSE,na_action=NULL,...){
UseMethod("rowdiff")
}
rowdiff.data.frame<-function(df, direction = "forward", exclude=NULL, na.rm=FALSE,na_action=NULL,...){
if(!is.null(exclude)){
df<-Filter(function(x) ! class(x) %in% exclude, df)
}
stopifnot("Only forward and reverse are supported"= direction %in% c("forward","reverse"))
if(direction=="forward"){
res<-as.data.frame(sapply(df,function(x) x-dplyr::lead(x,1)))
}
if(direction=="reverse"){
res<-as.data.frame(sapply(df, function(x) x-dplyr::lag(x,1)))
}
if(na.rm){
res<-na_replace(res,how=na_action,...)
}
res
}
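# Usage sketch: "forward" subtracts the next row from the current one,
# "reverse" subtracts the previous row; dplyr must be installed.
if (FALSE) {
  df <- data.frame(a = c(1, 4, 9), b = c(2, 3, 5))
  rowdiff(df, direction = "forward")
  rowdiff(df, direction = "reverse")
}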
|
convert_bin <- function(m1, n1, m2, n2, type = c("logOR", "logRR", "RD")) {
lstt <- c("logOR", "logRR", "RD")
type <- match.arg(type)
util_check_nonneg(m1)
util_check_nonneg(n1)
util_check_nonneg(m2)
util_check_nonneg(n2)
if (length(m1) != length(n1) || length(n1) != length(m2) || length(m2) != length(n2)) {
stop("'m1', 'n1', 'm2', and 'n2' should have the same length.")
} else if (!is.element(type, lstt)) {
stop("Unknown 'type' specified.")
}
res <- NULL
if (type == "logOR") {
res$y <- log((m1 + 0.5)*(n2 - m2 + 0.5)/(n1 - m1 + 0.5)/(m2 + 0.5))
res$v <- 1.0/(m1 + 0.5) + 1.0/(n1 - m1 + 0.5) + 1.0/(m2 + 0.5) + 1.0/(n2 - m2 + 0.5)
} else if (type == "logRR") {
res$y <- log((m1 + 0.5)*(n2 + 0.5)/(n1 + 0.5)/(m2 + 0.5))
res$v <- 1.0/(m1 + 0.5) - 1.0/(n1 + 0.5) + 1.0/(m2 + 0.5) - 1.0/(n2 + 0.5)
} else if (type == "RD") {
res$y <- m1/n1 - m2/n2
res$v <- ((m1 + 0.0625)/(n1 + 0.125))*((n1 - m1 + 0.0625)/(n1 + 0.125))/n1 +
((m2 + 0.0625)/(n2 + 0.125))*((n2 - m2 + 0.0625)/(n2 + 0.125))/n2
}
res <- data.frame(res)
return(res)
}
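# Usage sketch (illustrative counts): two hypothetical 2x2 tables converted to
# log odds ratios with 0.5 continuity corrections; util_check_nonneg() is
# assumed to come from the surrounding package.
if (FALSE) {
  convert_bin(m1 = c(12, 8), n1 = c(50, 40),
              m2 = c(5, 7),  n2 = c(48, 45), type = "logOR")
}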
|
ncdc_units_normal_mly <- list(
"MLY-CLDD-BASE45" = list(name = "MLY-CLDD-BASE45", units = "", description = "Long-term averages of monthly cooling degree days with base 45F"),
"MLY-CLDD-BASE50" = list(name = "MLY-CLDD-BASE50", units = "", description = "Long-term averages of monthly cooling degree days with base 50F"),
"MLY-CLDD-BASE55" = list(name = "MLY-CLDD-BASE55", units = "", description = "Long-term averages of monthly cooling degree days with base 55F"),
"MLY-CLDD-BASE57" = list(name = "MLY-CLDD-BASE57", units = "", description = "Long-term averages of monthly cooling degree days with base 57F"),
"MLY-CLDD-BASE60" = list(name = "MLY-CLDD-BASE60", units = "", description = "Long-term averages of monthly cooling degree days with base 60F"),
"MLY-CLDD-BASE70" = list(name = "MLY-CLDD-BASE70", units = "", description = "Long-term averages of monthly cooling degree days with base 70F"),
"MLY-CLDD-BASE72" = list(name = "MLY-CLDD-BASE72", units = "", description = "Long-term averages of monthly cooling degree days with base 72F"),
"MLY-CLDD-NORMAL" = list(name = "MLY-CLDD-NORMAL", units = "", description = "Long-term averages of monthly cooling degree days with base 65F"),
"MLY-DUTR-NORMAL" = list(name = "MLY-DUTR-NORMAL", units = "", description = "Long-term averages of monthly diurnal temperature range"),
"MLY-DUTR-STDDEV" = list(name = "MLY-DUTR-STDDEV", units = "", description = "Long-term standard deviations of monthly diurnal temperature range"),
"MLY-GRDD-BASE40" = list(name = "MLY-GRDD-BASE40", units = "", description = "Long-term averages of monthly growing degree days with base 40F"),
"MLY-GRDD-BASE45" = list(name = "MLY-GRDD-BASE45", units = "", description = "Long-term averages of monthly growing degree days with base 45F"),
"MLY-GRDD-BASE50" = list(name = "MLY-GRDD-BASE50", units = "", description = "Long-term averages of monthly growing degree days with base 50F"),
"MLY-GRDD-BASE55" = list(name = "MLY-GRDD-BASE55", units = "", description = "Long-term averages of monthly growing degree days with base 55F"),
"MLY-GRDD-BASE57" = list(name = "MLY-GRDD-BASE57", units = "", description = "Long-term averages of monthly growing degree days with base 57F"),
"MLY-GRDD-BASE60" = list(name = "MLY-GRDD-BASE60", units = "", description = "Long-term averages of monthly growing degree days with base 60F"),
"MLY-GRDD-BASE65" = list(name = "MLY-GRDD-BASE65", units = "", description = "Long-term averages of monthly growing degree days with base 65F"),
"MLY-GRDD-BASE70" = list(name = "MLY-GRDD-BASE70", units = "", description = "Long-term averages of monthly growing degree days with base 70F"),
"MLY-GRDD-BASE72" = list(name = "MLY-GRDD-BASE72", units = "", description = "Long-term averages of monthly growing degree days with base 72F"),
"MLY-GRDD-TB4886" = list(name = "MLY-GRDD-TB4886", units = "", description = "Long-term averages of monthly growing degree days with truncated bases 48F and 86F"),
"MLY-GRDD-TB5086" = list(name = "MLY-GRDD-TB5086", units = "", description = "Long-term averages of monthly growing degree days with truncated bases 50F and 86F"),
"MLY-HTDD-BASE40" = list(name = "MLY-HTDD-BASE40", units = "", description = "Long-term averages of monthly heating degree days with base 40F"),
"MLY-HTDD-BASE45" = list(name = "MLY-HTDD-BASE45", units = "", description = "Long-term averages of monthly heating degree days with base 45F"),
"MLY-HTDD-BASE50" = list(name = "MLY-HTDD-BASE50", units = "", description = "Long-term averages of monthly heating degree days with base 50F"),
"MLY-HTDD-BASE55" = list(name = "MLY-HTDD-BASE55", units = "", description = "Long-term averages of monthly heating degree days with base 55F"),
"MLY-HTDD-BASE57" = list(name = "MLY-HTDD-BASE57", units = "", description = "Long-term averages of monthly heating degree days with base 57F"),
"MLY-HTDD-BASE60" = list(name = "MLY-HTDD-BASE60", units = "", description = "Long-term averages of monthly heating degree days with base 60F"),
"MLY-HTDD-NORMAL" = list(name = "MLY-HTDD-NORMAL", units = "", description = "Long-term averages of monthly heating degree days with base 65F"),
"MLY-PRCP-25PCTL" = list(name = "MLY-PRCP-25PCTL", units = "", description = "25th percentiles of monthly precipitation totals"),
"MLY-PRCP-50PCTL" = list(name = "MLY-PRCP-50PCTL", units = "", description = "50th percentiles of monthly precipitation totals"),
"MLY-PRCP-75PCTL" = list(name = "MLY-PRCP-75PCTL", units = "", description = "75th percentiles of monthly precipitation totals"),
"MLY-PRCP-AVGNDS-GE001HI" = list(name = "MLY-PRCP-AVGNDS-GE001HI", units = "", description = "Long-term averages of number of days per month with precipitation >= 0.01 inches"),
"MLY-PRCP-AVGNDS-GE010HI" = list(name = "MLY-PRCP-AVGNDS-GE010HI", units = "", description = "Long-term averages of number of days per month with precipitation >= 0.10 inches"),
"MLY-PRCP-AVGNDS-GE050HI" = list(name = "MLY-PRCP-AVGNDS-GE050HI", units = "", description = "Long-term averages of number of days per month with precipitation >= 0.50 inches"),
"MLY-PRCP-AVGNDS-GE100HI" = list(name = "MLY-PRCP-AVGNDS-GE100HI", units = "", description = "Long-term averages of number of days per month with precipitation >= 1.00 inches"),
"MLY-PRCP-NORMAL" = list(name = "MLY-PRCP-NORMAL", units = "", description = "Long-term averages of monthly precipitation totals"),
"MLY-SNOW-25PCTL" = list(name = "MLY-SNOW-25PCTL", units = "", description = "25th percentiles of monthly snowfall totals"),
"MLY-SNOW-50PCTL" = list(name = "MLY-SNOW-50PCTL", units = "", description = "50th percentiles of monthly snowfall totals"),
"MLY-SNOW-75PCTL" = list(name = "MLY-SNOW-75PCTL", units = "", description = "75th percentiles of monthly snowfall totals"),
"MLY-SNOW-AVGNDS-GE001TI" = list(name = "MLY-SNOW-AVGNDS-GE001TI", units = "", description = "Long-term averages of number of days per month with Snowfall >= 0.1 inches"),
"MLY-SNOW-AVGNDS-GE010TI" = list(name = "MLY-SNOW-AVGNDS-GE010TI", units = "", description = "Long-term averages of number of days per month with Snowfall >= 1.0 inches"),
"MLY-SNOW-AVGNDS-GE030TI" = list(name = "MLY-SNOW-AVGNDS-GE030TI", units = "", description = "Long-term averages of number of days per month with Snowfall >= 3.0 inches"),
"MLY-SNOW-AVGNDS-GE050TI" = list(name = "MLY-SNOW-AVGNDS-GE050TI", units = "", description = "Long-term averages of number of days per month with Snowfall >= 5.0 inches"),
"MLY-SNOW-AVGNDS-GE100TI" = list(name = "MLY-SNOW-AVGNDS-GE100TI", units = "", description = "Long-term averages of number of days per month with Snowfall >= 10.0 inches"),
"MLY-SNOW-NORMAL" = list(name = "MLY-SNOW-NORMAL", units = "", description = "Long-term averages of monthly snowfall totals"),
"MLY-SNWD-AVGNDS-GE001WI" = list(name = "MLY-SNWD-AVGNDS-GE001WI", units = "", description = "Long-term averages of number of days per month with snow depth >= 1 inch"),
"MLY-SNWD-AVGNDS-GE003WI" = list(name = "MLY-SNWD-AVGNDS-GE003WI", units = "", description = "Long-term averages of number of days per month with snow depth >= 3 inches"),
"MLY-SNWD-AVGNDS-GE005WI" = list(name = "MLY-SNWD-AVGNDS-GE005WI", units = "", description = "Long-term averages of number of days per month with snow depth >= 5 inches"),
"MLY-SNWD-AVGNDS-GE010WI" = list(name = "MLY-SNWD-AVGNDS-GE010WI", units = "", description = "Long-term averages of number of days per month with snow depth >= 10 inches"),
"MLY-TAVG-NORMAL" = list(name = "MLY-TAVG-NORMAL", units = "", description = "Long-term averages of monthly average temperature"),
"MLY-TAVG-STDDEV" = list(name = "MLY-TAVG-STDDEV", units = "", description = "Long-term standard deviations of monthly average temperature"),
"MLY-TMAX-AVGNDS-GRTH040" = list(name = "MLY-TMAX-AVGNDS-GRTH040", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 40F"),
"MLY-TMAX-AVGNDS-GRTH050" = list(name = "MLY-TMAX-AVGNDS-GRTH050", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 50F"),
"MLY-TMAX-AVGNDS-GRTH060" = list(name = "MLY-TMAX-AVGNDS-GRTH060", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 60F"),
"MLY-TMAX-AVGNDS-GRTH070" = list(name = "MLY-TMAX-AVGNDS-GRTH070", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 70F"),
"MLY-TMAX-AVGNDS-GRTH080" = list(name = "MLY-TMAX-AVGNDS-GRTH080", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 80F"),
"MLY-TMAX-AVGNDS-GRTH090" = list(name = "MLY-TMAX-AVGNDS-GRTH090", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 90F"),
"MLY-TMAX-AVGNDS-GRTH100" = list(name = "MLY-TMAX-AVGNDS-GRTH100", units = "", description = "Long-term average number of days per month where tmax is greater than or equal to 100F"),
"MLY-TMAX-AVGNDS-LSTH032" = list(name = "MLY-TMAX-AVGNDS-LSTH032", units = "", description = "Long-term average number of days per month where tmax is less than or equal to 32F"),
"MLY-TMAX-NORMAL" = list(name = "MLY-TMAX-NORMAL", units = "", description = "Long-term averages of monthly maximum temperature"),
"MLY-TMAX-STDDEV" = list(name = "MLY-TMAX-STDDEV", units = "", description = "Long-term standard deviations of monthly maximum temperature"),
"MLY-TMIN-AVGNDS-LSTH000" = list(name = "MLY-TMIN-AVGNDS-LSTH000", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 0F"),
"MLY-TMIN-AVGNDS-LSTH010" = list(name = "MLY-TMIN-AVGNDS-LSTH010", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 10F"),
"MLY-TMIN-AVGNDS-LSTH020" = list(name = "MLY-TMIN-AVGNDS-LSTH020", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 20F"),
"MLY-TMIN-AVGNDS-LSTH032" = list(name = "MLY-TMIN-AVGNDS-LSTH032", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 32F"),
"MLY-TMIN-AVGNDS-LSTH040" = list(name = "MLY-TMIN-AVGNDS-LSTH040", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 40F"),
"MLY-TMIN-AVGNDS-LSTH050" = list(name = "MLY-TMIN-AVGNDS-LSTH050", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 50F"),
"MLY-TMIN-AVGNDS-LSTH060" = list(name = "MLY-TMIN-AVGNDS-LSTH060", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 60F"),
"MLY-TMIN-AVGNDS-LSTH070" = list(name = "MLY-TMIN-AVGNDS-LSTH070", units = "", description = "Long-term average number of days per month where tmin is less than or equal to 70F"),
"MLY-TMIN-NORMAL" = list(name = "MLY-TMIN-NORMAL", units = "", description = "Long-term averages of monthly minimum temperature"),
"MLY-TMIN-PRBOCC-LSTH016" = list(name = "MLY-TMIN-PRBOCC-LSTH016", units = "", description = "probability of 16F or below at least once in the month"),
"MLY-TMIN-PRBOCC-LSTH020" = list(name = "MLY-TMIN-PRBOCC-LSTH020", units = "", description = "probability of 20F or below at least once in the month"),
"MLY-TMIN-PRBOCC-LSTH024" = list(name = "MLY-TMIN-PRBOCC-LSTH024", units = "", description = "probability of 24F or below at least once in the month"),
"MLY-TMIN-PRBOCC-LSTH028" = list(name = "MLY-TMIN-PRBOCC-LSTH028", units = "", description = "probability of 28F or below at least once in the month"),
"MLY-TMIN-PRBOCC-LSTH032" = list(name = "MLY-TMIN-PRBOCC-LSTH032", units = "", description = "probability of 32F or below at least once in the month"),
"MLY-TMIN-PRBOCC-LSTH036" = list(name = "MLY-TMIN-PRBOCC-LSTH036", units = "", description = "probability of 36F or below at least once in the month"),
"MLY-TMIN-STDDEV" = list(name = "MLY-TMIN-STDDEV", units = "", description = "Long-term standard deviations of monthly minimum temperature")
)
|
library(LakeMetabolizer)
data.path = system.file('extdata', package="LakeMetabolizer")
sp.data = load.all.data('sparkling', data.path)
ts.data = sp.data$data
u10 = wind.scale(ts.data)
ts.data = rmv.vars(ts.data, 'wnd', ignore.offset=TRUE)
ts.data = merge(ts.data, u10)
k600_cole = k.cole(ts.data)
k600_crusius = k.crusius(ts.data)
ha2m2 <- 10000
kd = sp.data$metadata$averagekd
wnd.z = 10
atm.press = 1018
lat = sp.data$metadata$latitude
lake.area = sp.data$metadata$lakearea*ha2m2
lwnet = calc.lw.net(ts.data, lat, atm.press)
ts.data = merge(ts.data, lwnet)
k600_read = k.read(ts.data, wnd.z=wnd.z, Kd=kd, atm.press=atm.press,
lat=lat, lake.area=lake.area)
k600_soloviev = k.read.soloviev(ts.data, wnd.z=wnd.z, Kd=kd,
atm.press=atm.press, lat=lat, lake.area=lake.area)
k600_macIntyre = k.macIntyre(ts.data, wnd.z=wnd.z, Kd=kd, atm.press=atm.press)
k600_heiskanen = k.heiskanen(ts.data, wnd.z, kd, atm.press)
k600_vachon = k.vachon(ts.data, lake.area)
cols <- c("
models <- list(
list('name'="MacIntyre", data = k600_macIntyre, col = cols[1], lty = 6, lwd = 1.7),
list('name'="Cole", data = k600_cole, col = cols[2], lty = 1, lwd = 1.2),
list('name'="Vachon", data = k600_vachon, col = cols[3], lty = 1, lwd = 1.1),
list('name'="Read", data = k600_read, col = cols[4], lty = 1, lwd = 1.2),
list('name'="Soloviev", data = k600_soloviev, col = cols[5], lty = 6, lwd = 1.7),
list('name'="Heiskanen", data = k600_heiskanen, col = cols[6], lty = 1, lwd = 1.2),
list('name'="Crusius", data = k600_crusius, col = cols[7], lty = 1, lwd = 1.2))
add_axes <- function(xlim, ylim, ylabel = pretty(ylim,10), panel.txt){
prc_x = 0.1
prc_y = 0.07
tick_len <- 0.15
ext_x <- c(xlim[1]-86400, pretty(xlim,3), xlim[2]+86400)
ext_y <- c(ylim[1]-10, pretty(ylim,10), ylim[2]+10)
ylab <- c("",ylabel,"")
if (is.na(ylabel[1])) ylab = NA
axis(side = 1, at = ext_x , labels = strftime(ext_x,'%H:%M'), tcl = tick_len)
axis(side = 2, at = ext_y, labels = ylab, tcl = tick_len)
axis(side = 3, at = ext_x, labels = NA, tcl = tick_len)
axis(side = 4, at = ext_y, labels = NA, tcl = tick_len)
x_txt <- (xlim[2] - xlim[1])*prc_x+xlim[1]
y_txt <- ylim[2]-(ylim[2] - ylim[1])*prc_y
text(x = x_txt, y_txt,labels = panel.txt)
}
add_night <- function(xlim, ylim){
rise.set = sun.rise.set(xlim[1]+43200, lat)
polygon(x = c(xlim[1], rise.set[1], rise.set[1], xlim[1]), y = c(ylim[1],ylim[1],ylim[2],ylim[2]), col = night_col,border = NA)
polygon(x = c(xlim[2], rise.set[2], rise.set[2], xlim[2]), y = c(ylim[1],ylim[1],ylim[2],ylim[2]), col = night_col,border = NA)
}
moving_ave <- function(df, window = 18){
out <- df[,2]*NA
l_win <- floor(window/2)
r_win <- window-l_win
end_win <- nrow(df)-r_win
for (i in l_win:end_win){
strt <- i-l_win+1
out[i] <- mean(df[strt:(strt+r_win), 2])
}
df[,2] <- out
return(df)
}
add_models <- function(models){
.empty = sapply(X = models, FUN = function(x) {
lines(moving_ave(x$data), col=x$col, lty = x$lty, lwd = x$lwd)
})
}
add_legend <- function(models, xlim, ylim, prc_x = 0.26, prc_y = 0.06){
y_strt <- ylim[2]-(ylim[2] - ylim[1])*prc_y
y_spc <- (ylim[2] - ylim[1])*0.05
x_len <- (xlim[2] - xlim[1])*0.16
x <- c((xlim[2] - xlim[1])*prc_x+xlim[1], (xlim[2] - xlim[1])*prc_x+xlim[1] + x_len)
for (i in 1:length(models)){
y = y_strt-(i-1)*y_spc
lines(x, c(y,y),
col =models[[i]]$col,
lty = models[[i]]$lty,
lwd = models[[i]]$lwd)
text(x[2],y, models[[i]]$name, pos = 4, cex = 0.65)
}
}
width = 3.37
night_col = 'grey90'
height = 1.9
l_mar = 0.35
b_mar = 0.3
t_mar = 0.05
r_mar= 0.05
gapper = 0.15
ylim = c(0,7.55)
xlim = as.POSIXct(c('2009-07-02 22:00', '2009-07-04 02:00', '2009-07-08 22:00', '2009-07-10 02:00'))
png('~/k600_figure.png', res=300, width=width, height=height, units = 'in')
layout(matrix(c(rep(1,10),rep(2,9)),nrow=1))
par(mai=c(b_mar,l_mar,t_mar,0), omi = c(0,0,0,r_mar),xpd=FALSE,
mgp = c(1.15,.05,0))
plot(c(0,NA),c(0,NA), type='l',
axes = FALSE,
xaxs = 'i', yaxs = 'i',
ylim=ylim,
ylab=expression(k[600]~(m~day^-1)),
xlab=strftime(mean(xlim[1:2]), '%Y-%m-%d'),
xlim=xlim[1:2])
add_night(xlim[1:2], ylim)
add_models(models)
add_legend(models, xlim, ylim)
add_axes(xlim[1:2], ylim, panel.txt = 'a)')
par(mai=c(b_mar,gapper,t_mar,0))
plot(c(0,NA),c(0,NA), type='l',
axes = FALSE,
xaxs = 'i', yaxs = 'i',
ylim=ylim,
ylab=NA,
xlab=strftime(mean(xlim[3:4]), '%Y-%m-%d'),
xlim=xlim[3:4])
add_night(xlim[3:4], ylim)
add_models(models)
add_axes(xlim[3:4], ylim, ylabel = NA, panel.txt = 'b)')
dev.off()
cat('Cole:', mean(k600_cole[,2]), '\n')
cat('Crusius:', mean(k600_crusius[,2]), '\n')
cat('MacIntyre:', mean(k600_macIntyre[,2]), '\n')
cat('Read:', mean(k600_read[,2]), '\n')
cat('Read_soloviev:', mean(k600_soloviev[,2]), '\n')
cat('heiskanen:', mean(k600_heiskanen[,2]), '\n')
cat('vachon:', mean(k600_vachon[,2]), '\n')
|
scale_ratio <- function(...) {
  .Defunct(msg = "Defunct since manifestoR 1.5 to avoid confusion: previous versions were ambiguous about what this function actually calculated. It is now split into `scale_ratio_1` (Kim/Fording, Laver/Garry) and `scale_ratio_2`, which implement two different approaches that could both be considered 'ratio' scales.")
}
|
split_multi_to_biallelic_snps <- function(mat,
ar_results,
o_ref,
o_alt,
snpeff){
check_is_this_class(mat, "matrix")
check_is_this_class(ar_results, "data.frame")
  if (length(o_ref) != length(o_alt)) {
    stop("o_ref and o_alt need to have the same length")
  }
  if (length(o_ref) != nrow(mat)) {
    stop("o_ref and o_alt need to have an entry for every row of mat")
  }
if (!is.null(snpeff)) {
check_is_this_class(snpeff, "list")
check_is_this_class(snpeff[[1]], "character")
if (length(snpeff) != length(o_ref)) {
stop("If not null snpeff needs to have an entry for every row in mat")
}
}
num_alleles <- apply(mat, 1, function(row) {
length(unique(row))
})
row_indices <- 1:nrow(mat)
split_rows_flag <- rep(row_indices, (num_alleles - 1))
mat_split <- mat[split_rows_flag, ]
ar_results_split <- ar_results[split_rows_flag, , drop = FALSE]
rownames(mat_split) <- rownames(ar_results_split)
o_ref_split <- o_ref[split_rows_flag]
o_alt_split <- unlist(sapply(unique(split_rows_flag), function(i) {
alleles = rep(o_alt[i], sum(split_rows_flag == i))
sapply(1:length(alleles), function(j) {
unlist(strsplit(alleles[j], ","))[j]
})
}))
snpeff_split <- snpeff[split_rows_flag]
return(list(mat_split = mat_split,
ar_results_split = ar_results_split,
o_ref_split = o_ref_split,
o_alt_split = o_alt_split,
snpeff_split = snpeff_split,
split_rows_flag = split_rows_flag))
}
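## Usage sketch (hypothetical inputs; assumes the package's check_is_this_class() helper
## accepts plain matrices and data frames). A biallelic row is kept as-is, while a
## triallelic row (REF "C", ALT "G,T") is duplicated into two biallelic rows whose
## o_alt_split entries are "G" and "T":
## mat <- rbind(c("A", "A", "T"), c("C", "G", "T"))
## split_multi_to_biallelic_snps(mat,
##                               ar_results = data.frame(score = c(0.1, 0.9)),
##                               o_ref = c("A", "C"),
##                               o_alt = c("T", "G,T"),
##                               snpeff = NULL)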
|
fastcov2 <- function(x, y = NULL, col1, col2, df){
if(!is.matrix(x)){
x <- as.matrix(x)
}
if(is.null(y)){
y <- x
} else {
if(!is.matrix(x)){
y <- as.matrix(y)
}
}
stopifnot(nrow(x) == nrow(y))
if(missing(col1)){
col1 <- seq_len(ncol(x))
} else {
stopifnot(all(col1 >= 1 & col1 <= ncol(x)))
}
if(missing(col2)){
col2 <- seq_len(ncol(y))
} else {
stopifnot(all(col2 >= 1 & col2 <= ncol(y)))
}
if(!length(col1) || !length(col2)){
return(matrix(NA, nrow = length(col1), ncol = length(col2)))
}
cm1 <- colMeans(x[, col1, drop = FALSE])
cm2 <- colMeans(y[, col2, drop = FALSE])
nobs <- nrow(x)
if(missing(df)){
df <- nobs - 1
}
fastcov(x, y, nobs, col1, col2, cm1, cm2, df)
}
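## Usage sketch: fastcov2() delegates to the package's compiled fastcov() routine, so for a
## plain numeric matrix the result is expected to agree with stats::cov() (df = n - 1):
## x <- matrix(rnorm(200), nrow = 50)
## all.equal(fastcov2(x), cov(x), check.attributes = FALSE)
## fastcov2(x, col1 = 1:2, col2 = 3:4)   # covariance block between columns 1:2 and 3:4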
|
vec2symMat <- function (x, diag=TRUE, byrow=FALSE) {
m <- length(x)
d <- if (diag) 1 else -1
n <- floor((sqrt(1 + 8*m) - d)/2)
if (m != n*(n + d)/2)
stop("Cannot make a square matrix as the length of \"x\" is incorrect.")
mat <- Diag(n)
if (byrow) {
mat[upper.tri(mat, diag=diag)] <- x
index <- lower.tri(mat)
mat[index] <- t(mat)[index]
} else {
mat[lower.tri(mat, diag=diag)] <- x
index <- upper.tri(mat)
mat[index] <- t(mat)[index]
}
mat
}
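## Worked example (assumes the Diag() helper used above is available in the package):
## vec2symMat(1:6) fills the lower triangle column-wise and mirrors it, giving
##   1 2 3
##   2 4 5
##   3 5 6
## vec2symMat(1:3, diag = FALSE) keeps the unit diagonal from Diag(3) and places 1:3
## in the off-diagonal positions.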
|
Oyster_subincr_shell_height <-
function(subincr_matrix, IncG, Xstep=0.1){
print("Calculating subincremental shell height")
p1x<-subincr_matrix[,2]
p2x<-subincr_matrix[,3]
firstl<-round(p1x/Xstep+0.5,0)+1
lastl<-round(p2x/Xstep-0.5,0)+1
p1y<-0
p2y<-0
shell_height<-0
for(t in 1:(length(IncG[1,])-1)){
p1y<-append(p1y,IncG[firstl[t],t])
p2y<-append(p2y,IncG[lastl[t],t])
L<-sqrt((p2x[t]-p1x[t])^2+(p2y[t]-p1y[t])^2)
if(L<shell_height[length(shell_height)]){
L<-shell_height[length(shell_height)]
}
shell_height<-append(shell_height,L)
}
subincr_matrix<-cbind(subincr_matrix,p1y,p2y,shell_height,firstl,lastl)
dev.new(); plot(subincr_matrix[,c(1,8)], type = "l")
return(subincr_matrix)
}
|
pnw_palettes <- list(
Starfish = rbind(c('
Shuksan = rbind(c('
Bay = rbind(c('
Winter = rbind(c('
Lake = rbind(c('
Sunset = rbind(c('
Shuksan2 = rbind(c('
Cascades = rbind(c("
Sailboat = rbind(c('
Moth = rbind(c('
Spring = rbind(c('
Mushroom = rbind(c('
Sunset2 = rbind(c('
Anemone = rbind(c("
)
pnw_palette <- function(name, n, type = c("discrete", "continuous")) {
pal <- pnw_palettes[[name]]
if (is.null(pal)){
stop("Palette not found.")
}
if (missing(n)) {
n <- length(pal[1,])
}
if (missing(type)) {
if(n > length(pal[1,])){type <- "continuous"}
else{ type <- "discrete"}
}
type <- match.arg(type)
if (type == "discrete" && n > length(pal[1,])) {
stop("Number of requested colors greater than what discrete palette can offer, \n use as continuous instead.")
}
out <- switch(type,
continuous = grDevices::colorRampPalette(pal[1,])(n),
discrete = pal[1,][pal[2,] %in% c(1:n)],
)
structure(out, class = "PNWpalette", name = name)
}
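## Usage sketch (exact colours depend on the palette definitions above):
## pnw_palette("Bay", 5)                         # first 5 colours of the discrete palette
## pnw_palette("Bay", 100, type = "continuous")  # interpolated ramp of 100 colours
## Printing the result draws a labelled swatch via print.PNWpalette() below.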
print.PNWpalette <- function(x, ...) {
pallength <- length(x)
PNWpar <- par(mar=c(0.25,0.25,0.25,0.25))
on.exit(par(PNWpar))
image(1:pallength, 1,
as.matrix(1:pallength),
col = x,
axes=FALSE)
text(median(1:pallength), 1,
labels = paste0(attr(x,"name"),", n=",pallength),
cex = 3, family = "sans")
}
|
Oyster_subincr_av_thickness_X <-
function(IncG,subincr_matrix){
print("Calculating subincremental shell thickness")
IncT<-abs(IncG-IncG[,1])
IncT[IncT==0]<-NA
av_thickness<-colMeans(IncT,na.rm=TRUE)
av_thickness[is.na(av_thickness)]<-0
subincr_matrix<-cbind(subincr_matrix, av_thickness)
dev.new(); plot(subincr_matrix[,c(1,11)], type = "l")
return(subincr_matrix)
}
|
NULL
"conversion_table"
|
test.chngptm.logistic <- function() {
library("chngpt")
library("RUnit")
library("splines")
library("kyotil")
suppressWarnings(RNGversion("3.5.0"))
RNGkind("Mersenne-Twister", "Inversion")
tolerance=1e-1
if((file.exists("D:/gDrive/3software/_checkReproducibility") | file.exists("~/_checkReproducibility")) & R.Version()$system %in% c("x86_64, mingw32","x86_64, linux-gnu")) tolerance=1e-6
print(tolerance)
verbose = FALSE
fit=chngptm(formula.1=y~birth, formula.2= ~ NAb_SF162LS, dat.mtct, type="segmented", family="binomial", var.type="none")
checkEqualsNumeric(c(coef(fit),fit$chngpt), c(-3.1442857, 0.2040075, 0.5371311, -0.9231887, 6.0890449), tolerance=tolerance)
dat=sim.twophase.ran.inte(threshold.type="segmented", n=50, seed=1)
dat$y=ifelse(dat$y>10,1,0)
fit = chngptm (formula.1=y~z+(1|id), formula.2=~x, family="gaussian", dat, type="segmented", est.method="grid", var.type="bootstrap", ci.bootstrap.size=1)
fit <- chngptm(formula.1 = y ~ 1 + (1|id), formula.2 = ~ x, dat, type = "hinge", family = "binomial", var.type="none", est.method="grid")
n=1e3
set.seed(1)
X=data.frame(matrix(rnorm(3*n),ncol=3))
f=~I(X1>0)+X2*I(X1>0)+X3*I(X1>0)
lincomb=c(model.matrix(f, X) %*% c(0,1,0,0,1,1))
y=rbern(n,expit(lincomb))
dat=data.frame(y,X)
fit=chngptm(formula.1=y~X2+X3, formula.2=~X2*X1+X3*X1, dat, type="step", family="binomial", grid.search.max=5, var.type="none", verbose=2)
checkEqualsNumeric(c(coef(fit),fit$chngpt), c(0.18205886,-0.02300560,-0.11624135,0.64995016,0.95123073,1.22953518,-0.03592242), tolerance=tolerance)
data=sim.chngpt("thresholded", threshold.type="hinge", n=250, seed=1, beta=log(0.4), x.distr="norm", e.=4.1, b.transition=Inf, family="binomial")
data$xx=data$x; data$zz=data$z; data$yy=data$y
fit.0=glm(yy~zz+ns(xx,df=3), data, family="binomial")
fit = chngptm (formula.1=yy~zz, formula.2=~xx, family="binomial", data, type="hinge", est.method="smoothapprox", var.type="all", verbose=verbose, aux.fit=fit.0, lb.quantile=0.1, ub.quantile=0.9, tol=1e-4, maxit=1e3)
checkEqualsNumeric(sum(diag(fit$vcov[["model"]])), 0.614832, tolerance=tolerance)
checkEqualsNumeric(sum(diag(fit$vcov[["robust"]])), 2.311749, tolerance=tolerance)
fit = chngptm (formula.1=yy~zz, formula.2=~xx, family="binomial", data[1:120,], type="hinge", est.method="smoothapprox", var.type="bootstrap", verbose=verbose, lb.quantile=0.1, ub.quantile=0.9, tol=1e-4, maxit=1e3, ci.bootstrap.size=10, boot.test.inv.ci=TRUE)
checkEqualsNumeric(fit$vcov$symm[1,], c(-0.4888763,0.6802883,-6.0128577,4.4632592), tolerance=tolerance)
checkEqualsNumeric(fit$vcov$testinv[,4], c(1.043623,6.013954), tolerance=tolerance)
dat=sim.chngpt (mean.model="thresholded", threshold.type="upperhinge", family="binomial", sd=0.3, mu.z=0, alpha=0, coef.z=log(1.4), beta=-1, n=100, seed=1)
fit.0 = chngptm (formula.1=y~z, formula.2=~x, family="binomial", dat, type="upperhinge", est.method="smoothapprox", var.type="model", save.boot=T, ci.bootstrap.size=1, verbose=verbose)
fit.1 = chngptm (formula.1=y~z, formula.2=~x, family="binomial", dat, type="upperhinge", est.method="smoothapprox", var.type="robust", save.boot=T, ci.bootstrap.size=1, verbose=verbose, aux.fit=glm(y~z+ns(x,2), family="binomial", dat))
checkEqualsNumeric(fit.0$coef, c(0.2278046, 0.2595208, -0.8485878, 5.4202994), tolerance=tolerance)
checkEqualsNumeric(diag(fit.0$vcov), c(0.11440831,0.06818376,0.15998558,0.56502668), tolerance=tolerance)
checkEqualsNumeric(diag(fit.1$vcov), c(0.16060624,0.05749946,0.24441672,1.06928181), tolerance=tolerance)
data=sim.chngpt("quadratic", n=60, seed=1, beta=log(0.4), x.distr="norm", e.=4.1, b.transition=Inf, family="binomial")
fit.d = chngptm (formula.1=y~z, formula.2=~x, family="binomial", data, type="segmented", est.method="grid", var.type="none", weights=rep(1:2,each=30), verbose=verbose)
fit.e = chngptm (formula.1=y~z, formula.2=~x, family="binomial", data, type="segmented", est.method="smoothapprox", var.type="none", weights=rep(1:2,each=30), verbose=verbose)
checkEqualsNumeric(coef(fit.d), c(-8.4120932,0.8365985,1.9086715,135.5846880), tolerance=tolerance)
checkEqualsNumeric(coef(fit.e), c(-8.5621192,0.8431925,1.9465604,1713.3986973), tolerance=tolerance)
dat.2=sim.chngpt("thresholded", "step", n=200, seed=1, beta=1, alpha=-1, x.distr="norm", e.=4, family="binomial")
set.seed(1)
dat.2$success=rbinom(nrow(dat.2), 10, 1/(1 + exp(-dat.2$eta)))
dat.2$failure=10-dat.2$success
fit.2a=chngptm(formula.1=cbind(success,failure)~z, formula.2=~x, family="binomial", dat.2, type="step", est.method="grid", verbose=verbose, var.type="none")
fit.2b=chngptm(formula.1=cbind(success,failure)~z, formula.2=~x, family="binomial", dat.2, type="step", est.method="smoothapprox", var.type="none", verbose=verbose)
checkEqualsNumeric(fit.2a$coefficients, c(-0.8634819,0.3477191,0.9316376,3.9907330), tolerance=tolerance)
checkEqualsNumeric(fit.2b$coefficients, c(-0.8634819,0.3477191,0.9316376,3.9907330), tolerance=tolerance)
checkEqualsNumeric(diag(vcov(fit.2a$best.fit)), c(0.008068637,0.002473882,0.011023127), tolerance=tolerance)
checkEqualsNumeric(diag(vcov(fit.2b$best.fit)), c(0.008068637,0.002473882,0.011023127), tolerance=tolerance)
n <- dat.2$success + dat.2$failure
dat.2$y.2 <- ifelse(n == 0, 0, dat.2$success/n)
dat.2$weights <- n
fit.2a1=chngptm(formula.1=y.2~z, formula.2=~x, family="binomial", dat.2, type="step", est.method="grid", weights=dat.2$weights, var.type="none")
fit.2b1=chngptm(formula.1=y.2~z, formula.2=~x, family="binomial", dat.2, type="step", est.method="smoothapprox", weights=dat.2$weights, var.type="none")
checkEqualsNumeric(fit.2a1$coefficients, c(-0.8634819,0.3477191,0.9316376,3.9907330), tolerance=tolerance)
checkEqualsNumeric(fit.2b1$coefficients, c(-0.8634819,0.3477191,0.9316376,3.9907330), tolerance=tolerance)
checkEqualsNumeric(diag(vcov(fit.2a1$best.fit)), c(0.008068637,0.002473882,0.011023127), tolerance=tolerance)
checkEqualsNumeric(diag(vcov(fit.2b1$best.fit)), c(0.008068637,0.002473882,0.011023127), tolerance=tolerance)
data=sim.chngpt("thresholded", threshold.type="segmented", n=250, seed=1, beta=log(0.4), x.distr="norm", e.=4.1, b.transition=Inf, family="binomial")
data$xx=data$x
data$zz=data$z
data$yy=data$y
fit.0=glm(yy~zz+xx+I(xx^2), data, family="binomial")
fit.0$coefficients=c(alpha=-1, z=log(1.4), x=-1 , x.quad=0.3)
fit = chngptm (formula.1=yy~zz, formula.2=~xx, family="binomial", data, type="segmented", est.method="smoothapprox", var.type="all", verbose=verbose, aux.fit=fit.0, lb.quantile=0.1, ub.quantile=0.9, tol=1e-4, maxit=1e3)
checkEqualsNumeric(sum(diag(fit$vcov[["model"]])), 1.479249, tolerance=tolerance)
checkEqualsNumeric(sum(diag(fit$vcov[["robust"]])), 0.8593607, tolerance=tolerance)
data=sim.chngpt("thresholdedItxn", threshold.type="step", family="binomial", n=250, seed=1, beta=-log(.67), beta.itxn=0, x.distr="norm", e.=3.4, b.transition=Inf, verbose=verbose)
fit = chngptm (y~z, ~x*z, family="binomial", data, tol=1e-4, maxit=1e3, type="step", lb.quantile=0.1, ub.quantile=0.9, est.method="smoothapprox")
checkEqualsNumeric(fit$coefficients, c(-0.5252114,0.2145485,0.3734826,0.5539497,6.1272134), tolerance=tolerance)
fit = chngptm (y~z, ~x*z, family="binomial", data, tol=1e-4, maxit=1e3, type="step", lb.quantile=0.1, ub.quantile=0.9, est.method="grid", verbose=verbose)
checkEqualsNumeric(fit$coefficients, c(-0.5448914,0.1977217,0.4542935,0.6316745,5.9852531), tolerance=tolerance)
fit = chngptm (y~z, ~x*z, family="binomial", data, tol=1e-4, maxit=1e3, type="hinge", lb.quantile=0.1, ub.quantile=0.9, est.method="grid")
checkEqualsNumeric(fit$coefficients, c(-0.5368853,0.2618009,0.2526734,0.1231553,5.3939234), tolerance=tolerance)
fit = chngptm (y~z, ~x*z, family="binomial", data, tol=1e-4, maxit=1e3, type="segmented", lb.quantile=0.1, ub.quantile=0.9, est.method="grid", verbose=verbose)
checkEqualsNumeric(fit$coefficients, c(-1.27487926,2.11715992,0.20647617,-0.07942202,-0.74984522,0.81418233,2.65745247), tolerance=tolerance)
fit = chngptm (y~z, ~x*z, family="binomial", data, tol=1e-4, maxit=1e3, type="stegmented", lb.quantile=0.1, ub.quantile=0.9, est.method="grid", verbose=verbose)
checkEqualsNumeric(fit$coefficients, c(-0.77952531,0.89672361,0.05137674,0.41337727,-0.09905192,1.47335019,-0.30845593,-0.17022170,5.98525315), tolerance=tolerance)
}
|
seqdistmc <- function(channels, method=NULL, norm="none", indel="auto", sm=NULL,
with.missing=FALSE, full.matrix=TRUE, link="sum", cval=2, miss.cost=2, cweight=NULL,
what="diss", ch.sep = "@@@@TraMineRSep@@@@") {
whatlist <- c("diss","sm","seqmc")
if (!(what %in% whatlist)){
msg.stop("what should be one of ",paste0("'",whatlist,"'", collapse=","))
}
if (what=="diss" & is.null(method))
msg.stop("method cannot be NULL when what = 'diss'")
if (what=="sm" & is.null(sm))
msg.stop("sm cannot be NULL when what = 'sm'")
  if (!is.null(sm) && is.list(sm)) {
    sm3d <- sapply(sm, function(s) length(dim(s)) == 3)
    if (any(sm3d) && !all(sm3d))
      msg.stop("Some sm are 3-dimensional and some are not!")
  }
  timeVarying <- is.list(sm) && length(dim(sm[[1]])) == 3
if(length(indel) > 1 & any(indel=="auto"))
stop(" [!] 'auto' not allowed in vector or list indel")
if(is.list(indel) & length(indel)==1)
stop(" [!] When a list, indel must be of length equal to number of channels")
nchannels <- length(channels)
if (nchannels < 2) {
stop("[!] please specify at least two channels")
}
if (is.null(cweight)) {
cweight <- rep(1, nchannels)
}
for (i in 1:nchannels){
if (length(grep(ch.sep, alphabet(channels[[i]], with.missing=TRUE), fixed=TRUE))>0)
stop(" [!] ch.sep symbol (",ch.sep,") occurs in alphabet of at least one channel")
}
if (is.list(indel) & length(indel) != nchannels)
stop("[!] when a list, indel must be of length equal to number of channels")
if (length(with.missing) > 1 & length(with.missing) != nchannels )
stop("[!] when a vector, with.missing must be of length equal to number of channels")
numseq <- sapply(channels,nrow)
if(any(numseq!=numseq[1])) {
stop(" [!] sequence objects have different numbers of rows")
}
numseq <- numseq[1]
message(" [>] ", nchannels, " channels with ", numseq, " sequences")
if (what=="diss") {
metlist <- c("OM", "LCS", "DHD", "HAM")
if (!method %in% metlist) {
stop(" [!] method must be one of: ", paste(metlist, collapse=" "), call.=FALSE)
}
if (method=="LCS") {
method <- "OM"
sm <- "CONSTANT"
indel <- 1
cval <- 2
miss.cost <- 2
}
timeVarying <- method %in% c("DHD")
if (is.null(sm)) {
costmethod <- "CONSTANT"
if (method == "DHD") {
costmethod <- "TRATE"
}
sm <- rep(costmethod, nchannels)
}
}
if (length(sm)==1 && sm %in% c("CONSTANT", "TRATE", "INDELS", "INDELSLOG")){
sm <- rep(sm, nchannels)
}
if (length(indel)==1) {
indel <- rep(indel, nchannels)
}
if (length(with.missing)==1) {
with.missing <- rep(with.missing, nchannels)
}
if (what != "seqmc") {
if ((length(indel)!= nchannels) ||
(length(sm)!= nchannels) ||
(length(cweight)!= nchannels)) {
stop(" [!] you should supply one weight, one substitution matrix and one indel per channel")
}
}
if (is.list(indel))
indel_list <- list()
else
indel_list <- numeric(length=nchannels)
if (any(indel == "auto") & any(sm %in% c("INDELSLOG","INDELS")))
indel_list <- list()
substmat_list <- list()
alphabet_list <- list()
alphsize_list <-list()
maxlength_list <- numeric(length=nchannels)
for (i in 1:nchannels)
maxlength_list[i] <- ncol(channels[[i]])
md.cnames <- colnames(channels[[which.max(maxlength_list)]])
slength1 <- seqlength(channels[[1]])
for (i in 2:nchannels) {
if (sum(slength1 != seqlength(channels[[i]]))>0) {
warning(" [!] Some individuals have channels of different length. Shorter sequences will be filled with missing values and corresponding channel with.missing set as TRUE")
break
}
}
message(" [>] building combined sequences...", appendLF=F)
sep <- ch.sep
maxlength=max(maxlength_list)
newseqdata <- matrix("", nrow=numseq, ncol=maxlength)
rownames(newseqdata) <- rownames(channels[[1]])
newseqdataNA <- matrix(TRUE, nrow=numseq, ncol=maxlength)
for (i in 1:nchannels) {
seqchan <- channels[[i]]
void <- attr(seqchan, "void")
nr <- attr(seqchan, "nr")
for (j in 1:maxlength) {
if (j > maxlength_list[i]) {
newCol <- as.character(rep(void, numseq))
}
else {
newCol <- as.character(seqchan[,j])
}
newseqdataNA[,j] <- newseqdataNA[,j] & newCol == void
if (any(newCol==void)) with.missing[i] <- TRUE
newCol[newCol == void] <- nr
if (i > 1) {
newseqdata[,j] <- paste(newseqdata[,j], newCol, sep = sep)
}
else {
newseqdata[,j] <- newCol
}
}
}
newseqdata[newseqdataNA] <- NA
suppressMessages(newseqdata <- seqdef(newseqdata, cnames=md.cnames))
message(" OK")
if (what == "seqmc") {
return(newseqdata)
}
else {
for (i in 1:nchannels) {
if (!inherits(channels[[i]],"stslist")) {
stop(" [!] channel ", i, " is not a state sequence object, use 'seqdef' function to create one", call.=FALSE)
}
alphabet_list[[i]] <- attr(channels[[i]],"alphabet")
if (with.missing[i]) {
alphabet_list[[i]] <- c(alphabet_list[[i]],attr(channels[[i]],"nr"))
message(" [>] including missing value as an additional state" )
}
else {
if (any(channels[[i]]==attr(channels[[i]],"nr"))) {
stop(" [!] found missing values in channel ", i, ", set with.missing as TRUE for that channel")
}
}
alphsize_list[[i]] <- length(alphabet_list[[i]])
if(is.list(indel)){
if (length(indel[[i]])==1)
indel[[i]] <- rep(indel[[i]],alphsize_list[[i]])
if (length(indel[[i]]) != alphsize_list[[i]]){
cat("i = ",i,", indel length = ", length(indel[[i]]), ", alphabet size = ", alphsize_list[[i]], "\n alphabet = ", alphabet_list[[i]],"\n" )
stop(" [!] indel length does not much size of alphabet for at least one channel")
}
}
else if (!any(indel=="auto") & !is.list(indel_list)) {
indel_list[i] <- indel[i]
}
if (is.character(sm[[i]])) {
message(" [>] computing substitution cost matrix for channel ", i)
costs <- seqcost(channels[[i]], sm[[i]], with.missing=with.missing[i],
time.varying=timeVarying, cval=cval, miss.cost=miss.cost)
substmat_list[[i]] <- costs$sm
if (any(indel=="auto")) {
if (is.list(indel_list)){
if (length(costs$indel)==1) costs$indel <- rep(costs$indel,alphsize_list[[i]])
indel_list[[i]] <- costs$indel
}
else
indel_list[i] <- costs$indel
}
}
else {
if (any(indel[i] == "auto") & !is.list(indel_list))
indel_list[i] <- max(sm[[i]])/2
else
indel_list[i] <- indel[i]
checkcost(sm[[i]], channels[[i]], with.missing = with.missing[i], indel = indel_list[[i]])
substmat_list[[i]] <- sm[[i]]
}
substmat_list[[i]] <- cweight[i]* substmat_list[[i]]
}
if (any(indel=="auto")) indel <- indel_list
message(" [>] computing combined substitution and indel costs...", appendLF=FALSE)
alphabet <- attr(newseqdata,"alphabet")
alphabet_size <- length(alphabet)
newindel <- NULL
if (!timeVarying) {
newsm <- matrix(0, nrow=alphabet_size, ncol=alphabet_size)
if (is.list(indel)){
newindel <- rep(0,alphabet_size)
statelisti <- strsplit(alphabet[alphabet_size], sep, fixed=TRUE)[[1]]
for (chan in 1:nchannels){
ipos <- match(statelisti[chan], alphabet_list[[chan]])
newindel[alphabet_size] <- newindel[alphabet_size] + indel[[chan]][ipos]*cweight[chan]
}
}
for (i in 1:(alphabet_size-1)) {
statelisti <- strsplit(alphabet[i], sep, fixed=TRUE)[[1]]
if (is.list(indel)){
for (chan in 1:nchannels){
ipos <- match(statelisti[chan], alphabet_list[[chan]])
newindel[i] <- newindel[i] + indel[[chan]][ipos]*cweight[chan]
}
}
for (j in (i+1):alphabet_size) {
cost <- 0
statelistj <- strsplit(alphabet[j], sep, fixed=TRUE)[[1]]
for (chan in 1:nchannels) {
ipos <- match(statelisti[chan], alphabet_list[[chan]])
jpos <- match(statelistj[chan], alphabet_list[[chan]])
cost <- cost + substmat_list[[chan]][ipos, jpos]
}
newsm[i, j] <- cost
newsm[j, i] <- cost
}
}
} else {
newsm <- array(0, dim=c(alphabet_size, alphabet_size, maxlength))
for (t in 1:maxlength) {
for (i in 1:(alphabet_size-1)) {
statelisti <- strsplit(alphabet[i], sep, fixed=TRUE)[[1]]
for (j in (i+1):alphabet_size) {
cost <- 0
statelistj <- strsplit(alphabet[j], sep, fixed=TRUE)[[1]]
for (chan in 1:nchannels) {
ipos <- match(statelisti[chan], alphabet_list[[chan]])
jpos <- match(statelistj[chan], alphabet_list[[chan]])
cost <- cost + substmat_list[[chan]][ipos, jpos, t]
}
newsm[i, j, t] <- cost
newsm[j, i, t] <- cost
}
}
}
}
rownames(newsm) <- colnames(newsm) <- alphabet
message(" OK")
if (is.null(newindel) & !is.list(indel_list)) {
newindel <- sum(indel*cweight)
}
if (link=="mean") {
newindel <- newindel / sum(cweight)
newsm <- newsm / sum(cweight)
}
if (what == "sm") {
attr(newsm,"indel") <- newindel
attr(newsm,"alphabet") <- alphabet
attr(newsm,"cweight") <- cweight
return(newsm)
}
}
if (what == "diss") {
message(" [>] computing distances ...")
return(seqdist(newseqdata, method=method, norm=norm, indel=newindel,
sm=newsm, with.missing=FALSE, full.matrix=full.matrix))
}
}
|
test_that("nuclearPed() direct labelling works", {
expect_identical(nuclearPed(1), nuclearPed(children='3'))
expect_identical(nuclearPed(1, sex=2), nuclearPed(children='3', sex=2))
expect_identical(relabel(nuclearPed(1), letters[3:1]),
nuclearPed(fa='c', mo='b', children='a'))
expect_identical(relabel(nuclearPed(1), old=3, new="foo"),
nuclearPed(children="foo"))
})
test_that("nuclearPed() catches errors", {
expect_error(nuclearPed(0), '`nch` must be a positive integer: 0')
expect_error(nuclearPed(nch = 1:2), "`nch` must be a positive integer")
expect_error(nuclearPed(fa = 1:2), "`father` must have length 1")
expect_error(nuclearPed(mo = 1:2), "`mother` must have length 1")
expect_error(nuclearPed(sex='a'), "Illegal sex: a")
expect_error(nuclearPed(sex=1:2), "`sex` is longer than the number of individuals")
expect_error(nuclearPed(sex=integer(0)), "`sex` cannot be empty")
expect_error(nuclearPed(fa = 'a', child = 'a'), "Duplicated ID label: a")
expect_error(nuclearPed(child = c("b", "b")), "Duplicated ID label: b")
expect_error(nuclearPed(child = 1), "please specify a different label for the father")
expect_error(nuclearPed(mo = 1), "please specify a different label for the father")
expect_error(nuclearPed(child = 2), "please specify a different label for the mother")
expect_error(nuclearPed(fa = 2), "please specify a different label for the mother")
})
test_that("halfSibPed() has expected ordering", {
expect_equal(labels(halfSibPed()), as.character(1:5))
})
test_that("halfSibPed() recycles sex1 and sex2", {
expect_equal(halfSibPed(2,3,sex1=0,sex2=2:1),
halfSibPed(2,3,sex1=c(0,0), sex2=c(2,1,2)))
})
test_that("halfSibPed() catches errors", {
expect_error(halfSibPed(-1), "`nch1` must be a positive integer")
expect_error(halfSibPed(0), "`nch1` must be a positive integer")
expect_error(halfSibPed(1, 0), "`nch2` must be a positive integer")
expect_error(halfSibPed(sex1 = 'a'), "Illegal sex: a")
expect_error(halfSibPed(sex1 = 1:2), "`sex1` is longer than the number of individuals")
expect_error(halfSibPed(sex1 = integer(0)), "`sex1` cannot be empty")
})
|
legendBreaks = function(pos,
breaks,
col,
legend,
rev=TRUE,
outer=TRUE,
pch=15,
bg='white',
cex=par('cex'),
pt.cex=2.5*cex,
text.col=par('fg'),
title=NULL,
inset=0.05,
title.col=text.col,
adj=0,
width=Inf,
lines=Inf,
y.intersp,
...){
if(!missing(breaks)){
if(is.factor(breaks)){
if(length(grep("^Raster",class(breaks)))){
breaks = levels(breaks)[[1]]
} else {
breaks=list(legend=levels(breaks))
}
}
}
if( missing(legend) & missing(breaks))
warning("legend or breaks must be supplied")
if(missing(legend)&!missing(breaks)) {
if(is.list(breaks)){
legendCol = intersect(
c('legend','label','level','breaks','ID'),
names(breaks)
)
if(!length(legendCol)){
warning("can't find legend in breaks")
}
legend = breaks[[ legendCol[1] ]]
} else {
legend=breaks
}
}
if(missing(col)){
col='black'
if(!missing(breaks)) {
if(is.list(breaks)) {
if(any(names(breaks)=='col'))
col = breaks[['col']]
}
}
}
if(rev){
col=rev(col)
legend=rev(legend)
}
  diffyMult = 0
if(length(col) == (length(legend)-1)) {
col = c(NA, col)
pch = c(NA,
pch[round(seq(1, length(pch), len=length(legend)-1))]
)
diffyMult=1
theTextCol = '
} else {
theTextCol = text.col
theNA = is.na(col)
if(any(theNA)){
col = col[!theNA]
legend = legend[!theNA]
}
}
if(any(nchar(as.character(legend)) > width)) {
legend = trimws(
gsub(
paste('(.{1,', width, '})(\\s|/|$)' ,sep=''),
'\\1\n ',
as.character(legend)
)
)
}
theNewLines = gregexpr('\n', as.character(legend))
toCrop = which(unlist(lapply(theNewLines, length)) >= lines)
if(length(toCrop)) {
cropPos = unlist(lapply(theNewLines[toCrop], function(qq) qq[lines]))
legend = as.character(legend)
legend[toCrop] =
trimws(substr(legend[toCrop], 1, cropPos))
}
shiftLegendText = rep(0, length(legend))
if(missing(y.intersp)){
if(is.character(legend)) {
theNewLines = gregexpr('\n', as.character(legend))
y.intersp=max(
c(1.25,
0.5+unlist(
lapply(theNewLines, function(qq) sum(qq>0))
)
)
) - 0.25
} else {
y.intersp = 1
if(is.numeric(legend)) {
legend = as.character(legend)
widthHere = strwidth(legend, cex=cex)
maxWidth = max(widthHere)
withMinus = grep("^[[:space:]]*[-]", legend)
toAddForMinus = rep(0, length(legend))
toAddForMinus[-withMinus] = pmin(
maxWidth - widthHere[-withMinus],
strwidth("-", cex=cex)
)
charNoDec = strwidth(gsub("(e|[.])[[:digit:]]*$", "", legend), cex=cex)
maxCharNoDec = max(charNoDec)
toAddLeft = pmin(
maxCharNoDec - charNoDec,
maxWidth - widthHere)
Ndec = strwidth(
gsub("^[[:space:]]*[[:punct:]]*([[:digit:]]|e[+])+ *", "", legend),
cex=cex)
maxDec = max(Ndec)
toAddRight = pmin(
maxDec - Ndec,
maxWidth - widthHere)
idealWidth = widthHere + toAddRight + toAddLeft
tooWide = idealWidth - maxWidth
shiftLegendText = pmin(
toAddForMinus + toAddLeft - 0.4*tooWide,
maxWidth - widthHere
)
}
}
}
if(all(is.na(y.intersp))){
y.intersp=1
}
adj = rep_len(adj, 2)
adj[2] = adj[2] + y.intersp/4
withTrans = grep("^
col[withTrans] = gsub("[[:xdigit:]]{2}$", "", col[withTrans])
if(outer){
oldxpd = par("xpd")
par(xpd=NA)
fromEdge = matrix(par("plt"), 2, 2,
dimnames=list(c("min","max"), c("x","y")))
propIn = apply(fromEdge, 2, diff)
if(is.character(pos)) {
forInset = c(0,0)
if(length(grep("left$", pos))){
forInset[1] = -fromEdge["min","x"]
} else if(length(grep("right$", pos))){
forInset[1] = fromEdge["max","x"]-1
}
if(length(grep("^top", pos))){
forInset[2] = -fromEdge["min","y"]
} else if(length(grep("^bottom", pos))){
forInset[2] = fromEdge["max","y"]-1
}
inset = forInset/propIn + inset
}
}
result=legend(
pos,
legend=legend,
bg=bg,
col=col,
pch=pch,
pt.cex=pt.cex,
inset=inset,
cex=cex,
text.col=theTextCol,
title.col=title.col,
title=title,
y.intersp=y.intersp,
adj=adj,
...
)
if(text.col != theTextCol) {
diffy = diff(result$text$y)/2
diffy = c(
diffy,diffy[length(diffy)]
)*diffyMult
result$text$y = result$text$y + diffy
result$text$xOrig = result$text$x
result$text$x = result$text$x + shiftLegendText/2 + max(strwidth(legend, cex=cex))/2
if(par("xlog")) result$text$x = 10^result$text$x
if(par("ylog")) result$text$y = 10^result$text$y
text(
result$text$x,
result$text$y,
legend,
col=text.col,
adj=0.5,
cex=cex)
}
if(outer){
par(xpd=oldxpd)
}
result$legend = legend
return(invisible(result))
}
|
.onAttach <- function(...) {
diveRpkgs <- diveRcore()
needed <- diveRpkgs[!is_attached(diveRpkgs)]
if (length(needed) == 0)
return()
diveR_attach()
}
is_attached <- function(x) {
paste0("package:", x) %in% search()
}
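## Quick check: search() always contains "package:base", so
## is_attached("base")                  # TRUE
## is_attached(c("base", "notAPkg"))    # TRUE FALSE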
|
model_prediction <- function(poped.db=NULL,
design=list(
xt=poped.db$design[["xt"]],
groupsize=poped.db$design$groupsize,
m=poped.db$design[["m"]],
x=poped.db$design[["x"]],
a=poped.db$design[["a"]],
ni=poped.db$design$ni,
model_switch=poped.db$design$model_switch),
model = list(
fg_pointer=poped.db$model$fg_pointer,
ff_pointer=poped.db$model$ff_pointer,
ferror_pointer= poped.db$model$ferror_pointer),
parameters=list(
docc=poped.db$parameters$docc,
d = poped.db$parameters$d,
bpop = poped.db$parameters$bpop,
covd = poped.db$parameters$covd,
covdocc = poped.db$parameters$covdocc,
sigma = poped.db$parameters$sigma),
IPRED=FALSE,
DV=FALSE,
dosing=NULL,
predictions=NULL,
filename=NULL,
models_to_use="all",
model_num_points=NULL,
model_minxt=NULL,
model_maxxt=NULL,
include_sample_times=T,
groups_to_use="all",
include_a = TRUE,
include_x = TRUE,
manipulation=NULL,
PI = FALSE,
PI_conf_level = 0.95)
{
if(is.null(predictions)){
predictions=FALSE
if(!is.null(poped.db) || (!is.null(unlist(parameters))) && !is.null(unlist(model)) && !is.null(unlist(design))) predictions=TRUE
}
if(is.null(poped.db) && is.null(unlist(design))) stop("Either 'poped.db' or 'design' need to be defined")
design <- do.call(create_design,design)
NumOcc=poped.db$parameters$NumOcc
if(DV) IPRED=T
with(design,{
maxxt=poped.choose(poped.db$design_space$maxxt,xt)
minxt=poped.choose(poped.db$design_space$minxt,xt)
if(!is.null(dosing)){
if(!length(dosing)==m){
if(length(dosing) == 1) {
dosing <- rep(dosing,m)
} else {
stop("dosing argument does not have the right dimensions.
Must be 1 list or a list of lists the size of the number of groups")
}
}
}
if(predictions){
docc_size = 0
if((!isempty(parameters$docc[,2]))){
docc_size = size(parameters$docc[,2,drop=F],1)
}
d_size = 0
if((!isempty(parameters$d[,2]))){
d_size = size(parameters$d[,2,drop=F],1)
}
}
used_times <- zeros(size(xt))
for(i in 1:size(xt,1)) used_times[i,1:ni[i]] <- 1
if(all(groups_to_use=="all")){
groups_to_use = 1:size(xt,1)
}
if(all(models_to_use=="all")){
models_to_use = unique(as.vector(model_switch[used_times==1]))
}
df <- data.frame()
id_num_start <- 1
for(i in 1:length(groups_to_use)){
if(!exists("a")){
a_i = zeros(0,1)
} else if((isempty(a))){
a_i = zeros(0,1)
} else {
a_i = a[groups_to_use[i],,drop=F]
}
if(!exists("x")){
x_i = zeros(0,1)
} else if((isempty(x))){
x_i = zeros(0,1)
} else {
x_i = x[groups_to_use[i],,drop=F]
}
num_ids = groupsize[groups_to_use[i]]
if(all(is.null(model_num_points))){
xt_i = xt[groups_to_use[i],1:ni[groups_to_use[i]]]
model_switch_i = model_switch[groups_to_use[i],1:ni[groups_to_use[i]]]
if(!all(models_to_use == unique(as.vector(model_switch[used_times==1])))){
xt_i = xt_i[model_switch_i %in% models_to_use]
model_switch_i = model_switch_i[model_switch_i %in% models_to_use]
}
} else {
xt_i <- c()
model_switch_i <- c()
if(length(models_to_use)>1 && length(model_num_points)==1) model_num_points <- rep(model_num_points,length(models_to_use))
for(j in models_to_use){
if(is.null(model_minxt)){
minv <- min(as.vector(minxt[model_switch==j]),na.rm = TRUE)
} else {
if(length(models_to_use)>1 && length(model_minxt)==1) model_minxt <- rep(model_minxt,length(models_to_use))
minv = model_minxt[j]
}
if(is.null(model_maxxt)){
maxv <- max(as.vector(maxxt[model_switch==j]),na.rm = TRUE)
} else {
if(length(models_to_use)>1 && length(model_maxxt)==1) model_maxxt <- rep(model_maxxt,length(models_to_use))
maxv = model_maxxt[j]
}
tmp_num_pts <- model_num_points[j]
if(length(model_num_points)<j) tmp_num_pts <- model_num_points[1]
xt_i= c(xt_i,seq(minv,maxv,length.out=tmp_num_pts))
model_switch_i = c(model_switch_i,j*matrix(1,1,tmp_num_pts))
}
if(include_sample_times){
xt_i_extra = xt[groups_to_use[i],1:ni[groups_to_use[i]]]
model_switch_i_extra = model_switch[groups_to_use[i],1:ni[groups_to_use[i]]]
if(!all(models_to_use == unique(as.vector(model_switch[used_times==1])))){
xt_i_extra = xt_i_extra[model_switch_i_extra %in% models_to_use]
model_switch_i_extra = model_switch_i_extra[model_switch_i_extra %in% models_to_use]
}
tmp.include <- !(xt_i_extra %in% xt_i)
xt_i <- c(xt_i,xt_i_extra[tmp.include])
model_switch_i <- c(model_switch_i,model_switch_i_extra[tmp.include])
tmp.order <- order(xt_i)
xt_i <- xt_i[tmp.order]
model_switch_i <- model_switch_i[tmp.order]
}
}
pred <- NA
group.df <- data.frame(Time=xt_i,PRED=pred,Group=groups_to_use[i],Model=model_switch_i)
if(predictions){
bpop_val <- parameters$bpop[,2,drop=F]
b_ind = zeros(1,d_size)
bocc_ind = zeros(docc_size,NumOcc)
g0 = feval(model$fg_pointer,x_i,a_i,bpop_val,b_ind,bocc_ind)
pred_list <- feval(model$ff_pointer,model_switch_i,xt_i,g0,poped.db)
pred_list$poped.db <- NULL
pred <- drop(pred_list[[1]])
group.df["PRED"] <- pred
if(length(pred_list)>1){
extra_df <- data.frame(pred_list[-1])
group.df <- cbind(group.df,extra_df)
}
if(PI){
sigma_full = parameters$sigma
d_full = getfulld(parameters$d[,2],parameters$covd)
docc_full = getfulld(parameters$docc[,2,drop=F],parameters$covdocc)
cov <- v(as.matrix(model_switch_i),as.matrix(xt_i),
t(x_i),t(a_i),bpop_val,t(b_ind),bocc_ind,d_full,
sigma_full,docc_full,poped.db)[[1]]
PI_alpha <- 1-PI_conf_level
z_val <- qnorm(1-PI_alpha/2)
se_val <- sqrt(diag(cov))
PI_u <- pred + z_val*se_val
PI_l <- pred - z_val*se_val
group.df <- data.frame(group.df,PI_l=PI_l,PI_u=PI_u)
}
}
if(include_a && !isempty(a_i)){
rownames(a_i) <- NULL
group.df <- data.frame(group.df,a_i)
}
if(include_x && !isempty(x_i)){
rownames(x_i) <- NULL
group.df <- data.frame(group.df,x_i)
}
if(IPRED){
group.df.ipred <- data.frame()
bocc_start= 1
id_num_end <- id_num_start + num_ids - 1
id_vals <- id_num_start:id_num_end
if(predictions){
fulld = getfulld(parameters$d[,2],parameters$covd)
fulldocc = getfulld(parameters$docc[,2,drop=F],parameters$covdocc)
if(any(size(fulld)==0)){
b_sim_matrix = zeros(num_ids,0)
} else {
b_sim_matrix = mvtnorm::rmvnorm(num_ids,sigma=fulld)
}
bocc_sim_matrix = zeros(num_ids*NumOcc,length(parameters$docc[,2,drop=F]))
if(nrow(fulldocc)!=0) bocc_sim_matrix = mvtnorm::rmvnorm(num_ids*NumOcc,sigma=fulldocc)
}
for(j in 1:num_ids){
tmp.df <- group.df
if(predictions){
bocc_stop=bocc_start + NumOcc - 1
if(nrow(fulldocc)==0){
bocc_start=0
bocc_stop=0
}
fg_sim = feval(model$fg_pointer,x_i,a_i,parameters$bpop[,2,drop=F],b_sim_matrix[j,],t(bocc_sim_matrix[bocc_start:bocc_stop,]))
bocc_start = bocc_stop + 1
ipred <- feval(model$ff_pointer,model_switch_i,xt_i,fg_sim,poped.db)
ipred <- drop(ipred[[1]])
} else {
ipred <- xt_i*NA
}
ID <- id_vals[j]
tmp.df["ID"] <- ID
tmp.df["IPRED"] <- ipred
if(DV){
if(predictions){
eps_sim = mvtnorm::rmvnorm(length(xt_i),sigma=parameters$sigma)
dv <- feval(model$ferror_pointer,model_switch_i,xt_i,fg_sim,eps_sim,poped.db)
dv <- drop(dv[[1]])
} else {
dv <- xt_i*NA
}
tmp.df["DV"] <- dv
}
if(!is.null(dosing)){
dose.df <- data.frame(dosing[groups_to_use[i]])
for(nam in names(tmp.df[!(names(tmp.df) %in% c("IPRED","PRED","DV","Time"))])){
if(length(unique(tmp.df[nam]))==1) dose.df[nam] <- tmp.df[1,nam]
}
dose.df$dose_record_tmp <- 1
tmp.df <- dplyr::bind_rows(dose.df,tmp.df)
tmp.df <- tmp.df[order(tmp.df$Time,tmp.df$dose_record_tmp),]
tmp.df$dose_record_tmp <- NULL
}
group.df.ipred <- rbind(group.df.ipred,tmp.df)
}
id_num_start <- id_num_end + 1
group.df <- group.df.ipred
}
df <- rbind(df,group.df)
}
first_names <- c("ID","Time","DV","IPRED","PRED")
first_names <- first_names[first_names %in% names(df)]
other_names <- names(df[!(names(df) %in% first_names)])
df <- df[c(first_names,other_names)]
df$Group <- as.factor(df$Group)
df$Model <- as.factor(df$Model)
if(IPRED) df$ID <- as.factor(df$ID)
row.names(df) <- NULL
if(!is.null(manipulation)){
for(i in 1:length(manipulation)){
df <- within(df,{
eval(manipulation[[i]])
})
}
}
if(!is.null(filename)) write.table(x=df, filename, row.names=FALSE, quote=FALSE, na=".",sep=",")
return( df )
})
}
|
compmatch <- function(x, split) {
if (split %in% .special.characters)
split <- paste("\\", split, sep = "")
res <- any(grepl(split, x))
res
}
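## Usage sketch (assumes the package-internal .special.characters vector referenced above
## includes "+", so the split string is escaped before grepl()):
## compmatch(c("a+b", "c"), "+")   # TRUE
## compmatch(c("ab", "c"), "+")    # FALSE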
|
context("importBrukerFlex")
test_that("importBrukerFlex", {
expect_error(MALDIquantForeign:::.importBrukerFlex("tmp.tmp"))
path <- system.file(
file.path("exampledata", "brukerflex", "0_A1", "1", "1SLin", "fid"),
package="MALDIquantForeign")
s <- MALDIquantForeign:::.importBrukerFlex(path, verbose=FALSE)
expect_equal(s, import(path, verbose=FALSE))
expect_equal(s, importBrukerFlex(path, verbose=FALSE))
expect_equal(s, import(path, type="fid", verbose=FALSE))
expect_equal(trunc(mass(s[[1]])), 226:230)
expect_equal(intensity(s[[1]]), 1:5)
expect_equal(basename(metaData(s[[1]])$file), "fid")
expect_equal(metaData(s[[1]])$laserShots, 100)
expect_equal(metaData(s[[1]])$comments, paste0("TESTSAMPLE", 1:4))
})
|
knitr::opts_chunk$set(
collapse = TRUE,
  comment = "#>"
)
options(width = 90)
options(pillar.min_title_chars = 10)
options(tibble.print_max = 25)
library(msigdbr)
all_gene_sets = msigdbr(species = "Mus musculus")
head(all_gene_sets)
msigdbr_species()
h_gene_sets = msigdbr(species = "mouse", category = "H")
head(h_gene_sets)
cgp_gene_sets = msigdbr(species = "mouse", category = "C2", subcategory = "CGP")
head(cgp_gene_sets)
msigdbr_collections()
all_gene_sets %>%
dplyr::filter(gs_cat == "H") %>%
head()
|
combinefm <- function(fcasts, smat, weights, alg)
{
totalts <- nrow(smat)
if (!is.matrix(fcasts)) {
fcasts <- t(fcasts)
}
if (ncol(fcasts) != totalts) {
stop("Argument fcasts requires all the forecasts.")
}
fcasts <- t(fcasts)
if (alg == "chol") {
if (!is.null(weights)) {
weights <- methods::as(1/weights, "matrix.diag.csr")
}
allf <- CHOL(fcasts = fcasts, S = smat, weights = weights, allow.changes = TRUE)
} else {
if (!is.null(weights)) {
seqts <- 1:totalts
weights <- sparseMatrix(i = seqts, j = seqts, x = 1/weights)
}
if (alg == "lu") {
allf <- LU(fcasts = fcasts, S = smat, weights = weights, allow.changes = TRUE)
} else if (alg == "cg") {
allf <- CG(fcasts = fcasts, S = smat, weights = weights, allow.changes = TRUE)
}
}
return(allf)
}
|
intersectAll <- function(...)
{
x <- list(...)
ans <- x[[1]]
for (i in 1:length(x)) ans <- intersect(ans, x[[i]])
return(ans)
}
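## Worked examples:
## intersectAll(1:5, 3:7, 4:9)               # 4 5
## intersectAll(letters[1:4], letters[2:6])  # "b" "c" "d"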
|
plot.PredictFindIt <- function(x,main,xlab, ylab, interactive=FALSE, ...){
labels <- "index"
if(missing(xlab)){
xlab <- "index of observation"
}
if(missing(ylab)){
ylab <- "Treatment Effect"
}
object <- x
treat.type <- object$treat.type
if(treat.type=="single"){
pred.data <- object$data
ATE <- object$ATE
pred.data.out.p <- pred.data[order(pred.data$Treatment.effect,
decreasing=FALSE),]
xp <- seq(1:nrow(pred.data.out.p))
zero.1 <- min(which(min(abs(pred.data.out.p$Treatment.effect))==
abs(pred.data.out.p$Treatment.effect)))
zero <- xp[zero.1]
low <- min(pred.data.out.p$Treatment.effect)
if(missing(main)){
main <- "Causal Moderation: Heterogeneous Treatment Effect "
}
plot(xp,pred.data.out.p$Treatment.effect,type="l",col="red",
main = main,
ylab= ylab,xlab=xlab)
text(zero,low, labels=as.character(zero))
abline(h=0,lty="dotdash")
abline(h=ATE, col="blue")
abline(v=zero, col="grey")
if(interactive ==TRUE){
if(labels=="index"){
p <- try(identify(xp,pred.data.out.p$Treatment.effect,
labels=rownames(pred.data.out.p)
),silent=TRUE)
}
else{
p <- try(identify(xp,pred.data.out.p$Treatment.effect,
labels=pred.data.out.p[,labels]
),silent=TRUE)
}
}
}
if(treat.type=="multiple"){
pred.data <- object$data
ATE <- object$ATE
pred.data.out.p <- pred.data[order(pred.data$Treatment.effect,
decreasing=FALSE),]
xp <- seq(1:nrow(pred.data.out.p))
zero.1 <- min(which(min(abs(pred.data.out.p$Treatment.effect))==
abs(pred.data.out.p$Treatment.effect)))
zero <- xp[zero.1]
low <- min(pred.data.out.p$Treatment.effect)
if(missing(main)){
main <- "Causal Interaction: Heterogeneous Treatment Effect"
}
plot(xp,pred.data.out.p$Treatment.effect,col="red",
main = main,
ylab= ylab,xlab=xlab)
text(zero,low, labels=as.character(zero))
abline(h=0,lty="dotdash")
abline(h=ATE, col="blue")
abline(v=zero, col="grey")
if(interactive ==TRUE){
if(labels=="index"){
p <- try(identify(xp,pred.data.out.p$Treatment.effect,
labels=rownames(pred.data.out.p)
),silent=TRUE)
}
else{
p <- try(identify(xp,pred.data.out.p$Treatment.effect,
labels=pred.data.out.p[,labels]
),silent=TRUE)
}
}
}
}
|
bq_table_download <-
function(x,
n_max = Inf,
page_size = NULL,
start_index = 0L,
max_connections = 6L,
quiet = NA,
bigint = c("integer", "integer64", "numeric", "character"),
max_results = deprecated()) {
x <- as_bq_table(x)
bigint <- match.arg(bigint)
if (lifecycle::is_present(max_results)) {
lifecycle::deprecate_warn(
"1.4.0", "bq_table_download(max_results)", "bq_table_download(n_max)"
)
n_max <- max_results
}
params <- set_row_params(
nrow = bq_table_nrow(x),
n_max = n_max,
start_index = start_index
)
n_max <- params$n_max
start_index <- params$start_index
schema_path <- bq_download_schema(x, tempfile())
withr::defer(file.remove(schema_path))
if (n_max == 0) {
table_data <- bq_parse_files(
schema_path,
file_paths = character(),
n = 0,
quiet = bq_quiet(quiet)
)
return(table_data)
}
pool <- curl::new_pool()
if (!bq_quiet(quiet)) {
message("Downloading first chunk of data.")
}
if (is.null(page_size)) {
chunk_size_from_user <- FALSE
} else {
assert_that(
is.numeric(page_size),
length(page_size) == 1,
page_size > 0
)
chunk_size_from_user <- TRUE
}
chunk_size <- page_size
chunk_plan <- bq_download_plan(
n_max,
chunk_size = chunk_size,
n_chunks = 1,
start_index = start_index
)
handle <- bq_download_chunk_handle(
x,
begin = chunk_plan$dat$chunk_begin[1],
max_results = chunk_plan$dat$chunk_rows[1]
)
curl::multi_add(
handle,
done = bq_download_callback(chunk_plan$dat$path[1]),
pool = pool
)
curl::multi_run(pool = pool)
path_first_chunk <- chunk_plan$dat$path[1]
withr::defer(file.remove(path_first_chunk))
chunk_data <- bq_parse_file(schema_path, path_first_chunk)
n_got <- nrow(chunk_data)
if (n_got >= n_max) {
if (!bq_quiet(quiet)) {
message("First chunk includes all requested rows.")
}
return(convert_bigint(chunk_data, bigint))
}
if (chunk_size_from_user && n_got < chunk_size) {
rlang::abort(c(
"First chunk is incomplete:",
x = glue("{big_mark(chunk_size)} rows were requested, but only \\
{big_mark(n_got)} rows were received."),
i = "Leave `page_size` unspecified or use an even smaller value."
))
}
if (!chunk_size_from_user) {
if (!bq_quiet(quiet)) {
message(glue("Received {big_mark(n_got)} rows in the first chunk."))
}
chunk_size <- trunc(0.75 * n_got)
}
n_max_new <- n_max - n_got
start_index_new <- n_got
chunk_plan <- bq_download_plan(
n_max_new,
chunk_size = chunk_size,
start_index = start_index_new
)
progress <- bq_progress(
"Downloading data [:bar] :percent ETA: :eta",
total = chunk_plan$n_chunks,
quiet = quiet
)
if (!bq_quiet(quiet)) {
message(glue_data(
chunk_plan,
"Downloading the remaining {big_mark(n_max)} rows in {n_chunks} \\
chunks of (up to) {big_mark(chunk_size)} rows."
))
}
for (i in seq_len(chunk_plan$n_chunks)) {
handle <- bq_download_chunk_handle(
x,
begin = chunk_plan$dat$chunk_begin[i],
max_results = chunk_plan$dat$chunk_rows[i]
)
curl::multi_add(
handle,
done = bq_download_callback(chunk_plan$dat$path[i], progress),
pool = pool
)
}
curl::multi_run(pool = pool)
withr::defer(file.remove(chunk_plan$dat$path))
table_data <- bq_parse_files(
schema_path,
c(path_first_chunk, chunk_plan$dat$path),
n = n_max,
quiet = bq_quiet(quiet)
)
convert_bigint(table_data, bigint)
}
convert_bigint <- function(df, bigint) {
if (bigint == "integer64") {
return(df)
}
as_bigint <- switch(bigint,
integer = as.integer,
numeric = as.numeric,
character = as.character
)
rapply_int64(df, f = as_bigint)
}
rapply_int64 <- function(x, f) {
if (is.list(x)) {
x[] <- lapply(x, rapply_int64, f = f)
x
} else if (bit64::is.integer64(x)) {
f(x)
} else {
x
}
}
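## Usage sketch (requires the bit64 package for integer64 columns):
## df <- data.frame(x = bit64::as.integer64(1:3), y = letters[1:3])
## convert_bigint(df, "integer")$x     # plain integer 1 2 3; "y" is left untouched
## convert_bigint(df, "integer64")$x   # returned unchanged as integer64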
set_row_params <- function(nrow, n_max = Inf, start_index = 0L) {
assert_that(is.numeric(n_max), length(n_max) == 1, n_max >= 0)
assert_that(is.numeric(start_index), length(start_index) == 1, start_index >= 0)
n_max <- max(min(n_max, nrow - start_index), 0)
list(n_max = n_max, start_index = start_index)
}
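## Worked example: with a 100-row table, n_max is clamped to the rows remaining after
## start_index:
## set_row_params(nrow = 100, n_max = Inf, start_index = 10)  # n_max = 90, start_index = 10
## set_row_params(nrow = 100, n_max = 20,  start_index = 95)  # n_max = 5,  start_index = 95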
bq_download_plan <- function(n_max,
chunk_size = NULL,
n_chunks = NULL,
start_index = 0) {
params <- set_chunk_params(n_max, chunk_size, n_chunks)
list(
n_max = n_max,
chunk_size = params$chunk_size,
n_chunks = params$n_chunks,
dat = set_chunk_plan(
n_max,
params$chunk_size,
params$n_chunks,
start_index
)
)
}
set_chunk_params <- function(n_max, chunk_size = NULL, n_chunks = NULL) {
if (is.null(chunk_size) && is.null(n_chunks)) {
n_chunks <- 1
}
n_chunks <- n_chunks %||% Inf
chunk_size <- pmin(chunk_size %||% ceiling(n_max / n_chunks), n_max)
n_chunks <- pmin(n_chunks, ceiling(n_max / chunk_size))
list(chunk_size = chunk_size, n_chunks = n_chunks)
}
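## Worked example: only one of chunk_size / n_chunks needs to be supplied, the other is
## derived; with neither, everything goes into a single chunk:
## set_chunk_params(n_max = 100, chunk_size = 30)  # chunk_size = 30, n_chunks = 4
## set_chunk_params(n_max = 100)                   # chunk_size = 100, n_chunks = 1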
set_chunk_plan <- function(n_max, chunk_size, n_chunks, start_index = 0) {
chunk_begin <- start_index + (seq_len(n_chunks) - 1) * chunk_size
chunk_end <- pmin(chunk_begin + chunk_size, start_index + n_max)
chunk_rows <- chunk_end - chunk_begin
tibble::tibble(
chunk_begin,
chunk_rows,
path = sort(
tempfile(rep_len("bq-download-", length.out = n_chunks), fileext = ".json")
)
)
}
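## Worked example: 10 rows in chunks of 4 starting at row 0 gives chunk_begin 0, 4, 8 and
## chunk_rows 4, 4, 2 (the last chunk is truncated at n_max), plus one temp .json path per chunk:
## set_chunk_plan(n_max = 10, chunk_size = 4, n_chunks = 3, start_index = 0)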
bq_download_chunk_handle <- function(x, begin = 0L, max_results = 1e4) {
x <- as_bq_table(x)
assert_that(is.numeric(begin), length(begin) == 1)
assert_that(is.numeric(max_results), length(max_results) == 1)
query <- list(
startIndex = format(begin, scientific = FALSE),
maxResults = format(max_results, scientific = FALSE)
)
url <- paste0(base_url, bq_path(x$project, dataset = x$dataset, table = x$table, data = ""))
url <- httr::modify_url(url, query = prepare_bq_query(query))
if (bq_has_token()) {
token <- .auth$get_cred()
signed <- token$sign("GET", url)
url <- signed$url
headers <- signed$headers
} else {
headers <- list()
}
h <- curl::new_handle(url = url)
curl::handle_setopt(h, useragent = bq_ua())
curl::handle_setheaders(h, .list = headers)
h
}
bq_download_callback <- function(path, progress = NULL) {
force(path)
function(result) {
if (!is.null(progress)) progress$tick()
bq_check_response(
result$status_code,
curl::parse_headers_list(result$headers)[["content-type"]],
result$content
)
con <- file(path, open = "wb")
withr::defer(close(con))
writeBin(result$content, con)
}
}
bq_parse_file <- function(fields, data) {
fields <- brio::read_file(fields)
data <- brio::read_file(data)
bq_parse(fields, data)
}
bq_download_schema <- function(x, path) {
x <- as_bq_table(x)
url <- bq_path(x$project, x$dataset, x$table)
query <- list(fields = "schema")
json <- bq_get(url, query = query, raw = TRUE)
writeBin(json, path)
path
}
|
mlgraph <-
function (net, layout = c("circ", "force", "stress", "rand",
"conc", "bip"), main = NULL, seed = NULL, maxiter = 100,
directed = TRUE, alpha = c(1, 1, 1), scope, collRecip, undRecip,
showLbs, showAtts, cex.main, coord, clu, cex, lwd, pch, lty,
bwd, bwd2, att, bg, mar, pos, asp, ecol, vcol, vcol0, col,
lbat, swp, loops, swp2, mirrorX, mirrorY, mirrorD, mirrorL,
lbs, mirrorV, mirrorH, rot, hds, scl, vedist, ffamily, fstyle,
fsize, fcol, valued, modes, elv, lng, ...)
{
mlv <- net
if (isTRUE("Multilevel" %in% attr(net, "class")) == TRUE) {
if ("bpn" %in% attr(mlv, "class") || "cn2" %in% attr(mlv,
"class")) {
net <- mlv$mlnet[, , which(mlv$modes == "1M")]
met <- mlv$mlnet[, , which(mlv$modes == "2M")]
}
else if ("cn" %in% attr(mlv, "class")) {
met <- multiplex::dichot(mlv$mlnet, c = max(mlv$mlnet) +
1L)
net <- mlv$mlnet
}
else {
vcn <- vector()
for (i in seq_len(length(mlv$mlnet))) {
vcn <- append(vcn, dimnames(mlv$mlnet[[i]])[[1]])
vcn <- append(vcn, dimnames(mlv$mlnet[[i]])[[2]])
}
rm(i)
bmlbs <- unique(vcn)
bmat <- multiplex::transf(mlv$mlnet[[1]], type = "toarray2",
lbs = bmlbs)
for (k in seq(from = 2, to = length(mlv$mlnet))) {
bmat <- multiplex::zbnd(bmat, multiplex::transf(mlv$mlnet[[k]],
type = "toarray2", lbs = bmlbs))
}
rm(k)
dimnames(bmat)[[1]] <- dimnames(bmat)[[2]] <- bmlbs
dimnames(bmat)[[3]] <- attr(mlv$mlnet, "names")
for (i in which(mlv$modes == "2M")) {
bmat[, , i] <- bmat[, , i] + t(bmat[, , i])
}
rm(i)
net <- bmat[, , which(mlv$modes == "1M")]
met <- bmat[, , which(mlv$modes == "2M")]
}
}
else {
if ((missing(modes) == FALSE && is.vector(modes) == TRUE) &&
(missing(clu) == FALSE && is.vector(clu) == TRUE)) {
ifelse(is.numeric(modes) == TRUE, modes <- paste(modes,
"M", sep = ""), NA)
mlv <- list(mlnet = net, lbs = list(dm = dimnames(net)[[1]][which(clu ==
1)], cdm = dimnames(net)[[1]][which(clu == 2)]),
modes = modes)
net <- mlv$mlnet[, , which(mlv$modes == "1M")]
met <- mlv$mlnet[, , which(mlv$modes == "2M")]
}
else {
stop("\"net\" should be of a \"Multilevel\" class object or at least a 3D array with clustering info.")
}
}
ifelse(isTRUE(dim(net)[3] == 1) == TRUE, net <- net[, , 1],
NA)
ifelse(missing(valued) == FALSE && isTRUE(valued == TRUE) ==
TRUE, valued <- TRUE, valued <- FALSE)
ifelse(missing(loops) == FALSE && isTRUE(loops == FALSE) ==
TRUE, loops <- FALSE, loops <- TRUE)
ifelse(missing(collRecip) == FALSE && isTRUE(collRecip ==
FALSE) == TRUE, collRecip <- FALSE, collRecip <- TRUE)
ifelse(missing(undRecip) == FALSE && isTRUE(undRecip == FALSE) ==
TRUE, undRecip <- FALSE, undRecip <- TRUE)
ifelse(missing(mirrorH) == FALSE && isTRUE(mirrorH == TRUE) ==
TRUE, mirrorY <- TRUE, NA)
ifelse(missing(mirrorV) == FALSE && isTRUE(mirrorV == TRUE) ==
TRUE, mirrorX <- TRUE, NA)
if (missing(showLbs) == FALSE && isTRUE(showLbs == TRUE) ==
TRUE) {
showLbs <- TRUE
}
else if (missing(showLbs) == FALSE && isTRUE(showLbs == FALSE) ==
TRUE) {
showLbs <- FALSE
}
else {
ifelse(is.null(dimnames(net)[[1]]) == FALSE, showLbs <- TRUE,
showLbs <- FALSE)
}
ifelse(missing(showAtts) == FALSE && isTRUE(showAtts == FALSE) ==
TRUE, showAtts <- FALSE, showAtts <- TRUE)
ifelse(missing(swp) == FALSE && isTRUE(swp == TRUE) == TRUE,
swp <- TRUE, swp <- FALSE)
ifelse(missing(swp2) == FALSE && isTRUE(swp2 == TRUE) ==
TRUE, swp2 <- TRUE, swp2 <- FALSE)
ifelse(isTRUE(directed == FALSE) == TRUE, directed <- FALSE,
NA)
if (missing(scope) == FALSE) {
if (isTRUE(is.list(scope) == TRUE) == FALSE)
stop("\"scope\" should be a list or a vector of lists.")
scope <- list(scope)
ifelse(is.null(scope[[1]]) == TRUE, scope <- scope[2:length(scope)],
NA)
if (isTRUE(length(scope) > 1L) == TRUE && isTRUE(names(scope[1]) ==
"coord") == TRUE) {
scope <- scope[rev(seq_len(length(scope)))]
flgrev <- TRUE
}
else {
flgrev <- FALSE
}
tmp <- scope[[1]]
if (isTRUE(length(scope) > 1L) == TRUE && isTRUE(length(scope[[1]]) >
1L) == TRUE) {
for (k in 2:length(scope)) {
tmp[length(tmp) + 1L] <- as.list(scope[k])
names(tmp)[length(tmp)] <- attr(scope[k], "names")
}
rm(k)
}
else if (isTRUE(length(scope) > 1L) == TRUE) {
names(tmp) <- attr(scope[1], "names")
for (k in 2:length(scope)) {
if (is.list(scope[[k]]) == TRUE && is.data.frame(scope[[k]]) ==
FALSE) {
for (j in seq_len(length(scope[[k]]))) {
tmp[length(tmp) + 1L] <- as.list(scope[[k]][j])
names(tmp)[length(tmp)] <- attr(scope[[k]][j],
"names")
}
rm(j)
}
else if (is.data.frame(scope[[k]]) == FALSE) {
tmp[length(tmp) + 1L] <- as.list(scope[k])
names(tmp)[length(tmp)] <- attr(scope[k], "names")
}
else if (is.data.frame(scope[[k]]) == TRUE) {
tmp[length(tmp) + 1L] <- as.vector(scope[k])
names(tmp)[length(tmp)] <- attr(scope[k], "names")
}
else {
NA
}
}
rm(k)
}
else {
tmp <- scope[[1]]
}
ifelse(isTRUE(flgrev == TRUE) == TRUE, scope <- tmp[rev(seq_len(length(tmp)))],
scope <- tmp)
for (i in seq_len(length(scope))) {
if (isTRUE(names(scope)[i] %in% c("seed", "main")) ==
TRUE) {
tmpi <- as.vector(scope[[i]])
assign(names(scope)[i], get("tmpi"))
}
else {
if (is.null((scope[[i]])) == FALSE) {
tmpi <- as.vector(scope[[i]])
ifelse(isTRUE(names(scope)[i] != "") == TRUE,
assign(names(scope)[i], get("tmpi")), NA)
}
}
}
rm(i)
}
else {
NA
}
ifelse(missing(lng) == TRUE, lng <- 50, NA)
ifelse(isTRUE(lng <= 2) == TRUE, lng <- 3L, NA)
if (missing(lwd) == TRUE) {
flglwd <- FALSE
lwd <- 1
}
else {
flglwd <- TRUE
}
ifelse(missing(fcol) == TRUE, fcol <- 1, NA)
if (missing(pch) == TRUE) {
pch <- c(rep(21, length(mlv$lbs$dm)), rep(22, length(mlv$lbs$cdm)))
ifelse(missing(vcol) == TRUE, vcol <- c("
NA)
ifelse(missing(vcol0) == TRUE, vcol0 <- c("
"
}
else {
ifelse(isTRUE(length(pch) == 2) == TRUE, pch <- c(rep(pch[1],
length(mlv$lbs$dm)), rep(pch[2], length(mlv$lbs$cdm))),
pch <- pch[seq_len(length(mlv$lbs$dm) + length(mlv$lbs$cdm))])
}
ifelse(missing(bwd) == TRUE, bwd <- 1, NA)
ifelse(isTRUE(bwd < 0L) == TRUE, bwd <- 0L, NA)
ifelse(missing(bg) == TRUE, bg <- graphics::par()$bg, NA)
ifelse(missing(cex.main) == TRUE, cex.main <- graphics::par()$cex.main,
NA)
ifelse(missing(rot) == TRUE, NA, rot <- rot[1] * -1)
if (isTRUE(length(alpha) < 2) == TRUE) {
alfa <- 1
alpha <- rep(alpha, 3)
}
else {
alfa <- alpha[2]
}
if (isTRUE(length(alpha) < 3) == TRUE)
alpha <- append(alpha, 0.1)
if (!(missing(hds)) && missing(scl) == TRUE) {
if (isTRUE(hds > 1L) == TRUE) {
hds <- (hds/1.5)
}
else if (isTRUE(hds < 1L) == TRUE) {
hds <- (hds/(hds + 0.15))
}
else if (isTRUE(hds == 0L) == TRUE) {
hds <- 0.01
}
else {
NA
}
}
else {
ifelse(missing(scl) == TRUE, hds <- 1L, hds <- 1L * scl)
}
ifelse(isTRUE(dim(net)[1] > 8) == TRUE || isTRUE(valued ==
TRUE) == TRUE || isTRUE(min(lwd) >= 4) == TRUE, hds <- hds *
0.75, NA)
ifelse(missing(scl) == TRUE, scl <- rep(1, 2), NA)
ifelse(isTRUE(length(scl) == 1) == TRUE, scl <- rep(scl,
2), scl <- scl[1:2])
ifelse(missing(vedist) == TRUE, vedist <- 0, NA)
ifelse(isTRUE(vedist > 1L) == TRUE, vedist <- 1L, NA)
n <- dim(net)[1]
ifelse(isTRUE(is.na(dim(net)[3]) == TRUE) == TRUE, z <- 1L,
z <- dim(net)[3])
if (missing(lbs) == TRUE) {
ifelse(is.null(dimnames(net)[[1]]) == TRUE, lbs <- as.character(seq_len(dim(net)[1])),
lbs <- dimnames(net)[[1]])
}
else {
NA
}
ifelse(isTRUE(swp == TRUE) == TRUE && isTRUE(z > 1L) == TRUE,
net <- net[, , rev(seq_len(z))], NA)
if (missing(att) == FALSE && is.array(att) == TRUE) {
if (isTRUE(n != dim(att)[1]) == TRUE) {
warning("Dimensions in \"net\" and \"att\" differ. No attributes are shown.")
showAtts <- FALSE
}
}
netd <- multiplex::dichot(net, c = 1L)
if (isTRUE(collRecip == TRUE) == TRUE && isTRUE(valued ==
TRUE) == FALSE) {
if (isTRUE(z == 1L) == TRUE) {
nt <- netd + t(netd)
rcp <- multiplex::dichot(nt, c = 2L)
rcp[lower.tri(rcp, diag = TRUE)] <- 0L
}
else {
nt <- array(0L, dim = c(n, n, z))
dimnames(nt)[[1]] <- dimnames(nt)[[2]] <- lbs
dimnames(nt)[[3]] <- dimnames(net)[[3]]
for (i in seq_len(z)) {
nt[, , i] <- netd[, , i] + t(netd[, , i])
}
rm(i)
rcp <- multiplex::dichot(nt, c = 2L)
for (i in seq_len(z)) {
rcp[, , i][lower.tri(rcp[, , i], diag = TRUE)] <- 0L
}
rm(i)
}
}
else {
NA
}
bd <- multiplex::bundles(netd, loops = loops, lb2lb = FALSE,
collapse = FALSE)
ifelse(isTRUE(z == 1L) == TRUE, r <- 1L, r <- length(bd[[1]]))
ifelse(isTRUE(sum(net) == 0) == TRUE && isTRUE(loops == TRUE) ==
TRUE, bd$loop <- character(0), NA)
bds <- multiplex::summaryBundles(bd, byties = TRUE)
m <- dim(met)[1]
ifelse(isTRUE(is.na(dim(met)[3]) == TRUE) == TRUE, zz <- 1L,
zz <- dim(met)[3])
if (isTRUE(zz == 1L) == TRUE) {
mt <- met + t(met)
rcpm <- multiplex::dichot(mt, c = 2L)
rcpm[lower.tri(rcpm, diag = TRUE)] <- 0L
}
else {
mt <- array(0L, dim = c(m, m, zz))
dimnames(mt)[[1]] <- dimnames(mt)[[2]] <- lbs
dimnames(mt)[[3]] <- dimnames(met)[[3]]
for (i in seq_len(zz)) {
mt[, , i] <- met[, , i] + t(met[, , i])
}
rm(i)
rcpm <- multiplex::dichot(mt, c = 2L)
for (i in seq_len(zz)) {
rcpm[, , i][lower.tri(rcpm[, , i], diag = TRUE)] <- 0L
}
rm(i)
}
bdm <- multiplex::bundles(met, loops = FALSE, lb2lb = FALSE,
collapse = FALSE)
bdsm <- multiplex::summaryBundles(bdm, byties = TRUE)
ifelse(isTRUE(zz == 1L) == TRUE, rr <- 1L, rr <- length(bdm[[1]]))
ifelse(missing(ecol) == TRUE, ecol <- grDevices::gray.colors(r,
start = 0.1, end = 0.5), NA)
ifelse(missing(ecol) == FALSE && isTRUE(length(ecol) == 2) ==
TRUE, ecol <- c(rep(ecol[1], length(which(mlv$modes ==
"1M"))), rep(ecol[2], length(which(mlv$modes == "2M")))),
NA)
ifelse(isTRUE(ecol == 0) == TRUE, ecol <- "
if (isTRUE(valued == TRUE) == TRUE) {
ifelse(missing(lty) == TRUE, lty <- rep(1, r + rr), NA)
}
else {
ifelse(missing(lty) == TRUE, lty <- seq_len(r + rr),
NA)
}
if (isTRUE((z + zz) == 1L) == TRUE) {
Lt <- lty[1]
vecol <- ecol[1]
}
else {
ifelse(isTRUE(length(ecol) == 1L) == TRUE, vecol <- rep(ecol,
z + zz), vecol <- rep(ecol, z + zz)[seq_len(z + zz)])
ifelse(isTRUE(length(lty) == 1L) == TRUE, Lt <- seq_len(r +
rr), Lt <- rep(lty, r + rr)[seq_len(r + rr)])
if (isTRUE(length(lty) == length(Lt)) == FALSE) {
Ltc <- seq_along(vecol)
}
else {
if (isTRUE(seq(lty) == lty) == TRUE) {
Ltc <- Lt
}
else {
ifelse(isTRUE(swp == TRUE) == TRUE && isTRUE(valued ==
TRUE) == FALSE, Ltc <- rev(seq_len(r + rr)),
Ltc <- seq_len(r + rr))
}
}
}
vltz <- Lt
if (missing(clu) == FALSE) {
if ("cn2" %in% attr(mlv, "class")) {
clu <- clu[[1]]
}
else {
NA
}
if (is.list(clu) == TRUE) {
ifelse(is.factor(clu[[1]]) == TRUE, uact <- levels(clu[[1]]),
uact <- unique(clu[[1]]))
ifelse(is.factor(clu[[2]]) == TRUE, uevt <- levels(clu[[2]]),
uevt <- unique(clu[[2]]))
clutmp <- clu
if (is.character(uact) == TRUE) {
tmpa <- as.vector(clu[[1]])
for (i in seq_len(length(uact))) {
tmpa[which(tmpa == uact[i])] <- i
}
rm(i)
clu[[1]] <- as.numeric(tmpa)
rm(tmpa)
}
if (is.character(uevt) == TRUE) {
tmpe <- as.vector(clu[[2]])
for (i in seq_len(length(uevt))) {
tmpe[which(tmpe == uevt[i])] <- i
}
rm(i)
clu[[2]] <- as.numeric(tmpe)
rm(tmpe)
}
if (any(clutmp[[2]] %in% clutmp[[1]]) == TRUE) {
k <- 0L
tmp2 <- clutmp[[2]]
while (any(tmp2 %in% clutmp[[1]]) == TRUE) {
tmp2 <- replace(tmp2, which(tmp2 == min(tmp2)),
(max(clutmp[[1]]) + k))
k <- k + 1L
}
clutmp[[2]] <- tmp2
rm(tmp2)
clu <- as.vector(unlist(clutmp))
}
else if (any(clu[[2]] %in% clu[[1]]) == TRUE) {
k <- 0L
tmp2 <- clu[[2]]
while (any(tmp2 %in% clu[[1]]) == TRUE) {
tmp2 <- replace(tmp2, which(tmp2 == min(tmp2)),
(max(clu[[1]]) + k))
k <- k + 1L
}
clu[[2]] <- tmp2
rm(tmp2)
clu <- as.vector(unlist(clu))
}
}
else {
NA
}
nclu <- nlevels(factor(clu))
}
else {
nclu <- 1L
}
flgcx <- FALSE
if (missing(cex) == TRUE && isTRUE(loops == FALSE) == TRUE) {
if (isTRUE(length(bds) == 0) == TRUE) {
cex <- 1L
}
else {
cex <- length(bds[[1]])/2L
if (isTRUE(length(bds) > 1L) == TRUE) {
for (i in 2:length(bds)) ifelse(isTRUE(cex <
(length(bds[[i]])/2L)) == TRUE, cex <- (length(bds[[i]])/2L),
NA)
}
cex <- ceiling(cex)
}
}
else if (missing(cex) == TRUE) {
cex <- 1L
}
if (isTRUE(length(cex) == 1L) == TRUE) {
cex <- rep(cex, n)
}
else {
if (is.vector(cex) == FALSE)
stop("'cex' must be a vector")
cex[which(is.na(cex))] <- 0
cex <- cex[seq_len(n)]
flgcx <- TRUE
}
if (isTRUE(flgcx == TRUE) == TRUE && isTRUE(max(cex) > 10L) ==
TRUE) {
if (isTRUE(mean(cex) > 20L) == TRUE) {
cex <- (((cex - min(cex))/(max(cex) - min(cex))) *
10L)
}
else {
cex <- (cex/(norm(as.matrix(cex), type = "M"))) *
10L
}
ifelse(isTRUE(min(cex) == 0) == TRUE, cex <- cex + 1L +
(2L/n), NA)
}
else if (isTRUE(flgcx == FALSE) == TRUE && isTRUE(valued ==
TRUE) == TRUE) {
ifelse(isTRUE(max(cex) >= 21L) == TRUE, cex <- 20L, NA)
}
else {
NA
}
if (missing(fsize) == TRUE) {
ifelse(isTRUE(max(cex) < 2) == TRUE, fsize <- cex * 0.66,
fsize <- cex * 0.33)
}
else {
fsize <- fsize/10
}
ifelse(isTRUE(valued == FALSE) == TRUE && isTRUE(bwd > 1L) ==
TRUE, bwd <- 1L, NA)
ifelse(isTRUE(max(cex) < 2) == TRUE, NA, bwd <- bwd * 0.75)
if (isTRUE(length(pch) == 1L) == TRUE) {
pch <- rep(pch, n)
}
else if (isTRUE(length(pch) == nclu) == TRUE) {
if (identical(pch, clu) == FALSE) {
tmppch <- rep(0, n)
for (i in seq_len(nclu)) {
tmppch[which(clu == (levels(factor(clu))[i]))] <- pch[i]
}
rm(i)
pch <- tmppch
rm(tmppch)
}
}
else if (isTRUE(length(pch) != n) == TRUE) {
pch <- rep(pch[1], n)
}
if (missing(vcol) == TRUE) {
vcol <- grDevices::gray.colors(nclu)
ifelse(missing(col) == TRUE, NA, vcol <- col)
}
else {
if (isTRUE(length(vcol) == 1L) == TRUE) {
vcol <- rep(vcol, n)
}
else if (isTRUE(length(vcol) == nclu) == TRUE) {
if (identical(vcol, clu) == FALSE) {
tmpvcol <- rep(0, n)
for (i in seq_len(nclu)) {
tmpvcol[which(clu == (levels(factor(clu))[i]))] <- vcol[i]
}
rm(i)
vcol <- tmpvcol
rm(tmpvcol)
}
}
else if (isTRUE(length(vcol) != n) == TRUE & isTRUE(nclu ==
1) == TRUE) {
vcol <- rep(vcol[1], n)
}
vcol[which(is.na(vcol))] <- graphics::par()$bg
vcol[which(vcol == 0)] <- graphics::par()$bg
}
if (isTRUE(any(pch %in% 21:25)) == TRUE) {
if (missing(vcol0) == TRUE) {
vcol0 <- vcol
}
else {
ifelse(missing(vcol0) == TRUE, NA, vcol0[which(is.na(vcol0))] <- 1)
}
if (isTRUE(length(vcol0) == 1L) == TRUE) {
vcol0 <- rep(vcol0, n)
}
else if (isTRUE(length(vcol0) == nclu) == TRUE) {
if (identical(vcol0, clu) == FALSE) {
tmpvcol0 <- rep(0, n)
for (i in seq_len(nclu)) {
tmpvcol0[which(clu == (levels(factor(clu))[i]))] <- vcol0[i]
}
rm(i)
vcol0 <- tmpvcol0
rm(tmpvcol0)
}
}
else if (isTRUE(length(vcol0) != n) == TRUE | isTRUE(nclu ==
1) == TRUE) {
vcol0 <- rep(vcol0[1], n)
}
}
else {
vcol0 <- vcol
}
ifelse(isTRUE(n > 20) == TRUE, ffds <- 0.2, ffds <- 0)
fds <- 180L - (n * ffds)
if (isTRUE(flgcx == TRUE) == TRUE) {
fds <- fds - 10L
}
else if (isTRUE(flgcx == FALSE) == TRUE) {
NA
}
if (isTRUE(max(scl) < 1) == TRUE) {
fds <- fds - (1/(mean(scl)/30L))
}
else if (isTRUE(max(scl) > 1) == TRUE) {
fds <- fds + (mean(scl) * 20L)
}
else {
NA
}
if (missing(coord) == FALSE) {
if (isTRUE(nrow(coord) == n) == FALSE)
stop("Length of 'coord' does not match network order.")
flgcrd <- TRUE
crd <- coord
}
else if (missing(coord) == TRUE) {
flgcrd <- FALSE
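        # compute vertex coordinates with the requested layout:
        # "force", "circ", "stress", "rand", "conc", or "bip"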
switch(match.arg(layout), force = {
crd <- frcd(zbnd(netd, met), seed = seed, maxiter = maxiter)
}, circ = {
crd <- data.frame(X = sin(2L * pi * ((0:(n - 1L))/n)),
Y = cos(2L * pi * ((0:(n - 1L))/n)))
}, stress = {
crd <- stsm(zbnd(netd, met), seed = seed, maxiter = maxiter,
...)
}, rand = {
set.seed(seed)
crd <- data.frame(X = round(stats::runif(n) * 1L,
5), Y = round(stats::runif(n) * 1L, 5))
}, conc = {
crd <- conc(netd, ...)
}, bip = {
act <- nrm(rng(length(mlv$lbs$dm)))
evt <- nrm(rng(length(mlv$lbs$cdm)))
Act <- cbind(rep(0, length(mlv$lbs$dm)), act)
Evt <- cbind(rep(1, length(mlv$lbs$cdm)), evt)
crd <- rbind(Act, Evt)
crd[which(is.nan(crd))] <- 0.5
crd[, 2] <- crd[, 2] * cos(pi) - crd[, 1] * sin(pi)
rownames(crd) <- lbs
fds <- fds - 30L
})
}
if (match.arg(layout) == "bip") {
ifelse(missing(asp) == TRUE, asp <- 2L, asp <- asp[1] *
2L)
}
else {
ifelse(missing(asp) == TRUE, asp <- 1, NA)
}
if (missing(rot) == FALSE) {
crd[, 1:2] <- xyrt(crd[, 1:2], as.numeric(rot))
crd[, 1:2] <- crd[, 1:2] - min(crd[, 1:2])
cnt <- 1L
ifelse(isTRUE(n == 2) == TRUE && isTRUE(rot == -90) ==
TRUE, rot <- -89.9, NA)
}
else {
cnt <- 0
}
if (isTRUE(flgcrd == FALSE) == TRUE) {
if (match.arg(layout) == "circ" && missing(pos) == TRUE) {
angl <- vector()
length(angl) <- n
for (i in seq_len(n)) {
ifelse((atan2((crd[i, 2] - cnt), (crd[i, 1] -
cnt)) * (180L/pi)) >= 0, angl[i] <- atan2((crd[i,
2] - cnt), (crd[i, 1] - cnt)) * (180L/pi),
angl[i] <- ((atan2((crd[i, 2] - cnt), (crd[i,
1] - cnt)) * (180L/pi))%%180L) + 180L)
}
rm(i)
pos <- vector()
for (i in seq_len(length(angl))) {
if (isTRUE(65 < angl[i]) == TRUE && isTRUE(115 >
angl[i]) == TRUE) {
pos <- append(pos, 3)
}
else if (isTRUE(115 <= angl[i]) == TRUE && isTRUE(245 >=
angl[i]) == TRUE) {
pos <- append(pos, 2)
}
else if (isTRUE(245 < angl[i]) == TRUE && isTRUE(295 >
angl[i]) == TRUE) {
pos <- append(pos, 1)
}
else {
pos <- append(pos, 4)
}
}
rm(i)
}
}
if (missing(pos) == TRUE) {
pos <- 4
}
else {
if (isTRUE(pos < 0L) == TRUE | isTRUE(pos > 4L) == TRUE)
stop("Invalid \"pos\" value.")
}
ifelse(missing(mirrorX) == FALSE && isTRUE(mirrorX == TRUE) ==
TRUE || missing(mirrorV) == FALSE && isTRUE(mirrorV ==
TRUE) == TRUE, crd[, 1] <- crd[, 1] * cos(pi) - crd[,
2] * sin(pi), mirrorX <- FALSE)
ifelse(missing(mirrorY) == FALSE && isTRUE(mirrorY == TRUE) ==
TRUE || missing(mirrorH) == FALSE && isTRUE(mirrorH ==
TRUE) == TRUE, crd[, 2] <- crd[, 2] * cos(pi) - crd[,
1] * sin(pi), mirrorY <- FALSE)
if (match.arg(layout) == "circ" && isTRUE(flgcrd == FALSE) ==
TRUE) {
if (isTRUE(mirrorX == TRUE) == TRUE && isTRUE(length(pos) ==
n) == TRUE) {
pos[which(pos == 2)] <- 0
pos[which(pos == 4)] <- 2
pos[which(pos == 0)] <- 4
}
else if (isTRUE(mirrorY == TRUE) == TRUE && isTRUE(length(pos) ==
n) == TRUE) {
pos[which(pos == 1)] <- 0
pos[which(pos == 3)] <- 1
pos[which(pos == 0)] <- 3
}
else {
NA
}
}
if (missing(mirrorL) == FALSE && isTRUE(mirrorL == TRUE) ==
TRUE) {
crd[, 1:2] <- xyrt(crd[, 1:2], as.numeric(45))
crd[, 1:2] <- crd[, 1:2] - min(crd[, 1:2])
crd[, 2] <- crd[, 2] * cos(pi) - crd[, 1] * sin(pi)
crd[, 1:2] <- xyrt(crd[, 1:2], as.numeric(-45))
crd[, 1:2] <- crd[, 1:2] - min(crd[, 1:2])
}
else if (missing(mirrorD) == FALSE && isTRUE(mirrorD == TRUE) ==
TRUE) {
crd[, 1:2] <- xyrt(crd[, 1:2], as.numeric(-45))
crd[, 1:2] <- crd[, 1:2] - min(crd[, 1:2])
crd[, 2] <- crd[, 2] * cos(pi) - crd[, 1] * sin(pi)
crd[, 1:2] <- xyrt(crd[, 1:2], as.numeric(45))
crd[, 1:2] <- crd[, 1:2] - min(crd[, 1:2])
}
else {
NA
}
if (isTRUE(n > 1) == TRUE) {
rat <- (max(crd[, 1]) - min(crd[, 1]))/(max(crd[, 2]) -
min(crd[, 2]))
crd[, 1] <- (crd[, 1] - min(crd[, 1]))/(max(crd[, 1]) -
min(crd[, 1]))
ifelse(isTRUE(rat > 0) == TRUE, crd[, 2] <- ((crd[, 2] -
min(crd[, 2]))/(max(crd[, 2]) - min(crd[, 2]))) *
(1L/rat), crd[, 2] <- ((crd[, 2] - min(crd[, 2]))/(max(crd[,
2]) - min(crd[, 2]))) * (rat))
}
else {
NA
}
fds <- fds + (vedist * -10)
if (isTRUE(flgcrd == TRUE) == TRUE && isTRUE(ncol(crd) >
2) == TRUE) {
lbgml <- tolower(as.vector(crd[, 3]))
lbnet <- tolower(as.vector(lbs))
lbp <- vector()
for (i in seq_len(nrow(crd))) {
lbp <- append(lbp, which(lbnet[i] == lbgml))
}
rm(i)
if (isTRUE(ncol(crd) > 3) == TRUE) {
atgml <- as.vector(crd[, 4])
atgml[which(is.na(atgml))] <- ""
atts <- atgml[lbp]
}
nds <- data.frame(X = as.numeric(as.vector(crd[lbp, 1])),
Y = as.numeric(as.vector(crd[lbp, 2])))
}
else {
nds <- data.frame(X = as.numeric(as.vector(crd[, 1])),
Y = as.numeric(as.vector(crd[, 2])))
}
nds <- ((2L/max(nds * (0.75))) * (nds * 0.75)) * (0.5)
mscl <- mean(scl)
cex <- cex * mscl
fsize <- fsize * mscl
omr <- graphics::par()$mar
omi <- graphics::par()$mai
if (missing(mar) == TRUE) {
mar <- c(0, 0, 0, 0)
}
else {
mar <- omr
}
ifelse(is.null(main) == TRUE, graphics::par(mar = mar), graphics::par(mar = mar +
c(0, 0, cex.main, 0)))
obg <- graphics::par()$bg
graphics::par(bg = grDevices::adjustcolor(bg, alpha = alpha[3]))
if (isTRUE(loops == TRUE) == TRUE) {
ylim <- c(min(nds[, 2]) - ((cex[1])/200L), max(nds[,
2]) + ((cex[1])/200L))
xlim <- c(min(nds[, 1]) - ((cex[1])/200L), max(nds[,
1]) + ((cex[1])/200L))
}
else if (isTRUE(flgcx == TRUE) == TRUE) {
ylim <- c(min(nds[, 2]) - (max(cex)/500L), max(nds[,
2]) + (max(cex)/500L))
xlim <- c(min(nds[, 1]) - (max(cex)/500L), max(nds[,
1]) + (max(cex)/500L))
}
else {
ylim <- c(min(nds[, 2]) - ((cex[1])/200L), max(nds[,
2]) + ((cex[1])/200L))
xlim <- c(min(nds[, 1]) - ((cex[1])/200L), max(nds[,
1]) + ((cex[1])/200L))
}
suppressWarnings(graphics::plot(nds, type = "n", axes = FALSE,
xlab = "", ylab = "", ylim = ylim, xlim = xlim, asp = asp,
main = main, cex.main = cex.main, ...))
tlbs <- vector()
if (isTRUE(length(bds) > 0) == TRUE) {
for (i in seq_len(length(attr(bds, "names")))) {
ifelse(isTRUE(length(multiplex::dhc(attr(bds, "names")[i],
sep = "")) > 4L) == TRUE, tlbs <- append(tlbs,
tolower(paste(multiplex::dhc(attr(bds, "names")[i],
sep = "")[1:4], collapse = ""))), tlbs <- append(tlbs,
tolower(attr(bds, "names"))[i]))
}
rm(i)
}
tlbsm <- vector()
if (isTRUE(length(bdsm) > 0) == TRUE) {
for (i in seq_len(length(attr(bdsm, "names")))) {
ifelse(isTRUE(length(multiplex::dhc(attr(bdsm, "names")[i],
sep = "")) > 4L) == TRUE, tlbsm <- append(tlbsm,
tolower(paste(multiplex::dhc(attr(bdsm, "names")[i],
sep = "")[1:4], collapse = ""))), tlbsm <- append(tlbsm,
tolower(attr(bdsm, "names"))[i]))
}
rm(i)
}
trcpm <- multiplex::transf(rcpm, type = "tolist")
netdrp <- net
ifelse(isTRUE(valued == TRUE) == TRUE && isTRUE(max(net) >
10L) == TRUE, fnnetdrp <- (norm(as.matrix(netdrp), type = "F")),
NA)
cx <- cex
if (isTRUE(swp == TRUE) == TRUE) {
Lt <- Lt[rev(seq_len(length(Lt)))]
lwd <- lwd[length(lwd):1]
ifelse(isTRUE(valued == TRUE) == TRUE, vecol <- vecol[rev(seq_len(length(vecol[seq_len(z)])))],
NA)
alfa <- alfa[rev(seq_len(length(alfa)))]
}
if (isTRUE(loops == TRUE) == TRUE && isTRUE(valued == TRUE) ==
TRUE && isTRUE(max(netdrp) > 10L) == TRUE) {
if (isTRUE(z == 1L) == TRUE) {
diag(netdrp) <- (diag(netdrp)/fnnetdrp) * (15L)
}
else {
for (i in seq_len(z)) {
diag(netdrp[, , i]) <- as.vector((diag(netdrp[,
, i])/fnnetdrp) * (15L))
}
}
}
else {
NA
}
if (isTRUE(loops == TRUE) == TRUE) {
tlbslp <- tlbs
tlbs <- tlbs[which(tlbs != "loop")]
}
else {
NA
}
if ((isTRUE(collRecip == TRUE) == TRUE && isTRUE(c("recp") %in%
attr(bds, "names")) == TRUE) && isTRUE(valued == TRUE) ==
FALSE) {
trcp <- multiplex::transf(rcp, type = "tolist")
}
else {
NA
}
if (isTRUE(length(c(tlbs, tlbsm)) > 0) == TRUE) {
for (k in seq_len(length(tlbsm))) {
prs <- as.numeric(multiplex::dhc(bdsm[[k]]))
pars <- as.matrix(nds[as.numeric(levels(factor(multiplex::dhc(bdsm[[k]])))),
])
rbdsm <- length(bdsm[[k]])
if (isTRUE(rbdsm > 0L) == TRUE) {
qn <- which(tlbsm[k] == attr(bdm, "names"))
if (isTRUE(zz == 1L) == TRUE) {
ifelse(isTRUE(length(lty) == 1) == TRUE, vlt <- rep(lty,
rbdsm), vlt <- rep(Lt[zz + 1], rbdsm))
ifelse(isTRUE(length(ecol) == 1L) == TRUE,
vecolm <- rep(ecol[1], rbdsm), vecolm <- rep(ecol[z +
1], rbdsm))
tbnd <- as.vector(unlist(bdm[qn]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bdsm[[k]])) ==
TRUE, vlt <- append(vlt, rep(Lt, qn)),
NA)
ifelse(isTRUE(any(tbnd %in% bdsm[[k]])) ==
TRUE, vltz <- append(vltz, rep(Lt, qn)),
NA)
}
vltc <- vlt[1]
}
else if (isTRUE(zz > 1L) == TRUE) {
vlt <- vector()
for (i in seq_along(Lt)) {
tbnd <- as.vector(unlist(bdm[[qn]][i]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bdsm[[k]])) ==
TRUE, vlt <- append(vlt, rep(Lt[(z +
1):length(Lt)][i], length(which(tbnd %in%
bdsm[[k]])))), NA)
ifelse(isTRUE(any(tbnd %in% bdsm[[k]])) ==
TRUE, vltz <- append(vltz, rep(Lt[(z +
1):length(Lt)][i], length(which(tbnd %in%
bdsm[[k]])))), NA)
}
}
rm(i)
if (isTRUE(length(lty) == 1L) == TRUE) {
vlt1 <- rep(lty, length(vlt))
vltc <- vlt
}
else {
vltc <- vector()
if (isTRUE(Lt == Ltc) == FALSE) {
for (i in seq_along(Ltc)) {
tbnd <- as.vector(unlist(bdm[[qn]][i]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bdsm[[k]])) ==
TRUE, vltc <- append(vltc, rep(Ltc[(z +
1):length(Ltc)][i], length(which(tbnd %in%
bdsm[[k]])))), NA)
}
}
rm(i)
}
else {
if (isTRUE(seq(lty) == lty) == TRUE) {
vltc <- vlt
}
else {
for (i in seq_along(lty)) {
vltc <- append(vltc, replace(vlt[which(vlt ==
lty[i])], vlt[which(vlt == lty[i])] !=
i, i))
}
rm(i)
}
}
}
}
ifelse(isTRUE(swp2 == TRUE) == TRUE && isTRUE(tlbsm[k] %in%
c("recp")) == TRUE, bdsm[[k]] <- multiplex::swp(bdsm[[k]]),
NA)
if (isTRUE(valued == TRUE) == TRUE) {
Lw <- vector()
i <- 1
for (j in seq_len(length(bdsm[[k]]))) {
qn <- c(prs[i], prs[(i + 1)])
ifelse(isTRUE(z == 1L) == TRUE, Lw <- append(Lw,
met[qn[1], qn[2]]), Lw <- append(Lw, met[qn[1],
qn[2]] + t(met[qn[1], qn[2]])))
i <- i + 2L
}
rm(j)
rm(i)
if (isTRUE(max(met) > 10L) == TRUE) {
lw <- (Lw/fnnetdrp) * (10L * 5L)
}
else {
lw <- Lw
}
}
else if (isTRUE(valued == TRUE) == FALSE) {
ifelse(isTRUE(length(bdsm) == 0) == TRUE, NA,
lw <- rep(lwd[1], rbdsm))
}
lwdfct <- (lw + (1L/lw)) * mscl
if (isTRUE(collRecip == TRUE) == TRUE && isTRUE(tlbsm[k] %in%
c("recp")) == TRUE) {
bw <- 0L
hd <- 0L
ifelse(isTRUE(valued == TRUE) == TRUE, lw <- lwdfct *
1L, lw <- lwdfct * 2L)
}
else if (isTRUE(collRecip == TRUE) == FALSE &&
isTRUE(tlbsm[k] %in% c("recp")) == TRUE) {
bw <- bwd
hd <- hds
}
else {
bw <- bwd
hd <- 0
lw <- lwdfct
}
ifelse(isTRUE(directed == FALSE) == TRUE, hd <- 0L,
NA)
ifelse(isTRUE(flglwd == TRUE) == TRUE, lw <- rep(lwd[1],
rbdsm), NA)
if (isTRUE(collRecip == TRUE) == TRUE && isTRUE(tlbsm[k] %in%
c("recp")) == FALSE) {
flgcr <- numeric()
sbdsm <- multiplex::swp(bdsm[[k]])
if (any(sbdsm %in% unlist(trcpm)) == TRUE) {
for (i in seq_len(zz)) {
ifelse(any(sbdsm %in% trcpm[[i]]) == TRUE,
flgcr <- append(flgcr, as.numeric(i)),
NA)
}
rm(i)
}
}
else {
flgcr <- rep(0L, zz)
}
pars[, 1] <- pars[, 1] * scl[1]
pars[, 2] <- pars[, 2] * scl[2]
if (isTRUE(zz == 1L) == TRUE) {
ccbnd(pars, rbdsm, bdsm[[k]], vlt, cx * mscl,
lw, vecolm, bw, alfa, fds, flgcx, flgcr,
hd, m)
}
else {
ifelse(isTRUE(length(lty) == 1L) == TRUE, ccbnd(pars,
rbdsm, bdsm[[k]], vlt1, cx * mscl, lw, vecol[vltc],
bw, alfa, fds, flgcx, flgcr, hd, m), ccbnd(pars,
rbdsm, bdsm[[k]], vlt, cx * mscl, lw, vecol[vltc],
bw, alfa, fds, flgcx, flgcr, hd, m))
}
}
else {
NA
}
}
rm(k)
for (k in seq_len(length(tlbs))) {
prs <- as.numeric(multiplex::dhc(bds[[k]]))
pars <- as.matrix(nds[as.numeric(levels(factor(multiplex::dhc(bds[[k]])))),
])
rbds <- length(bds[[k]])
if (isTRUE(rbds > 0L) == TRUE) {
qn <- which(tlbs[k] == attr(bd, "names"))
if (isTRUE(z == 1L) == TRUE) {
vlt <- rep(Lt, rbds)
vecol <- rep(ecol[1], rbds)
tbnd <- as.vector(unlist(bd[qn]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bds[[k]])) ==
TRUE, vlt <- append(vlt, rep(Lt, qn)),
NA)
ifelse(isTRUE(any(tbnd %in% bds[[k]])) ==
TRUE, vltz <- append(vltz, rep(Lt, qn)),
NA)
}
vltc <- vlt[1]
}
else if (isTRUE(z > 1L) == TRUE) {
vlt <- vector()
for (i in seq_along(Lt)) {
tbnd <- as.vector(unlist(bd[[qn]][i]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bds[[k]])) ==
TRUE, vlt <- append(vlt, rep(Lt[i], length(which(tbnd %in%
bds[[k]])))), NA)
ifelse(isTRUE(any(tbnd %in% bds[[k]])) ==
TRUE, vltz <- append(vltz, rep(Lt[i],
length(which(tbnd %in% bds[[k]])))),
NA)
}
}
rm(i)
if (isTRUE(length(lty) == 1L) == TRUE) {
vlt1 <- rep(lty, length(vlt))
vltc <- vlt
}
else {
vltc <- vector()
if (isTRUE(Lt == Ltc) == FALSE) {
for (i in seq_along(Ltc)) {
tbnd <- as.vector(unlist(bd[[qn]][i]))
if (isTRUE(length(tbnd) > 0L) == TRUE) {
ifelse(isTRUE(any(tbnd %in% bds[[k]])) ==
TRUE, vltc <- append(vltc, rep(Ltc[i],
length(which(tbnd %in% bds[[k]])))),
NA)
}
}
rm(i)
}
else {
if (isTRUE(seq(lty) == lty) == TRUE) {
vltc <- vlt
}
else {
for (i in seq_along(lty)) {
vltc <- append(vltc, replace(vlt[which(vlt ==
lty[i])], vlt[which(vlt == lty[i])] !=
i, i))
}
rm(i)
}
}
}
}
if (isTRUE(flgcx == TRUE) == FALSE) {
cx <- rep(cex[1], 2)
}
if (isTRUE(valued == TRUE) == TRUE) {
Lw <- vector()
i <- 1
for (j in seq_len(length(bds[[k]]))) {
qn <- c(prs[i], prs[(i + 1)])
if (isTRUE(collRecip == TRUE) == TRUE) {
ifelse(isTRUE(z == 1L) == TRUE, Lw <- append(Lw,
netdrp[qn[1], qn[2]]), Lw <- append(Lw,
netdrp[qn[1], qn[2], vltc[j]] + t(netdrp[qn[1],
qn[2], vltc[j]])))
}
else if (isTRUE(collRecip == FALSE) == TRUE) {
ifelse(isTRUE(z == 1L) == TRUE, Lw <- append(Lw,
netdrp[qn[1], qn[2]]), Lw <- append(Lw,
netdrp[qn[1], qn[2], vltc[j]]))
}
i <- i + 2L
}
if (isTRUE(max(netdrp) > 10L) == TRUE) {
lw <- (Lw/fnnetdrp) * (10L * 3L)
}
else {
lw <- Lw
}
}
else if (isTRUE(valued == TRUE) == FALSE) {
ifelse(isTRUE(length(bds) == 0) == TRUE, NA,
lw <- rep(lwd[1], rbds))
}
ifelse(isTRUE(swp2 == TRUE) == TRUE && isTRUE(tlbs[k] %in%
c("recp")) == TRUE, bds[[k]] <- multiplex::swp(bds[[k]]),
NA)
ifelse(isTRUE(flglwd == TRUE) == TRUE, lw <- rep(lwd[1],
rbds), NA)
if (isTRUE(collRecip == TRUE) == TRUE && isTRUE(tlbs[k] %in%
c("recp")) == TRUE) {
bw <- 0L
hd <- hds
lw <- lw * mscl
}
else if (isTRUE(collRecip == TRUE) == FALSE &&
isTRUE(tlbs[k] %in% c("recp")) == TRUE) {
hd <- hds
}
else {
bw <- bwd
hd <- hds
lw <- lw * mscl
}
ifelse("cn" %in% attr(mlv, "class") && isTRUE(undRecip ==
TRUE) == TRUE, hd <- 0L, NA)
ifelse(isTRUE(directed == FALSE) == TRUE, hd <- 0L,
NA)
if (isTRUE(collRecip == TRUE) == TRUE && isTRUE(tlbs[k] %in%
c("recp")) == FALSE && isTRUE(valued == TRUE) ==
FALSE && isTRUE(c("recp") %in% attr(bds, "names")) ==
TRUE) {
flgcr <- numeric()
sbds <- multiplex::swp(bds[[k]])
if (any(sbds %in% unlist(trcp)) == TRUE) {
for (i in seq_len(z)) {
ifelse(any(sbds %in% trcp[[i]]) == TRUE,
flgcr <- append(flgcr, as.numeric(i)),
NA)
}
rm(i)
}
}
else {
flgcr <- rep(0L, z)
}
pars[, 1] <- pars[, 1] * scl[1]
pars[, 2] <- pars[, 2] * scl[2]
if (match.arg(layout) == "bip") {
if (missing(elv) == TRUE) {
elv <- 0.25
}
else {
ifelse(isTRUE(elv > 1L) == TRUE, elv <- 1L,
NA)
}
bzrc((pars), cex = cx, lty = vlt, col = vecol[vltc],
lwd = lw, elv = elv, ...)
}
else {
if (isTRUE(z == 1L) == TRUE) {
ccbnd(pars, rbds, bds[[k]], vlt, cx * mscl,
lw, vecol, bw, alfa, fds, flgcx, flgcr,
hd, n)
}
else {
ifelse(isTRUE(length(lty) == 1L) == TRUE,
ccbnd(pars, rbds, bds[[k]], vlt1, cx *
mscl, lw, vecol[vltc], bw, alfa, fds,
flgcx, flgcr, hd, n), ccbnd(pars, rbds,
bds[[k]], vlt, cx * mscl, lw, vecol[vltc],
bw, alfa, fds, flgcx, flgcr, hd, n))
}
}
}
else {
NA
}
}
rm(k)
}
else {
NA
}
if (isTRUE(loops == TRUE) == TRUE) {
if (isTRUE(swp == TRUE) == TRUE) {
bdlp <- bd$loop[rev(seq_len(length(bd$loop)))]
if (isTRUE(valued == TRUE) == FALSE) {
NA
}
else {
vecol <- vecol[rev(seq_len(length(vecol)))]
netdrpl <- netdrp[, , rev(seq_len(dim(netdrp)[3]))]
}
}
else {
bdlp <- bd$loop
ifelse(isTRUE(valued == TRUE) == TRUE, netdrpl <- netdrp,
NA)
}
dz <- (rng(z) + abs(min(rng(z))))/(10L)
ndss <- nds
ndss[, 1] <- ndss[, 1] * scl[1]
ndss[, 2] <- ndss[, 2] * scl[2]
if (isTRUE(z == 1L) == TRUE) {
lp <- as.vector(which(diag(net) > 0))
if (isTRUE(length(lp) > 0) == TRUE) {
for (i in seq_len(length(lp))) {
if (isTRUE(n < 3) == TRUE) {
dcx <- (cex[lp[i]] * 0.0075)
lpsz <- (cex[lp[i]] * 0.005) - (dz)
}
else {
dcx <- (cex[lp[i]] * 0.01)
lpsz <- (cex[lp[i]] * 0.0075) - (dz)
}
hc(ndss[lp[i], 1], ndss[lp[i], 2] + (dcx),
lpsz, col = vecol, lty = Lt, lwd = lwd)
}
rm(i)
}
else {
NA
}
}
else if (isTRUE(z > 1) == TRUE) {
ifelse(missing(bwd2) == TRUE, bwd2 <- 1L, NA)
ifelse(missing(bwd2) == FALSE && (isTRUE(bwd2 < 1L) ==
TRUE && isTRUE(bwd2 == 0) == FALSE), bwd2 <- 1L,
NA)
ifelse(missing(bwd2) == FALSE && isTRUE(bwd2 > 2L) ==
TRUE, bwd2 <- 2L, NA)
if (missing(bwd2) == FALSE && isTRUE(bwd2 == 0) ==
TRUE) {
dz <- rep(0, z)
}
else {
if (isTRUE(valued == TRUE) == TRUE) {
dz <- (bwd2 * 1L) * (rng(z) + abs(min(rng(z))))/(5L)
}
else {
dz <- (bwd2 * 1L) * (rng(z) + abs(min(rng(z))))/(10L)
}
}
ifelse(isTRUE(length(lwd) == 1) == TRUE, lwd <- rep(lwd,
z), NA)
for (k in seq_len(length(bdlp))) {
lp <- as.numeric(unique(multiplex::dhc(bdlp)[k][[1]]))
if (isTRUE(length(lp) > 0) == TRUE) {
for (i in seq_len(length(lp))) {
ifelse(isTRUE(cex[lp[i]] <= 3L) == TRUE |
isTRUE(n < 3) == TRUE, dz <- dz * 0.75,
NA)
if (isTRUE(n < 3) == TRUE) {
dcx <- cex[lp[i]]/110L
lpsz <- abs((cex[lp[i]] * 0.007) - dz[k])
}
else {
dcx <- cex[lp[i]]/100L
lpsz <- abs((cex[lp[i]] * 0.0075) - dz[k])
}
ifelse(isTRUE(length(lty) == 1) == TRUE,
Ltl <- rep(lty, length(bdlp)), Ltl <- Lt)
ifelse(isTRUE(valued == TRUE) == TRUE, hc(ndss[lp[i],
1], ndss[lp[i], 2] + (dcx), lpsz, col = grDevices::adjustcolor(vecol[k],
alpha = alfa), lty = Ltl[k], lwd = netdrpl[i,
i, k]), hc(ndss[lp[i], 1], ndss[lp[i],
2] + (dcx), lpsz, col = grDevices::adjustcolor(vecol[k],
alpha = alfa), lty = Ltl[k], lwd = lwd[k]))
hc(ndss[lp[i], 1], ndss[lp[i], 2] + (dcx),
lpsz, col = grDevices::adjustcolor(vecol[k],
alpha = alfa), lty = Lt[k], lwd = lwd[k])
}
rm(i)
}
else {
dz <- append(0, dz)
}
}
rm(k)
}
}
else {
NA
}
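    # draw the vertices; plotting symbols 21-25 use vcol0 for the border and vcol for the fill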
if (all(pch %in% 21:25) == TRUE) {
graphics::points(nds[, 1] * scl[1], nds[, 2] * scl[2],
pch = pch, cex = cex, col = grDevices::adjustcolor(vcol0,
alpha = alpha[1]), bg = grDevices::adjustcolor(vcol,
alpha = alpha[1]))
}
else {
graphics::points(nds[, 1] * scl[1], nds[, 2] * scl[2],
pch = pch, cex = cex, col = grDevices::adjustcolor(vcol,
alpha = alpha[1]), bg = grDevices::adjustcolor(vcol,
alpha = alpha[1]))
}
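    # draw vertex labels according to font family, style, size, and position settings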
if (isTRUE(showLbs == TRUE) == TRUE) {
ndss <- nds
ndss[, 1] <- ndss[, 1] * scl[1]
ndss[, 2] <- ndss[, 2] * scl[2]
ifelse(missing(ffamily) == FALSE && isTRUE(ffamily %in%
names(grDevices::postscriptFonts())) == TRUE, graphics::par(family = ffamily),
NA)
if (isTRUE(length(pos) == 1) == TRUE) {
if (isTRUE(pos == 0) == TRUE) {
if (missing(fstyle) == TRUE || (missing(fstyle) ==
FALSE && isTRUE(fstyle %in% c("italic", "bold",
"bolditalic") == FALSE))) {
graphics::text(ndss, labels = lbs, cex = fsize,
adj = 0.5, col = fcol)
}
else if (missing(fstyle) == FALSE) {
if (isTRUE(fstyle == "italic") == TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(italic(.(x))))), cex = fsize,
adj = 0.5, col = fcol)
}
else if (isTRUE(fstyle == "bold") == TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(bold(.(x))))), cex = fsize,
adj = 0.5, col = fcol)
}
else if (isTRUE(fstyle == "bolditalic") ==
TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(bolditalic(.(x))))),
cex = fsize, adj = 0.5, col = fcol)
}
}
}
else {
if (missing(fstyle) == TRUE || (missing(fstyle) ==
FALSE && isTRUE(fstyle %in% c("italic", "bold",
"bolditalic") == FALSE))) {
graphics::text(ndss, lbs, cex = fsize, pos = pos,
col = fcol, offset = (cex/4L), adj = c(0.5,
1))
}
else if (missing(fstyle) == FALSE) {
if (isTRUE(fstyle == "italic") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(italic(.(x))))), cex = fsize,
pos = pos, col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bold") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bold(.(x))))), cex = fsize,
pos = pos, col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bolditalic") ==
TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bolditalic(.(x))))),
cex = fsize, pos = pos, col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
}
}
}
else if (isTRUE(length(pos) == n) == TRUE) {
if (missing(fstyle) == TRUE || (missing(fstyle) ==
FALSE && isTRUE(fstyle %in% c("italic", "bold",
"bolditalic") == FALSE))) {
graphics::text(ndss, lbs, cex = fsize, pos = pos,
col = fcol[1], offset = (cex/4L), adj = c(0.5,
1))
}
else if (missing(fstyle) == FALSE) {
if (isTRUE(fstyle == "italic") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(italic(.(x))))), cex = fsize,
pos = pos, col = fcol[1], offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bold") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bold(.(x))))), cex = fsize,
pos = pos, col = fcol[1], offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bolditalic") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bolditalic(.(x))))), cex = fsize,
pos = pos, col = fcol[1], offset = (cex/4L),
adj = c(0.5, 1))
}
}
}
else {
if (isTRUE(pos[1] == 0) == TRUE) {
if (missing(fstyle) == TRUE || (missing(fstyle) ==
FALSE && isTRUE(fstyle %in% c("italic", "bold",
"bolditalic") == FALSE))) {
graphics::text(ndss, labels = lbs, cex = fsize,
adj = 0.5, col = fcol)
}
else if (missing(fstyle) == FALSE) {
if (isTRUE(fstyle == "italic") == TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(italic(.(x))))), cex = fsize,
adj = 0.5, col = fcol)
}
else if (isTRUE(fstyle == "bold") == TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(bold(.(x))))), cex = fsize,
adj = 0.5, col = fcol)
}
else if (isTRUE(fstyle == "bolditalic") ==
TRUE) {
graphics::text(ndss, labels = as.expression(lapply(lbs,
function(x) bquote(bolditalic(.(x))))),
cex = fsize, adj = 0.5, col = fcol)
}
}
}
else {
if (missing(fstyle) == TRUE || (missing(fstyle) ==
FALSE && isTRUE(fstyle %in% c("italic", "bold",
"bolditalic") == FALSE))) {
graphics::text(ndss, lbs, cex = fsize, pos = pos[1],
col = fcol, offset = (cex/4L), adj = c(0.5,
1))
}
else if (missing(fstyle) == FALSE) {
if (isTRUE(fstyle == "italic") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(italic(.(x))))), cex = fsize,
pos = pos[1], col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bold") == TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bold(.(x))))), cex = fsize,
pos = pos[1], col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(fstyle == "bolditalic") ==
TRUE) {
graphics::text(ndss, as.expression(lapply(lbs,
function(x) bquote(bolditalic(.(x))))),
cex = fsize, pos = pos[1], col = fcol,
offset = (cex/4L), adj = c(0.5, 1))
}
}
}
}
}
if (isTRUE(showAtts == TRUE) == TRUE) {
ndss <- nds
ndss[, 1] <- ndss[, 1] * scl[1]
ndss[, 2] <- ndss[, 2] * scl[2]
if (isTRUE(flgcrd == TRUE) == TRUE && isTRUE(ncol(coord) >
3L) == TRUE) {
NA
}
else {
atts <- rep("", nrow(nds))
if (missing(att) == FALSE) {
if (is.array(att) == TRUE) {
if (is.na(dim(att)[3]) == TRUE | isTRUE(dim(att)[3] ==
1) == TRUE) {
ifelse(missing(lbat) == FALSE, atts[which((att) !=
0)] <- lbat, atts[which((att) != 0)] <- "1")
}
else {
if (missing(lbat) == FALSE) {
atts[which(diag(multiplex::mnplx(netd,
diag.incl = TRUE)) != 0)] <- lbat
}
else {
dimnames(netd)[[3]] <- NULL
neta <- multiplex::zbind(netd, att)
clss <- multiplex::expos(multiplex::rel.sys(neta,
att = (z + 1L):dim(neta)[3]), classes = TRUE)$Classes
attr(clss, "names")[which(attr(clss, "names") ==
"ALL")] <- multiplex::jnt(dimnames(att)[[3]],
sep = "")
for (i in 2:length(clss)) {
atts[which(lbs %in% clss[[i]])] <- attr(clss,
"names")[i]
}
rm(i)
}
}
}
else if (is.vector(att) == TRUE | is.factor(att) ==
TRUE) {
ifelse(isTRUE(length(att) == n) == TRUE, atts <- as.vector(att),
atts <- rep("", length(lbs)))
}
else {
atts <- rep("", length(lbs))
}
}
else {
NA
}
}
if (isTRUE(flgcx == FALSE) == TRUE) {
graphics::text(ndss, labels = atts, cex = fsize,
pos = pos%%4 + 1L, col = fcol, offset = (cex/4L),
adj = c(0.5, 1))
}
else if (isTRUE(flgcx == TRUE) == TRUE) {
graphics::text(ndss, labels = atts, cex = fsize,
pos = pos%%4 + 1L, col = fcol, offset = (min(cex)/4L),
adj = c(0.5, 1))
}
}
graphics::par(mar = omr)
graphics::par(bg = obg)
graphics::par(lend = 0)
graphics::par(mai = omi)
}
|
store_finam_data = function( from = NULL, to = format( Sys.Date() ), verbose = TRUE ) {
save_dir = .settings$finam_storage
symbols = .settings$finam_symbols
if( save_dir == '' ) stop( 'please set storage path via QuantTools_settings( \'finam_storage\', \'/storage/path/\' ) ' )
if( is.null( symbols ) ) stop( 'please set symbols vector via QuantTools_settings( \'finam_symbols\', c( \'symbol_1\', ...,\'symbol_n\' ) ) ' )
from_is_null = is.null( from )
for( symbol in symbols ) {
if( verbose ) message( symbol )
if( from_is_null ) from = NULL
if( verbose ) message( 'ticks:' )
dates_available = gsub( '.rds', '', list.files( paste( save_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}-\\d{2}.rds' ) )
if( is.null( from ) && length( dates_available ) == 0 ) {
from = .settings$finam_storage_from
if( from == '' ) stop( 'please set Finam storage start date via QuantTools_settings( \'finam_storage_from\', \'YYYYMMDD\' )' )
message( 'not found in storage, \ntrying to download since storage start date' )
}
if( is.null( from ) && to >= max( dates_available ) ) {
from = max( dates_available )
message( paste( 'dates to be added:', from, '-', to ) )
}
from = as.Date( from )
to = as.Date( to )
dates = format( seq( from, to, 1 ) )
for( date in dates ) {
ticks = get_finam_data( symbol, date, period = 'tick' )
if( is.null( ticks ) ) next
dir.create( paste0( save_dir, '/' , symbol ), recursive = TRUE, showWarnings = FALSE )
saveRDS( ticks, file = paste0( save_dir, '/' , symbol, '/', date, '.rds' ) )
if( verbose ) message( paste( date, 'saved' ) )
}
if( verbose ) message( 'minutes:' )
if( from_is_null ) from = NULL
dates_available = gsub( '.rds', '-01', list.files( paste( save_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}.rds' ) )
if( is.null( from ) && length( dates_available ) == 0 ) {
from = .settings$finam_storage_from
if( from == '' ) stop( 'please set Finam storage start date via QuantTools_settings( \'finam_storage_from\', \'YYYYMMDD\' )' )
message( 'not found in storage, \ntrying to download since storage start date' )
}
if( is.null( from ) && to >= max( dates_available ) ) {
from = max( dates_available )
message( paste( 'dates to be added:', from, '-', to ) )
}
from = as.Date( from )
to = as.Date( to )
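    # minute data is downloaded and stored one calendar month per .rds file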
data.table( from = as.Date( unique( format( seq( from, to, 1 ), '%Y-%m-01' ) ) ) )[, to := shift( from - 1, type = 'lead', fill = to ) ][, {
month = format( from, '%Y-%m' )
mins = get_finam_data( symbol, from, to, period = '1min' )
if( !is.null( mins ) ) {
dir.create( paste0( save_dir, '/' , symbol ), recursive = TRUE, showWarnings = FALSE )
saveRDS( mins, file = paste0( save_dir, '/' , symbol, '/', month, '.rds' ) )
if( verbose ) message( paste( month, 'saved' ) )
} else {
if( verbose ) message( paste( month, 'not available' ) )
}
}, by = from ]
}
}
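## Hedged usage sketch (not part of the original source): the settings names come
## from the stop() messages above; the symbols and start date are illustrative only.
if( FALSE ) {
  QuantTools_settings( 'finam_storage', '/storage/finam' )
  QuantTools_settings( 'finam_symbols', c( 'GAZP', 'SBER' ) )
  QuantTools_settings( 'finam_storage_from', '2016-01-01' )
  # download and save tick and minute data up to today
  store_finam_data()
}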
store_iqfeed_data = function( from = NULL, to = format( Sys.Date() ), verbose = TRUE ) {
.store_iqfeed_data_mins ( from, to, verbose )
.store_iqfeed_data_ticks( from, to, verbose )
}
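## Hedged usage sketch (not part of the original source): settings names are taken
## from the helper functions below; the symbol is only an example.
if( FALSE ) {
  QuantTools_settings( 'iqfeed_storage', '/storage/iqfeed' )
  QuantTools_settings( 'iqfeed_symbols', c( 'AAPL' ) )
  QuantTools_settings( 'iqfeed_storage_from', '2017-01-01' )
  # store minute data first, then tick data
  store_iqfeed_data()
}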
.store_iqfeed_data_ticks = function( from = NULL, to = format( Sys.Date() ), verbose = TRUE ) {
save_dir = .settings$iqfeed_storage
symbols = .settings$iqfeed_symbols
if( save_dir == '' ) stop( 'please set storage path via QuantTools_settings( \'iqfeed_storage\', \'/storage/path/\' ) ' )
if( is.null( symbols ) ) stop( 'please set symbols vector via QuantTools_settings( \'iqfeed_symbols\', c( \'symbol_1\', ...,\'symbol_n\' ) ) ' )
from_is_null = is.null( from )
for( symbol in symbols ) {
if( verbose ) message( symbol )
if( from_is_null ) from = NULL
dates_available = gsub( '.rds', '', list.files( paste( save_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}-\\d{2}.rds' ) )
if( is.null( from ) && length( dates_available ) == 0 ) {
from = .settings$iqfeed_storage_from
if( from == '' ) stop( 'please set iqfeed storage start date via QuantTools_settings( \'iqfeed_storage_from\', \'YYYYMMDD\' )' )
message( 'not found in storage, \ntrying to download since storage start date' )
}
if( is.null( from ) && to >= max( dates_available ) ) {
from = max( dates_available )
message( paste( 'dates to be added:', from, '-', to ) )
}
curr_time = Sys.time()
attr( curr_time, 'tzone' ) = 'America/New_York'
if( format( curr_time, '%H:%M' ) %bw% c( '09:30', '16:00' ) && diff( as.Date( c( from, to ) ) ) > as.difftime( 3, units = 'days' ) ) {
      msg = 'please download data outside trading hours [ 09:30 - 16:00 America/New York ]'
      if( length( dates_available ) == 0 ) {
        message( msg )
        next
      } else {
        stop( msg )
      }
}
ticks = get_iqfeed_data( symbol, from, to, period = 'tick' )
if( !is.null( ticks ) ) {
dir.create( paste0( save_dir, '/' , symbol ), recursive = TRUE, showWarnings = FALSE )
time = NULL
ticks[, date := format( time, '%Y-%m-%d' ) ]
ticks[ , {
saveRDS( .SD, file = paste0( save_dir, '/' , symbol, '/', date, '.rds' ) )
if( verbose ) message( paste( date, 'saved' ) )
}, by = date ]
}
}
}
.store_iqfeed_data_mins = function( from = NULL, to = format( Sys.Date() ), verbose = TRUE ) {
save_dir = .settings$iqfeed_storage
symbols = .settings$iqfeed_symbols
if( save_dir == '' ) stop( 'please set storage path via QuantTools_settings( \'iqfeed_storage\', \'/storage/path/\' ) ' )
if( is.null( symbols ) ) stop( 'please set symbols vector via QuantTools_settings( \'iqfeed_symbols\', c( \'symbol_1\', ...,\'symbol_n\' ) ) ' )
from_is_null = is.null( from )
for( symbol in symbols ) {
if( verbose ) message( symbol )
if( from_is_null ) from = NULL
months_available = gsub( '.rds', '', list.files( paste( save_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}.rds' ) )
if( is.null( from ) && length( months_available ) == 0 ) {
from = .settings$iqfeed_storage_from
if( from == '' ) stop( 'please set iqfeed storage start date via QuantTools_settings( \'iqfeed_storage_from\', \'YYYYMMDD\' )' )
message( 'not found in storage, \ntrying to download since storage start date' )
}
if( is.null( from ) && substr( to, 1, 7 ) >= max( months_available ) ) {
from = max( months_available )
message( paste( 'months to be added:', from, '-', substr( to, 1, 7 ) ) )
}
mins = get_iqfeed_data( symbol, paste0( substr( from, 1, 7 ), '-01' ), format( as.Date( to ) + 31 ), period = '1min' )
if( !is.null( mins ) && nrow( mins ) != 0 ) {
dir.create( paste0( save_dir, '/' , symbol ), recursive = TRUE, showWarnings = FALSE )
time = NULL
mins[, month := format( time, '%Y-%m' ) ]
mins[ , {
saveRDS( .SD, file = paste0( save_dir, '/' , symbol, '/', month, '.rds' ) )
if( verbose ) message( paste( month, 'saved' ) )
}, by = month ]
}
}
}
.get_local_data = function( symbol, from, to, source, period ) {
data_dir = switch( source, finam = .settings$finam_storage, iqfeed = .settings$iqfeed_storage )
if( data_dir == '' ) stop( paste0('please set storage path via QuantTools_settings( \'', source, '_storage\', \'/storage/path/\' )
use store_', source, '_data to add some data into the storage' ) )
if( period == 'tick' ) {
dates_available = gsub( '.rds', '', list.files( paste( data_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}-\\d{2}.rds' ) )
dates_to_load = sort( dates_available[ dates_available %bw% substr( c( from, to ), 1, 10 ) ] )
data = vector( length( dates_to_load ), mode = 'list' )
names( data ) = dates_to_load
for( date in dates_to_load ) data[[ date ]] = readRDS( file = paste0( data_dir, '/' , symbol, '/', date, '.rds' ) )
data = rbindlist( data )
time_range = as.POSIXct( format( as.Date( c( from, to ) ) + c( 0, 1 ) ), 'UTC' )
time = NULL
if( !is.null( data ) ) data = data[ time > time_range[1] & time < time_range[2] ]
return( data )
}
if( period == '1min' ) {
months_available = gsub( '.rds', '', list.files( paste( data_dir, symbol, sep = '/' ), pattern = '\\d{4}-\\d{2}.rds' ) )
months_to_load = sort( months_available[ months_available %bw% substr( c( from, to ), 1, 7 ) ] )
if( length( months_to_load ) == 0 ) return( NULL )
data = vector( length( months_to_load ), mode = 'list' )
names( data ) = months_to_load
for( month in months_to_load ) data[[ month ]] = readRDS( file = paste0( data_dir, '/' , symbol, '/', month, '.rds' ) )
data = rbindlist( data )
time_range = as.POSIXct( format( as.Date( c( from, to ) ) + c( 0, 1 ) ), 'UTC' )
time = NULL
if( !is.null( data ) ) data = data[ time > time_range[1] & time < time_range[2] ]
return( data )
}
}
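## Hedged usage sketch (not part of the original source): reads previously stored
## data back from disk; the symbol and date range are illustrative only.
if( FALSE ) {
  mins = .get_local_data( symbol = 'GAZP', from = '2017-01-01', to = '2017-01-31',
                          source = 'finam', period = '1min' )
}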
store_moex_data = function( from = NULL, to = format( Sys.Date() ), verbose = TRUE ) {
save_dir = .settings$moex_storage
if( save_dir == '' ) stop( 'please set storage path via QuantTools_settings( \'moex_storage\', \'/storage/path/\' ) ' )
temp_dir = .settings$temp_directory
  if( temp_dir == '' ) stop( 'please set temp directory path via QuantTools_settings( \'temp_directory\', \'/temp/directory/path/\' ) ' )
from_is_null = is.null( from )
if( from_is_null ) from = NULL
dates_available = gsub( '.rds', '', list.files( paste0( save_dir, '/futures/' ), pattern = '.rds' ) )
if( is.null( from ) && length( dates_available ) == 0 ) {
from = .settings$moex_storage_from
if( from == '' ) stop( 'please set moex storage start date via QuantTools_settings( \'moex_storage_from\', \'YYYYMMDD\' )' )
message( 'no data found in storage, \ntrying to download since storage start date' )
}
if( is.null( from ) && to >= max( dates_available ) ) {
from = max( dates_available )
message( paste( 'dates to be added:', from, '-', to ) )
}
for( date in as.Date( from ):as.Date( to ) ) {
date = as.Date( date, origin = '1970-01-01' )
dir_fut = paste0( save_dir, '/futures' )
dir_opt = paste0( save_dir, '/options' )
dir.create( dir_fut, recursive = T, showWarnings = F )
dir.create( dir_opt, recursive = T, showWarnings = F )
file_fut = paste0( dir_fut, '/', format( date ), '.rds' )
file_opt = paste0( dir_opt, '/', format( date ), '.rds' )
year = format( date, '%Y' )
yymmdd = format( date, '%y%m%d')
data_url = .settings$moex_data_url
if( !RCurl::url.exists( data_url ) ) stop( 'please set MOEX data url via QuantTools_settings( \'moex_data_url\', \'/moe/data/url/\' )' )
url = paste0( data_url, '/', year, '/FT', yymmdd, '.ZIP')
if( !RCurl::url.exists( url ) ) next
file_zip = paste0( temp_dir, '/', yymmdd, '.zip' )
unlink( list.files( temp_dir, full.names = T ), force = T, recursive = T )
dir.create( temp_dir, recursive = T, showWarnings = F )
download.file( url, destfile = file_zip, mode = 'wb', quiet = T )
unzip( file_zip, exdir = temp_dir )
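    # the daily archive contains futures (FT*) and options (OT*) trade files;
    # older archives ship as .xls workbooks, newer ones as delimited text read with fread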
files = list.files( temp_dir, pattern = 'ft|ot|FT|OT', recursive = T, full.names = T )
ft = files[ grepl( 'ft', tolower( files ) ) ]
ot = files[ grepl( 'ot', tolower( files ) ) ]
is_xls = grepl( '.xls', tolower( ft ) )
format_trades = function( trades ) {
code = contract = dat_time = NULL
trades[, code := as.factor( code ) ]
trades[, contract := as.factor( contract ) ]
trades[, dat_time := fasttime::fastPOSIXct( dat_time, 'UTC' ) ]
}
if( is_xls ) {
. = capture.output( { sheets = readxl::excel_sheets( ft ) } )
fut_sheet = sheets[ grepl( 'fut.*trade', sheets ) ]
opt_sheet = sheets[ grepl( 'opt.*trade', sheets ) ]
      if( length( fut_sheet ) == 0 ) {
message( 'no futures trades sheet available' )
} else {
. = capture.output( { trades = setDT( readxl::read_excel( ft, sheet = fut_sheet ) ) } )
format_trades( trades )
saveRDS( trades, file_fut )
}
      if( length( opt_sheet ) == 0 ) {
message( 'no options trades sheet available' )
} else {
. = capture.output( { trades = setDT( readxl::read_excel( ft, sheet = opt_sheet ) ) } )
format_trades( trades )
saveRDS( trades, file_opt )
}
if( verbose ) message( date, ' saved' )
} else {
      if( length( ft ) == 0 ) {
message( 'no futures trades file available' )
} else {
trades = fread( ft )
format_trades( trades )
saveRDS( trades, file_fut )
}
      if( length( ot ) == 0 ) {
message( 'no options trades file available' )
} else {
if( date == as.Date( '2008-09-15' ) ) next
trades = fread( ot )
format_trades( trades )
saveRDS( trades, file = file_opt )
}
if( verbose ) message( date, ' saved' )
}
}
}
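## Hedged usage sketch (not part of the original source): settings names follow the
## stop() messages above; the data url, paths, and start date are placeholders.
if( FALSE ) {
  QuantTools_settings( 'moex_storage', '/storage/moex' )
  QuantTools_settings( 'temp_directory', '/storage/temp' )
  QuantTools_settings( 'moex_data_url', 'ftp://host/path' )
  QuantTools_settings( 'moex_storage_from', '2015-01-01' )
  store_moex_data()
}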
|
calculatePeaks <- function(data, bins = c(0, 2, 3), labels = NULL,
ol.rm = FALSE, by.marker = FALSE, debug = FALSE) {
if (debug) {
print(paste("IN:", match.call()[[1]]))
print("data:")
print(str(data))
print("bins:")
print(bins)
print("labels:")
print(labels)
print("ol.rm:")
print(ol.rm)
print("by.marker:")
print(by.marker)
}
if (length(bins) != length(labels) - 1) {
stop("'bins' must be a vector of length 1 less than 'labels'!")
}
if (!is.logical(ol.rm)) {
stop("'ol.rm' must be logical!")
}
if (!is.logical(by.marker)) {
stop("'by.marker' must be logical!")
}
if (!"Sample.Name" %in% names(data)) {
stop("'data' must contain a column 'Sample.Name'.")
}
if (!"Height" %in% names(data)) {
stop("'data' must contain a column 'Height'.")
}
if (by.marker) {
if (!"Marker" %in% names(data)) {
stop("'data' must contain a column 'Marker'.")
}
}
if (!is.vector(labels)) {
stop("'labels' must be a character vector.")
}
if (!is.vector(bins)) {
stop("'bins' must be a numeric vector.")
}
if (ol.rm) {
data <- data[data$Allele != "OL" | is.na(data$Allele), ]
}
if (!is.numeric(bins)) {
message("'bins' not numeric. Converting to numeric.")
bins <- as.numeric(bins)
}
if (!is.character(labels)) {
message("'labels' not character. Converting to character.")
labels <- as.character(labels)
}
if (!is.numeric(data$Height)) {
message("'Height' not numeric. Converting to numeric.")
data$Height <- as.numeric(data$Height)
}
if ("Peaks" %in% names(data)) {
message("A column 'Peaks' already exist. It will be overwritten.")
data$Peaks <- NULL
}
if ("Group" %in% names(data)) {
message("A column 'Group' already exist. It will be overwritten.")
data$Group <- NULL
}
if ("Id" %in% names(data)) {
message("A column 'Id' already exist. It will be overwritten.")
data$Id <- NULL
}
data$Peaks <- as.integer(NA)
data$Group <- as.character(NA)
data$Id <- paste(data$Sample.Name, data$File.Name, sep = "_")
DT <- data.table::data.table(data)
if (by.marker) {
message("Counting number of peaks by marker...")
DT[, Peaks := sum(!is.na(Height), na.rm = TRUE), by = list(Id, Marker)]
} else {
message("Counting number of peaks by sample...")
DT[, Peaks := sum(!is.na(Height), na.rm = TRUE), by = list(Id)]
}
DT$Group <- as.character(NA)
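  # Label the counts: the first label covers Peaks <= bins[1], the last label covers
  # Peaks >= bins[length(bins)], and intermediate labels cover the half-open
  # intervals (bins[g - 1], bins[g]].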
if (length(labels) >= 1) {
message("Adding first group label...")
DT[Peaks <= bins[1]]$Group <- labels[1]
}
if (length(labels) >= 2) {
message("Adding last group label...")
DT[Peaks >= bins[length(bins)]]$Group <- labels[length(labels)]
}
if (length(labels) > 2) {
message("Adding other group labels...")
for (g in seq(from = 2, to = length(bins))) {
DT[Peaks > bins[g - 1] & Peaks <= bins[g]]$Group <- labels[g]
}
}
  data <- as.data.frame(DT)
data$Group <- factor(data$Group, levels = labels)
data <- auditTrail(obj = data, f.call = match.call(), package = "strvalidator")
if (debug) {
print("data:")
print(str(data))
print(paste("EXIT:", match.call()[[1]]))
}
return(data)
}
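## Hedged usage sketch (not part of the original source): a minimal data frame with
## the required columns; sample names, markers, and heights are invented for illustration.
if (FALSE) {
  df <- data.frame(
    Sample.Name = c("A", "A", "A", "B"),
    Marker = c("D3S1358", "D3S1358", "vWA", "D3S1358"),
    Allele = c("15", "16", "OL", "17"),
    Height = c(500, 450, 60, 300),
    File.Name = "run1.fsa",
    stringsAsFactors = FALSE
  )
  # bins of length 3 require 4 labels (one per resulting group)
  res <- calculatePeaks(
    data = df, bins = c(0, 2, 3),
    labels = c("No peaks", "1-2 peaks", "3 peaks", ">3 peaks"),
    ol.rm = TRUE, by.marker = FALSE
  )
}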
|
test_that("retrofit_files() works", {
local_reprex_loud()
expect_equal(
retrofit_files(infile = NULL, wd = NULL, outfile = "DEPRECATED"),
list(infile = NULL, wd = NULL)
)
expect_equal(
retrofit_files(infile = "foo.R", wd = "whatever", outfile = "DEPRECATED"),
list(infile = "foo.R", wd = "whatever")
)
expect_snapshot(
retrofit_files(wd = "this", outfile = "that")
)
expect_snapshot(
retrofit_files(outfile = NA),
)
expect_snapshot(
retrofit_files(outfile = "some/path/blah")
)
expect_snapshot(
retrofit_files(infile = "a/path/foo.R", outfile = NA)
)
expect_snapshot(
retrofit_files(infile = "a/path/foo.R", outfile = "other/path/blah")
)
})
test_that("we don't add a suffix more than once", {
x <- "blah_r.R"
expect_equal(x, add_suffix(x, suffix = "r"))
})
|