code
stringlengths 1
13.8M
|
---|
[
{
"title": "googleVis 0.2.9",
"href": "http://www.magesblog.com/2011/09/googlevis-029.html"
},
{
"title": "Statistical tests for variable selection",
"href": "http://robjhyndman.com/hyndsight/tests2/"
},
{
"title": "There is no Such Thing as Biomedical \"Big Data\"",
"href": "http://www.gettinggeneticsdone.com/2014/02/no-such-thing-biomedical-bigdata.html"
},
{
"title": "Generating and Visualizing Multivariate Data with R",
"href": "http://blog.revolutionanalytics.com/2016/02/multivariate_data_with_r.html"
},
{
"title": "When k-means Clustering Fails",
"href": "http://mazamascience.com/WorkingWithData/?p=1694"
},
{
"title": "Weekend Reading – Gold in October",
"href": "https://systematicinvestor.wordpress.com/2012/09/29/weekend-reading-gold-in-october/"
},
{
"title": "R Tools for Visual Studio 3.0 now available",
"href": "http://blog.revolutionanalytics.com/2016/05/r-tools-for-visual-studio-30-now-available.html"
},
{
"title": "EU rules that computer languages cannot be copyrighted",
"href": "http://shape-of-code.coding-guidelines.com/2012/05/02/eu-rules-that-computer-languages-cannot-be-copyrighted/"
},
{
"title": "How to download complete XML records from PubMed and extract data",
"href": "http://rpsychologist.com/how-to-download-complete-xml-records-from-pubmed-and-extract-data/"
},
{
"title": "Code Snippet : List of CRAN packages",
"href": "http://romainfrancois.blog.free.fr/index.php?post/2009/08/05/Code-Snippet-%3A-List-of-CRAN-packages"
},
{
"title": "rlist: a new package for working with list objects in R",
"href": "https://renkun.me/blog/2014/06/26/rlist-a-new-package-for-working-with-list-objects-in-r.html"
},
{
"title": "R 3.1 -> 3.2 upgrade notes",
"href": "https://nsaunders.wordpress.com/2015/04/20/r-3-1-3-2-upgrade-notes/"
},
{
"title": "Solving Tic-Tac-Toe with R data.tree",
"href": "http://ipub.com/tic-tac-toe/"
},
{
"title": "The Complexities of Customer Segmentation: Removing Response Intensity to Reveal Response Pattern",
"href": "http://joelcadwell.blogspot.com/2013/12/the-complexities-of-customer.html"
},
{
"title": "Announcing RStudio v1.0!",
"href": "https://blog.rstudio.org/2016/11/01/announcing-rstudio-v1-0/"
},
{
"title": "NCEP Global Forecast System",
"href": "http://joewheatley.net/ncep-global-forecast-system/"
},
{
"title": "Rrrrr! It’s Talk Like a Pirate Day!",
"href": "http://blog.revolutionanalytics.com/2013/09/rrrrr-its-talk-like-a-pirate-day.html"
},
{
"title": "Coffee and Productivity",
"href": "http://freakonometrics.hypotheses.org/16010"
},
{
"title": "knitr, github, and a new phase for the lab notebook",
"href": "https://web.archive.org/web/http://www.carlboettiger.info/wordpress/archives/4325"
},
{
"title": "Internal Consistency Reliability in R with Lambda4",
"href": "https://web.archive.org/web/http://blog.psychoanalytix.com/post/39811913495"
},
{
"title": "[Project Euler] – Problem 57",
"href": "https://web.archive.org/web/http://ygc.name/2011/05/20/project-euler-problem-57/"
},
{
"title": "Merge Relational Dataframes",
"href": "https://web.archive.org/web/https://fishr.wordpress.com/2013/11/11/merge-relational-dataframes/"
},
{
"title": "An Image Crossfader Function",
"href": "http://thebiobucket.blogspot.com/2011/11/image-crossfader-function.html"
},
{
"title": "With With",
"href": "http://www.stat.tamu.edu/site-directory/?q=node%2F54"
},
{
"title": "Another take on building a multi-lingual shiny app",
"href": "https://feedproxy.google.com/~r/ChristopheLadroueR/~3/-z9VGMGTpzw/"
},
{
"title": "Introduction to R",
"href": "http://blog.lunean.com/2016/01/18/introduction-to-r/"
},
{
"title": "Installing XGBoost on Ubuntu",
"href": "http://www.exegetic.biz/blog/2015/12/installing-xgboost-ubuntu/"
},
{
"title": "The ARIMAX model muddle",
"href": "http://robjhyndman.com/hyndsight/arimax/?utm_source=rss&utm_medium=rss&utm_campaign=arimax"
},
{
"title": "AdfTest Function Enhanced With Rcpp Armadillo",
"href": "https://web.archive.org/web/http://blog.quanttrader.org/2012/04/adftest-function-enhanced-with-rcpp-armadillo/"
},
{
"title": "Playing with leafletR",
"href": "http://tuxette.nathalievilla.org/?p=1659&lang=en"
},
{
"title": "Basic Data Types in r",
"href": "http://www.dataperspective.info/2016/02/basic-data-types-in-r.html"
},
{
"title": "Fast Threshold Clustering Algorithm (FTCA) test",
"href": "https://systematicinvestor.wordpress.com/2013/11/28/fast-threshold-clustering-algorithm-ftca-test/"
},
{
"title": "2013-1 TimingManager: Animation Sequences in JavaScript",
"href": "http://stattech.wordpress.fos.auckland.ac.nz/2012-13-timingmanager-animation-sequences-in-javascript/"
},
{
"title": "remote is the new Reot",
"href": "https://metvurst.wordpress.com/2015/01/03/remote-is-the-new-reot/"
},
{
"title": "Parallelization using plyr: loading objects and packages into worker nodes",
"href": "http://www.numbertheory.nl/2011/11/14/parallelization-using-plyr-loading-objects-and-packages-into-worker-nodes/"
},
{
"title": "R for Publication by Page Piccinini: Lesson 2 – Linear Regression",
"href": "http://datascienceplus.com/r-for-publication-by-page-piccinini-lesson-2-linear-regression/"
},
{
"title": "Julian Besag 1945-2010",
"href": "https://xianblog.wordpress.com/2010/08/07/julian-besag-1945-2010/"
},
{
"title": "Annotated source code",
"href": "http://digitheadslabnotebook.blogspot.com/2011/02/annotated-source-code.html"
},
{
"title": "Volcanic Solar Dimming, ENSO and Temperature Anomalies",
"href": "https://web.archive.org/web/https://chartsgraphs.wordpress.com/2011/01/21/volcanic-solar-dimming-enso-and-temperature-anomalies/"
},
{
"title": "Visualizing MLS Player Salaries with ggplot2",
"href": "http://datascienceplus.com/visualizing-mls-player-salaries/"
},
{
"title": "Speed up your R code using a just-in-time (JIT) compiler",
"href": "https://www.r-statistics.com/2012/04/speed-up-your-r-code-using-a-just-in-time-jit-compiler/"
},
{
"title": "Working with the RStudio CRAN logs",
"href": "http://blog.revolutionanalytics.com/2015/06/working-with-the-rstudio-cran-logs.html"
},
{
"title": "Body Weight in the United States – Part 3, \"Contributing Factors\"",
"href": "https://feedproxy.google.com/~r/graphoftheweek/fzVA/~3/pPXyjBvMAhc/body-weight-in-united-states-part-3.html"
},
{
"title": "The biomaRt package",
"href": "https://logfc.wordpress.com/2014/10/02/biomart/"
},
{
"title": "Central Limit Theorem\r\nA nice illustration of the Central Limit…",
"href": "https://web.archive.org/web/http://isomorphism.es//post/615407591"
},
{
"title": "Ripley Facts",
"href": "http://memosisland.blogspot.com/2013/06/ripley-facts.html"
},
{
"title": "R GIS: Generalizer for KML Paths",
"href": "http://thebiobucket.blogspot.com/2014/05/r-gis-generalizer-for-kml-paths.html"
},
{
"title": "Analyzing a simple experiment with heterogeneous variances using asreml, MCMCglmm and SAS",
"href": "http://www.quantumforest.com/2013/05/analyzing-a-simple-experiment-with-heterogeneous-variances-using-asreml-mcmcglmm-and-sas/"
},
{
"title": "Anova – Type I/II/III SS explained",
"href": "https://mcfromnz.wordpress.com/2011/03/02/anova-type-iiiiii-ss-explained/"
},
{
"title": "Showcasing the latest phylogenetic methods: AUTEUR",
"href": "https://web.archive.org/web/http://www.carlboettiger.info/wordpress/archives/2337"
}
] |
context("UNFv6: Missing Values")
# A single NA must map to the canonical UNF of a missing value.
# FIX: the original file contained this test twice, verbatim; the
# duplicate has been removed.
test_that("Missing values calculated correctly", {
  expect_equal(unf6(NA)$unf, "cJ6AyISHokEeHuTfufIqhg==")
})
# With nonfinites_as_missing = TRUE, NaN and +/-Inf hash identically
# to a missing value.
test_that("Nonfinites optionally treated as NA", {
  expect_equal(unf6(NaN, nonfinites_as_missing=TRUE)$unf, "cJ6AyISHokEeHuTfufIqhg==")
  expect_equal(unf6(Inf, nonfinites_as_missing=TRUE)$unf, "cJ6AyISHokEeHuTfufIqhg==")
  expect_equal(unf6(-Inf, nonfinites_as_missing=TRUE)$unf, "cJ6AyISHokEeHuTfufIqhg==")
})
"pisa2012_q_marginal" |
rBetaCopula <- function(x, n) {
  # Draw n observations from the empirical beta copula of the sample x
  # (delegates the actual sampling to C code in the npcp package).
  #
  # x: data, coerced to a matrix if necessary (columns = margins)
  # n: number of draws to generate (>= 1)
  # Returns an n x ncol(x) matrix of simulated observations.
  if (!is.matrix(x)) {
    warning("coercing 'x' to a matrix.")
    stopifnot(is.matrix(x <- as.matrix(x)))
  }
  stopifnot(n >= 1L)
  nobs <- nrow(x)
  nvar <- ncol(x)
  ranks <- apply(x, 2, rank)
  draws <- .C("rBetaCopula",
              as.integer(ranks),
              as.integer(nobs),
              as.integer(nvar),
              as.integer(n),
              x = double(n * nvar),
              PACKAGE = "npcp")$x
  matrix(draws, n, nvar)
}
rBetaCopulaRanks <- function(r, n) {
  # Same sampler as rBetaCopula(), but takes a precomputed rank matrix 'r'
  # and returns the draws as a flat vector of length n * ncol(r) (the
  # callers feed this straight into .C, so no reshaping is done here).
  dims <- dim(r)
  .C("rBetaCopula",
     as.integer(r),
     as.integer(dims[1]),
     as.integer(dims[2]),
     as.integer(n),
     x = double(n * dims[2]),
     PACKAGE = "npcp")$x
}
simCpDist <- function(x.learn = NULL, m = NULL, n, gamma = 0.25, delta = 1e-4,
                      B = 1000,
                      method = c("sim", "beta", "mult"),
                      b = NULL, weights = c("parzen", "bartlett"),
                      g = 5, L.method = c("max","median","mean","min")) {
  ## Simulate the null distribution of the sequential change-point
  ## detector statistics (mac, mmc, mmk, mc, mk) over a monitoring
  ## period of length n - m, given a learning sample of size m.
  ##
  ## FIX: the body already handled method == "beta" (resampling from the
  ## empirical beta copula of 'x.learn'), but "beta" was missing from the
  ## 'method' choices, so match.arg() made that branch unreachable.
  ## Adding it is backward compatible (the default is still "sim").
  method <- match.arg(method)
  weights <- match.arg(weights)
  L.method <- match.arg(L.method)
  if (is.null(x.learn)) {
    if (is.null(m))
      stop("either 'x.learn' or 'm' needs to be specified")
    else {
      d <- 1
      if (method != "sim")
        stop("if 'x.learn' is not specified, only 'method = \"sim\"' can be used")
    }
  }
  else {
    if(!is.matrix(x.learn)) {
      warning("coercing 'x.learn' to a matrix.")
      stopifnot(is.matrix(x.learn <- as.matrix(x.learn)))
    }
    d <- ncol(x.learn)
    if (is.null(m))
      m <- nrow(x.learn)
    else
      stopifnot(nrow(x.learn) == m)
  }
  stopifnot(m > 1L && n > m)
  stopifnot(gamma >= 0 && gamma <= 0.5)
  stopifnot(delta >= 0 && delta <= 1)
  nm <- n - m
  if (method == "sim") {
    ## Univariate i.i.d. case: the null distribution is distribution-free,
    ## so simulate directly from independent uniforms.
    if (d > 1L)
      stop("Setting 'method = \"sim\"' is possible only for univariate (independent) observations")
    do1 <- function() {
      stat <- .C("seqCpDistStat",
                 as.double(runif(n)),
                 as.integer(m),
                 as.integer(n),
                 as.integer(d),
                 mac = double(nm),
                 mmc = double(nm),
                 mmk = double(nm),
                 mc = double(nm),
                 mk = double(nm),
                 as.double(gamma),
                 as.double(delta),
                 wmc = integer(nm),
                 wmk = integer(nm),
                 as.integer(1),
                 PACKAGE = "npcp")
      c(stat$mac, stat$mmc, stat$mmk, stat$mc, stat$mk)
    }
    rep <- t(replicate(B, do1()))
    mac0 <- rep[,seq.int(1,nm)]
    mmc0 <- rep[,seq.int(nm+1,2*nm)]
    mmk0 <- rep[,seq.int(2*nm+1,3*nm)]
    mc0 <- rep[,seq.int(3*nm+1,4*nm)]
    mk0 <- rep[,seq.int(4*nm+1,5*nm)]
    pmax <- nm
  }
  else if (method == "beta") {
    ## Multivariate i.i.d. case: resample from the empirical beta copula
    ## fitted on the learning sample.
    if (d <= 1L)
      stop("Setting 'method = \"beta\"' is possible only for multivariate (independent) observations")
    r <- apply(x.learn, 2, rank)
    do1 <- function() {
      stat <- .C("seqCpDistStat",
                 as.double(rBetaCopulaRanks(r, n)),
                 as.integer(m),
                 as.integer(n),
                 as.integer(d),
                 mac = double(nm),
                 mmc = double(nm),
                 mmk = double(nm),
                 mc = double(nm),
                 mk = double(nm),
                 as.double(gamma),
                 as.double(delta),
                 wmc = integer(nm),
                 wmk = integer(nm),
                 as.integer(1),
                 PACKAGE = "npcp")
      c(stat$mac, stat$mmc, stat$mmk, stat$mc, stat$mk)
    }
    rep <- t(replicate(B, do1()))
    mac0 <- rep[,seq.int(1,nm)]
    mmc0 <- rep[,seq.int(nm+1,2*nm)]
    mmk0 <- rep[,seq.int(2*nm+1,3*nm)]
    mc0 <- rep[,seq.int(3*nm+1,4*nm)]
    mk0 <- rep[,seq.int(4*nm+1,5*nm)]
    pmax <- nm
  }
  else {
    ## Serially dependent case: dependent-multiplier bootstrap carried
    ## out entirely in C; the monitoring grid is shortened to m - mm.
    mm <- floor(m * m / n)
    mmm <- m - mm
    if (is.null(b))
      b <- bOptEmpProc(x.learn, m = g, weights = weights,
                       L.method = L.method)
    stopifnot(b >= 1L)
    init.seq <- rnorm(B * (m + 2 * (b - 1)))
    rep <- .C("seqCpDistMultNonSeq",
              as.double(x.learn),
              as.integer(m),
              as.integer(n),
              as.integer(d),
              as.integer(B),
              as.integer(1),
              as.integer(b),
              mac0 = double(B * mmm),
              mmc0 = double(B * mmm),
              mmk0 = double(B * mmm),
              mc0 = double(B * mmm),
              mk0 = double(B * mmm),
              as.double(gamma),
              as.double(delta),
              as.double(init.seq),
              as.integer(1),
              PACKAGE = "npcp")
    mac0 <- matrix(rep$mac0, B, mmm, byrow = TRUE)
    mmc0 <- matrix(rep$mmc0, B, mmm, byrow = TRUE)
    mmk0 <- matrix(rep$mmk0, B, mmm, byrow = TRUE)
    mc0 <- matrix(rep$mc0, B, mmm, byrow = TRUE)
    mk0 <- matrix(rep$mk0, B, mmm, byrow = TRUE)
    pmax <- mmm
  }
  ## Relative time grid of the monitoring period (t/m scale).
  time.grid <- if (method %in% c("sim", "beta")) seq.int(m+1, n) / m
               else seq.int(mm + 1, m) / mm
  ## Scaling exponents associated with each statistic.
  smac <- 4 * (1 - gamma)
  smmc <- 3 - 4 * gamma
  smmk <- 1.5 - 2 * gamma
  smc <- 2
  smk <- 1
  structure(class = "sims.cpDist",
            list(mac = mac0, mmc = mmc0, mmk = mmk0, mc = mc0, mk = mk0,
                 d = d, m = m, n = n, gamma = gamma, delta = delta,
                 B = B, method = method, pmax = pmax,
                 time.grid = time.grid, smac = smac, smmc = smmc,
                 smmk = smmk, smc = smc, smk = smk))
}
threshCpDist <- function(sims,
                         p = 1, alpha = 0.05, type = 7) {
  ## Compute threshold functions for the five detector statistics from
  ## null simulations produced by simCpDist(), using the sequential
  ## conditional-quantile construction with p blocks.
  ##
  ## FIX: the original body hard-coded method <- "cond" and scale <- FALSE,
  ## which made the whole "center.max"/"scale.max" else-branch unreachable
  ## dead code; it has been removed with no change in behavior.
  if (!inherits(sims, "sims.cpDist"))
    stop("'sims' should be obtained by 'simCpDist()'")
  scale <- FALSE  # scaled thresholds are disabled; kept for the output slot
  nm <- sims$n - sims$m
  pmax <- sims$pmax
  stopifnot(alpha > 0 && alpha <= 0.5)
  ## With scale = FALSE these are simply the raw simulated statistics.
  mac0 <- if (scale) sims$mac %*% diag(sims$time.grid^-sims$smac) else sims$mac
  mmc0 <- if (scale) sims$mmc %*% diag(sims$time.grid^-sims$smmc) else sims$mmc
  mmk0 <- if (scale) sims$mmk %*% diag(sims$time.grid^-sims$smmk) else sims$mmk
  mc0 <- if (scale) sims$mc %*% diag(sims$time.grid^-sims$smc) else sims$mc
  mk0 <- if (scale) sims$mk %*% diag(sims$time.grid^-sims$smk) else sims$mk
  if (is.null(p))
    stop("The value of 'p' needs to be specified when 'method = \"cond\"'")
  stopifnot(p >= 1L)
  if (p > pmax) stop("The maximum possible value for 'p' is ", pmax)
  ## Split the pmax simulated time points into p nearly equal blocks.
  bs <- pmax %/% p
  s <- rep(bs, p)
  r <- pmax - p * bs
  if (r > 0) s[seq_len(r)] <- bs + 1
  bl <- c(0, cumsum(s))
  ## Block lengths used when replicating thresholds over the full
  ## monitoring period (differs for the multiplier method).
  if (sims$method %in% c("sim", "beta"))
    st <- s
  else {
    bs <- nm %/% p
    st <- rep(bs, p)
    r <- nm - p * bs
    if (r > 0) st[seq_len(r)] <- bs + 1
  }
  q.prob <- (1 - alpha)^(1/p)
  ## The threshold of block i is the q.prob-quantile of the blockwise
  ## maxima among the simulations that stayed below the thresholds of
  ## all previous blocks.
  computeThreshFunc <- function(draws) {
    block.max <- matrix(0, sims$B, 0)
    for (i in seq.int(p))
      block.max <- cbind(block.max,
                         apply(draws[, seq.int(bl[i]+1, bl[i+1]), drop = FALSE], 1, max))
    threshold <- numeric(p)
    threshold[1] <- quantile(block.max[, 1], probs = q.prob, type = type)
    if (p > 1)
      for (i in seq.int(2, p)) {
        block.max <- block.max[block.max[, i-1] <= threshold[i-1], ]
        threshold[i] <- quantile(block.max[, i], probs = q.prob, type = type)
      }
    rep(threshold, times = st)
  }
  structure(class = "thresh.cpDist",
            list(mac = computeThreshFunc(mac0),
                 mmc = computeThreshFunc(mmc0),
                 mmk = computeThreshFunc(mmk0),
                 mc = computeThreshFunc(mc0),
                 mk = computeThreshFunc(mk0),
                 d = sims$d, m = sims$m, n = sims$n, gamma = sims$gamma,
                 delta = sims$delta, B = sims$B, sim.method = sims$method,
                 smac = sims$smac, smmc = sims$smmc, smmk = sims$smmk,
                 smc = sims$smc, smk = sims$smk, scale = as.logical(scale),
                 p = p, alpha = alpha, type = type))
}
detCpDist <- function(x.learn, x, gamma = 0.25, delta = 1e-4) {
  # Compute the detector statistics for sequential change-point
  # monitoring: 'x.learn' is the learning sample, 'x' the monitoring
  # observations collected so far. All heavy lifting is done by the
  # "seqCpDistStat" C routine of the npcp package.
  #
  # Returns an object of class "det.cpDist" holding the five detector
  # sequences plus the 1-based maximizing indices wmc/wmk.
  if(!is.matrix(x.learn)) {
    warning("coercing 'x.learn' to a matrix.")
    stopifnot(is.matrix(x.learn <- as.matrix(x.learn)))
  }
  if(!is.matrix(x)) {
    warning("coercing 'x' to a matrix.")
    stopifnot(is.matrix(x <- as.matrix(x)))
  }
  stopifnot(gamma >= 0 && gamma <= 0.5)
  stopifnot(delta >= 0 && delta <= 1)
  stopifnot(ncol(x) == (d <- ncol(x.learn)))
  nlearn <- nrow(x.learn)
  nmon <- nrow(x)
  res <- .C("seqCpDistStat",
            as.double(rbind(x.learn, x)),
            as.integer(nlearn),
            as.integer(nlearn + nmon),
            as.integer(d),
            mac = double(nmon),
            mmc = double(nmon),
            mmk = double(nmon),
            mc = double(nmon),
            mk = double(nmon),
            as.double(gamma),
            as.double(delta),
            wmc = integer(nmon),
            wmk = integer(nmon),
            as.integer(1),
            PACKAGE = "npcp")
  structure(class = "det.cpDist",
            list(mac = res$mac, mmc = res$mmc, mmk = res$mmk,
                 mc = res$mc, mk = res$mk,
                 # C indices are 0-based; convert to R's 1-based indexing
                 wmc = res$wmc + 1, wmk = res$wmk + 1,
                 d = d, m = nlearn, gamma = gamma, delta = delta))
}
monCpDist <- function(det, thresh,
                      statistic = c("mac", "mmc", "mmk", "mk", "mc"),
                      plot = TRUE) {
  ## Compare the chosen detector statistic against its threshold function
  ## and report whether (and when) an alarm is raised.
  ##
  ## Returns a list with: alarm (logical), time.alarm (absolute time of
  ## the first exceedance or NA), times.max (maximizing indices, for the
  ## statistics that track them) and time.change (estimated change time).
  if (!inherits(det, "det.cpDist"))
    stop("'det' should be obtained by 'detCpDist()'")
  if (!inherits(thresh, "thresh.cpDist"))
    stop("'thresh' should be obtained by 'threshCpDist()'")
  if (det$d != thresh$d || det$m != thresh$m)
    stop("'det' and 'thresh' have not been computed from the same learning sample")
  if (det$gamma != thresh$gamma)
    stop("'det' and 'thresh' have not been computed with the same value of 'gamma'")
  if (det$delta != thresh$delta)
    stop("'det' and 'thresh' have not been computed with the same value of 'delta'")
  statistic <- match.arg(statistic)
  ds <- det[[statistic]]
  ts <- thresh[[statistic]]
  if ((l <- length(ds)) > length(ts))
    stop("the number of detector values is greater than the number of threshold values")
  ## FIX: "thresh.cpDist" objects store the simulation method under
  ## 'sim.method', not 'method'; 'thresh$method' was NULL, so the original
  ## condition produced logical(0) inside '&&' and errored (R >= 4.3) for
  ## any statistic other than "mac".
  if (statistic != "mac" && thresh$sim.method == "mult" && thresh$gamma > 0.25)
    warning("the test might be too conservative with these settings; consider decreasing gamma")
  if (thresh$scale == TRUE)
  {
    ## Undo the time scaling applied to the thresholds, if any.
    time.grid <- seq.int(thresh$m+1, thresh$n) / thresh$m
    ds <- switch(statistic,
                 "mac" = ds / time.grid^thresh$smac,
                 "mmc" = ds / time.grid^thresh$smmc,
                 "mmk" = ds / time.grid^thresh$smmk,
                 "mc" = ds / time.grid^thresh$smc,
                 "mk" = ds / time.grid^thresh$smk)
  }
  conds <- (ds <= ts[seq_len(l)])
  alarm <- !all(conds)
  ## First index at which the detector exceeds the threshold.
  ta <- if (alarm) which.max(1 - as.double(conds)) else NA
  tm <- if (statistic %in% c("mac", "mmc")) det$wmc
        else if (statistic == "mmk") det$wmk else NA
  if (plot) {
    mon.period <- seq.int(thresh$m+1, thresh$n)
    plot(mon.period, ts, type = "l", lty = 1,
         xlab = "Monitoring period", ylab = "",
         ylim = c(0, 1.1 * max(ts, ds)))
    points(mon.period[seq.int(l)], ds, type = "b", lty = 3)
    ## FIX: typo "threshhold" -> "threshold" in the legend label.
    legend("topleft", c("threshold function", "detector function"), lty = c(1, 3))
  }
  list(alarm = alarm, time.alarm = if (alarm) ta + thresh$m else NA,
       times.max = tm, time.change = if (alarm) tm[ta] else NA)
}
# Campbell/Berger-Skinner jackknife variance estimator for the Hajek-type
# estimator of a regression coefficient under unequal-probability sampling,
# using the Horvitz-Thompson variance form.
#
# Arguments:
#   VecY.s   - numeric vector: study variable for the sample
#   VecX.s   - numeric vector: auxiliary variable (same length as VecY.s)
#   VecPk.s  - first-order inclusion probabilities, values in (0, 1]
#   MatPkl.s - square matrix of second-order inclusion probabilities,
#              values in (0, 1], dimensions equal to the sample size
# Returns a scalar variance estimate; warns if it is negative.
VE.Jk.CBS.HT.RegCo.Hajek <- function(VecY.s, VecX.s, VecPk.s, MatPkl.s)
{
# Validation order is deliberate (type -> shape -> content): it decides
# which error message a caller sees first.
if(! is.vector(VecY.s) ){stop("VecY.s must be a vector.") }
if(! is.vector(VecX.s) ){stop("VecX.s must be a vector.") }
if(! is.vector(VecPk.s) ){stop("VecPk.s must be a vector.") }
if(! is.matrix(MatPkl.s) ){stop("MatPkl.s must be a matrix.") }
DimMat <- dim(MatPkl.s)
DimMatR <- DimMat[1]
DimMatC <- DimMat[2]
if(DimMatR != DimMatC ){stop("MatPkl.s must be a square matrix. Number of rows and columns has to be equal.")}
n <- length(VecY.s)
if(n != length(VecPk.s) ){stop("The lengths of VecY.s and VecPk.s are different.") }
if(n != length(VecX.s) ){stop("The lengths of VecY.s and VecX.s are different.") }
if(n != DimMatR ){stop("The lengths of VecY.s, VecPk.s and dimensions of MatPkl.s are different.") }
if(anyNA(VecPk.s) ){stop("There are missing values in VecPk.s.") }
if(min(VecPk.s)<=0|max(VecPk.s)>1 ){stop("There are invalid values in VecPk.s.") }
if(anyNA(MatPkl.s) ){stop("There are missing values in MatPkl.s.") }
if(min(MatPkl.s)<=0|max(MatPkl.s)>1){stop("There are invalid values in MatPkl.s.") }
if(anyNA(VecY.s) ){stop("There are missing values in VecY.s.") }
if(anyNA(VecX.s) ){stop("There are missing values in VecX.s.") }
# Leave-one-out estimates of the regression coefficient: element k is the
# Hajek regression-coefficient estimate computed without element k
# (C routine from the samplingVarEst package).
VecEstTheta_k <- .C("Est_RegCo_Hajek_Excluding_All_Elements",
as.double(VecY.s),
as.double(VecX.s),
as.double(VecPk.s),
n,
VectVarEst = double(n),
PACKAGE = "samplingVarEst")$VectVarEst
# Full-sample point estimate.
EstTheta <- Est.RegCo.Hajek(VecY.s, VecX.s, VecPk.s)
# Estimated population size (Horvitz-Thompson total of a vector of ones).
Nhat <- .C("Est_Total_NHT",
as.double(rep(1.0, times=n)),
as.double(VecPk.s),
n,
PointEst = double(1),
PACKAGE = "samplingVarEst")$PointEst
# Campbell/Berger-Skinner pseudo-values: finite-population-corrected
# jackknife deviations from the full-sample estimate.
VecPseudo.s <- (1 - {1/Nhat/VecPk.s}) * (EstTheta - VecEstTheta_k)
# Horvitz-Thompson variance form applied to the pseudo-values.
OUTPUT <- .C("VE_HT_form",
VecPseudo.s,
as.double(VecPk.s),
as.double(c(MatPkl.s)),
n,
VarEst = double(1),
PACKAGE = "samplingVarEst")$VarEst
if(OUTPUT<0 ){warning("The variance estimate contains negative values.") }
OUTPUT
} |
bartlett <- function(x) {
  # Bartlett (triangular) kernel: 1 - |x| on [-1, 1], zero elsewhere.
  ax <- abs(x)
  pmax(1 - ax, 0)
}
parzen <- function(x) {
  # Parzen kernel: cubic on [-1/2, 1/2], 2(1 - |x|)^3 on the remainder of
  # [-1, 1], zero outside. Both pieces agree at |x| = 1/2 (value 1/4).
  ax <- abs(x)
  ifelse(ax <= 0.5, 1 - 6 * ax^2 + 6 * ax^3,
         ifelse(ax <= 1, 2 * (1 - ax)^3, 0))
}
pdfsumunif <- function(x, n) {
  # Density of the sum of n independent standard uniform variables
  # (Irwin-Hall distribution), evaluated elementwise at x via the
  # "pdf_sum_unif" C routine of the npcp package.
  npts <- length(x)
  .C("pdf_sum_unif",
     as.integer(n),
     as.double(x),
     as.integer(npts),
     pdf = double(npts),
     PACKAGE = "npcp")$pdf
}
convrect <- function(x, n) {
  # Irwin-Hall density re-centered at 0 and normalized so that
  # convrect(0, n) == 1 (used as a smoothing kernel).
  centered <- pdfsumunif(x + n/2, n)
  peak <- pdfsumunif(n/2, n)
  centered / peak
}
flattop <- function(x, a = 0.5) {
  # Flat-top lag window: equal to 1 for |x| <= a, tapering linearly to 0
  # at |x| = 1, and 0 beyond.
  taper <- (1 - abs(x)) / (1 - a)
  pmin(pmax(taper, 0), 1)
}
mval <- function(rho, lagmax, kn, rho.crit) {
  # Politis-style rule for picking a truncation lag: return the first lag
  # j such that the kn autocorrelations rho[j..j+kn-1] are all below the
  # significance threshold rho.crit in absolute value.
  small <- abs(rho) < rho.crit
  runs <- vapply(seq_len(lagmax - kn + 1),
                 function(j) sum(small[j:(j + kn - 1)]),
                 integer(1))
  if (any(runs == kn))
    return(which(runs == kn)[1])
  # No window of kn insignificant lags exists: fall back to the largest
  # significant lag, or 1 when nothing is significant at all.
  big <- abs(rho) > rho.crit
  if (any(big)) {
    lag.sig <- which(big)
    if (length(lag.sig) == 1) lag.sig else max(lag.sig)
  } else {
    1
  }
}
Lval <- function(x, method = mean) {
  # Aggregate truncation lag for a multivariate sample: compute the
  # per-column lag chosen by mval() from each column's autocorrelation
  # function, combine them with 'method' (mean by default), and double.
  n <- nrow(x)
  p <- ncol(x)
  kn <- max(5, ceiling(log10(n)))
  lagmax <- ceiling(sqrt(n)) + kn
  crit <- 1.96 * sqrt(log10(n)/n)
  picks <- vapply(seq_len(p), function(j) {
    rho <- acf(x[, j], lag.max = lagmax, type = "correlation",
               plot = FALSE)$acf[-1]
    mval(rho, lagmax, kn, crit)
  }, numeric(1))
  2 * method(picks)
}
# Data-driven block-length selection for the dependent-multiplier
# bootstrap of the (sequential) empirical process: plug-in rule built on
# flat-top lag-window estimates of long-run (co)variances evaluated on a
# grid of pseudo-observation indicators.
#
# Arguments:
#   x        - data, coerced to a matrix (columns = margins)
#   m        - number of grid points per margin for the evaluation grid
#   weights  - kernel whose asymptotic constants are plugged in
#   L.method - how per-column truncation lags from Lval() are aggregated
# Returns an integer block length (rounded).
bOptEmpProc <- function(x, m = 5, weights = c("parzen", "bartlett"),
L.method = c("max","median","mean","min")) {
weights <- match.arg(weights)
L.method <- match.arg(L.method)
method <- switch(L.method,
min = min,
median = median,
mean = mean,
max = max)
stopifnot(m > 0L)
if(!is.matrix(x)) {
warning("coercing 'x' to a matrix.")
stopifnot(is.matrix(x <- as.matrix(x)))
}
n <- nrow(x)
d <- ncol(x)
# Pseudo-observations (normalized ranks in (0, 1)).
U <- apply(x, 2, rank)/(n + 1)
kn <- max(5, ceiling(log10(n)))
lagmax <- ceiling(sqrt(n)) + kn
# Regular grid of evaluation points in (0, 1)^d.
z <- seq(1/(m+1), 1 - 1/(m+1), len = m)
v <- vector("list",d)
for (i in 1:d)
v[[i]] <- z
g <- as.matrix(expand.grid(v))
ng <- nrow(g)
# Cross-covariances (over lags -lagmax..lagmax) of the indicator
# processes 1{U <= g[i,]} at every pair of grid points; only the lower
# triangle is computed, the upper is obtained by reversing the lag axis.
gamma.n <- array(NA, c(ng, ng, 2*lagmax+1))
for (i in 1:ng)
for (j in 1:i) {
gamma.n[i,j,] <- as.numeric(ccf(apply(ifelse(U <= g[i,],1,0),1,prod),
apply(ifelse(U <= g[j,],1,0),1,prod),
lag.max = lagmax, type = "covariance",
plot = FALSE)$acf)
gamma.n[j,i,] <- gamma.n[i,j,(2*lagmax+1):1]
}
L <- Lval(x, method=method)
# Flat-top lag-window estimates of the long-run covariance (sigma.n) and
# of its lag-weighted counterpart (K.n) at every grid-point pair.
K.n <- sigma.n <- matrix(0,ng,ng)
for (i in 1:ng)
for (j in 1:ng) {
ft <- flattop(-lagmax:lagmax/L)
sigma.n[i,j] <- sum(ft * gamma.n[i,j,])
K.n[i,j] <- sum(ft * (-lagmax:lagmax)^2 * gamma.n[i,j,])
}
# Kernel-specific asymptotic constants (squared second derivative at 0
# and integral of the squared kernel).
sqrderiv <- switch(weights,
bartlett = 143.9977845,
parzen = 495.136227)
integralsqrker <- switch(weights,
bartlett = 0.5392857143,
parzen = 0.3723388234)
Gamma.n.2 <- sqrderiv / 4 * mean(K.n^2)
Delta.n <- integralsqrker * (mean(diag(sigma.n))^2 + mean(sigma.n^2))
# MSE-optimal bandwidth grows like n^(1/5); convert to a block length.
ln.opt <- (4 * Gamma.n.2 / Delta.n * n)^(1/5)
return( round((ln.opt + 1) / 2) )
}
# Data-driven block-length selection for change-point tests based on
# Spearman-type rank statistics: same flat-top plug-in rule as
# bOptEmpProc(), but applied to the estimated influence sequence of the
# chosen rho statistic (computed in C by the npcp package).
#
# Arguments:
#   x         - data, coerced to a matrix; must have at least 2 columns
#   statistic - "global" (one d-variate coefficient) or "pairwise"
#   weights   - kernel whose asymptotic constants are plugged in
#   L.method  - lag aggregation; "pseudo" derives the lag from the
#               influence sequence itself rather than from the data
# Returns a list: b (block length), influnonseq (influence sequence),
# fbin (binary representation of the coefficient vector f).
bOptRho <- function(x,
statistic = c("global", "pairwise"),
weights = c("parzen", "bartlett"),
L.method = c("pseudo","max","median","mean","min")) {
statistic <- match.arg(statistic)
weights <- match.arg(weights)
if(!is.matrix(x)) {
warning("coercing 'x' to a matrix.")
stopifnot(is.matrix(x <- as.matrix(x)))
}
n <- nrow(x)
d <- ncol(x)
stopifnot(d > 1L)
L.method <- match.arg(L.method)
# Coefficient vector over the 2^d subsets of margins selecting which
# rho statistic the influence function refers to.
f <- switch(statistic,
global = c(rep(0,2^d - 1),1),
pairwise = c(rep(0, d + 1), rep(1, choose(d,2)),
rep(0, 2^d - choose(d,2) - d - 1)))
powerset <- .C("k_power_set",
as.integer(d),
as.integer(d),
powerset = integer(2^d),
PACKAGE="npcp")$powerset
# Re-index f from "natural" subset order to the binary order used by
# the C influence-function routine.
fbin <- .C("natural2binary",
as.integer(d),
as.double(f),
as.integer(powerset),
fbin = double(2^d),
PACKAGE="npcp")$fbin
out <- .C("influRho",
as.double(x),
as.integer(n),
as.integer(d),
as.double(fbin),
influ = double(n),
PACKAGE = "npcp")
influ <- out$influ
kn <- max(5, ceiling(log10(n)))
lagmax <- ceiling(sqrt(n)) + kn
# Autocovariances of the influence sequence over lags -lagmax..lagmax.
tau.n <- as.numeric(ccf(influ, influ, lag.max = lagmax,
type = "covariance", plot = FALSE)$acf)
if (L.method == "pseudo")
L <- Lval(matrix(influ), method=min)
else {
method <- switch(L.method,
min = min,
median = median,
mean = mean,
max = max)
L <- Lval(x, method=method)
}
# Kernel-specific asymptotic constants (see bOptEmpProc()).
sqrderiv <- switch(weights,
bartlett = 143.9977845,
parzen = 495.136227)
integralsqrker <- switch(weights,
bartlett = 0.5392857143,
parzen = 0.3723388234)
ft <- flattop(-lagmax:lagmax/L)
Gamma.n.2 <- sqrderiv / 4 * sum(ft * (-lagmax:lagmax)^2 * tau.n)^2
Delta.n <- integralsqrker * 2 * sum(ft * tau.n)^2
# MSE-optimal bandwidth -> block length (same conversion as bOptEmpProc).
ln.opt <- (4 * Gamma.n.2 / Delta.n * n)^(1/5)
list(b = round((ln.opt + 1) / 2),
influnonseq = influ,
fbin = fbin)
}
bOpt <- function(influ, weights = c("parzen", "bartlett")) {
  # Block-length selection from a precomputed influence sequence 'influ':
  # flat-top lag-window plug-in rule, scalar version of bOptRho()'s tail.
  if(!is.double(influ)) {
    warning("coercing 'influ' to a double.")
    stopifnot(is.double(influ <- as.double(influ)))
  }
  len <- length(influ)
  weights <- match.arg(weights)
  kn <- max(5, ceiling(log10(len)))
  lagmax <- ceiling(sqrt(len)) + kn
  # Autocovariances of the influence sequence over lags -lagmax..lagmax.
  autocov <- as.numeric(ccf(influ, influ, lag.max = lagmax,
                            type = "covariance", plot = FALSE)$acf)
  L <- Lval(matrix(influ), method = min)
  # Kernel-specific asymptotic constants: squared second derivative at 0
  # and integral of the squared kernel.
  sqrderiv <- switch(weights,
                     bartlett = 143.9977845,
                     parzen = 495.136227)
  integralsqrker <- switch(weights,
                           bartlett = 0.5392857143,
                           parzen = 0.3723388234)
  lags <- -lagmax:lagmax
  win <- flattop(lags / L)
  Gamma.n.2 <- sqrderiv / 4 * sum(win * lags^2 * autocov)^2
  Delta.n <- integralsqrker * 2 * sum(win * autocov)^2
  # MSE-optimal bandwidth grows like n^(1/5); convert to a block length.
  ln.opt <- (4 * Gamma.n.2 / Delta.n * len)^(1/5)
  round((ln.opt + 1) / 2)
}
# Placeholder value, presumably the target of a roxygen documentation
# block (common idiom for package-level docs); no runtime effect.
NULL
.las_rescale_axis <- function(las, axis, scale)
{
  # Re-quantize one coordinate axis ("X", "Y" or "Z") of 'las' with a new
  # scale factor, keeping the existing offset, and record the new scale
  # in the header. Reports the average coordinate displacement.
  assert_is_a_number(scale)
  offset <- las[[paste(axis, "offset")]]
  new <- round((las@data[[axis]] - offset)/scale) * scale + offset
  diff <- round(mean(abs(las@data[[axis]] - new)), 4)
  las@data[[axis]] <- new
  las@header@PHB[[paste(axis, "scale factor")]] <- scale
  message(glue::glue("{axis} coordinates were moved by {diff} on average"))
  las
}
las_rescale = function(las, xscale, yscale, zscale)
{
  # Change the scale factors of a LAS object and re-quantize the X/Y/Z
  # coordinates accordingly; only the axes whose scale is supplied are
  # touched. The header is refreshed at the end.
  #
  # REFACTOR: the X/Y/Z branches were verbatim triplicates; the shared
  # logic now lives in .las_rescale_axis() (behavior unchanged).
  if (!missing(xscale)) las <- .las_rescale_axis(las, "X", xscale)
  if (!missing(yscale)) las <- .las_rescale_axis(las, "Y", yscale)
  if (!missing(zscale)) las <- .las_rescale_axis(las, "Z", zscale)
  las <- lasupdateheader(las)
  return(las)
}
las_reoffset = function(las, xoffset, yoffset, zoffset)
{
  # Change the coordinate offsets of a LAS object and re-quantize the
  # X/Y/Z values against the new offsets; only axes whose offset is
  # supplied are touched. Each candidate offset is first validated on
  # the header min/max range so that the re-quantized integer
  # coordinates do not overflow.
  shift_axis <- function(las, axis, offset, scale, range)
  {
    assert_is_a_number(offset)
    probe <- suppressWarnings(as.integer(round((range - offset)/scale)) * scale + offset)
    if (anyNA(probe)) stop(sprintf("Incorrect %soffset: integer overflow.", tolower(axis)), call. = FALSE)
    new <- round((las@data[[axis]] - offset)/scale) * scale + offset
    diff <- round(mean(abs(las@data[[axis]] - new)), 4)
    las@data[[axis]] <- new
    las@header@PHB[[paste(axis, "offset")]] <- offset
    message(glue::glue("{axis} coordinates were moved by {diff} on average"))
    las
  }
  xscale <- las[["X scale factor"]]
  yscale <- las[["Y scale factor"]]
  zscale <- las[["Z scale factor"]]
  xrange <- c(las[["Min X"]], las[["Max X"]])
  yrange <- c(las[["Min Y"]], las[["Max Y"]])
  zrange <- c(las[["Min Z"]], las[["Max Z"]])
  if (!missing(xoffset)) las <- shift_axis(las, "X", xoffset, xscale, xrange)
  if (!missing(yoffset)) las <- shift_axis(las, "Y", yoffset, yscale, yrange)
  if (!missing(zoffset)) las <- shift_axis(las, "Z", zoffset, zscale, zrange)
  las <- lasupdateheader(las)
  return(las)
}
las_quantize = function(las, by_reference = TRUE)
{
  # Snap the X, Y and Z coordinates of a LAS object onto the grids
  # implied by the header scale factors and offsets. With
  # by_reference = TRUE the point data is modified in place and the
  # (invisible) object is returned; otherwise quantized copies are
  # assigned back into the payload.
  scales <- c(las[["X scale factor"]], las[["Y scale factor"]], las[["Z scale factor"]])
  offsets <- c(las[["X offset"]], las[["Y offset"]], las[["Z offset"]])
  if (isTRUE(by_reference))
  {
    quantize(las$X, scales[1], offsets[1])
    quantize(las$Y, scales[2], offsets[2])
    quantize(las$Z, scales[3], offsets[3])
    return(invisible(las))
  }
  las@data[["X"]] <- quantize(las$X, scales[1], offsets[1], FALSE)
  las@data[["Y"]] <- quantize(las$Y, scales[2], offsets[2], FALSE)
  las@data[["Z"]] <- quantize(las$Z, scales[3], offsets[3], FALSE)
  return(las)
}
las_update = function(las)
{
  # Recompute the header of a LAS object from its current point payload
  # (counts, bounding box, etc. -- delegated to rlas::header_update).
  stopifnotlas(las)
  refreshed <- rlas::header_update(as.list(las@header), las@data)
  las@header <- LASheader(refreshed)
  return(las)
}
quantize = function(x, scale, offset, by_reference = TRUE, ...)
{
  # Snap the coordinates in 'x' onto the grid defined by 'scale' and
  # 'offset', erroring if any value falls outside the storable
  # (32-bit-integer) range. With by_reference = TRUE the vector is
  # modified in place and returned invisibly; otherwise a quantized
  # copy is returned.
  umin <- min(x)
  umax <- max(x)
  urange <- storable_coordinate_range(scale, offset)
  # FIX: scalar condition -- use short-circuit '||' instead of the
  # elementwise '|'.
  if (umax > urange[2] || umin < urange[1])
    stop("'x' contains unquantizable values out of the storable range.", call. = FALSE)
  if (isTRUE(by_reference))
  {
    fast_quantization(x, scale, offset)
    return(invisible(x))
  }
  else
  {
    y <- data.table::copy(x)
    fast_quantization(y, scale, offset)
    return(y)
  }
}
is.quantized = function(x, scale, offset, ...)
{
  # TRUE when every value of 'x' already lies exactly on the grid defined
  # by 'scale' and 'offset'. Passing any non-NULL 'sample' in ... checks
  # only up to 100 evenly spaced values instead of the full vector.
  # NOTE(review): the value of 'sample' is ignored (only its presence
  # matters) and the size 100 is fixed -- confirm intended.
  extras <- list(...)
  if (!is.null(extras$sample))
  {
    k <- min(100L, length(x))
    keep <- as.integer(seq(1L, length(x), length.out = k))
    x <- x[keep]
  }
  fast_countunquantized(x, scale, offset) == 0L
}
# Exported alias for the routine counting values that are not exactly
# representable on a scale/offset grid (presumably a compiled backend --
# its definition is not visible here).
count_not_quantized = fast_countunquantized
storable_coordinate_range <- function(scale, offset) {
  # Range of real-world coordinates representable after quantization:
  # coordinates are stored as signed 32-bit integers, so the symmetric
  # bound +/-2147483647 scaled and offset gives the storable interval.
  assert_is_a_number(scale)
  assert_is_a_number(offset)
  int32.bound <- 2147483647
  bounds <- c("min" = -int32.bound, "max" = int32.bound)
  bounds * scale + offset
}
# Accessors for the components of LAS / LASheader S4 objects.

# Return the LASheader of a LAS object.
header <- function(las)
{
return(las@header)
}
# Return the point payload of a LAS object.
payload <- function(las)
{
return(las@data)
}
# Return the Public Header Block; accepts a LAS or a LASheader.
phb <- function(las)
{
if (!is(las, "LASheader"))
las <- header(las)
return(las@PHB)
}
# Return the Variable Length Records; accepts a LAS or a LASheader.
vlr <- function(las)
{
if (!is(las, "LASheader"))
las <- header(las)
return(las@VLR)
}
# Return the Extended Variable Length Records, or NULL when the header
# object predates the EVLR slot; accepts a LAS or a LASheader.
evlr <- function(las)
{
if (!is(las, "LASheader"))
las <- header(las)
if (!methods::.hasSlot(las, "EVLR"))
return(NULL)
return(las@EVLR)
}
# Backward-compatible alias kept for internal callers.
lasupdateheader = las_update |
# Test suite for broom.mixed tidiers (tidy/glance/augment) on lme4 models.
stopifnot(require("testthat"), require("broom.mixed"))
# Everything below requires lme4; the whole suite is skipped otherwise.
if (require(lme4, quietly = TRUE)) {
# Pre-fitted example models (lmm0, lmm0ML, lmm1, lmm2) shipped with the
# package are loaded into the current environment.
load(system.file("extdata", "lme4_example.rda",
package = "broom.mixed",
mustWork = TRUE
))
context("lme4 models")
d <- as.data.frame(ChickWeight)
colnames(d) <- c("y", "x", "subj", "tx")
# NOTE(review): '<<-' places 'fit' in the global environment so the
# test_that() blocks after the closing brace of this 'if' can see it --
# those blocks would fail if lme4 were unavailable; confirm intended.
fit <<- lmer(y ~ tx * x + (x | subj), data = d)
# Fixed effects and random-effect parameters with the expected columns.
test_that("tidy works on lme4 fits", {
td <- tidy(fit)
expect_equal(dim(td), c(12, 6))
expect_equal(
names(td),
c(
"effect", "group", "term", "estimate",
"std.error", "statistic"
)
)
expect_equal(
td$term,
c(
"(Intercept)", "tx2", "tx3", "tx4", "x",
"tx2:x", "tx3:x", "tx4:x",
"sd__(Intercept)",
"cor__(Intercept).x",
"sd__x",
"sd__Observation"
)
)
})
# GLMMs additionally report p-values in tidy() output.
test_that("tidy/glance works on glmer fits", {
gm <- glmer(cbind(incidence, size - incidence) ~ period + (1 | herd),
cbpp, binomial,
nAGQ = 0
)
ggm <- broom::glance(gm)
expect_equal(names(ggm), c("nobs", "sigma", "logLik", "AIC", "BIC", "deviance", "df.residual"))
td <- tidy(gm)
expect_equal(
names(td),
c(
"effect", "group", "term", "estimate",
"std.error", "statistic", "p.value"
)
)
td_ran <- tidy(gm, "ran_pars")
expect_equal(names(td_ran), c("effect", "group", "term", "estimate"))
})
# REML fits report REMLcrit rather than deviance; ML fits the reverse.
test_that("glance includes deviance iff method='ML'", {
expect(!("deviance" %in% names(glance(lmm0))),"deviance not included")
expect("REMLcrit" %in% names(glance(lmm0)),"REMLcrit not included")
expect("deviance" %in% names(glance(lmm0ML)),"deviance not included")
})
test_that("tidy works on non-linear fits", {
startvec <- c(Asym = 200, xmid = 725, scal = 350)
nm <- nlmer(circumference ~ SSlogis(age, Asym, xmid, scal) ~ Asym | Tree,
Orange,
start = startvec, nAGQ = 0L
)
gnm <- broom::glance(nm)
expect_equal(names(gnm), c("nobs", "sigma", "logLik", "AIC", "BIC", "deviance", "df.residual"))
td <- tidy(nm)
expect_equal(
names(td),
c(
"effect", "group", "term", "estimate",
"std.error", "statistic"
)
)
td_ran <- tidy(nm, "ran_pars")
expect_equal(names(td_ran), c("effect", "group", "term", "estimate"))
})
# "sdcor" is the default ran_pars scale; "vcov" squares the estimates.
test_that("scales works", {
t1 <- tidy(fit, effects = "ran_pars")
t2 <- tidy(fit, effects = "ran_pars", scales = "sdcor")
expect_equal(t1$estimate, t2$estimate)
expect_error(
tidy(fit, effects = "ran_pars", scales = "varcov"),
"unrecognized ran_pars scale"
)
t3 <- tidy(fit, effects = "ran_pars", scales = "vcov")
get_sdvar <- function(x) {
(x %>% dplyr::filter(grepl("^(sd|var)",term))
%>% dplyr::select(estimate)
)}
expect_equal(
as.data.frame(get_sdvar(t3)),
as.data.frame(get_sdvar(t2) %>% mutate_all(~.^2))
)
expect_error(
tidy(fit, scales = "vcov"),
"must be provided for each effect"
)
})
test_that("tidy works with more than one RE grouping variable", {
dd <- expand.grid(f = factor(1:10), g = factor(1:5), rep = 1:3)
dd$y <- suppressMessages(simulate(~(1 | f) + (1 | g),
newdata = dd,
newparams = list(beta = 1, theta = c(1, 1)),
family = poisson, seed = 101
))[[1]]
gfit <- glmer(y ~ (1 | f) + (1 | g), data = dd, family = poisson)
tnames <- as.character(tidy(gfit, effects = "ran_pars")$term)
expect_equal(tnames, rep("sd__(Intercept)", 2))
})
test_that("augment works on lme4 fits with or without data", {
au1 <- suppressWarnings(broom::augment(fit))
au2 <- suppressWarnings(broom::augment(fit, d))
expect_equal(au1, au2[names(au1)])
})
# Data with missing responses, shared by the next two tests (again made
# global with '<<-' -- see the note on 'fit' above).
dNAs <<- d
dNAs$y[c(1, 3, 5)] <- NA
test_that("augment works on lme4 fits with NAs", {
fitNAs <- lmer(y ~ tx * x + (x | subj), data = dNAs,
control=lmerControl(check.conv.grad=
.makeCC("warning", tol = 5e-2, relTol = NULL)))
au <- suppressWarnings(broom::augment(fitNAs))
expect_equal(nrow(au), sum(complete.cases(dNAs)))
})
test_that("augment works on lme4 fits with na.exclude", {
fitNAs <- lmer(y ~ tx * x + (x | subj),
data = dNAs, na.action = "na.exclude",
control=lmerControl(check.conv.grad=
.makeCC("warning", tol = 5e-2, relTol = NULL)))
au <- suppressWarnings(broom::augment(fitNAs, dNAs))
expect_equal(nrow(au), nrow(dNAs))
expect_equal(complete.cases(au), complete.cases(dNAs))
})
test_that("glance works on lme4 fits", {
g <- broom::glance(fit)
expect_equal(dim(g), c(1, 7))
})
test_that("ran_vals works", {
td0 <- tidy(lmm0, "ran_vals")
td1 <- tidy(lmm1, "ran_vals")
expect_equal(dim(td0), c(18, 6))
expect_equal(dim(td1), c(36, 6))
if (packageVersion("lme4") >= "1.1.18") {
td2 <- tidy(lmm2, "ran_vals")
expect_equal(dim(td2), c(36, 6))
expect_equal(names(td1), names(td2))
}
})
test_that("confint preserves term names", {
td3 <- tidy(lmm0, conf.int = TRUE, conf.method = "Wald", effects = "fixed")
expect_equal(td3$term, c("(Intercept)", "Days"))
})
}
# NOTE(review): this test (and the ones below) sit outside the
# 'if (require(lme4, ...))' guard above yet use objects ('lmm0', tidy
# methods) that only exist when lme4 is available -- confirm intended.
test_that("tidy respects conf.level", {
tmpf <- function(cl=0.95) {
return(tidy(lmm0,conf.int=TRUE,conf.level=cl)[1,][["conf.low"]])
}
# Reference values computed once from the stored lmm0 fit.
expect_equal(tmpf(),232.3019,tolerance=1e-4)
expect_equal(tmpf(0.5),244.831,tolerance=1e-4)
})
test_that("effects='ran_pars' + conf.int works", {
  tt <- tidy(lmm0, effects = "ran_pars", conf.int = TRUE, conf.method = "profile",
             quiet = TRUE)[c("conf.low", "conf.high")]
  # Reference profile-CI values (rounded).
  # FIX: the original assigned tt0 twice in a row; the first,
  # full-precision assignment was a dead store and has been removed.
  tt0 <- structure(list(conf.low = c(26.00712, 27.81384),
                        conf.high = c(52.9359, 34.59104)),
                   row.names = c(NA, -2L),
                   class = c("tbl_df", "tbl", "data.frame"))
  expect_equal(as.data.frame(tt0), as.data.frame(tt),
               tolerance = 1e-5)
})
test_that("augment returns a tibble", {
expect_is(augment(fit), "tbl")
})
test_that("conf intervals for ranef in correct order", {
t1 <- tidy(lmm1,conf.int=TRUE,effect="ran_pars",conf.method="profile",quiet=TRUE)
cor_vals <- t1[t1$term=="cor__(Intercept).Days",]
expect_true(cor_vals$conf.low>(-1) && cor_vals$conf.high<1)
}) |
# Checks for rsmatrix: build repeat-sales matrices on random and hand-made
# fixtures and compare dense/sparse/grouped results against known values.
# NOTE(review): these top-level identical()/all.equal() calls only print
# their results; consider wrapping in stopifnot() so regressions fail
# loudly under R CMD check.
library(rsmatrix)
set.seed(4321)
# Random sale/previous-sale period labels, prices, and a grouping factor.
t2 <- sprintf("%03d", sample(101:200))
t1 <- sprintf("%03d", sample(1:100))
p2 <- runif(100)
p1 <- runif(100)
f <- sample(letters[1:3], 100, TRUE)
# Small worked example with explicit sale pairs.
x <- data.frame(date = c(3, 2, 3, 2, 3, 3),
                date_prev = c(1, 1, 2, 1, 2, 1),
                price = 6:1,
                price_prev = c(1, 1, 5, 1, 3, 1),
                id = c("a", "b", "b", "c", "c", "d"),
                id2 = rep(c("a", "b"), each = 3))
# Matrix generators: dense, sparse, grouped, and a subset-only version.
mat <- with(x, rs_matrix(date, date_prev, price, price_prev))
mats <- with(x, rs_matrix(date, date_prev, price, price_prev, sparse = TRUE))
matg <- with(x, rs_matrix(date, date_prev, price, price_prev, id2))
mata <- with(subset(x, id2 == "a"),
             rs_matrix(date, date_prev, price, price_prev))
# Least-squares estimates from the Z/X/Y/y components.
b <- solve(crossprod(mat("Z")), crossprod(mat("Z"), mat("y")))
bg <- solve(crossprod(matg("Z")), crossprod(matg("Z"), matg("y")))
ba <- solve(crossprod(mata("Z")), crossprod(mata("Z"), mata("y")))
g <- solve(crossprod(mat("Z"), mat("X")), crossprod(mat("Z"), mat("Y")))
gg <- solve(crossprod(matg("Z"), matg("X")), crossprod(matg("Z"), matg("Y")))
ga <- solve(crossprod(mata("Z"), mata("X")), crossprod(mata("Z"), mata("Y")))
# .rs_z edge cases: empty inputs, degenerate (same period) pairs, factors,
# Dates, and grouped variants.
identical(rsmatrix:::.rs_z(integer(0), character(0)),
          matrix(double(0), ncol = 0))
identical(rsmatrix:::.rs_z(integer(0), character(0), logical(0)),
          matrix(double(0), ncol = 0))
identical(rsmatrix:::.rs_z(rep("a", 2), rep("a", 2)),
          matrix(0, ncol = 1, nrow = 2, dimnames = list(1:2, "a")))
identical(rsmatrix:::.rs_z(c(a = rep("a", 2)), c(b = rep("a", 2)), 1:2),
          matrix(rep(0, 4), ncol = 2, dimnames = list(c("a1", "a2"), c("1.a", "2.a"))))
identical(rsmatrix:::.rs_z(c(a = 2:1), 2:1),
          matrix(c(0, 0, 0, 0), ncol = 2, dimnames = list(c("a1", "a2"), 1:2)))
identical(rsmatrix:::.rs_z(1:2, c(a = 2:1)),
          matrix(c(1, -1, -1, 1), ncol = 2, dimnames = list(c("a1", "a2"), 1:2)))
identical(rsmatrix:::.rs_z(3:2, 2:1),
          matrix(c(0, -1, -1, 1, 1, 0), ncol = 3, dimnames = list(1:2, 1:3)))
identical(rsmatrix:::.rs_z(c(a = 2, b = 2), c(1, 1), c("a", "b")),
          matrix(c(-1, 0, 0, -1, 1, 0, 0, 1), ncol = 4, dimnames = list(c("a", "b"), c("a.1", "b.1", "a.2", "b.2"))))
identical(rsmatrix:::.rs_z(factor(c(3:2, 2)), c(2:1, 1), letters[c(1, 1, 2)]),
          matrix(c(0, -1, 0, 0, 0, -1, -1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0), ncol = 6, dimnames = list(1:3, c("a.1", "b.1", "a.2", "b.2", "a.3", "b.3"))))
identical(rsmatrix:::.rs_z(factor(3:2), 2:1),
          rsmatrix:::.rs_z(3:2, 2:1))
identical(rsmatrix:::.rs_z(factor(2:1, levels = 1:3), factor(c(a = 1, b = 1))),
          matrix(c(-1, 0, 1, 0), ncol = 2, dimnames = list(c("a", "b"), 1:2)))
identical(rsmatrix:::.rs_z(factor(letters[3:2]), factor(letters[2:1])),
          rsmatrix:::.rs_z(letters[3:2], letters[2:1]))
identical(rsmatrix:::.rs_z(as.Date(c("2017-02-01", "2017-03-01", "2017-01-01")), as.Date(c("2017-01-01", "2017-02-01", "2017-01-01"))),
          matrix(c(-1, 0, 0, 1, -1, 0, 0, 1, 0), ncol = 3, dimnames = list(1:3, c("2017-01-01", "2017-02-01", "2017-03-01"))))
# Structural invariants: each row of Z sums to 0 and has two nonzero cells.
all(rowSums(rsmatrix:::.rs_z(t2, t1)) == 0)
all(rowSums(rsmatrix:::.rs_z(t2, t1, f)) == 0)
all(rowSums(abs(rsmatrix:::.rs_z(t2, t1))) == 2)
# rs_matrix component checks (dense).
identical(rs_matrix(integer(0), character(0), integer(0), double(0))("X"),
          matrix(double(0), ncol = 0))
identical(rs_matrix(integer(0), character(0), integer(0), double(0))("Y"), double(0))
identical(rs_matrix(c(2, 4), 1:2, c(2, 5), 1:2)("X"),
          matrix(c(2, -2, 0, 5), ncol = 2, dimnames = list(1:2, c(2, 4))))
identical(rs_matrix(c(2, 4), 1:2, c(2, 5), 1:2)("Z"),
          matrix(c(1, -1, 0, 1), ncol = 2, dimnames = list(1:2, c(2, 4))))
identical(rs_matrix(c(2, 4), 1:2, c(2, 5), 1:2)("Y"),
          c("1" = 1, "2" = 0))
# Sparse variants must agree with the dense results.
identical(rsmatrix:::.rs_z(integer(0), integer(0), sparse = TRUE),
          as(matrix(double(0), ncol = 0), "dgCMatrix"))
identical(rsmatrix:::.rs_z(1, 1, sparse = TRUE),
          as(matrix(0, ncol = 1, dimnames = list(1, 1)), "dgCMatrix"))
identical(rsmatrix:::.rs_z(c(a = "a"), "a", sparse = TRUE),
          as(matrix(0, ncol = 1, dimnames = list("a", "a")), "dgCMatrix"))
identical(rsmatrix:::.rs_z(c(2, 2), c(1, 1), c("a", "b"), TRUE),
          as(matrix(c(-1, 0, 0, -1, 1, 0, 0, 1), ncol = 4, dimnames = list(1:2, c("a.1", "b.1", "a.2", "b.2"))), "dgCMatrix"))
identical(rsmatrix:::.rs_z(t2, t1, sparse = TRUE),
          Matrix::Matrix(rsmatrix:::.rs_z(t2, t1), sparse = TRUE))
identical(rs_matrix(integer(0), integer(0), integer(0), integer(0), sparse = TRUE)("X"),
          as(matrix(double(0), ncol = 0), "dgCMatrix"))
identical(rs_matrix(t2, t1, p2, p1, sparse = TRUE)("X"),
          Matrix::Matrix(rs_matrix(t2, t1, p2, p1)("X"), sparse = TRUE))
identical(rs_matrix(integer(0), integer(0), integer(0), integer(0), sparse = TRUE)("Y"),
          double(0))
identical(rs_matrix(c(2, 4), 1:2, c(2, 5), 1:2, sparse = TRUE)("Y"),
          c("1" = 1, "2" = 0))
# Grouped estimates must match per-group estimates on the "a" subset.
identical(as.numeric(ba[, 1]), as.numeric(bg[seq(1, 4, 2), 1]))
identical(as.numeric(ga[, 1]), as.numeric(gg[seq(1, 4, 2), 1]))
# Pinned coefficient and robust-variance values for the small fixture.
all.equal(as.numeric(b), c(1.306078088475809, 0.943826746689325))
all.equal(unname(rs_var(mat("y") - mat("Z") %*% b, mat("Z"))),
          matrix(c(0.0904705916756374, 0.1445215722595884, 0.1445215722595884, 0.2748117902801680), ncol = 2))
all.equal(unname(rs_var(mat("y") - mat("Z") %*% b, mat("Z"), ids = x$id)),
          matrix(c(0.091047862, 0.162948279, 0.162948279, 0.310083942), ncol = 2))
all.equal(as.numeric(g), c(0.2375, 0.3000))
all.equal(unname(rs_var(mat("Y") - mat("X") %*% g, mat("Z"), mat("X"))),
          matrix(c(0.00358699951171875, 0.00703212890625000, 0.00703212890625000, 0.01743984374999999), ncol = 2))
all.equal(as.numeric(rs_var(mat("Y") - mat("X") %*% g, mat("Z"), mat("X"))),
          as.numeric(rs_var(mats("Y") - mats("X") %*% g, mats("Z"), mats("X")))) |
imxReservedNames <- c('data', 'objective', 'likelihood', 'fitfunction', 'expectation', 'compute', 'one') |
# Pairwise causal-direction scoring: compares column 1 of `mtx` (the
# "response") against each remaining column using three criteria
# (absolute standardized residuals of h-series, absolute standardized
# residuals, and generalized/partial correlation sign), optionally
# conditioning on control variable(s) `ctrl`.  Returns one rounded sum of
# the three criterion scores per (column1, column i) pair.
#
# mtx  : data matrix; column 1 is compared against columns 2..p.
# ctrl : control variable(s); a scalar 0 (default) means "no controls".
# dig  : digits used when rounding the combined score.
#
# Relies on package helpers defined elsewhere: naTriplet, napair,
# abs_stdrhserC, abs_stdresC, abs_stdrhserr, abs_stdres, compPortfo,
# parcor_ijk, gmcmtx0.
silentPair2 = function(mtx, ctrl = 0, dig = 6) {
    len = length(ctrl)
    n = NROW(mtx)
    p = NCOL(mtx)
    if (p < 2)
        stop("too few columns in mtx input to silentPairs")
    npair = p - 1
    # Per-pair scores for the three criteria, then their sum.
    cr1 = rep(NA, npair)
    cr2 = rep(NA, npair)
    cr3 = rep(NA, npair)
    crall = rep(NA, npair)
    for (typ in 1:3) {
        for (i in 2:p) {
            x0 = mtx[, i]
            y0 = mtx[, 1]
            # Drop incomplete observations, jointly with controls if any.
            if (len > 1) {
                z0 = ctrl
                na2 = naTriplet(x0, y0, z0)
                x = na2$newx
                y = na2$newy
                z = na2$newctrl
            }
            if (len == 1) {
                na2 = napair(x0, y0)
                x = na2$newx
                y = na2$newy
            }
            if (length(x) < 5) {
                print("available observations<5")
                # NOTE(review): `break` only exits the inner loop over i,
                # not the outer loop over typ — confirm this is intended.
                break
            }
            im1 = i - 1
            # With controls: conditional versions of the criteria.
            if (len > 1) {
                if (typ == 1)
                  arxy = abs_stdrhserC(x, y, z)
                if (typ == 1)
                  aryx = abs_stdrhserC(y, x, z)
                if (typ == 2)
                  arxy = abs_stdresC(x, y, z)
                if (typ == 2)
                  aryx = abs_stdresC(y, x, z)
                if (typ < 3) {
                  # Average of stochastic-dominance portfolio comparison.
                  av.crit4 =mean( compPortfo(arxy, aryx))
                  if (typ == 1) {
                    cr1[im1] = av.crit4
                  }
                  if (typ == 2) {
                    cr2[im1] = av.crit4
                  }
                }
                if (typ == 3) {
                  # Criterion 3: sign of difference of squared partial
                  # correlations r*(x|y;z) vs r*(y|x;z).
                  par1 = parcor_ijk(x, y, z)
                  rxy=par1$ouij
                  ryx=par1$ouji
                  del = rxy^2 - ryx^2
                  cr3[im1] = as.numeric(sign(del))
                }
            }
            # Without controls: unconditional versions of the criteria.
            if (len == 1) {
                if (typ == 1)
                  arxy = abs_stdrhserr(x, y)
                if (typ == 1)
                  aryx = abs_stdrhserr(y, x)
                if (typ == 2)
                  arxy = abs_stdres(x, y)
                if (typ == 2)
                  aryx = abs_stdres(y, x)
                if (typ < 3) {
                  av.crit4 =mean(compPortfo(arxy, aryx))
                  if (typ == 1) {
                    cr1[im1] = av.crit4
                  }
                  if (typ == 2) {
                    cr2[im1] = av.crit4
                  }
                }
                if (typ == 3) {
                  # Criterion 3 (no controls): sign from the generalized
                  # correlation matrix asymmetry.
                  gmc0 = gmcmtx0(cbind(x, y))
                  rxy = gmc0[1, 2]
                  ryx = gmc0[2, 1]
                  del = rxy^2 - ryx^2
                  cr3[im1] = as.numeric(sign(del))
                }
            }
        }
    }
    # Combine the three criteria per pair (NA criteria are skipped).
    for (j in 1:npair) {
        cr13 = c(cr1[j], cr2[j], cr3[j])
        crall[j] = round(sum(cr13, na.rm = TRUE), dig)
    }
    return(crall)
} |
# Build cloud masks from the MOD09 quality band for every image folder in
# `src`.  Output rasters have 1 for clear pixels and NA for cloud/shadow.
#
# src       : folder of per-date image folders (from other RGISTools steps).
# AppRoot   : optional output root; when missing, masks are written next to
#             the source quality band with a "_CLD" suffix.
# out.name  : output folder name under AppRoot (default "CloudMask").
# overwrite : re-create masks that already exist.
# ...       : may include `dates` to restrict processing to given dates.
#
# Relies on package helpers defined elsewhere: pathWinLx, genGetDates,
# getRGISToolsOpt, modGetDates, and on the raster package.
modCloudMask<-function(src,AppRoot,out.name,overwrite=FALSE,...){
  arg<-list(...)
  src<-pathWinLx(src)
  if(!missing(AppRoot)){
    AppRoot<-pathWinLx(AppRoot)
    if(missing(out.name))
      AppRoot<-file.path(AppRoot,"CloudMask")
    else
      AppRoot<-file.path(AppRoot,out.name)
    dir.create(AppRoot,showWarnings = FALSE,recursive = TRUE)
  }
  imgdir.list<-list.dirs(src,recursive=FALSE)
  # Optional date filter.
  if("dates"%in%names(arg)){imgdir.list<-imgdir.list[genGetDates(imgdir.list)%in%arg$dates]}
  for(id in imgdir.list){
    tif.list<-list.files(id,pattern = "\\.tif$",full.names = TRUE)
    # Locate the state/quality band for this date.
    cloudmask<-tif.list[grepl(getRGISToolsOpt("MOD09BANDS")["quality"],tif.list)]
    if(missing(AppRoot)){
      out.img<-gsub(paste0(getRGISToolsOpt("MOD09BANDS")["quality"],".tif"),"_CLD.tif",cloudmask,ignore.case =TRUE)
    }else{
      out.img<-file.path(AppRoot,paste0(basename(id),paste0("_",getRGISToolsOpt("MOD09BANDS")["cloud"],".tif")))
    }
    if(!file.exists(out.img)|overwrite){
      message(paste0("Creating cloud mask of date ",modGetDates(basename(id)),"."))
      r <- raster(cloudmask)
      # NOTE(review): `stime` is assigned but never used.
      stime<-Sys.time()
      # Unpack the three lowest state-QA bits per pixel (bits 0-1: cloud
      # state, bit 2: cloud shadow) — assumes MOD09 bit layout; confirm.
      v <- matrix(as.numeric(matrix(intToBits(getValues(r)), ncol = 32, byrow = T)[,1:3]),ncol = 3)
      r[] <- rowSums(v[,1:2])
      # Cloud-state sum of 1 -> cloudy -> NA; everything else -> clear (1).
      r[r==1] <- NA
      r[r!=1] <- 1
      # NOTE(review): this assignment is immediately overwritten on the
      # next line; also the overwrite replaces the raster with a plain
      # vector — verify intent.
      r_shadow <- r
      r_shadow <- 1 - v[,3]
      r_shadow[r_shadow == 0] <- NA
      # Combine: NA wherever either cloud or shadow flagged.
      ras.cloud <- r * r_shadow
      writeRaster(ras.cloud,out.img,overwrite=overwrite)
    }else{
      message(paste0("Cloud mask of date ",modGetDates(basename(id))," already exists."))
    }
  }
} |
# redist walkthrough on the Iowa data: build a redist_map, simulate plans
# with SMC, then summarise and plot them.
# NOTE(review): the knitr option string below is truncated in this copy of
# the file (text from '#' onward was lost — likely `comment = "#>")`);
# restore before running.
knitr::opts_chunk$set(collapse = TRUE, comment = "
set.seed(5118)
library(redist)
library(dplyr)
library(ggplot2)
data(iowa)
print(iowa)
iowa_map = redist_map(iowa, existing_plan=cd_2010, pop_tol=0.01, total_pop = pop)
print(iowa_map)
plot(iowa_map, adj=T) + plot(iowa_map)
areas = as.numeric(units::set_units(sf::st_area(iowa_map$geometry), mi^2))
plot(iowa_map, fill = pop / areas) +
    scale_fill_viridis_c(name="Population density (people / sq. mi)",
                         trans="sqrt")
plot(iowa_map, fill = dem_08 / tot_08) +
    scale_fill_gradient2(name="Pct. Democratic '08", midpoint=0.5)
plot(iowa_map, fill = wvap / vap, by_distr = TRUE)
iowa_plans = redist_smc(iowa_map, nsims=1000, compactness=1)
print(iowa_plans)
redist.plot.plans(iowa_plans, draws=1:6, geom=iowa_map)
iowa_plans = match_numbers(iowa_plans, iowa_map$cd_2010)
print(iowa_plans)
county_perims = redist.prep.polsbypopper(iowa_map, iowa_map$adj)
iowa_plans = iowa_plans %>%
    mutate(pop_dev = abs(total_pop / get_target(iowa_map) - 1),
           comp = distr_compactness(iowa_map, "PolsbyPopper", perim_df=county_perims),
           pct_min = group_frac(iowa_map, vap - wvap, vap),
           pct_dem = group_frac(iowa_map, dem_08, dem_08 + rep_08))
print(iowa_plans)
plan_sum = group_by(iowa_plans, draw) %>%
    summarize(max_dev = max(pop_dev),
              avg_comp = mean(comp),
              max_pct_min = max(pct_min),
              dem_distr = sum(pct_dem > 0.5))
print(plan_sum)
library(patchwork)
hist(plan_sum, max_dev) + hist(iowa_plans, comp) +
    plot_layout(guides="collect")
plot(iowa_plans, pct_dem, sort=FALSE, size=0.5)
pal = scales::viridis_pal()(5)[-1]
redist.plot.scatter(iowa_plans, pct_min, pct_dem,
                    color=pal[subset_sampled(iowa_plans)$district]) +
    scale_color_manual(values="black") |
# Insert a named vcr cassette, run the code passed via `...` while HTTP
# interactions are recorded/replayed, then eject the cassette.
#
# name : cassette name.
# ...  : code to evaluate with the cassette active.
# The remaining arguments are passed straight through to insert_cassette()
# and configure recording mode, request matching, serialization, etc.
# Returns the cassette object invisibly useful state, or NULL when no
# cassette was inserted (e.g. vcr is turned off).
use_cassette <- function(name, ...,
                         record = NULL,
                         match_requests_on = NULL,
                         update_content_length_header = FALSE,
                         allow_playback_repeats = FALSE,
                         serialize_with = NULL,
                         persist_with = NULL,
                         preserve_exact_body_bytes = NULL,
                         re_record_interval = NULL,
                         clean_outdated_http_interactions = NULL) {
  cassette <- insert_cassette(name,
    record = record,
    match_requests_on = match_requests_on,
    update_content_length_header = update_content_length_header,
    allow_playback_repeats = allow_playback_repeats,
    serialize_with = serialize_with,
    persist_with = persist_with,
    preserve_exact_body_bytes = preserve_exact_body_bytes,
    re_record_interval = re_record_interval,
    clean_outdated_http_interactions = clean_outdated_http_interactions
  )
  if (is.null(cassette)) {
    # No cassette (e.g. vcr turned off): just evaluate the user code.
    # NOTE(review): force() accepts a single argument, so this errors if
    # more than one expression is supplied in `...` — confirm intended.
    force(...)
    return(NULL)
  }
  # Guarantee ejection even if the user code errors.
  on.exit(cassette$eject())
  cassette$call_block(...)
  return(cassette)
}
# Warn when a cassette file contains no recorded interactions (all lines
# empty).  `empty_cassette_message` is a package-level constant defined
# elsewhere; `cas` is a cassette object with a file() accessor.
check_empty_cassette <- function(cas) {
  if (!any(nzchar(readLines(cas$file())))) {
    warning(empty_cassette_message, call. = FALSE)
  }
} |
# Resize an Image object either to absolute dimensions (`height`/`width`)
# or by scale factors (`fx`/`fy`), delegating to the compiled `_resize`.
#
# Exactly two size specifications are required:
#  - height + width: absolute target size (fx/fy ignored);
#  - height + fx   : width derived as ncol(image) * fx;
#  - width  + fy   : height derived as nrow(image) * fy;
#  - fx + fy       : both dimensions scaled.
# interpolation : one of nearest/linear/cubic/area/Lanczos/exact.
# target        : "new" to allocate an output image, or an existing Image
#                 to write into (modified in place by `_resize`).
resize <- function(image, height = NULL, width = NULL, fx = NULL, fy = NULL,
                   interpolation = "linear", target = "new") {
  if (!isImage(image))
    stop("'image' must be an Image object.")
  # Which of height/width/fx/fy were supplied.
  test <- !c(is.null(height), is.null(width), is.null(fx), is.null(fy))
  new_dims <- c(NA, NA)
  if (sum(test[1:2]) == 2) {
    # Both absolute dimensions given: scale factors are ignored.
    if (sum(test[3:4]) > 0)
      warning("When 'height' and 'width' are set 'fx' and 'fy' are ignored.")
    new_dims <- c(height, width)
    fx <- 0
    fy <- 0
  } else if (sum(test[1:2]) == 1) {
    if (test[1]) {
      # Height given: the missing width must come from fx.
      if (test[4])
        warning("When 'height' is set 'fy' is ignored.")
      if (!test[3])
        stop("When 'width' is not set, 'fx' must be set")
      fy <- 0
      width <- ncol(image) * fx
      fx <- 0
    } else {
      # Width given: the missing height must come from fy.
      if (test[3])
        warning("When 'width' is set 'fx' is ignored.")
      if (!test[4])
        stop("When 'height' is not set, 'fy' must be set")
      fx <- 0
      height <- nrow(image) * fy
      fy <- 0
    }
    new_dims <- c(height, width)
  } else if (sum(test[3:4]) == 2) {
    # Pure scale factors: dimensions computed from the source image.
    height <- 0
    width <- 0
    new_dims <- c(image$nrow() * fy, image$ncol() * fx)
  } else {
    stop("At least two of 'height', 'width', 'fx' and 'fy' must be set.")
  }
  # Map the interpolation name to the OpenCV enum value.
  interp <- switch(interpolation,
                   nearest = 0,
                   linear = 1,
                   cubic = 2,
                   area = 3,
                   Lanczos = 4,
                   exact = 5,
                   stop("This is not a valid interpolation method."))
  if (isImage(target)) {
    `_resize`(image, height, width, fx, fy, interp, target)
  } else if (target == "new") {
    # Allocate an output image matching the source's depth/channels/space.
    out <- zeros(new_dims[1], new_dims[2], bitdepth = image$depth(),
                 nchan = image$nchan(), colorspace = image$space)
    `_resize`(image, height, width, fx, fy, interp, out)
    out
  } else {
    stop("Invalid target.")
  }
}
# Flip an Image about an axis (`type` is forwarded to the compiled
# `_flip`).  `target` selects where the result goes: "new" allocates a
# copy, "self" flips in place, or pass an existing Image to write into.
# `in_place` is the deprecated predecessor of target="self"/"new".
flip <- function(image, type = 0, target = "new", in_place = NULL) {
  # Back-compatibility shim: translate the deprecated in_place argument.
  if (!missing(in_place)) {
    if (in_place) {
      warning("in_place is deprecated. Use target='self' instead.")
      target <- "self"
    } else {
      warning("in_place is deprecated. Use target='new' instead.")
      target <- "new"
    }
  }
  if (!isImage(image))
    stop("This is not an Image object.")
  if (isImage(target)) {
    `_flip`(image, type, target)
  } else if (target == "self") {
    `_flip`(image, type, image)
  } else if (target == "new") {
    out <- cloneImage(image)
    `_flip`(image, type, out)
    out
  } else {
    stop("Invalid target.")
  }
} |
# S3 print method for MangroveSample objects: shows the simulation size,
# observed case count, disease prevalence, and the sampled results.
print.MangroveSample <- function(x, ...) {
  cat("A Mangrove simulation.\n")
  cat("Number of individuals: ", x$N, sep = "")
  cat("\nNumber of cases observed: ", x$Ncases, sep = "")
  cat("\nprevalence of the disease: ", x$K, sep = "")
  cat("\nResults:\n")
  print.default(x$sample)
}
# A "string" here means a length-one, non-NA character vector.
is_string <- function(x) {
  if (!is.character(x)) {
    return(FALSE)
  }
  length(x) == 1 && !is.na(x)
}
# Abort (via stopifnot) unless `x` is a single non-NA character string.
check_string <- function(x) {
  stopifnot(is_string(x))
}
# paste() wrapper that rejects mixing vectors of different lengths, which
# plain paste() would silently recycle.  Arguments of length 1 are still
# allowed alongside longer vectors (normal recycling of scalars).
#
# @param ... Objects coerced to character and pasted element-wise.
# @param sep Separator placed between pasted pieces.
# @return A character vector; errors when two arguments have different
#   lengths other than 1.
mypaste <- function(..., sep = " ") {
  args <- lapply(list(...), as.character)
  # vapply rather than sapply: guarantees an integer result even for
  # empty or degenerate input, so the length check below is reliable.
  lens <- setdiff(vapply(args, length, integer(1)), 1L)
  if (length(lens) > 1) {
    stop("All character vectors must have the same length (or length 1)")
  }
  paste(..., sep = sep)
}
# Linearly map `x` from the interval `from` onto the interval `to`,
# optionally rounding the result to whole numbers (used for colour-level
# conversion, e.g. 0-255 down to 0-5).
scale <- function(x, from = c(0, 255), to = c(0, 5), round = TRUE) {
  in_span <- from[2] - from[1]
  out_span <- to[2] - to[1]
  rescaled <- (x - from[1]) / in_span * out_span + to[1]
  if (round) {
    return(round(rescaled))
  }
  rescaled
}
# Upper-case the first character of every element of `x`, leaving the
# rest of each string (and vector attributes such as names) untouched.
capitalize <- function(x) {
  first_char <- substr(x, 1, 1)
  substr(x, 1, 1) <- toupper(first_char)
  x
}
# Lay the strings in `x` out in equal-width columns that fit the current
# terminal width; returns newline-terminated row strings.  Width math is
# done on the style-stripped text (strip_style, defined elsewhere in this
# package) so ANSI escape sequences don't count toward column width;
# `%+%` is the package's string-concatenation operator.
multicol <- function(x) {
  xs <- strip_style(x)
  max_len <- max(nchar(xs))
  to_add <- max_len - nchar(xs)
  # Right-pad every entry to the common width.
  # NOTE(review): the padding source string below appears truncated in
  # this copy of the file — it must be at least max_len spaces long.
  x <- paste0(x, substring(" ", 1, to_add))
  screen_width <- getOption("width")
  num_cols <- trunc(screen_width / max_len)
  num_rows <- ceiling(length(x) / num_cols)
  # Pad with empty cells so the matrix fills completely.
  x <- c(x, rep("", num_cols * num_rows - length(x)))
  xm <- matrix(x, ncol = num_cols, byrow = TRUE)
  apply(xm, 1, paste, collapse = "") %+% "\n"
}
# Run gregexpr() on the given arguments and convert each match set into a
# matrix with columns start/end/length, dropping the -1 rows gregexpr
# uses to signal "no match".
re_table <- function(...) {
  lapply(gregexpr(...), function(m) {
    match_len <- attr(m, "match.length")
    tab <- cbind(
      start = as.vector(m),
      end = as.vector(m) + match_len - 1,
      length = match_len
    )
    tab[tab[, "start"] != -1, , drop = FALSE]
  })
}
# Invert match tables from re_table(): for each (match table, string)
# pair, return the start/end/length matrix of the NON-matching gaps.
# Zero-length gaps are dropped unless `empty = TRUE`.
non_matching <- function(table, str, empty = FALSE) {
  mapply(FUN = function(tab, s) {
    n <- base::nchar(s)
    if (nrow(tab) == 0) {
      # No matches at all: the whole string is one gap.
      return(cbind(start = 1, end = n, length = n))
    }
    gap_start <- c(1, tab[, "end"] + 1)
    gap_end <- c(tab[, "start"] - 1, n)
    gaps <- cbind(start = gap_start, end = gap_end,
                  length = gap_end - gap_start + 1)
    if (empty) gaps else gaps[gaps[, "length"] != 0, , drop = FALSE]
  }, table, str, SIMPLIFY = FALSE)
}
# Direction-safe sequence: unlike `from:to`, returns an empty vector when
# the range is empty in the direction of `by` (so no accidental
# "backwards" sequences).  Also exposed as the `%:%` operator.
myseq <- function(from, to, by = 1) {
  stopifnot(by != 0)
  ascending <- by > 0
  is_empty <- if (ascending) to < from else to > from
  if (is_empty) {
    integer()
  } else {
    seq(from, to, by = by)
  }
}
`%:%` <- myseq
# Parse the Emacs version out of the INSIDE_EMACS environment variable
# (e.g. "26.3,comint" -> c(26, 3)).  Returns NA when not inside Emacs or
# the variable contains no digits.
emacs_version <- function() {
  raw <- Sys.getenv("INSIDE_EMACS")
  # Keep only digits and dots; byte-wise to be safe with odd encodings.
  digits <- gsub("[^0-9\\.]+", "", raw, useBytes = TRUE)
  if (digits == "") return(NA_integer_)
  parts <- strsplit(digits, ".", fixed = TRUE)[[1]]
  as.numeric(parts)
}
# TRUE when either Emacs environment marker variable is set (non-empty).
inside_emacs <- function() {
  nzchar(Sys.getenv("EMACS")) || nzchar(Sys.getenv("INSIDE_EMACS"))
}
# TRUE when running inside an RStudio console that supports ANSI colours:
# either RSTUDIO_CONSOLE_COLOR is set to a number, or the rstudioapi
# reports the getConsoleHasColor capability.
rstudio_with_ansi_support <- function() {
  if (Sys.getenv("RSTUDIO", "") == "") {
    return(FALSE)
  }
  cols <- Sys.getenv("RSTUDIO_CONSOLE_COLOR", "")
  if (cols != "" && !is.na(as.numeric(cols))) {
    return(TRUE)
  }
  # Fall back to asking rstudioapi, guarded so a missing package is fine.
  requireNamespace("rstudioapi", quietly = TRUE) &&
    rstudioapi::isAvailable() &&
    rstudioapi::hasFun("getConsoleHasColor")
}
# TRUE when not inside RStudio at all (nothing to wait for), or when the
# RStudio session has finished initializing per the rstudioapi.
rstudio_initialized <- function() {
  env <- Sys.getenv("RSTUDIO")
  if (env %in% c("", "0")) {
    return(TRUE)
  }
  requireNamespace("rstudioapi", quietly = TRUE) &&
    rstudioapi::isAvailable()
}
# The operating-system family R was built for: "unix" or "windows".
os_type <- function() {
  .Platform[["OS.type"]]
}
# Thin wrapper over the package's internal `rstudio` detector object
# (defined elsewhere); returns its detection result.
rstudio_detect <- function() {
  rstudio$detect()
}
# TRUE for a single, non-missing, non-negative whole number ("count").
is_count <- function(x) {
  if (!is.numeric(x) || length(x) != 1 || is.na(x)) {
    return(FALSE)
  }
  as.integer(x) == x && x >= 0
}
# S3 print method for fitted SemiParBIV copula models: prints the copula
# family, per-equation link functions and formulas (the number of
# equations depends on which extra design matrices X3..X5 are present),
# and a one-line fit summary (n, dependence/sigma/nu parameters, edf).
# Helper `pp()` and `pscr0()` are package internals defined elsewhere.
print.SemiParBIV <- function(x, ...){
 # Unpack display strings (copula name, link labels, parameter counts).
 ppR <- pp(x)
 cont1par <- ppR$cont1par
 cont2par <- ppR$cont2par
 cont3par <- ppR$cont3par
 cop <- ppR$cop
 lind <- ppR$lind
 m1l <- ppR$m1l
 m2l <- ppR$m2l
 doff <- "log(\u00B7 - 2)"
 bin.link <- x$bl
 cp <- " theta = "; as.p <- x$theta.a
 main.t <- "\nCOPULA: "
 cat(main.t,cop)
 pscr0(x, type = "copSS")
 # Equations 1 and 2 are always present.
 cat("\n\nEQUATION 1")
 cat("\nLink function for mu.1:",m1l,"\n")
 cat("Formula: "); print(x$formula[[1]])
 cat("\nEQUATION 2")
 cat("\nLink function for mu.2:",m2l,"\n")
 cat("Formula: "); print(x$formula[[2]])
 # Three-equation case: X3 models theta only.
 if(!is.null(x$X3) && is.null(x$X4) ){
 cat("\nEQUATION 3")
 cat("\nLink function for theta:",lind,"\n")
 cat("Formula: "); print(x$formula[[3]])
 }
 # Four-equation case: sigma then theta.
 if(!is.null(x$X3) && !is.null(x$X4) && is.null(x$X5)){
 cat("\nEQUATION 3")
 if(x$margins[2] != "BE") cat("\nLink function for sigma:","log","\n") else cat("\nLink function for sigma:","qlogis","\n")
 cat("Formula: "); print(x$formula[[3]])
 cat("\nEQUATION 4")
 cat("\nLink function for theta:",lind,"\n")
 cat("Formula: "); print(x$formula[[4]])
 }
 # Five-equation case: sigma, nu, then theta.
 if(!is.null(x$X3) && !is.null(x$X4) && !is.null(x$X5)){
 cat("\nEQUATION 3")
 cat("\nLink function for sigma:","log","\n")
 cat("Formula: "); print(x$formula[[3]])
 cat("\nEQUATION 4")
 if(x$margins[2] %in% c("DAGUM","SM")) cat("\nLink function for nu:","log","\n")
 if(x$margins[2] %in% c("TW")) cat("\nLink function for nu:","qlogis","\n")
 cat("Formula: "); print(x$formula[[4]])
 cat("\nEQUATION 5")
 cat("\nLink function for theta:",lind,"\n")
 cat("Formula: "); print(x$formula[[5]])
 }
 cat("\n")
 # Fit summary line, varying with model type and margin parameter count.
 if(x$Model %in% c("B","BPO") && x$margins[2] %in% cont1par) cat("n = ",x$n,cp,format(as.p, digits=3)," total edf = ",format(x$t.edf, digits=3),"\n\n", sep="")
 if(x$Model == "BPO0") cat("n = ",x$n," total edf = ",format(x$t.edf, digits=3),"\n\n", sep="")
 if(x$Model == "BSS") cat("n = ",x$n," n.sel = ",x$n.sel,cp,format(as.p, digits=3),"\ntotal edf = ",format(x$t.edf, digits=3),"\n\n", sep="")
 if(x$Model=="B" && x$margins[2] %in% cont2par ) cat("n = ",x$n," sigma = ",x$sigma2.a, cp, format(as.p, digits=3),"\ntotal edf = ",format(x$t.edf, digits=3),"\n\n", sep="")
 if(x$Model=="B" && x$margins[2] %in% cont3par ) cat("n = ",x$n," sigma = ",x$sigma2.a, " nu = ",x$nu.a, "\ntheta = ", format(as.p, digits=3)," total edf = ",format(x$t.edf, digits=3),"\n\n", sep="")
 invisible(x)
} |
NULL
# Sample `n` points uniformly (with respect to surface area) from a torus
# in 3-space with ring radius 1 and tube radius r = 1/ar, optionally
# adding Gaussian noise via add_noise() (defined elsewhere).
sample_torus_tube <- function(n, ar = 2, sd = 0) {
  r <- 1/ar
  # Rejection-sample the tube angle so the area Jacobian is respected.
  theta <- rs_torus_tube(n = n, r = r)
  phi <- runif(n = n, min = 0, max = 2*pi)
  res <- cbind(
    x = (1 + r * cos(theta)) * cos(phi),
    y = (1 + r * cos(theta)) * sin(phi),
    z = r * sin(theta)
  )
  add_noise(res, sd = sd)
}
# Rejection sampler for the torus tube angle: accepts candidate angles in
# proportion to the surface-area Jacobian (1 + r*cos(theta)) / (2*pi),
# using 1/pi as the dominating density.  Draws batches of size n until
# enough angles are accepted.
rs_torus_tube <- function(n, r) {
  accepted <- c()
  while (length(accepted) < n) {
    candidate <- runif(n, 0, 2*pi)
    target_density <- (1 + r * cos(candidate)) / (2*pi)
    envelope_draw <- runif(n, 0, 1/pi)
    keep <- target_density > envelope_draw
    accepted <- c(accepted, candidate[keep])
  }
  accepted[1:n]
}
# Sample `n` points from two interlocked tori: points are split randomly
# between the two rings, the second ring is shifted and rotated into the
# perpendicular plane, and rows are shuffled before noise is added.
# NOTE(review): sample_torus_tube() as defined above returns 3 columns
# (x, y, z); appending another z column to res_1 and rebuilding res_2
# from only its first two columns looks inconsistent (rbind would see
# 4 vs 3 columns) — this code may have been written against a 2-D circle
# sampler; verify before use.
sample_tori_interlocked <- function(n, ar = 2, sd = 0) {
  r <- 1/ar
  # Random split of the n points across the two tori.
  ns <- as.vector(table(stats::rbinom(n = n, size = 1, prob = .5)))
  res_1 <- sample_torus_tube(n = ns[1], ar = ar, sd = 0)
  res_1 <- cbind(res_1, z = 0)
  res_2 <- sample_torus_tube(n = ns[2], ar = ar, sd = 0)
  # Second torus: shifted by 1 along x and stood up in the x-z plane.
  res_2 <- cbind(x = res_2[, 1] + 1, y = 0, z = res_2[, 2])
  # Interleave the two samples in random order.
  res <- rbind(res_1, res_2)[sample(n), , drop = FALSE]
  add_noise(res, sd = sd)
}
# Sample `n` points uniformly from a flat (Clifford-style) torus embedded
# in 4-space: two independent circles of radius 1 and `ar`, with optional
# Gaussian noise added by add_noise() (defined elsewhere).
sample_torus_flat <- function(n, ar = 1, sd = 0) {
  theta <- runif(n = n, min = 0, max = 2*pi)
  phi <- runif(n = n, min = 0, max = 2*pi)
  res <- cbind(
    x = cos(theta),
    y = sin(theta),
    z = ar * cos(phi),
    w = ar * sin(phi)
  )
  add_noise(res, sd = sd)
} |
# Demo script: simple linear regression of weight on height using the
# built-in `women` data set, prediction at new heights, diagnostic plots,
# and the residual sum of squares.
fit = lm(weight ~ height, data=women)
summary(fit)
range(women$height)
# New heights fall inside the observed range (58-72), so this interpolates.
(ndata = data.frame(height= c(58.5, 60.7)))
(p = predict(fit, newdata = ndata))
cbind(ndata, p)
plot(fit)
# Residual sum of squares.
sum((fitted(fit) - women$weight)^2) |
'dsa01a' |
# Package-private mutable state: plotting style tables, bar widths per
# time interval, and plot margins, all kept in `pkg.env`.
# NOTE(review): the "forecast" style string below is truncated in this
# copy of the file (text from '#' onward was lost — it originally
# referenced an SVG pattern like `url(#...)`); restore before use.
pkg.env <- new.env(parent = emptyenv())
pkg.env$styles_df <-
  rbind(
    actual = c("rgb(64,64,64)", "rgb(64,64,64)"),
    previous =
      c("rgb(166,166,166)", "rgb(166,166,166)"),
    forecast =
      c("url(
    plan = c("white", "rgb(64,64,64)"),
    total_white = c("white", "white")
  )
colnames(pkg.env$styles_df) <- c("fill", "stroke")
# Pixel widths of bars and category slots for each supported interval.
pkg.env$widths <- data.frame(
  interval = c('days', 'weeks', 'months', 'quarters', 'years'),
  bar_width = c(16, 21.33, 32, 37.33, 42.66),
  category_width = c(24, 32, 48, 56, 64)
)
rownames(pkg.env$widths) <- pkg.env$widths$interval
# Reset plot margins to their defaults (pixels).
reset_margins <- function(){
  pkg.env$margins <- list(
    top = 75,
    left = 80
  )
}
reset_margins()
# Accessor for the current margins list.
get_margins <- function(){
  return(pkg.env$margins)
}
# Update margins by name; accepts a named list and/or named arguments.
# Only names already present in pkg.env$margins are allowed, and values
# must be numeric.
set_margins <- function(x = NULL, ...){
  x = append(list(...), x)
  if (!all(names(x) %in% names(pkg.env$margins))) {
    stop(paste('Wrong names in given list! Should be', paste(names(pkg.env$margins), collapse = ' '), '!'))
  }
  if (!all(sapply(x, is.numeric))) {
    stop('Only numeric margin values can be set')
  }
  pkg.env$margins[names(x)] <- x
}
# Default bar/text colour pairs for up to six stacked series.
pkg.env$colors_df <- cbind(
  bar_colors = c(
    "rgb(64,64,64)",
    "rgb(166,166,166)",
    "rgb(70,70,70)",
    "rgb(90,90,90)" ,
    "rgb(110,110,110)",
    "rgb(127,127,127)"
  ),
  text_colors = c("white", "black", "white", "white", "white", "black")
)
# Look up one named style row (fill/stroke pair) in the styles table;
# defaults to the package-level table in pkg.env.
get_style <- function(style, styles_df = pkg.env$styles_df) {
  styles_df[style, ]
}
# Default palette for up to six scatter-plot series.
pkg.env$scatter_colors <-c(
  "rgb(61, 56, 124)",
  "rgb(0, 200, 154)",
  "rgb(113, 103, 177)",
  "rgb(0, 150, 193)" ,
  "rgb(249, 248, 113)",
  "rgb(147, 67, 134)"
)
# Colour for one of up to six scatter series; errors on any other index.
get_scatter_colors <- function(series_number, scatter_colors = pkg.env$scatter_colors){
  stopifnot(series_number %in% 1:6)
  scatter_colors[series_number]
}
# Bar/text colour pair for one of up to six stacked series, returned as
# a two-element named list; errors on any other index.
get_color_stacked <- function(series_number, colors_df = pkg.env$colors_df){
  stopifnot(series_number %in% 1:6)
  series_row <- colors_df[series_number, ]
  list(bar_color = series_row[["bar_colors"]],
       text_color = series_row[["text_colors"]])
}
# Look up bar and category pixel widths for a supported time interval
# from the package-level widths table.
get_interval_width <- function(interval){
  stopifnot(interval %in% c("days", "weeks", "months", "quarters", "years"))
  return(list(
    bar_width = pkg.env$widths[[interval, "bar_width"]],
    category_width = pkg.env$widths[[interval, "category_width"]]
  ))
}
# Replace the stacked-series colour table (must be 6x2 with the expected
# column names).
set_colors <- function(colors_df){
  stopifnot(all(dim(colors_df) == c(6,2)))
  stopifnot(all(dimnames(colors_df)[[2]] %in% c("text_colors", "bar_colors")))
  pkg.env$colors_df <- colors_df
}
# Replace the scatter palette (no validation is performed here).
set_scatter_colors <- function(new_scatter_colors){
  pkg.env$scatter_colors <- new_scatter_colors
}
# Replace the styles table; columns must be named stroke/fill.
set_styles <- function(styles_df){
  stopifnot(colnames(styles_df) %in% c('stroke', 'fill'))
  pkg.env$styles_df <-styles_df
}
# Restore the package-level style and colour tables to their defaults
# (duplicates the initialization at the top of this file).
# NOTE(review): the "forecast" style string below is truncated in this
# copy of the file (text from '#' onward was lost); restore before use.
restore_defaults <- function() {
  pkg.env$styles_df <-
    rbind(
      actual = c("rgb(64,64,64)", "rgb(64,64,64)"),
      previous =
        c("rgb(166,166,166)", "rgb(166,166,166)"),
      forecast =
        c("url(
      plan = c("white", "rgb(64,64,64)"),
      total_white = c("white", "white")
    )
  colnames(pkg.env$styles_df) <- c("fill", "stroke")
  pkg.env$colors_df <- cbind(
    bar_colors = c(
      "rgb(64,64,64)",
      "rgb(166,166,166)",
      "rgb(70,70,70)",
      "rgb(90,90,90)" ,
      "rgb(110,110,110)",
      "rgb(127,127,127)"
    ),
    text_colors = c("white", "black", "white", "white", "white", "black"))
} |
# insight test suite for speedglm models: fit a Poisson speedglm on the
# glmmTMB Salamanders data and exercise insight's accessor functions.
# requiet() (quiet requireNamespace, defined elsewhere) gates the whole
# suite on the needed packages being installed.
if (requiet("testthat") &&
  requiet("insight") &&
  requiet("speedglm") &&
  requiet("glmmTMB")) {
  data(Salamanders)
  # log(cover) needs positive values.
  Salamanders$cover <- abs(Salamanders$cover)
  m1 <-
    speedglm(count ~ mined + log(cover) + sample,
      family = poisson(),
      data = Salamanders
    )
  # Family/model-type detection.
  test_that("model_info", {
    expect_true(model_info(m1)$is_poisson)
    expect_true(model_info(m1)$is_count)
    expect_false(model_info(m1)$is_negbin)
    expect_false(model_info(m1)$is_binomial)
    expect_false(model_info(m1)$is_linear)
  })
  # Predictor extraction; no random effects exist for a speedglm.
  test_that("find_predictors", {
    expect_identical(find_predictors(m1), list(conditional = c("mined", "cover", "sample")))
    expect_identical(
      find_predictors(m1, flatten = TRUE),
      c("mined", "cover", "sample")
    )
    expect_null(find_predictors(m1, effects = "random"))
  })
  test_that("find_random", {
    expect_null(find_random(m1))
  })
  test_that("get_random", {
    expect_warning(get_random(m1))
  })
  test_that("find_response", {
    expect_identical(find_response(m1), "count")
  })
  test_that("get_response", {
    expect_equal(get_response(m1), Salamanders$count)
  })
  test_that("get_predictors", {
    expect_equal(colnames(get_predictors(m1)), c("mined", "cover", "sample"))
  })
  # Poisson link and inverse link.
  test_that("link_inverse", {
    expect_equal(link_inverse(m1)(.2), exp(.2), tolerance = 1e-5)
  })
  test_that("linkfun", {
    expect_equal(link_function(m1)(.2), log(.2), tolerance = 1e-5)
  })
  test_that("get_data", {
    expect_equal(nrow(get_data(m1)), 644)
    expect_equal(
      colnames(get_data(m1)),
      c("count", "mined", "cover", "sample")
    )
  })
  test_that("find_formula", {
    expect_length(find_formula(m1), 1)
    expect_equal(
      find_formula(m1),
      list(conditional = as.formula("count ~ mined + log(cover) + sample")),
      ignore_attr = TRUE
    )
  })
  # Variables vs terms: variables report bare names, terms keep log().
  test_that("find_variables", {
    expect_equal(
      find_variables(m1),
      list(
        response = "count",
        conditional = c("mined", "cover", "sample")
      )
    )
    expect_equal(
      find_variables(m1, flatten = TRUE),
      c("count", "mined", "cover", "sample")
    )
  })
  test_that("n_obs", {
    expect_equal(n_obs(m1), 644)
  })
  test_that("find_parameters", {
    expect_equal(
      find_parameters(m1),
      list(
        conditional = c("(Intercept)", "minedno", "log(cover)", "sample")
      )
    )
    expect_equal(nrow(get_parameters(m1)), 4)
    expect_equal(
      get_parameters(m1)$Parameter,
      c("(Intercept)", "minedno", "log(cover)", "sample")
    )
  })
  test_that("is_multivariate", {
    expect_false(is_multivariate(m1))
  })
  test_that("find_terms", {
    expect_equal(
      find_terms(m1),
      list(
        response = "count",
        conditional = c("mined", "log(cover)", "sample")
      )
    )
  })
  test_that("find_algorithm", {
    expect_equal(find_algorithm(m1), list(algorithm = "eigen"))
  })
  test_that("find_statistic", {
    expect_identical(find_statistic(m1), "z-statistic")
  })
} |
library(EpiEstim)
library(incidence)
library(data.table)
# Build a per-region table of the latest Rt estimate plus an HTML display
# cell (icon + rank prefix) from a wide incidence table whose first
# column is the date and remaining columns are regions.
# NOTE(review): rows 48:51 are hard-coded overrides (forced to 0) —
# confirm these indices against the input's column order.  The display
# string literal below is also truncated in this copy of the file (text
# from '#' onward was lost); restore before use.
createRtColumn <- function(data) {
  RtTable <- t(data.frame(sapply(
    colnames(data)[2:ncol(data)],
    function(pref) {
      createRtValue(data, pref)
    }
  )))
  colnames(RtTable) <- c("Rt", "display")
  RtTable <- data.table(RtTable, keep.rownames = TRUE)
  RtTable[48:51, Rt := 0]
  RtTable[48:51, display := "0 <i style='color:
  RtTable[, rank := sprintf("%02d", rank(Rt, ties.method = "first"))]
  RtTable[, display := paste0(rank, "|", display)]
  RtTable
}
# Estimate the latest Rt for one region and format it for display.
#
# data   : wide incidence table (date column + one column per region).
# region : region column name to estimate.
# Returns c(latest Rt, display string), or c(NA, NA) when estimation
# fails, so the caller's sapply() always sees a two-element result.
createRtValue <- function(data, region) {
  # Serial-interval parameters (days) — TODO confirm the source of these.
  mean_si <- 4.6
  std_si <- 2.6
  tryCatch(expr = {
    incid <- createRegionIncidence(data, region)
    res <- createEstimatedResultFromIncid(incid, mean_si, std_si)
    values <- createLatestRtFromEstimated(res)
    displayValue <- paste(values[2], createSymbolFromDifferenceValue(values))
    c(values[2], displayValue)
  }, error = function(e) {
    # Fixed: previously returned a length-1 NA, which broke the
    # two-column shape createRtColumn() builds from sapply().
    c(NA, NA)
  })
}
# Build an incidence object for one region's daily counts.
# NOTE(review): setDT() converts `data` to a data.table by reference,
# i.e. it mutates the caller's object — confirm this is acceptable.
createRegionIncidence <- function(data, region) {
  setDT(data)
  incid <- incidence::as.incidence(
    rowSums(data[, region, with = FALSE]),
    dates = data$date
  )
  incid
}
# Length of the trailing run of zeros in `data` (the counter resets every
# time a nonzero value is seen, so what survives is the final run).
continuousZero <- function(data) {
  run_length <- 0
  for (value in data) {
    run_length <- if (value == 0) run_length + 1 else 0
  }
  run_length
}
# Run EpiEstim's parametric-SI Rt estimation on an incidence object and
# return a rounded data.table of estimates, with dates and incidence
# attached.  If the series ends in more than 7 consecutive zero days, the
# table is cut just past the last nonzero day and the final Rt forced to 0.
createEstimatedResultFromIncid <- function(incid, mean_si, std_si) {
  # Trailing zero-run length and the index of the last nonzero count.
  continuous <- continuousZero(incid$counts)
  index <- length(incid$counts) - continuous
  res <- suppressMessages(
    suppressWarnings(
      EpiEstim::estimate_R(incid,
        method = "parametric_si",
        config = make_config(list(
          mean_si = mean_si,
          std_si = std_si,
          t_end = max(incid$dates)
        ))
      )
    )
  )
  dt <- data.table::as.data.table(res$R)
  cols <- colnames(dt)
  # Round every estimate column to 2 decimals.
  dt[, (cols) := lapply(.SD, function(x) {
    return(round(x, 2))
  }), .SDcols = cols]
  dt$dates <- res$dates[res$R$t_end]
  dt$Incidence <- res$I[res$R$t_end]
  if (continuous > 7) {
    # Truncate after the last nonzero day and zero the final estimate
    # (column 3 is assumed to hold the mean Rt — TODO confirm).
    dt <- dt[1:(index + 1)]
    dt[nrow(dt), 3] <- 0
  }
  dt
}
# The two most recent mean Rt estimates, c(previous, latest).
createLatestRtFromEstimated <- function(res) {
  mean_rt <- res[["Mean(R)"]]
  utils::tail(mean_rt, n = 2)
}
# Map the change between the two most recent Rt estimates onto a Font
# Awesome trend icon (HTML <i> snippet): double arrows for |change| >=
# 0.2, single arrows for smaller moves, a lock for no change.
#
# values : length-2 numeric, c(previous Rt, latest Rt).
# Returns an HTML string.
createSymbolFromDifferenceValue <- function(values) {
  difference <- values[2] - values[1]
  # NOTE(review): the original colour literals were truncated in this
  # copy of the file (everything from '#' onward was lost); these hex
  # values are placeholders — restore the project's palette.
  upColor <- "#d9534f"   # TODO confirm original colour
  tieColor <- "#777777"  # TODO confirm original colour
  downColor <- "#5cb85c" # TODO confirm original colour
  if (difference >= 0.2) {
    return(
      sprintf(
        "<i style='color:%s;' class='fa fa-angle-double-up'></i>",
        upColor
      )
    )
  }
  if (difference > 0 && difference < 0.2) {
    return(
      sprintf(
        "<i style='color:%s;' class='fa fa-angle-up'></i>",
        upColor
      )
    )
  }
  if (difference == 0) {
    return(
      sprintf(
        "<i style='color:%s;' class='fa fa-lock'></i>",
        tieColor
      )
    )
  }
  if (difference > -0.2 && difference < 0) {
    return(
      sprintf(
        "<i style='color:%s;' class='fa fa-angle-down'></i>",
        downColor
      )
    )
  }
  # Fixed: this guard read `difference <= 0.2`, which was trivially TRUE
  # whenever control reached it; the intended condition is <= -0.2.
  if (difference <= -0.2) {
    return(
      sprintf(
        "<i style='color:%s;' class='fa fa-angle-double-down'></i>",
        downColor
      )
    )
  }
}
getOMLConfig() |
# Posterior probabilities of the three PIN-model information states
# (no-news, good-news, bad-news) for each trading day, given model
# parameters and daily buy/sell counts.
#
# param    : named parameter vector (alpha, delta, mu, epsilon_b,
#            epsilon_s); validated/normalized by param_check() (defined
#            elsewhere in this package).
# numbuys  : vector of daily buy counts.
# numsells : vector of daily sell counts (same length as numbuys).
# Returns a matrix with columns no/good/bad of class c("matrix",
# "posterior").
posterior <- function(param = NULL, numbuys = NULL, numsells = NULL) {
  param <- param_check(param)
  if(is.null(numbuys)) stop("Missing data for 'numbuys'")
  if(is.null(numsells)) stop("Missing data for 'numsells'")
  if(length(numbuys) != length(numsells)) stop("Unequal lengths for 'numbuys' and 'numsells'")
  # Log-likelihood building blocks: informed trading adds mu to one side's
  # arrival rate, so the likelihood ratios involve log1p(mu/epsilon).
  rat1 <- param["mu"]/param["epsilon_s"]
  rat2 <- param["mu"]/param["epsilon_b"]
  rat1log1p <- log1p(rat1)
  rat2log1p <- log1p(rat2)
  # Prior state probabilities.
  prob_no <- 1.0 - param["alpha"]
  prob_good <- param["alpha"] * (1.0 - param["delta"])
  prob_bad <- param["alpha"] * param["delta"]
  # Per-day log-likelihood terms for bad-news (e1) and good-news (e2)
  # relative to the no-news state.
  e1 <- -param["mu"] + numsells * rat1log1p
  e2 <- -param["mu"] + numbuys * rat2log1p
  # Log-sum-exp trick: subtract the per-day max before exponentiating to
  # avoid overflow in the denominator.
  e_max <- pmax.int(e1, e2, 0)
  denom_helper <- e_max + log(prob_no * exp(-e_max) + prob_good * exp(e2-e_max) + prob_bad * exp(e1-e_max))
  no_prob <- log(prob_no) - denom_helper
  good_prob <- log(prob_good) + e2 - denom_helper
  bad_prob <- log(prob_bad) + e1 - denom_helper
  res <- cbind(exp(no_prob), exp(good_prob), exp(bad_prob))
  colnames(res) <- c("no", "good", "bad")
  class(res) <- c("matrix", "posterior")
  res
} |
# One update step of dynamic (online) logistic regression: tune the
# forgetting factor by maximizing the Laplace approximation, then take a
# single Newton step from the previous coefficient estimate.
#
# x.t            : covariate row(s) at time t (coerced to a matrix).
# y.t            : response at time t.
# betahat.tm1    : coefficient estimate from time t-1.
# varbetahat.tm1 : coefficient covariance from time t-1.
# tune.mat       : candidate forgetting-factor rows scored by laplace.fn
#                  (defined elsewhere, as is dlogr.predict).
# Returns list(betahat.t, varbetahat.t, laplace.t).
dlogr.step <-
function (x.t,y.t,betahat.tm1,varbetahat.tm1,tune.mat) {
  if (!is.matrix(x.t)) {
    dim(x.t) <- c(1,length(x.t))
  }
  # Score every candidate tuning row and keep the best.
  temp <- apply(tune.mat,1,laplace.fn,x.t=x.t,y.t=y.t,betahat.tm1=betahat.tm1,varbetahat.tm1=varbetahat.tm1)
  lambda <- tune.mat[which.max(temp),]
  # Inflate the prior variance by the chosen forgetting factor.
  Rhat.t <- varbetahat.tm1
  diag(Rhat.t) <- diag(Rhat.t) / lambda
  laplace.t <- max(temp)
  yhat.t <- dlogr.predict(x.t,betahat.tm1)
  # Newton step: gradient (Del1) and Hessian (Del2) of the penalized
  # log-likelihood at the previous estimate.
  Del1 <- t(x.t) %*% (y.t - yhat.t)
  Del2 <- -solve(Rhat.t) - (t(x.t) * matrix(rep(yhat.t*(1-yhat.t),dim(x.t)[2]),nrow=dim(x.t)[2],byrow=TRUE)) %*% x.t
  betahat.t <- betahat.tm1 - (solve(Del2) %*% Del1)
  varbetahat.t <- solve(-Del2)
  # Guard against tiny negative diagonal entries from numerical error.
  diag(varbetahat.t) <- abs(diag(varbetahat.t))
  return(list(betahat.t=betahat.t,varbetahat.t=varbetahat.t,laplace.t=laplace.t))
} |
# K-fold cross-validated deviance for a smooth-backfitting fit: rows are
# assigned to folds at random, sback.fit() (defined elsewhere) is trained
# on each training split and evaluated on the held-out split with dev().
# Returns the vector of per-fold deviances (NA for folds whose fit failed).
calculate.CV <- function(formula, data, offset = NULL, weights = NULL, kernel = c("Gaussian", "Epanechnikov"), kbin = 25, family = c("gaussian", "binomial", "poisson"), KfoldCV = 5) {
  family <- match.arg(family)
  kernel <- match.arg(kernel)
  n <- nrow(data)
  if(is.null(weights)) {
    weights <- rep(1, n)
  }
  ECM <- vector(length = 0)
  # Random fold assignment via uniform draws cut at equal break points.
  random <- runif(n, min = 0, max = 1)
  factor <- c(0:KfoldCV)/KfoldCV
  groups <- cut(random, factor)
  for (x in levels(groups)) {
    # NOTE(review): if a fold happens to be empty, `-which(...)` is
    # `-integer(0)`, which selects ZERO rows rather than all rows —
    # consider logical indexing (groups != x) instead.
    train <- data[-which(groups == x),]
    test <- data[which(groups == x),]
    wtrain <- weights[-which(groups == x)]
    wtest <- weights[which(groups == x)]
    # NULL offsets stay NULL under these subsets, which sback.fit accepts.
    offtrain <- offset[-which(groups == x)]
    offtest <- offset[which(groups == x)]
    mod <- sback.fit(formula = formula, data = train, offset = offtrain, weights = wtrain, kernel = kernel, kbin = kbin, family = family, newdata = test, newoffset = offtest, pred = TRUE)
    if(mod$fit$err == 0) {
      # Held-out deviance on the fold's response values.
      response <- as.character(attr(terms(formula), "variables")[2])
      ECM <- append(ECM, dev(test[,response], mod$pfitted.values, wtest, family = family))
    } else {
      ECM <- append(ECM, NA)
    }
  }
  ECM
} |
# Tests for gratia::penalty() on a simple GAM, covering the default call,
# penalty rescaling, and factor-by smooths.
library("testthat")
library("gratia")
library("mgcv")
dat <- data_sim("eg4", n = 400, seed = 42)
m <- gam(y ~ s(x0) + s(x1) + s(x2, by = fac),
         data = dat, method = "REML")
test_that("penalty() works with a simple GAM", {
  expect_silent(p <- penalty(m))
  expect_s3_class(p, "penalty_df")
  expect_named(p, c("smooth", "type", "penalty", "row", "col", "value"))
})
# Fixed typo in the test description ("resclaing" -> "rescaling").
test_that("penalty() rescaling works with a simple GAM", {
  expect_silent(p <- penalty(m, rescale = TRUE))
  expect_s3_class(p, "penalty_df")
  expect_named(p, c("smooth", "type", "penalty", "row", "col", "value"))
})
test_that("penalty() works with a factor by smooth", {
  expect_silent(p <- penalty(m, smooth = "s(x2):fac2"))
  expect_s3_class(p, "penalty_df")
  expect_named(p, c("smooth", "type", "penalty", "row", "col", "value"))
})
test_that("penalty() rescaling works with a factor by smooth", {
  expect_silent(p <- penalty(m, smooth = "s(x2):fac2", rescale = TRUE))
  expect_s3_class(p, "penalty_df")
  expect_named(p, c("smooth", "type", "penalty", "row", "col", "value"))
})
# Shiny input: a pair of text fields backed by bootstrap-datepicker for
# selecting a start/end date range.
#
# inputId/label : usual Shiny input id and label.
# start, end    : initial dates (coerced via dateYMD, defined elsewhere).
# min, max      : selectable date bounds.
# format        : bootstrap-datepicker display format string.
# startview     : initial calendar view ("month", "year", "decade").
# weekstart     : first day of week (0 = Sunday).
# language      : datepicker locale code.
# separator     : text shown between the two fields.
# width         : CSS width of the whole control.
# autoclose     : close the picker after a date is selected.
dateRangeInput <- function(inputId, label, start = NULL, end = NULL,
  min = NULL, max = NULL, format = "yyyy-mm-dd", startview = "month",
  weekstart = 0, language = "en", separator = " to ", width = NULL,
  autoclose = TRUE) {

  # Normalize all date arguments to "yyyy-mm-dd" strings.
  start <- dateYMD(start, "start")
  end <- dateYMD(end, "end")
  min <- dateYMD(min, "min")
  max <- dateYMD(max, "max")
  # Restore values from a bookmarked state, if any.
  restored <- restoreInput(id = inputId, default = list(start, end))
  start <- restored[[1]]
  end <- restored[[2]]
  attachDependencies(
    div(id = inputId,
      class = "shiny-date-range-input form-group shiny-input-container",
      style = css(width = validateCssUnit(width)),
      shinyInputLabel(inputId, label),
      # The two date fields share the same picker configuration; only the
      # initial date differs.
      div(class = "input-daterange input-group input-group-sm",
        tags$input(
          class = "form-control",
          type = "text",
          `aria-labelledby` = paste0(inputId, "-label"),
          title = paste("Date format:", format),
          `data-date-language` = language,
          `data-date-week-start` = weekstart,
          `data-date-format` = format,
          `data-date-start-view` = startview,
          `data-min-date` = min,
          `data-max-date` = max,
          `data-initial-date` = start,
          `data-date-autoclose` = if (autoclose) "true" else "false"
        ),
        # Separator rendered between the two fields; the extra classes
        # keep it styled under both Bootstrap 3 and 4.
        span(class = "input-group-addon input-group-prepend input-group-append",
          span(class = "input-group-text",
            separator
          )
        ),
        tags$input(
          class = "form-control",
          type = "text",
          `aria-labelledby` = paste0(inputId, "-label"),
          title = paste("Date format:", format),
          `data-date-language` = language,
          `data-date-week-start` = weekstart,
          `data-date-format` = format,
          `data-date-start-view` = startview,
          `data-min-date` = min,
          `data-max-date` = max,
          `data-initial-date` = end,
          `data-date-autoclose` = if (autoclose) "true" else "false"
        )
      )
    ),
    datePickerDependency()
  )
} |
# Reactive update step for an l_serialaxes widget rendered in Shiny.
# Rebuilds (or incrementally updates) the display grob from the current
# input state: layout (parallel/radial), Andrews curves, brushing/clicking,
# linking across panels, colour/alpha/size edits, and de/re-activation.
# Returns a list with the display grob, the data grob, and bookkeeping
# state (brushId, linking info, button counters) for the next invocation.
# NOTE(review): heavy use of `<<-` inside lapply() to accumulate per-curve
# coordinates; statement order is significant throughout.
loon_reactive.l_serialaxes <- function(loon.grob, output.grob, linkingInfo, buttons, position, selectBy,
linkingGroup, input, colorList, tabPanelName, outputInfo) {
plotBrush <- input$plotBrush
plotClick <- input$plotClick
loonWidgetsInfo <- outputInfo$loonWidgetsInfo
# "pull" button: adopt the linked state from another panel.
pull <- input[[paste0(tabPanelName, "pull")]]
# No output grob yet means this is the very first draw.
initialDisplay <- is.null(output.grob)
# ---- Pull path: panel not in focus, or pull pressed -> sync from linkingInfo
if(!initialDisplay && (input[["navBarPage"]] != tabPanelName || pull > buttons["pull"])) {
if(pull > buttons["pull"]) {
buttons["pull"] <- pull
linkingGroup <- isolate(input[[paste0(tabPanelName, "linkingGroup")]])
}
if(linkingGroup != "none") {
linkedInfo <- linkingInfo[[linkingGroup]]
# Map this panel's observations onto the shared linking keys.
order <- match(loonWidgetsInfo$linkingKey, linkedInfo$linkingKey)
modifiedLinkingInfo <- set_linkingInfo(
loon.grob = loon.grob,
output.grob = output.grob,
linkedInfo = linkedInfo,
linkedStates = input[[paste0(tabPanelName, "linkedStates")]],
tabPanelName = tabPanelName,
order = order,
loonWidgetsInfo = loonWidgetsInfo
)
selected <- linkedInfo$selected
brushId <- which(selected)
selectByColor <- linkedInfo$selectByColor
output.grob <- modifiedLinkingInfo$output.grob
loon.grob <- modifiedLinkingInfo$loon.grob
loonWidgetsInfo <- modifiedLinkingInfo$loonWidgetsInfo
} else {
# Not linked: keep whatever was selected last time.
brushId <- outputInfo$brushId
selectByColor <- outputInfo$selectByColor
}
} else {
# ---- Full redraw path: rebuild the display grob from loon.grob ----
output.grob <- loon.grob
loonColor <- loonWidgetsInfo$loonColor
axesLayoutInShiny <- input[[paste0(tabPanelName, "axesLayout")]]
axesLayoutInLoon <- loonWidgetsInfo$axesLayout
# Checkbox group controlling guides/axes/labels/area/Andrews display.
plotShow <- input[[paste0(tabPanelName, "plot")]]
showGuides <- "showGuides" %in% plotShow
showAxes <- "showAxes" %in% plotShow
showAxesLabels <- "showAxesLabels" %in% plotShow
showLabels <- "showLabels" %in% plotShow
showArea <- "showArea" %in% plotShow
andrews <- "andrews" %in% plotShow
# ---- Title ----
title <- loonWidgetsInfo$title
# The title grob may have either of two names depending on how it was built.
titleGpath <- if(!is.null(grid::getGrob(output.grob, "title"))) {
"title"
} else {
"title: textGrob arguments"
}
loonDefaultSerialaxesArgs <- loon_defaultSerialaxesSettings_args()
if(showLabels & title != "") {
titleGrob <- grid::textGrob(
name = titleGpath,
label = title,
y = unit(1, "npc") - unit(.8, "lines"),
gp = gpar(fontsize = loonDefaultSerialaxesArgs$titleFontsize,
fontface="bold"),
vjust = .5
)
} else {
# Placeholder (empty) grob keeps the gPath valid when the title is hidden.
titleGrob <- grob(name = titleGpath)
}
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = titleGpath,
newGrob = titleGrob
)
# ---- Data scaling (per variable / observation / whole data / none) ----
scaling <- input[[paste0(tabPanelName, "scaling")]]
scaledActiveData <- switch(scaling,
"variable" = loonWidgetsInfo$variableScaledActiveData,
"observation" = loonWidgetsInfo$observationScaledActiveData,
"data" = loonWidgetsInfo$dataScaledActiveData,
"none" = loonWidgetsInfo$noneScaledActiveData)
N <- loonWidgetsInfo$N
whichIsDeactive <- which(!loonWidgetsInfo$active)
len.xaxis <- loonWidgetsInfo$lenSeqName
axesLabels <- loonWidgetsInfo$seqName
andrewsSeriesLength <- loonWidgetsInfo$andrewsSeriesLength
# ---- Andrews curves: project data through the Fourier basis, rescale to [0,1]
if(andrews) {
axesLabels <- round(seq(-pi, pi, length.out = len.xaxis), 2)
fourierTrans <- loonWidgetsInfo$fourierTrans
scaledActiveData <- as.matrix(scaledActiveData) %*% fourierTrans$matrix
dataRange <- range(scaledActiveData, na.rm = TRUE)
# Guard against a zero range (all values identical).
d <- if(diff(dataRange) == 0) 1 else diff(dataRange)
scaledActiveData <- (scaledActiveData - min(scaledActiveData, na.rm = TRUE))/d
}
# ---- Parallel-axes layout ----
if(axesLayoutInShiny == "parallel") {
xaxis <- seq(0, 1, length.out = len.xaxis)
# If the Shiny layout differs from the loon layout, the grob slot to
# overwrite carries the *other* layout's name.
axesGpath <- if(axesLayoutInShiny == axesLayoutInLoon) "parallelAxes" else "radialAxes"
yaxis <- grid.pretty(loonWidgetsInfo$ylim)
len.yaxis <- length(yaxis)
guidesGrob <- if(showGuides) {
# Background rect + vertical axis lines + horizontal guide lines.
gTree(
children = do.call(
gList,
lapply(seq(len.xaxis + len.yaxis + 1),
function(i) {
if(i == 1){
grid::rectGrob(gp = gpar(col = NA, fill = loonDefaultSerialaxesArgs$guidesBackground),
name = "bounding box")
} else if( i > 1 && i<= (1 + len.xaxis)){
condGrob(
test = showAxes,
grobFun = grid::linesGrob,
name = paste("x axis", i - 1),
x = unit(rep(xaxis[i - 1],2 ), "native"),
y = unit(c(0, 1), "native"),
gp = gpar(col = loonDefaultSerialaxesArgs$lineColor1,
lwd = loonDefaultSerialaxesArgs$guideLineWidth)
)
} else {
grid::linesGrob(
x = unit(c(0, 1), "native"),
y = unit(rep(yaxis[i - (1 + len.xaxis)],2 ), "native"),
gp = gpar(col =loonDefaultSerialaxesArgs$lineColor1,
lwd = loonDefaultSerialaxesArgs$guideLineWidth),
name = paste("y axis", i - (1 + len.xaxis))
)
}
})),
name = "guides"
)
} else {
# No guides: only the vertical axis lines (when showAxes is on).
gTree(
children = do.call(
gList,
lapply(seq(len.xaxis),
function(i) {
condGrob(
test = showAxes,
grobFun = grid::linesGrob,
name = paste("x axis", i),
x = unit(rep(xaxis[i],2 ), "native"),
y = unit(c(0, 1), "native"),
gp = gpar(col = loonDefaultSerialaxesArgs$lineColor2,
lwd = loonDefaultSerialaxesArgs$guideLineWidth)
)
}
)
),
name = "guides"
)
}
loonWidgetsInfo$showGuides <- showGuides
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = "guides",
newGrob = guidesGrob
)
# Axis labels along the bottom of the parallel axes.
labelsGrob <- gTree(
children = do.call(
gList,
lapply(seq(len.xaxis),
function(i) {
condGrob(
test = showAxesLabels,
grobFun = grid::textGrob,
label = axesLabels[i],
name = paste("label", i),
x = unit(xaxis[i], "native"),
y = unit(0, "npc") + unit(1.2, "lines"),
gp = gpar(fontsize = loonDefaultSerialaxesArgs$labelFontsize), vjust = 1
)
}
)
),
name = "labels"
)
loonWidgetsInfo$showLabels <- showLabels
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = "labels",
newGrob = labelsGrob
)
# Andrews curves are evaluated on a denser x grid than the raw variables.
if(andrews) {
len.xaxis <- andrewsSeriesLength
x.axis <- seq(0, 1, length.out = len.xaxis)
} else {
x.axis <- xaxis
}
# One polyline (or filled polygon) per observation; coordinates are also
# stashed into loonWidgetsInfo$x/y (via <<-) for later brush hit-testing.
axesGrob <- gTree(
children = gList(
do.call(
gList,
lapply(seq_len(N),
function(i){
if (showArea) {
xx <- unit(c(x.axis, rev(x.axis)), "native")
yy <- unit(c(scaledActiveData[i, ], rep(0, len.xaxis)), "native")
loonWidgetsInfo$x[[i]] <<- xx
loonWidgetsInfo$y[[i]] <<- yy
grid::polygonGrob(
x = xx,
y = yy,
name = paste("polyline: showArea", i),
gp = gpar(fill = loonWidgetsInfo$color[i],
col = NA)
)
} else {
xx <- unit(x.axis, "native")
yy <- unit(scaledActiveData[i, ], "native")
loonWidgetsInfo$x[[i]] <<- xx
loonWidgetsInfo$y[[i]] <<- yy
grid::linesGrob(
x = xx,
y = yy,
name = paste("polyline", i),
gp = gpar(
col = loonWidgetsInfo$color[i],
lwd = if(is.na(loonWidgetsInfo$size[i])) loonDefaultSerialaxesArgs$linewidthDefault else loonWidgetsInfo$size[i]
)
)
}
}
)
)
),
name = axesGpath
)
loonWidgetsInfo$showAxes <- showAxes
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = axesGpath,
newGrob = axesGrob
)
# ---- Radial-axes layout ----
} else if(axesLayoutInShiny == "radial") {
xpos <- unit(0.5, "native")
ypos <- unit(0.5, "native")
radius <- loonDefaultSerialaxesArgs$radius
# One spoke per variable, evenly spaced around the circle.
angle <- seq(0, 2*pi, length.out = len.xaxis + 1)[1:len.xaxis]
axesGpath <- if(axesLayoutInShiny == axesLayoutInLoon) "radialAxes" else "parallelAxes"
guidesGrob <- if(showGuides) {
gTree(
children = gList(
grid::rectGrob(gp = gpar(col = NA, fill = loonDefaultSerialaxesArgs$guidesBackground),
name = "bounding box"),
grid::polygonGrob(xpos + unit(radius * cos(seq(0, 2*pi, length=101)), "npc"),
ypos + unit(radius * sin(seq(0, 2*pi, length=101)), "npc"),
gp = gpar(fill = NA, col = l_getOption("guidelines"),
lwd = loonDefaultSerialaxesArgs$guideLineWidth),
name = "bounding line"
),
condGrob(
test = showAxes,
grobFun = grid::polylineGrob,
name = "axes",
x = xpos + unit(c(rep(0, len.xaxis) ,radius * cos(angle)), "npc"),
y = ypos + unit(c(rep(0, len.xaxis) ,radius * sin(angle)), "npc"),
id = rep(1:len.xaxis, 2),
gp = gpar(col = loonDefaultSerialaxesArgs$lineColor1,
lwd = loonDefaultSerialaxesArgs$guideLineWidth)
)
),
name = "guides"
)
} else {
gTree(
children = gList(
condGrob(
test = showAxes,
grobFun = grid::polylineGrob,
name = "axes",
x = unit(c(rep(0, len.xaxis) ,radius * cos(angle)), "npc") + xpos,
y = unit(c(rep(0, len.xaxis) ,radius * sin(angle)), "npc") + ypos,
id = rep(1:len.xaxis, 2),
gp = gpar(col = loonDefaultSerialaxesArgs$lineColor2,
lwd = loonDefaultSerialaxesArgs$guideLineWidth)
)
), name = "guides"
)
}
loonWidgetsInfo$showGuides <- showGuides
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = "guides",
newGrob = guidesGrob
)
# Variable labels just outside the circle, at each spoke's angle.
labelsGrob <- gTree(
children = do.call(
gList,
lapply(seq(len.xaxis),
function(i) {
condGrob(
test = showAxesLabels,
grobFun = grid::textGrob,
name = paste("label", i),
label = axesLabels[i],
x = unit((radius + loonDefaultSerialaxesArgs$radiusOffset) * cos(angle[i]), "npc") + xpos,
y = unit((radius + loonDefaultSerialaxesArgs$radiusOffset) * sin(angle[i]), "npc") + ypos,
gp = gpar(fontsize = loonDefaultSerialaxesArgs$labelFontsize), vjust = 0.5
)
}
)
),
name = "labels"
)
loonWidgetsInfo$showLabels <- showLabels
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = "labels",
newGrob = labelsGrob
)
if(andrews) {
angle <- seq(0, 2*pi, length.out = andrewsSeriesLength + 1)[1:andrewsSeriesLength]
}
# One closed radial polyline/polygon per observation; as above, the
# coordinates are saved with <<- for brush hit-testing.
axesGrob <- gTree(
children = do.call(
gList,
lapply(seq_len(N),
function(i){
radialxais <- radius * scaledActiveData[i, ] * cos(angle)
radialyais <- radius * scaledActiveData[i, ] * sin(angle)
xx <- xpos + unit(c(radialxais, radialxais[1]), "npc")
yy <- ypos + unit(c(radialyais, radialyais[1]), "npc")
loonWidgetsInfo$x[[i]] <<- xx
loonWidgetsInfo$y[[i]] <<- yy
if(showArea){
grid::polygonGrob(
x = xx,
y = yy,
name = paste("polyline: showArea", i),
gp = gpar(fill = loonWidgetsInfo$color[i], col = NA)
)
} else {
grid::linesGrob(
x = xx,
y = yy,
name = paste("polyline", i),
gp = gpar(
col = loonWidgetsInfo$color[i],
lwd = if(is.na(loonWidgetsInfo$size[i])) loonDefaultSerialaxesArgs$linewidthDefault else loonWidgetsInfo$size[i]
)
)
}
}
)
),
name = axesGpath
)
loonWidgetsInfo$showAxes <- showAxes
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = axesGpath,
newGrob = axesGrob
)
} else NULL
# ---- Viewports and native coordinates (needed to interpret brush events) ----
defaultSerialaxesSettings <- get_defaultSerialaxesSettings(axesLayoutInShiny)
vp <- grid::vpStack(
grid::plotViewport(margins = loonDefaultSerialaxesArgs$margins, name = "plotViewport"),
grid::dataViewport(xscale = defaultSerialaxesSettings$xscale,
yscale = defaultSerialaxesSettings$yscale,
name = "dataViewport")
)
grid::pushViewport(vp)
native.x <- list()
native.y <- list()
for(i in seq(N)) {
native.x[[i]] <- grid::convertX(loonWidgetsInfo$x[[i]], unitTo = "native", TRUE)
native.y[[i]] <- grid::convertY(loonWidgetsInfo$y[[i]], unitTo = "native", TRUE)
}
loonWidgetsInfo$native.x <- native.x
loonWidgetsInfo$native.y <- native.y
# Brush/click coordinates arrive in the plot domain; `%||%` falls back to
# the default domain when no event has occurred yet.
offset <- get_offset(vp = vp,
l = plotBrush$domain$left %||% plotClick$domain$left %||% -0.04,
r = plotBrush$domain$right %||% plotClick$domain$right %||% 1.04,
b = plotBrush$domain$bottom %||% plotClick$domain$bottom %||% -0.04,
t = plotBrush$domain$top %||% plotClick$domain$top %||% 1.04)
loonWidgetsInfo$offset <- offset
# ---- Selection via brushing / clicking ----
brushId <- if(initialDisplay) {
outputInfo$brushId
} else {
if(is.null(plotBrush) && is.null(plotClick)) {
outputInfo$brushId
} else {
if(!is.null(position))
get_brushId(
loon.grob = output.grob,
coord = list(
x = loonWidgetsInfo$x,
y = loonWidgetsInfo$y
),
position = position,
brushInfo = plotBrush,
vp = vp,
offset = offset,
clickInfo = plotClick,
N = N
)
}
}
# ---- Sticky selection and select-by-colour ----
sticky <- input[[paste0(tabPanelName, "sticky")]]
selectByColor <- input[[paste0(tabPanelName, "selectByColor")]]
if(sticky == "off") {
if(!is.null(selectByColor)) {
# Remember the brush so it can be restored when colour-select is cleared.
loonWidgetsInfo$lastSelection <- if(!is.null(plotBrush) || !is.null(plotClick)) brushId else integer(0)
brushId <- which(loonWidgetsInfo$color %in% selectByColor)
} else {
if(!is.null(outputInfo$selectByColor)) brushId <- loonWidgetsInfo$lastSelection
}
} else {
# Sticky: new selections accumulate on top of existing ones.
if(!is.null(selectByColor)) {
whichIsSelected <- union(which(loonWidgetsInfo$color %in% selectByColor), which(loonWidgetsInfo$selected))
} else {
whichIsSelected <- which(loonWidgetsInfo$selected)
}
if(is.null(plotBrush)) {
brushId <- whichIsSelected
} else {
brushId <- union(whichIsSelected, brushId)
}
}
# ---- Static select buttons: all / none / invert ----
selectStaticAll <- input[[paste0(tabPanelName, "selectStaticAll")]]
selectStaticNone <- input[[paste0(tabPanelName, "selectStaticNone")]]
selectStaticInvert <- input[[paste0(tabPanelName, "selectStaticInvert")]]
if(selectStaticAll > buttons["all"]) {
buttons["all"] <- selectStaticAll
brushId <- seq(N)
} else if(selectStaticNone > buttons["none"]) {
buttons["none"] <- selectStaticNone
brushId <- integer(0)
} else if(selectStaticInvert > buttons["invert"]) {
buttons["invert"] <- selectStaticInvert
brushId <- setdiff(seq(N), brushId)
} else NULL
# Deactivated observations can never be selected.
brushId <- setdiff(brushId, whichIsDeactive)
loonWidgetsInfo$selected <- rep(FALSE, N)
loonWidgetsInfo$selected[brushId] <- TRUE
# Highlight the selection on the display grob only (not the data grob).
output.grob <- set_color_grob(
loon.grob = output.grob,
index = brushId,
newColor = select_color(),
axesGpath = axesGpath
)
# ---- Colour modification (picker + per-colour quick buttons) ----
colorApply <- input[[paste0(tabPanelName, "colorApply")]]
colorListButtons <- setNames(
lapply(colorList, function(col) input[[paste0(tabPanelName, col)]]),
colorList
)
colorPicker <- isolate(input[[paste0(tabPanelName, "colorPicker")]])
if(colorApply > buttons["colorApply"]) {
buttons["colorApply"] <- colorApply
loon.grob <- set_color_grob(
loon.grob = loon.grob,
index = brushId,
newColor = colorPicker,
axesGpath = axesGpath
)
loonWidgetsInfo$color[brushId] <- colorPicker
}
for(col in colorList) {
if(colorListButtons[[col]] > buttons[col]) {
buttons[col] <- colorListButtons[[col]]
loon.grob <- set_color_grob(
loon.grob = loon.grob,
index = brushId,
newColor = col,
axesGpath = axesGpath
)
loonWidgetsInfo$color[brushId] <- col
}
}
# ---- Alpha (transparency) modification ----
alphaApply <- input[[paste0(tabPanelName, "alphaApply")]]
if(alphaApply > buttons["alphaApply"]) {
buttons["alphaApply"] <- alphaApply
alpha <- isolate(input[[paste0(tabPanelName, "alpha")]])
loon.grob <- set_alpha_grob(
loon.grob = loon.grob,
index = brushId,
newAlpha = alpha,
axesGpath = axesGpath
)
output.grob <- set_alpha_grob(
loon.grob = output.grob,
index = brushId,
newAlpha = alpha,
axesGpath = axesGpath
)
loonWidgetsInfo$alpha[brushId] <- alpha
}
# ---- Deactivate / reactivate observations ----
output.grob <- set_deactive_grob(
loon.grob = output.grob,
index = whichIsDeactive,
axesGpath = axesGpath
)
loon.grob <- set_deactive_grob(
loon.grob = loon.grob,
index = whichIsDeactive,
axesGpath = axesGpath
)
modifyDeactive <- input[[paste0(tabPanelName, "modifyDeactive")]]
if(modifyDeactive > buttons["deactive"]) {
buttons["deactive"] <- modifyDeactive
loon.grob <- set_deactive_grob(
loon.grob = loon.grob,
index = brushId,
axesGpath = axesGpath
)
output.grob <- set_deactive_grob(
loon.grob = output.grob,
index = brushId,
axesGpath = axesGpath
)
loonWidgetsInfo$active[brushId] <- FALSE
whichIsDeactive <- union(whichIsDeactive, brushId)
}
modifyReactive <- input[[paste0(tabPanelName, "modifyReactive")]]
if (modifyReactive > buttons["reactive"]) {
buttons["reactive"] <- modifyReactive
output.grob <- set_reactive_grob(
loon.grob = output.grob,
index = whichIsDeactive,
axesGpath = axesGpath,
showArea = showArea
)
loon.grob <- set_reactive_grob(
loon.grob = loon.grob,
index = whichIsDeactive,
axesGpath = axesGpath,
showArea = showArea
)
loonWidgetsInfo$active <- rep(TRUE, N)
}
# ---- Line-width modification (absolute and relative +/-) ----
absToPlus <- input[[paste0(tabPanelName, "absToPlus")]]
if(absToPlus > buttons["absToPlus"]) {
buttons["absToPlus"] <- absToPlus
if(length(brushId) > 0) {
# Absolute +: set all selected to (current minimum + 1).
newSize <- min(loonWidgetsInfo$size[brushId]) + 1
loonWidgetsInfo$size[brushId] <- rep(newSize, length(brushId))
loon.grob <- set_size_grob(loon.grob = loon.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
output.grob <- set_size_grob(loon.grob = output.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
}
}
absToMinus <- input[[paste0(tabPanelName, "absToMinus")]]
if(absToMinus > buttons["absToMinus"]) {
buttons["absToMinus"] <- absToMinus
if(length(brushId) > 0) {
# Absolute -: set all selected to (current minimum - 1), floored at 1.
newSize <- min(loonWidgetsInfo$size[brushId]) - 1
if(newSize <= 1) newSize <- 1
loonWidgetsInfo$size[brushId] <- rep(newSize, length(brushId))
loon.grob <- set_size_grob(loon.grob = loon.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
output.grob <- set_size_grob(loon.grob = output.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
}
}
relToPlus <- input[[paste0(tabPanelName, "relToPlus")]]
if(relToPlus > buttons["relToPlus"]) {
buttons["relToPlus"] <- relToPlus
if(length(brushId) > 0) {
# Relative +: each selected line grows by 1.
loonWidgetsInfo$size[brushId] <- loonWidgetsInfo$size[brushId] + 1
loon.grob <- set_size_grob(loon.grob = loon.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
output.grob <- set_size_grob(loon.grob = output.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
}
}
relToMinus <- input[[paste0(tabPanelName, "relToMinus")]]
if(relToMinus > buttons["relToMinus"]) {
buttons["relToMinus"] <- relToMinus
if(length(brushId) > 0) {
# Relative -: each selected line shrinks by 1, floored at 1.
newSize <- loonWidgetsInfo$size[brushId] - 1
newSize[which(newSize <= 1)] <- 1
loonWidgetsInfo$size[brushId] <- newSize
loon.grob <- set_size_grob(loon.grob = loon.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
output.grob <- set_size_grob(loon.grob = output.grob,
index = brushId,
newSize = loonWidgetsInfo$size,
axesGpath = axesGpath,
showArea = showArea)
}
}
# Draw selected curves on top, then attach the viewport to the tree.
output.grob <- reorder_grob(output.grob,
number = N,
brushId,
axesGpath = axesGpath)
output.grob <- grid::setGrob(
gTree = output.grob,
gPath = "l_serialaxes",
newGrob = grid::editGrob(
grob = grid::getGrob(output.grob, "l_serialaxes"),
vp = vp
)
)
# ---- "push" button: publish this panel's state to the linking group ----
push <- input[[paste0(tabPanelName, "push")]]
if(push > buttons["push"]) {
buttons["push"] <- push
linkingGroup <- isolate(input[[paste0(tabPanelName, "linkingGroup")]])
} else {
newLinkingGroup <- isolate(input[[paste0(tabPanelName, "linkingGroup")]])
if(newLinkingGroup == "none") linkingGroup <- newLinkingGroup else NULL
}
linkingInfo <- update_linkingInfo(loon.grob,
tabPanelName = tabPanelName,
linkingInfo = linkingInfo,
linkingGroup = linkingGroup,
linkingKey = loonWidgetsInfo$linkingKey,
selected = loonWidgetsInfo$selected,
color = loonWidgetsInfo$color,
active = loonWidgetsInfo$active,
size = loonWidgetsInfo$size,
selectByColor = selectByColor,
linkedStates = input[[paste0(tabPanelName, "linkedStates")]])
}
# Return both grobs plus the bookkeeping state consumed on the next call.
list(
output.grob = output.grob,
loon.grob = loon.grob,
outputInfo = list(
brushId = brushId,
selectByColor = selectByColor,
linkingGroup = linkingGroup,
linkingInfo = linkingInfo,
loonWidgetsInfo = loonWidgetsInfo,
buttons = buttons
)
)
}
# Exploratory script: DHARMa residual diagnostics for glmmTMB models
# (zero-inflated Poisson / nbinom2, binomial, and a dispersion model).
library(glmmTMB)
# FIX: simulateResiduals() is from DHARMa, which was never attached.
library(DHARMa)

# Zero-inflated Poisson fit to the Salamanders data.
m1 <- glmmTMB(count ~ mined + (1 | site),
              zi = ~mined,
              family = poisson, data = Salamanders)
summary(m1)
simulate(m1)
class(m1)
res <- simulateResiduals(m1)
plot(res)

# Refit a richer zero-inflated nbinom2 model to data simulated from m1;
# residuals should then look well calibrated.
Salamanders$counts2 <- simulate(m1)$sim_1
(m2 <- glmmTMB(counts2 ~ spp + mined + (1 | site),
               zi = ~spp + mined,
               family = nbinom2, Salamanders))
res <- simulateResiduals(m2)
plot(res)

# FIX: was `predict(m1, ~reform)`, which passes a formula as `newdata` and
# errors; `re.form = NA` requests population-level predictions
# (presumably the intent — confirm against the analysis notes).
pred <- predict(m1, re.form = NA)
pred <- predict(m1)
hist(pred, breaks = 20)
x <- fixef(m1)
# Manual linear predictor from the conditional fixed effects.
x$cond[1] + x$cond[2] * as.numeric(Salamanders$mined)
res <- simulateResiduals(m1)
plot(res)

# Same model refit to its own simulated data as a sanity check.
m <- glmmTMB(count ~ mined + (1 | site),
             zi = ~mined,
             family = poisson, data = Salamanders)
summary(m)
res <- simulateResiduals(m)
plot(res)
Salamanders$count2 <- simulate(m)$sim_1
m <- glmmTMB(count2 ~ mined + (1 | site),
             zi = ~mined,
             family = poisson, data = Salamanders)
res <- simulateResiduals(m)
plot(res)

# Zero-inflated nbinom2 model, real data then simulated data.
(m2 <- glmmTMB(count ~ spp + mined + (1 | site),
               zi = ~spp + mined,
               family = nbinom2, Salamanders))
res <- simulateResiduals(m2)
plot(res)
Salamanders$count2 <- simulate(m2)$sim_1
(m2 <- glmmTMB(count2 ~ spp + mined + (1 | site),
               zi = ~spp + mined,
               family = nbinom2, Salamanders))
res <- simulateResiduals(m2)
plot(res)

# Hurdle-style variant: zero-inflated truncated Poisson.
(m3 <- glmmTMB(count ~ spp + mined + (1 | site),
               zi = ~spp + mined,
               family = truncated_poisson, Salamanders))
res <- simulateResiduals(m3)
plot(res)

# Binomial GLMM on the cbpp data (success/failure matrix response).
data(cbpp, package = "lme4")
(m4 <- glmmTMB(cbind(incidence, size - incidence) ~ period + (1 | herd),
               data = cbpp, family = binomial))
res <- simulateResiduals(m4)
plot(res)

# Simulate crossed random effects (factor + time) with group-specific
# residual SD, following the glmmTMB dispersion-model example.
sim1 <- function(nfac = 40, nt = 100, facsd = 0.1, tsd = 0.15,
                 mu = 0, residsd = 1) {
  dat <- expand.grid(fac = factor(letters[1:nfac]), t = 1:nt)
  n <- nrow(dat)
  dat$REfac <- rnorm(nfac, sd = facsd)[dat$fac]
  dat$REt <- rnorm(nt, sd = tsd)[dat$t]
  dat$x <- rnorm(n, mean = mu, sd = residsd) + dat$REfac + dat$REt
  dat
}

set.seed(101)
d1 <- sim1(mu = 100, residsd = 10)
d2 <- sim1(mu = 200, residsd = 5)
d1$sd <- "ten"
d2$sd <- "five"
dat <- rbind(d1, d2)

# Dispersion model: log-variance differs by group `sd`.
m5 <- glmmTMB(x ~ sd + (1 | t), dispformula = ~sd, dat)
res <- simulateResiduals(m5)
plot(res)
fixef(m5)$disp
# Expected dispersion coefficients: log(5^2) baseline, log(10^2) - log(5^2)
# contrast for the "ten" group.
c(log(5^2), log(10^2) - log(5^2))
# Compare wild-type and mutant FBA flux distributions, one PDF page per
# metabolic subsystem.
#
# Writes three PDFs into the working directory:
#   <graph_fname>.pdf       - all reactions, grouped by subsystem
#   Inc_<graph_fname>.pdf   - reactions whose |flux| increased in the mutant
#   Dec_<graph_fname>.pdf   - reactions whose |flux| decreased in the mutant
#
# wt_flux / mut_flux : solution objects with a numeric $fluxes vector
#                      (same length and ordering as fba_object$sub_system).
# fba_object         : model object with a character $sub_system vector.
# graph_fname        : file-name stem for the output PDFs.
#
# Returns nothing useful; called for its side effects (PDF files).
flux_difference_plotter <- function(wt_flux, mut_flux, fba_object,
                                    graph_fname = "Flux_comparison") {
  # Overlay wild-type (green) and mutant (red) barplots for one subsystem
  # on a shared y-range (par(new = TRUE) draws the second plot on top).
  plot_pair <- function(ids, subsystem) {
    y_wt <- wt_flux$fluxes[ids]
    y_mut <- mut_flux$fluxes[ids]
    y_lim <- range(c(y_wt, y_mut))
    barplot(y_wt, names.arg = ids, xlab = "Reaction", ylab = "Flux",
            col = "green", main = subsystem, ylim = y_lim,
            density = 85, beside = TRUE)
    par(new = TRUE)
    barplot(y_mut, names.arg = ids, xlab = "Reaction", ylab = "Flux",
            col = "red", main = subsystem, ylim = y_lim,
            density = 85, beside = TRUE)
  }

  # One PDF restricted to the reactions in `ids`; pages whose shared upper
  # flux bound is exactly zero are skipped (same guard as the original).
  # FIX: selection is done directly on the index vector instead of the
  # original character-matrix trick (`as.numeric(m[which(...)])`), which
  # only worked by accident of column-major linear indexing.
  plot_subset <- function(ids, prefix) {
    pdf(paste(prefix, graph_fname, ".pdf", sep = ""))
    subsystems <- sort(unique(fba_object$sub_system[ids]))
    for (i in seq_along(subsystems)) {
      sel <- ids[fba_object$sub_system[ids] == subsystems[i]]
      y_lim <- range(c(wt_flux$fluxes[sel], mut_flux$fluxes[sel]))
      if (max(y_lim) != 0) {
        plot_pair(sel, subsystems[i])
      }
      print(i)  # progress indicator, one line per subsystem
    }
    dev.off()
  }

  # Full comparison: every reaction, grouped by subsystem.
  pdf(paste(graph_fname, ".pdf", sep = ""))
  subsystems <- sort(unique(fba_object$sub_system))
  for (i in seq_along(subsystems)) {
    plot_pair(which(fba_object$sub_system == subsystems[i]), subsystems[i])
  }
  dev.off()

  message("making differentials")
  # Classify reactions by how the mutant's |flux| compares to wild type.
  mut_flux_inc <- which(abs(mut_flux$fluxes) > abs(wt_flux$fluxes))
  mut_flux_dec <- which(abs(mut_flux$fluxes) < abs(wt_flux$fluxes))

  if (length(mut_flux_inc) > 0) plot_subset(mut_flux_inc, "Inc_")
  if (length(mut_flux_dec) > 0) plot_subset(mut_flux_dec, "Dec_")
  invisible(NULL)
}
# Unit tests for MALDIquant's mass-spectrometry-imaging slice extraction.
context("msiSlices")
# Public API: msiSlices() aggregates peak intensities around a mass `center`
# (within `tolerance`) into an x/y/z intensity array.
test_that("msiSlices", {
# Three spectra sharing masses 1:5 with shifted intensities.
p <- list(createMassPeaks(mass=1:5, intensity=1:5),
createMassPeaks(mass=1:5, intensity=2:6),
createMassPeaks(mass=1:5, intensity=3:7))
# Pixel coordinates start at (2, 2); one pixel (3, 3) is missing -> NA.
coordinates(p) <- cbind(x=c(2, 2, 3), y=c(2, 3, 2))
# With adjust (default) coordinates are shifted to start at 1 -> 2x2 slice.
r <- array(c(3, 5, 4, NA), dim=c(x=2, y=2, z=1))
attr(r, "center") <- 3
attr(r, "tolerance") <- 0.5
attr(r, "method") <- "sum"
expect_equal(msiSlices(p, center=3, tolerance=0.5), r)
# Without adjustment the array spans the raw coordinate range (3x3, padded
# with NA where no spectrum exists).
r <- array(c(NA, NA, NA, NA, 3, 5, NA, 4, NA), dim=c(x=3, y=3, z=1))
attr(r, "center") <- 3
attr(r, "tolerance") <- 0.5
attr(r, "method") <- "sum"
expect_equal(msiSlices(p, center=3, tolerance=0.5, adjust=FALSE), r)
})
# Internal workhorse: .msiSlices() operates on an intensity matrix with a
# "mass" attribute plus a coordinate matrix.
test_that(".msiSlices", {
m <- matrix(c(1:5, 2:6, 3:7), byrow=TRUE, nrow=3)
attr(m, "mass") <- 1:5
coord <- cbind(x=c(1, 1, 2), y=c(1, 2, 1))
# tolerance 0.5 around mass 3 selects only the mass-3 column.
r <- array(c(3, 5, 4, NA), dim=c(x=2, y=2, z=1))
attr(r, "center") <- 3
attr(r, "tolerance") <- 0.5
attr(r, "method") <- "sum"
expect_equal(MALDIquant:::.msiSlices(m, coord, center=3, tolerance=0.5), r)
# tolerance 1 widens the window to masses 2:4; "sum" adds them up.
r[,,1] <- c(9, 15, 12, NA)
attr(r, "tolerance") <- 1
expect_equal(MALDIquant:::.msiSlices(m, coord, center=3, tolerance=1), r)
# Same window aggregated with "mean".
r[,,1] <- c(3, 5, 4, NA)
attr(r, "tolerance") <- 1
attr(r, "method") <- "mean"
expect_equal(MALDIquant:::.msiSlices(m, coord, center=3, tolerance=1,
method="mean"), r)
# ... and with "median" (identical here because the window is symmetric).
r[,,1] <- c(3, 5, 4, NA)
attr(r, "tolerance") <- 1
attr(r, "method") <- "median"
expect_equal(MALDIquant:::.msiSlices(m, coord, center=3, tolerance=1,
method="median"), r)
# Multiple centers produce one z-slice per center.
r <- array(c(6, 12, 9, NA, 9, 15, 12, NA), dim=c(x=2, y=2, z=2))
attr(r, "center") <- 2:3
attr(r, "tolerance") <- 1
attr(r, "method") <- "sum"
expect_equal(MALDIquant:::.msiSlices(m, coord, center=2:3, tolerance=1), r)
# Per-center tolerances are recycled slice-wise.
r[,,2] <- c(15, 25, 20, NA)
attr(r, "tolerance") <- 1:2
expect_equal(MALDIquant:::.msiSlices(m, coord, center=2:3, tolerance=1:2), r)
})
# Annual maximum of a NetCDF variable: thin wrapper that delegates to the
# generic yearly-statistic helper, where operator code 1 selects "max".
yearmax <- function(var, infile, outfile, nc34 = 4, overwrite = FALSE,
                    verbose = FALSE, nc = NULL) {
  op_max <- 1  # operator code understood by yearx_wrapper()
  yearx_wrapper(op_max, var, infile, outfile, nc34, overwrite, verbose,
                nc = nc)
}
# Plot 1-D sections of a km (DiceKriging) model: for each axis, the kriging
# mean with shaded confidence bands, plus the design points. Sections are
# taken through `center`; multi-dimensional models get one screen per axis.
# Side effects: draws on the current device (split.screen for D > 1) and
# stores per-axis plot limits in DiceView.env so add = TRUE can overlay.
sectionview.km <- function(model, type = "UK",
center = NULL, axis = NULL,
npoints = 100,
col_points = "red",
col_surf = "blue",
conf_lev = c(0.5, 0.8, 0.9, 0.95, 0.99),
conf_blend = NULL,
bg_blend = 5,
mfrow = NULL,
Xname = NULL, yname = NULL,
Xscale = 1, yscale = 1,
xlim = NULL, ylim = NULL,
title = NULL,
add = FALSE,
...) {
D <- model@d
# A section point is only optional for 1-D models.
if (is.null(center)) {
if (D != 1) stop("Section center in 'section' required for >1-D model.")
}
# Default: one section per input dimension.
if (is.null(axis)) {
axis <- matrix(1:D, ncol = 1)
} else {
axis <- matrix(axis, ncol = 1)
}
# Default band transparency: split 0.5 total alpha across the levels.
if (is.null(conf_blend) ||
length(conf_blend) != length(conf_lev))
conf_blend <- rep(0.5/length(conf_lev), length(conf_lev))
# Near-square screen grid for multi-panel display.
if (is.null(mfrow) && (D>1)) {
nc <- round(sqrt(D))
nl <- ceiling(D/nc)
mfrow <- c(nc, nl)
}
if (!isTRUE(add)) {
if (D>1) {
close.screen( all.screens = TRUE )
split.screen(figs = mfrow)
}
# Per-axis (xmin, xmax, ymin, ymax), shared with later add = TRUE calls.
assign(".split.screen.lim",matrix(NaN,ncol=4,nrow=D),envir=DiceView.env)
}
# Design points and responses, in plotting units.
X_doe <- Xscale * model@X
n <- dim(X_doe)[1]
y_doe <- yscale * model@y
# Observation noise: per-point noise variance, homogeneous nugget, or none.
if ([email protected]) {
sdy_doe <- abs(yscale) * sqrt([email protected])
} else if (model@[email protected]) {
sdy_doe <- rep(abs(yscale) * sqrt(model@covariance@nugget), n)
} else {
sdy_doe <- rep(0, n)
}
rx <- apply(X_doe, 2, range)
if(!is.null(xlim)) rx <- matrix(xlim,nrow=2,ncol=D)
rownames(rx) <- c("min", "max")
drx <- rx["max", ] - rx["min", ]
# Default y-range: data +/- 3 observation SDs.
if (is.null(ylim)) {
ymin <- min(y_doe-3*sdy_doe)
ymax <- max(y_doe+3*sdy_doe)
ylim <- c(ymin, ymax)
}
if (is.null(yname)) yname <- names(y_doe)
if (is.null(yname)) yname <- "y"
if (is.null(Xname)) Xname <- names(X_doe)
if (is.null(Xname)) Xname <- paste(sep = "", "X", 1:D)
# Pretty-printed center values for panel titles.
fcenter <- tryFormat(x = center, drx = drx)
# ---- One section plot per requested axis ----
for (id in 1:dim(axis)[1]) {
if (D>1) screen(id, new=!add)
d <- axis[id,]
xdmin <- rx["min", d]
xdmax <- rx["max", d]
xlim = c(xdmin,xdmax)
xd <- seq(from = xdmin, to = xdmax, length.out = npoints)
# Prediction grid: hold all coordinates at `center`, vary coordinate d.
x <- data.frame(t(matrix(as.numeric(center), nrow = D, ncol = npoints)))
if (!is.null(center)) if(!is.null(names(center))) names(x) <- names(center)
x[ , d] <- xd
y_mean <- array(0, npoints)
y_sd <- array(0, npoints)
# Pointwise kriging prediction along the section.
for (i in 1:npoints) {
y <- predict(model, type = type, newdata = (x[i, ]), checkNames=FALSE)
y_mean[i] <- yscale * y$mean
y_sd[i] <- abs(yscale) * y$sd
}
if (is.null(title)){
if (D>1) {
title_d <- paste(collapse = ", ", paste(Xname[-d], '=', fcenter[-d]))
} else {
title_d <- paste(collapse = "~", yname, Xname[d])}
} else {
title_d <- title
}
if (isTRUE(add)) {
# Overlay: reuse the limits stored by the first (add = FALSE) call.
.split.screen.lim = get(x=".split.screen.lim",envir=DiceView.env)
xlim <- c(.split.screen.lim[d,1],.split.screen.lim[d,2])
ylim <- c(.split.screen.lim[d,3],.split.screen.lim[d,4])
if (D>1) {
plot(xd, y_mean,
type = "l", lty = 3,
xlim = xlim, ylim = ylim,
col = col_surf, xlab="", ylab="",
...)
} else {
lines(xd, y_mean,
lty = 3,
xlim = xlim, ylim = ylim,
col = col_surf,
...)
}
} else {
# Record this axis' limits for later overlays.
# NOTE(review): eval(parse(...)) to assign into DiceView.env is an
# anti-pattern; an index assignment on the fetched matrix would do.
eval(parse(text=paste(".split.screen.lim[",d,",] = matrix(c(",xlim[1],",",xlim[2],",",ylim[1],",",ylim[2],"),nrow=1)")),envir=DiceView.env)
plot(xd, y_mean,
xlab = Xname[d], ylab = yname,
xlim = xlim, ylim = ylim,
main = title_d,
type = "l", lty = 3,
col = col_surf,
...)
if(D>1) abline(v=center[d],col='black',lty=2)
}
# Shaded confidence bands (one translucent polygon per level).
for (p in 1:length(conf_lev)) {
colp <- translude(col_surf, alpha = conf_blend[p])
polygon(c(xd,rev(xd)),
c(qnorm((1+conf_lev[p])/2, y_mean, y_sd),
rev(qnorm((1-conf_lev[p])/2, y_mean, y_sd))),
col = colp,
border = NA)
}
# Fade design points by their distance from the section plane.
if (D>1) {
xrel <- scale(x = as.matrix(X_doe),
center = center,
scale = rx["max", ] - rx["min", ])
alpha <- apply(X = xrel[ , -d, drop = FALSE],
MARGIN = 1,
FUN = function(x) (1 - (sqrt(sum(x^2)/D)))^bg_blend)
} else {
alpha <- rep(1, n)
}
if ([email protected]) {
col1 <- fade(color = col_points, alpha = alpha)
points(X_doe[,d], y_doe,
col = col1,
pch = 20)
}
# Vertical noise bars at each design point, one per confidence level.
for (p in 1:length(conf_lev)) {
for (i in 1:n) {
lines(c(X_doe[i,d],X_doe[i,d]),
c(qnorm((1+conf_lev[p])/2, y_doe[i], sdy_doe[i]),
qnorm((1-conf_lev[p])/2, y_doe[i], sdy_doe[i])),
col = rgb(1,1-alpha[i], 1-alpha[i], alpha[i]*conf_blend[p]),
lwd = 5, lend = 1)
}
}
}
}
coef.modgam <- function(object, ...) {
  # S3 coef method for modgam objects: return the coefficients of the
  # underlying fitted GAM stored in the object.
  object$gamobj$coefficients
}
vcov.synthdid_estimate = function(object,
                                  method = c("bootstrap", "jackknife", "placebo"),
                                  replications = 200, ...) {
  # Variance estimate for a synthdid estimate, using one of three
  # resampling schemes. Returns a 1x1 matrix containing se^2.
  method = match.arg(method)
  se = switch(method,
              bootstrap = bootstrap_se(object, replications),
              jackknife = jackknife_se(object),
              placebo   = placebo_se(object, replications))
  matrix(se^2)
}
# Standard error of a synthdid estimate: square root of the vcov variance.
synthdid_se = function(...) {
  sqrt(vcov(...))
}
# Bootstrap standard error: sd of the bootstrap replicates, scaled by
# sqrt((B - 1) / B) so the divisor matches the number of replications.
bootstrap_se = function(estimate, replications) {
  correction = sqrt((replications - 1) / replications)
  correction * sd(bootstrap_sample(estimate, replications))
}
bootstrap_sample = function(estimate, replications) {
  # Draw bootstrap replicates of a synthdid estimate by resampling unit rows
  # with replacement. Draws falling entirely inside the control block or
  # entirely inside the treated block are rejected and redrawn.
  setup   = attr(estimate, 'setup')
  opts    = attr(estimate, 'opts')
  weights = attr(estimate, 'weights')
  # With exactly one treated unit the bootstrap is not informative.
  if (setup$N0 == nrow(setup$Y) - 1) { return(NA) }
  theta = function(ind) {
    if (all(ind <= setup$N0) || all(ind > setup$N0)) {
      # Degenerate draw: no treated/control contrast available.
      NA
    } else {
      weights.boot = weights
      weights.boot$omega = sum_normalize(weights$omega[sort(ind[ind <= setup$N0])])
      do.call(synthdid_estimate,
              c(list(Y = setup$Y[sort(ind), ],
                     N0 = sum(ind <= setup$N0),
                     T0 = setup$T0,
                     X = setup$X[sort(ind), , ],
                     weights = weights.boot), opts))
    }
  }
  estimates = rep(NA, replications)
  count = 0
  # Keep sampling until `replications` valid (non-NA) replicates are collected.
  while (count < replications) {
    estimates[count + 1] = theta(sample(1:nrow(setup$Y), replace = TRUE))
    if (!is.na(estimates[count + 1])) { count = count + 1 }
  }
  estimates
}
jackknife_se = function(estimate, weights = attr(estimate, 'weights')) {
  # Leave-one-unit-out jackknife standard error for a synthdid estimate.
  setup = attr(estimate, 'setup')
  opts  = attr(estimate, 'opts')
  # When weights are supplied, hold them fixed instead of re-solving.
  if (!is.null(weights)) {
    opts$update.omega = opts$update.lambda = FALSE
  }
  single.treated = setup$N0 == nrow(setup$Y) - 1
  single.control = !is.null(weights) && sum(weights$omega != 0) == 1
  # Jackknife is undefined with only one treated (or one weighted control) unit.
  if (single.treated || single.control) { return(NA) }
  theta = function(ind) {
    weights.jk = weights
    if (!is.null(weights)) {
      weights.jk$omega = sum_normalize(weights$omega[ind[ind <= setup$N0]])
    }
    do.call(synthdid_estimate,
            c(list(Y = setup$Y[ind, ],
                   N0 = sum(ind <= setup$N0),
                   T0 = setup$T0,
                   X = setup$X[ind, , ],
                   weights = weights.jk), opts))
  }
  jackknife(1:nrow(setup$Y), theta)
}
jackknife = function(x, theta) {
  # Delete-one jackknife standard error of the statistic `theta`
  # evaluated over the elements of `x`.
  n = length(x)
  reps = numeric(n)
  for (k in seq_len(n)) {
    # k-th leave-one-out replicate of the statistic.
    reps[k] = theta(x[-k])
  }
  # Jackknife variance ((n-1)/n) * sum((reps - mean(reps))^2),
  # expressed via var(): ((n-1)/n) * (n-1) * var(reps).
  sqrt(((n - 1) / n) * (n - 1) * var(reps))
}
placebo_se = function(estimate, replications) {
  # Placebo standard error: repeatedly re-estimate on control units only,
  # pretending a random subset of them were treated.
  setup   = attr(estimate, 'setup')
  opts    = attr(estimate, 'opts')
  weights = attr(estimate, 'weights')
  N1 = nrow(setup$Y) - setup$N0
  if (setup$N0 <= N1) { stop('must have more controls than treated units to use the placebo se') }
  theta = function(ind) {
    # Last N1 permuted controls play the role of treated units.
    N0 = length(ind) - N1
    weights.boot = weights
    weights.boot$omega = sum_normalize(weights$omega[ind[1:N0]])
    do.call(synthdid_estimate,
            c(list(Y = setup$Y[ind, ],
                   N0 = N0,
                   T0 = setup$T0,
                   X = setup$X[ind, , ],
                   weights = weights.boot), opts))
  }
  draws = replicate(replications, theta(sample(1:setup$N0)))
  sqrt((replications - 1) / replications) * sd(draws)
}
# Rescale x to sum to one; a zero-sum input maps to the uniform vector.
sum_normalize = function(x) {
  total = sum(x)
  if (total != 0) {
    x / total
  } else {
    rep(1 / length(x), length(x))
  }
}
monitor_isolate <- function(
  ws_monitor,
  xlim = NULL,
  ylim = NULL,
  tlim = NULL,
  monitorIDs = NULL,
  stateCodes = NULL,
  timezone = "UTC"
) {
  # Split a ws_monitor object into a named list of single-monitor objects,
  # each subset by the requested spatial/temporal limits and trimmed of
  # leading/trailing missing data.
  if ( monitor_isEmpty(ws_monitor) ) stop("ws_monitor object contains zero monitors")
  # First data column is the datetime axis; the remaining names are monitor IDs.
  ids <- names(ws_monitor$data)[-1]
  monList <- lapply(ids, function(id) {
    subsetMon <- monitor_subset(ws_monitor, xlim = xlim, ylim = ylim,
                                tlim = tlim, monitorIDs = id,
                                dropMonitors = TRUE, timezone = timezone)
    monitor_trim(subsetMon)
  })
  names(monList) <- ids
  return(monList)
}
crm_dtps <- function(skeleton,
                     target,
                     model,
                     cohort_sizes,
                     previous_outcomes = '',
                     next_dose = NULL,
                     user_dose_func = NULL,
                     verbose = FALSE,
                     i_am_patient = FALSE,
                     ...) {

  # Calculate dose-transition pathways (DTPs) for a CRM trial: enumerate every
  # possible sequence of per-cohort toxicity outcomes and refit the CRM model
  # after each, caching fits so shared path prefixes are evaluated only once.
  # Returns the cache as a "dose_finding_paths" list keyed by outcome string.
  #
  # Fix: error messages previously read "stricly".
  if(!all(cohort_sizes == ceiling(cohort_sizes)))
    stop('cohort_sizes must be strictly positive integers.')
  if(!all(cohort_sizes > 0))
    stop('cohort_sizes must be strictly positive integers.')

  # The number of model evaluations grows multiplicatively in the cohort
  # sizes; warn for moderate workloads and refuse large ones unless the
  # caller has opted in with i_am_patient = TRUE.
  max_depth <- length(cohort_sizes)
  num_paths = 1 + sum(sapply(1:max_depth,
                             function(i) prod((cohort_sizes + 1)[1:i])))
  if(num_paths >= 50 & num_paths < 100) {
    message(paste0('You have requested ', num_paths,
                   ' model evaluations. Be patient.'))
  }
  if(num_paths >= 100 & !i_am_patient) {
    stop(paste0('You have requested ', num_paths,
                ' model evaluations but also flagged your impatience.',
                ' Run again with i_am_patient = TRUE'))
  }

  # Parse any previously-observed outcomes into doses/tox/num_patients.
  if(nchar(previous_outcomes) > 0)
    dat <- df_parse_outcomes(previous_outcomes)
  else
    dat <- list(doses = c(), tox = c(), num_patients = 0)
  num_doses <- length(skeleton)
  previous_doses <- dat$doses
  previous_tox <- dat$tox
  previous_num_patients <- dat$num_patients

  # Enumerate all per-cohort outcome combinations ('T' = tox, 'N' = no tox),
  # then cross them over the cohorts to obtain every full pathway.
  outcomes <- c('T', 'N')
  cohort_paths <- lapply(cohort_sizes,
                         function(x) gtools::combinations(n = 2, r = x,
                                                          v = outcomes,
                                                          repeats.allowed=TRUE))
  cohort_paths <- lapply(cohort_paths, function(x) apply(x, 1, paste0,
                                                         collapse = ''))
  cohort_paths <- expand.grid(cohort_paths, stringsAsFactors = FALSE)

  # Cache of fitted path nodes, keyed by the outcome string so far.
  cache <- list()
  root_node_id <- 1
  # Root fit uses only the previously-observed outcomes.
  fit <- stan_crm(outcome_str = previous_outcomes, skeleton = skeleton,
                  target = target, model = model, ...)
  if(is.null(next_dose)) {
    if(is.null(user_dose_func))
      next_dose <- fit$recommended_dose
    else
      next_dose <- user_dose_func(fit)
  }
  root <- dose_finding_path_node(node_id = root_node_id,
                                 parent_node_id = NA,
                                 depth = 0,
                                 outcomes = '',
                                 next_dose = next_dose,
                                 fit = fit,
                                 parent_fit = NULL)
  cache[['']] <- root

  node_id <- root_node_id + 1
  for(i in 1:nrow(cohort_paths)) {
    cohort_path <- cohort_paths[i, ]
    cohort_dose <- next_dose
    dtp <- ""
    parent <- root

    for(j in 1:length(cohort_path)) {
      # An NA dose means the pathway has stopped; skip further cohorts.
      if(!is.na(cohort_dose)) {
        # Extend the pathway string, e.g. "2NN 3TN".
        dtp <- ifelse(nchar(dtp) > 0,
                      paste0(dtp, ' ', cohort_dose, cohort_path[j]),
                      paste0(cohort_dose, cohort_path[j])
        )

        if(dtp %in% names(cache)) {
          # Prefix already evaluated: reuse the cached node.
          if(verbose) print(paste0('Fetching ', dtp, ' from cache'))
          parent <- cache[[dtp]]
          cohort_dose <- parent$next_dose
        } else {
          # New prefix: pool previous and pathway outcomes, then refit.
          these_outcomes <- df_parse_outcomes(dtp)
          dat$doses <- array(c(previous_doses, these_outcomes$doses))
          dat$tox <- array(c(previous_tox, these_outcomes$tox))
          dat$num_patients <- previous_num_patients +
            these_outcomes$num_patients

          if(verbose) print(paste0('Running ', dtp))
          fit <- stan_crm(skeleton = skeleton, target = target, model = model,
                          doses_given = dat$doses, tox = dat$tox, ...)

          if(is.null(user_dose_func))
            cohort_dose <- fit$recommended_dose
          else
            cohort_dose <- user_dose_func(fit)

          node <- dose_finding_path_node(node_id = node_id,
                                         parent_node_id = parent$.node,
                                         depth = j,
                                         outcomes = as.character(cohort_path[j]),
                                         next_dose = cohort_dose,
                                         fit = fit,
                                         parent_fit = parent$fit)
          cache[[dtp]] <- node
          parent <- node
          node_id <- node_id + 1
        }
      }
    }
  }

  class(cache) <- c("dose_finding_paths", "list")
  cache
}
summary.FixedContContIT <- function(object, ..., Object){
  # S3 summary for FixedContContIT objects: prints data descriptives followed
  # by the information-theoretic surrogacy estimates.
  if (missing(Object)){Object <- object}
  cat("\nFunction call:\n\n")
  print(Object$Call)
  # NOTE(review): the following two header strings were truncated in the
  # source (everything from '#' to end-of-line stripped, leaving unterminated
  # string literals); the text is reconstructed — confirm against the
  # upstream Surrogate package source.
  cat("\n\n# Data summary and descriptives")
  cat("\n#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
  cat("\n\nTotal number of trials: ", nrow(Object$Obs.Per.Trial))
  cat("\nTotal number of patients: ", dim(Object$Data.Analyze)[1])
  cat("\nM(SD) patients per trial: ", format(round(mean((Object$Obs.Per.Trial$Obs.per.trial)), 4), nsmall = 4), " (", format(round(sd((Object$Obs.Per.Trial$Obs.per.trial)), 4), nsmall = 4), ")",
      " [min: ", min((Object$Obs.Per.Trial$Obs.per.trial)), "; max: ",  max((Object$Obs.Per.Trial$Obs.per.trial)), "]", sep="")
  cat("\nTotal number of patients in experimental treatment group: ", length(Object$Data.Analyze$Treat[Object$Data.Analyze$Treat==1]),
      "\nTotal number of patients in control treatment group: ", length(Object$Data.Analyze$Treat[Object$Data.Analyze$Treat!=1]))
  # Group means of the surrogate and true endpoints by treatment arm.
  means_table <- rbind(tapply(Object$Data.Analyze$Surr, list(Object$Data.Analyze$Treat), mean), tapply(Object$Data.Analyze$True, list(Object$Data.Analyze$Treat), mean))
  colnames(means_table) <- c("Control Treatment", "Experimental treatment")
  rownames(means_table) <- c("Surrogate", "True endpoint")
  cat("\n\nMean surrogate and true endpoint values in each treatment group: \n\n")
  print(format(round(data.frame(means_table), 4), nsmall = 4))
  # Group variances of the surrogate and true endpoints by treatment arm.
  Var_table <- rbind(tapply(Object$Data.Analyze$Surr, list(Object$Data.Analyze$Treat), var), tapply(Object$Data.Analyze$True, list(Object$Data.Analyze$Treat), var))
  colnames(Var_table) <- c("Control Treatment", "Experimental treatment")
  rownames(Var_table) <- c("Surrogate", "True endpoint")
  cat("\n\nVar surrogate and true endpoint values in each treatment group: \n\n")
  print(format(round(data.frame(Var_table), 4), nsmall = 4))
  cat("\n\nCorrelations between the true and surrogate endpoints in the control (r_T0S0)")
  cat("\nand the experimental treatment groups (r_T1S1):\n\n")
  print(round(Object$Cor.Endpoints, 4), nsmall = 4)
  # NOTE(review): reconstructed header (see note above).
  cat("\n\n\n# Information-theoretic surrogacy estimates summary")
  cat("\n#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
  cat("\n\n")
  cat("Trial-level surrogacy (R2_ht): \n")
  print(format(round(Object$R2ht, 4), nsmall = 4))
  cat("\nIndividual-level surrogacy (R2_h.ind.clust): \n")
  print(format(round(Object$R2h.ind.clust, 4), nsmall = 4))
  cat("\nIndividual-level surrogacy assuming N=1 (R2_h.ind): \n")
  print(format(round(Object$R2h.ind, 4), nsmall = 4))
}
summary.MixedContContIT <- function(object, ..., Object){
  # S3 summary for MixedContContIT objects: prints data descriptives followed
  # by the information-theoretic surrogacy estimates.
  if (missing(Object)){Object <- object}
  cat("\nFunction call:\n\n")
  print(Object$Call)
  # NOTE(review): the following two header strings were truncated in the
  # source (everything from '#' to end-of-line stripped, leaving unterminated
  # string literals); the text is reconstructed — confirm against the
  # upstream Surrogate package source.
  cat("\n\n# Data summary and descriptives")
  cat("\n#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
  cat("\n\nTotal number of trials: ", nrow(Object$Obs.Per.Trial))
  cat("\nTotal number of patients: ", dim(Object$Data.Analyze)[1])
  cat("\nM(SD) patients per trial: ", format(round(mean((Object$Obs.Per.Trial$Obs.per.trial)), 4), nsmall = 4), " (", format(round(sd((Object$Obs.Per.Trial$Obs.per.trial)), 4), nsmall = 4), ")",
      " [min: ", min((Object$Obs.Per.Trial$Obs.per.trial)), "; max: ",  max((Object$Obs.Per.Trial$Obs.per.trial)), "]", sep="")
  cat("\nTotal number of patients in experimental treatment group: ", length(Object$Data.Analyze$Treat[Object$Data.Analyze$Treat==1]),
      "\nTotal number of patients in control treatment group: ", length(Object$Data.Analyze$Treat[Object$Data.Analyze$Treat!=1]))
  # Group means of the surrogate and true endpoints by treatment arm.
  means_table <- rbind(tapply(Object$Data.Analyze$Surr, list(Object$Data.Analyze$Treat), mean), tapply(Object$Data.Analyze$True, list(Object$Data.Analyze$Treat), mean))
  colnames(means_table) <- c("Control Treatment", "Experimental treatment")
  rownames(means_table) <- c("Surrogate", "True endpoint")
  cat("\n\nMean surrogate and true endpoint values in each treatment group: \n\n")
  print(format(round(data.frame(means_table, stringsAsFactors = TRUE), 4), nsmall = 4))
  # Group variances of the surrogate and true endpoints by treatment arm.
  Var_table <- rbind(tapply(Object$Data.Analyze$Surr, list(Object$Data.Analyze$Treat), var), tapply(Object$Data.Analyze$True, list(Object$Data.Analyze$Treat), var))
  colnames(Var_table) <- c("Control Treatment", "Experimental treatment")
  rownames(Var_table) <- c("Surrogate", "True endpoint")
  cat("\n\nVar surrogate and true endpoint values in each treatment group: \n\n")
  print(format(round(data.frame(Var_table, stringsAsFactors = TRUE), 4), nsmall = 4))
  cat("\n\nCorrelations between the true and surrogate endpoints in the control (r_T0S0)")
  cat("\nand the experimental treatment groups (r_T1S1):\n\n")
  print(round(Object$Cor.Endpoints, 4), nsmall = 4)
  # NOTE(review): reconstructed header (see note above).
  cat("\n\n\n# Information-theoretic surrogacy estimates summary")
  cat("\n#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
  cat("\n\n")
  cat("Trial-level surrogacy (R2_ht): \n")
  print(format(round(Object$R2ht, 4), nsmall = 4))
  cat("\nIndividual-level surrogacy (R2_hind): \n")
  print(format(round(Object$R2h.ind, 4), nsmall = 4))
}
context("scale_*_cyclical")

test_that("basic tests", {
  df <- data.frame(x=sample(1:26), y=sample(1:26),
                   letters)

  # NOTE(review): the colour literals in this test were lost in extraction
  # (text from '#' to end-of-line stripped, leaving unterminated strings);
  # "#F00"/"#0F0" and the guide arguments are reconstructed — confirm against
  # the upstream ggridges test file.

  # Cyclical scale recycles the two colours over the 26 letters; no legend
  # is drawn by default.
  p <- ggplot(df, aes(x, y, label=letters, color=letters)) + geom_text() +
    scale_color_cyclical(values = c("#F00", "#0F0"))
  d <- layer_data(p)
  expect_equal(d$colour, rep(c("#F00", "#0F0"), 13))
  expect_equal("guide-box" %in% ggplotGrob(p)$layout$name, FALSE)

  # Requesting a legend explicitly makes the guide box appear.
  p <- ggplot(df, aes(x, y, label=letters, color=factor(x))) + geom_text() +
    scale_color_cyclical(values = c("#F00", "#0F0"), guide = "legend")
  d <- layer_data(p)
  expect_equal(d$colour[order(d$x)], rep(c("#F00", "#0F0"), 13))
  expect_equal("guide-box" %in% ggplotGrob(p)$layout$name, TRUE)

  # Mismatched breaks/labels lengths are rejected.
  expect_error(
    ggplot(df, aes(x, y, label=letters, color=factor(x))) + geom_text() +
      scale_color_cyclical(values = c("#F00", "#0F0"),
                           breaks = c(1, 2, 3),
                           labels = c("red", "blue")),
    "`breaks` and `labels` must have the same length")

  # Legend can be suppressed explicitly.
  p <- ggplot(df, aes(x, y, label=letters, color=factor(x))) + geom_text() +
    scale_color_cyclical(values = c("#F00", "#0F0"), guide = "none")
  expect_equal("guide-box" %in% ggplotGrob(p)$layout$name, FALSE)
})
test_that("visual appearance of scale_*_cyclical", {
  df <- data.frame(x=1:30, y=1:30)

  # NOTE(review): colour literals were lost in extraction (stripped at '#');
  # reconstructed from the doppelganger names ("red-green-blue dots") —
  # confirm against the upstream ggridges test file.
  p <- ggplot(df, aes(x, y, fill = factor(x))) + geom_point(shape = 21, size = 3) +
    scale_fill_cyclical(values = c("#FF0000", "#00FF00", "#0000FF"))
  expect_doppelganger("scale_fill_cyclical red-green-blue dots, no legend", p)

  p <- ggplot(df, aes(x, y, color = factor(x))) + geom_point(size = 3) +
    scale_color_cyclical(values = c("#FF0000", "#00FF00", "#0000FF"),
                         guide = "legend")
  expect_doppelganger("scale_fill_cyclical red-green-blue dots, with legend", p)
})
factor_to_dummy <- function(afactor)
{
  # Convert a factor into a 0/1 dummy (indicator) matrix with one column per
  # level and one row per observation.
  #
  # afactor : a factor; any other input is rejected.
  # Returns : a num_obs x num_levels numeric matrix with level names as
  #           column names and "1".."n" as row names.
  if (!is.factor(afactor))
    stop("\n'factor_to_dummy()' requires a factor")

  num_obs = length(afactor)
  categs = levels(afactor)
  num_categs = length(categs)
  # (Removed an unused `tabulate(afactor)` computation present previously.)
  dummy_matrix = matrix(0, num_obs, num_categs)
  for (k in seq_len(num_categs)) {
    # Rows belonging to level k get a 1 in column k.
    dummy_matrix[afactor == categs[k], k] = 1
  }
  colnames(dummy_matrix) = categs
  rownames(dummy_matrix) = seq_len(num_obs)
  dummy_matrix
}
# Plot several mortality tables as death probabilities q_x relative to a
# reference table. `data` may be mortalityTable object(s) or an already
# prepared data.frame with columns x, y and group; further tables can be
# passed via `...` and are forwarded to makeQxDataFrame().
plotMortalityTableComparisons = function(
    data, ...,
    aes = NULL,
    ages = NULL,
    xlim = NULL, ylim = NULL,
    xlab = NULL, ylab = NULL,
    title = "",
    legend.position = c(0.9,0.1), legend.justification = c(1, 0),
    legend.title = "Sterbetafel",
    legend.key.width = unit(25, "mm"),
    reference = NULL)
{
    # Default reference: the table itself when a single mortalityTable is
    # given. missing() (rather than is.null()) is used deliberately so an
    # explicit `reference = NULL` stays NULL.
    if (missing(reference)) {
        if (inherits(data, "mortalityTable")) {
            reference = data;
        } else {
            reference = NULL;
        }
    }
    # Convert mortality table object(s) to a plottable long data.frame.
    if (!is.data.frame(data)) {
        data = makeQxDataFrame(data, ..., reference = reference);
    }
    # Optionally restrict the plot to the requested ages.
    if (!is.null(ages)) {
        data = data[data$x %in% ages,]
    }
    if (missing(xlab)) xlab = "Alter";
    if (missing(ylab)) {
        # Axis label mentions the reference table's name; substitute() builds
        # the plotmath expression with refname filled in.
        ylab = substitute(paste("Sterbewahrscheinlichkeit ", q[x],
                                " relativ zu ", refname),
                          env=list(refname=reference@name));
    }
    pl = ggplot(data, aes(x = x, y = y, color = group))
    # Caller-supplied extra aesthetics are layered on top of the defaults.
    if (!is.null(aes)) {
        pl = pl + aes
    }
    pl = pl +
        theme_bw() +
        theme(
            plot.title = element_text(size=18, face="bold"),
            legend.title = element_text(size=14, face="bold.italic"),
            legend.justification = legend.justification, legend.position=legend.position,
            legend.key = element_blank(),
            legend.key.width = legend.key.width,
            legend.background = element_rect(colour="gray50", linetype="solid")
        ) +
        geom_line() +
        coord_cartesian(xlim=xlim, ylim=ylim) +
        scale_y_continuous(
            name=ylab,
            labels=percent
        ) +
        scale_x_continuous(
            name = xlab,
            # Minor gridlines at every integer age within the plotted range.
            minor_breaks = function (limits) seq(max(round(min(limits)),0),round(max(limits)),1)
        ) +
        # NOTE(review): this trailing xlab("Alter") coexists with the
        # scale_x_continuous(name = xlab) above — verify which label wins and
        # whether the xlab parameter is honoured for non-default values.
        xlab("Alter") + labs(colour = legend.title);
    if (title != "") {
        pl = pl + ggtitle(title);
    }
    pl
}
# Declare the data.frame columns used in aes() so R CMD check does not flag
# them as undefined global variables.
globalVariables(c("x", "y"))
## --- APC analysis of individual-level data: Wage (ISLR) example ------------
library("ISLR")
data("Wage")
# Restrict to prime working ages; rename "year" to "period".
Wage2 <- Wage[Wage$age >= 25 & Wage$age <= 55, ]
names(Wage2)[names(Wage2) %in% c("year","age")] <- c("period","age")
# Derived variables: birth cohort and binary indicators.
cohort <- Wage2$period - Wage2$age
indust_job <- ifelse(Wage2$jobclass=="1. Industrial", 1, 0)
hasdegree <- ifelse(Wage2$education %in%
                      c("4. College Grad", "5. Advanced Degree"), 1, 0)
married <- ifelse(Wage2$maritl == "2. Married", 1, 0)
Wage3 <- cbind(Wage2, cohort, indust_job, hasdegree, married)
rm(Wage, Wage2, cohort, indust_job, hasdegree, married)
library("plyr")
library("apc")
# Model 1: full APC model for log wages (default gaussian family).
model1 <- apc.indiv.est.model(Wage3, dep.var="logwage")
apc.plot.fit(model1)
# Model 2: age-cohort logit for marriage, with covariates.
model2 <- apc.indiv.est.model(Wage3, dep.var = "married",
                              covariates = c("logwage", "hasdegree"),
                              model.design = "AC",
                              model.family = "binomial")
apc.plot.fit(model2)
model2$coefficients.covariates
# Drop sparse cohorts at both ends before re-estimating.
Wage3_cc <- Wage3[Wage3$cohort>1950 & Wage3$cohort<1982, ]
model3 <- apc.indiv.est.model(Wage3_cc, dep.var = "married",
                              covariates = c("logwage", "hasdegree"),
                              model.design = "AC",
                              model.family = "binomial",
                              n.coh.excl.end = 3,
                              n.coh.excl.start = 3)
apc.plot.fit(model3)
model3$coefficients.covariates
library("car")
# F-test that the two covariate effects are equal.
linearHypothesis(model3$fit, "logwage = hasdegree", test="F")
# Model 4: time-saturated ("TS") logit for degree attainment.
model4 <- apc.indiv.est.model(Wage3_cc, dep.var = "hasdegree",
                              model.family = "binomial",
                              covariates = "logwage",
                              model.design = "TS",
                              n.coh.excl.start = 3,
                              n.coh.excl.end = 3)
model4$result
# Custom Newton-Raphson controls for the TS estimation.
myspec2 <- list(20,30,.002,"ols",.Machine$double.eps,.002,NULL,NULL)
names(myspec2) <- c("maxit.loop", "maxit.linesearch", "tolerance",
                    "init", "inv.tol", "d1.tol", "custom.kappa", "custom.zeta")
model4b <- apc.indiv.est.model(Wage3_cc, dep.var = "hasdegree",
                               model.family = "binomial",
                               covariates = "logwage",
                               model.design = "TS",
                               n.coh.excl.start = 3,
                               n.coh.excl.end = 3,
                               NR.controls = myspec2)
model4b$result
library("survey")
# Weighted estimation using artificial uniform(0,1) weights.
inv_wt <- runif(nrow(Wage3), 0, 1)
Wage_wt <- cbind(Wage3, inv_wt)
model5 <- apc.indiv.est.model(Wage_wt, dep.var = "logwage",
                              wt.var= "inv_wt")
apc.plot.fit(model5)
## --- Panel-data example: PSID7682 (AER) ------------------------------------
library("AER")
data("PSID7682")
# Build period, entry cohort, log wage and binary indicators.
period <- as.numeric(PSID7682$year) + 1975
entry <- period - PSID7682$experience
logwage <- log(PSID7682$wage)
inunion <- ifelse(PSID7682$union == "yes", 1, 0)
insouth <- ifelse(PSID7682$south == "yes", 1, 0)
psid2 <- cbind(PSID7682, period, entry, logwage, inunion, insouth)
names(psid2)[names(psid2) %in% c("experience", "entry")] <-
  c("age", "cohort")
psid3 <- psid2[psid2$cohort >=1939, ]
rm(PSID7682, period, entry, logwage, inunion, insouth, psid2)
library("plm")
# Model 6: fixed-effects ("within") FAP model for log wages.
model6 <- apc.indiv.est.model(psid3, dep.var = "logwage",
                              covariates = c("inunion", "insouth"),
                              plmmodel = "within", id.var = "id",
                              model.design = "FAP")
apc.plot.fit(model6)
model6$coefficients.covariates
# Same design without covariates, then a Wald test of the covariates jointly.
model6b <- apc.indiv.est.model(psid3, dep.var = "logwage",
                               plmmodel = "within", id.var = "id",
                               model.design = "FAP")
waldtest(model6$fit, model6b$fit)
# Manual route: build the collinear design matrix, fit with plm directly,
# and compare against the apc fitting helpers via a Wald F-test.
collinear_1 <- apc.indiv.design.collinear(psid3)
design_1 <- apc.indiv.design.model(collinear_1, dep.var = "logwage",
                                   covariates = c("inunion", "insouth"),
                                   plmmodel = "random", id.var ="id")
plm_1 <- plm(design_1$model.formula,
             data = collinear_1$full.design.collinear,
             index = c("id", "period"), model = "random")
design_2 <- apc.indiv.design.model(collinear_1, dep.var = "logwage",
                                   plmmodel = "random", id.var ="id")
fit_2 <- apc.indiv.fit.model(design_2)
waldtest(plm_1, fit_2$fit, test="F")
## --- Model-selection tables with apc.indiv.model.table ---------------------
# Rebuild the Wage3 data set (same preparation as above).
library("ISLR")
data("Wage")
Wage2 <- Wage[Wage$age >= 25 & Wage$age <= 55, ]
names(Wage2)[names(Wage2) %in% c("year","age")] <- c("period","age")
cohort <- Wage2$period - Wage2$age
indust_job <- ifelse(Wage2$jobclass=="1. Industrial", 1, 0)
hasdegree <- ifelse(Wage2$education %in%
                      c("4. College Grad", "5. Advanced Degree"), 1, 0)
married <- ifelse(Wage2$maritl == "2. Married", 1, 0)
Wage3 <- cbind(Wage2, cohort, indust_job, hasdegree, married)
rm(Wage, Wage2, cohort, indust_job, hasdegree, married)
# Table of all model designs for gaussian log wages, Wald F tests,
# including the time-saturated model (TS=TRUE).
test1 <- apc.indiv.model.table(Wage3, dep.var="logwage",
                               test= "Wald", dist="F",
                               model.family="gaussian",
                               TS=TRUE)
test1$table
# Likelihood-ratio chi-square table for the marriage logit with a covariate.
test2 <- apc.indiv.model.table(Wage3, dep.var="married",
                               covariates = "hasdegree",
                               test="LR", dist="Chisq",
                               TS=TRUE, model.family="binomial")
test2$table
test2$NR.report
# Weighted variant using artificial uniform(0,1) weights.
inv_wt <- runif(nrow(Wage3), 0, 1)
Wage_wt <- cbind(Wage3, inv_wt)
test3 <- apc.indiv.model.table(Wage_wt, dep.var="hasdegree",
                               covariates="logwage", test="Wald",
                               dist="Chisq",
                               model.family="binomial",
                               wt.var="inv_wt")
test3$table
# Rebuild the PSID panel data (same preparation as above).
library("AER")
data("PSID7682")
period <- as.numeric(PSID7682$year) + 1975
entry <- period - PSID7682$experience
logwage <- log(PSID7682$wage)
inunion <- ifelse(PSID7682$union == "yes", 1, 0)
insouth <- ifelse(PSID7682$south == "yes", 1, 0)
psid2 <- cbind(PSID7682, period, entry, logwage, inunion, insouth)
names(psid2)[names(psid2) %in% c("experience", "entry")] <-
  c("age", "cohort")
psid3 <- psid2[psid2$cohort >=1939, ]
# Panel model tables: random effects with a covariate, then fixed effects.
test4 <- apc.indiv.model.table(psid3, dep.var="logwage",
                               covariates = "insouth",
                               plmmodel="random", id.var="id",
                               model.family="gaussian",
                               test="Wald", dist="Chisq")
test4$table
test5 <- apc.indiv.model.table(psid3, dep.var="logwage",
                               plmmodel="within", id.var="id",
                               model.family="gaussian",
                               test="Wald", dist="Chisq")
test5$table
## --- Direct pairwise model comparisons with apc.indiv.compare.direct -------
# Rebuild the Wage3 data set (same preparation as above).
library("ISLR")
data("Wage")
Wage2 <- Wage[Wage$age >= 25 & Wage$age <= 55, ]
names(Wage2)[names(Wage2) %in% c("year","age")] <- c("period","age")
cohort <- Wage2$period - Wage2$age
indust_job <- ifelse(Wage2$jobclass=="1. Industrial", 1, 0)
hasdegree <- ifelse(Wage2$education %in%
                      c("4. College Grad", "5. Advanced Degree"), 1, 0)
married <- ifelse(Wage2$maritl == "2. Married", 1, 0)
Wage3 <- cbind(Wage2, cohort, indust_job, hasdegree, married)
rm(Wage, Wage2, cohort, indust_job, hasdegree, married)
# Wald F-test: age-period ("AP") model against period-trend ("tP") submodel.
test1 <- apc.indiv.compare.direct(Wage3, big.model="AP",
                                  small.model="tP",
                                  dep.var="logwage", model.family="gaussian",
                                  test="Wald", dist="F")
test1
# LR chi-square test: time-saturated against period-cohort for the logit.
test2 <- apc.indiv.compare.direct(Wage3, big.model="TS",
                                  small.model="PC",
                                  dep.var="married", covariates="hasdegree",
                                  model.family="binomial", test="LR", dist="Chisq")
test2[1:8]
# Weighted comparison using artificial uniform(0,1) weights.
inv_wt <- runif(nrow(Wage3), 0, 1)
Wage_wt <- cbind(Wage3, inv_wt)
test3 <- apc.indiv.compare.direct(Wage_wt, big.model="APC",
                                  small.model="P",
                                  dep.var="logwage",
                                  covariates = c("hasdegree", "married"),
                                  wt.var="inv_wt", test="Wald", dist="Chisq",
                                  model.family="gaussian")
test3
# Rebuild the PSID panel data (same preparation as above).
library("AER")
data("PSID7682")
period <- as.numeric(PSID7682$year) + 1975
entry <- period - PSID7682$experience
logwage <- log(PSID7682$wage)
inunion <- ifelse(PSID7682$union == "yes", 1, 0)
insouth <- ifelse(PSID7682$south == "yes", 1, 0)
psid2 <- cbind(PSID7682, period, entry, logwage, inunion, insouth)
names(psid2)[names(psid2) %in% c("experience", "entry")] <-
  c("age", "cohort")
psid3 <- psid2[psid2$cohort >=1939, ]
# Panel comparisons: random effects ("Pd" vs "t"), then fixed effects
# ("FAP" vs "FP").
test4 <- apc.indiv.compare.direct(psid3, big.model="Pd",
                                  small.model="t",
                                  dep.var="logwage", covariates="insouth",
                                  plmmodel="random", id.var="id",
                                  model.family="gaussian", test="Wald", dist="F")
test4
test5 <- apc.indiv.compare.direct(psid3, big.model="FAP",
                                  small.model="FP",
                                  dep.var="logwage",
                                  plmmodel="within", id.var="id",
                                  model.family="gaussian", test="Wald",
                                  dist="Chisq")
test5
## --- Repeated Wage setup and baseline APC fit ------------------------------
# Same data preparation as earlier sections, repeated here (this chunk
# appears self-contained in the original script).
library("ISLR")
data("Wage")
Wage2 <- Wage[Wage$age >= 25 & Wage$age <= 55, ]
names(Wage2)[names(Wage2) %in% c("year","age")] <- c("period","age")
cohort <- Wage2$period - Wage2$age
indust_job <- ifelse(Wage2$jobclass=="1. Industrial", 1, 0)
hasdegree <- ifelse(Wage2$education %in%
                      c("4. College Grad", "5. Advanced Degree"), 1, 0)
married <- ifelse(Wage2$maritl == "2. Married", 1, 0)
Wage3 <- cbind(Wage2, cohort, indust_job, hasdegree, married)
rm(Wage, Wage2, cohort, indust_job, hasdegree, married)
library("plyr")
library("apc")
# Baseline: full APC model for log wages, then plot the fitted effects.
model1 <- apc.indiv.est.model(Wage3, dep.var="logwage")
apc.plot.fit(model1)
fat3.crd <- function(factor1,
factor2,
factor3,
resp,
quali=c(TRUE,TRUE,TRUE),
mcomp='tukey',
fac.names=c('F1','F2','F3'),
sigT=0.05,
sigF=0.05,
unfold=NULL) {
cat('------------------------------------------------------------------------\nLegend:\n')
cat('FACTOR 1: ',fac.names[1],'\n')
cat('FACTOR 2: ',fac.names[2],'\n')
cat('FACTOR 3: ',fac.names[3],'\n------------------------------------------------------------------------\n\n')
fatores<-data.frame(factor1,factor2,factor3)
Fator1<-factor(factor1)
Fator2<-factor(factor2)
Fator3<-factor(factor3)
nv1<-length(summary(Fator1))
nv2<-length(summary(Fator2))
nv3<-length(summary(Fator3))
J<-(length(resp))/(nv1*nv2*nv3)
lf1<-levels(Fator1)
lf2<-levels(Fator2)
lf3<-levels(Fator3)
anava<-aov(resp~Fator1*Fator2*Fator3)
anavaF3<-summary(anava)
SQa<-anavaF3[[1]][1,2]
SQb<-anavaF3[[1]][2,2]
SQc<-anavaF3[[1]][3,2]
SQab<-anavaF3[[1]][4,2]
SQac<-anavaF3[[1]][5,2]
SQbc<-anavaF3[[1]][6,2]
SQabc<-anavaF3[[1]][7,2]
SQE<-anavaF3[[1]][8,2]
SQT<-SQa+SQb+SQc+SQab+SQac+SQbc+SQabc+SQE
gla=nv1-1
glb=nv2-1
glc=nv3-1
glab=(nv1-1)*(nv2-1)
glac=(nv1-1)*(nv3-1)
glbc=(nv2-1)*(nv3-1)
glabc=(nv1-1)*(nv2-1)*(nv3-1)
glE=anavaF3[[1]][8,1]
glT=gla+glb+glc+glab+glac+glbc+glabc+glE
QMa=SQa/gla
QMb=SQb/glb
QMc=SQc/glc
QMab=SQab/glab
QMac=SQac/glac
QMbc=SQbc/glbc
QMabc=SQabc/glabc
QME=SQE/glE
QMT=SQT/glT
Fca=QMa/QME
Fcb=QMb/QME
Fcc=QMc/QME
Fcab=QMab/QME
Fcac=QMac/QME
Fcbc=QMbc/QME
Fcabc=QMabc/QME
an<-data.frame("DF"=c(gla, glb, glc, glab, glac, glbc, glabc, glE, glT ),
"SS"=c(round(c(SQa,SQb,SQc,SQab,SQac,SQbc,SQabc,SQE,SQT),5)),
"MS"=c(round(c(QMa,QMb,QMc,QMab,QMac,QMbc,QMabc,QME),5),''),
"Fc"=c(round(c(Fca,Fcb,Fcc,Fcab,Fcac,Fcbc,Fcabc),4),'',''),
"Pr>Fc"=c(round(c(1-pf(Fca,gla,glE), 1-pf(Fcb,glb,glE), 1-pf(Fcc,glc,glE), 1-pf(Fcab,glab,glE), 1-pf(Fcac,glac,glE),
1-pf(Fcbc,glbc,glE), 1-pf(Fcabc,glabc,glE)),4), ' ', ' '))
colnames(an)[5]="Pr>Fc"
rownames(an)=c(fac.names[1],fac.names[2],fac.names[3],paste(fac.names[1],'*',fac.names[2],sep=''),paste(fac.names[1],'*',fac.names[3],sep=''),
paste(fac.names[2],'*',fac.names[3],sep=''),paste(fac.names[1],'*',fac.names[2],'*',fac.names[3],sep=''),"Residuals","Total")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(an)
cat('------------------------------------------------------------------------\n')
pvalor<-c(1-pf(Fca,gla,glE), 1-pf(Fcb,glb,glE), 1-pf(Fcc,glc,glE), 1-pf(Fcab,glab,glE), 1-pf(Fcac,glac,glE), 1-pf(Fcbc,glbc,glE), 1-pf(Fcabc,glabc,glE))
cv<-round(sqrt(QME)/mean(resp)*100, 2)
cat('CV =',cv,'%\n')
pvalor.shapiro<-shapiro.test(anava$residuals)$p.value
cat('\n------------------------------------------------------------------------\nShapiro-Wilk normality test\n')
cat('p-value: ',pvalor.shapiro, '\n')
if(pvalor.shapiro<=0.05){cat('WARNING: at 5% of significance, residuals can not be considered normal!
------------------------------------------------------------------------\n')}
if(pvalor.shapiro>0.05){cat('According to Shapiro-Wilk normality test at 5% of significance, residuals can be considered normal.
------------------------------------------------------------------------\n')}
if(is.null(unfold)){
if(1-pf(Fcab,glab,glE)>sigF &&
1-pf(Fcac,glac,glE)>sigF &&
1-pf(Fcbc,glbc,glE)>sigF &&
1-pf(Fcabc,glabc,glE)>sigF){unfold<-c(unfold,1)}
if(1-pf(Fcabc,glabc,glE)>sigF &&
1-pf(Fcab,glab,glE)<=sigF) {unfold<-c(unfold,2.1)}
if(1-pf(Fcabc,glabc,glE)>sigF &&
1-pf(Fcac,glac,glE)<=sigF) {unfold<-c(unfold,2.2)}
if(1-pf(Fcabc,glabc,glE)>sigF &&
1-pf(Fcbc,glbc,glE)<=sigF) {unfold<-c(unfold,2.3)}
if(1-pf(Fcabc,glabc,glE)<=sigF){unfold<-c(unfold,3)}
}
if(any(unfold==1)) {
cat('\nNo significant interaction: analyzing the simple effect
------------------------------------------------------------------------\n')
fatores<-data.frame('fator 1'=factor1,'fator 2' = factor2,'fator 3' = factor3)
for(i in 1:3){
if(quali[i]==TRUE && pvalor[i]<=sigF) {
cat(fac.names[i])
if(mcomp=='tukey'){
tukey(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
}
if(quali[i]==TRUE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------')
}
if(quali[i]==FALSE && pvalor[i]<=sigF){
cat(fac.names[i])
reg.poly(resp, fatores[,i], an[8,1],an[8,2], an[i,1], an[i,2])
}
if(quali[i]==FALSE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------')
}
cat('\n')
}
}
if(any(unfold==2.1)) {
cat("\n\n\nSignificant",paste(fac.names[1],'*',fac.names[2],sep='')," interaction: analyzing the interaction
------------------------------------------------------------------------\n")
cat("\nAnalyzing ", fac.names[1], ' inside of each level of ', fac.names[2], '
------------------------------------------------------------------------\n')
des1<-aov(resp~Fator2/Fator1)
l1<-vector('list',nv2)
names(l1)<-names(summary(Fator2))
v<-numeric(0)
for(j in 1:nv2) {
for(i in 0:(nv1-2)) v<-cbind(v,i*nv2+j)
l1[[j]]<-v
v<-numeric(0)
}
des1.tab<-summary(des1,split=list('Fator2:Fator1'=l1))[[1]]
glf1=c(as.numeric(des1.tab[3:(nv2+2),1]))
SQf1=c(as.numeric(des1.tab[3:(nv2+2),2]))
QMf1=SQf1/glf1
Fcf1=QMf1/QME
rn<-numeric(0)
for(i in 1:nv2){ rn<-c(rn, paste(paste(fac.names[1],':',fac.names[2],sep=''),lf2[i]))}
anavad1<-data.frame("DF"=c(glf1, glE),
"SS"=c(round(c(SQf1,SQE),5)),
"MS"=c(round(c(QMf1,QME),5)),
"Fc"=c(round(Fcf1,4),''),
"Pr>Fc"=c(round(1-pf(Fcf1,glf1,glE),4),' '))
colnames(anavad1)[5]="Pr>Fc"
rownames(anavad1)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad1)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv2) {
ii<-ii+1
if(1-pf(Fcf1,glf1,glE)[ii]<=sigF){
if(quali[1]==TRUE){
cat('\n\n',fac.names[1],' inside of the level ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[1],' inside of the level ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
reg.poly(resp[Fator2==lf2[i]], factor1[Fator2==lf2[i]], an[8,1],an[8,2], des1.tab[i+2,1], des1.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[1],' inside of the level ',lf2[i],' of ',fac.names[2],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator2==lf2[i]],fatores[,1][Fator2==lf2[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
cat('\n\n')
cat("\nAnalyzing ", fac.names[2], ' inside of each level of ', fac.names[1], '
------------------------------------------------------------------------\n')
des2<-aov(resp~Fator1/Fator2)
l2<-vector('list',nv1)
names(l2)<-names(summary(Fator1))
v<-numeric(0)
for(j in 1:nv1) {
for(i in 0:(nv2-2)) v<-cbind(v,i*nv1+j)
l2[[j]]<-v
v<-numeric(0)
}
des2.tab<-summary(des2,split=list('Fator1:Fator2'=l2))[[1]]
glf2=c(as.numeric(des2.tab[3:(nv1+2),1]))
SQf2=c(as.numeric(des2.tab[3:(nv1+2),2]))
QMf2=SQf2/glf2
Fcf2=QMf2/QME
rn<-numeric(0)
for(k in 1:nv1){ rn<-c(rn, paste(paste(fac.names[2],':',fac.names[1],sep=''),lf1[k]))}
anavad2<-data.frame("DF"=c(glf2, glE),
"SS"=c(round(c(SQf2,SQE),5)),
"MS"=c(round(c(QMf2,QME),5)),
"Fc"=c(round(Fcf2,4),''),
"Pr>Fc"=c(round(1-pf(Fcf2,glf2,glE),4),' '))
colnames(anavad2)[5]="Pr>Fc"
rownames(anavad2)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad2)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv1) {
ii<-ii+1
if(1-pf(Fcf2,glf2,glE)[ii]<=sigF){
if(quali[2]==TRUE){
cat('\n\n',fac.names[2],' inside of the level ',lf1[i],' of ',fac.names[1],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[2],' inside of the level ',lf1[i],' of ',fac.names[1],'
------------------------------------------------------------------------')
reg.poly(resp[Fator1==lf1[i]], factor2[Fator1==lf1[i]], an[8,1], an[8,2], des2.tab[i+2,1], des2.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[2],' inside of the level ',lf1[i],' of ',fac.names[1],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator1==lf1[i]],fatores[,2][Fator1==lf1[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
if(pvalor[5]>sigF && pvalor[6]>sigF) {
cat('\nAnalizing the effect of the factor ',fac.names[3],'
------------------------------------------------------------------------\n')
i<-3
{
if(quali[i]==TRUE && pvalor[i]<=sigF) {
cat(fac.names[i])
if(mcomp=='tukey'){
tukey(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccboot"){
ccboot(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccF"){
ccF(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
}
if(quali[i]==TRUE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Niveis','Medias')
print(mean.table)
cat('------------------------------------------------------------------------')
}
if(quali[i]==FALSE && pvalor[i]<=sigF){
cat(fac.names[i])
reg.poly(resp, fatores[,i], an[8,1],an[8,2], an[i,1], an[i,2])
}
if(quali[i]==FALSE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------')
}
cat('\n')
}
}
}
if(any(unfold==2.2)) {
cat("\n\n\nSignificant",paste(fac.names[1],'*',fac.names[3],sep='')," interaction: analyzing the interaction
------------------------------------------------------------------------\n")
cat("\nAnalyzing ", fac.names[1], ' inside of each level of ', fac.names[3], '
------------------------------------------------------------------------\n')
des3<-aov(resp~Fator3/Fator1)
l1<-vector('list',nv3)
names(l1)<-names(summary(Fator3))
v<-numeric(0)
for(j in 1:nv3) {
for(i in 0:(nv1-2)) v<-cbind(v,i*nv3+j)
l1[[j]]<-v
v<-numeric(0)
}
des3.tab<-summary(des3,split=list('Fator3:Fator1'=l1))[[1]]
glf3=c(as.numeric(des3.tab[3:(nv3+2),1]))
SQf3=c(as.numeric(des3.tab[3:(nv3+2),2]))
QMf3=SQf3/glf3
Fcf3=QMf3/QME
rn<-numeric(0)
for(j in 1:nv3){ rn<-c(rn, paste(paste(fac.names[1],':',fac.names[3],sep=''),lf3[j]))}
anavad3<-data.frame("DF"=c(glf3, glE),
"SS"=c(round(c(SQf3,SQE),5)),
"MS"=c(round(c(QMf3,QME),5)),
"Fc"=c(round(Fcf3,4),''),
"Pr>Fc"=c(round(1-pf(Fcf3,glf3,glE),4),' '))
colnames(anavad3)[5]="Pr>Fc"
rownames(anavad3)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad3)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv3) {
ii<-ii+1
if(1-pf(Fcf3,glf3,glE)[ii]<=sigF){
if(quali[1]==TRUE){
cat('\n\n',fac.names[1],' inside of the level ',lf3[i],' of ',fac.names[3],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[1],' inside of the level ',lf3[i],' of ',fac.names[3],'
------------------------------------------------------------------------')
reg.poly(resp[Fator3==lf3[i]], factor1[Fator3==lf3[i]], an[8,1],an[8,2], des3.tab[i+2,1], des3.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[1],' inside of the level ',lf3[i],' of ',fac.names[3],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator3==lf3[i]],fatores[,1][Fator3==lf3[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
cat('\n\n')
cat("\nAnalyzing", fac.names[3], ' inside of each level of ', fac.names[1], '
------------------------------------------------------------------------\n')
des4<-aov(resp~Fator1/Fator3)
l3<-vector('list',nv1)
names(l3)<-names(summary(Fator1))
v<-numeric(0)
for(j in 1:nv1) {
for(i in 0:(nv3-2)) v<-cbind(v,i*nv1+j)
l3[[j]]<-v
v<-numeric(0)
}
des4.tab<-summary(des4,split=list('Fator1:Fator3'=l3))[[1]]
glf4=c(as.numeric(des4.tab[3:(nv1+2),1]))
SQf4=c(as.numeric(des4.tab[3:(nv1+2),2]))
QMf4=SQf4/glf4
Fcf4=QMf4/QME
rn<-numeric(0)
for(k in 1:nv1){ rn<-c(rn, paste(paste(fac.names[3],':',fac.names[1],sep=''),lf1[k]))}
anavad4<-data.frame("DF"=c(glf4, glE),
"SS"=c(round(c(SQf4,SQE),5)),
"MS"=c(round(c(QMf4,QME),5)),
"Fc"=c(round(Fcf4,4),''),
"Pr>Fc"=c(round(1-pf(Fcf4,glf4,glE),4),' '))
colnames(anavad4)[5]="Pr>Fc"
rownames(anavad4)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad4)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv1) {
ii<-ii+1
if(1-pf(Fcf4,glf4,glE)[ii]<=sigF){
if(quali[3]==TRUE){
cat('\n\n',fac.names[3],' inside of the level ',lf1[i],' of ',fac.names[1],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[3],' inside of the level ',lf1[i],' of ',fac.names[1],'
------------------------------------------------------------------------')
reg.poly(resp[Fator1==lf1[i]], factor3[Fator1==lf1[i]], an[8,1],an[8,2], des4.tab[i+2,1], des4.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[3],' inside of the level ',lf1[i],' of ',fac.names[1],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator1==lf1[i]],fatores[,3][Fator1==lf1[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
if(pvalor[4]>sigF && pvalor[6]>sigF) {
cat('\nAnalizing the effect of the factor ',fac.names[2],'
------------------------------------------------------------------------\n')
i<-2
{
if(quali[i]==TRUE && pvalor[i]<=sigF) {
cat(fac.names[i])
if(mcomp=='tukey'){
tukey(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccboot"){
ccboot(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccF"){
ccF(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
}
if(quali[i]==TRUE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Niveis','Medias')
print(mean.table)
cat('------------------------------------------------------------------------')
}
if(quali[i]==FALSE && pvalor[i]<=sigF){
cat(fac.names[i])
reg.poly(resp, fatores[,i], an[8,1],an[8,2], an[i,1], an[i,2])
}
if(quali[i]==FALSE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------')
}
cat('\n')
}
}
}
if(any(unfold==2.3)) {
cat("\n\n\nSignificant",paste(fac.names[2],'*',fac.names[3],sep='')," interaction: analyzing the interaction
------------------------------------------------------------------------\n")
cat("\nAnalyzing ", fac.names[2], ' inside of each level of ', fac.names[3], '
------------------------------------------------------------------------\n')
des5<-aov(resp~Fator3/Fator2)
l2<-vector('list',nv3)
names(l2)<-names(summary(Fator3))
v<-numeric(0)
for(j in 1:nv3) {
for(i in 0:(nv2-2)) v<-cbind(v,i*nv3+j)
l2[[j]]<-v
v<-numeric(0)
}
des5.tab<-summary(des5,split=list('Fator3:Fator2'=l2))[[1]]
glf5=c(as.numeric(des5.tab[3:(nv3+2),1]))
SQf5=c(as.numeric(des5.tab[3:(nv3+2),2]))
QMf5=SQf5/glf5
Fcf5=QMf5/QME
rn<-numeric(0)
for(j in 1:nv3){ rn<-c(rn, paste(paste(fac.names[2],':',fac.names[3],sep=''),lf3[j]))}
anavad5<-data.frame("DF"=c(glf5, glE),
"SS"=c(round(c(SQf5,SQE),5)),
"MS"=c(round(c(QMf5,QME),5)),
"Fc"=c(round(Fcf5,4),''),
"Pr>Fc"=c(round(1-pf(Fcf5,glf5,glE),4),' '))
colnames(anavad5)[5]="Pr>Fc"
rownames(anavad5)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad5)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv3) {
ii<-ii+1
if(1-pf(Fcf5,glf5,glE)[ii]<=sigF){
if(quali[2]==TRUE){
cat('\n\n',fac.names[2],' inside of the level ',lf3[i],' of ',fac.names[3],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[2],' inside of the level ',lf3[i],' of ',fac.names[3],'
------------------------------------------------------------------------')
reg.poly(resp[Fator3==lf3[i]], factor2[Fator3==lf3[i]], an[8,1], an[8,2], des5.tab[i+2,1], des5.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[2],' inside of the level ',lf3[i],' of ',fac.names[3],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator3==lf3[i]],fatores[,2][Fator3==lf3[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
cat('\n\n')
cat("\nAnalyzing ", fac.names[3], ' inside of each level of ', fac.names[2], '
------------------------------------------------------------------------\n')
des6<-aov(resp~Fator2/Fator3)
l3<-vector('list',nv2)
names(l3)<-names(summary(Fator2))
v<-numeric(0)
for(j in 1:nv2) {
for(i in 0:(nv3-2)) v<-cbind(v,i*nv2+j)
l3[[j]]<-v
v<-numeric(0)
}
des6.tab<-summary(des6,split=list('Fator2:Fator3'=l3))[[1]]
glf6=c(as.numeric(des6.tab[3:(nv2+2),1]))
SQf6=c(as.numeric(des6.tab[3:(nv2+2),2]))
QMf6=SQf6/glf6
Fcf6=QMf6/QME
rn<-numeric(0)
for(i in 1:nv2){ rn<-c(rn, paste(paste(fac.names[3],':',fac.names[2],sep=''),lf2[i]))}
anavad6<-data.frame("DF"=c(glf6, glE),
"SS"=c(round(c(SQf6,SQE),5)),
"MS"=c(round(c(QMf6,QME),5)),
"Fc"=c(round(Fcf6,4),''),
"Pr>Fc"=c(round(1-pf(Fcf6,glf6,glE),4),' '))
colnames(anavad6)[5]="Pr>Fc"
rownames(anavad6)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad6)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv2) {
ii<-ii+1
if(1-pf(Fcf6,glf6,glE)[ii]<=sigF){
if(quali[3]==TRUE){
cat('\n\n',fac.names[3],' inside of the leve ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[3],' inside of the leve ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
reg.poly(resp[Fator2==lf2[i]], factor3[Fator2==lf2[i]], an[8,1], an[8,2], des6.tab[i+2,1], des6.tab[i+2,2])
}
}
else{cat('\n\n',fac.names[3],' inside of the leve ',lf2[i],' of ',fac.names[2],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[Fator2==lf2[i]],fatores[,3][Fator2==lf2[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
if(pvalor[4]>sigF && pvalor[5]>sigF) {
cat('\nAnalizing the effect of the factor ',fac.names[1],'
------------------------------------------------------------------------\n')
i<-1
{
if(quali[i]==TRUE && pvalor[i]<=sigF) {
cat(fac.names[i])
if(mcomp=='tukey'){
tukey(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccboot"){
ccboot(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
if(mcomp=="ccF"){
ccF(resp,fatores[,i],an[8,1],an[8,2],sigT)
}
}
if(quali[i]==TRUE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Niveis','Medias')
print(mean.table)
cat('------------------------------------------------------------------------')
}
if(quali[i]==FALSE && pvalor[i]<=sigF){
cat(fac.names[i])
reg.poly(resp, fatores[,i], an[8,1],an[8,2], an[i,1], an[i,2])
}
if(quali[i]==FALSE && pvalor[i]>sigF) {
cat(fac.names[i])
cat('\nAccording to the F test, the means of this factor are not different.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp,fatores[,i],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------')
}
cat('\n')
}
}
}
if(any(unfold==3)) {
cat("\n\n\nSignificant",paste(fac.names[1],'*',fac.names[2],'*',fac.names[3],sep='')," interaction: analyzing the interaction
------------------------------------------------------------------------\n")
cat("\nAnalyzing ", fac.names[1], ' inside of each level of ', fac.names[2], 'and',fac.names[3],'
------------------------------------------------------------------------\n')
SQc<-numeric(0)
SQf<-numeric(nv2*nv3)
rn<-numeric(0)
for(i in 1:nv2){
for(j in 1:nv3) {
for(k in 1:nv1) {SQf[(i-1)*nv3+j]=c(SQf[(i-1)*nv3+j]+ sum(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j] & fatores[,1]==lf1[k]])^2) }
rn<-c(rn, paste(paste(fac.names[1],':',sep=''),lf2[i],lf3[j]))
SQc=c(SQc,(sum(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]])^2)/(nv1*J))
}
}
SQf=SQf/J
SQ=SQf-SQc
glf=rep(nv1-1,(nv2*nv3))
QM=SQ/glf
anavad7<-data.frame("DF"=c(glf,glE),
"SS"=c(SQ,SQE),
"MS"=c(QM,QME),
"Fc"=c(c(round((QM/QME),6)), ' '),
"Pr>Fc"=c(c(round(1-pf(QM/QME,glf,glE),6)),' '))
colnames(anavad7)[5]="Pr>Fc"
rownames(anavad7)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad7)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(i in 1:nv2) {
for(j in 1:nv3) {
ii<-ii+1
if(1-pf(QM/QME,glf,glE)[ii]<=sigF){
if(quali[1]==TRUE){
cat('\n\n',fac.names[1],' inside of the combination of the levels ',lf2[i],' of ',fac.names[2],' and ',lf3[j],' of ',fac.names[3],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]],fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[1],' inside of the combination of the levels ',lf2[i],' of ',fac.names[2],' and ',lf3[j],' of ',fac.names[3],'
------------------------------------------------------------------------')
reg.poly(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]], fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]], an[8,1],an[8,2], nv1-1, SQ[ii])
}
}
else{cat('\n\n',fac.names[1],' inside of the combination of the levels ',lf2[i],' of ',fac.names[2],' and ',lf3[j],' of ',fac.names[3],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[fatores[,2]==lf2[i] & fatores[,3]==lf3[j]], fatores[,1][Fator2==lf2[i] & Fator3==lf3[j]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
}
cat('\n\n')
cat("\nAnalyzing ", fac.names[2], ' inside of each level of ', fac.names[1], 'and',fac.names[3],'
------------------------------------------------------------------------\n')
SQc<-numeric(0)
SQf<-numeric(nv1*nv3)
rn<-numeric(0)
for(k in 1:nv1){
for(j in 1:nv3) {
for(i in 1:nv2) {SQf[(k-1)*nv3+j]=c(SQf[(k-1)*nv3+j]+ sum(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j] & fatores[,2]==lf2[i]])^2) }
rn<-c(rn, paste(paste(fac.names[2],':',sep=''),lf1[k],lf3[j]))
SQc=c(SQc,(sum(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]])^2)/(nv2*J))
}
}
SQf=SQf/J
SQ=SQf-SQc
glf=rep(nv2-1,(nv1*nv3))
QM=SQ/glf
anavad8<-data.frame("DF"=c(glf,glE),
"SS"=c(SQ,SQE),
"MS"=c(QM,QME),
"Fc"=c(c(round((QM/QME),6)), ' '),
"Pr>Fc"=c(c(round(1-pf(QM/QME,glf,glE),6)),' '))
colnames(anavad8)[5]="Pr>Fc"
rownames(anavad8)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad8)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(k in 1:nv1) {
for(j in 1:nv3) {
ii<-ii+1
if(1-pf(QM/QME,glf,glE)[ii]<=sigF){
if(quali[2]==TRUE){
cat('\n\n',fac.names[2],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf3[j],' of ',fac.names[3],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[2],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf3[j],' of ',fac.names[3],'
------------------------------------------------------------------------')
reg.poly(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]], an[8,1], an[8,2], nv2-1, SQ[ii])
}
}
else{cat('\n\n',fac.names[2],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf3[j],' of ',fac.names[3],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[fatores[,1]==lf1[k] & fatores[,3]==lf3[j]],fatores[,2][Fator1==lf1[k] & fatores[,3]==lf3[j]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
}
cat("\nAnalyzing ", fac.names[3], ' inside of each level of ', fac.names[1], 'and',fac.names[2],'
------------------------------------------------------------------------\n')
SQc<-numeric(0)
SQf<-numeric(nv1*nv2)
rn<-numeric(0)
for(k in 1:nv1){
for(i in 1:nv2) {
for(j in 1:nv3) {SQf[(k-1)*nv2+i]=c(SQf[(k-1)*nv2+i]+ sum(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i] & fatores[,3]==lf3[j]])^2) }
rn<-c(rn, paste(paste(fac.names[3],':',sep=''),lf1[k],lf2[i]))
SQc=c(SQc,(sum(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]])^2)/(nv3*J))
}
}
SQf=SQf/J
SQ=SQf-SQc
glf=rep(nv3-1,(nv1*nv2))
QM=SQ/glf
anavad9<-data.frame("DF"=c(glf,glE),
"SS"=c(SQ,SQE),
"MS"=c(QM,QME),
"Fc"=c(c(round((QM/QME),6)), ' '),
"Pr>Fc"=c(c(round(1-pf(QM/QME,glf,glE),6)),' '))
colnames(anavad9)[5]="Pr>Fc"
rownames(anavad9)=c(rn,"Residuals")
cat('------------------------------------------------------------------------
Analysis of Variance Table\n------------------------------------------------------------------------\n')
print(anavad9)
cat('------------------------------------------------------------------------\n\n')
ii<-0
for(k in 1:nv1) {
for(i in 1:nv2) {
ii<-ii+1
if(1-pf(QM/QME,glf,glE)[ii]<=sigF){
if(quali[3]==TRUE){
cat('\n\n',fac.names[3],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
if(mcomp=='tukey'){
tukey(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='duncan'){
duncan(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsd'){
lsd(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='lsdb'){
lsdb(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='sk'){
scottknott(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='snk'){
snk(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccboot'){
ccboot(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
if(mcomp=='ccF'){
ccF(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],an[8,1],an[8,2],sigT)
}
}
else{
cat('\n\n',fac.names[3],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf2[i],' of ',fac.names[2],'
------------------------------------------------------------------------')
reg.poly(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]], an[8,1], an[8,2], nv3-1, SQ[ii])
}
}
else{cat('\n\n',fac.names[3],' inside of the combination of the levels ',lf1[k],' of ',fac.names[1],' and ',lf2[i],' of ',fac.names[2],'\n')
cat('\nAccording to the F test, the means of this factor are statistical equal.\n')
cat('------------------------------------------------------------------------\n')
mean.table<-tapply.stat(resp[fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],fatores[,3][fatores[,1]==lf1[k] & fatores[,2]==lf2[i]],mean)
colnames(mean.table)<-c('Levels','Means')
print(mean.table)
cat('------------------------------------------------------------------------\n')
}
}
}
}
out<-list()
out$residuals<-anava$residuals
out$df.residual<-anava$df.residual
out$coefficients<-anava$coefficients
out$effects<-anava$effects
out$fitted.values<-anava$fitted.values
out$means.factor1<-tapply.stat(resp,fatores[,1],mean)
out$means.factor2<-tapply.stat(resp,fatores[,2],mean)
out$means.factor3<-tapply.stat(resp,fatores[,3],mean)
tabmedia<-model.tables(anava, "means")
out$means.inside12<-tabmedia$tables$`Fator1:Fator2`
out$means.inside13<-tabmedia$tables$`Fator1:Fator3`
out$means.inside23<-tabmedia$tables$`Fator2:Fator3`
out$means.inside123<-tabmedia$tables$`Fator1:Fator2:Fator3`
invisible(out)
} |
plot.tariff <- function(x, top = NULL, min.prob = 0, ...){
  # Plot the cause-specific mortality fraction (CSMF) distribution of a
  # fitted "tariff" object as a horizontal grey-scale bar chart.
  #
  # Args:
  #   x:        fitted "tariff" object; summary(x)$csmf must be a named
  #             numeric vector of cause probabilities.
  #   top:      optional integer; when given and smaller than the number of
  #             causes, only the `top` largest causes are shown.
  #   min.prob: minimum probability a cause must reach to be plotted.
  #   ...:      further arguments forwarded to barplot().
  #
  # Returns: invisibly, the value of barplot() (bar midpoints); called for
  # its side effect of drawing on the current graphics device.
  sx <- summary(x)
  dist.cod <- sx$csmf
  # If only the top causes are wanted, raise the cutoff to the `top`-th
  # largest probability so exactly the leading causes survive the filter.
  if (!is.null(top)) {
    if (top < length(dist.cod)) {
      thre <- sort(dist.cod, decreasing = TRUE)[top]
      min.prob <- max(min.prob, thre)
    }
  }
  dist.cod.min <- dist.cod[dist.cod >= min.prob]
  # Ascending order so the largest cause ends up at the top of the chart.
  dist.cod.min <- sort(dist.cod.min, decreasing = FALSE)
  # Fix: save and restore graphical parameters so the caller's device
  # settings (las, mar) are not permanently altered by this plot call.
  oldpar <- par(no.readonly = TRUE)
  on.exit(par(oldpar), add = TRUE)
  par(las = 2)
  # Wide left margin to fit (possibly long) cause-of-death labels.
  par(mar = c(5, 15, 4, 2))
  bar.color <- grey.colors(length(dist.cod.min))
  # Reverse so the darkest shade marks the most probable cause.
  bar.color <- rev(bar.color)
  barplot(dist.cod.min, horiz = TRUE, names.arg = names(dist.cod.min),
          col = bar.color, cex.names = 0.8, xlab = "Probability", ...)
}
# roxygen2 package-documentation anchor: the special string "_PACKAGE"
# tells roxygen2 to generate the package-level help topic from the
# preceding roxygen block (presumably above this chunk — not visible here).
"_PACKAGE"
# NULL placeholder for an additional roxygen documentation block
# (documents a topic without attaching it to a real object).
NULL
.onAttach <- function(libname, pkgname){
  # Announce the attached package and its installed version on the
  # startup-message channel (suppressible via suppressPackageStartupMessages).
  ver <- utils::packageVersion("kindisperse")
  packageStartupMessage("kindisperse ", ver)
}
test_that("empty bookdown directory", {
  skip_on_cran()
  # Work inside a throw-away directory and remove it when the test
  # finishes, so repeated runs do not accumulate temp directories.
  bookdown_dir <- tempfile()
  dir.create(bookdown_dir, showWarnings = FALSE)
  on.exit(unlink(bookdown_dir, recursive = TRUE), add = TRUE)
  include_dir <- "include"
  script_dir <- "R"
  output_format <- "bookdown::bs4_book"
  oyml <- file.path(bookdown_dir, "_output.yml")
  byml <- file.path(bookdown_dir, "_bookdown.yml")
  # With default arguments the helper should create both config files
  # and report that they were updated.
  expect_message(add_to_bookdown(bookdown_dir), "updated")
  expect_true(file.exists(oyml))
  # _output.yml: one top-level entry for the output format, wiring in the
  # webexercises css/js assets and disabling smart punctuation.
  expect_silent(check_oyaml <- yaml::read_yaml(oyml))
  expect_equal(names(check_oyaml), output_format)
  expect_equal(check_oyaml[[output_format]]$css, "include/webex.css")
  expect_equal(check_oyaml[[output_format]]$includes$after_body, "include/webex.js")
  expect_equal(check_oyaml[[output_format]]$md_extensions, "-smart")
  expect_true(file.exists(byml))
  # _bookdown.yml: the helper script must be sourced before each chapter.
  expect_silent(check_byaml <- yaml::read_yaml(byml))
  expect_true("before_chapter_script" %in% names(check_byaml))
  expect_equal(check_byaml$before_chapter_script, "R/webex.R")
  # The asset files themselves must have been copied into place.
  css <- file.path(bookdown_dir, include_dir, "webex.css")
  js <- file.path(bookdown_dir, include_dir, "webex.js")
  r <- file.path(bookdown_dir, script_dir, "webex.R")
  expect_true(file.exists(css))
  expect_true(file.exists(js))
  expect_true(file.exists(r))
})
# Scenario: running from inside the project directory with empty-string
# paths; assets land next to the yml files ("./webex.*").  A second call
# with another output format must append to the existing _output.yml.
test_that("empty bookdown_dir, include_dir", {
skip_on_cran()
tdir <- tempfile()
dir.create(tdir, FALSE)
# Run inside the temp dir; restore the caller's working directory after.
oldwd <- getwd()
on.exit(setwd(oldwd))
setwd(tdir)
bookdown_dir <- ""
include_dir <- ""
script_dir <- ""
output_format <- "bookdown::bs4_book"
yml <- file.path(".", "_output.yml")
byml <- file.path(".", "_bookdown.yml")
expect_message(add_to_bookdown(bookdown_dir, include_dir, script_dir), "updated")
expect_true(file.exists(yml))
expect_silent(check_yaml <- yaml::read_yaml(yml))
expect_equal(names(check_yaml), output_format)
# With empty include/script dirs the asset paths are relative ("./...").
expect_equal(check_yaml[[output_format]]$css, "./webex.css")
expect_equal(check_yaml[[output_format]]$includes$after_body, "./webex.js")
expect_equal(check_yaml[[output_format]]$md_extensions, "-smart")
expect_true(file.exists(byml))
expect_silent(check_byaml <- yaml::read_yaml(byml))
expect_true("before_chapter_script" %in% names(check_byaml))
expect_equal(check_byaml$before_chapter_script, "./webex.R")
# Adding a second format must extend, not overwrite, _output.yml.
output_format2 <- "bookdown::html_book"
expect_message(add_to_bookdown(bookdown_dir, include_dir, script_dir, "html_book"), "updated")
expect_true(file.exists(yml))
expect_silent(check_yaml <- yaml::read_yaml(yml))
expect_equal(names(check_yaml), c(output_format, output_format2))
expect_equal(check_yaml[[output_format2]]$css, "./webex.css")
expect_equal(check_yaml[[output_format2]]$includes$after_body, "./webex.js")
expect_equal(check_yaml[[output_format2]]$md_extensions, "-smart")
expect_true(file.exists(byml))
expect_silent(check_byaml <- yaml::read_yaml(byml))
expect_true("before_chapter_script" %in% names(check_byaml))
expect_equal(check_byaml$before_chapter_script, "./webex.R")
})
# Scenario: _output.yml and _bookdown.yml already exist with user content.
# add_to_bookdown() must merge (append css/js/script entries) without
# clobbering any pre-existing keys.
test_that("preexisting _output.yml", {
skip_on_cran()
tdir <- tempfile()
dir.create(tdir, FALSE)
oldwd <- getwd()
on.exit(setwd(oldwd))
setwd(tdir)
bookdown_dir <- "."
include_dir <- "."
output_format <- "bookdown::bs4_book"
yml <- file.path(".", "_output.yml")
byml <- file.path(".", "_bookdown.yml")
# Fixture _output.yml with existing includes/css/theme entries.
# NOTE(review): the theme `primary:` value is a bare quote -- looks like a
# truncated colour string; confirm the fixture is intentional.
write("bookdown::bs4_book:
default: true
df_print: kable
repo:
base: https://github.com/psyteachr/template
branch: master
subdir: book
includes:
in_header: include/header.html
after_body: include/script.js
css: [include/psyteachr.css, include/style.css]
theme:
primary: \"
", yml)
# Fixture _bookdown.yml with an existing before_chapter_script.
write("book_filename: \"_main\"
new_session: yes
output_dir: \"../docs\"
before_chapter_script: \"R/psyteachr_setup.R\"
delete_merged_file: true
clean: []
", byml)
expect_message(add_to_bookdown(), "updated")
expect_true(file.exists(yml))
expect_silent(check_yaml <- yaml::read_yaml(yml))
expect_equal(names(check_yaml), output_format)
# Original keys retained; webexercises entries appended at the end.
expect_equal(names(check_yaml[[output_format]]), c("default", "df_print", "repo", "includes", "css", "theme", "md_extensions"))
expect_equal(check_yaml[[output_format]]$css, c("include/psyteachr.css", "include/style.css", "include/webex.css"))
expect_equal(check_yaml[[output_format]]$includes$after_body, c("include/script.js", "include/webex.js"))
expect_equal(check_yaml[[output_format]]$md_extensions, "-smart")
expect_true(file.exists(byml))
expect_silent(check_byaml <- yaml::read_yaml(byml))
expect_true("before_chapter_script" %in% names(check_byaml))
expect_equal(check_byaml$before_chapter_script, c("R/psyteachr_setup.R", "R/webex.R"))
})
# Smoke test: create fresh demo books for each supported output format.
# The books are only rendered when running interactively (render = FALSE
# on CI/CRAN keeps the test fast).
test_that("new books", {
skip_on_cran()
tdir <- tempfile()
dir.create(tdir, FALSE)
oldwd <- getwd()
on.exit(setwd(oldwd))
setwd(tdir)
render = interactive()
add_to_bookdown(bookdown_dir = "demo_bs4",
output_format = "bs4_book",
render = render)
add_to_bookdown(bookdown_dir = "demo_git",
output_format = "gitbook",
render = render)
add_to_bookdown(bookdown_dir = "demo_html",
output_format = "html_book",
render = render)
add_to_bookdown(bookdown_dir = "demo_tufte",
output_format = "tufte_html_book",
render = render)
})
plot.createBasin <-
function(x, ...)
{
  # Plot the element network of a basin object ('sim' or 'createBasin')
  # with ggnet2: nodes are reservoirs, reaches, junctions, sub-basins and
  # diversions; directed edges follow each element's 'downstream' code.
  # Diversions contribute a second edge towards their 'divertTo' target.
  if(missing(x))
  {
    stop("missing object!")
  }
  # inherits() tests class membership robustly; the original elementwise
  # comparison class(x) == c('sim','createBasin') depended on recycling.
  if(!inherits(x, c('sim','createBasin')))
  {
    stop("bad class type!")
  }
  x <- x$operation
  nRes <- length(x$reservoirs)
  nRec <- length(x$reachs)
  nJun <- length(x$junctions)
  nSub <- length(x$subbasins)
  nDiv <- length(x$diversions)
  # Row 1: element code; row 2: code of its downstream element.
  labelMat <- matrix(NA, 2, nRes+nRec+nJun+nSub+nDiv)
  if(ncol(labelMat) < 1){stop("At least one element is needed for simulation !")}
  name <- c()
  # Loop counters double as per-type element counts afterwards.
  i <- 0; j <- 0; k <- 0; l <- 0; m <- 0
  if(nRes>0){for(i in 1:nRes){labelMat[1,i] <- x$reservoirs[[i]]$label; labelMat[2,i] <- x$reservoirs[[i]]$downstream; name <- c(name, x$reservoirs[[i]]$name)}}
  if(nRec>0){for(j in 1:nRec){labelMat[1,j+nRes] <- x$reachs[[j]]$label; labelMat[2,j+nRes] <- x$reachs[[j]]$downstream; name <- c(name, x$reachs[[j]]$name)}}
  if(nJun>0){for(k in 1:nJun){labelMat[1,k+nRec+nRes] <- x$junctions[[k]]$label; labelMat[2,k+nRec+nRes] <- x$junctions[[k]]$downstream; name <- c(name, x$junctions[[k]]$name)}}
  if(nSub>0){for(l in 1:nSub){labelMat[1,l+nRec+nRes+nJun] <- x$subbasins[[l]]$label; labelMat[2,l+nRec+nRes+nJun] <- x$subbasins[[l]]$downstream; name <- c(name, x$subbasins[[l]]$name)}}
  # Each diversion appears twice: once for its downstream edge and once
  # (extra column, added below) for its divertTo edge; hence its name is
  # recorded twice.
  if(nDiv>0){for(m in 1:nDiv){labelMat[1,m+nRec+nRes+nJun+nSub] <- x$diversions[[m]]$label; labelMat[2,m+nRec+nRes+nJun+nSub] <- x$diversions[[m]]$downstream; name <- c(name, x$diversions[[m]]$name, x$diversions[[m]]$name)}}
  if(nDiv>0){for(m in 1:nDiv){labelMat <- cbind(labelMat, c(x$diversions[[m]]$label, x$diversions[[m]]$divertTo))}}
  colnames(labelMat) <- name
  rownames(labelMat) <- c("code","downstream")
  # Exactly one element may lack a downstream code: the basin outlet.
  # The original test `sum(...) > 1 & sum(...) < 1` could never be TRUE.
  if(sum(is.na(labelMat[2,])) != 1){stop("wrong number of outlet!")}
  type <- c('Reservoir','Reach','Junction','Sub-basin','Diversion')
  availableTypes <- c(ifelse(i>0,1,NA), ifelse(j>0,1,NA), ifelse(k>0,1,NA), ifelse(l>0,1,NA), ifelse(m>0,1,NA))
  type <- type[which(!is.na(availableTypes))]
  types <- rep(type, c(i,j,k,l,2*m)[which(!is.na(availableTypes))])
  # Fixed aesthetics per element type, restricted to the types present.
  color.palette <- c(5,1,2,3,4)[which(!is.na(availableTypes))]
  shape.palette <- c(17,1,3,15,10)[which(!is.na(availableTypes))]
  size.palette <- c(10,0.01,10,10,10)[which(!is.na(availableTypes))]
  names(size.palette) <- type
  names(shape.palette) <- type
  names(color.palette) <- type
  # Adjacency matrix: net[n, con] = 1 when element n drains into con.
  net <- matrix(0, nRes+nRec+nJun+nSub+nDiv*2, nRes+nRec+nJun+nSub+nDiv*2)
  for(n in 1:ncol(net))
  {
    con <- which(labelMat[2,n] == labelMat[1,])
    if(length(con) > 0) {net[n,con] <- 1}
  }
  colnames(net) <- colnames(labelMat)
  rownames(net) <- colnames(labelMat)
  # Fold each diversion's two rows (downstream + divertTo) into one node.
  Net <- net[1:(nRes+nRec+nJun+nSub),]
  if(nDiv > 0)
  {
    for(i in 1:nDiv)
    {
      Net <- rbind(Net, net[nRes+nRec+nJun+nSub+(i-1)*2+1,,drop=FALSE] + net[nRes+nRec+nJun+nSub+(i)*2,,drop=FALSE])
    }
    Net <- Net[,-which(duplicated(labelMat[1,]))]
  }
  net <- network(Net)
  set.vertex.attribute(net, "type", types)
  # The original call had a stray empty argument (double comma) after
  # color='type'.
  ggnet2(net, color='type', size='type', shape='type',
         color.palette=color.palette, shape.palette=shape.palette, size.palette=size.palette,
         label=TRUE, arrow.size = 9, arrow.gap = 0.025) + guides(size = FALSE)
}
# Construct and validate a 'strata' object from a bed table.
#
# x     : data.frame (or coercible) with at least the columns bed_number,
#         base, top, rock_type, prim_litho and grain_size.
# datum : "top" when depths are measured down from the surface (core),
#         "base" when thicknesses are measured up from the bottom
#         (outcrop section).
#
# Returns a new "strata" object, or -- if beds overlap -- a data.frame of
# the overlapping intervals (with an explanatory message).
strata <- function(x, datum = "top") {
if(!is.data.frame(x)) {
x <- as.data.frame(x, stringsAsFactors=FALSE)
}
# All required columns must be present.
rqd_names <- c("bed_number", "base", "top", "rock_type", "prim_litho", "grain_size")
ind_names <- rqd_names %in% colnames(x)
if(!all(ind_names == TRUE)) {
stop(call.=FALSE, paste0("Column names does not agree with the column names required by a strata object. ",
"Column names (", paste0(rqd_names[ind_names == FALSE],
collapse=", "), ") are missing. Check column names"), sep="")
}
if(!all(lapply(x[c("bed_number", "base", "top")], class) %in% c("numeric", "integer"))) {
stop(call.=FALSE, "Columns (bed_number, base and top) should be numeric type")
}
# Zero-thickness beds are invalid.
if(any(x$base == x$top)) {
stop(call.=FALSE, paste0("Check thickness 'base-top' in bed numbers (",
paste0(head(x[(x$base == x$top) == TRUE, "bed_number"], 5),
collapse=", "), ") 'base and top can not be equal'"))
}
if(is.null(datum)) {
stop(call.=FALSE, "datum should be 'base' or 'top'. 'base' when thickness are measured up from the bottom (e.g. stratigraphic section); 'top' when depths are measured from the surface (e.g. core)")
}
# Orientation check: with datum = "base" every top must exceed its base;
# with datum = "top" the reverse, and depths are then negated so larger
# values are always stratigraphically higher.
if(datum == "base") {
if(any(x$base > x$top)) {
stop(call.=FALSE, paste0("Check thickness 'base-top' in bed numbers (",
paste0(head(x[(x$base < x$top) == FALSE, "bed_number"], 5), collapse=", "), ") 'top should be greather than base'"))
}
}else{
if(any(x$base < x$top)) {
stop(call.=FALSE, paste0("Check thickness 'base-top' in bed numbers (",
paste0(head(x[(x$base > x$top) == FALSE, "bed_number"], 5), collapse=", "), ") 'base should be greather than top to draw a well, or set (datum = base) to draw an outcrop section'"))
}
x[, c("base", "top")] <- x[, c("base", "top")] * -1
}
# Convert to an event table (from/to intervals plus bed attributes).
x <- events(from = x$base, to = x$top,
x[,-which(names(x) %in% c("base","top"))])
# Overlapping beds abort construction; the overlaps are returned so the
# user can inspect them.
overlaps <- event_overlaps(x)
beds_over <- overlaps[which(overlaps$n>1),]
if(nrow(beds_over) > 0) {
colnames(beds_over) <- c("base","top", "n")
message("\n", "Error: overlapping beds are not allowed")
message(" This function returned a dataframe with the overlapping intervals", "\n")
return(beds_over)
}
# Gaps between beds merely warn.
gaps <- event_gaps(x)
if(nrow(gaps) > 0) {
if(nrow(gaps) == 1) {
mes <- "There is a range without information"
}else{
mes <- "There are some ranges without information"
}
warning(call.=FALSE, mes)
}
# Normalise text columns: factors -> character, lower-cased, trimmed.
ind_fac <- sapply(x, class) == "factor"
if(any(ind_fac == TRUE)) {
x[ind_fac] <- apply(x[ind_fac], 2, as.character)
warning("factor coerced to character type")
}
ind <- sapply(x, class) == "character"
x[ind] <- apply(x[ind], 2, tolower)
x[ind] <- apply(x[ind], 2, trimws)
# Map rock type / lithology / grain size (given as names or numeric ids)
# to internal id columns using the package lookup tables.
cnv_to_id <- function(x, colna, ref_tb) {
new_col <- paste("id_", colna, sep="")
x[, new_col] <- ref_tb[match(x[, colna], ref_tb[,
ifelse(is.numeric(x[, colna]), "id", "name")]), "id"]
return(x)
}
x <- cnv_to_id(x, "rock_type", rock.table)
x <- cnv_to_id(x, "prim_litho", litho.table)
x <- cnv_to_id(x, "grain_size", gs.table)
# Lithology id 25 forces grain size to "granule" (id 15).
# NOTE(review): the meaning of ids 25/15 comes from the package lookup
# tables, which are not visible here -- confirm.
x[which(x$id_prim_litho == 25), "id_grain_size"] <- 15
x[which(x$id_prim_litho == 25), "grain_size"] <- "granule"
message(" 'beds data has been validated successfully'")
new("strata", x)
}
summary.strata <- function(object, grain.size=FALSE, ...) {
  # Summarise a strata object: bed/cover counts, total and covered
  # thickness, plus a thickness breakdown by lithology and, optionally,
  # by grain size.
  #
  # object     : strata object (event table with from/to plus bed
  #              attributes rock_type, prim_litho, grain_size).
  # grain.size : TRUE to add a summary table by grain size.
  #
  # Returns a "summary.strata" list.
  #
  # Validate the flag up-front.  The original check sat inside
  # `if (grain.size == TRUE)` and therefore could never fire, and its
  # message referred to a non-existent 'litho' argument.
  if(!(grain.size %in% c(FALSE, TRUE))) {
    stop(call.=FALSE, "the 'grain.size' argument must be 'FALSE' or 'TRUE'")
  }
  object$thk <- abs(object$to - object$from)
  # Covered intervals are excluded from the bed count and reported apart.
  xc <- subset(object, object$rock_type == "covered")
  xc$thk <- abs(xc$to - xc$from)
  ans <- list()
  ans$nbeds <- c(length(object$bed_number) - nrow(xc))
  ans$ncover <- nrow(xc)
  ans$thk <- max(object[c("to", "from")]) - min(object[c("to", "from")])
  ans$thkcover <- sum(xc$thk)
  # Thickness, percentage and bed count per lithology (package lookup
  # table litho.table supplies the level names).
  litho_factor <- factor(object[,"prim_litho"], levels=litho.table[, "name"])
  thick = tapply(object[,"thk"], litho.table[litho_factor, 2], sum)
  xnbed <- data.frame(thick, round(thick * 100 / ans$thk, 2),
                      table(litho.table[litho_factor,2]))
  xnbed <- xnbed[order(xnbed$thick, decreasing=TRUE),c(1:2,4)]
  names(xnbed) <- c("Thickness", "Percent (%)", "Number beds")
  if(nrow(xc) > 0) {
    xnbedCA <- data.frame(sum(xc$thk), round(sum(xc$thk) *100 / ans$thk, 2), nrow(xc), row.names = "covered")
    names(xnbedCA) <- c("Thickness", "Percent (%)", "Number beds")
    xnbed <- rbind(xnbed, xnbedCA)
  }
  ans$table_res <- xnbed
  if(grain.size == TRUE) {
    # Grain-size breakdown applies to sedimentary beds only ...
    sub_sed <- object[which(object$rock_type == "sedimentary"),]
    gs_factor <- factor(sub_sed[,"grain_size"], levels=gs.table[, "name"])
    thick_GS <- tapply(sub_sed[,"thk"], gs.table[gs_factor, 2], sum)
    xnbed_GS <- data.frame(thick_GS, round(thick_GS * 100 / ans$thk, 2),
                           table(gs.table[gs_factor,2]))
    xnbed_GS <- xnbed_GS[match(gs.table[,"name"], xnbed_GS$Var1), ]
    xnbed_GS <- xnbed_GS[complete.cases(xnbed_GS),c(1:2,4)]
    names(xnbed_GS) <- c("Thickness", "Percent (%)", "Number beds")
    # ... non-sedimentary beds are summarised by rock type instead.
    sub_other <- object[!(row.names(object) %in% row.names(sub_sed)),]
    if(nrow(sub_other) > 0) {
      rock_factor <- factor(sub_other[,"rock_type"], levels=rock.table[, "name"])
      thick_other <- tapply(sub_other[,"thk"], rock.table[rock_factor, 2], sum)
      xnbed_other <- data.frame(thick_other, round(thick_other * 100 / ans$thk, 2),
                                table(rock.table[rock_factor, 2]))
      xnbed_other <- xnbed_other[match(rock.table[,"name"], xnbed_other$Var1), ]
      xnbed_other <- xnbed_other[complete.cases(xnbed_other),c(1:2,4)]
      names(xnbed_other) <- c("Thickness", "Percent (%)", "Number beds")
    }
    if(nrow(sub_other) > 0) {
      ans$table_GS <- rbind(xnbed_GS, xnbed_other)
    }else{
      ans$table_GS <- xnbed_GS
    }
  }
  class(ans) <- c("summary.strata", "listof")
  ans
}
print.summary.strata <- function(x, ...) {
  # Print method for summary.strata objects: bed/cover counts and
  # thicknesses first, then the lithology table and (when present) the
  # grain-size table.  Returns `x` invisibly, like other print methods.
  xn <- data.frame(c("Number of beds: ",
                     "Number of covered intervals",
                     "Thickness of the section: ",
                     "Thickness of covered intervals: "),
                   c(x$nbeds, x$ncover, x$thk, x$thkcover))
  names(xn) <- NULL
  # Counts print as-is; thicknesses are shown with 3 significant digits.
  # (row.names = FALSE spelled out -- the original used the reassignable F.)
  print(format(xn[1:2,], width = 4, justify = "left"), row.names = FALSE)
  print(format(xn[3:4,], width = 4, digits=3, justify = "left"), row.names = FALSE)
  cat("\nSummary by lithology:", "\n", "\n")
  print(format(x$table_res, width = 12, digits=2, justify = "centre"))
  # table_GS exists only when summary() was called with grain.size = TRUE.
  if("table_GS" %in% names(x)) {
    cat("\nSummary by Grain Size:", "\n", "\n")
    print(format(x$table_GS, width = 12, digits=2, justify = "centre"))
  }
  invisible(x)
}
# Auto-generated unit test (R test-harness output): is.pairlist() must
# return FALSE for a plain list (here, POSIXlt-style components).  The
# eval(parse(text = ...)) round-trip is the generator's serialisation
# format and is kept as generated.
expected <- eval(parse(text="FALSE"));
test(id=0, code={
argv <- eval(parse(text="list(structure(list(sec = c(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), min = c(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L), hour = c(20L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 19L, 20L, 20L, 20L, 20L, 19L, 19L, 19L, 20L, 20L, 20L, 19L, 20L, 19L, 19L, 19L, 20L), mday = c(30L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 31L, 30L, 30L, 30L, 30L, 31L, 31L, 31L, 30L, 30L, 30L, 31L, 30L, 31L, 31L, 31L, 30L), mon = c(5L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 11L, 5L, 5L, 5L, 5L, 11L, 11L, 11L, 5L, 5L, 5L, 11L, 5L, 11L, 11L, 11L, 5L), year = c(72L, 72L, 73L, 74L, 75L, 76L, 77L, 78L, 79L, 81L, 82L, 83L, 85L, 87L, 89L, 90L, 92L, 93L, 94L, 95L, 97L, 98L, 105L, 108L, 112L), wday = c(5L, 0L, 1L, 2L, 3L, 5L, 6L, 0L, 1L, 2L, 3L, 4L, 0L, 4L, 0L, 1L, 2L, 3L, 4L, 0L, 1L, 4L, 6L, 3L, 6L), yday = c(181L, 365L, 364L, 364L, 364L, 365L, 364L, 364L, 364L, 180L, 180L, 180L, 180L, 364L, 364L, 364L, 181L, 180L, 180L, 364L, 180L, 364L, 364L, 365L, 181L), isdst = c(1L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 1L, 1L, 1L, 1L, 0L, 0L, 0L, 1L, 1L, 1L, 0L, 1L, 0L, 0L, 0L, 1L)), .Names = c(\"sec\", \"min\", \"hour\", \"mday\", \"mon\", \"year\", \"wday\", \"yday\", \"isdst\"), tzone = c(\"\", \"EST\", \"EDT\")))"));
do.call(`is.pairlist`, argv);
}, o=expected);
income_constant_prices <- function(data = ech::toy_ech_2018,
                                   base_month = 6,
                                   base_year = 2018,
                                   index = "IPC",
                                   level = "G",
                                   mes = "mes",
                                   ht11 = "ht11",
                                   ht13 = "ht13",
                                   ht19 = "ht19"){
  # Deflate ECH household income variables to constant prices of
  # base_month/base_year, using the CPI ("IPC") or the food-and-beverage
  # index ("IPAB"), either countrywide (level = "G") or by region
  # (level = "R": Montevideo, dpto == 1, vs interior).
  # Returns `data` with the deflated income variables appended.
  assertthat::assert_that(is.data.frame(data))
  assertthat::assert_that(dplyr::between(as.numeric(base_month),1,12), msg = glue::glue("Sorry... :( \n base_month is not between 1 and 12"))
  assertthat::assert_that(index %in% c("IPC", "IPAB"), msg = glue::glue("Sorry... :( \n index is not IPC or IPAB"))
  assertthat::assert_that(level %in% c("G", "R"), msg = glue::glue("Sorry... :( \n level is not G or R"))
  assertthat::assert_that(mes %in% names(data), msg = glue::glue("Sorry... :( \n {mes} is not in data"))
  assertthat::assert_that(ht11 %in% names(data), msg = glue::glue("Sorry... :( \n {ht11} is not in data"))
  assertthat::assert_that(ht13 %in% names(data), msg = glue::glue("Sorry... :( \n {ht13} is not in data"))
  assertthat::assert_that(ht19 %in% names(data), msg = glue::glue("Sorry... :( \n {ht19} is not in data"))
  # Surveys 2013-2015 store ht11 differently; normalise first.
  if(max(data$anio) %in% 2013:2015){
    data <- organize_ht11(data = data, year = max(data$anio))
  }
  if (level == "G") {
    # One countrywide deflator series, joined by survey month.  `index`
    # is already validated, so it can be forwarded directly instead of
    # duplicating the call per index as the original did.
    deflator <- deflate(base_month = base_month,
                        base_year = base_year,
                        index = index,
                        level = "G",
                        df_year = max(data$anio))
    data <- data %>%
      dplyr::mutate(aux = as.integer(haven::zap_labels(.data[[mes]]))) %>%
      dplyr::left_join(deflator, by = c("aux" = "mes"), keep = FALSE)
    data <- data %>%
      dplyr::mutate(y_pc = .data[[ht11]] / .data[[ht19]],
                    y_pc_d = y_pc * deflator,
                    rv_d = .data[[ht13]] * deflator,
                    y_wrv_d = (.data[[ht11]] - .data[[ht13]]) * deflator,
                    y_wrv_pc_d = ((.data[[ht11]] - .data[[ht13]]) / .data[[ht19]]) * deflator) %>%
      dplyr::select(-aux, -deflator)
    message("Variables have been created: \n \t y_pc (income per capita current prices / ingreso per capita a precios corrientes);
y_pc_d (income per capita deflated / ingreso per capita deflactado);
rv_d (rental value deflated / valor locativo deflactado);
y_wrv_d (income without rental value deflated / ingreso sin valor locativo deflactado) &
y_wrv_pc_d (income without rental value per capita deflated / ingreso sin valor locativo per capita deflactado)")
  }
  if (level == "R") {
    # Separate deflator series for Montevideo ("M") and interior ("I").
    deflator_i <- deflate(base_month = base_month, base_year = base_year, index = index, level = "I", df_year = max(data$anio))
    deflator_m <- deflate(base_month = base_month, base_year = base_year, index = index, level = "M", df_year = max(data$anio))
    data <- data %>%
      # Honour the `mes` argument (the "G" branch already did); the
      # original hard-coded data$mes here.
      dplyr::mutate(aux = as.integer(haven::zap_labels(.data[[mes]]))) %>%
      dplyr::left_join(deflator_i, by = c("aux" = "mes"), keep = FALSE) %>%
      dplyr::rename(deflator_i = deflator) %>%
      dplyr::left_join(deflator_m, by = c("aux" = "mes"), keep = FALSE) %>%
      dplyr::rename(deflator_m = deflator)
    data <- data %>%
      dplyr::mutate(deflator_r = ifelse(dpto == 1, deflator_m, deflator_i),
                    y_pc = .data[[ht11]] / .data[[ht19]],
                    y_pc_d_r = y_pc * deflator_r,
                    rv_d_r = .data[[ht13]] * deflator_r,
                    y_wrv_d_r = (.data[[ht11]] - .data[[ht13]]) * deflator_r,
                    y_wrv_pc_d_r = ((.data[[ht11]] - .data[[ht13]]) / .data[[ht19]]) * deflator_r) %>%
      dplyr::select(-aux, -deflator_i, -deflator_m, -deflator_r)
    message("Variables have been created: \n \t y_pc (income per capita current prices / ingreso per capita a precios corrientes)
y_pc_d_r (income per capita deflated / ingreso per capita deflactado);
rv_d_r (rental value deflated / valor locativo deflactado);
y_wrv_d_r (income without rental value deflated / ingreso sin valor locativo deflactado) &
y_wrv_pc_d_r (income without rental value per capita deflated / ingreso sin valor locativo per capita deflactado)")
  }
  return(data)
}
income_quantiles <- function(data = ech::toy_ech_2018,
                             quantile = 5,
                             weights = "pesoano",
                             income = "y_pc_d") {
  # Add an income quintile (quantile = 5) or decile (quantile = 10)
  # column, weighted by the survey weights.
  #
  # data    : ECH data.frame.
  # weights : name of the weight column.
  # income  : name of the (deflated) income column; created by
  #           income_constant_prices().
  assertthat::assert_that(is.data.frame(data))
  assertthat::assert_that(weights %in% names(data))
  assertthat::assert_that(quantile %in% c(5, 10))
  assertthat::assert_that(income %in% names(data), msg = "Sorry... :( \n Income parameter is not calculated, please use income_constant_prices() to obtain the variable.")
  # Extract the weight column into a distinctly-named vector.  The
  # original overwrote the `weights` string argument (with `=` rather
  # than `<-`) and used an unqualified pull(); using dplyr::pull() and a
  # fresh name avoids data-mask collisions with a column called weights.
  w_vec <- dplyr::pull(data[, weights])
  if (quantile == 5) {
    data <- data %>% dplyr::mutate(quintil = statar::xtile(.data[[income]], n = 5, wt = w_vec))
    message("A variable has been created: \n \t quintil (quintil de ingresos)")
  } else {
    data <- data %>% dplyr::mutate(decil = statar::xtile(.data[[income]], n = 10, wt = w_vec))
    message("A variable has been created: \n \t decil (decil de ingresos)")
  }
  return(data)
}
labor_income_per_capita <- function(data = ech::toy_ech_2018,
                                    numero = "numero",
                                    pobpcoac = "pobpcoac",
                                    g126_1 = "g126_1",
                                    g126_2 = "g126_2",
                                    g126_3 = "g126_3",
                                    g126_4 = "g126_4",
                                    g126_5 = "g126_5",
                                    g126_6 = "g126_6",
                                    g126_7 = "g126_7",
                                    g126_8 = "g126_8",
                                    g127_3 = "g127_3",
                                    g128_1 = "g128_1",
                                    g129_2 = "g129_2",
                                    g130_1 = "g130_1",
                                    g131_1 = "g131_1",
                                    g133_1 = "g133_1",
                                    g133_2 = "g133_2",
                                    g134_1 = "g134_1",
                                    g134_2 = "g134_2",
                                    g134_3 = "g134_3",
                                    g134_4 = "g134_4",
                                    g134_5 = "g134_5",
                                    g134_6 = "g134_6",
                                    g134_7 = "g134_7",
                                    g134_8 = "g134_8",
                                    g135_3 = "g135_3",
                                    g136_1 = "g136_1",
                                    g137_2 = "g137_2",
                                    g138_1 = "g138_1",
                                    g139_1 = "g139_1",
                                    g141_1 = "g141_1",
                                    g141_2 = "g141_2",
                                    g142 = "g142",
                                    g144_1 = "g144_1",
                                    g144_2_1 = "g144_2_1",
                                    g144_2_3 = "g144_2_3",
                                    g144_2_4 = "g144_2_4",
                                    g144_2_5 = "g144_2_5"){
  # Compute labour income (main job + second job + self-employment) per
  # person, per household and per capita for occupied persons
  # (pobpcoac codes 2 to 5).  Returns `data` with the new columns.
  assertthat::assert_that(is.data.frame(data))
  # Validate every required column in one pass.  The original repeated
  # this assertion once per column and spelled `glue:glue` (single colon,
  # the sequence operator) -- which would itself error whenever an
  # assertion message was actually evaluated.  g133_2 is now also checked
  # (it is used below but was never asserted).
  required_cols <- c(numero, pobpcoac,
                     g126_1, g126_2, g126_3, g126_4, g126_5, g126_6, g126_7, g126_8,
                     g127_3, g128_1, g129_2, g130_1, g131_1, g133_1, g133_2,
                     g134_1, g134_2, g134_3, g134_4, g134_5, g134_6, g134_7, g134_8,
                     g135_3, g136_1, g137_2, g138_1, g139_1, g141_1, g141_2,
                     g142, g144_1, g144_2_1, g144_2_3, g144_2_4, g144_2_5)
  for (v in required_cols) {
    assertthat::assert_that(v %in% names(data),
                            msg = glue::glue("Sorry... :( \n {v} is not in data"))
  }
  data <- data %>%
    dplyr::mutate(
      # Occupied persons only (pobpcoac in 2:5); everyone else gets NA.
      # NOTE(review): `g133_2/12` divides only g133_2 by 12 (an annual
      # item prorated monthly?) -- confirm against the ECH questionnaire.
      main_work = ifelse(pobpcoac %in% 2:5, g126_1 + g126_2 + g126_3 + g126_4 + g126_5 + g126_6 + g126_7 + g126_8 + g127_3 + g128_1 + g129_2 + g130_1 + g131_1 + g133_1 + g133_2/12, NA),
      second_work = ifelse(pobpcoac %in% 2:5, g134_1 + g134_2 + g134_3 + g134_4 + g134_5 + g134_6 + g134_7 + g134_8 + g135_3 + g136_1 + g137_2 + g138_1 + g139_1 + g141_1 + g141_2/12, NA),
      # NOTE(review): the literal `4` fallback is inconsistent with the NA
      # used above -- possibly a typo for NA; kept as-is to preserve
      # behaviour until confirmed.
      self_employment = ifelse(pobpcoac %in% 2:5, g142 + g144_1 + g144_2_1 + g144_2_3 + g144_2_4 + g144_2_5, 4),
      labor_income = main_work + second_work + self_employment
    ) %>%
    # Aggregate within each household (numero).
    dplyr::group_by(numero) %>%
    dplyr::mutate(labor_income_h = sum(labor_income, na.rm = TRUE),
                  labor_income_h_percapita = labor_income_h /sum(!is.na(labor_income_h))) %>%
    dplyr::ungroup()
  message("Variables have been created: \n \t labor_income (Ingresos laborales) &
labor_income_h (Ingresos laborales del hogar) &
labor_income_h_percapita (Ingresos laborales per capita)")
  return(data)
}
labor_income_per_hour <- function(data = ech::toy_ech_2018,
                                  numero = "numero",
                                  f85 = "f85",
                                  pobpcoac = "pobpcoac",
                                  pt4 = "pt4",
                                  base_month = 6,
                                  base_year = 2018,
                                  mes = "mes"){
  # Compute deflated labour income per hour worked in the main occupation,
  # for occupied persons (pobpcoac == 2) with non-zero income (pt4 != 0).
  # Deflation uses regional CPI: Montevideo (dpto == 1) vs interior.
  # Returns `data` with hours_per_month and total_income_per_hour added.
  assertthat::assert_that(is.data.frame(data))
  assertthat::assert_that(dplyr::between(base_month,1,12), msg = glue::glue("Sorry... :( \n base_month is not between 1 and 12"))
  assertthat::assert_that(mes %in% names(data), msg = glue::glue("Sorry... :( \n {mes} is not in data"))
  assertthat::assert_that(numero %in% names(data), msg = glue::glue("Sorry... :( \n {numero} is not in data"))
  assertthat::assert_that(pobpcoac %in% names(data), msg = glue::glue("Sorry... :( \n {pobpcoac} is not in data"))
  assertthat::assert_that(pt4 %in% names(data), msg = glue::glue("Sorry... :( \n {pt4} is not in data"))
  assertthat::assert_that(f85 %in% names(data), msg = glue::glue("Sorry... :( \n {f85} is not in data"))
  # Regional CPI deflator series, joined by survey month.
  deflator_mdeo <- deflate(base_month = base_month, base_year = base_year, index = "IPC", level = "M", df_year = max(data$anio))
  names(deflator_mdeo)[1] <- "deflator_mdeo"
  deflator_int <- deflate(base_month = base_month, base_year = base_year, index = "IPC", level = "I", df_year = max(data$anio))
  names(deflator_int)[1] <- "deflator_int"
  data <- data %>%
    # Honour the `mes` argument; the original bare `mes` only worked
    # because the data happened to contain a column literally named "mes".
    dplyr::mutate(aux = as.integer(haven::zap_labels(.data[[mes]]))) %>%
    dplyr::left_join(deflator_mdeo, by = c("aux" = "mes"), keep = FALSE) %>%
    dplyr::left_join(deflator_int, by = c("aux" = "mes"), keep = FALSE) %>%
    dplyr::mutate(deflator = dplyr::case_when(dpto == 1 ~ deflator_mdeo,
                                              TRUE ~ deflator_int)) %>%
    dplyr::select(-aux, -deflator_int, -deflator_mdeo)
  data <- data %>%
    dplyr::mutate(
      # f85 = weekly hours; 4.2 approximates weeks per month.
      hours_per_month = f85 * 4.2,
      # NOTE(review): the `* 100` suggests the deflator is expressed in
      # percentage points -- confirm against deflate()'s output.
      total_income_per_hour = ifelse(pobpcoac == 2 & pt4 != 0, (pt4 / deflator) * 100 / hours_per_month, NA))
  message("Variables have been created: \n \t hours_per_month (Cantidad de horas trabajadas al mes en ocupacion principal) &
total_income_per_hour (Total de ingresos por trabajo por hora)")
  return(data)
}
# Demo script: classification Gaussian process via particle learning
# (plgp package) on the 2-d exp2d class data; predicted classes and
# predictive entropy are compared on a held-out grid.
library(plgp)
library(tgp)
library(akima)
graphics.off()
# NOTE(review): rm(list=ls()) wipes the user's workspace -- tolerable only
# because this is a standalone demo script.
rm(list=ls())
# Design: 125 space-filling points in [-2,2]^2 with class labels.
rect <- rbind(c(-2,2),c(-2,2))
X <- dopt.gp(125, Xcand=lhs(10*125, rect))$XX
C <- exp2d.C(X)
Xs <- rectscale(X, rect)
# Feed the data stream to PL through the CGP data-generator defaults.
formals(data.CGP)$X <- Xs
formals(data.CGP)$C <- C
start <- ncol(Xs) + 5*length(unique(C))
end <- nrow(Xs)
prior <- prior.CGP(2)
# Run particle learning from `start` to `end` observations.
out <- PL(dstream=data.CGP,
start=start, end=end,
init=draw.CGP,
lpredprob.CGP, propagate.CGP, prior=prior,
addpall.CGP, params.CGP)
# Out-of-sample grid of 200 points for prediction.
XX <- dopt.gp(200, Xcand=lhs(200*10, rect))$XX
XXs <- rectscale(XX, rect)
CC <- exp2d.C(XX)
outp <- papply(XX=XXs, fun=pred.CGP, prior=prior)
# Per-particle class assignment (argmax probability) and entropy.
ent <- class <- matrix(NA, nrow=length(outp), ncol=nrow(as.matrix(XX)))
for(i in 1:length(outp)) {
class[i,] <- apply(outp[[i]], 1, which.max)
ent[i,] <- apply(outp[[i]], 1, entropy)
}
# Average over particles; count misclassifications against the truth.
mclass <- apply(class, 2, mean)
ment <- apply(ent, 2, mean)
CCp <- round(mclass)
miss <- CCp != CC
sum(miss)
# Plot interpolated mean-class and entropy surfaces, marking
# misclassified grid points in red.
X <- rectunscale(PL.env$pall$X, rect)
par(mfrow=c(1,2))
cols <- c(gray(0.85), gray(0.625), gray(0.4))
image(interp(XX[,1], XX[,2], mclass), col=cols,
xlab="x1", ylab="x2", main="class mean")
points(X); points(XX[miss,], pch=18, col=2)
image(interp(XX[,1], XX[,2], ment),
xlab="x1", ylab="x2", main="entropy mean")
points(X); points(XX[miss,], pch=18, col=2)
# Posterior histograms of range (d), nugget (g) and log posterior for
# classes 2 and 3.
params <- params.CGP()
dev.new()
par(mfrow=c(3,2))
hist(params$d.2); hist(params$d.3)
hist(params$g.2); hist(params$g.3)
hist(params$lpost.2); hist(params$lpost.3)
importance.plot.forestRK <- function(importance.forestRK.object = importance.forestRK(), colour.used = "dark green", fill.colour = "dark green", label.size = 10){
  # Horizontal bar chart of covariate importance for a forestRK model:
  # covariates are ordered by their average decrease in the splitting
  # criterion (entropy or Gini index, depending on how the importance
  # object was computed).
  if(is.null(importance.forestRK.object)){
    stop("'importance.forestRK.object' needs to be provided in the function call")
  }
  # Title label reflects which splitting criterion was used.
  if (importance.forestRK.object$ent.status == TRUE) {
    crit.label <- "Entropy"
  } else {
    crit.label <- "Gini Index"
  }
  decrease <- importance.forestRK.object$average.decrease.in.criteria.vec
  cov.names <- importance.forestRK.object$importance.covariate.names
  plot.data <- as.data.frame(decrease, row.names = cov.names)
  # Flip coordinates so covariate names read horizontally.
  base.plot <- ggplot(plot.data,
                      aes(x = reorder(row.names(plot.data), decrease),
                          y = decrease)) +
    theme_grey(base_size = label.size) +
    coord_flip()
  base.plot +
    geom_bar(stat = "identity", color = colour.used, fill = fill.colour) +
    theme(legend.position = "top") +
    labs(x = "Covariate Names",
         y = "Average Decrease in Splitting Criteria",
         title = paste("Importance Plot Based On The Splitting Criteria", crit.label))
}
prism_shape_pal <- function(palette = c("default", "filled", "complete")) {
  # Build a discrete shape palette function from ggprism's shape tables.
  # Returns a manual palette with a "max_n" attribute giving the number
  # of distinct shapes available.
  palette <- match.arg(palette)
  shape.tbl <- ggprism::ggprism_data$shape_palettes[[palette]]
  pal.fun <- manual_pal(shape.tbl[["pch"]])
  attr(pal.fun, "max_n") <- nrow(shape.tbl)
  pal.fun
}
# Credential-based (username/password) authentication flow for salesforcer.
# Requires the SALESFORCER_* environment variables to be set.
context("Basic Authorization")
# Before sf_auth() runs, no OAuth token should be cached, while a session
# id is expected to be available.
test_that("testing auth status", {
expect_false(token_available())
expect_null(sf_access_token())
expect_true(session_id_available())
expect_is(sf_session_id(), "character")
})
# Username + password + security-token login must return the session list
# with the documented components.
test_that("testing basic auth", {
username <- Sys.getenv("SALESFORCER_USERNAME")
password <- Sys.getenv("SALESFORCER_PASSWORD")
security_token <- Sys.getenv("SALESFORCER_SECURITY_TOKEN")
session <- sf_auth(username = username,
password = password,
security_token = security_token)
expect_is(session, "list")
expect_named(session, c("auth_method", "token", "session_id", "instance_url"))
})
# After basic auth the session id must be populated.
test_that("testing token and session availability after basic auth", {
expect_true(session_id_available())
expect_true(!is.null(sf_session_id()))
})
# Plot the hidden modules of a graph.
#
# graph      : igraph object with named vertices.
# membership : named vector mapping vertex names to module ids.
# l          : igraph layout function for the per-module plots.
# map        : if TRUE, first draw the whole graph coloured by module.
# verbose    : if TRUE, plot each module subgraph (3 s pause between plots).
#
# Returns (invisibly) a list: the annotated graph followed by one induced
# subgraph per module, named HM<id>.
cplot <- function(graph, membership, l = layout.auto, map = FALSE,
verbose = FALSE, ...)
{
# 9999 marks vertices that have no module assignment.
V(graph)$M <- 9999
V(graph)$M[which(V(graph)$name %in% names(membership))] <- membership
if (map) {
V(graph)$color <- V(graph)$M + 1
gplot(graph)
Sys.sleep(3)
}
# One induced subgraph per distinct module id (the 9999 group included).
M <- names(table(V(graph)$M))
K <- length(table(V(graph)$M))
vcol <- as.numeric(M) + 1
HM <- lapply(1:K, function(x) induced_subgraph(graph,
V(graph)$name[V(graph)$M == M[x]]))
names(HM) <- paste0("HM", M)
# Vertex size scales with degree in the full graph.
d <- igraph::degree(graph, mode = "all")*2 + 1
if (verbose) {
glv <- lapply(1:K, function(x) {
E(HM[[x]])$weight <- 1
plot(HM[[x]],
vertex.color = vcol[x],
vertex.size = d[V(HM[[x]])$name],
layout = l,
main = paste0("Hidden Module ", M[x]))
Sys.sleep(3)})
}
return(invisible(c(list(graph = graph), HM)))
}
# Collapse each module of a graph into a single merged node.
#
# graph      : igraph object with named vertices.
# membership : module assignment; if numeric, ids are prefixed with `HM` to
#              build node labels.
# HM         : label prefix used when `membership` is numeric.
#
# Uses graph::combineNodes() on the graphNEL representation (edge weights are
# averaged via `mean`), recolours merged nodes, plots the result and returns
# the merged graph as an igraph object.
mergeNodes <- function(graph, membership, HM, ...)
{
  if (is.numeric(membership)) {
    # Turn numeric module ids into labelled names like "LV1", keeping the
    # original vertex names attached.
    nodes <- names(membership)
    membership <- paste0(HM, membership)
    names(membership) <- nodes
  }
  # LM: list of vertex-name vectors, one entry per module label.
  LM <- NULL
  for (i in 1:length(table(membership))) {
    m <- names(table(membership))[i]
    LMi <- V(graph)$name[which(V(graph)$name %in% names(membership)[membership == m])]
    LM <- c(LM, list(LMi))
  }
  names(LM) <- names(table(membership))
  # combineNodes() operates on graphNEL objects, so convert, merge each
  # module in turn, then convert back.
  gLM <- as_graphnel(graph)
  for (i in 1:length(LM)) {
    gLMi <- graph::combineNodes(LM[[i]], gLM, names(LM)[i], mean)
    gLM <- gLMi
  }
  ig <- graph_from_graphnel(gLM)
  if (length(V(ig)$color) == 0) V(ig)$color <- "white"
  # Merged module nodes carry "V" as their second character (e.g. "LVx").
  V(ig)$color[substr(V(ig)$name, 2, 2) == "V"] <- "orange"
  vcol <- V(ig)$color
  names(vcol) <- V(ig)$name
  gplot(ig)
  return(gLM = ig)
}
# Detect communities in a graph and (optionally) attach hidden-module nodes.
#
# graph : igraph object (directed graphs are collapsed to undirected first).
# type  : clustering algorithm: "tahc" (tree-agglomerative hierarchical on
#         the MST) or one of the igraph algorithms "ebc", "fgc", "lbc",
#         "lec", "loc", "sgc", "wtc" (default walktrap).
# HM    : hidden-module construction: "UV" (unmeasured, directed), "LV"
#         (latent source nodes), "CV" (composite sink nodes) or "none"
#         (just return the membership vector).
# size  : minimum community size to keep.
#
# Returns the membership vector (HM == "none") or
# list(gHM, membership, gHC).
clusterGraph <- function(graph, type = "wtc", HM = "none", size = 5,
                         verbose = FALSE, ...)
{
  # Community detection requires an undirected graph.
  if (!is_directed(graph)) {
    ug <- graph
  } else {
    ug <- as.undirected(graph, mode = "collapse",
                        edge.attr.comb = "ignore")
  }
  if (type == "tahc") {
    # Tree-agglomerative hierarchical clustering: Spearman dissimilarity of
    # the MST distance matrix, average-linkage, cut at height 0.2.
    mst <- minimum.spanning.tree(ug, weights = NULL, algorithm = NULL)
    G <- distances(mst, v = V(mst), to = V(mst), mode = "all",
                   weights = NA)
    D <- 1 - cor(x = G, method = "spearman")
    hMST <- hclust(as.dist(D), method = "average")
    tahc <- cutree(hMST, h = 0.2)
    # Keep only clusters reaching the minimum size.
    cnames <- as.numeric(names(table(tahc)))[table(tahc) >= size]
    membership <- tahc[tahc %in% cnames]
    if(verbose) {
      plot(hMST, labels = FALSE, xlab = "", sub = "")
      abline(h = 0.2, col = "red")
      Sys.sleep(3)
    }
  } else {
    # igraph community-detection algorithms.
    if (type == "ebc") cls <- cluster_edge_betweenness(ug, weights = NULL)
    if (type == "fgc") cls <- cluster_fast_greedy(ug, weights = NULL)
    if (type == "lbc") cls <- cluster_label_prop(ug, weights = NA)
    if (type == "lec") cls <- cluster_leading_eigen(ug, weights = NA)
    if (type == "loc") cls <- cluster_louvain(ug, weights = NA)
    if (type == "sgc") cls <- cluster_spinglass(ug, weights = NA)
    if (type == "wtc") cls <- cluster_walktrap(ug, weights = NULL)
    cat("modularity =", modularity(cls), "\n\n")
    print(sort(sizes(cls)))
    cat("\n")
    # Keep only communities reaching the minimum size.
    cnames <- as.numeric(names(sizes(cls)[sizes(cls) >= size]))
    membership <- membership(cls)[membership(cls) %in% cnames]
    if(verbose) {
      plot(cls, ug)
      Sys.sleep(3)
    }
  }
  K <- length(cnames)
  if (K == 0) return(message("WARNING: no communities with size >=", size, "."))
  if (HM == "UV") {
    # Unmeasured-variable modules: for each community subgraph, source nodes
    # (in-degree 0) point to a UV node, which points to the remaining nodes.
    gHC <- cplot(graph, membership = membership, map = FALSE,
                 verbose = FALSE)[-1]
    ftm <- Vxx <- NULL
    for (i in 1:K) {
      d <- igraph::degree(gHC[[i]], mode = "in")
      Vx <- V(gHC[[i]])$name[d == 0]
      Vy <- V(gHC[[i]])$name[d != 0]
      ftm <- rbind(ftm, cbind(Vx, rep(paste0("UV", i), length(Vx))))
      ftm <- rbind(ftm, cbind(rep(paste0("UV", i), length(Vy)), Vy))
      Vxx <- c(Vxx, Vx)
    }
    gLM <- graph_from_data_frame(ftm, directed = TRUE)
    V(gLM)$color <- "yellow"
    V(gLM)$color[substr(V(gLM)$name, 1, 1) == "U"] <- "lightblue"
    V(gLM)$color[V(gLM)$name %in% Vxx] <- "green"
  } else if (HM == "LV") {
    # Latent-variable modules: one LX source node per community.
    ftm <- data.frame(from = c(paste0("LX", membership)),
                      to = names(membership))
    gLM <- graph_from_data_frame(ftm, directed = TRUE)
    V(gLM)$LV <- 0
    V(gLM)$LV[1:K] <- 1
    V(gLM)$color <- ifelse(V(gLM)$LV == 1, "lightblue", "yellow")
    gHC <- NULL
  } else if (HM == "CV") {
    # Composite-variable modules: one CY sink node per community.
    ftm <- data.frame(from = names(membership),
                      to = c(paste0("CY", membership)))
    gLM <- graph_from_data_frame(ftm, directed = TRUE)
    V(gLM)$LV <- 0
    V(gLM)$LV[(vcount(gLM) - K + 1):vcount(gLM)] <- 1
    V(gLM)$color <- ifelse(V(gLM)$LV == 1, "lightblue", "green")
    gHC <- NULL
  } else if (HM == "none") {
    return( membership )
  }
  if (verbose == TRUE) {
    plot(gLM)
  }
  return(list(gHM = gLM, membership = membership, gHC = gHC))
}
# Cluster a graph into hidden modules, compute one score per module and
# subject, and (optionally) fit a lavaan SEM of every module score on the
# group variable.
#
# graph : igraph object whose vertex names correspond to columns of `data`.
# data  : subjects x variables matrix/data.frame.
# group : per-subject group indicator; length 0 skips the SEM fit.
# HM    : hidden-module type: "LV" (latent, ML factor scores), "CV"
#         (composite, PC scores) or "UV" (unmeasured; directed graphs only).
# type  : community-detection algorithm forwarded to clusterGraph().
# size  : minimum community size forwarded to clusterGraph().
#
# Returns list(fit = lavaan fit or NULL/NA, membership, dataHM = group +
# module scores per subject).
clusterScore <- function(graph, data, group, HM = "LV", type = "wtc",
                         size = 5, verbose = FALSE, ...)
{
  # Restrict to graph vertices that are actually measured in `data`.
  nodes <- colnames(data)[colnames(data) %in% V(graph)$name]
  dataY <- data[, nodes]
  ig <- induced_subgraph(graph, vids = which(V(graph)$name %in% nodes))
  ig <- simplify(ig, remove.loops = TRUE)
  if (HM == "LV") {
    LX <- clusterGraph(graph = ig, type = type,
                       HM = "LV",
                       size = size,
                       verbose = verbose)
    if (length(LX) == 0) return(list(fit = NA, M = NA, dataHM = NA))
    gLM <- LX[[1]]
    membership <- LX[[2]]
    LX <- V(gLM)$name[substr(V(gLM)$name, 1, 1) == "L"]
    K <- as.numeric(names(table(membership)))
    LV <- NULL
    for(k in 1:length(LX)) {
      # Score module k with a one-factor ML factor analysis.
      Xk <- subset(names(membership), membership == K[k])
      Y <- as.matrix(dataY[, which(colnames(dataY) %in% Xk)])
      fa1 <- cate::factor.analysis(Y = Y, r = 1, method = "ml")$Z
      LV <- cbind(LV, fa1)
    }
    colnames(LV) <- gsub("LX", "LV", LX)
    rownames(LV) <- rownames(dataY)
    dataLC <- cbind(group, LV)
    model <- paste0(colnames(LV), "~group")
  }
  if (HM == "CV") {
    LY <- clusterGraph(graph = ig, type = type,
                       HM = "CV",
                       size = size,
                       verbose = verbose)
    if (length(LY) == 0) return(list(fit = NA, M = NA, dataHM = NA))
    gLM <- LY[[1]]
    membership <- LY[[2]]
    LY <- V(gLM)$name[substr(V(gLM)$name, 1, 1) == "C"]
    K <- as.numeric(names(table(membership)))
    CV <- NULL
    for(k in 1:length(LY)) {
      # Score module k with its first principal component.
      Xk <- subset(names(membership), membership == K[k])
      Y <- as.matrix(dataY[,which(colnames(dataY) %in% Xk)])
      pc1 <- cate::factor.analysis(Y = Y, r = 1, method = "pc")$Z
      CV <- cbind(CV, pc1)
    }
    colnames(CV) <- gsub("CY", "CV", LY)
    rownames(CV) <- rownames(dataY)
    dataLC <- cbind(group, CV)
    model <- paste0(colnames(CV), "~group")
  }
  if (HM == "UV") {
    # UV needs edge directions to split modules into sources and sinks.
    if (!is.directed(graph)) {
      # FIX: message previously read "udirected".
      return(message("UV is not applicable with undirected graph !"))
    }
    LXY <- clusterGraph(graph = ig, type = type,
                        HM = "UV",
                        size = size,
                        verbose = verbose)
    if(length(LXY) == 0) return(list(fit = NA, M = NA, dataHM = NA))
    membership <- LXY[[2]]
    gLC <- LXY[[3]]
    LXY <- paste0("HM", names(table(membership)))
    UV <- na <- NULL
    for (k in 1:length(LXY)) {
      gk <- gLC[[which(names(gLC) %in% LXY)[k]]]
      d <- igraph::degree(gk, mode = "in")
      # Source nodes (in-degree 0) predict the remaining (sink) nodes.
      idx <- which(colnames(dataY) %in% V(gk)$name[d == 0])
      Xk <- as.matrix(dataY[, idx])
      idy <- which(colnames(dataY) %in% V(gk)$name[d > 0])
      # Skip modules that are rank-deficient or have no sources/sinks.
      if (ncol(Xk) > nrow(Xk) | length(idx) == 0 | length(idy) == 0) {
        na <- c(na, k)
        next
      }
      Yk <- as.matrix(dataY[, idy])
      # Projection of Yk onto the column space of Xk (fitted values of the
      # OLS regression of the sinks on the sources).
      Uk <- Xk%*%solve(t(Xk)%*%Xk)%*%t(Xk)%*%Yk
      spc1 <- cate::factor.analysis(Y = as.matrix(Uk), r = 1,
                                    method = "pc")$Z
      UV <- cbind(UV, spc1)
    }
    if (length(na) == 0) {
      colnames(UV) <- gsub("HM", "UV", LXY)
    } else {
      colnames(UV) <- gsub("HM", "UV", LXY[-na])
    }
    rownames(UV) <- rownames(dataY)
    dataLC <- cbind(group, UV)
    model <- paste0(colnames(UV), "~group")
  }
  if (length(group) > 0) {
    # Fit score_k ~ group for every module score with lavaan.
    fsr <- sem(model, data = dataLC, se = "standard", fixed.x = TRUE)
    if (fsr@Fit@converged == TRUE) {
      srmr <- fitMeasures(fsr, c("srmr"))
      cat("Model converged:", fsr@Fit@converged, "\nSRMR:", srmr, "\n\n")
    } else {
      cat("Model converged:", fsr@Fit@converged, "\nSRMR:", NA, "\n\n")
      fsr <- NULL
    }
  } else if (length(group) == 0) {
    fsr <- NULL
    dataLC <- cbind(group = rep(NA, nrow(dataY)), dataLC)
  }
  if (verbose == TRUE) {
    # Diagnostic: fit a SEM on the module-merged graph as well.
    X <- cbind(dataLC, data)
    gM <- mergeNodes(graph, membership, HM = HM)
    sem1 <- SEMfit(gM, X, group)
  }
  return(list(fit = fsr, membership = membership, dataHM = dataLC))
}
# Extend an input causal graph with connector paths found in a reference
# network.
#
# g    : list(graph, guu) — the (directed) input graph and the graph of
#        extra u->u (or u--v) edges to be mapped onto the reference network.
# data : data matrix; both graphs are restricted to its column names.
# gnet : reference (interactome) network used to find connector paths.
#
# Returns list(Ug = union of the pruned input graph and the connector graph,
# guv = the connector graph itself). In Ug, edges already present in the
# input graph are coloured blue; newly added ones red.
extendGraph <- function(g = list(), data, gnet, verbose = FALSE, ...)
{
  graph <- g[[1]]
  if (!is_directed(graph)) {
    message("ERROR: The first input graph is not a directed graph.")
    return(NULL)
  }
  guu <- g[[2]]
  # Keep only nodes measured in `data`, in both the reference network and
  # the input graph.
  vids <- which(V(gnet)$name %in% colnames(data))
  gnet <- induced_subgraph(graph = gnet, vids = vids)
  vids <- which(V(graph)$name %in% colnames(data))
  graph <- induced_subgraph(graph = graph, vids = vids)
  # Drop previously flagged ("red") edges and strip attributes so the union
  # below does not clash on them.
  ig <- graph - E(graph)[E(graph)$color == "red"]
  if (!is.null(E(ig)$weight)) ig <- delete_edge_attr(ig, "weight")
  if (!is.null(E(ig)$color)) ig <- delete_edge_attr(ig, "color")
  if (!is.null(V(ig)$color)) ig <- delete_vertex_attr(ig, "color")
  # Map the guu edges onto shortest connector paths within gnet.
  guv <- psi2guv(guu = guu, ig = ig, gnet = gnet, verbose = verbose)
  if (ecount(guv) == 0) return(list(Ug = ig, guv = guv))
  # Union strategy depends on the directedness of the two graphs.
  if (is.directed(guv) & is.directed(gnet)) {
    Ug <- graph.union(g = list(ig, guv))
  }
  if (!is.directed(guv) & is.directed(gnet)) {
    guv <- orientEdges(ug = guv, dg = gnet)
    Ug <- graph.union(g = list(ig, guv))
  }
  if (!is.directed(guv) & !is.directed(gnet)) {
    Ug <- graph.union(g = list(as.undirected(ig), guv))
  }
  # Colour edges: blue = already in the input graph, red = newly added.
  E1 <- attr(E(Ug), "vnames")
  E0 <- attr(E(ig), "vnames")
  E(Ug)$color <- ifelse(E1 %in% E0, "blue", "red")
  return(list(Ug = Ug, guv = guv))
}
# Map each edge of `guu` onto a shortest path within the reference network
# `gnet`, and assemble the union of those paths into a connector graph.
#
# guu  : graph of candidate edges (directed or undirected).
# ig   : the pruned input graph (used only for vertex colouring).
# gnet : reference network providing the connecting paths.
#
# Returns the connector graph `guv` (possibly empty). Vertex colours:
# lightblue = guu endpoints, yellow = new connector nodes, green2 = input
# graph nodes reused as connectors.
psi2guv <- function(guu, ig, gnet, verbose, ...)
{
  # guu can only be mapped through nodes present in the reference network.
  vids <- which(V(guu)$name %in% V(gnet)$name)
  guu <- induced_subgraph(graph = guu, vids = vids)
  if(verbose) {
    plot(guu, main = "direct(or covariance) graph (guu) in gnet")
    Sys.sleep(3)
  }
  ftm <- as_edgelist(guu)
  vpath <- ftmuv <- NULL
  for (i in 1:nrow(ftm)) {
    # Respect edge direction only when both graphs are directed.
    mode <- ifelse(is.directed(guu) & is.directed(gnet), "out", "all")
    # Skip endpoint pairs that are not connected in gnet at all.
    if (distances(gnet, ftm[i, 1], ftm[i, 2], mode = mode,
                  weights = NA) == Inf) next
    if (is.null(E(gnet)$pv)) {
      suppressWarnings(path <- shortest_paths(gnet, ftm[i, 1],
                                              ftm[i, 2],
                                              mode = mode,
                                              weights = NA)$vpath)
    } else {
      # With per-edge p-values available, enumerate all shortest paths so
      # the most significant one can be chosen below.
      path <- all_shortest_paths(gnet, ftm[i, 1], ftm[i, 2],
                                 mode = mode,
                                 weights = NA)$res
    }
    if (length(path) > 1) {
      # Pick the path maximising the Fisher-type statistic -2*sum(log(pv)).
      fX2 <- NULL
      for (k in 1:length(path)) {
        pathk <- induced_subgraph(gnet, V(gnet)$name[path[[k]]])
        fX2[k] <- -2*sum(log(E(pathk)$pv))
      }
      path <- path[[which(fX2 == max(fX2))[1]]]
    } else {
      path <- path[[1]]
    }
    # Record intermediate vertices and consecutive edge pairs of the path.
    V <- V(gnet)$name[path]
    vpath <- c(vpath, V[-c(1, length(V))])
    for(h in 1:(length(V) - 1)) ftmuv <- rbind(ftmuv, c(V[h], V[h + 1]))
  }
  # Deduplicate edges and drop incomplete rows.
  ftmuv <- na.omit(ftmuv[duplicated(ftmuv) != TRUE,])
  ftmuv <- matrix(ftmuv, ncol = 2)
  if (nrow(ftmuv) > 0) {
    mode <- ifelse(is.directed(guu) & is.directed(gnet), TRUE, FALSE)
    guv <- graph_from_data_frame(ftmuv, directed = mode)
    guv <- simplify(guv, remove.loops = TRUE)
    # vv: genuinely new connector nodes; uv: input-graph nodes reused on a path.
    vv <- V(guv)$name[-which(V(guv)$name %in% V(ig)$name)]
    uv <- V(ig)$name[which(V(ig)$name %in% unique(vpath))]
    V(guv)$color[V(guv)$name %in% V(guu)$name] <- "lightblue"
    V(guv)$color[V(guv)$name %in% vv] <- "yellow"
    V(guv)$color[V(guv)$name %in% uv] <- "green2"
    if(verbose) {
      plot(guv, main = "Extended connector graph (guv)")
      Sys.sleep(3)
    }
  } else {
    cat("\n", "no edges u->u (or u--v) found !", "\n\n")
    guv <- make_empty_graph(n = 0)
  }
  return(guv)
}
# Register 'makeAP' as an S4 generic (once) so method dispatch is possible.
if (!isGeneric('makeAP')) {
  setGeneric('makeAP', function(x, ...)
    standardGeneric('makeAP'))
}
# Generate an autonomous survey flight plan for a DJI Phantom 3 ("djip3") or
# a Pixhawk-based UAV ("pixhawk").
#
# Either computes the waypoint grid from `surveyArea` coordinates, or, with
# useMP = TRUE, imports a MissionPlanner/QGroundControl survey JSON. A DEM
# (`demFn`) is analysed for terrain following and RTH altitude, control
# files are written into the project folder structure, and all parameters
# are logged via log4r.
#
# Returns a named list: lp (launch position), wp (waypoints), demA, oDEM,
# rDEM, fp (camera footprints), fA (task area), rcA, hm (coverage heat map).
makeAP <- function(projectDir = tempdir(),
                   locationName = "flightArea",
                   surveyArea = NULL,
                   flightAltitude = 100,
                   launchAltitude = NULL,
                   followSurface = FALSE,
                   followSurfaceRes = NULL,
                   demFn = NULL,
                   altFilter = 1.0,
                   horizonFilter = 30,
                   flightPlanMode = "track",
                   useMP = FALSE,
                   presetFlightTask = "remote",
                   overlap = 0.8,
                   maxSpeed = 20.0,
                   maxFlightTime = 10,
                   picRate = 2,
                   windCondition = 0,
                   uavType = "pixhawk",
                   cameraType = "MAPIR2",
                   cmd=16,
                   uavViewDir = 0,
                   djiBasic = c(0, 0, 0,-90, 0),
                   dA = FALSE,
                   heatMap = FALSE,
                   picFootprint = FALSE,
                   rcRange = NULL,
                   copy = FALSE,
                   runDir=tempdir(),
                   gdalLink=NULL)
{
  cat("setup environ and params...\n")
  # Strip a trailing path separator from the project directory.
  if (substr(projectDir,nchar(projectDir),nchar(projectDir)) == "/") projectDir <- substr(projectDir,1,nchar(projectDir)-1)
  else if (substr(projectDir,nchar(projectDir),nchar(projectDir)) == "\\") projectDir <- substr(projectDir,1,nchar(projectDir)-1)
  # Create/locate the project folder structure and derive run identifiers.
  projstru <- setProjStructure (projectDir,
                                locationName,
                                flightAltitude,
                                uavType,
                                cameraType,
                                surveyArea,
                                demFn,
                                copy)
  dateString <- projstru[3]
  taskName <- projstru[2]
  csvFn <- projstru[1]
  logger <- log4r::create.logger(logfile = paste0(file.path(projectDir, locationName, dateString, "fp-data/log/"),strsplit(basename(taskName[[1]]), "\\.")[[1]][1],'.log'))
  log4r::level(logger) <- "INFO"
  log4r::levellog(logger,'INFO',"--------------------- START RUN ---------------------------")
  log4r::levellog(logger, 'INFO', paste("Working folder: ", file.path(projectDir, locationName, dateString)))
  # The heat map needs the per-picture footprints.
  if (heatMap) { picFootprint = TRUE }
  # Camera/UAV specific base parameters; `factor` scales speed and footprint.
  if (uavType == "djip3") {
    cameraType<-"dji4k"
    factor <- 1.71
    flightParams = c(flightPlanMode = flightPlanMode,
                     launchAltitude = launchAltitude,
                     flightAltitude = flightAltitude,
                     presetFlightTask = presetFlightTask,
                     overlap = overlap,
                     curvesize = djiBasic[1],
                     rotationdir = djiBasic[2],
                     gimbalmode = djiBasic[3],
                     gimbalpitchangle = djiBasic[4],
                     uavViewDir = uavViewDir,
                     followSurfaceRes = followSurfaceRes)
    fliAltRatio <- 1 - overlap
    uavOptimumSpeed <- ceiling(factor * flightAltitude * fliAltRatio)
  }
  else if (uavType == "pixhawk") {
    if (cameraType == "MAPIR2") {
      factor <- 1.55
    } else if (cameraType == "GP3_7MP") {
      factor <- 1.31
    } else if (cameraType == "GP3_11MP") {
      factor <-1.71
    }
    flightParams = c(flightPlanMode = flightPlanMode,
                     launchAltitude = launchAltitude,
                     flightAltitude = flightAltitude,
                     presetFlightTask = presetFlightTask,
                     overlap = overlap,
                     uavViewDir = uavViewDir,
                     followSurfaceRes = followSurfaceRes)
    fliAltRatio <- 1 - overlap
    uavOptimumSpeed <- ceiling(factor * flightAltitude * fliAltRatio)
  }
  if (useMP) {
    # --- Import a MissionPlanner/QGC survey: surveyArea is a JSON file. ---
    t<-jsonlite::fromJSON(surveyArea)
    listPos<-grep("command", t$mission$items$TransectStyleComplexItem$Items)
    tmp<- t$mission$items$TransectStyleComplexItem$Items[listPos][[1]]
    # Keep only NAV_WAYPOINT items (MAVLink command 16); params 5:6 are lat/lon.
    coord<-tmp[tmp["command"]==16, ]
    df_coordinates<-t(as.data.frame(rlist::list.cbind(coord[,"params",])))[,5:6]
    tracks<- ceiling(nrow(coord)/4)
    trackDistance <- t$mission$items$TransectStyleComplexItem$CameraCalc$AdjustedFootprintFrontal[listPos]
    crossDistance <- t$mission$items$TransectStyleComplexItem$CameraCalc$AdjustedFootprintSide[listPos]
    totalTrackdistance <- trackDistance
    fliAltRatio <- 1 - t$mission$items$TransectStyleComplexItem$CameraCalc$SideOverlap[listPos]/100
    flightAltitude <- t$mission$items$TransectStyleComplexItem$CameraCalc$DistanceToSurface[listPos]
    maxSpeed <- t$mission$cruiseSpeed
    launchLat <- t$mission$plannedHomePosition[1]
    launchLon <- t$mission$plannedHomePosition[2]
    updir <- t$mission$items$angle[listPos]
    if (updir <= 180) downdir <- updir + 180
    else if (updir>180) downdir<- updir -180
    crossdir <- geosphere::bearing(c(df_coordinates[2,][2],df_coordinates[2,][1] ),c(df_coordinates[3,][2],df_coordinates[3,][1] ),a = 6378137,f = 1 / 298.257223563)
    missionArea <- t$mission$items$polygon[listPos]
    launch2startHeading <- geosphere::bearing(c(launchLon, launchLat),c(df_coordinates[1,][2],df_coordinates[1,][1] ),a = 6378137,f = 1 / 298.257223563)
    groundResolution<-t$mission$items$TransectStyleComplexItem$CameraCalc$ImageDensity[listPos]
    flightLength <- 0
    flightParams = c(flightPlanMode = flightPlanMode,
                     launchAltitude = launchAltitude,
                     flightAltitude = flightAltitude,
                     presetFlightTask = presetFlightTask,
                     overlap = 1- fliAltRatio ,
                     uavViewDir = uavViewDir,
                     followSurfaceRes = followSurfaceRes)
    p <- makeFlightParam( c(missionArea[[1]][1],missionArea[[1]][5],
                            missionArea[[1]][2],missionArea[[1]][6] ,
                            missionArea[[1]][3],missionArea[[1]][7] ,
                            launchLat, launchLon),
                          flightParams, followSurface)
    mode<-p$flightPlanMode
    # A zero uavViewDir means "look along the flight direction".
    if (abs(as.numeric(flightParams["uavViewDir"])) == 0) {
      uavViewDir <- updir
    }
    else {
      uavViewDir <- abs(as.numeric(flightParams["uavViewDir"]))
    }
    tarea <- data.table::data.table(
      longitude= as.data.frame(t$mission$items$polygon[listPos][1])[,2],
      latitude=as.data.frame(t$mission$items$polygon[listPos][1])[,1])
    tarea = sf::st_as_sf(tarea, coords = c("longitude", "latitude"),
                         crs = 4326)
    tarea<- sf::st_bbox(tarea)
    taskArea<-sf::st_as_sfc(sf::st_bbox(tarea))
    taskAreaUTM <- sf::st_transform(taskArea, 4326)
    surveyAreaUTM <- sf::st_area(taskAreaUTM)
    mavDF <- data.frame()
    heading <- updir
    lns <- list()
    # Climb from launch to flight altitude before the first waypoint.
    lns <- launch2flightalt(p, lns, uavViewDir, launch2startHeading, uavType)
    pos <- c(df_coordinates[1,][2],df_coordinates[1,][1])
    # Camera footprint at the first position; used to derive trigger distances.
    footprint <- calcCamFoot(pos[1], pos[2], uavViewDir, trackDistance, flightAltitude, 0, 0,factor)
    footprint<- sp::spTransform(footprint,sp::CRS("+proj=utm +zone=32 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs"))
    landscape<-abs(abs(footprint@bbox[1]-footprint@bbox[3])*overlap-abs(footprint@bbox[1]-footprint@bbox[3]))
    portrait<- abs(abs(footprint@bbox[2]-footprint@bbox[4])*overlap-abs(footprint@bbox[2]-footprint@bbox[4]))
    if (picFootprint) camera <- calcCamFoot(pos[1], pos[2], uavViewDir, trackDistance, flightAltitude, 0, 0)
    else camera = "NULL"
    if (uavType == "pixhawk") {
      lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1], head = uavViewDir, group = 99 )
    }
    pOld <- pos
    if (mode == "track") {
      if (uavType == "pixhawk") {
        lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1],head = uavViewDir,group = 99)
      }
      # NOTE(review): `len` is not defined in the useMP branch (only in the
      # non-useMP branch below); mode == "track" with useMP likely errors
      # here — TODO confirm intended track length source.
      trackDistance <- len
      multiply <- 1
    }
    else if (mode == "waypoints") {
      if (uavType == "pixhawk") {
        lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1],head = uavViewDir,group = 99)
      }
    }
    else if (mode == "terrainTrack") group = 99
    # Per-segment heading, length and number of interpolated points.
    df_coord<-as.data.frame(df_coordinates)
    names(df_coord)<-c("lat","lon")
    for (j in seq(1:(nrow(df_coord)-1))) {
      df_coord$heading[j] <- geosphere::bearing(c(df_coord$lon[j],df_coord$lat[j] ), c(df_coord$lon[j + 1],df_coord$lat[j + 1]),a = 6378137,f = 1 / 298.257223563)
      df_coord$len[j] <- geosphere::distGeo(c(df_coord$lon[j],df_coord$lat[j] ), c(df_coord$lon[j + 1],df_coord$lat[j + 1]),a = 6378137,f = 1 / 298.257223563)
      df_coord$multiply <- floor(df_coord$len / followSurfaceRes)
    }
    cat("calculating waypoints...\n")
    # FIX: removed redundant duplicated assignment (was `pb <- pb <- ...`).
    pb <- utils::txtProgressBar(max = tracks, style = 3)
    for (j in seq(1:(nrow(df_coord)-1))) {
      pOld<- c(df_coord$lon[j],df_coord$lat[j])
      for (i in seq(1:df_coord$multiply[j])) {
        # group 99 marks segment end points; group 1 intermediate points.
        if (mode == "waypoints" || mode == "terrainTrack") {
          if (i >= df_coord$multiply[j]) {group <- 99}
          else {group <- 1}}
        else {i <- 2}
        pos <- calcNextPos(pOld[1], pOld[2], df_coord$heading[j], followSurfaceRes)
        pOld <- pos
        flightLength <- flightLength + followSurfaceRes
        if (mode == "track") {
          group <- 99
        }
        lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2], lon = pos[1], head = uavViewDir, group = group)
      }
      utils::setTxtProgressBar(pb, j)
    }
    close(pb)
  }
  else if (!useMP){
    # --- Compute the waypoint grid from the given surveyArea coordinates. ---
    surveyArea <- calcSurveyArea(surveyArea, projectDir, logger, useMP)
    p <- makeFlightParam(surveyArea, flightParams, followSurface)
    mode <- as.character(p$flightPlanMode)
    flightAltitude <- as.numeric(flightParams["flightAltitude"])
    trackDistance <- calcTrackDistance(fliAltRatio, flightAltitude, factor)
    totalTrackdistance <- trackDistance
    crossDistance <- trackDistance
    taskArea <- taskarea(p, csvFn)
    taskAreaUTM <- sp::spTransform(taskArea, sp::CRS(paste("+proj=utm +zone=",long2UTMzone(p$lon1)," ellps=WGS84",sep = '')))
    surveyAreaUTM <- rgeos::gArea(taskAreaUTM)
    # Headings along / across the survey grid.
    launch2startHeading <- geosphere::bearing(c(p$launchLon, p$launchLat),c(p$lon1, p$lat1),a = 6378137,f = 1 / 298.257223563)
    updir <- geosphere::bearing(c(p$lon1, p$lat1),c(p$lon2, p$lat2),a = 6378137,f = 1 / 298.257223563)
    downdir <- geosphere::bearing(c(p$lon2, p$lat2),c(p$lon1, p$lat1),a = 6378137,f = 1 / 298.257223563)
    crossdir <- geosphere::bearing(c(p$lon2, p$lat2),c(p$lon3, p$lat3),a = 6378137,f = 1 / 298.257223563)
    len <- geosphere::distGeo(c(p$lon1, p$lat1), c(p$lon2, p$lat2))
    crosslen <- distGeo(c(p$lon2, p$lat2),c(p$lon3, p$lat3),a = 6378137,f = 1 / 298.257223563)
    if (is.null(followSurfaceRes)) {
      followSurfaceRes <- trackDistance
      p$followSurfaceRes <- followSurfaceRes
    }
    if (followSurface) {
      multiply <- floor(len / followSurfaceRes)
      trackDistance <- followSurfaceRes
    } else{
      multiply <- floor(len / trackDistance)
    }
    tracks <- floor(crosslen / crossDistance)
    heading <- updir
    # A zero uavViewDir means "look along the flight direction".
    if (abs(as.numeric(flightParams["uavViewDir"])) == 0) {
      uavViewDir <- updir
    }
    else {
      uavViewDir <- abs(as.numeric(flightParams["uavViewDir"]))
    }
    group <- 1
    flightLength <- 0
    djiDF <- data.frame()
    mavDF <- data.frame()
    lns <- list()
    # Climb from launch to flight altitude before the first waypoint.
    lns <- launch2flightalt(p, lns, uavViewDir, launch2startHeading, uavType)
    pos <- c(p$lon1, p$lat1)
    # Camera footprint at the first position; used to derive trigger distances.
    footprint <- calcCamFoot(pos[1], pos[2], uavViewDir, trackDistance, flightAltitude, 0, 0,factor)
    footprint<- sp::spTransform(footprint,sp::CRS("+proj=utm +zone=32 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs"))
    landscape<-abs(abs(footprint@bbox[1]-footprint@bbox[3])*overlap-abs(footprint@bbox[1]-footprint@bbox[3]))
    portrait<- abs(abs(footprint@bbox[2]-footprint@bbox[4])*overlap-abs(footprint@bbox[2]-footprint@bbox[4]))
    if (picFootprint) camera <- calcCamFoot(pos[1], pos[2], uavViewDir, trackDistance, flightAltitude, 0, 0)
    else camera = "NULL"
    if (uavType == "djip3") {
      lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = 99, p)
    }
    if (uavType == "pixhawk") {
      lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1], head = uavViewDir, group = 99 )
    }
    pOld <- pos
    if (mode == "track") {
      if (uavType == "djip3") {
        lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = 99, p)
      }
      if (uavType == "pixhawk") {
        lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1],head = uavViewDir,group = 99)
      }
      # In plain track mode one leg covers the whole track length.
      trackDistance <- len
      multiply <- 1
    }
    else if (mode == "waypoints") {
      if (uavType == "djip3") {
        lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = 99, p)
      }
      if (uavType == "pixhawk") {
        lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2],lon = pos[1],head = uavViewDir,group = 99)
      }
    }
    else if (mode == "terrainTrack") group = 99
    cat("calculating waypoints...\n")
    # FIX: removed redundant duplicated assignment (was `pb <- pb <- ...`).
    pb <- utils::txtProgressBar(max = tracks, style = 3)
    # Boustrophedon pattern: fly up one track, cross over, fly back down.
    for (j in seq(1:tracks)) {
      for (i in seq(1:multiply)) {
        if (mode == "waypoints" || mode == "terrainTrack") {
          if (i >= multiply) {
            group <- 99
          }
          else {
            group <- 1
          }
        }
        else {
          i <- 2
        }
        pos <- calcNextPos(pOld[1], pOld[2], heading, trackDistance)
        if (picFootprint) camera <- maptools::spRbind(camera, calcCamFoot( pos[1], pos[2], uavViewDir, trackDistance, flightAltitude,i,j))
        pOld <- pos
        flightLength <- flightLength + trackDistance
        if (mode == "track") {
          group <- 99
        }
        if (uavType == "djip3") {
          lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = group, p)
        }
        if (uavType == "pixhawk") {
          lns[length(lns) + 1] <- makeUavPointMAV(lat = pos[2], lon = pos[1], head = uavViewDir, group = group)
        }
      }
      # Odd track: cross over and turn around; even track: cross and fly up.
      if ((j %% 2 != 0)) {
        pos <- calcNextPos(pOld[1], pOld[2], crossdir, crossDistance)
        if (picFootprint) camera <- maptools::spRbind(camera, calcCamFoot( pos[1], pos[2], uavViewDir, trackDistance,flightAltitude,i,j))
        pOld <- pos
        flightLength <- flightLength + crossDistance
        if (uavType == "djip3") {
          lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = 99, p)
        }
        if (uavType == "pixhawk") {
          lns[length(lns) + 1] <-
            makeUavPointMAV(
              lat = pos[2],
              lon = pos[1],
              head = uavViewDir,
              group = 99
            )
        }
        heading <- downdir
      }
      else if ((j %% 2 == 0)) {
        pos <- calcNextPos(pOld[1], pOld[2], crossdir, crossDistance)
        if (picFootprint) camera <- maptools::spRbind(camera, calcCamFoot( pos[1], pos[2], uavViewDir,trackDistance,flightAltitude,i,j))
        pOld <- pos
        flightLength <- flightLength + crossDistance
        if (uavType == "djip3") {
          lns[length(lns) + 1] <- makeUavPoint(pos, uavViewDir, group = 99, p)
          heading <- updir
        }
        if (uavType == "pixhawk") {
          lns[length(lns) + 1] <- makeUavPointMAV( lat = pos[2], lon = pos[1], head = uavViewDir - 180, group = 99)
          heading <- updir
        }
      }
      utils::setTxtProgressBar(pb, j)
    }
    close(pb)
  }
  # Estimate flight time, speed and picture interval from the planned length.
  ft <- calculateFlightTime( maxFlightTime,
                             windCondition,
                             maxSpeed,
                             uavOptimumSpeed,
                             flightLength,
                             totalTrackdistance,
                             picRate,
                             logger)
  rawTime <- ft[1]
  maxFlightTime <- ft[2]
  maxSpeed <- ft[3]
  picIntervall <- ft[4]
  fileConn <- file(file.path(runDir,"tmp.csv"))
  cat("preprocessing DEM related stuff...\n")
  if (uavType == "djip3") {
    # FIX: made precedence explicit (was `lns[1:length(lns) - 1]`, which
    # evaluates as `(1:length(lns)) - 1` but yields the same selection).
    writeLines(unlist(lns[1:(length(lns) - 1)]), fileConn)
    djiDF <- utils::read.csv(file.path(runDir,"tmp.csv"), sep = ",", header = FALSE)
    names(djiDF) <-unlist(strsplit(makeUavPoint(pos,uavViewDir,group = 99,p,header = TRUE,sep = ' '),split = " "))
    sp::coordinates(djiDF) <- ~ lon + lat
    sp::proj4string(djiDF) <- sp::CRS("+proj=longlat +datum=WGS84 +no_defs")
    # FIX: pass the `taskArea` object, not the `taskarea` function (the
    # pixhawk branch below passes `taskArea` in the same position).
    result <- analyzeDSM(demFn,djiDF,p,altFilter,horizonFilter,followSurface,followSurfaceRes,logger,projectDir,dA,dateString,locationName,runDir,taskArea,gdalLink)
    demFn <- result[[3]]
    dfcor <- result[[2]]
    # Litchi control files are limited to ~90 waypoints per file.
    nofiles <- ceiling(nrow(dfcor@data) / 90)
    maxPoints <- 90
    minPoints <- 1
    if (nofiles < ceiling(rawTime / maxFlightTime)) {
      nofiles <- ceiling(rawTime / maxFlightTime)
      maxPoints <- ceiling(nrow(dfcor@data) / nofiles) + 1
      mp <- maxPoints
      minPoints <- 1
    }
    cat('generate control files...\n')
    calcDjiTask( result[[2]],taskName,nofiles,maxPoints,p,logger, round(result[[6]], digits = 0), trackSwitch=FALSE,"flightDEM.tif",result[[8]], projectDir,dateString,locationName,runDir)
  }
  else if (uavType == "pixhawk") {
    writeLines(unlist(lns), fileConn)
    mavDF <- utils::read.csv(file.path(runDir,"tmp.csv"), colClasses=c("V4"="character",
                                                                      "V5"="character",
                                                                      "V6"="character",
                                                                      "V7"="character"),sep = "\t", header = FALSE)
    names(mavDF) <- c("a","b","c","d","e","f","g","latitude","longitude","altitude","id","j","lat","lon")
    sp::coordinates(mavDF) <- ~ lon + lat
    sp::proj4string(mavDF) <- sp::CRS("+proj=longlat +datum=WGS84 +no_defs")
    if (is.null(launchAltitude)) {
      result <- analyzeDSM(demFn,mavDF,p,altFilter,horizonFilter ,followSurface,followSurfaceRes,logger,projectDir,dA,dateString,locationName,runDir,taskArea,gdalLink)
      lauchPos <- result[[1]]
      dfcor <- result[[2]]
      demFn <- result[[3]]
      nofiles <- ceiling(rawTime / maxFlightTime)
      maxPoints <- ceiling(nrow(dfcor@data) / nofiles) + 1
    }
    calcMAVTask(result[[2]],
                taskName,
                nofiles,
                rawTime,
                mode,
                trackDistance,
                maxFlightTime,
                logger,
                p,
                len,
                multiply,
                tracks,
                result,
                maxSpeed / 3.6,
                uavType,
                file.path(runDir,"flightDEM.tif"),
                maxAlt = result[[6]],
                projectDir,
                dateString,
                locationName,
                uavViewDir,
                cmd,
                runDir)
  }
  close(fileConn)
  if (heatMap) {
    cat("calculating picture coverage heat map\n")
    fovH <- calcFovHeatmap(camera, result[[4]])
  } else
  {
    fovH <- "NULL"
  }
  rcCover <- "NULL"
  # Log the full parameter set for reproducibility.
  log4r::levellog(logger, 'INFO', paste("taskName : ", taskName))
  log4r::levellog(logger, 'INFO', paste("DEM filename : ", names(demFn)))
  log4r::levellog(logger, 'INFO', paste("surveyArea : ", surveyAreaUTM))
  log4r::levellog(logger, 'INFO', paste("launchAltitude : ", launchAltitude))
  log4r::levellog(logger, 'INFO', paste("followSurface : ", followSurface))
  log4r::levellog(logger, 'INFO', paste("altfilter : ", altFilter))
  log4r::levellog(logger, 'INFO', paste("horizonFilter : ", horizonFilter))
  log4r::levellog(logger, 'INFO', paste("flightPlanMode : ", flightPlanMode))
  log4r::levellog(logger, 'INFO', paste("flightAltitude : ", flightAltitude))
  log4r::levellog(logger, 'INFO', paste("presetFlightTask: ", presetFlightTask))
  log4r::levellog(logger, 'INFO', paste("curvesize : ", p$curvesize))
  # FIX: was `uavType == "djiP3"`; the type string is "djip3" everywhere
  # else in this function, so the DJI-specific log lines never ran.
  if (uavType == "djip3"){
    log4r::levellog(logger, 'INFO', paste("rotationdir : ", p$rotationdir))
    log4r::levellog(logger, 'INFO', paste("gimbalmode : ", p$gimbalmode))
    log4r::levellog(logger, 'INFO',paste("gimbalpitchangle: ", p$gimbalpitchangle))
  }
  log4r::levellog(logger, 'INFO', paste("overlap : ", overlap))
  log4r::levellog(logger, 'INFO', paste("uavViewDir : ", uavViewDir))
  log4r::levellog(logger, 'INFO', paste("picFootprint : ", picFootprint))
  log4r::levellog(logger,'INFO',paste("followSurfaceRes: ", followSurfaceRes))
  log4r::levellog(logger, 'INFO', paste("surveyAreaCoords: ", list(surveyArea)))
  log4r::levellog(logger, 'INFO', paste("windCondition : ", windCondition))
  log4r::levellog(logger,'INFO',paste("calculated mission time : ", rawTime, " (min) "))
  log4r::levellog(logger,'INFO',paste("estimated battery lifetime : ", maxFlightTime, " (min) "))
  log4r::levellog(logger,'INFO',paste("Area covered : ", surveyAreaUTM / 10000, " (ha)"))
  log4r::levellog(logger, 'INFO', "-")
  log4r::levellog(logger,'INFO',"----- use the following task params! --------------")
  log4r::levellog(logger,'INFO',paste("RTH flight altitude: ", round(result[[6]], digits = 0), " (m)"))
  log4r::levellog(logger,'INFO',paste("max flight speed : ",round(maxSpeed, digits = 1)," (km/h) "))
  log4r::levellog(logger,'INFO',paste("picture lapse rate : ", picIntervall, " (sec/pic) "))
  log4r::levellog(logger, 'INFO', paste("trigger distance portrait : ", portrait))
  log4r::levellog(logger, 'INFO', paste("trigger distance landscape : ", landscape))
  log4r::levellog(logger,'INFO',"--------------------- END RUN -----------------------------")
  if ((flightPlanMode == 'track' | flightPlanMode == 'terrainTrack') & rawTime > maxFlightTime) {
    note <- "flighttime > battery lifetime! control files have been splitted. \n Fly save and have Fun..."
  }
  else if (flightPlanMode == 'waypoints') {
    note <- "control files are splitted after max 98 waypoints (litchi control file restricted number)"
  }
  else { note <- " Fly save and have Fun..." }
  dumpFile(paste0(file.path(projectDir, locationName, dateString, "fp-data/log/"),strsplit(basename(taskName), "\\.")[[1]][1],'.log'))
  cat("\n ",
      "\n NOTE 1:",as.character(note),"",
      "\n NOTE 2: You will find all parameters in the logfile:",paste0(file.path(projectDir, locationName, dateString, "fp-data/log/"),strsplit(basename(taskName), "\\.")[[1]][1],'.log'),"","\n ")
  # Assemble the return list of spatial objects.
  x <- c(result[[1]],
         result[[2]],
         result[[5]],
         result[[3]],
         result[[4]],
         camera,
         taskArea,
         rcCover,
         fovH)
  names(x) <- c("lp", "wp", "demA", "oDEM", "rDEM", "fp", "fA", "rcA", "hm")
  # Remove the temporary run folder.
  system(paste0("rm -rf ",file.path(projectDir,locationName,dateString,"fp-data/run")))
  return(x)
}
# Predict the response from a sparse-group-lasso fit selected by an
# information criterion.
#
# object : fit object carrying ic.fit$bic.fit / aic.fit / aicc.fit, each
#          with an intercept `b0` and coefficient vector `beta`.
# newx   : numeric vector of predictor values for one observation.
# s      : which criterion's fit to use ("bic", "aic" or "aicc").
# type   : only "response" is supported.
# Returns the linear predictor intercept + newx * beta (1x1 matrix).
predict.ic.sglfit <- function(object, newx, s = c("bic","aic","aicc"), type = c("response"), ...) {
  type <- match.arg(type)
  s <- match.arg(s)
  # Select the sub-fit chosen by the requested information criterion.
  chosen <- switch(s,
                   bic = object$ic.fit$bic.fit,
                   aic = object$ic.fit$aic.fit,
                   aicc = object$ic.fit$aicc.fit)
  intercept <- t(as.matrix(chosen$b0))
  rownames(intercept) <- "(Intercept)"
  # Prepend a 1 to newx so the intercept enters the inner product.
  coefs <- c(intercept, chosen$beta)
  c(1, newx) %*% coefs
}
# Predict the response from a panel sparse-group-lasso fit selected by an
# information criterion.
#
# object : fit object with fit$nf (number of panel units N) and
#          ic.panel.fit$bic.fit / aic.fit / aicc.fit sub-fits.
# newx   : (N*T) x p matrix of predictors.
# s      : which criterion's fit to use ("bic", "aic" or "aicc").
# method : "pooled" (common intercept b0) or "fe" (fixed effects a0).
# Returns the vector/matrix of linear predictions for all rows of newx.
predict.ic.panel.sglfit <- function(object, newx, s = c("bic","aic","aicc"), type = c("response"), method = c("pooled","fe"),...) {
  type <- match.arg(type)
  method <- match.arg(method)
  s <- match.arg(s)
  N <- object$fit$nf
  # Time periods per unit (kept for parity with the original signature).
  T <- dim(newx)[1]/N
  # Select the sub-fit chosen by the requested information criterion.
  chosen <- switch(s,
                   bic = object$ic.panel.fit$bic.fit,
                   aic = object$ic.panel.fit$aic.fit,
                   aicc = object$ic.panel.fit$aicc.fit)
  if (method == "pooled") {
    # Pooled estimator: single intercept replicated across units.
    b0 <- t(as.matrix(chosen$b0))
    rownames(b0) <- "(Intercept)"
    preds <- newx %*% chosen$beta + rep(b0, times = N)
  } else {
    # Fixed effects: per-observation intercepts a0.
    preds <- newx %*% chosen$beta + chosen$a0
  }
  preds
}
# Plots for a PCA result. `PC` is expected to carry:
#   $mtxscores  observation scores (rows = observations)
#   $mtxAutvlr  eigen matrix; column 1 = variances, column 2 = % of variance
#   $mtxCCP     correlations between components and variables (2 x p)
# Produces a variance barplot, a scree plot, a scatter plot of observations
# (optionally coloured/shaped by `class`), and a correlation circle.
# Graphs open in new devices (casc = TRUE) or are saved as PNGs (savptc = TRUE).
Plot.PCA <- function(PC, titles = NA, xlabel = NA, ylabel = NA, size = 1.1,
                     grid = TRUE, color = TRUE, linlab = NA, axes = TRUE, class = NA,
                     classcolor = NA, posleg = 2, boxleg = TRUE, savptc = FALSE,
                     width = 3236, height = 2000, res = 300, casc = TRUE) {
  # default titles for the three titled plots
  if (!is.character(titles[1]) || is.na(titles[1])) titles[1] = c("Scree-plot of the components variances")
  if (!is.character(titles[2]) || is.na(titles[2])) titles[2] = c("Graph corresponding to the rows (observations)")
  if (!is.character(titles[3]) || is.na(titles[3])) titles[3] = c("Graph corresponding to the columns (variables)")
  # --- input validation ---
  if (!is.na(class[1])) {
    class <- as.matrix(class)
    if (nrow(PC$mtxscores) != length(class))
      stop("'class' or 'data' input is incorrect, they should contain the same number of lines. Verify!")
  }
  if (!is.character(xlabel) && !is.na(xlabel[1]))
    stop("'xlabel' input is incorrect, it should be of type character or string. Verify!")
  if (!is.character(ylabel) && !is.na(ylabel[1]))
    stop("'ylabel' input is incorrect, it should be of type character or string. Verify!")
  if (!is.logical(color))
    stop("'color' input is incorrect, it should be TRUE or FALSE. Verify!")
  if (!is.numeric(size) || size < 0)
    stop("'size' input is incorrect, it should be numerical and greater than zero. Verify!")
  if (!is.logical(grid))
    stop("'grid' input is incorrect, it should be TRUE or FALSE. Verify!")
  if (!is.na(linlab[1]) && length(linlab) != nrow(PC$mtxscores))
    stop("'linlab' input is incorrect, it should have the same number of rows as the input in the database. Verify!")
  if (!is.numeric(posleg) || posleg < 0 || posleg > 4 || (floor(posleg) - posleg) != 0)
    stop("'posleg' input is incorrect, it should be a integer number between [0,4]. Verify!")
  if (!is.logical(boxleg))
    stop("'boxleg' input is incorrect, it should be TRUE or FALSE. Verify!")
  if (!is.logical(axes))
    stop("'axes' input is incorrect, it should be TRUE or FALSE. Verify!")
  if (!is.logical(savptc))
    stop("'savptc' input is incorrect, it should be TRUE or FALSE. Verify!")
  if (!is.numeric(width) || width <= 0)
    stop("'width' input is incorrect, it should be numerical and greater than zero. Verify!")
  if (!is.numeric(height) || height <= 0)
    stop("'height' input is incorrect, it should be numerical and greater than zero. Verify!")
  if (!is.numeric(res) || res <= 0)
    stop("'res' input is incorrect, it should be numerical and greater than zero. Verify!")
  # Fix: validate `casc` itself. The original tested
  # is.logical(casc && !savptc), which is TRUE for any value `&&` can coerce,
  # so invalid `casc` input was never caught.
  if (!is.logical(casc))
    stop("'casc' input is incorrect, it should be TRUE or FALSE. Verify!")
  # default axis labels carry the % of variance of the first two components
  if (is.na(xlabel[1]))
    xlabel = paste("First coordinate (", round(PC$mtxAutvlr[1, 2], 2), "%)", sep = "")
  if (is.na(ylabel[1]))
    ylabel = paste("Second coordinate (", round(PC$mtxAutvlr[2, 2], 2), "%)", sep = "")
  # translate the legend position code into a legend() keyword
  if (posleg == 1) posleg = "topleft"
  if (posleg == 2) posleg = "topright"
  if (posleg == 3) posleg = "bottomright"
  if (posleg == 4) posleg = "bottomleft"
  boxleg = ifelse(boxleg, "o", "n")
  num.class = 0
  if (!is.na(class[1])) {
    class.Table <- table(class)
    class.Names <- names(class.Table)
    num.class <- length(class.Table)
    NomeLinhas <- as.matrix(class)
  }
  # Fix: test only the first element of classcolor for NA; the original used
  # !is.na(classcolor) on the whole vector inside &&, which errors for
  # length > 1 inputs in recent R.
  if (num.class != 0 && length(classcolor) != num.class && !is.na(classcolor[1]) ||
      num.class == 0 && length(classcolor) != 1 && !is.na(classcolor[1]))
    stop("'classcolor' input is incorrect, it should be in an amount equal to the number of classes in 'class'. Verify!")
  if (savptc) {
    cat("\014")  # clear the console
    cat("\n\n Saving graphics to hard disk. Wait for the end!")
  }
  # --- plot 1: variance barplot ---
  if (casc && !savptc) dev.new()
  if (savptc) png(filename = "Figure PCA Variances.png", width = width, height = height, res = res)
  barplot(PC$mtxAutvlr[, 1], names.arg = paste(round(PC$mtxAutvlr[, 2], 2), "%", sep = ""),
          main = "Variance of the components")
  if (savptc) { box(col = 'white'); dev.off() }
  # --- plot 2: scree plot ---
  if (casc && !savptc) dev.new()
  if (savptc) png(filename = "Figure PCA Scree Plot.png", width = width, height = height, res = res)
  plot(1:length(PC$mtxAutvlr[, 1]), PC$mtxAutvlr[, 1],
       type = "n",
       xlab = "Order of the components",
       ylab = "Variance",
       xaxt = "n",
       main = titles[1])
  axis(1, c(1:length(PC$mtxAutvlr[, 1])), c(1:length(PC$mtxAutvlr[, 1])))
  if (grid) {
    # light-gray background rectangle with a white grid on top
    args <- append(as.list(par('usr')), c('gray93', 'gray93'))
    names(args) <- c('xleft', 'xright', 'ybottom', 'ytop', 'col', 'border')
    do.call(rect, args)
    grid(col = "white", lwd = 2, lty = 7, equilogs = TRUE)
  }
  points(1:length(PC$mtxAutvlr[, 1]), PC$mtxAutvlr[, 1], type = "b")
  if (savptc) { box(col = 'white'); dev.off() }
  # --- plot 3: observations (scores) ---
  if (casc && !savptc) dev.new()
  if (savptc) png(filename = "Figure PCA Observations.png", width = width, height = height, res = res)
  plot(PC$mtxscores,
       xlab = xlabel,
       ylab = ylabel,
       type = "n",
       main = titles[2],
       xlim = c(min(PC$mtxscores[, 1]) - 0.05, max(PC$mtxscores[, 1]) + 0.05),
       ylim = c(min(PC$mtxscores[, 2]) - 0.05, max(PC$mtxscores[, 2]) + 0.05))
  if (grid) {
    args <- append(as.list(par('usr')), c('gray93', 'gray93'))
    names(args) <- c('xleft', 'xright', 'ybottom', 'ytop', 'col', 'border')
    do.call(rect, args)
    grid(col = "white", lwd = 2, lty = 7, equilogs = TRUE)
  }
  if (num.class == 0) {
    points(PC$mtxscores,
           pch = 16,
           cex = size,
           col = ifelse(color, "red", "black"))
  } else {
    # one plotting symbol (and colour) per class
    if (!is.na(classcolor[1])) {
      cor.classe <- classcolor
    }
    else { cor.classe <- c("red") }
    newdata <- PC$mtxscores
    init.form <- 14
    cor <- 1
    for (i in 1:num.class) {
      point.form <- init.form + i
      if (!is.na(classcolor[1])) {
        cor1 <- ifelse(color, cor.classe[i], "black")
      }
      else { cor1 <- ifelse(color, cor + i, "black") }
      point.data <- newdata[which(class == class.Names[i]), ]
      points(point.data,
             pch = point.form,
             cex = size,
             col = cor1)
    }
    if (posleg != 0 && num.class > 0) {
      if (color) cor <- 2
      init.form <- 15
      cor <- ifelse(color, 2, 1)
      if (color) {
        if (!is.na(classcolor[1])) {
          color_b <- classcolor
        }
        else { color_b <- cor:(cor + num.class) }
      }
      else { color_b <- cor }
      legend(posleg, class.Names, pch = (init.form):(init.form + num.class), col = color_b,
             text.col = color_b, bty = boxleg, text.font = 6, y.intersp = 0.8, xpd = TRUE)
    }
  }
  if (axes) abline(h = 0, v = 0, cex = 1.5, lty = 2)
  if (!is.na(linlab[1])) LocLab(PC$mtxscores, cex = 1, linlab)
  if (savptc) { box(col = 'white'); dev.off() }
  # --- plot 4: correlation circle for the variables ---
  if (casc && !savptc) dev.new()
  if (savptc) png(filename = "Figure PCA Correlations.png", width = width, height = height, res = res)
  plot(0, 0,
       xlab = xlabel,
       ylab = ylabel,
       main = titles[3],
       asp = 1,
       axes = FALSE,
       type = "n",
       xlim = c(-1.1, 1.1),
       ylim = c(-1.1, 1.1))
  if (grid) {
    args <- append(as.list(par('usr')), c('gray93', 'gray93'))
    names(args) <- c('xleft', 'xright', 'ybottom', 'ytop', 'col', 'border')
    do.call(rect, args)
    grid(col = "white", lwd = 2, lty = 7, equilogs = TRUE)
  }
  symbols(0, 0, circles = 1, inches = FALSE, fg = 1, add = TRUE)  # unit circle
  if (axes) abline(h = 0, v = 0, cex = 1.5, lty = 2)
  arrows(0, 0, PC$mtxCCP[1, ], PC$mtxCCP[2, ], lty = 1, code = 2, length = 0.08, angle = 25, col = ifelse(color, "Red", "Black"))
  LocLab(t(PC$mtxCCP), cex = 1, colnames(PC$mtxCCP), col = ifelse(color, "Blue", "Black"), xpd = TRUE)
  if (savptc) { box(col = 'white'); dev.off() }
  if (savptc) cat("\n \n End!")
}
sidebarResults.observeDownloadResults <- function(input, values, output) {
  # Registers the download handler for the SST results xlsx file.
  # On failure, a modal explains the likely Rtools problem instead of crashing.
  output$results.xlsx <- downloadHandler(
    filename = function() {
      paste0("sst-results-", Sys.Date(), ".xlsx")
    },
    content = function(path) {
      tryCatch({
        # Map the user's display-name column choices back to internal names.
        translations <- sstModel::translate(values$sstOutput)
        internal.names <- sapply(input$keep,
                                 function(txt) names(translations)[translations == txt])
        sstModel::write.sstOutput(values$sstOutput,
                                  path = path,
                                  keep = input$keep,
                                  new.names = internal.names)
      },
      error = function(e) {
        failure.message <- paste("Unable to save the excel output.",
                                 "Please make sure that you have the correct version of Rtools installed.",
                                 "You can still see, copy, and paste the content of the excel output from the tables displayed on the dashboard.",
                                 sep = " ")
        showModal(modalDialog(title = "Error", failure.message))
      })
    }
  )
}
sidebarResults.observeNewSimulation <- function(input) {
  # Asks for confirmation before discarding the current simulation: the modal
  # cannot be dismissed by clicking outside, and the "reload" button triggers
  # the actual restart elsewhere.
  observeEvent(input$newSim, {
    confirmation <- modalDialog(
      title = "Do you want to run a new simulation ?",
      "Every simulation data will be lost, make sure to download your results before.",
      easyClose = FALSE,
      footer = tagList(
        modalButton("Cancel"),
        actionButton("reload", "Reload")
      )
    )
    showModal(confirmation)
  })
}
sidebarResults.observeDownloadWarnLog <- function(input, values, output) {
  # Registers a download handler that exposes the input-parsing warnings
  # (with an empty error log) as a plain-text .log file.
  output$warnLog <- downloadHandler(
    filename = function() {
      paste0("input-excel-warning-", Sys.Date(), ".log")
    },
    content = function(path) {
      log.text <- sstModel::generateError(
        error.log = data.frame(),
        warning.log = values$model$warning.log
      )
      cat(log.text, file = path)
    }
  )
}
get_prism_dailys <- function(type, minDate = NULL, maxDate = NULL,
                             dates = NULL, keepZip = TRUE, check = "httr")
{
  # Download daily PRISM data of the given `type` for the requested dates.
  #
  # type:     PRISM variable name (validated against prism_vars()).
  # minDate/maxDate/dates: either a date range or an explicit vector of
  #           dates, resolved by gen_dates().
  # keepZip:  passed to prism_webservice(); keep the downloaded zip file?
  # check:    "httr" downloads everything; "internal" predicts the stable
  #           file names from a HEAD request and skips files already present.
  #
  # Fixes: seq_along()/vapply() instead of 1:length()/sapply(); an unused
  # `years` variable was removed.
  prism_check_dl_dir()
  check <- match.arg(check, c("httr", "internal"))
  dates <- gen_dates(minDate = minDate, maxDate = maxDate, dates = dates)
  # daily PRISM grids are only available from 1981 onwards
  if (min(as.numeric(format(dates, "%Y"))) < 1981) {
    stop("You must enter a date that is later than 1980")
  }
  type <- match.arg(type, prism_vars())
  uri_dates <- gsub(pattern = "-", replacement = "", dates)
  # one webservice URI per requested day (vapply guarantees a character vector)
  uris <- vapply(uri_dates, function(x) {
    paste(
      "http://services.nacse.org/prism/data/public/4km", type, x,
      sep = "/"
    )
  }, character(1))
  if (check == "internal") {
    # Ask the service for the first file's name, normalize provisional/early
    # variants to "stable", and use it as a template for all file names.
    x <- httr::HEAD(uris[1])
    fn <- x$headers$`content-disposition`
    fn <- regmatches(fn, regexpr('\\"[a-zA-Z0-9_\\.]+', fn))
    fn <- substr(fn, 2, nchar((fn)))
    fn <- gsub("provisional|early", "stable", fn)
    file_names <- vapply(uri_dates, function(x)
      gsub("[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]", x, fn),
      character(1)
    )
    # only download files not already present locally
    to_download_lgl <- prism_check(file_names, lgl = TRUE)
    uris <- uris[to_download_lgl]
  }
  download_pb <- txtProgressBar(min = 0, max = max(length(uris), 1), style = 3)
  if (length(uris) > 0) {
    for (i in seq_along(uris)) {
      prism_webservice(uri = uris[i], keepZip)
      setTxtProgressBar(download_pb, i)
    }
  } else {
    # nothing to do: still complete the progress bar for consistent output
    setTxtProgressBar(download_pb, max(length(uris), 1))
  }
  close(download_pb)
}
library(copula)
source(system.file("Rsource", "utils.R", package="copula", mustWork=TRUE))
# Convenience aliases for unexported copula internals used throughout this
# test script.
isExplicit <- copula:::isExplicit
(doExtras <- copula:::doExtras())  # run extra (slow) checks only when enabled
options(warn = 1)  # print warnings as they occur
exprDerivs <- function(copula, u) {
  # Analytic (expression-based) copula derivatives at the points u, bound
  # column-wise: dC/du, dC/dtheta, dlog(c)/du, dlog(c)/dtheta.
  cols <- list(copula:::dCdu      (copula, u),
               copula:::dCdtheta  (copula, u),
               copula:::dlogcdu   (copula, u),
               copula:::dlogcdtheta(copula, u))
  do.call(cbind, cols)
}
numeDerivs <- function(copula, u) {
  # Numerical counterparts of exprDerivs(), same column order; warnings are
  # suppressed where the internal supports may.warn.
  cols <- list(copula:::dCduNumer       (copula, u, may.warn = FALSE),
               copula:::dCdthetaNumer   (copula, u, may.warn = FALSE),
               copula:::dlogcduNumer    (copula, u),
               copula:::dlogcdthetaNumer(copula, u))
  do.call(cbind, cols)
}
set.seed(123)
showProc.time()
# Mixture of Gumbel, Clayton and independence copulas; all three weights
# (1/4, 1/4, 1/2) are marked as fixed (non-estimable).
mC <- mixCopula(list(gumbelCopula(2.5, dim = 2),
                     claytonCopula(pi, dim = 2),
                     indepCopula(dim = 2)),
                fixParam(c(2,2,4)/8, c(TRUE, TRUE, TRUE)))
mC
u <- rCopula(100, mC)
# Regular 17 x 17 grid on the unit square, boundary included.
u1 <- (0:16)/16
u12 <- as.matrix(expand.grid(u1=u1, u2=u1, KEEP.OUT.ATTRS=FALSE))
dC12 <- dCopula(u12, mC)
dE12 <- copula:::dExplicitCopula.algr(u12, mC)
i.n <- is.na(dE12)
i.1 <- u12 == 1
# The explicit-algorithm density must be NA exactly on the lower boundary
# (u1 == 0 or u2 == 0) and at the corner (1,1).
stopifnot(identical(i.n,
                    u12[,1] == 0 | u12[,2] == 0 | (i.1[,1] & i.1[,2])))
# Compare densities away from the problematic boundary points.
ii <- !i.n & !i.1[,1] & !i.1[,2]
stopifnot(all.equal(dC12[ii], dE12[ii]))
stopifnot(all.equal(pCopula(u12, mC), copula:::pExplicitCopula.algr(u12, mC)))
showProc.time()
derExp <- exprDerivs(mC, u)
showProc.time()
# Analytic vs numerical derivatives only in the slow "extras" mode.
if(doExtras) {
  derNum <- numeDerivs(mC, u)
  print(cbind(sapply(1:ncol(derExp), function(i) all.equal(derExp[,i], derNum[,i]))),
        quote=FALSE)
  showProc.time()
}
dCd <- copula:::dCdu(mC, u12)
# Visual checks only on interactive devices.
if(dev.interactive(orNone=TRUE)) {
  image(u1,u1, matrix(dCd[,1], 16+1))
  image(u1,u1, matrix(dCd[,2], 16+1))
}
head(cbind(copula:::dCdu(mC, u), copula:::dCdtheta(mC, u),
           copula:::dlogcdu(mC, u), copula:::dlogcdtheta(mC, u)))
# Survival (180-degree rotated) version of the mixture: its density at u must
# equal the original density at 1 - u, and rotating twice is the identity.
mC.surv <- rotCopula(mC)
isExplicit(mC.surv)
stopifnot(all.equal(dCopula(u, mC.surv), dCopula(1 - u, mC)))
stopifnot(all.equal(dCopula(u, rotCopula(mC.surv)), dCopula(u, mC)))
showProc.time()
derExpS <- exprDerivs(mC.surv, u)
derNumS <- numeDerivs(mC.surv, u)
sapply(1:ncol(derExpS), function(i) all.equal(derExpS[,i], derNumS[,i], tol=0))
stopifnot(sapply(1:ncol(derExpS), function(i) all.equal(derExpS[,i], derNumS[,i],
                                                        tol = 1e-3)))
showProc.time()
# Khoudraji device applied to the survival mixture and a Gumbel copula.
k.mC.g <- khoudrajiCopula(mC.surv, gumbelCopula(3, dim = 2), c(.2, .9))
isExplicit(k.mC.g)
k.mC.g
U <- rCopula(100000, k.mC.g)
u1 <- as.matrix((0:7)/7)
# Empirical copula on the diagonal of a large sample.
Cuu <- C.n(u1,u1)
stopifnot(all.equal(Cuu, c(0:3,5:8)/8, tol = 1e-15))
stopifnot(max(abs(pCopula(u, k.mC.g) - C.n(u, U))) < 0.002)
require(MASS)
# Kernel density estimate as a rough sanity check on the analytic density.
kde <- kde2d(U[,1], U[,2], n = 9, lims = c(0.1, 0.9, 0.1, 0.9))
max(abs(dCopula(u, k.mC.g) / c(kde$z) - 1))
showProc.time()
derE.k <- exprDerivs(k.mC.g, u)
showProc.time()
if(doExtras) {
  derN.k <- numeDerivs(k.mC.g, u)
  print(cbind(sapply(1:ncol(derE.k),
                     function(i) all.equal(derE.k[,i], derN.k[,i], tol = 0))),
        quote=FALSE)
  # Column-specific tolerances: column 7 is known to be less accurate.
  stopifnot(sapply(1:2,
                   function(i) all.equal(derE.k[,i], derN.k[,i], tol = 2e-3)),
            local({ i <- 7 ; all.equal(derE.k[,i], derN.k[,i], tol = 4e-3) }),
            sapply(c(3:6, 8:ncol(derE.k)),
                   function(i) all.equal(derE.k[,i], derN.k[,i], tol = 1e-6))
  )
  showProc.time()
}
# Nested constructions: a mixture containing another mixture, then Khoudraji
# and rotation applied on top of those ("monster" copulas).
m.k.m <- mixCopula(list(mC, k.mC.g), c(.5, .5))
m.k.m
U <- rCopula(10000, m.k.m)
stopifnot(max(abs(pCopula(u, m.k.m) - C.n(u, U))) < 0.02)
monster <- khoudrajiCopula(m.k.m, mC.surv, c(.2, .8))
monster
U <- rCopula(10000, monster)
stopifnot(max(abs(pCopula(u, monster) - C.n(u, U))) < 0.007)
showProc.time()
derE.M <- exprDerivs(monster, u)
showProc.time()
if(doExtras) {
  derN.M <- numeDerivs(monster, u)
  print(cbind(sapply(1:ncol(derE.M), function(i) all.equal(derE.M[,i], derN.M[,i], tol=0))),
        quote = FALSE)
  showProc.time()
}
# Partial rotation: flip only the first margin; the density must match the
# monster's density at the flipped points.
rM <- rotCopula(monster, flip=c(TRUE, FALSE))
isExplicit(rM)
rM
U <- rCopula(10000, rM)
max(abs(pCopula(u, rM) - C.n(u, U)))
stopifnot(identical(dCopula(u, rM), dCopula(cbind(1 - u[,1], u[,2]), monster)))
derE.rM <- exprDerivs(rM, u)
showProc.time()
if(doExtras) {
  derN.rM <- numeDerivs(rM, u)
  print(cbind(sapply(1:ncol(derE.rM), function(i) all.equal(derE.rM[,i], derN.rM[,i], tol=0))),
        quote = FALSE)
  showProc.time()
}
# Joe copula: explicit-form density/CDF checks and derivative comparisons.
jC <- joeCopula(4, dim = 2)
stopifnot(all.equal(dCopula(u, jC), copula:::dExplicitCopula.algr(u, jC)))
stopifnot(all.equal(pCopula(u, jC), copula:::pExplicitCopula.algr(u, jC)))
showProc.time()
derE.j <- exprDerivs(jC, u)
derN.j <- numeDerivs(jC, u)
sapply(1:ncol(derE.j), function(i) all.equal(derE.j[,i], derN.j[,i], tol=0))
showProc.time()
rJ <- rotCopula(jC, flip=c(TRUE, FALSE))
derE.rJ <- exprDerivs(rJ, u)
derN.rJ <- numeDerivs(rJ, u)
sapply(1:ncol(derE.rJ), function(i) all.equal(derE.rJ[,i], derN.rJ[,i], tol=0))
showProc.time()
# Final mixture combining the Joe copula with the Khoudraji construction.
hiro <- mixCopula(list(jC, k.mC.g), c(.5, .5))
hiro
library(testthat)
library(mockery)
# NOTE(review): ptd_spc_standard() and mutate() are used unqualified below;
# presumably the package under test and dplyr are attached by the test
# runner -- confirm.
set.seed(123)
# Fixture: 20 consecutive days of N(0,1) values, no rebase points, no target.
data <- data.frame(
  x = as.Date("2020-01-01") + 1:20,
  y = rnorm(20),
  rebase = 0,
  target = as.double(NA)
)
spc_options <- list(value_field = "y", date_field = "x", screen_outliers = TRUE)
test_that("it returns a data frame", {
  r <- ptd_spc_standard(data, spc_options)
  expect_s3_class(r, "data.frame")
})
test_that("it returns expected values", {
  # Baseline run, then a run with an outlier injected at point 15 with and
  # without outlier screening; snapshots capture the full outputs.
  r1 <- ptd_spc_standard(data, spc_options)
  expect_snapshot(dplyr::glimpse(r1))
  o <- spc_options
  data$y[15] <- 10
  r2 <- ptd_spc_standard(data, o)
  expect_snapshot(dplyr::glimpse(r2))
  o$screen_outliers <- FALSE
  r3 <- ptd_spc_standard(data, o)
  expect_snapshot(dplyr::glimpse(r3))
  # All three runs should yield distinct lower/upper process limits ...
  expect_true(r1$lpl[[1]] != r2$lpl[[1]])
  expect_true(r1$lpl[[1]] != r3$lpl[[1]])
  expect_true(r2$lpl[[1]] != r3$lpl[[1]])
  expect_true(r1$upl[[1]] != r2$upl[[1]])
  expect_true(r1$upl[[1]] != r3$upl[[1]])
  expect_true(r2$upl[[1]] != r3$upl[[1]])
  # ... and the unscreened limits should be wider than the screened ones.
  expect_true(r3$upl[[1]] - r3$lpl[[1]] > r2$upl[[1]] - r2$lpl[[1]])
})
test_that("it sets the trajectory field", {
  o <- spc_options
  o$trajectory <- "t"
  # A configured trajectory column that is absent must raise an error.
  msg <- paste0("Trajectory column (", o$trajectory, ") does not exist in .data")
  expect_error(ptd_spc_standard(data, o), msg, fixed = TRUE)
  d <- data
  d$t <- 1:20
  r1 <- ptd_spc_standard(d, o)
  expect_equal(r1$trajectory, 1:20)
  # No trajectory option: column filled with NA.
  o$trajectory <- NULL
  r2 <- ptd_spc_standard(data, o)
  expect_equal(r2$trajectory, rep(as.double(NA), 20))
})
test_that("it creates the pseudo facet column if no facet_field is set", {
  r <- ptd_spc_standard(data, spc_options)
  expect_equal(r$f, rep("no facet", 20))
})
test_that("it sets the rebase_group field", {
  o <- spc_options
  r1 <- ptd_spc_standard(data, o)
  expect_equal(r1$rebase_group, rep(0, 20))
  # A rebase flag at point 11 splits the series into two groups.
  o$rebase <- "rebase"
  data <- mutate(data, rebase = c(rep(0, 10), 1, rep(0, 9)))
  r2 <- ptd_spc_standard(data, o)
  expect_equal(r2$rebase_group, c(rep(0, 10), rep(1, 10)))
})
test_that("setting fix_after_n_points changes the calculations", {
  o <- spc_options
  s0 <- ptd_spc_standard(data, o)
  o$fix_after_n_points <- 12
  s1 <- ptd_spc_standard(data, o)
  expect_true(s1$lpl[[1]] != s0$lpl[[1]])
  expect_true(s1$upl[[1]] != s0$upl[[1]])
})
"AorticStenosisTrials" |
NULL
# Client constructor for the AWS ElastiCache service: returns the operation
# list with the supplied configuration applied via set_config().
elasticache <- function(config = list()) {
  svc <- .elasticache$operations
  svc <- set_config(svc, config)
  return(svc)
}
# Internal service definition: operations, endpoint metadata and handler
# wiring. Appears to be paws-style generated AWS service scaffolding --
# confirm before editing by hand.
.elasticache <- list()
.elasticache$operations <- list()
.elasticache$metadata <- list(
  service_name = "elasticache",
  # Endpoint templates per region pattern ({region} is substituted at runtime),
  # including the China and US isolated (C2S/SC2S) partitions.
  endpoints = list("*" = list(endpoint = "elasticache.{region}.amazonaws.com", global = FALSE), "cn-*" = list(endpoint = "elasticache.{region}.amazonaws.com.cn", global = FALSE), "us-iso-*" = list(endpoint = "elasticache.{region}.c2s.ic.gov", global = FALSE), "us-isob-*" = list(endpoint = "elasticache.{region}.sc2s.sgov.gov", global = FALSE)),
  service_id = "ElastiCache",
  api_version = "2015-02-02",
  signing_name = "elasticache",
  json_version = "",
  target_prefix = ""
)
# Builds a service object using the AWS "query" protocol and signature v4.
.elasticache$service <- function(config = list()) {
  handlers <- new_handlers("query", "v4")
  new_service(.elasticache$metadata, handlers, config)
}
# Empirical Bayes LASSO for a Gaussian response with a Normal-Exponential-
# Gamma (NEG) prior; the heavy lifting is done in C (package "EBglmnet").
#
# BASIS:    N x K design matrix.
# Target:   length-N response vector.
# a_gamma,
# b_gamma:  NEG prior hyperparameters.
# Epis:     if TRUE, also fit all pairwise interaction (epistasis) effects.
# verbose:  > 0 prints progress information.
# group:    passed to the C routine in the epistasis case (used as integer).
#
# Returns a list: "fit" (selected effects with t- and p-values), "WaldScore",
# "Intercept", "residual variance" and "hyperparameters".
EBlassoNEG.Gaussian <-
function(BASIS,Target,a_gamma,b_gamma,Epis = FALSE,verbose = 0,group = FALSE){
	N = nrow(BASIS);
	K = ncol(BASIS);
	if (verbose>0) cat("EBLASSO Gaussian Model, NEG prior, N: ",N,",K: ",K,", Epis: ",Epis,"\n");
	if(Epis){
		# Main effects plus all pairwise interactions: K + choose(K, 2) terms.
		# The C routine fills Beta with 4 values per candidate effect
		# (locus1, locus2, coefficient, posterior variance).
		N_effect = (K+1)*K/2;
		Beta = rep(0,N_effect *4);
		output<-.C("fEBLinearEpisEff",
			BASIS = as.double(BASIS),
			Target = as.double(Target),
			a_gamma = as.double(a_gamma),
			b_gamma = as.double(b_gamma),
			Beta = as.double(Beta),
			WaldScore = as.double(0),
			Intercept = as.double(0),
			N = as.integer(N),
			K = as.integer(K),
			ver = as.integer(verbose),
			bMax = as.integer(N_effect),
			residual = as.double(0),
			group = as.integer(group),
			PACKAGE ="EBglmnet");
	}else {
		# Main effects only.
		N_effect = K;
		Beta = rep(0,N_effect *4);
		output<-.C("fEBLinearMainEff",
			BASIS = as.double(BASIS),
			Target = as.double(Target),
			a_gamma = as.double(a_gamma),
			b_gamma = as.double(b_gamma),
			Beta = as.double(Beta),
			WaldScore = as.double(0),
			Intercept = as.double(0),
			N = as.integer(N),
			K = as.integer(K),
			ver = as.integer(verbose),
			residual = as.double(0),
			PACKAGE ="EBglmnet");
	}
	# Reshape the flat Beta vector into one row per candidate effect and keep
	# only effects with a non-zero coefficient (column 3).
	result = matrix(output$Beta,N_effect,4);
	ToKeep = which(result[,3]!=0);
	if(length(ToKeep)==0) { Blup = matrix(0,1,4)	# no effect selected: placeholder row
	}else
	{
		nEff = length(ToKeep);	# NOTE(review): nEff is computed but never used below
		Blup = result[ToKeep,,drop=FALSE];
	}
	if(Epis){
		# Order output: main effects (locus1 == locus2) first, then
		# interactions, each sorted by the first locus.
		blupMain = Blup[Blup[,1] ==Blup[,2],,drop = FALSE];
		blupEpis = Blup[Blup[,1] !=Blup[,2],,drop = FALSE];
		order1 = order(blupMain[,1]);
		order2 = order(blupEpis[,1]);
		Blup = rbind(blupMain[order1,],blupEpis[order2,]);
	}
	# t-statistics from posterior mean and variance (tiny epsilon avoids
	# division by zero) and two-sided p-values on N - 1 degrees of freedom.
	t = abs(Blup[,3])/(sqrt(Blup[,4])+ 1e-20);
	pvalue = 2*(1- pt(t,df=(N-1)));
	Blup = cbind(Blup,t,pvalue);
	colnames(Blup) = c("locus1","locus2","beta","posterior variance","t-value","p-value");
	hyperparameters = c(a_gamma, b_gamma);
	names(hyperparameters) = c("a", "b");
	fEBresult <- list(Blup,output$WaldScore,output$Intercept,output$residual,hyperparameters);
	rm(list= "output")
	names(fEBresult) <-c("fit","WaldScore","Intercept","residual variance","hyperparameters")
	return(fEBresult)
}
# GGIR part 3: sustained inactivity bout (sib) detection per recording.
# Reads part-2 milestone files from <metadatadir>/meta/ms2.out, runs sleep
# detection, and stores part-3 milestone files in <metadatadir>/meta/ms3.out
# (plus optional QC plots in meta/sleep.qc). Files f0..f1 are processed,
# optionally in parallel.
#
# Fixes relative to the original:
# - on.exit(parallel::stopCluster(cl)) is now registered immediately after
#   the cluster is created, so the cluster is cleaned up even if the foreach
#   loop errors, and stopCluster() is no longer scheduled when no cluster
#   was created (the < 4 cores fallback).
# - seq_along() replaces 1:length(); an unused GGIRinstalled lookup removed.
g.part3 = function(metadatadir = c(), f0, f1, myfun = c(),
                   params_sleep = c(), params_metrics = c(),
                   params_output = c(),
                   params_general = c(), ...) {
  input = list(...)
  # merge explicit parameter lists with any legacy arguments passed via ...
  params = extract_params(params_sleep = params_sleep,
                          params_metrics = params_metrics,
                          params_general = params_general, params_output = params_output, input = input)
  params_sleep = params$params_sleep
  params_metrics = params$params_metrics
  params_general = params$params_general
  params_output = params$params_output
  # create output folders when missing
  if (!file.exists(paste(metadatadir, sep = ""))) {
    dir.create(file.path(metadatadir))
  }
  if (!file.exists(paste(metadatadir, "/meta/ms3.out", sep = ""))) {
    dir.create(file.path(paste(metadatadir, "/meta", sep = ""), "ms3.out"))
    dir.create(file.path(paste(metadatadir, "/meta", sep = ""), "sleep.qc"))
  }
  fnames = dir(paste(metadatadir, "/meta/ms2.out", sep = ""))
  if (f1 > length(fnames) | f1 == 0) f1 = length(fnames)
  if (f0 > length(fnames) | f0 == 0) f0 = 1
  # basenames (without .RData) of files already processed in an earlier run
  ffdone = fdone = dir(paste(metadatadir, "/meta/ms3.out", sep = ""))
  if (length(fdone) > 0) {
    for (ij in seq_along(fdone)) {
      tmp = unlist(strsplit(fdone[ij], ".RData"))
      ffdone[ij] = tmp[1]
    }
  } else {
    ffdone = c()
  }
  # Worker: process one part-2 milestone file -- run sib detection, compute
  # the sleep regularity index, optionally plot, and save the milestone file.
  main_part3 = function(i, metadatadir = c(), f0, f1, myfun = c(),
                        params_sleep = c(), params_metrics = c(),
                        params_output = c(),
                        params_general = c(), fnames, ffdone) {
    nightsperpage = 7
    FI = file.info(paste(metadatadir, "/meta/ms2.out/", fnames[i], sep = ""))
    if (is.na(FI$size) == TRUE) FI$size = 0
    if (FI$size == 0 | is.na(FI$size) == TRUE | length(FI$size) == 0) {
      cat(paste("P3 file ", fnames[i], sep = ""))
      cat("Filename not recognised")
    }
    fname = unlist(strsplit(fnames[i], ".RData"))[1]
    # skip files processed previously, unless overwrite is requested
    if (length(ffdone) > 0) {
      skip = ifelse(test = length(which(ffdone == fname)) > 0, yes = 1, no = 0)
    } else {
      skip = 0
    }
    if (params_general[["overwrite"]] == TRUE) skip = 0
    if (skip == 0) {
      cat(paste(" ", i, sep = ""))
      SUM = IMP = M = c()
      # load() populates M, IMP, SUM, I from the milestone files
      load(paste(metadatadir, "/meta/basic/meta_", fnames[i], sep = ""))
      load(paste(metadatadir, "/meta/ms2.out/", fnames[i], sep = ""))
      if (M$filecorrupt == FALSE & M$filetooshort == FALSE) {
        SLE = g.sib.det(M, IMP, I, twd = c(-12, 12),
                        acc.metric = params_general[["acc.metric"]],
                        desiredtz = params_general[["desiredtz"]],
                        myfun = myfun,
                        sensor.location = params_general[["sensor.location"]],
                        params_sleep = params_sleep)
        # the Sleep Regularity Index needs more than two full days of epochs
        if (!is.null(SLE$output)) {
          if (nrow(SLE$output) > 2*24*(3600/M$windowsizes[1])) {
            SleepRegularityIndex = CalcSleepRegularityIndex(data = SLE$output,
                                                            epochsize = M$windowsizes[1],
                                                            desiredtz = params_general[["desiredtz"]])
          } else {
            SleepRegularityIndex = NA
          }
        } else {
          SleepRegularityIndex = NA
        }
        L5list = SLE$L5list
        SPTE_end = SLE$SPTE_end
        SPTE_start = SLE$SPTE_start
        tib.threshold = SLE$tib.threshold
        longitudinal_axis = SLE$longitudinal_axis
        if (length(SLE$output) > 0 & SLE$detection.failed == FALSE) {
          ID = SUM$summary$ID
          datename = as.character(unlist(strsplit(as.character(as.matrix(M$metashort[1])), " "))[1])
          plottitle = " "
          # optional per-day QC plots
          if (params_output[["do.part3.pdf"]] == TRUE) {
            pdf(paste(metadatadir, "/meta/sleep.qc/graphperday_id_", ID, "_",
                      I$filename, ".pdf", sep = ""), width = 8.2, height = 11.7)
            g.sib.plot(SLE, M, I, plottitle, nightsperpage = nightsperpage, desiredtz = params_general[["desiredtz"]])
            dev.off()
          }
          sib.cla.sum = c()
          sib.cla.sum = g.sib.sum(SLE, M,
                                  ignorenonwear = params_sleep[["ignorenonwear"]],
                                  desiredtz = params_general[["desiredtz"]])
          rec_starttime = IMP$metashort[1, 1]
          save(sib.cla.sum, L5list, SPTE_end, SPTE_start, tib.threshold, rec_starttime, ID,
               longitudinal_axis, SleepRegularityIndex,
               file = paste(metadatadir, "/meta/ms3.out/", fname, ".RData", sep = ""))
        }
      }
    }
  }
  if (params_general[["do.parallel"]] == TRUE) {
    cores = parallel::detectCores()
    Ncores = cores[1]
    if (Ncores > 3) {
      if (length(params_general[["maxNcores"]]) == 0) params_general[["maxNcores"]] = Ncores
      Ncores2use = min(c(Ncores - 1, params_general[["maxNcores"]]))
      cl <- parallel::makeCluster(Ncores2use)
      # register cleanup right away so the cluster is stopped on any exit path
      on.exit(parallel::stopCluster(cl), add = TRUE)
      doParallel::registerDoParallel(cl)
    } else {
      cat(paste0("\nparallel processing not possible because number of available cores (", Ncores, ") < 4"))
      params_general[["do.parallel"]] = FALSE
    }
    cat(paste0('\n Busy processing ... see ', metadatadir, '/meta/ms3.out', ' for progress\n'))
    # export GGIR itself when attached, otherwise the individual functions
    packages2passon = functions2passon = NULL
    GGIRloaded = "GGIR" %in% .packages()
    if (GGIRloaded) {
      packages2passon = 'GGIR'
      errhand = 'pass'
    } else {
      functions2passon = c("g.sib.det", "g.detecmidnight", "iso8601chartime2POSIX",
                           "g.sib.plot", "g.sib.sum", "HASPT", "HASIB", "CalcSleepRegularityIndex")
      errhand = 'stop'
    }
    fe_dopar = foreach::`%dopar%`
    fe_do = foreach::`%do%`
    i = 0
    # fall back to sequential %do% when the cluster could not be created
    `%myinfix%` = ifelse(params_general[["do.parallel"]], fe_dopar, fe_do)
    output_list = foreach::foreach(i = f0:f1, .packages = packages2passon,
                                   .export = functions2passon, .errorhandling = errhand) %myinfix% {
      tryCatchResult = tryCatch({
        main_part3(i, metadatadir, f0, f1, myfun,
                   params_sleep, params_metrics,
                   params_output,
                   params_general, fnames, ffdone)
      })
      return(tryCatchResult)
    }
    # report any errors/warnings captured per file
    for (oli in seq_along(output_list)) {
      if (is.null(unlist(output_list[oli])) == FALSE) {
        cat(paste0("\nErrors and warnings for ", fnames[oli]))
        print(unlist(output_list[oli]))
      }
    }
  } else {
    for (i in f0:f1) {
      main_part3(i, metadatadir, f0, f1, myfun,
                 params_sleep, params_metrics,
                 params_output,
                 params_general, fnames, ffdone)
    }
  }
}
# Migrate a project from Packrat to renv.
#
# project: path to the project (resolved and normalized internally).
# packrat: which Packrat components to migrate; defaults to all of them.
# Returns the normalized project path, invisibly.
migrate <- function(
  project = NULL,
  packrat = c("lockfile", "sources", "library", "options", "cache"))
{
  renv_consent_check()          # user must have consented to renv usage
  renv_scope_error_handler()    # scoped error handler for user-facing errors
  project <- renv_project_resolve(project)
  renv_scope_lock(project = project)  # hold the project lock during migration
  project <- renv_path_normalize(project, winslash = "/", mustWork = TRUE)
  # only migrate when a Packrat lockfile is actually present
  if (file.exists(file.path(project, "packrat/packrat.lock"))) {
    packrat <- match.arg(packrat, several.ok = TRUE)
    renv_migrate_packrat(project, packrat)
  }
  invisible(project)
}
renv_migrate_packrat <- function(project = NULL, components = NULL) {
  # Run the requested Packrat-to-renv migration steps for `project`.
  # `components` selects a subset of steps by name; NULL means all of them.
  project <- renv_project_resolve(project)
  if (!requireNamespace("packrat", quietly = TRUE))
    stopf("migration requires the 'packrat' package to be installed")
  steps <- list(
    lockfile = renv_migrate_packrat_lockfile,
    sources  = renv_migrate_packrat_sources,
    library  = renv_migrate_packrat_library,
    options  = renv_migrate_packrat_options,
    cache    = renv_migrate_packrat_cache
  )
  selected <- steps[components %||% names(steps)]
  for (step in selected)
    step(project)
  # always refresh project infrastructure and imbue renv afterwards
  renv_migrate_packrat_infrastructure(project)
  renv_imbue_impl(project)
  fmt <- "* Project '%s' has been migrated from Packrat to renv."
  vwritef(fmt, aliased_path(project))
  vwritef("* Consider deleting the project 'packrat' folder if it is no longer needed.")
  invisible(TRUE)
}
# Translate a Packrat lockfile (packrat/packrat.lock) into an renv lockfile
# and write it to the project's renv lockfile path.
renv_migrate_packrat_lockfile <- function(project) {
  plock <- file.path(project, "packrat/packrat.lock")
  if (!file.exists(plock))
    return(FALSE)
  # the Packrat lockfile is a sequence of DCF sections separated by blank lines
  contents <- read(plock)
  splat <- strsplit(contents, "\n{2,}")[[1]]
  dcf <- lapply(splat, function(section) {
    renv_dcf_read(text = section)
  })
  # first section is the header; the remaining sections are package records
  header <- dcf[[1]]
  records <- dcf[-1L]
  # restore the repository set recorded by Packrat, falling back to the
  # current session's repositories
  repos <- getOption("repos")
  if (!is.null(header$Repos)) {
    parts <- strsplit(header$Repos, "\\s*,\\s*")[[1]]
    repos <- renv_properties_read(text = parts, delimiter = "=")
  }
  fields <- c("Package", "Version", "Source")
  records <- lapply(records, function(record) {
    # drop Packrat hashes; remap Github* fields onto renv's Remote* fields
    record$Hash <- NULL
    if (any(grepl("^Github", names(record))))
      record$RemoteType <- "github"
    map <- c(
      "GithubRepo" = "RemoteRepo",
      "GithubUsername" = "RemoteUsername",
      "GithubRef" = "RemoteRef",
      "GithubSha1" = "RemoteSha",
      "GithubSHA1" = "RemoteSha",  # both capitalizations occur in the wild
      "GithubSubdir" = "RemoteSubdir"
    )
    names(record) <- remap(names(record), map)
    keep <- c(fields, grep("^Remote", names(record), value = TRUE))
    as.list(record[keep])
  })
  names(records) <- extract_chr(records, "Package")
  records <- renv_snapshot_fixup_renv(records)
  # assemble the renv lockfile skeleton and write it out
  lockfile <- structure(list(), class = "renv_lockfile")
  lockfile$R <- renv_lockfile_init_r(project)
  lockfile$R$Version <- header$RVersion
  lockfile$R$Repositories <- as.list(repos)
  renv_records(lockfile) <- records
  lockfile <- renv_lockfile_fini(lockfile, project)
  lockpath <- renv_lockfile_path(project = project)
  renv_lockfile_write(lockfile, file = lockpath)
}
renv_migrate_packrat_sources <- function(project) {
  # Copy package source tarballs from the Packrat source directory into the
  # renv "cran" source cache, skipping tarballs already present.
  packrat_ns <- asNamespace("packrat")
  src_dir <- packrat_ns$srcDir(project = project)
  if (!file.exists(src_dir))
    return(TRUE)
  # <package>_<version>.tar.gz, where the version may use '.', '-' or '_'
  tarball_pattern <- "^[^_]+_\\d+(?:[_.-]\\d+)*\\.tar\\.gz$"
  relative <- list.files(
    src_dir,
    pattern = tarball_pattern,
    recursive = TRUE
  )
  src_paths <- file.path(src_dir, relative)
  dst_paths <- renv_paths_source("cran", relative)
  missing <- !file.exists(dst_paths)
  src_paths <- src_paths[missing]
  dst_paths <- dst_paths[missing]
  vprintf("* Migrating package sources from Packrat to renv ... ")
  copy <- renv_progress(renv_file_copy, length(dst_paths))
  for (k in seq_along(dst_paths)) {
    ensure_parent_directory(dst_paths[[k]])
    copy(src_paths[[k]], dst_paths[[k]])
  }
  vwritef("Done!")
  TRUE
}
renv_migrate_packrat_library <- function(project) {
  # Copy installed packages from the Packrat project library into the renv
  # project library (skipping ones already there), then move them into the
  # global renv cache when caching is enabled.
  packrat_ns <- asNamespace("packrat")
  packrat_lib <- packrat_ns$libDir(project = project)
  if (!file.exists(packrat_lib))
    return(TRUE)
  installed <- list.files(packrat_lib, full.names = TRUE)
  if (empty(installed))
    return(TRUE)
  destinations <- renv_paths_library(basename(installed), project = project)
  names(destinations) <- installed
  destinations <- destinations[!file.exists(destinations)]
  if (empty(destinations)) {
    vwritef("* The renv library is already synchronized with the Packrat library.")
    return(TRUE)
  }
  vprintf("* Migrating library from Packrat to renv ... ")
  ensure_parent_directory(destinations)
  copy <- renv_progress(renv_file_copy, length(destinations))
  enumerate(destinations, copy)
  vwritef("Done!")
  if (renv_cache_config_enabled(project = project)) {
    vprintf("* Moving packages into the renv cache ... ")
    descriptions <- lapply(destinations, renv_description_read)
    sync <- renv_progress(renv_cache_synchronize, length(destinations))
    lapply(descriptions, sync, linkable = TRUE)
    vwritef("Done!")
  }
  TRUE
}
renv_migrate_packrat_options <- function(project) {
  # Carry the Packrat 'ignored.packages' option over into renv settings.
  packrat_ns <- asNamespace("packrat")
  packrat_opts <- packrat_ns$get_opts(project = project)
  settings$ignored.packages(packrat_opts$ignored.packages, project = project)
}
renv_migrate_packrat_cache <- function(project) {
  # Copy cached packages from the Packrat global cache into the renv cache.
  # Packrat's cache layout is <cache>/<package>/<hash>/<source>.
  packrat_ns <- asNamespace("packrat")
  cache_root <- packrat_ns$cacheLibDir()
  package_dirs <- list.files(cache_root, full.names = TRUE)
  hash_dirs <- list.files(package_dirs, full.names = TRUE)
  candidate_sources <- list.files(hash_dirs, full.names = TRUE)
  # only keep directories that look like real installed packages
  has_desc <- file.exists(file.path(candidate_sources, "DESCRIPTION"))
  candidate_sources <- candidate_sources[has_desc]
  cache_targets <- map_chr(candidate_sources, renv_cache_path)
  names(cache_targets) <- candidate_sources
  cache_targets <- cache_targets[!file.exists(cache_targets)]
  if (empty(cache_targets)) {
    vwritef("* The renv cache is already synchronized with the Packrat cache.")
    return(TRUE)
  }
  if (renv_cache_config_enabled(project = project))
    renv_migrate_packrat_cache_impl(cache_targets)
  TRUE
}
renv_migrate_packrat_cache_impl <- function(targets) {
  # Copy each cached package (names(targets) = source, value = target),
  # collecting per-package failures and reporting them at the end.
  vprintf("* Migrating Packrat cache to renv cache ... ")
  ensure_parent_directory(targets)
  copy <- renv_progress(renv_file_copy, length(targets))
  outcomes <- enumerate(targets, function(source, target) {
    copied <- catch(copy(source, target))
    failed <- inherits(copied, "error")
    list(
      source = source,
      target = target,
      broken = failed,
      reason = if (failed) conditionMessage(copied) else ""
    )
  })
  vwritef("Done!")
  status_df <- bind(outcomes)
  failures <- status_df[status_df$broken, ]
  if (nrow(failures) == 0)
    return(TRUE)
  renv_pretty_print(
    with(failures, sprintf("%s [%s]", format(source), reason)),
    "The following packages could not be copied from the Packrat cache:",
    "These packages may need to be reinstalled and re-cached."
  )
}
renv_migrate_packrat_infrastructure <- function(project) {
  # Replace the Packrat-era .Rprofile with freshly written renv
  # infrastructure files.
  rprofile <- file.path(project, ".Rprofile")
  unlink(rprofile)
  renv_infrastructure_write(project)
  vwritef("* renv support infrastructure has been written.")
  TRUE
}
fancycut <- function(x, na.bucket = NA, unmatched.bucket = NA,
                     out.as.factor = TRUE, ...) {
  # Cut a numeric vector into labelled buckets, where buckets and their
  # defining intervals are supplied as `label = 'interval'` pairs in `...`
  # (e.g. fancycut(x, low = '[0,1)', high = '[1,2]')). Delegates to
  # wafflecut() after splitting `...` into bucket names and interval strings.
  #
  # x:                numeric vector to be bucketed.
  # na.bucket:        label used for NA values of x.
  # unmatched.bucket: label used for values matching no interval.
  # out.as.factor:    return a factor (TRUE) or a character vector (FALSE).
  dots <- as.list(substitute(list(...)))[-1L]
  # Fix: an empty `...` previously left `intervals`/`buckets` undefined and
  # surfaced as an opaque "object 'intervals' not found" error downstream.
  if (length(dots) == 0L) {
    stop("fancycut requires at least one 'bucket = interval' pair in '...'")
  }
  buckets <- names(dots)
  intervals <- as.character(dots)
  return(wafflecut(
    x = x,
    intervals = intervals,
    buckets = buckets,
    na.bucket = na.bucket,
    unmatched.bucket = unmatched.bucket,
    out.as.factor = out.as.factor
  ))
}
wafflecut <- function(x, intervals, buckets = intervals,
                      na.bucket = NA, unmatched.bucket = NA,
                      out.as.factor = TRUE) {
  # Map a numeric vector into labelled buckets defined by interval-notation
  # strings (e.g. "[0,1)", "(2,3]", or a bare point "5").
  #
  # x:                numeric vector to be bucketed.
  # intervals:        character vector of interval strings.
  # buckets:          labels, one per interval (defaults to the intervals).
  # na.bucket:        label for NA values of x.
  # unmatched.bucket: label for values matching no interval.
  # out.as.factor:    return a factor (TRUE) or the raw labels (FALSE).
  #
  # Later intervals win when intervals overlap (labels are overwritten in
  # order). Fix: seq_len(l) replaces 1:l so an empty `intervals` vector no
  # longer iterates over indices 1 and 0.
  l <- length(intervals)
  if (l != length(buckets)) {
    stop('FancyCut requires a 1-1 map from intervals to buckets')
  }
  if (!is.numeric(x))
    stop("'x' must be numeric")
  out <- rep(NA, length(x))
  intervals_df <- parse_intervals(intervals)
  for (index in seq_len(l)) {
    b <- buckets[index]
    lower <- intervals_df$left[index]
    upper <- intervals_df$right[index]
    left <- intervals_df$left_strict[index]
    right <- intervals_df$right_strict[index]
    # closed/open endpoint combinations per the interval notation
    mask <- rep(FALSE, length(x))
    if (left & right)   { mask <- x >= lower & x <= upper }
    if (left & !right)  { mask <- x >= lower & x < upper }
    if (!left & right)  { mask <- x > lower & x <= upper }
    if (!left & !right) { mask <- x > lower & x < upper }
    out[mask] <- b
  }
  # only introduce the NA/unmatched labels when they are actually needed,
  # so they do not appear as empty factor levels
  if (sum(is.na(x)) == 0L) {
    na.bucket <- NULL
  } else {
    out[is.na(x)] <- na.bucket
  }
  if (sum(is.na(out)) == 0L) {
    unmatched.bucket <- NULL
  } else {
    out[is.na(out)] <- unmatched.bucket
  }
  levels <- unique(c(buckets, na.bucket, unmatched.bucket))
  if (out.as.factor) {
    return(factor(
      out,
      levels = levels,
      exclude = NULL
    ))
  } else {
    return(out)
  }
}
parse_intervals <- function(intervals) {
  # Parse interval-notation strings ("[0,1)", "(2,3]", "[1,Inf)", or a bare
  # point "5") into a data frame with columns:
  #   interval      the original string
  #   left, right   numeric endpoints (NA when the string is malformed)
  #   left_strict   TRUE when the left endpoint is closed ('[')
  #   right_strict  TRUE when the right endpoint is closed (']')
  #   match_count   5 for well-formed inputs; used to flag malformed ones
  # Malformed or degenerate intervals produce warnings, not errors.
  #
  # Fixes: seq_len() replaces 1:nrows (1:0 iterated over indices 1 and 0 for
  # empty input) and vapply() replaces sapply() (which returns list() for
  # empty input, breaking the data.frame construction).
  rx <- "^\\s*(\\(|\\[)\\s*((?:[-+]?\\d*\\.?\\d+(?:[eE][-+]?\\d+)?)|(?:[-+]?Inf))\\s*,\\s*((?:[-+]?\\d*\\.?\\d+(?:[eE][-+]?\\d+)?)|(?:[-+]?Inf))\\s*(\\)|\\])\\s*$"
  lindex <- regexec(rx, intervals)
  lmatch <- regmatches(intervals, lindex)
  nrows <- length(lmatch)
  ncols <- vapply(lmatch, length, integer(1))
  # one row per input; columns: full match, bracket, left, right, bracket
  mmatch <- matrix(NA_character_, nrow = nrows, ncol = 5)
  for (x in seq_len(nrows)) {
    row <- lmatch[[x]]
    n <- length(row)
    if (n > 0) {
      mmatch[x, 1:n] <- row[1:n]
    }
  }
  intervals_df <- data.frame(
    interval = intervals,
    left = as.numeric(mmatch[, 3]),
    right = as.numeric(mmatch[, 4]),
    left_strict = (mmatch[, 2] == '['),
    right_strict = (mmatch[, 5] == ']'),
    match_count = ncols,
    stringsAsFactors = FALSE
  )
  # A bare number is treated as the degenerate closed interval [a, a].
  rx <- "^[-+]?\\d*\\.?\\d+(?:[eE][-+]?\\d+)?$"
  points <- grepl(rx, intervals)
  intervals_df$interval[points] <- intervals[points]
  intervals_df$left[points] <- as.numeric(intervals[points])
  intervals_df$right[points] <- as.numeric(intervals[points])
  intervals_df$right_strict[points] <- TRUE
  intervals_df$left_strict[points] <- TRUE
  intervals_df$match_count[points] <- 5
  # Warn about malformed strings, reversed endpoints, and degenerate
  # intervals with an open endpoint (which can never match anything).
  for (x in seq_len(nrows)) {
    if (intervals_df$match_count[x] != 5) {
      warning(paste0('The interval "', intervals_df$interval[x], '" is malformed.'))
      next
    }
    if (intervals_df$right[x] < intervals_df$left[x]) {
      warning(paste0('The interval "', intervals_df$interval[x], '" has right < left.'))
    }
    if (intervals_df$right[x] == intervals_df$left[x] &
        (!intervals_df$left_strict[x] | !intervals_df$right_strict[x])) {
      warning(paste0('The interval "', intervals_df$interval[x], '" is malformed.'))
    }
  }
  return(intervals_df)
}
# Vignette-style walkthrough for the cycleRtools package using the bundled
# `intervaldata` ride data. NOTE(review): depends on cycleRtools and leaflet.
knitr::opts_chunk$set(fig.width = 7, fig.height = 5)
options(digits = 2)
library(cycleRtools)
# Plot three data channels against ride time, marking laps and interval breaks.
plot(x = intervaldata,
     y = 1:3,
     xvar = "timer.min",
     xlab = "Time (min)",
     laps = TRUE,
     breaks = TRUE)
plot(intervaldata, y = 3, xvar = "timer.min", xlim = c(0, 50))
# Minutes spent in each power zone (zone_time returns seconds when pct = FALSE).
zone_time(data = intervaldata,
          column = power.W,
          zbounds = c(100, 200, 300),
          pct = FALSE) / 60
zone_time(intervaldata, zbounds = 310, pct = TRUE)
# Distribution of time across power bins.
zdist_plot(data = intervaldata,
           binwidth = 10,
           zbounds = c(100, 200, 300),
           xlim = c(50, 400))
summary(intervaldata)
# Maximal mean power profile over 2..20 minute windows.
times_sec <- 2:20 * 60
prof <- mmv(data = intervaldata,
            column = power.W,
            windows = times_sec)
print(prof)
# Critical power model: P = CP + W'/t, fitted as a linear model in 1/t.
hypm <- lm(prof[1, ] ~ {1 / times_sec})
hypm <- setNames(coef(hypm), c("CP", "W'"))
print(hypm)
plot(times_sec, prof[1, ], ylim = c(hypm["CP"], max(prof[1, ])),
     xlab = "Time (sec)", ylab = "Power (Watts)")
curve((hypm["W'"] / x) + hypm["CP"], add = TRUE, col = "red")
abline(h = hypm["CP"], lty = 2)
legend("topright", legend = c("Model", "CP"), bty = "n",
       lty = c(1, 2), col = c("red", "black"))
# Alternative power-time models via Pt_model(); plot the exponential fit.
ms <- Pt_model(prof[1, ], times_sec)
print(ms)
plot(times_sec, prof[1, ], ylim = c(hypm["CP"], max(prof[1, ])),
     xlab = "Time (sec)", ylab = "Power (Watts)")
curve(ms$Pfn$exp(x), add = TRUE, col = "red")
# Interactive route map from the ride's GPS coordinates.
library(leaflet)
leaflet(intervaldata) %>% addTiles() %>% addPolylines(~lon, ~lat)
# RUnit regression tests for krm.most() (kernel regression model score tests).
library("RUnit")
library("krm")
test.krm.mos.test <- function() {
    # Default: loose tolerance; tighten on the author's reproducibility machine.
    tolerance=1e-3; verbose=FALSE
    if(file.exists("D:/gDrive/3software/_checkReproducibility")) {
        tolerance=1e-6
        verbose=TRUE
    }
    # Fix the RNG so Monte Carlo p-values are reproducible.
    RNGkind("Mersenne-Twister", "Inversion")
    dat.file.name=paste(system.file(package="krm")[1],'/misc/y1.txt', sep="")
    seq.file.name=paste(system.file(package="krm")[1],'/misc/sim1.fasta', sep="")
    # rbf kernel over a rho grid, default (Monte Carlo) inference.
    data=sim.liu.2008 (n=100, a=.1, seed=1)
    test = krm.most(y~x, data, regression.type="logistic", formula.kern=~z.1+z.2+z.3+z.4+z.5, kern.type="rbf", n.rho=2, n.mc = 100, range.rho=.99, verbose=verbose)
    checkEqualsNumeric(test$p.values, c(0.91, 0.90, 0.93, 0.91), tolerance = tolerance)
    # Perturbation inference; reference values are platform-specific, so only
    # checked on 64-bit Windows (mingw32).
    data=sim.liu.2008 (n=100, a=.1, seed=1)
    test = krm.most(y~x, data, regression.type="logistic", formula.kern=~z.1+z.2+z.3+z.4+z.5, kern.type="rbf", n.rho=2, n.mc = 100, inference.method="perturbation", verbose=verbose)
    if (R.Version()$system %in% c("x86_64, mingw32")) {
        checkEqualsNumeric(test$p.values, c(0.87, NA, 0.89, NA), tolerance = tolerance)
    }
    # Davies-method inference returns a single p-value.
    data=sim.liu.2008 (n=50, a=.1, seed=1)
    test = krm.most(y~x, data, regression.type="logistic", formula.kern=~z.1+z.2+z.3+z.4+z.5, kern.type="rbf", n.mc = 100, range.rho=.99, inference.method="Davies", verbose=verbose)
    checkEqualsNumeric(test$p.values, 0.1223421, tolerance = tolerance)
    # Mutual-information kernel on sequence data; the three calls below should
    # agree when the sequences are supplied via file vs. formula.
    dat=read.table(dat.file.name); names(dat)="y"
    dat=cbind(dat, seq=unlist(readFastaFile(seq.file.name))); dat$seq=as.character(dat$seq)
    test = krm.most (y~1, dat, regression.type="logistic", seq.file.name=seq.file.name, kern.type="mi", n.rho=2, n.mc = 5e1, inference.method="parametric.bootstrap", verbose=verbose)
    checkEqualsNumeric(test$p.values, c(0.68, 0.60, 0.66, 0.60), tolerance = tolerance)
    test.2 = krm.most (y~1, dat, regression.type="logistic", formula.kern=~seq, kern.type="mi", n.rho=2, n.mc = 5e1, inference.method="parametric.bootstrap", verbose=verbose)
    checkEqualsNumeric(test.2$p.values, c(0.68, 0.60, 0.66, 0.60), tolerance = tolerance)
    # Restricting to positions 1..10 changes the expected p-values.
    test.3 = krm.most (y~1, dat, regression.type="logistic", formula.kern=~seq, kern.type="mi", n.rho=2, n.mc = 5e1, inference.method="parametric.bootstrap", seq.start=1, seq.end=10, verbose=verbose)
    checkEqualsNumeric(test.3$p.values, c(0.62, 0.48, 0.54, 0.46), tolerance = tolerance)
}
helpfunctionmultistate2 <- function(x, dummy){sum(1/dummy[x])} |
text_extract <- function(x, body = TRUE, header = TRUE, footer = TRUE, bookmark) {
  # Pull the text content out of a docx object via its Java backend,
  # optionally restricted to a single named bookmark.
  if (!inherits(x, "docx")) {
    stop("x must be a docx object.")
  }
  if (missing(bookmark)) {
    # Whole-document extraction, scoped by the body/header/footer flags.
    return(.jcall(x$obj, "[S", "getWords", body, header, footer))
  }
  if (length(bookmark) != 1 || !is.character(bookmark)) {
    stop("bookmark must be an atomic character.")
  }
  # Bookmark names are matched case-insensitively (lower-cased here).
  .jcall(x$obj, "[S", "getWords", casefold(bookmark, upper = FALSE))
}
list_bookmarks <- function(x, body = TRUE, header = TRUE, footer = TRUE) {
  # List the bookmark names defined in a docx object, excluding Word's
  # internal "_GoBack" cursor bookmark.
  if (!inherits(x, "docx")) {
    stop("x must be a docx object.")
  }
  marks <- .jcall(x$obj, "[S", "getBookMarks", body, header, footer)
  setdiff(marks, "_GoBack")
}
makePriorIWish <- function(mu, sd, v, p, S) {
  # Build a log-prior pair: a multivariate normal (independent components,
  # sd on the diagonal) for the mean, and an inverse-Wishart for the scale
  # matrix, rescaled by (v - p + 1).
  list(
    prior_mu = function(x) logDensityMvNorm(x, mu, sigma = diag(sd^2, length(x))),
    prior_S  = function(x) logDensityIWish(x, v = v, S = ((v - p + 1) * S))
  )
}
View_obs <- function(x, title) {
  # Open the uncompressed observations of `x` in a spreadsheet-style viewer.
  # When no title is given, derive one from the expression passed as `x`.
  if (missing(title)) {
    title <- paste0("obs(", deparse(substitute(x))[1L], ")")
  }
  obs <- observations(x, compressed = FALSE)
  if (is.RStudio()) {
    # Inside RStudio the masked View() routes to the IDE's data viewer.
    View(obs, title)
  } else {
    utils::View(obs, title)
  }
}
# Regression check for the Nelder-Mead optimizer trace: fit a crossed
# random-effects model with iprint=20 tracing and capture the console output.
library(lme4)
dat <- read.csv(system.file("testdata","dat20101314.csv",package="lme4"))
# Use a copy of the internal optimizer so lmerControl can reference it by name.
NMcopy <- lme4:::Nelder_Mead
cc <- capture.output(lmer(y ~ (1|Operator)+(1|Part)+(1|Part:Operator), data=dat,
                          control=
                          lmerControl("NMcopy",
                                      optCtrl= list(iprint=20))))
# Collapse the captured lines into a single string for pattern counting.
cc <- paste(cc,collapse="")
countStep <- function(str, n) {
  # Count occurrences of the Nelder-Mead trace marker "(NM) <n>: " in `str`.
  #
  # BUG FIX: gregexpr() returns a single -1 when there are no matches, so the
  # original `length(...)` reported 1 for zero occurrences. Guard for -1.
  m <- gregexpr(paste0("\\(NM\\) ", n, ": "), str)[[1]]
  if (m[1] == -1) 0L else length(m)
}
stopifnot(countStep(cc,140)==2 && countStep(cc,240)==1) |
# Backfitting estimator for an additive regression model (legacy version).
#
# Args:
#   x: n x d matrix of covariates.
#   y: response vector of length n.
#   arg: length-d evaluation point at which the components are estimated.
#   h: bandwidth for the final local-averaging step.
#   kernel: "gauss", "uniform" or "bart" smoothing kernel.
#   M: number of backfitting sweeps over the d coordinates.
#
# Returns a d x 1 matrix of component estimates at `arg`.
# NOTE(review): the "bart" kernel is (1 - t), not the usual Bartlett (1 - |t|)
# or Epanechnikov (1 - t^2) form -- confirm this is intentional.
additive.old<-function(x,y,arg,h=1,kernel="gauss",M=2)
{
d<-length(arg)
n<-length(y)
if (kernel=="gauss") ker<-function(t){ return( exp(-t^2/2) ) }
if (kernel=="uniform") ker<-function(t){ return( (abs(t) <= 1) ) }
if (kernel=="bart") ker<-function(t){ return( (1-t) ) }
# G holds the fitted additive components; hatc is the grand-mean intercept.
G<-matrix(0,n,d)
hatc<-mean(y)
residual<-matrix(y-hatc,n,1)
# Backfitting: cycle M times over coordinates, smoothing the current residual
# against each covariate with a kernel weight matrix built from pairwise
# differences, then refreshing the residual from all components.
for (m in 1:M){
for (j in 1:d){
colu<-x[,j]
pairdiffe<-matrix(colu,n,n,byrow=FALSE)-matrix(colu,n,n,byrow=TRUE)
Wj<-ker(pairdiffe)
Wj<-Wj/colSums(Wj)
G[,j]<-t(Wj)%*%residual
residual<-y-hatc-matrix(rowSums(G),n,1)
}
}
# Final step: kernel-weighted average of the residual at the evaluation
# point `arg`, one weight column per coordinate.
argu<-matrix(arg,dim(x)[1],d,byrow=TRUE)
W<-ker((x-argu)/h)/h
W<-W/colSums(W)
valuevec<-t(W)%*%residual
return(valuevec)
}
setGeneric("install_packages", function(pkgs, repos, versions = NULL, verbose = FALSE, ...) standardGeneric("install_packages"))
setMethod("install_packages", c("character", "character"), function(pkgs, repos, versions, verbose, ...) {
    ## Resolve character repository specifications into repo objects, wrap
    ## them in an (initially empty) PkgManifest, and delegate to the
    ## manifest-based method.
    chtypes = getStringType(repos)
    if(any(chtypes %in% c("sessioninfo", "manifestdir")))
        stop("Unsupported character format passed to repos argument")
    repos = mapply(repoFromString, str = repos, type = chtypes)
    man = PkgManifest(manifest = ManifestRow(name = character()), dep_repos = repos)
    if(!is.null(versions)) {
        ## Normalize a named character vector of versions into a data.frame
        ## with columns name/version.
        if(is(versions, "character"))
            versions = data.frame(name = names(versions),
                version = versions, stringsAsFactors=FALSE)
        ## BUG FIX: the column is 'name'; the original read versions$names
        ## (NULL), so this sanity check could never fire.
        if(any(!versions$name %in% pkgs))
            stop("Versions specified for packages not being installed. This is not currently supported.")
        if(any(!pkgs %in% versions$name)) {
            manifest_df(man) = ManifestRow(name = versions$name)
            man = .findThem(man, PkgManifest(dep_repos = repos))
            man = SessionManifest(manifest = man, versions = versions)
        }
    } else {
        ## No pinning requested: mark every package as unversioned.
        versions = rep(NA_character_, times = length(pkgs))
        names(versions) = pkgs
    }
    install_packages(pkgs, repos = man, verbose = verbose, versions = versions, ...)
})
## No repositories supplied: fall back to the default repository set.
setMethod("install_packages", c(pkgs = "character", repos= "missing"), function(pkgs, repos, versions = NULL, verbose, ...) {
    install_packages(pkgs, repos = defaultRepos(), verbose = verbose,
                     versions = versions, ...)
})
## Installing a SessionManifest: the manifest itself pins the package set, so
## install the packages listed in its versions table against it.
setMethod("install_packages", c(pkgs = "SessionManifest", repos= "ANY"), function(pkgs, repos, verbose, ...) {
    install_packages(versions_df(pkgs)$name, repos = pkgs, verbose = verbose, ...)
})
## Installing named packages against a SessionManifest: when the manifest
## records versions, pin each requested package to its recorded version via a
## lazy repository; otherwise install from the dependency repositories.
setMethod("install_packages", c(pkgs = "character", repos= "SessionManifest"), function(pkgs, repos, verbose, ...) {
    if(nrow(versions_df(repos))) {
        vdf = versions_df(repos)
        rownames(vdf) = vdf$name
        ## Pinned version for each requested package (NA when not recorded).
        vers = vdf[pkgs, "version"]
        ghrepo = lazyRepo(pkgs = pkgs, versions = vers, pkg_manifest = manifest(repos))
    } else {
        ghrepo = contrib.url(dep_repos(repos))
    }
    .install_packages(pkgs = pkgs, lazyrepo = ghrepo, man = manifest(repos), ...)
})
setMethod("install_packages", c(pkgs = "character", repos= "PkgManifest"), function(pkgs, repos, versions, verbose,...) {
    ## Install packages described by a PkgManifest, optionally pinning
    ## versions. `versions` may be missing/NULL (no pinning), a data.frame
    ## with name/version columns, or a character vector (named by package or
    ## parallel to `pkgs`).
    if(nrow(manifest_df(repos)) == 0) {
        ghrepo = contrib.url(dep_repos(repos))
    } else {
        if(missing(versions) || is.null(versions))
            versions = rep(NA_character_, times = length(pkgs))
        else if (is(versions, "data.frame")) {
            ord = match(pkgs, versions$name)
            ord = ord[!is.na(ord)]
            ## BUG FIX: extract the *version* column; the original took
            ## versions$name here, pinning each package to its own name.
            versions = versions$version[ord]
            ## NOTE(review): if some pkgs have no entry, the names(versions)
            ## assignment below recycles against a shorter vector -- confirm
            ## callers always supply a complete table.
        } else if (!is(versions, "character") ||
                   (length(versions) != length(pkgs) && is.null(names(versions))))
            stop("unsupported specification of package versions")
        if(is.null(names(versions)))
            names(versions) = pkgs
        ## Pad with NA (unpinned) entries for requested packages that were
        ## given no explicit version.
        mtch = match(pkgs, names(versions))
        miss = is.na(mtch)
        if(any(miss)) {
            new = rep(NA_character_, times = sum(miss))
            names(new) = pkgs[miss]
            versions = c(versions, new)
        }
        ghrepo= lazyRepo(pkgs, repos, verbose = verbose, versions = versions)
    }
    .install_packages(pkgs, ghrepo, man = repos, ...)
})
.install_packages = function(pkgs, lazyrepo, man, type = "source", ...) {
    ## Workhorse: install `pkgs` from the lazy repository plus the manifest's
    ## dependency repos, then annotate the DESCRIPTION files of everything
    ## newly installed or updated.
    ##
    ## BUG FIX: the original tested `"lib" %in% list(...)`, which scans the
    ## *values* of ... rather than the argument names, and then extracted the
    ## "lib.loc" element with single brackets (yielding a list, not a path).
    ## Check names(list(...)) for install.packages()'s `lib` argument instead.
    dots = list(...)
    if ("lib" %in% names(dots))
        libloc = dots[["lib"]]
    else
        libloc = .libPaths()[1]
    if(type != "source")
        warning("using type other than source is not officially supported with switchr. Use at your own risk")
    ## Available-package union: lazy repo first, then dependency repos for
    ## anything the lazy repo does not provide.
    avail1 = available.packages(lazyrepo, type = "source")
    avail2 = available.packages(contrib.url(dep_repos(man), type = type))
    new = !avail2[,"Package"] %in% avail1[,"Package"]
    avail = rbind(avail1, avail2[new,])
    ## Snapshot the installed set (and DESCRIPTION file metadata) so we can
    ## tell afterwards which packages install.packages() actually touched.
    oldpkgs = installed.packages(libloc)[,"Package"]
    oldinfo = lapply(oldpkgs, function(x) file.info(system.file("DESCRIPTION", package = x)))
    utils::install.packages(pkgs, available = avail, repos = unique(c(lazyrepo, contrib.url(dep_repos(man)))),
                            type = type, ...)
    newpkgs = installed.packages(libloc)[,"Package"]
    newinds = !newpkgs %in% oldpkgs
    if(!all(newinds)) {
        ## Some packages existed before: detect in-place updates by comparing
        ## DESCRIPTION file metadata before and after the install.
        possupdates = newpkgs[!newinds]
        newinfo = lapply(possupdates, function(x) file.info(system.file("DESCRIPTION", package = x)))
        oldmatchinds = match(possupdates, oldpkgs)
        updated = mapply(function(old, new) !identical(old, new), old = oldinfo[oldmatchinds],
                         new = newinfo)
        installedpkgs = c(newpkgs[newinds], newpkgs[updated])
    } else
        installedpkgs = newpkgs
    ## Annotation is best-effort; its failure must not fail the install.
    try(annotateDESCs(installedpkgs, man))
    installedpkgs
}
|
# Write qpGraph input files (.fstats, .graph, .parqpGraph) from a graph.params
# object: f3 estimates relative to the reference population, their scaled
# covariance matrix, the admixture-graph topology, and a default parameter file.
#
# NOTE(review): this chunk is corrupted. Every S4 slot access has been replaced
# by the literal token "[email protected]" (apparently an email-redaction
# artifact), the first cat() call below contains an unterminated string, and
# `dum.pops` is used without a visible assignment. The original slot names
# (e.g. graph.params@f3.vals, @f2.vals, @popref, @graph) must be restored from
# the upstream package before this function can run. Code left byte-identical.
graph.params2qpGraphFiles<-function(graph.params,outfileprefix="out",n.printed.dec=4,verbose=TRUE){
if(!(is.graph.params(graph.params))){
stop("The input graph.params is not a valid graph.params object (see generate.graph.params)\n")
}
if(length([email protected])==0){
stop("The input graph.params does not contain fstats estimates (the function generate.graph.params to create it may have been run without fstats object)\n")
}
if(!(n.printed.dec %in% 1:8)){stop("n.printed.dec must be an integer >=1 and <=8\n")}
f.prec=paste0("%10.",n.printed.dec,"f")
covfact=1e6;f3fact=1e3
if(nchar(outfileprefix)==0){outprefix="out"}
if(is.null([email protected])){stop("Invalid graph.params object: see generate.graph.params function\n")}
outfile=paste0(outfileprefix,".fstats")
cat(file=outfile,paste0("
for(i in 1:length([email protected])){
cat(sprintf("%15s",[email protected][i,2]),
sprintf("%15s",[email protected][i,2]),
sprintf(f.prec,[email protected][i]*f3fact),"\n",file=outfile,append=T)
}
for(i in 1:length([email protected])){
cat(sprintf("%15s",[email protected][i,2]),
sprintf("%15s",[email protected][i,3]),
sprintf(f.prec,[email protected][i]*f3fact),"\n",file=outfile,append=T)
}
tmp.n=length([email protected])+length([email protected])
tmp.nomcov=rbind(cbind([email protected][,2],[email protected][,2]),
[email protected][,2:3])
for(i in 1:tmp.n){
for(j in i:tmp.n){
cat(sprintf("%15s",tmp.nomcov[i,1]),sprintf("%15s",tmp.nomcov[i,2]),
sprintf("%15s",tmp.nomcov[j,1]),sprintf("%15s",tmp.nomcov[j,2]),
sprintf(f.prec,[email protected][i,j]*covfact),"\n",file=outfile,append=T)
}
}
if(verbose){cat("Fstats input file for qpGraph written in",outfile,"\n") }
outgraphfile=paste0(outfileprefix,".graph")
cat(file=outgraphfile,paste0("root\t",[email protected],"\n"))
cat(file=outgraphfile,paste0("label\t",graph.params@popref,"\t",graph.params@popref,"\n"),append=TRUE)
for(i in 1:nrow([email protected])){
[email protected][i,2]
cat(file=outgraphfile,paste0("label\t",dum.pops,"\t",dum.pops,"\n"),append=TRUE)
}
cat(file=outgraphfile,"\n",append=TRUE)
tmp.graph=graph.params@graph
adm.graph.rows=which(nchar(tmp.graph[,3])>0)
if(length(adm.graph.rows)>0){
adm.pops=unique(tmp.graph[adm.graph.rows,1])
for(i in adm.pops){
tmp.par=tmp.graph[tmp.graph[,1]==i,2]
cat(file=outgraphfile,paste0("admix\t",i,"\t",tmp.par[1],"\t",tmp.par[2],"\n"),append=TRUE)
}
tmp.graph=tmp.graph[-1*adm.graph.rows,]
}
for(i in 1:nrow(tmp.graph)){
cat(file=outgraphfile,paste0("edge\t",paste(tmp.graph[i,2:1],collapse="_"),"\t",tmp.graph[i,2],"\t",tmp.graph[i,1],"\n"),append=TRUE)
}
if(verbose){cat("Graph input file for qpGraph written in",outgraphfile,"\n") }
parfile=paste0(outfileprefix,".parqpGraph")
cat(file=parfile,"outpop: NULL\nforcezmode: YES\nlsqmode: NO\ndiag: .0001\nbigiter: 6\nhires: YES\nlambdascale: 1\n")
cat(file=parfile,paste0("fstatsname: ",outfile,"\n"),append=T)
if(verbose){cat("Parameter File for qpGraph with some default parameters written in",parfile,"\n")}
}
dgift <- function(data, questions) {
  # Drop rows that are entirely missing/empty, then require the questions
  # column to be fully populated in the surviving rows.
  blank_row <- apply(is.na(data) | data == "", 1, all)
  data <- data[!blank_row, ]
  has_empty_question <- any(data[, questions] %in% c(NA, "", " "))
  if (has_empty_question) {
    stop("Invalid questions Column contain empty cells")
  }
  data
}
make_answer <- function(data, answercol) {
  # Prefix each selected answer with "* " (GIFT answer marker), refusing
  # columns that contain empty cells.
  blanks <- c(NA, "", " ")
  if (any(data[, answercol] %in% blanks)) {
    stop("The column of Answers Selected contains Empty Cells")
  }
  data[, answercol] <- glue::glue("* {data[,answercol] }")
  data
}
rename_df <- function(datalist, col_names, i) {
  # Materialize the i-th element of `datalist` as a data.frame and apply the
  # supplied column names.
  out <- as.data.frame(datalist[i])
  names(out) <- col_names
  out
}
# Add a `q_names` column holding a 40-character preview of each question,
# suffixed with "..." (used as a short question identifier).
q_name <- function(data, questions) {
  data$q_names = glue::glue("{substr(data[,questions] , start = 1 , stop = 40)}...")
  return(data)
}
singular_input <- function(questions,
                           categories,
                           question_names,
                           question_type) {
  # Validate that each metadata argument is a single (length-1) value and
  # that `questions` is non-empty; errors otherwise, returns NULL invisibly.
  #
  # BUG FIXES relative to the original:
  # - as.list(c(...)) flattened all arguments into scalars, so the length-1
  #   check could never fire; build a proper list() of the four arguments.
  # - stop(...) had a trailing comma (an empty argument -> runtime error).
  # - llen[1] compared a one-element *list* with 0; use the element itself.
  l <- list(questions, categories, question_names, question_type)
  llen <- lengths(l)
  if (any(llen > 1)) {
    stop(
      "'questions', 'categories', 'question_names' and 'question_type' Length cannot be bigger than 1"
    )
  }
  if (llen[[1]] == 0) {
    stop("`questions` input is invalid")
  }
}
"genetic.dist" <-
    function(theta)
{
    # Haldane map distance from a recombination fraction theta:
    # d = -log(1 - 2*theta) / 2. Undefined (NaN/Inf) for theta >= 0.5.
    -(log(1 - 2 * theta)) / 2
}
# Expand must-link (ML) and cannot-link (CL) constraint pairs to their k
# nearest neighbours.
#
# link: list with ML and CL matrices, one constraint pair per row.
# ind: n x k matrix giving each point's k nearest-neighbour indices.
# distan: distance lookup aligned with `ind` (distan[i, j] appears to be the
#         distance from i to its j-th neighbour -- inferred from usage; confirm).
#
# Returns list(ML =, CL =): the original pairs plus, for each original pair,
# the k closest derived pairs obtained by substituting neighbours.
expandlink<-function(link,ind,distan){
k<-ncol(ind)
# Expand one constraint pair: form every combination of {point, its
# neighbours} x {other point, its neighbours}, drop the original pair, and
# score each derived pair by the neighbour-substitution distance(s).
expandone<-function(linkone,ind,distan){
ML<-linkone
ML1NN<-ind[ML[1],]
ML2NN<-ind[ML[2],]
newML1<-c(ML[1],ML1NN)
newML2<-c(ML[2],ML2NN)
newML<-cbind(rep(newML1,each=length(newML1)),rep(newML2,length(newML2)))
# Locate and remove the row equal (up to order) to the original pair.
original<-which(apply(newML,1,function(x){all(sort(x)==sort(ML))}))
newML0<-newML[-original,]
d<-rep(0,nrow(newML0))
for(o in 1:nrow(newML0)){
if(any(newML0[o,]==ML[1])){
# Only the second endpoint was substituted: cost of that substitution.
d[o]<-distan[ML[2],which(ML2NN==newML0[o,2])]
}else if(any(newML0[o,]==ML[2])){
# Only the first endpoint was substituted.
d[o]<-distan[ML[1],which(ML1NN==newML0[o,1])]
}else{d[o]<-distan[ML[1],which(ML1NN==newML0[o,1])]+distan[ML[2],which(ML2NN==newML0[o,2])]}
}
return(list(link=newML0,distance=d))
}
if((nrow(link$ML)+nrow(link$CL))==0){
# No constraints at all: return empty ML/CL matrices.
ML<-matrix(0,0,2)
CL<-matrix(0,0,2)
expandlink<-list(ML=ML,CL=CL)
}else{
# Keep only the k closest derived pairs per original constraint.
expandML<-NULL
for(i in 1:nrow(link$ML)){
res<-expandone(link$ML[i,],ind,distan)
firstk<-order(res$distance)
expandML<-rbind(expandML,res$link[firstk[1:k],])
}
expandCL<-NULL
for(i in 1:nrow(link$CL)){
res<-expandone(link$CL[i,],ind,distan)
firstk<-order(res$distance)
expandCL<-rbind(expandCL,res$link[firstk[1:k],])
}
# Note: `expandlink` here is a local variable shadowing the function name.
expandlink<-list(ML=rbind(link$ML,expandML),CL=rbind(link$CL,expandCL))
}
return(expandlink)
}
Id <- "$Id: c212.interim.ptheta.R,v 1.5 2019/05/05 13:18:12 clb13102 Exp clb13102 $"
c212.interim.ptheta <- function(raw)
{
	# For each (interval, body system, adverse event) cell, estimate the
	# posterior probability that theta > 0, pooling MCMC samples across all
	# chains via the empirical CDF.
	#
	# raw: simulation output list carrying chain count and the 5-d theta
	#      sample array [chain, interval, body system, AE, sample], plus the
	#      interval/B/AE labels and dimension tables.
	# Returns a data.frame(interval, B, AE, ptheta), or NULL on invalid input.
	if (is.null(raw)) {
		print("NULL raw data");
		return(NULL)
	}
	model = attr(raw, "model")
	if (is.null(model)) {
		print("Missing model attribute");
		return(NULL)
	}
	# Validate required components. BUG FIX: the original printed
	# "Missing chains data" for maxBs/nBodySys/maxAEs as well; report the
	# actual missing field.
	required <- c("chains", "maxBs", "nBodySys", "maxAEs", "nAE", "theta", "B", "AE")
	for (field in required) {
		if (!(field %in% names(raw))) {
			print(sprintf("Missing %s data", field));
			return(NULL)
		}
	}
	nchains = raw$chains
	# Accumulate rows in a list and bind once at the end: the original grew a
	# data.frame with rbind() inside the triple loop (quadratic), and also
	# built coda mcmc/mcmc.list objects that were never used (removed).
	rows <- list()
	for (i in seq_len(raw$nIntervals)) {
		for (b in seq_len(raw$nBodySys[i])) {
			for (j in seq_len(raw$nAE[i, b])) {
				# Pool this cell's samples from every chain and estimate
				# P(theta > 0) = 1 - F_hat(0).
				samples_combined <- c(raw$theta[1:nchains, i, b, j, ])
				s <- ecdf(samples_combined)
				th <- 1 - s(0)
				rows[[length(rows) + 1]] <- data.frame(interval = raw$Intervals[i],
					B = raw$B[i, b], AE = raw$AE[i, b, j], ptheta = th)
			}
		}
	}
	summ <- do.call(rbind, rows)
	if (is.null(summ)) {
		# No cells at all: return an empty frame with the expected columns.
		summ <- data.frame(interval = character(0), B = character(0),
			AE = character(0), ptheta = numeric(0))
	}
	rownames(summ) <- NULL
	return(summ)
}
# Unit tests for the hard ranking loss: the risk is the fraction of pairs on
# which the orderings of y and yhat disagree (see the outer()-based oracle in
# the final expectation below).
test_that("Hard ranking loss is implemented correctly",{
  y<-c(-3, 10.3,-8, 12, 14,-0.5, 29,-1.1,-5.7, 119)
  yhat<-c(0.02, 0.6, 0.1, 0.47, 0.82, 0.04, 0.77, 0.09, 0.01, 0.79)
  expect_equal(Rank()@risk(y,yhat),8/45)
  # Mismatched input lengths must raise an error.
  expect_error(Rank()@risk(c(1,2,3,4),c(1,2,3)))
  # Cross-check against a direct pairwise-disagreement computation.
  expect_equal(Rank()@risk(c(1,6,5,3,7,8,2),c(6,2,9,1,-2,4,5)),sum(sign(outer(c(1,6,5,3,7,8,2),c(1,6,5,3,7,8,2),function(x,z)z-x))-sign(outer(c(6,2,9,1,-2,4,5),c(6,2,9,1,-2,4,5),function(x,z)z-x))!=0)/42)
})
load.molecular.aberration.data <- function(
    file,
    patients = NULL,
    annotation.fields = NULL
    ) {

    # Load a tab-delimited table mixing per-patient aberration profiles with
    # feature-annotation columns.
    #
    # file: path to a tab-separated file with a header row.
    # patients: optional patient column names, extracted as a numeric matrix;
    #     unknown names are dropped with a warning.
    # annotation.fields: optional field names matched case-insensitively
    #     against column names. NOTE(review): entries are used as regular
    #     expressions, so names containing metacharacters (e.g. ".") may
    #     over-match -- confirm callers only pass plain names.
    #
    # Returns list(aberration.profiles, feature.annotation) when both are
    # available, a single matrix/annotation object when only one is, or the
    # raw table when neither was requested/found.
    aberration.data.and.anno <- read.table(
        file,
        sep = '\t',
        header = TRUE
        );

    aberration.profiles <- NULL;
    if (!is.null(patients)) {
        # Warn about and drop requested patients missing from the file.
        if (any(!patients %in% colnames(aberration.data.and.anno))) {
            warning(paste0(
                'the following patients were not found in the given file:',
                paste(
                    patients[!patients %in% colnames(aberration.data.and.anno)],
                    collapse = ','
                    )
                ));
            patients <- patients[patients %in% colnames(aberration.data.and.anno)];
        }
        # Coerce the patient columns into a numeric matrix, keeping names.
        aberration.profiles <- matrix(
            data = as.numeric(as.matrix(aberration.data.and.anno[,patients])),
            nrow = nrow(aberration.data.and.anno)
            );
        colnames(aberration.profiles) <- patients;
        rownames(aberration.profiles) <- rownames(aberration.data.and.anno);
    }

    # Match each annotation field against the column names (case-insensitive).
    colname.matches <- c();
    colname.repl <- c();
    for(i in annotation.fields) {
        match.idx <- grep(tolower(i), tolower(colnames(aberration.data.and.anno)));
        colname.matches <- c(colname.matches, match.idx);
        if (length(match.idx) == 1) {
            colname.repl <- c(colname.repl, i);
        }
        # A field matching several columns yields compound names "field.column".
        if (length(match.idx) > 1) {
            colname.repl <- c(
                colname.repl,
                paste(
                    rep(i,length(match.idx)),
                    colnames(aberration.data.and.anno)[match.idx],
                    sep = '.'
                    )
                );
        }
    }
    colname.matches <- unique(colname.matches);
    colname.repl <- unique(colname.repl);
    aberration.anno <- NULL;
    if (length(colname.matches) > 0) {
        aberration.anno <- aberration.data.and.anno[,colname.matches];
        if (length(colname.matches) > 1) {
            colnames(aberration.anno) <- colname.repl;
        }
    }
    else if (!is.null(annotation.fields)) {
        # Nothing matched: suggest the plausible (non-numeric-looking) columns.
        warning(paste(
            'annotation.fields (',annotation.fields,') didn\'t match any of the column names. The options for ',
            file,' are: ', paste(colnames(aberration.data.and.anno)[grep('\\d\\d\\d', colnames(aberration.data.and.anno),
            invert=TRUE)], collapse=', '),
            sep = ''
            ));
    }

    # Scalar && (the original used vectorized & in this scalar condition).
    if (!is.null(aberration.profiles) && !is.null(aberration.anno)) {
        return(list( aberration.profiles = aberration.profiles, feature.annotation = aberration.anno));
    }
    if (!is.null(aberration.profiles)) {
        return(aberration.profiles);
    }
    if (!is.null(aberration.anno)) {
        return(aberration.anno);
    }
    return(aberration.data.and.anno);
}
print.rfit <- function (x, ...) {
    # Print method for rank-based fit objects: echo the originating call,
    # then the estimated coefficients (extra arguments go to print()).
    cat("Call:\n")
    print(x$call)
    est <- coef(x)
    cat("\nCoefficients:\n")
    print(est, ...)
}
# Regression tests for tsna::tPath on small hand-built networkDynamic objects.
require(tsna)
require(testthat)
require(networkDynamicData)
# Static 10-vertex path graph (no activity spells) used by later examples.
linegraph<-network.initialize(10)
add.edges(linegraph,tail=1:9,head=2:10)
data(concurrencyComparisonNets)
test_that('tPath basic tests',{
# Forward path on a 4-vertex line whose edges activate in sequence.
line<-network.initialize(4)
add.edges.active(line,tail=1:3,head=2:4,onset=0:2,terminus=1:3)
expect_equal(names(tPath(line,v=1)),c('tdist','previous','gsteps','start','end','direction','type'))
expect_is(tPath(line,v=1),class = 'tPath')
expect_error(tPath(line,v=1,type='foo'))
expect_error(tPath(line,v=1,direction='foo'))
expect_error(tPath(line,v=1,type='latest.depart'),regexp='method is not yet implemented')
expect_equal(tPath(line,v=1)$tdist,c(0, 0, 1, 2))
expect_equal(tPath(line,v=2)$tdist,c(Inf,0,1,2))
# 'start' and 'end' bounds restrict which vertices are reachable in time.
expect_equal(tPath(line,v=1,start=0.5)$tdist, c(0,0,0.5,1.5))
expect_equal(tPath(line,v=1,start=2)$tdist, c(0,Inf,Inf,Inf))
expect_equal(tPath(line,v=1,end=2)$tdist, c(0,0,1,Inf))
# Out-of-order activation blocks the time-respecting path.
line<-network.initialize(4)
add.edges.active(line,tail=1:3,head=2:4,onset=c(2,1,3),terminus=c(3,2,4))
expect_equal(tPath(line,v=1)$tdist,c(0,1,Inf,Inf))
# Edges with no explicit spells follow the active.default setting.
test<-as.networkDynamic(network.initialize(4))
add.edges(test,1:3,2:4)
expect_equal(tPath(test,v=1,start=0)$tdist,c(0,0,0,0))
expect_equal(tPath(test,v=1,active.default=FALSE,start=0)$tdist,c(0,Inf,Inf,Inf))
test<-network.initialize(4)
add.edges(test,1:3,3:4)
activate.edges(test,e=1,at=2)
# Argument validation and messaging.
test<-as.networkDynamic(network.initialize(4))
expect_message(tPath(test,v=1),regexp="'start' time parameter for paths was not specified")
expect_error(tPath(network.initialize(3)),regexp='first argument must be a networkDynamic object')
expect_error(tPath(as.networkDynamic(network.initialize(2)),regexp='argument with valid vertex ids was not given'))
expect_equal(tPath(as.networkDynamic(network.initialize(0)),start=0,v=numeric(0))$tdist,numeric(0))
})
test_that("path in large base network matches",{
# Size of the forward reachable set from vertex 24 of the 'base' net.
fwdDFS<-tPath(base,v=24)
expect_equal(sum(fwdDFS$tdist<Inf),772)
# NOTE(review): the two sets below are computed but never compared.
infset<-which(get.vertex.attribute.active(base,'status',at=102)>0)
pathset<-which(tPath(base,v=24,graph.step.time=1)$tdist<Inf)
})
data(moodyContactSim)
test_that("test of moody's example network",{
# Reference distances/predecessors/step counts for Moody's contact network.
paths<-tPath(moodyContactSim,v=10)
expect_equal(paths$tdist,c(543, 454, 594, 0, 672, 661, 184, 679, 634, 0, 709, 581, 413, 625, 669, 535))
expect_equal(paths$previous,c(16,13,13,10,13,16,10,13,1,0,8,1,4,4,2,2))
expect_equal(paths$gsteps,c(5, 3, 3, 1, 3, 5, 1, 3, 6, 0, 4, 6, 2, 2, 4, 4))
})
test_that("test on network with two components",{
# Two disjoint cliques: reachability must stay within each component.
test<-network.initialize(10)
activate.vertices(test)
test[1:5,5:1]<-1
test[6:10,10:6]<-1
expect_equal(which(tPath(test,v=1)$tdist!=Inf),1:5)
expect_equal(which(tPath(test,v=6)$tdist!=Inf),6:10)
})
test_that("graph step time param works",{
# graph.step.time adds a per-hop transmission delay on top of edge timing.
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0:2,terminus=1:3)
expect_equal(tPath(test,v=1,graph.step.time=0)$tdist,c(0, 0, 1, 2))
expect_equal(tPath(test,v=1,graph.step.time=0.5)$tdist,c(0, 0.5, 1.5, 2.5))
expect_equal(tPath(test,v=1,graph.step.time=1)$tdist,c(0, 1, 2, 3))
expect_equal(tPath(test,v=1,graph.step.time=2)$tdist,c(0, Inf, Inf, Inf))
# Long overlapping spells: delay accumulates per hop.
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0,terminus=10)
expect_equal(tPath(test,v=1,graph.step.time=1)$tdist,c(0, 1, 2, 3))
expect_equal(tPath(test,v=1,graph.step.time=2)$tdist,c(0, 2, 4, 6))
expect_equal(tPath(test,v=1,graph.step.time=0)$tdist,c(0, 0, 0, 0))
# Always-active (default) edges behave like the overlapping-spell case.
test<-as.networkDynamic(network.initialize(4))
add.edges(test,tail=1:3,head=2:4)
expect_equal(tPath(test,v=1,graph.step.time=1)$tdist,c(0, 1, 2, 3))
expect_equal(tPath(test,v=1,graph.step.time=2)$tdist,c(0, 2, 4, 6))
# A later second spell on the first edge delays the whole path.
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0:2,terminus=1:3)
activate.edges(test,e=1,onset=5,terminus=10)
expect_equal(tPath(test,v=1,graph.step.time=2)$tdist,c(0, 7, Inf, Inf))
# Exploratory call (no expectation recorded).
test<-network.initialize(10)
add.edges.active(test,tail=1:9,head=2:10,onset=0:9,terminus=10)
tPath(test,v=1,start=5,graph.step.time=2)$tdist
})
# Backward ('bkwd', latest.depart) path checks on 10-vertex lines whose edge
# activations run in decreasing vs. increasing time order.
test<-network.initialize(10)
add.edges(test,tail=1:9,head=2:10)
activate.edges(test,onset=10:0,terminus=11:1)
results<-tPath(test,v=5,direction='bkwd',type='latest.depart')
expect_equal(results$tdist,c(Inf, Inf, Inf, 3, 0, Inf, Inf, Inf, Inf, Inf))
expect_equal(results$previous,c(0, 0, 0, 5, 0, 0, 0, 0, 0, 0))
expect_equal(results$gsteps,c(Inf, Inf, Inf, 1, 0, Inf, Inf, Inf, Inf, Inf))
# Increasing activation order: every earlier vertex can reach vertex 10.
test<-network.initialize(10)
add.edges(test,tail=1:9,head=2:10)
activate.edges(test,onset=0:10,terminus=1:11)
results<-tPath(test,v=10,direction='bkwd',type='latest.depart')
expect_equal(results$tdist,c(8,7,6,5,4,3,2,1,0,0))
expect_equal(results$previous,c(2,3,4,5,6,7,8,9,10,0))
expect_equal(results$gsteps,c(9, 8, 7, 6, 5, 4, 3, 2, 1, 0))
# Backward paths on Moody's contact network against reference values.
results<-tPath(moodyContactSim,v=10,direction='bkwd',type='latest.depart')
expect_equal(results$tdist,c(Inf, Inf, Inf, 723, Inf, Inf, 539, Inf, Inf, 0, Inf, Inf, Inf, Inf, Inf, Inf))
expect_equal(results$previous,c(0, 0, 0, 10, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0))
results<-tPath(moodyContactSim,v=16,direction='bkwd',type='latest.depart')
expect_equal(results$tdist,c(180, 196, Inf, 13, Inf, 62, Inf, Inf, Inf, 723, 548, Inf, 271, 103, Inf, 0))
expect_equal(results$previous,c(16, 16, 0, 16, 0, 16, 0, 0, 0, 4, 1, 0, 2, 4, 0, 0))
test_that("graph step time param works for bakward path",{
# Mirror of the forward graph.step.time tests, in the backward direction.
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0:2,terminus=1:3)
expect_equal(tPath(test,v=4,graph.step.time=0,direction='bkwd',type='latest.depart')$tdist,c(2, 1, 0, 0))
expect_equal(tPath(test,v=4,graph.step.time=0.5,direction='bkwd',type='latest.depart')$tdist,c(2.5, 1.5, 0.5, 0.0))
expect_equal(tPath(test,v=4,graph.step.time=1,direction='bkwd',type='latest.depart')$tdist,c(3, 2, 1, 0))
expect_equal(tPath(test,v=4,graph.step.time=2,direction='bkwd',type='latest.depart')$tdist,c( Inf, Inf, Inf,0))
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0,terminus=10)
expect_equal(tPath(test,v=4,graph.step.time=1,direction='bkwd',type='latest.depart')$tdist,c(3, 2, 1, 0))
expect_equal(tPath(test,v=4,graph.step.time=2,direction='bkwd',type='latest.depart')$tdist,c(6, 4, 2, 0))
expect_equal(tPath(test,v=4,graph.step.time=0,direction='bkwd',type='latest.depart')$tdist,c(0, 0, 0, 0))
test<-as.networkDynamic(network.initialize(4))
add.edges(test,tail=1:3,head=2:4)
expect_equal(tPath(test,v=4,graph.step.time=1,direction='bkwd',type='latest.depart')$tdist,c(3, 2, 1, 0))
expect_equal(tPath(test,v=4,graph.step.time=2,direction='bkwd',type='latest.depart')$tdist,c(6, 4, 2, 0))
test<-network.initialize(4)
add.edges.active(test,tail=1:3,head=2:4,onset=0:2,terminus=1:3)
activate.edges(test,e=1,onset=5,terminus=10)
expect_equal(tPath(test,v=4,graph.step.time=2,direction='bkwd',type='latest.depart')$tdist,c(Inf, Inf, 9, 0))
})
# Exploratory checks (results printed/plotted, mostly without expectations).
# Edge with two activity spells: compare tPath with the internal fwd-latest search.
test<-network.initialize(2)
add.edges.active(test,tail=1,head=2,onset=0,terminus=1)
activate.edges(test,onset=2,terminus=3)
tPath(test,v=2,start=0,end=3)
tsna:::paths.fwd.latest(test,v=2,start=0,end=3)
# Small undirected examples with point-activated edges.
test<-network.initialize(5,direct=FALSE)
add.edges(test,tail=c(1,1,2,4),head=c(3,2,4,3))
activate.edges(test,at=c(1,2,3,4))
plot(test,displaylabels=TRUE,edge.label=get.edge.activity(test))
tsna:::paths.fwd.latest(test,v=1,start=0,end=4)
test<-network.initialize(5,direct=FALSE)
add.edges(test,tail=c(1,1,2,4),head=c(3,2,4,3))
activate.edges(test,at=c(1,2,3,4))
plot(test,displaylabels=TRUE,edge.label=get.edge.activity(test))
tPath(test,v=1,start=0)
tsna:::paths.fwd.latest(test,v=1,start=0)
test<-network.initialize(4,directed=FALSE)
add.edges(test,tail=c(1,1,2,4),head=c(3,2,4,3))
activate.edges(test,at=c(2,1,3,4))
plot(test,displaylabels=TRUE,edge.label=get.edge.activity(test))
# All edges active at a single instant: compare with and without step delay.
test<-network.initialize(4)
add.edges.active(test,1:3,2:4,at=1)
tPath(test,v=1,start=0)$tdist
tPath(test,v=1,start=0,graph.step.time = 1)$tdist
# Star-like 'paths5' example: five two-hop routes from A to G with varying
# spell timing; the earliest-arrival route should be via D (tdist 4).
paths5<-network.initialize(7)
network.vertex.names(paths5)<-LETTERS[1:7]
add.edges.active(paths5,tail=c(1,2),head=c(2,7),onset=c(1,4),terminus=c(2,5))
add.edges.active(paths5,tail=c(1,3),head=c(3,7),onset=c(0,6),terminus=c(2,7))
add.edges.active(paths5,tail=c(1,4),head=c(4,7),onset=c(4,5),terminus=c(5,6))
add.edges.active(paths5,tail=c(1,5),head=c(5,7),onset=c(6,9),terminus=c(7,10))
add.edges.active(paths5,tail=c(1,6),head=c(6,7),onset=c(4,10),terminus=c(5,11))
plot(paths5, mode='circle',displaylabels=TRUE,edge.label=get.edge.activity(paths5),edge.label.col='blue',edge.label.cex=0.6)
as.data.frame(paths5)
res2<-tPath(paths5,v=1)
expect_equal(res2$tdist[7],4)
# Expand scale-level psychometric specifications into item-level correlation
# and covariance matrices.
#
# k_vec: number of items per scale.
# R_scales: scale-level correlation matrix.
# rel_vec: scale reliabilities (used to derive item intercorrelations via the
#          Spearman-Brown helper estimate_rel_sb, defined elsewhere).
# mean_vec, sd_vec: scale means and standard deviations.
# var_names: optional scale names (defaults to x1, x2, ...).
#
# Returns a list of item- and scale-level R/S matrices, "complete" matrices
# containing scale composites alongside items, item means/SDs, and naming /
# index bookkeeping for downstream simulation.
itemize_scales <- function(k_vec, R_scales, rel_vec,
                           mean_vec = rep(0, length(k_vec)),
                           sd_vec = rep(1, length(k_vec)), var_names = NULL){
  if(is.null(var_names)) var_names <- paste0("x", 1:length(k_vec))
  # Build item names per scale and record each scale's item positions.
  item_names <- NULL
  item_index <- item_names_list <- list()
  for(i in 1:length(k_vec)){
    .index <- item_names
    item_names_list[[i]] <- paste0(var_names[i], "_item", 1:k_vec[i])
    item_names <- c(item_names, item_names_list[[i]])
    item_index[[i]] <- (length(.index)+1):length(item_names)
  }
  names(item_index) <- var_names
  # Within-scale item intercorrelations implied by the scale reliabilities
  # (reverse Spearman-Brown with k = 1/k_vec).
  intercor <- estimate_rel_sb(rel_initial = rel_vec, k = 1/k_vec)
  k_mat <- matrix(k_vec, length(k_vec), length(k_vec))
  intercor_mat <- matrix(intercor, length(k_vec), length(k_vec))
  # Between-scale item correlations implied by the scale correlations
  # (composite_r_scalar defined elsewhere in the package).
  r_mat_item <- composite_r_scalar(mean_rxy = R_scales,
                                   k_vars_x = 1/ k_mat, mean_intercor_x = intercor_mat,
                                   k_vars_y = 1/ t(k_mat), mean_intercor_y = t(intercor_mat))
  diag(r_mat_item) <- intercor
  # Inflate the scale-by-scale item correlations into a full item matrix.
  R <- matrix(NA, length(item_names), length(item_names))
  for(i in 1:length(k_vec)) for(j in 1:length(k_vec)) R[item_index[[i]], item_index[[j]]] <- r_mat_item[i,j]
  diag(R) <- 1
  # Item means/SDs chosen so items sum to the scale mean/SD.
  item_sds <- item_means <- NULL
  for(i in 1:length(k_vec)){
    item_means <- c(item_means, rep(mean_vec[i], k_vec[i]) / k_vec[i])
    item_sds <- c(item_sds, rep(sd_vec[i] / sum(R[item_index[[i]], item_index[[i]]])^.5, k_vec[i]))
  }
  S <- diag(item_sds) %*% R %*% diag(item_sds)
  S_scales <- diag(sd_vec) %*% R_scales %*% diag(sd_vec)
  dimnames(R_scales) <- dimnames(S_scales) <- list(var_names, var_names)
  dimnames(R) <- dimnames(S) <- list(item_names, item_names)
  # "Complete" matrices: unit-weight scale composites stacked above the items.
  id_vec <- 1:ncol(S)
  wt_mat <- matrix(0, ncol(S), length(item_index))
  for(i in 1:length(item_index)) wt_mat[id_vec %in% item_index[[i]],i] <- 1
  comb_cov <- t(wt_mat) %*% S
  comb_var <- comb_cov %*% wt_mat
  S_complete <- cbind(rbind(comb_var, t(comb_cov)), rbind(comb_cov, S))
  rownames(S_complete) <- colnames(S_complete) <- c(var_names, item_names)
  # suppressWarnings(): cov2cor may warn about zero variances for error-only runs.
  R_complete <- suppressWarnings(cov2cor(S_complete))
  # Item positions within the complete (composites-first) matrices.
  item_index_complete <- lapply(item_index, function(x) x + length(k_vec))
  means_complete <- c(mean_vec, item_means)
  sds_complete <- c(sd_vec, item_sds)
  names(means_complete) <- names(sds_complete) <- c(var_names, item_names)
  names(item_names_list) <- var_names
  list(R_complete = R_complete,
       S_complete = S_complete,
       R_items = R,
       S_items = S,
       R_scales = R_scales,
       S_scales = S_scales,
       rel_vec = rel_vec,
       means_complete = means_complete,
       sds_complete = sds_complete,
       item_means = item_means,
       item_index = item_index,
       item_index_complete = item_index_complete,
       scale_names = var_names,
       item_names = item_names_list)
}
simulate_psych_items <- function(n, k_vec, R_scales, rel_vec,
                                 mean_vec = rep(0, length(k_vec)),
                                 sd_vec = rep(1, length(k_vec)), var_names = NULL){
  # Simulate item-level psychometric data for a set of scales.
  #
  # n:         sample size; if infinite, only population matrices/parameters
  #            are returned (no data are drawn).
  # k_vec:     number of items in each scale.
  # R_scales:  true-score correlation matrix among the scales.
  # rel_vec:   reliability of each scale.
  # mean_vec:  observed-score mean of each scale.
  # sd_vec:    observed-score SD of each scale.
  # var_names: optional scale names, passed through to itemize_scales().
  #
  # Returns a list with R/S correlation and covariance structures for
  # observed, true, and error scores, the simulation parameters, and (for
  # finite n) simulated data plus sample reliability estimates.

  # Observed-score scale correlations: put 1/reliability on the diagonal and
  # rescale; cov2cor then multiplies r_ij by sqrt(rel_i * rel_j), i.e. the
  # classical attenuation of true-score correlations.
  R_scales_obs <- R_scales
  diag(R_scales_obs) <- 1 / rel_vec
  R_scales_obs <- cov2cor(R_scales_obs)
  obs_out <- itemize_scales(k_vec = k_vec, R_scales = R_scales_obs, rel_vec = rel_vec,
                            mean_vec = mean_vec, sd_vec = sd_vec, var_names = var_names)
  # True scores: perfectly reliable, SDs shrunk by sqrt(reliability).
  true_out <- itemize_scales(k_vec = k_vec, R_scales = R_scales, rel_vec = rep(1, length(k_vec)),
                             mean_vec = mean_vec, sd_vec = sd_vec * rel_vec^.5, var_names = var_names)
  # Error scores: uncorrelated across scales, zero reliability, zero means,
  # variance sd^2 * (1 - rel).
  error_out <- itemize_scales(k_vec = k_vec, R_scales = diag(length(k_vec)), rel_vec = rep(0, length(k_vec)),
                              mean_vec = rep(0, length(k_vec)), sd_vec = (sd_vec^2 - sd_vec^2 * rel_vec)^.5, var_names = var_names)
  item_index <- true_out$item_index
  R <- list(observed = obs_out$R_complete,
            true = true_out$R_complete,
            error = error_out$R_complete)
  S <- list(observed = obs_out$S_complete,
            true = true_out$S_complete,
            error = error_out$S_complete)
  params <- list(rel = obs_out$rel_vec,
                 means = obs_out$means_complete,
                 sds = obs_out$sds_complete,
                 scale_names = obs_out$scale_names,
                 item_names = obs_out$item_names,
                 item_index = obs_out$item_index_complete)
  if(!is.infinite(n)){
    # MASS is only required when samples are actually drawn.
    if (!requireNamespace("MASS", quietly = TRUE)) {
      stop("The package 'MASS' is not installed.\n",
           " 'MASS' is required to simulate samples.\n",
           " Please install 'MASS'.")
    }
    # Draw latent true and error item scores; observed = true + error.
    items_true <- MASS::mvrnorm(n = n, mu = true_out$item_means, Sigma = true_out$S_items)
    items_error <- MASS::mvrnorm(n = n, mu = error_out$item_means, Sigma = error_out$S_items)
    colnames(items_true) <- colnames(items_error) <- colnames(true_out$S_items)
    items_obs <- items_true + items_error
    items_obs <- as_tibble(items_obs, .name_repair = "minimal")
    items_true <- as_tibble(items_true, .name_repair = "minimal")
    items_error <- as_tibble(items_error, .name_repair = "minimal")
    # Scale scores are unweighted sums of their items.
    scales_obs <- simplify2array(lapply(true_out$item_index, function(x) apply(items_obs[,x], 1, sum)))
    scales_true <- simplify2array(lapply(true_out$item_index, function(x) apply(items_true[,x], 1, sum)))
    scales_error <- simplify2array(lapply(true_out$item_index, function(x) apply(items_error[,x], 1, sum)))
    colnames(scales_obs) <- colnames(scales_true) <- colnames(scales_error) <- true_out$scale_names
    # Sample coefficient alpha per scale: alpha_u from covariances, alpha_s
    # from correlations (mean off-diagonal over mean of all elements is
    # algebraically equal to Cronbach's alpha).
    rel_mat <- simplify2array(lapply(item_index, function(x){
      R <- cor(items_obs[,x])
      S <- cov(items_obs[,x])
      c(alpha_u = mean(S[lower.tri(S)]) / mean(S),
        alpha_s = mean(R[lower.tri(R)]) / mean(R))
    }))
    rel_mat[is.na(rel_mat)] <- NA
    # Squared observed-true correlation: parallel-forms style reliability.
    rel_mat <- rbind(rel_mat,
                     rxx_parallel = diag(cor(scales_obs, scales_true))^2)
    list(data = list(observed = cbind(scales_obs, items_obs),
                     true = cbind(scales_true, items_true),
                     error = cbind(scales_error, items_error)),
         R = R,
         S = S,
         params = params,
         rel_mat = rel_mat)
  }else{
    # Infinite n: population matrices and parameters only.
    list(R = R,
         S = S,
         params = params)
  }
}
.compute_alpha <- function(sigma, ...){
  # Cronbach's alpha from a covariance (or correlation) matrix `sigma`.
  # NOTE(review): any extra arguments (e.g. a `wt` passed by .alpha_items)
  # are swallowed by `...` and ignored; unit weights are always used here.
  n_items <- ncol(sigma)
  # With unit weights: sum(diag(sigma)) == sum(wt * diag(sigma)) and
  # sum(sigma) == wt %*% sigma %*% wt (total composite variance).
  diag_var <- sum(diag(sigma))
  total_var <- sum(sigma)
  n_items / (n_items - 1) * (1 - diag_var / total_var)
}
.alpha_items <- function(item_dat = NULL, S = NULL, R = NULL, item_index, item_wt = NULL){
  # Unstandardized (alpha_u) and standardized (alpha_s) coefficient alpha for
  # each scale defined by `item_index`, computed either from raw item data
  # (`item_dat`) or from supplied covariance (S) / correlation (R) matrices.
  if(!is.null(item_dat)){
    # A bare vector is treated as a single observation (one-row data frame).
    if(is.null(dim(item_dat))) item_dat <- data.frame(t(item_dat), stringsAsFactors = FALSE)
    S <- cov(item_dat)
    R <- cov2cor(S)
  }
  rel_list <- lapply(seq_along(item_index), function(scale_i){
    items <- item_index[[scale_i]]
    if(length(items) == 1){
      # Alpha is undefined for single-item scales.
      c(alpha_u = NA, alpha_s = NA)
    }else{
      sub_S <- S[items, items]
      sub_R <- R[items, items]
      wt <- if(is.null(item_wt)) rep(1, ncol(sub_R)) else item_wt[[scale_i]]
      c(alpha_u = .compute_alpha(sigma = sub_S, wt = wt),
        alpha_s = .compute_alpha(sigma = sub_R, wt = wt))
    }
  })
  names(rel_list) <- names(item_index)
  rel_mat <- simplify2array(rel_list)
  rel_mat[is.na(rel_mat)] <- NA  # retained from the original (no-op)
  rel_mat
}
compute_alpha <- function(sigma = NULL, data = NULL, standardized = FALSE, ...){
  # Public entry point for coefficient alpha. Accepts either a covariance
  # matrix (`sigma`) or raw `data` from which one is computed (with `...`
  # forwarded to cov()). If `standardized` is TRUE, alpha is computed on the
  # corresponding correlation matrix instead.
  if(is.null(sigma)){
    if(is.null(data)){
      stop("Either sigma or data must be supplied", call. = FALSE)
    }
    sigma <- cov(data, ...)
  }
  if(standardized){
    sigma <- cov2cor(sigma)
  }
  .compute_alpha(sigma = sigma)
}
catatis=function(Data,nblo,NameBlocks=NULL, NameVar=NULL, Graph=TRUE, Graph_weights=TRUE){
  # CATATIS analysis of nblo binary (0/1, e.g. check-all-that-apply) data
  # blocks sharing the same n rows (products) and the same number of columns
  # (attributes) per block. Computes subject weights from the first
  # eigenvector of the between-block similarity matrix, builds a weighted
  # compromise configuration, and runs a correspondence analysis on it.
  #
  # Data:          n x (nvar * nblo) binary matrix/data frame, blocks side by side.
  # nblo:          number of blocks (subjects).
  # NameBlocks:    optional block names (default "S-1", "S-2", ...).
  # NameVar:       optional attribute names for the compromise columns.
  # Graph:         if TRUE, plot CA eigenvalues and the CA map.
  # Graph_weights: if TRUE, barplot of the subject weights.

  # ---- dimensions and default names ----
  n=nrow(Data)
  p=ncol(Data)
  nvar=p/nblo
  if (as.integer(nvar)!=nvar)
  {
    stop("number of columns modulo nblo != 0")
  }
  Blocks=rep(nvar,nblo)
  J=rep(1:nblo , times = Blocks )   # block membership of each column
  if (is.null(NameBlocks)) NameBlocks=paste("S",1:nblo,sep="-")
  if(is.null(rownames(Data))) rownames(Data)=paste0("X", 1:nrow(Data))
  if(is.null(colnames(Data))) colnames(Data)=rep(paste0("Y",1:nvar), nblo)
  X=Data

  # ---- input validation ----
  if(length(NameBlocks)!=nblo)
  {
    stop("Error with the length of NameBlocks")
  }
  for (i in 1: ncol(Data))
  {
    if (is.numeric(Data[,i])==FALSE)
    {
      stop(paste("The data must be numeric (column",i,")"))
    }
  }
  # Every cell must be exactly 0 or 1.
  if ((sum(Data==0)+sum(Data==1))!=(dim(Data)[1]*dim(Data)[2]))
  {
    stop("only binary Data is accepted (0 or 1)")
  }
  if(n<3)
  {
    stop("At least 3 products are required")
  }
  if(nblo<2)
  {
    stop("At least 2 subjects are required")
  }
  if(nvar<3)
  {
    stop("At least 3 attributes are required")
  }
  if(sum(is.na(Data))>0)
  {
    print("NA detected:")
    tabna=which(is.na(Data), arr.ind = TRUE)
    print(tabna)
    stop(paste("NA are not accepted"))
  }

  # ---- normalize each block to unit Frobenius norm ----
  # For 0/1 data sum(Aj == 1) equals the sum of squared entries, so normXj is
  # the Frobenius norm of block j; muk stores these norms.
  Xj=array(0,dim=c(n,nvar,nblo))
  muk=NULL
  for(j in 1:nblo)
  {
    Aj=as.matrix(X[,J==j])
    normXj=sqrt(sum(Aj==1))
    muk[j]=normXj
    if(normXj==0)
    {
      stop(paste("error: the subject",NameBlocks[j], "has only 0"))
    }
    Xj[,,j]=Aj/normXj
  }
  mu=mean(muk)
  facteurech=mu/muk   # scale factor of each subject relative to the mean norm

  # ---- similarity (inner-product) matrix between normalized blocks ----
  S=matrix(0,nblo,nblo)
  diag(S)=rep(1,nblo)
  for (i in 1:(nblo-1)) {
    for (j in (i+1):nblo) {
      S[i,j]=sum(diag(tcrossprod(Xj[,,i],Xj[,,j])))
      S[j,i]=S[i,j]
    } }

  # ---- subject weights: first singular vector of S, sign-fixed ----
  ressvd=svd(S)
  u=ressvd$u[,1]
  u=u*sign(u[1])
  lambda=ressvd$d[1]
  hom=lambda/sum(diag(S))   # homogeneity: first eigenvalue over trace(S)

  # ---- compromise = weighted sum of normalized blocks ----
  C=matrix(0,n,nvar)
  for (j in 1:nblo) { C=C+(u[j]*Xj[,,j]) }

  # ---- residual error by subject (dw) and by product (erreur rows) ----
  dw=rep(0,nblo)
  erreur=matrix(0,n,nblo)
  for (j in 1:nblo) {
    a=Xj[,,j]-(u[j]*C)
    dw[j]=sum(diag(tcrossprod(a)))
    erreur[,j]=diag(tcrossprod(a))
  }
  Q=sum(dw)   # overall fitting error
  obj=rep(0,n)
  for (i in 1:n)
  {
    obj[i]=sum(erreur[i,])
  }
  rownames(C)=names(obj)=rownames(Data)
  if (is.null(NameVar)==TRUE)
  {
    colnames(C)=colnames(Data)[1:nvar]
  }else{
    colnames(C)=NameVar
  }

  # ---- similarity of each block with the compromise ----
  normC=sqrt(sum(diag(tcrossprod(C))))
  s=NULL
  for (i in 1:nblo)
  {
    s=c(s,sum(diag(tcrossprod(Xj[,,i],C)))/normC)
  }

  # ---- drop all-zero rows/columns before correspondence analysis ----
  # CA cannot handle zero margins, so such rows/columns are removed with a
  # warning.
  compromis=C
  colomnnull=NULL
  for (l in 1:ncol(compromis))
  {
    if (sum(compromis[,l])==0)
    {
      colomnnull=c(colomnnull,l)
    }
  }
  rownull=NULL
  for (l in 1:nrow(compromis))
  {
    if (sum(compromis[l,])==0)
    {
      rownull=c(rownull,l)
    }
  }
  compromis2=compromis
  if(length(colomnnull)>0)
  {
    compromis2=compromis[,-colomnnull]
    warning("No block has a 1 for the variable(s): ", paste(colnames(compromis)[colomnnull], collapse=","))
  }
  if(length(rownull)>0)
  {
    compromis2=compromis2[-rownull,]
    warning("No block has a 1 for the product(s): ", paste(rownames(compromis)[rownull], collapse=","))
  }

  # ---- correspondence analysis of the compromise ----
  # NOTE(review): CA/plot.CA come from elsewhere (presumably FactoMineR) —
  # confirm the package is imported where this function is packaged.
  e=CA(compromis2,graph=FALSE)
  pouriner=round(e$eig[,2],2)       # percentage of inertia per dimension
  eigenvalues=round(e$eig[,1],4)
  if (Graph==TRUE)
  {
    dev.new()
    barplot(eigenvalues, col="blue", main="Eigenvalues")
    dev.new()
    print(plot.CA(e,title=paste("CATATIS")))
  }
  names(u)=names(s)=rownames(S)=colnames(S)= names(dw)=names(muk)=names(facteurech)=NameBlocks
  if(Graph_weights==TRUE)
  {
    dev.new()
    barplot(u)
    title(paste("Weights"))
  }
  homogeneity=round(hom,3)*100

  # ---- assemble the "catatis" result object ----
  res=list(S=round(S,2),compromise=round(C,2),weights=round(u,2),lambda=round(lambda,2),overall_error=round(Q,2),
           error_by_sub=round(dw,2), error_by_prod=round(obj,2), s_with_compromise=round(s,2), homogeneity=homogeneity, CA=e, eigenvalues=eigenvalues,
           inertia=pouriner, scalefactors=round(facteurech,2), nb_1=muk**2, param=list(n=n, nblo=nblo, nvar=nvar))
  class(res)="catatis"
  return(res)
}
information.gain <- function(formula, data, unit = "log") {
  # Information gain H(class) + H(attr) - H(class, attr) of each attribute
  # with respect to the class variable in `formula`; `unit` sets the entropy
  # logarithm base.
  information.gain.body(formula = formula, data = data,
                        type = "infogain", unit = unit)
}
gain.ratio <- function(formula, data, unit = "log") {
  # Gain ratio: information gain of each attribute divided by the attribute's
  # own entropy; `unit` sets the entropy logarithm base.
  information.gain.body(formula = formula, data = data,
                        type = "gainratio", unit = unit)
}
symmetrical.uncertainty <- function(formula, data, unit = "log") {
  # Symmetrical uncertainty: 2 * information gain divided by the sum of the
  # class and attribute entropies; `unit` sets the entropy logarithm base.
  information.gain.body(formula = formula, data = data,
                        type = "symuncert", unit = unit)
}
information.gain.body <- function(formula, data, type = c("infogain", "gainratio", "symuncert"), unit) {
  # Shared worker for information.gain(), gain.ratio(), and
  # symmetrical.uncertainty(): builds the model frame (class variable in
  # column 1), discretizes all attributes, and scores each attribute by the
  # requested entropy-based measure.
  type <- match.arg(type)
  frame <- get.data.frame.from.formula(formula, data)
  frame <- discretize.all(formula, frame)
  entropies <- sapply(frame, entropyHelper, unit)
  h_class <- entropies[1]
  h_attrs <- entropies[-1]
  # Joint entropy H(class, attribute) for every attribute column.
  h_joint <- sapply(frame[-1], function(col) {
    entropyHelper(data.frame(cbind(frame[[1]], col)), unit)
  })
  # Mutual information I(class; attr) = H(class) + H(attr) - H(class, attr).
  importance <- h_class + h_attrs - h_joint
  if (type == "gainratio") {
    # Guard against zero-entropy (constant) attributes.
    importance <- ifelse(h_attrs == 0, 0, importance / h_attrs)
  } else if (type == "symuncert") {
    importance <- 2 * importance / (h_attrs + h_class)
  }
  return(data.frame(attr_importance = importance,
                    row.names = dimnames(frame)[[2]][-1]))
}
getRegions <- function(x) {
  # Region names of `x`, read from subdimension 1.1 of the first (spatial)
  # dimension via getItems().
  getItems(x, dim = 1.1)
}
"getRegions<-" <- function(x, value) {
  # Deprecated replacement function kept for backward compatibility: flags
  # the deprecation (pointing users at getItems) and delegates to the cell
  # setter.
  .Deprecated("getItems")
  getCells(x) <- value
  x
}
prior.form <-
function(pri.lo = c(0, 0, 0, 0, 0, 15, 0, 0), pri.hi = c(0.15,
    1, 1, 0.25, 15, 55, 0.1, 1.25), theta.dim = 8)
{
    # Draw 1000 * theta.dim samples from independent uniform priors.
    #
    # pri.lo, pri.hi: lower/upper bounds of each parameter's uniform prior
    #                 (must be at least theta.dim long).
    # theta.dim:      number of parameters (columns of the result).
    #
    # Returns a (1000 * theta.dim) x theta.dim matrix of prior draws.
    #
    # Generalized from a hard-coded 8-column cbind(): theta.dim values other
    # than 8 are now honored. Column i is still drawn as
    # runif(B0, pri.lo[i], pri.hi[i]) in ascending i order, so results under
    # the default theta.dim = 8 are unchanged for a given RNG seed.
    stopifnot(length(pri.lo) >= theta.dim, length(pri.hi) >= theta.dim)
    B0 <- 1000 * theta.dim
    H.k <- vapply(seq_len(theta.dim),
                  function(i) runif(B0, pri.lo[i], pri.hi[i]),
                  numeric(B0))
    return(H.k)
}
"sa_gdp_elec" |
studyStrap.predict <- function(ss.obj, X){
  # Predictions from a fitted studyStrap object on new data X. The returned
  # matrix has one row per row of X and columns: "Avg" (simple mean over all
  # single-study learners), the stacked prediction, and -- when a similarity
  # matrix is present -- one column per similarity measure.
  n.ssl <- length(ss.obj$modelInfo$SSL)
  n.straps <- ss.obj$modelInfo$numStraps
  has.sim <- is.matrix(ss.obj$simMat)
  stack.lab <- paste0(ss.obj$modelInfo$stack.type, "_Stacking")
  if(has.sim){
    preds.mat <- matrix(nrow = nrow(X), ncol = ncol(ss.obj$simMat) + 2)
    colnames(preds.mat) <- c("Avg", stack.lab, colnames(ss.obj$simMat))
  }else{
    preds.mat <- matrix(nrow = nrow(X), ncol = 2)
    colnames(preds.mat) <- c("Avg", stack.lab)
  }
  # One raw-prediction column per (SSL, strap) pair, SSL-major order.
  raw.preds <- matrix(nrow = nrow(X), ncol = n.straps * n.ssl)
  col.idx <- 1
  for(ssl.idx in seq_len(n.ssl)){
    for(strap.idx in seq_len(n.straps)){
      raw.preds[, col.idx] <- predict(ss.obj$models[[ssl.idx]][[strap.idx]], X)
      col.idx <- col.idx + 1
    }
  }
  if(has.sim && n.ssl > 1){
    # Replicate the similarity weights once per SSL (matching raw.preds'
    # column layout), then renormalize each column to sum to 1.
    ss.obj$simMat <- do.call(rbind, replicate(n.ssl, ss.obj$simMat, simplify = FALSE))
    ss.obj$simMat <- prop.table(ss.obj$simMat, 2)
  }
  preds.mat[, 1] <- rowMeans(raw.preds)
  preds.mat[, 2] <- cbind(1, raw.preds) %*% ss.obj$stack.coefs
  if(has.sim){
    preds.mat[, 3:ncol(preds.mat)] <- raw.preds %*% ss.obj$simMat
  }
  return(preds.mat)
}
grid.torus <- function(d = 2 , grid.size = 100){
  # Full factorial grid on the d-dimensional torus [0, 2*pi]^d.
  #
  # d:         dimension of the torus.
  # grid.size: number of grid points per axis (both endpoints included).
  #
  # Returns a (grid.size^d) x d matrix; column i repeats each axis value
  # grid.size^(i-1) times (recycled over the rows), so the rows enumerate
  # all d-tuples of axis values.
  grid <- matrix(0, ncol = d, nrow = grid.size^d)
  # `length.out` spelled out in full: the original relied on partial
  # matching of the argument name `length`, which is fragile.
  Axis <- seq(0, 2 * pi, length.out = grid.size)
  for (i in seq_len(d)){
    grid[, i] <- rep(Axis, each = grid.size^(i - 1))
  }
  return(grid)
}
fdrtool = function(x,
  statistic=c("normal", "correlation", "pvalue"),
  plot=TRUE, color.figure=TRUE, verbose=TRUE,
  cutoff.method=c("fndr", "pct0", "locfdr"),
  pct0=0.75)
{
  # Estimate tail-area false discovery rates (Fdr / q-values) and local,
  # density-based false discovery rates (fdr) from a vector of test
  # statistics, fitting the null model on a censored subset of the data.
  #
  # x:             vector of test statistics.
  # statistic:     type of statistic in x: z-scores, correlations, or p-values.
  # plot:          draw diagnostic plots (densities, CDFs, Fdr/fdr curves).
  # color.figure:  use colored curves in the plots.
  # verbose:       print progress messages for each step.
  # cutoff.method: how the censoring cutoff x0 is chosen (see Step 1).
  # pct0:          quantile defining x0 when cutoff.method == "pct0".
  #
  # Returns a list: pval, qval (tail-area Fdr at each x), lfdr (local fdr),
  # the statistic type, and the fitted null-model parameters (param).
  statistic = match.arg(statistic)
  cutoff.method = match.arg(cutoff.method)
  if ( is.vector(x) == FALSE )
    stop("input test statistics must be given as a vector!")
  if ( length(x) < 200 ) warning("There may be too few input test statistics for reliable FDR calculations!")
  if (statistic=="pvalue")
  {
    if (max(x) > 1 | min(x) < 0)
      stop("input p-values must all be in the range 0 to 1!")
  }
  # ---- Step 1: censoring cutoff x0 for null-model estimation ----
  if(verbose) cat("Step 1... determine cutoff point\n")
  if (cutoff.method=="pct0")
  {
    # Fixed quantile of the statistics (upper for p-values, since small
    # p-values are the "significant" tail).
    if(statistic=="pvalue") x0 = quantile(x, probs=1-pct0)
    else x0 = quantile(abs(x), probs=pct0)
  }
  else if ( cutoff.method=="locfdr" & (statistic=="normal" | statistic=="correlation") )
  {
    # locfdr-style heuristic: work on the (Fisher z-transformed, for
    # correlations) scale, take a robust sd estimate from the IQR, and set
    # the cutoff at b * sdhat with b depending on sample size.
    if(statistic=="normal") z = x
    if(statistic=="correlation") z = atanh(x)
    iqr = as.double(diff(quantile(z, probs=c(.25, .75))))
    sdhat = iqr/(2*qnorm(.75))
    N = length(z)
    b = ifelse(N > 500000, 1, 4.3 * exp(-0.26*log(N,10)) )
    z0 = b*sdhat
    if(statistic=="normal") x0 = z0
    if(statistic=="correlation") x0 = tanh(z0)
  }
  else
  {
    # Default: false-nondiscovery-rate based cutoff.
    if(cutoff.method=="locfdr")
      warning("cutoff.method=\"locfdr\" only available for normal and correlation statistic.")
    x0 = fndr.cutoff(x, statistic)
  }
  # ---- Step 2: fit the null model on the censored data ----
  if(verbose) cat("Step 2... estimate parameters of null distribution and eta0\n")
  cf.out <- censored.fit(x=x, cutoff=x0, statistic=statistic)
  # scale.param: sd (normal) or kappa (correlation); not needed for p-values.
  if (statistic=="pvalue")
    scale.param = NULL
  else
    scale.param <- cf.out[1,5]
  eta0 = cf.out[1,3]   # estimated proportion of null statistics
  # ---- Step 3: p-values and empirical PDF/CDF via Grenander estimator ----
  if(verbose) cat("Step 3... compute p-values and estimate empirical PDF/CDF\n")
  nm = get.nullmodel(statistic)
  pval = nm$get.pval(x, scale.param)
  ee <- ecdf.pval(pval, eta0=eta0)
  g.pval <- grenander(ee)
  # Step-function density and piecewise-linear CDF of the p-values.
  f.pval = approxfun( g.pval$x.knots, g.pval$f.knots, method="constant", rule=2)
  f0.pval = function(x) return( ifelse(x > 1 | x < 0, 0, rep(1, length(x))) )
  F.pval = approxfun( g.pval$x.knots, g.pval$F.knots, method="linear",
                     yleft=0, yright=g.pval$F.knots[length(g.pval$F.knots)])
  F0.pval = function(x) return( ifelse(x > 1, 1, ifelse(x < 0, 0, x )) )
  # Local fdr and tail-area Fdr as functions of a p-value, capped at 1.
  fdr.pval = function(p)
  {
    # p-values numerically equal to machine epsilon are treated as 0.
    p[ p == .Machine$double.eps ] = 0
    pmin( eta0 / f.pval(p), 1)
  }
  Fdr.pval = function(p) pmin( eta0*p / F.pval(p), 1)
  # ---- Step 4: q-values and local fdr at the observed statistics ----
  if(verbose) cat("Step 4... compute q-values and local fdr\n")
  qval <- Fdr.pval(pval)
  lfdr <- fdr.pval(pval)
  result = list(pval=pval, qval=qval, lfdr=lfdr,
                statistic=statistic, param=cf.out)
  if (plot)
  {
    # ---- Step 5: diagnostic plots on the original statistic scale ----
    if(verbose) cat("Step 5... prepare for plotting\n")
    # zeta is the plotting coordinate: 1 - pval for p-values, |statistic|
    # otherwise (hence the doubled one-sided null densities below).
    if(statistic=="pvalue")
    {
      f0 <- function(zeta) return( nm$f0(zeta, scale.param) )
      F0 <- function(zeta) return( nm$F0(zeta, scale.param) )
      get.pval <- function(zeta) return( nm$get.pval(1-zeta, scale.param) )
      x0 = 1-x0
    }
    else
    {
      f0 <- function(zeta) return( 2*nm$f0(zeta, scale.param) )
      F0 <- function(zeta) return( 2*nm$F0(zeta, scale.param)-1 )
      get.pval <- function(zeta) return( nm$get.pval(zeta, scale.param) )
    }
    # Mixture decomposition: observed = eta0 * null + (1 - eta0) * alternative.
    fdr = function(zeta) fdr.pval(get.pval(zeta))
    Fdr = function(zeta) Fdr.pval(get.pval(zeta))
    F = function(zeta) 1-eta0*get.pval(zeta)/Fdr(zeta)
    FA = function(zeta) (F(zeta)-eta0*F0(zeta))/(1-eta0)
    f = function(zeta) eta0*(f0(zeta))/fdr(zeta)
    fA = function(zeta) (f(zeta)-eta0*f0(zeta))/(1-eta0)
    ax = abs(x)
    if (statistic=="pvalue") ax = 1-ax
    xxx = seq(0, max(ax), length.out=500)
    ll = pvt.plotlabels(statistic, scale.param, eta0)
    par(mfrow=c(3,1))
    if (color.figure)
      cols = c(2,4)
    else
      cols = c(1,1)
    # Row 1: histogram of |statistics| with null/alternative densities.
    # (`bre` partially matches hist()'s `breaks` argument.)
    hist(ax, freq=FALSE, bre=50,
         main=ll$main, xlab=ll$xlab, cex.main=1.8)
    lines(xxx, eta0*f0(xxx), col=cols[1], lwd=2, lty=3 )
    lines(xxx, (1-eta0)*fA(xxx), col=cols[2], lwd=2 )
    if (statistic=="pvalue")
      pos1 = "topleft" else pos1="topright"
    legend(pos1,
           c("Mixture", "Null Component", "Alternative Component"),
           lwd=c(1, 2, 2), col=c(1,cols), lty=c(1,3,1), bty="n", cex=1.5)
    # Row 2: mixture CDF with the null and alternative components.
    plot(xxx, F(xxx), lwd=1, type="l", ylim=c(0,1),
         main="Density (first row) and Distribution Function (second row)",
         xlab=ll$xlab, ylab="CDF", cex.main=1.5)
    lines(xxx, eta0*F0(xxx), col=cols[1], lwd=2, lty=3)
    lines(xxx, (1-eta0)*FA(xxx), col=cols[2], lwd=2)
    # Row 3: tail-area Fdr (dotted) and local fdr (solid).
    plot(xxx, Fdr(xxx), type="l", lwd=2, ylim=c(0,1),
         main="(Local) False Discovery Rate", ylab="Fdr and fdr",
         xlab=ll$xlab, lty=3, cex.main=1.5)
    lines(xxx, fdr(xxx), lwd=2)
    if (eta0 > 0.98)
      pos2 = "bottomleft" else pos2="topright"
    legend(pos2,
           c("fdr (density-based)", "Fdr (tail area-based)"),
           lwd=c(2,2), lty=c(1,3), bty="n", cex=1.5)
    par(mfrow=c(1,1))
    rm(ax)
  }
  if(verbose) cat("\n")
  return(result)
}
pvt.plotlabels <- function(statistic, scale.param, eta0)
{
  # Build the main plot title and x-axis label used by fdrtool()'s
  # diagnostic plots, depending on the type of input statistic.
  #
  # statistic:   one of "pvalue", "studentt", "normal", "correlation".
  # scale.param: the fitted scale parameter (df, sd, or kappa; unused for
  #              p-values).
  # eta0:        estimated proportion of null statistics, shown in the title.
  #
  # Returns list(main = <title string>, xlab = <axis label string>).
  eta0.r <- round(eta0, 4)
  if (statistic=="pvalue")
  {
    main <- sprintf("Type of Statistic: p-Value (eta0 = %s)", eta0.r)
    xlab <- "1-pval"
  }
  if (statistic=="studentt")
  {
    main <- sprintf("Type of Statistic: t-Score (df = %s, eta0 = %s)",
                    round(scale.param, 3), eta0.r)
    xlab <- "abs(t)"
  }
  if (statistic=="normal")
  {
    main <- sprintf("Type of Statistic: z-Score (sd = %s, eta0 = %s)",
                    round(scale.param, 3), eta0.r)
    xlab <- "abs(z)"
  }
  if (statistic=="correlation")
  {
    main <- sprintf("Type of Statistic: Correlation (kappa = %s, eta0 = %s)",
                    round(scale.param, 1), eta0.r)
    xlab <- "abs(r)"
  }
  return(list(main=main, xlab=xlab))
}
compute_margin_coordinates <- function(dim, LC.coordinates){
  # Interior (margin-free) coordinate ranges of a field with dimensions
  # `dim` = (time, space...), after removing the margins implied by the
  # light-cone template `LC.coordinates`: past/future horizons trim the time
  # axis and the spatial cutoff trims every space axis. Returns a list with
  # `time` (first/last usable time index), `space` (one range per spatial
  # axis), and `dim` (lengths of the trimmed axes).
  controls <- LC_coordinates2control_settings(LC.coordinates)
  horizon <- c(controls$horizon$PLC, controls$horizon$FLC)
  if (length(horizon) == 1){
    # No future horizon supplied: only the past light cone trims time.
    horizon <- c(horizon, 0)
  }
  n.time <- dim[1]
  space.sizes <- as.list(dim[-1])
  out <- list(time = c(horizon[1] + 1, n.time - horizon[2]))
  out$space <- list()
  out$dim <- c(diff(out$time) + 1)
  cutoff <- controls$space.cutoff
  for (axis in seq_along(space.sizes)) {
    out$space[[axis]] <- c(cutoff + 1, space.sizes[[axis]] - cutoff)
    out$dim <- c(out$dim, diff(out$space[[axis]]) + 1)
  }
  return(out)
}
siarproportionbysourceplot <-
function (siardata, siarversion = 0,
          probs=c(95, 75, 50),
          xlabels = NULL,
          grp = NULL,
          type = "boxes",
          clr = gray((9:1)/10),
          scl = 1,
          xspc = 0,
          prn = FALSE,
          leg = FALSE)
{
  # Plot highest-density-region (HDR) summaries of the posterior proportion
  # of one source across all groups of a SIAR model run, either as shaded
  # boxes or as nested line segments (one per probability level).
  #
  # siardata:    SIAR data/output object (must contain MCMC output and > 1 group).
  # siarversion: if > 0, version string stamped on the plot margin.
  # probs:       HDR probability levels (in percent), widest first.
  # xlabels:     optional x-axis labels, one per group.
  # grp:         source number to plot; if NULL the user is prompted.
  # type:        "boxes" (polygons) or "lines" (nested segments).
  # clr:         fill colors per probability level (boxes style).
  # scl:         scale factor for box widths / line widths.
  # xspc:        extra horizontal padding around the group axis.
  # prn:         if TRUE, print the HDR bounds to the console.
  # leg:         if TRUE, add a legend (lines style only).

  # ---- preconditions: data loaded, model run, more than one group ----
  if (siardata$SHOULDRUN == FALSE && siardata$GRAPHSONLY ==
      FALSE) {
    cat("You must load in some data first (via option 1) in order to use \n")
    cat("this feature of the program. \n")
    cat("Press <Enter> to continue")
    readline()
    invisible()
    return(NULL)
  }
  if (length(siardata$output) == 0) {
    cat("No output found - check that you have run the SIAR model. \n \n")
    return(NULL)
  }
  if (siardata$numgroups < 2) {
    cat("Number of groups = 1 - cannot run this option \n")
    cat("Press <Enter> to continue")
    readline()
    invisible()
    return(NULL)
  }
  cat("Plot of proportions by source \n")
  cat("This requires more than one group in the output file. \n")
  cat("Producing plot..... \n \n")
  if (length(siardata$sources) > 0) {
    sourcenames <- as.character(siardata$sources[, 1])
  }
  else {
  }
  # ---- choose the source: interactively, or from the grp argument ----
  if(is.null(grp)){
    cat("Enter the source number you wish to plot \n")
    cat("The choices are:\n")
    # NOTE(review): `title` is assigned but never used below — looks like a
    # leftover from a select.list()-style call.
    title <- "The available options are:"
    choose2 <- menu(sourcenames)
  }
  else{
    choose2 <- grp
  }
  groupseq <- seq(1, siardata$numgroups, by = 1)
  # Columns of the MCMC output for the chosen source, one per group: the
  # output is laid out in strides of (numsources + numiso) columns per group.
  usepars <- siardata$output[, seq(choose2, ncol(siardata$output),
                                   by = siardata$numsources + siardata$numiso)]
  newgraphwindow()
  # ---- empty plot frame (custom title when siardata$TITLE is set) ----
  if (siardata$TITLE != "SIAR data") {
    plot(1, 1, xlab = "Group", ylab = "Proportion", main = paste(siardata$TITLE,
                                                                 " by source: ", sourcenames[choose2], sep = ""),
         xlim = c(min(groupseq)-xspc, max(groupseq)+xspc),
         ylim = c(0, 1), type = "n", xaxt="n")
    if(is.null(xlabels)){
      # NOTE(review): labels(groupseq) is passed positionally as the tick
      # labels (yielding "1", "2", ...); likely intended labels = groupseq —
      # confirm before changing.
      axis(side=1, at=min(groupseq):max(groupseq), labels(groupseq))
    }
    else{
      axis(side=1, at=min(groupseq):max(groupseq), labels=(xlabels))
    }
  }
  else {
    plot(1, 1,xlab = "Group", ylab = "Proportion", main = paste("Proportions by source: ",
                                                                sourcenames[choose2], sep = ""), xlim = c(min(groupseq)-xspc,
                                                                                                          max(groupseq)+xspc), ylim = c(0, 1), type = "n", xaxt="n")
    if(is.null(xlabels)){
      # NOTE(review): same positional labels(groupseq) call as above.
      axis(side=1, at=min(groupseq):max(groupseq), labels(groupseq))
    }
    else{
      axis(side=1, at=min(groupseq):max(groupseq), labels=(xlabels))
    }
  }
  if (siarversion > 0)
    mtext(paste("siar v", siarversion), side = 1, line = 4,
          adj = 1, cex = 0.6)
  clrs <- rep(clr, 5)
  # ---- one HDR summary per group ----
  for (j in 1:ncol(usepars)) {
    # HDR of the posterior proportions for group j at each level in probs.
    temp <- hdr(usepars[, j], probs, h = bw.nrd0(usepars[,
                                                         j]))$hdr
    line_widths <- seq(2,20,by=4)*scl
    bwd <- c(0.1,0.15, 0.2, 0.25, 0.3)*scl
    if(prn==TRUE){
      cat(paste("Probability values for Group",j,"\n"))
    }
    for (k in 1:length(probs)){
      temp2 <- temp[k, ]
      # HDR bounds are clamped to [0, 1] before drawing.
      if(type=="boxes"){
        polygon(c(groupseq[j]-bwd[k], groupseq[j]-bwd[k],groupseq[j]+bwd[k], groupseq[j]+bwd[k]),
                c(
                  max(min(temp2[!is.na(temp2)]),0),
                  min(max(temp2[!is.na(temp2)]),1),
                  min(max(temp2[!is.na(temp2)]),1),
                  max(min(temp2[!is.na(temp2)]),0)),
                col = clrs[k])
      }
      if(type=="lines"){
        lines(c(groupseq[j], groupseq[j]),
              c(max(min(temp2[!is.na(temp2)]),0),
                min(max(temp2[!is.na(temp2)]),1)), lwd = line_widths[k], lend = 2)
      }
      if(prn==TRUE){
        cat(paste("\t", probs[k],
                  "% lower =", format(max(min(temp2[!is.na(temp2)]),0),digits=2,scientific=FALSE),
                  "upper =", format(min(max(temp2[!is.na(temp2)]),1),digits=2,scientific=FALSE),"\n"))
      }
    }
  }
  # ---- optional legend (lines style only) ----
  if(leg==TRUE){
    if(type=="lines"){
      legnames <- character(length=length(probs))
      for(i in 1:length(probs)){
        legnames[i] <- paste(probs[i],"%",sep="")
      }
      legend(mean(c(min(groupseq), max(groupseq))), 1.02, legend = legnames,
             lwd = c(2, 6, 10), ncol = length(probs), xjust = 0.5, text.width = strwidth(legnames)/2,
             bty = "n")
    }
    if(type=="boxes"){
      print("Legends not yet supported for box style graph. Use type=lines with leg=TRUE instead.")
    }
  }
  cat("Please maximise this graph before saving or printing. \n")
  cat("Press <Enter> to continue")
  readline()
  invisible()
}
# NOTE(review): the following lines are extraneous dataset-site boilerplate
# that was pasted into this file; commented out so the file parses as R.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.