"summary.longiPenal"<-
function(object,level=.95, len=6, d=2, lab=c("coef","hr"), ...)
{
x <- object
if (!inherits(x, "longiPenal"))
stop("Object must be of class 'longiPenal'")
z<-abs(qnorm((1-level)/2))
co <- x$coef
se <- sqrt(diag(x$varH))
or <- exp(co)
li <- exp(co-z * se)
ls <- exp(co+z * se)
p <- signif(1 - pchisq((co/se)^2, 1), 5)
rl <- cbind(co, se, p)
dimnames(rl) <- list(names(co), c(lab[1], "SE","p"))
ddl <- dim(rl)
al <- formatC(rl, d, len,format="f")
dim(al) <- ddl
if(length(ddl) == 1){
ddl<-c(1,ddl)
dim(al)<-ddl
labl<-" "
}
else
labl <- dimnames(rl)[[1]]
mxl <- max(nchar(labl)) + 1
al[which(al[,3]==formatC(0, d, len,format="f")),3]<-formatC("<1e-16", d, len,format="f")
if(x$TwoPart==0){
cat("Longitudinal outcome:\n")
cat("------------- \n")
cat(paste(rep(" ",mxl),collapse=""),paste(" ",dimnames(rl)[[2]]),"\n")
for(i in (x$nvar[1]+1):ddl[1])
{
labl[i] <- paste(c(rep(" ", mxl - nchar(labl[i])), labl[i]),collapse = "")
cat(labl[i], al[i, 1:3]," \n")
}
r <- cbind(or, li, ls)
dimnames(r) <- list(names(co), c(lab[2], paste(level*100,"%",sep=""), "C.I."))
n<-r
dd <- dim(n)
n[n > 999.99] <- Inf
a <- formatC(n, d, len,format="f")
dim(a) <- dd
if(length(dd) == 1){
dd<-c(1,dd)
dim(a)<-dd
lab<-" "
}
else
lab <- dimnames(n)[[1]]
mx <- max(nchar(lab)) + 1
}
else{
cat("Binary outcome:\n")
cat("------------- \n")
cat(paste(rep(" ",mxl),collapse=""),paste(" ",dimnames(rl)[[2]]),"\n")
for(i in (x$nvar[1]+x$nvarY+1):(ddl[1]))
{
labl[i] <- paste(c(rep(" ", mxl - nchar(labl[i])), labl[i]),collapse = "")
cat(labl[i], al[i, 1:3]," \n")
}
cat("\n")
cat("Continuous outcome:\n")
cat("------------- \n")
cat(paste(rep(" ",mxl),collapse=""),paste(" ",dimnames(rl)[[2]]),"\n")
for(i in (x$nvar[1]+1):(ddl[1]-x$nvarB))
{
labl[i] <- paste(c(rep(" ", mxl - nchar(labl[i])), labl[i]),collapse = "")
cat(labl[i], al[i, 1:3]," \n")
}
r <- cbind(or, li, ls)
dimnames(r) <- list(names(co), c(lab[2], paste(level*100,"%",sep=""), "C.I."))
n<-r
dd <- dim(n)
n[n > 999.99] <- Inf
a <- formatC(n, d, len,format="f")
dim(a) <- dd
if(length(dd) == 1){
dd<-c(1,dd)
dim(a)<-dd
lab<-" "
}
else
lab <- dimnames(n)[[1]]
mx <- max(nchar(lab)) + 1
}
cat("\n")
cat("Terminal event:\n")
cat("--------------- \n")
cat(paste(rep(" ",mx),collapse=""),paste(" ",dimnames(n)[[2]]),"\n")
for(i in 1:x$nvar[1])
{
lab[i] <- paste(c(rep(" ", mx - nchar(lab[i])), lab[i]),collapse = "")
cat(lab[i], a[i, 1], "(", a[i, 2], "-", a[i, 3], ") \n")
}
}
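# Worked sketch of the confidence-interval and p-value arithmetic used in
# summary.longiPenal above, with purely hypothetical coefficient values
# (not taken from any fitted model):
co <- c(age = 0.03, treat = -0.25)              # hypothetical coefficients
se <- c(age = 0.01, treat = 0.10)               # hypothetical standard errors
z  <- abs(qnorm((1 - 0.95) / 2))                # about 1.96 for level = 0.95
cbind(hr    = exp(co),
      lower = exp(co - z * se),
      upper = exp(co + z * se),
      p     = signif(1 - pchisq((co / se)^2, 1), 5))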
test_that("bpc returns a bpc object the bt-U-ordereffect model", {
skip_on_cran()
test_btU <- load_testdata('test_btUordereffect')
m1 <- bpc(
data = test_btU,
player0 = 'player0',
player1 = 'player1',
result_column = 'y',
cluster = 'cluster',
z_player1 = 'z1',
model_type = 'bt-U-ordereffect',
solve_ties = 'random',
win_score = 'higher',
iter = 2000,
warmup = 600,
    show_chain_messages = FALSE,
seed = 8484
)
expect_s3_class(m1, 'bpc')
expect_no_error(summary(m1))
})
library(survival)
library(BART)
data(lung)
N <- length(lung$status)
table(lung$ph.karno, lung$pat.karno)
h <- which(is.na(lung$ph.karno))
lung$ph.karno[h] <- lung$pat.karno[h]
times <- lung$time
delta <- lung$status-1
times <- ceiling(times/30)
table(times)
table(delta)
x.train <- cbind(lung$age, lung$sex, lung$ph.karno)
dimnames(x.train)[[2]] <- c('age(yr)', 'M(1):F(2)', 'ph.karno(0:100:10)')
post <- mc.surv.bart(x.train=x.train, times=times, delta=delta, x.test=x.train,
mc.cores=8, seed=99)
K <- post$K
h <- seq(1, N*K, floor(N/10)*K)
for(i in h) {
post.mcmc <- post$yhat.test[ , (i-1)+1:K]
z <- gewekediag(post.mcmc)$z
y <- max(c(4, abs(z)))
if(i==1) plot(post$times, z, ylim=c(-y, y), type='l',
xlab='t', ylab='z')
else lines(post$times, z, type='l')
}
lines(post$times, rep(-1.96, K), type='l', lty=2)
lines(post$times, rep( 1.96, K), type='l', lty=2)
test_that("convertMassUnitsSI", {
expect_equal(convertMassUnitsSI(2, 'mg', 'kg'), 2e-6)
expect_equal(convertMassUnitsSI(2, 'mg', 'ug'), 2e3)
expect_equal(convertMassUnitsSI(2, 'g', 'ug'), 2e6)
})
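# The tests above assume a mass-unit converter based on SI prefixes. A minimal
# sketch consistent with those expectations (the real convertMassUnitsSI() may
# be implemented differently) could look like this:
convertMassUnitsSI_sketch <- function(x, from, to) {
  grams <- c(ug = 1e-6, mg = 1e-3, g = 1, kg = 1e3)  # grams per unit
  x * grams[[from]] / grams[[to]]
}
convertMassUnitsSI_sketch(2, "mg", "kg")  # 2e-06, matching the first test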
reconstruct_attrs <- poorman:::reconstruct_attrs
reconstruct_attrs.data.frame <- poorman:::reconstruct_attrs.data.frame
expect_identical(
reconstruct_attrs(data.frame(), data.frame()),
data.frame(),
info = "classes are restored for data.frames"
)
expect_identical(
reconstruct_attrs(data.frame(), structure(data.frame(), class = c("foo", "data.frame"))),
structure(data.frame(), class = c("foo", "data.frame")),
info = "classes are restored for additional classes"
)
expect_identical(
reconstruct_attrs(
structure(list(), .Names = character(0), row.names = c(NA, -1L), class = "data.frame"),
structure(list(), class = "data.frame", row.names = integer(0), .Names = character(0), foo = 1)
),
structure(list(), class = "data.frame", row.names = c(NA, -1L), .Names = character(0), foo = 1),
info = "attributes of `template` are kept"
)
data <- data.frame(a = c(1, 2))
template <- data.frame()
x <- reconstruct_attrs(data, template)
expect <- data.frame(a = c(1, 2))
expect_identical(x, expect, info = "compact row names are retained")
expect_identical(
.row_names_info(x, type = 0L),
.row_names_info(expect, type = 0L),
info = "Explicitly ensure internal row name structure is identical"
)
df <- structure(list(x = 1), class = c("tmp", "data.frame"), row.names = 1L, foo = "bar")
out <- poorman:::remove_attributes(df)
expect_identical(
out,
data.frame(x = 1),
info = "remove_attributes() strips attributes before dispatch"
)
df <- structure(list(x = 1), class = c("tmp", "data.frame"), row.names = "a", foo = "bar")
out <- poorman:::remove_attributes(df)
expect_identical(
out,
data.frame(x = 1, row.names = "a"),
info = "remove_attributes() strips attributes before dispatch"
)
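# A minimal sketch of the behaviour the tests above exercise (not poorman's
# actual implementation): attributes are taken from `template`, while the
# column names and the row-name encoding (including compact row names) are
# kept from `data`.
reconstruct_attrs_sketch <- function(data, template) {
  attrs <- attributes(template)
  attrs$names <- names(data)
  attrs$row.names <- .row_names_info(data, type = 0L)  # keep data's row-name encoding
  attributes(data) <- attrs
  data
}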
dOFV.vs.cov <-
function(xpdb1,
xpdb2,
covariates=xvardef("covariates",xpdb1),
ylb=expression(paste(Delta, OFV[i])),
main="Default",
smooth=TRUE,
abline=c(0,0),
ablcol="grey",
abllwd=2,
abllty="dashed",
max.plots.per.page=1,
...)
{
if(is.null(xpdb2)){
cat("Comparison database needed for this plot!")
return(NULL)
}
iv1 <- [email protected]
iv2 <- [email protected]
if(!all(iv1$ID == iv2$ID)){
cat("All ID labels for both databases must match\n")
return(NULL)
}
comp.frame <- data.frame(id=iv1$ID,obj1=iv1$OBJ,obj2=iv2$OBJ)
comp.frame$d.obj <- comp.frame$obj2 - comp.frame$obj1
xpdb1@Data[!duplicated(xpdb1@Data[,xvardef("id",xpdb1)]),"d.obj"] <- comp.frame$d.obj
xpdb1@Prefs@Labels$d.obj <- "Change in individual OFV"
cat.covs <- c()
cont.covs <- c()
for (i in covariates) {
if(is.factor(xpdb1@Data[[i]])){
cat.covs <- c(cat.covs,i)
}else{
cont.covs <- c(cont.covs,i)
}
}
cov.list <- list(cont.covs=cont.covs,cat.covs=cat.covs)
if(is.null(cat.covs)) cov.list$cat.covs <- NULL
if(is.null(cont.covs)) cov.list$cont.covs <- NULL
plotList <- vector("list",length(cov.list))
plot.num <- 0
for (j in 1:length(cov.list)) {
xplot <- xpose.plot.default(cov.list[[j]],
"d.obj",
xpdb1,
onlyfirst=T,
inclZeroWRES=T,
smooth=smooth,
abline=c(0,0),
ablcol=ablcol,
abllwd=abllwd,
abllty=abllty,
ylb=ylb,
main=NULL,
pass.plot.list=TRUE,
...)
plot.num <- plot.num+1
plotList[[plot.num]] <- xplot
}
default.plot.title <- paste("Individual change in OFV vs. Covariate(s)\n",
"(Run",xpdb2@Runno," - Run",xpdb1@Runno,")",sep="" )
plotTitle <- xpose.multiple.plot.title(object=xpdb1,
plot.text = default.plot.title,
main=main,
no.runno=T,
...)
obj <- xpose.multiple.plot(plotList,plotTitle,max.plots.per.page=max.plots.per.page,
...)
return(obj)
}
knitr::opts_chunk$set(fig.width=6, fig.height=4, warning=FALSE)
library(VWPre)
data(VWdat)
library(VWPre)
load("aligndat.rda")
check_msg_time(data = aligndat, Msg = "VowelOnset")
check_all_msgs(data = aligndat)
aligned1 <- align_msg(data = aligndat, Msg = "VowelOnset")
check_msg_time(data = aligned1, Msg = "VowelOnset")
aligned2 <- create_time_series(data = aligned1, Adjust = 0)
check_time_series(data = aligned2)
check_msg_time(data = aligned2, Msg = "VowelOnset")
library(rpf)
lmp.item<-rpf.lmp(q=2)
par<-c(.69,.71,-.5,-8.48,.52,-3.32)
theta<-seq(-3,3,.1)
P<-rpf.prob(lmp.item, par, theta)
plot(theta, P[2,], type="l", ylim=c(0,1), xlab="Theta", ylab="P(Theta)")
round(rpf.dLL(lmp.item, par, theta[1], weight=c(5,7)),2)
rpf.dTheta(lmp.item, par, where=-.5, dir=1)
library(numDeriv)
dTheta.wrap<-function(theta, spec, par, cat=1){
rpf.prob(spec, par, theta)[cat]
}
grad(dTheta.wrap, -.5, spec=lmp.item, par=par)
hessian(dTheta.wrap, -.5, spec=lmp.item, par=par)
cchs <- function(formula, data=parent.frame(), inSubcohort, stratum,
samplingFractions, cohortStratumSizes, precision=NULL,
returnAdjustedTimes=FALSE,
swap=TRUE, dropNeverAtRiskRows=TRUE, dropSubcohEventsDfbeta=FALSE,
adjustSampFracIfAnyNAs=FALSE, keepAllCoxphElements=FALSE,
confidenceLevel=0.95, verbose=FALSE, annotateErrors=TRUE,
coxphControl, ...) {
originalCall <- match.call()
checkCchsArgumentsNotMissing(formula, inSubcohort, stratum)
result <- getExtraVariables(data, originalCall,
samplingFractions, cohortStratumSizes, verbose)
if (length(result) > 0)
for (varName in names(result))
assign(varName, result[[varName]])
checkCchsArguments(formula, data, inSubcohort, stratum, confidenceLevel)
inSubcohort <- as.logical(inSubcohort)
if (annotateErrors) {
tryCatch(
modelFrame <- model.frame(formula, data, na.action=na.omit),
error=function(e) {
if (e$call[[1]] == "Surv") {
errorReason <- "the Surv object being illegal"
} else if (grepl("^object .* not found$", e$message)) {
errorReason <- "a variable in the model-formula\nnot existing"
} else if (grepl("^variable lengths differ", e$message)) {
errorReason <-
"variables in the model-formula having\ndifferent lengths"
} else {
errorReason <- "a problem with the model-formula"
}
stop(e$message, "\n[This error is caused by ", errorReason, ".]")
}
)
} else {
modelFrame <- model.frame(formula, data, na.action=na.omit)
}
rowsToDrop <- attr(modelFrame, "na.action")
if (is.null(rowsToDrop)) {
rowsToDrop <- numeric(0)
} else {
rowsToDrop <- as.vector(rowsToDrop)
}
rowsToDrop <- union(rowsToDrop, which(is.na(inSubcohort)))
rowsToDrop <- union(rowsToDrop, which(is.na(stratum)))
nBeforeDroppingRows <- length(inSubcohort)
rowsToKeep <- setdiff(1:nBeforeDroppingRows, rowsToDrop)
nRowsToDrop <- length(rowsToDrop)
droppedRowsMessage <- { if (nRowsToDrop==0) "" else
paste0(nRowsToDrop, " observation", { if (nRowsToDrop==1)
" was" else "s were" }, " dropped because of NAs.") }
if (!is.factor(stratum)) {
stratum <- tryCatch(
as.factor(stratum),
error=function(e) stop("stratum cannot be converted to a factor")
)
}
stratum <- droplevels(stratum)
checkSubcohortStrataNotEmpty(stratum, inSubcohort,
message="before dropping NA-containing rows")
coxphControl <- getCoxphControl(coxphControl, ...)
samplingFractionInfo <- checkSamplingFractions(samplingFractions,
cohortStratumSizes, stratum, inSubcohort, verbose)
samplingFractions <- getSamplingFractions(samplingFractions,
cohortStratumSizes, stratum, inSubcohort, samplingFractionInfo)
checkCohortStratumSizes(samplingFractions, cohortStratumSizes,
stratum, inSubcohort, verbose=FALSE)
if (adjustSampFracIfAnyNAs && nRowsToDrop > 0)
samplingFractions <- adjustSamplingFractions(samplingFractions,
stratum, inSubcohort, rowsToKeep, verbose)
modelFrame <- model.frame(formula, data, na.action=na.pass)
survObject <- model.response(modelFrame)
if (!inherits(survObject, "Surv"))
stop("the response has to be a Surv object")
censoringType <- attr(survObject, "type")
if (censoringType == "right") {
if (!identical(colnames(survObject), c("time", "status")))
stop("INTERNAL ERROR: unable to process Surv object")
allExitTimes <- survObject[,"time"]
} else if (censoringType == "counting") {
if (!identical(colnames(survObject), c("start", "stop", "status")))
stop("INTERNAL ERROR: unable to process Surv object")
allExitTimes <- survObject[,"stop"]
} else {
stop("censoring type \"", censoringType, "\" is not allowed")
}
modelFrame <- modelFrame[rowsToKeep,]
if (nrow(modelFrame) == 0) stop("no observations")
stratum <- stratum[rowsToKeep]
inSubcohort <- inSubcohort[rowsToKeep]
samplingFractions <- samplingFractions[rowsToKeep]
stratum <- droplevels(stratum)
checkSubcohortStrataNotEmpty(stratum, inSubcohort,
message="after dropping NA-containing rows")
modelMatrix <- model.matrix(attr(modelFrame, "terms"), modelFrame)
modelMatrix <- modelMatrix[, -1, drop=FALSE]
prohibitedSpecialNames <- c("cluster", "strata")
prohibitedSpecialTermsList <-
attr(terms(formula, specials=prohibitedSpecialNames), "specials")
for (i in prohibitedSpecialNames)
if (!is.null(prohibitedSpecialTermsList[[i]]))
stop("formula is not allowed to contain these special terms: ",
paste(prohibitedSpecialNames, collapse=", "))
survObject <- model.response(modelFrame)
if (censoringType == "right") {
timeAtEntry <- rep(0, nrow(survObject))
timeAtExit <- survObject[,"time"]
} else {
timeAtEntry <- survObject[,"start"]
timeAtExit <- survObject[,"stop"]
}
isCase <- survObject[,"status"]
isCase <- as.logical(isCase)
isNonsubcohortNoncase <- !inSubcohort & !isCase
if (any(isNonsubcohortNoncase)) {
nNonsubNoncase <- sum(isNonsubcohortNoncase)
if (nNonsubNoncase == 1) {
stop("there is 1 non-subcohort non-case")
} else {
stop("there are ", nNonsubNoncase, " non-subcohort non-cases")
}
}
n <- nrow(modelMatrix)
nEachStatus <- c(
subcohortNoncases=sum(inSubcohort & !isCase),
subcohortCases=sum(inSubcohort & isCase),
nonsubcohortCases=sum(!inSubcohort)
)
adjustedEntryAndExitTimes <-
adjustTiedEventTimes(timeAtEntry, timeAtExit, isCase, precision)
timeAtEntry <- adjustedEntryAndExitTimes$timeAtEntry
timeAtExit <- adjustedEntryAndExitTimes$timeAtExit
tiedTimesMessage <- adjustedEntryAndExitTimes$message
entryAndExitTimes <- c(timeAtEntry, timeAtExit)
epsilon <- minDifference(entryAndExitTimes) / 10
	if (verbose) cat("epsilon:", epsilon, "\n")
if (epsilon == 0)
stop("epsilon has underflowed to zero, because some entry and \n",
"exit times were too close together")
stratumSizes <- tapply(stratum, stratum, length)
subcohortStratumSizes <- sapply(X=levels(stratum),
FUN=function(x) sum(inSubcohort[stratum==x]))
	if (verbose) {
		sf <- round(samplingFractions[match(levels(stratum), stratum)], 6)
		# verbose diagnostics: stratum sizes and sampling fractions
		cat("\nStratum sizes:", stratumSizes, "\n")
		cat("Subcohort stratum sizes:", subcohortStratumSizes, "\n")
		cat("Sampling fractions for each stratum:", sf, "\n")
	}
modelMatrixColumnNames <- colnames(modelMatrix)
colnames(modelMatrix) <- paste0("dummyName", 1:ncol(modelMatrix))
modelMatrixPlus <- modelMatrix
rm(modelMatrix)
extraVariables <- c("timeAtEntry", "timeAtExit", "isCase", "inSubcohort",
"stratum", "samplingFractions", "id")
id <- 1:n
modelMatrixPlus <- as.data.frame(modelMatrixPlus)
for (varName in extraVariables)
modelMatrixPlus[[varName]] <- get(varName)
if (swap)
modelMatrixPlus <- doSplitting(modelMatrixPlus, epsilon,verbose=verbose)
nonSubcohortRows <- which(!modelMatrixPlus$inSubcohort)
modelMatrixPlus$timeAtEntry[nonSubcohortRows] <-
modelMatrixPlus$timeAtExit[nonSubcohortRows] - epsilon
modelMatrixPlus <- modelMatrixPlus[
modelMatrixPlus$timeAtEntry != modelMatrixPlus$timeAtExit,]
modelMatrixPlus$useForDfbeta <- modelMatrixPlus$inSubcohort
if (dropSubcohEventsDfbeta)
modelMatrixPlus <-
splitSubcohortCasesJustBeforeEvent(modelMatrixPlus, epsilon)
if (dropNeverAtRiskRows)
modelMatrixPlus <-
dropRowsThatAreNeverAtRisk(modelMatrixPlus, verbose=FALSE)
useForDfbeta <- NULL
varNames <- c(extraVariables, "useForDfbeta")
for (varName in varNames)
assign(varName, modelMatrixPlus[[varName]])
modelMatrix <- modelMatrixPlus[,setdiff(names(modelMatrixPlus), varNames)]
rm(modelMatrixPlus)
modelMatrix <- as.matrix(modelMatrix)
colnames(modelMatrix) <- modelMatrixColumnNames
if (annotateErrors) {
result <- withCallingHandlers(
tryCatch(
coxph(Surv(timeAtEntry, timeAtExit, isCase) ~ modelMatrix +
offset(-log(samplingFractions)), control=coxphControl),
error=function(e) stop(e$message,
"\n[This error was thrown by coxph, which was called by cchs.]")
),
warning=function(w) {
warning(conditionMessage(w),
"\n[This warning was thrown by coxph, which was called by cchs.]")
invokeRestart("muffleWarning")
}
)
} else {
result <- coxph(Surv(timeAtEntry, timeAtExit, isCase) ~ modelMatrix +
offset(-log(samplingFractions)), control=coxphControl)
}
names(result$coefficients) <- modelMatrixColumnNames
dimnames(result$var) <- list(modelMatrixColumnNames, modelMatrixColumnNames)
dfbetaUncombined <- residuals(result, type="dfbeta")
if (is.matrix(dfbetaUncombined)) {
dfbetaUncombined <- dfbetaUncombined[useForDfbeta, , drop=FALSE]
} else {
dfbetaUncombined <- matrix(dfbetaUncombined[useForDfbeta], ncol=1)
}
dfbetaCombined <- aggregate(dfbetaUncombined,
list(stra=stratum[useForDfbeta], id=id[useForDfbeta]), sum)
dfbetaStrata <- dfbetaCombined$stra
dfbetaCombined <- dfbetaCombined[, !(names(dfbetaCombined) %in%
c("stra","id")), drop=FALSE]
nParameters <- ncol(dfbetaUncombined)
samplingFractions <- samplingFractions[match(levels(stratum), stratum)]
stratumWeights <- subcohortStratumSizes * (1 - samplingFractions)
varAdjustment <- matrix(0, nParameters, nParameters)
for (i in levels(stratum)) {
result$var <- result$var + stratumWeights[i] *
var(dfbetaCombined[dfbetaStrata==i,])
}
beta <- result$coefficients
se <- sqrt(diag(result$var))
z <- qnorm((1 + confidenceLevel) / 2)
result$coeffsTable <- cbind(exp(beta), exp(beta-z*se), exp(beta+z*se),
1-pchisq((beta/se)^2, 1), beta, se)
colnames(result$coeffsTable) <-
c("HR", "CIlower", "CIupper", "p", "logHR", "SElogHR")
result$call <- originalCall
result$n <- n
result$nEachStatus <- nEachStatus
result$nevent <- n - unname(result$nEachStatus["subcohortNoncases"])
result$nStrata <- nlevels(stratum)
message <- droppedRowsMessage
if (message != "" && tiedTimesMessage != "")
message <- paste0(message, "\n")
if (tiedTimesMessage != "")
message <- paste0(message, tiedTimesMessage)
result$message <- message
if (confidenceLevel != 0.95)
result$confidenceLevel <- confidenceLevel
varsToKeep <- c("coefficients","var","iter","n","nevent","nStrata","call",
"coeffsTable","nEachStatus","message","confidenceLevel","loglik")
if (!keepAllCoxphElements)
for (varName in names(result))
if (!(varName %in% varsToKeep))
result[varName] <- NULL
if (returnAdjustedTimes) {
adjustedTimes <- unname(allExitTimes)
adjustedTimes[rowsToKeep] <- adjustedEntryAndExitTimes$timeAtExit
result$adjustedTimes <- adjustedTimes
}
result <- setS3class(result, "cchs")
return(result)
}
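# Hypothetical usage sketch for cchs(). The simulated data, the column names,
# and the format assumed for samplingFractions are illustrative assumptions;
# consult the cchs package documentation for the authoritative interface.
library(survival)
set.seed(1)
cohort <- data.frame(x = rnorm(1000),
                     stratumVar = factor(sample(c("a", "b"), 1000, replace = TRUE)))
eventTime      <- rexp(1000, rate = 0.02 * exp(0.5 * cohort$x))
censTime       <- runif(1000, 3, 8)
cohort$obsTime <- pmin(eventTime, censTime)
cohort$isCase  <- as.numeric(eventTime <= censTime)
cohort$inSub   <- runif(1000) < 0.15                    # subcohort sampled at 15%
ccData <- cohort[cohort$inSub | cohort$isCase == 1, ]   # subcohort plus all cases
fitC <- cchs::cchs(Surv(obsTime, isCase) ~ x, data = ccData,
                   inSubcohort = ccData$inSub, stratum = ccData$stratumVar,
                   samplingFractions = c(a = 0.15, b = 0.15))
fitC$coeffsTable   # HR, confidence limits, p, logHR and its SE, as assembled above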
norm_test_fun <- function(x,
method = "anderson",
pval = 0.05,
xn = 'x',
bin = FALSE) {
if (!method %in% c("shapiro", "anderson", "ks")) {
warning("Method should only be 'ks', 'shapiro' or 'anderson'.
Setting method as 'anderson'")
method <- "anderson"
}
if (length(x) > 5000) {
warning(paste0(xn, " is very large (>5000), normality test may be accurate.
Consider changing method to 'Anderson-Darling'"))
}
if (length(x) < 3) {
warning(paste0(xn," is very small (<3), normality test can't be performed."))
return(0)
}
if (method == "shapiro") {
out <- shapiro.test(x)
} else if (method == "anderson") {
out <- anderson.test(x)
} else {
out <- ks.test(x, "pnorm", mean = mean(x), sd = sd(x))
}
if (bin) {
return(out$p.value > pval)
} else {
return(out)
}
}
anderson.test <- function (x){
DNAME <- deparse(substitute(x))
x <- sort(x[complete.cases(x)])
n <- length(x)
if (n < 8)
stop("sample size must be greater than 7")
logp1 <- pnorm( (x-mean(x))/sd(x), log.p=TRUE)
logp2 <- pnorm( -(x-mean(x))/sd(x), log.p=TRUE )
h <- (2 * seq(1:n) - 1) * (logp1 + rev(logp2))
A <- -n - mean(h)
AA <- (1 + 0.75/n + 2.25/n^2) * A
if (AA < 0.2) {
pval <- 1 - exp(-13.436 + 101.14 * AA - 223.73 * AA^2)
}
else if (AA < 0.34) {
pval <- 1 - exp(-8.318 + 42.796 * AA - 59.938 * AA^2)
}
else if (AA < 0.6) {
pval <- exp(0.9177 - 4.279 * AA - 1.38 * AA^2)
}
else if (AA < 10) {
pval <- exp(1.2937 - 5.709 * AA + 0.0186 * AA^2)
}
else pval <- 3.7e-24
out <- list(statistic = c(A = A), p.value = pval,
method = "Anderson-Darling normality test",
data.name = DNAME)
class(out) <- "htest"
return(out)
}
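# Example usage of the helpers above (base R only; the functions defined above
# must be sourced first):
set.seed(42)
x <- rnorm(200)
norm_test_fun(x, method = "anderson")             # returns the full htest object
norm_test_fun(x, method = "shapiro", bin = TRUE)  # TRUE when the p-value exceeds 0.05
anderson.test(runif(50))                          # clearly non-normal sample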
test_that("P", {
data("Train", package = "mlogit")
data = prepare(form = choice ~ price | 0 | time + comfort + change,
choice_data = Train)
expect_snapshot(print(data))
expect_snapshot(summary(data))
})
test_that("MMNP", {
data("Train", package = "mlogit")
data = prepare(form = choice ~ price | 0 | time + comfort + change,
choice_data = Train,
re = c("price","time"))
expect_snapshot(print(data))
expect_snapshot(summary(data))
})
test_that("without choice variable", {
data("Train", package = "mlogit")
Train[["choice"]] = NULL
data = prepare(form = choice ~ price | 0 | time + comfort + change,
choice_data = Train,
re = c("price","time"),
alternatives = c("A","B"))
expect_error(mcmc(data))
expect_snapshot(print(data))
expect_snapshot(summary(data))
})
test_that("train and test data", {
data("Train", package = "mlogit")
data = prepare(form = choice ~ price | 0 | time + comfort + change,
choice_data = Train,
re = c("price","time"),
test_prop = 0.2)
expect_type(data, "list")
expect_snapshot(print(data))
for(i in 1:2){
expect_snapshot(print(data[[i]]))
expect_snapshot(summary(data[[i]]))
}
})
unstrip <- function(x)
{
dd <- dim(x)
y <- x
if(length(dd)==2){
dd2 <- dd[2]
if(dd2==1) y<- c(x[,1])
if(dd2==2) y<- cbind(c(x[,1]),c(x[,2]))
if(dd2>2) y<- cbind(c(x[,1]),c(x[,2]),c(x[,3]))
if(dd2>3)for(i in 4:dd2) y <- cbind(y,c(x[,i]))
y
}
if(length(dd)==1 || length(dd)==0){
y <- c(unlist(c(unlist(x))))
names(y) <- NULL
}
y
}
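# unstrip() flattens names/dim attributes so that the .C() interfaces below
# receive plain vectors and matrices:
unstrip(data.frame(a = 1:3))                                  # the bare vector 1 2 3
unstrip(matrix(1:6, ncol = 2, dimnames = list(NULL, c("a", "b"))))  # plain 3 x 2 matrix
unstrip(c(x = 1, y = 2))                                      # names dropped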
hare <- function(data, delta, cov, penalty, maxdim, exclude,
include, prophaz = FALSE, additive = FALSE, linear, fit, silent = TRUE)
{
call <- match.call()
if(!missing(data)) data <- unstrip(data)
if(!missing(delta)) delta <- unstrip(delta)
if(!missing(cov)) cov <- unstrip(cov)
if(!missing(exclude)) exclude <- unstrip(exclude)
if(!missing(include)) include <- unstrip(include)
MAXKNOTS <- -3
MAXSPACE <- -3
z <- .C("sharex",
mk = as.integer(MAXKNOTS),
ms = as.integer(MAXSPACE),
PACKAGE = "polspline")
MAXKNOTS <- z$mk
MAXSPACE <- z$ms
if(missing(data))
stop("there has to be data")
if(length(data) < 25)
stop("not enough data")
if(min(data) < 0)
stop("negative data")
if(missing(delta))
delta <- data - data + 1
if(length(data) != length(delta))
stop("data and delta have different length")
dd <- abs(delta - 0.5)
if(min(dd) < 0.5 || max(dd) > 0.5)
stop("delta not all 0 or 1")
ndata <- length(data)
if(missing(cov)) {
ncov <- 0
cov <- 0
iia <- order(data)
delta <- delta[iia]
data <- data[iia]
}
else {
if(length(cov) == ndata)
cov <- matrix(cov, ncol = 1, nrow = ndata)
if(length(cov[, 1]) != ndata)
stop("covariates not ndata * ncov matrix")
ncov <- length(cov[1, ])
cov <- cbind(cov, 1)
y <- cbind(data, cov)
keys <- 1:ndata
for(i in (ncov + 2):1)
keys <- keys[sort.list(y[keys, i])]
data <- data[keys]
delta <- delta[keys]
cov <- cov[keys, 1:ncov]
}
if(additive) {
if(!missing(exclude))
stop("cannot have exclude and additive")
if(!missing(include))
stop("cannot have include and additive")
prophaz <- FALSE
include <- c(0, 0)
}
if(missing(exclude) + missing(include) == 0)
stop("only 1 from exclude and include allowed")
vexclude <- 0
if(missing(exclude) == FALSE) {
if(length(exclude) == 2)
exclude <- matrix(exclude, ncol = 2, nrow = 1)
if(length(exclude[1, ]) != 2)
stop("exclude has wrong shape")
if(min(exclude) < 0 || max(exclude) > ncov)
stop("exclude has wrong values")
vexclude <- as.vector(t(exclude))
vexclude <- c(length(vexclude)/2, vexclude)
if(prophaz && ncov > 0) {
vexclude <- c(vexclude, as.vector(rbind(1:ncov, 0)))
vexclude[1] <- vexclude[1] + ncov
}
}
if(missing(include) == FALSE || additive) {
if(length(include) == 2)
include <- matrix(include, ncol = 2, nrow = 1)
if(length(include[1, ]) != 2)
stop("include has wrong shape")
if(min(include) < 0 || max(include) > ncov)
stop("include has wrong values")
include <- t(apply(include, 1, sort))
if(length(include) == 2)
include <- matrix(include, ncol = 2, nrow = 1)
if(prophaz)
include <- include[include[, 1] > 0, ]
vexclude <- as.vector(t(include))
vexclude <- c( - length(vexclude)/2, vexclude)
}
if(missing(include) && missing(exclude) && prophaz && ncov > 0)
vexclude <- c(ncov, as.vector(rbind(1:ncov, 0)))
mindist <- 5
if(missing(penalty))
penalty <- log(ndata)
if(missing(maxdim)) {
maxdim <- floor(6 * (ndata)^0.2)
if(maxdim > MAXSPACE - 1)
maxdim <- MAXSPACE - 1
maxdim <- - maxdim
}
if(maxdim > MAXSPACE - 1) {
maxdim <- MAXSPACE - 1
print(paste("maximum dimension reduced to", maxdim))
}
lins <- rep(0, MAXSPACE)
if(!missing(linear)) {
linear[linear <= 0] <- ncov + 1
linear[linear > ncov + 1] <- ncov + 1
lins[linear] <- 1
}
if(additive)
vexclude <- c(-1, 0, 0)
fitter <- 0
bbtt <- matrix(0, ncol = 6, nrow = abs(maxdim))
cckk <- matrix(0, ncol = (MAXKNOTS + 1), nrow = (ncov + 1))
if(!missing(fit)) {
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
fitter <- fit$ndim
if(fit$ncov != ncov)
stop("ncov and fit's ncov are different")
bbtt[1:fit$ndim, ] <- fit$fcts
bbtt <- as.vector(t(bbtt))
bbtt[is.na(bbtt)] <- -1
a1 <- length(fit$knots[1, ])
cckk[, 1:a1] <- fit$knots
cckk <- as.vector(cckk)
cckk[is.na(cckk)] <- -1
}
z <- .C("share",
as.integer(ncov),
ndim = as.integer(ndata),
as.double(data),
as.integer(delta),
as.double(cov),
as.double(penalty),
as.integer(mindist),
as.integer(maxdim),
bbtt = as.double(bbtt),
cckk = as.double(cckk),
as.integer(vexclude),
as.integer(lins),
as.integer(silent),
logl = as.double(rep(0, MAXSPACE)),
as.integer(fitter),
ad = as.integer(rep(0, MAXSPACE)),
as.integer(0),
PACKAGE = "polspline")
maxdim <- abs(maxdim)
z$bbtt <- matrix(z$bbtt, nrow = maxdim, ncol = 6, byrow = TRUE)[1:z$ndim,
]
z$cckk <- matrix(z$cckk, nrow = ncov + 1, ncol = MAXKNOTS + 1, byrow =
TRUE)
z$cckk <- z$cckk[, 1:(1 + max(z$cckk[, 1]))]
z$cckk <- matrix(z$cckk, nrow = ncov + 1)
l1 <- max(z$cckk[, 1])
for(i in 1:(ncov + 1))
if(z$cckk[i, 1] != l1) z$cckk[i, (z$cckk[i, 1] + 2):(l1 + 1)] <-
NA
if(l1 > 0 && ncov > 0)
dimnames(z$cckk) <- list(c("T", 1:ncov), c("K", 1:l1))
if(l1 > 0 && ncov == 0)
dimnames(z$cckk) <- list(c("T"), c("K", 1:l1))
if(l1 == 0 && ncov > 0)
dimnames(z$cckk) <- list(c("T", 1:ncov), "K")
if(l1 == 0 && ncov == 0)
dimnames(z$cckk) <- list(c("T"), "K")
l1 <- max((1:MAXSPACE)[z$logl > -1e+100])
z$bbtt <- matrix(z$bbtt, ncol = 6)
dimnames(z$bbtt) <- list(1:(z$ndim), c("dim1", "knot1", "dim2", "knot2",
"beta", "SE"))
z$bbtt[z$bbtt[, 3] == -1, 3:4] <- NA
if(is.na(l1)) {
z$logl <- z$logl[1]
z$ad <- z$ad[1]
}
else {
z$logl <- z$logl[1:l1]
z$ad <- z$ad[1:l1]
}
z$ad[z$logl < -1e+100] <- NA
z$logl[z$logl < -1e+100] <- NA
z$logl <- cbind(z$logl, z$ad)
dimnames(z$logl) <- list(NULL, c("log-lik", "A/D"))
ranges <- NA
if(ncov == 1)
ranges <- matrix(range(cov), ncol = 1, nrow = 2)
if(ncov > 1)
ranges <- apply(cov, 2, range)
fit <- list(call = call, ncov = ncov, ndim = z$ndim, fcts = z$bbtt, knots
= z$cckk, penalty = penalty, max = max(data), ranges = ranges,
logl = z$logl, sample = ndata)
class(fit) <- "hare"
fit
}
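# Example usage sketch for hare() (requires the compiled polspline routines;
# the simulated survival data below is purely illustrative):
library(polspline)
set.seed(7)
n     <- 300
covs  <- cbind(age = runif(n, 40, 80), sex = rbinom(n, 1, 0.5))
ttrue <- rexp(n, rate = 0.05 * exp(0.02 * (covs[, "age"] - 60)))
cens  <- rexp(n, rate = 0.03)
fitH  <- hare(data = pmin(ttrue, cens), delta = as.numeric(ttrue <= cens), cov = covs)
summary(fitH)
plot(fitH, cov = c(60, 1), what = "h")   # hazard for age 60, sex = 1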
plot.hare <- function(x, cov, n = 100, which = 0, what = "d", time, add = FALSE,
xlim, xlab, ylab, type, ...)
{
	if(!inherits(x, "hare"))
stop("x is not a hare object")
if(!missing(cov))cov <- unstrip(cov)
if(!missing(time))time <- unstrip(time)
fit <- x
nocov <- 0
if(fit$ncov == 0)
nocov <- 1
else {
if(length(cov) != fit$ncov)
stop("covariates are wrong")
}
if(which == 0) {
if(missing(xlim)) {
if(nocov == 0) {
u1 <- qhare(0.01, cov, fit)
u2 <- qhare(0.99, cov, fit)
}
else {
u1 <- qhare(0.01, fit = fit)
u2 <- qhare(0.99, fit = fit)
}
u3 <- 1.1 * u1 - 0.1 * u2
u2 <- min(u2, fit$max)
u4 <- 1.1 * u2 - 0.1 * u1
if(u3 < 0)
u3 <- 0
else if(u4/u3 > 5)
u3 <- 0
}
else {
u3 <- xlim[1]
u4 <- xlim[2]
}
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
if(fit$ncov > 0)
yy <- cov
}
else {
if(which < 0 || which > fit$ncov)
stop("which is wrong")
if(missing(time))
stop("time is missing")
if(missing(xlim)) {
u3 <- fit$ranges[1, which]
u4 <- fit$ranges[2, which]
}
else {
u3 <- xlim[1]
u4 <- xlim[2]
}
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
yy <- matrix(cov, ncol = fit$ncov, nrow = n, byrow = TRUE)
yy[, which] <- xx
xx <- time
}
iwhat <- 0
if(what == "d" || what == "D")
iwhat <- 3
if(what == "h" || what == "H")
iwhat <- 2
if(nocov == 0)
yy <- xhare(iwhat, xx, yy, fit)
else yy <- xhare(iwhat, xx, arg4 = fit)
if(what == "s" || what == "S")
yy <- 1 - yy
if(missing(xlab))
xlab <- ""
if(missing(ylab))
ylab <- ""
if(missing(type))
type <- "l"
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
if(!add)
plot(xx, yy, xlab = xlab, ylab = ylab, type = type, ...)
else lines(xx, yy, type = type, ...)
}
print.hare <- function(x,...)
{
summary.hare(x)
}
summary.hare <- function(object,...)
{
	if(!inherits(object, "hare"))
stop("object is not a hare object")
fit <- object
s3 <- as.vector(t(fit$logl))
s3[is.na(s3)] <- 0
s1 <- as.vector(t(fit$fcts))
s2 <- as.vector(fit$knots)
s1[is.na(s1)] <- -1
s2[is.na(s2)] <- -1
.C("ssumm",
as.double(fit$penalty),
as.integer(fit$sample),
as.double(s3),
as.integer(length(s3)/2),
as.double(s2),
as.double(s1),
as.integer(fit$ndim),
as.integer(fit$ncov),
PACKAGE = "polspline")
invisible()
}
dhare <- function(q,cov,fit)
{
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
xhare(3, q,cov,fit)
}
hhare <- function(q,cov,fit)
{
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
xhare(2, q,cov,fit)
}
phare <- function(q,cov,fit)
{
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
xhare(0, q,cov,fit)
}
qhare <- function(p,cov,fit)
{
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
xhare(1, p,cov,fit)
}
rhare <- function(n, cov,fit)
{
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
xhare(1, runif(n), cov,fit)
}
xhare <- function(arg1,arg2,arg3,arg4)
{
iwhat <- arg1
if(!missing(arg2))arg2 <- unstrip(arg2)
if(!missing(arg3))arg3 <- unstrip(arg3)
q <- arg2
cov <- arg3
fit <- arg4
	if(!inherits(fit, "hare"))
stop("fit is not a hare object")
zz <- 0
if(missing(arg4)) {
zz <- 7
fit <- cov
if(is.null(fit$ncov))
stop("fit missing")
}
if(fit$ncov == 0) {
if(!missing(arg3) && zz==0)
stop("there should be no covariates")
else cov <- 0
}
else {
if(is.matrix(cov) == FALSE)
cov <- matrix(cov, ncol = fit$ncov)
nd <- length(cov[, 1])
nc <- length(cov[1, ])
nq <- length(q)
if(nc != fit$ncov)
stop("not the right number of covariates")
if(nd != 1 && nq != 1 && nd != nq)
stop("no matching number of cases")
if(nq == 1)
q <- rep(q, nd)
if(nd == 1 && nq != 1)
cov <- matrix(cov, nrow = nq, ncol = nc, byrow = TRUE)
}
fit$fcts <- as.vector(t(fit$fcts))
fit$fcts[is.na(fit$fcts)] <- -1
fit$knots <- as.vector(fit$knots)
fit$knots[is.na(fit$knots)] <- 0
z <- .C("sphare",
as.integer(fit$ncov),
as.integer(fit$ndim),
as.integer(length(q)),
as.double(cov),
as.integer(iwhat),
q = as.double(q),
as.double(fit$knots),
as.double(fit$fcts),
PACKAGE = "polspline")
z$q
}
heft <- function(data, delta, penalty, knots, leftlin, shift,
leftlog, rightlog, maxknots, mindist, silent = TRUE)
{
call <- match.call()
if(!missing(data))data <- unstrip(data)
if(!missing(delta))delta <- unstrip(delta)
if(!missing(knots))knots<- unstrip(knots)
if(missing(leftlin))leftlin<-2
leftlin<-leftlin*1
nx <- -1
z <- .C("sheftx",
z = as.integer(nx),
PACKAGE = "polspline")
lgth <- z$z
lgth <- 40
if(missing(mindist))
mindist <- 5
if(mindist < 2) {
warning("mindist reset to 2")
mindist <- 2
}
if(missing(delta))
delta <- data - data + 1
if(length(data) != length(delta))
stop("data and delta have different length")
if(min(data) < 0)
stop("negative data")
if(min(data) == 0) {
if(!missing(leftlog)){
if(leftlog != 0)
stop("** hard-zeros, leftlog has to be 0 **")
}
else{
leftlog <- 0
warning("*** hard zeros: leftlog set to 0 ***")
}
if(leftlin==2){
warning("*** hard zeros: leftlin set to TRUE ***")
leftlin <- 1
}
}
leftlin <- (leftlin==1)
dd <- abs(delta - 0.5)
if(min(dd) < 0.5 || max(dd) > 0.5)
stop("delta not all 0 or 1")
delta <- delta[order(data)]
data <- sort(data)
nx <- length(data)
if(!missing(maxknots) && !missing(knots) && maxknots < length(knots))
stop("maxknots is smaller than length(knots)")
if(missing(maxknots))
maxknots <- 0
if(missing(penalty))
penalty <- log(nx)
if(maxknots > lgth - 5) {
maxknots <- lgth - 5
warning(paste("maxknots reduced to", maxknots))
}
if(!missing(shift))
if(shift <= - min(data))
stop("shift too small")
if(missing(shift))
shift <- quantile(data[delta==1], 0.75)
nknots <- 0
iauto <- 0
if(!missing(knots)) {
nknots <- length(knots)
if(nknots > lgth - 5)
stop(paste("nknots can be at most", lgth - 5))
iauto <- 2
uu <- knots[2:nknots] - knots[1:(nknots - 1)]
if(min(uu) < 0)
stop("knots not in sequence")
if(knots[1] < 0)
stop("knot 1 is negative")
knots <- c(knots, rep(0, lgth - nknots))
}
if(iauto < 2)
knots <- rep(0, lgth)
error <- c(1, rep(0, 20))
if(silent != TRUE)
error[7] <- 37
tails <- c(0, 0, 0, 0, 1)
if(!missing(leftlog) || min(data) == 0) {
if(leftlog <= -1)
stop("leftlog should be smaller than -1")
tails[1] <- 1
tails[2] <- leftlog
}
if(!missing(rightlog)) {
if(rightlog < -1)
stop("rightlog should be at least -1")
tails[3] <- 1
tails[4] <- rightlog
}
if(leftlin)
tails[5] <- 0
z <- .C("sheft",
as.integer(nx),
as.double(data),
as.integer(delta),
nk = as.integer(nknots),
knots = as.double(knots),
as.double(penalty),
tails = as.double(tails),
as.integer(iauto),
logl = as.double(rep(0, lgth)),
theta = as.double(rep(0, lgth)),
iknots = as.integer(rep(0, lgth)),
error = as.integer(error),
as.double(shift),
as.integer(maxknots),
ad = as.integer(rep(0, lgth)),
as.integer(mindist),
PACKAGE = "polspline")
error <- z$error
if(z$nk < -100) error[2] <- 1
z$logl[abs(z$logl) < 1e-100] <- 0
z$logl[z$ad == 2] <- 0
if(error[2] == 0){
fit <-list(call = call, knots = z$knots[1:z$nk], logl = z$logl[2:(z$nk
+ 1)], thetak = z$theta[1:z$nk], thetap = z$theta[z$nk +
(1:2)], thetal = z$theta[z$nk + (3:4)], penalty =
penalty, shift = shift, sample = length(data), logse =
z$tails[c(2, 4)], max = max(data), adddel = z$ad[2:(z$nk
+ 1)])
class(fit) <- "heft"
fit
}
else {
print("sorry......")
invisible()
}
}
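# Example usage sketch for heft() (requires the compiled polspline routines):
library(polspline)
set.seed(11)
y    <- rweibull(250, shape = 1.5, scale = 10)
d    <- as.numeric(y < 20)
y    <- pmin(y, 20)                 # administrative censoring at t = 20
fitE <- heft(y, d)
summary(fitE)
plot(fitE, what = "h")              # fitted hazard function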
plot.heft <- function(x, n = 100, what = "d", add = FALSE, xlim, xlab, ylab, type,
...)
{
	if(!inherits(x, "heft"))
stop("x is not a heft object")
fit <- x
if(missing(xlim)) {
u2 <- min(qheft(0.99, fit), fit$max)
u3 <- 0
u4 <- 1.1 * u2
xlim <- c(u3, u4)
}
u3 <- xlim[1]
u4 <- xlim[2]
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
if(u3 == 0)
xx <- (1:n)/n * u4
yy <- c(-10, -10)
if(what == "d" || what == "D")
yy <- dheft(xx, fit)
if(what == "h" || what == "H")
yy <- hheft(xx, fit)
if(what == "f" || what == "F" || what == "p" || what == "P")
yy <- pheft(xx, fit)
if(what == "s" || what == "S")
yy <- 1-pheft(xx, fit)
if(yy[1] < -8)
stop("What is wrong? Well: what is wrong.")
if(missing(xlab))
xlab <- ""
if(missing(ylab))
ylab <- ""
if(missing(type))
type <- "l"
if(!add)
plot(xx, yy, xlim = xlim, xlab = xlab, ylab = ylab, type = type,
...)
else lines(xx, yy, type = type, ...)
}
print.heft <- function(x,...)
{
summary.heft(x)
}
summary.heft <- function(object,...)
{
	if(!inherits(object, "heft"))
stop("object is not a heft object")
fit <- object
ul <- fit$penalty
um <- fit$sample
ll <- fit$logl
kk <- (1:length(ll))
kk <- kk[fit$ad != 2]
ll <- ll[fit$ad != 2]
ad <- fit$ad[fit$ad != 2]
bb <- -2 * ll + ul * (kk-2)
if(fit$thetal[1]!=0) bb <- bb+ul
if(fit$thetal[2]!=0) bb <- bb+ul
if(fit$thetap[2]!=0) bb <- bb+ul
if(fit$thetap[2]==0 && min(kk)==2) bb <- bb+ul
cc1 <- bb
cc2 <- bb
cc2[1] <- Inf
cc1[length(bb)] <- 0
if(length(bb) > 1) {
for(i in 1:(length(bb) - 1)) {
cc1[i] <- max((ll[(i + 1):(length(bb))] - ll[i])/(kk[(i +
1):(length(bb))] - kk[i]))
cc2[i + 1] <- min((ll[1:i] - ll[i + 1])/(kk[1:i] - kk[i +
1]))
}
}
c3 <- cc2 - cc1
cc1[c3 < 0] <- NA
cc2[c3 < 0] <- NA
uu <- cbind(kk, ad, ll, bb, 2 * cc1, 2 * cc2)
ww <- rep("", length(bb))
dimnames(uu) <- list(ww, c("knots", "A(0)/D(1)", "loglik", "AIC",
"minimum penalty", "maximum penalty"))
print(round(uu, 2))
cat(paste("the present optimal number of knots is ", kk[bb == min(bb)],
"\n"))
if(ul == log(um))
cat(paste("penalty(AIC) was the default: BIC=log(samplesize): log(",
um, ")=", round(ul, 2), "\n"))
else cat(paste("penalty(AIC) was ", round(ul, 2),
", the default (BIC) ", "would have been", round(log(um
), 2), "\n"))
if(min(kk) == 3 && fit$thetap[2] != 0) {
cat(paste("models with fewer than", kk[1], "knots",
"can be fitted, but they are not optimal for the\n"))
cat(paste("present choice of penalty - choose penalty in",
"heft larger to see these fits\n"))
}
if(min(kk) > 3) {
cat(paste("models with fewer than", kk[1], "knots",
"can be fitted, but they are not optimal for the\n"))
cat(paste("present choice of penalty - choose penalty in",
"heft larger to see these fits\n"))
}
uuu <- matrix(NA, ncol = 3, nrow = 2, dimnames = list(c("left tail",
"right tail"), c("theta", "SE", "t")))
uuu[, 1] <- fit$thetal
if(fit$logse[1] > 0) {
uuu[1, 2] <- fit$logse[1]
uuu[1, 3] <- abs(fit$thetal[1]/fit$logse[1])
}
if(fit$logse[2] > 0) {
uuu[2, 2] <- fit$logse[2]
uuu[2, 3] <- abs(fit$thetal[2]/fit$logse[2])
}
print(round(uuu, 2))
invisible()
}
dheft <- function(q, fit)
{
	if(!inherits(fit, "heft"))
stop("fit is not a heft object")
y <- hheft(q, fit)
z <- 1 - pheft(q, fit)
y * z
}
hheft <- function(q, fit)
{
	if(!inherits(fit, "heft"))
stop("fit is not a heft object")
q <- unstrip(q)
y <- fit$thetap[1] + q * fit$thetap[2] + fit$thetal[1] * log(q/(q + fit$
shift)) + fit$thetal[2] * log(q + fit$shift)
for(i in 1:length(fit$knots)) {
if(fit$thetak[i] != 0)
y <- y + fit$thetak[i] * ((abs(q - fit$knots[i]) + q -
fit$knots[i])/2)^3
}
exp(y)
}
pheft <- function(q, fit)
{
	if(!inherits(fit, "heft"))
stop("fit is not a heft object")
q <- unstrip(q)
sq <- rank(q)
q <- sort(q)
z <- .C("heftpq",
as.double(c(fit$knots,rep(0,100))),
as.double(c(fit$shift,rep(0,100))),
as.double(c(fit$thetak,rep(0,100))),
as.double(c(fit$thetal,rep(0,100))),
as.double(c(fit$thetap,rep(0,100))),
as.integer(1),
pp = as.double(q),
as.double(q),
as.integer(length(fit$knots)),
as.integer(length(q)),
PACKAGE = "polspline")
zz <- z$pp[sq]
zz[q < 0] <- 0
zz
}
qheft <- function(p, fit)
{
	if(!inherits(fit, "heft"))
stop("fit is not a heft object")
p <- unstrip(p)
sp <- rank(p)
p <- sort(p)
z <- .C("heftpq",
as.double(c(fit$knots,rep(0,100))),
as.double(c(fit$shift,rep(0,100))),
as.double(c(fit$thetak,rep(0,100))),
as.double(c(fit$thetal,rep(0,100))),
as.double(c(fit$thetap,rep(0,100))),
as.integer(0),
as.double(p),
qq = as.double(p),
as.integer(length(fit$knots)),
as.integer(length(p)),
PACKAGE = "polspline")
zz <- z$qq[sp]
zz[p < 0] <- NA
zz[p == 0] <- 0
zz[p == 1] <- Inf
zz[p > 1] <- NA
zz
}
rheft <- function(n, fit)
{
	if(!inherits(fit, "heft"))
stop("fit is not a heft object")
pp <- runif(n)
qheft(pp, fit)
}
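# Quick consistency sketch for the heft distribution functions, using fitE from
# the example above: pheft/qheft should be (numerical) inverses, and the density
# equals the hazard times the survival function.
pvals <- c(0.1, 0.5, 0.9)
all.equal(pheft(qheft(pvals, fitE), fitE), pvals, tolerance = 1e-4)
all.equal(dheft(5, fitE), hheft(5, fitE) * (1 - pheft(5, fitE)))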
oldlogspline.to.logspline <- function(obj,data)
{
nobj <- list()
nobj$call <- obj$call
if(is.null(obj$call))nobj$call <- "translated from oldlogspline"
nobj$knots <- sum(obj$coef[-(1:2)]!=0)
nobj$coef.pol <- obj$coef[1:2]
nobj$coef.kts <- obj$coef[-(1:2)]
nobj$coef.kts <- nobj$coef.kts[nobj$coef.kts!=0]
nobj$knots <- obj$knots[obj$coef[-(1:2)]!=0]
nobj$maxknots <- length(obj$coef)-2
nobj$penalty <- obj$penalty
nobj$bound <- obj$bound
nobj$samples <- obj$sample
nobj$logl <- obj$logl[obj$logl!=0]
lx <- length(nobj$logl)
nobj$logl <- cbind(nobj$maxknots+1-(lx:1),c(rep(2,lx-1),1),nobj$logl)
class(nobj) <- "logspline"
if(!missing(data))nobj$range <- obj$range
else {
lx <- 1/(nobj$samples+1)
nobj$range <- qlogspline(c(lx,1-lx),nobj)
}
nobj$mind
nobj
}
poldlogspline <- function(q, fit)
{
fitx <- oldlogspline.to.logspline(fit)
plogspline(q,fitx)
}
qoldlogspline <- function(p, fit)
{
fitx <- oldlogspline.to.logspline(fit)
qlogspline(p,fitx)
}
roldlogspline <- function(n, fit)
{
	if(!inherits(fit, "oldlogspline"))
stop("fit is not an oldlogspline object")
pp <- runif(n)
qoldlogspline(pp, fit)
}
doldlogspline <- function(q, fit)
{
x <- q
	if(!inherits(fit, "oldlogspline"))
stop("fit is not an oldlogspline object")
q <- unstrip(q)
y <- fit$coef[1] + x * fit$coef[2]
for(i in 1:length(fit$knots)) {
if(fit$coef[i+2] != 0)
y <- y + fit$coef[i+2] * ((abs(x - fit$knots[i]) +
x - fit$knots[i])/2)^3
}
y <- exp(y)
if(fit$bound[1] > 0)
y[x < fit$bound[2]] <- 0
if(fit$bound[3] > 0)
y[x > fit$bound[4]] <- 0
y
}
plot.oldlogspline <- function(x, n = 100, what = "d", xlim, xlab = "", ylab = "", type = "l", add = FALSE, ...)
{
fit <- x
	if(!inherits(fit, "oldlogspline"))
stop("fit is not an oldlogspline object")
if(missing(xlim)) {
u1 <- qoldlogspline(0.01, fit)
u2 <- qoldlogspline(0.99, fit)
u3 <- 1.1 * u1 - 0.1 * u2
u4 <- 1.1 * u2 - 0.1 * u1
}
else {
u3 <- xlim[1]
u4 <- xlim[2]
}
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
if(what == "d" || what == "D")
yy <- doldlogspline(xx, fit)
if(what == "f" || what == "F" || what == "p" || what == "P")
yy <- poldlogspline(xx, fit)
if(what == "s" || what == "S")
yy <- 1 - poldlogspline(xx, fit)
if(what == "h" || what == "H")
yy <- doldlogspline(xx, fit)/(1 - poldlogspline(xx, fit))
if(missing(xlab))
xlab <- ""
if(missing(ylab))
ylab <- ""
if(missing(type))
type <- "l"
if(add==FALSE)plot(xx, yy, xlab = xlab, ylab = ylab, type = type, ...)
else lines(xx,yy, type = type, ...)
}
print.oldlogspline <- function(x,...)
{
summary.oldlogspline(x)
}
summary.oldlogspline <- function(object,...)
{
	if(!inherits(object, "oldlogspline"))
stop("fit is not an oldlogspline object")
fit <- object
if(fit$delete==FALSE)stop(paste("summary.oldlogspline can only provide",
"information if delete in oldlogspline is TRUE"))
ul <- fit$penalty
um <- fit$sample
ll <- fit$logl
kk <- (1:length(ll))
kk <- kk[ll != 0] + 2
ll <- ll[ll != 0]
error<-FALSE
rr <- ll[1:(length(ll)-1)]-ll[2:length(ll)]
if(length(ll)>1 && max(rr)>0)error<-TRUE
bb <- -2 * ll + ul * kk
cc1 <- bb
cc2 <- bb
cc2[1] <- Inf
cc1[length(bb)] <- 0
if(length(bb) > 1) {
for(i in 1:(length(bb) - 1)) {
cc1[i] <- max((ll[(i + 1):(length(bb))] - ll[i])/(
kk[(i + 1):(length(bb))] - kk[i]))
cc2[i + 1] <- min((ll[1:i] - ll[i + 1])/(kk[1:i] - kk[i + 1]))
}
}
c3 <- cc2 - cc1
cc1[c3 < 0] <- NA
cc2[c3 < 0] <- NA
uu <- cbind(kk, ll, bb, 2 * cc1, 2 * cc2)
ww <- rep("", length(bb))
if(error){
cat("Warning - imprecision in loglikelihood (possibly due to heavy tails)\n")
cat("the output of summary.oldlogspline might not be correct\n")
}
dimnames(uu) <- list(ww, c("knots", "loglik", "AIC", "minimum penalty",
"maximum penalty"))
print(round(uu, 2))
cat(paste("the present optimal number of knots is ", kk[bb== min(bb)],"\n"))
if(ul == log(um))
cat(paste("penalty(AIC) was the default: BIC=log(samplesize): log(",
um, ")=", round(ul, 2),"\n"))
else
cat(paste("penalty(AIC) was ", round(ul, 2),", the default (BIC) ",
"would have been", round(log(um), 2),"\n"))
if(min(kk) > 3 && fit$delete==TRUE){
cat(paste( "models with fewer than", kk[1],"knots ",
"can be fitted, but they are not optimal for\n"))
cat(paste("the present choice of penalty - choose penalty in",
"oldlogspline larger\nto see these fits\n"))
}
if(min(kk) > 3 && fit$delete==3)
cat(paste("models with fewer than", kk[1],"knots ",
"were not fitted because of convergence problems\n"))
invisible()
}
oldlogspline <- function(uncensored, right, left, interval, lbound, ubound,
nknots, knots, penalty, delete = TRUE)
{
nsample <- rep(0, 6)
if(!missing(uncensored))uncensored <- unstrip(uncensored)
if(!missing(right))right <- unstrip(right)
if(!missing(left))left <- unstrip(left)
if(!missing(interval))interval <- unstrip(interval)
if(!missing(knots))knots <- unstrip(knots)
if(!missing(interval)) {
if(length(interval[1, ]) != 2)
stop("interval must have two columns")
if(min(abs(interval[, 1] - interval[, 2])) < 0) stop(
"not all lower bounds smaller than upper bounds")
nsample[3] <- length(interval)/2
nsample[1] <- length(interval)/2
if(!missing(lbound))
interval[interval[, 1] < lbound, 1] <- lbound
if(!missing(ubound))
interval[interval[, 2] > ubound, 2] <- ubound
sample <- as.vector(t(interval))
ror <- order(interval[,1],interval[,2])
if(nsample[3]>1){
ro1 <- interval[ror[(1:(nsample[3]-1))],1]==interval[ror[2:nsample[3]],1]
ro2 <- interval[ror[(1:(nsample[3]-1))],2]==interval[ror[2:nsample[3]],2]
nsample[6] <- nsample[3]-sum(ro1+ro2==2)
}
else nsample[6] <- 1
}
if(!missing(uncensored)) {
uncensored2 <- uncensored[!is.na(uncensored)]
u2 <- length(uncensored) - length(uncensored2)
if(u2 > 0)
print(paste("***", u2, " NAs ignored in uncensored"))
uncensored <- uncensored2
if(nsample[1] > 0)
sample <- c(uncensored, sample)
if(nsample[1] == 0)
sample <- uncensored
nsample[1] <- length(uncensored) + nsample[1]
nsample[2] <- length(uncensored)
uncensored <- sort(uncensored)
if(nsample[2]>1)
nsample[6] <- sum(uncensored[2:nsample[2]] !=
uncensored[1:(nsample[2]-1)]) + 1 + nsample[6]
else
nsample[6] <- nsample[6]+1
}
if(nsample[1] == 0) stop("you either need uncensored or interval censored data")
if(!missing(right)) {
if(nsample[1] > 0)
sample <- c(sample, right)
if(nsample[1] == 0)
sample <- right
nsample[1] <- length(right) + nsample[1]
nsample[4] <- length(right)
right <- sort(right)
if(nsample[4]>1){
nsample[6] <- sum(right[2:nsample[4]]!=right[1:(nsample[4]-1)])+
1 + nsample[6]
}
else nsample[6] <- nsample[6]+1
}
if(!missing(left)) {
if(nsample[1] > 0)
sample <- c(sample, left)
if(nsample[1] == 0)
sample <- left
nsample[1] <- length(left) + nsample[1]
nsample[5] <- length(left)
left <- sort(left)
if(nsample[5]>1){
nsample[6] <- sum(left[2:nsample[5]]!=left[1:(nsample[5]-1)])+
1 + nsample[6]
}
else nsample[6] <- nsample[6]+1
}
if(missing(penalty)) penalty <- log(nsample[1])
n1 <- 4 * nsample[1]^0.2 + 1
if(!missing(nknots))
n1 <- nknots + 1
if(!missing(knots)) n1 <- length(knots) + 1
if(!missing(knots)) {
nknots <- length(knots)
knots <- sort(knots)
iautoknot <- 0
if(knots[1] > min(sample))
stop("first knot must be smaller than smallest sample")
if(knots[nknots] < max(sample))
stop("last knot should be larger than largest sample")
}
else {
if(missing(nknots))
nknots <- 0
knots <- vector(mode = "double", length = max(nknots, 50))
iautoknot <- 1
}
xbound <- c(1, 0, 0, 0, 0)
if(!missing(lbound)) {
xbound[2] <- 1
xbound[3] <- lbound
if(lbound > min(sample))
stop("lbound should be smaller than smallest sample")
}
if(!missing(ubound)) {
xbound[4] <- 1
xbound[5] <- ubound
if(ubound < max(sample))
stop("ubound should be larger than largest sample")
}
SorC <- vector(mode = "integer", length = 35)
SorC[1] <- 1
SorC[17] <- 0
nsample[6] <- nsample[6]-1
if(length(table(sample))<3)stop("Not enough unique values")
z <- .C("logcensor",
as.integer(delete),
as.integer(iautoknot),
as.double(sample),
as.integer(nsample),
bd = as.double(xbound),
SorC = as.integer(SorC),
nk = as.integer(nknots),
kt = as.double(knots),
cf = as.double(c(knots, 0, 0)),
as.double(penalty),
as.double(sample),
as.double(sample),
logl = as.double(rep(0, n1 + 1)),
PACKAGE = "polspline")
SorC <- z$SorC
if(SorC[1] == -1 && SorC[28] == 0 && nsample[1]!=nsample[2] && nsample[2]>15){
SorC <- vector(mode = "integer", length = 35)
SorC[1] <- 1
SorC[17] <- 1
z <- .C("logcensor",
as.integer(delete),
as.integer(iautoknot),
as.double(sample),
as.integer(nsample),
bd = as.double(xbound),
SorC = as.integer(SorC),
nk = as.integer(nknots),
kt = as.double(knots),
cf = as.double(c(knots, 0, 0)),
as.double(penalty),
as.double(sample),
as.double(sample),
logl = as.double(rep(0, n1 + 1)),
PACKAGE = "polspline")
}
bound <- c(z$bd[2], z$bd[3], z$bd[4], z$bd[5])
SorC <- z$SorC
if(abs(SorC[1]) > 2) {
for(i in 3:abs(SorC[1]))
cat(paste("===> warning: knot ", SorC[i - 1],
" removed - double knot\n"))
if(SorC[1] < 0)
SorC[1] <- -1
if(SorC[1] == 23)
SorC[1] <- -3
}
if(abs(SorC[1]) > 3) {
cat("* several double knots suggests that your data is *\n")
cat("* strongly rounded, attention might be required *\n")
SorC[1] <- 1
}
if(SorC[1] == -3)
stop("* too many double knots")
if(SorC[1] == -1 && SorC[28] == 0)
stop("* no convergence")
if(SorC[28] > 0)
cat(paste("* convergence problems, smallest number of knots",
" tried is ", SorC[28] + 1," *\n"))
if(SorC[1] == 2)
stop("* sample is too small")
if(SorC[1] == -2)
stop(paste("* too many knots, at most ", SorC[2],
"knots possible"))
if(SorC[22] == 1) {
cat("possible discontinuity at lower end\n")
cat(paste("consider rerunning with lbound=", z$kt[1],
"\n"))
}
if(SorC[22] == 3) {
cat("possible infinite density at lower end\n")
cat("running program with fewer knots\n")
}
if(SorC[21] == 1)
cat("running with maximum degrees of freedom\n")
if(SorC[25] >0)
cat("* problems are possibly due to a very heavy right tail *\n")
if(SorC[24] >0)
cat("* problems are possibly due to a very heavy left tail *\n")
if(SorC[23] == 3) {
cat("possible infinite density at upper end\n")
cat("running program with fewer knots\n")
}
if(SorC[23] == 1) {
cat("possible discontinuity at upper end\n")
cat(paste("consider rerunning with ubound=", z$kt[z$nk],
"\n"))
}
if(delete && SorC[28]>0)delete<-3
coef <- z$cf[1:(z$nk + 2)]
uu <- 3:z$nk
if(delete == FALSE)uu <- 1
fit <- list(coef = coef, knots = z$kt[1:z$nk], bound = bound, logl = z$logl[
uu], penalty = penalty, sample = nsample[1], delete = delete)
class(fit) <- "oldlogspline"
fit
}
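# Example usage sketch for oldlogspline() (requires the compiled polspline routines):
library(polspline)
set.seed(3)
xg   <- rgamma(400, shape = 2, rate = 1)
fitO <- oldlogspline(uncensored = xg, lbound = 0)
summary(fitO)
doldlogspline(1, fitO)        # density estimate at x = 1
plot(fitO, what = "d")        # fitted density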
lspec <- function(data, period, penalty, minmass, knots, maxknots, atoms,
maxatoms, maxdim, odd = FALSE, updown = 3,silent=TRUE)
{
call <- match.call()
if(!missing(data))data <- unstrip(data)
if(!missing(period))period <- unstrip(period)
if(!missing(knots))knots <- unstrip(knots)
if(!missing(atoms))atoms <- unstrip(atoms)
if(missing(period) && missing(data))
stop(" either data or period should be specified ")
if(!missing(period) && !missing(data))
stop(" only one of data or period should be specified ")
if(!missing(period))
ny <- 2 * length(period)
if(missing(period)) {
ny <- length(data)
period <- Mod(fft(data))^2/(ny * 2 * pi)
period <- period[1:floor((length(period) + 2)/2)]
odd <- TRUE
if(floor(ny/2) == ny/2)
odd <- FALSE
}
else{
if(odd) ny <- ny + 1
period <- c(1,period)
}
if(min(period) <= 0)
stop(" all period elements should be larger than 0 ")
if(length(period) < 10)
stop("too few observations")
z <- .C("tspspsx",
z = as.integer(rep(-1, 12)),
PACKAGE = "polspline")
lgth <- z$z[1]
nx <- length(period)
if(missing(penalty))
penalty <- log(nx - 1)
dimatt <- 0
ktsatt <- 1
spkatt <- 1
nknots <- 0
natoms <- 0
if(!missing(maxknots)){
maxknots <- max(1, maxknots)
}
else {
maxknots <- -1
ktsatt <- 0
}
if(!missing(maxatoms)){
maxatoms <- max(0, maxatoms)
}
else {
maxatoms <- -1
spkatt <- 0
}
if(missing(minmass)){
if(!missing(data))
minmass <- var(data)*(-log(1-0.95^(1/nx))-1)/ny
else{
minmass <- mean(period[2:length(period)])*2*pi
minmass <- minmass*(-log(1-0.95^(1/nx))-1)/ny
}
}
minmass <- minmass * ny /(2*pi)
if(!missing(knots)) {
nknots <- length(knots)
if(nknots>1){
uu <- knots[2:nknots] - knots[1:(nknots - 1)]
if(min(uu) <= 0)
stop("knots not in sequence")
}
if(knots[1] < 0)
stop("knot 1 too small")
if(knots[nknots] > pi)
stop("last knot too large")
knots <- c(knots, rep(0, lgth - nknots))
if(ktsatt * maxknots < ktsatt * nknots)
stop("more knots than maxknots")
}
else{
knots <- rep(0, lgth)
}
if(!missing(atoms)) {
natoms <- length(atoms)
atoms <- round((atoms * ny)/(2 * pi))
if(natoms>1){
	uu <- atoms[2:natoms] - atoms[1:(natoms - 1)]
if(min(uu) <= 0)
stop("atoms not in sequence or too close")
}
if(atoms[1] < 1)
stop("atom 1 too small")
if(atoms[natoms] > ny/2)
stop("last atom too large")
atoms <- c(atoms, rep(0, lgth - natoms))
if(spkatt * maxatoms < spkatt * natoms)
stop("more atoms than maxatoms")
}
else{
atoms <- rep(0, lgth)
}
u1 <- max(nknots, 1, maxknots) + max(natoms, maxatoms)
if(u1 > lgth - 5)
stop("too many dimensions")
if(!missing(maxdim)) {
dimatt <- 1
if(u1 > maxdim)
stop("maxdim too small for other specifications")
if(maxdim > lgth - 5)
stop(paste("maxdim can be at most", lgth - 5))
}
else{
maxdim <- max(4 * nx^0.2, 15, u1)
}
dims <- c(nx, maxdim, dimatt, maxknots, ktsatt, nknots, maxatoms,
spkatt, natoms, odd, updown, 1*silent, 0)
z <- .C("tspsps",
dims = as.integer(dims),
as.double(period),
knots = as.double(c(knots,rep(0,nx))),
atoms = as.integer(c(atoms,rep(0,nx))),
as.double(penalty),
logl = as.double(rep(0, lgth)),
theta = as.double(rep(0, lgth)),
ad = as.integer(rep(0, lgth)),
minmass = as.double(minmass),
PACKAGE = "polspline")
dims <- z$dims
minmass <- minmass /( ny /(2*pi))
if(dims[12] == 1)
stop(paste("numerical problems -\n",
"probably too many knots or knots too close together",
" or a very sharp atom"))
if(dims[12] == 2)
stop("no convergence")
z$logl[abs(z$logl) < 1e-100] <- 0
z$logl[z$ad == 2] <- 0
mass <- ((z$theta[(dims[6] + 4) + (1:dims[9])]) * 2 * pi)/ny
atoms <- (z$atoms[1:dims[9]] * 2 * pi)/ny
if(dims[9] == 0) {
mass <- 0
atoms <- 0
}
thetak <- z$theta[5:(dims[6] + 4)]
knots <- z$knots[1:dims[6]]
if(dims[6] == 0) {
thetak <- 0
knots <- 0
}
logl <- z$logl[dims[6]+dims[9]]
fit <- list(call = call, thetap = z$theta[1:4], nknots = dims[6], knots =
knots, thetak = thetak, natoms = dims[9], atoms = atoms,
mass = mass, penalty = penalty, minmass = minmass,
sample = ny, logl = logl, updown = dims[11])
class(fit) <- "lspec"
fit
}
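# Example usage sketch for lspec() (requires the compiled polspline routines):
library(polspline)
set.seed(5)
tt   <- 1:512
yts  <- as.numeric(arima.sim(list(ar = 0.6), n = 512)) + 2 * cos(2 * pi * 0.1 * tt)
fitL <- lspec(yts)
summary(fitL)
plot(fitL)                    # log-spectrum plus any fitted spectral lines
clspec(0:10, fitL)            # implied autocovariances at lags 0..10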
clspec <- function(lag, fit, cov = TRUE, mm)
{
	if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
if(!missing(lag))lag <- unstrip(lag)
llag <- abs(lag)
if(max(abs(round(llag) - llag)) > 0.01)
stop("some lags are not integer")
if(missing(mm)) {
mm <- max(c(1024, fit$sample, max(llag + 1)))
mm <- 2^(1 + floor(log(mm - 0.1)/log(2)))
}
if(mm < max(llag + 1))
stop("mm too small")
rr <- dlspec(((0:mm) * pi)/mm, fit)$d
rr <- c(rr, rr[mm:2])
rr <- (Re(fft(rr)) * pi)/mm
rr <- rr[llag + 1]
if(fit$natoms>0){
for(i in 1:fit$natoms) {
rr <- rr + 2 * cos(lag * fit$atoms[i]) * fit$mass[i]
}
}
if(cov == FALSE)
rr <- rr/rr[1]
rr
}
dlspec <- function(freq, fit)
{
	if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
if(!missing(freq))freq <- unstrip(freq)
freq <- freq - floor(freq/(2 * pi)) * 2 * pi
freq[freq > pi] <- 2 * pi - freq[freq > pi]
y <- rep(fit$thetap[1], length(freq)) + freq * fit$thetap[2]
y <- y + freq^2 * fit$thetap[3] + freq^3 * fit$thetap[4]
if(fit$nknots > 0) {
for(i in 1:fit$nknots) {
z <- freq - fit$knots[i]
y[z > 0] <- y[z > 0] + z[z > 0]^3 * fit$thetak[i]
}
}
d1 <- exp(y)
modfreq <- round((freq * fit$sample)/(2 * pi))
modmatch <- round((fit$atoms * fit$sample)/(2 * pi))
uu <- rep(0, round(fit$sample/2) + 2)
uu[modmatch] <- fit$mass
uu <- c(NA, uu)
l1 <- uu[modfreq+1]
modfreq <- ((2 * pi)/fit$sample) * modfreq
list(d = d1, modfreq = modfreq, m = l1)
}
plspec <- function(freq, fit, mm)
{
	if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
if(!missing(freq))freq <- unstrip(freq)
if(missing(mm)) {
mm <- max(c(4096, fit$sample))
mm <- 2^floor(log(mm - 0.1)/log(2))
}
ff <- freq[freq >= - pi]
ff <- ff[ff <= pi]
gg <- c(abs(ff), pi)
uu <- (c((1:mm) - 0.5) * pi)/mm
tt <- dlspec(uu, fit)$d
ss <- cumsum(tt)/mm
ss <- (c(0, 0, ss, ss[mm]) * pi)
tt <- (gg * mm)/pi
vv <- floor(tt)
tt <- tt - vv
tt <- (1 - tt) * ss[vv + 2] + tt * ss[vv + 3]
if(fit$natoms > 0) {
for(i in 1:fit$natoms)
tt[gg >= fit$atoms[i]] <- tt[gg >= fit$atoms[i]] + fit$
mass[i]
}
if(length(gg) < length(freq) + 1 || is.na(gg[1]))
warning("plspec is only valid for frequencies between -pi and pi")
ss <- rep(NA, length(freq))
ss[abs(freq) <= pi] <- tt[ - length(tt)] + tt[length(tt)]
ss[freq < 0] <- 2 * tt[length(tt)] - ss[freq < 0]
if(fit$natoms > 0) {
for(i in 1:fit$natoms)
ss[freq == - fit$atoms[i]] <- ss[freq == - fit$atoms[
i]] + fit$mass[i]
}
ss
}
rlspec <- function(n, fit, mean = 0, cosmodel = FALSE, mm)
{
	if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
if(missing(mm)) {
mm <- max(c(1024, fit$sample, n))
mm <- 2^(1 + floor(log(mm - 0.1)/log(2)))
}
if(mm < max(n/2 + 1))
stop("mm too small")
rr <- (dlspec(((0:mm) * pi)/mm, fit)$d * pi)/(2 * mm)
rr[1] <- rr[1] * 2
rr[mm + 1] <- rr[mm + 1] * 2
rr <- sqrt(rr)
uu <- rnorm(rr, 0, rr)
uu <- c(uu, uu[mm:2])
vv <- rnorm(rr, 0, rr)
vv <- c(vv, - vv[mm:2])
vv[c(1, (mm + 1))] <- 0
uu <- uu + vv * (1i)
uu <- Re(fft(uu))
uu <- uu[1:n] + mean
if(fit$natoms > 0) {
cc <- runif(1)*2*pi-pi
if(cosmodel) aa <- 2*sqrt(fit$mass)
else
aa <- 2 * rnorm(fit$natoms, 0, sqrt(fit$mass))
aa[fit$atoms == pi] <- 2 * aa[fit$atoms == pi]
for(i in 1:fit$natoms)
uu <- uu + aa[i] * cos((1:n) * fit$atoms[i] + pi * cc)
}
uu
}
plot.lspec <- function(x, what = "b", n, add = FALSE, xlim, ylim, xlab, ylab, type, ...)
{
fit <- x
	if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
if(add) {
plim <- (par()$usr)[1:2]
if(!missing(xlim)) {
plim[1] <- max(xlim[1], plim[1])
plim[2] <- min(xlim[2], plim[2])
}
}
else {
plim <- c(0, pi)
if(what =="p"||what=="P"||what=="f"||what=="F")plim[1]<- -pi
if(!missing(xlim)) {
plim[1] <- xlim[1]
plim[2] <- xlim[2]
}
}
if(missing(xlab))
xlab <- ""
if(missing(ylab))
ylab <- ""
if(what == "l" || what == "L") {
if(missing(type))
type <- "h"
if(fit$natoms>0){
x5 <- c(-fit$atoms,fit$atoms)
tt <- round(plim[2]/(2*pi))+1
vv <- round(plim[1]/(2*pi))-1
x1 <- x5
for(i in vv:tt)
if(i!=0)x1 <- c(x1, x5+i*2*pi)
y1 <- dlspec(x1,fit)$m
y1 <- y1[x1 <= plim[2]]
x1 <- x1[x1 <= plim[2]]
y1 <- y1[x1 > plim[1]]
x1 <- x1[x1 > plim[1]]
x1 <- c(x1[1], x1)
y1 <- c(0, y1)
if(!add)
plot(x1, y1, xlim = plim, xlab = xlab, ylab = ylab,
type = type, ...)
else lines(x1, y1, type = type, ...)
abline(h = 0)}
else{
if(add) abline(h=0)
else plot(plim,c(0,0),xlab = xlab, ylab = ylab,type="l",...)
}
}
if(what == "d" || what == "D" || what == "b" || what == "B") {
if(missing(type))
type <- "l"
if(missing(n))
n <- max(100, fit$sample + 1)
xx <- (0:(n - 1))/(n - 1) * (plim[2] - plim[1]) + plim[1]
yy <- dlspec(xx, fit)$d
if(fit$natoms == 0)
what <- "d"
if(missing(ylim))ylim<-range(yy)
}
if(what == "b" || what == "B") {
type <- "l"
x5 <- c(-fit$atoms,fit$atoms)
tt <- round(plim[2]/(2*pi))+1
vv <- round(plim[1]/(2*pi))-1
x3 <- x5
for(i in vv:tt)
if(i!=0)x3 <- c(x3, x5+i*2*pi)
y3 <- dlspec(x3, fit)
y3 <- max(yy)*1.1
if(fit$nknots==1)y3 <- 2*y3
if(!missing(ylim))y3 <- ylim[2]
x2 <- x3
y2 <- dlspec(x2, fit)$d
x4 <- x3
y4 <- y2
for(i in 1:length(x3)) {
yy <- c(yy[xx < x2[i]], y2[i], y3, y4[i], yy[xx > x4[
i]])
xx <- c(xx[xx < x2[i]], x2[i], x3[i], x4[i], xx[xx > x4[
i]])
}
if(missing(ylim))ylim<-range(yy)
yy <- yy[xx >= plim[1]]
xx <- xx[xx >= plim[1]]
yy <- yy[xx <= plim[2]]
xx <- xx[xx <= plim[2]]
y2 <- y2[x2 >= plim[1]]
x2 <- x2[x2 >= plim[1]]
y2 <- y2[x2 <= plim[2]]
x2 <- x2[x2 <= plim[2]]
}
if(what == "f" || what == "F" || what == "p" || what == "P") {
if(!missing(xlim)){
if(xlim[1]< -pi || xlim[2]>pi)
                stop("for this plot the range cannot stretch beyond (-pi,pi)")
}
if(missing(xlim)){
plim[1] <- max(plim[1],-pi)
plim[2] <- min(plim[2],pi)
}
if(missing(type))
type <- "l"
if(missing(n))
n <- max(100, fit$sample + 1)
xx <- (0:(n - 1))/(n - 1) * (plim[2] - plim[1]) + plim[1]
yy <- plspec(xx, fit)
if(missing(ylim))ylim<-range(yy)
if(fit$natoms > 0) {
x2 <- fit$atoms
y3 <- plspec(x2, fit)
y2 <- y3 - fit$mass
for(i in 1:fit$natoms) {
yy <- c(yy[xx < x2[i]], y2[i], y3[i], yy[xx >
x2[i]])
xx <- c(xx[xx < x2[i]], x2[i], x2[i], xx[xx >
x2[i]])
}
x2 <- -fit$atoms
y3 <- plspec(x2, fit)
y2 <- y3 - fit$mass
for(i in 1:fit$natoms) {
yy <- c(yy[xx < x2[i]], y2[i], y3[i], yy[xx >
x2[i]])
xx <- c(xx[xx < x2[i]], x2[i], x2[i], xx[xx >
x2[i]])
}
yy <- yy[xx >= plim[1]]
xx <- xx[xx >= plim[1]]
yy <- yy[xx <= plim[2]]
xx <- xx[xx <= plim[2]]
}
}
if(what != "l" && what != "L") {
if(!add)
plot(xx, yy, xlim = plim, xlab = xlab, ylab = ylab,
type = type, ylim = ylim, ...)
else lines(xx, yy, type = type, ...)
if(what =="b" || what=="B")points(x2,y2)
}
invisible()
}
print.lspec <- function(x,...)
{
summary.lspec(x)
}
summary.lspec <- function(object,...)
{
fit <- object
        if(!inherits(fit, "lspec"))
stop("fit is not an lspec object")
aa <- " Logspline Spectral Estimation\n"
aa <- paste(aa,"=============================\n")
aa <- paste(aa,"The fit was obtained by the command:\n ")
cat(aa)
print(fit$call)
aic <- round(-2*fit$logl+fit$penalty*(fit$nknots+fit$natoms),2)
logl <- round(fit$logl,2)
ns <- fit$natoms
nk <- fit$nknots
nd <- ns + nk
if(ns==0 && nk==1)
aa <- paste(" Only 1 basis function, a constant, was fitted.\n")
if(ns==0 && nk>1)
                aa <- paste(" A spline with",nk,"knots was fitted;",
"there were no lines in the model.\n")
if(ns>0 && nk>1)
                aa <- paste(" A spline with",nk,"knots was fitted;",
"there were also",ns,"lines in the model.\n")
if(ns>0 && nk==1)
                aa <- paste(" There were",nd,"basis functions, a constant and",
ns,"lines, in the model.\n")
aa <- paste(aa,"The log-likelihood of the model was",logl,
"which corresponds to an AIC\n value of",aic,".\n\n")
        aa <- paste(aa,"The program went through",abs(fit$updown))
if(fit$updown>0)
aa <-paste(aa,"updown cycles, and reached a stable solution.\n")
if(fit$updown<0)
aa <-paste(aa,"updown cycles, and did not reach a stable solution.\n")
p1 <- round(fit$penalty,2)
n1 <- round(fit$minmass,4)
nn <- floor(fit$sample/2)
p2 <- round(log(nn),2)
uu <- plspec(pi,fit)
n2 <- round(uu*(-log(1-0.95^(1/nn))-1)/fit$sample,4)
p3 <- (p1==p2)
p4 <- TRUE
if(n1/n2 > 1.2 || n2/n1 > 1.2) p4 <- FALSE
if(p3==TRUE && p4==TRUE)aa<-paste(aa,
"Both penalty (AIC) and minmass were the default values. For penalty this\n",
"was log(n)=log(",nn,")=",p1," (as in BIC) and for minmass this was",n1,".\n")
        if(p3==TRUE && p4==FALSE)aa<-paste(aa,
        "Penalty (AIC) had the default value",
"log(n)=log(",nn,")=",p1," (as in BIC).\n Minmass was",n1,
", the default would have been",n2,".\n")
if(p3==FALSE && p4==FALSE)aa<-paste(aa,
"Penalty was",p1,", the default would have been",
"log(n)=log(",nn,")=",p2,"\n(as in BIC). Minmass was",n1,
", the default would have been",n2,".\n")
if(p3==FALSE && p4==TRUE)aa<-paste(aa,
"Penalty was",p1,", the default would have been, log(n)=log(",nn,")=",
p2,"\n (as in BIC). Minmass was the default",n1,".\n\n")
if(nk>1){aa<-paste(aa,"The locations of the knots were:")
for(i in 1:nk)aa<-paste(aa,round(fit$knots[i],3))
aa<-paste(aa,"\n")
}
if(ns>0){
aa<-paste(aa,"The locations and the mass in each line were:\n")
bb <- matrix(0,ncol=4,nrow=ns)
for(i in 1:ns){
bb[i,1]<-round(fit$atoms[i],3)
bb[i,2]<-2*pi/(fit$atoms[i])
bb[i,2]<-round(bb[i,2],2)
bb[i,3]<-round(fit$mass[i],5)
bb[i,4]<- round(100*fit$mass[i]/uu,2)
}
dimnames(bb) <- list(rep("",ns),c("angular frequency","period","mass",
"% of total mass"))
}
cat(aa)
if(ns>0)print(bb)
invisible()
}
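# Illustrative sketch for the lspec plot/print/summary methods above, guarded by
# if (FALSE); again it assumes lspec() (defined earlier in this file) takes the
# raw series as its first argument.
if (FALSE) {
    fit <- lspec(arima.sim(list(ar = c(0.5, -0.3)), n = 1024))
    summary(fit)                   # knots, spectral lines, penalty diagnostics
    plot(fit, what = "b")          # density plus spectral lines (the default)
    plot(fit, what = "p")          # spectral distribution function
    plspec(c(-pi/2, pi/2), fit)    # distribution function at chosen frequencies
}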
polymars <- function(responses, predictors, maxsize, gcv = 4., additive = FALSE, startmodel,
weights, no.interact, knots, knot.space = 3, ts.resp, ts.pred,
ts.weights, classify, factors, tolerance = 1e-06, verbose = FALSE)
{
call <- match.call()
ism0 <- missing(classify)
ism1 <- missing(ts.resp)
ism2 <- missing(maxsize)
ism3 <- missing(ts.pred)
ism4 <- missing(ts.weights)
ism5 <- missing(knots)
ism6 <- missing(factors)
ism7 <- missing(startmodel)
ism8 <- missing(weights)
ism9 <- missing(no.interact)
if(!missing(responses))
responses <- unstrip(responses)
if(!missing(predictors))
predictors <- unstrip(predictors)
if(!missing(weights))
weights <- unstrip(weights)
if(!missing(no.interact))
no.interact <- unstrip(no.interact)
if(!missing(knots))
knots <- unstrip(knots)
if(!missing(ts.resp))
ts.resp <- unstrip(ts.resp)
if(!missing(ts.pred))
ts.pred <- unstrip(ts.pred)
if(!missing(ts.weights))
ts.weights <- unstrip(ts.weights)
if(!missing(factors))
factors <- unstrip(factors)
responses <- as.matrix(responses)
predictors <- data.matrix(predictors)
nresponses <- ncol(responses)
npredictors <- ncol(predictors)
ncases <- nrow(predictors)
if(ism0)
classify <- FALSE
if(mode(responses) == "character" || classify == TRUE) {
if(ncol(responses) > 1) {
stop("When using character responses or classify = TRUE only 1 response per case is allowed\n"
)
}
char.responses <- responses
int.responses <- as.integer(as.factor(responses))
nresponses <- length(unique(responses))
responses <- matrix(ncol = nresponses, nrow = ncases, data =
int.responses)
for(i in 1:nresponses) {
responses[, i] <- (responses[, i] == (unique(
int.responses)[i]))
}
conversion <- matrix(ncol = 2, nrow = nresponses, c(unique(
char.responses), unique(int.responses)))
classify <- TRUE
if(!ism1) {
char.responses.test <- ts.resp
ts.resp <- matrix(ncol = nresponses, nrow = length(
char.responses.test), data = 0)
for(i in 1:nresponses) {
ts.resp[, i] <- as.integer(char.responses.test ==
conversion[i, 1])
}
}
}
else {
conversion <- FALSE
classify <- FALSE
}
if(ism2) maxsize <- ceiling(min(6 * (ncases^(1/3)), ncases/4, 100))
if(!ism1 || !ism3) {
if(ism1 || ism3) {
stop("Both ts.resp (testsets responses) and ts.pred (testset predictors) should be specified\n"
)
}
if(!is.matrix(ts.resp))
ts.resp <- as.matrix(ts.resp)
if(!is.matrix(ts.pred))
ts.pred <- as.matrix(ts.pred)
if(ncol(ts.resp) != nresponses) {
stop("Testset should have the same number of responses as the training set\n "
)
}
if(ncol(ts.pred) != npredictors) {
stop("Testset should have the same number of predictors as the training set\n "
)
}
if(nrow(ts.resp) != nrow(ts.pred)) {
stop("Testset ts.pred and ts.resp should have the same number of cases (rows)"
)
}
testsetmatrix <- cbind(ts.resp, ts.pred)
testsetcases <- nrow(testsetmatrix)
testset <- TRUE
if(!ism4) {
if(length(ts.weights) != testsetcases) {
stop("length of testset weights misspecified\n"
)
}
testset.weighted <- TRUE
testsetmatrix <- cbind(ts.resp * ts.weights, ts.pred)
}
else {
testset.weighted <- FALSE
ts.weights <- 0
}
}
else {
testsetmatrix <- 0
testsetcases <- 0
testset <- FALSE
testset.weighted <- FALSE
ts.weights <- 0
}
mesh.specified <- FALSE
mesh.vector <- 0
if(nrow(responses) != nrow(predictors)) {
                stop("The number of rows (cases) of the response and predictor matrices should be the same"
)
}
if(!ism5 && !is.matrix(knots) && length(knots) != npredictors && length(
knots) != 1) {
stop("Length of vector of `knots per predictor' should be equal to number of predictors or 1\n"
)
}
if(!ism5) {
if(!is.matrix(knots)) {
if(length(knots) == 1) {
knots <- rep(knots, npredictors)
if(!ism6) {
for(i in 1:length(factors)) {
if(!is.vector(factors)) {
                                                                stop("`factors' should be a vector whose elements are indices of predictors that are factors\n"
)
}
knots[factors[i]] <- -1
}
}
}
}
else {
mesh <- knots
mesh.vector <- vector(length = ncol(mesh) * nrow(mesh),
mode = "double")
knots <- vector(length = npredictors, mode = "integer")
k <- 0
for(i in 1:npredictors) {
knots[i] <- length(unique(mesh[is.na(mesh[
, i]) == FALSE, i]))
for(j in 1:knots[i]) {
k <- k + 1
mesh.vector[k] <- unique(mesh[!is.na(
mesh[, i]), i])[j]
}
}
if(!ism6) {
for(i in 1:length(factors)) {
if(!is.vector(factors)) {
                                                                stop("`factors' should be a vector whose elements are indices of predictors that are factors\n"
)
}
knots[factors[i]] <- -1
}
}
mesh.specified <- TRUE
}
}
if(ism5) {
knots <- rep(min(20, round(ncases/4)), npredictors)
if(!ism6) {
for(i in 1:length(factors)) {
if(!is.vector(factors)) {
                                        stop("`factors' should be a vector whose elements are indices of predictors that are factors\n"
)
}
knots[factors[i]] <- -1
}
}
}
startmodelsize <- 1
no.remove <- 0
no.remove.size <- 0
if(!ism7) {
if(is.vector(startmodel))
startmodel <- t(as.matrix(startmodel))
                v1 <- inherits(startmodel, "polymars")
if(!(is.matrix(startmodel) || v1) || (is.matrix(startmodel) &&
(ncol(startmodel) != 4 && (ncol(startmodel) != 5)))) {
stop(paste(
"startmodel should be a matrix with each row corresponding to",
"a function with number of columns = 4 (or 5 for extra boolean\n",
"column specifying predictors which cannot be removed)",
"or startmodel should be a polymars object\n"))
}
if(is.matrix(startmodel)) {
if(ncol(startmodel) == 5) {
no.remove <- vector(length = (nrow(startmodel))
)
j <- 0
for(i in 1:nrow(startmodel)) {
if(startmodel[i, 5] == TRUE) {
j <- j + 1
no.remove[j] <- i
}
}
no.remove.size <- j
}
startknots <- as.vector(t(cbind(startmodel[, 2],
startmodel[, 4])))
startknots[is.na(startknots)] <- 0.
startmodel <- matrix(startmodel[, 1:4], ncol = 4)
startmodel[!is.na(startmodel[, 2]), 2] <- 1
startmodel[is.na(startmodel[, 2]), 2] <- 0
startmodel[is.na(startmodel[, 3]), 3] <- 0
startmodel[startmodel[, 3] == 0, 4] <- 0
for(i in 1:nrow(startmodel)) {
if((!is.na(startmodel[i, 4])) && startmodel[
i, 3] != 0)
startmodel[i, 4] <- 1
}
startmodel[is.na(startmodel[, 4]), 4] <- 0
startmodelsize <- nrow(startmodel) + 1
}
else {
startmodelsize <- startmodel$model.size
startmodel <- startmodel$model[-1, ]
startknots1 <- startmodel$knot1
startknots2 <- startmodel$knot2
L1 <- FALSE
if(!is.null(startmodel$level1)) {
L1 <- TRUE
level1 <- startmodel$level1
}
if(L1) {
startmodel$knot1[!is.na(level1)] <- 1
startknots1[!is.na(level1)] <- level1[!is.na(
level1)]
}
startknots <- cbind(startknots1, startknots2)
startknots <- as.vector(t(startknots))
startknots[is.na(startknots)] <- 0.
startmodel <- cbind(startmodel[, "pred1"], startmodel[
, "knot1"], startmodel[, "pred2"], startmodel[
, "knot2"])
startmodel[, 2] <- !is.na(startmodel[, 2])
startmodel[, 4] <- !is.na(startmodel[, 4])
}
}
else {
startmodel <- 0
startknots <- 0
}
if(!ism8) {
if(length(weights) != ncases) {
                        stop("Number of weights not equal to the number of cases\n"
)
}
weighted <- TRUE
responses <- responses * weights
}
else {
weighted <- FALSE
weights <- 0
}
datamatrix <- cbind(responses, predictors)
if(!ism9) {
if(!is.matrix(no.interact) || ncol(no.interact) != 2) {
                        stop("list of disallowed interactions has been misspecified; it must be a matrix with two columns (one row per pair)"
)
}
no.interact <- t(no.interact)
no.interact.size <- ncol(no.interact)
}
else {
no.interact.size <- 0
no.interact <- 0
}
if(startmodelsize > maxsize) {
stop("start model should not be of greater size than the max model size\n"
)
}
if(startmodelsize != 1) {
for(i in 1:(startmodelsize - 1)) {
if(startmodel[i, 1] == 0) {
stop("first column of startmodel cannot be zero\n"
)
}
if(startmodel[i, 2] == 1) {
if(startknots[(i * 2) - 1] < min(predictors[
, startmodel[i, 1]]) || startknots[
(i * 2) - 1] > max(predictors[,
startmodel[i, 1]])) {
stop("Knot out of range of its predictor \n"
)
}
}
if(startmodel[i, 4] == 1) {
if(startknots[(i * 2)] <= min(predictors[,
startmodel[i, 3]]) || startknots[(
i * 2)] >= max(predictors[, startmodel[
i, 3]])) {
stop("Knot out of range of its predictor\n"
)
}
}
}
if(max(startmodel[, c(1, 3)] > npredictors)) {
stop("Initial model misspecified on input\n")
}
}
startmodel <- t(startmodel)
resultmodelsize <- 0
end.state <- 0
step.count <- 0
z <- .C("polymarsF",
as.integer(npredictors),
as.integer(nresponses),
as.integer(ncases),
as.double(datamatrix),
as.integer(knots),
as.double(mesh.vector),
as.integer(mesh.specified),
as.integer(maxsize),
as.double(gcv),
as.integer(additive),
as.integer(startmodelsize),
start.model = as.integer(startmodel),
start.knots = as.double(startknots),
as.integer(weighted),
as.double(weights),
as.integer(no.interact.size),
as.integer(no.interact),
as.integer(no.remove.size),
as.integer(no.remove),
as.integer(knot.space),
as.integer(testset),
as.double(testsetmatrix),
as.integer(testsetcases),
as.integer(testset.weighted),
as.double(ts.weights),
as.integer(classify),
as.double(tolerance),
as.integer(verbose),
best.model = as.integer(matrix(nrow = maxsize, ncol = 4, data
= rep(0, maxsize * 4))),
coefficients = as.double(matrix(nrow = maxsize, ncol =
nresponses, data = rep(0., maxsize * nresponses))),
steps = as.integer(matrix(nrow = maxsize * 2, ncol = 2,
data = rep(0, maxsize * 4))),
rss.gcv = as.double(matrix(nrow = maxsize * 2, ncol =
nresponses + 1, data = rep(0., maxsize * 2 * (
nresponses + 1)))),
modelsize = as.integer(resultmodelsize),
modelknots = as.double(matrix(nrow = maxsize, ncol = 2, data =
rep(0., maxsize * 2))),
coefficient.se.term = as.double(rep(0., maxsize)),
end.state = as.integer(end.state),
step.count = as.integer(step.count),
PACKAGE = "polspline")
if(z$end.state != 0 && z$end.state != 5) {
switch(z$end.state,
stop("Mis-specification of initial model\n"),
stop("Initial model with non-linear function must contain the corresponding linear function\n"
),
stop("Initial model contains two-predictor functions that require prerequisite functions\n"
))
}
else {
model <- matrix(z$best.model[1:((z$modelsize - 1) * 4)], ncol
= 4, byrow = TRUE)
knot.values <- matrix(z$modelknots[1:((z$modelsize - 1) * 2)],
ncol = 2, byrow = TRUE)
for(i in 1:nrow(model)) {
if(model[i, 2] != 0) {
model[i, 2] <- knot.values[i, 1]
}
else {
model[i, 2] <- NA
}
if(model[i, 4] != 0) {
model[i, 4] <- knot.values[i, 2]
}
else {
model[i, 4] <- NA
}
}
if(length(knots[model[, 1]]) != 0 && min(knots[model[, 1]]) <
0) {
factor1 <- TRUE
levels1 <- rep(NA, z$modelsize - 1)
factor.variables <- unique(model[knots[model[, 1]] <
0, 1])
for(i in 1:length(factor.variables)) {
for(j in 1:length(model[, 1])) {
if(model[j, 1] == factor.variables[
i]) {
levels1[j] <- model[j, 2]
}
}
model[model[, 1] == factor.variables[i], 2] <-
NA
}
levels1 <- c(NA, levels1)
}
else {
factor1 <- FALSE
}
coefs <- matrix(z$coefficients[1:(z$modelsize * nresponses)],
ncol = nresponses)
if(z$modelsize > 1) {
if(factor1 == FALSE) {
model <- rbind(c(0, NA, 0, NA), model)
model <- data.frame(model, coefs)
if(nresponses == 1) {
dimnames(model) <- list(1:z$modelsize,
c("pred1", "knot1", "pred2",
"knot2", "coefs"))
}
else {
dimnames(model) <- list(1:z$modelsize,
c("pred1", "knot1", "pred2",
"knot2", paste("Coefs", 1:
nresponses)))
}
}
if(factor1 == TRUE) {
model[(knots[model[, 1]] < 0), 2] <- NA
model <- rbind(c(0, NA, 0, NA), model)
model <- data.frame(model[, 1:2], levels1,
model[, 3:4], coefs)
if(nresponses == 1) {
dimnames(model) <- list(1:z$modelsize,
c("pred1", "knot1", "level1",
"pred2", "knot2", "coefs"))
}
else {
dimnames(model) <- list(1:z$modelsize,
c("pred1", "knot1", "level1",
"pred2", "knot2", paste("Coefs",
1:nresponses)))
}
}
}
else {
model <- data.frame(0, NA, 0, NA, coefs)
if(nresponses == 1) {
dimnames(model) <- list(1:z$modelsize, c(
"pred1", "knot1", "pred2", "knot2",
"coefs"))
}
else {
dimnames(model) <- list(1:z$modelsize, c(
"pred1", "knot1", "pred2", "knot2",
paste("Coefs", 1:nresponses)))
}
}
ranges.and.medians <- matrix(ncol = npredictors, nrow = 3,
data = 0)
for(i in 1:npredictors) {
ranges.and.medians[1, i] <- min(predictors[, i])
}
for(i in 1:npredictors) {
ranges.and.medians[2, i] <- max(predictors[, i])
}
for(i in 1:npredictors) {
ranges.and.medians[3, i] <- median(predictors[, i])
}
steps <- matrix(z$steps[1:(2 * (z$step.count + 1))], ncol = 2,
byrow = TRUE)
rss.gcv <- matrix(z$rss.gcv[1:((nresponses + 1) * (z$step.count +
1))], ncol = nresponses + 1, byrow = TRUE)
fitting <- data.frame(steps, rss.gcv)
if(testset == FALSE) {
if(nresponses == 1) {
dimnames(fitting) <- list(1:(nrow(fitting)),
c("0/1", "size", "RSS", "GCV"))
}
else {
dimnames(fitting) <- list(1:nrow(fitting),
c("0/1", "size", paste("RSS", 1:
nresponses), "GCV"))
}
}
else {
if(classify == FALSE) {
if(nresponses == 1) {
dimnames(fitting) <- list(1:(nrow(
fitting)), c("0/1", "size",
"RSS", "T.S. RSS"))
}
else {
dimnames(fitting) <- list(1:nrow(
fitting), c("0/1", "size",
paste("RSS", 1:nresponses),
"T.S. RSS"))
}
}
else {
if(nresponses == 1) {
dimnames(fitting) <- list(1:(nrow(
fitting)), c("0/1", "size",
"RSS", "T.S.M.C."))
}
else {
dimnames(fitting) <- list(1:nrow(
fitting), c("0/1", "size",
paste("RSS", 1:nresponses),
"T.S.M.C."))
}
}
}
if(factor1 == TRUE) {
model2 <- model[-1, ]
factors.in.model <- unique(model2[knots[model2[, 1]] <
0, 1])
maxfactors <- 0
for(i in 1:length(factors.in.model)) {
maxfactors <- max(maxfactors, length(unique(
predictors[, factors.in.model[i]])))
}
factor.matrix <- matrix(ncol = length(factors.in.model),
nrow = maxfactors + 2, data = NA)
for(i in 1:length(factors.in.model)) {
factor.matrix[1, i] <- factors.in.model[i]
factor.matrix[2, i] <- length(unique(predictors[
, factors.in.model[i]]))
for(j in 3:(length(unique(predictors[,
factors.in.model[i]])) + 2)) {
factor.matrix[j, i] <- unique(
predictors[, factors.in.model[
i]])[j - 2]
}
}
}
else {
factor.matrix <- 0
}
if(nresponses == 1) {
model <- cbind(model, model[,1])
dimnames(model)[[2]][length(dimnames(model)[[2]])] <-
"SE"
}
else {
for(i in 1:nresponses) {
model <- cbind(model, model[,1])
dimnames(model)[[2]][length(dimnames(model)[[
2]])] <- paste("SE", i)
}
}
result <- list(model = model, fitting = fitting, model.size = z$
modelsize, responses = nresponses, ranges.and.medians =
ranges.and.medians, call = call, conversion =
conversion, factor.matrix = factor.matrix)
class(result) <- "polymars"
dd <- design.polymars(result,predictors)
model2 <- result$model
rsquared2 <- rep(0,nresponses)
for(i in 1:nresponses){
if(z$modelsize>1) mm <- summary(lm(responses[, i] ~ dd[, -1]))
else mm <- summary(lm(responses[, i] ~ 1 ))
rsquared2[i] <- mm$r.squared
mm <- mm$coefficients
model2[,i+factor1+4] <- mm[,1]
model2[,i+factor1+4+nresponses] <- mm[,2]
}
result$model <- model2
result$Rsquared <- rsquared2
if(z$modelsize > 1) {
fitted <- predict.polymars(result, x = predictors)
residuals <- responses - fitted
}
else {
fitted <- matrix(ncol = nresponses, nrow = ncases,
data = coefs[1, 1])
residuals <- matrix(ncol = nresponses, nrow = ncases,
data = responses - coefs[1, 1])
}
result$residuals = residuals
result$fitted = fitted
return(result)
}
}
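# Illustrative usage sketch for polymars() on synthetic data, guarded by
# if (FALSE) so it never executes when this package source is loaded.
if (FALSE) {
    set.seed(2)
    n <- 200
    X <- cbind(runif(n), runif(n), runif(n))
    y <- 2 * X[, 1] + sin(2 * pi * X[, 2]) + rnorm(n, sd = 0.3)
    fit <- polymars(y, X)                       # adaptive piecewise-linear regression
    summary(fit)                                # selected basis functions and R-squared
    fit.add <- polymars(y, X, additive = TRUE)  # forbid interaction terms
}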
predict.polymars<-function(object,x,classify=FALSE,intercept,...)
{
if(missing(intercept))
{
intercept<-TRUE
}
if(!missing(x))x <- unstrip(x)
   if(!inherits(object, "polymars"))
stop("object is not a polymars object")
pmars.model <- object
if(!(is.matrix(x)))
{
if(length(unique(pmars.model$model[, "pred1"]))== 1 || ncol(pmars.model$ranges.and.medians)== 1 )
{
x<-matrix(data=x,ncol=1)
}
}
if((is.matrix(x) && ncol(x)
!= length(unique(pmars.model$model[,"pred1"]))))
{
if(ncol(x) != ncol(pmars.model$ranges.and.medians))
{
stop("Input should be a matrix with number of columns equal to either number of original predictors or number of predictors in model\n")
}
}
if(is.matrix(x) && ncol(x) == length(unique(pmars.model$model[, "pred1"])) && ncol(x) != ncol(pmars.model$ranges.and.medians))
{
tempmatrix<-x
x<-matrix(nrow=nrow(tempmatrix),ncol=ncol(pmars.model$ranges.and.medians),data = 0)
for(i in 1:length(unique(pmars.model$model[, "pred1"])))
{
for(j in 1:nrow(tempmatrix))
{
x[j,sort(unique(pmars.model$model[,"pred1"]))[i]]<-x[j]
}
}
}
if(!(is.matrix(x)))
{
                if(!(length(x) == ncol(pmars.model$ranges.and.medians) || length(x) == length(unique(pmars.model$model[, "pred1"]))))
                {
                        stop("The vector of values must be equal in length to either the number of original predictors or the number of predictors in the model\n")
                }
                if(length(x) == length(unique(pmars.model$model[, "pred1"])) && length(x) != ncol(pmars.model$ranges.and.medians))
{
x <- rep(0, ncol(pmars.model$ranges.and.medians))
for(i in 1:length(unique(pmars.model$model[, "pred1"])))
{
x[sort(unique(pmars.model$model[, "pred1"]))[i]]<-x[i]
}
}
x <- t(as.matrix(x))
}
if(dimnames(pmars.model$model)[[2]][3] == "level1")
{
level1<-TRUE
pmars.model$model<-pmars.model$model[,c(1:(5+pmars.model$responses))]
}
else
{
level1<-FALSE
pmars.model$model<-pmars.model$model[,c(1:(4+pmars.model$responses))]
}
responses<-pmars.model$responses
Y <- matrix(ncol = responses, nrow = nrow(x), data = rep(0, nrow(x)))
Y1 <- matrix(ncol = 1, nrow = nrow(x), data = rep(0, nrow(x)))
Y2 <- matrix(ncol = 1, nrow = nrow(x), data = rep(0, nrow(x)))
if(is.logical(intercept))
{
if(intercept==TRUE)
{
for(i in 1:responses)Y[,i] <- pmars.model$model[1,ncol(pmars.model$model)-responses+i]
}
else
{
if(intercept==FALSE)
{
for(i in 1:responses)Y[,i] <- 0.0
}
}
}
else
{
if(is.numeric(intercept))
{
if(length(intercept)==responses)
{
for(i in 1:responses)Y[,i] <- intercept[i]
}
else
{
if(length(intercept) != 1)
{
                                        stop("Intercept argument misspecified\n")
}
for(i in 1:responses)Y[,i] <- intercept
}
}
}
if(pmars.model$model.size>1)
{
for(i in 2:pmars.model$model.size)
{
Y2[] <- 1
Y1[] <- x[,pmars.model$model[i, "pred1"]]
if(!is.na(pmars.model$model[i, "knot1"]))
{
Y1 <- Y1 - pmars.model$model[i,"knot1"]
Y1[Y1 < 0,] <- 0
}
if(level1)
{
if(!is.na(pmars.model$model[i, "level1"]))
{
Y1<- (Y1 == pmars.model$model[i, "level1"])
}
}
if(!is.na(pmars.model$model[i, "pred2"]) & pmars.model$model[i, "pred2"] != 0)
{
Y2[] <- x[,pmars.model$model[i,"pred2"]]
if(!is.na(pmars.model$model[i,"knot2" ]))
{
Y2 <- Y2 - pmars.model$model[i,"knot2"]
Y2[Y2 < 0,] <- 0
}
}
for(j in 1:responses){Y[,j]<-Y[,j]+(Y1 * Y2 * pmars.model$model[i,ncol(pmars.model$model)-responses+j])}
}
}
if(classify == TRUE)
{
for(i in 1:nrow(Y))
{
Y[i,]<-Y[i,]==max(Y[i,])
}
                if(is.matrix(pmars.model$conversion))
                {
                        Z<-Y
                        Y<-matrix(ncol=1,nrow=nrow(Z))
                        for(i in 1:nrow(Y))
                        {
                                for(j in 1:ncol(Z))
                                {
                                        if(Z[i,j] == 1) Y[i,] <- pmars.model$conversion[j]
                                }
                        }
                }
}
return(Y)
}
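# Illustrative sketch for predict.polymars(), guarded by if (FALSE). The fit and
# data are hypothetical and rebuilt here so the block is self-contained.
if (FALSE) {
    set.seed(2)
    X <- matrix(runif(600), ncol = 3)
    fit <- polymars(2 * X[, 1] + rnorm(200, sd = 0.3), X)
    newX <- matrix(runif(30), ncol = 3)
    predict(fit, x = newX)                     # fitted values at new predictor rows
    predict(fit, x = newX, intercept = FALSE)  # same, without the intercept term
}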
print.polymars<-function(x,...)
{
summary.polymars(x)
}
summary.polymars<-function(object,...)
{
   if(!inherits(object, "polymars"))
stop("object is not a polymars object")
pmars.model <- object
cat("Call:\n")
print(pmars.model$call)
cat("\nModel fitting\n\n")
print(pmars.model$fitting)
cat("\n\nModel produced\n\n")
print(pmars.model$model)
if(pmars.model$responses != 1)
cat("\nRESPONSES :", pmars.model$responses, "\n")
if(!is.null(pmars.model$Rsquared))
cat("\nRsquared :",round(pmars.model$Rsquared,3),"\n")
invisible()
}
plot.polymars<-function(x,predictor1,response,predictor2,xx,add=FALSE,n,xyz=FALSE,contour.polymars=FALSE,xlim,ylim,intercept,...)
{
   if(!inherits(x, "polymars"))
stop("x is not a polymars object")
pmars.model <- x
if(missing(xx))xx<-pmars.model$ranges.and.medians[3,]
if(length(xx) != ncol(pmars.model$ranges.and.medians))
{
stop("xx should be of length equal to the number of predictors in original data\n")
}
x <- xx
if(!missing(predictor2))xyz <- TRUE
if(missing(predictor2) && (!missing(response)) && pmars.model$responses == 1)
{
if(missing(predictor1) && xyz == TRUE)
{
stop("You must specify 2 predictor numbers")
}
xyz<-TRUE
predictor2<-response
response<-1
}
if(contour.polymars == TRUE)
{
xyz<-TRUE
}
if(missing(intercept))
{
intercept<-TRUE
}
if(xyz==TRUE)
{
if(missing(n))n<-33
if(missing(response))
{
if(missing(xlim))
{
persp.polymars(pmars.model,
predictor1,
predictor2,
n=n,
contour.polymars=contour.polymars,
intercept=intercept,
...)
}
else
{
persp.polymars(pmars.model,
predictor1,
predictor2,
n=n,
xlim=xlim,
contour.polymars=contour.polymars,
intercept=intercept,
...)
}
}
else
{
if(missing(xlim))
{
persp.polymars(pmars.model,
predictor1,
predictor2,
response,
n=n,
contour.polymars=contour.polymars,
intercept=intercept,
...)
}
else
{
persp.polymars(pmars.model,
predictor1,
predictor2,
response,
n=n,
xlim=xlim,
contour.polymars=contour.polymars,
intercept=intercept,
...)
}
}
invisible(return())
}
else
{
if(missing(predictor1))
{
                        stop("predictor1 should be specified\n")
}
if(pmars.model$responses != 1 && missing(response)&& missing(predictor2))
{
cat("Response should be specified (default: response =1)\n")
}
inmodel<-FALSE
for(i in 2:pmars.model$model.size)
{
if(pmars.model$model[i,"pred1"] == predictor1)inmodel<-TRUE
}
if(is.matrix(pmars.model$factor.matrix))
{
if(length(pmars.model$factor.matrix[1,pmars.model$factor.matrix[1,]== predictor1]) != 0)
{
isfactor<-TRUE
}
else
{
isfactor<-FALSE
}
}
else
{
isfactor<-FALSE
}
if(isfactor == TRUE)
{
pred.values <- matrix(nrow = pmars.model$factor.matrix[2,pmars.model$factor.matrix[1,]==predictor1], ncol = ncol(pmars.model$ranges.and.medians),data = x, byrow = TRUE)
factors<-pmars.model$factor.matrix[-c(1,2),pmars.model$factor.matrix[1,]==predictor1]
pred.values[,predictor1]<- factors[!is.na(factors)]
mesh<-factors[!is.na(factors)]
}
else
{
if(missing(n))n<-100
if(missing(xlim))xlim<-c(pmars.model$ranges.and.medians[1,predictor1],pmars.model$ranges.and.medians[2,predictor1])
pred.values <- matrix(nrow = n, ncol = ncol(pmars.model$ranges.and.medians),
data = x, byrow = TRUE)
mesh <- matrix(seq(xlim[1],xlim[2],(xlim[2]-xlim[1])/(n-1)),nrow=1)
pred.values[,predictor1]<-mesh
}
if(missing(response) && missing(predictor2))response<-1
if(missing(response))response <- 1
if(response > pmars.model$responses || response < 0)
{
                        stop("response argument = ", response, " is out of range\n")
}
model<-pmars.model$model
Y<-predict.polymars(pmars.model,pred.values,intercept=intercept)
if(isfactor == FALSE)
{
if(add == FALSE)
{
if(pmars.model$responses == 1)
{
plot(mesh,Y,...,type="l",xlab=paste("Predictor ",predictor1),ylab="Response")
}
else
{
plot(mesh,
Y[,response],
type="l",
xlab=paste("Predictor ",predictor1),
ylab=paste("Response ",response),
...)
}
}
else
{
points(mesh,
Y,
type="l")
}
}
if(isfactor == TRUE)
{
if(add == FALSE)
{
if(pmars.model$responses == 1)
{
plot(mesh,Y,...,xlab=paste("Predictor ",predictor1),ylab="Response")
}
else
{
plot(mesh,
Y[,response],
type="l",
xlab=paste("Predictor ",predictor1),
ylab=paste("Response ",response),
...)
}
}
else
{
points(mesh,
Y,
type="l")
}
}
invisible()
}
}
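# Illustrative sketch for plot.polymars(), guarded by if (FALSE); it assumes the
# requested predictors were actually selected into the hypothetical fit below.
if (FALSE) {
    set.seed(2)
    X <- matrix(runif(600), ncol = 3)
    fit <- polymars(2 * X[, 1] + sin(2 * pi * X[, 2]) + rnorm(200, sd = 0.3), X)
    plot(fit, predictor1 = 1)                  # response curve, other predictors at medians
    plot(fit, predictor1 = 1, predictor2 = 2)  # perspective plot over two predictors
}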
persp.polymars<-function(x, predictor1, predictor2, response, n= 33,xlim,ylim,xx,contour.polymars,main,intercept,...)
{
   if(!inherits(x, "polymars"))
stop("x is not a polymars object")
pmars.model <- x
if(missing(xx))xx<-pmars.model$ranges.and.medians[3,]
if(missing(xlim))xlim<-c(pmars.model$ranges.and.medians[1,predictor1],pmars.model$ranges.and.medians[2,predictor1])
if(missing(ylim))ylim<-c(pmars.model$ranges.and.medians[1,predictor2],pmars.model$ranges.and.medians[2,predictor2])
if(missing(predictor1) || missing(predictor2))
{
stop("You must specify 2 predictor numbers\n")
}
if(pmars.model$responses != 1 && missing(response))
{
cat("Response should be specified (default: response =1)\n")
}
if(missing(response))response <- 1
if(response > pmars.model$responses || response < 0)
{
                stop("response argument = ", response, " is out of range\n")
}
if(sum(as.integer(predictor1==pmars.model$model[,1])) == 0)
{
stop("Predictor 1 not in model\n")
}
if(sum(as.integer(predictor2==pmars.model$model[,1])) == 0)
{
stop("Predictor 2 not in model\n")
}
X <- seq(xlim[1],xlim[2],(xlim[2] - xlim[1])/(n-1))
y <- seq(ylim[1],ylim[2],(ylim[2] - ylim[1])/(n-1))
meshX <- rep(X, n)
meshY <- rep(y, n)
meshY <- sort(meshY)
pred.values <- matrix(nrow = n^2, ncol = ncol(pmars.model$ranges.and.medians),
data = xx, byrow = TRUE)
for(i in 1:(n^2))pred.values[i, predictor1] <- meshX[i]
for(i in 1:(n^2))pred.values[i, predictor2] <- meshY[i]
Z <- predict.polymars(pmars.model, pred.values,intercept=intercept)[,response]
Z <- matrix(Z, ncol = n, byrow = FALSE)
xtitle<-paste("Predictor", predictor1)
ytitle<-paste("Predictor", predictor2)
if(pmars.model$responses > 1)
{
if(missing(main) && (!contour.polymars))
{
ztitle <- paste("Response", response)
}
if(missing(main) && (contour.polymars))
{
ztitle <- paste("Contour of response",response)
}
}
else
{
if(missing(main) && (!contour.polymars))ztitle <- "Response"
if(missing(main) && contour.polymars)ztitle <- paste("Contour of response")
}
if(!contour.polymars)
{
persp(X, y, Z, xlab = xtitle, ylab= ytitle, zlab = ztitle, ...)
}
else
{
contour(X, y, Z, xlab = xtitle, ylab = ytitle , main = ztitle, ...)
}
invisible()
}
design.polymars<-function(object,x)
{
if(!missing(x))x <- unstrip(x)
   if(!inherits(object, "polymars"))
stop("object is not a polymars object")
pmars.model <- object
if(!(is.matrix(x)))
{
if(length(unique(pmars.model$model[, "pred1"]))== 1 || ncol(pmars.model$ranges.and.medians)== 1 )
{
x<-matrix(data=x,ncol=1)
}
}
if((is.matrix(x) && ncol(x)
!= length(unique(pmars.model$model[,"pred1"]))))
{
if(ncol(x) != ncol(pmars.model$ranges.and.medians))
{
stop("Input should be a matrix with number of columns equal to either number of original predictors or number of predictors in model\n")
}
}
if(is.matrix(x) && ncol(x) == length(unique(pmars.model$model[, "pred1"])) && ncol(x) != ncol(pmars.model$ranges.and.medians))
{
tempmatrix<-x
x<-matrix(nrow=nrow(tempmatrix),ncol=ncol(pmars.model$ranges.and.medians),data = 0)
for(i in 1:length(unique(pmars.model$model[, "pred1"])))
{
for(j in 1:nrow(tempmatrix))
{
x[j,sort(unique(pmars.model$model[,"pred1"]))[i]]<-x[j]
}
}
}
if(!(is.matrix(x)))
{
                if(!(length(x) == ncol(pmars.model$ranges.and.medians) || length(x) == length(unique(pmars.model$model[, "pred1"]))))
                {
                        stop("The vector of values must be equal in length to either the number of original predictors or the number of predictors in the model\n")
                }
                if(length(x) == length(unique(pmars.model$model[, "pred1"])) && length(x) != ncol(pmars.model$ranges.and.medians))
{
x <- rep(0, ncol(pmars.model$ranges.and.medians))
for(i in 1:length(unique(pmars.model$model[, "pred1"])))
{
x[sort(unique(pmars.model$model[, "pred1"]))[i]]<-x[i]
}
}
x <- t(as.matrix(x))
}
if(dimnames(pmars.model$model)[[2]][3] == "level1")
{
level1<-TRUE
pmars.model$model<-pmars.model$model[,c(1:(5+pmars.model$responses))]
}
else
{
level1<-FALSE
pmars.model$model<-pmars.model$model[,c(1:(4+pmars.model$responses))]
}
responses<-pmars.model$responses
Y <- matrix(ncol = 1, nrow = nrow(x), data = rep(1, nrow(x)))
Y1 <- matrix(ncol = 1, nrow = nrow(x), data = rep(0, nrow(x)))
Y2 <- matrix(ncol = 1, nrow = nrow(x), data = rep(0, nrow(x)))
if(pmars.model$model.size>1)
{
for(i in 2:pmars.model$model.size)
{
Y2[] <- 1
Y1[] <- x[,pmars.model$model[i, "pred1"]]
if(!is.na(pmars.model$model[i, "knot1"]))
{
Y1 <- Y1 - pmars.model$model[i,"knot1"]
Y1[Y1 < 0,] <- 0
}
if(level1)
{
if(!is.na(pmars.model$model[i, "level1"]))
{
Y1<- (Y1 == pmars.model$model[i, "level1"])
}
}
if(!is.na(pmars.model$model[i, "pred2"]) & pmars.model$model[i, "pred2"] != 0)
{
Y2[] <- x[,pmars.model$model[i,"pred2"]]
if(!is.na(pmars.model$model[i,"knot2" ]))
{
Y2 <- Y2 - pmars.model$model[i,"knot2"]
Y2[Y2 < 0,] <- 0
}
}
Y<-cbind(Y,Y1 * Y2)
}
}
return(Y)
}
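# Illustrative sketch for design.polymars(): recover the basis (design) matrix the
# fitted model evaluates at given predictor values. Guarded by if (FALSE); the
# fit is hypothetical and rebuilt here so the block stands alone.
if (FALSE) {
    set.seed(2)
    X <- matrix(runif(600), ncol = 3)
    fit <- polymars(2 * X[, 1] + rnorm(200, sd = 0.3), X)
    B <- design.polymars(fit, X)          # first column is the constant basis function
    cf <- fit$model[, "coefs"]            # single-response coefficient column
    all.equal(drop(B %*% cf), drop(predict(fit, x = X)))  # should agree up to rounding
}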
logspline <- function(x, lbound, ubound, maxknots=0, knots, nknots=0,
penalty= -1, silent = TRUE,mind= -1, error.action=2)
{
call <- match.call()
if(!missing(x))x <- unstrip(x)
data <- x
if(length(table(data))<3)stop("Not enough unique values")
ilx <- 0; iux <- 0
if(!missing(lbound)){ilx <- 1;jlx <- lbound}
if(!missing(ubound)){iux <- 1;jux <- ubound}
u2 <- length(data)
data <- data[!is.na(data)]
nsample <- length(data)
if(nsample<10)stop("not enough data")
if(u2 !=nsample) print(paste("***", u2-nsample, " NAs ignored in data"))
data <- sort(data)
if(!missing(lbound)) if(data[1] < lbound) stop("data below lbound")
if(!missing(ubound)) if(data[nsample] > ubound) stop("data above ubound")
mm <- range(data)
if(!missing(lbound)) mm <- range(c(mm, lbound))
if(!missing(ubound)) mm <- range(c(mm, ubound))
ilow <- (!missing(lbound)) * 1
iupp <- (!missing(ubound)) * 1
low <- 0
upp <- 0
if(ilow == 1) low <- lbound
if(iupp == 1) upp <- ubound
intpars <- c(-100, rep(0, 9))
z <- .C("nlogcensorx", z = as.integer(intpars),
PACKAGE = "polspline")
maxp <- z$z[1]
kts <- vector(mode = "double", length = max(maxp))
if(maxknots > maxp - 5) warning(paste("maxknots reduced to", maxp))
nknots <- -nknots
if(!missing(knots)) {
nknots <- length(knots)
knots <- sort(knots)
if(!missing(lbound)) if(min(knots) < lbound)
stop("data (knots) below lbound")
if(!missing(ubound)) if(max(knots) > ubound)
stop("data (knots) above ubound")
if(nknots < 3) stop("need at least three starting knots")
if(nknots > maxp - 5) stop(paste("at most", maxp - 5, "knots possible"))
kts[1:nknots] <- knots
}
silent <- (silent == FALSE)
intpars <- c(nsample, maxknots, nknots, silent, 1-ilow, 1-iupp,mind)
dpars <- c(penalty, low, upp)
data <- c(data, rep(0, maxp))
z <- .C("nlogcensor",
ip = as.integer(intpars),
coef = as.double(data),
dp = as.double(dpars),
logl = as.double(rep(0, maxp)),
ad = as.integer(rep(0, maxp)),
kts = as.double(kts),
PACKAGE = "polspline")
if(z$ip[1] != 0 && z$ip[1]<100) {
if(z$ip[1] == 17) warning("too many knots beyond data")
if(z$ip[1] == 18) warning("too many knots before data")
if(z$ip[1] == 39) warning("too much data close together")
if(z$ip[1] == 40) warning("no model could be fitted")
if(z$ip[1] == 2) warning("error while solving system")
if(z$ip[1] == 8) warning("too much step-halving")
if(z$ip[1] == 5) warning("too much step-halving")
if(z$ip[1] == 7)
warning("numerical problems, likely tail related. Try lbound/ubound")
if(z$ip[1] == 1) warning("no convergence")
i <- 0
if(missing(knots))i<- 1
if(z$ip[1] == 3 && i==1)
warning("right tail extremely heavy, try running with ubound")
if(z$ip[1] == 4 && i==1)
warning("left tail extremely heavy, try running with lbound")
if(z$ip[1] == 6 && i==1)
warning("both tails extremely heavy, try running with lbound and ubound")
if(z$ip[1] == 3 && i==0)
warning("right tail too heavy or not enough knots in right tail")
if(z$ip[1] == 4 && i==0)
warning("left tail too heavy or not enough knots in left tail")
if(z$ip[1] == 6 && i==0)
                        warning("both tails too heavy or not enough knots in both tails")
if(error.action==0) stop("fatal error")
if(error.action==1) {
print("no object returned")
invisible()
}
if(error.action==2) {
if(ilx==0 && iux==0)z <- oldlogspline(x)
if(ilx==0 && iux==1)z <- oldlogspline(x,ubound=jux)
if(ilx==1 && iux==0)z <- oldlogspline(x,lbound=jlx)
if(ilx==1 && iux==1)z <- oldlogspline(x,lbound=jlx,ubound=jux)
z <- oldlogspline.to.logspline(z,x)
z$call <- call
warning("re-ran with oldlogspline")
z
}
}
else{
if(z$ip[1]>100) {
warning(" Not all models could be fitted")
}
logl <- cbind(z$ad, z$logl)
logl <- cbind(2+(1:z$ip[3]),logl[1+(1:z$ip[3]), ])
kk <- (1:length(logl[,1]))
kk <- kk[logl[, 2] == 0 ]
if(length(kk)>0)logl <- logl[-kk,]
fit <- list(call = call, nknots = z$ip[2], coef.pol = z$coef[1:2], coef.kts =
z$coef[2 + (1:z$ip[2])], knots = z$kts[1:z$ip[2]], maxknots = z$ip[3]+2,
penalty = z$dp[1], bound = c(ilow, low, iupp, upp), samples = nsample,
logl = logl, range = mm, mind = z$ip[7])
class(fit) <- "logspline"
fit}
}
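# Illustrative usage sketch for logspline() on a synthetic sample, guarded by
# if (FALSE) so it is never run when this file is sourced.
if (FALSE) {
    set.seed(3)
    x <- rgamma(500, shape = 2)
    fit <- logspline(x, lbound = 0)   # the support is known to start at 0
    summary(fit)                      # knot counts, log-likelihoods, AIC trace
}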
plogspline <- function(q, fit)
{
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
if(!missing(q))q <- unstrip(q)
sq <- rank(q)
q <- sort(q)
z <- .C("rpqlsd",
as.double(c(fit$coef.pol, fit$coef.kts)),
as.double(fit$knots),
as.double(fit$bound),
as.integer(1),
pp = as.double(q),
as.integer(length(fit$knots)),
as.integer(length(q)),
PACKAGE = "polspline")
zz <- z$pp[sq]
if(fit$bound[1] > 0) zz[q<fit$bound[2]] <- 0
if(fit$bound[3] > 0) zz[q>fit$bound[4]] <- 1
zz
}
qlogspline <- function(p, fit)
{
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
if(!missing(p))p <- unstrip(p)
sp <- rank(p)
p <- sort(p)
z <- .C("rpqlsd",
as.double(c(fit$coef.pol, fit$coef.kts)),
as.double(fit$knots),
as.double(fit$bound),
as.integer(0),
qq = as.double(p),
as.integer(length(fit$knots)),
as.integer(length(p)),
PACKAGE = "polspline")
zz <- z$qq[sp]
zz[p<0] <- NA
zz[p>1] <- NA
zz
}
rlogspline <- function(n, fit)
{
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
pp <- runif(n)
qlogspline(pp, fit)
}
dlogspline <- function(q, fit)
{
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
if(!missing(q))q <- unstrip(q)
x <- q
y <- fit$coef.pol[1] + x * fit$coef.pol[2]
for(i in 1:length(fit$knots))
y <- y + fit$coef.kts[i] * ((abs(x - fit$knots[i]) +x- fit$knots[i])/2)^3
y <- exp(y)
if(fit$bound[1] > 0) y[x < fit$bound[2]] <- 0
if(fit$bound[3] > 0) y[x > fit$bound[4]] <- 0
y
}
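# Illustrative sketch tying dlogspline/plogspline/qlogspline/rlogspline together,
# guarded by if (FALSE); the fit is hypothetical and rebuilt here.
if (FALSE) {
    set.seed(3)
    fit <- logspline(rgamma(500, shape = 2), lbound = 0)
    dlogspline(1:3, fit)                      # density at a few points
    plogspline(1:3, fit)                      # distribution function
    qlogspline(c(0.05, 0.5, 0.95), fit)       # quantiles
    hist(rlogspline(1000, fit), breaks = 40)  # draws from the fitted density
}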
plot.logspline <-function(x, n = 100, what = "d", add = FALSE, xlim, xlab = "", ylab = "", type = "l", ...)
{
fit <- x
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
if(add){
plim <- (par()$usr)[1:2]
u4 <- plim[1]
u3 <- plim[2]
if(!missing(xlim)) {
u4 <- max(xlim[1], plim[1])
u3 <- min(xlim[2], plim[2])
}
}
else{
if(missing(xlim)) {
u1 <- qlogspline(0.01, fit)
u2 <- qlogspline(0.99, fit)
u3 <- 1.1 * u1 - 0.1 * u2
u4 <- 1.1 * u2 - 0.1 * u1
}
else {
u3 <- xlim[1]
u4 <- xlim[2]
}}
xx <- (0:(n - 1))/(n - 1) * (u4 - u3) + u3
if(what == "d" || what == "D") yy <- dlogspline(xx, fit)
if(what == "f" || what == "F" || what == "p" || what == "P")
yy <- plogspline(xx, fit)
if(what == "s" || what == "S") yy <- 1 - plogspline(xx, fit)
if(what == "h" || what == "H") yy <- dlogspline(xx, fit)/(1 - plogspline(xx, fit))
if(missing(xlab)) xlab <- ""
if(missing(ylab)) ylab <- ""
if(missing(type)) type <- "l"
if(add)lines(xx,yy, ...)
else plot(xx, yy, xlab = xlab, ylab = ylab, type = type, ...)
invisible()
}
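# Illustrative sketch for plot.logspline(): overlay the fitted density on a
# histogram. Guarded by if (FALSE); data and fit are hypothetical.
if (FALSE) {
    set.seed(3)
    x <- rgamma(500, shape = 2)
    fit <- logspline(x, lbound = 0)
    hist(x, breaks = 40, freq = FALSE)
    plot(fit, add = TRUE, col = 2)    # density curve on the existing histogram
    plot(fit, what = "s")             # survival function on a new plot
}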
print.logspline <- function(x,...)
{
summary.logspline(x)
}
summary.logspline <- function(object,...)
{
fit <- object
        if(!inherits(fit, "logspline"))
stop("fit is not a logspline object")
ul <- fit$penalty
um <- fit$samples[1]
if(length(fit$samples)>1)
um <- fit$samples[1]+ fit$samples[4]
else
um <- fit$samples
kk <- fit$logl[fit$logl[,2] != 0,1]
ad <- fit$logl[fit$logl[,2] != 0,2]
ll <- fit$logl[fit$logl[,2] != 0,3]
bb <- -2 * ll + ul * (kk-1)
cc1 <- bb
cc2 <- bb
cc2[1] <- Inf
cc1[length(bb)] <- 0
if(length(bb) > 1) {
for(i in 1:(length(bb) - 1)) {
cc1[i] <- max((ll[(i + 1):(length(bb))] - ll[i])/(kk[(i + 1):
(length(bb))] - kk[i]))
cc2[i + 1] <- min((ll[1:i] - ll[i + 1])/(kk[1:i] - kk[i + 1]))
}
}
c3 <- cc2 - cc1
cc1[c3 < 0] <- NA
cc2[c3 < 0] <- NA
uu <- cbind(kk, ad, ll, bb, 2 * cc1, 2 * cc2)
ww <- rep("", length(bb))
dimnames(uu) <- list(ww, c("knots", "A(1)/D(2)", "loglik", "AIC",
"minimum penalty", "maximum penalty"))
print(round(uu, 2))
cat(paste("the present optimal number of knots is ",kk[bb== min(bb)],"\n"))
if(ul == log(um))
cat(paste("penalty(AIC) was the default: BIC=log(samplesize): log(",
um, ")=", round(ul, 2), "\n"))
else cat(paste("penalty(AIC) was ", round(ul, 2),
", the default (BIC) ", "would have been", round(log(um), 2), "\n"))
invisible()
}
polyclass <- function(data, cov, weight, penalty, maxdim, exclude, include,
additive = FALSE, linear, delete=2, fit, silent = TRUE, normweight = TRUE, tdata,
tcov, tweight, cv, select=0, loss, seed)
{
call <- match.call()
if(!missing(cov))cov <- unstrip(cov)
if(!missing(exclude))exclude <- unstrip(exclude)
if(!missing(include))include <- unstrip(include)
if(!missing(data))data <- unstrip(data)
if(!missing(weight))weight <- unstrip(weight)
if(!missing(tdata))tdata <- unstrip(tdata)
if(!missing(tweight))tweight <- unstrip(tweight)
if(!missing(tcov))tcov <- unstrip(tcov)
it <- 0
ntdata <- 0
if(!missing(cv)) it <- 2
if(!missing(tdata))it <- 1
if(!missing(tdata))if(is.factor(tdata)) tdata <- as.integer(tdata)
if(missing(cv)) cv <- 0
if(it==1||it==0) cv <- 0
if(it==2){
if(!missing(seed)){
if(sum(seed)!=0){
if(length(seed)>11) assign(".Random.seed", seed[1:12], envir=.GlobalEnv)
else set.seed(seed[1])
seed <- get(".Random.seed", envir=.GlobalEnv, inherits = FALSE)
}
}
else{
if(!missing(fit)){
            if(fit$method==2) assign(".Random.seed", fit$seed, envir=.GlobalEnv)
}
seed <- get(".Random.seed", envir=.GlobalEnv, inherits = FALSE)
}
}
z <- .C("spolyx", mk = as.integer(rep(-3,13)),
PACKAGE = "polspline")
MAXKNOTS <- z$mk[1]
MAXSPACE <- z$mk[2]
if(missing(data)) stop("there has to be data")
if(length(data) < 25) stop("not enough data")
if(is.factor(data)) data <- as.integer(data)
if(is.integer(data) == FALSE){
if(max(abs(as.integer(data) - data)) < 0.001)
data <- as.integer(data)
else stop("not-integer data")
}
if(it == 1) {
if(is.integer(tdata) == FALSE){
if(max(abs(as.integer(tdata) - tdata)) < 0.001)
tdata <- as.integer(tdata)
else stop("not-integer test data")
}
alldata <- c(data,tdata)
if(min(alldata)<0) stop("negative data")
clss <- min(alldata):max(alldata)
if(min(alldata) == 1){
data <- data - 1
tdata <- tdata - 1
}
ntdata <- length(tdata)
if(missing(tweight)) tweight <- rep(1,ntdata)
if(length(tweight)!=ntdata)stop("length tweight is incorrect")
if(normweight == TRUE)tweight <- tweight*ntdata/sum(tweight)
}
else{
if(min(data)<0) stop("negative data")
clss <- min(data):max(data)
if(min(data) == 1) data <- data - 1
}
nclass <- length(clss)
ndata <- length(data)
nu <- exists(".Random.seed", envir=.GlobalEnv, inherits = FALSE)
if(nu) xx <- get(".Random.seed", envir=.GlobalEnv, inherits = FALSE)
yy <- sample(ndata)
if(nu)assign(".Random.seed", xx, envir=.GlobalEnv)
if(missing(weight)) weight <- rep(1,ndata)
if(it==2){
if(sum(abs(seed[1]))==0) myord <- 1:ndata
else myord <- sample(ndata)
data <- data[myord]
weight <- weight[myord]
}
if(length(weight)!=ndata)stop("length weight is incorrect")
if(normweight == TRUE)weight <- weight*ndata/sum(weight)
if(missing(cov)) {
stop("covariates required")
}
else {
if(length(cov) == ndata)
cov <- matrix(cov, ncol = 1, nrow = ndata)
if(length(cov[, 1]) != ndata)
stop("covariates not ndata * ncov matrix")
if(it==2)cov <- cov[myord,]
ncov <- length(cov[1, ])
nms <- 1:ncov
if(is.matrix(cov))
nms <- dimnames(cov)[[2]]
if(length(nms) != ncov)
nms <- 1:ncov
}
if(missing(penalty) && it ==0)
penalty <- log(ndata)
if(missing(penalty) && it >0)
penalty <- 0
il <- 1
if(select==1) il <- 0
if(select==2) il <- 2
if(delete!=0 && delete !=1) delete <- 2
iml <- missing(loss)
if(iml) loss <- 1 - diag(rep(1,nclass))
if(il!=1 && !iml)
stop("if loss is specified, select has to be 0")
if((it == 0) && !iml)
warning("loss only has effect when there is a test-set or CV is used")
if(it == 1){
if(missing(tcov)) {
if(ncov!=0)stop("missing tcov")
tcov <- 0
}
else {
if(length(tcov) == ntdata)
tcov <- matrix(tcov, ncol = 1, nrow = ntdata)
if(length(tcov[, 1]) != ntdata)
stop("test-covariates not ntdata * ncov matrix")
                ntcov <- length(tcov[1, ])
if(ntcov!=ncov) stop("wrong number of test-covariates")
}
}
naction <- nclass
if(it>0){
if(is.matrix(loss)==FALSE)stop("loss is not a matrix")
if(length(loss[1,])!=nclass)stop("loss has not nclass columns")
naction <- length(loss[,1])
}
if(additive) {
if(!missing(exclude)) stop("cannot have exclude and additive")
if(!missing(include)) stop("cannot have include and additive")
include <- c(0, 0)
}
if(missing(exclude) + missing(include) == 0)
stop("only 1 from exclude and include allowed")
vexclude <- 0
if(missing(exclude) == FALSE) {
if(length(exclude) == 2)
exclude <- matrix(exclude, ncol = 2, nrow = 1)
if(length(exclude[1, ]) != 2) stop("exclude has wrong shape")
if(min(exclude) < 0 || max(exclude) > ncov)
stop("exclude has wrong values")
vexclude <- as.vector(t(exclude))
vexclude <- c(length(vexclude)/2, vexclude)
}
if(missing(include) == FALSE || additive) {
if(length(include) == 2)
include <- matrix(include, ncol = 2, nrow = 1)
if(length(include[1, ]) != 2)
stop("include has wrong shape")
if(min(include) < 0 || max(include) > ncov)
stop("include has wrong values")
include <- t(apply(include, 1, sort))
if(length(include) == 2)
include <- matrix(include, ncol = 2, nrow = 1)
vexclude <- as.vector(t(include))
vexclude <- c( - length(vexclude)/2, vexclude)
}
if(missing(maxdim)) {
maxdim <- floor(4 * (ndata)^(1/3))+1
maxdim <- min(ndata/2, MAXSPACE-1, (nclass-1)*maxdim)
maxdim <- - maxdim
}
if(maxdim > MAXSPACE - 1) {
maxdim <- MAXSPACE - 1
print(paste("maximum dimension reduced to", maxdim))
}
lins <- rep(0, MAXSPACE)
if(!missing(linear)) {
linear[linear <= 0] <- ncov + 1
linear[linear > ncov + 1] <- ncov + 1
lins[linear] <- 1
}
if(additive)
vexclude <- c(-1, 0, 0)
fitter <- 0
bbtt <- matrix(0, ncol = 4 + max(data), nrow = abs(maxdim))
cckk <- matrix(0, ncol = (MAXKNOTS + 1), nrow = ncov+1)
if(!missing(fit)) {
                if(!inherits(fit, "polyclass"))stop("fit is not a polyclass object")
fitter <- (fit$nclass-1)*(fit$nbas)
if(fit$ncov != ncov)
stop("ncov and fit's ncov are different")
if(fit$nclass != nclass)
stop("nclass and fit's nclass are different")
a1 <- length(fit$fcts[1,])
bbtt[1:fit$nbas, ] <- fit$fcts[,-a1]
bbtt <- as.vector(t(bbtt))
bbtt[is.na(bbtt)] <- -1
a1 <- length(fit$knots[1, ])
a2 <- as.vector(t(fit$knots))
cckk <- as.vector(cckk)
cckk <- c(a1,a2,cckk)
cckk[is.na(cckk)] <- -1
}
mindist <- 3*nclass
if(missing(tdata)){
tdata<-0
tcov <-0
tweight <- 0
}
ranges <- NA
if(ncov == 1)
ranges <- matrix(range(cov), ncol = 1, nrow = 2)
if(ncov > 1)
ranges <- apply(cov, 2, range)
cov <- as.single(t(cov))
aicx <- as.single(rep(0,1000))
intpars <-c(ndata,nclass,ncov,mindist,maxdim,silent,fitter,cv,it,ntdata,
naction,il,delete)
anova <- loss
if(length(anova)<abs(maxdim)*4)anova<-c(anova,rep(0,abs(4*maxdim)))
z <- .C("spoly",
intpars = as.integer(intpars),
as.integer(data),
as.single(cov),
anova = as.double(anova),
as.double(penalty),
bbtt = as.double(bbtt),
cckk = as.double(cckk),
as.integer(vexclude),
as.integer(lins),
logl = as.double(rep(0, 11*MAXSPACE+1)),
as.double(weight),
as.integer(tdata),
as.single(t(tcov)),
as.double(tweight),
bbb = as.double(rep(0, MAXSPACE*nclass)),
aicx=as.single(aicx),
PACKAGE = "polspline")
ndim <- z$intpars[1]
aicx <- z$aicx[1:4]
aicy <- 0
if(it==2){
aicy <- z$aicx[6:(z$aicx[5])]
aicy <- matrix(aicy,ncol=3,byrow=TRUE)
if(z$aicx[5]<995)aicy[length(aicy[,1]),2]<- Inf
dimnames(aicy) <- list(NULL,c("pen-min","pen-max","cv-loss"))
}
nclass <- z$intpars[2]+1
nbas <- z$intpars[3]
maxdim <- abs(maxdim)
z$bbtt <- matrix(z$bbtt, nrow = maxdim, ncol = 3 + nclass, byrow = TRUE)
z$bbtt <- z$bbtt[1:nbas, ]
z$cckk <- matrix(z$cckk, nrow = ncov+1, ncol = MAXKNOTS + 1, byrow = TRUE)
z$cckk <- z$cckk[1:ncov,]
z$cckk <- matrix(z$cckk, nrow = ncov)
z$cckk <- z$cckk[, 1:(1 + max(z$cckk[, 1]))]
z$cckk <- matrix(z$cckk, nrow = ncov)
l1 <- max(z$cckk[, 1])
for(i in 1:(ncov))
if(z$cckk[i, 1] != l1) z$cckk[i, (z$cckk[i, 1] + 2):(l1 + 1)] <-
NA
if(l1 > 0)
dimnames(z$cckk) <- list(nms, c("K", 1:l1))
if(l1 == 0)
dimnames(z$cckk) <- list(nms, "K")
z$bbtt <- matrix(z$bbtt, ncol = 3 + nclass)
z$bbtt <- cbind(z$bbtt, 0)
dimnames(z$bbtt) <- list(1:nbas, c("dim1", "knot1",
"dim2", "knot2", as.character(clss)))
z$bbtt[z$bbtt[, 3] == -1, 3:4] <- NA
z$bbtt[z$bbtt[, 4] == 0, 4] <- NA
z$bbtt[1,1] <- NA
i <- z$logl[1]
z$logl <- matrix(z$logl[2:(11*i+1)],ncol=11,byrow=TRUE)
z$logl[z$logl[,10]<0,10] <- NA
z$logl[z$logl[,11]<0,11] <- NA
z$logl[1,11] <- Inf
dimnames(z$logl) <- list(NULL, c("dim","loss","l-lik-trn","loss-trn",
"sq-err-trn","l-lik-test","loss-tst","sq-err-tst"
, "A/D","pen-min","pen-max"))
if(it!=1){
dimnames(z$logl)[[2]][2] <- "AIC"
z$logl <- z$logl[,-(6:8)]
}
anova <- z$anova[2:(1+z$anova[1])]
anova[anova<0] <- NA
anova <- matrix(anova,ncol=3,byrow=TRUE)
dimnames(ranges) <- list(c("min", "max"), nms)
z$bbtt[0, 0] <- NA
z$bbtt[z$bbtt[, 2] == 0, 2] <- NA
z$bbtt[z$bbtt[, 2] == 0, 4] <- NA
if(nclass==naction)
yyy <- clss
else
yyy <- 1:naction
if(it!=0)dimnames(loss) <- list(as.character(yyy),clss)
if(il!=1)loss <- -1
bbb <- z$bbb
bbb <- bbb[1:(nbas*nclass)]
bbb <- matrix(bbb,nrow=nbas,byrow=TRUE)
if(it==0){
nfit <- list(call = call, ncov = ncov, ndim = ndim, nclass = nclass,
nbas = nbas, fcts = z$bbtt, knots = z$cckk, penalty = penalty,
method = it, ranges = ranges, logl= z$logl,
sample = ndata, wgtsum = sum(weight), covnames = nms,
classnames = clss, beta = bbb, delete = delete, anova = anova)
}
else{
if(it==1)
nfit <- list(call = call, ncov = ncov, ndim = ndim, nclass = nclass,
nbas = nbas, naction = naction, fcts = z$bbtt, knots = z$cckk,
loss = loss, penalty = penalty, method = it, ranges = ranges,
logl= z$logl, sample = ndata, tsample = ntdata,
wgtsum = sum(weight), covnames = nms, classnames = clss, beta = bbb,
delete = delete, anova = anova,
select = select, twgtsum = sum(tweight))
else
nfit <- list(call = call, ncov = ncov, ndim = ndim, nclass = nclass,
nbas = nbas, naction = naction, fcts = z$bbtt, knots = z$cckk,
cv = cv, loss = loss, penalty = penalty, method = it, ranges = ranges,
logl= z$logl, sample = ndata, wgtsum = sum(weight), covnames = nms,
classnames = clss, cv.aic = aicx, cv.tab = aicy, seed = seed,
beta = bbb, delete = delete, anova = anova, select = select)
}
class(nfit) <- "polyclass"
nfit
}
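# Illustrative usage sketch for polyclass() on synthetic two-class data, guarded
# by if (FALSE). Class labels must be integer-valued (or a factor).
if (FALSE) {
    set.seed(4)
    n <- 300
    covs <- cbind(runif(n), runif(n))
    cls <- 1 + (covs[, 1] + covs[, 2] + rnorm(n, sd = 0.3) > 1)   # labels 1 and 2
    fit <- polyclass(cls, covs)                        # penalty-based model selection
    summary(fit)
    fit.cv <- polyclass(cls, covs, cv = 5, seed = 77)  # 5-fold cross-validation
}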
cpolyclass <- function(cov, fit)
{
   if(!inherits(fit, "polyclass"))stop("fit is not a polyclass object")
if(!missing(cov))cov <- unstrip(cov)
xxx <- ppolyclass(cov, fit)
yyy <- fit$classnames
if(fit$method!=0){
if(length(fit$loss)==1)
fit$loss <- 1 - diag(rep(1,fit$nclass))
xxx <- t(-fit$loss%*%t(xxx))
if(fit$nclass==fit$naction)
yyy <- fit$classnames
else
yyy <- 1:fit$naction
}
zzz <- xxx[, 1]
www <- rep(yyy[1], length(zzz))
for(i in 2:length(yyy)) {
www[zzz < xxx[, i]] <- yyy[i]
zzz[zzz < xxx[, i]] <- xxx[zzz < xxx[, i], i]
}
www
}
ppolyclass <- function(data, cov, fit)
{
imf <- missing(fit)
if(imf) {
fit <- cov
cov <- data
}
if(!missing(cov))cov <- unstrip(cov)
if(!missing(data))data <- unstrip(data)
if(!missing(data) && is.factor(data)) data <- as.integer(data)
   if(!inherits(fit, "polyclass"))stop("fit is not a polyclass object")
if(is.matrix(cov) == FALSE)
cov <- matrix(cov, ncol = 1)
if(length(cov[1, ]) != fit$ncov) {
if(length(cov[1, ]) == 1 && length(cov[, 1]) == fit$ncov)
cov <- t(cov)
else stop("incorrect number of covariates")
}
ncase <- length(cov[, 1])
nclass <- fit$nclass
nbas <- length(fit$fcts[, 1])
if(imf || missing(data))
data <- rep(-1, ncase)
if(length(data) == 1)
data <- rep(data, ncase)
if(is.integer(data) == FALSE)
if(max(abs(as.integer(data) - data)) < 0.001)
data <- as.integer(data)
else stop("not-integer data")
w2 <- fit$classnames
if(data[1] != -1 && (min(w2) > min(data) || max(w2) < max(data)))
stop("data has wrong range")
if(min(data) == 0)
data <- data + 1
ppp <- matrix(0, ncol = nclass, nrow = ncase)
for(i in 1:(nclass - 1))
ppp[, i] <- (fit$fcts[1, (4 + i)])
if(nbas > 1)
for(j in 2:nbas) {
uuu <- cov[, fit$fcts[j, 1]]
if(is.na(fit$fcts[j, 2]) == FALSE) {
uuu <- uuu - fit$knots[fit$fcts[j, 1], fit$fcts[
j, 2] + 1]
uuu[uuu < 0] <- 0
}
vvv <- rep(1, ncase)
if(is.na(fit$fcts[j, 3]) == FALSE) {
vvv <- cov[, fit$fcts[j, 3]]
if(is.na(fit$fcts[j, 4]) == FALSE) {
vvv <- vvv - fit$knots[fit$fcts[j, 3], fit$
fcts[j, 4] + 1]
vvv[vvv < 0] <- 0
}
}
uuu <- uuu * vvv
for(i in 1:(nclass - 1))
ppp[, i] <- ppp[, i] + uuu * fit$fcts[j, (4 + i
)]
}
ppp <- ppp-apply(ppp,1,max)
ppp <- exp(ppp)
zzz <- ppp[, nclass]
for(i in 1:(nclass - 1))
zzz <- zzz + ppp[, i]
for(i in 1:nclass)
ppp[, i] <- ppp[, i]/zzz
if(data[1] == -1)
dimnames(ppp) <- list(NULL, fit$classnames)
else ppp <- ppp[cbind(1:ncase, data)]
ppp
}
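# Illustrative sketch for ppolyclass()/cpolyclass(): class probabilities and the
# resulting classification. Guarded by if (FALSE); data and fit are hypothetical.
if (FALSE) {
    set.seed(4)
    covs <- cbind(runif(300), runif(300))
    cls <- 1 + (covs[, 1] + covs[, 2] > 1)
    fit <- polyclass(cls, covs)
    probs <- ppolyclass(covs, fit)    # ncase x nclass matrix of probabilities
    pred <- cpolyclass(covs, fit)     # most probable class for each case
    table(pred, cls)                  # confusion table against the true labels
}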
plot.polyclass <- function(x,cov, which, lims, what, data, n, xlab, ylab, zlab, ...)
{
   if(!inherits(x, "polyclass"))stop("x is not a polyclass object")
if(!missing(cov))cov <- unstrip(cov)
fit <- x
here <- c(-1, -1)
if(length(which) == 1 || length(which) == 2)
here[1] <- length(which)
here[2] <- as.integer(what)
if(here[2]< 1||here[2]>8) stop("what is wrong")
if(min(here) < 0)
stop("which is wrong")
if(here[2] < 5.5 && here[1] == 1)
stop("which and what contradict")
if(here[2] > 5.5 && here[1] == 2)
stop("which and what contradict")
if(length(cov) != fit$ncov)
stop("length of cov is wrong")
clbs <- fit$covnames
ww <- fit$classnames
w1 <- (1:fit$ncov)
if(missing(lims))
lims <- NULL
if(length(lims) != 0 && length(lims) != (here[1] * 2))
stop("lims is wrong")
wa <- 0
for(i in 1:length(which)){
if(is.numeric(which) == FALSE)
wa <- c(wa,w1[which[i] == clbs])
else wa <- c(wa,w1[w1 == which[i]])
}
wa <- wa[-1]
if(length(wa) != here[1])
stop("which is wrong")
wb <- clbs[wa]
if(here[2] < 3.5 || here[2] > 7.5) {
if(missing(data))
stop("data is missing")
if(length(data) > 1)
stop("only one class (data) allowed")
}
if(length(lims) == 0) {
if(here[1] == 1)
lims <- fit$ranges[, wa]
else lims <- c(fit$ranges[, wa[1]], fit$ranges[, wa[2]])
}
if(missing(xlab))
xlab <- as.character(wb[1])
if(missing(ylab)) {
if(here[1] == 2)
ylab <- as.character(wb[2])
if(here[2] > 6.5)
ylab <- "probability"
if(here[2] == 6)
ylab <- "class"
}
if(missing(zlab) && here[2] == 2)
zlab <- "probability"
if(missing(n) && here[1] == 1)
n <- 250
if(missing(n) && here[1] == 2)
n <- 50
if(here[1] == 1) {
cov <- matrix(cov, byrow = TRUE, nrow = n, ncol = fit$ncov)
c1 <- lims[1] + ((lims[2] - lims[1]) * (0:(n - 1)))/(n - 1)
cov[, wa] <- c1
}
if(here[1] == 2) {
cov <- matrix(cov, byrow = TRUE, nrow = n * n, ncol = fit$ncov)
c1 <- lims[1] + ((lims[2] - lims[1]) * (0:(n - 1)))/(n - 1)
c11 <- (rep(c1, n))
cov[, wa[1]] <- c11
c2 <- lims[3] + ((lims[4] - lims[3]) * (0:(n - 1)))/(n - 1)
c22 <- sort(rep(c2, n))
cov[, wa[2]] <- c22
}
if(here[2] <= 3) {
v1 <- ppolyclass(data, cov, fit)
v1 <- matrix(v1, n, n)
if(here[2] == 1)
contour(c1, c2, v1, xlab = xlab, ylab = ylab, ...)
if(here[2] == 2)
persp(c1, c2, v1, xlab = xlab, ylab = ylab, zlab = zlab,
...)
if(here[2] == 3)
image(c1, c2, v1, xlab = xlab, ylab = ylab, ...)
}
if(here[2] == 6) {
v1 <- cpolyclass(cov, fit)
plot(c1, v1, type = "l", ylim = range(ww), xlab = xlab, ylab =
ylab, ...)
}
if(here[2] == 8) {
v1 <- ppolyclass(data, cov, fit)
plot(c1, v1, type = "l", xlab = xlab, ylab = ylab, ...)
}
if(here[2] == 4 || here[2] == 5) {
v1 <- cpolyclass(cov, fit)
v1 <- matrix(v1, n, n)
if(here[2] == 5)
image(c1, c2, v1, xlab = xlab, ylab = ylab, ...)
}
if(here[2] == 4) {
zz <- range(v1)
z1 <- 1 * (v1 < zz[1] + 0.5)
                contour(c1, c2, z1, xlab = xlab, ylab = ylab, levels = 0.5,
                        drawlabels = FALSE, ...)
if(zz[2] - zz[1] > 1)
for(i in (zz[1] + 1):(zz[2] - 1)) {
z1 <- 1 * (v1 < i + 0.5)
                                contour(c1, c2, z1, drawlabels = FALSE, levels = 0.5,
                                        add = TRUE, ...)
}
}
if(here[2] == 7) {
v1 <- ppolyclass(cov, fit)
plot(c1, v1[, 1], type = "l", xlab = xlab, ylab = ylab, ylim =
c(0, 1), ...)
abline(h = c(0, 1))
zz <- length(v1[1, ])
if(zz > 2)
for(i in 2:zz) {
v1[, 1] <- v1[, 1] + v1[, i]
lines(c1, v1[, 1])
}
}
invisible()
}
rpolyclass <- function(n, cov, fit)
{
   if(!inherits(fit, "polyclass"))stop("fit is not a polyclass object")
if(!missing(cov))cov <- unstrip(cov)
if(n < 1)
stop("n is wrong")
if(is.matrix(cov) == FALSE)
cov <- matrix(cov, nrow = 1)
if(length(cov[1, ]) != fit$ncov)
stop("wrong number of covariates")
if(n > 1 && length(cov[, 1]) == 1)
cov <- matrix(cov, nrow = n, ncol = fit$ncov, byrow = TRUE)
if(n != length(cov[, 1]))
stop("cov has wrong number of rows")
vv <- ppolyclass(cov, fit)
ww <- runif(n)
zz <- rep(fit$nclass, n)
for(i in 2:fit$nclass)
vv[, i] <- vv[, i] + vv[, (i - 1)]
for(i in fit$nclass:1)
zz[ww < vv[, i]] <- i
if(min(fit$classnames) == 0)
zz <- zz - 1
zz
}
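# Illustrative sketch for rpolyclass(): draw class labels at fixed covariate
# values according to the fitted conditional probabilities. Guarded by
# if (FALSE); data and fit are hypothetical.
if (FALSE) {
    set.seed(4)
    covs <- cbind(runif(300), runif(300))
    cls <- 1 + (covs[, 1] + covs[, 2] > 1)
    fit <- polyclass(cls, covs)
    draws <- rpolyclass(100, c(0.5, 0.5), fit)  # 100 draws at covariates (0.5, 0.5)
    table(draws)                                # empirical class frequencies there
}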
print.polyclass <- function(x,...)
{
summary.polyclass(x)
}
summary.polyclass <- function(object,...)
{
   if(!inherits(object, "polyclass"))stop("object is not a polyclass object")
fit <- object
it <- fit$method
cat("========================POLYCLASS summary=======================\n")
cat(paste("The fit was obtained with\n "))
cat("\b\b")
print(fit$call)
cat(paste("There were",fit$nclass,"classes and", fit$ncov,"covariates.\n"))
if(it == 1) cat(paste("There were", fit$sample, "trial cases and",
fit$tsample, "test cases.\n\n"))
else cat(paste("There were",fit$sample,"cases.\n\n"))
if(it == 0) {
cat("The model selection was carried out using AIC.\n")
if(0.99 < fit$penalty/log(fit$wgtsum)&&fit$penalty/log(fit$wgtsum) <1.01){
cat(paste("The penalty was the default, log("))
cat(paste(round(fit$wgtsum), "\b)="))
cat(paste(round(log(fit$wgtsum), 2), "\b.\n"))
}
else{
cat(paste("The penalty was", round(fit$penalty, 2),
"\b, the default would have been log("))
cat(paste("\b",round(fit$wgtsum), "\b)="))
cat(paste(round(log(fit$wgtsum), 2), "\b.\n"))
}
}
if(it == 1) cat("The model selection was carried out using a test set.\n")
if(it == 2) cat(paste("The model selection was carried out using", fit$cv,
"\b-fold cross-validation.\n"))
if((it == 1 || it == 2) && fit$select==0) {
a2 <- range(fit$loss + diag(rep(1, fit$nclass)))
if(a2[2] == 1 && a2[1] == 1) cat("The standard loss-matrix was used.\n")
else cat("A loss matrix was provided.\n")
}
if((it == 1 || it == 2) && fit$select ==2) {
cat("The sum of squared probabilities was used for the loss.\n")
}
if((it == 1 || it == 2) && fit$select ==1) {
cat("Minus the test set log likelihood was used for the loss.\n")
}
if(length(fit$logl)<12)fit$logl<-t(as.matrix(fit$logl))
a2 <- fit$logl[fit$logl[,1]==fit$ndim, ]
cat(paste("The model had dimension", fit$ndim,
"\b, log-likelihood",round(a2[3], 2)))
if(it == 0) cat(paste(" and AIC", round( a2[2], 2), "\b.\n\n"))
if(it == 1) cat(paste(" and loss", round( a2[2], 2), "\b.\n\n"))
if(it == 2) cat(paste(" and AIC", round( a2[2], 2), "\b.\n\n"))
if(it == 2){
cat(paste("The penalty was cross-validated between",
round(fit$cv.aic[1],2)))
if(fit$cv.aic[2]<0) cat(paste(" and Inf to",round(fit$cv.aic[4],2)))
else cat(paste(" and",round(fit$cv.aic[2],2),"to",
round(fit$cv.aic[4],2)))
cat(paste(" (loss",round(fit$cv.aic[3],2),"\b).\n"))
cat(paste("The default penalty would have been log(",
round(fit$wgtsum), "\b)=", round(log(fit$wgtsum), 2), "\b.\n"))
}
cat("The locations of the knots:\n")
dimnames(fit$knots)[[2]][1] <- "Number"
print(round(fit$knots, 3))
cat(paste("\n There are", fit$nbas, "basis functions, summarized below:\n"))
a3 <- length(dimnames(fit$fcts)[[2]])
for(i in 5:a3)
dimnames(fit$fcts)[[2]][i] <- paste("Class",dimnames(fit$fcts)[[2]][i])
print(round(fit$fcts, 3))
cat("The first basis function is the constant function. For all others,\n")
cat("the first column and the third column indicate on which covariates\n")
cat("that basis function depends. If the third column is NA, the basis\n")
cat("function depends on only one covariate.\n")
cat("For the nonconstant basis functions the second and the fourth column\n")
cat("indicate on which knot the function depend. If these columns are NA,\n")
cat("the basis function is linear in this covariate.\n")
cat("The remaining columns give the coefficients.\n")
cat("\n")
cat("================================================================\n")
if(fit$method==0)
cat("The influence of the penalty parameter is summarized below:\n")
if(fit$method==1)
cat("The effect of the penalty in the final run is summarized below:\n")
if(fit$method==2)
cat("The equivalence of the penalty parameter is summarized below:\n")
dimnames(fit$logl)[[1]] <- rep("",length(fit$logl[,1]))
fit$logl[,3:5] <- fit$logl[,3:5]/fit$wgtsum
if(fit$method==1)fit$logl[,6:8] <- fit$logl[,6:8]/fit$twgtsum
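## the sign of the AIC column is flipped for printing only and restored just below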
fit$logl[,"AIC"] <- (-fit$logl[,"AIC"])
print(round(fit$logl, 3))
fit$logl[,"AIC"] <- (-fit$logl[,"AIC"])
if(fit$method==2){
cat("The relation between the CV-loss and the penalty is summarized below:\n")
dimnames(fit$cv.tab)[[1]] <- rep("",length(fit$cv.tab[,1]))
print(round(fit$cv.tab, 3))}
cat("================================================================\n")
cat("The importance-anova decomposition is:\n")
anova <- fit$anova
anova[,3] <- anova[,3]*100
dimnames(anova) <- list(rep("",length(anova[,1])),
c("Cov-1","Cov-2","Percentage"))
print(round(anova,2))
cat("================================================================\n")
invisible()
}
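## Usage sketch (hypothetical fit object): the print method simply forwards to
## summary.polyclass(), so both calls below produce the same report of the
## knots, the basis functions, and the penalty/log-likelihood tables.
##   summary(fit)
##   print(fit)   # equivalent, via print.polyclass()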
beta.polyclass <- function(fit, which, xsp = 0.4, cex)
{
if(!inherits(fit, "polyclass")) stop("fit is not a polyclass object")
plot(c(0, 1), c(0, 1), axes = FALSE, xlab = "", xlim = c(0.1, 0.9), ylim =
c(0.1, 0.9), ylab = "", type = "n")
lines(c(0, 1, 1, 0, 0), c(1, 1, 0, 0, 1))
xsp <- xsp/4
if(missing(which))
which <- fit$classnames
if(fit$classnames[1] == 0)
which <- which + 1
if(missing(cex))
cex <- par()$cex
nb <- fit$nbas
lines( c(4 * xsp,4*xsp),c(0,1))
b11 <- fit$beta
for(i in 1:nb) {
b1 <- fit$fcts[i, ]
y1 <- 1 - (i - 0.5)/nb
y0 <- 1 - (i - 1)/nb
y2 <- 1 - (i - 0)/nb
lines(c(0, 4 * xsp), c(y0, y0))
lines(c(2 * xsp, 2 * xsp), c(y0, y2))
aa <- fit$covnames[b1[1]]
if(is.na(b1[2])) aa <- paste(aa, "linear")
else aa <- paste(aa, "at", signif(fit$knots[b1[1], b1[2] + 1], 2))
if(i==1)aa <- "constant"
text(xsp, y1, aa, cex = cex)
if(!is.na(b1[3])) {
aa <- fit$covnames[b1[3]]
if(is.na(b1[4])) aa <- paste(aa, "linear")
else aa <- paste(aa, "at", signif(fit$knots[b1[3], b1[4] + 1], 2))
text(xsp * 3, y1, aa, cex = cex)
}
lines(c(4 * xsp + 0.03, 0.97), c(1 - (i - 0.1)/nb, 1 - (i - 0.1)/nb))
}
b2 <- range(b11)
for(i in 1:nb) {
b1 <- round(((0.92-4*xsp)*(b11[i,]-b2[1]))/(b2[2]-b2[1])+4*xsp+0.04,2)
aa <- rep(1, length(b1))
for(j in 1:length(b1)) aa[j] <- sum(abs(b1[1:j] - b1[j]) < 0.01)
bb <- max(aa)
if(bb > 1) bb <- 0.7/(nb * bb)
for(j in which) text(b1[j], 1 - (i - 0.2)/nb + bb * (aa[j] - 1),
as.character(fit$classnames[j]))
}
invisible()
}
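## Usage sketch (hypothetical fit object): beta.polyclass() draws a schematic
## of the basis functions (covariate and knot labels on the left) and places
## the class labels at positions proportional to their fitted coefficients on
## the right.
##   beta.polyclass(fit)                               # all classes
##   beta.polyclass(fit, which = fit$classnames[1:2])  # only the first two classes
##
## testhare below is the example data set shipped with this code: one long
## numeric vector laid out eight values per record, two records per source
## line (presumably reshaped into an eight-column matrix elsewhere).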
testhare <- c(4.974595958,0,1,2.456985,8,38,5.229125,1,3.422498434,0,0,2.177377,7,49,5.277500,0
,4.290693972,1,0,4.381446,20,54,5.485566,0,11.301950208,0,0,3.526174,10,65,4.621450,0
,10.683645663,0,1,1.150400,5,34,4.766442,1,3.741203855,1,0,5.087841,13,52,6.405083,0
,7.141522554,0,1,1.056958,8,46,4.682535,1,2.563535609,1,1,1.278860,6,25,4.556451,1
,3.701746380,0,1,3.999343,10,54,6.084539,0,6.395697579,0,1,1.336799,7,59,4.631800,0
,0.275924575,1,0,4.426891,10,45,5.141796,1,7.993160854,0,0,1.512389,8,53,4.976703,0
,10.650698724,0,1,2.227674,3,56,4.770898,0,1.015110143,1,0,5.693455,13,71,4.648958,0
,3.805403838,1,0,2.315779,5,45,4.921255,1,8.068892808,1,1,3.921555,14,29,4.820110,1
,0.944656017,1,0,6.564750,10,46,4.194352,1,1.320377850,1,1,2.505369,30,53,5.652503,0
,4.858707158,0,1,3.818449,8,24,5.283514,1,12.207398556,0,1,3.033311,3,50,4.506939,0
,10.981959783,0,0,2.896733,17,62,4.637291,0,3.407096607,0,1,1.175291,13,58,5.060192,0
,7.508765234,0,0,3.347511,9,44,5.096031,1,5.665519855,0,1,1.961776,3,30,5.067910,1
,11.655373133,0,0,3.555977,3,44,4.810457,0,1.961668982,1,1,7.169299,5,28,5.025885,1
,9.198057574,1,1,2.027242,7,40,5.077524,1,8.781112429,0,0,3.017898,19,61,4.660392,0
,1.093624486,1,1,6.143122,8,39,4.956558,1,2.924913855,1,1,4.724365,13,67,4.627196,0
,10.315301712,0,0,3.139070,15,70,6.654374,0,3.512635454,0,1,1.635826,3,50,6.034860,0
,1.883750176,1,1,2.651898,7,51,5.034317,1,4.690818787,1,1,5.931697,13,29,5.220239,0
,4.525531470,0,1,1.882305,4,48,6.011017,0,1.643812226,1,1,4.041397,17,39,5.052686,1
,4.777447362,0,1,1.906920,5,34,5.695211,1,2.127848760,0,0,2.436000,7,42,4.745345,1
,8.193705169,0,0,5.602968,15,39,5.161291,1,10.871217535,0,1,3.307412,3,50,4.506939,0
,3.646383136,0,1,3.742517,4,42,5.357143,1,1.809633167,1,1,3.949245,5,28,5.015292,1
,9.403031955,0,1,1.650917,9,66,4.374088,0,2.598629459,0,0,5.005954,20,54,5.270361,0
,0.716941135,0,0,2.367035,15,29,5.151093,1,1.149237689,1,0,3.917028,3,66,5.229125,0
,6.634921268,1,0,3.619424,2,39,4.753973,1,6.677475551,0,0,2.680901,3,59,4.619330,0
,1.853423959,1,0,7.280685,13,39,5.374839,1,1.977927943,1,1,1.479475,5,40,5.329045,1
,3.071427712,1,1,6.376802,17,61,4.761905,0,5.729473555,0,1,2.209530,4,70,5.251685,0
,12.999040825,0,1,3.899504,7,45,4.800717,1,4.059714382,0,1,3.377176,5,32,4.535342,1
,0.744850674,1,0,4.984910,20,55,6.196016,0,0.855878893,1,0,7.315716,7,47,5.851493,1
,9.801334057,0,0,1.829557,5,54,5.921052,1,8.941585984,0,0,3.592722,4,33,4.594660,1
,2.148160196,1,0,2.452756,7,41,4.230605,1,3.481338340,1,1,4.809930,7,34,5.247021,1
,6.696794779,0,1,5.234634,17,46,4.693797,0,3.835564836,1,0,4.091902,13,32,5.455447,1
,1.005947051,1,0,8.155644,17,56,5.352583,0,3.254906373,0,0,2.894001,13,38,4.860499,1
,3.673054257,0,1,3.269990,8,56,6.070261,0,6.852679664,0,0,2.728159,3,27,4.902511,1
,3.766346315,0,0,1.107997,6,52,4.599488,0,0.722085805,1,1,2.561833,13,31,4.705882,1
,5.808865352,0,1,4.302919,12,48,4.778376,0,0.758610999,1,1,4.873266,13,68,4.781478,0
,8.604434532,0,0,3.738226,10,50,4.980119,0,0.649993805,1,0,1.755723,5,62,5.077524,0
,8.846562896,0,0,2.704749,7,33,5.344762,1,8.872918703,0,1,1.049267,13,57,4.848485,0
,4.903475126,0,0,4.319373,3,52,5.294117,0,11.336698376,0,0,2.686271,4,60,4.475359,0
,3.752937025,0,1,6.675384,8,52,5.418258,1,1.177802659,1,1,1.113398,12,35,5.425139,1
,8.781171469,0,1,1.224230,8,43,5.010377,1,9.027689550,0,1,2.545398,6,23,5.728220,1
,3.495951230,0,1,4.830449,13,23,4.115462,1,3.514034237,0,1,4.004309,17,54,4.984073,0
,9.832671190,0,0,2.545287,5,22,3.697551,1,4.314590803,1,0,3.193582,10,49,4.848485,1
,1.502567605,1,0,4.507756,6,41,5.131558,1,5.588331033,0,1,4.173111,8,38,4.328138,1
,4.473941263,1,1,3.461476,17,51,5.038911,0,3.918024806,1,0,2.239662,3,43,4.666667,1
,9.091762674,0,1,1.049056,5,48,4.836185,0,1.080213129,1,1,4.159584,30,43,5.518136,1
,7.434034034,0,1,3.408190,5,43,4.984073,1,4.965729778,0,1,3.387252,8,32,5.659616,1
,4.086529910,0,1,2.015208,5,52,4.860499,0,2.178294984,1,1,4.064211,5,39,5.221878,1
,3.600221681,0,1,1.750524,10,50,4.355976,1,0.633003287,1,0,4.965685,17,22,4.923234,1
,6.274058768,0,1,1.757575,7,28,4.841229,1,0.808337851,1,1,1.089425,7,39,5.014839,1
,8.438751752,0,1,1.066097,3,53,5.454607,0,8.904313667,1,1,3.176962,13,43,5.180603,1
,5.557537136,1,1,2.930177,10,29,5.474375,1,5.199344839,0,1,1.768274,10,29,5.959141,1
,3.468550196,0,1,3.505014,5,32,4.535342,1,6.120901706,0,1,1.958902,8,46,4.682535,1
,2.638955051,1,0,5.593618,10,46,5.555451,1,5.793515954,0,1,3.807752,7,49,4.944132,1
,0.002876836,0,1,2.647479,7,22,4.960784,1,5.017167941,0,1,3.003154,7,37,4.974027,1
,3.084513249,0,1,1.570307,13,45,4.847189,1,10.665055115,0,1,3.320491,8,41,6.014000,1
,10.370884446,0,0,1.159649,5,50,5.252364,0,3.721920684,0,1,3.817728,4,42,5.357143,1
,3.608864926,1,0,2.325495,10,44,4.724556,1,4.270983923,1,1,2.324736,7,53,4.250432,0
,3.312833467,1,0,3.841955,8,58,4.827945,0,4.252580879,1,1,1.212867,8,32,5.516086,1
,1.926961555,1,1,4.700265,12,68,5.286123,0,2.337763801,1,0,3.896992,3,55,5.374839,0
,3.856019937,0,0,2.141238,8,52,4.819277,0,7.267598202,0,1,2.835711,6,33,4.508021,1
,2.815494852,1,0,3.391577,5,34,5.421687,1,6.646758005,0,1,7.461381,7,44,5.115846,1
,5.757489061,0,1,1.978295,27,67,4.908459,0,7.072637549,0,0,1.542867,7,55,4.781461,1
,0.242647827,0,1,4.614868,8,24,5.329045,1,5.721986249,0,0,3.803286,27,53,4.607373,0
,1.869202546,1,1,1.941625,9,44,5.059026,1,7.093576279,0,0,1.198225,5,43,4.660392,1
,4.924369386,1,0,7.218409,10,56,4.414404,0,5.541972634,0,1,2.106218,15,48,5.223193,1
,4.777928196,1,1,3.838427,10,34,4.902511,1,2.288150712,1,1,4.782008,11,52,5.386785,0
,3.875663734,0,1,3.145217,5,40,5.038871,1,6.435290254,0,0,1.437421,11,43,4.915615,1
,3.651424411,0,0,4.634806,3,57,5.128117,0,3.437407842,1,1,3.025336,10,53,5.583828,1
,5.942381638,0,1,1.767708,7,45,5.386785,1,1.844332249,1,1,1.122816,7,48,5.038911,1
,6.159103114,0,0,1.327351,13,64,5.153882,0,0.864839180,0,0,3.753201,8,73,4.666667,0
,4.012976033,0,0,3.832005,10,43,5.257000,1,1.308869072,1,0,2.659949,16,56,4.921529,0
,4.437839179,0,1,4.888316,10,45,4.615931,1,5.572560916,0,1,3.144377,3,44,5.951397,1
,2.299757972,1,1,4.471766,13,36,4.850811,1,1.677105018,1,1,4.428328,8,29,5.453168,1
,6.498067211,0,0,2.614110,5,53,4.861484,0,4.267064982,1,0,3.097657,3,55,5.052686,0
,1.625555118,1,1,1.600333,7,30,4.548680,1,8.675170724,0,0,2.526882,8,35,5.315730,1
,2.231627990,1,1,3.222574,15,47,5.000000,1,4.314665431,0,1,4.291517,10,59,4.652018,0
,4.281169110,0,1,4.733467,10,51,4.913402,0,3.290729834,0,1,2.427684,9,60,4.493949,0
,2.659039388,1,1,2.069452,9,47,4.276304,1,4.318971205,0,0,4.470250,8,25,4.694526,1
,5.860576271,0,0,2.539947,17,52,4.668973,0,2.315594708,1,1,4.020647,23,29,5.115846,1
,2.804584226,1,1,5.534834,7,36,5.553775,1,7.975260185,0,0,3.792803,5,62,4.660265,0
,1.106782961,1,1,9.358279,28,37,5.333333,1,2.501710933,1,0,2.566242,10,58,5.552011,0
,7.529826116,1,1,1.469639,17,51,4.672253,0,7.114662981,0,0,3.256092,10,60,5.295317,0
,4.663097138,1,1,3.591701,7,48,5.770498,0,2.589886223,1,1,3.334416,10,76,4.668973,0
,4.424446586,0,0,1.197006,8,49,4.948717,1,0.602173486,1,0,5.747700,9,37,5.517594,1
,2.288680116,0,1,6.681697,15,25,5.313040,1,4.138863511,0,0,1.686351,10,28,6.073310,1
,0.143355979,0,1,4.700276,17,67,4.686909,0,1.620006848,1,1,3.728193,13,66,4.550068,0
,4.152298128,1,1,5.478927,7,23,4.908459,1,6.251817060,0,1,4.321571,13,61,4.886249,0
,9.441347218,0,1,1.159361,27,53,4.478945,0,4.664831930,0,1,1.100395,4,33,4.594265,1
,7.672083922,0,1,3.318092,15,65,5.225269,0,2.932772797,1,0,4.330532,5,45,4.753750,1
,3.730763147,1,0,1.196599,9,75,4.292613,0,8.669274496,0,1,4.078849,10,54,4.733728,0
,3.210499902,1,0,4.826839,10,57,4.997703,0,5.740347834,0,1,3.048919,11,58,4.652018,0
,1.818485626,1,1,4.413885,22,61,6.091449,0,6.508071678,1,1,3.734937,10,55,4.892449,0
,8.548549536,0,1,1.650514,5,30,4.886216,1,4.437758745,0,0,6.218407,10,63,4.491464,0
,2.424876189,1,0,3.361363,3,55,5.374839,0,1.102067806,1,1,4.190865,7,46,6.214974,1
,4.385735762,1,1,4.993822,3,30,4.607373,1,2.014738577,1,1,6.646280,15,72,4.615620,0
,6.180637882,0,1,3.544051,10,62,5.223193,0,4.254613903,0,1,2.863435,7,50,4.503865,0
,8.403602001,0,1,3.592442,14,36,5.345836,1,4.647634726,0,1,2.318024,4,31,4.966996,1
,5.131910086,0,1,2.351699,11,40,5.277500,1,1.344906684,1,0,1.606278,8,46,5.404638,1
,1.532573575,1,0,6.670015,19,68,5.116169,0,1.232745480,0,1,5.068406,13,51,4.965363,0
,1.037380417,1,0,6.248003,8,49,4.891389,0,8.589759928,0,1,1.391212,7,62,4.145781,0
,1.790415350,1,1,2.591412,8,46,4.952207,1,8.620250368,0,0,2.692689,10,39,5.423261,1
,0.767846852,1,0,8.315848,10,43,5.219121,1,9.112477010,0,0,2.741637,8,35,5.315730,1
,0.931669122,1,1,2.236075,13,42,4.789794,1,12.337309722,0,0,1.643076,13,44,4.923659,1
,5.651648457,0,1,2.655148,7,67,5.625326,0,9.887523533,0,1,3.089551,17,51,5.336655,0
,0.527471159,1,0,5.521132,9,37,5.517594,1,2.635730106,1,1,2.267928,10,64,4.408289,0
,0.684619288,1,1,1.799920,12,30,4.272742,1,3.696458325,0,0,1.079333,6,30,5.070603,1
,1.492219376,0,1,2.218436,10,31,5.324759,1,6.978924470,0,1,1.167364,13,53,5.045599,0
,4.287556533,0,1,3.195852,20,61,4.694526,0,9.563446520,0,1,1.345802,7,53,4.789546,0
,6.543785368,1,1,3.793917,12,36,5.006571,1,2.673429072,0,0,6.431115,3,31,4.652324,1
,3.497507432,1,1,3.026543,14,37,6.250000,1,7.274422542,0,1,1.216880,5,63,4.731417,0
,11.423145428,0,1,3.599164,10,55,4.921529,0,9.941126805,0,1,3.158260,3,48,4.649801,0
,5.630463122,0,1,4.641624,10,54,4.535342,0,4.441394618,0,1,1.021535,5,53,4.521807,0
,6.712453194,0,1,1.310118,13,44,5.340002,1,0.220304507,0,0,1.260606,10,62,5.526557,0
,6.194856035,1,1,3.475943,7,39,4.075414,1,6.909127758,0,1,7.057882,7,65,4.798963,0
,6.178783402,0,1,4.908034,8,33,5.257357,0,3.377913012,0,1,6.624742,13,53,4.633481,0
,7.414119272,0,0,5.547483,13,33,4.953681,1,6.751937838,0,1,2.774286,8,39,5.521156,1
,5.701612672,0,0,7.545380,11,34,5.052686,1,2.066030486,1,1,3.576639,8,55,5.370431,0
,9.254740104,0,1,1.728768,5,22,4.881406,1,9.515433653,0,1,3.845054,17,51,5.336655,0
,4.151351126,1,0,4.953528,7,49,5.263158,1,4.188501767,1,1,4.221468,15,45,4.741448,0
,3.896630265,1,1,4.916828,8,44,5.235233,1,1.822954456,1,0,1.203984,12,49,4.864693,1
,3.568449743,0,0,5.435036,15,41,5.120809,1,1.912548820,1,1,7.668028,13,41,4.704970,1
,11.219042551,0,0,2.074424,2,61,4.635125,0,11.056621394,0,0,2.503133,17,65,6.250000,0
,7.418174278,0,1,3.182933,5,46,6.531973,1,3.800188802,0,1,6.740669,4,53,5.155131,0
,5.602112811,0,0,1.622664,13,58,4.535342,0,6.257003117,0,1,3.171081,13,42,4.430379,1
,0.518431071,1,1,5.500709,23,24,5.401257,1,4.727847199,0,1,4.545202,10,59,4.652018,0
,0.724555281,1,1,4.653086,10,49,4.991069,1,1.903682072,1,1,3.880554,10,30,4.759858,1
,2.109078189,1,1,4.012484,12,68,5.286123,0,4.259027131,1,1,3.932303,7,48,5.770498,0
,2.744858570,1,1,4.085707,10,33,4.850712,1,2.220550942,1,1,4.523721,11,52,5.386785,0
,7.380341967,1,0,1.163121,13,53,5.154913,0,1.774944681,1,1,3.547665,5,28,5.015292,1
,3.751020721,0,1,2.283102,3,33,4.419417,1,6.856077620,0,1,7.096555,7,65,4.798963,0
,1.547230496,1,0,3.234160,4,48,4.991342,0,11.157422549,0,1,5.202307,18,52,4.991342,0
,11.707553546,0,0,3.998700,7,57,4.980119,0,7.696203645,1,0,2.601323,5,54,5.247021,0
,5.096846550,1,0,1.701784,6,46,5.869379,1,0.723263842,1,1,3.526346,7,53,4.808812,0
,5.397526946,0,0,1.536233,4,56,4.631800,0,6.440626935,0,1,2.975475,6,45,4.570437,1
,1.409737724,0,0,7.226188,13,55,4.761905,0,6.641189913,0,1,4.236774,10,64,5.952871,0
,6.508748090,0,1,3.301017,12,38,6.141898,1,4.921195146,0,0,1.177132,9,33,5.142595,1
,0.664008194,1,1,3.267832,12,56,5.796012,0,2.199608074,1,0,3.221255,7,46,5.229125,1
,7.090401998,1,1,1.632593,8,50,5.270361,0,1.412069554,1,1,4.108582,13,50,4.778376,1
,7.200055097,0,0,1.507909,6,58,4.453618,0,4.685260559,0,1,8.766184,9,59,4.642308,0
,4.333250147,1,1,4.984817,13,28,5.241720,1,7.054771580,0,1,2.586440,8,31,5.624463,1
,2.085870276,1,1,1.130246,20,33,4.176713,1,2.760954702,1,1,2.942864,7,21,5.052686,0
,8.557328138,0,1,5.510820,10,52,4.466325,0,4.062899508,1,1,1.980304,23,51,5.439283,1
,4.257764630,0,1,3.032253,2,29,5.006571,1,6.023658070,0,1,3.500897,5,44,6.110101,1
,1.665516820,1,0,5.236119,10,49,5.370431,1,4.713091096,1,1,3.199575,10,49,4.784644,0
,1.136867995,1,0,6.025526,17,70,5.580490,0,1.489985563,1,1,3.291423,33,56,5.514099,0
,3.156369948,1,0,2.237694,10,44,4.724556,1,4.897130273,0,1,3.183937,5,71,4.439968,0
,2.909972143,1,1,3.821681,10,76,4.668973,0,0.473195174,1,1,4.441651,20,39,5.015292,1
,1.846312487,1,1,3.836800,8,32,5.168114,1,5.763980772,0,1,5.612299,17,33,5.014839,1
,5.833846860,1,0,1.199922,13,54,4.870861,0,1.191873836,1,1,3.212073,8,44,5.355851,1
,3.438199779,1,1,2.004780,12,67,6.013071,0,4.176913133,1,0,3.683610,17,51,5.038911,0
,3.245481586,1,0,4.344132,13,39,5.504342,1,6.260273716,0,1,2.581937,5,55,4.631800,0
,9.268476483,0,1,3.358700,7,51,4.887685,0,10.335815530,0,1,7.678052,14,52,4.701095,0
,2.965332277,1,1,5.646492,11,69,5.112992,0,2.341294041,1,1,5.263336,10,38,5.376453,1
,6.822608427,0,1,2.639990,7,28,4.808812,1,2.250602160,1,1,2.721885,5,69,5.138322,0
,6.300978500,0,0,2.470716,3,59,4.619330,0,2.465998212,0,0,6.831051,3,31,4.652324,1
,1.066376267,1,1,4.962165,17,49,4.718646,0,4.101858701,0,1,1.495405,7,57,6.959705,0
,1.693029265,0,1,3.683914,5,29,5.407597,1,1.007218322,1,0,6.338842,12,45,5.142595,1
,8.734197112,0,0,2.838642,17,68,5.090253,0,4.708322109,0,0,5.810662,12,50,5.094267,0
,5.008139408,1,1,1.529218,25,66,5.656162,0,7.197577796,0,0,3.648851,8,34,5.040121,1
,8.259847282,0,1,3.615229,4,33,5.120764,1,5.956809496,0,1,2.401528,18,57,4.960819,0
,0.135921434,1,1,4.131625,17,41,5.661268,1,4.811381111,0,1,4.171850,12,32,5.661270,1
,11.183490087,0,0,2.992922,12,63,5.554637,0,7.198701062,0,0,5.353519,17,46,4.693797,0
,6.782307836,1,1,3.679246,2,39,4.753973,1,5.698595772,0,0,7.207650,11,34,5.052686,1
,3.805061920,0,1,4.685538,16,54,4.572111,0,8.245176787,0,1,2.535451,8,53,5.549887,0
,2.845214672,0,1,3.296065,3,64,5.185781,0,9.193522879,0,0,3.012419,18,56,4.577911,0
,6.649138882,0,1,3.782074,5,31,5.303301,1,6.293812206,1,0,2.499770,6,43,4.508264,1
,1.355687434,1,0,4.915542,12,58,5.221878,0,2.837860446,1,0,1.412286,8,37,5.720019,1
,9.479260786,0,0,2.993065,5,62,4.567398,0,7.091882950,0,1,2.566071,17,56,4.860499,1
,2.213798429,1,1,3.622597,3,62,5.023578,0,4.572762277,0,0,2.816184,12,40,5.115846,1
,7.170860199,0,0,3.612320,3,55,5.796012,0,4.790314174,0,1,2.316505,7,49,4.516129,0
,1.607172815,1,0,4.952683,12,36,5.366974,1,0.896199928,1,0,5.897702,17,30,5.043083,1
,3.449437714,1,1,3.232331,28,65,4.362469,0,9.753663756,0,1,7.526090,7,53,5.416025,1
,6.601340091,0,1,1.681674,17,39,5.500175,1,6.854971010,0,0,3.997224,13,40,5.169417,1
,1.568935028,1,1,7.717937,9,33,4.575657,1,2.867387874,1,0,4.569772,13,39,5.504342,1
,2.090968802,1,0,3.980132,10,69,6.748466,0,9.157326991,0,1,3.296270,7,58,4.736275,0
,9.830822584,0,1,1.160683,5,22,4.881406,1,4.054926218,0,1,3.163021,20,61,4.694526,0
,1.440696348,1,0,3.394164,13,60,4.533199,0,8.996651002,0,1,3.415186,7,42,4.362469,1
,1.297020996,1,0,3.878364,8,54,5.120809,0,1.378073953,1,1,3.211672,13,75,5.229125,0
,2.098558667,1,1,5.720832,6,66,5.111615,0,3.759938275,0,1,4.306056,11,50,6.312191,1
,4.893984218,0,0,3.603907,7,61,5.063291,0,7.869546174,0,0,1.644335,3,42,4.322629,1
,2.344838072,1,1,4.683118,12,53,5.078968,1,4.960971610,0,1,1.089539,5,41,4.694526,1
,9.330519779,0,0,1.536900,7,53,4.536092,0,1.105600935,1,0,4.549566,7,37,5.220239,0
,0.560492445,0,0,3.006726,18,50,4.766442,1,3.049369954,1,1,1.514977,4,48,5.318160,1
,4.341731608,0,1,4.888017,7,40,5.488113,1,5.456516720,0,0,5.344640,7,50,4.529359,0
,5.750005841,0,1,1.312609,3,30,5.067910,1,1.564712708,1,0,5.955641,20,57,5.544314,0
,5.426550372,0,1,2.767213,8,49,4.603557,1,2.779461860,1,1,6.596323,12,53,5.261336,1
,7.205733380,0,0,5.545706,5,38,5.697535,1,5.048819190,1,0,1.322403,13,23,4.203487,1
,0.545143450,1,0,4.658551,20,53,4.655240,0,5.995114114,0,1,2.703438,6,45,4.570437,1
,1.765002397,0,1,6.481649,8,33,4.577839,1,3.507343644,1,1,1.281901,8,61,5.733408,0
,0.532532162,1,0,1.495249,15,55,5.248639,0,5.797894983,0,0,5.659140,13,50,4.810457,0
,6.604511283,1,1,3.800932,8,29,5.164568,1,7.346587367,0,1,4.530767,20,55,5.481173,0
,5.069804537,0,1,4.181787,12,44,5.488113,1,0.339388948,0,0,9.448597,4,42,4.698308,1
,8.358854636,0,1,2.142333,10,62,5.352583,0,6.467726356,0,1,1.634026,10,53,5.015566,0
,6.643724944,1,1,8.089670,17,41,4.733485,1,6.919238833,0,1,2.022032,7,32,4.549815,1
,1.939941117,1,1,2.050028,10,52,4.655240,0,2.982583506,1,0,5.139349,12,28,4.741448,1
,1.810415111,0,1,3.409266,5,29,5.407597,1,8.570213281,0,1,4.862987,23,36,4.930935,1
,1.855761762,1,1,3.593343,7,38,5.733508,1,0.301597154,1,0,5.599797,8,45,5.832464,1
,0.231423197,0,0,1.062920,10,62,5.526557,0,0.345455383,1,0,7.131616,3,45,5.174546,1
,4.800155273,1,1,3.416740,11,55,4.980119,0,8.094522422,0,0,3.268683,7,27,6.061189,1
,4.578820375,0,1,2.946149,3,46,5.328577,1,4.411192012,1,0,2.870890,7,43,5.075993,1
,1.998407097,1,0,1.101122,7,48,5.038911,1,1.920281072,1,0,4.796375,15,44,4.624277,1
,6.057648934,0,0,1.113554,8,64,4.374088,0,6.048239898,0,0,5.933510,5,58,4.302066,1
,7.355324373,0,0,1.706783,9,58,4.706487,0,4.214826654,0,1,1.175137,7,57,6.959705,0
,6.013621460,1,1,2.599346,17,38,4.466325,1,2.043685884,1,1,4.903934,10,61,4.631770,0
,5.407607486,0,1,4.750799,12,44,5.488113,1,4.125843671,1,1,4.593218,7,45,4.814913,1
,10.592281430,0,1,1.023707,17,64,4.533199,1,8.030761938,0,0,5.385319,13,72,4.879078,0
,0.430427654,1,1,5.721438,20,40,4.650769,1,9.111283940,1,1,2.968081,7,40,5.077524,1
,7.515296381,1,0,2.305218,4,49,5.474375,0,3.820811482,0,0,5.717769,10,50,4.893999,1
,6.483626840,0,0,1.974287,6,58,4.453618,0,4.508714258,1,1,6.328627,17,50,5.390110,1
,2.375836544,1,1,1.053986,17,31,4.885980,1,1.999463538,1,0,4.978882,13,52,4.839637,0
,3.040389022,0,1,2.748946,13,55,4.166667,0,7.211924252,0,1,1.815023,8,46,4.682535,1
,4.881413052,1,0,2.811838,5,57,4.327874,0,2.005410373,1,0,3.951752,10,53,5.685352,0
,4.609222177,1,1,5.766891,3,33,5.161291,1,6.752495877,0,1,1.183893,4,37,4.784644,1
,0.208722173,0,1,6.427892,8,30,5.055576,1,2.176248739,1,1,4.656861,22,47,5.376453,1
,3.760232022,0,0,5.711707,15,55,4.701095,0,3.925154710,1,1,1.224083,18,42,5.096089,1
,5.312882297,0,1,2.241892,4,38,5.241315,1,10.110561068,0,1,2.274060,10,56,4.706487,0
,3.453600597,1,1,3.411477,5,36,4.696845,1,5.103860480,0,1,1.733784,3,37,5.504342,1
,1.394725925,1,1,4.341496,10,27,5.015566,1,4.579546124,1,1,5.020559,18,49,5.370431,0
,1.792864341,1,0,3.038276,10,36,4.080358,1,9.071814150,0,1,2.779851,3,33,5.185781,1
,6.091612196,0,1,3.155432,12,40,4.558028,1,8.145444102,0,0,3.384657,12,42,4.960784,1
,4.180701950,0,1,2.975262,3,33,4.419417,1,11.523924806,0,1,1.243254,13,58,4.672253,0
,12.248223385,0,1,3.902886,8,45,5.164568,1,4.068336384,0,0,3.626877,6,53,5.326697,0
,3.242764985,1,0,1.518876,9,75,4.292613,0,6.572059410,0,1,1.648025,12,56,4.938272,0
,1.561962705,1,0,3.583774,4,48,4.991342,0,3.597322147,1,1,3.786477,10,33,5.299465,1
,8.625950781,1,1,1.283505,13,58,5.006571,0,1.107881892,1,1,5.897928,8,39,4.272742,1
,6.834428988,1,1,3.316387,8,29,5.164568,1,1.457088764,1,0,3.600972,15,62,4.793944,0
,9.031411884,0,1,1.342553,8,34,5.832464,1,6.058416348,0,0,3.868683,3,56,4.109609,0
,6.172218574,1,1,3.529930,13,45,4.766596,1,7.229191107,0,0,1.194385,9,58,4.706487,0
,1.655939195,1,1,2.396522,10,46,4.535342,0,4.750297681,0,0,2.868717,4,70,5.251685,0
,6.528564183,1,1,3.688580,10,44,4.800717,1,1.669746362,0,0,4.164697,17,64,5.237828,0
,3.979072929,0,0,3.743189,3,66,4.723693,0,1.083100761,1,1,5.689226,12,44,5.904718,1
,7.408603030,0,1,2.350274,8,34,5.941006,1,1.797228265,1,0,6.354184,7,46,4.827945,1
,8.524258140,0,1,1.573125,8,66,4.637013,0,2.638208892,1,1,2.877399,9,35,4.493895,1
,0.800734659,1,0,6.762334,13,65,4.921255,0,2.972883404,1,1,4.039496,5,65,4.593059,0
,3.090467846,1,1,3.326468,4,50,5.318160,0,10.907723850,0,0,7.988268,7,53,5.416025,1
,7.952081973,0,1,5.389587,13,33,4.953681,1,9.147780870,1,1,3.650056,13,43,5.180603,1
,8.456363376,0,1,3.100534,7,50,5.000000,0,1.751124609,1,1,5.671827,13,23,4.705882,1
,0.778723502,1,1,4.347169,8,51,5.153882,0,0.432523381,1,0,4.831612,3,60,5.561514,0
,2.802435199,1,1,5.764255,16,31,5.391265,1,9.200843995,0,1,1.090543,7,46,4.778846,0
,0.989988651,0,1,3.249323,10,35,4.683626,1,8.125830337,0,1,1.916480,7,52,5.201327,0
,3.900602465,0,1,2.245333,8,60,5.207717,0,6.148142134,1,1,4.151725,10,49,4.766442,1
,3.924550284,0,1,1.006315,6,62,5.669801,0,3.992218117,0,1,3.059786,10,45,5.024872,1
,9.364093883,0,0,3.155882,10,50,5.132883,0,2.041921540,1,0,6.337418,7,46,4.827945,1
,1.058448307,1,0,1.079272,12,35,5.425139,1,2.561950667,1,0,3.701132,16,60,5.242941,0
,2.891827123,1,0,4.975455,10,34,5.554567,1,1.065517223,1,0,4.755102,28,68,6.280743,0
,2.235826966,1,1,2.710385,10,30,4.561979,1,4.603598643,0,0,1.202609,5,70,4.921255,0
,3.359902494,0,0,1.549379,8,60,5.094267,0,6.284424408,0,0,4.048934,7,28,4.563989,1
,6.993184066,0,1,4.821479,13,25,5.363205,1,7.456607707,0,1,4.032171,10,54,4.733728,0
,0.323357455,1,0,5.590777,16,52,4.648111,0,10.908473437,0,0,4.361976,17,53,5.115846,0
,4.245837517,0,1,1.965004,5,32,4.750900,1,6.287846860,0,1,3.153801,10,36,5.624385,1
,0.919237043,1,0,1.130161,10,68,4.548680,0,6.168837323,0,1,1.878300,6,39,5.784654,1
,8.086577793,0,0,5.794484,12,44,5.929093,1,6.249093265,0,0,1.603284,13,37,5.359112,1
,8.853233659,0,1,4.791674,12,57,5.000000,0,0.808009138,1,0,1.653349,9,48,6.154446,1
,2.050633455,1,1,7.609329,13,41,4.704970,1,11.241442774,0,0,3.096521,15,35,5.094267,1
,7.042880279,0,1,4.289976,8,31,5.164568,1,8.608023099,0,1,4.688038,10,54,4.733728,0
,6.093956216,0,1,3.094646,5,50,4.960819,1,2.201470875,1,1,5.949804,15,39,5.687042,1
,3.846729506,0,1,3.157237,5,32,4.535342,1,10.008533299,0,1,1.044579,5,59,4.562997,0
,1.394567123,1,0,1.831151,7,55,4.624277,0,5.958579279,0,1,9.670487,10,60,5.182124,0
,7.237163870,0,0,3.682433,10,50,4.798963,1,7.344749431,0,1,3.719710,13,28,5.386379,1
,4.124655849,0,1,1.828047,14,35,5.370431,1,6.491587475,0,1,4.974500,13,61,4.886249,0
,3.461048257,1,0,3.288262,3,56,4.680553,0,6.569245920,0,1,1.003871,12,56,4.938272,0
,5.387570287,0,1,3.880464,3,62,4.364066,0,1.864536497,1,0,5.825757,13,66,4.800717,0
,5.276904936,1,1,2.449857,28,38,5.580232,1,0.180977227,0,0,6.761964,8,30,5.055576,1
,0.002821072,0,1,3.259859,6,37,4.976703,1,6.273446445,0,1,2.089837,7,28,4.808812,1
,5.248314074,1,0,4.016090,15,54,5.326697,0,4.074713586,0,1,1.614378,3,37,4.766442,1
,6.782409465,0,1,1.454855,6,33,5.474375,1,3.537634320,0,1,2.668229,7,37,6.389871,0
,4.227414512,0,0,1.983044,8,41,5.266344,1,0.306393552,1,0,4.095328,15,64,5.033223,0
,5.763045197,0,1,1.500819,6,47,6.994941,1,1.378936682,1,0,3.451202,13,51,4.771733,0
,3.336393456,0,1,1.219664,3,50,6.034860,0,2.636784901,1,1,1.505303,7,39,5.219121,1
,6.289302730,0,1,1.367362,8,50,5.270361,0,1.525525784,1,1,7.044859,13,59,4.904786,0
,1.034546670,1,1,5.103517,13,41,5.732484,1,3.084538560,1,1,3.894454,7,42,5.295317,1
,1.680179850,1,0,4.599548,10,56,4.614682,0,7.051316381,0,1,3.207134,13,38,5.178184,1
,5.178364865,0,0,2.461112,10,49,4.741448,1,10.209324904,0,1,1.720407,13,58,4.672253,0
,3.462754478,0,0,4.651890,13,30,5.474375,1,1.943983868,1,1,1.979412,9,44,5.059026,1
,0.333382673,0,1,1.829802,7,39,6.185896,1,7.365314347,0,1,3.992740,3,35,4.835737,1
,6.456711297,0,0,1.955456,10,48,5.391265,1,5.352734390,0,1,1.381658,12,60,4.603557,0
,3.117490032,1,1,4.862531,8,39,4.827945,1,10.464199781,0,1,2.654715,3,56,4.770898,0
,6.648622390,0,0,1.418481,6,58,4.453618,0,0.504983812,1,1,2.871429,10,33,4.681194,1
,1.132197103,1,1,5.547047,20,68,4.619330,0,8.204686747,0,0,2.118565,7,33,5.344762,1
,0.548629641,1,0,8.382483,12,47,6.641995,1,6.545835318,0,1,2.851951,16,61,4.830680,0
,1.614850887,1,0,5.732372,20,41,4.907975,1,4.326508009,1,1,1.128477,10,51,5.877699,1
,9.631693548,0,1,1.589332,7,53,4.987757,0,3.743417556,0,0,4.826593,10,41,5.021689,1
,0.711919688,1,0,5.311243,8,34,6.344507,1,1.406760777,1,0,7.458299,8,67,4.723693,0
,5.347828937,0,1,1.984332,12,60,4.603557,0,4.511347593,0,1,4.925597,3,45,5.590170,1
,5.807706633,0,1,3.428959,8,34,5.422386,1,4.077919875,0,1,6.562409,30,55,4.556451,0
,6.051981912,0,1,1.919870,8,58,5.257000,0,5.732195844,0,1,4.652636,8,65,4.467590,0
,6.597855218,0,1,1.732938,13,44,5.340002,1,10.118567072,0,1,3.887820,4,45,6.558120,1
,6.110741125,0,0,2.134272,5,53,4.861484,0,0.231828608,0,0,4.035851,17,60,5.882353,0
,0.777022351,1,0,3.151760,7,30,5.295317,1,2.341648323,0,0,2.016336,4,40,5.103104,1
,6.279425508,0,0,1.674932,17,39,5.555556,1,10.721694673,0,1,2.376518,6,53,4.899540,0
,9.511700864,0,1,4.601852,15,51,5.106757,0,6.266444418,0,0,2.530300,18,57,4.960819,0
,7.547995835,0,1,4.582309,5,32,5.625000,1,1.458256258,1,1,4.566258,25,31,6.240738,1
,1.733922908,1,1,4.160118,7,26,5.763505,1,10.296185095,0,0,6.443537,5,62,5.588507,1
,0.616789585,1,0,6.848180,33,51,4.901409,0,0.190548415,0,1,1.433576,3,36,5.096031,1
,4.597437441,0,1,4.877607,7,37,5.624713,0,0.487907532,0,0,2.016946,8,37,5.237828,1
,4.573749060,1,1,4.648474,13,28,5.241720,0,6.351624030,1,1,1.223701,7,48,5.416645,0
,5.444808172,0,1,5.103662,18,37,4.252083,1,1.510551130,1,1,1.072253,2,47,4.330127,1
,1.584217669,1,1,7.082270,13,59,4.904786,0,7.480105808,0,1,4.115575,17,45,4.933737,1
,0.264522374,0,1,3.556272,17,31,6.419274,1,2.422858835,1,1,3.409903,5,32,4.760953,1
,5.450188944,1,0,1.790452,6,46,5.869379,1,6.809018450,0,1,2.416089,6,43,4.508264,1
,2.337273737,1,1,4.383944,8,48,4.841229,1,2.079221042,1,1,5.173593,15,68,5.039189,0
,8.267420054,0,0,1.442905,3,42,4.322629,0,6.906518314,0,1,4.727592,8,31,5.164568,1
,6.078153858,1,1,4.333743,6,51,5.767761,0,7.729096555,0,1,3.453160,13,54,5.266344,0
,6.317741962,0,0,1.894594,17,36,4.713139,1,3.696723353,0,0,2.437806,5,59,5.142595,0
,9.527512819,0,1,3.724191,8,55,5.015292,0,5.980060589,0,1,3.536102,5,50,4.960819,1
,5.877707151,0,0,2.010150,10,49,4.741448,1,7.932894615,0,1,3.430362,4,33,5.120764,1
,2.042875027,1,1,4.943407,12,68,5.286123,0,6.313682701,0,1,3.987807,27,38,5.095541,1
,3.943776047,0,0,4.250438,11,48,4.869480,0,4.190370232,0,1,3.602082,10,42,6.369427,0
,0.713752810,1,0,4.484808,5,59,5.642155,0,2.061156533,1,0,6.993872,10,58,4.997703,0
,7.265972062,0,1,2.504398,8,34,5.941006,1,5.897122381,0,1,5.522065,14,54,4.563989,0
,1.117037792,1,1,2.476285,15,37,5.296764,1,2.519232332,1,1,3.615621,13,40,4.956558,1
,6.722139594,1,1,2.970835,5,46,5.735394,1,2.939263624,1,0,4.783918,9,44,5.553775,1
,1.263742230,1,0,5.732667,2,41,5.327739,1,3.554809385,0,0,2.372134,8,52,4.819277,0
,2.005697320,1,0,3.737095,10,69,6.748466,0,9.113218144,0,0,1.729485,3,54,4.810457,0
,1.903591075,1,0,3.448379,8,66,4.916011,1,6.852265320,0,1,3.989755,10,50,5.811836,0
,1.148551185,1,0,7.176261,30,50,5.486694,1,11.838745909,0,0,2.502532,4,60,4.475359,0
,9.465308131,0,1,3.676296,8,55,5.015292,0,3.652735761,1,0,3.855869,4,59,5.521156,0
,7.709119867,0,1,5.872373,8,37,4.879415,1,11.227722208,0,1,1.768116,3,35,5.304117,1
,4.514278936,0,1,3.831382,3,43,5.050762,1,8.168211337,0,1,3.540189,14,36,5.345836,1
,2.110967002,1,1,4.250007,10,61,4.631770,0,0.170603433,0,1,2.554719,11,55,5.151093,0
,6.142549669,0,0,4.511429,12,37,4.976703,1,7.829874802,0,0,1.761346,10,27,5.290843,1
,4.041970755,0,0,3.166400,7,56,4.615620,0,0.881428488,1,0,4.963674,15,38,7.096774,1
,3.895394592,0,1,2.966590,7,50,4.503865,0,4.734134560,0,1,1.159845,8,42,4.921529,1
,5.549556897,0,1,2.515525,12,42,6.321264,1,5.957744865,1,1,4.282083,10,29,5.096089,1
,2.612217756,1,1,3.283575,5,32,4.760953,1,2.351318893,1,0,9.433906,7,38,5.474375,1
,3.929335919,0,1,1.969351,3,52,4.938272,0,6.057858733,0,1,3.963494,5,50,4.960819,1
,11.505287025,0,1,2.650312,9,45,4.705882,1,2.425573164,1,1,3.449912,12,40,4.870861,1
,8.537079653,0,0,1.919414,10,66,4.686909,0,10.535590867,0,0,5.666118,7,69,4.506939,0
,2.136745227,1,1,6.834885,15,25,5.313040,1,1.926380538,1,1,3.226138,7,38,5.733508,1
,8.115219096,0,0,1.465175,17,54,4.364066,0,5.472815064,0,1,1.782311,8,37,5.856070,1
,1.130849975,1,0,4.216702,28,68,6.280743,0,1.163170736,1,0,7.256262,30,50,5.486694,1
,3.365204161,0,1,4.654139,33,56,4.904786,0,1.162584948,1,1,3.640609,10,36,5.333006,1
,5.522293804,0,1,3.506413,10,34,4.881406,1,4.280830294,0,1,2.784282,6,56,4.540842,1
,0.536465463,1,1,6.009152,8,23,5.519851,1,5.372209310,0,1,4.339617,3,23,4.655240,1
,8.092996790,0,0,4.227226,10,44,5.476925,1,1.660042695,1,1,4.237032,10,43,4.773922,1
,5.046438225,0,1,1.891330,27,67,4.908459,0,0.558458981,1,1,2.598031,4,32,5.487283,1
,2.431132009,1,0,4.795160,12,42,4.423004,1,3.911608432,0,1,3.265681,7,56,4.615620,0
,1.728596360,1,0,5.031483,27,70,4.604683,0,6.485542586,0,1,2.892669,18,57,4.960819,0
,4.455368793,1,0,1.558971,25,66,5.656162,0,3.356534447,0,0,3.694339,15,69,4.902490,0
,3.764739669,1,1,3.220591,12,32,5.806452,1,8.603190010,1,0,3.052693,21,67,4.610694,0
,11.808447247,0,1,2.969251,17,54,4.851086,0,0.937717720,0,1,3.799739,10,35,4.683626,1
,0.356154239,0,0,1.705729,10,41,4.631800,1,7.749694261,0,0,1.176630,8,60,4.778846,0
,0.377782457,1,1,3.880753,3,33,4.944419,1,1.442296832,1,0,3.641154,13,60,4.533199,0
,2.966253607,1,1,1.941936,7,38,4.672253,1,4.243549545,0,0,4.388806,17,51,4.668973,0
,5.085674026,0,1,4.854721,8,65,4.467590,0,6.941571441,0,0,3.838433,12,45,4.422167,1
,1.894093992,1,1,3.204225,3,45,5.520686,1,1.899673674,0,0,4.416458,3,53,4.835737,0
,3.350516263,1,1,3.591717,4,59,5.521156,0,0.412278028,1,0,5.988977,18,51,5.081007,0
,2.308422808,1,1,4.487907,17,42,4.800717,1,4.680489773,1,0,2.724866,3,64,5.201457,0
,2.165319102,1,0,4.384848,13,52,4.839637,0,11.550136209,0,1,1.629800,7,50,4.860499,1
,7.761382067,0,0,1.970176,8,66,4.686909,0,2.709093688,1,1,3.535508,13,63,5.252364,0
,2.795778440,1,1,4.713039,10,33,4.850712,1,3.655220622,0,1,2.232516,7,50,4.503865,0
,5.131761460,0,0,3.191143,10,41,5.096031,1,1.933563557,1,1,1.074989,8,44,5.247021,1
,2.386244466,1,0,3.105901,12,57,5.652957,0,6.579558296,1,1,4.911388,10,52,4.686909,0
,1.653981343,1,1,3.754309,7,49,4.693797,0,3.616694513,0,0,3.340925,3,52,4.590991,0
,3.348087451,1,1,3.596255,7,31,6.202187,1,3.937152011,0,1,7.048824,11,26,4.871677,1
,4.448652618,0,0,1.899213,5,25,5.007613,1,3.314438255,1,0,3.568579,10,47,4.921255,0
,4.378647088,0,1,3.082401,8,32,5.659616,1,2.894753047,1,1,4.472487,10,27,5.454546,1
,1.143836483,1,0,4.906165,20,59,4.583412,0,2.078924038,1,1,5.038161,8,41,4.759858,1
,7.632391183,1,1,3.766607,8,29,5.164568,1,2.532663288,1,0,2.514690,27,65,5.000000,0
,7.292980488,0,1,3.598738,13,38,5.178184,1,7.161655578,0,1,3.531391,13,38,5.178184,1
,7.995158576,0,1,1.195915,27,61,4.631800,0,5.356725785,0,1,2.807563,3,66,4.983549,0
,5.864581357,0,1,1.653911,7,59,4.631800,0,3.815578056,0,1,3.490062,4,42,5.357143,1
,2.088531185,1,1,4.074264,15,58,5.646925,0,5.654649446,0,1,2.197629,4,70,5.251685,0
,8.609041606,1,0,4.729127,12,50,5.163978,0,2.017008686,1,0,1.169704,10,51,5.929271,0
,1.592183384,1,1,2.791577,5,53,5.521473,0,11.340743492,0,1,3.460046,8,45,5.164568,1
,2.497296371,1,0,3.562934,10,69,4.637013,0,0.924902449,1,0,5.116869,17,40,5.094267,1
,0.877101983,1,0,6.776094,13,65,4.921255,0,2.914522704,1,1,3.430647,7,48,5.266344,1
,7.691193425,0,0,1.529326,6,51,5.516086,0,3.500237395,1,1,1.383206,5,43,5.420771,1
,6.667544048,0,1,3.596455,8,42,5.120432,1,5.717699502,0,1,3.406005,5,44,5.070667,1
,1.343640841,1,1,6.402983,5,46,4.819277,1,5.281411490,0,0,3.603305,9,52,4.923659,0
,8.514367758,0,0,2.699022,5,62,4.567398,0,0.345293636,1,0,7.424966,3,45,5.174546,1
,3.256129004,1,1,4.639832,13,66,3.875617,0,3.943650016,0,0,1.172036,10,28,6.073310,1
,5.750799191,0,1,3.048840,7,38,5.242941,1,0.837870163,1,1,4.224013,13,50,6.273158,0
,1.888124836,1,1,4.047443,8,29,5.453168,1,5.908433161,0,0,1.920029,20,41,4.454354,1
,1.049472063,1,0,5.003171,12,44,5.904718,1,3.586541725,0,0,3.288140,7,34,5.095541,1
,6.635303664,0,0,5.943127,14,54,4.563989,0,5.947873756,1,1,1.189624,6,39,5.784654,1
,7.800592523,0,1,5.629727,14,52,5.514311,0,1.939952102,1,1,3.039541,15,43,6.136303,1
,1.348567277,1,0,3.943967,15,61,5.455447,0,3.894577310,1,1,2.075025,7,48,4.843221,0
,3.193398435,1,1,3.658782,28,65,4.362469,0,4.817679250,0,0,1.025435,9,33,5.142595,0
,2.860194480,1,1,1.165846,23,45,4.733485,1,1.352776048,1,1,1.676759,27,30,5.454546,1
,4.912300494,1,1,3.153114,7,32,5.381357,1,4.890521981,0,1,2.582964,13,57,4.463000,0
,5.764706540,0,1,2.609888,5,30,4.907975,1,2.767113746,1,0,2.928604,7,57,4.798963,0
,4.431233163,0,0,1.345869,10,41,5.454546,1,1.623662089,1,1,3.463678,27,45,4.827945,1
,2.181049038,1,1,3.567826,3,62,5.023578,0,5.721996131,0,1,3.195724,6,42,5.452375,1
,1.627023852,1,1,5.494793,12,51,5.421687,1,2.751551764,1,1,1.687466,5,41,6.545970,1
,7.271223955,0,1,1.774302,10,25,4.980119,1,5.997232191,1,0,4.188447,7,57,5.617264,0
,2.815038151,1,1,4.098625,8,39,4.827945,1,6.538407808,0,1,3.898660,20,39,4.493895,1
,3.586533365,1,1,2.288991,13,31,5.397807,1,1.168339111,1,1,3.037992,4,59,5.014839,0
,1.131764089,1,0,6.213348,5,42,4.535342,1,5.773740162,1,1,5.733903,10,32,4.886216,0
,5.724049911,0,1,3.742550,6,46,5.034317,1,8.209486959,0,1,1.318034,7,43,4.960819,0
,5.905065192,0,0,1.436360,4,39,5.034317,0,1.312351945,0,1,2.809238,7,44,6.481796,1
,5.327245863,0,0,1.840784,20,41,4.454354,1,2.016230894,1,0,2.050312,10,43,5.386785,1
,7.273675921,0,1,3.319160,8,33,5.303301,1,0.679058643,1,1,4.789431,10,49,4.991069,1
,6.208641571,0,1,1.570053,7,28,4.841229,0,10.193101052,0,1,2.174288,10,49,5.081007,0
,7.983991138,0,1,1.496593,7,23,5.228350,1,2.707835686,0,0,1.239440,12,65,5.219121,0
,1.505188818,1,1,7.607318,13,59,4.904786,0,0.849979285,1,0,5.965647,8,50,5.625326,0
,2.102019053,1,1,4.984487,13,39,4.567398,1,3.535572626,0,1,3.852810,13,50,5.059026,0
,2.768660076,0,1,3.661696,18,44,5.257000,1,10.673161184,1,0,3.207696,8,42,5.589223,1
,4.175695912,0,1,1.367948,7,53,4.960819,0,1.419040855,0,0,1.312853,10,48,4.311626,0
,1.093027247,1,1,3.994409,5,53,5.359112,1,1.966217633,1,0,5.150978,9,51,4.593059,0
,2.128991491,1,0,3.787845,10,53,5.685352,0,8.002784875,0,1,1.808558,5,27,4.950651,1
,6.583644943,0,0,1.654456,17,39,5.555556,1,1.113495390,1,1,4.798297,8,49,4.652018,1
,0.550321056,1,1,6.502562,8,23,5.519851,1,0.723488188,1,1,2.155395,13,31,4.705882,1
,0.954077783,1,0,3.535933,3,66,4.525292,0,4.975119414,1,0,1.484172,7,43,4.733485,1
,6.969411931,0,0,1.264260,6,58,4.453618,0,6.712535366,0,0,2.394560,12,75,4.742505,0
,2.433923883,1,0,2.138953,4,59,5.576548,0,10.577546171,0,0,2.936957,17,65,6.250000,0
,4.503652270,0,1,1.208192,23,54,4.712121,1,2.298074342,0,0,1.138595,7,43,5.104738,1
,3.400328945,0,1,1.293391,13,32,4.694526,1,1.984224372,1,0,2.070662,7,42,4.745345,1
,5.792484433,0,0,1.958561,10,46,5.940885,1,5.048172060,0,1,4.938344,10,53,5.009940,0
,3.724314833,0,1,4.003840,10,60,5.359738,0,0.703710942,1,1,1.118981,7,39,5.014839,1
,3.068358640,1,0,4.306813,12,59,6.130060,0,2.793717710,1,1,1.469506,10,41,4.800717,1
,0.924667925,1,0,4.032832,7,37,5.220239,1,6.724213596,0,0,7.648622,33,59,6.595520,0
,5.042581582,1,1,3.873687,17,45,5.423261,1,6.244645554,0,1,1.163991,13,40,5.484352,1
,1.069207876,1,0,5.898909,12,44,5.904718,0,9.685962357,0,1,7.351507,7,53,5.416025,1
,0.602467925,1,1,4.164720,17,61,4.385608,0,6.735501806,0,1,3.681254,12,40,4.558028,1
,2.416085327,1,1,2.998528,13,41,4.991342,1,0.883339159,1,0,4.666377,22,25,4.960784,1
,1.029780683,1,1,7.469264,30,50,5.486694,1,2.678863910,1,1,6.210031,12,53,5.261336,1
,0.218703943,0,0,5.567017,5,52,5.920780,0,1.135487874,1,1,4.155227,20,49,4.923659,0
,9.563382952,0,1,3.367165,6,35,5.514311,1,9.073131862,0,0,2.514044,8,47,5.217020,1
,4.171939803,0,0,3.681111,7,56,4.615620,0,1.519229517,1,1,3.118656,29,67,4.563989,0
,10.778655855,0,1,4.842721,5,45,5.329681,0,4.337965989,0,1,3.301727,10,57,4.985775,0
,7.280470398,0,1,4.031752,17,51,4.841229,0,1.007990146,1,0,5.502734,15,45,4.988877,1
,2.495596502,1,1,3.492195,11,44,5.163978,1,7.608110567,0,1,1.963279,12,65,4.590991,0
,2.118993806,1,1,3.483719,10,68,4.472136,0,3.558037083,1,1,1.582273,5,55,4.519892,0
,6.017214503,0,1,7.847679,17,60,5.052686,0,11.540110101,0,0,1.896460,12,52,5.416025,0
,1.668611958,1,1,3.480821,10,63,4.499433,0,3.584244189,1,1,4.929268,7,34,5.247021,1
,2.127077850,1,1,2.093565,7,64,5.488114,0,9.608429844,0,1,3.034624,8,55,5.015292,0
,3.864569215,0,0,5.000620,10,50,4.893999,1,12.183432204,0,1,1.583632,12,44,4.830909,1
,1.336573082,1,1,3.780255,8,54,5.120809,0,2.681474118,1,1,2.129729,14,36,5.863020,0
,3.537433598,1,1,3.872712,12,32,5.806452,1,0.588255172,1,0,8.332843,12,47,6.641995,1
,0.454187936,0,1,3.252996,3,39,4.758241,1,6.768400653,0,0,1.001791,11,43,4.915615,1
,1.388905490,1,0,3.142672,17,63,4.328138,0,7.651766392,1,1,6.356338,2,33,5.962848,1
,3.187847518,0,1,3.994416,8,27,5.583828,1,3.393697460,0,1,3.739596,3,53,5.115846,0
,9.269941007,0,1,3.593269,8,50,4.632703,0,1.861529935,1,0,4.709698,22,47,5.376453,1
,8.697841282,0,0,4.726717,8,55,4.385608,0,5.412574003,0,1,3.330418,10,57,5.153882,0
,6.995501779,0,0,2.478400,3,59,4.619330,0,4.330310494,0,0,3.917717,3,66,4.723693,0
,2.576165090,1,1,6.066838,18,54,5.661270,1,1.145485281,1,0,5.285216,2,41,5.327739,1
,5.329422753,1,1,3.671430,7,62,4.766442,0,3.614254190,1,0,2.784692,7,54,5.677647,0
,4.165876197,1,1,6.332982,5,41,4.944419,1,4.256215571,1,0,3.280260,8,58,4.550068,1
,5.088455679,0,1,4.052454,13,39,6.069946,1,6.116862102,0,1,3.977267,12,38,6.141898,1
,12.683461723,0,0,1.517336,5,70,5.043558,0,4.453783981,1,0,3.871339,10,49,4.848485,1
,8.785755092,1,1,1.782309,12,56,4.752127,0,4.130454944,1,1,6.445827,17,50,5.390110,1
,0.404675946,1,0,3.814497,11,53,5.064476,0,4.072113862,0,0,5.897269,9,29,4.561979,1
,0.740543355,1,0,6.359513,8,51,4.902490,0,3.758998262,1,0,4.317449,17,40,5.201327,1
,6.319724316,1,1,5.784411,5,54,4.899540,1,3.303044922,1,1,7.979470,13,48,5.404638,1
,7.902598376,0,1,1.160953,10,59,4.682826,1,1.647431277,1,0,3.702247,13,57,5.096031,0
,5.017489872,0,1,3.201001,3,42,4.781478,1,9.130907734,0,1,4.749968,7,42,5.421048,1
,2.178538364,1,0,2.171074,17,59,4.796997,0,11.721630968,0,0,1.572511,12,52,5.416025,0
,4.982466788,0,1,1.210030,7,51,4.503865,0,0.989578057,0,0,7.625260,20,62,4.660392,0
,0.198328257,0,1,3.333207,5,52,5.318160,0,1.195690138,1,0,2.651865,17,52,5.229125,0
,1.917606493,1,1,2.632788,9,48,4.704970,0,3.186227065,1,1,1.684827,7,38,4.672253,1
,3.760231427,1,1,1.262469,10,51,5.877699,1,0.371193691,1,0,6.507787,17,52,6.180629,0
,6.708271608,0,1,2.340167,8,34,5.941006,1,3.092500668,1,0,4.816183,22,64,4.801516,0
,8.129999085,0,0,3.453010,15,49,4.520859,0,5.612371164,1,0,1.525717,10,44,5.160907,1
,11.718612459,0,1,3.267709,17,46,4.704970,0,1.803107106,1,0,1.905088,27,67,4.742505,0
,7.042425661,0,0,4.871394,10,54,5.229125,0,4.486400924,0,1,2.693333,7,60,4.991342,0
,2.458152764,1,1,4.166088,8,45,4.724556,1,5.081226855,1,1,3.079174,7,56,4.991069,0
,4.851088949,0,0,1.655767,5,70,4.921255,0,7.004463844,0,0,1.899629,8,32,4.412188,1
,8.060202675,0,1,2.705815,10,34,5.033223,1,1.011714856,1,1,3.617589,21,45,5.993707,1
,0.694334841,1,0,2.869502,12,52,4.933737,0,4.379653798,0,1,1.582491,5,33,5.115846,1
,1.992834936,1,0,3.526825,3,25,5.451704,1,1.692036651,1,0,4.520205,17,44,5.333006,1
,4.328743730,0,1,1.668471,7,47,4.594265,0,5.694383361,0,1,1.481445,3,30,5.067910,1
,1.597763919,1,1,5.364744,20,41,4.907975,1,3.566550944,1,1,2.005828,7,48,4.843221,0
,5.294380126,0,0,5.128683,7,50,4.529359,0,1.312443267,1,1,4.076913,15,72,4.896896,0
,2.146343958,1,1,3.147547,12,41,4.983549,1,0.499759341,1,0,2.347291,18,36,4.506939,1
,0.377100646,0,1,4.048678,7,46,5.412659,1,5.598202728,1,1,2.681853,20,35,5.842951,1
,11.677910276,0,0,4.933050,22,66,4.810457,0,1.700213530,1,0,4.104284,12,53,4.516129,0
,9.020224218,0,1,1.600587,5,43,4.902490,1,6.147254227,0,1,2.120357,8,49,4.603557,1
,7.100147957,0,1,1.412633,17,39,5.500175,1,3.997298399,1,1,5.518855,6,43,6.373774,1
,3.041361978,1,0,4.398945,8,46,5.128117,1,8.169657531,0,1,1.744003,7,23,5.228350,1
,6.510418634,0,1,1.718795,10,53,5.015566,0,6.828632147,1,1,2.016646,12,48,5.709323,0
,0.861517178,1,1,5.276851,8,40,4.902511,1,4.886619000,1,0,6.515116,17,45,4.648958,1
,9.676396170,0,1,3.117316,11,52,5.201327,0,3.716733232,0,1,2.108093,13,67,4.444445,0
,1.115358443,1,1,5.601878,20,68,4.619330,0,1.303703540,1,1,3.507072,12,42,5.685352,1
,0.196904947,0,1,3.018814,10,26,4.643764,1,1.976208169,1,1,3.824025,27,42,5.509923,1
,0.531891014,1,1,3.220245,20,48,6.090869,1,3.118361672,1,0,5.715606,17,45,4.413292,1
,9.422584610,1,0,2.823839,12,24,4.830680,1,0.002647261,0,1,2.844084,7,22,4.960784,1
,1.108267800,1,0,6.904618,8,49,4.891389,1,2.908533443,1,1,2.273455,14,36,5.863020,1
,6.778521025,1,1,2.785345,8,37,5.554637,1,6.173335308,0,1,3.364887,8,60,4.913402,0
,6.935856038,0,0,3.818273,6,62,5.615465,0,0.166028795,0,0,5.748957,15,38,6.033400,1
,2.725026051,1,0,4.288533,8,46,5.128117,1,4.612444617,1,1,3.030475,8,40,5.657501,1
,1.281861622,1,1,5.674807,2,41,5.327739,1,9.339026857,1,1,3.871680,13,43,5.180603,1
,10.735606323,0,1,3.281375,8,55,5.015292,1,2.365873897,1,0,2.535791,27,65,5.000000,0
,11.766978135,0,0,2.478327,18,57,5.420771,0,4.271065215,0,1,1.213050,11,41,5.412659,1
,0.616397869,1,1,4.264361,17,22,4.923234,1,4.027677286,0,0,1.190329,10,28,6.073310,0
,8.809499703,1,1,3.195532,7,56,4.242424,0,0.945373395,1,1,1.512612,5,55,4.933303,0
,4.457597521,0,1,4.558692,17,51,4.668973,0,10.968263637,0,0,1.326111,5,70,5.043558,0
,1.893454171,1,1,5.306797,7,32,4.839637,1,3.071826066,1,0,1.022251,10,34,5.266344,1
,3.332564283,0,1,4.634036,7,34,5.247021,1,5.498649241,0,0,1.586036,6,46,5.096031,1
,0.983960569,1,1,3.265702,8,46,4.830680,1,0.002802357,0,1,2.041719,7,22,4.960784,1
,0.146355799,0,1,3.405241,5,35,5.520686,1,10.276878528,0,1,1.884935,5,22,4.881406,1
,5.476632789,0,0,4.334094,17,69,5.071590,0,2.890838924,0,1,5.516277,16,31,5.391265,1
,0.740958745,1,0,8.021389,10,43,5.219121,1,5.918680277,0,0,2.598374,13,46,5.080005,1
,6.252165398,0,1,3.278764,7,63,4.374999,0,8.144544727,0,1,5.920142,14,52,5.514311,0
,7.272492165,0,1,3.646474,10,50,5.811836,0,6.113740440,0,1,1.835806,7,44,4.984073,1
,0.240082926,0,1,3.061189,17,31,6.419274,1,2.826153656,0,1,3.521198,18,44,5.257000,1
,6.600704658,0,1,1.072442,4,37,4.784644,1,0.589988940,1,0,1.087028,15,55,5.248639,0
,3.565092881,0,1,4.115564,11,50,6.312191,1,2.239053344,1,1,3.994855,12,40,4.870861,1
,3.933021427,0,1,1.398644,4,48,6.011017,0,6.621891045,0,1,2.730461,12,26,4.247670,1
,10.185764983,0,0,6.359992,5,62,5.588507,0,0.895861781,1,1,5.710522,8,40,4.902511,1
,5.411703008,0,1,1.056656,5,43,4.864693,1,8.565351894,0,1,3.348079,8,54,4.451705,0
,6.421887837,0,1,9.766629,10,60,5.182124,0,6.298610506,0,0,1.253611,8,47,4.191617,1
,0.296036847,0,1,4.810572,17,55,4.791564,0,10.851082559,0,1,3.814584,12,48,4.861484,0
,7.980208024,0,1,5.487632,10,52,4.466325,0,3.638660988,0,0,2.472651,8,40,5.326697,1
,7.442259017,0,1,3.339889,3,61,4.619330,0,7.968494005,0,1,2.078416,8,31,5.624463,1
,5.638054393,1,1,2.112804,10,29,5.474375,1,2.274404360,1,1,3.887508,11,44,5.163978,1
,2.806913498,1,1,3.938302,20,64,4.650930,0,4.014024757,0,1,4.271452,10,51,4.913402,0
,4.495647291,0,1,1.342853,17,29,4.752127,1,8.205679002,0,1,1.964848,7,52,5.201327,0
,10.934958368,0,1,3.501997,28,68,6.308775,0,1.075246807,1,0,3.377548,17,58,6.343058,0
,10.341693264,0,0,1.863250,12,44,4.830909,1,7.693085207,0,0,3.165204,7,54,5.481173,0
,5.395744543,0,1,4.457089,12,48,4.778376,0,5.382884698,0,1,3.773782,6,42,5.452375,1
,6.508818756,0,1,2.879026,7,35,5.219121,1,2.990790446,1,1,2.670362,10,61,4.677072,0
,2.069391739,1,1,3.632507,3,67,4.224999,0,1.827157268,0,1,3.541418,5,29,5.407597,1
,1.100326438,1,0,3.830103,33,30,5.132883,1,1.041723950,1,0,5.027721,10,42,5.174546,1
,4.441376641,0,1,2.877808,8,41,5.219121,1,3.330571019,1,1,5.685433,10,39,4.650930,1
,2.516389054,1,0,2.964775,4,54,4.549815,0,6.898805381,0,1,1.874730,4,37,4.784644,1
,11.377748124,0,1,4.265396,3,58,5.652957,0,5.471347402,0,1,5.358442,13,50,4.810457,0
,9.166552745,0,1,3.909758,7,64,4.820110,0,5.878316974,0,0,5.537839,4,24,4.572111,1
,0.786873728,0,1,4.422472,8,69,4.519892,0,4.164596816,0,1,1.600253,3,43,5.237828,1
,11.542036207,0,0,4.889596,6,53,4.952207,0,6.423826599,1,1,3.495273,10,67,4.419417,0
,3.506319462,0,0,1.130150,8,60,5.094267,0,9.498279253,1,1,3.506834,5,62,4.839637,0
,12.609272471,0,1,5.727082,13,36,5.344762,1,4.777893721,0,0,4.754430,4,52,4.829433,0
,2.623103825,1,1,4.749771,10,33,4.850712,1,2.883005385,1,0,2.714045,7,57,4.798963,0
,5.539691312,0,1,2.584135,3,43,4.984073,1,9.465759539,0,1,2.798065,10,62,5.352583,0
,4.067581461,1,1,3.001352,9,32,5.290592,1,3.096699745,0,1,3.091198,8,27,5.583828,1
,4.130818534,0,1,3.075352,10,42,6.369427,1,2.200995115,1,1,2.587967,8,43,5.194805,1
,0.844702499,0,0,3.591533,8,73,4.666667,0,2.302195126,1,0,5.316506,7,41,4.784644,1
,7.954934108,0,1,5.940959,17,25,5.132883,1,1.886116862,1,1,3.103121,12,45,4.983549,1
,4.210835201,0,0,1.733284,10,54,5.717564,0,2.131763536,0,1,4.552194,12,31,5.333333,1
,2.606818892,1,1,4.143072,13,53,4.983549,0,0.967519928,0,1,3.333475,20,30,4.631800,1
,1.577373909,1,0,6.969599,11,44,4.744147,1,0.928473702,1,0,5.932476,13,71,4.648958,0
,3.928922370,1,0,3.830729,7,35,4.771733,1,8.851494640,1,0,1.376508,17,59,5.419018,0
,11.587160723,0,1,2.125626,13,44,4.606335,1,9.169098320,0,1,3.465583,14,44,5.085716,1
,5.396104741,0,1,3.956372,8,28,5.770498,1,1.627566038,1,0,4.930407,20,69,4.319955,1
,2.878920068,1,0,2.669477,4,59,5.576548,0,8.402053876,0,0,1.943587,7,57,4.250432,0
,6.477433818,0,1,3.327807,15,60,5.254470,0,2.249856597,1,1,1.139254,8,49,5.303301,1
,6.756788662,0,1,3.181352,10,46,5.447472,1,4.565235126,0,1,3.432877,15,41,5.153882,1
,1.107124452,1,0,4.116839,7,54,4.841229,0,5.829129699,0,1,1.574529,4,37,4.784644,1
,3.344702727,1,1,3.640927,4,40,5.327739,1,4.106279312,0,0,1.307658,3,43,5.237828,1
,5.597583817,0,1,1.789552,5,33,5.063291,1,4.889016791,0,0,5.076378,22,62,5.309829,0
,1.614847931,1,0,1.776776,12,39,5.697535,1,5.700602099,1,1,3.228300,7,33,5.180268,1
,4.154081131,1,1,5.970791,8,64,5.303301,1,3.498311267,0,1,4.873115,16,54,4.572111,0
,1.725516742,1,1,3.473946,12,45,4.983549,1,3.088626334,0,1,1.506025,8,27,5.787719,1
,1.813557055,1,1,2.023611,5,41,4.886249,1,6.473210783,0,1,3.951521,5,45,5.585256,1
,11.904159777,0,1,3.296605,13,51,4.621678,0,4.618503669,1,1,4.554781,13,28,5.241720,1
,2.148192411,1,0,1.794118,12,46,5.109458,0,1.399641222,1,1,3.882329,5,46,6.495191,1
,6.524877740,0,1,4.092007,8,58,4.980119,0,4.257113222,0,1,2.469730,12,61,5.106757,0
,8.075994008,0,0,3.373190,7,53,4.521553,0,3.195923170,0,0,3.511813,17,54,5.196646,0
,2.868072448,1,0,3.325605,7,69,5.717564,0,0.134297954,1,0,4.100890,5,21,4.593059,1
,10.994402054,0,1,3.463139,10,52,4.650930,1,2.920086105,1,1,3.686064,10,76,4.668973,0
,7.114572805,0,1,2.859050,8,34,5.941006,1,6.318422168,0,0,3.211131,7,44,5.080005,1
,4.261228357,1,1,5.402267,17,43,5.416025,1,0.771896875,0,1,1.980577,7,43,5.966562,1
,5.793255777,0,0,7.124845,6,36,4.997398,1,1.139539301,1,1,2.473024,15,37,5.296764,1
,2.413839400,1,0,3.237792,20,64,4.650930,0,8.958817363,0,1,1.670993,10,59,4.682826,0
,2.822138019,1,0,2.963084,15,61,4.687360,0,3.422131727,0,1,3.251236,7,46,5.206833,1
,7.993613717,0,0,2.485564,6,69,4.621613,0,8.782852576,0,0,2.436971,8,35,5.315730,1
,5.397216464,0,1,3.467820,12,26,5.660932,1,2.223455060,1,0,3.055016,15,43,6.136303,1
,6.027392173,0,1,3.852236,10,34,4.881406,1,1.084586392,1,1,9.265588,28,37,5.333333,1
,1.243429923,1,1,4.679064,4,41,5.488114,1,5.668138399,0,1,1.388813,5,42,5.257000,1
,9.413060378,0,1,3.270537,11,52,5.201327,0,1.342867651,0,0,5.186753,2,41,5.327739,1
,6.845927230,0,1,1.199097,4,21,4.731417,1,0.002594154,0,1,2.510500,7,22,4.960784,1
,3.591301516,1,0,3.714610,3,43,5.735394,1,8.740492055,0,1,1.320928,5,48,4.836185,0
,4.955223384,0,1,3.600015,2,29,5.006571,1,6.020906848,0,1,3.806682,4,41,4.847189,1
,6.102748685,0,1,1.163931,3,61,5.229125,0,1.794673196,1,0,4.604695,13,36,5.077524,1
,0.961791287,1,0,1.897478,8,45,5.135196,1,4.509406570,0,1,4.138604,10,53,4.759858,0
,12.773241666,0,1,3.246316,8,46,4.459131,1,5.380294851,0,1,3.815470,5,44,5.070667,1
,2.745994109,1,1,1.887257,7,22,4.613830,1,1.070126274,1,0,4.309045,7,37,5.220239,1
,4.643592929,1,0,2.261823,5,57,4.327874,0,6.427625098,0,1,3.039427,7,39,4.605263,1
,4.353008821,0,1,2.223608,12,61,5.106757,0,9.791228457,0,1,3.375487,7,58,4.736275,0
,0.958278043,1,1,5.988311,17,46,6.027281,1,2.460672056,1,1,1.545435,8,49,5.303301,1
,3.814724112,1,1,4.389676,8,65,4.850712,0,6.472706164,1,1,4.784875,17,64,5.970874,0
,7.701176767,1,1,4.943282,8,56,4.587815,0,7.717132494,1,1,1.110856,5,34,5.350588,1
,5.253226996,1,1,7.734394,15,45,5.315730,1,2.861673863,0,1,3.736011,10,42,5.829612,1
,5.639338624,0,0,2.882569,9,50,5.423261,0,1.139180586,1,1,4.084861,10,23,4.850811,1
,1.520258869,1,1,4.470049,3,41,5.096031,1,2.082513273,1,1,4.215316,8,48,4.841229,1
,0.451145072,1,0,5.746813,23,45,4.408289,1,5.234386256,0,1,1.974097,4,53,5.359112,0
,1.842741723,1,1,3.575789,5,28,5.015292,1,2.634777668,1,0,1.380876,13,58,4.781478,0
,3.054968894,1,0,4.879149,13,32,5.344762,1,7.791163189,0,0,3.961538,13,60,4.494666,0
,7.626997128,1,1,2.394343,5,54,5.247021,0,5.744454066,0,0,3.634870,13,67,6.004324,0
,0.944391903,1,0,6.257458,13,65,4.921255,0,5.512751067,1,1,7.889996,7,55,5.698029,1
,1.473004997,1,0,3.968566,8,51,4.381244,0,3.695483268,1,1,3.887463,5,32,4.535342,1
,2.256091719,1,0,3.728736,7,56,5.366974,1,7.127344269,0,1,3.892185,4,54,4.408289,1
,5.495598537,0,1,3.465119,12,36,5.488114,1,2.024487003,1,0,3.465722,8,66,4.916011,0
,0.432877386,1,0,4.224798,2,43,5.810369,1,8.950332014,0,1,5.615929,17,54,4.577900,1
,3.217240913,0,1,3.111013,2,51,5.412659,0,7.391514326,0,0,2.190754,10,61,4.921529,0
,3.679468099,0,1,7.903886,11,26,4.871677,1,3.752650627,0,1,4.828012,13,50,5.416760,1
,1.359926640,1,1,5.950181,17,60,5.205962,0,1.779742809,1,1,2.673481,5,41,4.886249,1
,0.240097132,0,1,3.313608,17,31,6.419274,1,7.041773733,0,1,5.807772,12,56,4.984073,0
,5.274468466,0,0,4.617729,7,25,4.563989,1,8.570785937,1,1,4.003927,9,43,5.323971,1
,0.869031974,1,0,4.322735,20,53,4.843404,0,3.080014795,1,1,1.763183,8,30,6.171599,1
,3.117189568,1,1,5.752226,16,31,5.391265,1,2.512258594,1,0,4.975315,5,47,5.248639,1
,0.393262367,1,1,4.707015,20,45,5.584316,1,0.029258002,1,1,1.660248,19,22,4.983549,1
,10.925533555,0,1,1.099849,27,53,4.478945,0,11.226861385,0,0,2.292972,5,55,5.170100,0
,10.520582169,0,1,1.818042,12,44,4.830909,1,2.969630861,1,1,3.246843,4,40,5.327739,0
,5.093499793,1,1,3.942309,17,65,5.269940,0,7.722979982,0,1,2.268929,8,53,5.549887,0
,1.736996954,1,1,3.268263,27,42,5.509923,1,1.649247269,1,1,3.067915,7,49,4.693797,0
,2.631835290,1,1,4.405991,10,33,4.850712,1,11.325181177,0,1,4.104817,12,43,4.408289,1
,2.499554846,1,1,5.046849,10,39,4.641669,1,1.753958309,1,0,1.852803,13,55,4.766442,0
,10.798710395,0,1,1.132757,6,50,4.850811,0,5.488611585,0,0,7.169972,10,53,5.352583,0
,0.404717449,1,0,7.609000,3,45,5.174546,1,6.785996019,0,0,1.977703,9,58,4.706487,0
,5.457547924,1,0,2.626157,5,50,4.736275,1,6.090189523,1,1,4.484178,18,26,5.439283,1
,4.043925973,0,1,3.879262,10,57,4.985775,0,5.207490385,0,0,2.532634,3,40,4.701095,1
,6.700785566,0,0,1.046443,9,58,4.706487,0,8.063779312,0,0,3.863380,12,42,4.960784,1
,3.443084267,0,0,4.055330,13,55,6.196016,0,3.742564597,1,1,1.921520,30,43,5.659616,1
,7.347604840,0,1,2.289630,7,53,5.764246,0,4.522371497,0,0,1.743172,10,54,5.717564,0
,3.141453858,0,0,4.195614,12,59,6.130060,0,2.610096567,1,0,4.644975,10,63,5.178184,0
,5.676462759,0,1,3.145090,10,47,4.974681,0,0.823474027,0,1,1.847302,7,43,5.966562,1
,5.064240342,1,0,4.547300,20,47,5.318160,0,3.814753975,1,1,7.130591,13,48,5.404638,1
,1.948065239,1,1,2.895129,10,30,4.561979,1,0.307495726,0,1,1.999431,10,25,4.796997,1
,1.087432697,1,0,6.069058,8,39,4.956558,1,11.623339762,0,0,3.126116,16,47,4.983549,1
,5.289909514,0,1,1.343425,3,53,4.921255,0,11.200088930,0,0,1.754649,6,65,5.038911,0
,3.818835120,1,1,7.430773,18,34,6.383217,1,3.581836528,0,1,3.594143,7,57,5.625326,0
,3.978130150,0,0,4.689308,25,34,4.976703,1,3.966332634,0,0,3.086715,23,61,5.420771,0
,5.853514645,0,0,3.617398,7,44,5.080005,1,5.024868105,0,1,4.186934,13,54,4.516129,0
,5.014721549,0,1,6.599893,18,51,5.025885,0,6.123538776,0,1,9.218273,10,60,5.182124,0
,0.250535083,1,0,4.255883,17,60,5.882353,0,5.315638988,0,1,3.552472,9,39,6.211300,1
,8.943173794,0,0,2.027613,6,69,4.621613,0,0.643938181,1,1,2.772486,12,52,4.933737,0
,11.665038561,0,0,2.688076,18,57,5.420771,0,6.386061328,0,1,3.495028,13,38,5.178184,1
,3.754429619,0,1,1.990836,10,64,5.102694,0,1.803470788,1,0,3.253845,8,62,5.516086,0
,4.626745340,1,1,2.398580,13,31,5.486540,1,4.498457366,0,1,3.056049,5,56,5.943168,0
,11.781940349,0,1,4.172124,3,43,6.165568,1,0.636151513,1,0,4.104137,27,64,4.985775,0
,7.071992117,0,0,7.530330,23,31,4.819277,1,2.054706340,1,1,4.429761,12,43,5.359078,1
,4.081329354,0,0,4.672746,13,30,5.474375,1,4.410354002,0,1,1.943130,20,47,4.821142,0
,10.830422327,0,0,1.406592,6,65,5.038911,0,2.021974852,0,1,4.598517,17,30,4.839637,1
,0.984429151,0,1,3.572986,10,35,4.683626,1,2.791930722,1,1,6.938314,13,51,4.778846,0
,1.319126947,1,0,6.658133,13,47,5.487805,1,3.403215755,0,1,2.820119,10,66,4.724556,0
,1.114048386,1,1,7.170733,33,40,5.796012,1,1.726013194,1,1,1.201856,13,52,5.957490,0
,4.236758247,0,0,1.039859,7,50,5.940885,0,2.399982465,1,0,1.548747,17,60,4.980119,0
,10.192507553,0,1,2.264828,6,53,4.899540,0,4.533598947,0,1,6.302760,25,43,5.699880,1
,12.328957704,0,1,2.500607,17,65,6.250000,0,5.429531916,0,1,2.670772,5,30,4.907975,1
,1.984378344,1,0,3.649803,3,44,4.984073,1,1.674410282,1,1,8.669409,8,56,5.178184,0
,7.865770458,0,1,1.528876,5,34,4.921255,1,1.729541664,1,1,3.158925,7,55,4.967597,0
,6.968346506,0,0,3.422479,9,44,5.773003,1,8.564097952,0,1,3.386506,10,54,5.510658,0
,1.338175242,1,1,6.946359,17,27,5.624977,1,1.230913660,1,0,3.834779,12,59,4.672253,0
,10.367612136,0,0,3.435516,5,61,5.352583,0,1.067652497,1,1,2.511027,14,67,5.642155,0
,0.620071897,0,0,1.027446,13,35,5.772393,1,0.911272438,1,1,5.543977,8,40,4.902511,1
,1.303407766,1,1,4.257949,8,49,4.652018,1,5.699432411,1,1,4.889994,10,52,4.686909,0
,1.547658188,1,1,1.968839,8,55,4.930935,0,7.358874620,0,1,4.041300,20,55,5.481173,0
,5.571559299,0,1,4.007010,10,53,5.009940,0,3.676279729,0,1,1.946200,5,68,4.550068,0
,6.684024578,1,1,3.979688,17,56,4.615620,0,1.379151664,1,0,3.149651,15,61,5.455447,0
,0.629674483,1,0,2.702666,5,48,4.980119,1,4.742057996,0,0,4.425371,3,52,5.294117,0
,2.015700490,1,1,1.872647,8,48,5.590170,1,3.043686427,1,0,1.679111,7,65,5.007613,0
,6.327232707,0,0,1.423567,8,65,4.607373,0,2.035903059,1,1,3.731202,16,47,5.404638,1
,9.197425396,0,1,3.015058,7,53,4.498833,0,7.587141539,0,0,1.617228,11,34,5.381357,0
,10.418610171,0,1,2.941383,13,44,4.606335,1,7.188468185,0,0,2.067625,3,59,4.619330,0
,5.295264044,1,0,1.067770,7,43,4.733485,1,2.325087890,1,0,3.215898,13,51,5.070603,0
,6.965401833,0,0,1.519877,13,52,4.881905,0,1.138303286,1,1,1.082815,5,63,4.786756,0
,3.677413744,1,1,5.675761,17,64,4.606335,0,5.864792939,1,1,3.875541,8,42,4.423004,1
,0.760840306,1,1,3.999757,7,53,4.808812,0,3.656370708,1,0,5.468261,8,46,4.427997,1
,7.977481081,0,1,2.167356,10,53,5.555122,0,3.666981993,1,1,6.772681,12,49,4.419417,0
,8.704343821,0,0,4.100223,15,40,5.115846,1,5.110168507,0,0,1.458944,4,43,4.302066,1
,1.647651386,0,1,6.557593,5,56,4.465782,0,1.090246173,1,0,1.680488,6,42,4.899540,1
,2.257441262,1,1,3.015033,3,42,4.843404,1,1.953757578,1,1,3.024511,12,45,4.983549,1
,5.885632485,0,1,2.956574,13,43,5.479188,1,8.194390338,0,1,7.579026,13,35,5.359112,1
,10.215228869,0,0,3.524750,17,41,4.841229,1,4.927100198,0,1,2.800356,7,33,4.802921,1
,2.083129417,1,0,2.583179,7,41,4.230605,1,8.860463429,0,1,2.872429,9,57,4.811160,0
,1.122232053,1,0,3.870709,33,30,5.132883,1,5.192371923,1,1,1.290323,5,33,5.063291,1
,3.076037616,1,0,5.099107,12,28,4.741448,1,2.968698078,1,0,3.007386,12,63,5.313040,0
,6.855113339,1,1,2.081363,4,40,4.946170,1,9.073883685,0,1,1.796952,5,62,5.796012,0
,4.529704834,0,0,2.538278,9,38,4.974681,1,8.954766884,1,1,1.717818,12,30,5.174546,1
,4.204872587,1,0,5.798053,30,71,4.536092,0,0.143972740,1,0,4.512659,17,41,5.661268,1
,4.089147902,0,1,3.590890,10,54,6.084539,0,0.914337276,0,0,4.581733,10,35,4.682535,1
,1.338854033,1,1,1.164560,7,51,5.146990,0,1.504048270,1,1,4.605996,4,25,5.055576,1
,4.054685644,0,0,1.846526,8,41,5.266344,1,8.813238157,0,1,3.038083,8,59,4.680553,0
,4.712040939,0,1,2.912451,12,69,4.759858,0,1.047072230,1,0,3.203211,7,45,4.753973,1
,3.748215334,0,1,4.663353,17,46,5.115846,0,3.691430329,0,1,2.328387,13,67,4.444445,0
,1.622962498,1,1,3.417064,17,49,6.172840,0,1.703839499,1,1,3.225836,7,38,5.021689,1
,3.692680160,0,1,6.947422,8,52,5.418258,1,8.236347756,0,1,3.231294,6,35,5.514311,1
,5.493944034,0,0,1.195176,10,48,5.391265,1,1.740439738,1,0,7.561386,12,68,5.422877,0
,9.467880878,0,0,4.429035,13,47,4.811160,0,5.534484262,1,1,4.177471,8,25,4.901409,1
,3.263101583,0,1,3.750295,8,56,4.689338,1,11.031644791,0,1,3.568039,4,45,6.558120,1
,3.631417685,0,1,1.254824,12,62,5.587602,1,0.664138551,1,1,4.826736,17,22,4.923234,1
,0.551372078,1,0,2.199468,13,47,4.798963,1,1.280112444,1,1,6.409967,5,46,4.819277,1
,3.409553875,1,1,3.845603,7,38,5.454546,1,6.523484412,0,1,6.081367,7,60,4.466325,0
,12.469131390,0,1,3.269804,5,51,4.028379,0,5.796893424,1,0,2.936419,12,57,4.910347,0
,4.892863138,0,0,3.538422,13,31,4.908459,1,9.756522156,0,0,4.437509,17,61,5.994789,0
,5.956365864,0,1,3.241240,12,48,5.077524,0,4.273672079,0,1,1.138052,13,35,6.052149,1
,1.283302727,0,0,1.568443,10,75,4.771733,0,2.593753193,1,1,4.576124,12,56,4.561979,0
,11.022361688,0,1,2.562302,7,46,4.723693,1,4.341809339,0,1,1.490054,8,34,5.070667,1
,1.740294068,1,0,3.022582,4,48,4.991342,1,1.207288043,1,0,5.583565,2,41,5.327739,0
,6.801260934,0,1,1.536236,17,50,6.431975,0,1.239098875,1,1,4.007751,17,40,6.295086,1
,1.923827177,1,0,4.302293,3,53,4.835737,0,0.242325526,1,0,7.432516,17,39,5.169417,1
,1.962638927,1,0,5.646618,9,51,4.593059,0,4.765866071,1,0,3.637664,10,49,4.848485,1
,7.497251748,1,1,4.630456,20,58,4.856782,0,9.718406642,0,0,3.369178,7,58,5.153882,0
,6.159691103,1,1,4.243608,6,51,5.767761,0,8.714778531,1,1,2.099445,13,61,4.899540,0
,1.347854222,0,1,1.391289,7,35,4.977315,1,5.704179743,0,0,2.328115,5,53,4.861484,0
,6.091363504,0,1,1.109748,6,39,5.784654,1,1.326709378,1,0,3.207921,8,30,5.148021,1
,3.637595079,1,1,5.518120,13,42,5.084070,1,0.493235736,1,1,2.206559,10,33,4.681194,1
,6.604112747,0,1,3.543386,5,30,4.381244,1,0.704454326,1,0,5.358995,8,34,6.344507,1
,6.813956765,1,0,2.938351,5,54,5.247021,0,7.243775093,0,1,1.665633,12,65,4.590991,0
,6.249772609,0,0,1.794299,10,48,5.391265,1,9.608482597,0,1,1.714089,27,53,4.478945,0
,0.500085713,1,0,4.301557,15,65,6.157191,0,5.480820839,0,1,3.189965,8,54,4.850712,0
,12.710331207,0,1,5.678058,18,52,4.991342,0,2.313843835,1,1,1.115404,8,48,5.590170,1
,0.056334345,0,1,3.687239,10,70,5.263158,0,6.024738466,0,1,3.098768,10,47,4.974681,1
,1.745185673,1,0,3.256935,7,38,5.021689,1,3.839304875,0,1,3.408107,10,57,4.985775,0
,4.140271763,1,0,4.057547,13,63,4.419417,0,1.711032155,0,1,5.459517,13,61,4.374088,0
,2.945441471,0,0,1.405297,4,61,4.695976,0,3.901363912,0,1,2.377022,8,60,5.207717,0
,6.368603051,0,0,3.065122,15,63,4.943196,0,11.082273203,0,1,3.236662,8,55,5.015292,0
,6.616702935,0,0,4.697332,15,48,5.120432,0,8.089383595,0,1,2.602792,8,31,5.624463,1
,4.535427619,1,1,2.685595,6,33,5.096031,1,0.341383471,1,1,4.014767,20,45,5.584316,1
,4.216489097,0,0,1.873561,10,64,5.645998,0,10.816620562,0,1,5.293613,3,44,4.869480,1
,2.918432540,1,1,1.946789,13,45,4.847189,1,5.836709913,0,1,2.577243,9,36,5.385101,1
,2.466280534,1,0,3.519277,11,57,4.479032,0,1.710999982,1,1,2.286678,13,47,5.217020,1
,0.454346596,1,1,5.080456,23,24,5.401257,1,1.411485907,1,0,4.108376,10,45,4.921529,1
,5.112801510,0,1,1.299017,4,53,5.359112,0,2.124665475,0,1,1.207315,12,25,5.714959,1
,4.209002507,0,1,2.060803,6,56,4.540842,1,2.920484931,0,1,4.500004,33,56,4.904786,0
,4.774739271,1,0,1.534296,3,40,4.631800,1,2.784954072,1,1,4.319636,23,29,5.115846,0
,2.501403175,0,0,3.549613,10,69,4.637013,0,4.784769985,1,0,4.576474,20,42,6.797196,0
,3.484472023,1,1,2.526027,10,19,5.109458,1,0.938932350,1,0,5.619398,17,40,5.094267,1
,0.159616361,0,0,5.332134,15,38,6.033400,1,9.910881360,0,1,1.597291,7,31,5.029849,1
,3.745673862,1,1,3.402812,8,46,5.206833,0,5.694807992,1,0,1.506099,3,69,5.391265,0
,7.085666266,0,1,3.803988,15,65,5.225269,0,7.315893747,0,1,1.828972,12,65,4.590991,0
,0.359526952,1,0,7.432276,12,37,4.762347,1,3.824774877,1,0,5.918078,13,43,4.988877,1
,0.982772838,0,0,3.579282,8,73,4.666667,0,2.296799624,1,0,3.528680,7,56,5.366974,1
,1.171713557,1,1,3.774755,5,21,4.599488,1,0.403966122,0,1,5.021954,18,68,4.713064,0
,8.476544176,0,1,2.615981,6,33,4.508021,1,9.171684436,0,0,3.688894,10,50,5.132883,0
,4.501429805,0,1,3.868264,5,40,4.997703,1,7.027335286,0,1,1.930436,13,37,5.359112,1
,8.157290614,1,0,4.747985,12,50,5.163978,0,4.795064767,0,1,4.249273,10,44,4.635125,1
,0.150124681,0,0,5.595037,15,38,6.033400,1,1.685459787,1,1,5.868948,13,66,4.800717,0
,5.789678885,0,0,2.262091,5,39,4.834520,1,1.807590302,1,1,4.509097,15,42,4.879078,1
,5.849247382,1,0,4.852739,5,37,5.174506,1,6.505686522,0,0,4.137111,22,49,4.402515,1
,5.965507461,0,1,1.939071,6,39,5.784654,1,4.745028981,1,1,1.053258,18,42,5.096089,1
,7.397726906,0,1,2.246315,12,38,5.659616,1,2.582139688,1,1,6.767324,13,26,5.555122,1
,11.620713389,0,0,2.144898,11,43,5.224291,1,2.354342799,1,0,2.147087,5,38,5.151093,1
,5.919569252,0,1,5.462591,12,56,4.984073,0,2.982479436,1,0,4.404439,13,39,5.504342,1
,2.043911104,1,1,4.789044,12,68,5.286123,0,6.471085244,0,0,3.951506,21,49,5.303301,0
,0.399796909,1,1,5.164287,20,40,4.650769,1,10.989655194,0,0,3.660749,8,56,4.508264,0
,5.618858888,1,0,2.844965,5,50,4.736275,1,8.903061433,0,1,2.530546,15,61,4.723693,0
,5.849367329,1,1,5.745089,15,53,4.800717,0,3.330087391,1,1,5.408377,13,42,5.084070,1
,2.450236703,1,1,4.486764,10,42,5.766097,1,9.622271528,1,1,2.089677,7,40,5.077524,1
,7.346688972,1,1,3.503054,7,65,4.423004,0,2.478651273,1,1,4.371018,10,33,4.850712,1
,3.895491310,0,1,4.428101,7,64,4.550068,0,0.353398250,1,0,2.659858,20,46,5.386311,1
,6.413721112,0,1,5.275857,17,33,5.014839,1,11.180874380,0,1,2.211318,7,29,4.952207,1
,8.942891953,0,0,1.842506,7,53,4.536092,0,3.260151861,1,1,4.456568,8,45,5.620375,1
,0.290494463,1,0,5.976378,12,47,4.677072,0,3.055727764,0,1,6.463273,10,59,4.953681,0
,0.766194332,1,0,7.256844,4,58,4.550068,0,1.521106293,1,0,8.229116,15,50,5.385101,0
,3.305986019,1,0,4.139014,9,44,5.553775,1,6.471743709,0,1,2.100047,8,49,4.603557,1
,3.950150655,0,0,3.332262,6,53,5.326697,0,0.990544543,1,1,3.381248,8,46,4.830680,1
,3.924687900,0,0,3.280216,7,56,4.615620,0,7.015305440,0,0,3.387375,6,62,5.615465,0
,1.417217070,1,1,3.455838,13,75,5.229125,0,10.686381647,0,1,4.642725,5,62,4.231886,0
,7.922455770,0,1,2.804753,8,53,5.549887,0,6.190366155,0,0,7.111154,23,31,4.819277,1
,5.878891469,0,1,2.374204,9,38,5.137896,1,1.770688727,1,1,4.540284,17,39,5.052686,1
,2.177196351,1,0,1.940813,10,64,4.850811,0,3.530529798,0,1,6.152448,10,46,5.410267,1
,5.916002765,1,1,4.911436,10,49,4.766442,1,7.569615889,0,1,3.472873,12,61,5.182124,0
,5.298762161,0,0,1.040475,9,33,5.142595,1,1.420976909,0,1,2.265722,10,31,5.324759,1
,5.471085865,0,0,3.298073,9,52,4.923659,0,8.747444579,0,0,1.321746,3,42,4.322629,1
,1.065527489,1,0,5.262461,15,45,4.988877,1,3.061506056,1,1,2.738867,17,44,5.483719,1
,3.101768774,1,1,4.103467,5,65,4.593059,0,3.487619817,0,0,2.029529,2,41,4.245699,1
,6.933932250,0,1,3.596082,5,45,5.585256,1,1.627501819,1,1,3.103932,17,52,4.820110,0
,0.459899324,1,0,3.436148,11,53,5.064476,0,1.196476922,1,0,2.791036,16,56,4.921529,0
,11.015911395,0,1,3.866773,10,52,4.650930,1,6.493480081,0,1,1.706045,5,47,6.004324,0
,4.323063906,1,1,3.060590,7,32,5.381357,0,5.687045109,0,1,1.883918,6,47,6.994941,1
,8.029955685,0,1,1.344839,7,30,5.055576,1,4.521019032,0,1,3.643624,20,50,4.960819,0
,8.275488496,0,0,4.024176,10,44,5.476925,1,0.978927475,0,1,3.900913,10,35,4.683626,1
,4.191739835,0,0,1.734116,5,70,4.921255,0,4.186580516,1,0,1.027840,18,54,5.588332,1
,3.085856359,0,0,3.336492,10,33,5.154913,1,0.536928066,0,1,2.596819,4,32,5.487283,1
,0.194680564,0,1,6.918101,8,30,5.055576,1,3.133393842,0,1,1.236199,5,35,4.615620,1
,1.455958072,1,1,4.255443,15,72,4.896896,0,5.071150410,0,1,4.203627,10,46,5.047441,1
,5.197252612,0,1,1.523496,12,58,4.759858,0,1.686088255,1,0,6.768106,10,48,4.881406,1
,7.146015864,0,1,3.070468,8,33,5.303301,1,3.140740703,1,1,5.457124,16,31,5.391265,1
,2.995375995,0,1,2.412651,10,66,4.724556,0,5.847248092,0,1,6.272926,8,58,5.552737,0
,7.864718976,1,1,6.764246,2,33,5.962848,1,6.584225602,0,1,5.211285,13,39,4.881905,1
,0.754393587,1,0,5.989793,8,50,5.625326,0,2.387815383,1,1,4.734830,12,56,4.561979,0
,1.461969210,1,0,6.026673,7,38,4.850712,1,2.690545692,1,1,4.465125,12,55,5.000000,0
,9.542404099,0,1,1.977479,7,55,4.273348,0,6.744635861,0,1,4.647560,7,60,4.351666,0
,3.172623868,1,1,1.919412,9,37,5.423261,1,9.720524347,0,1,3.777607,5,32,5.055576,1
,3.337827875,1,0,3.729475,3,56,4.680553,0,8.633958091,1,1,2.907124,8,42,4.718646,1
,4.773444195,0,1,3.721566,13,57,4.762347,0,8.238257041,0,1,3.775267,5,63,4.518320,0
,3.198572387,0,1,4.746129,33,56,4.904786,0,3.267366762,1,1,4.905366,12,47,5.813129,1
,0.931400313,1,0,3.859220,13,32,5.659425,1,8.459602638,1,0,4.539977,12,40,4.469809,1
,10.128541824,0,1,3.368283,8,50,4.632703,0,5.195980381,0,1,1.910538,10,36,5.642155,1
,5.852506169,0,1,3.876700,7,34,5.090253,1,6.510188589,0,1,3.003975,7,57,5.063291,0
,3.872470390,0,1,4.161128,10,51,4.913402,0,7.483627495,0,0,1.357098,2,61,4.908459,0
,4.131692208,1,1,5.209945,5,54,4.561979,0,2.694968577,1,1,1.119132,7,39,5.219121,1
,3.787812692,0,1,1.064101,3,50,6.034860,0,1.253495932,1,1,3.296340,7,24,5.180798,1
,1.165154219,1,1,5.900348,17,41,5.090253,1,4.895568901,0,1,7.015869,7,33,5.352583,1
,2.806630443,0,1,2.610060,5,37,5.094267,1,1.191073802,1,0,7.813818,30,50,5.486694,1
,9.939381623,1,1,3.169415,7,56,4.242424,0,3.225863512,1,1,2.121407,10,41,5.197775,1
,0.344448391,1,1,3.092457,3,33,4.944419,1,3.726565385,0,1,2.963556,7,37,6.389871,1
,0.826608778,0,0,6.412055,13,65,4.921255,0,2.980067010,1,0,2.558614,7,49,4.252083,1
,1.236738356,1,0,7.448885,29,39,4.705882,1,3.880393843,0,1,3.072832,7,32,5.837676,1
,3.729013091,0,0,4.712902,23,66,5.153882,0,4.170337678,0,1,3.894594,7,41,5.379040,1
,9.514441755,0,1,3.584329,8,50,4.632703,0,8.285041461,0,0,4.233252,10,54,5.229125,0
,5.500631989,0,0,2.195442,7,38,4.869480,1,3.914963804,0,1,3.152215,7,64,5.135196,0
,2.088464898,1,0,4.029423,15,37,5.087983,1,4.695915733,1,0,3.674853,8,58,4.550068,0
,0.508263771,0,1,1.563390,13,62,5.781450,0,6.423408436,0,1,2.161117,12,26,4.247670,1
,3.814645745,0,0,3.108560,3,66,4.723693,0,5.121649589,1,1,5.113745,14,23,5.261336,1
,6.925514557,0,0,2.878833,5,59,4.870607,0,11.682487699,0,1,1.528803,10,52,5.357143,0
,1.416111472,1,1,4.456550,5,37,4.937851,1,3.878054673,0,1,2.235307,14,41,5.752427,1
,2.087212526,1,0,4.277159,3,63,4.549815,0,11.490053811,0,1,4.920517,22,55,4.577900,0
,1.256101345,1,1,2.184008,5,46,4.841229,1,0.994297127,0,1,3.027359,10,35,4.683626,1
,5.419649140,0,1,7.539007,15,51,5.045987,0,3.663966622,0,1,6.360583,13,53,4.633481,0
,8.512175783,0,0,4.602498,15,56,6.017664,0,2.665814427,1,1,3.777707,17,68,5.045599,0
,8.666369135,0,1,3.935311,7,50,5.000000,0,9.608806822,0,1,8.645478,10,52,4.821142,0
,12.072595912,0,1,5.177734,3,44,4.869480,1,2.886999926,1,1,5.462131,17,23,4.753973,1
,0.803767962,1,1,2.498672,13,42,4.789794,1,10.416958992,0,1,2.119693,14,66,4.364066,0
,1.026551632,1,0,3.098744,21,45,5.993707,0,7.667335959,0,1,3.372685,15,65,5.225269,0
,7.624852608,0,1,3.687177,14,36,5.345836,1,4.142351924,0,1,1.987480,6,36,4.463393,1
,3.429760437,1,1,3.259736,6,29,5.800728,1,7.490867481,0,1,1.704233,4,21,4.731417,1
,3.869903494,0,1,4.590009,10,51,4.913402,0,4.281969914,0,0,3.971873,17,36,4.761905,1
,3.944002883,0,0,3.128415,7,34,5.095541,1,10.968094598,0,0,1.468032,13,63,4.535342,0
,5.212150599,0,1,3.214282,7,46,5.512261,1,3.622001163,0,1,2.950828,7,37,6.389871,1
,0.385548345,1,1,1.019563,12,42,5.732484,1,1.914268609,1,0,4.715359,3,53,4.835737,0
,0.588104360,1,1,9.685002,10,38,4.781461,1,5.557568658,0,1,2.078265,6,37,4.923234,1
,3.822249042,0,1,1.666467,3,37,4.766442,1,10.318248702,0,1,4.334759,5,62,4.231886,0
,2.823237952,1,0,2.649425,3,49,5.624385,0,2.492673108,0,0,3.377316,20,64,4.650930,0
,1.269897482,1,1,4.920774,13,33,5.045987,1,9.357387723,0,1,1.830246,7,31,5.029849,1
,1.645688168,1,1,3.882723,13,66,4.550068,0,4.903651221,0,1,4.779042,8,49,4.503865,0
,4.510936888,1,1,2.734384,13,33,5.145105,1,0.956381372,1,0,6.548832,7,54,5.517594,1
,1.914477820,1,1,3.041214,18,50,5.257000,0,4.989703569,0,1,4.949574,3,23,4.655240,1
,8.088284030,0,1,1.665561,2,61,4.908459,0,5.214449620,0,1,1.544228,7,59,4.801516,0
,0.561215906,1,1,6.643541,8,23,5.519851,1,7.285200687,0,1,2.316143,12,51,5.257000,0
,1.658712539,1,1,2.431853,10,46,4.535342,0,8.969450859,0,1,7.833928,14,52,4.701095,0
,1.862072924,0,1,3.555554,12,66,4.759858,0,0.624934128,0,1,1.302327,8,26,4.827945,1
,1.355911583,1,1,4.869751,10,23,4.850811,1,8.520290436,0,1,3.430283,14,44,5.085716,1
,9.497624757,0,1,2.607802,8,67,4.815713,0,0.984252049,1,1,4.560475,15,44,5.948074,1
,6.512466904,0,1,3.379775,3,51,4.772126,0,9.250421339,0,1,1.691052,15,33,5.401378,1
,3.061095497,1,0,3.273423,10,64,5.357143,0,8.622636667,0,1,1.536653,7,52,5.201327,0
,6.451470142,0,1,3.627664,12,34,5.249339,1,10.723622760,0,1,2.081499,17,61,4.714770,0
,4.626932916,0,1,3.223785,3,37,5.263158,1,1.174603368,1,0,3.604351,7,39,5.823232,1
,3.277367725,0,0,4.956886,15,40,5.209758,1,4.576979733,1,1,3.188168,14,47,5.550788,1
,1.751395382,1,1,6.483675,18,68,4.672253,0,3.228455601,1,1,4.016816,10,66,4.350764,0
,8.824059396,1,1,1.913446,17,64,4.533199,0,6.269713275,0,0,6.857850,10,67,5.624713,0
,6.376485793,0,0,3.339430,15,40,4.719673,1,3.583907430,1,0,3.372652,20,60,5.611407,0
,1.034547914,1,1,4.694744,30,43,5.518136,1,5.683106629,0,1,1.782150,5,34,5.695211,1
,9.372136899,0,0,4.141078,15,51,5.106757,0,8.749752051,0,1,2.565810,7,52,5.095541,0
,2.875352299,1,0,5.988152,10,46,5.555451,1,1.999597713,1,0,7.011769,5,28,5.025885,1
,3.684788517,1,0,4.480205,13,32,5.455447,1,11.260781370,0,0,1.302029,7,46,4.933737,1
,2.667858936,1,1,7.545183,5,36,4.577911,1,0.165100834,0,1,5.976905,15,38,6.033400,1
,3.677171814,1,1,4.256848,13,66,3.875617,0,7.959013539,0,0,1.518858,10,66,4.686909,0
,0.453267962,0,1,1.311068,13,62,5.781450,0,7.981381743,0,0,3.079583,1,48,4.952456,1
,1.091764040,1,0,6.968320,12,45,5.142595,1,2.530478925,1,0,3.404665,22,68,5.164568,0
,7.857383113,0,1,1.535848,11,55,5.334129,0,6.030819284,1,1,4.804286,8,25,4.901409,1
,3.541964972,0,1,1.563701,11,41,5.412659,1,8.166674885,0,1,1.516168,8,45,5.488113,1
,1.452840143,1,1,3.202186,8,33,4.770898,1,0.627102346,1,0,1.644970,5,62,5.077524,0
,2.017670790,1,1,2.584701,7,51,5.034317,1,3.913311846,0,0,1.630889,15,29,5.161291,1
,12.363519356,0,1,4.134158,8,43,4.575657,1,2.280510671,1,0,2.942188,17,59,4.796997,0
,4.341179063,1,0,5.692550,9,29,4.561979,1,0.543884458,1,0,5.872703,9,37,5.517594,1
,5.384412364,0,0,7.086228,10,53,5.352583,0,0.357926877,0,0,1.233673,10,41,4.631800,1
,7.515937889,0,1,3.709324,7,57,4.841229,1,6.610268430,1,1,2.985425,12,26,4.247670,1
,0.767447666,1,0,3.118447,7,30,5.295317,1,2.430434981,1,0,2.116950,13,40,5.014839,1
,8.792517293,0,0,1.345875,10,52,4.454354,0,3.013777351,1,1,3.569921,13,36,4.532735,1
,2.163265932,1,0,4.411266,7,59,5.229125,0,7.968264872,0,0,3.003258,12,42,4.960784,1
,0.844040503,1,1,8.385769,7,42,6.516221,1,2.132462188,1,1,2.068871,10,30,4.561979,1
,10.138973269,0,0,1.010718,7,52,4.798963,0,3.056468891,1,1,1.038576,7,58,5.389681,0
,2.560767053,1,1,6.056348,5,52,4.976703,0,3.663193185,1,0,3.686986,7,31,6.202187,1
,2.291634648,1,1,4.039808,13,41,4.991342,1,8.643643262,1,0,2.566005,4,49,5.474375,0
,9.164207891,1,1,3.264355,14,29,4.820110,1,4.102095343,0,1,1.825318,3,47,6.041007,1
,7.108724919,0,0,1.307443,8,50,5.270361,0,4.659110887,0,0,2.996149,5,63,4.375697,0
,3.004592075,1,0,2.455574,10,44,4.724556,1,8.739417505,0,1,1.396395,7,23,5.228350,1
,7.454305665,0,1,4.503241,8,31,5.164568,1,4.439855779,0,0,1.313272,8,41,4.635125,0
,7.439941799,0,1,3.301486,17,56,4.615620,0,4.982958377,1,0,5.707454,13,29,5.220239,1
,8.799376889,0,0,1.639104,3,54,4.810457,0,1.478564699,1,1,3.254479,5,46,6.495191,1
,8.231881752,0,0,4.892462,13,59,5.576314,0,1.370228371,1,1,5.351432,17,41,5.090253,1
,7.847733076,0,1,1.340585,11,55,5.334129,0,9.260718383,0,0,2.958745,8,67,5.078968,0
,0.713335555,1,0,7.859836,11,44,4.311743,1,1.908614536,1,1,2.698272,9,48,4.704970,0
,0.947645353,1,1,4.364577,9,37,4.276668,1,7.067428947,0,1,3.923005,12,44,5.497474,1
,8.226063233,0,0,4.746083,15,56,6.017664,0,2.140435100,1,1,6.296405,15,72,4.615620,0
,8.220473873,0,1,3.105577,12,61,5.182124,0,3.949257197,1,1,6.190143,20,33,5.429166,1
,7.047474298,1,1,1.336732,7,48,5.416645,0,7.598319805,0,1,3.714017,7,54,5.481173,0
,10.267383089,0,1,1.222931,15,33,5.401378,1,4.023251093,0,0,1.371423,6,52,4.599488,0
,8.131350075,0,0,2.936749,7,76,4.621450,0,4.243175326,0,1,5.441902,11,40,4.923659,1
,1.052185439,1,1,2.927342,15,37,5.296764,1,11.637009767,0,1,4.994733,22,55,4.577900,0
,1.843750316,1,1,5.343756,12,51,5.421687,1,3.217178057,0,1,2.654884,13,55,4.166667,0
,0.050499688,0,1,3.330976,10,70,5.263158,0,2.150433838,1,1,4.745550,12,43,5.359078,1
,4.119472797,0,1,3.218857,14,41,5.357143,1,5.816827159,0,1,1.338042,17,29,5.386785,1
,1.235188234,0,1,5.678901,13,51,4.965363,0,1.902475299,1,1,8.224553,21,44,6.073310,1
,3.472140973,0,1,4.710552,6,39,5.299210,1,2.225435008,0,1,3.108445,3,67,4.224999,0
,3.769734310,1,1,3.474964,14,64,5.295317,0,10.907262619,0,1,8.869520,17,33,4.933737,1
,9.280621948,0,1,3.066142,8,50,4.632703,0,5.440011473,0,1,3.309324,10,55,4.892449,0
,0.720430845,0,1,1.952454,10,35,5.661270,1,9.275824202,0,1,1.432103,7,46,4.821142,1
,6.171754285,0,1,2.782620,5,30,4.907975,1,0.851761242,0,1,4.480100,9,37,4.276668,1
,3.991479129,0,1,2.788143,8,60,5.207717,0,4.027130766,0,1,2.712152,6,56,4.540842,1
,2.150557449,1,1,4.657102,12,31,5.333333,1,8.876155198,0,1,1.333591,7,55,4.273348,0
,2.709879382,1,1,7.772380,5,36,4.577911,1,3.509888648,1,1,4.911962,8,65,4.850712,0
,1.438177766,0,1,6.566161,3,40,5.164568,1,3.654998218,0,1,1.397364,6,50,4.718646,0
,0.491549121,1,1,7.104059,22,57,4.821142,0,0.643228080,1,0,3.836880,7,64,5.028178,0
,5.935350567,1,1,1.242361,8,26,4.960784,1,3.294179628,1,1,6.106902,3,66,4.398887,1
,3.222286648,1,1,1.672454,8,43,4.899540,1,3.880637595,1,1,1.478945,20,37,5.153882,1
,5.916463291,1,1,1.418330,9,53,5.182124,1,8.895888735,0,0,4.131253,23,36,4.930935,1
,7.569828171,0,0,4.078775,15,56,6.017664,0,0.190022413,0,1,3.969467,5,52,5.318160,0
,6.571915947,1,1,3.711871,20,55,5.167555,0,5.651828974,1,0,2.605541,8,68,4.466325,0
,2.792770935,1,0,3.232202,7,61,5.376453,0,2.112533086,1,1,4.284349,13,36,5.077524,1
,6.417937165,0,0,3.174980,7,36,5.159393,1,10.506763444,0,0,3.720029,7,58,5.153882,0
,6.441015197,1,0,1.387931,7,46,5.153882,0,0.416201286,1,0,7.575703,3,45,5.174546,1
,3.400599978,0,1,4.229721,13,53,4.516129,0,2.257456296,1,0,4.318870,13,52,4.839637,0
,1.980817692,1,0,3.172698,7,46,5.229125,1,0.409135638,0,1,1.124168,9,39,5.767761,1
,9.626953711,0,1,4.196797,5,62,4.231886,0,8.873726753,1,1,4.295763,5,29,5.907148,1
,6.526882130,0,0,1.103429,4,62,5.323971,0,2.033732309,1,1,4.651381,17,42,4.800717,1
,3.246602258,1,1,3.451903,15,40,4.891389,1,3.362305370,0,1,4.253282,7,48,5.911692,0
,5.530887640,0,1,1.786893,3,53,4.921255,0,6.631603905,0,1,7.327827,7,44,5.115846,1
,7.165996683,0,1,4.614974,12,42,6.059600,1,8.063983602,0,1,4.630703,13,25,5.363205,1
,5.704537372,0,1,4.382012,20,54,5.291503,0,4.686644559,0,1,2.653673,5,63,4.781461,0
,6.049204838,0,1,2.788475,16,61,4.830680,0,1.036582874,1,0,6.220311,10,46,4.194352,1
,11.037226142,0,1,1.951964,17,56,4.705882,0,5.994978555,0,1,2.766535,6,37,4.923234,1
,4.072662914,1,1,2.209646,6,33,5.096031,1,1.261901212,1,1,4.632160,17,40,6.295086,1
,6.329080725,0,1,4.307175,12,34,5.590170,1,0.657757057,0,0,2.297450,17,66,5.714959,0
,10.014428743,0,1,4.216938,5,62,4.231886,0,9.308329221,0,1,3.008391,14,44,5.085716,1
,2.229046193,1,1,7.814473,5,28,5.025885,1,11.016614279,0,1,2.086410,3,56,4.902511,0
,12.453829413,0,1,4.405956,22,55,4.577900,0,1.480791978,1,0,4.422947,10,51,4.374088,0
,6.073305339,1,1,3.013016,8,42,4.423004,1,10.721930187,0,1,2.417753,10,59,4.921255,0
,2.466598504,1,0,4.432532,7,41,6.171599,1,3.503739686,0,1,1.275846,8,40,5.068487,1
,11.649848491,0,1,4.518982,7,52,5.062724,1,0.582062522,1,0,4.834595,15,65,6.157191,0
,3.602467462,0,0,3.425201,17,54,5.196646,0,9.249038990,0,0,1.177843,5,22,4.881406,1
,3.963687505,0,1,3.850679,17,40,4.860499,1,4.304977773,1,1,6.796459,20,33,5.429166,0
,6.082609915,1,1,1.356877,9,53,5.182124,1,3.009463902,0,1,1.670912,6,50,4.718646,0
,4.757404046,1,0,2.645656,5,57,4.327874,0,3.951021254,0,1,3.233695,17,40,4.860499,1
,6.717183333,0,1,1.588997,7,30,5.055576,1,5.538491000,0,1,3.475693,8,56,4.231886,0
,0.329599467,1,0,6.217489,8,67,4.841229,0,3.427751670,1,1,3.416616,12,59,4.333398,0
,5.739149143,0,0,2.804315,20,59,5.656854,0,4.977774590,0,0,3.624645,3,62,4.631770,0
,5.445877095,0,1,3.540264,13,31,4.908459,1,10.177229403,0,1,3.953893,8,50,4.632703,0
,2.568706762,1,0,3.587995,20,64,4.650930,0,4.019721608,0,1,3.651225,25,52,6.521562,0
,8.533050785,0,1,1.084226,17,73,4.878049,0,1.025160758,0,1,3.647254,10,35,4.683626,1
,4.303917603,0,1,2.133479,3,31,4.594660,1,12.999218050,0,0,1.566818,5,70,5.043558,0
,9.677741577,0,0,1.778715,9,69,5.790636,0,3.838812548,0,1,4.145922,17,46,5.115846,0
,6.949571727,0,0,1.948355,20,34,5.397807,1,9.783264891,0,1,3.596454,8,41,6.014000,1
,4.379084150,0,1,2.724802,6,56,4.540842,1,3.014898616,1,1,3.189670,5,34,5.421687,1
,1.772994029,1,1,3.198882,7,55,4.967597,0,6.667516183,0,1,5.050620,13,33,4.953681,1
,7.165767668,0,0,2.239096,10,61,4.921529,0,2.833907426,1,0,2.945502,15,61,4.687360,0
,7.983392664,0,1,1.161906,8,45,5.488113,1,6.245410346,0,1,1.657422,8,49,4.977630,0
,6.186331715,0,1,1.869346,5,65,5.045987,0,3.357774321,1,0,1.963942,12,68,4.480820,0
,1.383746243,1,0,1.952852,7,55,4.624277,0,1.362630493,1,1,4.468038,10,27,5.015566,1
,6.927369494,0,1,1.243462,12,56,5.154913,0,2.503036259,1,1,1.514198,6,25,4.556451,1
,7.275876713,0,1,1.252355,7,43,4.562997,1,11.576856547,0,0,1.411803,6,65,5.038911,0
,6.818662056,0,1,3.567689,10,60,5.295317,0,3.423068071,0,1,1.089534,6,50,4.718646,0
,1.953605140,1,1,2.907695,18,50,4.535342,0,5.727678074,1,1,3.232356,7,38,5.138322,1
,12.132741075,0,1,3.095812,13,47,5.957490,1,6.053348652,0,1,1.128622,8,49,4.977630,0
,3.694768409,0,1,4.871262,16,54,4.572111,0,6.253365566,0,1,1.742508,17,39,5.500175,1
,3.705009777,0,1,3.267305,10,66,4.622501,0,2.938119453,1,1,5.360723,17,23,4.753973,1
,2.135541826,1,1,3.526161,8,55,5.370431,0,5.668259481,1,0,2.574615,10,29,5.474375,1
,7.080065233,0,1,1.692322,5,34,4.921255,1,8.544738114,0,0,2.767920,10,39,5.423261,1
,7.045502733,0,1,1.396488,5,39,5.696002,1,0.002559307,0,1,3.064055,6,37,4.976703,1
,6.617804649,0,0,3.760495,20,39,4.493895,1,9.681871105,0,1,4.324311,4,45,4.851086,1
,6.044649776,0,1,3.856579,7,63,4.374999,0,6.420862876,1,1,4.622526,10,49,4.766442,1
,1.874660804,1,1,1.159535,9,44,5.059026,1,1.235709549,1,0,5.039398,12,44,5.904718,1
,3.258418523,0,0,5.037710,8,28,4.893999,1,4.830709502,1,0,4.918688,15,54,5.326697,0
,2.922021965,1,0,1.920790,10,34,5.266344,1,3.906092804,0,0,4.699752,11,48,4.869480,0
,0.697623587,1,0,4.116854,5,59,5.642155,0,9.941569914,0,1,8.289183,10,52,4.821142,0
,10.350027004,0,1,3.366197,8,55,5.015292,1,5.527569011,1,1,4.051165,18,26,5.439283,1
,2.606175378,1,1,6.641173,13,26,5.555122,1,2.026639052,1,1,1.038067,10,54,5.294117,1
,7.008150992,0,0,4.232506,12,42,6.059600,1,2.646393091,1,1,6.246285,18,54,5.661270,1
,0.173419837,0,1,4.740231,15,49,4.967444,0,1.716937293,1,0,3.299435,3,25,5.451704,1
,2.830695509,1,0,2.794498,8,68,5.121871,0,3.569302815,1,1,3.840850,7,54,5.294117,0
,4.225227549,0,0,4.298415,10,41,5.021689,1,10.123232628,1,1,3.056420,21,67,4.610694,0
,4.584152274,1,1,3.654068,7,48,5.770498,0,6.633500108,0,0,6.271458,7,60,4.466325,0
,1.716644522,1,0,2.746618,9,43,5.484352,1,2.479548329,1,1,4.672449,10,33,4.850712,1
,1.574094294,0,1,3.839817,5,29,5.407597,1,4.376846888,0,0,1.434719,8,49,4.948717,1
,2.838176338,1,0,5.180557,10,49,5.497474,0,1.597733354,1,0,4.702143,20,69,4.319955,0
,7.761047735,0,1,3.263322,8,33,5.295317,1,2.303782009,1,1,4.107277,12,53,5.078968,0
,8.779316914,1,0,2.786833,4,49,5.474375,0,0.707945846,1,0,6.989046,15,30,5.187748,1
,2.771433325,1,1,5.418486,17,52,5.141796,0,0.576891829,0,1,3.060024,7,60,4.841229,0
,3.349106529,0,0,1.849925,8,60,5.094267,0,1.159469227,1,1,5.524873,8,43,5.063291,1
,1.322922594,1,0,2.465975,8,41,4.718646,1,0.517570282,1,0,3.848236,7,36,5.447472,1
,8.030447971,0,1,1.702648,12,65,4.590991,0,0.213668209,0,1,4.255354,13,61,6.373774,0
,3.283146825,1,0,2.082126,7,54,5.677647,0,1.271388416,1,1,3.141950,10,36,5.333006,1
,7.194447942,0,0,3.324746,1,48,4.952456,1,8.775137830,1,1,3.302513,7,56,4.242424,0
,11.477803069,0,0,4.508761,6,53,4.952207,0,4.495830518,0,1,4.465289,3,45,5.590170,1
,0.842441077,1,0,5.454550,8,50,5.625326,0,10.128304282,0,0,2.302096,11,43,5.224291,1
,5.414548830,1,1,5.351232,10,23,5.132883,1,1.672620775,1,0,1.674226,3,62,4.997703,0
,1.865278895,1,1,5.616745,6,66,5.111615,0,10.968824176,0,1,1.261960,13,57,4.154942,0
,4.239609137,0,1,1.289914,9,58,5.060192,0,6.409953923,1,1,3.524953,10,67,4.419417,0
,1.703956304,1,1,2.970582,5,53,5.521473,0,6.551169501,0,1,1.707130,4,47,5.552737,1
,10.016568240,1,1,2.170398,8,52,5.007613,0,4.105695818,0,0,1.727860,6,52,4.599488,0
,11.702959931,0,1,1.333864,15,41,5.090253,1,0.082156753,1,1,3.966131,23,62,4.635125,0
,2.228627842,1,0,3.881510,7,56,5.366974,1,0.994236710,1,1,6.727516,7,54,5.517594,1
,1.967989864,1,1,3.760652,3,67,4.224999,0,6.943112649,0,0,5.903703,15,39,5.161291,1
,5.752026284,1,1,3.380272,8,42,4.423004,1,3.066375134,0,1,1.500041,5,55,4.668973,0
,5.137912317,0,1,1.842717,9,33,5.219121,1,0.188009532,0,1,6.516876,8,30,5.055576,1
,8.457299556,0,1,4.308274,5,32,5.625000,1,5.110364498,0,1,6.847764,20,51,4.913402,1
,0.432127170,0,0,1.126517,10,41,4.631800,0,5.879574224,0,1,1.449354,5,43,4.864693,1
,4.086557800,0,1,3.752157,23,61,5.420771,0,6.479730458,0,1,1.541884,6,33,5.474375,1
,2.096060458,1,1,4.820653,13,42,6.016540,1,8.740526201,0,1,2.848737,4,54,4.960819,0
,0.987269106,1,1,1.953991,5,55,4.933303,0,3.784784833,0,1,1.146611,13,48,4.960784,0
,7.895338056,0,0,3.431830,10,39,4.519892,1,0.153081605,0,1,5.174208,10,21,5.420764,1
,3.612458119,1,1,3.034005,6,31,5.128117,1,1.204751877,1,0,4.917909,25,53,4.631800,0
,4.797438903,0,1,3.149489,3,43,5.050762,1,3.653500320,1,0,3.462682,3,43,5.735394,1
,4.572908745,0,1,1.790413,5,33,5.115846,1,8.925976739,0,1,1.753543,8,43,5.010377,1
,9.424089447,0,1,3.067573,7,48,4.808812,0,2.721229521,1,0,3.269509,22,68,5.164568,0
,0.617500623,1,0,2.210704,13,47,4.798963,1,0.733668341,1,1,3.274835,10,30,4.998959,1
,1.777653448,1,0,1.163099,13,55,4.766442,0,8.045365441,0,0,4.279056,10,63,5.621055,0
,0.895810048,1,1,6.392544,23,70,4.983549,0,5.886969043,0,0,3.685820,7,40,4.960784,1
,7.924019439,0,0,5.793592,5,38,5.697535,1,11.543164037,0,1,4.212662,5,45,5.329681,0
,2.092920364,1,0,4.339656,13,52,4.839637,0,5.835997393,0,0,5.470233,5,58,4.302066,1
,7.597981449,0,1,3.128263,12,34,4.966996,1,7.735710616,1,0,2.544029,4,49,5.474375,0
,7.710111583,0,1,2.007118,7,48,4.366659,1,3.082220332,1,1,5.178506,11,69,5.112992,0
,6.819277120,0,0,5.871234,15,44,5.025885,1,6.581073376,1,0,3.642454,13,59,4.923659,0
,1.263674974,1,0,6.885249,5,42,4.535342,1,9.058271228,1,1,1.749957,13,34,4.465782,1
,1.561582213,1,0,3.557060,4,48,4.991342,0,5.342270791,0,1,7.101077,15,51,5.045987,0
,7.713528057,0,1,3.927043,7,55,5.062724,0,5.871852210,1,1,3.520430,10,44,4.800717,1
,9.344645272,0,1,1.791652,13,56,5.993707,0,4.890129012,0,1,2.478405,8,38,5.229125,1
,1.194444507,1,0,3.177897,8,54,5.120809,0,10.277324399,0,0,1.573010,13,59,5.809277,0
,2.300039799,1,1,4.265143,8,45,4.724556,1,6.997014031,0,1,2.162587,7,53,5.764246,0
,3.270359373,0,1,6.658219,13,53,4.633481,0,0.593110196,1,0,4.767141,15,65,6.157191,0
,4.606589056,1,0,2.608047,5,57,4.327874,0,3.920409560,0,0,4.776128,23,66,5.153882,0
,4.651463728,0,0,1.835911,5,70,4.921255,0,3.133126574,1,0,5.083054,12,28,4.741448,1
,0.169656895,1,1,4.315575,8,53,5.201327,1,6.501047844,0,1,3.243885,5,43,4.984073,1
,8.068813673,0,1,3.223044,7,58,4.736275,0,7.840334027,0,0,3.619049,17,41,4.376881,1
,3.098918700,0,1,6.186871,10,59,4.953681,0,1.928365503,1,0,3.344411,8,66,4.916011,1
,0.764759076,1,0,7.455564,11,44,4.311743,1,1.757825223,1,0,4.464654,20,69,4.319955,1
,7.459053005,1,0,3.732241,21,49,5.070667,0,5.689613634,0,0,3.397186,9,52,4.923659,0)
testhare <- matrix(testhare,ncol=8,byrow=TRUE) |
library(hexSticker)
library(ggplot2)
library(gridExtra)
library(grid)
library(gtable)
library(sysfonts)
sysfonts::font_add_google(name = "Roboto", family = "Roboto")
col1 <- c(rep(" ", 4))
df <- data.frame(col1, col1, col1, col1)
print(df)
colnames(df) <- NULL
print(df)
myt <- ttheme_default(
core = list(bg_params=list(fill = NA, col="gray99" ) ),
colhead = list(bg_params=list(fill = NA, col="gray99"))
)
g <- tableGrob(df, rows = NULL, theme = myt)
g <- gtable_add_grob(g, grobs = rectGrob(gp = gpar(fill = NA, col="gray99",lwd = 9)),
t = 1, b = nrow(g), l = 1, r = ncol(g))
g <- gtable_add_grob(g, grobs = rectGrob(gp = gpar(fill = NA, col="gray99", lwd = 8)),
t = 1, l = 1, r = ncol(g))
grid.draw(g)
png(filename = "./man/figures/table.png", width=400, height=260, bg = "transparent", res=200)
grid.draw(g)
dev.off()
img_address <- "./man/figures/table.png"
sticker( img_address,
package="gtfs2gps", p_color="gray99", p_size=22, p_family= "Roboto",
s_x=1, s_y=.8, s_width=.6, s_height=.6,
  h_fill = "grey20", # placeholder: the original hex colour (text after the '#') is missing from this copy of the script
spotlight=TRUE, l_x=20, l_y=4, l_width=2, l_height=2,
filename="./man/figures/gtfs2gps_logo.png", dpi=300) |
# Build the dependency tree for a report in the orderly *source* tree (as
# opposed to the archive): "downstream" walks towards the reports that use
# this one, "upstream" towards the reports it depends on.
orderly_graph_src <- function(name, config, direction = "downstream",
                              max_depth = Inf, recursion_limit = 100,
                              show_all = FALSE) {
  nms <- orderly_list(config)
  if (!(name %in% nms)) {
    stop(sprintf("Unknown source report '%s'", name), call. = FALSE)
  }
  src <- lapply(nms, orderly_recipe$new, config, develop = TRUE)
  names(src) <- nms
  # One data frame of (name, id) dependencies per report
  deps <- lapply(src, function(x) unique(x$depends[c("name", "id")]))
  if (direction == "downstream") {
    # Invert the edges so that dependencies are indexed by the report that
    # provides them rather than the report that uses them
    len <- viapply(deps, NROW, USE.NAMES = FALSE)
    parent <- unlist(lapply(deps, "[[", "name"), FALSE, FALSE)
    deps <- split(data_frame(
      name = rep(names(deps), len),
      id = unlist(lapply(deps, "[[", "id"), FALSE, FALSE)), parent)
  }
  root <- report_vertex$new(NULL, name, "latest", FALSE)
  seen <- new.env()
  build_tree_src(root, deps, max_depth, recursion_limit, NULL)
  report_tree$new(root, direction)
}

# Recursively attach the children of `parent` using the `deps` lookup,
# guarding against runaway recursion (`limit`) and circular dependencies
# (`seen`, the chain of report names already on the current path).
build_tree_src <- function(parent, deps, depth, limit, seen = NULL) {
  if (limit < 0) {
    stop("The tree is very large or degenerate.")
  }
  name <- parent$name
  if (any(seen == name)) {
    loop <- c(seen[which(seen == name):length(seen)], name)
    stop(paste("Detected circular dependency:",
               paste(squote(loop), collapse = " -> ")),
         call. = FALSE)
  }
  if (is.null(deps[[name]]) || depth == 0) {
    return(NULL)
  }
  d <- deps[[name]]
  for (i in seq_len(NROW(d))) {
    child <- report_vertex$new(parent, d$name[[i]], d$id[[i]], FALSE)
    parent$add_child(child)
    build_tree_src(child, deps, depth - 1, limit - 1, c(seen, name))
  }
  # Return the parent invisibly; returning the loop variable `child` (as the
  # original did) errors when `d` has zero rows, because `child` is then
  # never defined.
  invisible(parent)
}
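# A hedged usage sketch (the `config` object is assumed to have been loaded
# elsewhere by the package's own configuration reader, which is not shown
# here to avoid guessing at its API):
#   tree <- orderly_graph_src("report-a", config, direction = "downstream")
#   tree   # a report_tree rooted at "report-a"; its children are the reports
#          # that depend on it, to at most `max_depth` levels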
context("Making a new dataset")
test_that("write.csv.gz gzips a csv", {
df <- data.frame(a = 1:1000)
f <- tempfile()
f2 <- tempfile()
write.csv.gz(df, file = f)
write.csv(df, file = f2, row.names = FALSE)
expect_true(file.exists(f))
expect_true(file.exists(f2))
expect_true(file.size(f) < file.size(f2))
expect_equal(read.csv(f), df)
expect_equal(read.csv(f2), df)
})
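# A minimal sketch of the behaviour the test above assumes for write.csv.gz()
# (the real helper is defined elsewhere in the package; the name and defaults
# below are illustrative only): write the data frame through a gzip
# connection, so the file is smaller than plain write.csv() output while
# read.csv() can still read it back via R's compressed-connection support.
write_csv_gz_sketch <- function(x, file, ...) {
  con <- gzfile(file, open = "w")
  on.exit(close(con))
  utils::write.csv(x, file = con, row.names = FALSE, ...)
}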
test_that("newDataset input validation", {
expect_error(
newDataset(NULL),
"Can only make a Crunch dataset from a two-dimensional data"
)
expect_error(
newDataset(1:5),
"Can only make a Crunch dataset from a two-dimensional data"
)
})
with_mock_crunch({
test_that("Basic exercise of turning data.frame to Crunch payload", {
expect_POST(
newDataset(data.frame(a = 1), name = "Testing"),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"Testing",',
'"table":{"element":"crunch:table",',
'"metadata":{"a":{"type":"numeric","name":"a","alias":"a"}},',
'"order":["a"]}}}'
)
})
test_that("Turning data.frame to Crunch payload without a name", {
expect_POST(
newDataset(data.frame(a = 1)),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"data.frame(a = 1)",',
'"table":{"element":"crunch:table",',
'"metadata":{"a":{"type":"numeric","name":"a","alias":"a"}},',
'"order":["a"]}}}'
)
})
test_that("Turning data.frame to Crunch payload with a long name", {
expect_POST(
newDataset(data.frame(a = 1, really_really_long_name = 2)),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"data.frame(a = 1, ',
'really_really_long_nam","table":{"element":"crunch:table",',
'"metadata":{"a":{"type":"numeric","name":"a","alias":"a"},',
'"really_really_long_name":{"type":"numeric",',
'"name":"really_really_long_name","alias":"really_really_long_name"}},',
'"order":["a","really_really_long_name"]}}}'
)
})
test_that("uploadData writes out a gzipped file", {
ds <- cachedLoadDataset("test ds")
with_DELETE(NULL, {
expect_POST(
uploadData(ds, data.frame(a = 1)),
"https://app.crunch.io/api/sources/",
"list(uploaded_file"
)
})
})
test_that("createDataset with named args", {
expect_POST(
createDataset(name = "Foo", description = "Bar."),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"Foo",',
'"description":"Bar."}}'
)
})
test_that("createDataset returns a dataset", {
with_POST(
"https://app.crunch.io/api/datasets/1/",
expect_true(is.dataset(createDataset(name = "Foo")))
)
})
test_that("newDataset calls newDatasetFromFile if given a string", {
expect_POST(
newDataset("helper.R"),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"helper.R"}}'
)
})
test_that("newDataset with a schema posts to sources", {
expect_POST(
newDataset(x = "helper.R", schema = "helper.R"),
"https://app.crunch.io/api/sources/",
"list\\(uploaded_file = list\\(path = .*helper.R",
fixed = FALSE
)
})
test_that("newDataset with schema and data posts, adds to batches and appends", {
with_POST("https://app.crunch.io/api/datasets/1/", {
ds <- newDataset(x = "teardown.R", schema = "setup.R")
})
})
test_that("newDataset(FromFile) cleans up the dataset entity if the file is invalid", {
with_POST("https://app.crunch.io/api/datasets/1/", {
expect_DELETE(
newDataset("NOTAFILE.exe"),
"https://app.crunch.io/api/datasets/1/"
)
with_DELETE(NULL, {
expect_error(
newDataset("NOTAFILE.exe"),
"File not found"
)
})
})
})
test_that("newDataset(FromFile) can take an s3 URL", {
with_DELETE(NULL, {
expect_POST(
newDataset("s3://httpbin.org/get"),
"https://app.crunch.io/api/datasets/1/batches/",
'{"element":"shoji:entity",',
'"body":{"url":"s3://httpbin.org/get"}}'
)
})
})
test_that("newDataset(FromFile) can take an http(s) URL", {
with_DELETE(NULL, {
expect_POST(
newDataset("https://httpbin.org/get"),
"https://app.crunch.io/api/sources/",
'{"element":"shoji:entity",',
'"body":{"location":"https://httpbin.org/get"}}'
)
})
})
test_that("newDatasetByColumn", {
expect_POST(
newDatasetByColumn(data.frame(a = 1), name = "Bam!"),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"Bam!"}}'
)
})
test_that("createSource validation", {
expect_error(
createSource("File not found"),
"File not found"
)
expect_error(
createSource(name = "x"),
"Must provide a file or url to createSource"
)
})
test_that("newExampleDataset", {
expect_POST(
newExampleDataset(),
"https://app.crunch.io/api/datasets/",
'{"element":"shoji:entity","body":{"name":"Example dataset",'
)
})
})
with_test_authentication({
whereas("The two methods for sending data", {
testfile.csv <- "fake.csv"
testfile.df <- read.csv(testfile.csv)
test_that("fake.csv is what we expect", {
expect_identical(dim(testfile.df), c(20L, 6L))
})
test_that("newDataset creates a dataset if given a file", {
ds <- newDataset(testfile.csv)
expect_true(is.dataset(ds))
expect_identical(nrow(ds), 20L)
expect_identical(ncol(ds), 6L)
expect_equivalent(mean(ds[[2]]), mean(testfile.df[[2]]))
})
test_that("Dataset-by-column variable types get set correctly", {
ds <- newDatasetByColumn(df)
expect_valid_df_import(ds)
expect_equivalent(mean(ds$v3), mean(df$v3))
expect_true(setequal(names(df), names(ds)))
expect_identical(names(df), names(ds))
})
})
m <- fromJSON(system.file("example-datasets", "pets.json", package = "crunch"),
simplifyVector = FALSE
)
whereas("Creating with metadata and csv", {
test_that("createWithMetadataAndFile using docs example", {
ds <- newDatasetFromFixture("apidocs")
expect_valid_apidocs_import(ds)
})
test_that("data.frame with spaces in column names", {
input <- data.frame(a = factor("A"), b = 4)
names(input) <- c("var one", "var two")
expect_identical(names(input), c("var one", "var two"))
ds <- newDataset(input)
expect_identical(names(ds), c("var one", "var two"))
})
test_that("Can create dataset with data in S3", {
ds <- createWithMetadataAndFile(m,
file = "s3://testing-crunch-io/example-dataset.csv"
)
expect_valid_apidocs_import(ds)
ds2 <- newDatasetFromFixture("apidocs")
expect_identical(dim(ds), dim(ds2))
expect_identical(as.vector(ds$q1), as.vector(ds2$q1))
})
})
test_that("Duplicate subvariables are forbidden", {
m2 <- m
dupe <- list(name = "Another", alias = "allpets_1")
m2$body$table$metadata$allpets$subvariables[[4]] <- dupe
expect_error(createWithMetadataAndFile(
m2,
system.file("example-datasets", "pets.csv", package = "crunch")
))
})
test_that("newDataset without specifying name grabs object name", {
dsz <- newDataset(df)
expect_true(is.dataset(dsz))
expect_identical(name(dsz), "df")
expect_valid_df_import(dsz)
})
test_that("data.frame with missing data in datetime & date columns", {
input <- data.frame(
date = c(as.Date("2020-01-01"), NA),
datetime = as.POSIXlt(c(NA, "2020-01-01 01:05:00"), tz = "UTC")
)
ds <- newDataset(input, "missing vals datetimes")
expect_is(ds$date, "DatetimeVariable")
expect_equal(as.vector(ds$date), input$date)
expect_is(ds$datetime, "DatetimeVariable")
expect_equal(as.vector(ds$datetime), input$datetime)
})
}) |
expected <- structure(c(1L, 1L, 2L, 5L, 6L, 6L),
                      .Label = c("1:1", "1:2", "1:3", "2:1", "2:2", "2:3"),
                      class = "factor")
test(id = 0, code = {
    argv <- list(
        structure(c(1L, 1L, 1L, 2L, 2L, 2L), .Label = c("1", "2"), class = "factor"),
        structure(c(1L, 1L, 2L, 2L, 3L, 3L), .Label = c("1", "2", "3"), class = "factor")
    )
    do.call(`:`, argv)
}, o = expected)
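# The test above exercises the `:` operator on two factors: for factor
# arguments, a:b crosses the levels (comparable to interaction(a, b, sep = ":")),
# which is why the expected value has levels "1:1" ... "2:3". A standalone
# illustration of the same behaviour:
f1 <- factor(c(1, 1, 1, 2, 2, 2))
f2 <- factor(c(1, 1, 2, 2, 3, 3))
f1:f2
# [1] 1:1 1:1 1:2 2:2 2:3 2:3
# Levels: 1:1 1:2 1:3 2:1 2:2 2:3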
context("Export summary statistics table to a flextable")
library(flextable)
library(officer)
test_that("A summary table with a row variable is correctly exported to flextable", {
summaryTable <- data.frame(
PARAM = factor(c("A", "B"), levels = c("B", "A")),
n = c(9, 10)
)
expect_silent(
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "PARAM"
)
)
expect_s3_class(ft, "flextable")
expect_identical(
ft$body$dataset[, 1],
c("B", "A")
)
})
test_that("Multiple row variables are nested correctly in a flextable summary table", {
summaryTable <- data.frame(
PARAM = rep(c("Actual Value", "Change from baseline"), each = 3),
COHORT = rep(c("I", "I", "II"), times = 2),
TRT = factor(rep(c("A", "B", "A"), times = 2), levels = c("B", "A")),
n = seq_len(6)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("PARAM", "COHORT", "TRT")
)
dataRef <- data.frame(
c(
"Actual Value", "I", "B", "A", "II", "A",
"Change from baseline", "I", "B", "A", "II", "A"
),
c(
NA_character_, NA_character_, "2", "1", NA_character_, "3",
NA_character_, NA_character_, "5", "4", NA_character_, "6"
),
stringsAsFactors = FALSE
)
expect_equal(
unname(ft$body$dataset),
unname(dataRef),
check.attributes = FALSE
)
})
test_that("Padding is correctly set for nested row variables in a flextable summary table", {
summaryTable <- data.frame(
PARAM = rep(c("Actual Value", "Change from baseline"), each = 3),
COHORT = rep(c("I", "I", "II"), times = 2),
TRT = factor(rep(c("A", "B", "A"), times = 2), levels = c("B", "A")),
n = seq_len(6)
)
rowPadBase <- 50
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("PARAM", "COHORT", "TRT"),
rowPadBase = rowPadBase
)
expect_equal(
ft$body$styles$pars$padding.left$data[-c(1, 7), 1],
rowPadBase * rep(c(1, 2, 2, 1, 2), 2)
)
})
test_that("Horizontal lines are correctly set for nested row variables in a flextable summary table", {
summaryTable <- data.frame(
PARAM = rep(c("Actual Value", "Change from baseline"), each = 3),
COHORT = rep(c("I", "I", "II"), times = 2),
TRT = factor(rep(c("A", "B", "A"), times = 2), levels = c("B", "A")),
n = seq_len(6)
)
colorTable <- getColorPaletteTable()
colorTable["line"] <- "red"
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("PARAM", "COHORT", "TRT"),
colorTable = colorTable
)
ftDataBd <- ft$body$styles$cells$border.color.top$data
expect_setequal(
object = ftDataBd[7, ],
expected = "red"
)
expect_false(unique(c(ftDataBd[-7, ])) == "red")
})
test_that("The specified labels of the row variables are correctly set in a flextable summary table", {
summaryTable <- data.frame(
PARAM = factor(c("A", "B"), levels = c("B", "A")),
n = c(9, 10)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "PARAM",
rowVarLab = c(PARAM = "Parameter")
)
expect_identical(
object = ft$header$dataset[, 1],
expected = "Parameter"
)
})
test_that("The labels of the row variables, extracted from the labels of all variables, are correctly set in a flextable summary table", {
summaryTable <- data.frame(
PARAM = factor(c("A", "B"), levels = c("B", "A")),
n = c(9, 10)
)
expect_identical(
object = {
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "PARAM",
labelVars = c(PARAM = "Parameter")
)
ft$header$dataset[, 1]
},
expected = "Parameter"
)
})
test_that("Row variables in a separated column are merged correctly in a flextable summary table", {
summaryTable <- data.frame(
PARAM = rep(c("Actual Value", "Change from baseline"), each = 3),
COHORT = rep(c("I", "I", "II"), times = 2),
TRT = factor(rep(c("A", "B", "A"), times = 2), levels = c("B", "A")),
n = seq_len(6)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("PARAM", "COHORT", "TRT"),
rowVarInSepCol = c("COHORT", "TRT")
)
expect_identical(
object = as.character(as.vector(ft$header$dataset[1, 1:3])),
expected = c("PARAM", "COHORT", "TRT")
)
dataRef <- data.frame(
rep(c("Actual Value", "Change from baseline"), each = 3),
rep(c("I", "I", "II"), times = 2),
rep(c("B", "A", "A"), times = 2),
c("2", "1", "3", "5", "4", "6"),
stringsAsFactors = FALSE
)
expect_equal(
object = unname(ft$body$dataset),
expected = unname(dataRef),
check.attributes = FALSE
)
expect_identical(
ft$body$spans$columns[, 1:2],
cbind(
c(3, 0, 0, 3, 0, 0),
c(2, 0, 1, 2, 0, 1)
)
)
expect_setequal(
c(ft$body$spans$columns[, -(1:2)]),
1
)
})
test_that("Nested and merged row variables are displayed correctly in a flextable summary table", {
summaryTable <- data.frame(
AESOC = c("A", "A", "A"),
AEDECOD = factor(c("a", "b", "b"), levels = c("b", "a")),
WORSTINT = factor(
c("Moderate", "Severe", "Moderate"),
levels = c("Moderate", "Severe")
),
n = c(2, 4, 7)
)
rowPadBase <- 50
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("AESOC", "AEDECOD", "WORSTINT"),
rowVarInSepCol = "WORSTINT",
rowPadBase = rowPadBase
)
dataRef <- data.frame(
c("A", "b", "b", "a"),
c(NA_character_, "Moderate", "Severe", "Moderate"),
c(NA_character_, "7", "4", "2"),
stringsAsFactors = FALSE
)
expect_equal(
object = unname(ft$body$dataset),
expected = unname(dataRef),
check.attributes = FALSE
)
expect_setequal(
ft$body$styles$pars$padding.left$data[-1, 1],
rowPadBase
)
})
test_that("Horizontal lines are correctly set for multiple row variables in a separated column in a flextable summary table", {
summaryTable <- data.frame(
PARAM = rep(c("Actual Value", "Change from baseline"), each = 3),
COHORT = rep(c("I", "I", "II"), times = 2),
TRT = factor(rep(c("A", "B", "A"), times = 2), levels = c("B", "A")),
n = seq_len(6)
)
expect_warning(
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("PARAM", "COHORT", "TRT"),
rowVarInSepCol = c("COHORT", "TRT"),
colorTable = c(line = "red")
)
)
ftDataBd <- ft$body$styles$cells$border.color.top$data
expect_setequal(
object = ftDataBd[c(3, 4, 6), 2:4],
expected = "red"
)
expect_false(unique(c(ftDataBd[-c(3, 4, 6), 2:4])) == "red")
})
test_that("Custom formatting is correctly set to a row variable in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "B"),
n = 1:2
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "TRT",
rowVarFormat = list(TRT = "bold")
)
expect_identical(
object = unname(ft$body$styles$text$bold$data),
expected = cbind(
rep(TRUE, nrow(summaryTable)),
rep(FALSE, nrow(summaryTable))
)
)
})
test_that("Custom formatting is correctly set to a nested row variable in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = c("a", "b", "a", "b"),
n = 1:4
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("TRT", "PARAM"),
rowVarFormat = list(PARAM = "bold")
)
expect_identical(
object = unname(ft$body$styles$text$bold$data),
expected = cbind(
c(FALSE, TRUE, TRUE, FALSE, TRUE, TRUE),
rep(FALSE, 6)
)
)
})
test_that("Custom formatting is correctly set to a row variable in a different column in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = c("a", "b", "a", "b"),
n = 1:4
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("TRT", "PARAM"),
rowVarInSepCol = "PARAM",
rowVarFormat = list(PARAM = "bold")
)
expect_identical(
object = unname(ft$body$styles$text$bold$data),
expected = cbind(
rep(FALSE, nrow(summaryTable)),
rep(TRUE, nrow(summaryTable)),
rep(FALSE, nrow(summaryTable))
)
)
})
test_that("Row totals are correctly included in the header row in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = factor(c("a", "Total", "b", "Total"), levels = c("Total", "a", "b")),
n = c("2", "1", "4", "3")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("TRT", "PARAM"),
rowVarTotalInclude = "PARAM"
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("A", "a", "B", "b"),
c("1", "2", "3", "4"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("Row totals are correctly included in separated rows in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = factor(c("a", "Total", "b", "Total"), levels = c("Total", "a", "b")),
n = c("2", "1", "4", "3")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("TRT", "PARAM"),
rowVarTotalInclude = "PARAM",
rowVarTotalInSepRow = "PARAM"
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("A", "Total", "a", "B", "Total", "b"),
c(NA_character_, "1", "2", NA_character_, "3", "4"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("The variable group and name are correctly merged in a flextable summary table in case of unique group", {
summaryTable <- data.frame(
variable = c("A", "B", "B"),
variableGroup = c("a", "b1", "b2"),
n = c("1", "2", "3")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
rowAutoMerge = TRUE
)
expect_equal(
object = ft$body$dataset[, 1],
expected = c("A a", "B", "b1", "b2"),
check.attributes = FALSE
)
})
test_that("The variable group and name are correctly included in different rows when requested in a flextable summary table in case of unique group", {
summaryTable <- data.frame(
variable = c("A", "B", "B"),
variableGroup = c("a", "b1", "b2"),
n = c("1", "2", "3")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
rowAutoMerge = FALSE
)
expect_equal(
object = ft$body$dataset[, 1],
expected = c("A", "a", "B", "b1", "b2"),
check.attributes = FALSE
)
})
test_that("The variable name and the statistic are correctly merged in a flextable summary table in case of unique statistic", {
summaryTable <- data.frame(
variable = c("A", "B"),
n = c("1", "2"),
Mean = c(NA_character_, "0.56"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "variable",
statsVar = c("n", "Mean"),
rowAutoMerge = TRUE
)
expect_equal(
object = ft$body$dataset[, 1],
expected = c("A n", "B", "n", "Mean"),
check.attributes = FALSE
)
})
test_that("The variable name and the statistic are correctly included in different rows when requested in a flextable summary table in case of unique statistic", {
summaryTable <- data.frame(
variable = c("A", "B"),
n = c("1", "2"),
Mean = c(NA_character_, "0.56"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "variable",
statsVar = c("n", "Mean"),
rowAutoMerge = FALSE
)
expect_equal(
object = ft$body$dataset[, 1],
expected = c("A", "n", "B", "n", "Mean"),
check.attributes = FALSE
)
})
test_that("A summary table with a continuous and a categorical variables is correctly exported to flextable", {
summaryTable <- data.frame(
variable = factor(
c("SEX", "SEX", "AGE"),
levels = c("SEX", "AGE")
),
variableGroup = factor(
c("Female", "Male", NA_character_),
levels = c("Male", "Female")
),
n = c(3, 4, NA_real_),
mean = c(NA_real_, NA_real_, 3.33)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = c("n", "mean")
)
dataRef <- data.frame(
c("SEX", "Male n", "Female n", "AGE mean"),
c(NA_character_, "4", "3", "3.33"),
stringsAsFactors = FALSE
)
expect_equal(
object = unname(ft$body$dataset),
expected = unname(dataRef),
check.attributes = FALSE
)
})
test_that("A summary table with a continuous and a categorical variables is correctly exported without row merging to flextable", {
summaryTable <- data.frame(
variable = factor(
c("SEX", "SEX", "AGE"),
levels = c("SEX", "AGE")
),
variableGroup = factor(
c("Female", "Male", NA_character_),
levels = c("Male", "Female")
),
n = c(3, 4, NA_real_),
mean = c(NA_real_, NA_real_, 3.33)
)
rowPadBase <- 50
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = c("n", "mean"),
rowPadBase = rowPadBase,
rowAutoMerge = FALSE
)
dataRef <- data.frame(
c("SEX", "Male", "n", "Female", "n",
"AGE", "mean"),
c(NA_character_, NA_character_, "4", NA_character_, "3",
NA_character_, "3.33"),
stringsAsFactors = FALSE
)
expect_equal(
object = unname(ft$body$dataset),
expected = unname(dataRef),
check.attributes = FALSE
)
expect_equal(
ft$body$styles$pars$padding.left$data[-c(1, 6), 1],
rowPadBase * c(1, 2, 1, 2, 1)
)
})
test_that("A summary table with a continuous and a categorical variable, with and without named statistic, is correctly exported to flextable", {
summaryTable <- data.frame(
variable = factor(
c("SEX", "SEX", "AGE"),
levels = c("SEX", "AGE")
),
variableGroup = factor(
c("Female", "Male", NA_character_),
levels = c("Male", "Female")
),
Statistic = c(3, 4, NA_real_),
Mean = c(NA_real_, NA_real_, 3.33)
)
rowPadBase <- 50
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
rowPadBase = rowPadBase
)
dataRef <- data.frame(
c("SEX", "Male", "Female", "AGE Mean"),
c(NA_character_, "4", "3", "3.33"),
stringsAsFactors = FALSE
)
expect_equal(
object = unname(ft$body$dataset),
expected = unname(dataRef),
check.attributes = FALSE
)
expect_setequal(
ft$body$styles$pars$padding.left$data[-c(1, 4), 1],
rowPadBase
)
})
test_that("A summary table with a column variable is correctly exported to flextable", {
summaryTable <- data.frame(
TRT = factor(c("A", "B"), levels = c("B", "A")),
n = c(9, 10)
)
expect_silent(
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
colVar = "TRT"
)
)
expect_s3_class(ft, "flextable")
expect_identical(
unname(unlist(ft$header$dataset[1, ])),
c("B", "A")
)
})
test_that("An error is generated if a flextable summary table contains multiple records for the same variable", {
summaryTable <- data.frame(
TRT = c("A", "A", "B"),
n = c(9, 8, 10)
)
expect_error(
exportSummaryStatisticsTable(
summaryTable = summaryTable,
colVar = "TRT"
),
"Table formatting to multiple columns failed because of duplicated records for each row/col"
)
})
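# Why duplicated records break the column layout: spreading `n` into one
# column per TRT level requires exactly one value per (row, column) cell.
# A small base-R illustration (not the package's internal reshaping code):
dupes <- data.frame(TRT = c("A", "A", "B"), n = c(9, 8, 10))
xtabs(n ~ TRT, data = dupes)
# A naive cross-tabulation silently collapses the two "A" records by summing
# them (A = 17, B = 10), which is why exportSummaryStatisticsTable() refuses
# and raises an error instead of guessing.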
test_that("A summary table without totals in the header is correctly exported to flextable", {
summaryTable <- data.frame(
TRT = factor(c("A", "A", "B", "B"), levels = c("B", "A")),
statN = c(1, 4, 2, 5),
isTotal = c(FALSE, TRUE, FALSE, TRUE)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
colVar = "TRT",
colHeaderTotalInclude = TRUE
)
expect_identical(
        object = unname(unlist(ft$header$dataset)),
expected = c("B\n(N=5)", "A\n(N=4)")
)
})
test_that("A summary table without totals in the header is correctly exported to flextable", {
summaryTable <- data.frame(
TRT = factor(c("A", "A", "B", "B"), levels = c("B", "A")),
statN = c(1, 4, 2, 5),
isTotal = c(FALSE, TRUE, FALSE, TRUE)
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
colVar = "TRT",
colHeaderTotalInclude = FALSE
)
expect_identical(
object = unname(unlist(ft$header$dataset)),
expected = c("B", "A")
)
})
test_that("A summary table with statistics in rows is correctly exported to flextable", {
summaryTable <- data.frame(
variable = c("A", "B"),
n = c("1", "2"),
Mean = c("0.34", "0.56"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "variable",
statsVar = c("n", "Mean"),
statsLayout = "row"
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("A", "n", "Mean", "B", "n", 'Mean'),
c(NA_character_, "1", "0.34", NA_character_, "2", "0.56"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("A summary table with statistics in columns is correctly exported to flextable", {
summaryTable <- data.frame(
variable = c("A", "B"),
n = c("1", "2"),
Mean = c("0.34", "0.56"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "variable",
statsVar = c("n", "Mean"),
statsLayout = "col"
)
expect_identical(
object = unname(unlist(ft$header$dataset[1, ])),
expected = c("variable", "n", "Mean")
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("A", "B"),
c("1", "2"),
c("0.34", "0.56"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("A summary table with statistics in a separated column is correctly exported to flextable", {
summaryTable <- data.frame(
variable = c("A", "B"),
n = c("1", "2"),
Mean = c("0.34", "0.56"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = "variable",
statsVar = c("n", "Mean"),
statsLayout = "rowInSepCol"
)
expect_identical(
object = unname(unlist(ft$header$dataset[1:2])),
expected = c("variable", "Statistic")
)
expect_equal(
object = as.data.frame(sapply(ft$body$dataset, as.character)),
expected = data.frame(
c("A", "A", "B", "B"),
c("n", "Mean", "n", "Mean"),
c("1", "0.34", "2", "0.56")
),
check.attributes = FALSE
)
})
test_that("The label for the statistic value is correctly set in a flextable summary table", {
summaryTable <- data.frame(
variable = c("A", "B"),
Statistic = c("1", "2"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable, rowVar = "variable",
statsVar = "Statistic",
statsValueLab = "Number of subjects"
)
expect_match(
object = unname(unlist(ft$header$dataset[, 2])),
regexp = "Number of subjects.*"
)
})
test_that("An error is generated if the label for the statistic value is set to the default name in a flextable summary table", {
summaryTable <- data.frame(
variable = c("A", "B"),
Statistic = c("1", "2"),
stringsAsFactors = FALSE
)
expect_error(
exportSummaryStatisticsTable(summaryTable, rowVar = "variable",
statsVar = "Statistic", statsValueLab = "Statistic"
),
"'statsValueLab' should be different than 'Statistic'."
)
})
test_that("A summary table with one statistic is correctly exported to flextable with the statistic name", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
variable = c("a", "b", "a", "b"),
n = c("1", "2", "3", "4"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "variable", colVar = "TRT",
statsVar = "n",
statsLabInclude = TRUE
)
expect_identical(
object = ft$body$dataset[, 1],
expected = c("a", "n", "b", "n")
)
})
test_that("A summary table with one statistic is correctly exported to flextable without the statistic name", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
variable = c("a", "b", "a", "b"),
n = c("1", "2", "3", "4"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "variable", colVar = "TRT",
statsVar = "n",
statsLabInclude = FALSE
)
expect_identical(
object = ft$body$dataset[, 1],
expected = c("a", "b")
)
})
test_that("A warning is generated if a flextable summary table contain multiple statistics but the names are specified to be not included", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
variable = c("a", "b", "a", "b"),
n = c("1", "2", "3", "4"),
Mean = c("0.34", "0.56"),
stringsAsFactors = FALSE
)
expect_warning(
exportSummaryStatisticsTable(
summaryTable,
rowVar = "variable", colVar = "TRT",
statsVar = c("n", "Mean"),
statsLabInclude = FALSE
),
"Statistic label is included.*because more than one statistic variable.*"
)
})
test_that("A placeholder for empty value is correctly included by default in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = c("a", "b", "a", "b"),
n = c("1", "2", "3", NA_character_),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "PARAM", colVar = "TRT",
statsVar = "n"
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("a", "b"),
c("1", "2"),
c("3", "-"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("A specified placeholder for empty value is correctly included in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
PARAM = c("a", "b", "a", "b"),
n = c("1", "2", "3", NA_character_),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "PARAM", colVar = "TRT",
statsVar = "n",
emptyValue = "0"
)
expect_equal(
object = ft$body$dataset,
expected = data.frame(
c("a", "b"),
c("1", "2"),
c("3", "0"),
stringsAsFactors = FALSE
),
check.attributes = FALSE
)
})
test_that("A list of summary tables is correctly exported to flextable", {
summaryTables <- list(
`PARAM 2` = data.frame(n = 10),
`PARAM 1` = data.frame(n = 2)
)
fts <- exportSummaryStatisticsTable(
summaryTables,
outputType = "flextable"
)
expect_type(fts, "list")
expect_named(fts, names(summaryTables))
for(group in names(summaryTables)){
expect_identical({
ft <- exportSummaryStatisticsTable(summaryTables[[!!group]])
ft$body$dataset
},
expected = fts[[!!group]]$body$dataset
)
expect_equal(fts[[!!group]]$header$dataset[1, ], !!group)
}
})
test_that("A list of summary tables with different titles is correctly exported to flextable", {
summaryTables <- list(
`PAR2` = data.frame(n = 10),
`PAR1` = data.frame(n = 2)
)
titles <- c("PARAMETER 2", "PARAMETER 1")
fts <- exportSummaryStatisticsTable(
summaryTables,
outputType = "flextable",
title = titles
)
for(i in seq_along(summaryTables)){
expect_equal(
fts[[!!i]]$header$dataset[1, 1],
titles[!!i]
)
}
})
test_that("A warning is generated if the variable group with totals is not formatted correctly in a flextable summary table", {
summaryTable <- data.frame(
variable = factor(c("RACE", "SEX", "SEX", "SEX")),
variableGroup = factor(
c("White", "Female", "Male", "Total"),
levels = c("White", "Female", "Male", "Total")
),
n = c("9", "3", "7", "10")
)
expect_warning(
exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = "n",
rowVarTotalInclude = "variableGroup"
),
"variable.*with total.*should be formatted.*with 'Total' as the first level"
)
})
test_that("A flextable summary table is correctly formatted if totals are included only for a subset of the categorical variables", {
summaryTable <- data.frame(
variable = factor(c("RACE", "SEX", "SEX", "SEX")),
variableGroup = factor(
c("White", "Female", "Male", "Total"),
levels = c("Total", "White", "Female", "Male")
),
n = c("9", "3", "7", "10")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = "n",
rowVarTotalInclude = "variableGroup"
)
refData <- data.frame(
c("RACE", "White", "SEX", "Female", "Male"),
c(NA_character_, "9", "10", "3", "7"),
stringsAsFactors = FALSE
)
expect_equal(
unname(ft$body$dataset),
unname(refData),
check.attributes = FALSE
)
})
test_that("A flextable summary table is correctly formatted if totals are included only for a subset of the categorical variables in separated rows", {
summaryTable <- data.frame(
variable = factor(c("RACE", "SEX", "SEX", "SEX")),
variableGroup = factor(
c("White", "Female", "Male", "Total"),
levels = c("Total", "White", "Female", "Male")
),
n = c("9", "3", "7", "10")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = "n",
rowVarTotalInclude = "variableGroup",
rowVarTotalInSepRow = "variableGroup"
)
refData <- data.frame(
c("RACE White", "SEX", "Total", "Female", "Male"),
c("9", NA_character_, "10", "3", "7"),
stringsAsFactors = FALSE
)
expect_equal(
unname(ft$body$dataset),
unname(refData),
check.attributes = FALSE
)
})
test_that("Missing values in nested row variables are correctly represented in a flextable summary table", {
summaryTable <- data.frame(
variable = factor(c("SEX", "SEX", "SEX")),
variableGroup = factor(
c(NA_character_, "Male", "Female"),
levels = c("Male", "Female")
),
n = c("3", "7", "10")
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = "n"
)
refData <- data.frame(
c("SEX", "Male", "Female", NA_character_),
c(NA_character_, "7", "10", "3"),
stringsAsFactors = FALSE
)
expect_equal(
unname(ft$body$dataset),
unname(refData),
check.attributes = FALSE
)
})
test_that("A summary table with only one element in nested row variable in a row variable is correctly exported to flextable", {
summaryTable <- data.frame(
variable = "SEX",
variableGroup = factor("Total"),
n = "10"
)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
rowVar = c("variable", "variableGroup"),
statsVar = "n",
rowVarTotalInclude = "variableGroup"
)
expect_equal(
unname(ft$body$dataset),
unname(data.frame("SEX", "10", stringsAsFactors = FALSE)),
check.attributes = FALSE
)
})
test_that("Page dimension are correctly set in a flextable summary table", {
summaryTable <- data.frame(n = 10)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
pageDim = c(10, 3),
margin = 0
)
expect_equal(
object = ft$body$colwidths,
expected = 10
)
})
test_that("Margins are correctly set in a flextable summary table", {
summaryTable <- data.frame(n = 10)
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
pageDim = c(10, 3),
margin = 1
)
expect_equal(
object = ft$body$colwidths,
expected = 8
)
})
test_that("A summary table is correctly exported to flextable in landscape mode", {
summaryTable <- data.frame(n = 10)
ftLandscape <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
landscape = TRUE
)
ftPortrait <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
landscape = FALSE
)
for(part in c("body", "header", "footer"))
expect_gt(
object = ftLandscape[[!!part]]$colwidths,
expected = ftPortrait[[!!part]]$colwidths
)
})
test_that("Multiple titles are correctly included in a flextable summary table", {
summaryTable <- data.frame(n = 10)
titles <- c("Title A", "Title B")
expect_identical(
object = {
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
title = titles
)
ft$header$dataset[seq_along(titles), 1]
}, titles
)
})
test_that("Footers are correctly included in a flextable summary table", {
summaryTable <- data.frame(n = 10)
footers <- c("Explanation 1", "Explanation 2")
ft <- exportSummaryStatisticsTable(
summaryTable = summaryTable,
footer = footers
)
expect_identical(
object = ft$footer$dataset[seq_along(footers), 1],
expected = footers
)
})
test_that("Colors are correctly set in a flextable summary table", {
summaryTable <- data.frame(n = 10)
	# arbitrary but valid hex colours; the test only checks that each entry is applied
	colorTable <- c(
		header = "#FFFFFF", headerBackground = "#336699",
		body = "#000000", bodyBackground = "#FFFFFF",
		footer = "#666666", footerBackground = "#D9D9D9",
		line = "#336699"
	)
expect_silent(
ft <- exportSummaryStatisticsTable(
summaryTable,
colorTable = colorTable,
footer = "test"
)
)
expect_setequal(ft$body$styles$text$color$data, colorTable["body"])
expect_setequal(ft$body$styles$cells$background.color$data, colorTable["bodyBackground"])
expect_setequal(ft$header$styles$text$color$data, colorTable["header"])
expect_setequal(ft$header$styles$cells$background.color$data, colorTable["headerBackground"])
expect_setequal(ft$footer$styles$text$color$data, colorTable["footer"])
expect_setequal(ft$footer$styles$cells$background.color$data, colorTable["footerBackground"])
expect_setequal(ft$body$styles$cells$border.color.bottom$data, colorTable["line"])
})
test_that("Fontsize is correctly set in a flextable summary table", {
summaryTable <- data.frame(n = 10)
ft <- exportSummaryStatisticsTable(
summaryTable,
fontsize = 25
)
expect_identical(
object = c(ft$body$styles$text$font.size$data),
expected = 25
)
})
test_that("Fontname is correctly set in a flextable summary table", {
summaryTable <- data.frame(n = 10)
ft <- exportSummaryStatisticsTable(
summaryTable,
fontname = "Arial"
)
expect_identical(
object = c(ft$body$styles$text$font.family$data),
expected = "Arial"
)
})
test_that("Vertical lines are not included in a flextable summary table when requested", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
AVISIT = c("a", "b", "a", "b"),
n = c("1", "2", "3", "4"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
colVar = c("TRT", "AVISIT"),
statsVar = "n",
vline = "none"
)
expect_setequal(
object = ft$body$styles$cells$border.width.left$data,
expected = 0
)
})
test_that("Vertical lines are correctly set in a flextable summary table", {
summaryTable <- data.frame(
TRT = c("A", "A", "B", "B"),
AVISIT = c("a", "b", "a", "b"),
n = c("1", "2", "3", "4"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
colVar = c("TRT", "AVISIT"),
statsVar = "n",
vline = "auto"
)
expect_setequal(
object = ft$body$styles$cells$border.width.left$data[, c(1, 3)],
expected = 1
)
})
test_that("Horizontal lines are not included in a flextable summary table when requested", {
summaryTable <- data.frame(
PARAM = c("a", "b"),
n = c("1", "2"),
Mean = c("1", "2"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "PARAM",
statsVar = c("n", "Mean"),
hline = "none"
)
expect_setequal(
object = ft$body$styles$cells$border.width.top$data,
expected = 0
)
})
test_that("Horizontal lines are correctly set in a flextable summary table", {
summaryTable <- data.frame(
PARAM = c("a", "b"),
n = c("1", "2"),
Mean = c("1", "2"),
stringsAsFactors = FALSE
)
ft <- exportSummaryStatisticsTable(
summaryTable,
rowVar = "PARAM",
statsVar = c("n", "Mean"),
hline = "auto"
)
expect_setequal(
object = ft$body$styles$cells$border.width.top$data[4, ],
expected = 1
)
})
test_that("A warning is generated if no data remain after filtering of the column totals in a flextable summary table", {
data <- data.frame(
isTotal = rep(TRUE, 2),
n = c(1, 2)
)
expect_warning(
exportSummaryStatisticsTable(data),
regexp = "No data remain after filtering of total rows."
)
})
test_that("An error is generated if a flextable summary table contains multiple values for the column total but no column variables are specified", {
data <- data.frame(
isTotal = c(FALSE, TRUE, TRUE),
statN = c(1, 2, 3)
)
expect_error(
exportSummaryStatisticsTable(data),
regexp = "Multiple values for the header total .*"
)
})
test_that("A superscript is correctly formatted in a flextable summary table", {
xSps <- "<0.001^{*}"
data <- data.frame(
pValue = c("0.05", xSps, "1", "0.89"),
TRT = rep(c("A", "B"), each = 2),
PARAM = rep(c("Actual Value", "Change from Baseline"), times = 2)
)
ft <- exportSummaryStatisticsTable(
data,
rowVar = "PARAM", colVar = "TRT",
statsVar = "pValue"
)
idxSps <- which(ft$body$dataset == xSps)
ftBodyCnt <- ft$body$content$content$data
cntDataSps <- ftBodyCnt[idxSps][[1]]
expect_equal(nrow(cntDataSps), 2)
expect_equal(cntDataSps[, "txt"], c("<0.001", "*"))
expect_equal(cntDataSps[, "vertical.align"], c(NA_character_, "superscript"))
alignDataOther <- unlist(lapply(ftBodyCnt[-idxSps], "[", "vertical.align"))
expect_setequal(alignDataOther, NA_character_)
})
test_that("A subscript is correctly formatted in a flextable summary table", {
xSbs <- "<0.001_{(significative)}"
data <- data.frame(
pValue = c("0.05", xSbs, "1", "0.89"),
TRT = rep(c("A", "B"), each = 2),
PARAM = rep(c("Actual Value", "Change from Baseline"), times = 2)
)
ft <- exportSummaryStatisticsTable(
data,
rowVar = "PARAM", colVar = "TRT",
statsVar = "pValue"
)
idxSbs <- which(ft$body$dataset == xSbs)
ftBodyCnt <- ft$body$content$content$data
cntDataSbs <- ftBodyCnt[idxSbs][[1]]
expect_equal(nrow(cntDataSbs), 2)
expect_equal(cntDataSbs[, "txt"], c("<0.001", "(significative)"))
expect_equal(cntDataSbs[, "vertical.align"], c(NA_character_, "subscript"))
alignDataOther <- unlist(lapply(ftBodyCnt[-idxSbs], "[", "vertical.align"))
expect_setequal(alignDataOther, NA_character_)
})
test_that("A cell is correctly formatted in bold in a flextable summary table", {
xBold <- "bold{<0.001}"
data <- data.frame(
pValue = c("0.05", xBold, "1", "0.89"),
TRT = rep(c("A", "B"), each = 2),
PARAM = rep(c("Actual Value", "Change from Baseline"), times = 2)
)
ft <- exportSummaryStatisticsTable(
data,
rowVar = "PARAM", colVar = "TRT",
statsVar = "pValue"
)
isBold <- apply(
ft$body$content$content$data,
2, function(x) sapply(x, `[[`, "bold")
)
idxBold <- which(ft$body$dataset == xBold)
expect_setequal(isBold[idxBold], TRUE)
expect_setequal(isBold[-idxBold], NA)
})
test_that("A summary table is correctly exported to a docx file", {
data <- data.frame(
pValue = c("0.05", "<0.001", "1", "0.89"),
TRT = rep(c("A", "B"), each = 2),
PARAM = rep(c("Actual Value", "Change from Baseline"), times = 2)
)
file <- tempfile(pattern = "table", fileext = ".docx")
expect_silent(
ft <- exportSummaryStatisticsTable(
data,
rowVar = "PARAM", colVar = "TRT",
statsVar = "pValue",
file = file
)
)
expect_true(file.exists(file))
doc <- officer::read_docx(file)
	docTable <- subset(officer::docx_summary(doc), `content_type` == "table cell")
docTableData <- with(
subset(docTable, !`is_header`),
tapply(text, list(`row_id`, `cell_id`), I)
)
docTableHeader <- with(
subset(docTable, is_header),
tapply(text, `cell_id`, I)
)
colnames(docTableData) <- docTableHeader
dataRef <- cbind(
PARAM = c("Actual Value", "Change from Baseline"),
A = c("0.05", "<0.001"),
B = c("1", "0.89")
)
expect_equal(object = docTableData, expected = dataRef, check.attributes = FALSE)
})
test_that("A summary table is correctly exported to a docx file in landscape format", {
file <- tempfile(pattern = "table", fileext = ".docx")
ft <- exportSummaryStatisticsTable(
summaryTable = data.frame(n = 10),
file = file,
landscape = TRUE
)
doc <- officer::read_docx(file)
docCnt <- officer::docx_body_xml(doc)
expect_match(
object = as.character(docCnt),
regexp = 'orient=\"landscape\"',
fixed = TRUE
)
})
test_that("A list of summary tables is correctly exported to docx files", {
summaryTables <- list(
`PARAM 2` = data.frame(n = 10),
`PARAM 1` = data.frame(n = 2)
)
file <- tempfile(pattern = "table", fileext = ".docx")
dts <- exportSummaryStatisticsTable(
summaryTables,
file = file,
colHeaderTotalInclude = FALSE
)
fileTableOutput <- paste0(
tools::file_path_sans_ext(file),
"_", c("1", "2"), ".docx"
)
expect_true(all(file.exists(fileTableOutput)))
for(iTable in seq_along(summaryTables)){
expect_equal(
object = {
doc <- officer::read_docx(fileTableOutput[!!iTable])
docTables <- subset(officer::docx_summary(doc), `content_type` == "table cell")
docTables$text
},
expected = {
table <- summaryTables[[!!iTable]]
tableCnt <- c(names(summaryTables)[!!iTable], colnames(table), unlist(table))
unname(tableCnt)
}
)
}
})
test_that("A list of summary tables is correctly exported to a single docx file", {
summaryTables <- list(
`PARAM 2` = flextable(data.frame(n = "10")),
`PARAM 1` = flextable(data.frame(n = "2"))
)
file <- tempfile(pattern = "table", fileext = ".docx")
exportFlextableToDocx(object = summaryTables, file = file)
doc <- officer::read_docx(file)
docTables <- subset(officer::docx_summary(doc), `content_type` == "table cell")
expect_equal(docTables$text, c("n", "10", "n", "2"))
})
test_that("A flextable summary table is correctly styled for a report", {
summaryTable <- data.frame(n = 9)
expect_identical(
object = exportSummaryStatisticsTable(
summaryTable = summaryTable,
style = "report"
),
expected = exportSummaryStatisticsTable(
summaryTable = summaryTable,
fontname = "Times", fontsize = 8,
landscape = FALSE,
pageDim = getDimPage(style = "report", margin = 0),
colorTable = getColorPaletteTable(style = "report")
)
)
})
test_that("A flextable summary table is correctly styled for a presentation", {
summaryTable <- data.frame(n = 9)
expect_identical(
object = exportSummaryStatisticsTable(
summaryTable = summaryTable,
style = "presentation"
),
expected = exportSummaryStatisticsTable(
summaryTable = summaryTable,
fontname = "Tahoma", fontsize = 10,
landscape = TRUE,
pageDim = getDimPage(
style = "presentation", margin = 0,
landscape = FALSE
),
colorTable = getColorPaletteTable(style = "presentation")
)
)
}) |
SODAS.to.RSDA <- function(XMLPath, labels = TRUE) {
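  # Parses a SODAS ASSO XML export into a symbolic data table: individuals become
  # rows and each <stvar> is converted according to its type (inter-cont ->
  # interval "$I", continue -> continuous "$C", nominal / mult_nominal ->
  # set-valued "$S", mult_nominal_Modif with type "proba" -> modal "$M"),
  # then the result is wrapped with newSobject() and converted to the v3 format.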
parsed.xml <- XML::xmlInternalTreeParse(XMLPath)
containsNode <- XML::getNodeSet(parsed.xml, "/assofile/contains")
if (length(containsNode) == 0) {
stop("No 'contains' tag is present in the XML file")
}
containsNode <- containsNode[[1]]
if (XML::xmlGetAttr(containsNode, "INDIVIDUALS") != "YES" || XML::xmlGetAttr(
containsNode,
"VARIABLES"
) != "YES" || XML::xmlGetAttr(containsNode, "RECTANGLE_MATRIX") != "YES") {
stop("Insufficient data in XML file")
}
if (labels) {
sym.obj.names <- XML::xpathSApply(
parsed.xml, "/assofile/individus/stindiv/label",
XML::xmlValue
)
variables.names <- XML::xpathSApply(
parsed.xml, "/assofile/variables/stvar/ident/label",
XML::xmlValue
)
} else {
sym.obj.names <- XML::xpathSApply(
parsed.xml, "/assofile/individus/stindiv/name",
XML::xmlValue
)
variables.names <- XML::xpathSApply(
parsed.xml, "/assofile/variables/stvar/ident/name",
XML::xmlValue
)
}
variables.types <- XML::xpathSApply(parsed.xml, "/assofile/variables/stvar/*[2]", XML::xmlName)
result <- data.frame(row.names = sym.obj.names)
number.of.rows <- nrow(result)
for (i in 1:length(variables.types)) {
cat(paste0("Processing variable ", i, ": ", variables.names[[i]], "\n"))
switch(variables.types[[i]], `inter-cont` = {
result <- cbind(result, process.inter.cont.variable(
number.of.rows, parsed.xml,
i, variables.names[[i]]
))
}, continue = {
result <- cbind(result, process.continue.variable(
number.of.rows, parsed.xml,
i, variables.names[[i]]
))
}, nominal = {
result <- cbind(result, process.nominal.variable(
labels, number.of.rows,
parsed.xml, i, variables.names[[i]]
))
}, mult_nominal = {
result <- cbind(result, process.mult.nominal.variable(
labels, number.of.rows,
parsed.xml, i, variables.names[[i]]
))
}, mult_nominal_Modif = {
type.modif <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
i, "]/mult_nominal_Modif/type_modif"
), XML::xmlValue)
if (type.modif != "proba") {
cat(
paste0("Unsupported type.modif in mult_nominal_Modif variable: "),
type.modif, "\n"
)
} else {
result <- cbind(result, process.mult.nominal.modif.variable(
labels,
number.of.rows, parsed.xml, i, variables.names[[i]]
))
}
}, cat(paste0("Variable type not supported:"), variables.types[[i]], "\n"))
}
out <- newSobject(result)
class(out) <- "sym.data.table"
out <- to.v3(out)
return(out)
}
process.nominal.variable <- function(labels, number.of.rows, parsed.xml, variable.index,
variable.name) {
aux <- list()
aux[[1]] <- rep("$S", number.of.rows)
if (labels) {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/nominal/nominal-desc/list-nom/label"
), XML::xmlValue)
} else {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/nominal/nominal-desc/list-nom/name"
), XML::xmlValue)
}
aux[[2]] <- rep(length(categories), number.of.rows)
nodes <- XML::getNodeSet(parsed.xml, paste0(
"/assofile/indiv_mat/ligmat/valmat[", variable.index,
"]"
))
after.evaluator <- function(node) {
if (length(node["val_nomina"]) == 0) {
return(rep(NA, length(categories)))
} else {
category <- as.numeric(XML::xmlValue(node))
return(append(rep(0, length(categories) - 1), 1, category - 1))
}
}
node.categories <- t(XML::xmlSApply(nodes, after.evaluator))
aux <- data.frame(c(aux, as.data.frame(node.categories)))
colnames(aux) <- c("$S", variable.name, categories)
return(aux)
}
process.mult.nominal.variable <- function(labels, number.of.rows, parsed.xml, variable.index,
variable.name) {
aux <- list()
aux[[1]] <- rep("$S", number.of.rows)
if (labels) {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/mult_nominal/nominal-desc/list-nom/label"
    ), XML::xmlValue)
} else {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/mult_nominal/nominal-desc/list-nom/name"
    ), XML::xmlValue)
}
aux[[2]] <- rep(length(categories), number.of.rows)
nodes <- XML::getNodeSet(parsed.xml, paste0(
"/assofile/indiv_mat/ligmat/valmat[", variable.index,
"]"
))
after.evaluator <- function(node) {
if (length(node["val_modal"]) == 0) {
return(NA)
} else {
present.mods <- as.numeric(XML::xmlSApply(node, XML::xmlValue))
modals.vector <- rep(0, length(categories) - length(present.mods))
for (present.mod in present.mods) {
modals.vector <- append(modals.vector, 1, present.mod - 1)
}
return(modals.vector)
}
}
node.categories <- t(XML::xmlSApply(nodes, after.evaluator))
aux <- data.frame(c(aux, as.data.frame(node.categories)))
colnames(aux) <- c("$S", variable.name, categories)
return(aux)
}
process.mult.nominal.modif.variable <- function(labels, number.of.rows, parsed.xml,
variable.index, variable.name) {
aux <- list()
aux[[1]] <- rep("$M", number.of.rows)
if (labels) {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/mult_nominal_Modif/nominal-desc/list-nom/label"
), XML::xmlValue)
} else {
categories <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/variables/stvar[",
variable.index, "]/mult_nominal_Modif/nominal-desc/list-nom/name"
), XML::xmlValue)
}
aux[[2]] <- rep(length(categories), number.of.rows)
nodes <- XML::getNodeSet(parsed.xml, paste0(
"/assofile/indiv_mat/ligmat/valmat[", variable.index,
"]"
))
get.distributions <- function(node) {
if (length(node["val_list_modal"]) == 0) {
return(rep(NA, length(categories)))
} else {
moda.nodes <- as.numeric(sapply(
XML::xmlSApply(node, function(x) x["no_moda"]),
XML::xmlValue
))
frequencies <- as.numeric(sapply(
XML::xmlSApply(node, function(x) x["frequency"]),
XML::xmlValue
))
missing.categories.indexes <- setdiff(1:length(categories), moda.nodes)
for (missing.cat.index in missing.categories.indexes) {
frequencies <- append(frequencies, 0, after = missing.cat.index - 1)
}
return(frequencies)
}
}
all.frequencies <- t(round(sapply(nodes, get.distributions), 3))
aux <- data.frame(c(aux, as.data.frame(all.frequencies)))
colnames(aux) <- c("$M", variable.name, categories)
return(aux)
}
process.inter.cont.variable <- function(number.of.rows, parsed.xml, variable.index,
variable.name) {
aux <- list()
aux[[1]] <- rep("$I", number.of.rows)
after.evaluator <- function(node, element.to.retrieve) {
if (length(node["val_interv"]) == 0) {
return(NA)
} else {
return(as.numeric(XML::xmlValue(XML::xmlElementsByTagName(node[[1]], element.to.retrieve)[[1]])))
}
}
nodes <- XML::getNodeSet(parsed.xml, paste0(
"/assofile/indiv_mat/ligmat/valmat[", variable.index,
"]"
))
aux[[2]] <- sapply(nodes, after.evaluator, element.to.retrieve = "pmin")
aux[[3]] <- sapply(nodes, after.evaluator, element.to.retrieve = "pmax")
aux <- data.frame(aux)
colnames(aux) <- c("$I", variable.name, variable.name)
return(aux)
}
process.continue.variable <- function(number.of.rows, parsed.xml, variable.index, variable.name) {
aux <- list()
aux[[1]] <- rep("$C", number.of.rows)
after.evaluator <- function(node) {
if (length(node["val_conti"]) == 0) {
return(NA)
} else {
return(as.numeric(XML::xmlValue(node[[1]])))
}
}
aux[[2]] <- XML::xpathSApply(parsed.xml, paste0(
"/assofile/indiv_mat/ligmat/valmat[",
variable.index, "]"
), after.evaluator)
aux <- data.frame(aux)
colnames(aux) <- c("$C", variable.name)
return(aux)
} |
"as.mmglm0" <-
function (object)
{
x <- list(Pi=object$Pi, delta=object$delta,
family=object$distn[2],
link=object$distn[3],
beta=rbind(object$pm$beta0, object$pm$beta1),
sigma=object$pm$sigma,
nonstat=object$nonstat)
x$x <- object$glmdata
x$glmformula <- object$glmformula
class(x) <- c("mmglm0")
return(x)
} |
varim <-
function(A){
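  # Raw varimax rotation of the loading matrix A: pairs of columns are rotated
  # iteratively until the varimax criterion
  #   f = sum((B^2 - columnwise mean of B^2)^2)
  # no longer increases (relative tolerance conv). Returns the rotated loadings
  # B, the accumulated rotation matrix T and the final criterion value f.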
conv=.000001
T=matrix(0,ncol(A),ncol(A))
r=ncol(A)
m=nrow(A)
for (i in 1:r){
T[i,i]=1
}
B=A
f=sum(((A*A)-matrix(1,m,1)%*%colSums(A*A)/m)^2)
fold=f-2*conv*f
if (f==0){
fold=-conv
}
iter=0
while ((f-fold)>(f*conv)){
fold=f
iter=iter+1
for (i in 1:(r-1)){
for (j in (i+1):r){
x=B[,i]
y=B[,j]
xx=T[,i]
yy=T[,j]
u=x^2-y^2
v=2*x*y
u=u-matrix(1,m,1)*sum(u)/m
v=v-matrix(1,m,1)*sum(v)/m
a=2*sum(u*v)
b=sum(u^2)-sum(v^2)
c=(a^2+b^2)^.5
if (a>=0){
sign=1
}
if (a<0){
sign=-1
}
o=1
if (c<.00000000001){
cos=1
sin=0
o=0
}
if (c>=.00000000001){
vvv=-sign*((b+c)/(2*c))^.5
sin=(.5-.5*vvv)^.5
cos=(.5+.5*vvv)^.5
}
v=cos*x-sin*y
w=cos*y+sin*x
vv=cos*xx-sin*yy
ww=cos*yy+sin*xx
if (o==1){
if (vvv>=0){
B[,i]=v
B[,j]=w
T[,i]=vv
T[,j]=ww
}
if (vvv<0){
B[,i]=w
B[,j]=v
T[,i]=ww
T[,j]=vv
}
}
}
}
f=sum(((B*B)-matrix(1,nrow(B),1)%*%colSums(B*B)/nrow(B))^2)
}
out=list()
out$f=f
out$B=B
out$T=T
return(out)
} |
ftu <- function(tu, data){
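  # Empirical joint distribution of (time, marks): probability masses are the
  # Kaplan-Meier jumps at the uncensored times (data[, 2] == 1); the value at
  # each row of `tu` is the total mass with time <= tu[, 1] and every mark
  # column of `data` below or equal to the corresponding threshold in `tu`.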
ord <- order(data[, 1], -data[, 2])
size <- length(data[, 1])
prob <- numeric(size)
surv <- 1
for (i in 1:size){
if (data[ord[i], 2]==1){ prob[ord[i]] <- surv / (size - i + 1) }
surv <- surv - prob[ord[i]]
}
cdf <- sapply(1:NROW(tu), function(j){
logical.mark <- sapply(2:NCOL(tu), function(col){ data[, col + 1] <= tu[j, col] })
return(sum(prob[data[, 1] <= tu[j, 1] & Reduce("&", as.list(as.data.frame(logical.mark)))], na.rm=TRUE))
})
return(cdf)
}
g <- function(tu, data){
if (is.vector(tu)) tu <- t(as.matrix(tu))
tu <- na.omit(tu)
return(abs(ftu(tu, data) - ftu(cbind(tu[, 1], matrix(Inf, NROW(tu), NCOL(tu) - 1)), data) * ftu(cbind(Inf, tu[, -1]), data[data[, 2]==1, ])))
}
testIndepTimeMark <- function(data, iter=1000){
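  # Bootstrap test of independence between failure time and mark(s): the test
  # statistic D is the largest absolute difference between the KM-based joint
  # distribution of (time, mark) and the product of the time margin and the
  # empirical mark distribution among uncensored observations; its null
  # distribution is approximated by resampling (time, status) pairs and,
  # independently, the observed marks. The returned value is the bootstrap p-value.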
D <- max(g(data[data[, 2]==1, -2], data))
n <- NROW(data)
resamp <- matrix(sample(1:n, n*iter, replace=TRUE), n, iter)
obs.mark <- data[data[, 2]==1, -(1:2)]
if (is.vector(obs.mark)){ obs.mark <- as.matrix(obs.mark) }
bD <- sapply(1:iter, function(j){
bdata <- data[resamp[, j], 1:2]
if (sum(bdata[, 2]) > 1){
m <- sum(bdata[, 2])
resamp.mark <- sample(NROW(obs.mark), m, replace=TRUE)
bmark <- as.data.frame(matrix(0, n, NCOL(data) - 2))
bmark[bdata[, 2]==1, ] <- obs.mark[resamp.mark, ]
bdata <- cbind(bdata, bmark)
tstat <- max(g(bdata[bdata[, 2]==1, -2], bdata))
} else {
tstat <- NULL
}
return(tstat)
})
if (is.list(bD)){ bD <- do.call("c", lapply(bD, "[", 1)) }
return(mean(bD >= D))
} |
getExif <- function(api_key, photo_id){
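  # Queries the Flickr REST API (method flickr.photos.getExif) for the EXIF
  # metadata of a single photo and flattens the JSON response into a data frame.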
url <- paste0("https://api.flickr.com/services/rest/?method=flickr.photos.getExif&api_key=", api_key, "&photo_id=",
photo_id, "&format=json&nojsoncallback=1")
raw_data <- RCurl::getURL(url, ssl.verifypeer = FALSE)
data <- jsonlite::fromJSON(raw_data)
as.data.frame(data)
} |
if(interactive()) library(testthat)
test_that("widths work", {
ff <- tempfile()
cat(file = ff, "123456", "987654", sep="\n")
rest <- structure(list(
V1 = c(1, 9),
V2 = c(23, 87),
V3 = c(456, 654)),
.Names = c("V1", "V2", "V3"),
row.names = c(NA, -2L),
class="data.frame"
)
test <- fast.read.fwf(ff, widths = c(1, 2, 3))
expect_equal(rest, test)
expect_s3_class(test, "data.frame")
unlink(ff)
})
test_that("colclasses work",{
ff <- tempfile()
cat(file=ff, "abc123def1010", "ghi456jkl1100", sep="\n")
test <- fast.read.fwf(file=ff, widths=c(3, 3, 3, 1, 1, 1, 1),
colClasses=c("character", "numeric", "character", rep("logical", 4)))
rest <- structure(list(
V1 = c("abc", "ghi"),
V2 = c(123, 456),
V3 = c("def", "jkl"),
V4 = c(TRUE, TRUE),
V5 = c(FALSE, TRUE),
V6 = c(TRUE, FALSE),
V7 = c(FALSE, FALSE)),
.Names = c("V1", "V2", "V3", "V4", "V5", "V6", "V7"),
row.names = c(NA, -2L), class = "data.frame")
unlink(ff)
expect_equal(test, rest)
}) |
mapdeckGridDependency <- function() {
list(
createHtmlDependency(
name = "grid",
version = "1.0.0",
src = system.file("htmlwidgets/lib/grid", package = "mapdeck"),
script = c("grid.js"),
all_files = FALSE
)
)
}
add_grid <- function(
map,
data = get_map_data(map),
lon = NULL,
lat = NULL,
polyline = NULL,
cell_size = 1000,
extruded = TRUE,
elevation = NULL,
elevation_function = c("sum","mean","min","max"),
colour = NULL,
colour_function = c("sum","mean","min","max"),
elevation_scale = 1,
colour_range = NULL,
legend = FALSE,
legend_options = NULL,
auto_highlight = FALSE,
highlight_colour = "
layer_id = NULL,
update_view = TRUE,
focus_layer = FALSE,
digits = 6,
transitions = NULL,
brush_radius = NULL
) {
l <- list()
l[["lon"]] <- force( lon )
l[["lat"]] <- force( lat )
l[["polyline"]] <- force( polyline )
l[["elevation"]] <- force( elevation )
l[["colour"]] <- force( colour )
colour_function <- match.arg( colour_function )
colour_function <- toupper( colour_function )
elevation_function <- match.arg( elevation_function )
elevation_function <- toupper( elevation_function )
legend <- force( legend )
legend <- aggregation_legend( legend, legend_options )
use_weight <- FALSE
if(!is.null(elevation)) use_weight <- TRUE
use_colour <- FALSE
if(!is.null(colour)) use_colour <- TRUE
l <- resolve_data( data, l, c("POINT") )
bbox <- init_bbox()
update_view <- force( update_view )
focus_layer <- force( focus_layer )
if ( !is.null(l[["data"]]) ) {
data <- l[["data"]]
l[["data"]] <- NULL
}
if( !is.null(l[["bbox"]] ) ) {
bbox <- l[["bbox"]]
l[["bbox"]] <- NULL
}
checkNumeric(elevation_scale)
checkNumeric(cell_size)
if( is.null( colour_range ) ) {
colour_range <- colourvalues::colour_values(1:6, palette = "viridis")
}
if(length(colour_range) != 6)
stop("mapdeck - colour_range must have 6 hex colours")
checkHex(colour_range)
checkHexAlpha(highlight_colour)
layer_id <- layerId(layer_id, "grid")
map <- addDependency(map, mapdeckGridDependency())
tp <- l[["data_type"]]
l[["data_type"]] <- NULL
jsfunc <- "add_grid_geo"
if ( tp == "sf" ) {
geometry_column <- c( "geometry" )
shape <- rcpp_aggregate_geojson( data, l, geometry_column, digits, "grid" )
} else if ( tp == "df" ) {
geometry_column <- list( geometry = c("lon", "lat") )
shape <- rcpp_aggregate_geojson_df( data, l, geometry_column, digits, "grid" )
} else if ( tp == "sfencoded" ) {
geometry_column <- "polyline"
shape <- rcpp_aggregate_polyline( data, l, geometry_column, "grid" )
jsfunc <- "add_grid_polyline"
}
js_transitions <- resolve_transitions( transitions, "grid" )
invoke_method(
map, jsfunc, map_type( map ), shape[["data"]], layer_id, cell_size,
jsonify::to_json(extruded, unbox = TRUE), elevation_scale,
colour_range, auto_highlight, highlight_colour, bbox, update_view, focus_layer,
js_transitions, use_weight, use_colour, elevation_function, colour_function, legend,
brush_radius
)
}
clear_grid <- function( map, layer_id = NULL) {
layer_id <- layerId(layer_id, "grid")
invoke_method(map, "md_layer_clear", map_type( map ), layer_id, "grid" )
} |
library(testthat)
library(parsnip)
library(dplyr)
library(rlang)
context("changing arguments and engine")
test_that('pipe arguments', {
mod_1 <- rand_forest() %>%
set_args(mtry = 1)
expect_equal(
quo_get_expr(mod_1$args$mtry),
1
)
expect_equal(
quo_get_env(mod_1$args$mtry),
empty_env()
)
mod_2 <- rand_forest(mtry = 2) %>%
set_args(mtry = 1)
var_env <- rlang::current_env()
expect_equal(
quo_get_expr(mod_2$args$mtry),
1
)
expect_equal(
quo_get_env(mod_2$args$mtry),
empty_env()
)
expect_error(rand_forest() %>% set_args())
})
test_that('pipe engine', {
mod_1 <- rand_forest() %>%
set_mode("regression")
expect_equal(mod_1$mode, "regression")
expect_error(rand_forest() %>% set_mode())
expect_error(rand_forest() %>% set_mode(2))
expect_error(rand_forest() %>% set_mode("haberdashery"))
})
test_that("can't set a mode that isn't allowed by the model spec", {
expect_error(
set_mode(linear_reg(), "classification"),
"'classification' is not a known mode"
)
})
test_that("unavailable modes for an engine and vice-versa", {
expect_error(
decision_tree() %>%
set_mode("regression") %>%
set_engine("C5.0"),
"Available modes for engine C5"
)
expect_error(
decision_tree() %>%
set_engine("C5.0") %>%
set_mode("regression"),
"Available modes for engine C5"
)
expect_error(
decision_tree(engine = NULL) %>%
set_engine("C5.0") %>%
set_mode("regression"),
"Available modes for engine C5"
)
expect_error(
    decision_tree(engine = NULL) %>%
set_mode("regression") %>%
set_engine("C5.0"),
"Available modes for engine C5"
)
expect_error(
proportional_hazards() %>% set_mode("regression"),
"'regression' is not a known mode"
)
expect_error(
linear_reg() %>% set_mode(),
"Available modes for model type linear_reg"
)
expect_error(
linear_reg() %>% set_engine(),
"Missing engine"
)
expect_error(
proportional_hazards() %>% set_engine(),
"No known engines for"
)
}) |
optimal_rerandomization_exact = function(
W_base_object,
estimator = "linear",
q = 0.95,
skip_search_length = 1,
smoothing_degree = 1,
smoothing_span = 0.1,
z_sim_fun,
N_z = 1000,
dot_every_x_iters = 100
){
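	# For each candidate cutoff s along the base designs sorted by imbalance, the
	# q-quantile Q' of the simulated criterion t(z) %*% (G + 2/n * D) %*% z is
	# estimated from N_z draws of z via z_sim_fun; the cutoff minimizing Q'
	# (raw and loess-smoothed) defines the returned rerandomization set W_star.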
optimal_rerandomization_argument_checks(W_base_object, estimator, q)
n = W_base_object$n
X = W_base_object$X
W_base_sort = W_base_object$W_base_sort
max_designs = W_base_object$max_designs
imbalance_by_w_sorted = W_base_object$imbalance_by_w_sorted
if (estimator == "linear"){
Xt = t(X)
XtXinv = solve(Xt %*% X)
P = X %*% XtXinv %*% Xt
I = diag(n)
I_min_P = I - P
}
s_star = NULL
Q_star = Inf
Q_primes = array(NA, max_designs)
rel_mse_zs = matrix(NA, nrow = max_designs, ncol = N_z)
w_w_T_running_sum = matrix(0, n, n)
if (estimator == "linear"){
w_w_T_P_w_w_T_running_sum = matrix(0, n, n)
}
ss = seq(from = 1, to = max_designs, by = skip_search_length)
for (i in 1 : length(ss)){
s = ss[i]
if (!is.null(dot_every_x_iters)){
if (i %% dot_every_x_iters == 0){
cat(".")
}
}
w_s = W_base_sort[s, , drop = FALSE]
w_s_w_s_T = t(w_s) %*% w_s
w_w_T_running_sum = w_w_T_running_sum + w_s_w_s_T
Sigma_W = 1 / i * w_w_T_running_sum
if (estimator == "linear"){
w_w_T_P_w_w_T_running_sum = w_w_T_P_w_w_T_running_sum + w_s_w_s_T %*% P %*% w_s_w_s_T
D = 1 / i * w_w_T_P_w_w_T_running_sum
G = I_min_P %*% Sigma_W %*% I_min_P
for (n_z in 1 : N_z){
z = z_sim_fun()
rel_mse_zs[s, n_z] = t(z) %*% (G + 2 / n * D) %*% z
}
Q_primes[s] = quantile(rel_mse_zs[s, ], q)
}
if (Q_primes[s] < Q_star){
Q_star = Q_primes[s]
s_star = s
}
}
cat("\n")
smoothing_fit = loess(Q_primes ~ imbalance_by_w_sorted, degree = smoothing_degree, span = smoothing_span)
Q_primes_smoothed = predict(smoothing_fit, data.frame(X = imbalance_by_w_sorted))
s_star_smoothed = NULL
Q_star_smoothed = Inf
for (s in seq(from = 1, to = max_designs, by = skip_search_length)){
if (Q_primes_smoothed[s] < Q_star_smoothed){
Q_star_smoothed = Q_primes_smoothed[s]
s_star_smoothed = s
}
}
all_data_from_run = data.frame(
imbalance_by_w_sorted = imbalance_by_w_sorted,
Q_primes = Q_primes,
Q_primes_smoothed = Q_primes_smoothed
)
ll = list(
type = "exact",
q = q,
estimator = estimator,
z_sim_fun = z_sim_fun,
N_z = N_z,
W_base_object = W_base_object,
W_star = W_base_sort[1 : s_star, ],
W_star_size = s_star,
a_star = imbalance_by_w_sorted[s_star],
a_stars = imbalance_by_w_sorted[1 : s_star],
W_star_size_smoothed = s_star_smoothed,
a_star_smoothed = imbalance_by_w_sorted[s_star_smoothed],
a_stars_smoothed = imbalance_by_w_sorted[1 : s_star_smoothed],
all_data_from_run = all_data_from_run,
Q_star = Q_star,
Q_star_smoothed = Q_star_smoothed
)
class(ll) = "optimal_rerandomization_obj"
ll
} |
setup_LC_geometry <- function(horizon = list(PLC = 1, FLC = 0),
speed = 1, space.dim = 1, shape = "cone") {
if (is.null(horizon$FLC)) {
horizon$FLC <- 0
}
out <- list(horizon = horizon,
speed = speed,
space.dim = space.dim,
shape = shape)
out$coordinates <-
list(PLC = compute_LC_coordinates(horizon = horizon$PLC, speed = speed,
space.dim = space.dim, shape = shape,
type = "PLC"),
FLC = compute_LC_coordinates(horizon = horizon$FLC, speed = speed,
space.dim = space.dim, shape = shape,
type = "FLC"))
out$n.p <- nrow(out$coordinates$PLC)
out$n.f <- nrow(out$coordinates$FLC)
class(out) <- "LC"
return(out)
} |
pdfsq<-function(s, dist, p1, p2)
{
d.sample(s, dist, p1,p2)^2
} |
l_image_import_files <- function(paths) {
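    # Creates one Tk photo image per unique path and returns the image ids in
    # the order of `paths` (duplicated paths reuse the same image object).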
if (!.withTclImg) {
if (any(vapply(paths, function(p) {
!(toupper(tools::file_ext(p)) %in% c("PPM", "PGM", "PNG", "GIF"))
}, logical(1)))) {
stop("All file formats other than png require the IMG Tcl extension.")
}
}
unique_paths <- unique(paths)
unique_ids <- sapply(unique_paths, function(path) {
tclvalue(tkimage.create('photo', file = path))
})
ii <- match(paths,unique_paths)
return(unique_ids[ii])
} |
print.R0.sR <- function
(x,
...
)
{
if (class(x)!="R0.sR") {
stop("'x' must be of class 'R0.sR'")
}
if (exists("EG", where = x$estimates)) {
print(x$estimates$EG, ...)
}
if (exists("ML", where = x$estimates)) {
print(x$estimates$ML, ...)
}
if (exists("AR", where = x$estimates)) {
print(x$estimates$AR, ...)
}
if (exists("TD", where = x$estimates)) {
print(x$estimates$TD, ...)
}
if (exists("SB", where = x$estimates)) {
print(x$estimates$SB, ...)
}
} |
render_gt <- function(expr,
width = NULL,
height = NULL,
align = NULL,
env = parent.frame(),
quoted = FALSE,
outputArgs = list()) {
check_shiny()
func <-
shiny::installExprFunction(
expr = expr,
name = "func",
eval.env = env,
quoted = quoted
)
shiny::createRenderFunction(
func = func,
function(result, shinysession, name, ...) {
if (is.null(result)) {
return(NULL)
}
if (!inherits(result, "gt_tbl")) {
result <- result %>% gt()
}
result <-
result %>%
tab_options(
container.width = width,
container.height = height,
table.align = align
)
html_tbl <- as.tags.gt_tbl(result)
dependencies <-
lapply(
htmltools::resolveDependencies(htmltools::findDependencies(html_tbl)),
shiny::createWebDependency
)
names(dependencies) <- NULL
list(
html = htmltools::doRenderTags(html_tbl),
deps = dependencies
)
},
gt_output, outputArgs
)
}
gt_output <- function(outputId) {
check_shiny()
shiny::htmlOutput(outputId)
}
check_shiny <- function() {
if (!requireNamespace("shiny", quietly = TRUE)) {
stop("Please install the *shiny* package before using this function:\n",
"* Use `install.packages(\"shiny\")`",
call. = FALSE)
}
} |
test_that("proj_set() errors on non-existent path", {
expect_usethis_error(
proj_set("abcedefgihklmnopqrstuv"),
"does not exist"
)
})
test_that("proj_set() errors if no criteria are fulfilled", {
tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project")
expect_usethis_error(
proj_set(tmpdir),
"does not appear to be inside a project or package"
)
})
test_that("proj_set() can be forced, even if no criteria are fulfilled", {
tmpdir <- withr::local_tempdir(pattern = "i-am-not-a-project")
expect_error_free(old <- proj_set(tmpdir, force = TRUE))
withr::defer(proj_set(old))
expect_identical(proj_get(), proj_path_prep(tmpdir))
})
test_that("is_package() detects package-hood", {
create_local_package()
expect_true(is_package())
create_local_project()
expect_false(is_package())
})
test_that("check_is_package() errors for non-package", {
create_local_project()
expect_usethis_error(check_is_package(), "not an R package")
})
test_that("check_is_package() can reveal who's asking", {
create_local_project()
expect_usethis_error(check_is_package("foo"), "foo")
})
test_that("proj_path() appends to the project path", {
create_local_project()
expect_equal(
proj_path("a", "b", "c"),
path(proj_get(), "a/b/c")
)
expect_identical(proj_path("a", "b", "c"), proj_path("a/b/c"))
})
test_that("proj_rel_path() returns path part below the project", {
create_local_project()
expect_equal(proj_rel_path(proj_path("a/b/c")), "a/b/c")
})
test_that("proj_rel_path() returns path 'as is' if not in project", {
create_local_project()
expect_identical(proj_rel_path(path_temp()), path_temp())
})
test_that("proj_set() enforces proj path preparation policy", {
t <- withr::local_tempdir("proj-set-path-prep")
a <- path_real(dir_create(path(t, "a")))
b <- dir_create(path(a, "b"))
b2 <- link_create(b, path(a, "b2"))
d <- dir_create(path(b, "d"))
path_with_symlinks <- path(b2, "d")
expect_equal(path_rel(path_with_symlinks, a), path("b2/d"))
local_project(path_with_symlinks, force = TRUE)
expect_equal(path_rel(proj_get(), a), path("b/d"))
file_create(path(b, "d", ".here"))
proj_set(path_with_symlinks, force = FALSE)
expect_equal(path_rel(proj_get(), a), path("b/d"))
})
test_that("proj_path_prep() passes NULL through", {
expect_null(proj_path_prep(NULL))
})
test_that("is_in_proj() detects whether files are (or would be) in project", {
create_local_package()
expect_true(is_in_proj(proj_path("fiction")))
expect_true(is_in_proj(proj_path("DESCRIPTION")))
expect_false(is_in_proj(file_temp()))
expect_false(is_in_proj(path_temp()))
})
test_that("is_in_proj() does not activate a project", {
pkg <- create_local_package()
path <- proj_path("DESCRIPTION")
expect_true(is_in_proj(path))
local_project(NULL)
expect_false(is_in_proj(path))
expect_false(proj_active())
})
test_that("proj_sitrep() reports current working/project state", {
pkg <- create_local_package()
x <- proj_sitrep()
expect_s3_class(x, "sitrep")
expect_false(is.null(x[["working_directory"]]))
expect_identical(
fs::path_file(pkg),
fs::path_file(x[["active_usethis_proj"]])
)
})
test_that("with_project() runs code in temp proj, restores (lack of) proj", {
old_project <- proj_get_()
withr::defer(proj_set_(old_project))
temp_proj <- create_project(
file_temp(pattern = "TEMPPROJ"),
rstudio = FALSE, open = FALSE
)
proj_set_(NULL)
expect_identical(proj_get_(), NULL)
res <- with_project(path = temp_proj, proj_get_())
expect_identical(res, temp_proj)
expect_identical(proj_get_(), NULL)
})
test_that("with_project() runs code in temp proj, restores original proj", {
old_project <- proj_get_()
withr::defer(proj_set_(old_project))
host <- create_project(
file_temp(pattern = "host"),
rstudio = FALSE, open = FALSE
)
guest <- create_project(
file_temp(pattern = "guest"),
rstudio = FALSE, open = FALSE
)
proj_set(host)
expect_identical(proj_get_(), host)
res <- with_project(path = guest, proj_get_())
expect_identical(res, guest)
expect_identical(proj_get(), host)
})
test_that("with_project() works when temp proj == original proj", {
old_project <- proj_get_()
withr::defer(proj_set_(old_project))
host <- create_project(
file_temp(pattern = "host"),
rstudio = FALSE, open = FALSE
)
proj_set(host)
expect_identical(proj_get_(), host)
res <- with_project(path = host, proj_get_())
expect_identical(res, host)
expect_identical(proj_get(), host)
})
test_that("local_project() activates proj til scope ends", {
old_project <- proj_get_()
withr::defer(proj_set_(old_project))
new_proj <- file_temp(pattern = "localprojtest")
create_project(new_proj, rstudio = FALSE, open = FALSE)
proj_set_(NULL)
foo <- function() {
local_project(new_proj)
proj_sitrep()
}
res <- foo()
expect_identical(
res[["active_usethis_proj"]],
as.character(proj_path_prep(new_proj))
)
expect_null(proj_get_())
})
test_that("proj_activate() works with relative path when RStudio is not detected", {
sandbox <- path_real(dir_create(file_temp("sandbox")))
withr::defer(dir_delete(sandbox))
orig_proj <- proj_get_()
withr::defer(proj_set(orig_proj, force = TRUE))
withr::local_dir(sandbox)
rel_path_proj <- path_file(file_temp(pattern = "mno"))
out_path <- create_project(rel_path_proj, rstudio = FALSE, open = FALSE)
with_mock(
rstudio_available = function(...) FALSE,
expect_error_free(
result <- proj_activate(rel_path_proj)
)
)
expect_true(result)
expect_equal(path_wd(), out_path)
expect_equal(proj_get(), out_path)
})
test_that("local_project()'s `quiet` argument works", {
temp_proj <- create_project(
file_temp(pattern = "TEMPPROJ"),
rstudio = FALSE, open = FALSE
)
withr::defer(dir_delete(temp_proj))
local_project(path = temp_proj, quiet = TRUE, force = TRUE, setwd = FALSE)
expect_true(getOption("usethis.quiet"))
}) |
knitr::opts_chunk$set(comment = "")
options(width = 100, max.print = 100)
set.seed(123456)
library(simTool)
library(dplyr)
library(tidyr)
library(tibble)
library(ggplot2)
library(broom)
library(boot)
regData <- function(n, SD) {
x <- seq(0, 1, length = n)
y <- 10 + 2 * x + rnorm(n, sd = SD)
tibble(x = x, y = y)
}
eval_tibbles(
expand_tibble(fun = "regData", n = 5L, SD = 1:2),
expand_tibble(proc = "lm", formula = c("y~x", "y~I(x^2)")),
post_analyze = broom::tidy,
summary_fun = list(mean = mean, sd = sd),
group_for_summary = "term",
replications = 3
)
print(dg <- dplyr::bind_rows(
expand_tibble(fun = "rexp", n = c(10L, 20L), rate = 1:2),
expand_tibble(fun = "rnorm", n = c(10L, 20L), mean = 1:2)
))
print(pg <- dplyr::bind_rows(
expand_tibble(proc = "min"),
expand_tibble(proc = "mean", trim = c(0.1, 0.2))
))
dg <- expand_tibble(fun = "rnorm", n = 10, mean = 1:2)
pg <- expand_tibble(proc = "min")
eg <- eval_tibbles(data_grid = dg, proc_grid = pg, replications = 2)
eg
eg <- eval_tibbles(data_grid = dg, proc_grid = pg, replications = 3)
eg
eg <- eval_tibbles(data_grid = dg, proc_grid = pg, replications = 1)
eg$simulation
eg$generated_data
dg <- expand_tibble(fun = "runif", n = c(10, 20, 30))
pg <- expand_tibble(proc = c("min", "max"))
eval_tibbles(
data_grid = dg, proc_grid = pg, replications = 1000,
summary_fun = list(mean = mean)
)
eval_tibbles(
data_grid = dg, proc_grid = pg, replications = 1000,
summary_fun = list(mean = mean, sd = sd)
)
eval_tibbles(
expand_tibble(fun = "regData", n = 5L, SD = 1:2),
expand_tibble(proc = "lm", formula = c("y~x", "y~I(x^2)")),
replications = 2
)
eval_tibbles(
expand_tibble(fun = "regData", n = 5L, SD = 1:2),
expand_tibble(proc = "lm", formula = c("y~x", "y~I(x^2)")),
post_analyze = purrr::compose(function(mat) mat["(Intercept)", "Estimate"], coef, summary.lm),
replications = 2
)
preserve_rownames <- function(mat) {
rn <- rownames(mat)
ret <- tibble::as_tibble(mat)
ret$term <- rn
ret
}
eval_tibbles(
expand_tibble(fun = "regData", n = 5L, SD = 1:2),
expand_tibble(proc = "lm", formula = c("y~x", "y~I(x^2)")),
  post_analyze = purrr::compose(preserve_rownames, coef, summary),
replications = 3
)
eval_tibbles(
expand_tibble(fun = "regData", n = 5L, SD = 1:2),
expand_tibble(proc = "lm", formula = c("y~x", "y~I(x^2)")),
  post_analyze = purrr::compose(preserve_rownames, coef, summary),
summary_fun = list(mean = mean, sd = sd),
group_for_summary = "term",
replications = 3
)
eval_tibbles(
data_grid = dg, proc_grid = pg, replications = 10,
ncpus = 2, summary_fun = list(mean = mean)
)
library(parallel)
cl <- makeCluster(rep("localhost", 2), type = "PSOCK")
eval_tibbles(
data_grid = dg, proc_grid = pg, replications = 10,
cluster = cl, summary_fun = list(mean = mean)
)
stopCluster(cl)
library(boot)
ratio <- function(d, w) sum(d$x * w) / sum(d$u * w)
city.boot <- boot(city, ratio,
R = 999, stype = "w",
sim = "ordinary"
)
boot.ci(city.boot,
conf = c(0.90, 0.95),
type = c("norm", "basic", "perc", "bca")
)
returnCity <- function() {
city
}
bootConfInt <- function(data) {
city.boot <- boot(data, ratio,
R = 999, stype = "w",
sim = "ordinary"
)
boot.ci(city.boot,
conf = c(0.90, 0.95),
type = c("norm", "basic", "perc", "bca")
)
}
dg <- expand_tibble(fun = "returnCity")
pg <- expand_tibble(proc = "bootConfInt")
eval_tibbles(dg, pg,
replications = 10, ncpus = 2,
cluster_libraries = c("boot"),
cluster_global_objects = c("ratio")
)
summary <- function(x) tibble(sd = sd(x))
g <- function(x) tibble(q0.1 = quantile(x, 0.1))
someFunc <- function() {
summary <- function(x) tibble(sd = sd(x), mean = mean(x))
dg <- expand_tibble(fun = "runif", n = 100)
pg <- expand_tibble(proc = c("summary", "g"))
print(eval_tibbles(dg, pg))
cat("--------------------------------------------------\n")
print(eval_tibbles(dg, pg, envir = environment()))
}
someFunc()
dg <- expand_tibble(fun = c("rnorm"), mean = c(1,1000), sd = c(1,10), n = c(10L, 100L))
pg <- expand_tibble(proc = "quantile", probs = 0.975)
post_ana <- function(q_est, .truth){
tibble::tibble(bias = q_est - stats::qnorm(0.975, mean = .truth$mean, sd = .truth$sd))
}
eval_tibbles(dg, pg, replications = 10^3, discard_generated_data = TRUE,
ncpus = 2,
post_analyze = post_ana,
summary_fun = list(mean = mean))
dg <- dplyr::bind_rows(
expand_tibble(fun = c("rnorm"), mean = 0, n = c(10L, 100L), .truth = qnorm(0.975)),
expand_tibble(fun = c("rexp"), rate = 1, n = c(10L, 100L), .truth = qexp(0.975, rate = 1)),
expand_tibble(fun = c("runif"), max = 2, n = c(10L, 100L), .truth = qunif(0.975, max = 2))
)
pg <- expand_tibble(proc = "quantile", probs = 0.975)
post_ana <- function(q_est, .truth){
ret <- q_est - .truth
names(ret) <- "bias"
ret
}
eval_tibbles(dg, pg, replications = 10^3, discard_generated_data = TRUE,
ncpus = 2,
post_analyze = post_ana,
summary_fun = list(mean = mean))
dg <- dplyr::bind_rows(
expand_tibble(fun = c("rnorm"), mean = 0, n = c(10L, 1000L),
.truth = list(function(prob) qnorm(prob, mean = 0))),
expand_tibble(fun = c("rexp"), rate = 1, n = c(10L, 1000L),
.truth = list(function(prob) qexp(prob, rate = 1))),
expand_tibble(fun = c("runif"), max = 2, n = c(10L, 1000L),
.truth = list(function(prob) qunif(prob, max = 2)))
)
bias_quantile <- function(x, prob, .truth) {
est <- quantile(x, probs = prob)
ret <- est - .truth[[1]](prob)
names(ret) <- "bias"
ret
}
pg <- expand_tibble(proc = "bias_quantile", prob = c(0.9, 0.975))
eval_tibbles(dg, pg, replications = 10^3, discard_generated_data = TRUE,
ncpus = 1,
summary_fun = list(mean = mean))
EVAL <- FALSE
if (Sys.getenv("NOT_CRAN") == "true") {
EVAL <- TRUE
} |
get.trait.data.pft <- function(pft, modeltype, dbfiles, dbcon, trait.names,
forceupdate = FALSE) {
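  # Retrieves trait data and priors for a single PFT. Unless `forceupdate` is
  # TRUE, an existing posterior is reused when its PFT membership, priors and
  # trait data all match the current database query; otherwise trait data are
  # re-queried so the meta-analysis can be re-run.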
if (!file.exists(pft$outdir) && !dir.create(pft$outdir, recursive = TRUE)) {
PEcAn.logger::logger.error(paste0("Couldn't create PFT output directory: ", pft$outdir))
}
old.files <- list.files(path = pft$outdir, full.names = TRUE, include.dirs = FALSE)
file.remove(old.files)
pftres <- query_pfts(dbcon, pft[["name"]], modeltype)
pfttype <- pftres[["pft_type"]]
pftid <- pftres[["id"]]
if (nrow(pftres) > 1) {
PEcAn.logger::logger.severe(
"Multiple PFTs named", pft[["name"]], "found,",
"with ids", PEcAn.utils::vecpaste(pftres[["id"]]), ".",
"Specify modeltype to fix this.")
}
if (nrow(pftres) == 0) {
PEcAn.logger::logger.severe("Could not find pft", pft[["name"]])
return(NA)
}
if (pfttype == "plant") {
pft_member_filename = "species.csv"
pft_members <- PEcAn.DB::query.pft_species(pft$name, modeltype, dbcon)
} else if (pfttype == "cultivar") {
pft_member_filename = "cultivars.csv"
pft_members <- PEcAn.DB::query.pft_cultivars(pft$name, modeltype, dbcon)
} else {
PEcAn.logger::logger.severe("Unknown pft type! Expected 'plant' or 'cultivar', got", pfttype)
}
pft_members <- pft_members %>%
dplyr::mutate_if(is.character, ~dplyr::na_if(., ""))
prior.distns <- PEcAn.DB::query.priors(pft = pftid, trstr = PEcAn.utils::vecpaste(trait.names), con = dbcon)
prior.distns <- prior.distns[which(!rownames(prior.distns) %in% names(pft$constants)),]
traits <- rownames(prior.distns)
trait.data.check <- PEcAn.DB::query.traits(ids = pft_members$id, priors = traits, con = dbcon, update.check.only = TRUE, ids_are_cultivars = (pfttype=="cultivar"))
traits <- names(trait.data.check)
forceupdate <- isTRUE(as.logical(forceupdate))
if (!forceupdate) {
if (is.null(pft$posteriorid)) {
recent_posterior <- dplyr::tbl(dbcon, "posteriors") %>%
dplyr::filter(.data$pft_id == !!pftid) %>%
dplyr::collect()
      if (nrow(recent_posterior) > 0) {
pft$posteriorid <- dplyr::tbl(dbcon, "posteriors") %>%
dplyr::filter(.data$pft_id == !!pftid) %>%
dplyr::arrange(dplyr::desc(.data$created_at)) %>%
utils::head(1) %>%
dplyr::pull(id)
} else {
PEcAn.logger::logger.info("No previous posterior found. Forcing update")
}
}
if (!is.null(pft$posteriorid)) {
files <- dbfile.check(type = "Posterior", container.id = pft$posteriorid, con = dbcon,
return.all = TRUE)
need_files <- c(
trait_data = "trait.data.Rdata",
priors = "prior.distns.Rdata",
pft_membership = pft_member_filename
)
ids <- match(need_files, files$file_name)
names(ids) <- names(need_files)
if (any(is.na(ids))) {
missing_files <- need_files[is.na(ids)]
PEcAn.logger::logger.info(paste0(
"Forcing meta-analysis update because ",
"the following files are missing from the posterior: ",
paste0(shQuote(missing_files), collapse = ", ")
))
PEcAn.logger::logger.debug(
"\n `dbfile.check` returned the following output:\n",
PEcAn.logger::print2string(files),
wrap = FALSE
)
} else {
PEcAn.logger::logger.debug(
"All posterior files are present. Performing additional checks ",
"to determine if meta-analysis needs to be updated."
)
need_paths <- file.path(files$file_path[ids], need_files)
names(need_paths) <- names(need_files)
files_exist <- file.exists(need_paths)
foundallfiles <- all(files_exist)
if (!foundallfiles) {
PEcAn.logger::logger.warn(
"The following files are in database but not found on disk: ",
paste(shQuote(need_files[!files_exist]), collapse = ", "), ". ",
"Re-running meta-analysis."
)
} else {
PEcAn.logger::logger.debug("Checking if PFT membership has changed.")
if (pfttype == "plant") {
colClass = c("double", "character", "character", "character")
} else if (pfttype == "cultivar") {
colClass = c("double", "double", "character", "character", "character", "character")
}
existing_membership <- utils::read.csv(
need_paths[["pft_membership"]],
colClasses = colClass,
stringsAsFactors = FALSE,
na.strings = c("", "NA")
)
diff_membership <- symmetric_setdiff(
existing_membership,
pft_members,
xname = "existing",
yname = "current"
)
if (nrow(diff_membership) > 0) {
PEcAn.logger::logger.error(
"\n PFT membership has changed. \n",
"Difference is:\n",
PEcAn.logger::print2string(diff_membership),
wrap = FALSE
)
foundallfiles <- FALSE
}
PEcAn.logger::logger.debug("Checking if priors have changed")
existing_prior <- PEcAn.utils::load_local(need_paths[["priors"]])[["prior.distns"]]
diff_prior <- symmetric_setdiff(
dplyr::as_tibble(prior.distns, rownames = "trait"),
dplyr::as_tibble(existing_prior, rownames = "trait")
)
if (nrow(diff_prior) > 0) {
PEcAn.logger::logger.error(
"\n Prior has changed. \n",
"Difference is:\n",
PEcAn.logger::print2string(diff_prior),
wrap = FALSE
)
foundallfiles <- FALSE
}
PEcAn.logger::logger.debug("Checking if trait data have changed")
existing_trait_data <- PEcAn.utils::load_local(
need_paths[["trait_data"]]
)[["trait.data"]]
if (length(trait.data.check) != length(existing_trait_data)) {
PEcAn.logger::logger.warn(
"Lengths of new and existing `trait.data` differ. ",
"Re-running meta-analysis."
)
foundallfiles <- FALSE
} else if (length(trait.data.check) == 0) {
PEcAn.logger::logger.warn("New and existing trait data are both empty. Skipping this check.")
} else {
current_traits <- dplyr::bind_rows(trait.data.check, .id = "trait") %>%
dplyr::select(-mean, -.data$stat)
existing_traits <- dplyr::bind_rows(existing_trait_data, .id = "trait") %>%
dplyr::select(-mean, -.data$stat)
diff_traits <- symmetric_setdiff(current_traits, existing_traits)
if (nrow(diff_traits) > 0) {
diff_summary <- diff_traits %>%
dplyr::count(source, .data$trait)
PEcAn.logger::logger.error(
"\n Prior has changed. \n",
"Here are the number of differing trait records by trait:\n",
PEcAn.logger::print2string(diff_summary),
wrap = FALSE
)
foundallfiles <- FALSE
}
}
}
if (foundallfiles) {
PEcAn.logger::logger.info(
"Reusing existing files from posterior", pft$posteriorid,
"for PFT", shQuote(pft$name)
)
for (id in seq_len(nrow(files))) {
file.copy(from = file.path(files[[id, "file_path"]], files[[id, "file_name"]]),
to = file.path(pft$outdir, files[[id, "file_name"]]))
}
done <- TRUE
if (length(list.files(pft$outdir, "post.distns.Rdata")) == 0) {
all.files <- list.files(pft$outdir)
post.distn.file <- all.files[grep("post\\.distns\\..*\\.Rdata", all.files)]
if (length(post.distn.file) > 1)
PEcAn.logger::logger.severe(
"get.trait.data.pft() doesn't know how to ",
"handle multiple `post.distns.*.Rdata` files.",
"Found the following files: ",
paste(shQuote(post.distn.file), collapse = ", ")
)
else if (length(post.distn.file) == 1) {
link_input <- file.path(pft[["outdir"]], post.distn.file)
link_target <- file.path(pft[["outdir"]], "post.distns.Rdata")
PEcAn.logger::logger.debug(
"Found exactly one posterior distribution file: ",
shQuote(link_input),
". Symlinking it to PFT output directory: ",
shQuote(link_target)
)
file.symlink(from = link_input, to = link_target)
} else {
PEcAn.logger::logger.error(
"No previous posterior distribution file found. ",
"Most likely, trait data were retrieved, but meta-analysis ",
"was not run. Meta-analysis will be run."
)
done <- FALSE
}
}
if (done) return(pft)
}
}
}
}
trait.data <- query.traits(pft_members$id, traits, con = dbcon,
update.check.only = FALSE,
ids_are_cultivars = (pfttype == "cultivar"))
traits <- names(trait.data)
if (length(trait.data) > 0) {
trait_counts <- trait.data %>%
dplyr::bind_rows(.id = "trait") %>%
dplyr::count(.data$trait)
PEcAn.logger::logger.info(
"\n Number of observations per trait for PFT ", shQuote(pft[["name"]]), ":\n",
PEcAn.logger::print2string(trait_counts, n = Inf, na.print = ""),
wrap = FALSE
)
} else {
PEcAn.logger::logger.warn(
"None of the requested traits were found for PFT ",
format(pft_members[["id"]], scientific = FALSE)
)
}
old.files <- list.files(path = pft$outdir)
insert_result <- db.query(
paste0("INSERT INTO posteriors (pft_id) VALUES (", pftid, ") RETURNING id"),
con = dbcon)
pft$posteriorid <- insert_result[["id"]]
pathname <- file.path(dbfiles, "posterior", pft$posteriorid)
dir.create(pathname, showWarnings = FALSE, recursive = TRUE)
utils::write.csv(pft_members, file.path(pft$outdir, pft_member_filename),
row.names = FALSE)
save(prior.distns, file = file.path(pft$outdir, "prior.distns.Rdata"))
utils::write.csv(prior.distns, file.path(pft$outdir, "prior.distns.csv"),
row.names = TRUE)
PEcAn.logger::logger.info(
"\n Summary of prior distributions for PFT ", shQuote(pft$name), ":\n",
PEcAn.logger::print2string(prior.distns),
wrap = FALSE
)
trait.data.file <- file.path(pft$outdir, "trait.data.Rdata")
save(trait.data, file = trait.data.file)
utils::write.csv(
dplyr::bind_rows(trait.data),
file.path(pft$outdir, "trait.data.csv"),
row.names = FALSE
)
store_files_all <- list.files(path = pft[["outdir"]])
store_files <- setdiff(store_files_all, old.files)
PEcAn.logger::logger.debug(
"The following posterior files found in PFT outdir ",
"(", shQuote(pft[["outdir"]]), ") will be registered in BETY ",
"under posterior ID ", format(pft[["posteriorid"]], scientific = FALSE), ": ",
paste(shQuote(store_files), collapse = ", "), ". ",
"The following files (if any) will not be registered because they already existed: ",
paste(shQuote(intersect(store_files_all, old.files)), collapse = ", "),
wrap = FALSE
)
for (file in store_files) {
filename <- file.path(pathname, file)
file.copy(file.path(pft$outdir, file), filename)
dbfile.insert(in.path = pathname, in.prefix = file,
type = "Posterior", id = pft[["posteriorid"]],
con = dbcon)
}
return(pft)
} |
library(knitr)
knitr::opts_chunk$set(autodep = TRUE, cache = FALSE)
library(fMRIscrub)
dim(Dat1)
dim(Dat2)
ps.Dat1 = pscrub(Dat1, verbose=TRUE, comps_mean_dt=1, comps_var_dt=1)
ps.Dat2 = pscrub(Dat2, verbose=TRUE, comps_mean_dt=1, comps_var_dt=1)
p1 <- plot(ps.Dat1, title="Dat1", show.legend=FALSE)
p2 <- plot(ps.Dat2, title="Dat2", show.legend=FALSE)
cowplot::plot_grid(p1, p2, nrow=1)
p1 <- plot(DVARS(Dat1), title="Dat1", show.legend=FALSE)
p2 <- plot(DVARS(Dat2), title="Dat2", show.legend=FALSE)
cowplot::plot_grid(p1, p2, nrow=1)
library(oro.nifti)
library(neurobase)
fname = system.file("extdata", "Dat1_mask.nii.gz", package = "fMRIscrub")
Mask1 = readNIfTI(fname) > 0
Mask1 = array(Mask1, dim=c(dim(Mask1), 1))
Img1 = fMRIscrub::unmask_vol(t(Dat1), Mask1)
mfrow_original <- par("mfrow")
par(mfrow=c(1,2))
levs = ps.Dat1$measure
t_med = order(levs)[ceiling(length(levs)/2)]
t_max = which.max(levs)
image(Img1[,,,t_med], main=paste0('Median lev (T = ', t_med, ')'))
image(Img1[,,,t_max], main=paste0('Maximum lev (T = ', t_max, ')'))
par(mfrow=mfrow_original)
psx <- pscrub(Dat1, projection="ICA", get_dirs=TRUE, comps_mean_dt=1, comps_var_dt=1)
artImg1 = artifact_images(psx)
par(mfrow=c(1,2))
artImg1.mean = unmask_vol(t(artImg1$mean), Mask1)
artImg1.top = unmask_vol(t(artImg1$top), Mask1)
idx = which(which(psx$outlier_flag) == t_max)
image(artImg1.mean[,,1,idx], main=paste0('Lev image, mean (T=',t_max,')'))
image(artImg1.top[,,1,idx], main=paste0('Lev image, top (T=',t_max,')'))
ps.Dat1.3 <- fMRIscrub:::pscrub_multi(
Dat1, projection=c("ICA_kurt", "fusedPCA_kurt", "ICA"), verbose=TRUE, comps_mean_dt=1, comps_var_dt=1
)
fMRIscrub:::plot.scrub_projection_multi(ps.Dat1.3, legend.position="bottom") |
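# --- Manual forward-pass helpers for a trained RSNNS network ---------------
# The functions below re-implement prediction by walking the stored weight
# matrix layer by layer, so individual hidden nodes can be inspected or
# disabled. They assume the globals 'nn' (a trained RSNNS model), 'Dim'
# (number of input units) and 'nclasses' (number of output units) are defined
# elsewhere in the analysis.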
Act_logistic <- function (x) {
1 / (1 + exp(-x))}
setup_my_predict <- function () {
bias <<- c()
sObj <<- nn$snnsObject
weightvalues <<- weightMatrix(nn)
layers <<- c()
layer2 <<- c()
layer3 <<- c()
layer4 <<- c()
output <<- c()
nunits <<- sObj$getNoOfUnits()
for (i in 1:nunits) {
units <- sObj$getUnitName(i)
bias <<- c(bias, sObj$getUnitBias(i))
node_descriptor <- strsplit(units, '_')
node_descriptor <- unlist(node_descriptor)
if (node_descriptor[1] == 'Hidden') {
if (node_descriptor[2] == '2') {
layer2 <<- c(layer2, i)
}
if (node_descriptor[2] == '3') {
layer3 <<- c(layer3, i)
}
if (node_descriptor[2] == '4') {
layer4 <<- c(layer4, i)
}
}
if (node_descriptor[1] == 'Output') {
output <<- c(output, i)
}
}
if (length(layer2) > 0) {
layers <<- c(layers, 'layer2')
}
if (length(layer3) > 0) {
layers <<- c(layers, 'layer3')
}
if (length(layer4) > 0) {
layers <<- c(layers, 'layer4')
}
layers <<- c(layers, 'output')
}
my_predict_single <- function (data) {
xinput <- as.matrix(data)
nrows = nrow(xinput)
nextra <- (nunits - Dim)*nrows
hdata <- matrix(c(xinput, rep(0.0, nextra)), ncol = nunits, nrow = 1)
nlayers <- length(layers)
activity <- rep(0.0,nunits)
for (i in 1:Dim) {activity[i] <- data[i]}
n <- 1
while (n <= nlayers) {
hdata <- hdata %*% weightvalues
nodes_in_layer <- get(layers[n])
for (i in nodes_in_layer) {
hdata[i] <- Act_logistic(hdata[i] + bias[i])
activity[i] <- hdata[i]
}
n <- n + 1
}
return (unlist(activity))
}
my_predict <- function (data,disabled_nodes=NULL) {
xinput <- as.matrix(data)
nrows = nrow(xinput)
nextra <- (nunits - Dim)
zeros <- matrix(rep(0.0,nextra),ncol=nextra,nrow=nrows)
hdata <- cbind(xinput,zeros)
nlayers <- length(layers)
activity <- c()
for (i in 1:Dim) {activity[i] <- data[i]}
n <- 1
while (n <= nlayers) {
hdata <- hdata %*% weightvalues
nodes_in_layer <- get(layers[n])
for (i in nodes_in_layer) {
if (i %in% disabled_nodes) {
hdata[,i] = 0.0
} else {
hdata[,i] <- Act_logistic(hdata[,i] + bias[i])
}
}
n <- n + 1
}
return (hdata[,(ncol(hdata)-nclasses+1):ncol(hdata)])
}
my_predict_activation <- function (data) {
xinput <- as.matrix(data)
nrows = nrow(xinput)
activity <- c()
nextra <- (nunits - Dim)
zeros <- matrix(rep(0.0,nextra),ncol=nextra,nrow=nrows)
hdata <- cbind(xinput,zeros)
activation <- hdata
nlayers <- length(layers)
for (i in 1:Dim) {activity[i] <- data[i]}
n <- 1
while (n <= nlayers) {
hdata <- hdata %*% weightvalues
nodes_in_layer <- get(layers[n])
for (i in nodes_in_layer) {
hdata[,i] <- Act_logistic(hdata[,i] + bias[i])
activation[,i] <- hdata[,i]
}
n <- n + 1
}
return (activation)
}
library(plot3D)
plot_activation <- function (y1,y2,activation,node) {
scatter2D(y1,y2,clim = c(0.0,1.0), colvar=activation[,node])
} |
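# Example usage (sketch, assuming a trained model 'nn' and an input matrix X
# with 'Dim' columns already exist in the workspace):
# setup_my_predict()
# preds <- my_predict(X) # class scores with every node active
# ablated <- my_predict(X, disabled_nodes = layer2[1]) # silence one hidden node
# act <- my_predict_activation(X)
# plot_activation(X[, 1], X[, 2], act, node = output[1])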
context("Function createArea")
sapply(studies, function(study) {
setup_study(study, sourcedir)
opts <- antaresRead::setSimulationPath(studyPath, 1)
test_that("Cannot initialize a new area if not in 'Input' mode", {
expect_error(createArea(name = "myarea"))
})
opts <- antaresRead::setSimulationPath(studyPath, 'input')
test_that("Backup study/input", {
expect_length(backupStudy(what = "study"), 1)
expect_length(backupStudy(what = "input"), 1)
})
test_that("Initialize a new area", {
n_before <- length(getOption("antares")$areaList)
createArea(name = "myarea")
n_after <- length(getOption("antares")$areaList)
expect_equal(n_before + 1, n_after)
expect_true("myarea" %in% getOption("antares")$areaList)
})
test_that("nodal optimization options are properly written", {
createArea(
name = "testarea",
nodalOptimization = nodalOptimizationOptions(
non_dispatchable_power = FALSE,
dispatchable_hydro_power = TRUE,
other_dispatchable_power = FALSE,
spread_unsupplied_energy_cost = 10,
spread_spilled_energy_cost = 3.14,
average_unsupplied_energy_cost = 239,
average_spilled_energy_cost = 1000
)
)
optim_testarea <- readIniFile(file.path(opts$inputPath, "areas", "testarea", "optimization.ini"))
expect_equal(optim_testarea$`nodal optimization`$`dispatchable-hydro-power`, TRUE)
expect_equal(optim_testarea$`nodal optimization`$`spread-unsupplied-energy-cost`, 10)
expect_equal(optim_testarea$`nodal optimization`$`non-dispatchable-power`, FALSE)
expect_equal(optim_testarea$`nodal optimization`$`other-dispatchable-power`, FALSE)
expect_equal(optim_testarea$`nodal optimization`$`spread-spilled-energy-cost`, 3.14)
thermal_areas <- readIniFile(file.path(opts$inputPath, "thermal", "areas.ini"))
expect_equal(thermal_areas$spilledenergycost$testarea, 1000)
expect_equal(thermal_areas$unserverdenergycost$testarea, 239)
})
test_that("Remove an area", {
area2remove <- "myareatoremove"
createArea(name = area2remove)
ra <- checkRemovedArea(area = area2remove)
expect_true(length(ra$areaResiduFiles) > 0)
expect_true(length(ra$areaResidus) > 0)
removeArea(name = area2remove)
ra <- checkRemovedArea(area = area2remove)
expect_length(ra$areaResiduFiles, 0)
expect_length(ra$areaResidus, 0)
})
unlink(x = file.path(pathstd, "test_case"), recursive = TRUE)
}) |
str(exp1)
res1 <-
expirest_wisle(data = exp1[exp1$Batch %in% c("b2", "b5", "b7"), ],
response_vbl = "Potency", time_vbl = "Month",
batch_vbl = "Batch", rl = 98, rl_sf = 3, sl = 95,
sl_sf = 3, srch_range = c(0, 500))
res2 <-
expirest_wisle(data = exp1[exp1$Batch %in% c("b3", "b4", "b5"), ],
response_vbl = "Potency", time_vbl = "Month",
batch_vbl = "Batch", rl = 98, rl_sf = 3, sl = 95,
sl_sf = 3, srch_range = c(0, 500))
res3 <-
expirest_wisle(data = exp1[exp1$Batch %in% c("b4", "b5", "b8"), ],
response_vbl = "Potency", time_vbl = "Month",
batch_vbl = "Batch", rl = 98, rl_sf = 3, sl = 95,
sl_sf = 3, srch_range = c(0, 500))
res1
res2
res3 |
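# Constructor that tags a list of momentuHMM model fits with the 'HMMfits'
# class so that list-aware methods can dispatch on it.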
HMMfits <- function(m)
{
stopifnot(any(unlist(lapply(m,is.momentuHMM))))
obj <- m
class(obj) <- append("HMMfits",class(obj))
return(obj)
}
is.HMMfits <- function(x)
inherits(x,"HMMfits") |
get.mpt.fia <- function(data, model.filename, restrictions.filename = NULL, Sample = 200000, model.type = c("easy", "eqn", "eqn2"), round.digit = 6, multicore = FALSE, split = NULL, mConst = NULL){
if(is.vector(data)) {
data <- array(data, dim = c(1, length(data)))
multiFit <- FALSE
} else
if(is.matrix(data) | is.data.frame(data)) {
if (is.data.frame(data)) data <- as.matrix(data)
multiFit <- TRUE
} else stop("data is neither vector, nor matrix, nor data.frame!")
class.model <- class(model.filename)
if ("connection" %in% class.model) {
tmp.model <- readLines(model.filename)
model.filename <- textConnection(tmp.model)
}
model <- .get.mpt.model(model.filename, model.type)
n.data <- dim(data)[1]
if(!is.null(restrictions.filename)) {
restrictions <- .check.restrictions(restrictions.filename, model)
}
if (sum(sapply(model, length)) != length(data[1,])) stop(paste("Size of data does not correspond to size of model (i.e., model needs ", sum(sapply(model, length)), " datapoints, data gives ", length(data[1,]), " datapoints).", sep = ""))
df.n <- apply(data, 1, .DF.N.get, tree = model)
n_items <- sapply(df.n, function (x) sum(x[[2]]))
if ("connection" %in% class.model) {
model.filename <- textConnection(tmp.model)
}
mpt.string <- make.mpt.cf(model.filename = model.filename, model.type = model.type)
is.category <- grepl("^[[:digit:]]+$", mpt.string)
s <- paste(ifelse(is.category == 0, "p", "C"), collapse = "")
params <- mpt.string[!is.category]
category <- mpt.string[is.category]
category <- as.numeric(category)
is.p.join <- grepl("^hank\\.join\\.", params)
c.join <- sum(is.p.join)
p.join <- params[is.p.join]
p.n.join <- params[!is.p.join]
hank.restrictions <- vector("list", c.join)
ns <- t(sapply(df.n, function (x) x[[2]]))
p.n.join.lev <- sort(unique(p.n.join))
f.p.n.join <- factor(p.n.join, levels = p.n.join.lev)
names(p.n.join.lev) <- 1:length(p.n.join.lev)
if (c.join > 0) {
for (indiv in 1:n.data) {
for (c.hank in 1:(c.join)) {
hank.restrictions[[c.hank]][indiv] <- round(sum(ns[indiv,1:c.hank]) / sum(ns[indiv,1:(c.hank+1)]), round.digit)
}
}
params.join.mat <- matrix(NA, nrow = n.data, ncol = (c.join))
for (indiv in 1:n.data) {
for (par.join in 1:length(p.join)) {
params.join.mat[indiv,par.join] <- -hank.restrictions[[length(p.join)-(par.join-1)]][indiv]
}
}
parameters <- vector("list", n.data)
for (indiv in 1:n.data) {
parameters[[indiv]] <- c(params.join.mat[indiv,], as.numeric(f.p.n.join))
}
n.fia <- 1
i.data <- vector('list', n.data)
p.fia <- vector("numeric", n.data)
m.fit <- vector("numeric", n.data)
i.data[[1]] <- params.join.mat[1,]
p.fia[1] <- 1
m.fit[1] <- 1
if (n.data > 1) {
for (c in 2:n.data) {
tmp <- list(params.join.mat[c,])
if (tmp %in% i.data) p.fia[c] <- which(i.data %in% tmp)
else {
n.fia <- n.fia + 1
i.data[n.fia] <- tmp
p.fia[c] <- n.fia
m.fit[n.fia] <- c
}
}
}
m.fit <- m.fit[1:n.fia]
} else {
parameters <- vector("list", n.data)
for (indiv in 1:n.data) {
parameters[[indiv]] <- as.numeric(f.p.n.join)
}
m.fit <- 1
p.fia <- rep(1,n.data)
}
if(!is.null(restrictions.filename)) {
ineq <- vector('list', length(restrictions))
n.ineq <- 1
for (restr in 1:length(restrictions)) {
if (restrictions[[restr]][3] == "=") {
if (grepl("^[[:digit:]]", restrictions[[restr]][2])) {
for (indiv in 1:n.data) {
parameters[[indiv]][params == restrictions[[restr]][1]] <- -as.numeric(restrictions[[restr]][2])
}
} else {
for (indiv in 1:n.data) {
parameters[[indiv]][params == restrictions[[restr]][1]] <- (parameters[[indiv]][params == restrictions[[restr]][2]])[1]
}
}
} else {
if (restrictions[[restr]][3] == "<") {
ineq[[n.ineq]] <- matrix(c((parameters[[indiv]][params == restrictions[[restr]][1]])[1], (parameters[[indiv]][params == restrictions[[restr]][4]])[1]), 1,2)
n.ineq <- n.ineq + 1
}
}
}
ineq <- do.call("rbind", ineq)
} else ineq <- NULL
n.fit <- length(m.fit)
fia.result <- vector('list', n.fit)
for (counter in 1:n.fit) {
fia.result[[counter]] <- bmpt.fia(s, parameters[[m.fit[counter]]], category, n_items[m.fit[counter]], ineq, Sample, multicore = multicore, split = split, mConst = mConst)
}
n.params <- length(unique(parameters[[1]][parameters[[1]] > 0]))
res <- vector('list', n.data)
for (c in 1:n.data) {
res[[c]] <- fia.result[[p.fia[c]]]
res[[c]][["CFIA"]] <- res[[c]][["lnInt"]] + res[[c]][["lnconst"]]+n.params/2*log(n_items[c]/2/pi)
res[[c]][["CI.l"]] <- NA
res[[c]][["CI.u"]] <- NA
}
as.data.frame(do.call('rbind', res))
} |
library(survRM2)
D = rmst2.sample.data()
nrow(D)
head(D[,1:3])
plot(survfit(Surv(time, status)~arm, data=D), col=c("blue","red"), lwd=2, mark.time=F, xlab="Years",ylab="Probability")
legend("bottomleft", c("Placebo (arm=0)","D-penicillamine (arm=1)"), col=c("blue","red"), lwd=2)
fit=survfit(Surv(D$time[D$arm==1], D$status[D$arm==1])~1)
tau=10
tmp.xx=c(0, fit$time); tmp.yy=c(1, fit$surv) ;
idx=tmp.xx<=tau
y.tau = min(tmp.yy[idx])
xx=c(tmp.xx[idx], tau)
yy=c(tmp.yy[idx], y.tau)
x.step=sort(c(0, tmp.xx, tmp.xx))
y.step=rev(sort(c(1,1,tmp.yy, tmp.yy[-length(tmp.yy)])))
rmst=summary(fit, rmean=10)$table[5]
par(mfrow=c(1,2))
plot(fit, mark.time=F, xlab="Years",ylab="Probability",conf.int=F, lwd=2, main="Restricted mean survival time (RMST)", col="red", cex.main=0.8)
for (i in 1: (length(xx)-1)){
polygon(c(xx[i], xx[i+1], xx[i+1], xx[i]), c(0, 0, yy[i+1], yy[i]), col="pink", density=80, angle=80, lwd=2)
}
lines(x.step, y.step, col="red", lwd=3)
text(5,0.4, paste(round(rmst, digits=2),"years"), cex=0.9)
plot(fit, mark.time=F, xlab="Years",ylab="Probability", conf.int=F, lwd=2, main="Restricted mean time lost (RMTL)", col="red",cex.main=0.8)
for (i in 1: (length(xx)-1)){
polygon(c(xx[i], xx[i+1], xx[i+1], xx[i]), c(yy[i], yy[i+1], 1,1), col="orange", density=80, angle=80, lwd=2)
}
lines(x.step, y.step, col="red", lwd=3)
text(7,0.8, paste(round(tau-rmst, digits=2),"years"), cex=0.9)
time = D$time
status = D$status
arm = D$arm
obj = rmst2(time, status, arm, tau=10)
print(obj)
plot(obj, xlab="Years", ylab="Probability")
x=D[,c(4,6,7)]
head(x)
rmst2(time, status, arm, tau=10, covariates=x) |
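# Extracts irradiation durations and the time elapsed since irradiation for
# every measurement step of an XSYG file (or RLum.Analysis object) and can
# optionally write the recalculated values back into a BINX file.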
extract_IrradiationTimes <- function(
object,
file.BINX,
recordType = c("irradiation (NA)", "IRSL (UVVIS)", "OSL (UVVIS)", "TL (UVVIS)"),
compatibility.mode = TRUE,
txtProgressBar = TRUE
){
if(is.list(object)){
if(!missing(file.BINX)){
warning("[extract_IrradiationTimes()] argument 'file.BINX' is not supported in the self call mode.",
call. = FALSE)
}
if(is(recordType, "list")){
recordType <-
rep(recordType, length = length(object))
}else{
recordType <-
rep(list(recordType), length = length(object))
}
results <- lapply(1:length(object), function(x) {
extract_IrradiationTimes(
object = object[[x]],
recordType = recordType[[x]],
txtProgressBar = txtProgressBar
)
})
if(length(results) == 0){
return(NULL)
}else{
return(results)
}
}
if(is(object)[1] != "character" & is(object)[1] != "RLum.Analysis"){
stop("[extract_IrradiationTimes()] Input object is neither of type 'character' nor of type 'RLum.Analysis'.", call. = FALSE)
}else if(is(object)[1] == "character"){
file.XSYG <- object
if(file.exists(file.XSYG) == FALSE){
stop("[extract_IrradiationTimes()] Wrong XSYG file name or file does not exsits!", call. = FALSE)
}
if(tail(unlist(strsplit(file.XSYG, split = "\\.")), 1) != "xsyg" &
tail(unlist(strsplit(file.XSYG, split = "\\.")), 1) != "XSYG" ){
stop("[extract_IrradiationTimes()] File is not of type 'XSYG'!", call. = FALSE)
}
if(!missing(file.BINX)){
if(file.exists(file.BINX) == FALSE){
stop("[extract_IrradiationTimes()] Wrong BINX file name or file does not exist!", call. = FALSE)
}
if(tail(unlist(strsplit(file.BINX, split = "\\.")), 1) != "binx" &
tail(unlist(strsplit(file.BINX, split = "\\.")), 1) != "BINX" ){
stop("[extract_IrradiationTimes()] File is not of type 'BINX'!", call. = FALSE)
}
}
temp.XSYG <- read_XSYG2R(file.XSYG, txtProgressBar = txtProgressBar)
if(!missing(file.BINX)){
temp.BINX <- read_BIN2R(file.BINX, txtProgressBar = txtProgressBar)
temp.BINX.dirname <- (dirname(file.XSYG))
}
temp.sequence.list <- list()
for(i in 1:length(temp.XSYG)){
temp.sequence.list[[i]] <- get_RLum(temp.XSYG[[i]]$Sequence.Object,
recordType = recordType,
drop = FALSE)
temp.sequence.position <- as.numeric(as.character(temp.XSYG[[i]]$Sequence.Header["position",]))
}
}else{
temp.sequence.list <- list(object)
}
if(length(temp.sequence.list)>1){
temp.sequence <- merge_RLum(temp.sequence.list)
}else{
temp.sequence <- temp.sequence.list[[1]]
}
STEP <- names_RLum(temp.sequence)
temp.START <- vapply(temp.sequence, function(x){
get_RLum(x, info.object = c("startDate"))
}, character(1))
DURATION.STEP <- vapply(temp.sequence, function(x){
max(get_RLum(x)[,1])
}, numeric(1))
START <- strptime(temp.START, format = "%Y%m%d%H%M%S", tz = "GMT")
END <- START + DURATION.STEP
if(exists("file.XSYG")){
POSITION <- rep(temp.sequence.position, each = length_RLum(temp.sequence))
}else if(!inherits(try(
get_RLum(
get_RLum(temp.sequence, record.id = 1), info.object = "position"),
silent = TRUE), "try-error")){
POSITION <- vapply(temp.sequence, function(x){
get_RLum(x, info.object = c("position"))
}, numeric(1))
}else{
POSITION <- NA
}
temp.results <- data.frame(POSITION,STEP,START,DURATION.STEP,END)
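# Carry the duration of the most recent irradiation step forward so that each
# subsequent measurement step records the irradiation time it received.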
IRR_TIME <- numeric(length = nrow(temp.results))
temp_last <- 0
for(i in 1:nrow(temp.results)){
if(grepl("irradiation", temp.results[["STEP"]][i])) {
temp_last <- temp.results[["DURATION.STEP"]][i]
next()
}
IRR_TIME[i] <- temp_last
}
time.irr.end <- NA
TIMESINCEIRR <- unlist(sapply(1:nrow(temp.results), function(x){
if(grepl("irradiation", temp.results[x,"STEP"])){
time.irr.end<<-temp.results[x,"END"]
return(-1)
}else{
if(is.na(time.irr.end)){
return(-1)
}else{
return(difftime(temp.results[x,"START"],time.irr.end, units = "secs"))
}
}
}))
TIMESINCELAST.STEP <- unlist(sapply(1:nrow(temp.results), function(x){
if(x == 1){
return(0)
}else{
return(difftime(temp.results[x,"START"],temp.results[x-1, "END"], units = "secs"))
}
}))
results <- cbind(temp.results,IRR_TIME, TIMESINCEIRR,TIMESINCELAST.STEP)
if(!missing(file.BINX)){
results.BINX <- results[-which(results[,"STEP"] == "irradiation (NA)"),]
temp.BINX@METADATA[["IRR_TIME"]] <- results.BINX[["IRR_TIME"]]
temp.BINX@METADATA[["TIMESINCEIRR"]] <- results.BINX[["IRR_TIME"]] + results.BINX[["TIMESINCEIRR"]]
# Write the updated BINX file only if the number of records matches the
# number of non-irradiation steps; otherwise skip the update with a warning.
if(nrow(results.BINX) == nrow(temp.BINX@METADATA)){
try <- write_R2BIN(temp.BINX, version = "06",
file = paste0(file.BINX,"_extract_IrradiationTimes.BINX"),
compatibility.mode = compatibility.mode,
txtProgressBar = txtProgressBar)
if(!inherits(x = try, 'try-error')){
message("[extract_IrradiationTimes()] 'Time Since Irradiation' was redefined in the exported BINX-file to: 'Time Since Irradiation' plus the 'Irradiation Time' to be compatible with the Analyst.")
}
}else{
try(
stop("[extract_IrradiationTimes()] XSYG-file and BINX-file did not contain similar entries. BINX-file update skipped!",call. = FALSE))
}
}
return(set_RLum(class = "RLum.Results", data = list(irr.times = results)))
} |
AutoLightGBMHurdleCARMA <- function(data,
NonNegativePred = FALSE,
Threshold = NULL,
RoundPreds = FALSE,
TrainOnFull = FALSE,
TargetColumnName = 'Target',
DateColumnName = 'DateTime',
HierarchGroups = NULL,
GroupVariables = NULL,
EncodingMethod = "credibility",
TimeWeights = 1,
FC_Periods = 30,
TimeUnit = 'week',
TimeGroups = c('weeks','months'),
NumOfParDepPlots = 10L,
TargetTransformation = FALSE,
Methods = c('BoxCox', 'Asinh', 'Log', 'LogPlus1', 'Sqrt', 'Asin', 'Logit'),
AnomalyDetection = NULL,
XREGS = NULL,
Lags = c(1L:5L),
MA_Periods = c(2L:5L),
SD_Periods = NULL,
Skew_Periods = NULL,
Kurt_Periods = NULL,
Quantile_Periods = NULL,
Quantiles_Selected = c('q5','q95'),
Difference = TRUE,
FourierTerms = 6L,
CalendarVariables = c('second', 'minute', 'hour', 'wday', 'mday', 'yday', 'week', 'wom', 'isoweek', 'month', 'quarter', 'year'),
HolidayVariable = c('USPublicHolidays','EasterGroup','ChristmasGroup','OtherEcclesticalFeasts'),
HolidayLookback = NULL,
HolidayLags = 1L,
HolidayMovingAverages = 1L:2L,
TimeTrendVariable = FALSE,
ZeroPadSeries = NULL,
DataTruncate = FALSE,
SplitRatios = c(0.7, 0.2, 0.1),
PartitionType = 'timeseries',
Timer = TRUE,
DebugMode = FALSE,
EvalMetric = 'RMSE',
GridTune = FALSE,
PassInGrid = NULL,
ModelCount = 100,
MaxRunsWithoutNewWinner = 50,
MaxRunMinutes = 24L*60L,
input_model = list('classifier' = NULL, 'regression' = NULL),
task = list('classifier' = 'train', 'regression' = 'train'),
device_type = list('classifier' = 'CPU', 'regression' = 'CPU'),
objective = list('classifier' = 'binary', 'regression' = 'regression'),
metric = list('classifier' = 'binary_logloss', 'regression' = 'rmse'),
boosting = list('classifier' = 'gbdt', 'regression' = 'gbdt'),
LinearTree = list('classifier' = FALSE, 'regression' = FALSE),
Trees = list('classifier' = 1000L, 'regression' = 1000L),
eta = list('classifier' = NULL, 'regression' = NULL),
num_leaves = list('classifier' = 31, 'regression' = 31),
deterministic = list('classifier' = TRUE, 'regression' = TRUE),
force_col_wise = list('classifier' = FALSE, 'regression' = FALSE),
force_row_wise = list('classifier' = FALSE, 'regression' = FALSE),
max_depth = list('classifier' = NULL, 'regression' = NULL),
min_data_in_leaf = list('classifier' = 20, 'regression' = 20),
min_sum_hessian_in_leaf = list('classifier' = 0.001, 'regression' = 0.001),
bagging_freq = list('classifier' = 0, 'regression' = 0),
bagging_fraction = list('classifier' = 1.0, 'regression' = 1.0),
feature_fraction = list('classifier' = 1.0, 'regression' = 1.0),
feature_fraction_bynode = list('classifier' = 1.0, 'regression' = 1.0),
extra_trees = list('classifier' = FALSE, 'regression' = FALSE),
early_stopping_round = list('classifier' = 10, 'regression' = 10),
first_metric_only = list('classifier' = TRUE, 'regression' = TRUE),
max_delta_step = list('classifier' = 0.0, 'regression' = 0.0),
lambda_l1 = list('classifier' = 0.0, 'regression' = 0.0),
lambda_l2 = list('classifier' = 0.0, 'regression' = 0.0),
linear_lambda = list('classifier' = 0.0, 'regression' = 0.0),
min_gain_to_split = list('classifier' = 0, 'regression' = 0),
drop_rate_dart = list('classifier' = 0.10, 'regression' = 0.10),
max_drop_dart = list('classifier' = 50, 'regression' = 50),
skip_drop_dart = list('classifier' = 0.50, 'regression' = 0.50),
uniform_drop_dart = list('classifier' = FALSE, 'regression' = FALSE),
top_rate_goss = list('classifier' = FALSE, 'regression' = FALSE),
other_rate_goss = list('classifier' = FALSE, 'regression' = FALSE),
monotone_constraints = list('classifier' = NULL, 'regression' = NULL),
monotone_constraints_method = list('classifier' = 'advanced', 'regression' = 'advanced'),
monotone_penalty = list('classifier' = 0.0, 'regression' = 0.0),
forcedsplits_filename = list('classifier' = NULL, 'regression' = NULL),
refit_decay_rate = list('classifier' = 0.90, 'regression' = 0.90),
path_smooth = list('classifier' = 0.0, 'regression' = 0.0),
max_bin = list('classifier' = 255, 'regression' = 255),
min_data_in_bin = list('classifier' = 3, 'regression' = 3),
data_random_seed = list('classifier' = 1, 'regression' = 1),
is_enable_sparse = list('classifier' = TRUE, 'regression' = TRUE),
enable_bundle = list('classifier' = TRUE, 'regression' = TRUE),
use_missing = list('classifier' = TRUE, 'regression' = TRUE),
zero_as_missing = list('classifier' = FALSE, 'regression' = FALSE),
two_round = list('classifier' = FALSE, 'regression' = FALSE),
convert_model = list('classifier' = NULL, 'regression' = NULL),
convert_model_language = list('classifier' = "cpp", 'regression' = "cpp"),
boost_from_average = list('classifier' = TRUE, 'regression' = TRUE),
is_unbalance = list('classifier' = FALSE, 'regression' = FALSE),
scale_pos_weight = list('classifier' = 1.0, 'regression' = 1.0),
is_provide_training_metric = list('classifier' = TRUE, 'regression' = TRUE),
eval_at = list('classifier' = c(1,2,3,4,5), 'regression' = c(1,2,3,4,5)),
num_machines = list('classifier' = 1, 'regression' = 1),
gpu_platform_id = list('classifier' = -1, 'regression' = -1),
gpu_device_id = list('classifier' = -1, 'regression' = -1),
gpu_use_dp = list('classifier' = TRUE, 'regression' = TRUE),
num_gpu = list('classifier' = 1, 'regression' = 1)) {
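# CARMA workflow: feature engineering (calendar, holiday, Fourier, lag and
# rolling-stat features), training of a LightGBM hurdle model via
# AutoLightGBMHurdleModel(), then iterative one-step-ahead forecasting over
# FC_Periods.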
Args <- CARMA_Define_Args(TimeUnit=TimeUnit,TimeGroups=TimeGroups,HierarchGroups=HierarchGroups,GroupVariables=GroupVariables,FC_Periods=FC_Periods,PartitionType=PartitionType,TrainOnFull=TrainOnFull,SplitRatios=SplitRatios)
IndepentVariablesPass <- Args$IndepentVariablesPass
HoldOutPeriods <- Args$HoldOutPeriods
HierarchGroups <- Args$HierarchGroups
GroupVariables <- Args$GroupVariables
TimeGroups <- Args$TimeGroups
FC_Periods <- Args$FC_Periods
TimeGroup <- Args$TimeGroupPlaceHolder
TimeUnit <- Args$TimeUnit
if(!data.table::is.data.table(data)) data.table::setDT(data)
if(!is.null(XREGS) && !data.table::is.data.table(XREGS)) data.table::setDT(XREGS)
if(!TrainOnFull) HoldOutPeriods <- round(SplitRatios[2L] * length(unique(data[[eval(DateColumnName)]])), 0L)
if(DebugMode) print('Feature Engineering: Add Zero Padding for missing dates----')
if(data[, .N] != unique(data)[, .N]) stop('There is duplicates in your data')
if(!is.null(ZeroPadSeries)) {
data <- TimeSeriesFill(data, DateColumnName=eval(DateColumnName), GroupVariables=GroupVariables, TimeUnit=TimeUnit, FillType=ZeroPadSeries, MaxMissingPercent=0.0, SimpleImpute=FALSE)
data <- ModelDataPrep(data=data, Impute=TRUE, CharToFactor=FALSE, FactorToChar=FALSE, IntToNumeric=FALSE, LogicalToBinary=FALSE, DateToChar=FALSE, RemoveDates=FALSE, MissFactor='0', MissNum=0, IgnoreCols=NULL)
} else {
temp <- TimeSeriesFill(data, DateColumnName=eval(DateColumnName), GroupVariables=GroupVariables, TimeUnit=TimeUnit, FillType='maxmax', MaxMissingPercent=0.25, SimpleImpute=FALSE)
if(temp[,.N] != data[,.N]) stop('There are missing dates in your series. You can utilize the ZeroPadSeries argument to handle this or manage it before running the function')
}
if(DebugMode) print('Forecast horizon: compute FC_Periods and HoldOutPeriods ----')
Output <- CarmaFCHorizon(data.=data, XREGS.=XREGS, TrainOnFull.=TrainOnFull, Difference.= Difference, FC_Periods.=FC_Periods, HoldOutPeriods.=HoldOutPeriods, DateColumnName.=DateColumnName)
FC_Periods <- Output$FC_Periods
HoldOutPeriods <- Output$HoldOutPeriods; rm(Output)
if(DebugMode) print('merging xregs to data')
if(!is.null(XREGS)) {
Output <- CarmaMergeXREGS(data.=data, XREGS.=XREGS, TargetColumnName.=TargetColumnName, GroupVariables.=GroupVariables, DateColumnName.=DateColumnName)
data <- Output$data; Output$data <- NULL
XREGS <- Output$XREGS; rm(Output)
}
if(DebugMode) print('Set keys for data and XREGS ----')
if(!is.null(GroupVariables)) {
data.table::setkeyv(x = data, cols = c(eval(GroupVariables), eval(DateColumnName)))
if(!is.null(XREGS)) data.table::setkeyv(x = XREGS, cols = c('GroupVar', eval(DateColumnName)))
} else {
data.table::setkeyv(x = data, cols = c(eval(DateColumnName)))
if(!is.null(XREGS)) data.table::setkeyv(x = XREGS, cols = c(eval(DateColumnName)))
}
if(DebugMode) print('Data Wrangling: Remove Unnecessary Columns ----')
data <- CarmaSubsetColumns(data.=data, XREGS.=XREGS, GroupVariables.=GroupVariables, DateColumnName.=DateColumnName, TargetColumnName.=TargetColumnName)
if(DebugMode) print('Feature Engineering: Concat Categorical Columns - easier to deal with this way ----')
if(!is.null(GroupVariables)) {
data[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = GroupVariables]
if(length(GroupVariables) > 1L) data[, eval(GroupVariables) := NULL] else if(GroupVariables != 'GroupVar') data[, eval(GroupVariables) := NULL]
}
if(DebugMode) print('Variables for Program: Store unique values of GroupVar in GroupVarVector ----')
if(!is.null(GroupVariables)) {
GroupVarVector <- data.table::as.data.table(x = unique(as.character(data[['GroupVar']])))
data.table::setnames(GroupVarVector, 'V1', 'GroupVar')
}
if(DebugMode) print('Data Wrangling: Standardize column ordering ----')
if(!is.null(GroupVariables)) data.table::setcolorder(data, c('GroupVar', eval(DateColumnName), eval(TargetColumnName))) else data.table::setcolorder(data, c(eval(DateColumnName), eval(TargetColumnName)))
if(DebugMode) print('Data Wrangling: Convert DateColumnName to Date or POSIXct ----')
Output <- CarmaDateStandardize(data.=data, XREGS.=NULL, DateColumnName.=DateColumnName, TimeUnit.=TimeUnit)
data <- Output$data; Output$data <- NULL
XREGS <- Output$XREGS; rm(Output)
if(DebugMode) print('Data Wrangling: Ensure TargetColumnName is Numeric ----')
if(!is.numeric(data[[eval(TargetColumnName)]])) data[, eval(TargetColumnName) := as.numeric(get(TargetColumnName))]
if(DebugMode) print('Variables for Program: Store number of data partitions in NumSets ----')
NumSets <- length(SplitRatios)
if(DebugMode) print('Variables for Program: Store Maximum Value of TargetColumnName in val ----')
if(!is.null(Lags)) {
if(is.list(Lags) && is.list(MA_Periods)) val <- max(unlist(Lags), unlist(MA_Periods)) else val <- max(Lags, MA_Periods)
}
if(DebugMode) print('Data Wrangling: Sort data by GroupVar then DateColumnName ----')
if(!is.null(GroupVariables)) data <- data[order(GroupVar, get(DateColumnName))] else data <- data[order(get(DateColumnName))]
if(DebugMode) print('Feature Engineering: Fourier Features ----')
Output <- CarmaFourier(data.=data, XREGS.=XREGS, FourierTerms.=FourierTerms, TimeUnit.=TimeUnit, TargetColumnName.=TargetColumnName, GroupVariables.=GroupVariables, DateColumnName.=DateColumnName, HierarchGroups.=HierarchGroups)
FourierTerms <- Output$FourierTerms; Output$FourierTerms <- NULL
FourierFC <- Output$FourierFC; Output$FourierFC <- NULL
data <- Output$data; rm(Output)
if(DebugMode) print('Feature Engineering: Add Create Calendar Variables ----')
if(!is.null(CalendarVariables)) data <- CreateCalendarVariables(data=data, DateCols=eval(DateColumnName), AsFactor=FALSE, TimeUnits=CalendarVariables)
if(DebugMode) print('Feature Engineering: Add Create Holiday Variables ----')
if(!is.null(HolidayVariable)) {
data <- CreateHolidayVariables(data, DateCols = eval(DateColumnName), LookbackDays = if(!is.null(HolidayLookback)) HolidayLookback else LB(TimeUnit), HolidayGroups = HolidayVariable, Holidays = NULL)
if(!(tolower(TimeUnit) %chin% c('1min','5min','10min','15min','30min','hour'))) {
data[, eval(DateColumnName) := lubridate::as_date(get(DateColumnName))]
} else {
data[, eval(DateColumnName) := as.POSIXct(get(DateColumnName))]
}
}
if(DebugMode) print('Anomaly detection by Group and Calendar Vars ----')
if(!is.null(AnomalyDetection)) {
data <- GenTSAnomVars(
data = data, ValueCol = eval(TargetColumnName),
GroupVars = if(!is.null(CalendarVariables) && !is.null(GroupVariables)) c('GroupVar', paste0(DateColumnName, '_', CalendarVariables[1])) else if(!is.null(GroupVariables)) 'GroupVar' else NULL,
DateVar = eval(DateColumnName), KeepAllCols = TRUE, IsDataScaled = FALSE,
HighThreshold = AnomalyDetection$tstat_high,
LowThreshold = AnomalyDetection$tstat_low)
data[, paste0(eval(TargetColumnName), '_zScaled') := NULL]
data[, ':=' (RowNumAsc = NULL, CumAnomHigh = NULL, CumAnomLow = NULL, AnomHighRate = NULL, AnomLowRate = NULL)]
}
if(DebugMode) print('Feature Engineering: Add Target Transformation ----')
if(TargetTransformation) {
TransformResults <- AutoTransformationCreate(data, ColumnNames=TargetColumnName, Methods=Methods, Path=NULL, TransID='Trans', SaveOutput=FALSE)
data <- TransformResults$Data; TransformResults$Data <- NULL
TransformObject <- TransformResults$FinalResults; rm(TransformResults)
} else {
TransformObject <- NULL
}
if(DebugMode) print('Copy data for non grouping + difference ----')
if(is.null(GroupVariables) && Difference) antidiff <- data.table::copy(data[, .SD, .SDcols = c(eval(TargetColumnName),eval(DateColumnName))])
if(DebugMode) print('Feature Engineering: Add Difference Data ----')
Output <- CarmaDifferencing(GroupVariables.=GroupVariables, Difference.=Difference, data.=data, TargetColumnName.=TargetColumnName, FC_Periods.=FC_Periods)
data <- Output$data; Output$data <- NULL
dataStart <- Output$dataStart; Output$dataStart <- NULL
FC_Periods <- Output$FC_Periods; Output$FC_Periods <- NULL
Train <- Output$Train; rm(Output)
if(DebugMode) print('Feature Engineering: Lags and Rolling Stats ----')
Output <- CarmaTimeSeriesFeatures(data.=data, TargetColumnName.=TargetColumnName, DateColumnName.=DateColumnName, GroupVariables.=GroupVariables, HierarchGroups.=HierarchGroups, Difference.=Difference, TimeGroups.=TimeGroups, TimeUnit.=TimeUnit, Lags.=Lags, MA_Periods.=MA_Periods, SD_Periods.=SD_Periods, Skew_Periods.=Skew_Periods, Kurt_Periods.=Kurt_Periods, Quantile_Periods.=Quantile_Periods, Quantiles_Selected.=Quantiles_Selected, HolidayVariable.=HolidayVariable, HolidayLags.=HolidayLags, HolidayMovingAverages.=HolidayMovingAverages, DebugMode.=DebugMode)
IndependentSupplyValue <- Output$IndependentSupplyValue; Output$IndependentSupplyValue <- NULL
HierarchSupplyValue <- Output$HierarchSupplyValue; Output$HierarchSupplyValue <- NULL
GroupVarVector <- Output$GroupVarVector; Output$GroupVarVector <- NULL
Categoricals <- Output$Categoricals; Output$Categoricals <- NULL
data <- Output$data; rm(Output)
if(!is.null(GroupVariables) && !'GroupVar' %chin% names(data)) data[, GroupVar := do.call(paste, c(.SD, sep = ' ')), .SDcols = c(GroupVariables)]
if(DebugMode) print('Data Wrangling: ModelDataPrep() to prepare data ----')
data <- ModelDataPrep(data=data, Impute=TRUE, IntToNumeric=TRUE, DateToChar=FALSE, FactorToChar=FALSE, CharToFactor=TRUE, LogicalToBinary=FALSE, RemoveDates=FALSE, MissFactor='0', MissNum=-1, IgnoreCols=NULL)
if(DebugMode) print('Data Wrangling: Remove dates with imputed data from the DT_GDL_Feature_Engineering() features ----')
if(DataTruncate && !is.null(Lags)) data <- CarmaTruncateData(data.=data, DateColumnName.=DateColumnName, TimeUnit.=TimeUnit)
if(DebugMode) print('Feature Engineering: Add TimeTrend Variable----')
if(TimeTrendVariable) {
if(!is.null(GroupVariables)) data[, TimeTrend := seq_len(.N), by = 'GroupVar'] else data[, TimeTrend := seq_len(.N)]
}
if(DebugMode) print('Create TimeWeights ----')
train <- CarmaTimeWeights(train.=data, TimeWeights.=TimeWeights, GroupVariables.=GroupVariables, DateColumnName.=DateColumnName)
FutureDateData <- unique(data[, get(DateColumnName)])
if(DebugMode) print('Data Wrangling: Partition data with AutoDataPartition()----')
Output <- CarmaPartition(data.=data, SplitRatios.=if(TrainOnFull) NULL else SplitRatios, TrainOnFull.=TrainOnFull, NumSets.=NumSets, PartitionType.=PartitionType, GroupVariables.=GroupVariables, DateColumnName.=DateColumnName)
train <- Output$train; Output$train <- NULL
valid <- Output$valid; Output$valid <- NULL
data <- Output$data; Output$data <- NULL
test <- Output$test; rm(Output)
if(DebugMode) print('Variables for CARMA function:IDcols----')
IDcols <- names(data)[which(names(data) %chin% DateColumnName)]
IDcols <- c(IDcols, names(data)[which(names(data) == TargetColumnName)])
if(DebugMode) print('Data Wrangling: copy data or train for later in function since AutoRegression will modify data and train ----')
if(!is.null(GroupVariables)) data.table::setorderv(x = data, cols = c('GroupVar',eval(DateColumnName)), order = c(1,1)) else data.table::setorderv(x = data, cols = c(eval(DateColumnName)), order = c(1))
Step1SCore <- data.table::copy(data)
if(DebugMode) print('Define ML args ----')
Output <- CarmaFeatures(data.=data, train.=train, XREGS.=XREGS, Difference.=Difference, TargetColumnName.=TargetColumnName, DateColumnName.=DateColumnName, GroupVariables.=GroupVariables)
ModelFeatures <- Output$ModelFeatures
TargetVariable <- Output$TargetVariable; rm(Output)
if(!is.null(SplitRatios) || !TrainOnFull) TOF <- FALSE else TOF <- TRUE
if(DebugMode) print('Run AutoLightGBMHurdleModel() and return list of ml objects ----')
TestModel <- AutoLightGBMHurdleModel(
ModelID = 'ModelTest',
SaveModelObjects = FALSE,
ReturnModelObjects = TRUE,
data = data.table::copy(train),
TrainOnFull = TrainOnFull,
ValidationData = data.table::copy(valid),
TestData = data.table::copy(test),
Buckets = 0L,
TargetColumnName = TargetVariable,
FeatureColNames = ModelFeatures,
PrimaryDateColumn = eval(DateColumnName),
WeightsColumnName = if('Weights' %chin% names(train)) 'Weights' else NULL,
IDcols = IDcols,
EncodingMethod = EncodingMethod,
DebugMode = DebugMode,
Paths = getwd(),
MetaDataPaths = NULL,
TransformNumericColumns = NULL,
Methods = NULL,
ClassWeights = c(1,1),
SplitRatios = c(0.70, 0.20, 0.10),
NumOfParDepPlots = NumOfParDepPlots,
PassInGrid = PassInGrid,
GridTune = GridTune,
BaselineComparison = 'default',
MaxModelsInGrid = 500L,
MaxRunsWithoutNewWinner = 100L,
MaxRunMinutes = 60*60,
input_model = input_model,
task = task,
device_type = device_type,
objective = objective,
metric = metric,
boosting = boosting,
LinearTree = LinearTree,
Trees = Trees,
eta = eta,
num_leaves = num_leaves,
deterministic = deterministic,
force_col_wise = force_col_wise,
force_row_wise = force_row_wise,
max_depth = max_depth,
min_data_in_leaf = min_data_in_leaf,
min_sum_hessian_in_leaf = min_sum_hessian_in_leaf,
bagging_freq = bagging_freq,
bagging_fraction = bagging_fraction,
feature_fraction = feature_fraction,
feature_fraction_bynode = feature_fraction_bynode,
extra_trees = extra_trees,
early_stopping_round = early_stopping_round,
first_metric_only = first_metric_only,
max_delta_step = max_delta_step,
lambda_l1 = lambda_l1,
lambda_l2 = lambda_l2,
linear_lambda = linear_lambda,
min_gain_to_split = min_gain_to_split,
drop_rate_dart = drop_rate_dart,
max_drop_dart = max_drop_dart,
skip_drop_dart = skip_drop_dart,
uniform_drop_dart = uniform_drop_dart,
top_rate_goss = top_rate_goss,
other_rate_goss = other_rate_goss,
monotone_constraints = monotone_constraints,
monotone_constraints_method = monotone_constraints_method,
monotone_penalty = monotone_penalty,
forcedsplits_filename = forcedsplits_filename,
refit_decay_rate = refit_decay_rate,
path_smooth = path_smooth,
max_bin = max_bin,
min_data_in_bin = min_data_in_bin,
data_random_seed = data_random_seed,
is_enable_sparse = is_enable_sparse,
enable_bundle = enable_bundle,
use_missing = use_missing,
zero_as_missing = zero_as_missing,
two_round = two_round,
convert_model = convert_model,
convert_model_language = convert_model_language,
boost_from_average = boost_from_average,
is_unbalance = is_unbalance,
scale_pos_weight = scale_pos_weight,
is_provide_training_metric = is_provide_training_metric,
eval_at = eval_at,
num_machines = num_machines,
gpu_platform_id = gpu_platform_id,
gpu_device_id = gpu_device_id,
gpu_use_dp = gpu_use_dp,
num_gpu = num_gpu)
if(!is.null(Threshold)) {
threshold <- TestModel$ClassifierModel$EvaluationMetrics
col <- names(threshold)[grep(pattern = Threshold, x = names(threshold))]
Threshold <- threshold[, .SD, .SDcols = c('Threshold', eval(col))][order(-get(col))][1,1][[1]]
}
if(!TrainOnFull) return(TestModel)
if(DebugMode) options(warn = 2)
if(DebugMode) print('Variable for interation counts: max number of rows in Step1SCore data.table across all group ----')
N <- CarmaRecordCount(GroupVariables.=GroupVariables,Difference.=Difference, Step1SCore.=Step1SCore)
if(DebugMode) print('ARMA PROCESS FORECASTING----')
for(i in seq_len(FC_Periods+1L)) {
if(DebugMode) print('Row counts----')
if(i != 1) N <- as.integer(N + 1L)
if(DebugMode) print('Machine Learning: Generate predictions----')
if(i == 1L) {
if(!is.null(GroupVariables)) {
print("here yo")
print(Step1SCore)
Preds <- AutoLightGBMHurdleModelScoring(
TestData = data.table::copy(Step1SCore),
Path = NULL,
ModelID = 'ModelTest',
ModelList = TestModel$ModelList,
ArgsList = TestModel$ArgsList,
Threshold = Threshold,
CARMA = TRUE)
data.table::set(Preds, j = c(names(Preds)[2L:5L]), value = NULL)
data.table::set(Preds, j = eval(DateColumnName), value = NULL)
data.table::setnames(Preds, 'UpdatedPrediction', 'Predictions')
data.table::setcolorder(Preds, c(2L,1L,3L:ncol(Preds)))
if(RoundPreds) Preds[, Predictions := round(Predictions)]
} else {
Preds <- AutoLightGBMHurdleModelScoring(
TestData = data.table::copy(Step1SCore),
Path = NULL,
ModelID = 'ModelTest',
ModelList = TestModel$ModelList,
ArgsList = TestModel$ArgsList,
Threshold = Threshold,
CARMA = TRUE)
data.table::set(Preds, j = c(names(Preds)[2L:5L]), value = NULL)
if(DateColumnName %chin% names(Preds)) data.table::set(Preds, j = eval(DateColumnName), value = NULL)
data.table::setnames(Preds, 'UpdatedPrediction', 'Predictions')
data.table::setcolorder(Preds, c(2L,1L,3L:ncol(Preds)))
if(RoundPreds) Preds[, Predictions := round(Predictions)]
}
if(Difference) {
if(eval(TargetColumnName) %chin% names(Step1SCore) && eval(TargetColumnName) %chin% names(Preds)) {
data.table::set(Preds, j = eval(TargetColumnName), value = NULL)
}
if(eval(DateColumnName) %chin% names(Step1SCore)) data.table::set(Step1SCore, j = eval(DateColumnName), value = NULL)
if(eval(DateColumnName) %chin% names(Preds)) data.table::set(Preds, j = eval(DateColumnName), value = NULL)
if(!is.null(GroupVariables)) {
UpdateData <- cbind(FutureDateData, Step1SCore[, .SD, .SDcols = eval(TargetColumnName)],Preds)
} else {
UpdateData <- cbind(FutureDateData[2L:(nrow(Step1SCore)+1L)], Step1SCore[, .SD, .SDcols = eval(TargetColumnName)],Preds)
}
data.table::setnames(UpdateData, 'FutureDateData', eval(DateColumnName))
} else {
if(NonNegativePred) Preds[, Predictions := data.table::fifelse(Predictions < 0.5, 0, Predictions)]
UpdateData <- cbind(FutureDateData[1L:N], Preds)
data.table::setnames(UpdateData, c('V1'), c(eval(DateColumnName)))
}
} else {
if(!is.null(GroupVariables)) {
if(Difference) IDcols = 'ModTarget' else IDcols <- eval(TargetColumnName)
if(!is.null(HierarchGroups)) {
temp <- data.table::copy(UpdateData[, ID := seq_len(.N), by = c(eval(GroupVariables))])
temp <- temp[ID == N][, ID := NULL]
} else {
temp <- data.table::copy(UpdateData[, ID := seq_len(.N), by = 'GroupVar'])
temp <- temp[ID == N][, ID := NULL]
}
if('Predictions' %chin% names(temp)) data.table::set(temp, j = 'Predictions', value = NULL)
Preds <- AutoLightGBMHurdleModelScoring(
TestData = temp,
Path = NULL,
ModelID = 'ModelTest',
ModelList = TestModel$ModelList,
ArgsList = TestModel$ArgsList,
Threshold = Threshold,
CARMA = TRUE)
Preds[, (setdiff(names(Preds),'UpdatedPrediction')) := NULL]
data.table::setnames(Preds, 'UpdatedPrediction', 'Predictions')
if(RoundPreds) Preds[, Predictions := round(Predictions)]
if(DebugMode) print('Update data group case----')
data.table::setnames(Preds, 'Predictions', 'Preds')
if(NonNegativePred & !Difference) Preds[, Preds := data.table::fifelse(Preds < 0.5, 0, Preds)]
Preds <- cbind(UpdateData[ID == N], Preds)
if(Difference) Preds[, ModTarget := Preds][, eval(TargetColumnName) := Preds] else Preds[, eval(TargetColumnName) := Preds]
Preds[, Predictions := Preds][, Preds := NULL]
UpdateData <- UpdateData[ID != N]
if(any(class(UpdateData$Date) %chin% c('POSIXct','POSIXt')) && any(class(Preds$Date) == 'Date')) UpdateData[, eval(DateColumnName) := as.Date(get(DateColumnName))]
UpdateData <- data.table::rbindlist(list(UpdateData, Preds))
if(Difference) UpdateData[ID %in% c(N-1,N), eval(TargetColumnName) := cumsum(get(TargetColumnName)), by = 'GroupVar']
UpdateData[, ID := NULL]
} else {
temp <- UpdateData[.N, .SD, .SDcols = c(setdiff(names(UpdateData), "Predictions"))]
Preds <- AutoLightGBMHurdleModelScoring(
TestData = temp,
Path = NULL,
ModelID = 'ModelTest',
ModelList = TestModel$ModelList,
ArgsList = TestModel$ArgsList,
Threshold = Threshold,
CARMA = TRUE)
Preds[, (setdiff(names(Preds),'UpdatedPrediction')) := NULL]
data.table::setnames(Preds, 'UpdatedPrediction', 'Predictions')
if(RoundPreds) Preds[, Predictions := round(Predictions)]
if(DebugMode) print('Update data non-group case----')
data.table::set(UpdateData, i = UpdateData[, .N], j = which(names(UpdateData) %chin% c(TargetColumnName, "Predictions")), value = Preds[[1L]])
}
}
if(i != FC_Periods+1L) {
if(DebugMode) print('Timer----')
if(Timer) if(i != 1) print(paste('Forecast future step: ', i-1))
if(Timer) starttime <- Sys.time()
if(DebugMode) print('Create single future record ----')
CalendarFeatures <- NextTimePeriod(UpdateData.=UpdateData, TimeUnit.=TimeUnit, DateColumnName.=DateColumnName)
if(DebugMode) print('Update feature engineering ----')
UpdateData <- UpdateFeatures(UpdateData.=UpdateData, GroupVariables.=GroupVariables, CalendarFeatures.=CalendarFeatures, CalendarVariables.=CalendarVariables, GroupVarVector.=GroupVarVector, DateColumnName.=DateColumnName, XREGS.=XREGS, FourierTerms.=FourierTerms, FourierFC.=FourierFC, TimeGroups.=TimeGroups, TimeTrendVariable.=TimeTrendVariable, N.=N, TargetColumnName.=TargetColumnName, HolidayVariable.=HolidayVariable, HolidayLookback.=HolidayLookback, TimeUnit.=TimeUnit, AnomalyDetection.=AnomalyDetection, i.=i)
if(DebugMode) print('Update Lags and MAs ----')
UpdateData <- CarmaRollingStatsUpdate(ModelType='catboost', DebugMode.=DebugMode, UpdateData.=UpdateData, GroupVariables.=GroupVariables, Difference.=Difference, CalendarVariables.=CalendarVariables, HolidayVariable.=HolidayVariable, IndepVarPassTRUE.=IndepentVariablesPass, data.=data, CalendarFeatures.=CalendarFeatures, XREGS.=XREGS, HierarchGroups.=HierarchGroups, GroupVarVector.=GroupVarVector, TargetColumnName.=TargetColumnName, DateColumnName.=DateColumnName, Preds.=Preds, HierarchSupplyValue.=HierarchSupplyValue, IndependentSupplyValue.=IndependentSupplyValue, TimeUnit.=TimeUnit, TimeGroups.=TimeGroups, Lags.=Lags, MA_Periods.=MA_Periods, SD_Periods.=SD_Periods, Skew_Periods.=Skew_Periods, Kurt_Periods.=Kurt_Periods, Quantile_Periods.=Quantile_Periods, Quantiles_Selected.=Quantiles_Selected, HolidayLags.=HolidayLags, HolidayMovingAverages.=HolidayMovingAverages)
if("Weights" %chin% names(UpdateData)) data.table::set(UpdateData, i = N+1L, j = "Weights", value = 1.0)
if(Timer) endtime <- Sys.time()
if(Timer && i != 1) print(endtime - starttime)
}
}
gc()
if(DebugMode) print('Return data prep ----')
Output <- CarmaReturnDataPrep(UpdateData.=UpdateData, FutureDateData.=FutureDateData, dataStart.=dataStart, DateColumnName.=DateColumnName, TargetColumnName.=TargetColumnName, GroupVariables.=GroupVariables, Difference.=Difference, TargetTransformation.=TargetTransformation, TransformObject.=TransformObject, NonNegativePred.=NonNegativePred)
UpdateData <- Output$UpdateData; Output$UpdateData <- NULL
TransformObject <- Output$TransformObject; rm(Output)
if(is.null(GroupVariables) && "Predictions0" %chin% names(UpdateData)) data.table::set(UpdateData, j = 'Predictions0', value = NULL)
return(list(
Forecast = UpdateData,
ModelInformation = TestModel,
TransformationDetail = if(exists('TransformObject') && !is.null(TransformObject)) TransformObject else NULL))
} |
context("getTimezone()")
testthat::test_that("get functions return correct name", {
testthat::expect_match(getTimezone(2, 47), "Europe/Paris")
testthat::expect_match(getTimezone(-80, 40), "America/New_York")
testthat::expect_match(getTimezone(c(120,-7), c(-1.5,15)), "Asia/Makassar|Africa/Bamako")
})
testthat::test_that("subsetting with countryCodes works", {
testthat::expect_match(getTimezone(2, 47), "Europe/Paris")
testthat::expect_match(getTimezone(2, 47, countryCodes = c("FR")), "Europe/Paris")
testthat::expect_match(getTimezone(2, 47, countryCodes = "FR"), "Europe/Paris")
})
testthat::test_that("allData returns are correct dimension and type", {
testthat::expect_s3_class(getTimezone(2, 47, allData = TRUE), "data.frame")
testthat::expect_equal(dim(getTimezone(2, 47, allData = TRUE)), c(1,9))
testthat::expect_s3_class(getTimezone(c(120,-17), c(-1.5,15), allData = TRUE), "data.frame")
testthat::expect_equal(dim(getTimezone(c(120,-17), c(-1.5,15), allData = TRUE)), c(2,9))
})
testthat::test_that("getPolygonID handles errors correctly", {
testthat::expect_error(getPolygonID(iris))
testthat::expect_is(getPolygonID(SimpleTimezones), "character")
testthat::expect_is(getPolygonID(SimpleCountries), "character")
}) |
rmix.norm <-
function (n,alpha,mu,sigma=rep(1,length(alpha)))
{
m=length(alpha)
alpha=alpha/sum(alpha)
data=c()
nindex=rmultinom(1,n,alpha)
for( i in 1:m)
data=c(data,rnorm(nindex[i],mu[i],sigma[i]))
data
} |
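# Example: draw 500 observations from a two-component normal mixture
# x <- rmix.norm(500, alpha = c(0.3, 0.7), mu = c(0, 4), sigma = c(1, 2))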
check_dates <- function(x, error_on_NA = FALSE, ...) {
if (is.null(x)) {
stop("dates is NULL", call. = FALSE)
}
if (is.character(x)) {
x <- as.Date(x, ...)
}
not_finite <- !is.finite(x)
if (sum(not_finite) > 0) {
x[not_finite] <- NA
}
if (any(is.na(x)) && error_on_NA) {
msg <- "NA detected in the dates"
stop(msg, call. = FALSE)
}
if (sum(!is.na(x)) < 1) {
stop("At least one (non-NA) date must be provided", call. = FALSE)
}
if (inherits(x, "Date")) {
check_timespan(x)
return(x)
}
if (inherits(x, "POSIXt")) {
check_timespan(x)
return(x)
}
if (is.integer(x)) {
return(x)
}
if (is.numeric(x)) {
x_ori <- x
x <- as.integer(floor(x))
if (!isTRUE(note <- all.equal(x, x_ori))) {
msg <- paste0(
"Flooring from non-integer date caused approximations:\n",
note)
warning(msg, call. = FALSE)
}
return(x)
}
formats <- c("Date", "POSIXct", "integer", "numeric", "character")
msg <- paste0(
"Input could not be converted to date. Accepted formats are:\n",
paste(formats, collapse = ", "))
stop(msg)
} |
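# Examples: character input is converted to Date; numeric input is floored to
# integer with a warning when information is lost.
# check_dates(c("2020-03-01", "2020-03-05"))
# check_dates(c(1, 2.5, 7)) # warns about flooring 2.5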
jt <- function(x, evidence = NULL, flow = "sum", propagate = "full") UseMethod("jt")
jt.charge <- function(x, evidence = NULL, flow = "sum", propagate = "full") {
if (!attr(x, "cpts_initialized")) {
stop("The CPTs are not yet initialized. Use either 'set_evidence' or 'initialize'.")
}
if (!is.null(evidence)) {
if (!valid_evidence(attr(x, "dim_names"), evidence)) {
stop("evidence is not on correct form", call. = FALSE)
}
attr(x, "evidence") <- c(attr(x, "evidence"), evidence)
}
j <- new_jt(x, evidence, flow)
attr(j, "propagated") <- "no"
attr(j, "type") <- ifelse(inherits(x, "bn"), "bn", "mrf")
if (length(j$charge$C) == 1L && attr(j, "flow") == "max") {
max_cell <- sparta::which_max_cell(j$charge$C$C1)
attr(j, "mpe")[names(max_cell)] <- max_cell
}
if (propagate == "no") {
return(j)
} else if (propagate == "collect") {
m <- send_messages(j)
while (attr(m, "direction") != "distribute") m <- send_messages(m)
attr(m, "propagated") <- "collect"
return(m)
} else {
m <- send_messages(j)
while (attr(m, "direction") != "full") m <- send_messages(m)
attr(m, "propagated") <- "full"
if (attr(m, "inconsistencies")) {
m$charge$C <- lapply(m$charge$C, sparta::normalize)
m$charge$S <- lapply(m$charge$S, function(s) {
if (is.null(s) || is_scalar(s)) return(s)
sparta::normalize(s)
})
}
return(m)
}
stop("propagate must be either 'no', 'collect' or full", call. = TRUE)
}
propagate <- function(x, prop = "full") UseMethod("propagate")
propagate.jt <- function(x, prop = "full") {
if (prop == "collect") {
if (attr(x, "propagated") == "collect") return(x)
if (attr(x, "propagated") == "full") {
stop("the junction tree is already propageted fully", call. = FALSE)
}
m <- send_messages(x)
while (attr(m, "direction") != "distribute") m <- send_messages(m)
attr(m, "propagated") <- "collect"
return(m)
} else if (prop == "full") {
if (attr(x, "propagated") == "full") return(x)
m <- send_messages(x)
while (attr(m, "direction") != "full") m <- send_messages(m)
attr(m, "propagated") <- "full"
if (attr(m, "inconsistencies")) {
m$charge$C <- lapply(m$charge$C, sparta::normalize)
m$charge$S <- lapply(m$charge$S, function(s) {
if (is.null(s) || is_scalar(s)) return(s)
sparta::normalize(s)
})
}
return(m)
} else {
stop("propagate must be either 'collect' or full", call. = TRUE)
}
}
mpe <- function(x) UseMethod("mpe")
mpe.jt <- function(x) {
if (attr(x, "flow") != "max") stop("The flow of the junction tree is not 'max'.")
attr(x, "mpe")
}
get_cliques <- function(x) UseMethod("get_cliques")
get_cliques.jt <- function(x) x$cliques
get_cliques.charge <- function(x) x$cliques
get_cliques.pot_list <- function(x) attr(x, "cliques")
get_clique_root_idx <- function(x) UseMethod("get_clique_root_idx")
get_clique_root_idx.jt <- function(x) as.integer(gsub("C","",attr(x, "clique_root")))
get_clique_root <- function(x) UseMethod("get_clique_root")
get_clique_root.jt <- function(x) x$cliques[[get_clique_root_idx(x)]]
query_evidence <- function(x) UseMethod("query_evidence")
query_evidence.jt <- function(x) {
if (has_inconsistencies(x)) {
stop(
"The probability of evidence is not meaningful ",
"when there are inconsistencies in the evidence.",
call. = FALSE
)
}
if(attr(x, "flow") != "sum") {
stop(
"The flow of the junction tree must be 'sum'.",
call. = FALSE
)
}
if (attr(x, "propagated") == "no") {
stop("In order to query the probabilty of evidence, ",
"the junction tree must at least be propagted to ",
"the root node (collect).",
call. = FALSE
)
}
return(attr(x, "probability_of_evidence"))
}
leaves <- function(jt) UseMethod("leaves")
leaves.jt <- function(jt) {
direction <- attr(jt, "direction")
if (direction == "full") {
message("The junction tree is already fully propagated. NULL is returned")
return(NULL)
}
x <- if (direction == "collect") jt$schedule$collect else jt$schedule$distribute
lvs <- attr(x$tree, "leaves")
true_clique_names <- names(x$cliques)[lvs]
  true_lvs_indices <- as.integer(gsub("C", "", true_clique_names))
  return(true_lvs_indices)
}
parents <- function(jt) UseMethod("parents")
parents.jt <- function(jt) {
direction <- attr(jt, "direction")
if (direction == "full") {
message("The junction tree is already fully propagated. NULL is returned")
return(NULL)
}
x <- if (direction == "collect") jt$schedule$collect else jt$schedule$distribute
par <- attr(x$tree, "parents")
true_clique_names <- lapply(par, function(p) names(x$cliques)[p])
  true_par_indices <- lapply(true_clique_names, function(tcn) {
    as.integer(gsub("C", "", tcn))
  })
  return(true_par_indices)
}
print.jt <- function(x, ...) {
cls <- paste0("<", paste0(class(x), collapse = ", "), ">")
direction <- attr(x, "direction")
flow <- attr(x, "flow")
nv <- ncol(x$clique_graph)
ne <- sum(x$clique_graph)/2
clique_sizes <- .map_int(x$cliques, length)
max_C <- max(clique_sizes)
min_C <- min(clique_sizes)
avg_C <- mean(clique_sizes)
cat(" Junction Tree",
"\n -------------------------",
"\n Propagated:", attr(x, "propagated"),
"\n Flow:", flow,
"\n Cliques:", length(x$cliques),
"\n - max:", max_C,
"\n - min:", min_C,
"\n - avg:", round(avg_C, 2)
)
inc <- attr(x, "inconsistencies")
e <- attr(x, "evidence")
if (!is.null(e)) {
if (inc) cat("\n Evidence: (inconsistencies)") else cat("\n Evidence:")
for (i in seq_along(e)) {
cat(
"\n -", paste0(names(e[i]), ":"), unname(e[i])
)
}
}
cat(paste0("\n ", cls),
"\n -------------------------\n"
)
}
plot.jt <- function(x, ...) {
direction <- attr(x, "direction")
y <- if (direction == "collect") {
list(
cliques = x$schedule$collect$cliques,
tree = x$schedule$collect$tree,
type = "directed"
)
} else if (direction == "distribute") {
list(
cliques = x$schedule$distribute$cliques,
tree = x$schedule$distribute$tree,
type = "directed"
)
} else {
list(
cliques = x$cliques,
tree = x$clique_graph,
type = "undirected"
)
}
.names <- unlist(lapply(y$cliques, function(z) paste(z, collapse = "\n")))
dimnames(y$tree) <- list(.names, .names)
g <- igraph::graph_from_adjacency_matrix(y$tree, y$type)
graphics::plot(g, ...)
}
plot.charge <- function(x, ...) {
.names <- unlist(lapply(x$cliques, function(z) paste(z, collapse = "\n")))
dimnames(x$schedule$clique_graph) <- list(.names, .names)
g <- igraph::graph_from_adjacency_matrix(x$schedule$clique_graph, "undirected")
graphics::plot(g, ...)
} |
extract_outputs <- function(docx,
track_changes,
wrap,
verbose = FALSE) {
md_tmp <- tempfile(fileext = ".md")
pandoc_convert(normalizePath(docx),
from = "docx+styles+empty_paragraphs",
to = "markdown+fenced_code_blocks",
options = c(
paste0(
"--lua-filter=",
system.file("lua-filters", "extract-outputs.lua", package = "redoc")
),
"--standalone"
),
output = md_tmp,
verbose = verbose
)
yml <- yaml::read_yaml(md_tmp)
yml
} |
library(tidymodels)
library(nycflights13)
library(doMC)
library(rlang)
library(xgboost)
library(vctrs)
num_resamples <- 5
num_grid <- 10
num_cores <- 3
preproc <- "light preprocessing"
par_method <- "resamples"
set.seed(123)
flight_data <-
flights %>%
mutate(
arr_delay = ifelse(arr_delay >= 30, "late", "on_time"),
arr_delay = factor(arr_delay),
date = as.Date(time_hour)
) %>%
inner_join(weather, by = c("origin", "time_hour")) %>%
select(dep_time, flight, origin, dest, air_time, distance,
carrier, date, arr_delay, time_hour) %>%
na.omit() %>%
mutate_if(is.character, as.factor) %>%
sample_n(4000)
flights_rec <-
recipe(arr_delay ~ ., data = flight_data) %>%
update_role(flight, time_hour, new_role = "ID") %>%
step_date(date, features = c("dow", "month")) %>%
step_holiday(date, holidays = timeDate::listHolidays("US")) %>%
step_rm(date) %>%
step_dummy(all_nominal_predictors()) %>%
step_zv(all_predictors())
preproc_data <-
flights_rec %>%
prep() %>%
juice(all_predictors(), all_outcomes())
xgboost_spec <-
boost_tree(trees = tune(), min_n = tune(), tree_depth = tune(), learn_rate = tune(),
loss_reduction = tune(), sample_size = tune()) %>%
set_mode("classification") %>%
set_engine("xgboost")
if (preproc != "no preprocessing") {
xgboost_workflow <-
workflow() %>%
add_recipe(flights_rec) %>%
add_model(xgboost_spec)
set.seed(33)
bt <- bootstraps(flight_data, times = num_resamples)
} else {
xgboost_workflow <-
workflow() %>%
add_variables(arr_delay, predictors = c(everything())) %>%
add_model(xgboost_spec)
set.seed(33)
bt <- bootstraps(preproc_data, times = num_resamples)
}
set.seed(22)
xgboost_grid <-
xgboost_workflow %>%
parameters() %>%
update(trees = trees(c(100, 2000))) %>%
grid_max_entropy(size = num_grid)
if (num_cores > 1) {
registerDoMC(cores=num_cores)
}
roc_res <- metric_set(roc_auc)
ctrl <- control_grid(parallel_over = par_method)
grid_time <- system.time({
set.seed(99)
xgboost_workflow %>%
tune_grid(bt, grid = xgboost_grid, metrics = roc_res, control = ctrl)
})
times <- tibble::tibble(
elapsed = grid_time[3],
num_resamples = num_resamples,
num_grid = num_grid,
num_cores = num_cores,
preproc = preproc,
par_method = par_method
)
save(times, file = paste0("xgb_", num_cores, format(Sys.time(), "_%Y_%m_%d_%H_%M_%S.RData")))
sessioninfo::session_info()
if (!interactive()) {
q("no")
} |
"dataFigure3" |
varcovcubshe <-function(m,pai1,pai2,csi,shelter,n){
pr<-probcubshe1(m,pai1,pai2,csi,shelter)
dd<-rep(0,m);dd[shelter]<-1;
bb<-probbit(m,csi)
aaa<-bb-dd
bbb<-(1/m)-dd
c4<-pai1*bb*(m-(1:m)-csi*(m-1))/(csi*(1-csi))
atilde<-aaa/pr; btilde<-bbb/pr; ctilde<-c4/pr;
d11<-sum(aaa*atilde); d22<-sum(bbb*btilde); dxx<-sum(c4*ctilde);
d12<-sum(bbb*atilde); d1x<-sum(c4*atilde); d2x<-sum(c4*btilde);
matinf<-matrix(c(d11,d12,d1x,d12,d22,d2x,d1x,d2x,dxx),nrow=3,byrow=T)
  if(any(is.na(matinf))){
warning("ATTENTION: NAs produced")
varmat<-matrix(NA,nrow=3,ncol=3)
} else {
if(det(matinf)<=0){
warning("ATTENTION: Variance-covariance matrix NOT positive definite")
varmat<-matrix(NA,nrow=3,ncol=3)
} else {
varmat<-solve(matinf)/n
}
}
return(varmat)
} |
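## Illustrative call (a sketch; relies on the companion functions probcubshe1()
## and probbit() defined elsewhere in this package, with arbitrary parameters):
## varcovcubshe(m = 7, pai1 = 0.6, pai2 = 0.2, csi = 0.4, shelter = 1, n = 500)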
context("Function netLoadRamp")
sapply(studyPathS, function(studyPath){
opts <- setSimulationPath(studyPath)
describe("netLoadRamp", {
mydata <- readAntares(areas = "all", districts = "all", links = "all", showProgress = FALSE, mcYears = "all")
it("returns an antaresDataTable with correct number of lines and columns", {
s <- netLoadRamp(mydata$areas, ignoreMustRun = TRUE)
expect_is(s, "antaresDataTable")
expect_equal(nrow(s) / length(simOptions()$mcYears) / (24 * 7 * nweeks),
nrow(unique(mydata$areas[, .(area)])))
})
it("accepts 'antaresDataList' objects", {
s <- netLoadRamp(mydata, ignoreMustRun = TRUE)
expect_is(s, "antaresDataList")
expect_false(is.null(s$areas$areaRamp))
expect_false(is.null(s$districts$areaRamp))
})
it("creates min and max columns only if timeStep is not hourly or synthesis is true", {
s <- netLoadRamp(mydata$areas, ignoreMustRun = TRUE)
expect_true(is.null(s$min_areaRamp))
s <- netLoadRamp(x = mydata$areas, ignoreMustRun = TRUE, synthesis = TRUE)
expect_false(is.null(s$min_areaRamp))
s <- suppressWarnings(netLoadRamp(mydata$areas, ignoreMustRun = TRUE, timeStep = "monthly"))
expect_false(is.null(s$min_areaRamp))
})
it("stops if input does not contain area or district data", {
expect_error(netLoadRamp(mydata$links, ignoreMustRun = TRUE), "area")
})
it("stops if some 'necesary'BALANCE' column is missing", {
mydata <- readAntares(areas="all", showProgress = FALSE, select = "LOAD", mcYears = "all")
expect_error(netLoadRamp(mydata, ignoreMustRun = TRUE), "missing")
})
})
}) |
gx.pearson <-
function(xx, log = FALSE, ifclr = FALSE, ifwarn = TRUE)
{
if(!is.matrix(xx)) stop(deparse(substitute(xx)), " is not a Matrix")
temp.x <- remove.na(xx)
x <- temp.x$x
if(ifclr) log <- FALSE
if(log) {
x <- log10(x)
cat("Data have been Log10 transformed\n")
}
else if(ifclr) {
x <- clr(x, ifwarn = ifwarn)
cat("Data have been Centred Log-Ratio transformed\n")
}
z <- scale(x)
r <- (t(z) %*% z)/(temp.x$n - 1)
df.t <- temp.x$n - 2
df.term <- sqrt(df.t)
for(i in 2:temp.x$m) {
for(j in 1:(i - 1))
r[i, j] <- pt((abs(r[i, j]) * df.term)/
sqrt(1 - r[i, j] * r[i, j]), df.t)
}
r <- round(r, 3)
for(i in 1:temp.x$m) r[i,i] <- NA
cat("Pearson Correlation Coefficients and their Statistical Significance,",
"\nupper and lower triangles, respectively,",
paste("for matrix ", deparse(substitute(xx)), ", N = ", temp.x$n, "\n\n", sep = ""))
print(r, na.print = " ")
cat("\n")
invisible()
} |
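## Illustrative call (a sketch; gx.pearson() relies on remove.na() from this
## package, and the demonstration data below are arbitrary):
## gx.pearson(as.matrix(mtcars[, c("mpg", "disp", "hp", "wt")]))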
"add.lmomco.axis" <-
function(side=1, twoside=FALSE, twoside.suppress.labels=FALSE,
side.type=c("NPP", "RI", "SNV"), otherside.type=c("NA", "RI", "SNV", "NPP"),
alt.lab=NA, alt.other.lab=NA, npp.as.aep=FALSE, case=c("upper", "lower"),
NPP.control=NULL, RI.control=NULL, SNV.control=NULL, ...) {
case <- match.arg(case)
other.side <- switch(as.character(side), "1"=3, "2"=4, "3"=1, "4"=1)
side.type <- match.arg(side.type)
otherside.type <- match.arg(otherside.type)
if(otherside.type == "NA") otherside.type <- NA
if(twoside & ! is.na(otherside.type)) twoside <- FALSE
lims <- par()$usr;
ifelse(side == 1 | side == 3, lims <- lims[1:2], lims <- lims[3:4])
lims <- pnorm(lims)
"my.nonexceeds" <- function(minors=FALSE) {
if(minors) {
FF <- c(0.55, 0.65, 0.75, 0.825, 0.850, 0.875,
0.91, 0.92, 0.93, 0.94, 0.96, 0.97,
0.9925, 0.996, 0.997, 0.9996, 0.9997, 0.99996, 0.99997)
FF <- c(sort(1-FF), FF)
FF <- FF[FF >= lims[1]]; FF <- FF[FF <= lims[2]]
FF <- unique(c(lims[1],FF,lims[2]))
return(FF)
} else {
FF <- c(0.6, 0.7, 0.8, 0.9, 0.95, 0.98, 0.99, 0.995, 0.998, 0.999,
0.9995, 0.9998, 0.9999, 0.99995, 0.99998, 0.99999)
FF <- c(sort(1-FF), 0.5, FF)
FF <- FF[FF >= lims[1]]; FF <- FF[FF <= lims[2]]
FF <- unique(c(lims[1],FF,lims[2]))
return(FF)
}
}
if(is.null(NPP.control)) {
txt.npp <- ifelse(case == "upper", "NONEXCEEDANCE PROBABILITY", "Nonexceedance probability")
txt.epp <- ifelse(case == "upper", "EXCEEDANCE PROBABILITY", "Exceedance probability")
the.label <- ifelse(is.na(alt.lab), txt.npp, alt.lab)
the.other.label <- ifelse(is.na(alt.other.lab), txt.epp, alt.other.lab)
NPP.control <- list(label=the.label,
other.label=the.other.label,
probs=my.nonexceeds(minors=TRUE),
probs.label=my.nonexceeds(minors=FALSE),
digits=3, line=3, as.exceed=FALSE)
}
if(is.null(RI.control)) {
txt.ri <- ifelse(case == "upper", "RECURRENCE INTERVAL, IN YEARS", "Recurrence interval, in years")
the.label <- ifelse(is.na(alt.lab), txt.ri, alt.lab)
RI.control <- list(label=the.label,
Tyear=c(2, 5, 10, 25, 50, 100, 200, 500), line=2)
}
if(is.null(SNV.control)) {
txt.snv <- ifelse(case == "upper", "STANDARD NORMAL VARIATE", "Standard normal variate")
the.label <- ifelse(is.na(alt.lab), txt.snv, alt.lab)
SNV.control <- list(label=the.label,
begin=-5, end=5, by=0.5, line=2)
}
NPPf <- function(side, other.side) {
dots <- list(...)
tcl <- ifelse("tcl" %in% names(dots), dots$tcl, par()$tcl)
NPP <- NPP.control$probs; NPP.lab <- NPP.control$probs.lab
if(NPP.control$as.exceed) {
the.true.NPP.lab <- 1 - NPP.lab
} else {
the.true.NPP.lab <- NPP.lab
}
qNPP <- qnorm(NPP); qNPP.lab <- qnorm(NPP.lab)
if(npp.as.aep) {
NPP.lab <- format(1-the.true.NPP.lab, nsmall=NPP.control$digits)
} else {
NPP.lab <- format( the.true.NPP.lab, nsmall=NPP.control$digits)
}
Axis(qNPP, at=qNPP, labels=NA, side=side, ..., tcl=0.8*tcl)
Axis(qNPP.lab, at=qNPP.lab, labels=NPP.lab, side=side, ..., tcl=1.3*tcl)
if(npp.as.aep) {
mtext(NPP.control$other.label, line=NPP.control$line, side=side)
} else {
mtext(NPP.control$label, line=NPP.control$line, side=side)
}
if(twoside) {
Axis(qNPP, at=qNPP, labels=NA, side=other.side, ..., tcl=0.8*tcl)
if(twoside.suppress.labels) {
Axis(qNPP.lab, at=qNPP.lab, labels=NA, side=other.side, ..., tcl=1.3*tcl)
} else {
Axis(qNPP.lab, at=qNPP.lab, labels=NPP.lab, side=other.side, ..., tcl=1.3*tcl)
}
}
}
RIf <- function(side, other.side) {
F <- 1 - 1/RI.control$Tyear; qF <- qnorm(F); labF <- RI.control$Tyear
Axis(qF, at=qF, labels=labF, side=side, ...)
if(twoside) {
Axis(at=qF, labels=NA, side=other.side, ...)
}
mtext(RI.control$label, line=RI.control$line, side=side)
}
SNVf <- function(side, other.side) {
SNV <- NULL
try( SNV <- seq(SNV.control$begin, SNV.control$end, by=SNV.control$by) )
if(is.null(SNV)) {
warning("Poorly constructed SNV.control, trapping, and using alternative")
SNV <- seq(-5, 5, by=0.5)
}
Axis(SNV, at=SNV, side=side, ...)
mtext(SNV.control$label, line=SNV.control$line, side=side)
if(twoside) {
Axis(SNV, at=SNV, side=other.side, ...)
}
}
NULLf <- function() { return("no axis function made") }
primary.axis <- switch(side.type, NPP=NPPf, RI=RIf, SNV=SNVf, NULLf)
primary.axis(side, other.side)
if(! is.na(otherside.type)) {
secondary.axis <- switch(otherside.type, NPP=NPPf, RI=RIf, SNV=SNVf, NULLf)
secondary.axis(other.side, side)
}
} |
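## Typical use on a normal-probability plot (a sketch; pp() is assumed to be the
## package's plotting-position helper and the data are simulated):
## X <- sort(rnorm(50))
## plot(qnorm(pp(X)), X, xaxt = "n", xlab = "")
## add.lmomco.axis(side = 1, side.type = "NPP", otherside.type = "RI")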
select_edges <- function(graph,
conditions = NULL,
set_op = "union",
from = NULL,
to = NULL,
edges = NULL) {
time_function_start <- Sys.time()
fcn_name <- get_calling_fcn()
if (graph_object_valid(graph) == FALSE) {
emit_error(
fcn_name = fcn_name,
reasons = "The graph object is not valid")
}
if (graph_contains_nodes(graph) == FALSE) {
emit_error(
fcn_name = fcn_name,
reasons = "The graph contains no nodes")
}
if (graph_contains_edges(graph) == FALSE) {
emit_error(
fcn_name = fcn_name,
reasons = "The graph contains no edges")
}
if (!is.null(edges)) {
if (!any(edges %in% graph$edges_df$id)) {
emit_error(
fcn_name = fcn_name,
reasons = "The values provided in `edges` do not all correspond to edge ID values in the graph")
}
}
conditions <- rlang::enquo(conditions)
edges_df <- graph$edges_df
n_e_select_properties_in <-
node_edge_selection_properties(graph = graph)
if (!is.null(
rlang::enquo(conditions) %>%
rlang::get_expr())) {
edges_df <- dplyr::filter(.data = edges_df, !!conditions)
}
if (!is.null(from)) {
if (any(!(from %in% edges_df$from))) {
emit_error(
fcn_name = fcn_name,
reasons = "One of more of the nodes specified as `from` not part of an edge")
}
from_val <- from
edges_df <-
edges_df %>%
dplyr::filter(from %in% from_val)
}
if (!is.null(to)) {
if (any(!(to %in% edges_df$to))) {
emit_error(
fcn_name = fcn_name,
reasons = "One of more of the nodes specified as `to` are not part of an edge")
}
to_val <- to
edges_df <-
edges_df %>%
dplyr::filter(to %in% to_val)
}
edges_selected <-
edges_df %>%
dplyr::select(id, from, to) %>%
dplyr::rename(edge = id)
edges_selected <- edges_selected$edge
if (!is.null(edges)) {
edges_selected <- intersect(edges, edges_selected)
}
edges_prev_selection <- graph$edge_selection$edge
if (set_op == "union") {
edges_combined <-
union(edges_prev_selection, edges_selected)
} else if (set_op == "intersect") {
edges_combined <-
intersect(edges_prev_selection, edges_selected)
} else if (set_op == "difference") {
edges_combined <-
base::setdiff(edges_prev_selection, edges_selected)
}
edges_combined <-
graph$edges_df %>%
dplyr::filter(id %in% edges_combined) %>%
dplyr::select(id, from, to) %>%
dplyr::rename(edge = id)
graph$edge_selection <- edges_combined
graph$node_selection <- create_empty_nsdf()
n_e_select_properties_out <-
node_edge_selection_properties(graph = graph)
graph$graph_log <-
add_action_to_log(
graph_log = graph$graph_log,
version_id = nrow(graph$graph_log) + 1,
function_used = fcn_name,
time_modified = time_function_start,
duration = graph_function_duration(time_function_start),
nodes = nrow(graph$nodes_df),
edges = nrow(graph$edges_df))
if (graph$graph_info$write_backups) {
save_graph_as_rds(graph = graph)
}
if (!is.null(graph$graph_info$display_msgs) &&
graph$graph_info$display_msgs) {
if (!n_e_select_properties_in[["node_selection_available"]] &
!n_e_select_properties_in[["edge_selection_available"]]) {
msg_body <-
glue::glue(
"created a new selection of \\
{n_e_select_properties_out[['selection_count_str']]}")
} else if (n_e_select_properties_in[["node_selection_available"]] |
n_e_select_properties_in[["edge_selection_available"]]) {
if (n_e_select_properties_in[["edge_selection_available"]]) {
msg_body <-
glue::glue(
"modified an existing selection of \\
{n_e_select_properties_in[['selection_count_str']]}:
* {n_e_select_properties_out[['selection_count_str']]} \\
are now in the active selection
* used the `{set_op}` set operation")
}
if (n_e_select_properties_in[["node_selection_available"]]) {
msg_body <-
glue::glue(
"created a new selection of \\
{n_e_select_properties_out[['selection_count_str']]}:
* this replaces \\
{n_e_select_properties_in[['selection_count_str']]} \\
in the prior selection")
}
}
emit_message(
fcn_name = fcn_name,
message_body = msg_body)
}
graph
} |
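## Minimal sketch of use (assumes the package's create_graph(), add_path() and
## get_selection() helpers; the pipeline below is illustrative only):
## graph <-
##   create_graph() %>%
##   add_path(n = 4) %>%
##   select_edges(from = 1)
## get_selection(graph)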
get_ust = function(x, y, nonzero)
{
  beta = as.numeric(t(x) %*% y)
  out = rep(0, length(beta))
  select = order(abs(beta), decreasing = TRUE)[1:nonzero]
  out[select] = beta[select]
  names(out) <- colnames(x)
  out
}
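## Example on simulated data (a sketch): keep the `nonzero` entries of
## t(x) %*% y with largest absolute value and zero out the rest.
## set.seed(1)
## x <- scale(matrix(rnorm(100 * 5), 100, 5))
## y <- x[, 1] + rnorm(100)
## get_ust(x, y, nonzero = 2)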
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
library(knitr)
library(interlineaR)
library(kableExtra)
corpuspath <- system.file("exampleData", "tuwariInterlinear.xml", package="interlineaR")
corpus <- read.emeld(corpuspath, vernacular.languages="tww")
kable(head(corpus$morphemes))
kable(head(corpus$words))
kable(head(corpus$sentences), booktabs = T)
kable(head(corpus$texts))
morphemes_words <- merge(corpus$morphemes, corpus$words[,-c(1,2)], by="word_id", suffixes = c(".morpheme",".word"))
kable(head(morphemes_words))
corpuspath <- system.file("exampleData", "tuwariToolbox.txt", package="interlineaR")
corpus <- read.toolbox(corpuspath)
kable(head(corpus$morphemes))
path <- system.file("exampleData", "kakabe.txt", package="interlineaR")
corpus <- read.toolbox(path, morpheme.fields.suppl = c("gr", "gf"))
kable(head(corpus$morphemes))
kable(head(corpus$sentences))
dicpath <- system.file("exampleData", "tuwariDictionary.lift", package="interlineaR")
dictionary <- read.lift(dicpath, vernacular.languages="tww", simplify=TRUE)
kable(head(dictionary$entries))
kable(head(dictionary$senses))
kable(head(dictionary$examples)) |
R_EL2n_E <- function(R_EL) {
n_E <- R_EL %*% (base::t(c(0, 0, -1)) %>% as.vector())
n_E %>% as.vector()
} |
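## Quick check (a sketch): with the identity rotation matrix the function
## returns the reference n-vector c(0, 0, -1).
## R_EL2n_E(diag(3))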
palettes <- lapply(
list(
flat = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
'flat dark' = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
dust = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
light = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
earth = list(
background = '
text = list(inner = '
line = list(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = list(low='
),
fresh = list(
background = '
text = list(inner = '
line = list(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = list(low='
),
chalk = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
gradient = c(low = '
),
lilac = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
'
gradient = c(low = '
),
carrot = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
pale = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
'
gradient = c(low = '
),
copper = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
'
gradient = c(low = '
),
grape = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low='
),
greyscale = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
'
gradient = c(low = '
),
sky = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
gradient = c(low = '
),
solarized = list(
background = '
text = c(inner = '
line = c(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
gradient = c(low = '
),
grass = list(
background = '
text = c(inner = '
line = c(inner = NA, outer = '
gridline = '
swatch = structure(c(
'
'
'
'
gradient = c(low='
),
sea = list(
background = '
text = c(inner = '
line = c(inner = NA, outer = '
gridline = '
swatch = structure(c(
'
'
'
'
'
'
gradient = c(low='
),
camouflage = list(
background = '
text = list(inner = '
line = list(inner = '
gridline = '
swatch = structure(c(
'
'
'
'
gradient = list(low='
)
), function(x) {
class(x) <- 'ggthemr_palette'
x
}) |
test_that("checks on ticker and simfin_id are not too strict", {
skip("Very slow")
entities <- sfa_get_entities()
checkmate::expect_data_table(
sfa_get_info(simfin_id = entities[["simfin_id"]])
)
checkmate::expect_data_table(
sfa_get_info(ticker = entities[["ticker"]])
)
}) |
statvc4levelt2 <-
function(y,cluster,m1,n1,m2,n2,m3,n3,p,weight){
mat=matrix(0,p,p)
w=0
for(i in 1:m1)
{
clusteri=cluster[[i]]
m1i=m2[i]
m2i=m3[[i]]
n2i=n3[[i]]
mati=statvc3levelt1(y,clusteri,m1i,m2i,n2i,p,weight)
mat=mat+mati[[1]]
w=w+mati[[2]]
}
list(mat,w)
} |
ipums_website <- function(
x, var, project = NULL, launch = TRUE, verbose = TRUE, var_label = NULL, homepage_if_missing = TRUE
) {
UseMethod("ipums_website")
}
ipums_website.ipums_ddi <- function(
x, var, project = NULL, launch = TRUE, verbose = TRUE, var_label = NULL, homepage_if_missing = TRUE
) {
if (is.null(project)) project <- x$ipums_project
var <- fix_for_detailed_var(x, var, var_label)
url <- get_ipums_url(var, project, verbose, homepage_if_missing)
if (launch) {
shell.exec(url)
invisible(url)
} else {
url
}
}
ipums_website.default <- function(
x, var, project = NULL, launch = TRUE, verbose = TRUE, var_label = NULL, homepage_if_missing = TRUE
) {
if (is.null(project)) project <- attributes(x)[["ipums_project"]]
if (missing(x)) x <- NULL
var <- fix_for_detailed_var(x, var, var_label)
url <- get_ipums_url(var, project, verbose, homepage_if_missing)
if (launch) {
shell.exec(url)
invisible(url)
} else {
url
}
}
get_ipums_url <- function(var, project, verbose = TRUE, homepage_if_missing = FALSE) {
if (is.null(project)) {
stop(paste(
custom_format_text(
"Project not found. Please specify the project name using ",
"'project' argument. Options include: ", indent = 2, exdent = 2
),
custom_format_text(
paste(all_proj_names(), collapse = ", "), indent = 4, exdent = 4
),
sep = "\n"
))
}
config <- get_proj_config(project)
if (is.null(config)) {
stop(paste(
custom_format_text(
"Unexpected project '", project, "'. ",
"Options include: ", indent = 2, exdent = 2
),
custom_format_text(
paste(all_proj_names(), collapse = ", "), indent = 4, exdent = 4
),
sep = "\n"
))
}
if (verbose && !config$var_url) {
message("Cannot give a variable-specific URL for this project.")
}
if (!homepage_if_missing && !config$var_url) {
return(NULL)
}
config$url_function(var)
}
fix_for_detailed_var <- function(object, var, var_label) {
  if (is.null(var_label) && !is.null(object)) var_label <- ipums_var_label(object, one_of(var))
if (is.null(var_label)) return(var)
is_det <- grepl("detailed version", tolower(var_label), fixed = TRUE)
if (is_det && fostr_sub(var, -1) == "D") {
var <- fostr_sub(var, 1, -2)
}
var
} |
"glosses_df" |
docker_available <- function(..., verbose = FALSE) {
res <- tryCatch({
cl <- http_client(docker_config(...))
cl$ping()
}, error = identity)
ok <- !inherits(res, "error")
if (verbose && !ok) {
what <- if (!exists("cl", inherits = FALSE))
"create docker client" else "connect to docker daemon"
message(sprintf("Failed to %s with error message:\n %s",
what, res$message))
}
ok
} |
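## Typical guard around docker-dependent code (a sketch):
## if (docker_available(verbose = TRUE)) {
##   message("docker daemon reachable; container-based tests can run")
## }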
setMethod("writeStart", signature(x="RasterLayer", filename="character"),
function(x, filename, options=NULL, format, prj=FALSE, ...) {
if (trim(filename) == "") {
filename <- rasterTmpFile()
}
filename <- .fullFilename(filename, expand=TRUE)
if (!file.exists(dirname(filename))) {
stop("Attempting to write a file to a path that does not exist:\n ", dirname(filename))
}
filetype <- .filetype(format=format, filename=filename)
filename <- .getExtension(filename, filetype)
if (filetype=="ascii") {
x <- .startAsciiWriting(x, filename, ...)
} else if ( filetype %in% .nativeDrivers() ) {
x <- .startRasterWriting(x, filename, format=filetype, ...)
} else if ( filetype == "CDF" ) {
x <- .startWriteCDF(x, filename, ...)
} else {
x <- .startGDALwriting(x, filename, options=options, format=filetype, ...)
}
if (prj) {
crs <-.getCRS(x)
if (crs != "") {
writeLines(wkt(x), extension(filename, "prj") )
}
}
return(x)
})
setMethod("writeStart", signature(x="RasterBrick", filename="character"),
function(x, filename, options=NULL, format, prj=FALSE, ...) {
if (trim(filename) == "") {
filename <- rasterTmpFile()
}
filename <- .fullFilename(filename, expand=TRUE)
filetype <- .filetype(format=format, filename=filename)
filename <- .getExtension(filename, filetype)
if (filetype=="ascii") {
stop("ARC-ASCII files cannot contain multiple layers")
}
native <- filetype %in% c(.nativeDrivers(), "ascii")
if (native) {
x <- .startRasterWriting(x, filename, format=filetype, ...)
} else if ( filetype == "CDF" ) {
x <- .startWriteCDF(x, filename, ...)
} else {
x <- .startGDALwriting(x, filename, options=options, format=filetype, ...)
}
if (prj) {
crs <-.getCRS(x)
if (!is.na(crs)) {
writeLines(wkt(x), extension(filename, "prj") )
}
}
return(x)
})
setMethod("writeStop", signature(x="RasterLayer"),
function(x) {
driver <- x@file@driver
if ( driver %in% .nativeDrivers() ) {
return( .stopRasterWriting(x) )
} else if ( driver == "ascii" ) {
return( .stopAsciiWriting(x) )
} else if ( driver == "netcdf" ) {
return( .stopWriteCDF(x) )
} else {
return( .stopGDALwriting(x) )
}
}
)
setMethod("writeStop", signature(x="RasterBrick"),
function(x) {
driver <- x@file@driver
if (driver %in% .nativeDrivers()) {
return( .stopRasterWriting(x) )
} else if ( driver == "netcdf" ) {
return( .stopWriteCDF(x) )
} else {
return( .stopGDALwriting(x) )
}
}
) |
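## Sketch of the intended block-wise writing pattern (assumes the package's
## raster(), blockSize(), getValues(), writeValues() and rasterTmpFile()
## helpers; purely illustrative):
## r <- raster(nrows = 10, ncols = 10, vals = runif(100))
## out <- raster(r)
## out <- writeStart(out, filename = rasterTmpFile(), format = "raster")
## bs <- blockSize(out)
## for (i in seq_len(bs$n)) {
##   v <- getValues(r, row = bs$row[i], nrows = bs$nrows[i])
##   out <- writeValues(out, v, bs$row[i])
## }
## out <- writeStop(out)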
NULL
filter.dm <- function(.data, ...) {
check_zoomed(.data)
}
filter.zoomed_dm <- function(.data, ...) {
.data %>%
dm_filter_impl(..., set_filter = FALSE)
}
mutate.dm <- function(.data, ...) {
check_zoomed(.data)
}
mutate.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
mutated_tbl <- mutate(tbl, ...)
selected <- set_names(intersect(colnames(tbl), colnames(mutated_tbl)))
new_tracked_cols_zoom <- new_tracked_cols(.data, selected)
replace_zoomed_tbl(.data, mutated_tbl, new_tracked_cols_zoom)
}
transmute.dm <- function(.data, ...) {
check_zoomed(.data)
}
transmute.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
groups <- set_names(map_chr(groups(tbl), as_string))
transmuted_tbl <- transmute(tbl, ...)
selected <- set_names(intersect(colnames(tbl), colnames(transmuted_tbl)))
new_tracked_cols_zoom <- new_tracked_cols(.data, selected)
replace_zoomed_tbl(.data, transmuted_tbl, new_tracked_cols_zoom)
}
select.dm <- function(.data, ...) {
check_zoomed(.data)
}
select.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
selected <- eval_select_both(quo(c(...)), colnames(tbl))
selected_tbl <- select(tbl, !!!selected$indices)
new_tracked_cols_zoom <- new_tracked_cols(.data, selected$names)
replace_zoomed_tbl(.data, selected_tbl, new_tracked_cols_zoom)
}
relocate.dm <- function(.data, ...) {
check_zoomed(.data)
}
relocate.zoomed_dm <- function(.data, ..., .before = NULL, .after = NULL) {
tbl <- tbl_zoomed(.data)
relocated_tbl <- relocate(tbl, ..., .before = {{ .before }}, .after = {{ .after }})
replace_zoomed_tbl(.data, relocated_tbl)
}
rename.dm <- function(.data, ...) {
check_zoomed(.data)
}
rename.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
renamed <- eval_rename_both(quo(c(...)), colnames(tbl))
renamed_tbl <- rename(tbl, !!!renamed$indices)
new_tracked_cols_zoom <- new_tracked_cols(.data, renamed$all_names)
replace_zoomed_tbl(.data, renamed_tbl, new_tracked_cols_zoom)
}
distinct.dm <- function(.data, ...) {
check_zoomed(.data)
}
distinct.zoomed_dm <- function(.data, ..., .keep_all = FALSE) {
tbl <- tbl_zoomed(.data)
distinct_tbl <- distinct(tbl, ..., .keep_all = .keep_all)
selected <- set_names(intersect(colnames(tbl), colnames(distinct_tbl)))
new_tracked_cols_zoom <- new_tracked_cols(.data, selected)
replace_zoomed_tbl(.data, distinct_tbl, new_tracked_cols_zoom)
}
arrange.dm <- function(.data, ...) {
check_zoomed(.data)
}
arrange.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
arranged_tbl <- arrange(tbl, ...)
replace_zoomed_tbl(.data, arranged_tbl)
}
slice.dm <- function(.data, ...) {
check_zoomed(.data)
}
slice.zoomed_dm <- function(.data, ..., .keep_pk = NULL) {
sliced_tbl <- slice(tbl_zoomed(.data), ...)
orig_pk <- dm_get_pk_impl(.data, orig_name_zoomed(.data))
tracked_cols <- col_tracker_zoomed(.data)
if (is_null(.keep_pk)) {
if (has_length(orig_pk) && any(unlist(orig_pk) %in% tracked_cols)) {
message(
paste(
"Keeping PK column, but `slice.zoomed_dm()` can potentially damage the uniqueness of PK columns (duplicated indices).",
"Set argument `.keep_pk` to `TRUE` or `FALSE` to ensure the behavior you intended."
)
)
}
} else if (!.keep_pk) {
tracked_cols <- discard(tracked_cols, tracked_cols == orig_pk)
}
replace_zoomed_tbl(.data, sliced_tbl, tracked_cols)
}
group_by.dm <- function(.data, ...) {
check_zoomed(.data)
}
group_by.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
grouped_tbl <- group_by(tbl, ...)
replace_zoomed_tbl(.data, grouped_tbl)
}
group_data.dm <- function(.data) {
check_zoomed(.data)
}
group_data.zoomed_dm <- function(.data) {
tbl <- tbl_zoomed(.data)
group_data(tbl)
}
group_keys.dm <- function(.tbl, ...) {
check_zoomed(.tbl)
}
group_keys.zoomed_dm <- function(.tbl, ...) {
.data <- .tbl
tbl <- tbl_zoomed(.data)
group_keys(tbl, ...)
}
group_indices.dm <- function(.data, ...) {
check_zoomed(.data)
}
group_indices.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
group_indices(tbl, ...)
}
group_vars.dm <- function(x) {
check_zoomed(x)
}
group_vars.zoomed_dm <- function(x) {
.data <- x
tbl <- tbl_zoomed(.data)
group_vars(tbl)
}
groups.dm <- function(x) {
check_zoomed(x)
}
groups.zoomed_dm <- function(x) {
.data <- x
tbl <- tbl_zoomed(.data)
groups(tbl)
}
ungroup.dm <- function(x, ...) {
check_zoomed(x)
}
ungroup.zoomed_dm <- function(x, ...) {
tbl <- tbl_zoomed(x)
ungrouped_tbl <- ungroup(tbl, ...)
replace_zoomed_tbl(x, ungrouped_tbl)
}
summarise.dm <- function(.data, ...) {
check_zoomed(.data)
}
summarise.zoomed_dm <- function(.data, ...) {
tbl <- tbl_zoomed(.data)
groups <- set_names(map_chr(groups(tbl), as_string))
summarized_tbl <- summarize(tbl, ...)
new_tracked_cols_zoom <- new_tracked_cols(.data, groups)
replace_zoomed_tbl(.data, summarized_tbl, new_tracked_cols_zoom)
}
count.dm <- function(x, ...) {
check_zoomed(x)
}
count.zoomed_dm <- function(x, ..., wt = NULL, sort = FALSE, name = NULL,
.drop = group_by_drop_default(x)) {
tbl <- tbl_zoomed(x)
if (!missing(...)) {
out <- group_by(tbl, ..., .add = TRUE, .drop = .drop)
} else {
out <- tbl
}
groups <- set_names(map_chr(groups(out), as_string))
out <- tally(out, wt = !!enquo(wt), sort = sort, name = name)
if (is.data.frame(tbl)) {
out <- dplyr_reconstruct(out, tbl)
}
new_tracked_cols_zoom <- new_tracked_cols(x, groups)
replace_zoomed_tbl(x, out, new_tracked_cols_zoom)
}
tally.dm <- function(x, ...) {
check_zoomed(x)
}
tally.zoomed_dm <- function(x, ...) {
tbl <- tbl_zoomed(x)
groups <- set_names(map_chr(groups(tbl), as_string))
out <- tally(tbl, ...)
if (is.data.frame(tbl)) {
out <- dplyr_reconstruct(out, tbl)
}
new_tracked_cols_zoom <- new_tracked_cols(x, groups)
replace_zoomed_tbl(x, out, new_tracked_cols_zoom)
}
pull.dm <- function(.data, var = -1, name = NULL) {
check_zoomed(.data)
}
pull.zoomed_dm <- function(.data, var = -1, ...) {
tbl <- tbl_zoomed(.data)
pull(tbl, var = {{ var }}, ...)
}
compute.zoomed_dm <- function(x, ...) {
zoomed_df <-
tbl_zoomed(x) %>%
compute(...)
replace_zoomed_tbl(x, zoomed_df)
}
NULL
left_join.dm <- function(x, ...) {
check_zoomed(x)
}
left_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy)
joined_tbl <- left_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
inner_join.dm <- function(x, ...) {
check_zoomed(x)
}
inner_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy)
joined_tbl <- inner_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
full_join.dm <- function(x, ...) {
check_zoomed(x)
}
full_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy)
joined_tbl <- full_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
right_join.dm <- function(x, ...) {
check_zoomed(x)
}
right_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy)
joined_tbl <- right_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
semi_join.dm <- function(x, ...) {
check_zoomed(x)
}
semi_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy, disambiguate = FALSE)
joined_tbl <- semi_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
anti_join.dm <- function(x, ...) {
check_zoomed(x)
}
anti_join.zoomed_dm <- function(x, y, by = NULL, copy = NULL, suffix = NULL, select = NULL, ...) {
y_name <- as_string(enexpr(y))
join_data <- prepare_join(x, {{ y }}, by, {{ select }}, suffix, copy, disambiguate = FALSE)
joined_tbl <- anti_join(join_data$x_tbl, join_data$y_tbl, join_data$by, copy = FALSE, ...)
replace_zoomed_tbl(x, joined_tbl, join_data$new_col_names)
}
prepare_join <- function(x, y, by, selected, suffix, copy, disambiguate = TRUE) {
y_name <- dm_tbl_name(x, {{ y }})
select_quo <- enquo(selected)
if (!is_null(suffix)) message("Column names are disambiguated if necessary, `suffix` ignored.")
if (!is_null(copy)) message("Tables in a `dm` are necessarily on the same `src`, setting `copy = FALSE`.")
zoomed <- dm_get_zoom(x, c("table", "zoom", "col_tracker_zoom"))
x_tbl <- zoomed$zoom[[1]]
x_orig_name <- zoomed$table
y_tbl <- dm_get_tables_impl(x)[[y_name]]
all_cols_y <- colnames(y_tbl)
if (quo_is_null(select_quo)) {
select_quo <- quo(everything())
}
selected <- eval_select_both(select_quo, colnames(y_tbl))$names
new_col_names <- zoomed$col_tracker_zoom[[1]]
if (is_null(by)) {
by <- get_by(x, x_orig_name, y_name)
if (!all(names(by) %in% new_col_names)) abort_fk_not_tracked(x_orig_name, y_name)
}
by <- repair_by(by)
selected_wo_by <- selected[selected %in% setdiff(selected, by)]
if (disambiguate) {
x_disambig_name <- x_orig_name
y_disambig_name <- y_name
if (x_disambig_name == y_disambig_name) {
x_disambig_name <- paste0(x_disambig_name, ".x")
y_disambig_name <- paste0(y_disambig_name, ".y")
}
table_colnames <-
vec_rbind(
tibble(table = x_disambig_name, column = colnames(x_tbl)),
tibble(table = y_disambig_name, column = names(selected_wo_by))
)
recipe <- compute_disambiguate_cols_recipe(table_colnames, sep = ".")
explain_col_rename(recipe)
x_renames <-
recipe %>%
filter(table == x_disambig_name) %>%
pull(renames)
y_renames <-
recipe %>%
filter(table == y_disambig_name) %>%
pull(renames)
if (has_length(x_renames)) {
x_tbl <- x_tbl %>% rename(!!!x_renames[[1]])
names(by) <- recode(names2(by), !!!prep_recode(x_renames[[1]]))
names(new_col_names) <- recode(names(new_col_names), !!!prep_recode(x_renames[[1]]))
}
if (has_length(y_renames)) {
names(selected_wo_by) <- recode(names(selected_wo_by), !!!prep_recode(y_renames[[1]]))
}
}
if (!all(by %in% selected)) {
new_cols <- glue_collapse(tick_if_needed(setdiff(by, selected)), ", ")
message(glue("Using `select = c({as_label(select_quo)}, {new_cols})`."))
}
prefix <- unique_prefix(names(selected_wo_by))
by_rhs_rename <- by
names(by_rhs_rename) <- paste0(prefix, seq_along(by_rhs_rename))
stopifnot(!any(names(selected_wo_by) %in% names(by_rhs_rename)))
selected_repaired <- c(selected_wo_by, by_rhs_rename)
y_tbl <- select(y_tbl, !!!selected_repaired)
repaired_by <- set_names(recode(by, !!!prep_recode(by_rhs_rename)), names(by))
list(x_tbl = x_tbl, y_tbl = y_tbl, by = repaired_by, new_col_names = new_col_names)
}
unique_prefix <- function(x) {
if (is_empty(x)) {
return("...")
}
dots <- max(max(nchar(x, "bytes")), 3)
paste(rep(".", dots), collapse = "")
}
safe_count <- function(x, ..., wt = NULL, sort = FALSE, name = NULL, .drop = group_by_drop_default(x)) {
quos <- enquos(...)
if (has_length(quos)) {
named <- names2(quos) != ""
if (any(named)) {
quos <- as.list(quos)
named_quos <- quos[named]
x <- mutate(x, !!!named_quos)
quos[named] <- syms(names2(quos)[named])
names(quos) <- NULL
}
out <- group_by(x, !!!quos, .add = FALSE, .drop = .drop)
} else {
out <- ungroup(x)
}
if (is.null(name)) {
out <- tally(out, wt = !!enquo(wt), sort = sort)
} else {
out <- tally(out, wt = !!enquo(wt), sort = sort, name = name)
}
ungroup(out)
}
new_tracked_cols <- function(dm, selected) {
tracked_cols <- col_tracker_zoomed(dm)
old_tracked_names <- names(tracked_cols)
selected_match <- selected[selected %in% old_tracked_names]
set_names(
tracked_cols[selected_match],
names(selected_match)
)
} |
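## Sketch of the intended workflow for these zoomed-dm verbs (assumes the
## package's dm_zoom_to(), dm_update_zoomed() and dm_nycflights13() helpers,
## and that the flights table carries an `origin` column; illustrative only):
## dm_nycflights13() %>%
##   dm_zoom_to(flights) %>%
##   filter(origin == "EWR") %>%
##   dm_update_zoomed()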
as.root_criterion <- function(...) {
as_root_criterion(...)
}
is.root_criterion <- function(...) {
is_root_criterion(...)
} |
test_asymp <- function(Y, X, Z = NULL, space_y = FALSE, number_y = length(unique(Y))){
Y <- as.numeric(Y)
if (space_y){
y <- seq(ifelse(length(which(Y==0))==0,min(Y),min(Y[-which(Y==0)])),max(Y[-which.max(Y)]),length.out=number_y)
}
else{
y <- sort(unique(Y))
}
if (is.null(Z)){
colnames(X) <- sapply(1:ncol(X), function(i){paste0('X',i)})
modelmat <- model.matrix(~.,data=X)
}
else{
colnames(X) <- sapply(1:ncol(X), function(i){paste0('X',i)})
colnames(Z) <- sapply(1:ncol(Z), function(i){paste0('Z',i)})
modelmat <- model.matrix(~.,data=cbind(X,Z))
}
ind_X <- which(substring(colnames(modelmat),1,1)=="X")
beta <- matrix(NA,(length(y)-1),length(ind_X))
indi_pi <- matrix(NA,length(Y),(length(y)-1))
Phi <- (1/length(Y))*(t(as.matrix(modelmat))%*%as.matrix(modelmat))
H <- (solve(Phi)%*%t(as.matrix(modelmat)))
H <- H[ind_X,]
for (i in 1:(length(y)-1)){
indi_Y <- 1*(Y<=y[i])
indi_pi[,i] <- indi_Y
reg <- lm(indi_Y ~ as.matrix(modelmat[,-1]))
beta[i,] <- reg$coefficients[ind_X]
}
beta <- as.vector(beta)
prop <- colMeans(indi_pi)
if (is.null(dim(H))){
H_square <- sum(H^2)
Sigma <- sapply(1:(length(y)-1), function(i){sapply(1:((length(y)-1)*length(ind_X)), function(j){
if (i<=j){
(prop[i]-(prop[j]*prop[i]))
}
else{
(prop[j]-(prop[j]*prop[i]))
}
})})
Sigma <- (1/length(Y))*(H_square*Sigma)
}
else{
temp_Sigma <- lapply(1:ncol(H), function(k){sapply(1:nrow(H), function(s){sapply(1:nrow(H), function(r){H[s,k]*H[r,k]})})})
sum_temp_Sigma <- temp_Sigma[[1]]
for (i in 2:ncol(H)){
sum_temp_Sigma <- sum_temp_Sigma + temp_Sigma[[i]]
}
ind_sig <- rep(1:(length(y)-1),length(ind_X))
Sigma <- matrix(NA,((length(y)-1)*length(ind_X)),((length(y)-1)*length(ind_X)))
for (i in 1:((length(y)-1)*length(ind_X))){
for (j in 1:((length(y)-1)*length(ind_X))){
if (i<=j){
Sigma[i,j] <- sum_temp_Sigma[floor(i/(length(y))+1),floor(j/(length(y))+1)]*(prop[ind_sig[i]]-(prop[ind_sig[j]]*prop[ind_sig[i]]))
}
else{
Sigma[i,j] <- sum_temp_Sigma[floor(i/(length(y))+1),floor(j/(length(y))+1)]*(prop[ind_sig[j]]-(prop[ind_sig[j]]*prop[ind_sig[i]]))
}
}
}
Sigma <- (1/length(Y))*Sigma
}
decomp <- eigen(Sigma)
A <- matrix(0,(length(ind_X)*(length(y)-1)),(length(ind_X)*(length(y)-1)))
diag(A) <- decomp$values
z <- (sqrt(length(Y)))*beta
STAT <- sum(t(z)*z)
pval <- survey::pchisqsum(STAT, lower.tail = FALSE, df = rep(1,length(diag(A))), a = diag(A), method = "saddlepoint")
return(data.frame(raw_pval=pval,Stat=STAT))
} |
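## Illustrative call on simulated data (a sketch; depends on survey::pchisqsum()
## as used above):
## set.seed(1)
## X <- data.frame(x1 = rnorm(100))
## Y <- rbinom(100, size = 3, prob = plogis(X$x1))
## test_asymp(Y, X)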
plot_cluster <- function(y,H,sort=FALSE,sample_rate = 0.05,
y.axis.label = NULL,
smoother = TRUE,
fade = 0.2, cluster_order = NULL, plot_render = TRUE)
{
N <- nrow(y)
T <- ncol(y)
M <- length(unique(H))
cluster <- vector("list", M)
for(m in 1:M)
{
if(is.null(cluster_order))
{
cluster[[m]] <- which(H == m)
}else{
cluster[[m]] <- which(H == cluster_order[m])
}
names(cluster[[m]]) <- NULL
}
c.sizes <- sapply(cluster,length)
if(!sort)
{
clusterstoplot <- 1:M
}else{
clusterstoplot <- sort(c.sizes,decreasing = TRUE,index.return=TRUE)$ix[1:length(cluster)]
}
map <- vector(mode="list",length = length(clusterstoplot))
for(i in 1:length(clusterstoplot))
{
cluster.i <- cluster[[clusterstoplot[i]]]
map[[i]] <- as.data.frame(cbind(cluster.i,i),stringsAsFactors = FALSE)
names(map[[i]]) <- c("establishment","cluster")
}
map <- do.call("rbind",map)
map$establishment <- as.numeric(map$establishment)
y.hat <- matrix(y,(N*T),1,byrow=FALSE)
establishment <- rep(1:N,times=T)
month <- rep(1:T,each=N)
dat.b <- data.frame(y.hat,month,establishment)
names(dat.b) <- c("value","time","establishment")
map <- map[order(map$establishment),]
datb.clust <- merge(dat.b,map,all.x=TRUE,by="establishment",sort = FALSE)
rate <- sample_rate
tmp <- split(datb.clust,list(datb.clust$cluster))
tmp <- unlist(sapply(tmp,function(x){
tot_recs <- length(unique(x$establishment))
u_recs <- sort(unique(x$establishment))
inc_recs <- sample(u_recs,round(rate*tot_recs),replace = FALSE)
}))
datb_plot <- subset(datb.clust, establishment %in% tmp)
p <- ggplot(data=datb_plot,aes(x = time, y = value))
l <- geom_line(aes(group = establishment), alpha = fade)
if(is.null(y.axis.label))
{
axis <- labs( x = "time",
y = substitute(expression(paste(Delta," (Simulated) Employment Counts"))) )
}else{
axis <- labs( x = "time", y = eval(y.axis.label) )
}
if(smoother == TRUE)
{
l.2 <- geom_smooth(aes(group=1),method = "loess", alpha = 0.5,
linetype = 2, se = FALSE, colour = "brown")
f <- facet_wrap(~cluster, scales = "fixed")
p.basis <- p + l + l.2 + f + theme_bw() + axis +
theme(axis.text.x=element_text(angle=90, hjust=0))
}else{
f <- facet_grid(cluster~., scales = "fixed")
p.basis <- p + l + f + theme_bw() + axis +
theme(axis.text.x=element_text(angle=90, hjust=0))
}
if(plot_render)
{
suppressWarnings(print(p.basis))
}
value <- time <- NULL
return(list(map = map, p.basis = p.basis))
} |
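## Minimal usage sketch (assumes ggplot2 is attached; H is any integer cluster
## labelling of the rows of y, obtained here from kmeans purely for
## illustration):
## y <- matrix(rnorm(50 * 20), nrow = 50)
## H <- kmeans(y, centers = 3)$cluster
## res <- plot_cluster(y, H, sample_rate = 1)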
"fun.fmkl.L40"<-function(k,L3){
j<-0:k
result<-integrate(function(x,k,L3) ((x^L3-1)/L3-log(1-x))^k,0,1,
abs.tol=1e-100,k=k,L3=L3,stop.on.error=FALSE)
if(result$message!="OK"){return(NA)}
else return(result$value)} |
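## Example evaluation (a sketch): the order-k FMKL integral at lambda3 = 0.5;
## NA is returned if the numerical integration does not converge.
## fun.fmkl.L40(k = 2, L3 = 0.5)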
context("Testing table_exists()")
tables <- c("users",
"groups",
"file_type",
"issue",
"author",
"object",
"object_component",
"code_run",
"storage_root",
"storage_location",
"external_object",
"quality_controlled",
"keyword",
"licence",
"namespace",
"data_product",
"code_repo_release",
"key_value")
endpoint <- Sys.getenv("FDP_endpoint")
test_that("check table returns true with correct tables", {
tmp <- lapply(seq_along(tables), function(x) {
check_table_exists(tables[x])
}) %>% unlist()
expect_true(all(tmp))
})
test_that("unknown table returns false", {
expect_false(check_table_exists("unknown"))
})
test_that("invalid table name returns error", {
expect_error(check_table_exists(NULL))
expect_error(check_table_exists(NA))
expect_error(check_table_exists(NaN))
expect_error(check_table_exists(list()))
expect_error(check_table_exists(TRUE))
expect_error(check_table_exists())
}) |
library(Sim.DiffProc)
library(knitr)
knitr::opts_chunk$set(comment="",prompt=TRUE, fig.show='hold', warning=FALSE, message=FALSE)
options(prompt="R> ",scipen=16,digits=5,warning=FALSE, message=FALSE,
width = 70)
set.seed(12345, kind = "L'Ecuyer-CMRG")
f <- expression( (1+2*x) ) ; g <- expression( 0.5*x^0.3 )
sim <- snssde1d(drift=f,diffusion=g,x0=2,N=10^4,Dt=10^-4)
mydata <- sim$X
fx <- expression( theta[1]+theta[2]*x )
gx <- expression( theta[3]*x^theta[4] )
fitmod <- fitsde(data = mydata, drift = fx, diffusion = gx, start = list(theta1=1, theta2=1,
theta3=1,theta4=1),pmle="euler")
fitmod
coef(fitmod)
summary(fitmod)
vcov(fitmod)
logLik(fitmod)
AIC(fitmod)
BIC(fitmod)
confint(fitmod, level=0.95)
set.seed(1234, kind = "L'Ecuyer-CMRG")
f <- expression( 3*(2-x) ) ; g <- expression( 0.5 )
sim <- snssde1d(drift=f,diffusion=g,x0=5,Dt=0.01)
HWV <- sim$X
fx <- expression( theta[1]*(theta[2]- x) )
gx <- expression( theta[3] )
fitmod <- fitsde(data=HWV,drift=fx,diffusion=gx,start = list(theta1=1,theta2=1,
theta3=1),pmle="ozaki")
summary(fitmod)
confint(fitmod,parm=c("theta1","theta2"),level=0.95)
set.seed(1234, kind = "L'Ecuyer-CMRG")
f <- expression(-2*x*t) ; g <- expression(0.2*x)
sim <- snssde1d(drift=f,diffusion=g,N=1000,Dt=0.001,x0=10)
mydata <- sim$X
fx <- expression( theta[1]*x*t )
gx <- expression( theta[2]*x )
fitmod <- fitsde(data=mydata,drift=fx,diffusion=gx,start = list(theta1=1,
theta2=1),pmle="shoji",lower=c(-3,0),upper=c(-1,1))
summary(fitmod)
set.seed(1234, kind = "L'Ecuyer-CMRG")
f <- expression(3*t*(sqrt(t)-x)) ; g <- expression(0.3*t)
sim <- snssde1d(drift=f,diffusion=g,M=1,N=1000,x0=2,Dt=0.001)
mydata <- sim$X
fx <- expression( theta[1]*t* ( theta[2]*sqrt(t) - x ) )
gx <- expression( theta[3]*t )
fitmod <- fitsde(data=mydata,drift=fx,diffusion=gx,start = list(theta1=1,
theta2=1,theta3=1),pmle="kessler")
summary(fitmod)
set.seed(1234, kind = "L'Ecuyer-CMRG")
f <- expression( 2*x )
g <- expression( 0.3*x^0.5 )
sim <- snssde1d(drift=f,diffusion=g,M=1,N=10000,x0=2,Dt=0.0001)
mydata <- sim$X
fx <- expression( theta[1]*x )
gx <- expression( theta[2]*x^theta[3] )
truemod <- fitsde(data=mydata,drift=fx,diffusion=gx,start = list(theta1=1,
theta2=1,theta3=1),pmle="euler")
fx1 <- expression( theta[1]+theta[2]*x )
gx1 <- expression( theta[3]*x^theta[4] )
mod1 <- fitsde(data=mydata,drift=fx1,diffusion=gx1,start = list(theta1=1,
theta2=1,theta3=1,theta4=1),pmle="euler")
fx2 <- expression( theta[1]+theta[2]*x )
gx2 <- expression( theta[3]*sqrt(x) )
mod2 <- fitsde(data=mydata,drift=fx2,diffusion=gx2,start = list(theta1=1,
theta2=1,theta3=1),pmle="euler")
fx3 <- expression( theta[1] )
gx3 <- expression( theta[2]*x^theta[3] )
mod3 <- fitsde(data=mydata,drift=fx3,diffusion=gx3,start = list(theta1=1,
theta2=1,theta3=1),pmle="euler")
AIC <- c(AIC(truemod),AIC(mod1),AIC(mod2),AIC(mod3))
Test <- data.frame(AIC,row.names = c("True mod","Comp mod1","Comp mod2","Comp mod3"))
Bestmod <- rownames(Test)[which.min(Test[,1])]
Bestmod
Theta1 <- c(coef(truemod)[[1]],coef(mod1)[[1]],coef(mod2)[[1]],coef(mod3)[[1]])
Theta2 <- c(coef(truemod)[[2]],coef(mod1)[[2]],coef(mod2)[[2]],coef(mod3)[[2]])
Theta3 <- c(coef(truemod)[[3]],coef(mod1)[[3]],coef(mod2)[[3]],coef(mod3)[[3]])
Theta4 <- c("",round(coef(mod1)[[4]],5),"","")
Parms <- data.frame(Theta1,Theta2,Theta3,Theta4,row.names = c("True mod",
"Comp mod1","Comp mod2","Comp mod3"))
Parms
data(Irates)
rates <- Irates[, "r1"]
X <- window(rates, start = 1964.471, end = 1989.333)
plot(X)
fx <- expression( theta[1]+theta[2]*x )
gx <- expression( theta[3]*x^theta[4] )
pmle <- eval(formals(fitsde.default)$pmle)
fitres <- lapply(1:4, function(i) fitsde(X,drift=fx,diffusion=gx,pmle=pmle[i],
start = list(theta1=1,theta2=1,theta3=1,theta4=1)))
Coef <- data.frame(do.call("cbind",lapply(1:4,function(i) coef(fitres[[i]]))))
Info <- data.frame(do.call("rbind",lapply(1:4,function(i) logLik(fitres[[i]]))),
do.call("rbind",lapply(1:4,function(i) AIC(fitres[[i]]))),
do.call("rbind",lapply(1:4,function(i) BIC(fitres[[i]]))),
row.names=pmle)
names(Coef) <- c(pmle)
names(Info) <- c("logLik","AIC","BIC")
Coef
Info
set.seed(1234, kind = "L'Ecuyer-CMRG")
f <- expression( (2.076-0.263*x) )
g <- expression( 0.130*x^1.451 )
mod <- snssde1d(drift=f,diffusion=g,x0=X[1],M=500, N=length(X),t0=1964.471, T=1989.333)
mod
plot(mod,type="n",ylim=c(0,30))
lines(X,col=4,lwd=2)
lines(time(mod),apply(mod$X,1,mean),col=2,lwd=2)
lines(time(mod),apply(mod$X,1,bconfint,level=0.95)[1,],col=5,lwd=2)
lines(time(mod),apply(mod$X,1,bconfint,level=0.95)[2,],col=5,lwd=2)
legend("topleft",c("real data","mean path",paste("bound of", 95,"% confidence")),inset = .01,col=c(4,2,5),lwd=2,cex=0.8) |
sim.es <- function(model="ANN", obs=10, nsim=1,
frequency=1, persistence=NULL, phi=1,
initial=NULL, initialSeason=NULL,
bounds=c("usual","admissible","restricted"),
randomizer=c("rnorm","rlnorm","rt","rlaplace","rs"),
probability=1, ...){
randomizer <- randomizer[1];
ellipsis <- list(...);
bounds <- bounds[1];
if(is.numeric(bounds)){
ellipsis$b <- bounds;
bounds <- "u";
}
if(all(bounds!=c("u","a","r","usual","admissible","restricted"))){
warning(paste0("Strange type of bounds provided: ",bounds,". Switching to 'usual'."),
call.=FALSE);
bounds <- "u";
}
bounds <- substring(bounds[1],1,1);
if(nchar(model)==4){
Etype <- substring(model,1,1);
Ttype <- substring(model,2,2);
Stype <- substring(model,4,4);
if(substring(model,3,3)!="d"){
warning(paste0("You have defined a strange model: ",model),call.=FALSE);
model <- paste0(Etype,Ttype,"d",Stype);
}
if(Ttype!="N" & phi==1){
model <- paste0(Etype,Ttype,Stype);
warning(paste0("Damping parameter is set to 1. Changing model to: ",model),call.=FALSE);
}
}
else if(nchar(model)==3){
Etype <- substring(model,1,1);
Ttype <- substring(model,2,2);
Stype <- substring(model,3,3);
if(phi!=1 & Ttype!="N"){
model <- paste0(Etype,Ttype,"d",Stype);
warning(paste0("Damping parameter is set to ",phi,". Changing model to: ",model),call.=FALSE);
}
}
else{
stop(paste0("You have defined a strange model: ",model,". Cannot proceed"),call.=FALSE);
}
nsim <- abs(round(nsim,0));
obs <- abs(round(obs,0));
frequency <- abs(round(frequency,0));
if(!is.null(persistence) & length(persistence)>3){
stop("The length of persistence vector is wrong! It should not be greater than 3.",call.=FALSE);
}
if(phi<0 | phi>2){
warning(paste0("Damping parameter should lie in (0, 2) region! You have chosen phi=",phi,
". Be careful!"),call.=FALSE);
}
if(Etype!="A" & Etype!="M"){
stop("Wrong error type! Should be 'A' or 'M'.",call.=FALSE);
}
else{
persistenceLength <- 1;
componentsNumber <- 1;
lagsModel <- 1;
componentsNames <- "level";
matw <- 1;
matF <- matrix(1,1,1);
}
if(Ttype!="N" & Ttype!="A" & Ttype!="M"){
stop("Wrong trend type! Should be 'N', 'A' or 'M'.",call.=FALSE);
}
else if(Ttype!="N"){
if(is.na(phi) | is.null(phi)){
phi <- 1;
}
persistenceLength <- persistenceLength + 1;
componentsNumber <- componentsNumber + 1;
lagsModel <- c(lagsModel,1);
componentsNames <- c(componentsNames,"trend");
matw <- c(matw,phi);
matF <- matrix(c(1,0,phi,phi),2,2);
componentTrend=TRUE;
if(phi!=1){
model <- paste0(Etype,Ttype,"d",Stype);
}
}
else{
componentTrend=FALSE;
}
if(Stype!="N" & Stype!="A" & Stype!="M"){
stop("Wrong seasonality type! Should be 'N', 'A' or 'M'.",call.=FALSE);
}
if(Stype!="N" & frequency==1){
stop("Cannot create the seasonal model with the data frequency 1!",call.=FALSE);
}
if(Stype!="N"){
persistenceLength <- persistenceLength + 1;
lagsModel <- c(lagsModel,frequency);
componentsNames <- c(componentsNames,"seasonality");
matw <- c(matw,1);
componentSeasonal <- TRUE;
if(!componentTrend){
matF <- matrix(c(1,0,0,1),2,2);
}
else{
matF <- matrix(c(1,0,0,phi,phi,0,0,0,1),3,3);
}
}
else{
componentSeasonal <- FALSE;
}
lagsModel <- matrix(lagsModel,persistenceLength,1);
lagsModelMax <- max(lagsModel);
matw <- matrix(matw,1,persistenceLength);
arrF <- array(matF,c(dim(matF),nsim));
if(!is.null(persistence)){
if(persistenceLength != length(persistence)){
if(length(persistence)!=1){
warning(paste0("The length of persistence vector does not correspond to the chosen model!\n",
"Falling back to random number generator."),call.=FALSE);
persistence <- NULL;
}
else{
persistence <- rep(persistence,persistenceLength);
}
}
}
if(!is.null(initial)){
if(length(initial)>2){
stop("The length of the initial value is wrong! It should not be greater than 2.",call.=FALSE);
}
if(componentsNumber!=length(initial)){
warning(paste0("The length of initial state vector does not correspond to the chosen model!\n",
"Falling back to random number generator."),call.=FALSE);
initial <- NULL;
}
else{
if(Ttype=="M" & initial[2]<=0){
warning(paste0("Wrong initial value for multiplicative trend! It should be greater than zero!\n",
"Falling back to random number generator."),call.=FALSE);
initial <- NULL;
}
}
}
if(!is.null(initialSeason)){
if(lagsModelMax!=length(initialSeason)){
warning(paste0("The length of seasonal initial states does not correspond to the chosen frequency!\n",
"Falling back to random number generator."),call.=FALSE);
initialSeason <- NULL;
}
}
if(all(randomizer!=c("rnorm","rt","rlaplace","rs","rlnorm")) & (length(ellipsis)==0)){
warning(paste0("The chosen randomizer - ",randomizer," - needs some arbitrary parameters! Changing to 'rnorm' now."),call.=FALSE);
randomizer = "rnorm";
}
if(is.vector(probability)){
if(any(probability!=probability[1])){
if(length(probability)!=obs){
warning("Length of probability does not correspond to number of observations.",call.=FALSE);
if(length(probability)>obs){
warning("We will cut off the excessive ones.",call.=FALSE);
probability <- probability[1:obs];
}
else{
warning("We will duplicate the last one.",call.=FALSE);
probability <- c(probability,rep(probability[length(probability)],obs-length(probability)));
}
}
}
else{
probability <- probability[1];
}
}
if(length(probability)==1){
intermittent <- "fixed";
}
else{
intermittent <- "tsb";
}
if(all(probability==1)){
intermittent <- "none";
}
matg <- matrix(NA,persistenceLength,nsim);
arrvt <- array(NA,c(obs+lagsModelMax,persistenceLength,nsim),dimnames=list(NULL,componentsNames,NULL));
materrors <- matrix(NA,obs,nsim);
matyt <- matrix(NA,obs,nsim);
matot <- matrix(NA,obs,nsim);
if(is.null(persistence)){
if(bounds=="u"){
matg[1,] <- runif(nsim,0,1);
}
else if(bounds=="r"){
matg[1,] <- runif(nsim,0,0.3);
}
if(bounds!="a"){
if(Ttype!="N"){
matg[2,] <- runif(nsim,0,matg[1,]);
}
if(Stype!="N"){
matg[persistenceLength,] <- runif(nsim,0,max(0,1-matg[1]));
}
}
else{
matg[,] <- runif(persistenceLength*nsim,1-1/phi,1+1/phi);
if(Ttype!="N"){
matg[2,] <- runif(nsim,matg[1,]*(phi-1),(2-matg[1,])*(1+phi));
if(Stype!="N"){
Theta.func <- function(Theta){
result <- (phi*matg[1,i]+phi+1)/(matg[3,i]) +
((phi-1)*(1+cos(Theta)-cos(lagsModelMax*Theta)) +
cos((lagsModelMax-1)*Theta)-phi*cos((lagsModelMax+1)*Theta))/(2*(1+cos(Theta))*(1-cos(lagsModelMax*Theta)));
return(abs(result));
}
for(i in 1:nsim){
matg[3,i] <- runif(1,max(1-1/phi-matg[1,i],0),1+1/phi-matg[1,i]);
B <- phi*(4-3*matg[3,i])+matg[3,i]*(1-phi)/lagsModelMax;
C <- sqrt(B^2-8*(phi^2*(1-matg[3,i])^2+2*(phi-1)*(1-matg[3,i])-1)+8*matg[3,i]^2*(1-phi)/lagsModelMax);
matg[1,i] <- runif(1,1-1/phi-matg[3,i]*(1-lagsModelMax+phi*(1+lagsModelMax))/(2*phi*lagsModelMax),(B+C)/(4*phi));
Theta <- 0.1;
Theta <- optim(Theta,Theta.func,method="Brent",lower=0,upper=1)$par;
D <- (phi*(1-matg[1,i])+1)*(1-cos(Theta)) - matg[3,i]*((1+phi)*(1-cos(Theta) - cos(lagsModelMax*Theta)) +
cos((lagsModelMax-1)*Theta)+phi*cos((lagsModelMax+1)*Theta))/
(2*(1+cos(Theta))*(1-cos(lagsModelMax*Theta)));
matg[2,i] <- runif(1,-(1-phi)*(matg[3,i]/lagsModelMax+matg[1,i]),D+(phi-1)*matg[1,i]);
}
}
}
else{
if(Stype!="N"){
matg[1,] <- runif(nsim,-2/(lagsModelMax-1),2);
for(i in 1:nsim){
matg[2,i] <- runif(1,max(-lagsModelMax*matg[1,i],0),2-matg[1,i]);
}
matg[1,] <- runif(nsim,-2/(lagsModelMax-1),2-matg[2,]);
}
}
}
}
else{
matg[,] <- rep(persistence,nsim);
}
if(is.null(initial)){
if(Ttype=="N"){
arrvt[1:lagsModelMax,1,] <- runif(nsim,0,1000);
}
else if(Ttype=="A"){
arrvt[1:lagsModelMax,1,] <- runif(nsim,0,5000);
arrvt[1:lagsModelMax,2,] <- runif(nsim,-100,100);
}
else{
arrvt[1:lagsModelMax,1,] <- runif(nsim,500,5000);
arrvt[1:lagsModelMax,2,] <- 1;
}
initial <- matrix(arrvt[1,1:componentsNumber,],ncol=nsim);
}
else{
arrvt[,1:componentsNumber,] <- rep(rep(initial,each=(obs+lagsModelMax)),nsim);
initial <- matrix(arrvt[1,1:componentsNumber,],ncol=nsim);
}
if(componentSeasonal & is.null(initialSeason)){
if(Stype == "A"){
arrvt[1:lagsModelMax,componentsNumber+1,] <- runif(nsim*lagsModelMax,-500,500);
for(i in 1:nsim){
arrvt[1:lagsModelMax,componentsNumber+1,i] <- arrvt[1:lagsModelMax,componentsNumber+1,i] - mean(arrvt[1:lagsModelMax,componentsNumber+1,i]);
}
}
else{
arrvt[1:lagsModelMax,componentsNumber+1,] <- runif(nsim*lagsModelMax,0.3,1.7);
for(i in 1:nsim){
arrvt[1:lagsModelMax,componentsNumber+1,i] <- arrvt[1:lagsModelMax,componentsNumber+1,i] / exp(mean(log(arrvt[1:lagsModelMax,componentsNumber+1,i])));
}
}
initialSeason <- matrix(arrvt[1:lagsModelMax,componentsNumber+1,],ncol=nsim);
}
else if(componentSeasonal & !is.null(initialSeason)){
arrvt[1:lagsModelMax,componentsNumber+1,] <- rep(initialSeason,nsim);
initialSeason <- matrix(arrvt[1:lagsModelMax,componentsNumber+1,],ncol=nsim);
}
if(length(ellipsis)==0){
ellipsis$n <- nsim*obs;
if(any(randomizer==c("rnorm","rlaplace","rs"))){
materrors[,] <- do.call(randomizer,ellipsis);
}
else if(randomizer=="rt"){
materrors[,] <- rt(nsim*obs,obs-(persistenceLength + lagsModelMax));
}
else if(randomizer=="rlnorm"){
materrors[,] <- rlnorm(n=nsim*obs,0,0.01+(1-probability));
materrors <- materrors - 1;
}
if(randomizer!="rlnorm"){
if(Etype=="M"){
if(any(probability!=1)){
materrors <- materrors * 0.5;
}
else{
materrors <- materrors * 0.1;
}
materrors <- exp(materrors) - 1;
}
else if(Etype=="A"){
if(all(arrvt[1,1,]!=0)){
materrors <- materrors * sqrt(abs(arrvt[1,1,]));
}
if(randomizer=="rs"){
materrors <- materrors / 4;
}
}
}
}
else{
ellipsis$n <- nsim*obs;
materrors[,] <- do.call(randomizer,ellipsis);
if(randomizer=="rbeta"){
materrors <- materrors - 0.5;
materrors <- materrors / rep(sqrt(colMeans(materrors^2)) * sqrt(abs(arrvt[1,1,])),each=obs);
}
else if(randomizer=="rt"){
materrors <- materrors * rep(sqrt(abs(arrvt[1,1,])),each=obs);
}
if(Etype=="M"){
materrors <- materrors - 1;
}
}
if(all(probability == 1)){
matot[,] <- 1;
}
else{
matot[,] <- rbinom(obs*nsim,1,probability);
}
simulateddata <- simulatorwrap(arrvt,materrors,matot,arrF,matw,matg,Etype,Ttype,Stype,lagsModel);
matyt <- simulateddata$matyt;
arrvt <- simulateddata$arrvt;
dimnames(arrvt) <- list(NULL,componentsNames,NULL);
if(any(randomizer==c("rnorm","rt"))){
veclikelihood <- -obs/2 *(log(2*pi*exp(1)) + log(colMeans(materrors^2)));
}
else if(randomizer=="rlaplace"){
veclikelihood <- -obs*(log(2*exp(1)) + log(colMeans(abs(materrors))));
}
else if(randomizer=="rs"){
veclikelihood <- -2*obs*(log(2*exp(1)) + log(0.5*colMeans(sqrt(abs(materrors)))));
}
else if(randomizer=="rlnorm"){
veclikelihood <- -obs/2 *(log(2*pi*exp(1)) + log(colMeans(materrors^2))) - colSums(log(matyt));
}
else if(randomizer=="rinvgauss"){
veclikelihood <- -0.5*(obs*(log(colMeans(materrors^2/(1+materrors))/(2*pi))-1) +
sum(log(matyt/(1+materrors))) - 3*sum(log(matyt)));
}
else{
veclikelihood <- NA;
}
if(nsim==1){
matyt <- ts(matyt[,1],frequency=frequency);
materrors <- ts(materrors[,1],frequency=frequency);
arrvt <- ts(arrvt[,,1],frequency=frequency,start=c(0,frequency-lagsModelMax+1));
matot <- ts(matot[,1],frequency=frequency);
}
else{
matyt <- ts(matyt,frequency=frequency);
materrors <- ts(materrors,frequency=frequency);
matot <- ts(matot,frequency=frequency);
}
if(Ttype!="N"){
rownames(matg) <- c("alpha","beta","gamma")[1:persistenceLength];
}
else{
rownames(matg) <- c("alpha","gamma")[1:persistenceLength];
}
model <- paste0("ETS(",model,")");
if(any(probability!=1)){
model <- paste0("i",model);
}
if(any(is.nan(matyt))){
warning("NaN values were produced by the simulator.",call.=FALSE);
}
model <- list(model=model, data=matyt, states=arrvt, persistence=matg, phi=phi,
initial=initial, initialSeason=initialSeason, probability=probability, intermittent=intermittent,
residuals=materrors, occurrence=matot, logLik=veclikelihood, other=ellipsis);
return(structure(model,class="smooth.sim"));
} |
regmixEM = function (y, x, lambda = NULL, beta = NULL, sigma = NULL, k = 2,
addintercept = TRUE, arbmean = TRUE, arbvar = TRUE, epsilon = 1e-08, maxit = 10000,
verb = FALSE)
{
if(arbmean == FALSE && arbvar == FALSE){
stop(paste("Must change constraints on beta and/or sigma!","\n"))
}
s = sigma
if (addintercept) {
x = cbind(1, x)
}
n <- length(y)
p <- ncol(x)
tmp <- regmix.init(y = y, x = x, lambda = lambda, beta = beta,
s = s, k = k, addintercept = addintercept, arbmean = arbmean, arbvar = arbvar)
lambda <- tmp$lambda
beta <- tmp$beta
s <- tmp$s
k <- tmp$k
diff <- 1
iter <- 0
xbeta <- x %*% beta
res <- (y - xbeta)^2
if(arbmean == FALSE){
res <- sapply(1:k,function(i) res)
}
comp <- t((lambda/sqrt(2 * pi * s^2)) * t(exp(-t(t(res)/(2 *
s^2)))))
obsloglik <- sum(log(apply(comp, 1, sum)))
ll <- obsloglik
z = matrix(nrow = n, ncol = k)
restarts <- 0
while (diff > epsilon && iter < maxit) {
for (i in 1:n) {
for (j in 1:k) {
z.denom = c()
for (h in 1:k) {
z.denom = c(z.denom, (lambda[h]/lambda[j]) *
(s[j * arbvar + (1 - arbvar)]/s[h * arbvar +
(1 - arbvar)]) * exp(-0.5 * ((1/s[h * arbvar +
(1 - arbvar)]^2) * res[i, h] - (1/s[j * arbvar +
(1 - arbvar)]^2) * res[i, j])))
}
z[i, j] = 1/sum(z.denom)
}
}
z = z/apply(z,1,sum)
lambda.new <- apply(z, 2, mean)
if (sum(lambda.new < 1e-08)>0 || is.na(sum(lambda.new))) {
sing <- 1
}
else {
if (arbmean == FALSE) {
if (addintercept) {
beta.new <- lm(y~x[,-1],weights=apply(t(t(z)/(s^2)),1,sum))$coef
}
else beta.new <- lm(y~x-1,weights=apply(t(t(z)/(s^2)),1,sum))$coef
} else {
if (addintercept) {
lm.out <- lapply(1:k, function(i) lm(y ~ x[,
-1], weights = z[, i]))
}
else lm.out <- lapply(1:k, function(i) lm(y ~ x -
1, weights = z[, i]))
beta.new <- sapply(lm.out, coef)
}
xbeta.new <- x %*% beta.new
res <- (y - xbeta.new)^2
if(arbmean == FALSE){
res <- sapply(1:k,function(i) res)
}
if (arbvar) {
s.new <- sqrt(sapply(1:k, function(i) sum(z[,
i] * (res[, i]))/sum(z[, i])))
}
else s.new <- sqrt(sum(z * res)/n)
lambda <- lambda.new
beta <- beta.new
xbeta <- x%*%beta
s <- s.new
sing <- sum(s < 1e-08)
comp <- lapply(1:k, function(i) lambda[i] * dnorm(y, xbeta[,i * arbmean + (1 - arbmean)],
s[i * arbvar + (1 - arbvar)]))
comp <- sapply(comp, cbind)
compsum <- apply(comp, 1, sum)
newobsloglik <- sum(log(compsum))
}
if (sing > 0 || is.na(newobsloglik) || newobsloglik < obsloglik || abs(newobsloglik) ==
Inf){
cat("Need new starting values due to singularity...",
"\n")
restarts <- restarts + 1
if(restarts>15) stop("Too many tries!")
tmp <- regmix.init(y = y, x = x, k = k, addintercept = addintercept,
arbmean = arbmean, arbvar = arbvar)
lambda <- tmp$lambda
beta <- tmp$beta
s <- tmp$s
k <- tmp$k
diff <- 1
iter <- 0
xbeta <- x %*% beta
res <- (y - xbeta)^2
if(arbmean == FALSE){
res <- sapply(1:k,function(i) res)
}
comp <- t((lambda/sqrt(2 * pi * s^2)) * t(exp(-t(t(res)/(2 *
s^2)))))
obsloglik <- sum(log(apply(comp, 1, sum)))
ll <- obsloglik
}
else {
diff <- newobsloglik - obsloglik
obsloglik <- newobsloglik
ll <- c(ll, obsloglik)
iter <- iter + 1
if (verb) {
cat("iteration=", iter, "diff=", diff, "log-likelihood",
obsloglik, "\n")
}
}
}
scale.order = order(s)
sigma.min = min(s)
if (iter == maxit) {
cat("WARNING! NOT CONVERGENT!", "\n")
}
cat("number of iterations=", iter, "\n")
if(arbmean == FALSE){
z=z[,scale.order]
names(beta) <- c(paste("beta", ".", 0:(p-1), sep = ""))
colnames(z) <- c(paste("comp", ".", 1:k, sep = ""))
a=list(x=x, y=y, lambda = lambda[scale.order], beta = beta, sigma = sigma.min, scale = s[scale.order]/sigma.min, loglik = obsloglik,
posterior = z[,scale.order], all.loglik=ll, restarts = restarts, ft="regmixEM")
class(a) = "mixEM"
a
} else {
rownames(beta) <- c(paste("beta", ".", 0:(p-1), sep = ""))
colnames(beta) <- c(paste("comp", ".", 1:k, sep = ""))
colnames(z) <- c(paste("comp", ".", 1:k, sep = ""))
a=list(x=x, y=y, lambda = lambda, beta = beta, sigma = s, loglik = obsloglik,
posterior = z, all.loglik=ll, restarts = restarts, ft="regmixEM")
class(a) = "mixEM"
a
}
} |
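## Usage sketch for regmixEM() on simulated two-component regression data.
## Assumes the companion helpers from the same package (e.g. regmix.init)
## are available; the calls below are illustrative and not run on source.
# set.seed(1)
# n <- 200
# x <- runif(n)
# y <- ifelse(runif(n) < 0.5, 1 + 3 * x, 4 - 2 * x) + rnorm(n, sd = 0.3)
# fit <- regmixEM(y, x, k = 2)
# fit$lambda; fit$beta; fit$sigma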
GetWriteR =
function(UseGitHub = TRUE) {
if (interactive()) {
if (.Platform$OS.type == "windows") {
if (requireNamespace("BrailleR")) {
if (UseGitHub) {
download.file(
"https://raw.github.com/ajrgodfrey/WriteR/master/Python/WriteR.zip",
"WriteR.zip")
unzip("WriteR.zip")
file.remove("WriteR.zip")
file.rename("WriteR.exe",
paste0(getOption("BrailleR.Folder"), "WriteR.exe"))
message(
"The WriteR application has been added to your MyBrailleR folder.")
} else {
browseURL("https://R-Resources.massey.ac.nz/WriteR/WriteR.zip")
message(
"The WriteR application has been downloaded but you need to unzip it.")
message("Move it to your MyBrailleR folder and unzip before use.")
}
message(
"It is assumed you wanted to download this file by issuing the last command.")
message(
"You can delete WriteR.exe at any time to remove WriteR from your system.")
}
} else {
      .WindowsOnly()
}
} else {
.InteractiveOnly()
}
return(invisible(NULL))
}
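## NOTE: a second, stub definition of GetWriteR follows immediately below;
## when both are sourced in order, it replaces the downloader above with a
## call to .TempUnavailable(). This appears intentional in the source package
## (e.g. while the WriteR download is unavailable), so both definitions are kept.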
GetWriteR = function(UseGitHub = TRUE) {
.TempUnavailable()
} |
lv3 <- new("odeModel",
main = function(time, init, parms, inputs) {
s.in <- approxTime1(inputs, time, rule = 2)["s.in"]
with(as.list(c(init, parms)),{
ds <- s.in - b*s*p + g*k
dp <- c*s*p - d*k*p
dk <- e*p*k - f*k
list(c(ds, dp, dk), s.in = s.in)
})
},
parms = c(b = 0.1, c = 0.1, d = 0.1, e = 0.1, f = 0.1, g = 0),
times = c(from = 0, to = 200, by = 1),
inputs = as.matrix(
data.frame(
time = c(0, 99, 100, 101, 200),
s.in = c(0.1, 0.1, 0.5, 0.1, 0.1)
)
),
init = c(s = 1, p = 1, k = 1),
solver = "lsoda"
) |
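## Usage sketch: simulate and plot the model above (assumes the simecol
## package is attached, which supplies the "odeModel" class, sim(),
## approxTime1() and the deSolve "lsoda" solver). Not run on source:
# library(simecol)
# lv3 <- sim(lv3)
# plot(lv3)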
residualsStudy <- function(model)
{
residuals <- model$model$residuals
if (model$type=="PolyMARS"){
Yfit <- model$model$fitted
} else Yfit <- model$model$fitted.values
op <- par(mfrow = c(1, 3),pty="s")
plot(residuals ,ylab = "residuals")
plot(Yfit,residuals,xlab = "fitted values",ylab = "residuals")
hist(residuals, freq=FALSE,xlab = "residuals", ylab="density",main= "",
ylim=c(0,max(density(residuals)$y, hist(residuals,plot=FALSE)$density)))
lines(density(residuals) ,col="red")
par(op)
title("Residuals study", cex.main=1.3)
} |
MixedDistributionError <- function(Means, SDs, Weights, Data, rho = 0.5, breaks = NULL, Kernels = NULL, ErrorMethod = "chisquare"){
if(!is.vector(Data)) stop("Data is not a vector")
iqr = quantile(Data, 0.75) - quantile(Data, 0.25)
  # Scott-style bin width; Data is a vector, so its size is length(Data), not nrow(Data)
  obw = 3.49 * (min(sd(Data), iqr/1.349) / length(Data)^(1/3))
  NoBins = max((max(Data) - min(Data)) / obw, 10)
  if(is.null(breaks)) breaks = seq(min(Data), max(Data), length.out = NoBins + 1)
if(is.null(Kernels)) Kernels = as.vector(seq(min(Data), max(Data), length.out = NoBins))
distvec = (stats::dist(as.matrix(Data)))
ParetoRad = quantile(distvec, 18/100, na.rm = TRUE)
ParetoRad = ParetoRad * 4 / length(Data)^0.2
inRad = sapply(Kernels, function(k) (Data >= (k - ParetoRad)) & (Data <= (k + ParetoRad)))
nrInRad = as.vector(colSums(inRad))
normv = pracma::trapz(Kernels, nrInRad)
paretoDensity = nrInRad / normv
if(ErrorMethod == "pdeerror"){
V = Pdf4Mixtures(Kernels, Means, SDs, Weights)
SimilarityError = sum(abs(V$PDFmixture - paretoDensity))
}
else if(ErrorMethod == "chisquare"){
estimatedBins = BinProb4Mixtures(Means, SDs, Weights, breaks)*length(Data)
norm = estimatedBins
norm[norm<1]=1
observedBins = hist(Data, breaks=breaks, plot = F)$counts
diffssq = (observedBins - estimatedBins)^2
diffssq[diffssq<4] = 0
SimilarityError = sum(diffssq/norm)
SimilarityError = SimilarityError / length(Data)
}
else{
stop(paste("ErrorMethod", ErrorMethod, "is not recognized. Please use either 'pdeeerror' or 'chisquare'"))
}
OError = OverlapErrorByDensity(Means, SDs, Weights, Data, Kernels)$OverlapError
DistributionError = rho*SimilarityError + (1-rho)*OError
return(list(SimilarityError = SimilarityError, OverlapError = OError, MixedDistributionError = DistributionError))
} |
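## Usage sketch on a simulated two-component Gaussian mixture. Assumes the
## companion helpers referenced above (Pdf4Mixtures, BinProb4Mixtures,
## OverlapErrorByDensity) are available from the same package. Not run:
# set.seed(42)
# x <- c(rnorm(500, 0, 1), rnorm(500, 4, 1))
# MixedDistributionError(Means = c(0, 4), SDs = c(1, 1),
#                        Weights = c(0.5, 0.5), Data = x,
#                        ErrorMethod = "chisquare")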
dvariogram <-
function(obs, vth=NULL,vlen = NULL)
{
z <- matrix(obs[,4],ncol=1)
G <- (dist(z))^2/2
tstamp <- matrix(obs[,3],ncol=1)
coords <- matrix(obs[,1:2],ncol=2)
dx <- diff(range(coords[,1]))
dy <- diff(range(coords[,2]))
dmax <- sqrt(dx^2 + dy^2)
tmax <- diff(range(tstamp))
if(is.null(vth)) vth <- c(dmax,tmax)*.75
if(is.null(vlen)) vlen <- c(15,floor(tmax))
dmat <- dist(coords)
tmat <- dist(tstamp)
dbins <- seq(-1e-5,vth[1]+0.01,len=vlen[1]+1)
tbins <- seq(-0.01,vth[2]+0.01,len=vlen[2]+1)
dii <- findInterval(dmat,dbins)
tii <- findInterval(tmat,tbins)
tmp <- aggregate(x=as.vector(G),by=list(dbins=(dii),tbins=(tii)),FUN=mean)
ddist <- dbins[-1]
tdist <- tbins[-1]
gamma.long <- data.frame(s=ddist[tmp[,1]],t=tdist[tmp[,2]],gamma=tmp[,3])
vmm <- matrix(NA,ncol=length(dbins),nrow=length(tbins))
colnames(vmm) <- paste('(',round(dbins,2),
',',c(round(dbins[-1],2),999),')',sep='')
row.names(vmm) <- paste('(',round(tbins,2),
',',c(round(tbins[-1],2),999),')',sep='')
vmm[as.matrix(tmp[,2:1])] <- tmp[,3]
nmm <- matrix(0,ncol=length(dbins),nrow=length(tbins))
tmp <- aggregate(x=as.vector(G),
by=list(dbins=as.vector(dii),tbins=as.vector(tii)),
FUN=length)
gamma.long$n <- tmp[,3]
nmm[as.matrix(tmp[,2:1])] <- tmp[,3]
colnames(nmm) <- paste('(',round(dbins,2),
',',c(round(dbins[-1],2),999),')',sep='')
row.names(nmm) <- paste('(',round(tbins,2),
',',c(round(tbins[-1],2),999),')',sep='')
vmm <- vmm[-nrow(vmm),-ncol(vmm)]
nmm <- nmm[-nrow(nmm),-ncol(nmm)]
  ii <- with(gamma.long, which(is.na(s) | is.na(t)))
  # drop rows that fell outside the distance/time cutoffs; guard against an
  # empty index, since x[-integer(0), ] would drop every row
  if(length(ii) > 0) gamma.long <- gamma.long[-ii,]
  gamma.s0 <- data.frame(x=ddist,n=nmm[1,],gamma=vmm[1,])
  gamma.0t <- data.frame(x=tdist,n=nmm[,1],gamma=vmm[,1])
  list(gamma=vmm,gamma.s0=gamma.s0,gamma.0t=gamma.0t,gamma.long=gamma.long,np = nmm, ddist=ddist,tdist=tdist)
} |
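## Usage sketch: obs is expected to be a matrix-like object with columns
## (x, y, time, value); the column names below are illustrative only. Not run:
# set.seed(1)
# obs <- cbind(x = runif(50), y = runif(50),
#              t = rep(1:10, each = 5), z = rnorm(50))
# v <- dvariogram(obs)
# v$gamma.s0   # spatial variogram at the smallest temporal lag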
combinedist <-
function(..., method=c("median", "mean"))
{
v <- list(...)
if(length(v) == 1 && is.list(v[[1]])) v <- v[[1]]
if(!all(sapply(v, function(a) inherits(a, "lineupdist"))))
stop("Input distance matrices must each be of class \"lineupdist\".")
cl <- lapply(v, class)
cl_len <- sapply(cl, length)
if(!all(cl_len == cl_len[1])) {
stop("Need all of the distance matrices to be the same type.")
}
else {
same <- sapply(cl, function(a) all(a==cl[[1]]))
if(!all(same)) {
stop("Need all of the distance matrices to be the same type.")
}
}
rn <- unique(unlist(lapply(v, rownames)))
cn <- unique(unlist(lapply(v, colnames)))
method <- match.arg(method)
d <- array(dim=c(length(rn), length(cn), length(v)))
dimnames(d) <- list(rn, cn, names(v))
for(i in seq(along=v))
d[rownames(v[[i]]),colnames(v[[i]]),i] <- v[[i]]
if(method=="mean" && all(sapply(v, function(a) !is.null(attr(a, "denom"))))) {
use.denom <- TRUE
denom <- array(dim=c(length(rn), length(cn), length(v)))
dimnames(denom) <- list(rn, cn, names(v))
for(i in seq(along=v))
denom[rownames(v[[i]]), colnames(v[[i]]), i] <- attr(v[[i]], "denom")
}
else use.denom <- FALSE
if(method=="median")
ds <- apply(d, 1:2, median, na.rm=TRUE)
else if(use.denom) {
denom.sum <- apply(denom, 1:2, sum, na.rm=TRUE)
ds <- apply(d*denom, 1:2, sum, na.rm=TRUE)/denom.sum
attr(ds, "denom") <- denom.sum
}
else
ds <- apply(d, 1:2, mean, na.rm=TRUE)
class(ds) <- class(v[[1]])
possible.attributes <- c("d.method", "compareWithin")
for(i in possible.attributes[possible.attributes %in% names(attributes(v[[1]]))])
attr(ds, i) <- attr(v[[1]], i)
ds
} |
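## Usage sketch: combine two distance matrices of class "lineupdist"
## (produced elsewhere in the lineup package) into a single matrix. Not run:
# d_combined <- combinedist(d1, d2, method = "median")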
geom_parallel_slopes <- function(mapping = NULL, data = NULL,
position = "identity", ...,
se = TRUE, formula = y ~ x, n = 100,
fullrange = FALSE, level = 0.95,
na.rm = FALSE, show.legend = NA,
inherit.aes = TRUE) {
dots <- list(...)
if ("method" %in% names(dots)) {
warning(
"`geom_parallel_slopes()` doesn't need a `method` argument ",
'("lm" is used).',
call. = FALSE
)
dots <- dots[setdiff(names(dots), "method")]
}
stat_params <- c(
na.rm = na.rm, se = se, formula = formula, n = n, fullrange = fullrange,
level = level
)
params <- c(stat_params, dots)
ggplot2::layer(
geom = ggplot2::GeomSmooth, stat = StatParallelSlopes, data = data,
mapping = mapping, position = position, params = params,
inherit.aes = inherit.aes, show.legend = show.legend
)
}
StatParallelSlopes <- ggplot2::ggproto(
"StatParallelSlopes", ggplot2::Stat,
required_aes = c("x", "y"),
compute_panel = function(data, scales, se = TRUE, formula = y ~ x, n = 100,
fullrange = FALSE, level = 0.95) {
if (nrow(data) == 0) {
return(data[integer(0), ])
}
model_info <- compute_model_info(data, formula)
formula <- model_info$formula
data <- model_info$data
model <- stats::lm(formula = formula, data = data)
groups <- split(data, data$group)
groups_new_data <- lapply(
X = groups, FUN = compute_group_new_data,
scales = scales, n = n, fullrange = fullrange
)
stats <- lapply(
X = groups_new_data, FUN = predict_df,
model = model, se = se, level = level
)
stats <- mapply(restore_unique_cols, stats, groups, SIMPLIFY = FALSE)
dplyr::bind_rows(stats)
}
)
compute_model_info <- function(data, formula) {
if (has_unique_value(data$group)) {
    warning(
      "`geom_parallel_slopes()` didn't receive a grouping variable with more ",
      "than one unique value. Make sure you supply one. Basic model is fitted.",
      call. = FALSE
    )
} else {
data$group <- as.factor(data$group)
formula <- as.formula(paste0(deparse(formula), " + group"))
}
list(formula = formula, data = data)
}
compute_group_new_data <- function(group_df, scales, n, fullrange) {
if (fullrange) {
support <- scales$x$dimension()
} else {
support <- range(group_df$x, na.rm = TRUE)
}
x_seq <- seq(support[1], support[2], length.out = n)
group_seq <- rep(group_df$group[1], n)
data.frame(x = x_seq, group = group_seq)
}
predict_df <- function(model, new_data, se, level) {
pred <- stats::predict(
model,
newdata = new_data, se.fit = se, level = level,
interval = if (se) "confidence" else "none"
)
if (isTRUE(se)) {
fit <- as.data.frame(pred$fit)
names(fit) <- c("y", "ymin", "ymax")
data.frame(x = new_data$x, fit, se = pred$se.fit)
} else {
data.frame(x = new_data$x, y = as.vector(pred))
}
}
restore_unique_cols <- function(new, old) {
is_unique <- sapply(old, has_unique_value)
unique_df <- old[1, is_unique, drop = FALSE]
rownames(unique_df) <- seq_len(nrow(unique_df))
missing <- !(names(unique_df) %in% names(new))
cbind(new, unique_df[rep(1, nrow(new)), missing, drop = FALSE])
}
has_unique_value <- function(x) {
length(unique(x)) <= 1
} |
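## Usage sketch: parallel-slopes regression lines, one per group, on a
## standard dataset (requires ggplot2). Not run on source:
# library(ggplot2)
# ggplot(mtcars, aes(x = wt, y = mpg, color = factor(cyl))) +
#   geom_point() +
#   geom_parallel_slopes(se = FALSE)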
ravelry_get <- function(path, query = NULL) {
url <- httr::modify_url(url = "https://api.ravelry.com/", path = path, query = query)
url <- gsub('\\%2B', '+', url)
response <- httr::GET(url,
authenticate(
Sys.getenv('RAVELRY_USERNAME'),
Sys.getenv('RAVELRY_PASSWORD')
),
user_agent("http://github.com/walkerkq/ravelRy")
)
if (httr::http_type(response) != "application/json") {
stop("API did not return json", call. = FALSE)
} else if(response$status_code != 200) {
stop(paste0("API returned status ", response$headers$status), call. = FALSE)
}
response
} |
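## Usage sketch; requires valid API credentials in the RAVELRY_USERNAME and
## RAVELRY_PASSWORD environment variables. The endpoint path is illustrative.
# res <- ravelry_get(path = "patterns/search.json", query = list(query = "hat"))
# httr::content(res)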
"traffic2"
|
NN_EXECUTION <<- TRUE
nn.modelo <- function(variable.pr = NULL, threshold = 0.01, stepmax = 1000, cant.cap = 2, ...){
threshold <- ifelse(threshold == 0, 0.01, threshold)
stepmax <- ifelse(stepmax < 100, 100, stepmax)
capas <- as.string.c(as.numeric(list(...)[1:cant.cap]), .numeric = TRUE)
return(paste0("modelo.nn <<- train.neuralnet(",variable.pr,"~., data = datos.aprendizaje, hidden = ",capas,",\n\t\t\tlinear.output = FALSE,",
"threshold = ",threshold,", stepmax = ",stepmax,")\n"))
}
nn.prediccion <- function() {
return(paste0("prediccion.nn <<- predict(modelo.nn, datos.prueba, type = 'class')"))
}
nn.MC <- function(){
return(paste0("MC.nn <<- confusion.matrix(datos.prueba, prediccion.nn)","\n"))
}
nn.plot <- function(){
paste0("plot(modelo.nn,,arrow.length = 0.1, rep = 'best', intercept = T,x.entry = 0.1, x.out = 0.9,\n\t",
"information=F,intercept.factor = 0.8,col.entry.synapse='red',col.entry='red',col.out='green',col.out.synapse='green',\n\t",
"dimension=15, radius = 0.2, fontsize = 10)")
} |
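## These helpers assemble R code as character strings for later evaluation
## (e.g. inside a Shiny session); nn.modelo() returns the text of a
## train.neuralnet() call. Assumes the internal as.string.c() helper exists.
# cat(nn.modelo(variable.pr = "Species", threshold = 0.05, stepmax = 500,
#               cant.cap = 2, 4, 3))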
uptimerobot.account.details <- function(api.key, unlist = FALSE){
  if(is.null(api.key) ||
     is.na(api.key) ||
     (is.character(api.key) && nchar(api.key)==0)
  ) stop("api.key cannot be empty or NULL")
data <- fromJSON(
getURL(
URLencode(paste0("https://api.uptimerobot.com/getAccountDetails?apiKey=",
api.key,
"&format=json&noJsonCallback=1"
)
)
),
unexpected.escape="keep"
)
if(data$stat=="ok") {
if(!unlist) return(lapply(data$account, function(x){ as.integer(x)}))
data.unlisted <- as.integer(unlist(data$account))
names(data.unlisted) <- names(unlist(data$account))
return(data.unlisted)
}
else {
stop(data$message)
}
} |
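## Usage sketch; needs a valid Uptime Robot API key (the environment variable
## name below is just an illustration). Not run:
# details <- uptimerobot.account.details(Sys.getenv("UPTIMEROBOT_APIKEY"),
#                                        unlist = TRUE)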
makeLabels <- function(theMin,theMax,unit="") {
if (!is.na(as.numeric(theMin)) & !is.na(as.numeric(theMax))) {
step <- 10^floor(log10(theMax-theMin))
minorFactor <- 2
if ((theMax-theMin)/step < 6) {
step <- step/2
minorFactor <- 2
}
if ((theMax-theMin)/step < 6) {
step <- step/2
minorFactor <- 2.5
}
if ((theMax-theMin)/step < 6) {
step <- step/2.5
minorFactor <- 2
}
minTick <- step*ceiling(theMin/step)
if ((minTick-theMin)/step > 0.5) minTick <- minTick-step
maxTick <- step*floor(theMax/step)
if ((theMax-maxTick)/step > 0.5) maxTick <- maxTick+step
labels <- seq(from = minTick, to = maxTick, by = step)
if (log10(step) < 0) {
prefix <- switch(abs(ceiling(log10(step)/3))+1,"","m","\u00B5","n","p","f","a")
divisor <- switch(abs(ceiling(log10(step)/3))+1,1,0.001,0.000001,0.000000001,0.000000000001,0.000000000000001,0.000000000000000001)
} else {
prefix <- switch(floor(log10(step)/3)+1,"","k","M","G","T","P","E")
divisor <- switch(floor(log10(step)/3)+1,1,1000,1000000,1000000000,1000000000000,1000000000000000,1000000000000000000)
}
names <- paste0(labels/divisor,prefix,unit)
names <- gsub(paste0("^",0,prefix,unit),"0",names)
labels <- data.frame(ticks=labels,labels=names)
attr(labels,"minorTickFactor") <- as.character(minorFactor)
return(labels)
} else {
stop("There is a problem with the supplied minimum and maximum!")
}
} |
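## Example (base R only): tick positions and human-readable labels for an
## axis spanning 0..1234 Hz; returns a data.frame of ticks and labels such as
## "0", "100Hz", ..., with the minor-tick factor stored as an attribute.
# lab <- makeLabels(0, 1234, unit = "Hz")
# lab
# attr(lab, "minorTickFactor")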
vbmfa <- function(Y, qmax = NULL, maxtries = 3, verbose = FALSE, varimax = FALSE){
k <- qmax
if(is.null(k)){
k = dim(Y)[2]
qmax <- k-1
} else {
k <- qmax + 1
}
stopifnot("verbose should be either TRUE or FALSE"={verbose %in% c(TRUE,FALSE)})
stopifnot("varimax should be either TRUE or FALSE"={varimax %in% c(TRUE,FALSE)})
stopifnot("Y should be an n by p numeric matrix"={is.numeric(Y)&is.matrix(Y)})
stopifnot("qmax should be numeric"={is.numeric(qmax)})
stopifnot("qmax should be an integer"={qmax%%1==0})
  stopifnot("qmax must be greater than zero and less than or equal to p-1" = {(qmax>0) & (qmax <= ncol(Y)-1)} )
cat(sprintf("Variational Bayesian MFA fitting process begun...\n"))
start.time <- Sys.time()
pcaflag=0
Fflag=0
dsp=pi
Y <- t(Y)
dims <- dim(Y)
p <- dims[1]; n <- dims[2]
selection_method = 4;
getbeta = NULL;
net = list(
model = "variational MFA",
hparams = list(mcl = NULL,psii=NULL,pa=NULL,pb=NULL,alpha=NULL),
params = list(Lm = NULL, Lcov = NULL, u = NULL),
hidden = list(Xm = NULL, Xcov = NULL, Qns = NULL, a = NULL, b = NULL),
Fhist = NULL
)
pa = 1; pb = 1; alpha = 1; psii = matrix(rep(1,p), nrow = p)
Lm = vector(mode = "list")
Lcov = vector(mode = "list")
Lm[[1]] = matrix(rnorm(p*k), nrow = p)
Lcov[[1]] = array(rep(diag(k),p),dim = c(k,k,p))
u = 1;
mean_mcl = matrix(rowMeans(Y),ncol = 1);
nu_mcl = (1/matrix(sqrt(rowSums((Y - matrix(rep(mean_mcl,n),ncol=n))^2/(n-1))),ncol=1))^2;
Xcov = vector(mode = "list")
Xcov[[1]] = array(0,dim = c(k,k,n));
s = length(Lm);
pu = alpha*matrix(rep(1,s),nrow = 1)/s;
psimin = 1e-5;
maintol = 6;
finetol = 8;
tries = 0;
removal = 1;
F_ = -Inf;
Fhist = NULL;
it = 0; I = diag(k-1);
tol = exp(-maintol);
Qns = matrix(rep(1,n*s),nrow = n)/s;
b <- list()
Xm = vector(mode = "list")
trXm = vector(mode = "list")
tempOut <- inferQX(Y,Lm,Lcov,psii,Xcov,trXm,Xm)
Xcov = tempOut$Xcov
trXm = tempOut$trXm
Xm = tempOut$Xm
candorder = matrix()
cophd = NULL
tempOut <- inferQns(Y,Lm,Qns,psii,Lcov,Xm,Xcov,candorder,b,u,pu,removal,parent,pos,cophd, verbose)
Qns = tempOut$Qns; Ps = tempOut$Ps; Lm = tempOut$Lm; Lcov = tempOut$Lcov;
Xm = tempOut$Xm; Xcov = tempOut$Xcov; b = tempOut$b; u = tempOut$u;
pu = tempOut$pu; s = tempOut$s; dQns_sagit = tempOut$dQns_sagit; candorder = tempOut$candorder;
cophd = tempOut$cophd;
tempOut <- inferQnu(Lm,pa,pb,Lcov,b)
s = tempOut$s; b = tempOut$b; a = tempOut$a
num = vector(mode = "list")
tempOut <- learn(it, Y,Lm,Lcov,psii,Xcov,trXm,Xm,
mean_mcl,nu_mcl,a,b,Qns,candorder,u,pu,
removal, alpha, pa, pb, pcaflag, psimin, num, parent, pos, cophd, verbose)
it = tempOut$it; Xm=tempOut$Xm; trXm= tempOut$trXm; Xcov = tempOut$Xcov;
mean_Lambda = tempOut$mean_Lambda; num = tempOut$num; Lcov = tempOut$Lcov;
Lm = tempOut$Lm; Qns = tempOut$Qns; Ps = tempOut$Ps; b = tempOut$b;
u = tempOut$u; pu = tempOut$pu; s = tempOut$s; dQns_sagit = tempOut$dQns_sagit;
a = tempOut$a; psi = tempOut$psi; psii = tempOut$psii; zeta = tempOut$zeta;
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl
candorder = tempOut$candorder; cophd = tempOut$cophd;
workspace = list(psii = psii, Lm = Lm, Lcov = Lcov, Qns = Qns, a = a, b = b, u = u)
candorder = cbind(1:length(Lm),0); pos = 1; parent = candorder[pos];
epoch = 0;
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha,Lcov,Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
Ftarg = F_;
cumul_time = 0;
time.vec <- c(0)
birthFail = FALSE
while(parent != 0){
loop.start.time <- Sys.time()
epoch = epoch+1; cophd = 0;
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha,Lcov,Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
Ftarg = F_; dF = 0;
if(length(Lm) < parent){birthFail = TRUE; break}
tempOut = dobirth(Y,Lm,psii,parent,Qns,Lcov,b,u,pu,alpha, Xcov, trXm, Xm, mean_mcl,nu_mcl,a,num, verbose)
Lcov = tempOut$Lcov; Lm = tempOut$Lm; b=tempOut$b; u=tempOut$u; pu = tempOut$pu;
mean_Lambda = tempOut$mean_Lambda; num = tempOut$num;
Xcov = tempOut$Xcov; trXm = tempOut$trXm; Xm = tempOut$Xm
Qns = tempOut$Qns
tol = maintol;
dQns_sagit = tol;
counter = 0
while (length(which(dQns_sagit>=exp(-tol))) > 0 ) {
tempOut <- learn(it, Y,Lm,Lcov,psii,Xcov,trXm,Xm,
mean_mcl,nu_mcl,a,b,Qns,candorder,u,pu,
removal, alpha, pa, pb, pcaflag, psimin, num, parent, pos, cophd, verbose)
it = tempOut$it; Xm=tempOut$Xm; trXm= tempOut$trXm; Xcov = tempOut$Xcov;
mean_Lambda = tempOut$mean_Lambda; num = tempOut$num; Lcov = tempOut$Lcov;
Lm = tempOut$Lm; Qns = tempOut$Qns; Ps = tempOut$Ps; b = tempOut$b;
u = tempOut$u; pu = tempOut$pu; s = tempOut$s; dQns_sagit = tempOut$dQns_sagit;
a = tempOut$a; psi = tempOut$psi; psii = tempOut$psii; zeta = tempOut$zeta;
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl
candorder = tempOut$candorder; cophd = tempOut$cophd;
counter = counter + 1
if(Fflag == 1){
dF_old = dF;
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha, Lcov, Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
if(verbose){
print(paste('Iteration: ',it, ', F-value: ', F_, ', Change in F: ', dF, '.'))}
}
}
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha, Lcov, Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
if(F_ > Ftarg){
if(verbose){
print(paste('Accepting F= ', F_, ' (', Ftarg,')'))};
if(cophd == 1){
if(verbose){
print('Child of parent has died, no reordering.')};
pos = pos+1;
} else {
tempOut <- ordercands(Y,Lm,Fmatrix,Qns,getbeta,selection_method, verbose)
pos = tempOut$pos; candorder = tempOut$candorder
tries = 0;
}
workspace = list( psii=psii, Lm = Lm, Lcov = Lcov, Qns = Qns, a = a, b = b,u= u);
tempOut <- inferQX(Y,Lm,Lcov,psii,Xcov,trXm,Xm)
Xcov = tempOut$Xcov; trXm = tempOut$trXm; Xm = tempOut$Xm
tempOut <- infermcl(Lm,Lcov)
      if(!is.character(tempOut)){
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl}
tempOut <- learn(it, Y,Lm,Lcov,psii,Xcov,trXm,Xm,
mean_mcl,nu_mcl,a,b,Qns,candorder,u,pu,
removal, alpha, pa, pb, pcaflag, psimin, num, parent, pos, cophd, verbose)
it = tempOut$it; Xm=tempOut$Xm; trXm= tempOut$trXm; Xcov = tempOut$Xcov;
mean_Lambda = tempOut$mean_Lambda; num = tempOut$num; Lcov = tempOut$Lcov;
Lm = tempOut$Lm; Qns = tempOut$Qns; Ps = tempOut$Ps; b = tempOut$b;
u = tempOut$u; pu = tempOut$pu; s = tempOut$s; dQns_sagit = tempOut$dQns_sagit;
a = tempOut$a; psi = tempOut$psi; psii = tempOut$psii; zeta = tempOut$zeta;
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl
candorder = tempOut$candorder; cophd = tempOut$cophd;
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha, Lcov, Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
Ftarg = F_;
} else {
if(verbose){
print(paste('Rejecting F= ', F_, ' (', Ftarg , ')'))};
psii = workspace$psii
Lm = workspace$Lm
Lcov = workspace$Lcov
Qns = workspace$Qns
a = workspace$a
b = workspace$b
u = workspace$u
tempOut <- inferQX(Y,Lm,Lcov,psii,Xcov,trXm,Xm)
Xcov = tempOut$Xcov; trXm = tempOut$trXm; Xm = tempOut$Xm
tempOut <- infermcl(Lm,Lcov)
      if(!is.character(tempOut)){
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl}
tempOut <- learn(it, Y,Lm,Lcov,psii,Xcov,trXm,Xm,
mean_mcl,nu_mcl,a,b,Qns,candorder,u,pu,
removal, alpha, pa, pb, pcaflag, psimin, num, parent, pos, cophd, verbose)
it = tempOut$it; Xm=tempOut$Xm; trXm= tempOut$trXm; Xcov = tempOut$Xcov;
mean_Lambda = tempOut$mean_Lambda; num = tempOut$num; Lcov = tempOut$Lcov;
Lm = tempOut$Lm; Qns = tempOut$Qns; Ps = tempOut$Ps; b = tempOut$b;
u = tempOut$u; pu = tempOut$pu; s = tempOut$s; dQns_sagit = tempOut$dQns_sagit;
a = tempOut$a; psi = tempOut$psi; psii = tempOut$psii; zeta = tempOut$zeta;
mean_mcl = tempOut$mean_mcl; nu_mcl = tempOut$nu_mcl
candorder = tempOut$candorder; cophd = tempOut$cophd;
pos = pos +1;
}
if(verbose){
print(paste("Iteration: ", it, " Completion: ", ( tries/maxtries+(pos-1)/(dim(candorder)[2]-1)/maxtries )*100 , '%'))
} else {
cat(sprintf(paste('\rIteration: ',it, ', F-value: ', round(F_,6),'.', sep = "")))
}
if(pos > dim(candorder)[2]){
pos <- dim(candorder)[2];
}
parent = candorder[pos];
if(parent == 0){
tries = tries+1;
if(tries != maxtries){
if(verbose){ print('End of ordering reached, reordering and trying more splits')}
tempOut <- inferQX(Y,Lm,Lcov,psii,Xcov,trXm,Xm)
Xcov = tempOut$Xcov; trXm = tempOut$trXm; Xm = tempOut$Xm
tempOut <- inferQns(Y,Lm,Qns,psii,Lcov,Xm,Xcov,candorder,b,u,pu,removal,parent,pos,cophd, verbose)
Qns = tempOut$Qns; Ps = tempOut$Ps; Lm = tempOut$Lm; Lcov = tempOut$Lcov;
Xm = tempOut$Xm; Xcov = tempOut$Xcov;
b = tempOut$b; u = tempOut$u; pu = tempOut$pu; s = tempOut$s;
dQns_sagit = tempOut$dQns_sagit; candorder = tempOut$candorder; cophd = tempOut$cophd;
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha, Lcov, Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
tempOut = ordercands(Y,Lm,Fmatrix,Qns,getbeta,selection_method, verbose)
pos = tempOut$pos; candorder = tempOut$candorder
parent = candorder[pos];
}
}
loop.end.time <- Sys.time()
loop.time <- loop.end.time - loop.start.time
time.vec <- c(time.vec, loop.time)
cumul_time <- cumul_time + loop.time
if(verbose){
print(paste("Time since last update: ", loop.time, "s. Cumulative running time: ", cumul_time, 's.'))}
}
if(birthFail == TRUE){
cat(sprintf(paste('Problem encountered while fitting. Consider refitting model.')))
}
end.time <- Sys.time()
cat(sprintf(paste('\nOptimisation complete with ', length(Lm), 'components.\n')))
totaltime = difftime(end.time,start.time,units = "secs")
cat(sprintf(paste('The total time taken for completion was ',totaltime , 's.\n')))
tempOut = Fcalc(Lm,Y,Qns,F_,a,u,pu,Xm,Xcov,Ps,psii,pi,b,pa,pb,nu_mcl,mean_mcl,it,alpha, Lcov, Fhist);
Fmatrix = tempOut$Fmatrix; pu = tempOut$p; Ps = tempOut$Ps;
F_old = tempOut$F_old;Qnsmod = tempOut$Qnsmod; FmatrixKLpi = tempOut$FmatrixKLpi;
F_ = tempOut$F_; dF = tempOut$dF; Fhist = tempOut$Fhist;
g <- length(Lm)
Mu <- vector(mode = 'list'); Lambda <- vector(mode = 'list');
for(i in 1:g){
Mu[[i]] <- matrix(Lm[[i]][,1],ncol = 1)
Lambda[[i]] <- matrix(Lm[[i]][,2:dim(Lm[[i]])[2]], ncol = dim(Lm[[i]])[2] - 1)
}
final.Lambda <- do.call(cbind, Lambda);
final.Mu <- do.call(cbind, Mu)
final.Pi <- matrix((1/n)*colSums(Qns), nrow = 1)
final.Psi <- matrix(rep(1/psii, g), ncol = g)
numFactors <- matrix(ncol = g)
for(i in 1:g){
numFactors[1,i] = dim(Lambda[[i]])[2]
}
tempLL <- logL(t(Y),g,final.Pi,final.Lambda,final.Mu,final.Psi,numFactors)
ll <- tempLL$logL
q <- max(numFactors);
d <- (g - 1) + 2 * g * p + sum(p * as.vector(numFactors) -
as.vector(numFactors)* (as.vector(numFactors) - 1)/2)
bic <- log(n)*d -2*ll;
D <- array(dim = c(p,p,g));
D[,,] <-diag(1/as.vector(psii))
out <- vector(mode = 'list')
out$model$pivec = (1/n)*colSums(Qns)
out$model$mu = abind::abind(Mu,along = 3)
out$model$B = abind::abind(Lambda,along=3)
out$model$D = D
out$model$numFactors <- numFactors
out$diagnostics$bic = bic
out$diagnostics$logL = ll
out$diagnostics$Fhist = Fhist
out$diagnostics$times = time.vec
out$diagnostics$totalTime = totaltime
out$clustering$responsibilities = Qns;
out$clustering$allocations = Rfast::rowMaxs(Qns);
if(birthFail){
out$diagnostics$model_complete = FALSE
}else{
out$diagnostics$model_complete = TRUE
}
if(varimax){
p <- dim(out$model$B)[1]
no.comp <- dim(out$model$B)[3]
for(i in 1:no.comp){
out$model$B[,,i] <- matrix(out$model$B[,,i], nrow = p) %*% stats::varimax(matrix(out$model$B[,,i], nrow = p))$rotmat
}
}
return(out)
} |
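## Usage sketch on simulated data; assumes the remaining internal helpers
## (inferQX, inferQns, learn, Fcalc, dobirth, ordercands, infermcl, logL, ...)
## from the same package are available. Not run:
# set.seed(1)
# Y <- scale(matrix(rnorm(200 * 8), nrow = 200))
# fit <- vbmfa(Y, qmax = 3, maxtries = 2)
# fit$diagnostics$bic
# fit$clustering$allocations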
split_shape <- function(x, f, drop=TRUE, ...) {
if (!is.factor(f)) {
warning("f is not a factor", call. = FALSE)
f <- as.factor(f)
}
lev <- if (drop) {
intersect(levels(f), f)
} else levels(f)
xlist <- lapply(lev, function(l) {
ids <- which(f==l)
if (length(ids)==0L) NULL else x[ids,]
})
names(xlist) <- lev
xlist
} |
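## Example: split the rows of a matrix (or a Spatial*-like object that
## supports x[ids, ]) by a factor; unused levels are dropped by default.
# m <- matrix(1:20, ncol = 2)
# f <- factor(rep(c("a", "b"), each = 5))
# split_shape(m, f)   # list with elements "a" and "b"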
spectrogram <- function(signal,
sRate,
maxFreq = 25,
n = 1024,
window = n*2,
overlap = 0,
                        # NOTE: the original default palette (a vector of "#RRGGBB" hex codes)
                        # was lost here because "#" starts an R comment; a neutral built-in
                        # palette is substituted so that the signature parses.
                        cols = rev(grDevices::hcl.colors(100, "Spectral")),
freq = 4,
plot = TRUE,
startTime = as.POSIXct("1970/01/01 00:00:00")){
resample_sRate <- maxFreq*2
x <- signal::resample(signal,resample_sRate,sRate)
spec <- signal::specgram(x = x,
Fs = resample_sRate,
n = n,
window = window,
overlap = overlap)
spec$S <- apply(spec$S,2,Re)
suppressWarnings(
spec$S <- apply(spec$S,2,function(x){
stats::aggregate(stats::ts(as.numeric(x), frequency=freq), 1, max)
}))
spec$f <- as.numeric(stats::aggregate(stats::ts(spec$f, frequency=freq), 1, max))
endTime <- startTime + round(length(x)/resample_sRate)
spec$t <- seq(startTime,endTime,(as.numeric(endTime)-as.numeric(startTime))/length(spec$t))
if(plot){
plot(spec, col = cols,ylab="Frequency (Hz)",xlab="")
} else {
return(spec)
}
}
bands_power <- function(bands, signal , sRate, broadband = c(0.5,40)){
s <- phonTools::pwelch(sound = signal,fs = sRate,points = 1000, show = FALSE)
s[,2] <- s[,2]+abs(min(s[,2]))
lapply(bands, function(band){
s_filtered <- s[s[,1] >= band[1] & s[,1] < band[2],]
s_broadband <- s[s[,1] >= broadband[1] & s[,1] < broadband[2],]
(sum(s_filtered[,2])/dim(s_filtered)[1])/sum(s_broadband[,2])
})
}
pwelch <- function(
x,
sRate,
points = 0,
overlap = 0,
padding = 0){
n = length(x)
if (points == 0)
points = ceiling(n/10)
x = c(x, rep(0, points))
spots = seq(1, n, points - overlap)
if ((points + padding)%%2 == 1)
padding = padding + 1
n = points + padding
psd = rep(0, n)
for (i in 1:length(spots)) {
tmp = x[spots[i]:(spots[i] + points - 1)] * signal::hamming(points)
tmp = c(tmp, rep(0, padding))
tmp = stats::fft(tmp)
tmp = tmp * Conj(tmp)
psd = psd + tmp
}
psd = psd/length(spots)
psd = psd[1:(n/2 + 1)]
psd = abs(psd)
psd = log(psd)
psd = psd - max(psd)
hz = seq(0, sRate/2, length.out = (n/2) + 1)
return(data.frame("hz" = hz, "psd" = psd))
}
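## Usage sketch for pwelch(): Welch periodogram of a 10 Hz tone sampled at
## 200 Hz (requires the 'signal' package for hamming()). Not run on source:
# sRate <- 200
# x <- sin(2 * pi * 10 * seq(0, 10, by = 1 / sRate))
# psd <- pwelch(x, sRate, points = 512)
# plot(psd$hz, psd$psd, type = "l", xlab = "Hz", ylab = "log PSD")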
psm <- function(x, sRate, length=0){
options(psd.ops=list(
tapmin = 1,
tapcap = 1000,
names = list(
fft = "working_fft",
fft.padded = "fft_even_demeaned_padded",
last.taper = "last_taper_sequence",
last.psdcore = "last_psdcore_psd",
last.psdcore.extrap = "last_psdcore_psd_extrap",
series.even = "ser_orig_even",
var.even = "ser_even_var",
n.even = "len_even",
n.even.half = "len_even_half",
series.orig = "ser_orig",
n.orig = "len_orig"
)
))
res <- psd::pspectrum(x,plot=FALSE,verbose=FALSE)
df <- data.frame("hz" = res$freq, "psd" = res$spec)
df$psd <- log(df$psd)
df$hz <- df$hz*sRate
if(length > 0){
psd <- signal::resample(x = df$psd,
p = length,
q = nrow(df))
hz <- signal::resample(x = df$hz,
p = length,
q = nrow(df))
df <- data.frame("psd" = psd,
"hz" = hz)
}
return(df)
} |
runsum.print <- function(object,
modfile=paste("run",object@Runno,".mod",sep=""),
listfile=paste("run",object@Runno,".lst",sep=""),
print.cex=0.45,
print.columns=3,
...) {
cat("Type the name of the model file (0=cancel, return=",modfile,")\n",sep="")
ans <- readline()
cmdfile <- NULL
if(ans==0) {
return()
} else if (ans=="") {
if(is.readable.file(modfile)) {
cmdfile <- modfile
}
} else {
if(is.readable.file(ans)) {
cmdfile <- ans
}
}
if(is.null(cmdfile)) {
cat("The specified file couldn't be found in the current directory.\n")
return()
}
cat("Type the name of the output file (0=cancel, return=",listfile,")\n",sep="")
ans <- readline()
lstfile <- NULL
if(ans==0) {
return()
} else if (ans=="") {
if(is.readable.file(listfile)) {
lstfile <- listfile
}
} else {
if(is.readable.file(ans)) {
lstfile <- ans
}
}
if(is.null(lstfile)) {
cat("The specified file couldn't be found in the current directory.\n")
return()
}
cat("Do you want to optimize the summary output for printing n(y)?\n")
printit <- readline()
if(printit=="y") {
dev.new(height=11.7,width=8.25)
}
if(printit=="y") {
runsum(object,
modfile=cmdfile,
listfile = lstfile,
txt.columns=print.columns,
txt.cex=print.cex,
...)
} else {
runsum(object,
modfile=cmdfile,
listfile = lstfile,
...)
}
} |
expected <- eval(parse(text="\"1967-09-13\""));
test(id=0, code={
argv <- eval(parse(text="list(structure(-841, class = \"Date\"))"));
do.call(`as.character`, argv);
}, o=expected); |
CheckSVDOptions <- function(Ly1, Lt1, Ly2, Lt2, SVDoptns){
if( (SVDoptns[['dataType1']]=='Sparse' && is.null(SVDoptns[['userMu1']])) ||
(SVDoptns[['dataType2']]=='Sparse' && is.null(SVDoptns[['userMu2']])) ){
stop('User specified mean function required for sparse functional data for cross covariance estimation.')
}
if(is.numeric(SVDoptns$methodSelectK)){
if(SVDoptns$methodSelectK != round(SVDoptns$methodSelectK) ||
SVDoptns$methodSelectK <= 0){
stop("FSVD is aborted: 'methodSelectK' is invalid!\n")
}
}
if( !(SVDoptns$regulRS %in% c('sigma2','rho') ) ){
stop("FSVD is aborted: Unknown regularization option. The argument 'regulRS' should be 'rho' or 'sigma2'!")
}
} |
fs_image_url <- function(id, debug = FALSE) {
a <- fs_details(id)
b <- GET(a$doi)
if(debug | b$status_code != 200)
b
else {
doc <- htmlParse(content(b, as = "text"))
path <- xpathSApply(doc, "//div[@class='filesdownload' and @id='download_all']/a/@href")[[1]]
path
}
}
fs_embed <- function(file) {
title <- file
description <- "embedded file automatically uploaded to figshare from R"
id <- fs_new_article(title, description, type = "figure", visibility = "public")
fs_image_url(id)
} |
NULL
read.hills<-function(file="HILLS", per=c(FALSE, FALSE), pcv1=c(-pi,pi), pcv2=c(-pi,pi), ignoretime=FALSE) {
  hillsf<-read.table(file, header=F, comment.char="#")
if(ncol(hillsf)==5 || ncol(hillsf)==6) {
cat("1D HILLS file read\n")
if(ignoretime) {
cat("Warning: The time will be updated automatically from zero\n")
cat("according to the first step!\n")
hillsf[,1]<-seq(from=hillsf[1,1], by=hillsf[1,1], length.out=nrow(hillsf))
}
hills<-list(hillsfile=hillsf, time=hillsf[,1], cv1=hillsf[,2], cv2=NULL,
size=dim(hillsf), filename=file, per=per, pcv1=pcv1)
class(hills) <- "hillsfile"
return(hills)
} else if(ncol(hillsf)==7 || ncol(hillsf)==8) {
cat("2D HILLS file read\n")
if(ignoretime) {
cat("Warning: The time will be updated automatically from zero\n")
cat("according to the first step!\n")
hillsf[,1]<-seq(from=hillsf[1,1], by=hillsf[1,1], length.out=nrow(hillsf))
}
hills<-list(hillsfile=hillsf, time=hillsf[,1], cv1=hillsf[,2], cv2=hillsf[,3],
size=dim(hillsf), filename=file, per=per, pcv1=pcv1, pcv2=pcv2)
class(hills) <- "hillsfile"
return(hills)
} else {
stop("Error: Number of columns in HILLS file must be 5 or 6 (1D) or 7 or 8 (2D)")
}
}
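## Usage sketch: read a PLUMED HILLS file with two periodic collective
## variables (e.g. backbone torsions). The file name is illustrative.
# hills <- read.hills("HILLS", per = c(TRUE, TRUE))
# summary(hills)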
print.hillsfile<-function(x,...) {
hills <- x
if(hills$size[2]==5) {
cat("1D hills file ")
cat(hills$filename)
cat(" with ")
cat(hills$size[1])
cat(" lines\n")
}
if(hills$size[2]==7) {
cat("2D hills file ")
cat(hills$filename)
cat(" with ")
cat(hills$size[1])
cat(" lines\n")
}
}
summary.hillsfile<-function(object,...) {
hills <- object
if(hills$size[2]==5) {
cat("1D hills file ")
cat(hills$filename)
cat(" with ")
cat(hills$size[1])
cat(" lines\n")
cat("The CV1 ranges from ")
cat(min(hills$hillsfile[,2]))
cat(" to ")
cat(max(hills$hillsfile[,2]))
cat("\n")
}
if(hills$size[2]==7) {
cat("2D hills file ")
cat(hills$filename)
cat(" with ")
cat(hills$size[1])
cat(" lines\n")
cat("The CV1 ranges from ")
cat(min(hills$hillsfile[,2]))
cat(" to ")
cat(max(hills$hillsfile[,2]))
cat("\nThe CV2 ranges from ")
cat(min(hills$hillsfile[,3]))
cat(" to ")
cat(max(hills$hillsfile[,3]))
cat("\n")
}
}
head.hillsfile<-function(x, n=10,...) {
return(head(x$hillsfile, n=n))
}
tail.hillsfile<-function(x, n=10,...) {
return(tail(x$hillsfile, n=n))
}
`+.hillsfile`<-function(hills1, hills2) {
if(ncol(hills1$hillsfile)!=ncol(hills2$hillsfile)) {
stop("Error: You can sum only hills of same dimension")
}
if(hills1$per[1]!=hills2$per[1]) {
stop("Error: You can sum only hills of same periodicity")
}
if(ncol(hills1$hillsfile)==7 || ncol(hills1$hillsfile)==8) {
if(hills1$per[2]!=hills2$per[2]) {
stop("Error: You can sum only hills of same periodicity")
}
}
hills<-list(hillsfile=rbind(hills1$hillsfile, hills2$hillsfile), size=dim(rbind(hills1$hillsfile, hills2$hillsfile)),
filename=hills1$filename, per=hills1$per, pcv1=hills1$pcv1, pcv2=hills1$pcv2)
class(hills) <- "hillsfile"
return(hills)
}
plot.hillsfile<-function(x, ignoretime=FALSE,
xlab=NULL, ylab=NULL,
xlim=NULL, ylim=NULL,
main=NULL, sub=NULL,
pch=1, col="black", bg="red", cex=1,
asp=NULL, lwd=1, axes=TRUE,...) {
hills <-x
xlims<-NULL
ylims<-NULL
if(!is.null(xlim)) {xlims<-xlim}
if(!is.null(ylim)) {ylims<-ylim}
if(hills$size[2]==5) {
if((hills$per[1]==T)&is.null(ylim)) {ylims<-hills$pcv1}
if(is.null(xlab)) xlab="time"
if(is.null(ylab)) ylab="CV"
if(ignoretime) {
plot(seq(from=hills$hillsfile[1,1],by=hills$hillsfile[1,1],length.out=nrow(hills$hillsfile)),
hills$hillsfile[,2], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
xlim=xlims, ylim=ylims,
col=col, cex=cex, lwd=lwd,
asp=asp, axes=axes)
} else {
plot(hills$hillsfile[,1], hills$hillsfile[,2], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
xlim=xlims, ylim=ylims,
col=col, cex=cex, lwd=lwd,
asp=asp, axes=axes)
}
}
if(hills$size[2]==7) {
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
if((hills$per[2]==T)&is.null(ylim)) {ylims<-hills$pcv2}
if(is.null(xlab)) xlab="CV1"
if(is.null(ylab)) ylab="CV2"
plot(hills$hillsfile[,2], hills$hillsfile[,3], type="p",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
xlim=xlims, ylim=ylims,
pch=pch, col=col, bg=bg, cex=cex, lwd=lwd,
asp=asp, axes=axes)
}
}
points.hillsfile<-function(x, ignoretime=FALSE,
pch=1, col="black", bg="red", cex=1,
lwd=1, ...) {
hills <- x
if(hills$size[2]==5) {
if(ignoretime) {
points(seq(from=hills$hillsfile[1,1],by=hills$hillsfile[1,1],length.out=nrow(hills$hillsfile)),
hills$hillsfile[,2],
col=col, cex=cex, lwd=lwd)
} else {
points(hills$hillsfile[,1], hills$hillsfile[,2],
col=col, cex=cex, lwd=lwd)
}
}
if(hills$size[2]==7) {
points(hills$hillsfile[,2], hills$hillsfile[,3],
pch=pch, col=col, bg=bg, cex=cex, lwd=lwd)
}
}
lines.hillsfile<-function(x, ignoretime=FALSE,
lwd=1, col="black",...) {
hills <- x
if(hills$size[2]==5) {
if(ignoretime) {
lines(seq(from=hills$hillsfile[1,1],by=hills$hillsfile[1,1],length.out=nrow(hills$hillsfile)),
hills$hillsfile[,2],
col=col, lwd=lwd)
} else {
lines(hills$hillsfile[,1], hills$hillsfile[,2],
col=col, lwd=lwd)
}
}
if(hills$size[2]==7) {
lines(hills$hillsfile[,2], hills$hillsfile[,3],
col=col, lwd=lwd)
}
}
plotheights<-function(hills, ignoretime, xlab, ylab,
xlim, ylim, main, sub,
col, asp, lwd, axes) {
UseMethod("plotheights")
}
plotheights.hillsfile<-function(hills, ignoretime=FALSE,
xlab=NULL, ylab=NULL,
xlim=NULL, ylim=NULL,
main=NULL, sub=NULL,
col="black", asp=NULL, lwd=1, axes=TRUE) {
if(is.null(xlab)) xlab="time"
if(is.null(ylab)) ylab="hill height"
if(hills$size[2]==5) {
if(ignoretime) {
plot(seq(from=hills$hillsfile[1,1],by=hills$hillsfile[1,1],length.out=nrow(hills$hillsfile)),
hills$hillsfile[,4], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
col=col, lwd=lwd,
asp=asp, axes=axes)
} else {
plot(hills$hillsfile[,1], hills$hillsfile[,4], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
col=col, lwd=lwd,
asp=asp, axes=axes)
}
}
if(hills$size[2]==7) {
if(ignoretime) {
plot(seq(from=hills$hillsfile[1,1],by=hills$hillsfile[1,1],length.out=nrow(hills$hillsfile)),
hills$hillsfile[,6], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
col=col, lwd=lwd,
asp=asp, axes=axes)
} else {
plot(hills$hillsfile[,1], hills$hillsfile[,6], type="l",
xlab=xlab, ylab=ylab,
main=main, sub=sub,
col=col, lwd=lwd,
asp=asp, axes=axes)
}
}
}
fes<-function(hills, imin, imax, xlim, ylim, zlim, npoints) {
UseMethod("fes")
}
fes.hillsfile<-function(hills, imin=1, imax=NULL, xlim=NULL, ylim=NULL, zlim=NULL, npoints=256) {
if(!is.null(imax)) {
if(hills$size[1]<imax) {
cat("Warning: You requested more hills by imax than available, using all hills\n")
imax<-hills$size[1]
}
}
if(is.null(imax)) {
imax<-hills$size[1]
}
if(imin>imax) {
stop("Error: imax cannot be lower than imin")
}
if(hills$size[2]==7) {
if(max(hills$hillsfile[,4])/min(hills$hillsfile[,4])>1.00000000001) {
stop("Error: Bias Sum algorithm works only with hills of the same sizes")
}
if(max(hills$hillsfile[,5])/min(hills$hillsfile[,5])>1.00000000001) {
stop("Error: Bias Sum algorithm works only with hills of the same sizes")
}
minCV1 <- min(hills$hillsfile[,2])
maxCV1 <- max(hills$hillsfile[,2])
minCV2 <- min(hills$hillsfile[,3])
maxCV2 <- max(hills$hillsfile[,3])
xlims<-c(minCV1-0.05*(maxCV1-minCV1), maxCV1+0.05*(maxCV1-minCV1))
ylims<-c(minCV2-0.05*(maxCV2-minCV2), maxCV2+0.05*(maxCV2-minCV2))
if(!is.null(xlim)) {xlims<-xlim}
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
if(!is.null(ylim)) {ylims<-ylim}
if((hills$per[2]==T)&is.null(ylim)) {ylims<-hills$pcv2}
if(hills$per[1]==T) {
if(min(hills$hillsfile[,2])<xlims[1]) {
stop("Error: The first collective variable outside pcv1")
}
if(max(hills$hillsfile[,2])>xlims[2]) {
stop("Error: The first collective variable outside pcv1")
}
}
if(hills$per[2]==T) {
if(min(hills$hillsfile[,3])<ylims[1]) {
stop("Error: The second collective variable outside pcv2")
}
if(max(hills$hillsfile[,3])>ylims[2]) {
stop("Error: The second collective variable outside pcv2")
}
}
x<-0:(npoints-1)*(xlims[2]-xlims[1])/(npoints-1)+xlims[1]
y<-0:(npoints-1)*(ylims[2]-ylims[1])/(npoints-1)+ylims[1]
if((hills$per[1]==F)&(hills$per[2]==F)) {
fesm<-hills1(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==F)) {
fesm<-hills1p1(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==F)&(hills$per[2]==T)) {
fesm<-hills1p2(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==T)) {
fesm<-hills1p12(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=2, per=hills$per, x=x, y=y, pcv1=hills$pcv1, pcv2=hills$pcv2)
class(cfes) <- "fes"
}
if(hills$size[2]==5) {
if(max(hills$hillsfile[,3])/min(hills$hillsfile[,3])>1.00000000001) {
stop("Error: Bias Sum algorithm works only with hills of the same sizes")
}
minCV1 <- min(hills$hillsfile[,2])
maxCV1 <- max(hills$hillsfile[,2])
xlims<-c(minCV1-0.05*(maxCV1-minCV1), maxCV1+0.05*(maxCV1-minCV1))
if(!is.null(xlim)) {xlims<-xlim}
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
x<-0:(npoints-1)*(xlims[2]-xlims[1])/(npoints-1)+xlims[1]
if(hills$per[1]==F) {
fesm<-hills1d1(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,3])/(xlims[2]-xlims[1]),
hills$hillsfile[,4],npoints,imin-1,imax-1)
}
if(hills$per[1]==T) {
fesm<-hills1d1p(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
npoints*max(hills$hillsfile[,3])/(xlims[2]-xlims[1]),
hills$hillsfile[,4],npoints,imin-1,imax-1)
}
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per, x=x, pcv1=hills$pcv1, pcv2=hills$pcv2)
class(cfes) <- "fes"
}
return(cfes)
}
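## Usage sketch: sum the hills into a free energy surface by the fast
## Bias Sum algorithm (equal hill widths required) and inspect its range.
# tfes <- fes(hills, npoints = 256)
# max(tfes$fes) - min(tfes$fes)   # free energy span in the hills' energy units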
fes2<-function(hills, imin, imax, xlim, ylim, zlim, npoints) {
UseMethod("fes2")
}
fes2.hillsfile<-function(hills, imin=1, imax=NULL, xlim=NULL, ylim=NULL, zlim=NULL, npoints=256) {
if(!is.null(imax)) {
if(hills$size[1]<imax) {
cat("Warning: You requested more hills by imax than available, using all hills\n")
imax<-hills$size[1]
}
}
if(is.null(imax)) {
imax<-hills$size[1]
}
if(imin>imax) {
stop("Error: imax cannot be lower than imin")
}
if(hills$size[2]==7) {
minCV1 <- min(hills$hillsfile[,2])
maxCV1 <- max(hills$hillsfile[,2])
minCV2 <- min(hills$hillsfile[,3])
maxCV2 <- max(hills$hillsfile[,3])
xlims<-c(minCV1-0.05*(maxCV1-minCV1), maxCV1+0.05*(maxCV1-minCV1))
ylims<-c(minCV2-0.05*(maxCV2-minCV2), maxCV2+0.05*(maxCV2-minCV2))
if(!is.null(xlim)) {xlims<-xlim}
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
if(!is.null(ylim)) {ylims<-ylim}
if((hills$per[2]==T)&is.null(ylim)) {ylims<-hills$pcv2}
x<-0:(npoints-1)*(xlims[2]-xlims[1])/(npoints-1)+xlims[1]
y<-0:(npoints-1)*(ylims[2]-ylims[1])/(npoints-1)+ylims[1]
if((hills$per[1]==F)&(hills$per[2]==F)) {
fesm<-hills2((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
(npoints-1)*hills$hillsfile[,4]/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,5]/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==F)) {
fesm<-hills2p1((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
(npoints-1)*hills$hillsfile[,4]/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,5]/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==F)&(hills$per[2]==T)) {
fesm<-hills2p2((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
(npoints-1)*hills$hillsfile[,4]/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,5]/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==T)) {
fesm<-hills2p12((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]),
(npoints-1)*hills$hillsfile[,4]/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,5]/(ylims[2]-ylims[1]),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=2, per=hills$per, x=x, y=y, pcv1=hills$pcv1, pcv2=hills$pcv2)
class(cfes) <- "fes"
}
if(hills$size[2]==5) {
minCV1 <- min(hills$hillsfile[,2])
maxCV1 <- max(hills$hillsfile[,2])
xlims<-c(minCV1-0.05*(maxCV1-minCV1), maxCV1+0.05*(maxCV1-minCV1))
if(!is.null(xlim)) {xlims<-xlim}
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
x<-0:(npoints-1)*(xlims[2]-xlims[1])/(npoints-1)+xlims[1]
if(hills$per[1]==F) {
fesm<-hills1d2((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,3]/(xlims[2]-xlims[1]),
hills$hillsfile[,4],npoints,imin-1,imax-1)
}
if(hills$per[1]==T) {
fesm<-hills1d2p((npoints-1)*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]),
(npoints-1)*hills$hillsfile[,3]/(xlims[2]-xlims[1]),
hills$hillsfile[,4],npoints,imin-1,imax-1)
}
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per, x=x, pcv1=hills$pcv1, pcv2=hills$pcv2)
class(cfes) <- "fes"
}
return(cfes)
}
read.plumed<-function(file="fes.dat", dim=2, per=c(F,F,F)) {
  hillsf<-read.table(file, header=F, comment.char="#")
bins<-round(nrow(hillsf)^(1/dim))
if(bins^dim!=nrow(hillsf)) {
stop("Error: the number of bins cannot be determined, it must be same for all dimension, or the number of dimensions is wrong.")
}
if(dim==1) {
x <- hillsf[,1]
fesm <- hillsf[,2]
cfes <- list(fes=fesm, hills=NULL, rows=bins, dimension=1, per=per, x=x, pcv1=c(min(x), max(x)))
class(cfes) <- "fes"
} else if(dim==2) {
x <- hillsf[1:bins,1]
y <- hillsf[(0:(bins-1))*bins+1,2]
fesm <- matrix(hillsf[,3], nrow=bins)
cfes <- list(fes=fesm, hills=NULL, rows=bins, dimension=2, per=per, x=x, y=y,
pcv1=c(min(x), max(x)), pcv2=c(min(y), max(y)))
class(cfes) <- "fes"
} else {
stop("Error: for 3D fes use read.plumed3d from metadynminer3d, higher dimensions are not supported.")
}
return(cfes)
}
fes2d21d<-function(hills, remdim=2, temp=300, eunit="kJ/mol",
imin=1, imax=NULL, xlim=NULL, ylim=NULL, npoints=256) {
if(!is.null(imax)) {
if(hills$size[1]<imax) {
cat("Warning: You requested more hills by imax than available, using all hills\n")
imax<-hills$size[1]
}
}
if(is.null(imax)) {
imax<-hills$size[1]
}
if(imin>imax) {
stop("Error: imax cannot be lower than imin")
}
if(hills$size[2]==7) {
if(max(hills$hillsfile[,4])/min(hills$hillsfile[,4])>1.00000000001) {
stop("Error: Bias Sum algorithm works only with hills of the same sizes")
}
if(max(hills$hillsfile[,5])/min(hills$hillsfile[,5])>1.00000000001) {
stop("Error: Bias Sum algorithm works only with hills of the same sizes")
}
minCV1 <- min(hills$hillsfile[,2])
maxCV1 <- max(hills$hillsfile[,2])
minCV2 <- min(hills$hillsfile[,3])
maxCV2 <- max(hills$hillsfile[,3])
xlims<-c(minCV1-0.05*(maxCV1-minCV1), maxCV1+0.05*(maxCV1-minCV1))
ylims<-c(minCV2-0.05*(maxCV2-minCV2), maxCV2+0.05*(maxCV2-minCV2))
if(!is.null(xlim)) {xlims<-xlim}
if((hills$per[1]==T)&is.null(xlim)) {xlims<-hills$pcv1}
if(!is.null(ylim)) {ylims<-ylim}
if((hills$per[2]==T)&is.null(ylim)) {ylims<-hills$pcv2}
x<-0:(npoints-1)*(xlims[2]-xlims[1])/(npoints-1)+xlims[1]
y<-0:(npoints-1)*(ylims[2]-ylims[1])/(npoints-1)+ylims[1]
binx<-(xlims[2]-xlims[1])/(npoints-1)
biny<-(ylims[2]-ylims[1])/(npoints-1)
if((hills$per[1]==F)&(hills$per[2]==F)) {
fesm<-hills1(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]+binx),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]+biny),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]+binx),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]+biny),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==F)) {
fesm<-hills1p1(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]+binx),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]+biny),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]+binx),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]+biny),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==F)&(hills$per[2]==T)) {
fesm<-hills1p2(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]+binx),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]+biny),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]+binx),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]+biny),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if((hills$per[1]==T)&(hills$per[2]==T)) {
fesm<-hills1p12(npoints*(hills$hillsfile[,2]-xlims[1])/(xlims[2]-xlims[1]+binx),
npoints*(hills$hillsfile[,3]-ylims[1])/(ylims[2]-ylims[1]+biny),
npoints*max(hills$hillsfile[,4])/(xlims[2]-xlims[1]+binx),
npoints*max(hills$hillsfile[,5])/(ylims[2]-ylims[1]+biny),
hills$hillsfile[,6],npoints,imin-1,imax-1)
}
if(eunit=="kJ/mol") {
prob<- exp(-1000*fesm/8.314/temp)
if(remdim==1) {
fesm <- -8.314*temp*log(apply(prob, 2, sum))/1000
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per[2], x=y, pcv1=hills$pcv2)
}
if(remdim==2) {
fesm <- -8.314*temp*log(apply(prob, 1, sum))/1000
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per[1], x=x, pcv1=hills$pcv1)
}
}
if(eunit=="kcal/mol") {
prob<- exp(-1000*4.184*fesm/8.314/temp)
if(remdim==1) {
fesm <- -8.314*temp*log(apply(prob, 2, sum))/1000/4.184
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per[2], x=y, pcv1=hills$pcv2)
}
if(remdim==2) {
fesm <- -8.314*temp*log(apply(prob, 1, sum))/1000/4.184
cfes<-list(fes=fesm, hills=hills$hillsfile, rows=npoints, dimension=1, per=hills$per[1], x=x, pcv1=hills$pcv1)
}
}
class(cfes) <- "fes"
}
if(hills$size[2]==5) {
stop("Error: Your free energy surface is already 1D")
}
return(cfes)
}
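# Arithmetic operators for "fes" objects. Adding or subtracting two surfaces
# requires identical grids (same number of points and the same CV axes);
# adding a plain number shifts the whole surface. For `+`, the hills of both
# surfaces are concatenated; for `-`, only the hills of the first surface are
# kept (see the warning below).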
`+.fes`<-function(fes1, fes2) {
if((class(fes1)=="fes")&(class(fes2)=="fes")) {
if(fes1$rows!=fes2$rows) {
stop("Error: Free energy surfaces have different numbers of points, exiting")
}
if(fes1$dimension!=fes2$dimension) {
stop("Error: Free energy surfaces have different dimension, exiting")
}
if(sum(fes1$x!=fes2$x)>0) {
stop("Error: Free energy surfaces have different CV1 axes, exiting")
}
if(fes1$dimension==2) {
if(sum(fes1$y!=fes2$y)>0) {
stop("Error: Free energy surfaces have different CV2 axes, exiting")
}
}
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes+fes2$fes, hills=rbind(fes1$hills, fes2$hills), rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes+fes2$fes, hills=rbind(fes1$hills, fes2$hills), rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(fes1)=="fes") {
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes+fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes+fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(fes2)=="fes") {
if(fes2$dimension==1) {
cfes<-list(fes=fes1+fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
if(fes2$dimension==2) {
cfes<-list(fes=fes1+fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, y=fes2$y, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
}
class(cfes) <- "fes"
return(cfes)
}
`-.fes`<-function(fes1, fes2) {
if((class(fes1)=="fes")&(class(fes2)=="fes")) {
if(fes1$rows!=fes2$rows) {
stop("Error: Free energy surfaces have different numbers of points, exiting")
}
if(fes1$dimension!=fes2$dimension) {
stop("Error: Free energy surfaces have different dimension, exiting")
}
if(sum(fes1$x!=fes2$x)>0) {
stop("Error: Free energy surfaces have different CV1 axes, exiting")
}
if(fes1$dimension==2) {
if(sum(fes1$y!=fes2$y)>0) {
stop("Error: Free energy surfaces have different CV2 axes, exiting")
}
}
cat("Warning: FES obtained by subtraction of two FESes\n")
cat(" will inherit hills only from the first FES\n")
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes-fes2$fes, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes-fes2$fes, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(fes1)=="fes") {
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes-fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes-fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(fes2)=="fes") {
if(fes2$dimension==1) {
cfes<-list(fes=fes1-fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
if(fes2$dimension==2) {
cfes<-list(fes=fes1-fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, y=fes2$y, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
}
class(cfes) <- "fes"
return(cfes)
}
`*.fes`<-function(fes1, fes2) {
if((class(fes1)=="fes")&(class(fes2)=="fes")) {
stop("Error: You cannot multiply fes by fes")
} else if(class(fes1)=="fes") {
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes*fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes*fes2, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(fes2)=="fes") {
if(fes2$dimension==1) {
cfes<-list(fes=fes1*fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
if(fes2$dimension==2) {
cfes<-list(fes=fes1*fes2$fes, hills=fes2$hills, rows=fes2$rows, dimension=fes2$dimension, per=fes2$per, x=fes2$x, y=fes2$y, pcv1=fes2$pcv1, pcv2=fes2$pcv2)
}
}
cat("Warning: multiplication of FES will multiply\n")
cat(" the FES but not hill heights\n")
class(cfes) <- "fes"
return(cfes)
}
`/.fes`<-function(fes1, coef) {
if((class(fes1)=="fes")&(class(coef)=="fes")) {
stop("Error: You cannot divide fes by fes")
} else if(class(fes1)=="fes") {
if(fes1$dimension==1) {
cfes<-list(fes=fes1$fes/coef, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
if(fes1$dimension==2) {
cfes<-list(fes=fes1$fes/coef, hills=fes1$hills, rows=fes1$rows, dimension=fes1$dimension, per=fes1$per, x=fes1$x, y=fes1$y, pcv1=fes1$pcv1, pcv2=fes1$pcv2)
}
} else if(class(coef)=="fes") {
stop("Error: You cannot divide something by fes")
}
cat("Warning: division of FES will divide\n")
cat(" the FES but not hill heights\n")
class(cfes) <- "fes"
return(cfes)
}
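# Typical fes algebra (object names are illustrative):
# dfes <- fesB - fesA # difference of two surfaces on the same grid
# fes0 <- fesA - min(fesA) # shift so that the global minimum is at zero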
min.fes<-function(inputfes, na.rm=NULL,...) {
return(min(inputfes$fes, na.rm=na.rm))
}
max.fes<-function(inputfes, na.rm=NULL,...) {
return(max(inputfes$fes, na.rm=na.rm))
}
print.fes<-function(x,...) {
inputfes<-x
if(inputfes$dimension==1) {
cat("1D free energy surface with ")
cat(inputfes$rows)
cat(" points, maximum ")
cat(max(inputfes$fes))
cat(" and minimum ")
cat(min(inputfes$fes))
cat("\n")
}
if(inputfes$dimension==2) {
cat("2D free energy surface with ")
cat(inputfes$rows)
cat(" x ")
cat(inputfes$rows)
cat(" points, maximum ")
cat(max(inputfes$fes))
cat(" and minimum ")
cat(min(inputfes$fes))
cat("\n")
}
}
summary.fes<-function(object,...) {
inputfes <- object
if(inputfes$dimension==1) {
cat("1D free energy surface with ")
cat(inputfes$rows)
cat(" points, maximum ")
cat(max(inputfes$fes))
cat(" and minimum ")
cat(min(inputfes$fes))
cat("\n")
}
if(inputfes$dimension==2) {
cat("2D free energy surface with ")
cat(inputfes$rows)
cat(" x ")
cat(inputfes$rows)
cat(" points, maximum ")
cat(max(inputfes$fes))
cat(" and minimum ")
cat(min(inputfes$fes))
cat("\n")
}
}
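# prob: convert a free energy surface (in kJ/mol or kcal/mol) into a
# normalised Boltzmann probability distribution, p_i proportional to
# exp(-F_i / kT), evaluated at temperature `temp` (in K). Works for
# "fes" and "fes3d" objects.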
prob<-function(inputfes, temp=300, eunit="kJ/mol") {
if(class(inputfes)=="fes") {
if(eunit=="kJ/mol") {
if(inputfes$dimension==1) {
probs <- exp(-1000*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, pcv1=inputfes$pcv1)
}
if(inputfes$dimension==2) {
probs <- exp(-1000*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, y=inputfes$y, pcv1=inputfes$pcv1, pcv2=inputfes$pcv2)
}
} else if (eunit=="kJ/mol") {
if(inputfes$dimension==1) {
probs <- exp(-1000*4.184*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, pcv1=inputfes$pcv1)
}
if(inputfes$dimension==2) {
probs <- exp(-1000*4.184*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, y=inputfes$y, pcv1=inputfes$pcv1, pcv2=inputfes$pcv2)
}
} else {
stop("Error: Wrong eunit")
}
class(cfes) <- "fes"
return(cfes)
} else if(class(inputfes)=="fes3d") {
if(eunit=="kJ/mol") {
probs <- exp(-1000*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, y=inputfes$y, z=inputfes$z, pcv1=inputfes$pcv1, pcv2=inputfes$pcv2, pcv3=inputfes$pcv3)
} else if (eunit=="kJ/mol") {
probs <- exp(-1000*4.184*inputfes$fes/8.314/temp)
cfes<-list(fes=probs/sum(probs), hills=inputfes$hills, rows=inputfes$rows, dimension=inputfes$dimension, per=inputfes$per, x=inputfes$x, y=inputfes$y, z=inputfes$z, pcv1=inputfes$pcv1, pcv2=inputfes$pcv2, pcv3=inputfes$pcv3)
} else {
stop("Error: Wrong eunit")
}
class(cfes) <- "fes3d"
return(cfes)
} else {
stop("Error: Input must be fes or fes3d object")
}
}
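# plot.fes: 1D surfaces are drawn as a line; 2D surfaces as an image, a
# contour plot, or both (plottype = "image", "contour" or "both"), with an
# optional colour-scale legend drawn in a separate screen (colscale = TRUE).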
plot.fes<-function(x, plottype="both",
colscale=F, xlim=NULL, ylim=NULL, zlim=NULL,
main=NULL, sub=NULL,
xlab=NULL, ylab=NULL,
nlevels=10, levels=NULL,
col=rainbow(135)[100:1],
labels=NULL, labcex=0.6, drawlabels=TRUE,
colscalelab="free energy",
method="flattest",
contcol=par("fg"), lty=par("lty"),
lwd=1, asp=NULL, axes=T,...) {
close.screen(all.screens=TRUE)
inputfes<-x
fes<-inputfes$fes
rows<-inputfes$rows
if(inputfes$dimension==1) {
x<-inputfes$x
if(is.null(xlab)) xlab="CV"
if(is.null(ylab)) ylab="free energy"
if(is.null(xlim)) xlim<-c(min(x),max(x))
if(is.null(ylim)) {
ylim<-range(pretty(range(fes)))
}
plot(x, fes, type="l", lwd=lwd,
col=col, xlim=xlim, ylim=ylim,
xlab=xlab, ylab=ylab, axes=axes,
main=main, sub=sub, asp=asp)
} else {
x<-inputfes$x
y<-inputfes$y
if(is.null(xlab)) xlab="CV1"
if(is.null(ylab)) ylab="CV2"
if(is.null(zlim)) {
zlim<-range(pretty(range(fes)))
}
if(is.null(levels)) {
levels<-pretty(zlim, nlevels)
}
if(is.null(xlim)) xlim<-c(min(x),max(x))
if(is.null(ylim)) ylim<-c(min(y),max(y))
if(colscale) {
split.screen(matrix(c(0,0.75,0,1,0.75,1,0,1), byrow=T, ncol=4))
screen(2)
smat<-matrix(seq(from=zlim[1], to=zlim[2], length.out=100))
image(c(0), seq(from=zlim[1], to=zlim[2], length.out=100),
t(smat), zlim=zlim, col=col, xlab="", ylab=colscalelab, axes=F)
axis(2, lty=lty, lwd=lwd)
box(lwd=lwd)
screen(1)
}
if(plottype=="image" || plottype=="both") {
image(x, y, fes, zlim=zlim,
col=col, xlim=xlim, ylim=ylim,
xlab=xlab, ylab=ylab, axes=axes,
main=main, sub=sub, asp=asp)
}
if(plottype=="contour") {
contour(x, y, fes, zlim=zlim,
nlevels=nlevels, levels=levels,
labels=labels, labcex=labcex, drawlabels=drawlabels,
method=method, col=contcol, lty=lty, lwd=lwd,
main=main, sub=sub, asp=asp)
}
if(plottype=="both") {
contour(x, y, fes, zlim=zlim,
nlevels=nlevels, levels=levels,
labels=labels, labcex=labcex, drawlabels=drawlabels,
method=method, col=contcol, lty=lty, lwd=lwd, add=T)
}
}
}
points.fes<-function(x, pch=1, col="black", bg="red", cex=1, lwd=1,...) {
fes<-x$fes
if(x$dimension==1) {
x<-x$x
points(x, fes,
pch=pch, col=col, bg=bg, cex=cex, lwd=lwd)
} else {
stop("Error: points available only for 1D free energy surfaces\n")
}
}
lines.fes<-function(x, lwd=1, col="black",...) {
fes<-x$fes
if(x$dimension==1) {
x<-x$x
lines(x, fes, lwd=lwd, col=col)
} else {
stop("Error: points available only for 1D free energy surfaces\n")
}
} |
test_that("qgis_function() works", {
skip_if_not(has_qgis())
expect_error(
qgis_function("native:buffer", NOT_AN_ARG = "value"),
"must be valid input names"
)
expect_error(
qgis_function("native:buffer", "no name"),
"must be named"
)
qgis_buffer <- qgis_function("native:buffer")
expect_is(qgis_buffer, "function")
expect_identical(parent.env(environment(qgis_buffer)), baseenv())
expect_true(rlang::is_call(body(qgis_buffer), "qgis_run_algorithm", ns = "qgisprocess"))
buffer_args <- qgis_arguments("native:buffer")
expect_identical(
names(formals(qgis_buffer)),
c(
buffer_args$name,
setdiff(names(formals(qgis_run_algorithm)), c("algorithm", "...", ".raw_json_input"))
)
)
result <- qgis_buffer(
system.file("longlake/longlake_depth.gpkg", package = "qgisprocess"),
DISTANCE = 100,
DISSOLVE = TRUE,
MITER_LIMIT = 2,
OUTPUT = qgis_tmp_vector(),
END_CAP_STYLE = 0,
JOIN_STYLE = 0
)
expect_is(result, "qgis_result")
})
test_that("qgis_pipe() works", {
skip_if_not(has_qgis())
result <- system.file("longlake/longlake_depth.gpkg", package = "qgisprocess") %>%
qgis_pipe(
"native:buffer",
DISTANCE = 100,
DISSOLVE = TRUE,
MITER_LIMIT = 2,
OUTPUT = qgis_tmp_vector(),
END_CAP_STYLE = 0,
JOIN_STYLE = 0
)
expect_is(result, "qgis_result")
}) |
test_that( "js events can be chained ", {
fns <- list(
a = htmlwidgets::JS("function(a) { a + 1 }"),
b = htmlwidgets::JS("function(b) { b + 3 }"),
c = htmlwidgets::JS("function(c) { c + 5 }")
)
js <- chain_js_events(
NULL,
fns$a,
NULL,
fns$b,
fns$c,
NULL,
NULL
)
expect_equal(
js,
htmlwidgets::JS("function() {
try {
(function(a) { a + 1 }).apply(this, arguments);
} catch(e) {
if (window.console && window.console.error) window.console.error(e);
}
try {
(function(b) { b + 3 }).apply(this, arguments);
} catch(e) {
if (window.console && window.console.error) window.console.error(e);
}
try {
(function(c) { c + 5 }).apply(this, arguments);
} catch(e) {
if (window.console && window.console.error) window.console.error(e);
}
}")
)
js <- chain_js_events(
NULL,
fns$a,
NULL
)
expect_equal(
js, fns$a
)
}) |
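# dmoz_cat: look up the DMOZ category of each supplied domain in a local
# hostname/category CSV (either `use_file` or "dmoz_domain_category.csv" in
# the working directory). Matching is attempted both with and without the
# leading "http://" / "www." prefixes.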
dmoz_cat <- function(domains = NULL, use_file = NULL) {
c_domains <- gsub("^ *| *$", "", domains)
c_domains_http <- gsub("^http://", "", c_domains)
c_domains <- gsub("^www.", "", c_domains)
dmoz <- NA
domain_cat <- data.frame(domain_name = c_domains, dmoz_category = NA)
if (is.character(use_file)) {
if (!file.exists(use_file)) {
stop("Please provide correct path to the file.
Or download it using get_dmoz_data().")
}
dmoz <- read.csv(use_file, header = FALSE, stringsAsFactors = FALSE)
} else {
if (!file.exists("dmoz_domain_category.csv")) {
stop("Please provide path to the dmoz file.
Or download it using get_dmoz_data().")
}
dmoz <- read.csv("dmoz_domain_category.csv",
header = FALSE, stringsAsFactors = FALSE)
}
names(dmoz) <- c("hostname", "category")
domain_cat$dmoz_category <-
dmoz$category[match(c_domains_http, dmoz$hostname)]
domain_cat$dmoz_category <-
ifelse(
is.na(domain_cat$dmoz_category),
dmoz$category[match(c_domains, dmoz$hostname)],
domain_cat$dmoz_category
)
domain_cat
} |
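# Illustrative call (the CSV must first be obtained, e.g. with get_dmoz_data();
# the domain names below are examples only):
# dmoz_cat(c("www.nytimes.com", "github.com"), use_file = "dmoz_domain_category.csv")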
library(usethis)
source("data-raw/01-countrypops.R")
source("data-raw/02-sza.R")
source("data-raw/03-gtcars.R")
source("data-raw/04-sp500.R")
source("data-raw/05-pizzaplace.R")
source("data-raw/06-exibble.R")
usethis::use_data(
countrypops, sza, gtcars, sp500, pizzaplace, exibble,
internal = FALSE, overwrite = TRUE
) |
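# are_mrca_taxon_names_in_fasta: return TRUE when every taxon named in the
# MRCA prior is present in the alignment read from `fasta_filename` (only
# checked when the prior's alignment ID matches that alignment), and FALSE
# as soon as one taxon is missing.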
are_mrca_taxon_names_in_fasta <- function(
mrca_prior,
fasta_filename
) {
testit::assert(beautier::is_mrca_prior(mrca_prior))
testit::assert(
beautier::is_mrca_align_id_in_fasta(
mrca_prior = mrca_prior,
fasta_filename = fasta_filename
)
)
if (beautier::get_alignment_id(fasta_filename) == mrca_prior$alignment_id) {
for (name in mrca_prior$taxa_names) {
if (!name %in% beautier::get_taxa_names(fasta_filename)) {
return(FALSE)
}
}
}
TRUE
} |
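# kernelAuto: kernel-weighted coefficient estimation with automatic bandwidth
# selection. For each candidate bandwidth the coefficients are estimated on
# the full sample and on two random half-samples; the half-sample
# disagreement gives a variance estimate, the drift of the full-sample
# estimates across bandwidths gives a bias estimate, and the bandwidth
# minimising the resulting per-covariate MSE is used for the final fit.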
kernelAuto <- function(data.x,
data.y,
kType,
lType,
time,
distanceFunction,
nCores,
verbose, ...){
nCov <- ncol(data.x) - 2L
patientIDs <- sort(unique(data.y[,1L]))
nPatients <- length(patientIDs)
xIs <- list()
for( i in 1L:nrow(data.y) ) {
xIs[[i]] <- list()
xIs[[i]]$v <- which( data.x[,1L] == data.y[i,1L] )
xIs[[i]]$n <- as.integer(round(length(xIs[[i]]$v),0L))
}
yIs <- list()
for( i in 1L:nPatients ) {
yIs[[i]] <- list()
yIs[[i]]$v <- which( data.y[,1L] == patientIDs[i] )
yIs[[i]]$n <- as.integer(round(length(yIs[[i]]$v),0L))
}
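# Candidate bandwidths: 50 values from 2*IQR*n^(-0.7) to 2*IQR*n^(-0.3),
# where IQR is the interquartile range of the pooled second columns of
# data.x and data.y and n is the number of patients.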
range <- c(data.x[, 2L], data.y[,2L])
bw <- seq(from = 2*(stats::quantile(x = range, probs = 0.75) -
stats::quantile(x = range, probs = 0.25)) *
nPatients^(-0.7),
to = 2*(stats::quantile(x = range, probs = 0.75) -
stats::quantile(x = range, probs = 0.25)) *
nPatients^(-0.3),
length = 50)
lbd <- length(bw)
cvlabel <- sample(x = rep( c(1L:2L), length.out = nPatients ))
betaHat0 <- matrix(data = 0.0, nrow = lbd, ncol = nCov)
hatV <- matrix(data = 0.0, nrow = lbd, ncol = nCov)
guess0 <- NULL
guess1 <- NULL
guess2 <- NULL
tempFunc <- function(x,
data.y,
data.x,
bw,
kType,
lType,
nPatients,
xIs,
yIs,
tt,
guess0,
guess1,
guess2,
distanceFunction,
cvlabel){
betaHat0 <- betaHat(data.y = data.y,
data.x = data.x,
bandwidth = bw[x],
kType = kType,
lType = lType,
nPatients = nPatients,
xIs = xIs,
yIs = yIs,
tt = time,
guess = guess0,
distanceFunction = distanceFunction)
tst <- cvlabel == 1L
betaHat1 <- betaHat(data.y = data.y,
data.x = data.x,
bandwidth = bw[x],
kType = kType,
lType = lType,
nPatients = sum(tst),
xIs = xIs,
yIs = yIs[tst],
tt = time,
guess = guess1,
distanceFunction = distanceFunction)
tst <- cvlabel == 2L
betaHat2 <- betaHat(data.y = data.y,
data.x = data.x,
bandwidth = bw[x],
kType = kType,
lType = lType,
nPatients = sum(tst),
xIs = xIs,
yIs = yIs[tst],
tt = time,
guess = guess2,
distanceFunction = distanceFunction)
betaDiff <- betaHat2 - betaHat1
hatV <- nPatients * bw[x] * ( betaDiff * betaDiff ) * 0.25
return( list( "beta0" = betaHat0,
"beta1" = betaHat1,
"beta2" = betaHat2,
"hatV" = hatV) )
}
if( nCores > 1.1 ) {
cl <- makeCluster(nCores)
res <- parLapply(cl, 1L:lbd, tempFunc, data.y = data.y,
data.x = data.x,
bw = bw,
kType = kType,
lType = lType,
nPatients = nPatients,
xIs = xIs,
yIs = yIs,
tt = time,
guess0 = NULL,
guess1 = NULL,
guess2 = NULL,
distanceFunction = distanceFunction,
cvlabel = cvlabel)
stopCluster(cl)
for( bd in 1L:lbd ) {
betaHat0[bd,] <- res[[bd]]$beta0
hatV[bd,] <- res[[bd]]$hatV
}
} else {
for( bd in 1L:lbd ) {
# pass the grid index so that tempFunc() evaluates bw[x], as in the
# parallel branch above
res <- tempFunc(x = bd,
data.y = data.y,
data.x = data.x,
bw = bw,
kType = kType,
lType = lType,
nPatients = nPatients,
xIs = xIs,
yIs = yIs,
tt = time,
guess0 = guess0,
guess1 = guess1,
guess2 = guess2,
distanceFunction = distanceFunction,
cvlabel = cvlabel)
betaHat0[bd,] <- res$beta0
guess0 <- res$beta0
guess1 <- res$beta1
guess2 <- res$beta2
hatV[bd,] <- res$hatV
}
}
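# Bias-variance trade-off: treating betaHat0(h) ~ beta + C*h^2, C is
# estimated per covariate by regressing the full-sample estimates on h^2;
# combined with hatV this gives MSE(h) = (C*h^2)^2 + V(h), which is
# minimised separately for each covariate to pick the bandwidth.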
hatC <- array(data = 0.0, dim = nCov)
for( p in 1L:nCov ) {
hatC[p] <- lm( betaHat0[,p] ~ I(bw^2) )$coef[2]
}
MSE <- bw^4 %o% ( hatC * hatC ) + hatV
mseFunc <- function(x) {
tst <- x > 0.0
if( sum(tst) == 0L ) {
stop("no positive MSE values", call. = FALSE)
}
x[!tst] <- NA
opt_h <- which.min(x)
if( length(opt_h) > 1L ) {
warning("Multiple minimums. Smallest bandwidth used.",
call. = FALSE)
opt_h <- opt_h[1]
}
return(opt_h)
}
opt_h <- apply(X = MSE, MARGIN = 2L, FUN = mseFunc)
if( any(opt_h == 1L) || any(opt_h == lbd) ) {
warning("At least 1 minimum is at bandwidth boundary.",
call. = FALSE)
}
tminMSE <- array(data = 0.0, dim = nCov)
for( i in 1L:nCov ) {
tminMSE[i] <- MSE[opt_h[i],i]
}
optH <- bw[opt_h]
minMSE <- tminMSE
bHat <- betaHat(data.y = data.y,
data.x = data.x,
bandwidth = bw[opt_h],
kType = kType,
lType = lType,
tt = time,
guess = guess0,
xIs = xIs,
yIs = yIs,
nPatients = nPatients,
distanceFunction = distanceFunction)
sdVec <- SD(data.y = data.y,
data.x = data.x,
bandwidth = bw[opt_h],
kType = kType,
lType = lType,
bHat = bHat,
tt = time,
xIs = xIs,
yIs = yIs,
nPatients = nPatients,
distanceFunction = distanceFunction)
results <- matrix(data = 0.0,
nrow = nCov,
ncol = 6L,
dimnames = list(paste("beta",0L:{nCov-1L},sep=""),
c("estimate","stdErr","z-value",
"p-value", "optBW", "minMSE")))
results[,1L] <- bHat
results[,2L] <- sdVec
results[,3L] <- bHat/sdVec
results[,4L] <- 2.0*pnorm(-abs(results[,3L]))
results[,5L] <- bw[opt_h]
results[,6L] <- tminMSE
if (verbose) {
print(results)
cat("\n")
}
return( list( "betaHat" = results[,1L],
"stdErr" = results[,2L],
"zValue" = results[,3L],
"pValue" = results[,4L],
"optBW" = results[,5L],
"minMSE" = results[,6L] ) )
} |