source("mniw-testfunctions.R")
context("Multivariate-Normal Distribution")
tol <- 1e-6
test_that("Multivariate Normal simulation is same in C++ as R", {
calc.diff <- FALSE
case.par <- expand.grid(q = c(1,2,4),
mu = c("none", "single", "multi"),
Sigma = c("none", "single", "multi"),
drop = c(TRUE, FALSE), stringsAsFactors = FALSE)
case.par <- case.par[!with(case.par, {
mu == "none" & Sigma == "none"}),]
ncases <- nrow(case.par)
n <- 12
test.seed <- sample(1e6, ncases)
if(calc.diff) {
MaxDiff <- rep(NA, ncases)
}
for(ii in 1:ncases) {
cp <- case.par[ii,]
q <- cp$q
args <- list(mu = list(p = 1, q = q, rtype = cp$mu, vtype = "vector"),
Sigma = list(q = q, rtype = cp$Sigma, vtype = "matrix"))
args <- get_args(n = n, args = args, drop = cp$drop)
yR <- matrix(NA, n, q)
set.seed(test.seed[ii])
for(jj in 1:n) {
yR[jj,] <- rmNormR(mu = args$R$mu[[jj]],
V = args$R$Sigma[[jj]])
}
set.seed(test.seed[ii])
ycpp <- do.call(rmNorm, args = c(args$cpp, list(n = n)))
mx <- arDiff(yR, ycpp)
if(calc.diff) {
MaxDiff[ii] <- mx
} else {
expect_Rcpp_equal("rmNorm", ii, mx, tolerance = tol)
}
}
})
test_that("Matrix Normal density is same in C++ as R", {
calc.diff <- FALSE
case.par <- expand.grid(q = c(1,2,4),
x = c("single", "multi"),
mu = c("none", "single", "multi"),
Sigma = c("none", "single", "multi"),
drop = c(TRUE, FALSE), stringsAsFactors = FALSE)
ncases <- nrow(case.par)
n <- 12
if(calc.diff) {
MaxDiff <- rep(NA, ncases)
}
for(ii in 1:ncases) {
cp <- case.par[ii,]
q <- cp$q
args <- list(x = list(p = 1, q = q, rtype = cp$x, vtype = "vector"),
mu = list(p = 1, q = q, rtype = cp$mu, vtype = "vector"),
Sigma = list(q = q, rtype = cp$Sigma, vtype = "matrix"))
args <- get_args(n = n, args = args, drop = cp$drop)
llR <- rep(NA, n)
for(jj in 1:n) {
llR[jj] <- dmNormR(x = args$R$x[[jj]],
mu = args$R$mu[[jj]],
V = args$R$Sigma[[jj]], log = TRUE)
}
llcpp <- do.call(dmNorm, args = c(args$cpp, list(log = TRUE)))
if(all_single(cp)) llcpp <- rep(llcpp, n)
mx <- arDiff(llR, llcpp)
if(calc.diff) {
MaxDiff[ii] <- mx
} else {
expect_Rcpp_equal("dmNorm", ii, mx, tolerance = tol)
}
}
})
plot_plate_summary_for_bursts <- function(s, outputdir, parameters) {
for (i in (1:length(s))) {
basename <- get_file_basename(s[[i]]$file)
burst_plot_path <- paste(outputdir, "/", basename, "_burst_plot.pdf",
sep = "")
pdf(file = burst_plot_path)
p <- .plot_mealayout(s[[i]]$layout, use_names = T, cex = 0.48)
title(main = paste(paste("Electrode Layout"),
paste("file= ", strsplit(basename(s[[i]]$file), ".RData")[[1]][1],
sep = ""), sep = "\n"))
if (parameters$burst_distribution_ibi$perform) {
feature <- "ibi"; print("Running IBI distribution analysis.")
params <- parameters$burst_distribution_ibi
p <- calc_burst_distributions(s[[i]], min_vals = params$min_cases,
xlimit = params$x_axis_lim, bins_in_sec = params$bins_in_sec,
feature = feature, filter_values_by_min = params$filter_by_min,
min_values = params$min_values, per_well = params$per_well,
outputdir = outputdir, min_electrodes = parameters$well_min_rate,
parameters$time_stamp)
}
if (parameters$burst_distribution_isi$perform) {
feature <- "isi"; print("Running ISI distribution analysis.")
params <- parameters$burst_distribution_isi
p <- calc_burst_distributions(s[[i]], min_vals = params$min_cases,
xlimit = params$x_axis_lim, bins_in_sec = params$bins_in_sec,
feature = feature, filter_values_by_min = params$filter_by_min,
min_values = params$min_values, per_well = params$per_well,
outputdir = outputdir, min_electrodes = parameters$well_min_rate,
parameters$time_stamp)
}
if (parameters$burst_distribution_nspikes$perform) {
feature <- "nspikes_in_burst"
print("Running nSpikes in bursts distribution analysis.")
params <- parameters$burst_distribution_nspikes
p <- calc_burst_distributions(s[[i]], min_vals = params$min_cases,
xlimit = params$x_axis_lim, bins_in_sec = params$bins_in_sec,
feature = feature, filter_values_by_min = params$filter_by_min,
min_values = params$min_values, per_well = params$per_well,
outputdir = outputdir, min_electrodes = parameters$well_min_rate,
parameters$time_stamp)
}
if (parameters$burst_distribution_durn$perform) {
feature <- "duration"
print("Running duration of bursts distribution analysis.")
params <- parameters$burst_distribution_durn
p <- calc_burst_distributions(s[[i]], min_vals = params$min_cases,
xlimit = params$x_axis_lim, bins_in_sec = params$bins_in_sec,
feature = feature, filter_values_by_min = params$filter_by_min,
min_values = params$min_values, per_well = params$per_well,
outputdir = outputdir, min_electrodes = parameters$well_min_rate,
parameters$time_stamp)
}
if (parameters$burst_distribution_spike_freq$perform) {
feature <- "spikes_density_in_burst"
print("Running spike density in bursts distribution analysis.")
params <- parameters$burst_distribution_spike_freq
p <- calc_burst_distributions(s[[i]], min_vals = params$min_cases,
xlimit = params$x_axis_lim, bins_in_sec = params$bins_in_sec,
feature = feature, filter_values_by_min = params$filter_by_min,
min_values = params$min_values, per_well = params$per_well,
outputdir = outputdir, min_electrodes = parameters$well_min_rate,
parameters$time_stamp)
}
p <- .plot_meanfiringrate(s[[i]], main = "Mean Firing Rate by Plate (Hz)")
p <- .channel_plot_by_well(s[[i]], resp = "meanfiringrate",
resp_label = "Mean Firing Rate (Hz)")
p <- .channel_plot_by_well(s[[i]], resp = "bs$mean_dur",
resp_label = "Mean Duration of Burst (s)")
p <- .channel_plot_by_well(s[[i]], resp = "bs$nbursts",
resp_label = "Number of Bursts")
p <- .channel_plot_by_well(s[[i]], resp = "bs$mean_ibis",
resp_label = "Mean IBIs (ms)")
p <- .channel_plot_by_well(s[[i]], resp = "bs$mean_isis",
resp_label = "Mean ISI w/i Bursts (s)")
p <- .channel_plot_by_well(s[[i]], resp = "bs$bursts_per_min",
resp_label = "Mean Burst per Minute")
p <- .channel_plot_by_well(s[[i]], resp = "bs$mean_spikes",
resp_label = "Mean
p <- .channel_plot_by_well(s[[i]], resp = "bs$per_spikes_in_burst",
resp_label = "% Spikes/Burst")
dev.off()
}
}
write_plate_summary_for_bursts <- function(s, outputdir) {
master_sum <- .get_mean_burst_info_per_well(s)
csvwell <- paste(outputdir, "/", get_project_plate_name(s[[1]]$file),
"_well_bursts.csv", sep = "")
for (i in 1:length(s)) {
div <- .get_div(s[[i]])
basename <- get_file_basename(s[[i]]$file)
csvfile <- paste(outputdir, "/", basename, "_bursts.csv", sep = "")
tempdf <- c(); tempcolnames <- c()
for (j in 2:length(master_sum[[i]])) {
tempc <- unlist(master_sum[[i]][j])
tempdf <- cbind(tempdf, tempc)
tempcolnames <- c(tempcolnames, names(master_sum[[i]][j]))
}
if (dim(tempdf)[2] > 20) {
if (dim(tempdf)[1] == 1) {
df <- cbind(t(tempdf[, 21:25]), t(tempdf[, 1:20]))
} else {
df <- cbind(tempdf[, 21:25], tempdf[, 1:20])
}
colnames <- c(tempcolnames[21:25], tempcolnames[1:20])
colnames(df) <- colnames
}
wellindex <- which(is.element(names(s[[i]]$treatment),
unique(s[[i]]$cw)))
well <- c(); file <- c();
file <- rep(strsplit(basename(s[[i]]$file), ".RData")[[1]][1],
length(s[[i]]$cw))
well <- s[[i]]$cw
df2 <- cbind(file, well, as.data.frame(s[[i]]$bs[1:length(s[[i]]$bs)]))
write.table("Burst Analysis Averaged Over Each Well",
csvfile, sep = ",", append = FALSE, row.names = FALSE, col.names = FALSE)
write.table(paste("file= ", strsplit(basename(s[[i]]$file),
".RData")[[1]][1], sep = ""),
csvfile, sep = ",", append = TRUE, row.names = FALSE, col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(paste("recording time (s): [", paste(s[[i]]$rec_time[1],
round(s[[i]]$rec_time[2]), sep = " ,"),
"]", sep = ""), csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
if (dim(df)[1] == 1) {
suppressWarnings(write.table(t(df[, - c(2:3)]),
csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = TRUE))
suppressWarnings(write.table(cbind(div, t(df[, - c(2:3)])),
csvwell, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE))
} else {
suppressWarnings(write.table(df[, - c(2:3)],
csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = TRUE))
suppressWarnings(write.table(cbind(div, df[, - c(2:3)]),
csvwell, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE))
}
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table("Channel Burst Summary", csvfile, sep = ",", append = TRUE,
row.names = FALSE, col.names = FALSE)
write.table(paste("file= ", strsplit(basename(s[[i]]$file),
".RData")[[1]][1], sep = ""),
csvfile, sep = ",", append = TRUE, row.names = FALSE, col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
suppressWarnings(write.table(df2,
csvfile, sep = ",", append = TRUE, row.names = FALSE, col.names = TRUE))
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
write.table(" ", csvfile, sep = ",", append = TRUE, row.names = FALSE,
col.names = FALSE)
}
}
tdmClassifyLoop <- function(dset,response.variables,input.variables,opts,tset=NULL) {
if (exists(".Random.seed")) SAVESEED<-.Random.seed
if (is.null(tset)) {
tsetStr = c("Validation", "validation");
} else {
tsetStr = c("Test", " test");
}
if (class(opts)[1] != "tdmOpts") stop("Class of object opts is not tdmOpts");
if (is.null(opts$PRE.PCA.numericV)) opts$PRE.PCA.numericV <- input.variables;
if (opts$NRUN<=0) stop(sprintf("opts$NRUN has to be positive, but it is %d",opts$NRUN));
if (opts$PRE.PCA!="none" & opts$PRE.SFA!="none") stop("It is not allowed to activate opts$PRE.PCA and opts$PRE.SFA simultaneously.")
if (!all(response.variables %in% names(dset)))
stop(sprintf("Not all response.variables are in names(dset)!\n %s\n response.variables=%s",
"Note that response.variables have to be strings (names of columns in dset), not the columns themselves.",
paste(response.variables,collapse=",")))
if (!is.null(opts$CLS.gainmat)) {
for (respVar in response.variables)
testit::assert(sprintf("tdmClassifyLoop: Size of opts$CLS.gainmat and number of levels in dset$%s do not match", respVar)
, nrow(opts$CLS.gainmat)==length(levels(dset[,respVar]))
, ncol(opts$CLS.gainmat)==length(levels(dset[,respVar])) )
}
predProbList=list();
C_train <- C_vali <- C_vali2 <- G_train <- G_vali <- NULL
R_train <- R_vali <- R_vali2 <- G_vali2 <- NULL
Err <- NULL
for (i in 1:opts$NRUN) {
if (opts$NRUN>1) {
if (opts$GD.RESTART) tdmGraphicCloseDev(opts);
tdmGraphicInit(opts);
}
opts$i = i;
if (is.null(opts$TST.SEED)) {
set.seed(tdmRandomSeed());
} else if (opts$TST.SEED=="algSeed") {
newseed=opts$ALG.SEED+(opts$i-1)+opts$NRUN*(opts$rep-1);
set.seed(newseed);
} else {
newseed=opts$TST.SEED+(opts$i-1)+opts$NRUN*(opts$rep-1);
set.seed(newseed);
}
cvi <- tdmModCreateCVindex(dset,response.variables,opts,stratified=TRUE);
nfold = max(cvi,1);
alltrn = alltst = NULL;
predictions <- as.data.frame(dset[,response.variables]);
names(predictions) <- response.variables;
predProb=list();
for (k in 1:nfold) {
opts$k=k;
opts$the.nfold = nfold;
cat1(opts,"\n")
if (any(names(dset)=="IND.dset")) stop("Name clash in dset, which has already a column IND.dset. Please consider renaming it.")
if (is.null(tset)) {
d_test <- dset[cvi==k, ]
d_test <- tdmBindResponse(d_test , "IND.dset", which(cvi==k));
} else {
if (opts$TST.kind %in% c("cv"))
stop(sprintf("Option opts$TST.kind=\"%s\" together with tset!=NULL is currently not implemented. Consider opts$TST.kind=\"rand\".",opts$TST.kind));
d_test <- tset;
d_test <- tdmBindResponse(d_test , "IND.dset", rep(-1,nrow(tset)));
}
d_train <- dset[cvi!=k & cvi>=0, ]
d_dis <- dset[cvi!=k & cvi==-1, ]
d_train <- tdmBindResponse(d_train, "IND.dset", which(cvi!=k & cvi>=0));
d_dis <- tdmBindResponse(d_dis , "IND.dset", which(cvi!=k & cvi==-1));
ntst=nrow(d_test);
ntrn=nrow(d_train);
d_preproc <- NULL;
if (opts$PRE.PCA!="none" | opts$PRE.SFA!="none") {
if (opts$PRE.allNonVali) {
d_preproc <- dset[cvi!=k, ]
} else {
d_preproc <- d_train
}
}
if (opts$PRE.PCA!="none") {
other.variables <- setdiff(input.variables,opts$PRE.PCA.numericV);
pca <- tdmPrePCA.train(d_preproc,opts);
d_train <- tdmPrePCA.apply(d_train,pca$pcaList,opts,d_train)$dset;
d_test <- tdmPrePCA.apply(d_test,pca$pcaList,opts,d_train)$dset;
d_dis <- tdmPrePCA.apply(d_dis,pca$pcaList,opts,d_train)$dset;
input.variables <- union(pca$numeric.variables,other.variables);
if (length(setdiff(input.variables,names(d_train)))>0)
stop("Some elements of input.variables are not columns of d_train");
}
res <- tdmClassify(d_train,d_test,d_dis,d_preproc,response.variables,input.variables,opts,tsetStr)
predProb$Val = rbind(predProb$Val,res$predProb$Val);
if (k==1) predProb$Trn = res$predProb$Trn;
alltrn = rbind(alltrn,as.data.frame(list(cerr.trn=mean(res$allEVAL$cerr.trn) * ntrn
,gain.trn=mean(res$allEVAL$gain.trn) * ntrn
,rgain.trn=mean(res$allEVAL$rgain.trn) * ntrn
,ntrn=ntrn
)));
alltst = rbind(alltst,as.data.frame(list(cerr.tst=mean(res$allEVAL$cerr.tst) * ntst
,gain.tst=mean(res$allEVAL$gain.tst) * ntst
,rgain.tst=mean(res$allEVAL$rgain.tst) * ntst
,cerr.tst2=mean(res$allEVAL$cerr.tst2) * ntst
,gain.tst2=mean(res$allEVAL$gain.tst2) * ntst
,rgain.tst2=mean(res$allEVAL$rgain.tst2) * ntst
,ntst=ntst
)));
if (is.null(tset)) {
predictions[cvi==k,response.variables] <- res$d_test[,paste("pred_",response.variables,sep="")];
predictTest = NULL;
} else {
predictTest = res$d_test[,paste("pred_",response.variables,sep="")];
}
if (!(opts$TST.kind=="cv")) {
predictions[cvi!=k & cvi>=0,response.variables] <- res$d_train[1:ntrn,paste("pred_",response.variables,sep="")];
}
}
Err = rbind(Err,c(colSums(alltrn)/sum(alltrn$ntrn),colSums(alltst)/sum(alltst$ntst)));
Err[i,"ntrn"]=ifelse(opts$TST.kind=="cv",nrow(dset),nrow(d_train));
Err[i,"ntst"]=ifelse(opts$TST.kind=="cv",nrow(dset),nrow(d_test));
predProbList[[i]] <- list()
predProbList[[i]]$Val <- predProb$Val;
predProbList[[i]]$Trn <- predProb$Trn;
cat1(opts,"\n",ifelse(opts$TST.kind=="cv","CV","")
,paste0("Relative gain (",opts$rgain.type,") on training set "),Err[i,"rgain.trn"],"%\n")
cat1(opts,"", ifelse(opts$TST.kind=="cv","CV","")
,paste0("Relative gain (",opts$rgain.type,") on ",tsetStr[2]," set "),Err[i,"rgain.tst"],"%\n\n")
C_train[i] = Err[i,"cerr.trn"]
G_train[i] = Err[i,"gain.trn"]
R_train[i] = Err[i,"rgain.trn"]
C_vali[i] = Err[i,"cerr.tst"]
G_vali[i] = Err[i,"gain.tst"]
R_vali[i] = Err[i,"rgain.tst"]
C_vali2[i] = Err[i,"cerr.tst2"]
G_vali2[i] = Err[i,"gain.tst2"]
R_vali2[i] = Err[i,"rgain.tst2"]
if (opts$GD.DEVICE!="non" & !is.null(opts$gr.fctcall)) {
eval(parse(text=opts$gr.fctcall));
}
}
if (opts$NRUN>1) {
cat1(opts,"\nAverage over all ",opts$NRUN," runs: \n")
cat1(opts,sprintf("cerr$train: (%7.5f +- %7.5f)%%\n", mean(C_train)*100, sd(C_train)*100));
cat1(opts,sprintf("cerr$vali: (%7.5f +- %7.5f)%%\n", mean(C_vali)*100, sd(C_vali)*100));
cat1(opts,sprintf("gain$train: (%7.2f +- %4.2f)\n", mean(G_train), sd(G_train)));
cat1(opts,sprintf("gain$vali: (%7.2f +- %4.2f)\n", mean(G_vali), sd(G_vali)));
cat1(opts,sprintf("rgain.train: %7.3f%%\n", mean(R_train)));
cat1(opts,sprintf("rgain.vali: %7.3f%%\n\n", mean(R_vali)));
}
result = list(lastRes = res
, C_train = C_train
, G_train = G_train
, R_train = R_train
, C_vali = C_vali
, G_vali = G_vali
, R_vali = R_vali
, C_vali2 = C_vali2
, G_vali2 = G_vali2
, R_vali2 = R_vali2
, Err = Err
, predictions = predictions
, predictTest = predictTest
, predProbList = predProbList
);
if (!is.null(opts$TST.COL))
if (opts$TST.COL %in% names(dset)) result$TST = dset[,opts$TST.COL]
class(result) <- c("TDMclassifier","TDM")
if (exists("SAVESEED")) assign(".Random.seed", SAVESEED, envir=globalenv());
result;
}
tdmClassifySummary <- function(result,opts,dset=NULL)
{
res <- result$lastRes;
cat1Records <- function (nrow_noCV) {
cat1(opts,ifelse(opts$TST.kind=="cv"
, sprintf(" (on %d records in %d folds)",nrow(res$d_train)+nrow(res$d_test),opts$TST.NFOLD)
, sprintf(" (on %d records)",nrow_noCV)
),"\n");
}
y = mean(result$R_vali);
ytr = mean(result$R_train);
maxScore = result$G_vali[1]/(result$R_vali[1]/100);
maxScore.tr = result$G_train[1]/(result$R_train[1]/100);
z=data.frame(TYPE=c("rgain","meanCA","minCA","bYouden")
,DESC=c("relative gain, i.e. percent of correctly classified records"
,"mean class accuracy, i.e. average over class levels"
,"minimum class accuracy", "balanced Youden index"));
cat1(opts,sprintf("\nRelative gain is \"%s\"",opts$rgain.type));
cat1(opts,sprintf(" (%s)", z$DESC[which(z$TYPE==opts$rgain.type)]));
if (opts$MOD.method %in% c("RF","MC.RF") & opts$RF.OOB==TRUE) {
cat1(opts,sprintf("\n%sTrain OOB relative gain: %7.3f",ifelse(opts$TST.kind=="cv","CV ",""),ytr));
cat1(opts,ifelse(opts$NRUN>1,sprintf(" +-%7.3f",sd(result$R_train)),""));
cat1Records(nrow(res$d_train));
result$y=-ytr;
result$sd.y=sd(result$R_train);
} else {
cat1(opts,"\n");
result$y=-y;
result$sd.y=sd(result$R_vali);
}
cat1(opts,sprintf("%s Vali relative gain: %7.3f",ifelse(opts$TST.kind=="cv","CV ",""),y));
cat1(opts,ifelse(opts$NRUN>1,sprintf(" +-%7.3f",sd(result$R_vali)),""));
cat1Records(nrow(res$d_test));
cat1(opts,sprintf("%s Vali2 relative gain (predict always with %s): %7.3f"
,ifelse(opts$TST.kind=="cv","CV ",""), res$allEVAL$test2.string, mean(result$R_vali2)));
cat1(opts,ifelse(opts$NRUN>1,sprintf(" +-%7.3f",sd(result$R_vali2)),""));
cat1Records(nrow(res$d_test));
if (!is.null(dset)) result$dset=dset;
result;
}
library(lingtypology)
context("Tests for lat.lang function")
df <- data.frame(my_langs = c("Adyghe", "Russian"), stringsAsFactors = FALSE)
test_that("lat.lang", {
expect_equal(lat.lang("Adyghe"), c(Adyghe = 44))
expect_equal(lat.lang(df), c(my_langs1 = 44, my_langs2 = 59))
expect_equal(lat.lang(c("Adyghe", "Russian")), c(Adyghe = 44, Russian = 59))
})
project_gradient <- function(Q, b, maxits, alpha, R, betaOld) {
  ## Projected gradient descent: minimise the quadratic objective
  ## 0.5 * t(beta) %*% Q %*% beta - t(b) %*% beta subject to an L1-ball
  ## constraint of radius R. Each iteration takes a gradient step of size
  ## alpha and projects the result back onto the ball.
  its <- 1
  change <- 1
  tol <- 1e-4
  p <- length(b)
  while (its <= maxits && change > tol) {
    grad <- Q %*% betaOld - b
    betaNew <- project_onto_l1_ball(betaOld - alpha * grad, R)
    ## convergence is tracked via the change in squared norm, scaled by alpha
    diff <- sum(betaNew^2 - betaOld^2)
    change <- diff / alpha
    betaOld <- betaNew
    its <- its + 1
  }
  if (its == maxits + 1) message("Max iterations reached")
  return(betaNew)
}
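## A minimal usage sketch for project_gradient(), assuming project_onto_l1_ball()
## (called inside the function above) is available from the same source file.
## It minimises 0.5 * t(beta) %*% Q %*% beta - t(b) %*% beta over the L1 ball of
## radius R; all object names below are illustrative.
set.seed(1)
p_dim <- 5
A <- matrix(rnorm(p_dim^2), p_dim, p_dim)
Q <- crossprod(A) + diag(p_dim)   # symmetric positive-definite quadratic term
b <- rnorm(p_dim)
beta_hat <- project_gradient(Q, b, maxits = 500, alpha = 1e-3, R = 2,
                             betaOld = rep(0, p_dim))
sum(abs(beta_hat))                # should not exceed the radius R = 2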
twsa <- function(sa_obj, param1 = NULL, param2 = NULL, ranges = NULL,
nsamp = 100,
outcome = c("eff", "cost", "nhb", "nmb", "nhb_loss", "nmb_loss"),
wtp = NULL,
strategies = NULL,
poly.order = 2) {
if (inherits(sa_obj, "psa")) {
if (is.null(param1) | is.null(param2)) {
stop("if using psa object, both param1 and param2 must be provided")
}
params <- c(param1, param2)
outcome <- match.arg(outcome)
if (!is.null(sa_obj$other_outcome)) {
sa_obj$effectiveness <- sa_obj$other_outcome
}
mm <- metamodel("twoway", sa_obj, params, strategies, outcome, wtp, "poly", poly.order)
tw <- predict(mm, ranges, nsamp)
} else if (inherits(sa_obj, "dsa_twoway")) {
params <- sa_obj$parameters
if (!is.null(sa_obj$other_outcome)) {
eff <- sa_obj$other_outcome
} else {
eff <- sa_obj$effectiveness
}
cost <- sa_obj$cost
strategies <- sa_obj$strategies
parnames <- sa_obj$parnames
y <- calculate_outcome(outcome, cost, eff, wtp)
names(y) <- strategies
tw <- NULL
for (s in strategies) {
new_df <- data.frame("p1" = params[, parnames[1]], "p2" = params[, parnames[2]],
"strategy" = s, "outcome_val" = y[, s])
tw <- rbind(tw, new_df, stringsAsFactors = FALSE)
}
names(tw)[1:2] <- parnames
} else {
stop("either a psa or dsa_twoway object must be provided")
}
class(tw) <- c("twsa", "data.frame")
return(tw)
}
plot.twsa <- function(x, maximize = TRUE,
col = c("full", "bw"),
n_x_ticks = 6,
n_y_ticks = 6,
txtsize = 12, ...) {
params <- names(x)[c(1, 2)]
param1 <- params[1]
param2 <- params[2]
if (maximize) {
obj_fn <- which.max
} else {
obj_fn <- which.min
}
opt_df <- x %>%
group_by(.data[[param1]], .data[[param2]]) %>%
slice(obj_fn(.data$outcome_val))
g <- ggplot(opt_df, aes_(x = as.name(param1), y = as.name(param2))) +
geom_tile(aes_(fill = as.name("strategy"))) +
theme_bw() +
xlab(param1) +
ylab(param2)
col <- match.arg(col)
add_common_aes(g, txtsize, col = col, col_aes = "fill",
scale_name = "Strategy",
continuous = c("x", "y"),
n_x_ticks = n_x_ticks,
n_y_ticks = n_y_ticks,
xexpand = c(0, 0),
yexpand = c(0, 0))
}
patternmat2 <- function(nobj)
{
  ## Enumerate all 2^nvar binary response patterns for the nvar = nobj*(nobj-1)/2
  ## pairwise comparisons; row r of Y is the binary representation of r - 1.
  nvar <- nobj * (nobj - 1) / 2
  Y <- matrix(0, 2^nvar, nvar)
  for (i in 1:nvar)
    Y[, nvar + 1 - i] <- rep(rep(c(0, 1), c(2^(i - 1), 2^(i - 1))), 2^(nvar - i))
  Y
}
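## A small, self-contained illustration: with nobj = 3 there are nvar = 3
## pairwise variables, so patternmat2(3) returns the 8 x 3 matrix whose rows
## enumerate all binary patterns 000, 001, ..., 111.
patternmat2(3)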
CLOSE <- function(x) {
  ## Closure operation for compositional data: rescale each row of x to sum to 1.
  x / rowSums(x)
}
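## A minimal sketch of the closure operation: every row of the result sums to 1.
comp <- matrix(c(1, 3, 2, 2), nrow = 2)  # rows are (1, 2) and (3, 2)
CLOSE(comp)                              # rows become (1/3, 2/3) and (3/5, 2/5)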
.overlap <- function(l) {
.stopIfNotIsMassObjectList(l)
leftMass <- .unlist(lapply(l, function(x)x@mass[1L]))
rightMass <- .unlist(lapply(l, function(x)x@mass[length(x@mass)]))
if (length(rightMass)) {
r <- c(max(leftMass, na.rm=TRUE), min(rightMass, na.rm=TRUE))
if (r[1L] < r[2L]) {
return(r)
}
}
c(0L, 0L)
}
.reorderRange <- function(x) {
if (x[1L] > x[2L]) {
x <- x[2L:1L]
}
x
}
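## Tiny, self-contained illustration of the range helper above:
.reorderRange(c(900, 100))  # returns c(100, 900)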
context("arrayize")
test_that("it preserves scalar values", {
expect_equal(arrayize(c("a"=3)), list("a"=3))
expect_equal(arrayize(c("a"=3, "b"=2)), list("a"=3, "b"=2))
})
test_that("it parses vectors", {
expect_equal(arrayize(c("a[1]"=3)), list("a"=3))
expect_equal(arrayize(c("a[1]"=3, "a[2]"=4)), list("a"=c(3,4)))
expect_equal(arrayize(c("a[2]"=4)), list("a"=c(NA,4)))
})
test_that("it parses matrices", {
expect_equal(arrayize(c("m[1,2]"=8,"m[2,1]"=4)), list("m"=rbind(c(NA,8),c(4,NA))))
})
test_that("it handles multiple variables", {
result <- arrayize(c("m[1,2]"=8,"a"=1,"beta[2]"=3,"m[2,1]"=4))
expect_equal(result, list("a"=1, "beta"=c(NA,3), "m"=rbind(c(NA,8),c(4,NA))))
})
importCol <- function(res.file, Dev=FALSE, CPUE=FALSE, Survey=FALSE, CAc=FALSE,
CAs=FALSE, CLc=FALSE, CLs=FALSE, LA=FALSE, quiet=TRUE)
{
readVector <- function(keyword, same.line=TRUE, file=res.file,
vector=res.vector)
{
line <- match(keyword, substring(vector,1,nchar(keyword)))
v <- if(same.line)
as.numeric(scan(file, what="", skip=line-1, nlines=1, quiet=TRUE)[-1])
else
as.numeric(scan(file, what="", skip=line, nlines=1, quiet=TRUE))
if(!quiet) cat("vector...")
v
}
readMatrix <- function(keyword, nrow, header=FALSE,
stripe=c("no","left","right","upper","lower"),
file=res.file, vector=res.vector)
{
stripe <- match.arg(stripe)
line <- match(keyword,substring(vector,1,nchar(keyword))) +
as.numeric(header)
m <- scan(file, skip=line, nlines=nrow, quiet=TRUE)
m <- matrix(m, byrow=TRUE, nrow=nrow)
m <- switch(stripe,
left=m[,seq(1,ncol(m)/2)],
right=m[,seq(ncol(m)/2+1,ncol(m))],
upper=m[seq(1,nrow(m)-1,by=2),],
lower=m[seq(2,nrow(m),by=2),], m)
if(!quiet) cat("matrix...")
m
}
getN <- function(sexes, years, ages)
{
if(!quiet) cat("N ")
nsexes <- length(sexes)
nyears <- length(years)
nages <- length(ages)
if(nsexes == 1)
{
Nu <- readMatrix("Numbers_at_age_by_Year,sex_and_age", nrow=nyears*nsexes)
N <- data.frame(Sex=rep(sexes,nyears*nages), Year=rep(years,each=nages),
Age=rep(ages,nyears), N=as.vector(t(Nu)),
stringsAsFactors=FALSE)
}
if(nsexes == 2)
{
Nf <- readMatrix("Numbers_at_age_by_Year,sex_and_age",
nrow=nyears*nsexes, stripe="upper")
Nm <- readMatrix("Numbers_at_age_by_Year,sex_and_age",
nrow=nyears*nsexes, stripe="lower")
N <- data.frame(Sex=rep(sexes,each=nyears*nages),
Year=rep(rep(years,each=nages),2), Age=rep(ages,2*nyears),
N=as.vector(t(rbind(Nf,Nm))), stringsAsFactors=FALSE)
}
if(!quiet) cat("OK\n")
N
}
getB <- function(years, gears)
{
ngears <- length(gears)
if(!quiet) cat("B ")
vb <- readMatrix("Vulnerable_Biomass_by_Method_and_Year", nrow=ngears)
sb <- readVector("Spawning_Biomass_by_Year", same.line=FALSE)
y <- c(readVector("Total_Catch_by_Method_and_Year", same.line=FALSE), NA)
B <- data.frame(years=years, vb=t(vb), sb=sb, y=y)
names(B) <- if(ngears==1) c("Year", "VB", "SB", "Y")
else c("Year", paste("VB",gears,sep="."), "SB", "Y")
if(!quiet) cat("OK\n")
B
}
getSel <- function(gears, surveys, years, sexes, ages)
{
if(!quiet) cat("Sel ")
ngears <- length(gears)
nsurveys <- length(surveys)
nyears <- length(years)
nsexes <- length(sexes)
nages <- length(ages)
com <- readMatrix(
"Commercial_age-specific_selectivity_by_method,Year,sex_and_age",
nrow=ngears*nyears*nsexes)
com <- com[seq(1, to=ngears*nyears*nsexes, by=nyears),]
srv <- readMatrix(
"Survey_age-specific_selectivity_by_survey,Year,sex_and_age",
nrow=nsurveys*nsexes)
fecundity <- readVector("Fecundity_by_year_and_age", same.line=FALSE)
weight <- readVector("Weight_by_year,sex_and_age", same.line=FALSE)
mat <- rep(ifelse(weight>0,fecundity/weight,0), nsexes)
if(is.numeric(gears))
gears <- paste("Gear", gears)
if(is.numeric(surveys))
surveys <- paste("Survey", surveys)
Sel <- data.frame(Series=
c(rep(gears,each=nsexes*nages),
rep(surveys,each=nsexes*nages),
rep("Maturity",nsexes*nages)),
Sex=rep(rep(sexes,each=nages),ngears+nsurveys+1),
Age=rep(ages,(ngears+nsurveys+1)*nsexes),
P=c(t(com),t(srv),mat),
stringsAsFactors=FALSE)
if(!quiet) cat("OK\n")
Sel
}
getDev <- function(ages, years)
{
if(!quiet) cat("Dev ")
Dev <- list()
Dev$sigmaR <- c(readVector("p_log_InitialDev",same.line=TRUE)[6],
readVector("p_log_RecDev",same.line=TRUE)[6])
names(Dev$sigmaR) <- c("Initial", "Annual")
Dev$Initial <- readVector("log_InitialDev", same.line=TRUE)
names(Dev$Initial) <- ages[-c(1,length(ages))]
Dev$Annual <- readVector("log_RecDev", same.line=TRUE)
names(Dev$Annual) <- years[-length(years)]
if(!quiet) cat("OK\n")
Dev
}
getCPUE <- function(gears, years)
{
if(!quiet) cat("CPUE ")
nseries <- readVector("NCPUEindex")
ngears <- length(gears)
nyears <- length(years)
obs <- readMatrix("indexmethodyearvaluecv",
nrow=readVector("Number_of_CPUE_data",same.line=FALSE))
obs <- data.frame(Series=obs[,1], Gear=obs[,2], Year=obs[,3], Obs=obs[,4],
CV=obs[,5], stringsAsFactors=FALSE)
fit <- readMatrix("CPUE_Index_Trajectories", nrow=nseries)
fit <- data.frame(Series=rep(1:nseries,each=nyears),
Year=rep(years,nseries), Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
CPUE <- merge(obs[,names(obs)!="Gear"], fit, all=TRUE)
sgkey <- unique(obs[,c("Series","Gear")])
CPUE <- merge(sgkey, CPUE)
CPUE <- data.frame(Series=paste("Series ",CPUE$Series,"-",CPUE$Gear,sep=""),
Year=as.integer(CPUE$Year), Obs=CPUE$Obs, CV=CPUE$CV,
Fit=CPUE$Fit, stringsAsFactors=FALSE)
if(!quiet) cat("OK\n")
CPUE
}
getSurvey <- function(years)
{
if(!quiet) cat("Survey ")
nyears <- length(years)
nseries <- readVector("Nsurveyindex")
obs <- readMatrix("indexyearvaluecv",
nrow=readVector("Number_of_survey_data",same.line=FALSE))
obs <- data.frame(Series=obs[,1], Year=obs[,2], Obs=obs[,3], CV=obs[,4])
fit <- readMatrix("Survey_Index_Trajectories", nrow=nseries)
fit <- data.frame(Series=rep(1:nseries,each=nyears),
Year=rep(years,nseries), Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
Survey <- merge(obs, fit, all=TRUE)
Survey$Series <- as.integer(Survey$Series)
Survey$Year <- as.integer(Survey$Year)
if(!quiet) cat("OK\n")
Survey
}
getCAc <- function(sexes, ages)
{
if(!quiet) cat("CAc ")
nsexes <- length(sexes)
nages <- length(ages)
nobs <- readVector("Number_of_Commercial_C@A", same.line=FALSE)
obs <- readMatrix("methodyearsamplesizesex1a1sex1a2sex1a3", nrow=nobs)
fit <- readMatrix("methodyearsamplesizesex1a1sex1a2sex1a3", nrow=nobs,
header=2*(nobs+1))
CAc <- data.frame(Series=rep(obs[,1],each=nsexes*nages),
Year=rep(obs[,2],each=nsexes*nages),
SS=rep(obs[,3],each=nsexes*nages),
Sex=rep(rep(sexes,each=nages),nobs),
Age=rep(ages,nsexes*nobs),
Obs=as.vector(t(obs[,-(1:3)])),
Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
CAc$Series <- as.integer(CAc$Series)
CAc$Year <- as.integer(CAc$Year)
CAc$Age <- as.integer(CAc$Age)
if(!quiet) cat("OK\n")
CAc
}
getCAs <- function(sexes, ages)
{
if(!quiet) cat("CAs ")
nsexes <- length(sexes)
nages <- length(ages)
nobs <- readVector("Number_of_survey_C@A",same.line=FALSE)
obs <- readMatrix("surveyyearsamplesizesex1a1sex1a2sex1a3", nrow=nobs)
fit <- readMatrix("surveyyearsamplesizesex1a1sex1a2sex1a3", nrow=nobs,
header=2*(nobs+1))
CAs <- data.frame(Series=rep(obs[,1],each=nsexes*nages),
Year=rep(obs[,2],each=nsexes*nages),
SS=rep(obs[,3],each=nsexes*nages),
Sex=rep(rep(sexes,each=nages),nobs),
Age=rep(ages,nsexes*nobs),
Obs=as.vector(t(obs[,-(1:3)])),
Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
CAs$Series <- as.integer(CAs$Series)
CAs$Year <- as.integer(CAs$Year)
CAs$Age <- as.integer(CAs$Age)
if(!quiet) cat("OK\n")
CAs
}
getCLc <- function(sexes, lengths)
{
if(!quiet) cat("CLc ")
nsexes <- length(sexes)
nlengths <- length(lengths)
nobs <- readVector("Number_of_Commercial_C@L", same.line=FALSE)
obs <- readMatrix("methodyearsamplesizesex1l1sex1l2sex1l3", nrow=nobs)
fit <- readMatrix("methodyearsamplesizesex1l1sex1l2sex1l3", nrow=nobs,
header=nobs+1)
CLc <- data.frame(Series=rep(obs[,1],each=nsexes*nlengths),
Year=rep(obs[,2],each=nsexes*nlengths),
SS=rep(obs[,3],each=nsexes*nlengths),
Sex=rep(rep(sexes,each=nlengths),nobs),
Length=rep(lengths,nsexes*nobs),
Obs=as.vector(t(obs[,-(1:3)])),
Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
CLc$Series <- as.integer(CLc$Series)
CLc$Year <- as.integer(CLc$Year)
CLc$Length <- as.integer(CLc$Length)
if(!quiet) cat("OK\n")
CLc
}
getCLs <- function(sexes, lengths)
{
if(!quiet) cat("CLs ")
nsexes <- length(sexes)
nlengths <- length(lengths)
nobs <- readVector("Number_of_surveyC@L",same.line=FALSE)
obs <- readMatrix("surveyyearsamplesizesex1l1sex1l2sex1l3", nrow=nobs)
fit <- readMatrix("surveyyearsamplesizesex1l1sex1l2sex1l3", nrow=nobs,
header=2*(nobs+1))
CLs <- data.frame(Series=rep(obs[,1],each=nsexes*nlengths),
Year=rep(obs[,2],each=nsexes*nlengths),
SS=rep(obs[,3],each=nsexes*nlengths),
Sex=rep(rep(sexes,each=nlengths),nobs),
Length=rep(lengths,nsexes*nobs),
Obs=as.vector(t(obs[,-(1:3)])),
Fit=as.vector(t(fit)),
stringsAsFactors=FALSE)
CLs$Series <- as.integer(CLs$Series)
CLs$Year <- as.integer(CLs$Year)
CLs$Length <- as.integer(CLs$Length)
if(!quiet) cat("OK\n")
CLs
}
getLA <- function(sexes, ages)
{
if(!quiet) cat("LA ")
nsexes <- length(sexes)
nages <- length(ages)
nobs <- readVector("
vector=latage.vector)
obs <- readMatrix("VonBertalanfy--Lenght-at-agefit--Likelihood",
nrow=sum(nobs), header=8)
obs <- data.frame(Sex=rep(sexes,nobs), Age=obs[,1], Obs=obs[,2],
stringsAsFactors=FALSE)
Linf <- suppressWarnings(readVector("VonBeratalanfy:Linf")[-(1:3)])
K <- suppressWarnings(readVector("VonBeratalanfy:k")[-(1:3)])
t0 <- suppressWarnings(readVector("VonBeratalanfy:to")[-(1:3)])
CV1 <- suppressWarnings(readVector("cvoftheFitbysex")[-(1:5)])
CVratio <- suppressWarnings(
readVector("ratioofcv(L_an)/cv(L_a1)oftheFitbysex")[-(1:7)])
sigmaLA <- readVector(
"
same.line=FALSE, file=txt.file, vector=txt.vector)[1]
max.age <- c(max(obs$Age[obs$Sex==sexes[1]]),
max(obs$Age[obs$Sex==sexes[2]]))
fit <- data.frame(Sex=rep(sexes,max.age), Age=c(1:max.age[1],1:max.age[2]),
stringsAsFactors=FALSE)
fit$Fit[fit$Sex==sexes[1]] <- Linf[1]*
(1-exp(-K[1]*(fit$Age[fit$Sex==sexes[1]]-t0[1])))
fit$Fit[fit$Sex==sexes[2]] <- Linf[2]*
(1-exp(-K[2]*(fit$Age[fit$Sex==sexes[2]]-t0[2])))
if(sigmaLA == 1)
{
A <- rep(max(ages), 2)
a <- cbind(fit$Age[fit$Sex==sexes[1]], fit$Age[fit$Sex==sexes[2]])
fit$CV[fit$Sex==sexes[1]] <- CV1[1] +
CV1[1]*(CVratio[1]-1)/(A[1]-1)*(a[,1]-1)
fit$CV[fit$Sex==sexes[2]] <- CV1[2] +
CV1[2]*(CVratio[2]-1)/(A[2]-1)*(a[,2]-1)
}
if(sigmaLA == 2)
{
L1 <- Linf*(1-exp(-K*(1-t0)))
Ln <- Linf*(1-exp(-K*(max(ages)-t0)))
fit$CV[fit$Sex==sexes[1]] <- CV1[1] +
CV1[1]*(CVratio[1]-1)/(Ln[1]-L1[1])*(fit$Fit[fit$Sex==sexes[1]]-L1[1])
fit$CV[fit$Sex==sexes[2]] <- CV1[2] +
CV1[2]*(CVratio[2]-1)/(Ln[2]-L1[2])*(fit$Fit[fit$Sex==sexes[2]]-L1[2])
}
LA <- merge(obs, fit, by=c("Sex","Age"), all=TRUE)
LA$Age <- as.integer(LA$Age)
LA$Fit <- LA$Fit
LA$CV <- LA$CV
if(!quiet) cat("OK\n")
LA
}
if(!file.exists(res.file))
stop("file ", res.file, " not found; use / or \\\\ separators")
res.vector <- readLines(res.file)
res.vector <- gsub("\"","",
gsub("\t","",gsub(" ","",res.vector)))
if(!quiet) cat("\nParsing text file ", res.file, ":\n\nPreamble ", sep="")
sexes <- if(readVector("Nsexes")==1) "Unisex" else c("Female","Male")
gears <- seq(1, length.out=readVector("Nmethods"))
surveys <- seq(1, length.out=readVector("Nsurveyindex"))
years <- seq(from=readVector("StartYear"), to=readVector("EndYear")+1)
ages <- seq(from=1, to=readVector("Nages"))
lengths <- seq(from=readVector("First_length"),
by=readVector("Length_class_increment"),
length.out=readVector("Number_of_length_classes"))
if(!quiet) cat("OK\n")
model <- list()
model$N <- getN(sexes, years, ages)
model$B <- getB(years, gears)
rec <- model$N[model$N$Age==1,]
rec <- tapply(rec$N, rec$Year, sum)
model$B$R <- c(rec[-1], NA)
model$Sel <- getSel(gears, surveys, years, sexes, ages)
if(Dev) model$Dev <- getDev(ages, years)
if(CPUE) model$CPUE <- getCPUE(gears, years)
if(Survey) model$Survey <- getSurvey(years)
if(CAc) model$CAc <- getCAc(sexes, ages)
if(CAs) model$CAs <- getCAs(sexes, ages)
if(CLc) model$CLc <- getCLc(sexes, lengths)
if(CLs) model$CLs <- getCLs(sexes, lengths)
if(LA)
{
latage.file <- paste(dirname(res.file),"l_at_age.dat",sep="/")
if(!file.exists(latage.file))
stop("file ", latage.file, " not found; use / or \\\\ separators")
latage.vector <- readLines(latage.file)
latage.vector <- gsub("\"","", gsub("\t","",gsub(" ","",latage.vector)))
txt.file <- gsub("\\.res", "\\.txt", res.file)
if(!file.exists(txt.file))
stop("file ", txt.file, " not found; use / or \\\\ separators")
txt.vector <- readLines(txt.file)
txt.vector <- gsub("\"","", gsub("\t","",gsub(" ","",txt.vector)))
model$LA <- getLA(sexes, ages)
}
if(!quiet) cat("\n")
attr(model,"call") <- match.call()
class(model) <- "scape"
model
}
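## A minimal usage sketch (the file path is purely illustrative): parse a
## Coleraine .res output file and optionally attach the fitted index series.
# model <- importCol("models/example.res", CPUE = TRUE, Survey = TRUE, quiet = FALSE)
# head(model$N); head(model$B)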
ValidityCheckDetail <- function(data_in,
                                value) {
  if (!(value %in% names(data_in))) {
    stop(paste0("Column ", value,
                " is missing in data or is not named as defined"))
  } else if (!is.numeric(data_in[[value]])) {
    stop(paste0("Non-numeric ", value, " values"))
  } else if (length(data_in[, 1]) != length(data_in[[value]])) {
    stop("Columns in data are not of equal length")
  } else if (sum(is.na(data_in[[value]])) > sum(!is.na(data_in[[value]]))) {
    warning(paste0("Column ", value,
                   " contains more NA values than non-NA values"))
  }
}
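## A minimal usage sketch; the column name "intensity" is purely illustrative.
d <- data.frame(time = 1:5, intensity = c(0.1, 0.4, NA, 0.8, 1.2))
ValidityCheckDetail(d, "intensity")   # passes silently (numeric, mostly non-NA)
# ValidityCheckDetail(d, "conc")      # would stop(): column "conc" is missing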
pkgVersion <- packageDescription("RcppEigen")$Version
pkgDate <- packageDescription("RcppEigen")$Date
prettyDate <- format(Sys.Date(), "%B %e, %Y")
context('logregmulti')
test_that('logregmulti works', {
suppressWarnings(RNGversion("3.5.0"))
set.seed(1337)
y <- sample(c('A', 'B', 'C'), 100, replace = TRUE)
x1 <- rnorm(100)
x2 <- rnorm(100)
df <- data.frame(y=y, x1=x1, x2=x2)
logReg <- jmv::logRegMulti(data = df, dep = "y",
covs = c("x1", "x2"),
blocks = list(list("x1", "x2")),
refLevels = list(list(var="y", ref="A")),
emMeans = ~ x1)
modelFit <- logReg$modelFit$asDF
coef <- logReg$models[[1]]$coef$asDF
expect_equal(0.000870, modelFit$r2mf[1], tolerance = 1e-6)
expect_equal(217.714, modelFit$dev[1], tolerance = 1e-3)
expect_equal(-0.160, coef$est[1], tolerance = 1e-3)
expect_equal(0.242, coef$se[3], tolerance = 1e-3)
expect_equal(0.917, coef$p[2], tolerance = 1e-3)
})
sum_play_stats <- function(play_Id, stats) {
play_stats <- stats %>% filter(.data$playId == play_Id)
row <- bind_cols(play_id = as.integer(play_Id), tidy_play_stats_row)
for (index in seq_along(play_stats$playId)) {
stat_id <- play_stats$statId[index]
if (stat_id == 2) {
row$punt_blocked <- 1
row$punt_attempt <- 1
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 3) {
row$first_down_rush <- 1
} else if (stat_id == 4) {
row$first_down_pass <- 1
} else if (stat_id == 5) {
row$first_down_penalty <- 1
} else if (stat_id == 6) {
row$third_down_converted <- 1
} else if (stat_id == 7) {
row$third_down_failed <- 1
} else if (stat_id == 8) {
row$fourth_down_converted <- 1
} else if (stat_id == 9) {
row$fourth_down_failed <- 1
} else if (stat_id == 10) {
row$rush_attempt <- 1
row$rusher_player_id <- play_stats$player.esbId[index]
row$rusher_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$rushing_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 11) {
row$rush_attempt <- 1
row$touchdown <- 1
row$first_down_rush <- 1
row$rush_touchdown <- 1
row$rusher_player_id <- play_stats$player.esbId[index]
row$rusher_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$rushing_yards <- play_stats$yards[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$penalty_fix <- 1
} else if (stat_id == 12) {
row$rush_attempt <- 1
row$lateral_rush <- 1
row$lateral_rusher_player_id <- play_stats$player.esbId[index]
row$lateral_rusher_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$lateral_rushing_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 13) {
row$rush_attempt <- 1
row$touchdown <- 1
row$rush_touchdown <- 1
row$lateral_rush <- 1
row$lateral_rusher_player_id <- play_stats$player.esbId[index]
row$lateral_rusher_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$lateral_rushing_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 14) {
row$incomplete_pass <- 1
row$pass_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$penalty_fix <- 1
} else if (stat_id == 15) {
row$pass_attempt <- 1
row$complete_pass <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$passing_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 16) {
row$pass_attempt <- 1
row$touchdown <- 1
row$pass_touchdown <- 1
row$complete_pass <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$passing_yards <- play_stats$yards[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$penalty_fix <- 1
} else if (stat_id == 19) {
row$interception <- 1
row$pass_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$penalty_fix <- 1
} else if (stat_id == 20) {
row$pass_attempt <- 1
row$sack <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 21) {
row$pass_attempt <- 1
row$complete_pass <- 1
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$receiving_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 22) {
row$pass_attempt <- 1
row$touchdown <- 1
row$pass_touchdown <- 1
row$complete_pass <- 1
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$receiving_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 23) {
row$pass_attempt <- 1
row$complete_pass <- 1
row$lateral_reception <- 1
row$lateral_receiver_player_id <- play_stats$player.esbId[index]
row$lateral_receiver_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$lateral_receiving_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 24) {
row$pass_attempt <- 1
row$touchdown <- 1
row$pass_touchdown <- 1
row$complete_pass <- 1
row$lateral_reception <- 1
row$lateral_receiver_player_id <- play_stats$player.esbId[index]
row$lateral_receiver_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$yards_gained <- play_stats$yards[index]
row$lateral_receiving_yards <- play_stats$yards[index]
row$penalty_fix <- 1
} else if (stat_id == 25) {
row$pass_attempt <- 1
row$interception_player_id <- play_stats$player.esbId[index]
row$interception_player_name <- play_stats$player.displayName[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 26) {
row$pass_attempt <- 1
row$touchdown <- 1
row$return_touchdown <- 1
row$interception_player_id <- play_stats$player.esbId[index]
row$interception_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 27) {
row$pass_attempt <- 1
row$lateral_return <- 1
row$lateral_interception_player_id <- play_stats$player.esbId[index]
row$lateral_interception_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 28) {
row$pass_attempt <- 1
row$touchdown <- 1
row$return_touchdown <- 1
row$lateral_return <- 1
row$lateral_interception_player_id <- play_stats$player.esbId[index]
row$lateral_interception_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 29) {
row$punt_attempt <- 1
row$punter_player_id <- play_stats$player.esbId[index]
row$punter_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 30) {
row$punt_inside_twenty <- 1
row$punt_attempt <- 1
row$punter_player_id <- play_stats$player.esbId[index]
row$punter_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 31) {
row$punt_in_endzone <- 1
row$punt_attempt <- 1
row$punter_player_id <- play_stats$player.esbId[index]
row$punter_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 32) {
row$punt_attempt <- 1
row$punter_player_id <- play_stats$player.esbId[index]
row$punter_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 33) {
row$punt_attempt <- 1
row$punt_returner_player_id <- play_stats$player.esbId[index]
row$punt_returner_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 34) {
row$touchdown <- 1
row$return_touchdown <- 1
row$punt_attempt <- 1
row$punt_returner_player_id <- play_stats$player.esbId[index]
row$punt_returner_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 35) {
row$punt_attempt <- 1
row$lateral_return <- 1
row$lateral_punt_returner_player_id <- play_stats$player.esbId[index]
row$lateral_punt_returner_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_penalty_fix <- 1
} else if (stat_id == 36) {
row$touchdown <- 1
row$return_touchdown <- 1
row$punt_attempt <- 1
row$lateral_return <- 1
row$lateral_punt_returner_player_id <- play_stats$player.esbId[index]
row$lateral_punt_returner_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 37) {
row$punt_out_of_bounds <- 1
row$punt_attempt <- 1
row$return_yards <- 0
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 38) {
row$punt_downed <- 1
row$punt_attempt <- 1
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 39) {
row$punt_fair_catch <- 1
row$punt_attempt <- 1
row$punt_returner_player_id <- play_stats$player.esbId[index]
row$punt_returner_player_name <- play_stats$player.displayName[index]
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 40) {
row$punt_attempt <- 1
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 41) {
row$kickoff_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 42) {
row$kickoff_inside_twenty <- 1
row$kickoff_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 43) {
row$kickoff_in_endzone <- 1
row$kickoff_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 44) {
row$kickoff_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 45) {
row$kickoff_attempt <- 1
row$kickoff_returner_player_id <- play_stats$player.esbId[index]
row$kickoff_returner_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 46) {
row$touchdown <- 1
row$return_touchdown <- 1
row$kickoff_attempt <- 1
row$kickoff_returner_player_id <- play_stats$player.esbId[index]
row$kickoff_returner_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 47) {
row$kickoff_attempt <- 1
row$lateral_return <- 1
row$lateral_kickoff_returner_player_id <- play_stats$player.esbId[index]
row$lateral_kickoff_returner_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 48) {
row$touchdown <- 1
row$return_touchdown <- 1
row$kickoff_attempt <- 1
row$lateral_return <- 1
row$lateral_kickoff_returner_player_id <- play_stats$player.esbId[index]
row$lateral_kickoff_returner_player_name <- play_stats$player.displayName[index]
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$return_yards <- play_stats$yards[index]
row$return_team <- play_stats$teamAbbr[index]
row$return_penalty_fix <- 1
} else if (stat_id == 49) {
row$kickoff_out_of_bounds <- 1
row$kickoff_attempt <- 1
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 50) {
row$kickoff_fair_catch <- 1
row$kickoff_attempt <- 1
row$kickoff_returner_player_id <- play_stats$player.esbId[index]
row$kickoff_returner_player_name <- play_stats$player.displayName[index]
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 51) {
row$kickoff_attempt <- 1
row$return_team <- play_stats$teamAbbr[index]
} else if (stat_id == 52) {
row$fumble_forced <- 1
row$fumble <- 1
row$fumbled_1_player_id <-
if_else(
is.na(row$fumbled_1_player_id),
play_stats$player.esbId[index],
row$fumbled_1_player_id
)
row$fumbled_1_player_name <-
if_else(
is.na(row$fumbled_1_player_name),
play_stats$player.displayName[index],
row$fumbled_1_player_name
)
row$fumbled_1_team <-
if_else(
is.na(row$fumbled_1_team),
play_stats$teamAbbr[index],
row$fumbled_1_team
)
row$fumbled_2_player_id <-
if_else(
is.na(row$fumbled_2_player_id) &
row$fumbled_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumbled_2_player_id
)
row$fumbled_2_player_name <-
if_else(
is.na(row$fumbled_2_player_name) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumbled_2_player_name
)
row$fumbled_2_team <-
if_else(
is.na(row$fumbled_2_team) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumbled_2_team
)
} else if (stat_id == 53) {
row$fumble_not_forced <- 1
row$fumble <- 1
row$fumbled_1_player_id <-
if_else(
is.na(row$fumbled_1_player_id),
play_stats$player.esbId[index],
row$fumbled_1_player_id
)
row$fumbled_1_player_name <-
if_else(
is.na(row$fumbled_1_player_name),
play_stats$player.displayName[index],
row$fumbled_1_player_name
)
row$fumbled_1_team <-
if_else(
is.na(row$fumbled_1_team),
play_stats$teamAbbr[index],
row$fumbled_1_team
)
row$fumbled_2_player_id <-
if_else(
is.na(row$fumbled_2_player_id) &
row$fumbled_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumbled_2_player_id
)
row$fumbled_2_player_name <-
if_else(
is.na(row$fumbled_2_player_name) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumbled_2_player_name
)
row$fumbled_2_team <-
if_else(
is.na(row$fumbled_2_team) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumbled_2_team
)
} else if (stat_id == 54) {
row$fumble_out_of_bounds <- 1
row$fumble <- 1
row$fumbled_1_player_id <-
if_else(
is.na(row$fumbled_1_player_id),
play_stats$player.esbId[index],
row$fumbled_1_player_id
)
row$fumbled_1_player_name <-
if_else(
is.na(row$fumbled_1_player_name),
play_stats$player.displayName[index],
row$fumbled_1_player_name
)
row$fumbled_1_team <-
if_else(
is.na(row$fumbled_1_team),
play_stats$teamAbbr[index],
row$fumbled_1_team
)
row$fumbled_2_player_id <-
if_else(
is.na(row$fumbled_2_player_id) &
row$fumbled_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumbled_2_player_id
)
row$fumbled_2_player_name <-
if_else(
is.na(row$fumbled_2_player_name) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumbled_2_player_name
)
row$fumbled_2_team <-
if_else(
is.na(row$fumbled_2_team) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumbled_2_team
)
} else if (stat_id == 55) {
row$fumble <- 1
row$fumble_recovery_1_player_id <-
if_else(
is.na(row$fumble_recovery_1_player_id),
play_stats$player.esbId[index],
row$fumble_recovery_1_player_id
)
row$fumble_recovery_1_player_name <-
if_else(
is.na(row$fumble_recovery_1_player_name),
play_stats$player.displayName[index],
row$fumble_recovery_1_player_name
)
row$fumble_recovery_1_team <-
if_else(
is.na(row$fumble_recovery_1_team),
play_stats$teamAbbr[index],
row$fumble_recovery_1_team
)
row$fumble_recovery_1_yards <-
if_else(
is.na(row$fumble_recovery_1_yards),
play_stats$yards[index],
row$fumble_recovery_1_yards
)
row$fumble_recovery_2_player_id <-
if_else(
is.na(row$fumble_recovery_2_player_id) &
row$fumble_recovery_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumble_recovery_2_player_id
)
row$fumble_recovery_2_player_name <-
if_else(
is.na(row$fumble_recovery_2_player_name) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumble_recovery_2_player_name
)
row$fumble_recovery_2_team <-
if_else(
is.na(row$fumble_recovery_2_team) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumble_recovery_2_team
)
row$fumble_recovery_2_yards <-
if_else(
is.na(row$fumble_recovery_2_yards) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$yards[index],
row$fumble_recovery_2_yards
)
} else if (stat_id == 56) {
row$touchdown <- 1
row$fumble <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$fumble_recovery_1_player_id <-
if_else(
is.na(row$fumble_recovery_1_player_id),
play_stats$player.esbId[index],
row$fumble_recovery_1_player_id
)
row$fumble_recovery_1_player_name <-
if_else(
is.na(row$fumble_recovery_1_player_name),
play_stats$player.displayName[index],
row$fumble_recovery_1_player_name
)
row$fumble_recovery_1_team <-
if_else(
is.na(row$fumble_recovery_1_team),
play_stats$teamAbbr[index],
row$fumble_recovery_1_team
)
row$fumble_recovery_1_yards <-
if_else(
is.na(row$fumble_recovery_1_yards),
play_stats$yards[index],
row$fumble_recovery_1_yards
)
row$fumble_recovery_2_player_id <-
if_else(
is.na(row$fumble_recovery_2_player_id) &
row$fumble_recovery_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumble_recovery_2_player_id
)
row$fumble_recovery_2_player_name <-
if_else(
is.na(row$fumble_recovery_2_player_name) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumble_recovery_2_player_name
)
row$fumble_recovery_2_team <-
if_else(
is.na(row$fumble_recovery_2_team) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumble_recovery_2_team
)
row$fumble_recovery_2_yards <-
if_else(
is.na(row$fumble_recovery_2_yards) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$yards[index],
row$fumble_recovery_2_yards
)
} else if (stat_id == 57) {
row$fumble <- 1
row$lateral_recovery <- 1
} else if (stat_id == 58) {
row$touchdown <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$fumble <- 1
row$lateral_recovery <- 1
} else if (stat_id == 59) {
row$fumble <- 1
row$fumble_recovery_1_player_id <-
if_else(
is.na(row$fumble_recovery_1_player_id),
play_stats$player.esbId[index],
row$fumble_recovery_1_player_id
)
row$fumble_recovery_1_player_name <-
if_else(
is.na(row$fumble_recovery_1_player_name),
play_stats$player.displayName[index],
row$fumble_recovery_1_player_name
)
row$fumble_recovery_1_team <-
if_else(
is.na(row$fumble_recovery_1_team),
play_stats$teamAbbr[index],
row$fumble_recovery_1_team
)
row$fumble_recovery_1_yards <-
if_else(
is.na(row$fumble_recovery_1_yards),
play_stats$yards[index],
row$fumble_recovery_1_yards
)
row$fumble_recovery_2_player_id <-
if_else(
is.na(row$fumble_recovery_2_player_id) &
row$fumble_recovery_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumble_recovery_2_player_id
)
row$fumble_recovery_2_player_name <-
if_else(
is.na(row$fumble_recovery_2_player_name) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumble_recovery_2_player_name
)
row$fumble_recovery_2_team <-
if_else(
is.na(row$fumble_recovery_2_team) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumble_recovery_2_team
)
row$fumble_recovery_2_yards <-
if_else(
is.na(row$fumble_recovery_2_yards) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$yards[index],
row$fumble_recovery_2_yards
)
} else if (stat_id == 60) {
row$touchdown <- 1
row$return_touchdown <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$fumble <- 1
row$fumble_recovery_1_player_id <-
if_else(
is.na(row$fumble_recovery_1_player_id),
play_stats$player.esbId[index],
row$fumble_recovery_1_player_id
)
row$fumble_recovery_1_player_name <-
if_else(
is.na(row$fumble_recovery_1_player_name),
play_stats$player.displayName[index],
row$fumble_recovery_1_player_name
)
row$fumble_recovery_1_team <-
if_else(
is.na(row$fumble_recovery_1_team),
play_stats$teamAbbr[index],
row$fumble_recovery_1_team
)
row$fumble_recovery_1_yards <-
if_else(
is.na(row$fumble_recovery_1_yards),
play_stats$yards[index],
row$fumble_recovery_1_yards
)
row$fumble_recovery_2_player_id <-
if_else(
is.na(row$fumble_recovery_2_player_id) &
row$fumble_recovery_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumble_recovery_2_player_id
)
row$fumble_recovery_2_player_name <-
if_else(
is.na(row$fumble_recovery_2_player_name) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumble_recovery_2_player_name
)
row$fumble_recovery_2_team <-
if_else(
is.na(row$fumble_recovery_2_team) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumble_recovery_2_team
)
row$fumble_recovery_2_yards <-
if_else(
is.na(row$fumble_recovery_2_yards) &
row$fumble_recovery_1_player_name != play_stats$player.displayName[index],
play_stats$yards[index],
row$fumble_recovery_2_yards
)
} else if (stat_id == 61) {
row$fumble <- 1
row$lateral_recovery <- 1
} else if (stat_id == 62) {
row$touchdown <- 1
row$return_touchdown <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$fumble <- 1
row$lateral_recovery <- 1
} else if (stat_id == 63) {
NULL
} else if (stat_id == 64) {
row$touchdown <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 68) {
row$timeout <- 1
row$timeout_team <- play_stats$teamAbbr[index]
} else if (stat_id == 69) {
row$field_goal_missed <- 1
row$field_goal_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 70) {
row$field_goal_made <- 1
row$field_goal_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 71) {
row$field_goal_blocked <- 1
row$field_goal_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
row$kick_distance <- play_stats$yards[index]
} else if (stat_id == 72) {
row$extra_point_good <- 1
row$extra_point_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 73) {
row$extra_point_failed <- 1
row$extra_point_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 74) {
row$extra_point_blocked <- 1
row$extra_point_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 75) {
row$two_point_rush_good <- 1
row$rush_attempt <- 1
row$two_point_attempt <- 1
row$rusher_player_id <- play_stats$player.esbId[index]
row$rusher_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 76) {
row$two_point_rush_failed <- 1
row$rush_attempt <- 1
row$two_point_attempt <- 1
row$rusher_player_id <- play_stats$player.esbId[index]
row$rusher_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 77) {
row$two_point_pass_good <- 1
row$pass_attempt <- 1
row$two_point_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 78) {
row$two_point_pass_failed <- 1
row$pass_attempt <- 1
row$two_point_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 79) {
row$solo_tackle <- 1
row$solo_tackle_1_player_id <-
if_else(
is.na(row$solo_tackle_1_player_id),
play_stats$player.esbId[index],
row$solo_tackle_1_player_id
)
row$solo_tackle_1_player_name <-
if_else(
is.na(row$solo_tackle_1_player_name),
play_stats$player.displayName[index],
row$solo_tackle_1_player_name
)
row$solo_tackle_1_team <-
if_else(
is.na(row$solo_tackle_1_team),
play_stats$teamAbbr[index],
row$solo_tackle_1_team
)
row$solo_tackle_2_player_id <-
if_else(
is.na(row$solo_tackle_2_player_id) &
row$solo_tackle_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$solo_tackle_2_player_id
)
row$solo_tackle_2_player_name <-
if_else(
is.na(row$solo_tackle_2_player_name) &
row$solo_tackle_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$solo_tackle_2_player_name
)
row$solo_tackle_2_team <-
if_else(
is.na(row$solo_tackle_2_team) &
row$solo_tackle_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$solo_tackle_2_team
)
} else if (stat_id == 80) {
row$tackle_with_assist <- 1
row$tackle_with_assist_1_player_id <-
if_else(
is.na(row$tackle_with_assist_1_player_id),
play_stats$player.esbId[index],
row$tackle_with_assist_1_player_id
)
row$tackle_with_assist_1_player_name <-
if_else(
is.na(row$tackle_with_assist_1_player_name),
play_stats$player.displayName[index],
row$tackle_with_assist_1_player_name
)
row$tackle_with_assist_1_team <-
if_else(
is.na(row$tackle_with_assist_1_team),
play_stats$teamAbbr[index],
row$tackle_with_assist_1_team
)
row$tackle_with_assist_2_player_id <-
if_else(
is.na(row$tackle_with_assist_2_player_id) &
row$tackle_with_assist_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$tackle_with_assist_2_player_id
)
row$tackle_with_assist_2_player_name <-
if_else(
is.na(row$tackle_with_assist_2_player_name) &
row$tackle_with_assist_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$tackle_with_assist_2_player_name
)
row$tackle_with_assist_2_team <-
if_else(
is.na(row$tackle_with_assist_2_team) &
row$tackle_with_assist_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$tackle_with_assist_2_team
)
} else if (stat_id == 82) {
row$assist_tackle <- 1
row$assist_tackle_1_player_id <-
if_else(
is.na(row$assist_tackle_1_player_id),
play_stats$player.esbId[index],
row$assist_tackle_1_player_id
)
row$assist_tackle_1_player_name <-
if_else(
is.na(row$assist_tackle_1_player_name),
play_stats$player.displayName[index],
row$assist_tackle_1_player_name
)
row$assist_tackle_1_team <-
if_else(
is.na(row$assist_tackle_1_team),
play_stats$teamAbbr[index],
row$assist_tackle_1_team
)
row$assist_tackle_2_player_id <-
if_else(
is.na(row$assist_tackle_2_player_id) &
row$assist_tackle_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$assist_tackle_2_player_id
)
row$assist_tackle_2_player_name <-
if_else(
is.na(row$assist_tackle_2_player_name) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$assist_tackle_2_player_name
)
row$assist_tackle_2_team <-
if_else(
is.na(row$assist_tackle_2_team) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$assist_tackle_2_team
)
row$assist_tackle_3_player_id <-
if_else(
(is.na(row$assist_tackle_3_player_id) &
row$assist_tackle_1_player_id != play_stats$player.esbId[index] &
row$assist_tackle_2_player_id != play_stats$player.esbId[index]),
play_stats$player.esbId[index],
row$assist_tackle_3_player_id
)
row$assist_tackle_3_player_name <-
if_else(
(is.na(row$assist_tackle_3_player_name) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index] &
row$assist_tackle_2_player_name != play_stats$player.displayName[index]),
play_stats$player.displayName[index],
row$assist_tackle_3_player_name
)
row$assist_tackle_3_team <-
if_else(
(is.na(row$assist_tackle_3_team) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index] &
row$assist_tackle_2_player_name != play_stats$player.displayName[index]),
play_stats$teamAbbr[index],
row$assist_tackle_3_team
)
row$assist_tackle_4_player_id <-
if_else(
(is.na(row$assist_tackle_4_player_id) &
row$assist_tackle_1_player_id != play_stats$player.esbId[index] &
row$assist_tackle_2_player_id != play_stats$player.esbId[index] &
row$assist_tackle_3_player_id != play_stats$player.esbId[index]),
play_stats$player.esbId[index],
row$assist_tackle_4_player_id
)
row$assist_tackle_4_player_name <-
if_else(
(is.na(row$assist_tackle_4_player_name) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index] &
row$assist_tackle_2_player_name != play_stats$player.displayName[index] &
row$assist_tackle_3_player_name != play_stats$player.displayName[index]),
play_stats$player.displayName[index],
row$assist_tackle_4_player_name
)
row$assist_tackle_4_team <-
if_else(
(is.na(row$assist_tackle_4_team) &
row$assist_tackle_1_player_name != play_stats$player.displayName[index] &
row$assist_tackle_2_player_name != play_stats$player.displayName[index] &
row$assist_tackle_3_player_name != play_stats$player.displayName[index]),
play_stats$teamAbbr[index],
row$assist_tackle_4_team
)
} else if (stat_id == 83) {
row$sack <- 1
row$sack_player_id <- play_stats$player.esbId[index]
row$sack_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 84) {
row$sack <- 1
row$assist_tackle <- 1
row$half_sack_1_player_id <-
if_else(
is.na(row$half_sack_1_player_id),
play_stats$player.esbId[index],
row$half_sack_1_player_id
)
row$half_sack_1_player_name <-
if_else(
is.na(row$half_sack_1_player_name),
play_stats$player.displayName[index],
row$half_sack_1_player_name
)
row$half_sack_2_player_id <-
if_else(
is.na(row$half_sack_2_player_id) &
row$half_sack_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$half_sack_2_player_id
)
row$half_sack_2_player_name <-
if_else(
is.na(row$half_sack_2_player_name) &
row$half_sack_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$half_sack_2_player_name
)
} else if (stat_id == 85) {
row$pass_defense_1_player_id <-
if_else(
is.na(row$pass_defense_1_player_id),
play_stats$player.esbId[index],
row$pass_defense_1_player_id
)
row$pass_defense_1_player_name <-
if_else(
is.na(row$pass_defense_1_player_name),
play_stats$player.displayName[index],
row$pass_defense_1_player_name
)
row$pass_defense_2_player_id <-
if_else(
is.na(row$pass_defense_2_player_id) &
row$pass_defense_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$pass_defense_2_player_id
)
row$pass_defense_2_player_name <-
if_else(
is.na(row$pass_defense_2_player_name) &
row$pass_defense_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$pass_defense_2_player_name
)
} else if (stat_id == 86) {
row$punt_attempt <- 1
row$blocked_player_id <- play_stats$player.esbId[index]
row$blocked_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 87) {
row$blocked_player_id <- play_stats$player.esbId[index]
row$blocked_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 88) {
row$field_goal_attempt <- 1
row$blocked_player_id <- play_stats$player.esbId[index]
row$blocked_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 89) {
row$safety <- 1
row$safety_player_id <- play_stats$player.esbId[index]
row$safety_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 91) {
row$fumble <- 1
row$forced_fumble_player_1_player_id <-
if_else(
is.na(row$forced_fumble_player_1_player_id),
play_stats$player.esbId[index],
row$forced_fumble_player_1_player_id
)
row$forced_fumble_player_1_player_name <-
if_else(
is.na(row$forced_fumble_player_1_player_name),
play_stats$player.displayName[index],
row$forced_fumble_player_1_player_name
)
row$forced_fumble_player_1_team <-
if_else(
is.na(row$forced_fumble_player_1_team),
play_stats$teamAbbr[index],
row$forced_fumble_player_1_team
)
row$forced_fumble_player_2_player_id <-
if_else(
is.na(row$forced_fumble_player_2_player_id) &
row$forced_fumble_player_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$forced_fumble_player_2_player_id
)
row$forced_fumble_player_2_player_name <-
if_else(
is.na(row$forced_fumble_player_2_player_name) &
row$forced_fumble_player_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$forced_fumble_player_2_player_name
)
row$forced_fumble_player_2_team <-
if_else(
is.na(row$forced_fumble_player_2_team) &
row$forced_fumble_player_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$forced_fumble_player_2_team
)
} else if (stat_id == 93) {
row$penalty <- 1
row$penalty_player_id <- play_stats$player.esbId[index]
row$penalty_player_name <- play_stats$player.displayName[index]
row$penalty_team <- play_stats$teamAbbr[index]
row$penalty_yards <- play_stats$yards[index]
} else if (stat_id == 95) {
row$tackled_for_loss <- 1
} else if (stat_id == 96) {
row$extra_point_safety <- 1
row$extra_point_attempt <- 1
} else if (stat_id == 99) {
row$two_point_rush_safety <- 1
row$rush_attempt <- 1
row$two_point_attempt <- 1
row$rusher_player_id <- play_stats$player.esbId[index]
row$rusher_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 100) {
row$two_point_pass_safety <- 1
row$pass_attempt <- 1
row$two_point_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 102) {
row$kickoff_downed <- 1
row$kickoff_attempt <- 1
} else if (stat_id == 103) {
row$lateral_sack_player_id <- play_stats$player.esbId[index]
row$lateral_sack_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 104) {
row$two_point_pass_reception_good <- 1
row$pass_attempt <- 1
row$two_point_attempt <- 1
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 105) {
row$two_point_pass_reception_failed <- 1
row$pass_attempt <- 1
row$two_point_attempt <- 1
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 106) {
row$fumble_lost <- 1
row$fumble <- 1
row$fumbled_1_player_id <-
if_else(
is.na(row$fumbled_1_player_id),
play_stats$player.esbId[index],
row$fumbled_1_player_id
)
row$fumbled_1_player_name <-
if_else(
is.na(row$fumbled_1_player_name),
play_stats$player.displayName[index],
row$fumbled_1_player_name
)
row$fumbled_1_team <-
if_else(
is.na(row$fumbled_1_team),
play_stats$teamAbbr[index],
row$fumbled_1_team
)
row$fumbled_2_player_id <-
if_else(
is.na(row$fumbled_2_player_id) &
row$fumbled_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$fumbled_2_player_id
)
row$fumbled_2_player_name <-
if_else(
is.na(row$fumbled_2_player_name) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$fumbled_2_player_name
)
row$fumbled_2_team <-
if_else(
is.na(row$fumbled_2_team) &
row$fumbled_1_player_name != play_stats$player.displayName[index],
play_stats$teamAbbr[index],
row$fumbled_2_team
)
} else if (stat_id == 107) {
row$own_kickoff_recovery <- 1
row$kickoff_attempt <- 1
row$own_kickoff_recovery_player_id <- play_stats$player.esbId[index]
row$own_kickoff_recovery_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 108) {
row$own_kickoff_recovery_td <- 1
row$touchdown <- 1
row$td_team <- play_stats$teamAbbr[index]
row$td_player_id <- play_stats$player.esbId[index]
row$td_player_name <- play_stats$player.displayName[index]
row$kickoff_attempt <- 1
row$own_kickoff_recovery_player_id <- play_stats$player.esbId[index]
row$own_kickoff_recovery_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 110) {
row$qb_hit <- 1
row$qb_hit_1_player_id <-
if_else(
is.na(row$qb_hit_1_player_id),
play_stats$player.esbId[index],
row$qb_hit_1_player_id
)
row$qb_hit_1_player_name <-
if_else(
is.na(row$qb_hit_1_player_name),
play_stats$player.displayName[index],
row$qb_hit_1_player_name
)
row$qb_hit_2_player_id <-
if_else(
is.na(row$qb_hit_2_player_id) &
row$qb_hit_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$qb_hit_2_player_id
)
row$qb_hit_2_player_name <-
if_else(
is.na(row$qb_hit_2_player_name) &
row$qb_hit_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$qb_hit_2_player_name
)
} else if (stat_id == 111) {
row$pass_attempt <- 1
row$complete_pass <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$air_yards <- play_stats$yards[index]
} else if (stat_id == 112) {
row$pass_attempt <- 1
row$passer_player_id <- play_stats$player.esbId[index]
row$passer_player_name <- play_stats$player.displayName[index]
row$air_yards <- play_stats$yards[index]
} else if (stat_id == 113) {
row$pass_attempt <- 1
row$complete_pass <- 1
if (is.na(row$receiver_player_id)) {
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
}
if (is.na(row$yards_after_catch)) {
row$yards_after_catch <- play_stats$yards[index]
}
} else if (stat_id == 115) {
row$pass_attempt <- 1
row$receiver_player_id <- play_stats$player.esbId[index]
row$receiver_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 120) {
row$tackle_for_loss_1_player_id <-
if_else(
is.na(row$tackle_for_loss_1_player_id),
play_stats$player.esbId[index],
row$tackle_for_loss_1_player_id
)
row$tackle_for_loss_1_player_name <-
if_else(
is.na(row$tackle_for_loss_1_player_name),
play_stats$player.displayName[index],
row$tackle_for_loss_1_player_name
)
row$tackle_for_loss_2_player_id <-
if_else(
is.na(row$tackle_for_loss_2_player_id) &
row$tackle_for_loss_1_player_id != play_stats$player.esbId[index],
play_stats$player.esbId[index],
row$tackle_for_loss_2_player_id
)
row$tackle_for_loss_2_player_name <-
if_else(
is.na(row$tackle_for_loss_2_player_name) &
row$tackle_for_loss_1_player_name != play_stats$player.displayName[index],
play_stats$player.displayName[index],
row$tackle_for_loss_2_player_name
)
} else if (stat_id == 301) {
row$extra_point_aborted <- 1
row$extra_point_attempt <- 1
} else if (stat_id == 402) {
NULL
} else if (stat_id == 403) {
row$defensive_two_point_attempt <- 1
} else if (stat_id == 404) {
row$defensive_two_point_conv <- 1
} else if (stat_id == 405) {
row$defensive_extra_point_attempt <- 1
} else if (stat_id == 406) {
row$defensive_extra_point_conv <- 1
} else if (stat_id == 410) {
row$kickoff_attempt <- 1
row$kicker_player_id <- play_stats$player.esbId[index]
row$kicker_player_name <- play_stats$player.displayName[index]
} else if (stat_id == 420) {
row$two_point_return <- 1
row$two_point_attempt <- 1
} else {
NULL
}
}
return(row)
} |
polyptb <- function(v,y){
stopifnot(length(v)==length(y))
du <- dummy(v)
  dul <- as.list(data.frame(du))
names(dul) <- colnames(du)
erg <- sapply(dul, ptb, y)
return(erg)
} |
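## Usage sketch for polyptb(): expand a categorical vector into dummy columns
## and apply ptb() to each column against y. The helpers dummy() and ptb() are
## assumed to be provided elsewhere in this package; the data are illustrative.
if (FALSE) {
  v <- factor(sample(c("a", "b", "c"), 30, replace = TRUE))
  y <- rnorm(30)
  polyptb(v, y)
}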
"estprod_data" |
new_record_update_set <- function() {
update_set <- list()
structure(update_set,
class = "record_update_set")
}
record_update_set <- function() {
new_record_update_set()
} |
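## Usage sketch: record_update_set() returns an empty list classed
## "record_update_set", to be filled by whatever update methods the package
## dispatches on that class.
if (FALSE) {
  updates <- record_update_set()
  class(updates)   # "record_update_set"
  length(updates)  # 0
}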
trendLevel <- function(mydata, pollutant = "nox", x = "month", y = "hour",
type = "year", rotate.axis = c(90, 0), n.levels = c(10, 10, 4),
limits = c(0, 100), cols = "default", auto.text = TRUE,
key.header = "use.stat.name", key.footer = pollutant,
key.position = "right", key = TRUE, labels = NA,
breaks = NA,
statistic = c("mean", "max", "frequency"),
stat.args = NULL, stat.safe.mode = TRUE, drop.unused.types = TRUE,
col.na = "white",
...) {
if (length(cols) == 1 && cols == "greyscale") {
trellis.par.set(list(strip.background = list(col = "white")))
}
current.strip <- trellis.par.get("strip.background")
current.font <- trellis.par.get("fontsize")
on.exit(trellis.par.set(
fontsize = current.font
))
category <- FALSE
  if (!any(is.na(labels)) && !any(is.na(breaks))) category <- TRUE
check.valid <- function(a, x, y) {
if (length(x) > 1) x <- x[1]
if (is.null(x)) {
stop(
paste0(
"\ttrendLevel does not allow 'NULL' ", a, " option.",
"\n\t[suggest one of following: ", paste(y, collapse = ", "), "]"
),
call. = FALSE
)
}
out <- y[pmatch(x, y)]
if (is.na(out)) {
stop(
paste0(
"\ttrendLevel could not evaluate ", a, " term '", x,
"'.\n\t[suggest one of following: ", paste(y, collapse = ", "), "]"
),
call. = FALSE
)
}
out
}
extra.args <- list(...)
extra.args$xlab <- if ("xlab" %in% names(extra.args)) {
quickText(extra.args$xlab, auto.text)
} else {
quickText(x, auto.text)
}
extra.args$ylab <- if ("ylab" %in% names(extra.args)) {
quickText(extra.args$ylab, auto.text)
} else {
quickText(y, auto.text)
}
extra.args$main <- if ("main" %in% names(extra.args)) {
quickText(extra.args$main, auto.text)
} else {
quickText("", auto.text)
}
if ("fontsize" %in% names(extra.args)) {
trellis.par.set(fontsize = list(text = extra.args$fontsize))
}
if (length(x) > 1) {
warning(paste(
"\ttrendLevel does not allow multiple 'x' values.",
"\n\t[ignoring all but first]",
sep = ""
), call. = FALSE)
x <- x[1]
    extra.args$xlab <- extra.args$xlab[1]
}
if (length(y) > 1) {
warning(paste(
"\ttrendLevel does not allow multiple 'y' values.",
"\n\t[ignoring all but first]",
sep = ""
), call. = FALSE)
y <- y[1]
    extra.args$ylab <- extra.args$ylab[1]
}
if (length(type) > 2) {
warning(paste(
"\ttrendLevel allows up to two 'type' values.",
"\n\t[ignoring all but first two]",
sep = ""
), call. = FALSE)
    type <- type[1:2]
}
temp <- unique(c(x, y, type)[duplicated(c(x, y, type))])
if (length(temp) > 0) {
stop(paste0(
"\ttrendLevel could not rationalise plot structure.",
"\n\t[duplicate term(s) in pollutant, x, y, type structure]",
"\n\t[term(s): ", paste(temp, collapse = ", "), "]"
), call. = FALSE)
}
ls.check.fun <- function(vector, vector.name, len) {
if (!is.numeric(vector)) {
warning(paste0(
"\ttrendLevel ignored unrecognised '", vector.name, "' option.",
"\n\t[check ?trendLevel for details]"
), call. = FALSE)
vector <- eval(formals(trendLevel)[[vector.name]])
}
if (length(vector) < len) vector <- rep(vector, len)[1:len]
ifelse(is.na(vector), eval(formals(trendLevel)[[vector.name]]), vector)
}
rotate.axis <- ls.check.fun(rotate.axis, "rotate.axis", 2)
n.levels <- ls.check.fun(n.levels, "n.levels", 3)
if (is.character(statistic) | is.function(statistic)) {
if (is.character(statistic)) {
statistic <- check.valid(
"statistic", statistic,
eval(formals(trendLevel)$statistic)
)
if (statistic == "mean") {
stat.fun <- mean
stat.args <- list(na.rm = TRUE)
}
if (statistic == "max") {
stat.fun <- function(x, ...) {
if (all(is.na(x))) {
NA
} else {
max(x, ...)
}
}
stat.args <- list(na.rm = TRUE)
}
if (statistic == "frequency") {
stat.fun <- function(x, ...) {
if (all(is.na(x))) {
NA
} else {
length(na.omit(x))
}
}
stat.args <- NULL
}
stat.name <- statistic
} else {
stat.name <- substitute(statistic)
if (length(stat.name) != 1) stat.name <- "level"
if (stat.safe.mode) {
stat.fun <- function(x, ...) {
if (all(is.na(x))) NA else statistic(x, ...)[1]
}
} else {
stat.fun <- function(x, ...) statistic(x, ...)[1]
}
}
} else {
stop(
      paste0(
        "\ttrendLevel could not apply statistic option '", substitute(statistic),
        "'.\n\t[suggest valid function or character vector]",
        "\n\t[correct character vector options: '",
        paste(eval(formals(trendLevel)$statistic), collapse = "', '"), "']"
),
call. = FALSE
)
}
if (!is.null(key.header)) {
if (is.character(key.header)) {
key.header <- gsub("use.stat.name", stat.name, key.header)
}
}
if (!is.null(key.footer)) {
if (is.character(key.footer)) {
key.footer <- gsub("use.stat.name", stat.name, key.footer)
}
}
temp <- if ("date" %in% names(mydata)) {
c("date", pollutant)
} else {
pollutant
}
mydata <- checkPrep(mydata, temp, type = c(x, y, type), remove.calm = FALSE)
newdata <- cutData(mydata, x, n.levels = n.levels[1], is.axis = TRUE, ...)
newdata <- cutData(newdata, y, n.levels = n.levels[2], is.axis = TRUE, ...)
newdata <- cutData(newdata, type, n.levels = n.levels[3], ...)
newdata <- newdata[c(pollutant, x, y, type)]
calc.stat <- function(...)
tapply(newdata[[pollutant]], newdata[c(x, y, type)], stat.fun, ...)
if (is.null(stat.args)) {
newdata <- try(calc.stat(), silent = TRUE)
} else {
newdata <- try(do.call(calc.stat, stat.args), silent = TRUE)
}
  if (inherits(newdata, "try-error")) {
stop(
paste0(
"\ttrendLevel could not complete supplied statistic operation '",
        stat.name, "'.\n\t[R error below]", "\n\t", newdata[1]
),
call. = FALSE
)
}
newdata <- data.frame(
expand.grid(dimnames(newdata)),
matrix(unlist(newdata), byrow = TRUE)
)
pollutant <- paste(pollutant, stat.name, sep = ".")
names(newdata)[ncol(newdata)] <- pollutant
temp <- paste(type, collapse = "+")
myform <- formula(paste0(pollutant, " ~ ", x, " * ", y, " | ", temp))
  if (length(type) == 1 && type[1] == "default") myform <- formula(paste0(pollutant, " ~ ", x, " * ", y))
if (length(type) == 1 & type[1] == "wd" & !"layout" %in% names(extra.args)) {
wds <- c("NW", "N", "NE", "W", "E", "SW", "S", "SE")
newdata$wd <- ordered(newdata$wd, levels = wds)
wd.ok <- sapply(wds, function(x) if (x %in% unique(newdata$wd)) FALSE else TRUE)
skip <- c(wd.ok[1:4], TRUE, wd.ok[5:8])
newdata$wd <- factor(newdata$wd)
extra.args$layout <- c(3, 3)
if (!"skip" %in% names(extra.args)) {
extra.args$skip <- skip
}
}
temp <- if (is.factor(newdata[, type[1]])) {
levels(newdata[, type[1]])
} else {
unique(newdata[, type[1]])
}
temp <- sapply(temp, function(x) quickText(x, auto.text))
if (is.factor(temp)) temp <- as.character(temp)
strip <- strip.custom(
factor.levels = temp, strip.levels = c(TRUE, FALSE),
strip.names = FALSE
)
strip.left <- if (length(type) == 1) {
FALSE
} else {
temp <- sapply(unique(newdata[, type[2]]), function(x)
quickText(x, auto.text))
if (is.factor(temp)) temp <- as.character(temp)
strip.custom(factor.levels = temp)
}
suppressWarnings(trellis.par.set(list(strip.background = list(col = "white"))))
scales <- list(
x = list(rot = rotate.axis[1]),
y = list(rot = rotate.axis[2])
)
if (category) {
if (length(labels) + 1 != length(breaks)) stop("Need one more break than labels")
newdata$cuts <- cut(
newdata[, pollutant],
breaks = breaks, labels = labels,
include.lowest = TRUE
)
n <- length(levels(newdata$cuts))
col.regions <- openColours(cols, n)
col.scale <- breaks
legend <- list(
col = col.regions, space = key.position, auto.text = auto.text,
labels = levels(newdata$cuts), footer = key.footer,
header = key.header, height = 0.8, width = 1.5, fit = "scale",
plot.style = "other"
)
col.scale <- breaks
legend <- makeOpenKeyLegend(key, legend, "windRose")
} else {
nlev <- 200
if (missing(limits)) {
breaks <- seq(
min(newdata[, pollutant], na.rm = TRUE),
max(newdata[, pollutant], na.rm = TRUE),
length.out = nlev
)
labs <- pretty(breaks, 7)
labs <- labs[labs >= min(breaks) & labs <= max(breaks)]
at <- labs
} else {
breaks <- seq(min(limits), max(limits), length.out = nlev)
labs <- pretty(breaks, 7)
labs <- labs[labs >= min(breaks) & labs <= max(breaks)]
at <- labs
if (max(limits) < max(newdata[, pollutant], na.rm = TRUE)) {
id <- which(newdata[, pollutant] > max(limits))
newdata[id, pollutant] <- max(limits)
labs[length(labs)] <- paste(">", labs[length(labs)])
}
if (min(limits) > min(newdata[, pollutant], na.rm = TRUE)) {
id <- which(newdata[, pollutant] < min(limits))
newdata[id, pollutant] <- min(limits)
labs[1] <- paste("<", labs[1])
}
}
nlev2 <- length(breaks)
col.regions <- openColours(cols, (nlev2 - 1))
col.scale <- breaks
legend <- list(
col = col.regions, at = col.scale,
labels = list(labels = labs, at = at),
space = key.position, auto.text = auto.text,
footer = key.footer, header = key.header,
height = 1, width = 1.5, fit = "all"
)
legend <- makeOpenKeyLegend(key, legend, "polarPlot")
}
colorkey <- FALSE
yscale.lp <- function(...) {
ans <- yscale.components.default(...)
ans$left$labels$check.overlap <- TRUE
ans$left$labels$labels <- levels(newdata[, y])
ans$left$labels$at <- seq_along(levels(newdata[, y]))
ans
}
xscale.lp <- function(...) {
ans <- xscale.components.default(...)
ans$bottom$labels$check.overlap <- TRUE
ans$bottom$labels$labels <- levels(newdata[, x])
ans$bottom$labels$at <- seq_along(levels(newdata[, x]))
ans
}
xlim <- range(as.numeric(newdata[, x])) + c(-0.5, 0.5)
ylim <- range(as.numeric(newdata[, y])) + c(-0.5, 0.5)
if (length(levels(newdata[[y]])) > 25) ylim <- ylim + c(-0.3, 0.3)
if (length(levels(newdata[[x]])) > 25) xlim <- xlim + c(-0.3, 0.3)
levelplot.args <- list(
x = myform, data = newdata, as.table = TRUE,
legend = legend, colorkey = colorkey,
at = breaks, col.regions = col.regions,
scales = scales,
yscale.components = yscale.lp,
xscale.components = xscale.lp,
par.strip.text = list(cex = 0.8),
strip = strip, strip.left = strip.left,
xlim = xlim, ylim = ylim,
panel = function(x, y, ...) {
panel.fill(col = col.na)
panel.levelplot(x, y, ...)
}
)
levelplot.args <- listUpdate(levelplot.args, extra.args)
plt <- do.call(levelplot, levelplot.args)
if (length(type) > 1) {
plt <- useOuterStrips(plt, strip = strip, strip.left = strip.left)
}
plot(plt)
output <- list(plot = plt, data = newdata, call = match.call())
class(output) <- "openair"
invisible(output)
} |
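## Hedged usage sketch for trendLevel(), assuming the example data set 'mydata'
## shipped with openair is available; arguments follow the signature above.
if (FALSE) {
  ## mean NOx by month (x) and hour of day (y), one panel per year
  trendLevel(mydata, pollutant = "nox", x = "month", y = "hour", type = "year")
  ## categorical shading via explicit breaks and labels
  trendLevel(mydata, pollutant = "nox",
             breaks = c(0, 50, 100, 500),
             labels = c("low", "medium", "high"))
}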
cdm_penalty_threshold_ridge <- function(beta, lambda)
{
y <- beta / ( 1 + 2*lambda )
return(y)
} |
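## Minimal sketch: ridge-penalty shrinkage of a coefficient vector. With
## lambda = 0 the input is returned unchanged; larger lambda shrinks toward 0.
if (FALSE) {
  cdm_penalty_threshold_ridge(beta = c(-2, 0, 1.5), lambda = 0.5)
  ## -> -1.00  0.00  0.75
}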
context("test-classical")
test_that("Additive classical decomposition", {
tsbl_co2 <- as_tsibble(co2)
dcmp <- tsbl_co2 %>% model(classical_decomposition(value)) %>% components()
stats_dcmp <- stats::decompose(co2)
expect_equivalent(
dcmp$trend,
unclass(stats_dcmp$trend)
)
expect_equivalent(
dcmp$seasonal,
unclass(stats_dcmp$seasonal)
)
expect_equivalent(
dcmp$random,
unclass(stats_dcmp$random)
)
expect_equal(
dcmp$value - dcmp$seasonal,
dcmp$season_adjust
)
})
test_that("Multiplicative classical decomposition", {
tsbl_uad <- as_tsibble(USAccDeaths)
dcmp <- tsbl_uad %>% model(classical_decomposition(value, type = "multiplicative")) %>% components()
stats_dcmp <- stats::decompose(USAccDeaths, type = "multiplicative")
expect_equivalent(
dcmp$trend,
unclass(stats_dcmp$trend)
)
expect_equivalent(
dcmp$seasonal,
unclass(stats_dcmp$seasonal)
)
expect_equivalent(
dcmp$random,
unclass(stats_dcmp$random)
)
expect_equal(
dcmp$value / dcmp$seasonal,
dcmp$season_adjust
)
}) |
ssBasis <-
function(x, knots, m=2, d=0, xmin=min(x), xmax=max(x), periodic=FALSE, intercept=FALSE){
k1fun <- function(x) {
x - 1/2
}
k2fun <- function(x) {
(k1fun(x)^2 - (1/12))/2
}
k3fun <- function(x){
(4*(k1fun(x)^3) - k1fun(x))/24
}
k4fun <- function(x) {
(k1fun(x)^4 - ((k1fun(x)^2)/2) + 7/240)/24
}
k5fun <- function(x){
((k1fun(x)^5)/5 - (k1fun(x)^3)/6 + 7*k1fun(x)/240)/24
}
k6fun <- function(x) {
( (k1fun(x)^6)/30 - (k1fun(x)^4)/24 + 7*(k1fun(x)^2)/480 - 31/40320 ) / 24
}
m <- as.integer(m[1])
if(m < 1L) stop("Input 'm' must be a positive integer between 1 and 3.")
if(m > 3L) stop("Input 'm' must be a positive integer between 1 and 3.")
d <- as.integer(d[1])
  if(d < 0L) stop("Input 'd' must be a non-negative integer between 0 and 2.")
  if(d > 2L) stop("Input 'd' must be a non-negative integer between 0 and 2.")
x <- as.matrix(x)
knots <- as.matrix(knots)
nx <- nrow(x)
nknots <- nrow(knots)
x <- (x - xmin) / (xmax - xmin)
knots <- (knots - xmin) / (xmax - xmin)
periodic <- periodic[1]
if(!is.logical(periodic)) stop("Input 'periodic' should be a logical (TRUE/FALSE) variable.")
if(m == 1L){
if(d == 0L){
Xn <- matrix(1, nx, 1)
colnames(Xn) <- "null.0"
if(periodic){
Xc <- k2fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
} else {
Xc <- matrix(k1fun(x),nx,nknots) * matrix(k1fun(knots),nx,nknots,byrow=TRUE) + k2fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else if(d == 1L){
Xn <- matrix(0, nx, 1)
colnames(Xn) <- "null.0"
dmat <- matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE)
smat <- sign(dmat)
smat[smat==0L] <- 1L
if(periodic){
Xc <- smat * k1fun(abs(dmat))
} else {
Xc <- matrix(k1fun(knots),nx,nknots,byrow=TRUE) + smat * k1fun(abs(dmat))
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else {
stop("Cannot set 'd=2' when 'm=1' (need d <= m).")
}
}
if(m == 2L){
if(d == 0L){
if(periodic){
Xn <- matrix(1, nx, 1)
colnames(Xn) <- "null.0"
Xc <- matrix(k2fun(x),nx,nknots) * matrix(k2fun(knots),nx,nknots,byrow=TRUE) - k4fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
} else {
Xn <- cbind(1, k1fun(x))
colnames(Xn) <- paste0("null.",0:1)
Xc <- (-1)*k4fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else if(d == 1L){
dmat <- matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE)
smat <- sign(dmat)
smat[smat==0L] <- 1L
if(periodic){
Xn <- matrix(0, nx, 1)
colnames(Xn) <- "null.0"
Xc <- (-1) * smat * k3fun(abs(dmat))
} else {
Xn <- matrix(c(0,1), nx, 2, byrow=TRUE)
colnames(Xn) <- paste0("null.",0:1)
Xc <- matrix(k1fun(x),nx,nknots) * matrix(k2fun(knots),nx,nknots,byrow=TRUE) - smat * k3fun(abs(dmat))
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else {
if(periodic){
Xn <- matrix(0, nx, 1)
colnames(Xn) <- "null.0"
Xc <- (-1) * k2fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
} else {
Xn <- matrix(0, nx, 2)
colnames(Xn) <- paste0("null.",0:1)
Xc <- matrix(k2fun(knots),nx,nknots,byrow=TRUE) - k2fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
}
colnames(Xc) <- paste0("knot.",1:nknots)
}
}
if(m == 3L) {
if(d == 0L){
if(periodic){
Xn <- matrix(1, nx, 1)
colnames(Xn) <- "null.0"
Xc <- k6fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
} else {
Xn <- cbind(1, k1fun(x), k2fun(x))
colnames(Xn) <- paste0("null.",0:2)
Xc <- matrix(k3fun(x),nx,nknots) * matrix(k3fun(knots),nx,nknots,byrow=TRUE) + k6fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else if(d == 1L){
dmat <- matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE)
smat <- sign(dmat)
smat[smat==0L] <- 1L
if(periodic){
Xn <- matrix(0, nx, 1)
colnames(Xn) <- "null.0"
Xc <- smat * k5fun(abs(dmat))
} else {
Xn <- cbind(0, 1, k1fun(x))
colnames(Xn) <- paste0("null.",0:2)
Xc <- matrix(k2fun(x),nx,nknots) * matrix(k3fun(knots),nx,nknots,byrow=TRUE) + smat * k5fun(abs(dmat))
}
colnames(Xc) <- paste0("knot.",1:nknots)
} else {
if(periodic){
Xn <- matrix(0, nx, 1)
colnames(Xn) <- "null.0"
Xc <- k4fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
} else {
Xn <- matrix(c(0, 0, 1), nx, 3, byrow=TRUE)
colnames(Xn) <- paste0("null.",0:2)
Xc <- matrix(k1fun(x),nx,nknots) * matrix(k3fun(knots),nx,nknots,byrow=TRUE) + k4fun( abs( matrix(x,nx,nknots) - matrix(knots,nx,nknots,byrow=TRUE) ) )
}
colnames(Xc) <- paste0("knot.",1:nknots)
}
}
if(intercept){
return(list(X=cbind(Xn,Xc), knots=knots, m=m, d=d, xlim=c(xmin,xmax),
periodic=periodic, intercept=intercept))
} else {
return(list(X=cbind(Xn,Xc)[,-1], knots=knots, m=m, d=d, xlim=c(xmin,xmax),
periodic=periodic, intercept=intercept))
}
} |
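## Hedged usage sketch for ssBasis(): a cubic (m = 2) smoothing-spline basis on
## [0, 1] with five knots, plus its first-derivative basis. Values illustrative.
if (FALSE) {
  x <- seq(0, 1, length.out = 50)
  knots <- seq(0.1, 0.9, length.out = 5)
  B0 <- ssBasis(x, knots, m = 2, d = 0)   # function values
  B1 <- ssBasis(x, knots, m = 2, d = 1)   # first derivatives
  dim(B0$X)   # 50 rows; null-space columns followed by one column per knot
}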
get_cpp_api <- function() {
pkg_env <- asNamespace("outbreaker2")
regxp <- "^cpp_(ll|prior|move)"
names_cpp_functions <- sort(ls(envir = pkg_env, pattern = regxp))
out_env <- new.env()
for (e in names_cpp_functions) {
f <- get(e, envir = pkg_env)
assign(e, f, envir = out_env)
}
return(out_env)
} |
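## Usage sketch: collect the compiled likelihood, prior and movement functions
## exported by the outbreaker2 namespace into a fresh environment (requires
## outbreaker2 to be installed).
if (FALSE) {
  api <- get_cpp_api()
  ls(api)   # function names matching "^cpp_(ll|prior|move)"
}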
library(mixdir)
context("Variational Inference")
test_that("VI works for simple models", {
X <- create_data()
set.seed(1)
result <- mixdir(X, n_latent=3, select_latent=FALSE)
expect_true(result$converged)
assigned_cluster <- result$pred_class
expect_equal(assigned_cluster[7], assigned_cluster[2])
expect_equal(assigned_cluster[10], assigned_cluster[9])
expect_true(assigned_cluster[10] != assigned_cluster[7])
})
test_that("VI can handle missing values", {
X <- create_data()
set.seed(1)
X[sample(seq_along(X), 3, replace=FALSE)] <- NA
result <- mixdir(X, n_latent=3, select_latent=FALSE)
expect_true(result$converged)
assigned_cluster <-result$pred_class
expect_equal(assigned_cluster[7], assigned_cluster[2])
expect_equal(assigned_cluster[10], assigned_cluster[9])
expect_true(assigned_cluster[10] != assigned_cluster[7])
})
test_that("VI can handle more complex models", {
tmp <- generate_categorical_dataset(n_ind=1000, n_quest=5, n_cat=3, n_true_classes=4)
set.seed(1)
expect_silent(result <- mixdir(tmp$X, n_latent=4, select_latent=FALSE))
})
test_that("VI can handle the mushroom dataset", {
data("mushroom")
set.seed(4)
expect_silent(res <- mixdir(mushroom, n_latent=5))
})
context("Variational Inference DP")
test_that("VI DP works for simple models", {
X <- create_data()
set.seed(1)
result <- mixdir(X, n_latent=10, select_latent = TRUE)
expect_true(result$converged)
assigned_cluster <-result$pred_class
expect_equal(assigned_cluster[7], assigned_cluster[2])
expect_equal(assigned_cluster[10], assigned_cluster[9])
expect_true(assigned_cluster[10] != assigned_cluster[7])
})
test_that("VI DP can handle missing values", {
X <- create_data()
set.seed(1)
X[sample(seq_along(X), 3, replace=FALSE)] <- NA
result <- mixdir(X, n_latent=10, select_latent = TRUE)
expect_true(result$converged)
assigned_cluster <- result$pred_class
expect_equal(assigned_cluster[7], assigned_cluster[2])
expect_equal(assigned_cluster[10], assigned_cluster[9])
expect_true(assigned_cluster[10] != assigned_cluster[7])
})
test_that("mixdir can handle missing values as category", {
X <- create_data()
set.seed(1)
X[sample(seq_along(X), 3, replace=FALSE)] <- NA
result <- mixdir(X, n_latent=10, select_latent = TRUE, na_handle = "category")
expect_true(result$converged)
assigned_cluster <- result$pred_class
expect_equal(assigned_cluster[7], assigned_cluster[2])
expect_equal(assigned_cluster[10], assigned_cluster[9])
expect_true(assigned_cluster[10] != assigned_cluster[7])
})
test_that("mixdir repetitions selects the best run", {
X <- create_data()
set.seed(1)
result <- mixdir(X, n_latent=10, select_latent = TRUE, repetitions = 3)
set.seed(1)
result1 <- mixdir(X, n_latent=10, select_latent = TRUE)
result2 <- mixdir(X, n_latent=10, select_latent = TRUE)
result3 <- mixdir(X, n_latent=10, select_latent = TRUE)
expect_equal(result$ELBO, max(c(result1$ELBO, result2$ELBO, result3$ELBO)))
})
test_that("mixdir fails gracefully with too many columns", {
data_set <- generate_categorical_dataset(n_ind = 50, n_quest = 1e3, n_cat = 2, n_true_classes = 2)
set.seed(1)
expect_error(mixdir(data_set$X, n_latent=2, select_latent = FALSE))
expect_error(mixdir(data_set$X, n_latent=2, select_latent = TRUE))
})
context("Prediction")
test_that("predict_class works", {
data("mushroom")
res <- mixdir(mushroom[1:30, ])
expect_silent(predict_class(mushroom[40, ], res$lambda, res$category_prob))
expect_silent(predict_class(c(`gill-color`="black"), res$lambda, res$category_prob))
expect_warning(predict_class(mushroom[42, ], res$lambda, res$category_prob))
tmp <- mushroom
ind <- sapply(tmp, is.character)
tmp[ind] <- lapply(tmp[ind], factor)
res2 <- mixdir(tmp[1:30, ])
expect_silent(predict_class(mushroom[42, ], res2$lambda, res2$category_prob))
})
test_that("predict.mixdir works", {
data("mushroom")
res <- mixdir(mushroom[1:30, ])
expect_silent(predict(res, mushroom[40, ]))
expect_silent(predict(res, c(`gill-color`="black")))
expect_warning(predict(res, mushroom[42, ]))
expect_equal(res$lambda, c(predict(res, c(`edible`=NA))))
expect_equal(res$class_prob, predict(res))
expect_equal(res$class_prob, predict(res, mushroom[1:30, ]))
})
test_that("predict.mixdir works with DP", {
data("mushroom")
res <- mixdir(mushroom[1:30, ], n_latent=10, select_latent = TRUE)
expect_silent(predict(res, mushroom[40, ]))
expect_silent(predict(res, c(`gill-color`="black")))
expect_warning(predict(res, mushroom[42, ]))
expect_equal(res$lambda, c(predict(res, c(`edible`=NA)) * sum(res$lambda)))
expect_equal(res$class_prob, predict(res))
expect_equal(res$class_prob, predict(res, mushroom[1:30, ]))
})
test_that("finding the most representative answers works", {
data("mushroom")
res <- mixdir(mushroom[1:30, ], beta=1)
find_predictive_features(res, top_n=3)
})
test_that("finding the most typical answers works", {
data("mushroom")
res <- mixdir(mushroom[1:30, ], beta=1)
find_typical_features(res, top_n=3)
})
test_that("finding the defining answers works", {
data("mushroom")
res <- mixdir(mushroom[1:30, ], beta=1)
def_feats <- find_defining_features(res, mushroom[1:30, ])
expect_equal(length(def_feats$quality), ncol(mushroom))
expect_equal(length(def_feats$features), ncol(mushroom))
def_feats2 <- find_defining_features(res, mushroom[1:30, ], n_features = 2)
expect_equal(length(def_feats2$quality), 1)
expect_equal(length(def_feats2$features), 2)
expect_equal(def_feats2$quality, def_feats$quality[3])
}) |
summary.ICBayes <-
function(object, ...){
coef.table<-cbind(object$coef, object$coef_ssd, object$coef_ci[,1], object$coef_ci[,2])
dimnames(coef.table)<-list(names(object$coef),
c('Mean','Std. Dev.',
paste(object$conf.int*100,'%CI-Low',sep=''),
paste(object$conf.int*100,'%CI-Upp',sep='')))
cat("Call:\n")
print(object$call)
cat("\nPosterior inference of regression coefficients\n")
print.default(coef.table)
cat("\nLog pseudo marginal likelihood: LPML=", object$LPML, sep="")
cat("\nNegative log-likelihood: NLLK=", -object$LPML, sep="")
cat("\nNumber of subjects: n=", object$n, "\n", sep="")
} |
NULL
grade_feedback <- function(...) {
deprecate_warn("0.2.0", "grade_feedback()")
ret <- feedback(...)
class(ret) <- "grader_feedback"
ret
}
grade_conditions <- function(
...,
correct = NULL,
incorrect = NULL,
grader_args = deprecated(),
learnr_args = deprecated(),
glue_correct = getOption("gradethis.glue_correct_test"),
glue_incorrect = getOption("gradethis.glue_incorrect_test")
) {
deprecate_warn("0.1.0", "grade_result_strict()")
grade_result_strict(
...,
correct = correct,
incorrect = incorrect,
grader_args = grader_args,
learnr_args = learnr_args,
glue_correct = glue_correct,
glue_incorrect = glue_incorrect
)
}
random_encourage <- function() {
lifecycle::deprecate_soft("0.2.1", "random_encourage()", "random_encouragement()")
random_encouragement()
}
grade_learnr <- function(...) {
lifecycle::deprecate_soft("0.2.2", "grade_learnr()", "gradethis_exercise_checker()")
gradethis_exercise_checker(...)
} |
context("Four Parameter Beta Distribution")
test_that('The density functions provide correct answers', {
x <- c(-3, 2, 0, 4, -1)
s1 <- 2.0; s2 <- 2.0; a <- -2.5; b <- 2.5;
expect_equal(test_d4beta(x, s1, s2, a, b),
list(
"VectorLog" = c(-Inf, -2.225624051858, -1.203972804326,
-Inf, -1.378326191471),
"VectorNoLog" = c(0, 0.108, 0.3, 0, 0.252),
"DoubleLog" = -Inf,
"DoubleNoLog" = 0
)
)
x <- x[-1]
expect_equal(test_d4beta(x, s1, s2, a, b),
list(
"VectorLog" = c(-2.225624051858, -1.203972804326,
-Inf, -1.378326191471),
"VectorNoLog" = c(0.108, 0.3, 0, 0.252),
"DoubleLog" = -2.225624051858,
"DoubleNoLog" = 0.108
)
)
})
test_that('The distribution functions provide correct answers', {
x <- c(-3, 2, 0, 4, -1)
s1 <- 2.0; s2 <- 2.0; a <- -2.5; b <- 2.5;
expect_equal(test_p4beta(x, s1, s2, a, b),
list(
"VectorLog" = c(-Inf, -0.028399474522, -0.693147180560, 0,
-1.532476871298),
"VectorNoLog" = c(0, 0.972, 0.5, 1, 0.216),
"DoubleLog" = -Inf,
"DoubleNoLog" = 0,
"VectorLogNoLower" = c(0, -3.575550768807, -0.693147180560,
-Inf, -0.243346258632),
"VectorNoLogNoLower" = c(1, 0.028, 0.5, 0, 0.784),
"DoubleLogNoLower" = 0,
"DoubleNoLogNoLower" = 1
)
)
x <- x[-1]
expect_equal(test_p4beta(x, s1, s2, a, b),
list(
"VectorLog" = c(-0.028399474522, -0.693147180560, 0,
-1.532476871298),
"VectorNoLog" = c(0.972, 0.5, 1, 0.216),
"DoubleLog" = -0.028399474522,
"DoubleNoLog" = 0.972,
"VectorLogNoLower" = c(-3.575550768807, -0.693147180560,
-Inf, -0.243346258632),
"VectorNoLogNoLower" = c(0.028, 0.5, 0, 0.784),
"DoubleLogNoLower" = -3.575550768807,
"DoubleNoLogNoLower" = 0.028
)
)
})
test_that('The quantile functions provide correct answers', {
x <- c(0, 0.5, 1)
s1 <- 2.0; s2 <- 2.0; a <- -2.5; b <- 2.5;
expect_equal(test_q4beta_nolog(x, s1, s2, a, b),
list(
"VectorNoLog" = c(-2.5, 0, 2.5),
"DoubleNoLog" = -2.5,
"VectorNoLogNoLower" = c(2.5, 0, -2.5),
"DoubleNoLogNoLower" = 2.5
)
)
x <- c(-1, -2, -10)
expect_equal(test_q4beta_log(x, s1, s2, a, b),
list(
"VectorLog" = c(-0.445104983167, -1.345445414451,
-2.480523918142),
"DoubleLog" = -0.445104983167,
"VectorLogNoLower" = c(0.445104983167, 1.345445414451,
2.480523918142),
"DoubleLogNoLower" = 0.445104983167
)
)
}) |
plot.visreg <- function(
x, overlay=FALSE, print.cond=FALSE, whitespace=0.2, partial=identical(x$meta$trans, I),
band=TRUE, rug=ifelse(partial, 0, 2), strip.names=is.numeric(x$fit[, x$meta$by]),
legend=TRUE, top=c('line', 'points'), gg=FALSE, line.par=NULL, fill.par=NULL,
points.par=NULL, ...) {
top <- match.arg(top)
warn <- FALSE
if (missing(print.cond)) {
if (!("by" %in% names(x$meta)) & x$meta$hasInteraction) print.cond <- warn <- TRUE
}
if (print.cond) printCond(x, warn)
if (all(is.na(x$res$visregRes))) {
partial <- FALSE
rug <- FALSE
warning(paste0("The generic function residuals() is not set up for this type of model object. To plot partial residuals, you will need to define your own residuals.", x$meta$class[1], "() function."))
}
if (gg) {
if (!requireNamespace("ggplot2")) stop("You must first install the ggplot2 package: install.packages('ggplot2')", call.=FALSE)
if (is.factor(x$fit[, x$meta$x])) {
p <- ggFactorPlot(x, partial, band, rug, whitespace, strip.names, overlay, top, line.par, fill.par, points.par, ...)
} else {
p <- ggContPlot(x, partial, band, rug, whitespace, strip.names, overlay, top, line.par, fill.par, points.par, ...)
}
return(p)
} else {
if ("by" %in% names(x$meta)) {
if (overlay) {
visregOverlayPlot(x, strip.names=strip.names, legend=legend, whitespace=whitespace, partial=partial, band=band, rug=rug, line.par=line.par, fill.par=fill.par, points.par=points.par, ...)
} else {
p <- visregLatticePlot(x, strip.names=strip.names, whitespace=whitespace, partial=partial, band=band, rug=rug, top=top, line.par=line.par, fill.par=fill.par, points.par=points.par, ...)
return(invisible(p))
}
} else {
visregPlot(x, whitespace=whitespace, partial=partial, band=band, rug=rug, top=top, line.par=line.par, fill.par=fill.par, points.par=points.par, ...)
}
}
} |
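## Hedged usage sketch: plot.visreg() is normally reached by calling plot() on
## the object returned by visreg::visreg(); assumes the visreg (and, for the
## gg branch, ggplot2) packages are installed.
if (FALSE) {
  fit <- lm(Ozone ~ Solar.R + Wind + Temp, data = airquality)
  v <- visreg::visreg(fit, "Wind", plot = FALSE)
  plot(v)             # lattice/base plot
  plot(v, gg = TRUE)  # ggplot2 version
}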
BayesSurv_AFT <- function(Formula,
data,
model = "LN",
hyperParams,
startValues,
mcmcParams,
na.action = "na.fail",
subset=NULL,
path = NULL)
{
mcmcList <- mcmcParams
if((mcmcList$run$numReps / mcmcList$run$thin * mcmcList$run$burninPerc) %% 1 == 0)
{
nChain <- length(startValues)
hz.type <- model[1]
if(na.action != "na.fail" & na.action != "na.omit")
{
stop("na.action should be either na.fail or na.omit")
}
form2 <- as.Formula(paste(Formula[2], Formula[1], Formula[3], sep = ""))
if(hz.type == "DPM")
{
for(i in 1:nChain)
{
nam1 <- paste("DPM.classch", i, sep = "")
data[[nam1]] <- startValues[[i]]$DPM$DPM.class
nam2 <- paste("DPM.much", i, sep = "")
data[[nam2]] <- startValues[[i]]$DPM$DPM.mu
nam3 <- paste("DPM.zetach", i, sep = "")
data[[nam3]] <- startValues[[i]]$DPM$DPM.zeta
form2 <- as.Formula(paste(form2[2], form2[1], form2[3], "| ", nam1, "| ", nam2, "| ", nam3, sep = ""))
}
}
for(i in 1:nChain)
{
nam1 <- paste("ych", i, sep = "")
data[[nam1]] <- startValues[[i]]$common$y
form2 <- as.Formula(paste(form2[2], form2[1], form2[3], "| ", nam1, sep = ""))
}
data <- model.frame(form2, data=data, na.action = na.action, subset = subset)
if(hz.type == "DPM")
{
for(i in 1:nChain)
{
nam1 <- paste("DPM.classch", i, sep = "")
startValues[[i]]$DPM$DPM.class <- data[[nam1]]
nam2 <- paste("DPM.much", i, sep = "")
startValues[[i]]$DPM$DPM.mu <- data[[nam2]]
nam3 <- paste("DPM.zetach", i, sep = "")
startValues[[i]]$DPM$DPM.zeta <- data[[nam3]]
}
}
for(i in 1:nChain)
{
nam1 <- paste("ych", i, sep = "")
startValues[[i]]$common$y <- data[[nam1]]
}
LT <- model.part(Formula, data=data, lhs=1)
y.mat <- model.part(Formula, data=data, lhs=2)
Y <- cbind(y.mat, LT)
Xmat <- model.frame(formula(Formula, lhs=0, rhs=1), data=data)
p <- ncol(Xmat)
if(p == 0){
survData <- Y
}
if(p > 0){
survData <- cbind(Y, Xmat)
}
n <- dim(survData)[1]
Y[,1] <- log(Y[,1])
Y[,2] <- log(Y[,2])
Y[,3] <- log(Y[,3])
yLInf <- rep(0, n)
for(i in 1:n) if(Y[i,1] == -Inf)
{
Y[i,1] <- -9.9e10
yLInf[i] <- 1
}
yUInf <- rep(0, n)
for(i in 1:n) if(Y[i,2] == Inf)
{
Y[i,2] <- 9.9e10
yUInf[i] <- 1
}
c0Inf <- rep(0, n)
for(i in 1:n) if(Y[i,3] == -Inf)
{
Y[i,3] <- -9.9e10
c0Inf[i] <- 1
}
if(!is.null(path)){
dir.create(paste(path), recursive = TRUE, showWarnings = FALSE)
}
if(hz.type == "DPM")
{
hyperP <- as.vector(c(hyperParams$DPM$DPM.ab, hyperParams$DPM$Tau.ab, hyperParams$DPM$DPM.mu, hyperParams$DPM$DPM.sigSq))
}
if(hz.type == "LN")
{
hyperP <- as.vector(c(hyperParams$LN$LN.ab))
}
mcmcP <- as.vector(c(mcmcParams$tuning$beta.prop.var, mcmcParams$tuning$mu.prop.var, mcmcParams$tuning$zeta.prop.var))
chain = 1
ret <- list()
while(chain <= nChain){
cat("chain: ", chain, "\n")
nam = paste("chain", chain, sep="")
temp <- startValues[[chain]]
if(hz.type == "DPM")
{
startV <- as.vector(c(y=temp$common$y, beta=temp$common$beta, r=temp$DPM$DPM.class, tau=temp$DPM$DPM.tau, mu=temp$DPM$DPM.mu, zeta=temp$DPM$DPM.zeta))
}
if(hz.type == "LN")
{
startV <- as.vector(c(y=temp$common$y, beta=temp$common$beta, mu=temp$LN$LN.mu, sigSq=temp$LN$LN.sigSq))
}
if(hz.type == "LN"){
numReps <- mcmcParams$run$numReps
thin <- mcmcParams$run$thin
burninPerc <- mcmcParams$run$burninPerc
nStore <- round(numReps/thin*(1-burninPerc))
mcmcRet <- .C("BAFTunimcmc",
Ymat = as.double(as.matrix(Y)),
yUInf = as.double(yUInf),
c0Inf = as.double(c0Inf),
Xmat = as.double(as.matrix(Xmat)),
n = as.integer(n),
p = as.integer(p),
hyperP = as.double(hyperP),
mcmcP = as.double(mcmcP),
startValues = as.double(startV),
numReps = as.integer(numReps),
thin = as.integer(thin),
burninPerc = as.double(burninPerc),
samples_y = as.double(rep(0, nStore*n)),
samples_beta = as.double(rep(0, nStore*p)),
samples_beta0 = as.double(rep(0, nStore*1)),
samples_sigSq = as.double(rep(0, nStore*1)),
samples_misc = as.double(rep(0, p+1+1)))
y.p <- matrix(as.vector(mcmcRet$samples_y), nrow=nStore, byrow=T)
if(p >0)
{
beta.p <- matrix(as.vector(mcmcRet$samples_beta), nrow=nStore, byrow=T)
}else
{
beta.p <- NULL
}
mu.p <- matrix(as.vector(mcmcRet$samples_beta0), nrow=nStore, byrow=T)
sigSq.p <- matrix(as.vector(mcmcRet$samples_sigSq), nrow=nStore, byrow=T)
if(p >0)
{
accept.beta <- as.vector(mcmcRet$samples_misc[1:p])
}else
{
accept.beta <- NULL
}
accept.mu <- as.vector(mcmcRet$samples_misc[p+1])
accept.sigSq <- as.vector(mcmcRet$samples_misc[p+2])
if(p > 0){
covNames = colnames(Xmat)
}
if(p == 0){
covNames = NULL
}
ret[[nam]] <- list(y.p = y.p, beta.p = beta.p, mu.p=mu.p, sigSq.p = sigSq.p, accept.beta = accept.beta, accept.mu = accept.mu, accept.sigSq = accept.sigSq, covNames = covNames, model = hz.type)
}
if(hz.type == "DPM"){
numReps <- mcmcParams$run$numReps
thin <- mcmcParams$run$thin
burninPerc <- mcmcParams$run$burninPerc
nStore <- round(numReps/thin*(1-burninPerc))
mcmcRet <- .C("BAFT_DPunimcmc",
Ymat = as.double(as.matrix(Y)),
yLInf = as.double(yLInf),
yUInf = as.double(yUInf),
c0Inf = as.double(c0Inf),
Xmat = as.double(as.matrix(Xmat)),
n = as.integer(n),
p = as.integer(p),
hyperP = as.double(hyperP),
mcmcP = as.double(mcmcP),
startValues = as.double(startV),
numReps = as.integer(numReps),
thin = as.integer(thin),
burninPerc = as.double(burninPerc),
samples_y = as.double(rep(0, nStore*n)),
samples_beta = as.double(rep(0, nStore*p)),
samples_r = as.double(rep(0, nStore*n)),
samples_mu = as.double(rep(0, nStore*n)),
samples_sigSq = as.double(rep(0, nStore*n)),
samples_tau = as.double(rep(0, nStore*1)),
samples_misc = as.double(rep(0, p+2)))
y.p <- matrix(as.vector(mcmcRet$samples_y), nrow=nStore, byrow=T)
if(p >0)
{
beta.p <- matrix(as.vector(mcmcRet$samples_beta), nrow=nStore, byrow=T)
}else
{
beta.p <- NULL
}
r.p <- matrix(mcmcRet$samples_r, nrow = nStore, byrow = T)
mu.p <- matrix(mcmcRet$samples_mu, nrow = nStore, byrow = T)
sigSq.p <- matrix(as.vector(mcmcRet$samples_sigSq), nrow=nStore, byrow=T)
tau.p <- matrix(mcmcRet$samples_tau, nrow = nStore, byrow = T)
if(p >0)
{
accept.beta <- as.vector(mcmcRet$samples_misc[1:p])
}else
{
accept.beta <- NULL
}
accept.mu <- as.vector(mcmcRet$samples_misc[p+1])
accept.sigSq <- as.vector(mcmcRet$samples_misc[p+2])
if(p > 0){
covNames = colnames(Xmat)
}
if(p == 0){
covNames = NULL
}
ret[[nam]] <- list(y.p = y.p, beta.p = beta.p, r.p = r.p, mu.p = mu.p, sigSq.p = sigSq.p, tau.p = tau.p, accept.beta = accept.beta, accept.mu = accept.mu, accept.sigSq=accept.sigSq, covNames = covNames, model = hz.type)
}
chain = chain + 1
}
ret[["setup"]] <- list(hyperParams = hyperParams, startValues = startValues, mcmcParams = mcmcParams, numReps = numReps, thin = thin, path = path, burninPerc = burninPerc, model = hz.type, nChain = nChain)
if(hz.type == "LN")
{
ret$class <- c("Bayes_AFT", "Surv", "Ind", "LN")
}
if(hz.type == "DPM")
{
ret$class <- c("Bayes_AFT", "Surv", "Ind", "DPM")
}
class(ret) <- "Bayes_AFT"
return(ret)
}
else{
warning(" (numReps * burninPerc) must be divisible by (thin)")
}
} |
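## Heavily hedged usage sketch for BayesSurv_AFT() with the log-normal ("LN")
## model. The list layouts are inferred from how the function body indexes
## hyperParams, startValues and mcmcParams; 'dat' is a hypothetical data frame
## with a left-truncation time LT, interval bounds y.L / y.U and covariates.
if (FALSE) {
  hyper <- list(LN = list(LN.ab = c(0.3, 0.3)))
  start <- list(list(common = list(y = log(dat$y.U), beta = c(0, 0)),
                     LN = list(LN.mu = 0, LN.sigSq = 1)))
  mcmc <- list(run = list(numReps = 1000, thin = 10, burninPerc = 0.5),
               tuning = list(beta.prop.var = 0.01, mu.prop.var = 0.1,
                             zeta.prop.var = 0.1))
  fit <- BayesSurv_AFT(Formula(LT | y.L + y.U ~ x1 + x2), data = dat,
                       model = "LN", hyperParams = hyper,
                       startValues = start, mcmcParams = mcmc)
}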
sc_object <- function(x, ...) UseMethod("sc_object")
sc_object.default <- function(x, ...) {
as_tibble(x[["object"]])
}
.st_set_geometry <- function(x, value = NULL) {
x[[attr(x, "sf_column")]] <- NULL
as.data.frame(x)
}
.st_get_geometry <- function(x) {
x[[attr(x, "sf_column")]]
}
sc_object.sf <- function(x, ...) {
tibble::as_tibble(.st_set_geometry(x))
}
sc_object.sfc <- function(x, ...) {
tibble(object_ = sc_uid(length(x)))
}
sc_object.Spatial <- function(x, ...) {
if (!.hasSlot(x, "data")) {
out <- setNames(list(seq_along(x)), class(x))
} else {
out <- methods::slot(x, "data")
}
tibble::as_tibble(out)
}
sc_object.trip <- function(x, ...) {
tor <- slot(x, "TOR.columns")
tibble::tibble(trip = unique(x[[tor[2L]]]))
} |
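## Usage sketch: sc_object() extracts the per-feature attribute table from
## supported spatial classes; the example assumes the sf package and its
## bundled North Carolina shapefile.
if (FALSE) {
  nc <- sf::st_read(system.file("shape/nc.shp", package = "sf"))
  sc_object(nc)   # attribute columns as a tibble, geometry dropped
}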
causeSumNoP=
function(mtx, nam = colnames(mtx), blksiz=10,
ctrl = 0, dig = 6, wt = c(1.2, 1.1, 1.05, 1), sumwt = 4)
{
p = NCOL(mtx)
if (p < 2)
stop("stop:too few columns in input mtx to siPairsBlk")
pv=rep(NA,p-1)
pearson=rep(NA,p-1)
for ( i in 2:p){
x=mtx[,1]
y=mtx[,i]
ok=complete.cases(x,y)
c1 = cor.test(x[ok], y[ok])
pv[i] = c1$p.value
pearson[i] = c1$estimate
}
si0 = siPairsBlk(mtx, ctrl = ctrl, dig = dig, wt = wt,
blksiz=blksiz, sumwt = sumwt)
si = round(100 * as.numeric(si0)/3.175, 3)
out = matrix(NA, nrow = (p - 1), ncol = 5)
for (i in 2:p) {
if (si[i - 1] < 0) {
out[i - 1, 1] = nam[i]
out[i - 1, 2] = nam[1]
out[i - 1, 3] = abs(si[i - 1])
}
if (si[i - 1] > 0) {
out[i - 1, 1] = nam[1]
out[i - 1, 2] = nam[i]
out[i - 1, 3] = abs(si[i - 1])
}
out[i - 1, 4] = round(pearson[i], 4)
out[i - 1, 5] = round(pv[i], 5)
}
colnames(out) = c("cause","response","strength","corr.","p-value")
return(out)
} |
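## Hedged usage sketch: the first column of 'mtx' is the response and every
## other column a candidate cause; siPairsBlk() is assumed to be available from
## the same package. Data below are synthetic.
if (FALSE) {
  set.seed(1)
  x <- rnorm(100)
  y <- 2 * x + rnorm(100)
  causeSumNoP(cbind(y, x))
}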
anova1f_4c<-function(m1=NULL,m2=NULL,m3=NULL,m4=NULL, s1=NULL,s2=NULL,s3=NULL,s4=NULL,
n1=NULL,n2=NULL,n3=NULL,n4=NULL, alpha=.05, c1 =0, c2=0, c3=0, c4=0){
x<-stats::rnorm(n1,m1,s1)
X<-x
MEAN<-m1
SD<-s1
Z <- (((X - mean(X, na.rm = TRUE))/stats::sd(X, na.rm = TRUE))) * SD
y<-MEAN + Z
group<-rep("A1",n1)
l1<-data.frame(y, group)
x<-stats::rnorm(n2,m2,s2)
X<-x
MEAN<-m2
SD<-s2
Z <- (((X - mean(X, na.rm = TRUE))/stats::sd(X, na.rm = TRUE))) * SD
y<-MEAN + Z
group<-rep("A2",n2)
l2<-data.frame(y, group)
x<-stats::rnorm(n3,m3,s3)
X<-x
MEAN<-m3
SD<-s3
Z <- (((X - mean(X, na.rm = TRUE))/stats::sd(X, na.rm = TRUE))) * SD
y<-MEAN + Z
group<-rep("A3",n3)
l3<-data.frame(y, group)
x<-stats::rnorm(n4,m4,s4)
X<-x
MEAN<-m4
SD<-s4
Z <- (((X - mean(X, na.rm = TRUE))/stats::sd(X, na.rm = TRUE))) * SD
y<-MEAN + Z
group<-rep("A4",n4)
l4<-data.frame(y, group)
simdat<-rbind(l1,l2,l3,l4)
anova<-stats::aov(y~group, data=simdat)
anova<-car::Anova(anova, type="III")
SSA<-anova[2,1]
SSwin<-anova[3,1]
dfwin<-anova[3,2]
mswin<-SSwin/dfwin
dfbg<-anova[2,2]
eta2<-SSA/(SSA+SSwin)
f2<-eta2/(1-eta2)
lambda<-f2*dfwin
minusalpha<-1-alpha
Ft<-stats::qf(minusalpha, dfbg, dfwin)
power<-1-stats::pf(Ft, dfbg,dfwin,lambda)
delta=((c1*m1)+(c2*m2)+(c3*m3)+(c4*m4))/((mswin*((c1^2/n1)+(c2^2/n2)+(c3^2/n3)+(c4^2/n4))))^.5
lambda.c=delta^2
Ft.c<-stats::qf(minusalpha, 1, dfwin)
power.contrast<-round(1-stats::pf(Ft.c, 1,dfwin,lambda.c),3)
message("Power for contrast = ", power.contrast)
result <- data.frame(matrix(ncol = 5))
colnames(result) <- c("n1","n2", "n3", "n4","Power")
result[, 1]<-n1
result[, 2]<-n2
result[, 3]<-n3
result[, 4]<-n4
result[, 5]<-power.contrast
output<-na.omit(result)
rownames(output)<- c()
invisible(output)
} |
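## Usage sketch: power for a single contrast (groups 1 + 4 vs. 2 + 3) in a
## one-way, four-group design; means/SDs/ns are illustrative. Requires the
## car package for the Type III ANOVA used above.
if (FALSE) {
  anova1f_4c(m1 = 80, m2 = 82, m3 = 82, m4 = 86,
             s1 = 10, s2 = 10, s3 = 10, s4 = 10,
             n1 = 60, n2 = 60, n3 = 60, n4 = 60,
             c1 = 1, c2 = -1, c3 = -1, c4 = 1)
}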
expected <- eval(parse(text="FALSE"));
test(id=0, code={
argv <- eval(parse(text="list(structure(list(`character(0)` = structure(integer(0), .Label = character(0), class = \"factor\")), .Names = \"character(0)\", row.names = character(0), class = \"data.frame\"))"));
do.call(`is.double`, argv);
}, o=expected); |
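# eventMaxima: separates a series (e.g. a hydrograph) into events by scanning
# successive local minima and accepting an event when the intervening peak
# rises far enough (delta.y) above both bounding minima and successive peaks
# are at least delta.x apart; events can optionally be trimmed to a threshold.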
eventMaxima <- function(data, delta.y = 200, delta.x = 1, threshold = -1, out.style = "summary") {
n.data = length(data)
minima = localMin(data)
current.stats = calcStats(head(minima, -1), tail(minima, -1), data, f.vec = c("which.max", "max"))
maxima.x = c(current.stats$which.max, n.data)
maxima.y = c(current.stats$max, data[n.data])
srt.index = c()
end.index = c()
n.minima = length(minima)
i = 1; j = 2
while (j <= n.minima) {
current.x.1 = minima[i]
current.x.2 = minima[j]
current.y.1 = data[current.x.1]
current.y.2 = data[current.x.2]
current.max = max(maxima.y[i:(j-1)])
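    # test.1: the largest peak between the two candidate minima must exceed the
    # higher of the two minima by delta.y (an absolute drop if delta.y > 0,
    # otherwise a fraction |delta.y| of the peak);
    # test.2: consecutive peaks must be more than delta.x apart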
if (delta.y > 0) {
test.1 = (current.max - max(current.y.1, current.y.2)) > delta.y
} else {
test.1 = (current.max - max(current.y.1, current.y.2)) > -current.max*delta.y
}
test.2 = ((maxima.x[j]-maxima.x[j-1]) > delta.x)
if (test.1 & test.2) {
srt.index = c(srt.index, current.x.1)
end.index = c(end.index, current.x.2)
i = j
j = i + 1
} else {
j = j + 1
}
}
if (threshold > 0) {
check.max = calcStats(srt.index, end.index, data, f.vec = c("max"))
srt.index = srt.index[check.max >= threshold]
end.index = end.index[check.max >= threshold]
}
n.events = length(srt.index)
if (n.events == 0) {
return(NULL)
} else {
if (threshold >= 0) {
for (i in 1:n.events) {
event.data = data[srt.index[i]:end.index[i]]
runs = rle(event.data <= threshold)
n.runs = length(runs$values)
if (runs$values[1]) {
srt.index[i] = srt.index[i] + runs$lengths[1]
}
if (runs$values[n.runs]) {
end.index[i] = end.index[i] - runs$lengths[n.runs]
}
}
}
if (out.style == "summary") {
event.stats = calcStats(srt.index, end.index, data, f.vec = c("which.max", "max", "sum"))
return(data.frame(srt = srt.index, end = end.index, event.stats))
} else {
return(data.frame(srt = srt.index, end = end.index))
}
}
} |
setMethod("initialize", "track",
function(.Object, x, y, ...) {
if(missing(y)) {
y <- x; x <- seq(along=y)
}
callNextMethod(.Object, x = x, y = y, ...)
}) |
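# enqueueJobs / enqueueDepends: build a job queue of reverse dependencies of
# `package` (and, separately, of packages still needed to run them), storing
# the queue and run metadata in a database file obtained via getQueueFile().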
enqueueJobs <- function(package, directory, dbfile="", addfailed=FALSE) {
if (!is.null(cfg <- getConfig())) {
if ("setup" %in% names(cfg)) source(cfg$setup)
if ("libdir" %in% names(cfg)) {
.libPaths(cfg$libdir)
Sys.setenv("R_LIBS_USER"=cfg$libdir)
if (!dir.exists(cfg$libdir)) {
dir.create(cfg$libdir)
}
}
}
runEnqueueSanityChecks()
if (dbfile != "") {
if (file.exists(dbfile)) {
db <- dbfile
} else {
stop("No file ", dbfile, " found\n", call. = FALSE)
}
con <- getDatabaseConnection(db)
res <- setDT(dbGetQuery(con, "select * from results"))
dbDisconnect(con)
}
AP <- available.packages(filters=list())
pkgset <- dependsOnPkgs(package, recursive=FALSE, installed=AP)
if (length(pkgset) == 0) stop("No dependencies for ", package, call. = FALSE)
AP <- setDT(as.data.frame(AP))
if (dbfile == "") {
pkgset <- data.table(Package=pkgset)
} else {
newpkgs <- setdiff(pkgset, res$package)
if (addfailed) {
failed <- res[ result == 1, .(package)]
pkgset <- data.table(Package=unique(sort(c(failed$package, newpkgs))))
} else {
pkgset <- data.table(Package=newpkgs)
}
}
work <- AP[pkgset, on="Package"][,1:2]
db <- getQueueFile(package=package, path=directory)
q <- ensure_queue("jobs", db = db)
con <- getDatabaseConnection(db)
createRunDataTable(con)
dat <- data.frame(package=package,
version=format(packageVersion(package)),
date=format(Sys.Date()))
dbWriteTable(con, "metadata", dat, append=TRUE)
dbDisconnect(con)
n <- nrow(work)
for (i in 1:n) {
ttl <- paste0(work[i,Package])
msg <- paste(work[i,Package], work[i, Version], sep="_")
publish(q, title = ttl, message = msg)
}
list_messages(q)
}
enqueueDepends <- function(package, directory) {
if (!is.null(cfg <- getConfig())) {
if ("setup" %in% names(cfg)) source(cfg$setup)
if ("libdir" %in% names(cfg)) {
.libPaths(cfg$libdir)
Sys.setenv("R_LIBS_USER"=cfg$libdir)
if (!dir.exists(cfg$libdir)) {
dir.create(cfg$libdir)
}
}
}
AP <- available.packages(filters=list())
pkgset <- dependsOnPkgs(package, recursive=FALSE, installed=AP)
AP <- setDT(as.data.frame(AP))
pkgset <- setDT(data.frame(Package=pkgset))
work <- AP[pkgset, on="Package"][,1:2]
deplst <- package_dependencies(as.character(work[[1]]), db=as.matrix(AP), recursive=TRUE)
deppkg <- unique(sort(do.call(c, deplst)))
IP <- installed.packages()
needed <- setdiff(deppkg, IP[, "Package"])
db <- getQueueFile(package=package, path=directory)
q <- ensure_queue("depends", db = db)
n <- length(needed)
for (i in 1:n) {
ttl <- paste0(needed[i])
msg <- paste(needed[i])
publish(q, title = ttl, message = msg)
}
list_messages(q)
}
globalVariables(c("Package", "Version")) |
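# statcan: Statistics Canada name-coding algorithm -- strip accents, keep the
# first letter, drop the remaining vowels, collapse doubled letters and
# truncate the code to maxCodeLen characters.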
statcan <- function(word, maxCodeLen = 4, clean = TRUE) {
word <- gsub("\u00C0|\u00C2", "A", word, perl = TRUE)
word <- gsub("\u00C8|\u00C9|\u00CA|\u00CB", "E", word, perl = TRUE)
word <- gsub("\u00CE|\u00CF", "I", word, perl = TRUE)
word <- gsub("\u00D4", "O", word, perl = TRUE)
word <- gsub("\u00D9|\u00DB|\u00DC", "U", word, perl = TRUE)
word <- gsub("\u0178", "Y", word, perl = TRUE)
word <- gsub("\u00C7", "C", word, perl = TRUE)
word <- toupper(word)
word[is.null(word)] <- NA
listNAs <- is.na(word)
if(any(nonalpha <- grepl("[^A-Z]", word, perl = TRUE)) && clean)
warning("unknown characters found, results may not be consistent")
word <- gsub("[^A-Z]*", "", word, perl = TRUE)
first <- substr(word, 1, 1)
word <- substr(word, 2, nchar(word))
word <- gsub("A|E|I|O|U|Y", "", word, perl = TRUE)
word <- paste(first, word, sep = "")
word <- gsub("([A-Z])\\1+", "\\1", word, perl = TRUE)
word <- substr(word, 1, maxCodeLen)
word[listNAs] <- NA
if(clean)
word[nonalpha] <- NA
return(word)
} |
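# summ.rq and helpers: a summ() method for quantreg "rq" fits, collecting
# coefficients, the chosen standard errors, confidence intervals and the
# Koenker-Machado R^1 into a "summ.rq" object with print/knit_print methods.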
summ.rq <- function(model, scale = FALSE,
confint = getOption("summ-confint", FALSE),
ci.width = getOption("summ-ci.width", .95),
se = c("nid", "rank", "iid", "ker", "boot"),
boot.sims = 1000, boot.method = "xy",
vifs = getOption("summ-vifs", FALSE),
digits = getOption("jtools-digits", 2), pvals = getOption("summ-pvals", TRUE),
n.sd = 1, center = FALSE, transform.response = FALSE, data = NULL,
model.info = getOption("summ-model.info", TRUE),
model.fit = getOption("summ-model.fit", TRUE), which.cols = NULL, ...) {
j <- list()
dots <- list(...)
deps <- dep_checks(dots)
any_deps <- sapply(deps, is.null)
if (any(!any_deps)) {
for (n in names(any_deps)[which(any_deps == FALSE)]) {
assign(n, deps[[n]])
}
}
if ("robust" %in% names(dots)) {
warn_wrap("The robust argument is not supported for rq objects. Use the
se argument instead.")
}
if (se[1] == "rank") {
confint <- TRUE
pvals <- FALSE
}
the_call <- match.call()
the_call[[1]] <- substitute(summ)
the_env <- parent.frame(n = 2)
alpha <- (1 - ci.width) / 2
missing <- length(model$na.action)
if (scale == TRUE) {
model <- scale_mod(model, n.sd = n.sd,
scale.response = transform.response,
data = data, ... )
} else if (center == TRUE && scale == FALSE) {
model <- center_mod(model, center.response = transform.response,
data = data, ...)
}
if (se[1] == "boot") {
sum <- summary(model, covariance = TRUE, se = se[1],
R = boot.sims, bsmethod = boot.method)
} else {
sum <- summary(model, covariance = TRUE, se = se[1], alpha = alpha)
}
j <- structure(j, standardize = scale, vifs = vifs, robust = FALSE,
digits = digits, model.info = model.info,
model.fit = model.fit,
n.sd = n.sd, center = center, call = the_call,
env = the_env, scale = scale, data = data,
transform.response = transform.response,
boot.sims = boot.sims, boot.method = boot.method)
if (length(attr(model$terms, "order")) != 0) {
df.int <- if (attr(model$terms, "intercept"))
1L else 0L
} else {
df.int <- 1
}
n <- length(model$residuals)
j <- structure(j, n = n)
ivs <- names(coef(model))
coefs <- unname(coef(model))
params <- list("Est." = coefs)
r1 <- R1(model)
aic = AIC(model)
bic = AIC(model, k = log(n))
if (vifs == TRUE) {
tvifs <- rep(NA, length(ivs))
the_vifs <- unname(vif(model, vcov = sum$cov,
mod.matrix = rq_model_matrix(model)))
if (is.matrix(the_vifs)) {the_vifs <- the_vifs[,1]}
tvifs[-1] <- the_vifs
params[["VIF"]] <- tvifs
}
if (se[1] != "rank") {
ses <- coef(sum)[,2]
ts <- coef(sum)[,3]
ps <- coef(sum)[,4]
} else {
ses <- rep(NA, nrow(coef(sum)))
ts <- rep(NA, nrow(coef(sum)))
ps <- rep(NA, nrow(coef(sum)))
}
if (length(coefs) > length(ses)) {
temp_vec <- rep(NA, times = length(coefs))
temp_vec[which(!is.na(coefs))] <- ses
ses <- temp_vec
}
params[c("S.E.", "t val.", "p")] <- list(ses, ts, ps)
if (se[1] != "rank") {
tcrit <- abs(qt(alpha, df = sum$rdf))
labs <- make_ci_labs(ci.width)
lci <- coefs - (ses * tcrit)
uci <- coefs + (ses * tcrit)
cis <- list(lci, uci)
names(cis) <- labs
} else {
cis <- list(coef(sum)[, "lower bd"], coef(sum)[, "upper bd"])
names(cis) <- make_ci_labs(ci.width)
}
params[names(cis)] <- cis
which.cols <- which_columns(which.cols = which.cols, confint = confint,
ci.labs = make_ci_labs(ci.width), vifs = vifs,
pvals = pvals, t.col = "t val.")
if (se[1] == "rank") {which.cols <- which.cols[which.cols %nin% "t val."]}
mat <- create_table(params = params, which.cols = which.cols, ivs = ivs)
j <- structure(j, r1 = r1, dv = names(model$model[1]), lmClass = class(model),
missing = missing, use_cluster = FALSE,
confint = confint, ci.width = ci.width, pvals = pvals,
test.stat = "t val.",
standardize.response = transform.response,
scale.response = transform.response,
transform.response = transform.response,
exp = FALSE, se = se[1], aic = aic, bic = bic)
j$coeftable <- mat
j$model <- model
class(j) <- c("summ.rq", "summ")
return(j)
}
print.summ.rq <- function(x, ...) {
j <- x
x <- attributes(j)
ctable <- j$coeftable
if (x$model.info == TRUE) {
method <-
switch(j$model$method,
"br" = "Barrodale-Roberts",
"fn" = "Frisch-Newton",
"pfn" = "Frisch-Newton (after pre-processing)",
"sfn" = "Frisch-Newton (sparse algebra)",
"lasso" = "lasso",
"scad" = "Fan-Li SCAD",
"fnc" = "Frisch-Newton (user-specified equality constraints)")
type <- paste0("Quantile regression",
"\n ", italic("Quantile (tau): "), j$model$tau, "\n ",
italic("Method: "), method)
print_mod_info(missing = x$missing, n = x$n, dv = x$dv, type = type)
}
if (x$model.fit == TRUE) {
stats <- paste(italic("R\u00B9"), paste0("(", j$model$tau, ")"), " = ",
num_print(x$r1, digits = x$digits), sep = "")
print_mod_fit(stats)
}
se_name <- switch(x$se,
"iid" = "IID",
"nid" = "Sandwich (Huber)",
"ker" = "Sandwich (kernel)",
"boot" = "bootstrap",
"rank" = "Koenker rank test")
print_se_info(x$robust, x$use_cluster, manual = se_name)
print(md_table(ctable, format = getOption("summ.table.format", "multiline"),
sig.digits = FALSE, digits = x$digits))
ss <- scale_statement(x$scale, x$center, x$transform.response, x$n.sd)
if (!is.null(ss)) {cat("\n", ss, "\n", sep = "")}
}
knit_print.summ.rq <- function(x, options = NULL, ...) {
if (!nzchar(system.file(package = "kableExtra")) |
getOption("summ-normal-print", FALSE)) {
return(knitr::normal_print(x))
}
j <- x
x <- attributes(j)
format <- ifelse(knitr::is_latex_output(), yes = "latex", no = "html")
o_opt <- getOption("kableExtra.auto_format", NULL)
options(kableExtra.auto_format = FALSE)
ctable <- round_df_char(df = j$coeftable, digits = x$digits)
if (x$model.info == TRUE) {
method <-
switch(j$model$method,
"br" = "Barrodale-Roberts",
"fn" = "Frisch-Newton",
"pfn" = "Frisch-Newton (after pre-processing)",
"sfn" = "Frisch-Newton (sparse algebra)",
"lasso" = "lasso",
"scad" = "Fan-Li SCAD",
"fnc" = "Frisch-Newton (user-specified equality constraints)")
mod_info <-
mod_info_list(missing = x$missing, n = x$n, dv = x$dv,
type = "Quantile regression")
obs <- mod_info$n
if ("missing" %in% names(mod_info)) {
obs <- paste0(obs, " (", mod_info$missing, " missing obs. deleted)")
}
mod_meta <- data.frame(
datum = c("Observations", "Dependent variable", "Type",
" Quantile (tau)", " Method"
),
value = c(obs, mod_info$dv, mod_info$type, j$model$tau, method)
)
mod_meta %<>% to_kable(format = format, row.names = FALSE, col.names = NULL)
} else {
mod_meta <- NULL
}
if (x$model.fit == T && !is.null(x$modpval)) {
stats <- data.frame(stat = c(paste0("R\u00B9 ", "(", j$model$tau, ")")),
value = c(num_print(x$r1, digits = x$digits))
)
stats %<>% to_kable(format = format, row.names = FALSE, col.names = NULL)
} else {stats <- NULL}
se_name <- switch(x$se,
"iid" = "IID",
"nid" = "Sandwich (Huber)",
"ker" = "Sandwich (kernel)",
"boot" = "bootstrap",
"rank" = "Koenker rank test")
se_info <- get_se_info(x$robust, x$use_cluster, manual = se_name)
ss <- scale_statement(x$scale, x$center, x$transform.response, x$n.sd)
ss <- if (!is.null(ss)) {paste(";", ss)} else {ss}
cap <- paste0("Standard errors: ", se_info, ss)
if (format == "html") {ctable %<>% escape_stars()}
ctable %<>% to_kable(format = format, row.names = TRUE, footnote = cap)
out <- paste(mod_meta, stats, ctable, collapse = "\n\n")
options(kableExtra.auto_format = o_opt)
if (format == "latex") {
return(knitr::asis_output(out, meta = kableExtra_latex_deps))
}
knitr::asis_output(out)
}
R1 <- function(model) {
rho_1 <- model$rho
null_resids <- model.frame(model)[[1]] - quantile(model.frame(model)[[1]],
model$tau)
rho_0 <- sum(null_resids * (model$tau - (null_resids < 0)))
return(1 - (rho_1 / rho_0))
}
rq_model_matrix <- function(object) {
mt <- terms(object)
m <- model.frame(object)
y <- model.response(m)
if (object$method == "sfn")
x <- object$model$x
else x <- model.matrix(mt, m, contrasts = object$contrasts)
return(x)
}
rq.fit.br <- function(x, y, tau = 0.5, alpha = 0.1, ci = FALSE,
                      iid = TRUE, interp = TRUE, tcrit = TRUE, ...) {
  # thin wrapper that forwards to quantreg's fitter of the same name while
  # discarding any extra arguments passed through ...; calling rq.fit.br()
  # unqualified here would recurse into this wrapper forever
  quantreg::rq.fit.br(x, y, tau = tau, alpha = alpha, ci = ci, iid = iid,
                      interp = interp, tcrit = tcrit)
}
glance.summ.rq <- function(x, ...) {
m <- x$model
n <- length(fitted(m))
s <- summary(m, se = attr(x, "se"))
base <- data.frame(tau = m[["tau"]], logLik = logLik(m), AIC = AIC(m),
BIC = AIC(m, k = log(n)),
df.residual = rep(s[["rdf"]], times = length(m[["tau"]])))
base$r.1 <- attr(x, "r1")
return(base)
}
nobs.summ.rq <- function(object, ...) {
return(length(fitted((object$model))))
}
family.rq <- function(object, ...) {
gaussian(link = "identity")
} |
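# step_hai_hyperbolic: a recipes step that adds sin/cos/tan (or the combined
# "sincos") transformed copies of the selected numeric columns, computed at
# bake time via hai_hyperbolic_vec() from healthyR.ai.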
step_hai_hyperbolic <- function(recipe,
...,
role = "predictor",
trained = FALSE,
columns = NULL,
scale_type = c("sin","cos","tan", "sincos"),
skip = FALSE,
id = rand_id("hai_hyperbolic")
){
terms <- recipes::ellipse_check(...)
funcs <- c("sin", "cos", "tan", "sincos")
    if (!all(scale_type %in% funcs))
        rlang::abort("`scale_type` should be one of `sin`, `cos`, `sincos` or `tan`")
recipes::add_step(
recipe,
step_hai_hyperbolic_new(
terms = terms,
role = role,
trained = trained,
columns = columns,
scale_type = scale_type,
skip = skip,
id = id
)
)
}
step_hai_hyperbolic_new <-
function(terms, role, trained, columns, scale_type, skip, id){
recipes::step(
subclass = "hai_hyperbolic",
terms = terms,
role = role,
trained = trained,
columns = columns,
scale_type = scale_type,
skip = skip,
id = id
)
}
prep.step_hai_hyperbolic <- function(x, training, info = NULL, ...) {
col_names <- recipes::recipes_eval_select(x$terms, training, info)
value_data <- info[info$variable %in% col_names, ]
if(any(value_data$type != "numeric")){
rlang::abort(
paste0("All variables for `step_hai_hyperbolic` must be `numeric`",
"`integer` `double` classes.")
)
}
step_hai_hyperbolic_new(
terms = x$terms,
role = x$role,
trained = TRUE,
columns = col_names,
scale_type = x$scale_type,
skip = x$skip,
id = x$id
)
}
bake.step_hai_hyperbolic <- function(object, new_data, ...){
make_call <- function(col, scale_type){
rlang::call2(
"hai_hyperbolic_vec",
.x = rlang::sym(col)
, .scale_type = scale_type
, .ns = "healthyR.ai"
)
}
grid <- expand.grid(
col = object$columns
, scale_type = object$scale_type
, stringsAsFactors = FALSE
)
calls <- purrr::pmap(.l = list(grid$col, grid$scale_type), make_call)
newname <- paste0("hyperbolic_", grid$col, "_", grid$scale_type)
calls <- recipes::check_name(calls, new_data, object, newname, TRUE)
tibble::as_tibble(dplyr::mutate(new_data, !!!calls))
}
print.step_hai_hyperbolic <-
function(x, width = max(20, options()$width - 35), ...) {
cat("Hyperbolic transformation on ", sep = "")
printer(
untr_obj = x$terms,
tr_obj = names(x$columns),
trained = x$trained,
width = width
)
invisible(x)
}
required_pkgs.step_hai_hyperbolic <- function(x, ...) {
c("healthyR.ai")
} |
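# d_bar.boot: note that the pipe makes df[d] the first argument of sum(), so
# both numerator and denominator include the total of df[d] plus the extra
# term; the returned value is exp(numerator / denominator).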
d_bar.boot <- function(df, d=d){
numerator <- df[d] %>% sum(df[3,]*df[2,])
denominator <- df[d] %>% sum(df[3,])
d_bar1 <- exp(numerator/denominator)
return(d_bar1)
} |
standardize_bounds <- function(percent_df, dir, em_ctl_file, om_ctl_file = "",
verbose = FALSE, estimate = NULL, ...) {
if (!file.exists(file.path(dir, em_ctl_file))) {
stop(paste("The em_ctl_file,", em_ctl_file, "does not exist",
"in the directory", dir))
}
if (!"Label" %in% colnames(percent_df)) {
stop(paste("In percent_df, the first column is currently named",
colnames(percent_df)[1], "rename as 'Label'"))
}
ss_version <- get_ss_ver_file(file.path(dir, em_ctl_file))
em_pars <- SS_parlines(ctlfile = file.path(dir, em_ctl_file),
version = ss_version, verbose = verbose)
if(nchar(om_ctl_file)>0){
om_pars <- SS_parlines(ctlfile = file.path(dir, om_ctl_file),
version = ss_version, verbose = verbose)
indices <- sapply(percent_df$Label, function(x) {
rmpuncx <- gsub("[[:punct:]]", "", x)
rmpuncom <- gsub("[[:punct:]]", "", om_pars$Label)
rmpuncem <- gsub("[[:punct:]]", "", em_pars$Label)
findinom <- grep(rmpuncx, rmpuncom, ignore.case = TRUE)
findinem <- grep(rmpuncx, rmpuncem, ignore.case = TRUE)
c(ifelse(is.null(findinom), NA, findinom),
ifelse(is.null(findinem), NA, findinem))
})
tochange <- !is.na(indices[1, ]) | !is.na(indices[2, ])
restr_percent_df <- percent_df[tochange, ]
if (NROW(restr_percent_df) == 0) {
stop(paste("None of the entered parameter labels (,",
paste(percent_df[, 1], collapse = ", "),
") are found in both the EM and OM.", sep = ""))
}
changeem <- cbind(em_pars$Label[indices[2, ]],
om_pars$INIT[indices[1, ]], em_pars$INIT[indices[2, ]])
changeem <- changeem[tochange, ]
changeinits <- changeem[which(changeem[, 2] != changeem[, 3]), ,
drop = FALSE]
if (NROW(changeinits) > 0) {
print.verbose <- SS_changepars(dir = dir, ctlfile = em_ctl_file,
newctlfile = em_ctl_file, strings = changeinits[, 1],
newvals = changeinits[, 2], verbose = verbose, repeat.vals = FALSE)
if (verbose) message(paste(print.verbose, collapse = "\n"))
om_pars<-SS_parlines(ctlfile = file.path(dir,om_ctl_file),
version = ss_version, verbose = verbose)
parsinboth <- which(percent_df$Label %in% om_pars$Label &
percent_df$Label %in% em_pars$Label)
restr_percent_df <- percent_df[parsinboth, ]
if(NROW(restr_percent_df) != 0){
om_indices<-which(om_pars[,"Label"] %in% restr_percent_df[,"Label"])
em_indices<-which(em_pars[,"Label"] %in% restr_percent_df[,"Label"])
whichunequal <- om_pars[om_indices,"INIT"]!= em_pars[em_indices,"INIT"]
if(any(whichunequal)){
inits_to_change <- em_pars[which(whichunequal), "Label"]
SS_changepars(dir=dir, ctlfile=em_ctl_file,newctlfile = em_ctl_file,
strings = inits_to_change,
newvals = om_pars[which(whichunequal),"INIT"],
verbose = verbose)
}
}else{
message("None of the entered parameter labels are found in both the EM and OM.")
}
}
}
indexem <- sapply(percent_df$Label, function(x) {
rmpuncx <- gsub("[[:punct:]]", "", x)
rmpuncem <- gsub("[[:punct:]]", "", em_pars$Label)
findinem <- grep(rmpuncx, rmpuncem, ignore.case = TRUE)
ifelse(is.null(findinem), NA, findinem)
})
if (any(is.na(indexem))) {
stop(paste("Element(s):",
paste(percent_df$Label[which(is.na(indexem))], collapse = ", "),
"do not have valid parameter labels."))
}else{
indices_to_standardize<-matrix(ncol=2,nrow=nrow(percent_df))
indices_to_standardize[, 1] <- seq_len(NROW(percent_df))
indices_to_standardize[, 2] <- indexem
newlos <- percent_df[indices_to_standardize[, 1], "lo"] *
em_pars[indices_to_standardize[, 2], "INIT"]
newhis <- percent_df[indices_to_standardize[, 1], "hi"] *
em_pars[indices_to_standardize[, 2], "INIT"]
newlos[grep("Ln", percent_df$Label, ignore.case = TRUE)] <-
percent_df[grep("Ln", percent_df$Label, ignore.case = TRUE), 2]
newhis[grep("Ln", percent_df$Label, ignore.case = TRUE)] <-
percent_df[grep("Ln", percent_df$Label, ignore.case = TRUE), 3]
newlos[grep("CV", percent_df$Label, ignore.case = TRUE)] <-
percent_df[grep("CV", percent_df$Label, ignore.case = TRUE), 2]
newhis[grep("CV", percent_df$Label, ignore.case = TRUE)] <-
percent_df[grep("CV", percent_df$Label, ignore.case = TRUE), 3]
SS_changepars(dir=dir,ctlfile=em_ctl_file,newctlfile=em_ctl_file,
linenums = em_pars[indexem, "Linenum"],
newlos=newlos,newhis=newhis, verbose = verbose, estimate = estimate, ...)
}
} |
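# eigs_real_sym: front end for RSpectra's symmetric eigen solvers; validates
# A, k and opts, falls back to eigen() when all eigenvalues are requested,
# and dispatches to the regular or real-shift compiled routine via .Call.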
eigs_real_sym <- function(A, n, k, which, sigma, opts, mattype, extra_args = list())
{
if (!is.null(dim(A)))
{
if (nrow(A) != ncol(A) | nrow(A) != n)
stop("'A' must be a square matrix of size n")
}
if (n < 3)
stop("dimension of 'A' must be at least 3")
if (k == n)
{
warning("all eigenvalues are requested, eigen() is used instead")
return(c(eigen(if(extra_args$use_lower) A else t(A),
symmetric = TRUE,
only.values = identical(opts$retvec, FALSE)),
nconv = n, niter = 0))
}
if (mattype == "matrix" & typeof(A) != "double")
{
mode(A) = "double"
}
if (k <= 0 | k >= n)
stop("'k' must satisfy 0 < k < nrow(A)")
if (is.null(sigma))
{
workmode = "regular"
sigma = 0
} else {
workmode = "real_shift"
if(is.complex(sigma)) warning("only real part of sigma is used")
sigma = Re(sigma)
}
spectra.param = list(which = which,
ncv = min(n, max(2 * k + 1, 20)),
tol = 1e-10,
maxitr = 1000,
retvec = TRUE,
user_initvec = FALSE,
sigma = sigma)
eigenv.type = c("LM", "SM", "LA", "SA", "BE")
if (!(spectra.param$which %in% eigenv.type))
{
stop(sprintf("argument 'which' must be one of\n%s",
paste(eigenv.type, collapse = ", ")))
}
spectra.param[names(opts)] = opts
spectra.param$which = EIGS_RULE[spectra.param$which]
spectra.param = c(spectra.param, as.list(extra_args))
if (spectra.param$ncv <= k | spectra.param$ncv > n)
stop("'opts$ncv' must be > k and <= nrow(A)")
if ("initvec" %in% names(spectra.param))
{
if(length(spectra.param$initvec) != n)
stop("'opt$initvec' must have length n")
spectra.param$initvec = as.numeric(spectra.param$initvec)
spectra.param$user_initvec = TRUE
}
fun = switch(workmode,
regular = "eigs_sym",
real_shift = "eigs_shift_sym",
stop("unknown work mode"))
dot_call_args = list(
fun,
A, as.integer(n), as.integer(k), as.list(spectra.param), as.integer(MAT_TYPE[mattype]),
PACKAGE = "RSpectra"
)
do.call(.Call, args = dot_call_args)
} |
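# Correlated-data generators (simstudy): genCorData draws multivariate normal
# data with a user-specified correlation structure; genCorFlex and genCorGen
# use a Gaussian copula (or the Emrich-Piedmonte "ep" method for binary data)
# to induce correlation across other marginal distributions.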
genCorData <- function(n, mu, sigma, corMatrix = NULL, rho, corstr = "ind",
cnames = NULL, idname = "id") {
nvars <- length(mu)
if (!is.null(cnames)) {
nnames <- trimws(unlist(strsplit(cnames, split = ",")))
if (length(nnames) != nvars) {
stop("Invalid number of variable names")
}
}
corMatrix <- .buildCorMat(nvars, corMatrix, corstr, rho)
if (length(sigma) == 1) {
varMatrix <- (sigma^2) * corMatrix
} else if (length(sigma) > 0) {
D <- diag(sigma)
if (length(diag(corMatrix)) != length(sigma)) {
stop("Improper number of standard deviations")
}
varMatrix <- (D %*% corMatrix) %*% D
}
dt <- data.table(mvnfast::rmvn(n = n, mu = mu, sigma = varMatrix))
if (!is.null(cnames)) setnames(dt, nnames)
dtid <- data.table(1:nrow(dt))
setnames(dtid, idname)
dt <- cbind(dtid, dt)
setkeyv(dt, idname)
return(dt[])
}
genCorFlex <- function(n, defs, rho = 0, tau = NULL, corstr = "cs", corMatrix = NULL) {
X <- NULL
Unew <- NULL
param1 <- NULL
param2 <- NULL
id <- NULL
period <- NULL
dist <- NULL
formula <- NULL
variance <- NULL
if (!all(defs[, dist] %in% c("normal", "gamma", "uniform", "binary", "poisson", "negBinomial"))) {
stop("Only implemented for the following distributions: binary, uniform, normal, poisson, gamma, and negative binomial")
}
corDefs <- copy(defs)
nvars <- nrow(corDefs)
nUniform <- corDefs[dist == "uniform", .N]
if (nUniform > 0) {
rangeV <- 2 * (1:nUniform)
rangeF <- rangeV - 1
range <- corDefs[dist == "uniform", unlist(strsplit(as.character(formula), split = ";", fixed = TRUE))]
corDefs[dist == "uniform", formula := range[rangeF]]
corDefs[dist == "uniform", variance := as.numeric(range[rangeV])]
}
chkWarn <- tryCatch(corDefs[, formula := as.numeric(formula)],
warning = function(w) {
"warning"
}
)
if (class(chkWarn)[1] == "character") stop("Non-scalar values in definitions")
sr1 <- corDefs[dist == "gamma", gammaGetShapeRate(formula, variance)[[1]]]
sr2 <- corDefs[dist == "gamma", gammaGetShapeRate(formula, variance)[[2]]]
corDefs[dist == "gamma", `:=`(formula = sr1, variance = sr2)]
sp1 <- corDefs[dist == "negBinomial", negbinomGetSizeProb(formula, variance)[[1]]]
sp2 <- corDefs[dist == "negBinomial", negbinomGetSizeProb(formula, variance)[[2]]]
corDefs[dist == "negBinomial", `:=`(formula = sp1, variance = sp2)]
if (corDefs[is.na(formula), .N] > 0) stop("Non-scalar values in definitions")
if (!is.null(tau)) {
rho <- sin(tau * pi / 2)
}
dx <- .genQuantU(nvars, n, rho, corstr, corMatrix)
dx[, dist := rep(corDefs[, dist], length.out = .N)]
dx[, param1 := rep(corDefs[, formula], length.out = .N)]
dx[, param2 := rep(corDefs[, variance], length.out = .N)]
dFinal <- dx[period == 0, list(id)]
for (i in 1:nvars) {
dTemp <- dx[period == (i - 1)]
type <- corDefs[i, dist]
if (type == "binary") {
V <- dTemp[, stats::qbinom(Unew, 1, param1)]
} else if (type == "poisson") {
V <- dTemp[, stats::qpois(Unew, param1)]
} else if (type == "uniform") {
V <- dTemp[, stats::qunif(Unew, param1, param2)]
} else if (type == "gamma") {
V <- dTemp[, stats::qgamma(Unew, param1, param2)]
} else if (type == "normal") {
V <- dTemp[, stats::qnorm(Unew, param1, sqrt(param2))]
} else if (type == "negBinomial") {
V <- dTemp[, stats::qnbinom(Unew, param1, param2)]
}
dFinal <- cbind(dFinal, V)
setnames(dFinal, "V", corDefs$varname[i])
}
return(dFinal[])
}
genCorGen <- function(n, nvars, params1, params2 = NULL, dist, rho, corstr,
corMatrix = NULL, wide = FALSE, cnames = NULL, method = "copula",
idname = "id") {
param1 <- NULL
seqid <- NULL
X <- NULL
Unew <- NULL
param2 <- NULL
id <- NULL
period <- NULL
if (!(dist %in% c("poisson", "binary", "gamma", "uniform", "negBinomial", "normal"))) {
stop("Distribution not properly specified.")
}
  if (!is.numeric(params1)) stop("Parameters must be numeric")
  if (!is.null(params2)) {
    if (!is.numeric(params2)) stop("Parameters must be numeric")
  }
nparams <- as.numeric(!is.null(params1)) + as.numeric(!is.null(params2))
if (((nparams > 1) & (dist %in% c("poisson", "binary")))) {
stop(paste0("Too many parameter vectors (", nparams, ") for ", dist))
}
if (((nparams < 2) & (dist %in% c("gamma", "uniform", "normal", "negBinomial")))) {
stop(paste0("Too few parameter vectors (", nparams, ") for ", dist))
}
if (length(params1) == 1) {
params1 <- rep(params1, nvars)
}
if (!is.null(params2)) {
if (length(params2) == 1) {
params2 <- rep(params2, nvars)
}
}
if (length(params1) != nvars) {
stop(paste0(
"Length of vector 1 = ", length(params1),
", not equal to number of correlated variables: ", nvars
))
}
if (!is.null(params2)) {
if (length(params2) != nvars) {
stop(paste0(
"Length of vector 2 = ", length(params2),
", not equal to number of correlated variables: ", nvars
))
}
}
if (!(method %in% c("copula", "ep"))) {
stop(paste(method, "is not a valid method"))
}
if (dist != "binary" & method == "ep") {
stop("Method `ep` applies only to binary data generation")
}
if (method == "copula") {
mu <- rep(0, nvars)
dtM <- .genQuantU(nvars, n, rho, corstr, corMatrix)
if (dist == "binary") {
dtM[, param1 := params1[seq], keyby = seqid]
dtM[, X := stats::qbinom(p = Unew, 1, prob = param1)]
} else if (dist == "poisson") {
dtM[, param1 := params1[seq], keyby = seqid]
dtM[, X := stats::qpois(p = Unew, lambda = param1)]
} else if (dist == "negBinomial") {
sp <- negbinomGetSizeProb(params1, params2)
dtM[, param1 := sp[[1]][seq]]
dtM[, param2 := sp[[2]][seq]]
dtM[, X := stats::qnbinom(p = Unew, size = param1, prob = param2)]
} else if (dist == "uniform") {
dtM[, param1 := params1[seq], keyby = seqid]
dtM[, param2 := params2[seq], keyby = seqid]
dtM[, X := stats::qunif(p = Unew, min = param1, max = param2)]
} else if (dist == "gamma") {
sr <- gammaGetShapeRate(params1, params2)
dtM[, param1 := sr[[1]][seq]]
dtM[, param2 := sr[[2]][seq]]
dtM[, X := stats::qgamma(p = Unew, shape = param1, rate = param2)]
} else if (dist == "normal") {
dtM[, param1 := params1[seq], keyby = seqid]
dtM[, param2 := params2[seq], keyby = seqid]
dtM[, X := stats::qnorm(p = Unew, mean = param1, sd = sqrt(param2))]
}
} else if (method == "ep") {
corMatrix <- .buildCorMat(nvars, corMatrix, corstr, rho)
dtM <- .genBinEP(n, params1, corMatrix)
}
setkey(dtM, "id")
if (wide == FALSE) {
dFinal <- dtM[, list(id, period, X)]
if (!is.null(cnames)) setnames(dFinal, "X", cnames)
} else {
dFinal <- dcast(dtM, id ~ seq, value.var = "X")
if (!is.null(cnames)) {
nnames <- trimws(unlist(strsplit(cnames, split = ",")))
setnames(dFinal, paste0("V", 1:nvars), nnames)
}
}
setnames(dFinal, "id", idname)
return(dFinal[])
}
.checkBoundsBin <- function(p1, p2, d) {
l <- (p1 * p2) / ((1 - p1) * (1 - p2))
L <- max(-sqrt(l), -sqrt(1 / l))
u <- (p1 * (1 - p2)) / (p2 * (1 - p1))
U <- min(sqrt(u), sqrt(1 / u))
if ((d < L & isTRUE(all.equal(d, L)) == FALSE) |
(d > U & isTRUE(all.equal(d, U)) == FALSE)) {
LU <- paste0("(", round(L, 2), " ... ", round(U, 2), ")")
stopText <- paste("Specified correlation", d, "out of range", LU)
stop(stopText)
}
}
.findRhoBin <- function(p1, p2, d) {
.checkBoundsBin(p1, p2, d)
target <- d * sqrt(p1 * p2 * (1 - p1) * (1 - p2)) + p1 * p2
Max <- 1
Min <- -1
test <- 0
found <- FALSE
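  # bisection on the latent normal correlation until the bivariate normal
  # orthant probability matches the target joint success probability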
while (!found) {
corr <- diag(2)
corr[1, 2] <- corr[2, 1] <- test
est <- mvtnorm::pmvnorm(lower = rep(-Inf, 2), upper = c(stats::qnorm(p1), stats::qnorm(p2)), mean = c(0, 0), corr = corr)
if (round(est, 5) == round(target, 5)) {
found <- TRUE
rho <- test
} else if (est < target) {
Min <- test
test <- (Min + Max) / 2
} else {
Max <- test
test <- (Min + Max) / 2
}
}
return(rho)
}
.genBinEP <- function(n, p, tcorr) {
id <- NULL
period <- NULL
seqid <- NULL
np <- length(p)
phicorr <- diag(length(p))
for (i in (1:(np - 1))) {
for (j in ((i + 1):np)) {
p1 <- p[i]
p2 <- p[j]
phicorr[j, i] <- phicorr[i, j] <- .findRhoBin(p1, p2, tcorr[i, j])
}
}
if (!all(eigen(phicorr)$values > 0)) {
phicorr <- Matrix::nearPD(phicorr)$mat
}
normvars <- mvnfast::rmvn(n, mu = rep(0, length(p)), sigma = phicorr)
z <- matrix(rep(stats::qnorm(p), nrow(normvars)), nrow = nrow(normvars), byrow = TRUE)
binvars <- matrix(as.integer(normvars < z), nrow = nrow(z))
dtX <- data.table(binvars)
dtX[, id := .I]
dtM <- melt(dtX, id.vars = "id", variable.factor = TRUE, value.name = "X", variable.name = "seq")
dtM[, period := as.integer(seq) - 1]
setkey(dtM, "id")
dtM[, seqid := .I]
return(dtM[])
}
genCorMat <- function(nvars, cors = NULL) {
if (is.null(cors)) {
ev <- stats::runif(nvars, 0, 10)
Z <- matrix(ncol = nvars, stats::rnorm(nvars^2))
decomp <- qr(Z)
Q <- qr.Q(decomp)
R <- qr.R(decomp)
d <- diag(R)
ph <- d / abs(d)
O <- Q %*% diag(ph)
Z <- t(O) %*% diag(ev) %*% O
cm <- stats::cov2cor(Z)
} else {
if (choose(nvars, 2) != length(cors)) stop("Correlations improperly specified")
cmLower <- matrix(0, nrow = nvars, ncol = nvars)
cmLower[lower.tri(cmLower)] <- cors
cmUpper <- t(cmLower)
cm <- cmLower + cmUpper
diag(cm) <- 1
}
assertPositiveDefinite(corMat = cm)
cm
}
genCorOrdCat <- function(dtName, idname = "id", adjVar = NULL, baseprobs,
prefix = "grp", rho, corstr, corMatrix = NULL) {
.Deprecated("genOrdCat")
genOrdCat(
dtName = dtName,
adjVar = adjVar,
baseprobs = baseprobs,
idname = idname,
prefix = prefix,
rho = rho,
corstr = corstr,
corMatrix = corMatrix,
asFactor = FALSE
)
} |
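# D2ACWmat.d: appears to stack the two-dimensional autocorrelation wavelet
# matrices returned by D2ACW() for all 3*J level/direction combinations into
# one tall matrix, zero-padding the coarser levels to a common width.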
`D2ACWmat.d` <-
function(J, filter.number = 10., family = "DaubLeAsymm", OPLENGTH = 100000.)
{
J <- - J
P <- D2ACW( - J, filter.number = filter.number, family = family,
OPLENGTH = OPLENGTH)
nc <- ncol(P[[3. * J]])
nr <- 3. * J * nc
nrlocal <- nc
m <-
matrix(0., nrow = nr, ncol = nc)
tmp <- matrix(0., nrow = nrlocal, ncol = nc)
tmp <- P[[3. * J]]
m[((3. * J - 1.) * nc + 1.):(3. * J * nc), ] <- tmp[1.:nc, ]
for(j in (2. * J + 1.):(3. * J - 1.)) {
nrj <- nrow(P[[j]])
ncj <- ncol(P[[j]])
ncz <- (nc - ncj)/2.
nrz <- (nrlocal - nrj)/2.
z1 <- matrix(0., nrow = nrj, ncol = ncz)
z2 <- matrix(0., nrow = nrz, ncol = nc)
tmp1 <- matrix(0., nrow = nrj, ncol = ncj)
tmp1 <- P[[j]]
m[((j - 1.) * nc + 1.):(j * nc), ] <- rbind(z2, cbind(z1,
tmp1[1.:nrj, ], z1), z2)
}
tmp <- matrix(0., nrow = nrlocal, ncol = nc)
tmp <- P[[2. * J]]
m[((2. * J - 1.) * nc + 1.):(2. * J * nc), ] <- tmp[1.:nc, ]
for(j in (J + 1.):(2. * J - 1.)) {
nrj <- nrow(P[[j]])
ncj <- ncol(P[[j]])
ncz <- (nc - ncj)/2.
nrz <- (nrlocal - nrj)/2.
z1 <- matrix(0., nrow = nrj, ncol = ncz)
z2 <- matrix(0., nrow = nrz, ncol = nc)
tmp1 <- matrix(0., nrow = nrj, ncol = ncj)
tmp1 <- P[[j]]
m[((j - 1.) * nc + 1.):(j * nc), ] <- rbind(z2, cbind(z1,
tmp1[1.:nrj, ], z1), z2)
}
tmp <- matrix(0., nrow = nrlocal, ncol = nc)
tmp <- P[[J]]
m[((J - 1.) * nc + 1.):(J * nc), ] <- tmp[1.:nc, ]
for(j in 1.:(J - 1.)) {
nrj <- nrow(P[[j]])
ncj <- ncol(P[[j]])
ncz <- (nc - ncj)/2.
nrz <- (nrlocal - nrj)/2.
z1 <- matrix(0., nrow = nrj, ncol = ncz)
z2 <- matrix(0., nrow = nrz, ncol = nc)
tmp1 <- matrix(0., nrow = nrj, ncol = ncj)
tmp1 <- P[[j]]
m[((j - 1.) * nc + 1.):(j * nc), ] <- rbind(z2, cbind(z1,
tmp1[1.:nrj, ], z1), z2)
}
m
} |
context("atable_longitudinal")
library(atable)
DD = atable::test_data
set.seed(42)
DD = within(DD,{time = sample(paste0("time_", 0:5), size=nrow(DD), replace = TRUE)})
split_cols = "time"
group_col = "Group"
test_that("call atable_longitudinal", {
tab = atable_longitudinal(x = DD,
target_cols = "Split1",
group_col = group_col,
split_cols = split_cols,
add_margins = TRUE)
expect_true(is.data.frame(tab))
expect_true(ncol(tab)==8)
expect_true(nrow(tab)==30)
tab = atable_longitudinal(x = DD,
target_cols = "Split1",
group_col = group_col,
split_cols = split_cols,
add_margins = FALSE)
expect_true(is.data.frame(tab))
expect_true(ncol(tab)==7)
expect_true(nrow(tab)==30)
})
test_that("target_cols with 2 levels", {
tab = atable_longitudinal(x = DD,
target_cols = "Split2",
group_col = group_col,
split_cols = split_cols,
add_margins = TRUE)
expect_true(is.data.frame(tab))
expect_true(ncol(tab)==7)
expect_true(nrow(tab)==6)
})
test_that("no group", {
tab = atable_longitudinal(x = DD,
target_cols = "Numeric",
group_col = NULL,
split_cols = split_cols)
expect_true(is.data.frame(tab))
expect_true(ncol(tab)==2)
expect_true(nrow(tab)==6)
tab = atable_longitudinal(x = DD,
target_cols = "Split1",
group_col = NULL,
split_cols = split_cols)
expect_true(is.data.frame(tab))
expect_true(ncol(tab)==3)
expect_true(nrow(tab)==30)
})
test_that("wrong arguments", {
expect_error(
atable_longitudinal(x = DD,
target_cols = c("Numeric", "Split1"),
group_col = group_col,
split_cols = split_cols)
)
expect_error(
atable_longitudinal(x = DD,
target_cols = c("Numeric"),
group_col = group_col,
split_cols = NULL)
)
expect_error(
atable_longitudinal(x = DD,
target_cols = c("Numeric"),
group_col = group_col,
split_cols = c("Split1", "Split2"))
)
expect_error(
atable_longitudinal(x = DD,
target_cols = c("Numeric"),
group_col = group_col,
split_cols = split_cols,
blocks = list("block"="Numeric"))
)
}) |
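# Local and global term-weighting functions (lsa-style): the lw_* functions
# transform the raw term-document matrix cell-wise, while the gw_* functions
# return one global weight per term (normalisation, idf, gf/df, 1 + entropy).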
lw_tf <- function(m) {
return(m)
}
lw_logtf <- function(m) {
return( log(m+1) )
}
lw_bintf <- function(m) {
return( (m>0)*1 )
}
gw_normalisation <- function(m) {
return ( 1 / sqrt( rowSums((m*m), na.rm = TRUE) ) )
}
gw_idf <- function(m) {
df = rowSums(lw_bintf(m), na.rm=TRUE)
return ( ( log2(ncol(m)/df) + 1 ) )
}
gw_gfidf <- function(m) {
gf = rowSums(m, na.rm = TRUE)
df = rowSums(lw_bintf(m), na.rm=TRUE)
return ( gf/df )
}
entropy <- function (m) {
gf = rowSums(m, na.rm = TRUE)
p = m / gf
ndocs = ncol(m)
entropy = - rowSums( (p*log(p)) / log(ndocs), na.rm = TRUE )
return ( entropy )
}
gw_entropy <- function(m) {
return ( (1 + entropy(m)) )
} |
material_column <- function(..., width = 6, offset = 0){
shiny::tags$div(
class = paste0("col s12 m", width, " offset-s0 offset-m", offset),
...
)
} |
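# otp_routing_options returns a named list of all supported OpenTripPlanner
# routing parameters (all NULL by default); otp_validate_routing_options
# type-checks any values that were set and drops the entries left NULL.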
otp_routing_options <- function() {
opts <- list(
alightSlack = NULL,
bannedAgencies = NULL,
bannedRoutes = NULL,
bannedStops = NULL,
bannedStopsHard = NULL,
bannedTrips = NULL,
batch = NULL,
bikeBoardCost = NULL,
bikeSpeed = NULL,
bikeSwitchCost = NULL,
bikeSwitchTime = NULL,
boardSlack = NULL,
clampInitialWait = NULL,
disableAlertFiltering = NULL,
disableRemainingWeightHeuristic = NULL,
flexFlagStopBufferSize = NULL,
flexIgnoreDrtAdvanceBookMin = NULL,
flexUseEligibilityServices = NULL,
flexUseReservationServices = NULL,
geoidElevation = NULL,
ignoreRealtimeUpdates = NULL,
maxHours = NULL,
maxPreTransitTime = NULL,
maxTransfers = NULL,
minTransferTime = NULL,
nonpreferredTransferPenalty = NULL,
optimize = NULL,
otherThanPreferredRoutesPenalty = NULL,
preferredAgencies = NULL,
preferredRoutes = NULL,
reverseOptimizeOnTheFly = NULL,
showIntermediateStops = NULL,
startTransitStopId = NULL,
startTransitTripId = NULL,
transferPenalty = NULL,
triangleSafetyFactor = NULL,
triangleSlopeFactor = NULL,
triangleTimeFactor = NULL,
unpreferredAgencies = NULL,
unpreferredRoutes = NULL,
useRequestedDateTimeInMaxHours = NULL,
waitAtBeginningFactor = NULL,
waitReluctance = NULL,
walkBoardCost = NULL,
walkReluctance = NULL,
walkSpeed = NULL,
wheelchair = NULL,
whiteListedAgencies = NULL,
whiteListedRoutes = NULL
)
return(opts)
}
otp_validate_routing_options <- function(opts) {
checkmate::assert_list(opts)
names_list <- c(
"alightSlack", "bannedAgencies", "bannedRoutes", "bannedStops",
"bannedStopsHard", "bannedTrips", "batch", "bikeBoardCost",
"bikeSpeed", "bikeSwitchCost", "bikeSwitchTime",
"boardSlack", "clampInitialWait", "disableAlertFiltering",
"disableRemainingWeightHeuristic", "flexFlagStopBufferSize",
"flexIgnoreDrtAdvanceBookMin", "flexUseEligibilityServices",
"flexUseReservationServices", "geoidElevation",
"ignoreRealtimeUpdates",
"maxHours", "maxPreTransitTime",
"maxTransfers", "minTransferTime",
"nonpreferredTransferPenalty",
"optimize", "otherThanPreferredRoutesPenalty",
"preferredAgencies",
"preferredRoutes", "reverseOptimizeOnTheFly",
"showIntermediateStops", "startTransitStopId",
"startTransitTripId", "transferPenalty",
"triangleSafetyFactor", "triangleSlopeFactor",
"triangleTimeFactor", "unpreferredAgencies",
"unpreferredRoutes", "useRequestedDateTimeInMaxHours",
"waitAtBeginningFactor", "waitReluctance", "walkBoardCost",
"walkReluctance", "walkSpeed", "wheelchair", "whiteListedAgencies",
"whiteListedRoutes"
)
names_unknown <- names(opts)[!names(opts) %in% names_list]
if (length(names_unknown) > 0) {
stop("Unknown routeOptions: ", paste(names_unknown, collapse = ", "))
}
checkmate::assert_logical(opts$batch,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$disableAlertFiltering,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$disableRemainingWeightHeuristic,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$geoidElevation,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$ignoreRealtimeUpdates,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$maxTransfers,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$reverseOptimizeOnTheFly,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$showIntermediateStops,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$useRequestedDateTimeInMaxHours,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$wheelchair,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$flexIgnoreDrtAdvanceBookMin,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$flexUseEligibilityServices,
len = 1, null.ok = TRUE
)
checkmate::assert_logical(opts$flexUseReservationServices,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$alightSlack,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$bikeBoardCost,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$bikeSwitchCost,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$bikeSwitchTime,
len = 1, null.ok = TRUE, lower = 0
)
checkmate::assert_integer(opts$maxPreTransitTime,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$nonpreferredTransferPenalty,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$otherThanPreferredRoutesPenalty,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$transferPenalty,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$walkBoardCost,
len = 1, null.ok = TRUE
)
checkmate::assert_integer(opts$minTransferTime,
len = 1, null.ok = TRUE
)
checkmate::assert_character(opts$bannedAgencies,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$bannedRoutes,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$bannedStops,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$bannedStopsHard,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$bannedTrips,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$boardSlack,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$preferredAgencies,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$preferredRoutes,
null.ok = TRUE,
len = 1
)
checkmate::assert_character(opts$optimize,
len = 1,
null.ok = TRUE
)
checkmate::assert_choice(opts$optimize,
choices = c(
"FLAT", "GREENWAYS",
"QUICK", "SAFE",
"TRANSFERS", "TRIANGLE"
),
null.ok = TRUE
)
checkmate::assert_character(opts$startTransitStopId,
len = 1,
null.ok = TRUE
)
checkmate::assert_character(opts$startTransitTripId,
len = 1,
null.ok = TRUE
)
checkmate::assert_character(opts$unpreferredAgencies,
len = 1,
null.ok = TRUE
)
checkmate::assert_character(opts$unpreferredRoutes,
len = 1,
null.ok = TRUE
)
checkmate::assert_character(opts$whiteListedAgencies,
len = 1,
null.ok = TRUE
)
checkmate::assert_character(opts$whiteListedRoutes,
len = 1,
null.ok = TRUE
)
checkmate::assert_numeric(opts$bikeSpeed,
len = 1, null.ok = TRUE, lower = 0
)
checkmate::assert_numeric(opts$clampInitialWait,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$maxHours,
len = 1, null.ok = TRUE, lower = 0
)
checkmate::assert_numeric(opts$triangleSafetyFactor,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$triangleSlopeFactor,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$triangleTimeFactor,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$waitAtBeginningFactor,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$waitReluctance,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$walkReluctance,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$walkSpeed,
len = 1, null.ok = TRUE
)
checkmate::assert_numeric(opts$flexFlagStopBufferSize,
len = 1, null.ok = TRUE
)
opts <- opts[lengths(opts) > 0]
if (length(opts) == 0) {
opts <- NULL
}
return(opts)
} |
testthat::test_that(
"methods for fhir_columns() create identical results", {
c1 <- fhir_columns(xpaths = c(code="code/coding/code", id = "id"))
c2 <- fhir_columns(xpaths = c("code/coding/code", "id"), colnames = c("code", "id"))
c3 <- fhir_columns(xpaths = list(code="code/coding/code", id = "id"))
testthat::expect_identical(c1, c2)
testthat::expect_identical(c3, c2)
}
)
testthat::test_that(
"names() works on fhir_columns", {
c <- fhir_columns(xpaths = c(code="code/coding/code", id = "id"))
testthat::expect_identical(names(c), c("code", "id"))
}
)
testthat::test_that(
"errors are thrown for incorrect input", {
testthat::expect_error(fhir_columns(xpaths = list(c("a", "b"), c("a"))))
testthat::expect_error(fhir_columns(xpaths = list(c(1, 2))))
}
) |
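# GaudinskiModel14: 7-pool radiocarbon soil-carbon model (SoilR style); builds
# time-bound litter/root input fluxes and a transfer matrix (optionally scaled
# by xi(t)) and returns the corresponding Model_14 object.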
GaudinskiModel14<- function
(t,
ks=c(kr=1/1.5,koi=1/1.5,koeal=1/4,koeah=1/80,kA1=1/3,kA2=1/75,kM=1/110),
C0=c(FR0=390, C10=220, C20=390, C30=1370, C40=90, C50=1800, C60=560),
F0_Delta14C=rep(0,7),
LI=150,
RI=255,
xi=1,
inputFc,
lambda=-0.0001209681,
lag=0,
solver=deSolve.lsoda.wrapper,
pass=FALSE
)
{
t_start=min(t)
t_stop=max(t)
if(length(ks)!=7) stop("ks must be of length = 7")
if(length(C0)!=7) stop("the vector with initial conditions must be of length = 7")
if(length(LI)==1) inputFluxes=BoundInFluxes(
function(t){
matrix(
nrow=7,ncol=1, c(RI,LI,0,0,0,0,0))
},
t_start,
t_stop
)
if(class(LI)=="data.frame"){
x1=LI[,1]
y1=LI[,2]
x2=RI[,1]
y2=RI[,2]
LitterFlux=function(t0){as.numeric(spline(x1,y1,xout=t0)[2])}
RootFlux=function(t0){as.numeric(spline(x2,y2,xout=t0)[2])}
inputFluxes= BoundInFluxes(map=function(t){matrix(nrow=7,ncol=1,c(RootFlux(t),LitterFlux(t),0,0,0,0,0))}, t_start, t_stop )
}
if(length(xi)==1) fX=function(t){xi}
if(class(xi)=="data.frame"){
X=xi[,1]
Y=xi[,2]
fX=function(t){as.numeric(spline(X,Y,xout=t)[2])}
}
A=-abs(diag(ks))
A[3,2]=ks[2]*(98/(3+98+51))
A[4,3]=ks[3]*(4/(94+4))
A[6,5]=ks[5]*(24/(6+24))
A[7,6]=ks[6]*(3/(22+3))
A[7,2]=ks[2]*(3/(3+98+51))
A[4,1]=ks[1]*(35/(35+190+30))
A[5,1]=ks[1]*(30/(35+190+30))
At=BoundLinDecompOp(
map=function(t){ fX(t)*A },
t_start,
t_stop
)
Fc=BoundFc(inputFc,lag=lag,format="Delta14C")
mod=Model_14(t,
At,
ivList=C0,
initialValF=ConstFc(F0_Delta14C,"Delta14C"),
inputFluxes=inputFluxes,
inputFc=Fc,
c14DecayRate=lambda,
pass=pass
  )
  return(mod)
}
context("pkg-options")
source("setup.R")
skip_on_cran_windows()
test_that("workflowr does not overwrite user-defined package options", {
sysgit <- callr::r_safe(function() {
options(workflowr.sysgit = "/git")
library(workflowr)
getOption("workflowr.sysgit")
})
expect_identical(sysgit, "/git")
})
test_that("workflowr does not overwrite user-defined package options in .Rprofile", {
if (!interactive()) skip("These tests don't work in R CMD check")
path <- test_setup()
on.exit(test_teardown(path))
cwd <- setwd(path)
on.exit(setwd(cwd), add = TRUE)
writeLines(c("options(workflowr.autosave = FALSE)",
"options(workflowr.sysgit = \"/git\")",
"options(workflowr.view = \"bananas\")",
"library(workflowr)"),
con = ".Rprofile")
autosave <- callr::r_safe(function() getOption("workflowr.autosave"),
user_profile = TRUE)
expect_false(autosave)
sysgit <- callr::r_safe(function() getOption("workflowr.sysgit"),
user_profile = TRUE)
expect_identical(sysgit, "/git")
view <- callr::r_safe(function() getOption("workflowr.view"),
user_profile = TRUE)
expect_identical(view, "bananas")
})
test_that("Invalid workflowr.autosave does not crash workflowr", {
path <- test_setup()
on.exit(test_teardown(path))
expect_silent(
callr::r_safe(function(path) {
options(workflowr.autosave = "not-a-logical")
library(workflowr)
wflow_status(project = path)
}, args = list(path = path))
)
}) |
mrbj <-
function(formula, data, subset, trace=FALSE, gehanonly=FALSE, cov=FALSE,
na.action=na.exclude, residue=FALSE, mcsize=100)
{
lss.betag<-function(x,y,delta,z)
{
row=ncol(x)
col=ncol(z)
betagm<-matrix(0,ncol=col,nrow=row)
dimnum<-dim(x)
n1<-dimnum[1]
n2<-dimnum[2]
yy0<-rep(y,rep(n1,n1))
delta1<-rep(delta,rep(n1,n1))
yy1<-rep(y,n1)
yy2<-delta1*(yy0-yy1)
xx0<-matrix(rep(as.vector(x),rep(n1,n1*n2)),nrow=n1*n1)
xx1<-t(matrix(rep(as.vector(t(x)),n1),nrow=n2))
xx2<-xx0-xx1
for(i in 1:col)
{
zz=rep(z[,i],rep(n1,n1))*rep(z[,i],n1)
xxdif<-xx2*zz*delta1
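# note: xnew is first set to the column sums of xxdif and then immediately
# overwritten with the stacked matrix rbind(xxdif); Enet.wls receives the
# stacked form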
xnew<-apply(xxdif,2,sum)
xnew<-rbind(xxdif)
yynew<-c(yy2*zz)
fit <- Enet.wls(xnew, yynew, delta1)$beta
betagm[,i] <- fit
}
betagm
}
eps <- .Machine$double.eps^(2/3)
call <- match.call()
mf <- match.call(expand.dots = FALSE)
m <- match(c("formula", "data", "subset", "na.action"), names(mf), 0)
mf <- mf[c(1, m)]
mf$drop.unused.levels <- TRUE
mf[[1]] <- as.name("model.frame")
mf <- eval(mf, sys.parent())
Terms <- attr(mf, "terms")
xvars <- as.character(attr(Terms, "variables"))
yvar <- attr(Terms, "response")
if((yvar <- attr(Terms, "response")) > 0)
xvars <- xvars[ - yvar]
else xlevels <- NULL
y <- model.extract(mf, "response")
x <- model.matrix(Terms, mf)
if(all(x[, 1] == 1))
x <- x[, -1]
if(ncol(as.matrix(y)) != 2)
stop("Response must be a right-censored survival object!")
nobs <- nrow(y)
nvar1 <- ncol(x)
fit <- list(converged = FALSE, gehanonly=gehanonly, cov=cov, mcsize=mcsize)
fit$call <- call
fit$nobs <- nobs
fit$censored <- nobs - sum(y[,2])
fit$niter <- 0
fit$printkm <- residue
if(gehanonly)
{
z <- matrix(rexp(nobs*mcsize), ncol=mcsize)
zdummy <- matrix(rep(1,nobs), ncol=1)
beta <- lss.betag(x, y[,1], y[,2], zdummy)
betastar <- lss.betag(x, y[,1], y[,2], z)
fit$betag <- beta
fit$cnames <- dimnames(x)[[2]]
nvar <- ncol(x)
bbar <- apply(betastar, 1, mean)
tmp <- betastar - bbar
fit$gehancov <- tmp %*% t(tmp)/(mcsize - 1)
fit$gehansd <- sqrt(diag(fit$gehancov))
fit$gehanzvalue <- beta/fit$gehansd
fit$gehanpvalue <- (1 - pnorm(abs(fit$gehanzvalue))) * 2
dimnames(fit$gehancov) <- list(fit$cnames,fit$cnames)
if(trace)
cat("\nbetag: ", format(beta), "\n\n")
}
gnet<-Enet.wls(x, y[,1], y[,2])
fit$enet <- gnet$beta
fit$fit <- gnet$fit
fit
} |
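# gkcov: Gnanadesikan-Kettenring pairwise covariance,
#   cov(x, y) = ( s(x + y) - s(x - y) ) / 4,
# where gk.sigmamu (taulc by default) is expected to return a variance-type
# (squared-scale) estimate.
# Example with hypothetical data, assuming taulc() is available:
#   x <- rnorm(50); y <- x + rnorm(50)
#   gkcov(x, y)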
gkcov<-function(x,y,gk.sigmamu=taulc,...){
val<-.25*(gk.sigmamu(x+y,...)-gk.sigmamu(x-y,...))
val
} |
ldArgument<- function(obj, ...)
{
UseMethod("ldArgument")
}
ldArgument.HRMtree<- function(obj, J, ...)
{
g<- getGraph(obj)
params<- getParams(obj)
if (length(J)!=2)
stop("The set J must contain only two elements")
if (sum(J %in% get.vertex.attribute(g, "name", V(g)))<2)
stop("Incorrect vertex set J")
ep<- edge_names_along_path(obj, rt=J[1], id=J[2], edge_names = TRUE)
s<- sum(params[ep]^2)
ld<- pnorm(sqrt(s)/2)
return(ld)
}
ldArgument.Tree<- function(obj, x, k_ratio, ...)
{
J<- names(which(x>0))
ld<- rep(0, length(J))
names(ld)<- J
for(j in J)
{
x_up<- x
x_down<- x
dt<- 10^{-5}
dtt<- dt
repeat
{
x_up[j]<- x[j]+dt
x_down[j]<- x[j]-dt
ld[j]<- (stdf(obj, x_up, k_ratio ) - stdf(obj, x_down, k_ratio ))/(2*dt)
if (ld[j]>0)
{
break
}
dt<- dt+dtt
}
}
return(ld)
} |
setMethod("getvcov",
signature(object = "cold"),
function (object)
{
cov<-object@covariance
cnames <- rownames(cov)
r1<-nrow(cov)
c1<-ncol(cov)
if(all(is.na(match(cnames, "omega1"))) )
{ cov
}
else if (!all(is.na(match(cnames, "omega1"))) &&
all(is.na(match(cnames, "omega2"))) )
{ cov.aux<-cov[1:(r1-1),1:(c1-1)]
cov.aux
}
else if (!all(is.na(match(cnames, "omega2"))) )
{ cov.aux<-cov[1:(r1-2),1:(c1-2)]
cov.aux
}
} ) |
repo_adapter_create_s3 <- function(name) {
result <- repo_adapter_create_url(name)
class(result) <- c("rsuite_repo_adapter_s3", class(result))
return(result)
}
repo_adapter_get_path.rsuite_repo_adapter_s3 <- function(repo_adapter, params, ix = NA) {
url <- repo_adapter_get_path.rsuite_repo_adapter_url(repo_adapter, params, ix)
assert(all(grepl("^https?://[\\w\\d\\._-]+\\.s3\\.amazonaws\\.com(/.*)?$", url, perl = TRUE)),
paste0("Invalid url specified for %s repository in project PARAMETERS file.",
" Amazon S3 url should have <schema>://<bucket>.s3.amazonaws.com[/<path>] form;",
" Url does not have required form: %s"),
repo_adapter$name, url)
return(url)
}
repo_adapter_create_manager.rsuite_repo_adapter_s3 <- function(repo_adapter, ...) {
dots <- list(...)
if ("prj" %in% names(dots)) {
prj <- dots$prj
assert(!is.null(prj) && is_prj(prj), "rsuite_project expected for prj")
dots$params <- prj$load_params()
}
if ("params" %in% names(dots)) {
params <- dots$params
assert(!is.null(params) && "rsuite_project_params" %in% class(params),
"rsuite_project_params expected for params")
assert("ix" %in% names(dots),
paste0("Either prj/params and ix or url and rver must be provided to",
" repo_adapter_start_management.rsuite_repo_adapter_s3"))
ix <- dots$ix
url <- repo_adapter_get_path.rsuite_repo_adapter_s3(repo_adapter, params, ix = ix)
types <- c(params$pkgs_type, params$aux_pkgs_type)
rver <- params$r_ver
} else {
assert(all(c("url", "rver") %in% names(dots)),
paste0("Either prj/params and ix or url and rver must be provided to",
" repo_adapter_start_management.rsuite_repo_adapter_s3"))
url <- dots$url
rver <- dots$rver
if ("types" %in% names(dots)) {
types <- dots$types
assert(is.character(types) & length(types) > 0, "Non empty character(N) expected for types")
exp_types <- unique(c("win.binary", "mac.binary", "binary", "source", .Platform$pkgType))
assert(all(types %in% exp_types),
"Invalid types management requested. Supported types are %s",
paste(exp_types, collapse = ", "))
} else {
types <- .Platform$pkgType
}
}
repo_manager <- repo_manager_s3_create(url, types, rver, dots$s3_profile)
return(repo_manager)
} |
count_quasi_exts <- function(dat, quasi_thresh, ignore_pops_thresh = 5,
duration = 1) {
subpop_qe <- plyr::llply(dat, function(x) {
plyr::laply(x, function(y) {
conserved_pops <- which(y$A[1, ] > ignore_pops_thresh)
out <- apply(y$A[, conserved_pops], 2, function(z) {
temp <- is_quasi_ext(z, thresh = quasi_thresh, duration = duration)$first_qe
temp
})
out
})
})
return(subpop_qe)
} |
ui_pca <- function() {
ns <- NS("pca")
tabPanel("PCA",
value = "pca",
splitLayout(
verticalLayout(
fluidRow(
column(6,selectizeInput(ns("pca_xaxis"),
label = "X axis:", choices = NULL)),
column(6,selectizeInput(ns("pca_yaxis"),
label = "Y axis:", choices = NULL))),
plotOutput(ns("pca_plot"), width = "100%", height = "400px")
),
plotOutput(ns("drivers_plot"), width = "100%", height = "auto"),
cellWidths = c("40%", "60%")
) %>% shinyhelper::helper(type = "markdown", content = "pca_help")
)
}
server_pca <- function(id, selected_data, cluster_labels, cluster_colors) {
moduleServer(id, function(input, output, session) {
pca_data <- reactive({
df <- selected_data()
stats::prcomp(df)
})
observeEvent(pca_data(), {
pca_res <- pca_data()
pca_dims <- colnames(pca_res$x)
updateSelectizeInput(session, "pca_xaxis",
choices = pca_dims, selected = "PC1")
updateSelectizeInput(session, "pca_yaxis",
choices = pca_dims, selected = "PC2")
})
output$pca_plot <- renderPlot({
req(input$pca_xaxis)
req(input$pca_yaxis)
pca_scatterplot(pca_data(), cluster_labels(),
cluster_colors, input$pca_xaxis, input$pca_yaxis)
})
output$drivers_plot <- renderPlot({
drivers_df <- pca_drivers_df(selected_data(), pca_data())
pca_driversplot(drivers_df)
}, height = function() max(300, ncol(selected_data()) * 25))
})
} |
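# stratifyvegdata: splits tree records by plot and tallies abundance (or
# counts) of each species into one- or two-way size classes, optionally as
# cumulative totals, returning a classed list of per-plot matrices/arrays.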
stratifyvegdata<-function(x,sizes1, sizes2 = NULL, treeSel=NULL, spcodes=NULL, plotColumn="plot",
speciesColumn = "species", abundanceColumn="abundance", size1Column = "size",
size2Column = NULL, cumulative = FALSE, counts=FALSE, mergeSpecies=FALSE, verbose=FALSE) {
treeData = as.data.frame(x)
plotColumnId = which(names(treeData)==plotColumn)
abundanceColumnId = which(names(treeData)==abundanceColumn)
size1ColumnId = which(names(treeData)==size1Column)
doublestratify = FALSE
if(!is.null(size2Column) || (!is.null(sizes2))) {
doublestratify = TRUE
if(is.null(sizes2)) stop("sizes2 must be specified for double stratification")
if(is.null(size2Column)) stop("size2Column must be specified for double stratification")
size2ColumnId = which(names(treeData)==size2Column)
}
speciesColumnId = which(names(treeData)==speciesColumn)
if(mergeSpecies) treeData[,speciesColumnId] = "allspecies"
if(is.null(spcodes)) spcodes =sort(unique(treeData[,speciesColumnId]))
if(!is.null(treeSel)) {
treeData = treeData[treeSel,]
}
stratify<-function(treeDataPlot, sizes, spcodes=NULL, speciesColumnId, abundanceColumnId, sizeColumnId, cumulative=FALSE, counts=FALSE, verbose=FALSE) {
if(is.null(spcodes)) spcodes = unique(treeData[,speciesColumnId])
nsp = length(spcodes)
nstrata = length(sizes)-1
m = matrix(0,nrow=nsp, ncol=nstrata)
rownames(m) = spcodes
c1 = cut(treeDataPlot[,sizeColumnId], sizes, include.lowest = TRUE)
if(sum(is.na(c1))>0) stop("Some values are not included within size classes. Revise size class definition")
colnames(m) = levels(c1)
c1 = as.numeric(c1)
for(i in 1:nrow(treeDataPlot)) {
isp = which(spcodes==treeDataPlot[i,speciesColumnId])
if(verbose) cat(paste(i,"_",isp,"_",c1[i],"_",m[isp, c1[i]], ": ", treeDataPlot[i,abundanceColumnId],"\n"))
if(!cumulative) {
if(!counts) m[isp, c1[i]] = m[isp,c1[i]]+ as.numeric(treeDataPlot[i,abundanceColumnId])
else m[isp,c1[i]] = m[isp,c1[i]]+1
} else {
if(!counts) m[isp,1:c1[i]] = m[isp,1:c1[i]]+as.numeric(treeDataPlot[i,abundanceColumnId])
else m[isp,1:c1[i]] = m[isp,1:c1[i]]+1
}
}
return(m)
}
doublestratify<-function(treeDataPlot, sizes1, sizes2, spcodes=NULL, speciesColumnId, abundanceColumnId,
size1ColumnId, size2ColumnId, cumulative=FALSE, counts=FALSE, verbose=FALSE) {
if(is.null(spcodes)) spcodes = unique(treeData[,speciesColumnId])
nsp = length(spcodes)
nstrata1 = length(sizes1)-1
nstrata2 = length(sizes2)-1
m = array(0,dim=c(nsp, nstrata1, nstrata2))
c1 = cut(treeDataPlot[,size1ColumnId], sizes1, include.lowest = TRUE)
if(sum(is.na(c1))>0) stop("Some values are not included within size1 classes. Revise size1 class definition")
c2 = cut(treeDataPlot[,size2ColumnId], sizes2, include.lowest = TRUE)
if(sum(is.na(c2))>0) stop("Some values are not included within size2 classes. Revise size2 class definition")
dimnames(m) = list(spcodes, levels(c1), levels(c2))
c1 = as.numeric(c1)
c2 = as.numeric(c2)
for(i in 1:nrow(treeDataPlot)) {
if(verbose) cat(paste(i,"\n"))
isp = which(spcodes==treeDataPlot[i,speciesColumnId])
if(!cumulative){
if(!counts) m[isp,c1[i],c2[i]] = m[isp,c1[i],c2[i]]+as.numeric(treeDataPlot[i,abundanceColumnId])
else m[isp,c1[i],c2[i]] = m[isp,c1[i],c2[i]]+1
} else {
if(!counts) m[isp,1:c1[i],1:c2[i]] = m[isp,1:c1[i],1:c2[i]]+as.numeric(treeDataPlot[i,abundanceColumnId])
else m[isp,1:c1[i],1:c2[i]] = m[isp,1:c1[i],1:c2[i]]+1
}
}
return(m)
}
if(!is.null(size2Column)){
X = lapply(split(treeData,treeData[,plotColumnId]),
FUN=doublestratify,sizes1=sizes1, sizes2 =sizes2,
spcodes=spcodes, speciesColumnId = speciesColumnId, abundanceColumnId =abundanceColumnId, size1ColumnId=size1ColumnId,size2ColumnId=size2ColumnId,
cumulative=cumulative, counts=counts, verbose=verbose)
if(!cumulative) class(X)<-c("doublestratifiedvegdata","list")
else class(X)<-c("CAS","list")
} else {
X = lapply(split(treeData,treeData[,plotColumnId]),
FUN=stratify,sizes=sizes1,
spcodes=spcodes, speciesColumnId = speciesColumnId, abundanceColumnId =abundanceColumnId, sizeColumnId=size1ColumnId,
cumulative = cumulative, counts=counts, verbose=verbose)
if(!cumulative) class(X)<-c("stratifiedvegdata","list")
else class(X)<-c("CAP","list")
}
return(X)
} |
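# Illustrative usage (not part of the original source): a tiny tree table using
# the function's default column names and arbitrary example size breaks.
trees_example <- data.frame(plot = c("p1", "p1", "p2"),
                            species = c("A", "B", "A"),
                            abundance = c(10, 5, 2),
                            size = c(12, 35, 18))
stratifyvegdata(trees_example, sizes1 = c(0, 20, 40))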
svc <- paws::timestreamquery()
test_that("describe_endpoints", {
expect_error(svc$describe_endpoints(), NA)
}) |
read_server_directory1 <- function(dirs_or_files, sample_set_name = NULL,
existing_sample_set_names = NULL,
unique_sample_set_name = FALSE,
include_base_dir = T, display_messages = TRUE,
glob_files = FALSE) {
new_sample_sets <- list()
read_error_msg <- list(val_neg=NULL, val_pos=NULL)
  msg1 <- function(msg) {
    # superassignment so the message reaches the enclosing read_error_msg list
    # instead of a local copy that would be discarded
    read_error_msg$val_neg <<- msg
  }
get_sampleset_name <- function(data_dir, my_set_name = sample_set_name) {
if (is.null(my_set_name)) {
if (isTRUE(glob_files)) {
my_set_name <- "Server files"
} else {
my_set_name <- basename(data_dir)
}
}
if (unique_sample_set_name) {
counter <- 1
if (!is.null(existing_sample_set_names)) {
while (paste(my_set_name, counter) %in% existing_sample_set_names) {
counter <- counter + 1
}
}
my_set_name <- paste(my_set_name, counter)
}
my_set_name
}
file_infos <- file.info(dirs_or_files)
file_sel <- !is.na(file_infos$isdir) & !file_infos$isdir
if (any(file_sel)) {
my_set_name <- get_sampleset_name("Server files")
new_sample_sets[[my_set_name]] <- read_sample_data(rownames(file_infos)[file_sel],
ext=NULL, is_files=TRUE)
}
if (any(!file_sel)) {
for (data_dir in dirs_or_files[!file_sel]) {
my_set_name <- get_sampleset_name(data_dir, sample_set_name)
bad_files <- c()
if (isTRUE(glob_files)) {
new_sample_sets[[my_set_name]] <- read_sample_data(data_dir, ext=NULL, glob_files=TRUE)
} else {
dmessage("Reading files in ", data_dir)
if (!dir.exists(data_dir)) {
msg1(paste("Directory ", data_dir, "does not exist."))
next
}
if (length(list.files(data_dir)) == 0) {
msg1(paste("No files in directory ", data_dir, "."))
next
}
n_files <- length(list.files(data_dir))
max_files <- getOption("pavian.maxFiles", 100)
if (n_files > max_files) {
msg1(paste("There are ",n_files," files in the directory, but the highest allowed number is ",max_files," files ", data_dir, " - please subdivide the data into smaller directories, or set the option 'pavian.maxFiles' to a higher number (e.g. 'options(pavian.maxFiles=250)')."))
next
}
if (include_base_dir) {
new_sample_sets[[my_set_name]] <- read_sample_data(data_dir, ext=NULL)
}
dirs <- grep("^\\.", list.dirs(data_dir, recursive = FALSE), invert = TRUE, value = TRUE)
n_dirs <- length(dirs)
max_dirs <- getOption("pavian.maxSubDirs", 10)
if (n_dirs > max_dirs) {
read_error_msg$val_neg <- c(read_error_msg$val_neg, paste("There are ",n_dirs," sub-directories in ", data_dir,
" but the highest allowed number is ",max_dirs," - specify individual directories with reports one at a time to load data or set the option 'pavian.maxSubDirs' to a higher number (e.g. 'options(pavian.maxSubDirs=50)')."))
} else if (length(dirs) > 0) {
sub_dir_sets <- lapply(dirs, read_sample_data, ext=NULL)
names(sub_dir_sets) <- paste0(my_set_name,"/",basename(dirs))
new_sample_sets <- c(new_sample_sets, sub_dir_sets)
}
}
}
}
paste_last <- function(x, ..., collapse_last) {
if (length(x) ==1)
return(x)
y <- paste(x[-length(x)], ...)
paste(y, x[length(x)], sep=collapse_last)
}
sel_bad_sets <- sapply(new_sample_sets, function(x) is.null(x) || nrow(x) == 0)
bad_files <- unlist(sapply(new_sample_sets, attr, "bad_files"))
new_sample_sets <- new_sample_sets[!sel_bad_sets]
if (length(new_sample_sets) > 0) {
read_error_msg$val_pos <- sprintf("Added sample set%s <b>%s</b> with <b>%s</b> valid reports in total.",
ifelse(length(new_sample_sets) == 1, "", "s"),
paste_last(names(new_sample_sets), collapse="</b>, <b>", collapse_last="</b> and <b>"),
sum(unlist(sapply(new_sample_sets, function(x) sum(x$FormatOK)))))
}
if (length(bad_files) > 0) {
read_error_msg$val_neg <- c(read_error_msg$val_neg,
sprintf("The following files did not conform the report format: <br/> - <b>%s</b>",
paste(bad_files, collapse="</b><br/> - <b>")))
}
if (isTRUE(display_messages)) {
if (!is.null(read_error_msg$val_neg)) { warning(read_error_msg$val_neg) }
if (!is.null(read_error_msg$val_pos)) { dmessage(read_error_msg$val_pos) }
}
return(list(
sample_sets = new_sample_sets,
error_msg = read_error_msg))
} |
new_tibble <- function(x, ..., nrow = NULL, class = NULL, subclass = NULL) {
if (is.null(class) && !is.null(subclass)) {
deprecate_soft("2.0.0", "tibble::new_tibble(subclass = )", "new_tibble(class = )")
class <- subclass
}
x <- unclass(x)
if (!is.list(x)) {
cnd_signal(error_new_tibble_must_be_list())
}
if (!is.null(nrow)) {
if (!is.numeric(nrow) || length(nrow) != 1 || nrow < 0 || !is_integerish(nrow, 1) || nrow >= 2147483648) {
cnd_signal(error_new_tibble_nrow_must_be_nonnegative())
}
nrow <- as.integer(nrow)
}
args <- attributes(x)
if (is.null(args)) {
args <- list()
}
new_attrs <- pairlist2(...)
nms <- names(new_attrs)
for (i in seq_along(nms)) {
nm <- nms[[i]]
if (nm == "") {
next
}
args[[nm]] <- new_attrs[[i]]
}
if (length(x) == 0) {
args[["names"]] <- character()
} else if (is.null(args[["names"]])) {
cnd_signal(error_names_must_be_non_null())
}
if (is.null(class)) {
class <- tibble_class_no_data_frame
} else {
class <- c(class[!class %in% tibble_class], tibble_class_no_data_frame)
}
slots <- c("x", "n", "class")
args[slots] <- list(x, nrow, class)
args[["row.names"]] <- NULL
exec(new_data_frame, !!!args)
}
validate_tibble <- function(x) {
check_minimal_names(x)
check_valid_cols(unclass(x))
validate_nrow(names(x), col_lengths(x), vec_size(x))
x
}
cnd_signal_if <- function(x) {
if (!is.null(x)) {
cnd_signal(x)
}
}
check_minimal <- function(name) {
cnd_signal_if(cnd_names_non_null(name))
cnd_signal_if(cnd_names_non_na(name))
}
check_minimal_names <- function(x) {
check_minimal(names(x))
invisible(x)
}
col_lengths <- function(x) {
map_int(x, vec_size)
}
validate_nrow <- function(names, lengths, nrow) {
bad_len <- which(lengths != nrow)
if (has_length(bad_len)) {
cnd_signal(error_incompatible_size(nrow, names, lengths, "Requested with `nrow` argument"))
}
}
tibble_class <- c("tbl_df", "tbl", "data.frame")
tibble_class_no_data_frame <- c("tbl_df", "tbl")
error_new_tibble_must_be_list <- function() {
tibble_error("`x` must be a list.")
}
error_new_tibble_nrow_must_be_nonnegative <- function() {
tibble_error("`nrow` must be a nonnegative whole number smaller than 2^31.")
} |
gdm_calc_deviance <- function(G, use.freqpatt, ind.group, p.xi.aj, pi.k, weights)
{
ll <- 0
for (gg in 1:G){
if ( ! use.freqpatt ){
ind.gg <- ind.group[[gg]]
ll <- ll + sum( weights[ind.gg] * log( rowSums( p.xi.aj[ind.gg,] *
matrix( pi.k[,gg], nrow=length(ind.gg), ncol=nrow(pi.k), byrow=TRUE ) ) ) )
}
if ( use.freqpatt ){
if (G>1){
wgg <- weights[,gg]
}
if (G==1){
wgg <- weights
}
ll <- ll + sum( wgg * log( rowSums( p.xi.aj * matrix( pi.k[,gg], nrow=nrow(p.xi.aj),
ncol=nrow(pi.k), byrow=TRUE ) ) ) )
}
}
dev <- -2*ll
res <- list( ll=ll, dev=dev)
return(res)
} |
makeTransparent<-function(someColor, alpha=100) {
newColor<-col2rgb(someColor)
apply(newColor, 2, function(curcoldata){
rgb(red=curcoldata[1], green=curcoldata[2], blue=curcoldata[3],
alpha=alpha, maxColorValue=255)})
} |
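# Illustrative usage (not part of the original source): turn two named colours
# into semi-transparent hex codes; alpha runs from 0 (invisible) to 255 (opaque).
makeTransparent(c("red", "steelblue"), alpha = 120)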
fget_A_RothC <- function(clay = 23.4)
{
if ((clay < 0) | (clay > 100)) stop("Invalid clay value. Must be >= 0 AND <= 100.")
ks <- c(10,0.3,0.66,0.02,0)
x <- 1.67 * (1.85 + 1.6 * exp(-0.0786 * clay))
B <- 0.46/(x + 1)
H <- 0.54/(x + 1)
ai3 <- B * ks
ai4 <- H * ks
A <- diag(-ks)
A[3, ] <- A[3, ] + ai3
A[4, ] <- A[4, ] + ai4
A
} |
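# Illustrative usage (not part of the original source): decomposition matrix for
# a soil with 30 % clay; the rates in 'ks' presumably correspond to the five
# RothC pools (DPM, RPM, BIO, HUM, IOM).
round(fget_A_RothC(clay = 30), 4)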
assign("identify.point",
function(x,v="",...) {
v <- x[[match(v,names(x))]]
if(!is.null(v))
identify(x$x,x$y,v,...)
else
identify(x$x,x$y,...)
}) |
expected <- eval(parse(text="FALSE"));
test(id=0, code={
argv <- eval(parse(text="list(structure(list(y = c(1.08728092481538, 0.0420572471552261, 0.787502161306819, 0.512717751544676, 3.35376639535311, 0.204341510750309, -0.334930602487435, 0.80049208412789, -0.416177803375218, -0.777970346246018, 0.934996808181635, -0.678786709127108, 1.52621589791412, 0.5895781228122, -0.744496121210548, -1.99065153885627, 1.51286447692396, -0.750182409847851), A = c(0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1), U = structure(c(1L, 1L, 1L, 1L, 1L, 1L, 2L, 2L, 2L, 2L, 2L, 2L, 3L, 3L, 3L, 3L, 3L, 3L), .Label = c(\"a\", \"b\", \"c\"), class = \"factor\"), V = structure(c(1L, 1L, 2L, 2L, 3L, 3L, 1L, 1L, 2L, 2L, 3L, 3L, 1L, 1L, 2L, 2L, 3L, 3L), .Label = c(\"a\", \"b\", \"c\"), class = \"factor\")), .Names = c(\"y\", \"A\", \"U\", \"V\"), terms = quote(y ~ A:U + A:V - 1), row.names = c(NA, 18L), class = \"data.frame\"), \"any\")"));
.Internal(`is.vector`(argv[[1]], argv[[2]]));
}, o=expected); |
gc_heatmap <- function(object = NULL,
algorithm_step = c('aligned','shifted','input'),
substance_subset = NULL, legend_type = c('legend','colourbar'),
samples_subset = NULL,
type = c("binary","discrete"),
threshold = NULL,
label_size = NULL,
show_legend = TRUE,
main_title = NULL,
label = c("y","xy","x","none")) {
algorithm_step <- match.arg(algorithm_step)
if (algorithm_step == "aligned") algorithm_step <- "aligned_rts"
if (algorithm_step == "shifted") algorithm_step <- "linear_transformed_rts"
if (algorithm_step == "input") algorithm_step <- "input_rts"
if (is.null(threshold)) threshold <- object[["Logfile"]][["Call"]][["max_diff_peak2mean"]]
type <- match.arg(type)
legend_type <- match.arg(legend_type)
label <- match.arg(label)
rt_df <- object[['heatmap_input']][[algorithm_step]]
rt_df[,'id'] <- as.character(rt_df[,'id'])
if (is.null(label_size)) {
lab_thresh <- c(20,40,60,80,100,120,140, Inf)
lab_size <- c(12,10,8,8,6,5,4,4)
samples_size <- nrow(rt_df)
temp <- which(lab_thresh > samples_size)
if (min(temp) == 1) {
label_size <- lab_size[1]
} else {
label_size <- lab_size[min(temp) - 1]
}
}
if (!is.null(substance_subset)) {
rt_df <- rt_df[,c(1,substance_subset + 1)]
}
if (!is.null(samples_subset)) {
if (is.character(samples_subset)) {
rt_df <- rt_df[rt_df[,1] %in% samples_subset,]
} else if (is.numeric(samples_subset)) {
rt_df <- rt_df[samples_subset,]
}
}
heat_matrix <- reshape2::melt(data = rt_df,id.vars = 'id')
names(heat_matrix) <- c('id','substance','rt')
heat_matrix[,'substance'] <- as.numeric(as.character(heat_matrix[,'substance']))
heat_matrix[,'diff'] <- (as.numeric(heat_matrix[,'rt']) - heat_matrix[,'substance'])
heat_matrix['diff'][heat_matrix['rt'] == 0] <- 0
heat_matrix[,'id'] <- ordered( heat_matrix[,'id'], levels = as.factor(rt_df[,'id']))
heat_matrix[,'substance'] <- ordered( heat_matrix[,'substance'], levels = as.factor(colnames(rt_df)[2:ncol(rt_df)]))
if (type == "binary") {
heat_matrix['diff'][abs(heat_matrix['diff']) > threshold] <- 1
heat_matrix['diff'][abs(heat_matrix['diff']) < threshold] <- 0
}
heat_matrix['diff'][heat_matrix['rt'] == 0] <- NA
heat_matrix['substance'] <- as.factor(round(as.numeric(as.character(heat_matrix[['substance']])),digits = 2))
if (type == "binary") {
if (max(heat_matrix['diff'],na.rm = T) == 0) {
hm <- ggplot(heat_matrix, aes_string(x = 'substance', y = 'id',fill = 'diff'),colour = "Blue")
hm <- hm + geom_tile(color = "transparent", size = 0.001)
hm <- hm + scale_fill_gradientn(colours = 'blue',na.value = "white")
hm <- hm + labs(x = "substance", y = "sample", title = ifelse(is.null(main_title),paste("No deviations exceeding a threshold of",as.character(threshold)),main_title))
hm <- hm + guides(fill = FALSE)
} else {
hm <- ggplot(heat_matrix, aes_string(x = 'substance', y = 'id',fill = 'diff'))
hm <- hm + geom_tile(color = "transparent", size = 0.001)
hm <- hm + scale_fill_continuous(low = "blue",high = "red",breaks = c(0,1),na.value = "white", guide = 'legend',name = paste('Deviation\n','>',as.character(threshold)),labels = c('NO','YES'))
hm <- hm + labs(x = "substance", y = "sample", title = ifelse(is.null(main_title),paste("Deviation from substance mean retention time\n(Threshold = ",as.character(threshold),")"),main_title))
}
} else {
col_pal <- c("
r <- c(min(heat_matrix[["diff"]],na.rm = T),max(heat_matrix[["diff"]],na.rm = T))
hm <- ggplot(heat_matrix, aes_string(x = 'substance', y = 'id', fill = 'diff'))
hm <- hm + geom_tile(color = "white", size = 0.01)
hm <- hm + scale_fill_gradientn(colours = col_pal,guide = "legend",name = 'Deviation',na.value = "white", limits = c(-round(max(abs(r)),2) - 0.01,round(max(abs(r)),2) + 0.01)
)
hm <- hm + labs(x = "substance", y = "sample", title = ifelse(is.null(main_title),"Variation of retention times",main_title))
}
  hm <- hm + theme(plot.title = element_text(hjust = 0.5,vjust = 1,size = 10,face = 'bold')) +
    theme(axis.text.x = element_text(size = label_size, hjust = 0.5,angle = 90),
          axis.ticks.y = element_line(size = 0.3, colour = "grey60"),
          axis.ticks.x = element_line(size = 0.3, colour = "grey60"),
          axis.text.y = element_text(size = label_size,hjust = 0.5))
if (label == "xy") {
hm <- hm + theme(axis.title.x = element_text(size = 10),
axis.title.y = element_text(size = 10),
axis.text.x = element_text(size = label_size, vjust = 0.5,angle = 90),
axis.ticks.y = element_line(size = 0.3, colour = "grey60"),
axis.ticks.x = element_line(size = 0.3, colour = "grey60"),
axis.text.y = element_text(size = label_size,hjust = 0.5))
} else if (label == "y") {
hm <- hm + theme(axis.title.x = element_text(size = 10),
axis.title.y = element_text(size = 10),
axis.text.x = element_blank(),
axis.ticks.y = element_line(size = 0.3, colour = "grey60"),
axis.ticks.x = element_line(size = 0.3, colour = "grey60"),
axis.text.y = element_text(size = label_size,hjust = 0.5))
} else if (label == "x") {
hm <- hm + theme(axis.title.x = element_text(size = 10),
axis.title.y = element_text(size = 10),
axis.text.x = element_text(size = label_size, vjust = 0.5,angle = 90),
axis.ticks.y = element_line(size = 0.3, colour = "grey60"),
axis.ticks.x = element_line(size = 0.3, colour = "grey60"),
axis.text.y = element_blank())
} else if (label == "none") {
hm <- hm + theme(axis.title.x = element_text(size = 10),
axis.title.y = element_text(size = 10),
axis.text.x = element_blank(),
axis.ticks.y = element_line(size = 0.3, colour = "grey40"),
axis.ticks.x = element_line(size = 0.3, colour = "grey40"),
axis.text.y = element_blank())
}
hm <- hm + theme(plot.background = element_rect(fill = "grey95"))
y <- 1:nrow(rt_df) + 0.5
x <- rep(0,nrow(rt_df))
yend <- 1:nrow(rt_df) + 0.5
xend <- rep(ncol(rt_df),nrow(rt_df))
my.lines <- data.frame(y = y,x = x,xend = xend, yend = yend)
hm <- hm + geom_segment(data = my.lines, aes(x,y,xend = xend, yend = yend),color = "grey", size = 0.35,show.legend = FALSE, inherit.aes = F)
y <- rep(0,ncol(rt_df))
x <- 1:ncol(rt_df) + 0.5
xend <- 1:ncol(rt_df) + 0.5
yend <- rep(nrow(rt_df) + 0.5,ncol(rt_df))
my.lines <- data.frame(y = y,x = x, xend = xend,yend = yend)
hm <- hm + geom_segment(data = my.lines, aes(x,y,xend = xend, yend = yend),color = "grey", size = 0.35,show.legend = FALSE, inherit.aes = F)
if (is.null(label)) {
if ((!is.null(substance_subset) & ncol(rt_df) < 151) || ncol(rt_df) < 151 ) {
hm <- hm + theme(axis.text.x = element_text(size = label_size, vjust = 0.5,angle = 90),
axis.ticks.y = element_line(size = 0.3, colour = "grey60"),
axis.ticks.x = element_line(size = 0.3, colour = "grey60"),
axis.text.y = element_text(size = label_size,hjust = 0.5))
}
}
if (!show_legend) hm <- hm + theme(legend.position = "none")
return(hm)
} |
boolSkip=F
test_that("Check 26.1 - test deeganPackelIndex4Vector Holler & Illing 6.3.3 example",{
if(boolSkip){
skip("Test was skipped")
}
v=weightedVotingGameVector(n=5, w=c(35,20,15,15,15),q=51)
dp=deeganPackelIndex(v)
expect_equal(dp,c(18/60,9/60,11/60,11/60,11/60))
}) |
rescale_weights <- function(data, group, probability_weights, nest = FALSE) {
if (inherits(group, "formula")) {
group <- all.vars(group)
}
weight_missings <- which(is.na(data[[probability_weights]]))
weight_non_na <- which(!is.na(data[[probability_weights]]))
if (length(weight_missings) > 0) {
data_tmp <- data[weight_non_na, ]
} else {
data_tmp <- data
}
data_tmp$.bamboozled <- 1:nrow(data_tmp)
if (nest && length(group) < 2) {
warning(insight::format_message(sprintf("Only one group variable selected, no nested structure possible. Rescaling weights for grout '%s' now.", group)), call. = FALSE)
nest <- FALSE
}
if (nest) {
out <- .rescale_weights_nested(data_tmp, group, probability_weights, nrow(data), weight_non_na)
} else {
out <- lapply(group, function(i) {
x <- .rescale_weights(data_tmp, i, probability_weights, nrow(data), weight_non_na)
if (length(group) > 1) {
colnames(x) <- sprintf(c("pweight_a_%s", "pweight_b_%s"), i)
}
x
})
}
do.call(cbind, list(data, out))
}
.rescale_weights <- function(x, group, probability_weights, n, weight_non_na) {
design_weights <- .data_frame(
group = sort(unique(x[[group]])),
sum_weights_by_group = tapply(x[[probability_weights]], as.factor(x[[group]]), sum),
sum_squared_weights_by_group = tapply(x[[probability_weights]]^2, as.factor(x[[group]]), sum),
n_per_group = as.vector(table(x[[group]]))
)
colnames(design_weights)[1] <- group
x <- merge(x, design_weights, by = group, sort = FALSE)
x <- x[order(x$.bamboozled), ]
x$.bamboozled <- NULL
w_a <- x[[probability_weights]] * x$n_per_group / x$sum_weights_by_group
w_b <- x[[probability_weights]] * x$sum_weights_by_group / x$sum_squared_weights_by_group
out <- data.frame(
pweights_a = rep(as.numeric(NA), times = n),
pweights_b = rep(as.numeric(NA), times = n)
)
out$pweights_a[weight_non_na] <- w_a
out$pweights_b[weight_non_na] <- w_b
out
}
.rescale_weights_nested <- function(x, group, probability_weights, n, weight_non_na) {
groups <- expand.grid(lapply(group, function(i) sort(unique(x[[i]]))))
colnames(groups) <- group
design_weights <- cbind(
groups,
.data_frame(
sum_weights_by_group = unlist(as.list(tapply(x[[probability_weights]], lapply(group, function(i) as.factor(x[[i]])), sum))),
sum_squared_weights_by_group = unlist(as.list(tapply(x[[probability_weights]]^2, lapply(group, function(i) as.factor(x[[i]])), sum))),
n_per_group = unlist(as.list(table(x[, group])))
)
)
x <- merge(x, design_weights, by = group, sort = FALSE)
x <- x[order(x$.bamboozled), ]
x$.bamboozled <- NULL
w_a <- x[[probability_weights]] * x$n_per_group / x$sum_weights_by_group
w_b <- x[[probability_weights]] * x$sum_weights_by_group / x$sum_squared_weights_by_group
out <- data.frame(
pweights_a = rep(as.numeric(NA), times = n),
pweights_b = rep(as.numeric(NA), times = n)
)
out$pweights_a[weight_non_na] <- w_a
out$pweights_b[weight_non_na] <- w_b
out
} |
(colorset = c('Red','Green', 'Blue'))
set.seed(1234)
tshirts = sample(colorset, size=50, replace=T, prob=c(.5, .3,.2))
table(tshirts)
prop.table(table(tshirts))
class(tshirts)
summary(tshirts)
FactorTshirts = factor(tshirts, ordered=F)
FactorTshirts
class(FactorTshirts)
summary(FactorTshirts)
likertScale = c('Excellent','Good','Satisfactory','Poor')
feedback = sample(likertScale, size=50, replace=T, prob=c(.4, .3,.2,.1))
feedback
summary(feedback)
table(feedback)
barplot(table(feedback))
FactorFeedback = factor(feedback, ordered=T, levels = c('Excellent', 'Good','Satisfactory','Poor'))
barplot(table(FactorFeedback))
summary(FactorFeedback) |
svec.irf <- irf(svec, response = "U",
n.ahead = 48, boot = TRUE)
svec.irf
plot(svec.irf) |
source(testthat::test_path("test_helpers.R"))
data(cells, package = "modeldata")
cells$case <- cells$class <- NULL
cells <- as.data.frame(scale(cells))
split <- seq.int(1, 2019, by = 10)
tr <- cells[-split, ]
te <- cells[ split, ]
test_that("step_pca_sparse_bayes", {
skip_if_not_installed("VBsparsePCA")
rec <-
recipe(~ ., data = tr) %>%
step_pca_sparse_bayes(
all_predictors(),
num_comp = 4,
prior_slab_dispersion = 1/2,
prior_mixture_threshold = 1/15
) %>%
prep()
direct_mod <- VBsparsePCA::VBsparsePCA(as.matrix(tr), lambda = 1/2, r = 4, threshold = 1/15)
direct_coef <- svd(direct_mod$loadings)$u
embed_coef <- rec$steps[[1]]$res
vars <- rownames(embed_coef)
dimnames(embed_coef) <- NULL
expect_equal(abs(direct_coef), abs(embed_coef), tolerance = 0.1)
tidy_coef <- tidy(rec, number = 1)
expect_equal(
tidy_coef$value[tidy_coef$terms == "angle_ch_1" & tidy_coef$component == "PC1"],
embed_coef[which(vars == "angle_ch_1"), 1]
)
expect_equal(
tidy_coef$value[tidy_coef$terms == "total_inten_ch_3" & tidy_coef$component == "PC3"],
embed_coef[which(vars == "total_inten_ch_3"), 3]
)
expect_snapshot(print(rec))
})
test_that("step_pca_sparse", {
skip_if_not_installed("irlba")
rec <-
recipe(~ ., data = tr) %>%
step_pca_sparse(
all_predictors(),
num_comp = 4,
predictor_prop = 1/2
) %>%
prep()
direct_mod <- irlba::ssvd(as.matrix(tr), k = 4, n = ncol(tr)/2)
direct_coef <- direct_mod$v
embed_coef <- rec$steps[[1]]$res
vars <- rownames(embed_coef)
dimnames(embed_coef) <- NULL
dimnames(direct_coef) <- NULL
expect_equal(abs(direct_coef), abs(embed_coef), tolerance = 0.1)
tidy_coef <- tidy(rec, number = 1)
expect_equal(
tidy_coef$value[tidy_coef$terms == "angle_ch_1" & tidy_coef$component == "PC1"],
embed_coef[which(vars == "angle_ch_1"), 1]
)
expect_equal(
tidy_coef$value[tidy_coef$terms == "total_inten_ch_3" & tidy_coef$component == "PC3"],
embed_coef[which(vars == "total_inten_ch_3"), 3]
)
expect_snapshot(print(rec))
})
test_that("empty selections", {
data(ad_data, package = "modeldata")
expect_error(
rec <-
recipe(Class ~ Genotype + tau, data = ad_data) %>%
step_pca_sparse(starts_with("potato")) %>%
prep(),
regexp = NA
)
expect_equal(
bake(rec, new_data = NULL),
ad_data %>% select(Genotype, tau, Class)
)
expect_error(
rec <-
recipe(Class ~ Genotype + tau, data = ad_data) %>%
step_pca_sparse_bayes(starts_with("potato")) %>%
prep(),
regexp = NA
)
expect_equal(
bake(rec, new_data = NULL),
ad_data %>% select(Genotype, tau, Class)
)
}) |
plot_roc_data <- function(roc_res) {
n_method <- length(unique(roc_res$Methods))
n_group <- length(unique(roc_res$Groups))
roc_res_df <- data.frame(Specificity= numeric(0), Sensitivity= numeric(0), Group = character(0), AUC = numeric(0), Method = character(0))
for (i in 1:n_method) {
for (j in 1:n_group) {
temp_data_1 <- data.frame(Specificity=roc_res$Specificity[[i]][j],
Sensitivity=roc_res$Sensitivity[[i]][j],
Group=unique(roc_res$Groups)[j],
AUC=roc_res$AUC[[i]][j],
Method = unique(roc_res$Methods)[i])
colnames(temp_data_1) <- c("Specificity", "Sensitivity", "Group", "AUC", "Method")
roc_res_df <- rbind(roc_res_df, temp_data_1)
}
temp_data_2 <- data.frame(Specificity=roc_res$Specificity[[i]][n_group+1],
Sensitivity=roc_res$Sensitivity[[i]][n_group+1],
Group= "Macro",
AUC=roc_res$AUC[[i]][n_group+1],
Method = unique(roc_res$Methods)[i])
temp_data_3 <- data.frame(Specificity=roc_res$Specificity[[i]][n_group+2],
Sensitivity=roc_res$Sensitivity[[i]][n_group+2],
Group= "Micro",
AUC=roc_res$AUC[[i]][n_group+2],
Method = unique(roc_res$Methods)[i])
colnames(temp_data_2) <- c("Specificity", "Sensitivity", "Group", "AUC", "Method")
colnames(temp_data_3) <- c("Specificity", "Sensitivity", "Group", "AUC", "Method")
roc_res_df <- rbind(roc_res_df, temp_data_2)
roc_res_df <- rbind(roc_res_df, temp_data_3)
}
return(roc_res_df)
} |
has_color <- function() {
num_ansi_colors() > 1L
}
num_colors <- function(forget = FALSE) {
cray_opt_num <- getOption("crayon.colors", NULL)
if (!is.null(cray_opt_num)) return(as.integer(cray_opt_num))
num_ansi_colors()
} |
group_center <- function(x, grp) {
grp <- as.numeric(as.factor(grp))
return(x - tapply(x, grp, mean, na.rm = TRUE)[grp])
} |
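# Illustrative usage (not part of the original source): centre a vector within
# groups so that each group's (non-missing) mean becomes zero.
group_center(x = c(1, 2, 3, 10, 20, 30), grp = c("a", "a", "a", "b", "b", "b"))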
tam_pv_mcmc_proposal_theta <- function(theta, nstud, variance, adj_MH, D, G, group_index)
{
nstud <- nrow(theta)
theta_new <- matrix( NA, nrow=nstud, ncol=D)
for (gg in 1:G){
ind_gg <- group_index[[gg]]
nstud_gg <- attr( group_index, "N_groups")[gg]
variance_gg <- variance[[gg]]
mean_gg <- rep(0, D )
samp_values <- matrix( CDM::CDM_rmvnorm( nstud_gg, mean=mean_gg, sigma=variance_gg ), ncol=D )
theta_new[ind_gg,] <- theta[ind_gg,] + adj_MH[ind_gg] * samp_values
}
return(theta_new)
} |
NDVI <- function(spct, imager = "LandsatOLI", wb.trim = FALSE) {
if (is.generic_mspct(spct)) {
msaply(spct, .fun = NDVI, imager = imager, wb.trim = wb.trim)
} else if (is.reflector_spct(spct) || is.object_spct(spct)) {
Red.band <- Red(std = imager)
NIR.band <- NIR(std = imager)
if (wl_min(spct) > wl_min(Red.band) || wl_max(spct) < wl_max(NIR.band)) {
NA_real_
} else {
Rfr.Red <- as.numeric(reflectance(spct, Red.band))
Rfr.NIR <- as.numeric(reflectance(spct, NIR.band))
(Rfr.NIR - Rfr.Red) / (Rfr.NIR + Rfr.Red)
}
} else {
warning("'NDVI' is not defined for ", class(spct))
NA_real_
}
} |
rm_determine_fixed_tau_parameters <- function( K, maxK, VV, tau.item.fixed=NULL, val=99)
{
if ( min(maxK) < K ){
for (vv in 1:VV){
K.vv <- maxK[vv]
if ( K.vv < K ){
for (zz in (K.vv+1):K ){
d1 <- data.frame( "item"=vv, "categ"=zz, "val"=val)
tau.item.fixed <- rbind( tau.item.fixed, d1 )
}
}
}
tau.item.fixed <- as.matrix(tau.item.fixed )
}
return(tau.item.fixed)
} |
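# Illustrative usage (not part of the original source): with K = 3 categories
# overall but only 2 observed for item 2, the category-3 threshold of item 2 is
# fixed to the placeholder value 99.
rm_determine_fixed_tau_parameters(K = 3, maxK = c(3, 2, 3), VV = 3)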
feglmControl <- function(dev.tol = 1.0e-08,
center.tol = 1.0e-05,
iter.max = 25L,
limit = 10L,
trace = FALSE,
drop.pc = TRUE,
conv.tol = NULL,
rho.tol = NULL,
pseudo.tol = NULL,
step.tol = NULL) {
if (!is.null(conv.tol)) {
warning("'conv.tol' is deprecated;", call. = FALSE)
}
if (!is.null(rho.tol)) {
warning("'rho.tol' is deprecated;", call. = FALSE)
}
if (!is.null(pseudo.tol)) {
warning("'pseudo.tol' is deprecated; please use 'center.tol' instead.", call. = FALSE)
center.tol <- pseudo.tol
}
if (!is.null(step.tol)) {
warning("'step.tol' is deprecated;", call. = FALSE)
}
if (dev.tol <= 0.0 || center.tol <= 0.0) {
stop("All tolerance paramerters should be greater than zero.", call. = FALSE)
}
iter.max <- as.integer(iter.max)
if (iter.max < 1L) {
stop("Maximum number of iterations should be at least one.", call. = FALSE)
}
limit <- as.integer(limit)
if (limit < 1L) {
stop("Maximum number of iterations should be at least one.", call. = FALSE)
}
list(dev.tol = dev.tol,
center.tol = center.tol,
iter.max = iter.max,
limit = limit,
trace = as.logical(trace),
drop.pc = as.logical(drop.pc))
}
feglm.control <- function(...) {
.Deprecated("feglmControl")
do.call(feglmControl, list(...))
} |
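# Illustrative usage (not part of the original source): tighten the deviance
# tolerance and allow more iterations than the defaults.
str(feglmControl(dev.tol = 1e-10, iter.max = 50L))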
material_text_box <- function(input_id, label, value = "", color = NULL, icon = NULL){
if (!is.null(color)) {
if (!is.null(icon)) {
      # NOTE: the CSS selector line (it began with '#') was lost during
      # extraction; the selector below is a plausible reconstruction targeting
      # the prefix icon of this widget.
      icon_style_color <- paste0(
        "
        #", input_id, "_text_box i {
        color: ", color, ";
        }"
      )
} else {
icon_style_color <- ""
}
text_box_style <-
shiny::tagList(
shiny::tags$head(
shiny::tags$style(
paste0(
"
color: ", color, ";
}
border-bottom: 1px solid ", color, ";
box-shadow: 0 1px 0 0 ", color, ";
}
",
icon_style_color
)
)
)
)
} else {
text_box_style <- shiny::tags$div()
}
if (!is.null(icon)) {
text_box_icon <- shiny::tags$i(class = "material-icons prefix", icon)
} else {
text_box_icon <- shiny::tags$div()
}
create_material_object(
js_file =
"shiny-material-text-box.js",
material_tag_list =
shiny::tagList(
shiny::tags$div(
class = "input-field",
id = paste0(input_id, "_text_box"),
text_box_icon,
shiny::tags$input(
id = input_id,
type = "text",
class = "validate",
value = value
),
shiny::tags$label(
`for` = input_id,
label
)
),
text_box_style
)
)
} |
laymanMetrics <- function(x,y){
out <- list()
metrics <- double(length=6)
names(metrics) <- c("dY_range","dX_range",
"TA","CD","NND","SDNND")
metrics[1] <- max(y) - min(y)
metrics[2] <- max(x) - min(x)
hull <- siberConvexhull(x,y)
metrics[3] <- hull$TA
mean_y <- mean(y)
mean_x <- mean(x)
metrics[4] <- mean( ( (mean_x - x)^2 + (mean_y - y)^2 ) ^ 0.5 )
NNDs <- numeric(length(x))
for (j in 1:length(x)){
tmp <- ( (x[j] - x)^2 + (y[j] - y)^2 ) ^ 0.5
tmp[j] <- max(tmp)
NNDs[j] <- min(tmp)
}
metrics[5] <- mean(NNDs)
metrics[6] <- stats::sd(NNDs)
out$metrics <- metrics
out$hull <- hull
return(out)
} |
group_nest_impl <- function(.tbl, .key, keep = FALSE){
mutate(group_keys(.tbl), !!.key := group_split(.tbl, .keep = keep))
}
group_nest <- function(.tbl, ..., .key = "data", keep = FALSE){
lifecycle::signal_stage("experimental", "group_nest()")
UseMethod("group_nest")
}
group_nest.data.frame <- function(.tbl, ..., .key = "data", keep = FALSE) {
if (dots_n(...)) {
group_nest_impl(group_by(.tbl, ...), .key = .key, keep = keep)
} else {
tibble(!!.key := list(.tbl))
}
}
group_nest.grouped_df <- function(.tbl, ..., .key = "data", keep = FALSE) {
if (dots_n(...)) {
warn("... is ignored in group_nest(<grouped_df>), please use group_by(..., .add = TRUE) %>% group_nest()")
}
group_nest_impl(.tbl, .key = .key, keep = keep)
} |
bh=function(p) { q=p; q[!is.na(p)]=p.adjust(pmin(p[!is.na(p)],1),method="fdr"); q }
bfc=function(p) min(p,na.rm=T)*sum(!is.na(p))
logistic=function(g) 1/(1+exp(-g))
robustSolve=function(si, eigenThreshold=0.01){
svdSigma=eigen(si)
svdSigma$values=pmax(svdSigma$values,eigenThreshold)
svdSigma$vectors %*% diag(1/svdSigma$values) %*% t(svdSigma$vectors)
}
fisherCombined=function(p) pchisq( -2*sum(log(p),na.rm=T), df=2*sum(!is.na(p)), lower.tail = F)
pqplotHelper=function(p, ...) qqplot(-log10(runif(length(p))), -log10(p), pch=16, ...)
pqplot=function(p, ...) { pqplotHelper(p); abline(0,1) }
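# Illustrative usage (not part of the original source) of the small helpers
# above: FDR adjustment with missing values, Fisher's combined p-value, and the
# logistic function.
bh(c(0.01, 0.20, NA, 0.04))
fisherCombined(c(0.01, 0.5, NA))
logistic(0)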
multiqq=function(pvalues) {
punif=-log10(runif(max(sapply(pvalues,length))))
df=do.call(rbind, foreach( i=seq_len(length(pvalues))) %do% {
df=as.data.frame( qqplot(punif[1:length(pvalues[[i]])], -log10(pvalues[[i]]), plot.it=F) )
df$group=names(pvalues)[i]
df
} )
df$group=factor(df$group, names(pvalues))
ggplot(df, aes(x,y,col=group)) + geom_point() + geom_abline(intercept=0,slope=1) + theme_bw(base_size=18) + xlab("Expected -log10(p)") + ylab("Observed -log10(p)")
}
get_intron_meta=function(introns){
intron_meta=do.call(rbind,strsplit(introns,":"))
colnames(intron_meta)=c("chr","start","end","clu")
intron_meta=as.data.frame(intron_meta,stringsAsFactors = F)
intron_meta$start=as.numeric(intron_meta$start)
intron_meta$end=as.numeric(intron_meta$end)
intron_meta$middle=.5*(intron_meta$start+intron_meta$end)
intron_meta
}
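# Illustrative usage (not part of the original source): parse two made-up
# "chr:start:end:cluster" intron identifiers into a metadata data frame.
get_intron_meta(c("chr1:100:200:clu_1", "chr1:150:300:clu_1"))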
mahalanobis_outlier=function(x) {
ei=eigen(cov(x))
if (any(ei$values<=0.0)) return(numeric(nrow(x))+1)
prec=ei$vectors %*% diag(1/ei$values) %*% t(ei$vectors)
mah_dist=mahalanobis( x, colMeans(x), prec, inverted=T )
pchisq(mah_dist, df=ncol(x), lower.tail=F) * nrow(x)
}
sanitize_simplex=function(x, eps=1e-6) {
x[x<eps]=eps
x[x>(1.0-eps)]=(1.0-eps)
x/sum(x)
}
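# Illustrative usage (not part of the original source): Bonferroni-adjusted
# chi-square outlier p-values for a simulated Gaussian sample, and
# renormalisation of a probability vector containing an exact zero.
set.seed(1)
mahalanobis_outlier(matrix(rnorm(100), ncol = 2))
sanitize_simplex(c(0.6, 0.4, 0))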
map_clusters_to_genes=function(intron_meta, exons_table) {
gene_df=foreach (chr=sort(unique(intron_meta$chr)), .combine=rbind) %dopar% {
intron_chr=intron_meta[ intron_meta$chr==chr, ]
exons_chr=exons_table[exons_table$chr==chr, ]
exons_chr$temp=exons_chr$start
intron_chr$temp=intron_chr$end
three_prime_matches=inner_join( intron_chr, exons_chr, by="temp")
exons_chr$temp=exons_chr$end
intron_chr$temp=intron_chr$start
five_prime_matches=inner_join( intron_chr, exons_chr, by="temp")
all_matches=rbind(three_prime_matches, five_prime_matches)[ , c("clu", "gene_name")]
all_matches=all_matches[!duplicated(all_matches),]
if (nrow(all_matches)==0) return(NULL)
all_matches$clu=paste(chr,all_matches$clu,sep=':')
all_matches
}
clu_df=gene_df %>% group_by(clu) %>% summarize(genes=paste(gene_name, collapse = ","))
class(clu_df)="data.frame"
clu_df
}
add_chr=function(chrs)
if (!grepl("chr",chrs[1])) paste0("chr",chrs) else chrs |
prettymap <- function(plotexpression, oma=c(0, 0, 0, 0),
mai=c(0, 0, 0, 0), drawbox=FALSE, box.lwd=1,
drawscale=TRUE, scale.pos="bottomleft", scale.htin=0.1,
scale.widthhint=0.25, scale.unitcategory="metric", scale.style="bar",
scale.bar.cols=c("black", "white"), scale.lwd=1, scale.linecol="black",
scale.padin=c(0.15, 0.15), scale.labelpadin=0.08, scale.label.cex=0.8,
scale.label.col="black", scale.plotunit=NULL, scale.plotepsg=NULL, scale.tick.cex=0.8,
drawarrow=FALSE, arrow.pos="topright", arrow.scale=1, arrow.padin=c(0.15, 0.15),
arrow.lwd=1, arrow.cols=c("white", "black"), arrow.border="black",
arrow.text.col="black", title=NULL, ...) {
prevpars <- graphics::par(oma=oma, mai=mai, ...)
tryCatch(expr={
force(plotexpression)
if(drawbox) graphics::box(lwd=box.lwd)
if(drawscale) addscalebar(plotunit=scale.plotunit, pos=scale.pos, htin=scale.htin,
widthhint=scale.widthhint, unitcategory=scale.unitcategory, style=scale.style,
bar.cols=scale.bar.cols, lwd=scale.lwd, linecol=scale.linecol,
padin=scale.padin, labelpadin=scale.labelpadin, label.cex=scale.label.cex,
label.col=scale.label.col, plotepsg=scale.plotepsg, tick.cex=scale.tick.cex)
if(drawarrow) addnortharrow(pos=arrow.pos, padin=arrow.padin, scale=arrow.scale, lwd=arrow.lwd,
cols=arrow.cols, border=arrow.border, text.col=arrow.text.col)
}, error=function(e) {
message(paste("Error occured while plotting: ", e))
}, finally={graphics::par(prevpars)})
if(!is.null(title)) {
prevpars <- graphics::par(mar=c(0,0,1,0))
graphics::title(title)
graphics::par(prevpars)
}
} |
calculateIIF <- function(A = rep(1, length(B)), B, C = rep(0, length(B)), theta, D = 1.7) {
if(!is.numeric(theta)) stop("'theta' must be a numeric vector.")
if(!is.numeric(A)) stop("'A' must be a numeric vector.")
if(!is.numeric(B)) stop("'B' must be a numeric vector.")
if(!is.numeric(C)) stop("'C' must be a numeric vector.")
if(length(A) != length(B) || length(A) != length(C)) stop("'A', 'B', and 'C' must be of the same length.")
nP <- length(theta)
Am <- rep(D, nP) %o% A
Cm <- rep(1, nP) %o% C
P <- Cm + (1 - Cm) / (1 + exp(- Am * outer(theta, B, "-")))
IIFs <- t(Am^2 * ((P - Cm)/(1 - Cm))^2 * (1 - P)/P)
colnames(IIFs) <- paste0("theta=", theta)
return(IIFs)
} |
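# Illustrative usage (not part of the original source): item information for
# three 2PL-style items (guessing fixed at 0) on a small theta grid.
calculateIIF(A = c(1, 1.5, 0.8), B = c(-1, 0, 1), theta = seq(-2, 2, by = 1))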
runmed <- function(x, k, endrule = c("median","keep","constant"),
algorithm = NULL, print.level = 0)
{
n <- as.integer(length(x))
if(is.na(n)) stop("invalid value of length(x)")
k <- as.integer(k)
if(is.na(k)) stop("invalid value of 'k'")
if(k < 0L) stop("'k' must be positive")
if(k %% 2L == 0L)
warning(gettextf("'k' must be odd! Changing 'k' to %d",
k <- as.integer(1+ 2*(k %/% 2))), domain = NA)
if(n == 0L) {
x <- double(); attr(x, "k") <- k
return(x)
}
if (k > n)
warning(gettextf("'k' is bigger than 'n'! Changing 'k' to %d",
k <- as.integer(1+ 2*((n - 1)%/% 2))), domain = NA)
algorithm <-
if(missing(algorithm)) {
if(k < 20L || n < 300L) "Stuetzle" else "Turlach"
}
else match.arg(algorithm, c("Stuetzle", "Turlach"))
endrule <- match.arg(endrule)
iend <- switch(endrule,
"median" =, "keep" = 0L,
"constant" = 1L)
if(print.level)
cat("runmed(*, endrule=", endrule,", algorithm=",algorithm,
", iend=",iend,")\n")
res <- switch(algorithm,
Turlach = .Call(C_runmed, as.double(x), 1, k, iend, print.level),
Stuetzle = .Call(C_runmed, as.double(x), 0, k, iend, print.level))
if(endrule == "median") res <- smoothEnds(res, k = k)
attr(res,"k") <- k
res
}
smoothEnds <- function(y, k = 3)
{
med3 <- function(a,b,c)
{
m <- b
if (a < b) {
if (c < b) m <- if (a >= c) a else c
} else {
if (c > b) m <- if (a <= c) a else c
}
m
}
med.odd <- function(x, n = length(x))
{
half <- (n + 1) %/% 2
sort(x, partial = half)[half]
}
k <- as.integer(k)
if (k < 0L || k %% 2L == 0L)
stop("bandwidth 'k' must be >= 1 and odd!")
k <- k %/% 2L
if (k < 1L) return(y)
n <- length(y)
sm <- y
if (k >= 2L) {
sm [2L] <- med3(y[1L],y [2L], y [3L])
sm[n-1L] <- med3(y[n],y[n-1L],y[n-2L])
if (k >= 3L) {
for (i in 3:k) {
j <- 2L*i - 1L
sm [i] <- med.odd( y[1L:j] , j)
sm[n-i+1L] <- med.odd( y[(n+1L-j):n], j)
}
}
}
sm[1L] <- med3(y[1L], sm [2L] , 3*sm [2L] - 2*sm [3L])
sm[n] <- med3(y[n], sm[n-1L], 3*sm[n-1L] - 2*sm[n-2L])
sm
} |
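# Illustrative usage (not part of the original source): smoothEnds() is pure R
# and can be exercised directly; runmed() above relies on the compiled C routine
# registered inside the stats package, so it is not called here.
smoothEnds(c(5, 1, 3, 2, 8, 4, 6), k = 3)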
setMethod('compare', signature(numerator = "BFlinearModel", denominator = "missing", data = "data.frame"),
function(numerator, data, ...){
if(!.hasSlot(numerator,"analysis")) numerator@analysis = list()
old.numerator = numerator
rscaleFixed = rpriorValues("allNways","fixed",numerator@prior$rscale[['fixed']])
rscaleRandom = rpriorValues("allNways","random",numerator@prior$rscale[['random']])
rscaleCont = rpriorValues("regression",,numerator@prior$rscale[['continuous']])
rscaleEffects = numerator@prior$rscale[['effects']]
formula = formula(numerator@identifier$formula)
checkFormula(formula, data, analysis = "lm")
factors = fmlaFactors(formula, data)[-1]
nFactors = length(factors)
dataTypes = numerator@dataTypes
relevantDataTypes = dataTypes[names(dataTypes) %in% factors]
dv = stringFromFormula(formula[[2]])
dv = composeTerm(dv)
if(numerator@type != "JZS") stop("Unknown model type.")
denominator = BFlinearModel(type = "JZS",
identifier = list(formula = paste(dv,"~ 1")),
prior=list(),
dataTypes = dataTypes,
shortName = paste("Intercept only",sep=""),
longName = paste("Intercept only", sep=""),
analysis = list(method="trivial")
)
bf <- list(bf=NA, properror=NA, method=NA)
BFtry({
if( nFactors == 0 ){
numerator = denominator
bf = list(bf = 0, properror = 0, method = "trivial")
}else if(all(relevantDataTypes == "continuous")){
reg = summary(lm(formula,data=data))
R2 = reg[[8]]
N = nrow(data)
p = length(attr(terms(formula),"term.labels"))
if( any( names( rscaleEffects ) %in% attr(terms(formula),"term.labels")) ){
stop("Continuous prior settings set from rscaleEffects; use rscaleCont instead.")
}
bf = linearReg.R2stat(N,p,R2,rscale=rscaleCont)
}else if(all(relevantDataTypes != "continuous")){
freqs <- table(data[[factors[1]]])
if(all(freqs==1)) stop("not enough observations")
nLvls <- length(freqs)
rscale = ifelse(dataTypes[factors[1]] == "fixed", rscaleFixed, rscaleRandom)
if(length(rscaleEffects)>0)
if(!is.na(rscaleEffects[factors[1]]))
rscale = rscaleEffects[factors[1]]
if( (nFactors==1) & (nLvls==2) ){
t = t.test(formula = formula,data=data, var.eq=TRUE)$statistic
bf = ttest.tstat(t=t, n1=freqs[1], n2=freqs[2],rscale=rscale*sqrt(2))
}else if( (nFactors==1) & (nLvls>2) & all(freqs==freqs[1])){
Fstat = summary(aov(formula, data=data))[[1]]["F value"][1,]
J = length(freqs)
N = freqs[1]
bf = oneWayAOV.Fstat(Fstat, N, J, rscale)
}else if( (nFactors > 1) | ( (nFactors == 1) & any(freqs!=freqs[1]))){
bf = nWayFormula(formula=formula, data = data,
dataTypes = dataTypes,
rscaleFixed = rscaleFixed,
rscaleRandom = rscaleRandom,
rscaleEffects = rscaleEffects,
posterior = FALSE, ...)
}else{
stop("Too few levels in independent variable: ",factors[1])
}
}else{
bf = nWayFormula(formula=formula, data = data,
dataTypes = dataTypes,
rscaleFixed = rscaleFixed,
rscaleRandom = rscaleRandom,
rscaleCont = rscaleCont,
rscaleEffects = rscaleEffects,
posterior = FALSE, ...)
}
})
numerator@analysis = as.list(bf)
numerator = combineModels(list(numerator,old.numerator))
bf_df = data.frame(bf = numerator@analysis[['bf']],
error = numerator@analysis[['properror']],
time = date(),
code = randomString(1)
)
rownames(bf_df) <- numerator@shortName
newBF = BFBayesFactor(numerator = list(numerator),
denominator = denominator,
data = data,
bayesFactor = bf_df
)
return(newBF)
}
) |
Transpose_graph <- function(vgraph) {
edges <- get.edgelist(vgraph, names=F)
edges <- edges[,2:1]
vgraph <- delete.edges(vgraph, E(vgraph))
vgraph <- add.edges(vgraph, t(edges))
return(vgraph)
} |
context("Test for lets.presab.birds")
path.Ramphastos <- system.file("extdata", package = "letsR")
test_that("lets.presab.birds return a correct PresenceAbsence object", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, xmn=-93, xmx=-29, ymn= -57, ymx=25,
resol=1, remove.cells=TRUE, remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0, presence=NULL,
origin=NULL, seasonal=NULL, count=FALSE)
expect_equal(class(PAM)[1], "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]])[1], "character")
})
test_that("lets.presab.birds return a correct PresenceAbsence object for the world", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, resol=5, remove.cells=TRUE, remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0.2, presence=NULL,
origin=NULL, seasonal=NULL, count=FALSE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
})
test_that("lets.presab.birdsreturn a correct PresenceAbsence object for cover different projection", {
skip_on_cran()
pro <- paste("+proj=eqdc +lat_0=-32 +lon_0=-60 +lat_1=-5",
"+lat_2=-42 +x_0=0 +y_0=0 +ellps=aust_SA",
"+units=m +no_defs")
SA_EC <- CRS(pro)
PAM3 <- lets.presab.birds(path.Ramphastos, xmn = -4135157,
xmx = 4707602,
ymn = -450000, ymx = 5774733,
resol = 100000,
crs.grid = SA_EC, cover = .9)
expect_equal(class(PAM3), "PresenceAbsence")
expect_true(is.matrix(PAM3[[1]]))
expect_true(inherits(PAM3[[2]], "RasterLayer"))
expect_equal(class(PAM3[[3]]), "character")
})
test_that("lets.presab.birdsreturn a correct PresenceAbsence object (count=TRUE)", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, xmn=-93, xmx=-29, ymn= -57, ymx=25,
resol=1, remove.cells=TRUE, remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0, presence=NULL,
origin=NULL, seasonal=NULL, count=TRUE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
})
test_that("lets.presab.birdsreturn a correct PresenceAbsence object, cover=0.2", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, xmn=-93, xmx=-29, ymn= -57, ymx=25,
resol=1, remove.cells=TRUE, remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0.2, presence=NULL,
origin=NULL, seasonal=NULL, count=FALSE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
})
test_that("lets.presab.birds return a correct PresenceAbsence object, remove.sp=FALSE", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, xmn=-93, xmx=-29, ymn= -57, ymx=25,
resol=1, remove.cells=TRUE, remove.sp=FALSE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=1, presence=NULL,
origin=NULL, seasonal=NULL, count=FALSE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
})
test_that("lets.presab.birdsreturn a correct PresenceAbsence object, remove.cells=FALSE", {
skip_on_cran()
PAM <- lets.presab.birds(path.Ramphastos, xmn=-93, xmx=-29, ymn= -57, ymx=25,
resol=1, remove.cells=FALSE, remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0, presence=NULL,
origin=NULL, seasonal=NULL, count=FALSE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
response <- summary(PAM)
expect_true(response$Cellswithoutanypresence > 0)
})
test_that("lets.presab.birds new projection", {
skip_on_cran()
desiredcrs <- CRS("+proj=laea +lat_0=0 +lon_0=-80 +x_0=180 +y_0=70 +units=km")
PAM <- lets.presab.birds(path.Ramphastos, xmn = -3000,
xmx = 6000, ymn = -5000,
ymx = 3000, res = 100, remove.cells=TRUE,
remove.sp=TRUE, show.matrix=FALSE,
crs=CRS("+proj=longlat +datum=WGS84"), cover=0,
presence=NULL,
crs.grid = desiredcrs,
origin=NULL, seasonal=NULL, count=TRUE)
expect_equal(class(PAM), "PresenceAbsence")
expect_true(is.matrix(PAM[[1]]))
expect_true(inherits(PAM[[2]], "RasterLayer"))
expect_equal(class(PAM[[3]]), "character")
}) |
`$.python.builtin.dict` <- function(x, name) {
if (py_is_null_xptr(x) || !py_available())
return(NULL)
if (py_has_attr(x, name)) {
item <- py_get_attr(x, name)
return(py_maybe_convert(item, py_has_convert(x)))
}
`[.python.builtin.dict`(x, name)
}
`[.python.builtin.dict` <- function(x, name) {
if (py_is_null_xptr(x) || !py_available())
return(NULL)
item <- py_dict_get_item(x, name)
py_maybe_convert(item, py_has_convert(x))
}
`[[.python.builtin.dict` <- `[.python.builtin.dict`
`$<-.python.builtin.dict` <- function(x, name, value) {
if (!py_is_null_xptr(x) && py_available())
py_dict_set_item(x, name, value)
else
stop("Unable to assign value (dict reference is NULL)")
x
}
`[<-.python.builtin.dict` <- `$<-.python.builtin.dict`
`[[<-.python.builtin.dict` <- `$<-.python.builtin.dict`
length.python.builtin.dict <- function(x) {
if (py_is_null_xptr(x) || !py_available())
0L
else
py_dict_length(x)
} |
context("jagsOneBaseline")
test_that("function return a character class", {
expect_equal(typeof(jagsOneBaseline()), "character")
})
test_that("function return a ihnerited oneBaseline class", {
expect_equal(class(jagsOneBaseline())[2], "oneBaseline")
})
test_that("function send a warning/error when priors are not correctly written",
{
expect_warning(jagsOneBaseline(TP = "dnorm(3, 1"))
expect_warning(jagsOneBaseline(TP = "dnorm(3, 1"),
muB = "duni(3,3)")
expect_warning(jagsOneBaseline(sigmaB = "dnorn(3, 1)"))
expect_warning(jagsOneBaseline(TP = "dnorm(3, 1)",
sigmaDeltaN = "dbeta(1,1"))
expect_error(jagsOneBaseline(TP = "dnorm(3, 1)",
sigma = "dbeta(1, 1)",
lambda = "5"))
}) |
cpt.reg <- function(data, penalty="MBIC", pen.value=0, method="AMOC", dist="Normal",
class=TRUE, param.estimates=TRUE, minseglen=3, shape = 0, tol = 1e-07){
MBIC=0
if(!is.array(data) || !is.numeric(data))
stop("Argument 'data' must be a numerical matrix/array.")
if(!is.character(penalty) || length(penalty)>1)
stop("Argument 'penelty' is invalid.")
if(!is.character(method) || length(method)>1)
stop("Argument 'method' is invalid.")
if(method!="AMOC" && method != "PELT")
stop("Invalid method, must be AMOC or PELT.")
if(!is.character(dist) || length(dist)>1)
stop("Argument 'dist' is invalid.")
if(dist != "Normal"){
warning(paste0("dist = ",dist," is not supported. Converted to dist='Normal'"))
dist <- "Normal"
}
if(!is.logical(class) || length(class)>1)
stop("Argument 'class' is invalid.")
if(!is.logical(param.estimates) || length(param.estimates)>1)
stop("Argument 'param.estimates' is invalid.")
if(!is.numeric(minseglen) || length(minseglen)>1)
stop("Argument 'minseglen' is invalid.")
if(minseglen <= 0 || minseglen%%1 != 0)
stop("Argument 'minseglen' must be positive integer.")
if(!is.numeric(tol) || length(tol)!=1)
stop("Argument 'tol' is invalid.")
if(tol<0) stop("Argument 'tol' must be positive.")
if(length(dim(data)) == 2) data <- array(data,dim=c(1,dim(data)))
ans <- vector("list",dim(data)[1])
for(i in 1:dim(data)[1]){
datai <- check_data(data[i,,])
pen.value <- changepoint::penalty_decision(penalty=penalty,
pen.value=pen.value, n=nrow(datai), diffparam=ncol(datai),
asymcheck="cpt.reg", method=method)
if(penalty=="MBIC"){MBIC=1}
if(minseglen < (ncol(datai)-1)){
warning(paste("minseglen is too small, set to:",ncol(datai)))
minsegleni <- ncol(datai)
}else if(nrow(datai) < (2*minseglen)){
stop("Minimum segment length is too large to include a change in this data.")
}else{
minsegleni <- minseglen
}
CPTS <- ChangepointRegression(data=datai, penalty=penalty,
      penalty.value=pen.value, method=method, dist=dist, minseglen=minsegleni,
shape = shape, tol=tol, cpts.only=class, MBIC=MBIC)
if(class){
ansi <- new("cpt.reg")
data.set(ansi) <- datai
cpttype(ansi) <- "regression"
method(ansi) <- method
distribution(ansi) <- dist
pen.type(ansi) <- penalty
pen.value(ansi) <- pen.value
cpts(ansi) <- CPTS
if(method=="PELT") ncpts.max(ansi) <- Inf
if(param.estimates) ansi = param(ansi)
ans[[i]] <- ansi
}else{
ans[[i]] <- CPTS
}
}
if(dim(data)[1]==1){ return(ans[[1]]) }else{ return(ans) }
}
check_data <- function(data, minseglen=3){
if(!is.array(data) || !is.numeric(data))
stop("Argument 'data' must be a numerical matrix.")
if(length(dim(data))!=2)
stop("Argument 'data' must be a numerical matrix.")
if(!is.numeric(minseglen) || length(minseglen)>1)
stop("Argument 'minseglen' is invalid.")
n <- nrow(data)
p <- ncol(data)-1
if(p==0) stop("Dimension of data is 1, no regressors found.")
if(n<p) stop("More regressors than observations.")
  # check all regressor columns (everything except the response in column 1)
  # for a constant column that can serve as the intercept
  intercept <- apply(as.matrix(data[,-1]),2,function(a){all(a[1]==a)})
  if(sum(intercept)<1){
    warning("Missing intercept regressor. Appending 1's to the right of the data.")
data <- cbind(data,1)
p <- p+1
}else if(sum(intercept)>1){
i <- which(intercept)[-1]+1
warning("Multiple intercepts found. Keeping only first instance.")
data <- data[,-i]
}
return(data)
}
ChangepointRegression <- function(data, penalty="MBIC", penalty.value=0,
method="AMOC", dist="Normal", minseglen=3, cpts.only=TRUE, shape=0, MBIC=0, tol=1e-07){
if(!is.logical(cpts.only) && length(cpts.only)>1)
stop("Argument 'cpts.only' is invalid.")
if(method=="AMOC" && dist=="Normal"){
out <- CptReg_AMOC_Normal(data, penalty, penalty.value, minseglen, shape, MBIC, tol)
}else if(method=="PELT" && dist=="Normal"){
out <- CptReg_PELT_Normal(data, penalty.value, minseglen, shape,MBIC, tol)
}else{
stop("Changepoint in regression method not recognised.")
}
if(cpts.only){
return(sort(out$cpts))
}else{
return(out)
}
}
CptReg_AMOC_Normal <- function(data, penalty="MBIC", penalty.value=0, minseglen=3,
shape=0, MBIC=0, tol=1e-07){
n <- as.integer(nrow(data))
p <- as.integer(ncol(data)-1)
if(p<1 || n<p) stop("Invalid data dimensions.")
if(!is.numeric(shape) || length(shape)!=1)
stop("Argument 'shape' is invalid.")
answer=list()
answer[[5]]=1
on.exit(.C("Free_CptReg_Normal_AMOC",answer[[6]],PACKAGE="EnvCpt"))
answer <- .C("CptReg_Normal_AMOC", data=as.double(data), n=as.integer(n),
m=as.integer(p+1), pen=as.double(penalty.value), err=0L,
shape=as.double(shape), minseglen=as.integer(minseglen), tol=as.double(tol),
tau=0L, nulllike=vector("double",1), taulike=vector("double",1),
tmplike=vector("double",n), MBIC=as.integer(MBIC),PACKAGE="EnvCpt")
if(answer$err!=0) stop("C code error:",answer$err,call.=F)
tmp <- c(answer$tau,answer$nulllike,answer$taulike)
out <- changepoint::decision(tau = tmp[1], null = tmp[2], alt = tmp[3],
penalty = penalty, n=n, diffparam=p, pen.value = penalty.value)
names(out) <- c("cpts","pen.value")
return(out)
}
CptReg_PELT_Normal <- function(data, penalty.value=0, minseglen=3, shape=0,
MBIC=0, tol=1e-07){
n <- as.integer(nrow(data))
p <- as.integer(ncol(data)-1)
if(!is.numeric(shape) || length(shape)!=1)
stop("Argument 'shape' is invalid.")
answer=list()
answer[[6]]=1
on.exit(.C("Free_CptReg_Normal_PELT",answer[[6]],PACKAGE="EnvCpt"))
answer <- .C("CptReg_Normal_PELT", data=as.double(data), n=n,
m=as.integer(p+1), pen=as.double(penalty.value), cpt=vector("integer",n),
err=0L, shape=as.double(shape), minseglen=as.integer(minseglen),
tol=as.double(tol), lastchangelike=vector("double",n+1),
lastchangecpts=vector("integer",n+1), numchangecpts=vector("integer",n+1),
MBIC=as.integer(MBIC), PACKAGE="EnvCpt")
if(answer$err!=0){
stop("C code error:",answer$err,call.=F)
}
return(list(lastchangecpts=answer$lastchangecpts,
cpts=sort(answer$cpt[answer$cpt>0]),
lastchangelike=answer$lastchangelike,
ncpts=answer$numchangecpts))
} |
context("Checking r_data")
test_that("r_data ...",{
}) |
structure(list(url = "https://api.twitter.com/2/tweets?tweet.fields=attachments%2Cauthor_id%2Cconversation_id%2Ccreated_at%2Centities%2Cgeo%2Cid%2Cin_reply_to_user_id%2Clang%2Cpublic_metrics%2Cpossibly_sensitive%2Creferenced_tweets%2Csource%2Ctext%2Cwithheld&user.fields=created_at%2Cdescription%2Centities%2Cid%2Clocation%2Cname%2Cpinned_tweet_id%2Cprofile_image_url%2Cprotected%2Cpublic_metrics%2Curl%2Cusername%2Cverified%2Cwithheld&expansions=author_id%2Centities.mentions.username%2Cgeo.place_id%2Cin_reply_to_user_id%2Creferenced_tweets.id%2Creferenced_tweets.id.author_id&place.fields=contained_within%2Ccountry%2Ccountry_code%2Cfull_name%2Cgeo%2Cid%2Cname%2Cplace_type&ids=1266841835626024964",
status_code = 200L, headers = structure(list(date = "Sun, 19 Dec 2021 10:36:55 UTC",
server = "tsa_o", `api-version` = "2.32", `content-type` = "application/json; charset=utf-8",
`cache-control` = "no-cache, no-store, max-age=0", `content-length` = "1378",
`x-access-level` = "read", `x-frame-options` = "SAMEORIGIN",
`content-encoding` = "gzip", `x-xss-protection` = "0",
`x-rate-limit-limit` = "300", `x-rate-limit-reset` = "1639910865",
`content-disposition` = "attachment; filename=json.json",
`x-content-type-options` = "nosniff", `x-rate-limit-remaining` = "296",
`strict-transport-security` = "max-age=631138519", `x-response-time` = "156",
`x-connection-hash` = "244c25d8369406ebe1736b071cb6dbbc1f610fcb6b5dee6b60cd76860eaeab36"), class = c("insensitive",
"list")), all_headers = list(list(status = 200L, version = "HTTP/2",
headers = structure(list(date = "Sun, 19 Dec 2021 10:36:55 UTC",
server = "tsa_o", `api-version` = "2.32", `content-type` = "application/json; charset=utf-8",
`cache-control` = "no-cache, no-store, max-age=0",
`content-length` = "1378", `x-access-level` = "read",
`x-frame-options` = "SAMEORIGIN", `content-encoding` = "gzip",
`x-xss-protection` = "0", `x-rate-limit-limit` = "300",
`x-rate-limit-reset` = "1639910865", `content-disposition` = "attachment; filename=json.json",
`x-content-type-options` = "nosniff", `x-rate-limit-remaining` = "296",
`strict-transport-security` = "max-age=631138519",
`x-response-time` = "156", `x-connection-hash` = "244c25d8369406ebe1736b071cb6dbbc1f610fcb6b5dee6b60cd76860eaeab36"), class = c("insensitive",
"list")))), cookies = structure(list(domain = c(".twitter.com",
".twitter.com", ".twitter.com", ".twitter.com"), flag = c(TRUE,
TRUE, TRUE, TRUE), path = c("/", "/", "/", "/"), secure = c(TRUE,
TRUE, TRUE, TRUE), expiration = structure(c(1702744284, 1702744284,
1702744284, 1702744284), class = c("POSIXct", "POSIXt")),
name = c("guest_id_marketing", "guest_id_ads", "personalization_id",
"guest_id"), value = c("REDACTED", "REDACTED", "REDACTED",
"REDACTED")), row.names = c(NA, -4L), class = "data.frame"),
content = charToRaw("{\"data\":[{\"possibly_sensitive\":false,\"created_at\":\"2020-05-30T21:20:05.000Z\",\"entities\":{\"urls\":[{\"start\":200,\"end\":223,\"url\":\"https://t.co/Zt7xl7gsz4\",\"expanded_url\":\"https://twitter.com/MarcelFckNzs/status/1266841835626024964/photo/1\",\"display_url\":\"pic.twitter.com/Zt7xl7gsz4\"}],\"hashtags\":[{\"start\":110,\"end\":124,\"tag\":\"ClimateCrisis\"},{\"start\":125,\"end\":140,\"tag\":\"ClimateJustice\"},{\"start\":141,\"end\":150,\"tag\":\"Datteln4\"},{\"start\":151,\"end\":168,\"tag\":\"FridaysForFuture\"}],\"mentions\":[{\"start\":169,\"end\":183,\"username\":\"FutureDatteln\",\"id\":\"1159451223344525313\"},{\"start\":184,\"end\":199,\"username\":\"Ende__Gelaende\",\"id\":\"3232960847\"}]},\"public_metrics\":{\"retweet_count\":1,\"reply_count\":1,\"like_count\":7,\"quote_count\":0},\"conversation_id\":\"1266841835626024964\",\"lang\":\"de\",\"text\":\"Heute war ich wieder in Datteln demonstrieren. Traurig zu sehen, wie diesmal Datteln 4 leider im Betrieb war.
date = structure(1639910215, class = c("POSIXct", "POSIXt"
), tzone = "GMT"), times = c(redirect = 0, namelookup = 0.000132,
connect = 0.000137, pretransfer = 0.000489, starttransfer = 0.17225,
total = 0.172573)), class = "response") |
cff2zenodo <- function(x) {
folder <- NULL
if(is.character(x) && file.exists(x)) {
folder <- dirname(x)
x <- read_cff(x)
}
tmp <- function(x) {
name <- paste(x[["family-names"]],
x[["given-names"]],
sep=", ")
out <- list(name=name)
if(!is.null(x[["affiliation"]])) out$affiliation <- x[["affiliation"]]
if(!is.null(x[["orcid"]])) out$orcid <- sub("^.*/","",x[["orcid"]])
return(out)
}
json <- list(title=x$title,
version=x$version,
creators=lapply(x$authors,tmp),
keywords=x$keywords,
license=list(id=x$license),
publication_date=x$`date-released`)
out <- toJSON(json,pretty=TRUE,auto_unbox = TRUE)
if(!is.null(folder)) {
zenodofile <- paste0(folder,"/.zenodo.json")
if(file.exists(zenodofile)) message("Updated .zenodo.json file")
else message("Added .zenodo.json file")
writeLines(out,zenodofile)
rbuildignore <- paste0(folder,"/.Rbuildignore")
if(file.exists(rbuildignore)) {
a <- readLines(rbuildignore)
if(all(!grepl("zenodo.json",a,fixed = TRUE))) {
a <- c(a,"^.*\\.zenodo.json$")
writeLines(a,rbuildignore)
message("Added .zenodo.json to .Rbuildignore")
}
}
invisible(out)
} else {
return(out)
}
} |
KCC <- function(model,GRR,p1,K)
{
model.idx <- charmatch(model,c("multiplicative","additive","recessive","dominant","overdominant"))
if(is.na(model.idx)) stop("Invalid model type")
if(model.idx == 0) stop("Ambiguous model type")
multiplicative <- c(1,GRR,GRR*GRR)
additive <- c(1,GRR,2*GRR-1)
recessive <- c(1,1,GRR)
dominant <- c(1,GRR,GRR)
overdominant <- c(GRR,1,GRR)
f <- switch(model.idx,multiplicative,additive,recessive,dominant,overdominant)
scale <- K/(f[1]*(1-p1)^2+f[2]*2*p1*(1-p1)+f[3]*p1^2)
f <- f*scale
pprime <- (f[3]*p1^2+f[2]*p1*(1-p1))/K
p <- ((1-f[3])*p1^2+(1-f[2])*p1*(1-p1))/(1-K)
invisible(list(pprime=pprime,p=p))
} |
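# Illustrative usage (not part of the original source): risk-allele frequencies
# in cases ('pprime') and controls ('p') for an additive model with GRR = 1.5,
# allele frequency 0.2 and disease prevalence 5 %.
unlist(KCC(model = "additive", GRR = 1.5, p1 = 0.2, K = 0.05))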
frames_init <- function(imports = memory_init()) {
targets <- memory_init(new.env(parent = imports$envir))
frames_new(imports, targets)
}
frames_new <- function(imports = NULL, targets = NULL) {
force(imports)
force(targets)
environment()
}
frames_get_envir <- function(frames) {
frames$targets$envir
}
frames_set_object <- function(frames, name, object) {
memory_set_object(frames$targets, name, object)
}
frames_clear_objects <- function(frames) {
frames$targets <- memory_init(new.env(parent = frames$imports$envir))
}
frames_set_dep <- function(frames, dep, pipeline) {
value <- dep$value
if (!is.null(value)) {
object <- dep$value$object
frames_set_object(frames, target_get_parent(dep), object)
}
}
frames_set_deps <- function(frames, target, pipeline) {
map(
target_deps_shallow(target, pipeline),
~frames_set_dep(frames, pipeline_get_target(pipeline, .x), pipeline)
)
}
frames_produce <- function(envir, target, pipeline) {
frames <- frames_init(memory_init(new.env(parent = envir)))
frames_set_deps(frames = frames, target = target, pipeline = pipeline)
frames
}
frames_validate_inheritance <- function(frames) {
envir1 <- parent.env(frames$targets$envir)
envir2 <- frames$imports$envir
if (!identical(envir1, envir2)) {
tar_throw_validate("broken inheritance in the frames.")
}
}
frames_validate <- function(frames) {
tar_assert_correct_fields(frames, frames_new)
memory_validate(frames$imports)
memory_validate(frames$targets)
frames_validate_inheritance(frames)
invisible()
} |
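
# Hedged usage sketch for the frames helpers (assumes the internal memory_init()
# and memory_set_object() defined elsewhere in this package behave as their
# names suggest): the targets environment inherits from the imports environment,
# so objects stored with frames_set_object() are visible when a target's command
# is evaluated. Wrapped in `if (FALSE)` so nothing runs on source:
if (FALSE) {
  frames <- frames_init()
  frames_set_object(frames, "x", 1L)
  get("x", envir = frames_get_envir(frames))  # 1L
  frames_validate(frames)                     # field, memory, and inheritance checks
}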
write.px <- function ( obj.px, filename, heading = NULL, stub = NULL,
keys = NULL , write.na = FALSE, write.zero = FALSE ,
fileEncoding = "ISO-8859-1" )
{
if ( ! inherits( obj.px, "px" ) )
stop("Error: object needs to have class 'px'")
unquote <- function(x)
gsub( '^"|"$', "", x)
requote <- function(x)
paste( '"', unquote(x), '"', sep = "")
wf <- function(...){
cadena <- paste(..., sep = "")
cat(cadena, file = con, sep = "")
}
obj.px[['LAST.UPDATED']]$value <- format(Sys.time(), "%Y%m%d %H:%M:%S")
obj.px[['CHARSET']]$value <- ifelse(fileEncoding == "ISO-8859-1", 'ANSI', fileEncoding)
obj.px$INFO$value <- "File generated using R and package pxR (http://pxr.r-forge.r-project.org/)"
if (!is.null(keys)) {
obj.px$KEYS <- NULL
if ( ! all(keys %in% names(obj.px$VALUES))) {
      stop('Error: Some keys are not in VALUES')
}
kk <- lapply(keys,function(e) { 'VALUES' })
names(kk) <- keys
for (i in keys[keys %in% names(obj.px$COD)]) {
kk[[i]]<- 'CODES'
levels(obj.px$DATA[[1]][,i]) <- obj.px$CODES[[i]]
}
obj.px$KEYS <- kk
}
  if ( is.null(obj.px$'CREATION-DATE') & is.null(obj.px$'CREATION.DATE') ) {
    obj.px$'CREATION-DATE'$value <- format(Sys.time(),'%Y%m%d %H:%M')
}
names(obj.px) <- gsub("\\.", "-", names(obj.px))
order.kw <- c("CHARSET", "AXIS-VERSION", "CODEPAGE", "LANGUAGE",
"LANGUAGES", "CREATION-DATE", "NEXT-UPDATE", "PX-SERVER",
"DIRECTORY-PATH", "UPDATE-FREQUENCY", "TABLEID", "SYNONYMS",
"DEFAULT-GRAPH", "DECIMALS", "SHOWDECIMALS", "ROUNDING",
"MATRIX", "AGGREGALLOWED", "AUTOPEN", "SUBJECT-CODE",
"SUBJECT-AREA", "CONFIDENTIAL", "COPYRIGHT", "DESCRIPTION",
"TITLE", "DESCRIPTIONDEFAULT", "CONTENTS", "UNITS", "STUB",
"HEADING", "CONTVARIABLE", "VALUES", "TIMEVAL", "CODES",
"DOUBLECOLUMN", "PRESTEXT", "DOMAIN", "VARIABLE-TYPE",
"HIERARCHIES", "HIERARCHYLEVELS", "HIERARCHYLEVELSOPEN",
"HIERARCHYNAMES", "MAP", "PARTITIONED", "ELIMINATION", "PR",
"ECISION", "LAST-UPDATED", "STOCKFA", "CFPRICES", "DAYADJ",
"SEASADJ", "CONTACT", "REFPERIOD", "BASEPERIOD",
"DATABASE", "SOURCE", "SURVEY", "LINK", "INFOFILE",
"FIRST-PUBLISHED", "META-ID", "OFFICIAL-STATISTICS", "INFO",
"NOTEX", "NOTE", "VALUENOTEX", "VALUENOTE", "CELLNOTEX",
"CELLNOTE", "DATASYMBOL1", "DATASYMBOL2", "DATASYM", "BOL3",
"DATASYMBOL4", "DATASYMBOL5", "DATASYMBOL6", "DATASYMBOLSUM",
"DATASYMBOLNIL", "DATANOTECELL", "DATANOTESUM", "DATANOTE",
"KEYS", "ATTRIBUTE-ID", "ATTRIBUTE-TEXT", "ATTRIBUTES",
"PRECISION","DATA")
order.px <- charmatch(names(obj.px), order.kw, nomatch=999)
new.order <- setdiff( names(obj.px)[order(order.px)], "DATA" )
if(! is.null(heading)){
if(is.null(stub))
stop("If heading is specified, you need also specify the stub parameter.")
if(! setequal(c(heading, stub), c(obj.px$HEADING$value, obj.px$STUB$value)) )
stop("Specified heading and stub parameters differ from those in the px object")
obj.px$HEADING$value <- heading
obj.px$STUB$value <- stub
}
if (! is.null(obj.px$KEYS)) {
keys <- names(obj.px$KEYS)
values <- names(obj.px$VALUES)
no.keys <- values[! (values %in% keys) ]
obj.px$STUB$value <- keys
obj.px$HEADING$value <- no.keys
}
con <- file( description = filename, open = "w", encoding = ifelse(fileEncoding == "ISO-8859-1", "latin1", fileEncoding))
on.exit(close(con))
for (key in new.order ) {
if (length(obj.px[[key]]) == 0)
next
if (key %in% c('DECIMALS', 'SHOWDECIMALS',
'COPYRIGHT', 'DESCRIPTIONDEFAULT', 'DAYADJ', 'SEASADJ')){
wf( key, "=")
wf( unquote(obj.px[[key]]$value) )
wf(';\n')
next
}
if ( names(obj.px[[key]])[1] == 'value' ) {
wf( key, "=")
wf( paste( requote(obj.px[[key]]$value), collapse = ',') )
wf(';\n')
next
}
for (subkey in names(obj.px[[key]])){
wf( key, '("', subkey, '")=' )
if ( (key =='ELIMINATION' && obj.px[[key]][[subkey]] %in% c('YES','NO')) || key == 'KEYS' )
wf(obj.px[[key]][[subkey]])
else
wf(paste(requote(obj.px[[key]][[subkey]]), collapse = ','))
wf ( ';\n' )
}
}
wf('DATA=\n')
if (!is.null(obj.px$KEYS)) {
keys <- names(obj.px$KEYS)
values <- names(obj.px$VALUES)
fm <- formula( paste(
paste(keys, collapse = '+'),'~',
paste(values[!values %in% keys], collapse = '+'), sep=''))
for (i in keys) {
if (obj.px$KEYS[[i]]=='CODES') {
levels(obj.px$DATA[[1]][,i]) <- obj.px$CODES[[i]]
} else
levels(obj.px$DATA[[1]][,i]) <- obj.px$VALUES[[i]]
}
res <- dcast(obj.px$DATA[[1]], fm, sum)
with.data <- rep(TRUE, nrow(res))
no.keys <- names(res)[!names(res) %in% keys]
data.no.keys <- as.matrix(res[,no.keys])
if (!write.na)
with.data <- with.data & ! apply(data.no.keys, 1, function(x) all(is.na(x)))
if (!write.zero)
with.data <- with.data & ! apply(data.no.keys, 1, function(x) all(x == 0))
res <- res[with.data, ]
zz <- res[, keys[1]]
for (i in keys[-1]) {
zz <- paste(zz, res[,i],sep='","')
}
zz <- paste('"',zz,'",',sep='')
for (i in names(res)[!names(res) %in% keys] ) {
column.num <- formatC(res[,i],
format = 'f',
digits = as.numeric(obj.px$DECIMALS$value),
drop0trailing = T, flag = '-')
column.num <- gsub("NA", '".."', column.num)
zz <- paste(zz, column.num, sep=' ')
}
write(zz, file = con, ncolumns = 1, append = T )
} else {
zz <- formatC(as.array(obj.px),
format = 'f',
digits = as.numeric(obj.px$DECIMALS$value),
drop0trailing = T, flag = '-')
zz <- gsub("NA", '".."', zz)
zz <- aperm(zz, c(rev(obj.px$HEADING$value), rev(obj.px$STUB$value)))
write(zz, file = con, ncolumns = sum( dim(zz)[1:length(obj.px$HEADING$value)] ), append = T )
}
wf(";\n")
invisible(NULL)
} |
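
# Hedged usage sketch for write.px() (assumes an existing PC-Axis file readable
# with this package's read.px(), and reshape2::dcast() for the KEYS branch;
# "example.px" and the "year" key are hypothetical names); wrapped in
# `if (FALSE)` so nothing runs on source:
if (FALSE) {
  px.obj <- read.px("example.px")
  write.px(px.obj, "copy.px")                  # plain round trip
  write.px(px.obj, "keyed.px", keys = "year")  # write with a KEYS variable
}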
NULL
tbl_json <- function(df, json.list, drop.null.json = FALSE, ..., .column_order = NULL) {
assertthat::assert_that(is.data.frame(df))
assertthat::assert_that(is.list(json.list) || is.vector(json.list))
assertthat::assert_that(nrow(df) == length(json.list))
row.names(df) <- NULL
if (drop.null.json) {
nulls <- purrr::map_lgl(json.list, is.null)
df <- df[!nulls, , drop = FALSE]
json.list <- json.list[!nulls]
}
df <- reorder_json_column(df, json.list, .preserve_column_order = .column_order)
structure(df, class = c("tbl_json", "tbl_df", "tbl", "data.frame"))
}
reorder_json_column <- function(.data, .json, .preserve_column_order = NULL) {
if (is.null(.preserve_column_order)) {
.data[["..JSON"]] <- NULL
.data[["..JSON"]] <- .json
} else {
.data[["..JSON"]] <- .json
}
return(.data)
}
as.tbl_json <- function(.x, ...) UseMethod("as.tbl_json")
as_tbl_json <- function(.x, ...) UseMethod("as.tbl_json")
as.tbl_json.tbl_json <- function(.x, ...) reorder_json_column(.x, json_get(.x))
as.tbl_json.character <- function(.x, ...) {
json <- purrr::map(.x, jsonlite::fromJSON, simplifyVector = FALSE)
ids <- data.frame(document.id = seq_along(json))
tbl_json(ids, json)
}
as.tbl_json.list <- function(.x, ...) {
ids <- data.frame(document.id = seq_along(.x))
tbl_json(ids, .x)
}
as.tbl_json.data.frame <- function(.x, json.column, ...) {
assertthat::assert_that(is.character(json.column))
assertthat::assert_that(json.column %in% names(.x))
jcol <- .x[[json.column]]
if (is.list(jcol)) {
json <- jcol
} else {
json <- purrr::map(jcol, jsonlite::fromJSON, simplifyVector = FALSE)
}
.x <- .x[, setdiff(names(.x), json.column), drop = FALSE]
tbl_json(.x, json)
}
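
# Hedged usage sketch for the as.tbl_json() constructors: a character vector of
# JSON documents becomes a tbl_json with one row per document and the parsed
# JSON kept in the hidden ..JSON column; wrapped in `if (FALSE)` so nothing
# runs on source:
if (FALSE) {
  x <- as.tbl_json(c('{"name": "a", "value": 1}', '{"name": "b", "value": 2}'))
  is.tbl_json(x)   # TRUE
  json_get(x)      # list of parsed documents backing the ..JSON column
  as.character(x)  # re-serialised JSON strings
}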
is.tbl_json <- function(.x) inherits(.x, "tbl_json")
`[.tbl_json` <- function(.x, i, j,
drop = FALSE) {
n_real_args <- nargs() - !missing(drop)
json <- json_get(.x)
if (n_real_args <= 2L) {
if (!missing(drop))
warning("drop ignored")
if (missing(i)) {
return(.x)
}
.x <- NextMethod('[')
} else {
.x <- NextMethod('[')
if (!missing(i)) {
json <- json[i]
}
}
tbl_json(.x, json, .column_order = TRUE)
}
`$<-.tbl_json` <- function(x, name, value) {
y <- NextMethod("$<-", x)
reorder_json_column(y, json_get(y))
}
json_get <- function(.data) {
.data[["..JSON"]]
}
json_get_column <- function(.data, column_name = "json") {
qnm <- rlang::enquo(column_name)
colnm <- rlang::as_name(qnm)
if ("json" %in% names(.data)) {
warning(paste0("Column `", colnm, "` already exists. It will be overwritten by `json_get_column()`"))
}
.data[[colnm]] <- json_get(.data)
return(.data)
}
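
# Hedged usage sketch for json_get_column(): expose the hidden ..JSON list as a
# visible list-column, named "json" by default or under a caller-chosen name;
# wrapped in `if (FALSE)` so nothing runs on source:
if (FALSE) {
  x <- as.tbl_json('{"a": 1}')
  json_get_column(x)         # adds a `json` list-column
  json_get_column(x, "raw")  # same content under the name "raw"
}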
wrap_dplyr_verb <- function(dplyr.verb, generic) {
function(.data, ...) {
json <- json_get(.data)
if (generic %in% c("select")) {
.data <- tibble::as_tibble(.data)
vars <- rlang::enquos(...)
vars_lgl <- purrr::map_lgl(
vars,
~ any(as.character(rlang::get_expr(.x)) %in% "..JSON")
)
vars[vars_lgl] <- NULL
y <- dplyr::select(.data, !!!vars)
} else if (generic %in% c("transmute")) {
.data <- tibble::as_tibble(.data)
y <- NextMethod(generic, .data)
} else {
y <- NextMethod(generic, .data)
}
if ("..JSON" %in% names(y)) {
return(tbl_json(tibble::as_tibble(y), json_get(y)))
} else {
return(tbl_json(y, json))
}
}
}
select.tbl_json <- wrap_dplyr_verb(dplyr::select, "select")
filter_.tbl_json <- wrap_dplyr_verb(dplyr::filter_, "filter_")
filter.tbl_json <- wrap_dplyr_verb(dplyr::filter, "filter")
arrange_.tbl_json <- wrap_dplyr_verb(dplyr::arrange_, "arrange_")
arrange.tbl_json <- wrap_dplyr_verb(dplyr::arrange, "arrange")
mutate_.tbl_json <- wrap_dplyr_verb(dplyr::mutate_, "mutate_")
mutate.tbl_json <- wrap_dplyr_verb(dplyr::mutate, "mutate")
transmute.tbl_json <- wrap_dplyr_verb(dplyr::transmute, "transmute")
slice_.tbl_json <- wrap_dplyr_verb(dplyr::slice_, "slice_")
slice.tbl_json <- wrap_dplyr_verb(dplyr::slice, "slice")
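
# Hedged usage sketch for the wrapped dplyr verbs: row-wise verbs such as
# filter() keep the hidden ..JSON column aligned with the surviving rows, so
# later tidyjson operations still see the matching documents; wrapped in
# `if (FALSE)` so nothing runs on source:
if (FALSE) {
  x <- as.tbl_json(c('{"v": 1}', '{"v": 2}'))
  y <- dplyr::filter(x, document.id == 2)
  as.character(y)  # only the second document remains attached
}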
inner_join.tbl_json <- function(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"),
...) {
x <- as_tibble(x)
y <- as_tibble(y)
NextMethod("inner_join", x)
}
full_join.tbl_json <- function(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"),
..., keep = FALSE) {
x <- as_tibble(x)
y <- as_tibble(y)
NextMethod("full_join", x)
}
left_join.tbl_json <- function(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"),
..., keep = FALSE) {
x <- as_tibble(x)
y <- as_tibble(y)
NextMethod("left_join", x)
}
right_join.tbl_json <- function(x, y, by = NULL, copy = FALSE, suffix = c(".x", ".y"),
..., keep = FALSE) {
x <- as_tibble(x)
y <- as_tibble(y)
NextMethod("right_join", x)
}
dplyr_reconstruct.tbl_json <- function(data, template) {
if ("..JSON" %in% names(data)) {
as_tbl_json(data, json.column = "..JSON")
} else {
tibble::as_tibble(data)
}
}
dplyr::bind_rows
as.character.tbl_json <- function(x, ...) {
json <- json_get(x)
if (is.null(json)) {
warning("the ..JSON column has been removed from this tbl_json object")
json <- list()
}
json %>% purrr::map_chr(jsonlite::toJSON,
null = "null",
auto_unbox = TRUE)
}
as_tibble.tbl_json <- function(x, ...) {
x$..JSON <- NULL
as_tibble(
structure(x, class = class(tibble::tibble()))
)
}
as_data_frame.tbl_json <- function(x, ...) {
as_tibble.tbl_json(x,...)
}
print.tbl_json <- function(x, ..., json.n = 20, json.width = 15) {
json <- x %>% as.character
json <- json[seq_len(min(json.n, nrow(x)))]
lengths <- dplyr::coalesce(json %>% nchar,0L)
json <- json %>% strtrim(json.width)
json[lengths > json.width] <- paste0(json[lengths > json.width], "...")
.y <- dplyr::as_tibble(x)
json_name <- '..JSON'
.y[json_name] <- rep("...", nrow(x))
.y[[json_name]][seq_len(length(json))] <- json
ncol_y <- ncol(.y)
.y <- .y[, c("..JSON", names(.y)[names(.y) != "..JSON"])]
out <- capture.output(print(.y))
out <- gsub("^\\
"\\
sprintf(nrow(x) %>% format(big.mark = ','),
ncol(x) %>% format(big.mark = ',')),
out)
writeLines(out)
invisible(x)
} |