code
stringlengths 1
13.8M
|
---|
# Regression tests for the robust Sobel mediation test using the
# covariance-matrix method of the robmed package.
context("robust Sobel test: covariance")
library("robmed", quietly = TRUE)
# simulation parameters for the mediation model
n <- 250
a <- c <- 0.2
b <- 0
# NOTE(review): R (number of bootstrap replicates) is not used by the Sobel
# tests in this file -- confirm whether it can be removed
R <- 1000
seed <- 20150601
# fix the seed so the simulated data (and hence all expectations) are stable
set.seed(seed)
# generate data: X drives M1 (a-path), Y depends on M1 and X; M2, C1, C2 are
# additional noise columns (unused mediator / potential covariates)
X <- rnorm(n)
M1 <- a * X + rnorm(n)
M2 <- rnorm(n)
Y <- b * M1 + c * X + rnorm(n)
C1 <- rnorm(n)
C2 <- rnorm(n)
test_data <- data.frame(X, Y, M1, M2, C1, C2)
# control object for the robust covariance estimator (prob = fraction of the
# data the estimator uses)
ctrl <- cov_control(prob = 0.9)
# fit the robust Sobel test under scrutiny
sobel <- test_mediation(test_data, x = "X", y = "Y", m = "M1", test = "sobel",
method = "covariance", robust = TRUE, control = ctrl)
summary_sobel <- summary(sobel)
# re-run the test with one-sided alternatives and a second-order approximation
sobel_less <- retest(sobel, alternative = "less")
sobel_greater <- retest(sobel, alternative = "greater")
sobel_second <- retest(sobel, order = "second")
# objects produced by the plot setup functions, checked further below
level <- 0.9
ci <- setup_ci_plot(sobel, level = level)
ci_p <- setup_ci_plot(sobel, level = level, p_value = TRUE)
density <- setup_density_plot(sobel, level = level)
ellipse <- setup_ellipse_plot(sobel)
# expected names of the estimated effects
coef_names <- c("a", "b", "Direct", "Total", "ab")
test_that("output has correct structure", {
  # the test object and the underlying fit carry the expected S3 classes
  expect_s3_class(sobel, "sobel_test_mediation")
  expect_s3_class(sobel, "test_mediation")
  expect_s3_class(sobel$fit, "cov_fit_mediation")
  expect_s3_class(sobel$fit, "fit_mediation")
  # expect_is() is deprecated in testthat 3e; check the base type instead
  expect_type(sobel$se, "double")
  expect_type(sobel$statistic, "double")
  expect_type(sobel$p_value, "double")
})
test_that("arguments are correctly passed", {
# the default alternative is a two-sided test
expect_identical(sobel$alternative, "twosided")
# variable roles as specified in the call to test_mediation()
expect_identical(sobel$fit$x, "X")
expect_identical(sobel$fit$y, "Y")
expect_identical(sobel$fit$m, "M1")
expect_identical(sobel$fit$covariates, character())
# the robust flag and the control object must be stored on the fit
expect_true(sobel$fit$robust)
expect_equal(sobel$fit$control, ctrl)
})
test_that("dimensions are correct", {
# standard error, test statistic and p-value are all scalars
expect_length(sobel$se, 1L)
expect_length(sobel$statistic, 1L)
expect_length(sobel$p_value, 1L)
})
test_that("coef() method returns correct values of coefficients", {
# coef() on the test object simply forwards to the underlying fit
expect_identical(coef(sobel), coef(sobel$fit))
})
test_that("output of confint() method has correct attributes", {
ci_sobel <- confint(sobel, level = level)
# one interval per effect, with column labels reflecting the 90% level
expect_equal(dim(ci_sobel), c(5L, 2L))
expect_equal(rownames(ci_sobel), coef_names)
expect_equal(colnames(ci_sobel), c("5 %", "95 %"))
})
test_that("confint() method returns correct values of confidence intervals", {
# the default 95% interval must strictly contain the 90% interval
ci_default <- confint(sobel, parm = "ab")
ci_90 <- confint(sobel, parm = "ab", level = level)
expect_lt(ci_default["ab", 1], ci_90["ab", 1])
expect_gt(ci_default["ab", 2], ci_90["ab", 2])
})
test_that("summary has correct structure", {
expect_s3_class(summary_sobel, "summary_test_mediation")
# the summary keeps a reference to the original test object
expect_identical(summary_sobel$object, sobel)
expect_s3_class(summary_sobel$summary, "summary_cov_fit_mediation")
expect_s3_class(summary_sobel$summary, "summary_fit_mediation")
# regression summary statistics are expected to be absent for the
# covariance method
expect_null(summary_sobel$summary$s)
expect_null(summary_sobel$summary$R2)
expect_null(summary_sobel$summary$F_test)
expect_null(summary_sobel$plot)
})
test_that("attributes are correctly passed through summary", {
# robustness flag, sample size and variable roles survive summarization
expect_true(summary_sobel$summary$robust)
expect_identical(summary_sobel$summary$n, as.integer(n))
expect_identical(summary_sobel$summary$x, "X")
expect_identical(summary_sobel$summary$y, "Y")
expect_identical(summary_sobel$summary$m, "M1")
expect_null(summary_sobel$summary$covariates)
})
test_that("effect summaries have correct names", {
# each effect summary is a 1 x 4 matrix: one row per predictor, columns
# starting with the point estimate
expect_identical(dim(summary_sobel$summary$a), c(1L, 4L))
expect_identical(rownames(summary_sobel$summary$a), "X")
expect_identical(colnames(summary_sobel$summary$a)[1], "Estimate")
expect_identical(dim(summary_sobel$summary$b), c(1L, 4L))
expect_identical(rownames(summary_sobel$summary$b), "M1")
expect_identical(colnames(summary_sobel$summary$b)[1], "Estimate")
expect_identical(dim(summary_sobel$summary$direct), c(1L, 4L))
expect_identical(rownames(summary_sobel$summary$direct), "X")
expect_identical(colnames(summary_sobel$summary$direct)[1], "Estimate")
expect_identical(dim(summary_sobel$summary$total), c(1L, 4L))
expect_identical(rownames(summary_sobel$summary$total), "X")
expect_identical(colnames(summary_sobel$summary$total)[1], "Estimate")
# no regression model summaries exist for the covariance method
expect_null(summary_sobel$summary$fit_mx)
expect_null(summary_sobel$summary$fit_ymx)
})
test_that("effect summaries contain correct coefficient values", {
# the estimates in the summary tables must match the fit coefficients
expect_identical(summary_sobel$summary$a["X", "Estimate"],
sobel$fit$a)
expect_identical(summary_sobel$summary$b["M1", "Estimate"],
sobel$fit$b)
expect_identical(summary_sobel$summary$direct["X", "Estimate"],
sobel$fit$direct)
expect_identical(summary_sobel$summary$total["X", "Estimate"],
sobel$fit$total)
})
test_that("output of retest() has correct structure", {
# retest() keeps the class of the original object ...
expect_identical(class(sobel_less), class(sobel))
expect_identical(class(sobel_greater), class(sobel))
expect_identical(class(sobel_second), class(sobel))
# ... and reuses the existing model fit instead of refitting
expect_identical(sobel_less$fit, sobel$fit)
expect_identical(sobel_greater$fit, sobel$fit)
expect_identical(sobel_second$fit, sobel$fit)
})
test_that("arguments of retest() are correctly passed", {
# changing only the alternative leaves standard error and statistic intact
expect_identical(sobel_less$se, sobel$se)
expect_identical(sobel_greater$se, sobel$se)
# the second-order approximation yields a larger standard error, hence a
# smaller (absolute) test statistic and a larger p-value
expect_true(sobel_second$se > sobel$se)
expect_identical(sobel_less$statistic, sobel$statistic)
expect_identical(sobel_greater$statistic, sobel$statistic)
expect_true(abs(sobel_second$statistic) < abs(sobel$statistic))
# one-sided p-values follow from the two-sided one by halving/complementing
expect_equal(sobel_less$p_value, 1-sobel$p_value/2)
expect_equal(sobel_greater$p_value, sobel$p_value/2)
expect_true(sobel_second$p_value > sobel$p_value)
expect_identical(sobel_less$alternative, "less")
expect_identical(sobel_greater$alternative, "greater")
expect_identical(sobel_second$alternative, sobel$alternative)
})
test_that("output of p_value() method has correct attributes", {
  p_data <- p_value(sobel, parm = NULL)
  # one p-value per effect, named accordingly
  expect_length(p_data, 5L)
  expect_named(p_data, coef_names)
  # expect_equivalent() is deprecated in testthat 3e; extract the value with
  # [[ ]] (which drops the name) and compare directly
  expect_equal(p_data[["ab"]], sobel$p_value)
})
test_that("objects returned by setup_xxx_plot() have correct structure", {
# --- setup_ci_plot() without p-values: a data frame with one row per
# plotted effect (direct effect and indirect effect "ab")
expect_s3_class(ci$ci, "data.frame")
expect_identical(dim(ci$ci), c(2L, 4L))
expect_named(ci$ci, c("Effect", "Estimate", "Lower", "Upper"))
effect_names <- c("Direct", "ab")
effect_factor <- factor(effect_names, levels = effect_names)
expect_identical(ci$ci$Effect, effect_factor)
expect_identical(ci$level, level)
expect_false(ci$have_methods)
# --- setup_ci_plot() with p-values: separate data frames for intervals and
# p-values, both labeled for faceting
expect_s3_class(ci_p$ci, "data.frame")
expect_s3_class(ci_p$p_value, "data.frame")
expect_identical(dim(ci_p$ci), c(2L, 5L))
expect_identical(dim(ci_p$p_value), c(2L, 3L))
expect_named(ci_p$ci, c("Label", "Effect", "Estimate", "Lower", "Upper"))
expect_named(ci_p$p_value, c("Label", "Effect", "Value"))
label_names <- c("Confidence interval", "p-Value")
expect_identical(ci_p$ci$Label,
factor(rep.int(label_names[1], 2), levels = label_names))
expect_identical(ci_p$p_value$Label,
factor(rep.int(label_names[2], 2), levels = label_names))
effect_names <- c("Direct", "ab")
effect_factor <- factor(effect_names, levels = effect_names)
expect_identical(ci_p$ci$Effect, effect_factor)
expect_identical(ci_p$p_value$Effect, effect_factor)
# NOTE(review): these two expectations re-check the ci object, duplicating
# the checks above -- presumably ci_p$level / ci_p$have_methods were meant
expect_identical(ci$level, level)
expect_false(ci$have_methods)
# --- setup_density_plot(): density estimate plus a single interval row
expect_s3_class(density$density, "data.frame")
expect_identical(ncol(density$density), 2L)
expect_gt(nrow(density$density), 0L)
expect_named(density$density, c("ab", "Density"))
expect_s3_class(density$ci, "data.frame")
expect_identical(dim(density$ci), c(1L, 3L))
expect_named(density$ci, c("Estimate", "Lower", "Upper"))
expect_identical(density$test, "sobel")
expect_identical(density$level, level)
expect_false(density$have_effect)
expect_false(density$have_methods)
# --- setup_ellipse_plot() forwards to the fit; weight plots are not
# available for this method and must error
expect_identical(ellipse, setup_ellipse_plot(sobel$fit))
expect_error(setup_weight_plot(sobel))
})
# The same robust Sobel test specified via the formula interface: with an
# explicit data argument, with variables taken from the environment, and with
# the mediator wrapped in a pre-built m() object. (A stray "|" concatenation
# artifact at the end of this chunk has been removed -- it broke parsing.)
sobel_f1 <- test_mediation(Y ~ m(M1) + X, data = test_data, test = "sobel",
                           method = "covariance", robust = TRUE, control = ctrl)
sobel_f2 <- test_mediation(Y ~ m(M1) + X, test = "sobel",
                           method = "covariance", robust = TRUE, control = ctrl)
med <- m(M1)
sobel_f3 <- test_mediation(Y ~ med + X, data = test_data, test = "sobel",
                           method = "covariance", robust = TRUE, control = ctrl)
test_that("formula interface works correctly", {
  # every formula variant must reproduce the default-interface result
  expect_equal(sobel_f1, sobel)
  expect_equal(sobel_f2, sobel)
  expect_equal(sobel_f3, sobel)
})
#' Create an MCMC object for nested sampling, with short defaults for testing.
#'
#' All arguments are forwarded to \code{beautier::create_ns_mcmc}.
#' (A stray "|" concatenation artifact at the end of this chunk has been
#' removed -- it broke parsing.)
#'
#' @param chain_length length of the MCMC chain
#' @param store_every interval at which MCMC states are stored
#' @param pre_burnin number of pre-burn-in samples
#' @param n_init_attempts number of initialization attempts
#' @param particle_count number of nested-sampling particles
#' @param sub_chain_length length of each nested-sampling sub-chain
#' @param epsilon nested-sampling stopping criterion
#' @param tracelog a tracelog, e.g. from create_test_tracelog()
#' @param screenlog a screenlog, e.g. from create_test_screenlog()
#' @param treelog a treelog, e.g. from create_test_treelog()
#' @return an MCMC object as created by \code{beautier::create_ns_mcmc}
create_test_ns_mcmc <- function(
  chain_length = 2000,
  store_every = 1000,
  pre_burnin = 0,
  n_init_attempts = 3,
  particle_count = 1,
  sub_chain_length = 500,
  epsilon = 1e-12,
  tracelog = create_test_tracelog(),
  screenlog = create_test_screenlog(),
  treelog = create_test_treelog()
) {
  beautier::create_ns_mcmc(
    chain_length = chain_length,
    store_every = store_every,
    pre_burnin = pre_burnin,
    n_init_attempts = n_init_attempts,
    # BUG FIX: particle_count, sub_chain_length and epsilon were accepted by
    # this wrapper but never forwarded, so callers' settings were silently
    # ignored; forward them like the other arguments
    particle_count = particle_count,
    sub_chain_length = sub_chain_length,
    epsilon = epsilon,
    tracelog = tracelog,
    screenlog = screenlog,
    treelog = treelog
  )
}
add_ep <- function(pbp) {
  # Compute the expected-points columns and report progress to the user.
  result <- add_ep_variables(pbp)
  user_message("added ep variables", "done")
  return(result)
}
add_air_yac_ep <- function(pbp) {
  # Add the air/yac expected-points columns. When no play carries a known
  # air_yards value, the model cannot be applied, so every column is filled
  # with NA instead.
  plays_with_air_yards <- dplyr::filter(pbp, !is.na(.data$air_yards))
  if (nrow(plays_with_air_yards) > 0) {
    out <- add_air_yac_ep_variables(pbp)
    user_message("added air_yac_ep variables", "done")
    return(out)
  }
  # columns normally produced by add_air_yac_ep_variables()
  na_columns <- c(
    "air_epa", "yac_epa",
    "comp_air_epa", "comp_yac_epa",
    "home_team_comp_air_epa", "away_team_comp_air_epa",
    "home_team_comp_yac_epa", "away_team_comp_yac_epa",
    "total_home_comp_air_epa", "total_away_comp_air_epa",
    "total_home_comp_yac_epa", "total_away_comp_yac_epa",
    "home_team_raw_air_epa", "away_team_raw_air_epa",
    "home_team_raw_yac_epa", "away_team_raw_yac_epa",
    "total_home_raw_air_epa", "total_away_raw_air_epa",
    "total_home_raw_yac_epa", "total_away_raw_yac_epa"
  )
  out <- pbp
  out[na_columns] <- NA_real_
  user_message("No non-NA air_yards detected. air_yac_ep variables set to NA", "info")
  return(out)
}
add_wp <- function(pbp) {
  # Compute the win-probability columns and report progress to the user.
  result <- add_wp_variables(pbp)
  user_message("added wp variables", "done")
  return(result)
}
add_air_yac_wp <- function(pbp) {
  # Add the air/yac win-probability columns. When no play carries a known
  # air_yards value, the model cannot be applied, so every column is filled
  # with NA instead.
  plays_with_air_yards <- dplyr::filter(pbp, !is.na(.data$air_yards))
  if (nrow(plays_with_air_yards) > 0) {
    out <- add_air_yac_wp_variables(pbp)
    user_message("added air_yac_wp variables", "done")
    return(out)
  }
  # columns normally produced by add_air_yac_wp_variables()
  na_columns <- c(
    "air_wpa", "yac_wpa",
    "comp_air_wpa", "comp_yac_wpa",
    "home_team_comp_air_wpa", "away_team_comp_air_wpa",
    "home_team_comp_yac_wpa", "away_team_comp_yac_wpa",
    "total_home_comp_air_wpa", "total_away_comp_air_wpa",
    "total_home_comp_yac_wpa", "total_away_comp_yac_wpa",
    "home_team_raw_air_wpa", "away_team_raw_air_wpa",
    "home_team_raw_yac_wpa", "away_team_raw_yac_wpa",
    "total_home_raw_air_wpa", "total_away_raw_air_wpa",
    "total_home_raw_yac_wpa", "total_away_raw_yac_wpa"
  )
  out <- pbp
  out[na_columns] <- NA_real_
  user_message("No non-NA air_yards detected. air_yac_wp variables set to NA", "info")
  return(out)
}
get_preds <- function(pbp) {
  # Run the EP model and reshape the flat prediction vector into one row per
  # play with one column per scoring outcome.
  outcome_names <- c(
    "Touchdown", "Opp_Touchdown", "Field_Goal", "Opp_Field_Goal",
    "Safety", "Opp_Safety", "No_Score"
  )
  raw <- stats::predict(fastrmodels::ep_model, as.matrix(ep_model_select(pbp)))
  preds <- as.data.frame(
    matrix(raw, ncol = length(outcome_names), byrow = TRUE)
  )
  colnames(preds) <- outcome_names
  return(preds)
}
get_preds_wp <- function(pbp) {
  # Predict win probability with the baseline (non-spread) WP model.
  model_input <- as.matrix(wp_model_select(pbp))
  stats::predict(fastrmodels::wp_model, model_input)
}
get_preds_wp_spread <- function(pbp) {
  # Predict win probability with the spread-aware (Vegas) WP model.
  model_input <- as.matrix(wp_spread_model_select(pbp))
  stats::predict(fastrmodels::wp_model_spread, model_input)
}
ep_model_select <- function(pbp) {
  # Select, in model order, the feature columns expected by the EP model
  # (fastrmodels::ep_model). Column order must match the training matrix.
  # FIX: removed a trailing comma that passed an empty argument to
  # dplyr::select(); also dropped the redundant assign-then-return.
  pbp %>%
    dplyr::select(
      "half_seconds_remaining",
      "yardline_100",
      "home",
      "retractable",
      "dome",
      "outdoors",
      "ydstogo",
      "era0", "era1", "era2", "era3", "era4",
      "down1", "down2", "down3", "down4",
      "posteam_timeouts_remaining",
      "defteam_timeouts_remaining"
    )
}
wp_model_select <- function(pbp) {
  # Select, in model order, the feature columns expected by the baseline WP
  # model (fastrmodels::wp_model).
  wp_features <- c(
    "receive_2h_ko",
    "home",
    "half_seconds_remaining",
    "game_seconds_remaining",
    "Diff_Time_Ratio",
    "score_differential",
    "down",
    "ydstogo",
    "yardline_100",
    "posteam_timeouts_remaining",
    "defteam_timeouts_remaining"
  )
  dplyr::select(pbp, dplyr::all_of(wp_features))
}
wp_spread_model_select <- function(pbp) {
  # Select, in model order, the feature columns expected by the spread-aware
  # WP model (fastrmodels::wp_model_spread).
  wp_spread_features <- c(
    "receive_2h_ko",
    "spread_time",
    "home",
    "half_seconds_remaining",
    "game_seconds_remaining",
    "Diff_Time_Ratio",
    "score_differential",
    "down",
    "ydstogo",
    "yardline_100",
    "posteam_timeouts_remaining",
    "defteam_timeouts_remaining"
  )
  dplyr::select(pbp, dplyr::all_of(wp_spread_features))
}
prepare_wp_data <- function(pbp) {
# Add the derived feature columns required by the win-probability models.
pbp <- pbp %>%
# receive_2h_ko: within each game, flag first-half plays where the current
# possession team is the one that kicked off to open the game (i.e. the
# first defensive team observed), and will therefore receive the 2nd-half KO
dplyr::group_by(.data$game_id) %>%
dplyr::mutate(
receive_2h_ko = dplyr::if_else(.data$qtr <= 2 & .data$posteam == dplyr::first(stats::na.omit(.data$defteam)), 1, 0)
) %>%
dplyr::ungroup() %>%
dplyr::mutate(
# spread from the possession team's perspective
posteam_spread = dplyr::if_else(.data$home == 1, .data$spread_line, -1 * .data$spread_line),
# fraction of regulation time already played (3600 s in a game)
elapsed_share = (3600 - .data$game_seconds_remaining) / 3600,
# both ratios decay the influence of spread/score as the game progresses
spread_time = .data$posteam_spread * exp(-4 * .data$elapsed_share),
Diff_Time_Ratio = .data$score_differential / (exp(-4 * .data$elapsed_share))
)
return(pbp)
}
add_ep_variables <- function(pbp_data) {
  # Append expected-points (EP) probabilities and EPA columns to play-by-play
  # data. Baseline model predictions are first patched for special play types
  # (missed field goals, kickoffs, kneel-downs, PATs, two-point tries,
  # special-teams penalties, timeouts), then EPA is computed per game as the
  # play-to-play change in expected points, overridden by actual scoring
  # outcomes.

  # baseline score-event probabilities for every play
  base_ep_preds <- get_preds(pbp_data)

  # --- missed field goals: opponent takes over first-and-10 at the spot + 8,
  # with the average FG play duration (~5.07 s) elapsed
  missed_fg_data <- pbp_data
  missed_fg_data$half_seconds_remaining <- missed_fg_data$half_seconds_remaining - 5.065401
  missed_fg_data$yardline_100 <- 100 - (missed_fg_data$yardline_100 + 8)
  missed_fg_data$down1 <- rep(1, nrow(pbp_data))
  missed_fg_data$down2 <- rep(0, nrow(pbp_data))
  missed_fg_data$down3 <- rep(0, nrow(pbp_data))
  missed_fg_data$down4 <- rep(0, nrow(pbp_data))
  missed_fg_data$ydstogo <- rep(10, nrow(pbp_data))
  # BUG FIX: both branches of the former `if (nrow(missed_fg_data) > 1)` were
  # identical, so the condition was dead code; a single call suffices
  missed_fg_ep_preds <- get_preds(missed_fg_data)
  # if the half would already be over after the miss, "no score" is certain
  end_game_i <- which(missed_fg_data$half_seconds_remaining <= 0)
  missed_fg_ep_preds[end_game_i, ] <- rep(0, ncol(missed_fg_ep_preds))
  missed_fg_ep_preds[end_game_i, "No_Score"] <- 1
  # probability the kick is good, from the FG model
  make_fg_prob <- as.numeric(mgcv::predict.bam(fastrmodels::fg_model, newdata = pbp_data, type = "response"))
  # weight the miss scenario by the probability of missing
  missed_fg_ep_preds <- missed_fg_ep_preds * (1 - make_fg_prob)
  # for FG attempts, blend the make outcome with the possession-flipped miss
  # outcome (Opp_* and own columns swap because possession changes)
  fg_attempt_i <- which(pbp_data$play_type == "field_goal")
  base_ep_preds[fg_attempt_i, "Field_Goal"] <- make_fg_prob[fg_attempt_i] + missed_fg_ep_preds[fg_attempt_i, "Opp_Field_Goal"]
  base_ep_preds[fg_attempt_i, "Touchdown"] <- missed_fg_ep_preds[fg_attempt_i, "Opp_Touchdown"]
  base_ep_preds[fg_attempt_i, "Opp_Field_Goal"] <- missed_fg_ep_preds[fg_attempt_i, "Field_Goal"]
  base_ep_preds[fg_attempt_i, "Opp_Touchdown"] <- missed_fg_ep_preds[fg_attempt_i, "Touchdown"]
  base_ep_preds[fg_attempt_i, "Safety"] <- missed_fg_ep_preds[fg_attempt_i, "Opp_Safety"]
  base_ep_preds[fg_attempt_i, "Opp_Safety"] <- missed_fg_ep_preds[fg_attempt_i, "Safety"]
  base_ep_preds[fg_attempt_i, "No_Score"] <- missed_fg_ep_preds[fg_attempt_i, "No_Score"]

  # --- kickoffs: EP of a first-and-10 at the expected starting position
  # (own 20 before the 2016 touchback change, own 25 from 2016 on)
  kickoff_data <- pbp_data
  kickoff_data$yardline_100 <- with(kickoff_data,
                                    ifelse(season < 2016,
                                           80, 75))
  kickoff_data$down1 <- rep(1, nrow(pbp_data))
  kickoff_data$down2 <- rep(0, nrow(pbp_data))
  kickoff_data$down3 <- rep(0, nrow(pbp_data))
  kickoff_data$down4 <- rep(0, nrow(pbp_data))
  kickoff_data$ydstogo <- rep(10, nrow(pbp_data))
  kickoff_preds <- get_preds(kickoff_data)
  kickoff_i <- which(pbp_data$play_type == "kickoff" | pbp_data$kickoff_attempt == 1)
  base_ep_preds[kickoff_i, "Field_Goal"] <- kickoff_preds[kickoff_i, "Field_Goal"]
  base_ep_preds[kickoff_i, "Touchdown"] <- kickoff_preds[kickoff_i, "Touchdown"]
  base_ep_preds[kickoff_i, "Opp_Field_Goal"] <- kickoff_preds[kickoff_i, "Opp_Field_Goal"]
  base_ep_preds[kickoff_i, "Opp_Touchdown"] <- kickoff_preds[kickoff_i, "Opp_Touchdown"]
  base_ep_preds[kickoff_i, "Safety"] <- kickoff_preds[kickoff_i, "Safety"]
  base_ep_preds[kickoff_i, "Opp_Safety"] <- kickoff_preds[kickoff_i, "Opp_Safety"]
  base_ep_preds[kickoff_i, "No_Score"] <- kickoff_preds[kickoff_i, "No_Score"]

  # --- kneel-downs in own territory: no further scoring is expected
  qb_kneels_i <- which(pbp_data$play_type == "qb_kneel" & pbp_data$yardline_100 > 50)
  base_ep_preds[qb_kneels_i, "Field_Goal"] <- 0
  base_ep_preds[qb_kneels_i, "Touchdown"] <- 0
  base_ep_preds[qb_kneels_i, "Opp_Field_Goal"] <- 0
  base_ep_preds[qb_kneels_i, "Opp_Touchdown"] <- 0
  base_ep_preds[qb_kneels_i, "Safety"] <- 0
  base_ep_preds[qb_kneels_i, "Opp_Safety"] <- 0
  base_ep_preds[qb_kneels_i, "No_Score"] <- 1

  # --- point-after tries: PAT kicks use the FG model, two-point tries use a
  # league-average success rate
  base_ep_preds$ExPoint_Prob <- 0
  base_ep_preds$TwoPoint_Prob <- 0
  extrapoint_i <- which((pbp_data$play_type == "extra_point" | pbp_data$play_type_nfl == "XP_KICK") &
                          (is.na(pbp_data$play_type_nfl) | pbp_data$play_type_nfl != "PAT2"))
  twopoint_i <- which(pbp_data$two_point_attempt == 1)
  # special-teams penalty pseudo-plays around PATs / kickoffs, where EP is
  # not meaningful (identified via neighboring-row patterns)
  st_penalty_i_1 <- which(
    (pbp_data$touchdown == 0 & (dplyr::lag(pbp_data$touchdown == 1) | dplyr::lag(pbp_data$play_type_nfl == "XP_KICK")) &
       (dplyr::lead(pbp_data$two_point_attempt) == 1 | dplyr::lead(pbp_data$extra_point_attempt) == 1 | dplyr::lead(pbp_data$play_type_nfl) == "XP_KICK")) |
      ((dplyr::lag(pbp_data$two_point_attempt) == 1 | dplyr::lag(pbp_data$extra_point_attempt) == 1) & dplyr::lead(pbp_data$kickoff_attempt == 1))
  )
  st_penalty_i_2 <- which(
    is.na(dplyr::lead(pbp_data$down)) &
      (((stringr::str_detect(pbp_data$desc, 'Kick formation') & is.na(pbp_data$down) & pbp_data$play_type == 'no_play') |
          (stringr::str_detect(pbp_data$desc, 'Pass formation') & is.na(pbp_data$down) & pbp_data$play_type == 'no_play') |
          (stringr::str_detect(pbp_data$desc, 'kicks onside') & is.na(pbp_data$down) & pbp_data$play_type == 'no_play') |
          (stringr::str_detect(pbp_data$desc, 'Offside on Free Kick') & is.na(pbp_data$down) & pbp_data$play_type == 'no_play') |
          (stringr::str_detect(pbp_data$desc, 'TWO-POINT CONVERSION')) &
          is.na(pbp_data$down) & pbp_data$play_type == 'no_play' & dplyr::lead(pbp_data$kickoff_attempt) == 0))
  )
  base_ep_preds$ExPoint_Prob[extrapoint_i] <- make_fg_prob[extrapoint_i]
  base_ep_preds$TwoPoint_Prob[twopoint_i] <- 0.4735

  # --- timeouts and other non-plays with no model-relevant description
  missing_i <- which(
    (pbp_data$timeout == 1 &
       pbp_data$play_type == "no_play" &
       !stringr::str_detect(pbp_data$desc, ' pass ') &
       !stringr::str_detect(pbp_data$desc, ' sacked ') &
       !stringr::str_detect(pbp_data$desc, ' scramble ') &
       !stringr::str_detect(pbp_data$desc, ' punts ') &
       !stringr::str_detect(pbp_data$desc, ' up the middle ') &
       !stringr::str_detect(pbp_data$desc, ' left end ') &
       !stringr::str_detect(pbp_data$desc, ' left guard ') &
       !stringr::str_detect(pbp_data$desc, ' left tackle ') &
       !stringr::str_detect(pbp_data$desc, ' right end ') &
       !stringr::str_detect(pbp_data$desc, ' right guard ') &
       !stringr::str_detect(pbp_data$desc, ' right tackle ')
    ) |
      is.na(pbp_data$play_type))
  # zero out the scoring-event probabilities on every special-cased play
  # (hoisted: the original recomputed this index vector seven times)
  special_i <- c(missing_i, extrapoint_i, twopoint_i, st_penalty_i_1, st_penalty_i_2)
  base_ep_preds$Field_Goal[special_i] <- 0
  base_ep_preds$Touchdown[special_i] <- 0
  base_ep_preds$Opp_Field_Goal[special_i] <- 0
  base_ep_preds$Opp_Touchdown[special_i] <- 0
  base_ep_preds$Safety[special_i] <- 0
  base_ep_preds$Opp_Safety[special_i] <- 0
  base_ep_preds$No_Score[special_i] <- 0

  # rename to *_Prob and attach the probability columns to the pbp data
  base_ep_preds <- dplyr::rename(base_ep_preds,
                                 Field_Goal_Prob = "Field_Goal",
                                 Touchdown_Prob = "Touchdown",
                                 Opp_Field_Goal_Prob = "Opp_Field_Goal",
                                 Opp_Touchdown_Prob = "Opp_Touchdown",
                                 Safety_Prob = "Safety",
                                 Opp_Safety_Prob = "Opp_Safety",
                                 No_Score_Prob = "No_Score")
  pbp_data <- cbind(pbp_data, base_ep_preds)

  # expected points = probability-weighted point value of each outcome
  pbp_data_ep <- dplyr::mutate(pbp_data,
                               ExpPts = (0 * .data$No_Score_Prob) + (-3 * .data$Opp_Field_Goal_Prob) +
                                 (-2 * .data$Opp_Safety_Prob) +
                                 (-7 * .data$Opp_Touchdown_Prob) + (3 * .data$Field_Goal_Prob) +
                                 (2 * .data$Safety_Prob) + (7 * .data$Touchdown_Prob) +
                                 (1 * .data$ExPoint_Prob) + (2 * .data$TwoPoint_Prob))
  # EP is undefined on penalty/timeout pseudo-plays
  if (length(st_penalty_i_1) > 0) {
    pbp_data_ep$ExpPts[st_penalty_i_1] <- NA_real_
  }
  if (length(st_penalty_i_2) > 0) {
    pbp_data_ep$ExpPts[st_penalty_i_2] <- NA_real_
  }
  pbp_data_ep$ExpPts[missing_i] <- NA_real_

  # --- EPA: per game, fill EP upward over NA rows, take the play-to-play
  # difference from the home team's perspective, then override with actual
  # scoring outcomes (TD, FG, PAT, two-point, defensive two-point, safety)
  pbp_data_ep %>%
    dplyr::group_by(.data$game_id) %>%
    dplyr::mutate(
      ep = .data$ExpPts,
      tmp_posteam = .data$posteam
    ) %>%
    tidyr::fill(
      .data$ep, .direction = "up"
    ) %>%
    tidyr::fill(
      .data$tmp_posteam, .direction = "up"
    ) %>%
    dplyr::mutate(
      home_ep = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$ep, - .data$ep),
      home_epa = dplyr::lead(.data$home_ep) - .data$home_ep,
      epa = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$home_epa, -.data$home_epa),
      # touchdowns: value the score directly against the pre-play EP
      epa = dplyr::if_else(!is.na(.data$td_team),
                           dplyr::if_else(.data$td_team == .data$posteam,
                                          7 - .data$ep, -7 - .data$ep),
                           .data$epa),
      epa = dplyr::if_else(is.na(.data$td_team) & .data$field_goal_made == 1,
                           3 - .data$ep, .data$epa, missing = .data$epa),
      epa = dplyr::if_else(is.na(.data$td_team) & .data$field_goal_made == 0 &
                             .data$extra_point_good == 1,
                           1 - .data$ep, .data$epa, missing = .data$epa),
      epa = dplyr::if_else(is.na(.data$td_team) & .data$field_goal_made == 0 &
                             .data$extra_point_good == 0 &
                             (.data$two_point_rush_good == 1 |
                                .data$two_point_pass_good == 1 |
                                .data$two_point_pass_reception_good == 1),
                           2 - .data$ep, .data$epa, missing = .data$epa),
      epa = dplyr::if_else(is.na(.data$td_team) & .data$field_goal_made == 0 &
                             .data$extra_point_good == 0 &
                             ((.data$extra_point_failed == 1 |
                                 .data$extra_point_blocked == 1 |
                                 .data$extra_point_aborted == 1) |
                                (.data$two_point_rush_failed == 1 |
                                   .data$two_point_pass_failed == 1 |
                                   .data$two_point_pass_reception_failed == 1)),
                           0 - .data$ep, .data$epa, missing = .data$epa),
      epa = dplyr::if_else(
        .data$defensive_two_point_conv == 1, -2 - .data$ep, .data$epa, missing = .data$epa
      ),
      epa = dplyr::case_when(
        !is.na(.data$safety_team) & .data$safety_team == .data$posteam ~ 2 - .data$ep,
        !is.na(.data$safety_team) & .data$safety_team == .data$defteam ~ -2 - .data$ep,
        TRUE ~ .data$epa
      )
    ) %>%
    dplyr::rename(
      no_score_prob = "No_Score_Prob",
      opp_fg_prob = "Opp_Field_Goal_Prob",
      opp_safety_prob = "Opp_Safety_Prob",
      opp_td_prob = "Opp_Touchdown_Prob",
      fg_prob = "Field_Goal_Prob",
      safety_prob = "Safety_Prob",
      td_prob = "Touchdown_Prob",
      extra_point_prob = "ExPoint_Prob",
      two_point_conversion_prob = "TwoPoint_Prob"
    ) %>%
    dplyr::mutate(
      # last non-scoring play of a half loses the remaining EP
      epa = dplyr::if_else(((.data$qtr == 2 &
                               (dplyr::lead(.data$qtr) == 3 |
                                  dplyr::lead(.data$desc) == "END QUARTER 2")) |
                              (.data$qtr == 4 &
                                 (dplyr::lead(.data$qtr) == 5 |
                                    dplyr::lead(.data$desc) == "END QUARTER 4" |
                                    dplyr::lead(.data$desc) == "END GAME"))) &
                             .data$sp == 0 &
                             !is.na(.data$play_type),
                           0 - .data$ep, .data$epa),
      # marker rows ("END QUARTER 2" / "GAME") carry no EP/EPA
      epa = dplyr::if_else(.data$desc == "END QUARTER 2", NA_real_, .data$epa),
      epa = dplyr::if_else(.data$desc == "GAME", NA_real_, .data$epa),
      ep = dplyr::if_else(.data$desc == "END QUARTER 2", NA_real_, .data$ep),
      ep = dplyr::if_else(.data$desc == "GAME", NA_real_, .data$ep),
      # team-perspective EPA and running totals (NA treated as 0 for sums)
      home_team_epa = dplyr::if_else(.data$posteam == .data$home_team,
                                     .data$epa, -.data$epa),
      away_team_epa = dplyr::if_else(.data$posteam == .data$away_team,
                                     .data$epa, -.data$epa),
      home_team_epa = dplyr::if_else(is.na(.data$home_team_epa),
                                     0, .data$home_team_epa),
      away_team_epa = dplyr::if_else(is.na(.data$away_team_epa),
                                     0, .data$away_team_epa),
      total_home_epa = cumsum(.data$home_team_epa),
      total_away_epa = cumsum(.data$away_team_epa),
      home_team_rush_epa = dplyr::if_else(.data$play_type == "run",
                                          .data$home_team_epa, 0),
      away_team_rush_epa = dplyr::if_else(.data$play_type == "run",
                                          .data$away_team_epa, 0),
      home_team_rush_epa = dplyr::if_else(is.na(.data$home_team_rush_epa),
                                          0, .data$home_team_rush_epa),
      away_team_rush_epa = dplyr::if_else(is.na(.data$away_team_rush_epa),
                                          0, .data$away_team_rush_epa),
      total_home_rush_epa = cumsum(.data$home_team_rush_epa),
      total_away_rush_epa = cumsum(.data$away_team_rush_epa),
      home_team_pass_epa = dplyr::if_else(.data$play_type == "pass",
                                          .data$home_team_epa, 0),
      away_team_pass_epa = dplyr::if_else(.data$play_type == "pass",
                                          .data$away_team_epa, 0),
      home_team_pass_epa = dplyr::if_else(is.na(.data$home_team_pass_epa),
                                          0, .data$home_team_pass_epa),
      away_team_pass_epa = dplyr::if_else(is.na(.data$away_team_pass_epa),
                                          0, .data$away_team_pass_epa),
      total_home_pass_epa = cumsum(.data$home_team_pass_epa),
      total_away_pass_epa = cumsum(.data$away_team_pass_epa)) %>%
    dplyr::ungroup()
}
add_wp_variables <- function(pbp_data) {
# Append win-probability (WP) and WPA columns to play-by-play data, with
# special handling for overtime, PAT decisions, kickoffs, and end-of-game
# rows. Statement order matters: later index-based overwrites (PAT, kickoff)
# intentionally take precedence over earlier ones.
OffWinProb <- rep(NA_real_, nrow(pbp_data))
OffWinProb_spread <- rep(NA_real_, nrow(pbp_data))
pbp_data <- pbp_data %>%
prepare_wp_data()
# --- overtime: WP comes from the EP model's scoring probabilities, using
# the sudden-death / one-FG-game rules depending on era and drive position.
# NOTE(review): any() returns NA (and this `if` errors) if qtr contains NA
if (any(pbp_data$qtr > 4)){
overtime_i <- which(pbp_data$qtr > 4)
overtime_df <- pbp_data[overtime_i,]
overtime_df$First_Drive <- rep(min(overtime_df$drive,
na.rm = TRUE),
nrow(overtime_df))
overtime_df <- dplyr::mutate(overtime_df,
Drive_Diff = .data$drive - .data$First_Drive)
# trailing by 3 on the second OT drive: opponent scored a FG on drive 1
overtime_df$One_FG_Game <- ifelse(overtime_df$score_differential == -3 &
overtime_df$Drive_Diff == 1, 1, 0)
overtime_df_ko <- overtime_df
# NOTE(review): this assigns `yrdline100`, but get_preds()/ep_model_select()
# read `yardline_100` (cf. the kickoff block below), so this adjustment
# appears to have no effect -- confirm whether this is a dormant bug.
# Also uses game_year/game_month while the block below uses season.
overtime_df_ko$yrdline100 <- with(overtime_df_ko,
ifelse(game_year < 2016 |
(game_year == 2016 & game_month < 4),
80, 75))
overtime_df_ko$down1 <- rep(1,nrow(overtime_df_ko))
overtime_df_ko$down2 <- rep(0,nrow(overtime_df_ko))
overtime_df_ko$down3 <- rep(0,nrow(overtime_df_ko))
overtime_df_ko$down4 <- rep(0,nrow(overtime_df_ko))
overtime_df_ko$ydstogo <- rep(10,nrow(overtime_df_ko))
overtime_df_ko_preds <- get_preds(overtime_df_ko)
# Win_Back: probability of getting the ball back without allowing a score
# that ends the game
overtime_df_ko_preds <- dplyr::mutate(overtime_df_ko_preds,
Win_Back = .data$No_Score + .data$Opp_Field_Goal + .data$Opp_Safety + .data$Opp_Touchdown)
overtime_df$Sudden_Death_WP <- overtime_df$fg_prob + overtime_df$td_prob + overtime_df$safety_prob
overtime_df$One_FG_WP <- overtime_df$td_prob + (overtime_df$fg_prob * overtime_df_ko_preds$Win_Back)
# modern OT rules (2012+) on the first possession: a FG does not end it
OffWinProb[overtime_i] <- ifelse(overtime_df$game_year >= 2012 & (overtime_df$Drive_Diff == 0 | (overtime_df$Drive_Diff == 1 & overtime_df$One_FG_Game == 1)),
overtime_df$One_FG_WP, overtime_df$Sudden_Death_WP)
OffWinProb_spread[overtime_i] <- OffWinProb[overtime_i]
}
# --- regulation: direct predictions from both WP models
regular_i <- which(pbp_data$qtr <= 4)
regular_df <- pbp_data[regular_i,]
OffWinProb[regular_i] <- get_preds_wp(regular_df)
OffWinProb_spread[regular_i] <- get_preds_wp_spread(regular_df)
# plays without a down get NA (later overwritten for PATs and kickoffs)
down_na <- which(is.na(pbp_data$down))
OffWinProb[down_na] <- NA_real_
OffWinProb_spread[down_na] <- NA_real_
# --- PAT decisions: probability the PAT kick is good, evaluated from the
# era-appropriate PAT snap position (15-yard line from 2015 on)
make_pat_prob <- as.numeric(
mgcv::predict.bam(
fastrmodels::fg_model,
newdata = pbp_data %>%
mutate(
yardline_100 = ifelse(.data$season >= 2015, 15, 3)
), type="response")
)
# identify PAT kick plays vs two-point attempts from description/result
pat_i <- which(
(pbp_data$kickoff_attempt == 0 &
!(stringr::str_detect(pbp_data$desc, 'Onside Kick')) &
(stringr::str_detect(pbp_data$desc, 'Kick formation')) &
is.na(pbp_data$down)) |
stringr::str_detect(pbp_data$desc, 'extra point') |
!is.na(pbp_data$extra_point_result)
)
two_pt_i <- which(
(pbp_data$kickoff_attempt == 0 &
!(stringr::str_detect(pbp_data$desc, 'Onside Kick')) &
(stringr::str_detect(pbp_data$desc, 'Pass formation')) &
is.na(pbp_data$down)) |
stringr::str_detect(pbp_data$desc, 'TWO-POINT CONVERSION ATTEMPT') |
!is.na(pbp_data$two_point_conv_result)
)
# a play matched by both patterns is treated as a two-point attempt
pat_i <- pat_i[!pat_i %in% two_pt_i]
# build the post-PAT state from the DEFENDING team's perspective: they will
# receive the kickoff, so roles, score, timeouts and 2h-KO flag all flip
pat_data <- pbp_data %>%
dplyr::mutate(
to_pos = .data$posteam_timeouts_remaining,
to_def = .data$defteam_timeouts_remaining,
posteam_timeouts_remaining = .data$to_def,
defteam_timeouts_remaining = .data$to_pos,
score_differential = -.data$score_differential,
down = 1,
ydstogo = 10,
receive_2h_ko = case_when(
.data$qtr <= 2 & .data$receive_2h_ko == 0 ~ 1,
.data$qtr <= 2 & .data$receive_2h_ko == 1 ~ 0,
TRUE ~ .data$receive_2h_ko
),
posteam = if_else(.data$home_team == .data$posteam, .data$away_team, .data$home_team),
yardline_100 = 75
) %>%
dplyr::mutate(
home = case_when(
.data$home == 0 ~ 1,
.data$home == 1 ~ 0
),
posteam_spread = dplyr::if_else(.data$home == 1, .data$spread_line, -1 * .data$spread_line),
elapsed_share = (3600 - .data$game_seconds_remaining) / 3600,
spread_time = .data$posteam_spread * exp(-4 * .data$elapsed_share)
)
# spread model: opponent WP for 0, 1 or 2 points added, then blend by the
# success probability of kicking (pat_go_for_1) or going for 2 (0.4735)
pat_0 <- get_preds_wp_spread(pat_data %>% add_esdtr())
pat_1 <- get_preds_wp_spread(pat_data %>% dplyr::mutate(score_differential = .data$score_differential - 1) %>% add_esdtr())
pat_2 <- get_preds_wp_spread(pat_data %>% dplyr::mutate(score_differential = .data$score_differential - 2) %>% add_esdtr())
pat_go_for_1 <- 1 - (make_pat_prob * pat_1 + (1 - make_pat_prob) * pat_0)
pat_go_for_2 <- 1 - (0.4735 * pat_2 + (1 - 0.4735) * pat_0)
OffWinProb_spread[two_pt_i] <- pat_go_for_2[two_pt_i]
OffWinProb_spread[pat_i] <- pat_go_for_1[pat_i]
# same blend for the baseline (non-spread) model
pat_0 <- get_preds_wp(pat_data %>% add_esdtr())
pat_1 <- get_preds_wp(pat_data %>% dplyr::mutate(score_differential = .data$score_differential - 1) %>% add_esdtr())
pat_2 <- get_preds_wp(pat_data %>% dplyr::mutate(score_differential = .data$score_differential - 2) %>% add_esdtr())
pat_go_for_1 <- 1 - (make_pat_prob * pat_1 + (1 - make_pat_prob) * pat_0)
pat_go_for_2 <- 1 - (0.4735 * pat_2 + (1 - 0.4735) * pat_0)
OffWinProb[two_pt_i] <- pat_go_for_2[two_pt_i]
OffWinProb[pat_i] <- pat_go_for_1[pat_i]
# --- kickoffs in regulation: WP of a first-and-10 at the expected starting
# position (own 20 before the 2016 touchback change, own 25 after)
kickoff_data <- pbp_data
kickoff_data$yardline_100 <- with(kickoff_data,
ifelse(season < 2016,
80, 75))
kickoff_data$down <- rep(1,nrow(pbp_data))
kickoff_data$down1 <- rep(1,nrow(pbp_data))
kickoff_data$down2 <- rep(0,nrow(pbp_data))
kickoff_data$down3 <- rep(0,nrow(pbp_data))
kickoff_data$down4 <- rep(0,nrow(pbp_data))
kickoff_data$ydstogo <- rep(10,nrow(pbp_data))
kickoff_preds <- get_preds_wp(kickoff_data)
kickoff_preds_spread <- get_preds_wp_spread(kickoff_data)
kickoff_i <- which((pbp_data$play_type == "kickoff" | pbp_data$kickoff_attempt == 1) & pbp_data$qtr <= 4)
OffWinProb[kickoff_i] <- kickoff_preds[kickoff_i]
OffWinProb_spread[kickoff_i] <- kickoff_preds_spread[kickoff_i]
# --- assemble WP columns: fill NA rows upward, convert to home-team
# perspective, and pin end-of-game rows to the actual result
pbp_data <- pbp_data %>%
dplyr::mutate(
wp = OffWinProb,
vegas_wp = OffWinProb_spread,
tmp_posteam = .data$posteam
) %>%
tidyr::fill(
.data$wp, .direction = "up"
) %>%
tidyr::fill(
.data$vegas_wp, .direction = "up"
) %>%
tidyr::fill(
.data$tmp_posteam, .direction = "up"
) %>%
dplyr::group_by(.data$game_id) %>%
dplyr::mutate(
home_wp = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$wp, 1 - .data$wp),
vegas_home_wp = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$vegas_wp, 1 - .data$vegas_wp),
end_game = ifelse(
stringr::str_detect(tolower(.data$desc), "(end of game)|(end game)"),
1, 0
),
# 1 = home win, 0 = away win, 0.5 = tie
final_value = dplyr::case_when(
.data$home_score > .data$away_score ~ 1,
.data$away_score > .data$home_score ~ 0,
.data$home_score == .data$away_score ~ .5
),
vegas_home_wp = dplyr::if_else(
.data$end_game == 1,
.data$final_value,
.data$vegas_home_wp
),
home_wp = dplyr::if_else(
.data$end_game == 1,
.data$final_value,
.data$home_wp
),
away_wp = 1 - .data$home_wp,
# possession-team wp is undefined on the end-of-game marker row
vegas_wp = dplyr::if_else(
.data$end_game == 1,
NA_real_,
.data$vegas_wp
),
wp = dplyr::if_else(
.data$end_game == 1,
NA_real_,
.data$wp
),
def_wp = 1 - .data$wp,
# WPA: play-to-play change, from the possession team's perspective;
# kneel-downs and end-of-game rows are excluded
vegas_home_wpa = dplyr::lead(.data$vegas_home_wp) - .data$vegas_home_wp,
vegas_wpa = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$vegas_home_wpa, -.data$vegas_home_wpa),
vegas_wpa = dplyr::if_else(
stringr::str_detect(tolower(.data$desc), "( kneels )|(end of game)|(end game)"), NA_real_, .data$vegas_wpa
),
home_wpa = dplyr::lead(.data$home_wp) - .data$home_wp,
wpa = dplyr::if_else(.data$tmp_posteam == .data$home_team, .data$home_wpa, -.data$home_wpa),
wpa = dplyr::if_else(
stringr::str_detect(tolower(.data$desc), "( kneels )|(end of game)|(end game)"), NA_real_, .data$wpa
)
) %>%
dplyr::ungroup()
# post-play WP from each team's perspective
pbp_data$home_wp_post <- ifelse(pbp_data$posteam == pbp_data$home_team,
pbp_data$home_wp + pbp_data$wpa,
pbp_data$home_wp - pbp_data$wpa)
pbp_data$away_wp_post <- ifelse(pbp_data$posteam == pbp_data$away_team,
pbp_data$away_wp + pbp_data$wpa,
pbp_data$away_wp - pbp_data$wpa)
# overtime ties: final play before "end of game" with a level score gets 0
pbp_data <- pbp_data %>%
dplyr::mutate(home_wp_post = dplyr::if_else(.data$qtr == 5 &
stringr::str_detect(tolower(dplyr::lead(.data$desc)),
"(end of game)|(end game)") &
.data$score_differential_post == 0,
0, .data$home_wp_post),
away_wp_post = dplyr::if_else(.data$qtr == 5 &
stringr::str_detect(tolower(dplyr::lead(.data$desc)),
"(end of game)|(end game)") &
.data$score_differential_post == 0,
0, .data$away_wp_post))
# carry the previous post-play WP across end-of-game rows and consecutive
# no_play rows
pbp_data$home_wp_post <- with(pbp_data,
ifelse(stringr::str_detect(tolower(desc),
"(end of game)|(end game)"), dplyr::lag(home_wp_post),
ifelse(dplyr::lag(play_type) == "no_play" & play_type == "no_play", dplyr::lag(home_wp_post),home_wp_post)))
pbp_data$away_wp_post <- with(pbp_data,
ifelse(stringr::str_detect(tolower(desc),
"(end of game)|(end game)"), dplyr::lag(away_wp_post),
ifelse(dplyr::lag(play_type) == "no_play" & play_type == "no_play", dplyr::lag(away_wp_post),away_wp_post)))
# team-perspective WPA and running rush/pass totals (NA treated as 0)
pbp_data %>%
dplyr::group_by(.data$game_id) %>%
dplyr::mutate(
home_team_wpa = dplyr::if_else(.data$posteam == .data$home_team,
.data$wpa, -.data$wpa),
away_team_wpa = dplyr::if_else(.data$posteam == .data$away_team,
.data$wpa, -.data$wpa),
home_team_wpa = dplyr::if_else(is.na(.data$home_team_wpa),
0, .data$home_team_wpa),
away_team_wpa = dplyr::if_else(is.na(.data$away_team_wpa),
0, .data$away_team_wpa),
home_team_rush_wpa = dplyr::if_else(.data$play_type == "run",
.data$home_team_wpa, 0),
away_team_rush_wpa = dplyr::if_else(.data$play_type == "run",
.data$away_team_wpa, 0),
home_team_rush_wpa = dplyr::if_else(is.na(.data$home_team_rush_wpa),
0, .data$home_team_rush_wpa),
away_team_rush_wpa = dplyr::if_else(is.na(.data$away_team_rush_wpa),
0, .data$away_team_rush_wpa),
total_home_rush_wpa = cumsum(.data$home_team_rush_wpa),
total_away_rush_wpa = cumsum(.data$away_team_rush_wpa),
home_team_pass_wpa = dplyr::if_else(.data$play_type == "pass",
.data$home_team_wpa, 0),
away_team_pass_wpa = dplyr::if_else(.data$play_type == "pass",
.data$away_team_wpa, 0),
home_team_pass_wpa = dplyr::if_else(is.na(.data$home_team_pass_wpa),
0, .data$home_team_pass_wpa),
away_team_pass_wpa = dplyr::if_else(is.na(.data$away_team_pass_wpa),
0, .data$away_team_pass_wpa),
total_home_pass_wpa = cumsum(.data$home_team_pass_wpa),
total_away_pass_wpa = cumsum(.data$away_team_pass_wpa)) %>%
dplyr::ungroup() %>%
return()
}
add_esdtr <- function(data) {
  # Append the Diff_Time_Ratio column: the score margin scaled by an
  # exponential function of game progress, so the same score differential
  # counts for more as the game nears its end.
  #
  # Args:
  #   data: play-by-play data frame with score_differential and
  #     elapsed_share columns.
  # Returns: the input data with Diff_Time_Ratio added.
  out <- dplyr::mutate(
    data,
    Diff_Time_Ratio = .data$score_differential / exp(-4 * .data$elapsed_share)
  )
  out
}
add_air_yac_ep_variables <- function(pbp_data) {
  # Decompose each pass play's EPA into an "air" component (value of the
  # throw itself, as if the ball were caught at the target spot with no
  # extra yardage) and a "yac" component (value added after the catch),
  # then append per-game cumulative home/away totals.
  #
  # Args:
  #   pbp_data: play-by-play data frame; rows are plays. Uses air_yards,
  #     play_type, yardline_100, ydstogo, half_seconds_remaining, down,
  #     timeout counts, ep/epa, penalty, yards_after_catch, complete_pass,
  #     two_point_attempt, posteam, home_team, away_team, game_id.
  # Returns:
  #   pbp_data with air_epa / yac_epa plus comp_*, *_team_* and total_*
  #   cumulative columns appended.

  # Only pass plays with recorded air yards can be decomposed.
  pass_plays_i <- which(!is.na(pbp_data$air_yards) & pbp_data$play_type == 'pass')
  pass_pbp_data <- pbp_data[pass_plays_i,]
  # Build the hypothetical game state "at the catch point" so the EP model
  # can be re-run as if the play ended where the ball arrived.
  pass_pbp_data <- pass_pbp_data %>%
    dplyr::mutate(
      # Save pre-play timeout counts; swapped below if the hypothetical play
      # is a turnover on downs. (Note: "defeam" spelling is used throughout.)
      posteam_timeouts_pre = .data$posteam_timeouts_remaining,
      defeam_timeouts_pre = .data$defteam_timeouts_remaining
    ) %>%
    # Keep the original (pre-snap) state under old_* names.
    dplyr::rename(old_yrdline100 = .data$yardline_100,
                  old_ydstogo = .data$ydstogo,
                  old_TimeSecs_Remaining = .data$half_seconds_remaining,
                  old_down = .data$down) %>%
    dplyr::mutate(# An incomplete 4th-down throw short of the sticks hands
                  # the ball over.
                  Turnover_Ind = dplyr::if_else(.data$old_down == 4 & .data$air_yards < .data$old_ydstogo,
                                                1, 0),
                  # Field position at the catch point; flipped on turnover.
                  yardline_100 = dplyr::if_else(.data$Turnover_Ind == 0,
                                                .data$old_yrdline100 - .data$air_yards,
                                                100 - (.data$old_yrdline100 - .data$air_yards)),
                  # Past the sticks (or turnover) -> fresh set of downs.
                  ydstogo = dplyr::if_else(.data$air_yards >= .data$old_ydstogo |
                                             .data$Turnover_Ind == 1,
                                           10, .data$old_ydstogo - .data$air_yards),
                  down = dplyr::if_else(.data$air_yards >= .data$old_ydstogo |
                                          .data$Turnover_Ind == 1,
                                        1, as.numeric(.data$old_down) + 1),
                  # 5.704673 s: fixed time charged for the pass attempt;
                  # presumably the average pass-play duration -- confirm.
                  half_seconds_remaining = .data$old_TimeSecs_Remaining - 5.704673,
                  # One-hot down indicators expected by the EP model.
                  down1 = dplyr::if_else(.data$down == 1, 1, 0),
                  down2 = dplyr::if_else(.data$down == 2, 1, 0),
                  down3 = dplyr::if_else(.data$down == 3, 1, 0),
                  down4 = dplyr::if_else(.data$down == 4, 1, 0),
                  # On a turnover, offense and defense swap, so swap timeouts.
                  posteam_timeouts_remaining = dplyr::if_else(.data$Turnover_Ind == 1,
                                                              .data$defeam_timeouts_pre,
                                                              .data$posteam_timeouts_pre),
                  defteam_timeouts_remaining = dplyr::if_else(.data$Turnover_Ind == 1,
                                                              .data$posteam_timeouts_pre,
                                                              .data$defeam_timeouts_pre)
    )
  # get_preds() is defined elsewhere in the package; the columns used below
  # indicate it returns next-score probabilities (Touchdown, Field_Goal,
  # Safety and their Opp_* mirrors) for the supplied game state.
  pass_pbp_data_preds <- get_preds(pass_pbp_data)
  # Expected points at the catch spot: probability-weighted score values.
  pass_pbp_data_preds <- dplyr::mutate(pass_pbp_data_preds, airEP = (.data$Opp_Safety*-2) + (.data$Opp_Field_Goal*-3) +
                                         (.data$Opp_Touchdown*-7) + (.data$Safety*2) + (.data$Field_Goal*3) + (.data$Touchdown*7))
  pass_pbp_data$airEP <- pass_pbp_data_preds$airEP
  # No air value if the half would already be over at the catch point.
  pass_pbp_data$airEP[which(pass_pbp_data$half_seconds_remaining <= 0)] <- 0
  # airEPA relative to pre-play EP, with special cases: caught in the end
  # zone -> touchdown (7); target beyond the back of own end zone -> safety
  # (-2); turnover on downs -> value from the opponent's perspective.
  pass_pbp_data$airEPA <- with(pass_pbp_data, ifelse(old_yrdline100 - air_yards <= 0,
                                                     7 - ep,
                                                     ifelse(old_yrdline100 - air_yards > 99,
                                                            -2 - ep,
                                                            ifelse(Turnover_Ind == 1,
                                                                   (-1*airEP) - ep,
                                                                   airEP - ep))))
  # Two-point tries have no EP decomposition.
  pass_pbp_data$airEPA <- with(pass_pbp_data, ifelse(two_point_attempt == 1,
                                                     NA, airEPA))
  # YAC value is whatever share of the play's EPA the throw did not earn.
  pass_pbp_data <- dplyr::mutate(pass_pbp_data, yacEPA = .data$epa - .data$airEPA)
  # Clean completions with zero YAC: all of the play's EPA is air value.
  pass_pbp_data$yacEPA <- ifelse(pass_pbp_data$penalty == 0 & pass_pbp_data$yards_after_catch == 0 & pass_pbp_data$complete_pass==1,
                                 0, pass_pbp_data$yacEPA)
  pass_pbp_data$airEPA <- ifelse(pass_pbp_data$penalty == 0 & pass_pbp_data$yards_after_catch == 0 & pass_pbp_data$complete_pass == 1,
                                 pass_pbp_data$epa, pass_pbp_data$airEPA)
  # Write pass-play results back into the full data set (NA elsewhere).
  pbp_data$airEPA <- NA
  pbp_data$yacEPA <- NA
  pbp_data$airEPA[pass_plays_i] <- pass_pbp_data$airEPA
  pbp_data$yacEPA[pass_plays_i] <- pass_pbp_data$yacEPA
  # Per-game running totals, signed from each team's perspective; NAs are
  # zeroed first so cumsum() does not propagate them.
  pbp_data %>%
    dplyr::rename(air_epa = "airEPA",
                  yac_epa = "yacEPA") %>%
    dplyr::group_by(.data$game_id) %>%
    dplyr::mutate(comp_air_epa = dplyr::if_else(.data$complete_pass == 1,
                                                .data$air_epa, 0),
                  comp_yac_epa = dplyr::if_else(.data$complete_pass == 1,
                                                .data$yac_epa, 0),
                  home_team_comp_air_epa = dplyr::if_else(.data$posteam == .data$home_team,
                                                          .data$comp_air_epa, -.data$comp_air_epa),
                  away_team_comp_air_epa = dplyr::if_else(.data$posteam == .data$away_team,
                                                          .data$comp_air_epa, -.data$comp_air_epa),
                  home_team_comp_yac_epa = dplyr::if_else(.data$posteam == .data$home_team,
                                                          .data$comp_yac_epa, -.data$comp_yac_epa),
                  away_team_comp_yac_epa = dplyr::if_else(.data$posteam == .data$away_team,
                                                          .data$comp_yac_epa, -.data$comp_yac_epa),
                  home_team_comp_air_epa = dplyr::if_else(is.na(.data$home_team_comp_air_epa),
                                                          0, .data$home_team_comp_air_epa),
                  away_team_comp_air_epa = dplyr::if_else(is.na(.data$away_team_comp_air_epa),
                                                          0, .data$away_team_comp_air_epa),
                  home_team_comp_yac_epa = dplyr::if_else(is.na(.data$home_team_comp_yac_epa),
                                                          0, .data$home_team_comp_yac_epa),
                  away_team_comp_yac_epa = dplyr::if_else(is.na(.data$away_team_comp_yac_epa),
                                                          0, .data$away_team_comp_yac_epa),
                  total_home_comp_air_epa = cumsum(.data$home_team_comp_air_epa),
                  total_away_comp_air_epa = cumsum(.data$away_team_comp_air_epa),
                  total_home_comp_yac_epa = cumsum(.data$home_team_comp_yac_epa),
                  total_away_comp_yac_epa = cumsum(.data$away_team_comp_yac_epa),
                  home_team_raw_air_epa = dplyr::if_else(.data$posteam == .data$home_team,
                                                         .data$air_epa, -.data$air_epa),
                  away_team_raw_air_epa = dplyr::if_else(.data$posteam == .data$away_team,
                                                         .data$air_epa, -.data$air_epa),
                  home_team_raw_yac_epa = dplyr::if_else(.data$posteam == .data$home_team,
                                                         .data$yac_epa, -.data$yac_epa),
                  away_team_raw_yac_epa = dplyr::if_else(.data$posteam == .data$away_team,
                                                         .data$yac_epa, -.data$yac_epa),
                  home_team_raw_air_epa = dplyr::if_else(is.na(.data$home_team_raw_air_epa),
                                                         0, .data$home_team_raw_air_epa),
                  away_team_raw_air_epa = dplyr::if_else(is.na(.data$away_team_raw_air_epa),
                                                         0, .data$away_team_raw_air_epa),
                  home_team_raw_yac_epa = dplyr::if_else(is.na(.data$home_team_raw_yac_epa),
                                                         0, .data$home_team_raw_yac_epa),
                  away_team_raw_yac_epa = dplyr::if_else(is.na(.data$away_team_raw_yac_epa),
                                                         0, .data$away_team_raw_yac_epa),
                  total_home_raw_air_epa = cumsum(.data$home_team_raw_air_epa),
                  total_away_raw_air_epa = cumsum(.data$away_team_raw_air_epa),
                  total_home_raw_yac_epa = cumsum(.data$home_team_raw_yac_epa),
                  total_away_raw_yac_epa = cumsum(.data$away_team_raw_yac_epa)) %>%
    dplyr::ungroup() %>%
    return()
}
add_air_yac_wp_variables <- function(pbp_data) {
  # Decompose each pass play's win-probability added (WPA) into an "air"
  # component (WP at the hypothetical catch point minus pre-play WP) and a
  # "yac" component (the remainder), with special handling for overtime
  # (sudden-death vs. post-2012 one-field-goal rules), then append per-game
  # cumulative home/away totals.
  #
  # Args:
  #   pbp_data: play-by-play data frame (one row per play).
  # Returns:
  #   pbp_data with air_wpa / yac_wpa plus comp_*, *_team_* and total_*
  #   cumulative columns appended.

  # Preserve pre-play timeout counts for the turnover swap below.
  # (Note: "defeam" spelling is used throughout the package.)
  pbp_data <- pbp_data %>%
    dplyr::mutate(
      posteam_timeouts_pre = .data$posteam_timeouts_remaining,
      defeam_timeouts_pre = .data$defteam_timeouts_remaining
    )
  # Only pass plays with recorded air yards can be decomposed.
  pass_plays_i <- which(!is.na(pbp_data$air_yards) & pbp_data$play_type == 'pass')
  pass_pbp_data <- pbp_data[pass_plays_i,]
  # Build the hypothetical state at the catch point for the WP model.
  pass_pbp_data <- pass_pbp_data %>%
    dplyr::mutate(
      # 5.704673 s: fixed time charged for the pass attempt; presumably the
      # average pass-play duration -- confirm.
      half_seconds_remaining = .data$half_seconds_remaining - 5.704673,
      game_seconds_remaining = .data$game_seconds_remaining - 5.704673,
      Diff_Time_Ratio = .data$score_differential / (exp(-4 * .data$elapsed_share)),
      # Incomplete 4th-down throw short of the sticks -> turnover on downs.
      Turnover_Ind = dplyr::if_else(.data$down == 4 & .data$air_yards < .data$ydstogo,
                                    1, 0),
      # Turnover flips the perspective: negate the margin, swap timeouts.
      Diff_Time_Ratio = dplyr::if_else(.data$Turnover_Ind == 1,
                                       -1 * .data$Diff_Time_Ratio,
                                       .data$Diff_Time_Ratio),
      posteam_timeouts_remaining = dplyr::if_else(.data$Turnover_Ind == 1,
                                                  .data$defeam_timeouts_pre,
                                                  .data$posteam_timeouts_pre),
      defteam_timeouts_remaining = dplyr::if_else(.data$Turnover_Ind == 1,
                                                  .data$posteam_timeouts_pre,
                                                  .data$defeam_timeouts_pre)
    )
  # get_preds_wp() is defined elsewhere in the package; it appears to return
  # the possession team's win probability for the supplied state.
  pass_pbp_data$airWP <- get_preds_wp(pass_pbp_data)
  # The model scored the turnover state from the new offense's view; flip it
  # back to the original possession team's perspective.
  pass_pbp_data$airWP <- ifelse(pass_pbp_data$Turnover_Ind == 1,
                                1 - pass_pbp_data$airWP, pass_pbp_data$airWP)
  # No air value if the half/game would already be over at the catch point.
  pass_pbp_data$airWP[which(pass_pbp_data$half_seconds_remaining <= 0)] <- 0
  pass_pbp_data$airWP[which(pass_pbp_data$game_seconds_remaining <= 0)] <- 0
  # airWPA relative to pre-play WP; yacWPA is the remainder of the play WPA.
  pass_pbp_data <- dplyr::mutate(pass_pbp_data, airWPA = .data$airWP - .data$wp,
                                 yacWPA = .data$wpa - .data$airWPA)
  # Two-point tries have no WP decomposition.
  pass_pbp_data$airWPA <- with(pass_pbp_data, ifelse(two_point_attempt == 1,
                                                     NA, airWPA))
  pass_pbp_data$yacWPA <- with(pass_pbp_data, ifelse(two_point_attempt == 1,
                                                     NA, yacWPA))
  # --- Overtime (qtr 5/6): WP follows different rules, so recompute ------
  if (any(pass_pbp_data$qtr == 5 | pass_pbp_data$qtr == 6)){
    pass_overtime_i <- which(pass_pbp_data$qtr == 5 | pass_pbp_data$qtr == 6)
    pass_overtime_df <- pass_pbp_data[pass_overtime_i,]
    overtime_i <- which(pbp_data$qtr == 5 | pbp_data$qtr == 6)
    overtime_df <- pbp_data[overtime_i,]
    # Drive_Diff counts drives since overtime started.
    overtime_df$First_Drive <- rep(min(overtime_df$drive,
                                       na.rm = TRUE),
                                   nrow(overtime_df))
    overtime_df <- dplyr::mutate(overtime_df,
                                 Drive_Diff = .data$drive - .data$First_Drive)
    # Second OT drive while trailing by exactly a field goal: the "answer"
    # possession under the modified-sudden-death rules.
    overtime_df$One_FG_Game <- ifelse(overtime_df$score_differential == -3 &
                                        overtime_df$Drive_Diff == 1, 1, 0)
    # Score the state after a hypothetical kickoff: touchback spot moved
    # from the 20 (yardline 80) to the 25 (yardline 75) in 2016.
    overtime_df_ko <- overtime_df
    overtime_df_ko$yardline_100 <- with(overtime_df_ko,
                                        ifelse(game_year < 2016 |
                                                 (game_year == 2016 & game_month < 4),
                                               80, 75))
    overtime_df_ko$down1 <- rep(1,nrow(overtime_df_ko))
    overtime_df_ko$down2 <- rep(0,nrow(overtime_df_ko))
    overtime_df_ko$down3 <- rep(0,nrow(overtime_df_ko))
    overtime_df_ko$down4 <- rep(0,nrow(overtime_df_ko))
    overtime_df_ko$ydstogo <- rep(10,nrow(overtime_df_ko))
    # NOTE(review): both branches are identical; the n == 1 case presumably
    # once needed special handling -- confirm whether this can be collapsed.
    if (nrow(overtime_df_ko) > 1) {
      overtime_df_ko_preds <- get_preds(overtime_df_ko)
    } else{
      overtime_df_ko_preds <- get_preds(overtime_df_ko)
    }
    # Win_Back: probability the kicking team gets the ball back without
    # allowing a touchdown (opponent fails to win outright).
    overtime_df_ko_preds <- dplyr::mutate(overtime_df_ko_preds,
                                          Win_Back = .data$No_Score + .data$Opp_Field_Goal + .data$Opp_Safety + .data$Opp_Touchdown)
    # Sudden death: any score wins. One-FG rule: TD wins outright; a FG only
    # wins if the opponent does not answer (Win_Back).
    overtime_df$Sudden_Death_WP <- overtime_df$fg_prob + overtime_df$td_prob + overtime_df$safety_prob
    overtime_df$One_FG_WP <- overtime_df$td_prob + (overtime_df$fg_prob * overtime_df_ko_preds$Win_Back)
    # Rebuild the catch-point state for overtime pass plays (same surgery as
    # in add_air_yac_ep_variables).
    overtime_pass_plays_i <- which(overtime_df$play_type == "pass" &
                                     !is.na(overtime_df$air_yards))
    overtime_pass_df <- overtime_df[overtime_pass_plays_i,]
    overtime_df_ko_preds_pass <- overtime_df_ko_preds[overtime_pass_plays_i,]
    overtime_pass_df <- dplyr::rename(overtime_pass_df,
                                      old_yrdline100 = "yardline_100",
                                      old_ydstogo = "ydstogo",
                                      old_TimeSecs_Remaining = "half_seconds_remaining",
                                      old_down = "down")
    overtime_pass_df$Turnover_Ind <- ifelse(overtime_pass_df$old_down == 4 &
                                              overtime_pass_df$air_yards < overtime_pass_df$old_ydstogo,
                                            1, 0)
    overtime_pass_df$yardline_100 <- ifelse(overtime_pass_df$Turnover_Ind == 0,
                                            overtime_pass_df$old_yrdline100 - overtime_pass_df$air_yards,
                                            100 - (overtime_pass_df$old_yrdline100 - overtime_pass_df$air_yards))
    overtime_pass_df$ydstogo <- ifelse(overtime_pass_df$air_yards >= overtime_pass_df$old_ydstogo |
                                         overtime_pass_df$Turnover_Ind == 1,
                                       10, overtime_pass_df$old_ydstogo - overtime_pass_df$air_yards)
    overtime_pass_df$down <- ifelse(overtime_pass_df$air_yards >= overtime_pass_df$old_ydstogo |
                                      overtime_pass_df$Turnover_Ind == 1,
                                    1, as.numeric(overtime_pass_df$old_down) + 1)
    overtime_pass_df$half_seconds_remaining <- overtime_pass_df$old_TimeSecs_Remaining - 5.704673
    overtime_pass_df <- overtime_pass_df %>%
      dplyr::mutate(
        down1 = dplyr::if_else(.data$down == 1, 1, 0),
        down2 = dplyr::if_else(.data$down == 2, 1, 0),
        down3 = dplyr::if_else(.data$down == 3, 1, 0),
        down4 = dplyr::if_else(.data$down == 4, 1, 0)
      )
    # NOTE(review): identical branches again (and the condition tests
    # overtime_df_ko, not overtime_pass_df) -- confirm intent.
    if (nrow(overtime_df_ko) > 1) {
      overtime_pass_data_preds <- get_preds(overtime_pass_df)
    } else{
      overtime_pass_data_preds <- get_preds(overtime_pass_df)
    }
    # On a turnover the offense/defense score probabilities swap sides.
    overtime_pass_data_preds <- dplyr::mutate(overtime_pass_data_preds,
                                              old_Opp_Field_Goal = .data$Opp_Field_Goal,
                                              old_Opp_Safety = .data$Opp_Safety,
                                              old_Opp_Touchdown = .data$Opp_Touchdown,
                                              old_Field_Goal = .data$Field_Goal,
                                              old_Safety = .data$Safety,
                                              old_Touchdown = .data$Touchdown)
    overtime_pass_data_preds$Opp_Field_Goal <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                                      overtime_pass_data_preds$old_Field_Goal,
                                                      overtime_pass_data_preds$Opp_Field_Goal)
    overtime_pass_data_preds$Opp_Safety <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                                  overtime_pass_data_preds$old_Safety,
                                                  overtime_pass_data_preds$Opp_Safety)
    overtime_pass_data_preds$Opp_Touchdown <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                                     overtime_pass_data_preds$old_Touchdown,
                                                     overtime_pass_data_preds$Opp_Touchdown)
    overtime_pass_data_preds$Field_Goal <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                                  overtime_pass_data_preds$old_Opp_Field_Goal,
                                                  overtime_pass_data_preds$Field_Goal)
    overtime_pass_data_preds$Safety <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                              overtime_pass_data_preds$old_Opp_Safety,
                                              overtime_pass_data_preds$Safety)
    overtime_pass_data_preds$Touchdown <- ifelse(overtime_pass_df$Turnover_Ind == 1,
                                                 overtime_pass_data_preds$old_Opp_Touchdown,
                                                 overtime_pass_data_preds$Touchdown)
    # Air WP under each OT regime, then pick by season/drive situation:
    # the one-FG rule applies from 2012 on the first drive (or the answer
    # drive of a one-FG game); sudden death otherwise.
    pass_overtime_df$Sudden_Death_airWP <- with(overtime_pass_data_preds, Field_Goal + Touchdown + Safety)
    pass_overtime_df$One_FG_airWP <- overtime_pass_data_preds$Touchdown + (overtime_pass_data_preds$Field_Goal*overtime_df_ko_preds_pass$Win_Back)
    pass_overtime_df$airWP <- ifelse(overtime_pass_df$game_year >= 2012 & (overtime_pass_df$Drive_Diff == 0 | (overtime_pass_df$Drive_Diff == 1 & overtime_pass_df$One_FG_Game == 1)),
                                     pass_overtime_df$One_FG_airWP, pass_overtime_df$Sudden_Death_airWP)
    pass_overtime_df$airWP[which(overtime_pass_df$half_seconds_remaining <= 0)] <- 0
    pass_overtime_df <- dplyr::mutate(pass_overtime_df, airWPA = .data$airWP - .data$wp,
                                      yacWPA = .data$wpa - .data$airWPA)
    pass_overtime_df$airWPA <- with(pass_overtime_df, ifelse(two_point_attempt == 1,
                                                             NA, airWPA))
    pass_overtime_df$yacWPA <- with(pass_overtime_df, ifelse(two_point_attempt == 1,
                                                             NA, yacWPA))
    # NOTE(review): this reassignment discards the overtime-specific airWPA /
    # yacWPA just computed above and restores the regulation-model values, so
    # the two lines below write back unchanged numbers. Looks like a bug
    # (dead overtime computation) -- confirm intended behavior upstream.
    pass_overtime_df <- pass_pbp_data[pass_overtime_i,]
    pass_pbp_data$airWPA[pass_overtime_i] <- pass_overtime_df$airWPA
    pass_pbp_data$yacWPA[pass_overtime_i] <- pass_overtime_df$yacWPA
  }
  # Clean completions with zero YAC: all of the play's WPA is air value.
  pass_pbp_data$yacWPA <- ifelse(pass_pbp_data$penalty == 0 & pass_pbp_data$yards_after_catch == 0 &
                                   pass_pbp_data$complete_pass == 1,
                                 0, pass_pbp_data$yacWPA)
  pass_pbp_data$airWPA <- ifelse(pass_pbp_data$penalty == 0 & pass_pbp_data$yards_after_catch == 0 &
                                   pass_pbp_data$complete_pass == 1,
                                 pass_pbp_data$wpa, pass_pbp_data$airWPA)
  # Write pass-play results back into the full data set (NA elsewhere).
  pbp_data$airWPA <- NA
  pbp_data$yacWPA <- NA
  pbp_data$airWPA[pass_plays_i] <- pass_pbp_data$airWPA
  pbp_data$yacWPA[pass_plays_i] <- pass_pbp_data$yacWPA
  # Per-game running totals, signed from each team's perspective; NAs are
  # zeroed first so cumsum() does not propagate them.
  pbp_data %>%
    dplyr::rename(air_wpa = "airWPA",
                  yac_wpa = "yacWPA") %>%
    dplyr::group_by(.data$game_id) %>%
    dplyr::mutate(comp_air_wpa = dplyr::if_else(.data$complete_pass == 1,
                                                .data$air_wpa, 0),
                  comp_yac_wpa = dplyr::if_else(.data$complete_pass == 1,
                                                .data$yac_wpa, 0),
                  home_team_comp_air_wpa = dplyr::if_else(.data$posteam == .data$home_team,
                                                          .data$comp_air_wpa, -.data$comp_air_wpa),
                  away_team_comp_air_wpa = dplyr::if_else(.data$posteam == .data$away_team,
                                                          .data$comp_air_wpa, -.data$comp_air_wpa),
                  home_team_comp_yac_wpa = dplyr::if_else(.data$posteam == .data$home_team,
                                                          .data$comp_yac_wpa, -.data$comp_yac_wpa),
                  away_team_comp_yac_wpa = dplyr::if_else(.data$posteam == .data$away_team,
                                                          .data$comp_yac_wpa, -.data$comp_yac_wpa),
                  home_team_comp_air_wpa = dplyr::if_else(is.na(.data$home_team_comp_air_wpa),
                                                          0, .data$home_team_comp_air_wpa),
                  away_team_comp_air_wpa = dplyr::if_else(is.na(.data$away_team_comp_air_wpa),
                                                          0, .data$away_team_comp_air_wpa),
                  home_team_comp_yac_wpa = dplyr::if_else(is.na(.data$home_team_comp_yac_wpa),
                                                          0, .data$home_team_comp_yac_wpa),
                  away_team_comp_yac_wpa = dplyr::if_else(is.na(.data$away_team_comp_yac_wpa),
                                                          0, .data$away_team_comp_yac_wpa),
                  total_home_comp_air_wpa = cumsum(.data$home_team_comp_air_wpa),
                  total_away_comp_air_wpa = cumsum(.data$away_team_comp_air_wpa),
                  total_home_comp_yac_wpa = cumsum(.data$home_team_comp_yac_wpa),
                  total_away_comp_yac_wpa = cumsum(.data$away_team_comp_yac_wpa),
                  home_team_raw_air_wpa = dplyr::if_else(.data$posteam == .data$home_team,
                                                         .data$air_wpa, -.data$air_wpa),
                  away_team_raw_air_wpa = dplyr::if_else(.data$posteam == .data$away_team,
                                                         .data$air_wpa, -.data$air_wpa),
                  home_team_raw_yac_wpa = dplyr::if_else(.data$posteam == .data$home_team,
                                                         .data$yac_wpa, -.data$yac_wpa),
                  away_team_raw_yac_wpa = dplyr::if_else(.data$posteam == .data$away_team,
                                                         .data$yac_wpa, -.data$yac_wpa),
                  home_team_raw_air_wpa = dplyr::if_else(is.na(.data$home_team_raw_air_wpa),
                                                         0, .data$home_team_raw_air_wpa),
                  away_team_raw_air_wpa = dplyr::if_else(is.na(.data$away_team_raw_air_wpa),
                                                         0, .data$away_team_raw_air_wpa),
                  home_team_raw_yac_wpa = dplyr::if_else(is.na(.data$home_team_raw_yac_wpa),
                                                         0, .data$home_team_raw_yac_wpa),
                  away_team_raw_yac_wpa = dplyr::if_else(is.na(.data$away_team_raw_yac_wpa),
                                                         0, .data$away_team_raw_yac_wpa),
                  total_home_raw_air_wpa = cumsum(.data$home_team_raw_air_wpa),
                  total_away_raw_air_wpa = cumsum(.data$away_team_raw_air_wpa),
                  total_home_raw_yac_wpa = cumsum(.data$home_team_raw_yac_wpa),
                  total_away_raw_yac_wpa = cumsum(.data$away_team_raw_yac_wpa)) %>%
    dplyr::ungroup() %>%
    return()
}
s_crandb <- function(..., char = NULL, select = "PTD", mode = "or", sensitive = FALSE,
                     perl = FALSE, fixed = FALSE,
                     agrep = FALSE, max.distance = 0.1, costs = NULL,
                     crandb = get("crandb", envir = .GlobalEnv)) {
  # Search the crandb data frame for packages whose selected columns match
  # the given words. mode "or" keeps any match; "relax" keeps rows matched
  # by at least two words; "and" additionally requires a third word to match
  # when more than two words are supplied.
  if (!is.data.frame(crandb)) stop("crandb is not loaded.")
  mode <- match.arg(mode, choices = c("or", "and", "relax"))
  columns <- fcccrandb(select, crandb)
  words <- if (is.null(char)) cnscinfun() else char
  if (is.list(words)) stop("... cannot be a list.")
  # Fixed-string matching is inherently case sensitive.
  if (fixed) sensitive <- TRUE
  # Row indices of crandb matching one word in one column.
  match_column <- function(word, column) {
    if (agrep) {
      agrep(word, crandb[, column], max.distance = max.distance,
            costs = costs, ignore.case = !sensitive)
    } else {
      grep(word, crandb[, column], ignore.case = !sensitive,
           perl = perl, fixed = fixed)
    }
  }
  # Union of matches over all selected columns for a single word.
  rows_for_word <- function(word) {
    sort(unique(unlist(lapply(columns, function(column) match_column(word, column)))))
  }
  # One entry per (word, row) pair: the multiplicity of a row equals the
  # number of words that matched it.
  hits <- unlist(lapply(words, rows_for_word))
  if ((mode == "and" || mode == "relax") && length(words) > 1L) {
    needed <- if (mode == "and" && length(words) > 2L) 3L else 2L
    counts <- table(hits)
    hits <- as.integer(names(counts)[counts >= needed])
  }
  crandb[sort(unique(hits)), "Package"]
}
s_crandb_list <- function(..., char = NULL, select = "PTD", mode = "or", sensitive = FALSE,
                          perl = FALSE, fixed = FALSE,
                          agrep = FALSE, max.distance = 0.1, costs = NULL,
                          crandb = get("crandb", envir = .GlobalEnv)) {
  # Run s_crandb() once per word and return a list of results, one element
  # per word, named by the word.
  if (!is.data.frame(crandb)) stop("crandb is not loaded.")
  mode <- match.arg(mode, choices = c("or", "and", "relax"))
  select <- fcccrandb(select, crandb)
  words <- if (is.null(char)) cnscinfun() else char
  if (is.list(words)) stop("... cannot be a list.")
  search_one <- function(word) {
    s_crandb(char = word, select = select, mode = mode, sensitive = sensitive,
             perl = perl, fixed = fixed, agrep = agrep,
             max.distance = max.distance, costs = costs, crandb = crandb)
  }
  lst <- lapply(words, search_one)
  names(lst) <- words
  lst
}
s_crandb_PTD <- function(..., char = NULL, mode = "or", sensitive = FALSE,
                         perl = FALSE, fixed = FALSE,
                         agrep = FALSE, max.distance = 0.1, costs = NULL,
                         crandb = get("crandb", envir = .GlobalEnv)) {
  # For each word, search the Package, Title and Description columns
  # separately and return the three result sets in a named list. With a
  # single word the list is returned directly; with several words a list of
  # such lists (named by word) is returned.
  if (!is.data.frame(crandb)) stop("crandb is not loaded.")
  mode <- match.arg(mode, choices = c("or", "and", "relax"))
  words <- if (is.null(char)) cnscinfun() else char
  if (is.list(words)) stop("... cannot be a list.")
  search_fields <- function(word) {
    in_field <- function(field) {
      s_crandb(char = word, select = field,
               mode = mode, sensitive = sensitive,
               perl = perl, fixed = fixed, agrep = agrep,
               max.distance = max.distance, costs = costs,
               crandb = crandb)
    }
    list("Package" = in_field("P"),
         "Title" = in_field("T"),
         "Description" = in_field("D"))
  }
  if (length(words) == 1L) {
    search_fields(words)
  } else {
    sapply(words, search_fields, simplify = FALSE)
  }
}
s_crandb_AM <- function(..., char = NULL, mode = "or", sensitive = FALSE,
                        perl = FALSE, fixed = FALSE,
                        agrep = FALSE, max.distance = 0.1, costs = NULL,
                        crandb = get("crandb", envir = .GlobalEnv)) {
  # For each word, search the Author and Maintainer columns separately and
  # return the two result sets in a named list. With a single word the list
  # is returned directly; with several words a list of such lists (named by
  # word) is returned.
  if (!is.data.frame(crandb)) stop("crandb is not loaded.")
  mode <- match.arg(mode, choices = c("or", "and", "relax"))
  words <- if (is.null(char)) cnscinfun() else char
  if (is.list(words)) stop("... cannot be a list.")
  search_fields <- function(word) {
    in_field <- function(field) {
      s_crandb(char = word, select = field,
               mode = mode, sensitive = sensitive,
               perl = perl, fixed = fixed, agrep = agrep,
               max.distance = max.distance, costs = costs,
               crandb = crandb)
    }
    list("Author" = in_field("A"),
         "Maintainer" = in_field("M"))
  }
  if (length(words) == 1L) {
    search_fields(words)
  } else {
    sapply(words, search_fields, simplify = FALSE)
  }
}
context("text_to_lower")
# Tests for text_to_lower(): digits pass through, ASCII letters are
# lower-cased, non-ASCII letters are handled ("\uc4" is A-umlaut, "\ue4"
# its lowercase form), and list input is mapped element-wise.
test_that("text_to_lower works", {
  # Digits are unchanged.
  expect_true({
    text_to_lower("123")=="123"
  })
  # Already-lowercase input is unchanged.
  expect_true({
    text_to_lower("abcd")=="abcd"
  })
  # Mixed case is fully lowered.
  expect_true({
    text_to_lower("AbCd")=="abcd"
  })
  # Whole uppercase alphabet maps to the lowercase alphabet.
  expect_true({
    all(text_to_lower(LETTERS)==letters)
  })
  # Lowercase a-umlaut is unchanged.
  expect_true({
    text_to_lower("\ue4")=="\ue4"
  })
  # Uppercase A-umlaut is lowered (non-ASCII case mapping).
  expect_true({
    text_to_lower("\uc4")=="\ue4"
  })
  # List input is processed element-wise and a list is returned.
  expect_equal({
    text_to_lower(
      list(
        "Title Case in the Light of Modern History",
        "Title Case in the Light of Modern History",
        "Title Case in the Light of Modern History"
      )
    )
  },
  list(
    "title case in the light of modern history",
    "title case in the light of modern history",
    "title case in the light of modern history"
  )
  )
})
# Tests for text_to_upper(): mirror image of the text_to_lower() suite,
# including the non-ASCII pair ("\ue4" = a-umlaut, "\uc4" = A-umlaut) and
# element-wise handling of list input.
test_that("text_to_upper works", {
  # Digits are unchanged.
  expect_true({
    text_to_upper("123")=="123"
  })
  # Lowercase input is fully raised.
  expect_true({
    text_to_upper("abcd")=="ABCD"
  })
  # Already-uppercase input is unchanged.
  expect_true({
    text_to_upper("ABCD")=="ABCD"
  })
  # Mixed case is fully raised.
  expect_true({
    text_to_upper("AbCd")=="ABCD"
  })
  # Whole lowercase alphabet maps to the uppercase alphabet.
  expect_true({
    all(text_to_upper(letters)==LETTERS)
  })
  # Lowercase a-umlaut is raised (non-ASCII case mapping).
  expect_true({
    text_to_upper("\ue4")=="\uc4"
  })
  # Uppercase A-umlaut is unchanged.
  expect_true({
    text_to_upper("\uc4")=="\uc4"
  })
  # List input is processed element-wise and a list is returned.
  expect_equal({
    text_to_upper(
      list(
        "Title Case in the Light of Modern History",
        "Title Case in the Light of Modern History",
        "Title Case in the Light of Modern History"
      )
    )
  },
  list(
    "TITLE CASE IN THE LIGHT OF MODERN HISTORY",
    "TITLE CASE IN THE LIGHT OF MODERN HISTORY",
    "TITLE CASE IN THE LIGHT OF MODERN HISTORY"
  )
  )
})
# Tests for text_to_title_case(): digits pass through, words are
# capitalized, and small words ("in", "the", "of") stay lowercase except --
# per the expectations below -- when they begin the string. List input is
# mapped element-wise.
test_that("text_to_title_case works", {
  # Digits are unchanged.
  expect_true({
    text_to_title_case("123")=="123"
  })
  # Every word is capitalized when no small words are present.
  expect_true({
    text_to_title_case("fi fa fom fei")=="Fi Fa Fom Fei"
  })
  # Small words remain lowercase mid-sentence ("in", "the", "of").
  expect_true({
    text_to_title_case("title case in the light of modern history")==
      "Title Case in the Light of Modern History"
  })
  # List input is processed element-wise and a list is returned.
  expect_equal({
    text_to_title_case(
      list(
        "title case in the light of modern history",
        "title case in the light of modern history",
        "title case in the light of modern history"
      )
    )
  },
  list(
    "Title Case in the Light of Modern History",
    "Title Case in the Light of Modern History",
    "Title Case in the Light of Modern History"
  )
  )
})
edcdf <- function(countries, init.y, final.y, database) {
  # Plot the population-weighted cumulative distribution of years of
  # schooling for a set of countries over a range of 5-year periods, using
  # pre-fitted generalized-gamma parameters from the chosen database.
  #
  # Args:
  #   countries: character vector of country codes, a single region name
  #     (expanded via data_countries), or "all".
  #   init.y, final.y: first/last year (multiples of 5, clamped to
  #     1970-2010).
  #   database: one of "total15", "total25", "male15", "male25",
  #     "female15", "female25".
  # Returns: a list with the matched country names (a plot is drawn as a
  #   side effect).

  # --- validate the requested period (data cover 1970-2010, 5-year steps) --
  if (init.y < 1970) init.y <- 1970
  if (final.y > 2010) final.y <- 2010
  if (final.y < init.y) {
    stop("Initial year must be earlier than final year.")
  }
  if ((init.y / 5) %% 1 != 0 | init.y == "") {
    stop("Starting year incorrectly specified")
  }
  if (final.y == "" | (final.y / 5) %% 1 != 0) {
    stop("Final year incorrectly specified")
  }
  # --- select the parameter set ------------------------------------------
  valid.db <- c("total15", "total25", "male15", "male25",
                "female15", "female25")
  if (!(database %in% valid.db)) {
    stop("Database incorrectly specified. Use total15, total25, male15, male25, female15 or female25.")
  }
  dataset <- switch(database,
                    total15 = estim_total15,
                    total25 = estim_total25,
                    male15 = estim_male15,
                    male25 = estim_male25,
                    female15 = estim_female15,
                    female25 = estim_female25)
  # --- expand a region name or "all" into country codes ------------------
  if (any(countries %in% levels(data_countries$Region))) {
    if (length(which(countries %in% levels(data_countries$Region))) < 2) {
      countries <- data_countries$Code[data_countries$Region ==
        countries[which(countries %in% levels(data_countries$Region))]]
    } else {
      stop("More than two regions used as countries.")
    }
  }
  if (any(countries == "all")) {
    countries <- data_countries$Code
  }
  countries <- as.data.frame(countries)
  ok.data <- merge(x = dataset, y = countries, by.x = "code", by.y = "countries")
  if (nrow(ok.data) == 0) {
    stop("Countries are incorrectly specified. Check the list of countries.")
  }
  if (length(unique(ok.data$country)) != nrow(countries)) {
    warning("Some countries are incorrectly specified. Check the list of countries.")
  }
  # --- evaluate the weighted mixture CDF per period -----------------------
  time <- seq(init.y, final.y, 5)
  x.axis <- seq(0.0000001, 30, 0.01)
  qED <- matrix(NA, length(x.axis), length(time))
  for (k in seq_along(time)) {
    a.x <- ok.data$parA[ok.data$year == time[k]]
    p.x <- ok.data$parP[ok.data$year == time[k]]
    b.x <- ok.data$parB[ok.data$year == time[k]]
    # Population weights of the selected countries in period k.
    w <- ok.data$pop[ok.data$year == time[k]] / sum(ok.data$pop[ok.data$year == time[k]])
    # Weighted mixture CDF of the per-country generalized-gamma fits
    # (pgengamma.orig is a distribution function, hence "cdf").
    cdfED <- function(x) {
      w %*% pgengamma.orig(x, a.x, b.x, p.x)
    }
    for (i in seq_along(x.axis)) {
      qED[i, k] <- cdfED(x.axis[i])
    }
  }
  # --- plot one curve per period ------------------------------------------
  plot(x.axis, qED[, 1], xlab = "Years of schooling", ylab = "Probability", panel.first = grid(col="gray78"), ylim=c(0,1),xlim = c(0.5, 30), type = "l", pch = 20, col = 1)
  box(lwd = 2)
  if (length(time) > 1) {
    for (j in 2:ncol(qED)) {
      points(x.axis, qED[, j], col = j, type = "l")
    }
  }
  legendtext <- time
  legend("bottomright", legend = legendtext, cex = 0.7,
         lty = 1, col = 1:ncol(qED), ncol = 2)
  list(countries = unique(ok.data$country))
}
setMethodS3("findPandoc", "default", function(mustExist=TRUE, ..., verbose=FALSE) {
  # Locate the 'pandoc' executable and attach its version as an attribute.
  #
  # Search order: R_PANDOC env var, then RSTUDIO_PANDOC, then the PATH via
  # Sys.which(). Returns the pathname (with attribute "version") or NULL
  # when not found and mustExist is FALSE; throws when mustExist is TRUE.
  mustExist <- Arguments$getLogical(mustExist)
  verbose <- Arguments$getVerbose(verbose)
  if (verbose) {
    pushState(verbose)
    on.exit(popState(verbose))
  }
  command <- "pandoc"
  verbose && enter(verbose, "Locating external software")
  verbose && cat(verbose, "Command: ", command)
  bin <- Sys.getenv("R_PANDOC")
  if (identical(bin, "")) bin <- Sys.getenv("RSTUDIO_PANDOC")
  if (identical(bin, "")) bin <- Sys.which(command)
  if (identical(bin, "")) bin <- NULL
  if (!isFile(bin)) bin <- NULL
  verbose && cat(verbose, "Located pathname: ", bin)
  if (mustExist && !isFile(bin)) {
    # FIX: message read "Failed to located"; corrected wording.
    throw(sprintf("Failed to locate external executable: '%s'", command))
  }
  if (isFile(bin)) {
    # Ask the binary for its version banner; any failure leaves output NULL
    # so the pathname is still returned, just without a version attribute.
    output <- tryCatch({
      system2(bin, args="--version", stdout=TRUE)
    }, error = function(ex) {
      NULL
    })
    if (!is.null(output)) {
      name <- "pandoc"
      pattern <- "pandoc.* ([0-9.-]+).*"
      ver <- grep(pattern, output, value=TRUE)
      ver <- gsub(pattern, "\\1", ver)
      if (length(ver) == 0) {
        # FIX: the format string had one %s but two values were supplied,
        # so the captured output was silently dropped from the message.
        stop(sprintf("Failed to infer version of %s based on captured output: %s",
                     sQuote(name), paste(dQuote(output), collapse=", ")))
      }
      ver <- numeric_version(ver, strict = FALSE)
      ver <- ver[!is.na(ver)]
      if (length(ver) == 0) {
        # FIX: stop() concatenates its arguments without sprintf-style
        # substitution, so "%s" was previously printed literally.
        stop(sprintf("Failed to parse version of %s based on captured output: %s",
                     sQuote(name), paste(dQuote(output), collapse=", ")))
      }
      ver <- ver[[1]]
      attr(bin, "version") <- ver
    }
  }
  verbose && exit(verbose)
  bin
})
Xbeta <-
function(X,beta)
{
  # Linear predictor over a 3-d covariate array:
  #   XB = sum_k beta[k] * X[, , k]
  #
  # Args:
  #   X: numeric array of dimension (n, m, p) -- one n x m matrix per
  #     coefficient.
  #   beta: numeric vector of length p (an empty beta yields a zero matrix).
  # Returns: an n x m numeric matrix.
  #
  # FIX: the loop index was the convoluted
  # seq(1, length(beta), length = length(beta)); seq_along(beta) is the
  # idiomatic equivalent and is equally safe for length-zero beta.
  XB <- matrix(0, nrow = dim(X)[1], ncol = dim(X)[2])
  for (k in seq_along(beta)) {
    XB <- XB + beta[k] * X[, , k]
  }
  XB
}
# Tests for get_hospital_attribute(): looks up one hospital attribute from
# another. Defaults appear to be key = hospital id ('h0001'-style codes) and
# value = short name; both key and value columns can be overridden.
test_that("basic usage", {
  # Default lookup: id -> short name.
  expect_identical(get_hospital_attribute('h0001'), 'ULS do Nordeste')
  # Same key, explicit value column.
  expect_identical(get_hospital_attribute('h0001', value = 'hospital_full_name'), 'Unidade Local de Saúde do Nordeste, EPE')
  # Non-default key column: short name -> full name.
  expect_identical(get_hospital_attribute('IPO de Lisboa', key = 'hospital_short_name', value = 'hospital_full_name'), 'Instituto Português de Oncologia de Lisboa Francisco Gentil, EPE')
})
test_that("vectorised over `x`", {
  # A vector of ids yields a vector of attributes, in matching order.
  expect_identical(get_hospital_attribute(c('h0002', 'h0004')), c('ULS do Alto Minho', 'H Santa Maria Maior'))
  expect_identical(get_hospital_attribute(c('h0016', 'h0017', 'h0018'), value = 'hospital_legal_status'), c('EPE', 'SPA', 'IPSS-CA'))
})
Test_uSPA <- function(LossDiff, L, B=999){
  # Bootstrap test for uniform superior predictive ability (uSPA).
  #
  # Args:
  #   LossDiff: loss-differential series (coerced to a matrix if needed).
  #   L: block length forwarded to Bootstrap_uSPA().
  #   B: number of bootstrap replications (default 999).
  # Returns: list with the bootstrap p-value and the uSPA test statistic.
  loss_mat <- if (is.matrix(LossDiff)) LossDiff else as.matrix(LossDiff)
  boot_res <- Bootstrap_uSPA(loss_mat, L, B)
  # p-value: share of bootstrap statistics exceeding the observed one.
  p_val <- mean(boot_res$t_uSPA < boot_res$t_uSPA_b)
  list("p_value" = p_val, "t_uSPA" = boot_res$t_uSPA)
}
signal_motion <- function(
  data,
  time,
  dt,
  window,
  step,
  order = "xyz"
) {
  # Particle-motion analysis over a sliding window on a three-component
  # seismic signal. For each window the covariance matrix of the three
  # components is eigen-decomposed to yield an eigenvalue ratio (degree of
  # rectilinearity), an azimuth and an inclination angle.
  #
  # Args:
  #   data: list of three eseis objects, or a list/data frame of three
  #     numeric component vectors.
  #   time: sample times (optional when dt is given or data is eseis).
  #   dt: sampling interval (taken from eseis metadata when available).
  #   window: window length in samples (default: 1 % of the record,
  #     rounded, forced even).
  #   step: window advance in samples (default: half the window).
  #   order: character string giving the component order of `data`,
  #     e.g. "xyz"; used to sort columns alphabetically below.
  # Returns:
  #   list (or eseis object) with time, eigen, azimuth, inclination.

  # Resolve time/dt for non-eseis input; eseis metadata is read later.
  if(missing(dt) == TRUE && class(data[[1]])[1] != "eseis") {
    if(missing(time) == TRUE) {
      if(missing(dt) == TRUE) {
        stop("Neither time nor dt provided!")
      } else {
        time <- seq(from = 0,
                    by = dt,
                    length.out = nrow(data))
      }
    }
  } else if(missing(dt) == TRUE){
    dt <- NULL
    time <- NULL
  }
  # Default window: 1 % of the record length.
  # NOTE(review): `n` is computed here (handling the eseis case) but the
  # default below uses nrow(data); for a list of eseis objects nrow() is
  # NULL, which would error -- presumably `n` was intended. Confirm.
  if(missing(window) == TRUE) {
    if(class(data[[1]])[1] == "eseis") {
      n <- data[[1]]$meta$n
    } else {
      n <- nrow(data)
    }
    window <- round(x = nrow(data) * 0.01,
                    digits = 0)
  }
  # Force an even window length so the centre index is well-defined.
  if(window %% 2 != 0) {
    window <- window - 1
  }
  # Default step: 50 % window overlap.
  if(missing(step) == TRUE) {
    step <- round(x = window * 0.5,
                  digits = 0)
  }
  # Record call metadata for the eseis processing history.
  eseis_t_0 <- Sys.time()
  eseis_arguments <- list(data = "",
                          time = time,
                          dt = dt,
                          window = window,
                          step = step,
                          order = order)
  # Unpack eseis objects: extract signal vectors and rebuild time from meta.
  if(class(data[[1]])[1] == "eseis") {
    eseis_class <- TRUE
    eseis_data <- data
    data <- lapply(X = data, FUN = function(X) {
      X$signal
    })
    dt <- eseis_data[[1]]$meta$dt
    time <- seq(from = eseis_data[[1]]$meta$starttime,
                by = eseis_data[[1]]$meta$dt,
                length.out = eseis_data[[1]]$meta$n)
  } else {
    eseis_class <- FALSE
  }
  # Assemble components as columns of a data frame, sorted so that the
  # columns are in alphabetical component order regardless of `order`.
  if(class(data)[1] == "list") {
    data <- do.call(cbind, data)
  }
  data <- as.data.frame(x = data)
  component_ID <- strsplit(x = order, split = "")[[1]]
  data <- data[,order(component_ID)]
  # Window boundaries and centre times.
  window_left <- seq(from = 1,
                     to = nrow(data) - window,
                     by = step)
  window_right <- seq(from = window,
                      to = nrow(data),
                      by = step)
  time_i <- time[(window_left + window_right) / 2]
  # Preallocate output series.
  eig_ratio_i <- numeric(length = length(window_left))
  azimuth_i <- numeric(length = length(window_left))
  inclination_i <- numeric(length = length(window_left))
  # Eigen-analysis of the component covariance in each window.
  for(i in 1:length(window_left)) {
    data_i <- data[window_left[i]:window_right[i],]
    cov_i <- stats::var(x = data_i)
    eig_i <- eigen(x = cov_i, symmetric = TRUE)
    # 1 when motion is purely rectilinear (second/third eigenvalues zero).
    eig_ratio_i[i] <- 1 - ((eig_i$values[2] + eig_i$values[3]) /
                             (2 * eig_i$values[1]))
    # Angles derived from the dominant eigenvector (degrees).
    azimuth_i[i] <- 180 / pi * atan2(eig_i$vectors[2,1],
                                     eig_i$vectors[3,1])
    inclination_i[i] <- abs(180 / pi * atan2(eig_i$vectors[1,1],
                                             sqrt(eig_i$vectors[2,1]^2 +
                                                    eig_i$vectors[3,1]^2)))
  }
  data_out <- list(time = time_i,
                   eigen = eig_ratio_i,
                   azimuth = azimuth_i,
                   inclination = inclination_i)
  # For eseis input, wrap the result and append a history entry.
  if(eseis_class == TRUE) {
    eseis_data <- list(time = time_i,
                       eigen = eig_ratio_i,
                       azimuth = azimuth_i,
                       inclination = inclination_i,
                       history = eseis_data[[1]]$history)
    eseis_duration <- as.numeric(difftime(time1 = Sys.time(),
                                          time2 = eseis_t_0,
                                          units = "secs"))
    eseis_data$history[[length(eseis_data$history) + 1]] <-
      list(time = Sys.time(),
           call = "signal_motion()",
           arguments = eseis_arguments,
           duration = eseis_duration)
    names(eseis_data$history)[length(eseis_data$history)] <-
      as.character(length(eseis_data$history))
    class(eseis_data)[1] <- "eseis"
    data_out <- eseis_data
  }
  return(data_out)
}
editor.tagcloud <- function( boxes ) {
  # Interactive editor for a tag-cloud layout: the user repeatedly picks a
  # label (left-click) and clicks its new position; a right-click ends the
  # session. Returns the updated boxes invisibly.
  #
  # Args:
  #   boxes: tagcloud layout object (rows with x, y, w, h and tags).
  plot( boxes, with.box= TRUE )
  nstep <- 10
  while( 1 ) {
    # Sample nstep points along each box so a click anywhere on the label
    # can be matched by identify().
    xvec <- as.vector( sapply( 1:nrow( boxes ), function( x ) seq( boxes[x,"x"], boxes[x,"x"] + boxes[x,"w"], length.out= nstep ) ) )
    yvec <- as.vector( sapply( 1:nrow( boxes ), function( x ) seq( boxes[x,"y"], boxes[x,"y"] + boxes[x,"h"], length.out= nstep ) ) )
    catf( "Please click on the label you want to move\n" )
    catf( "(right-click to finish)\n" )
    i <- identify( xvec, yvec, n= 1, plot= F )
    if( length( i ) == 0 ) break
    # Map the sampled-point index back to its box (row) index.
    # FIX: as.integer(i/nstep) + 1 was off by one -- the last sample point
    # of each box (e.g. i == nstep) was attributed to the NEXT box.
    i <- ( i - 1 ) %/% nstep + 1
    catf( "Please click on the new position for:\n" )
    catf( "%s\n", boxes$tags[i] )
    xy <- locator( 1 )
    debugpr( xy )
    # FIX: locator() returns list(x=, y=); assign the scalar components
    # rather than one-element sublists (xy[1], xy[2]).
    boxes[i,"x"] <- xy$x
    boxes[i,"y"] <- xy$y
    plot( boxes, with.box= TRUE )
  }
  plot( boxes )
  return( invisible( boxes ) )
}
## Roxygen documentation placeholders: each NULL is the object stub that a
## preceding #' documentation block (not visible in this chunk) attaches to.
## Presumably these document datasets or topics without code — do not remove.
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
NULL
get_issues_closed_state <- function(base_url, api_key, owner, repo) {
  ## Fetch all closed issues of a Gitea repository, following the paged API
  ## until an empty page is returned.
  ##
  ## base_url: Gitea server URL (a trailing "/" is stripped).
  ## api_key:  personal access token, sent as "Authorization: token <key>".
  ## owner:    repository owner.
  ## repo:     repository name.
  ##
  ## Returns a data.frame of issues with list-columns dropped.
  ## Raises an error on missing arguments, connection failure, or HTTP error.
  if (missing(base_url)) {
    stop("Please add a valid URL")
  } else if (missing(api_key)) {
    stop("Please add a valid API token")
  } else if (missing(owner)) {
    stop("Please add a valid owner")
  } else if (missing(repo)) {
    stop("Please add a valid repository")
  }
  ## Loop-invariant pieces, hoisted out of the paging loop.
  base_url <- sub("/$", "", base_url)
  authorization <- paste("token", api_key)
  page <- 1
  content_issues <- data.frame()
  repeat {
    gitea_url <- file.path(base_url, "api/v1", "repos", owner, repo,
                           paste0("issues?state=closed&page=", page))
    r <- tryCatch(GET(gitea_url,
                      add_headers(Authorization = authorization),
                      accept_json()),
                  error = function(cond) "Failure")
    ## inherits() instead of class(r) != "response" (class() may be a vector).
    if (!inherits(r, "response")) {
      stop(paste0("Error consulting the url: ", gitea_url))
    }
    stop_for_status(r)
    page_issues <- fromJSON(content(r, as = "text"))
    page_issues <- jsonlite::flatten(as.data.frame(page_issues))
    if (nrow(page_issues) == 0) {
      ## Empty first page: keep the (possibly zero-column) frame so the
      ## return value matches the previous behavior for empty repositories.
      if (page == 1) content_issues <- page_issues
      break
    }
    content_issues <- if (page == 1) {
      page_issues
    } else {
      dplyr::bind_rows(content_issues, page_issues)
    }
    page <- page + 1
  }
  ## Drop list-columns (nested JSON) so the result is a flat data.frame.
  content_issues <- dplyr::select_if(content_issues,
                                     .predicate = function(x) !is.list(x))
  return(content_issues)
}
spml.fbed <- function(y, x, alpha = 0.05, K = 0,
                      backward = FALSE, parallel = FALSE, tol = 1e-07, maxiters = 100) {
  ## Forward-backward early-dropping variable selection with SPML
  ## (circular-response) regression; a thin wrapper around
  ## Rfast2::fbed.reg with `type` fixed to "spml".
  ##
  ## y:        response (angular) variable.
  ## x:        matrix/data frame of candidate predictors.
  ## alpha:    significance threshold for inclusion.
  ## K:        number of repetitions of the early-dropping phase.
  ## backward: run the backward phase after selection?
  ## parallel: parallelize the search?
  ## tol, maxiters: convergence controls passed through.
  result <- Rfast2::fbed.reg(y, x, alpha = alpha, type = "spml", K = K,
                             backward = backward, parallel = parallel,
                             tol = tol, maxiters = maxiters)
  result
}
setMethodS3("getRegions", "profileCGH", function(this, nbrOfSnps=c(1,Inf), smoothing=NULL, ...) {
  ## Summarise the segmented regions of a 'profileCGH' object as a data frame
  ## with one row per region (chromosome, start/stop, length, SNP count,
  ## smoothing level and signal-to-noise ratio), optionally filtered by the
  ## number of SNPs and/or the smoothing level.
  ##
  ## nbrOfSnps: length-1 or length-2 numeric; regions whose SNP count falls
  ##            outside [nbrOfSnps[1], nbrOfSnps[2]] are dropped.
  ## smoothing: NULL, or a vector/matrix of (min, max) smoothing ranges;
  ##            regions outside every range are dropped.
  if (length(nbrOfSnps) == 1)
    nbrOfSnps <- c(nbrOfSnps, Inf)
  ## Scalar condition: use && (was the vectorized `&`).
  if (!is.null(smoothing) && !is.matrix(smoothing)) {
    smoothing <- matrix(smoothing, ncol=2, byrow=TRUE)
  }
  pv <- this$profileValues
  stdvs <- this$SigmaC$Value
  ## Unit annotations are only available when the profile carries both the
  ## chip type and the unit indices.
  hasUnits <- (!is.null(pv$chipType) && !is.null(pv$units))
  if (hasUnits) {
    chipType <- as.character(pv$chipType)
    chipType <- gsub("[,-]monocell$", "", chipType)
    unitNames <- character(nrow(pv))
    for (cc in unique(chipType)) {
      cdf <- AffymetrixCdfFile$byChipType(cc)
      idxs <- which(chipType == cc)
      unitNames[idxs] <- getUnitNames(cdf, units=pv$units[idxs])
    }
  }
  ## NOTE(review): rsIds is always NULL, so the firstRsId/lastRsId columns
  ## below are never added; rsId lookup appears unfinished. (A dead
  ## `rsIds <- character(nrow(pv))` that was immediately overwritten here
  ## has been removed.)
  rsIds <- NULL
  uRegions <- unique(pv$Region)
  nbrOfRegions <- length(uRegions)
  colClasses <- c(Chromosome="character", start="integer",
                  stop="integer", length="integer", nbrOfSnps="integer",
                  Smoothing="double", SNRtoZero="double")
  if (hasUnits) {
    colClasses <- c(colClasses, firstSnp="character", lastSnp="character")
    if (!is.null(rsIds))
      colClasses <- c(colClasses, firstRsId="character", lastRsId="character")
  }
  df <- dataFrame(colClasses, nrow=nbrOfRegions)
  for (rr in seq_along(uRegions)) {
    region <- uRegions[rr]
    idx <- which(region == pv$Region)
    idx <- idx[c(1,length(idx))]  # first and last SNP of the region
    df[rr,"Chromosome"] <- pv$Chromosome[idx[1]]
    df[rr,c("start", "stop")] <- as.integer(pv$PosBase[idx])
    df[rr,"length"] <- as.integer(diff(pv$PosBase[idx]))
    df[rr,"nbrOfSnps"] <- as.integer(diff(idx)+1)
    df[rr,"Smoothing"] <- pv$Smoothing[idx[1]]
    df[rr,"SNRtoZero"] <- abs(df[rr,"Smoothing"]) / stdvs
    if (hasUnits) {
      df[rr,c("firstSnp", "lastSnp")] <- unitNames[idx]
    }
  }
  ## Filter on SNP count.
  if (!is.null(nbrOfSnps)) {
    keep <- (nbrOfSnps[1] <= df$nbrOfSnps & df$nbrOfSnps <= nbrOfSnps[2])
    df <- df[keep,]
  }
  ## Filter on smoothing level: keep a region if it falls in ANY given range.
  if (!is.null(smoothing)) {
    keep <- rep(FALSE, nrow(df))
    for (kk in seq_len(nrow(smoothing))) {
      range <- smoothing[kk,]
      keep <- keep | (range[1] <= df$Smoothing & df$Smoothing <= range[2])
    }
    df <- df[keep,]
  }
  df
}, private=TRUE)
## Generic for computing per-root network summary measures (degree and
## contact-chain statistics). Dispatches on 'x'; methods are defined for
## ContactTrace objects, lists of them, and raw movement data.frames.
setGeneric(
    "NetworkSummary",
    signature = "x",
    function(x, ...) {
        standardGeneric("NetworkSummary")
    }
)
## One-row summary for a single ContactTrace object: the in/out observation
## windows and their lengths in days, plus degree and contact-chain measures
## for both directions.
setMethod(
    "NetworkSummary",
    signature(x = "ContactTrace"),
    function(x) {
        data.frame(root = x@root,
                   inBegin = x@ingoingContacts@tBegin,
                   inEnd = x@ingoingContacts@tEnd,
                   inDays = as.integer(x@ingoingContacts@tEnd -
                                       x@ingoingContacts@tBegin),
                   outBegin = x@outgoingContacts@tBegin,
                   outEnd = x@outgoingContacts@tEnd,
                   outDays = as.integer(x@outgoingContacts@tEnd -
                                        x@outgoingContacts@tBegin),
                   inDegree = InDegree(x@ingoingContacts),
                   outDegree = OutDegree(x@outgoingContacts),
                   ingoingContactChain =
                       IngoingContactChain(x@ingoingContacts),
                   outgoingContactChain =
                       OutgoingContactChain(x@outgoingContacts),
                   stringsAsFactors = FALSE)
    }
)
## List method: every element must be a ContactTrace; the per-object
## summaries are stacked row-wise into one data.frame.
setMethod(
    "NetworkSummary",
    signature(x = "list"),
    function(x) {
        is_trace <- vapply(x, inherits, logical(1), what = "ContactTrace")
        if (!all(is_trace)) {
            stop("list must only contain 'ContactTrace' objects.")
        }
        do.call("rbind", lapply(x, NetworkSummary))
    }
)
## data.frame method: compute degree and contact-chain measures directly
## from raw movement records. The caller supplies either (tEnd, days) —
## which are expanded to all root x tEnd x days combinations — or the four
## explicit interval bounds (inBegin, inEnd, outBegin, outEnd); supplying
## both is an error. The heavy lifting is done by native code via .Call.
setMethod(
    "NetworkSummary",
    signature(x = "data.frame"),
    function(x,
             root,
             tEnd = NULL,
             days = NULL,
             inBegin = NULL,
             inEnd = NULL,
             outBegin = NULL,
             outEnd = NULL) {
        ## --- validate the movement data -------------------------------
        if (!all(c("source", "destination", "t") %in% names(x))) {
            stop("x must contain the columns source, destination and t.")
        }
        ## Identifier columns: factor/integer are coerced to character;
        ## anything else but character is rejected.
        if (any(is.factor(x$source), is.integer(x$source))) {
            x$source <- as.character(x$source)
        } else if (!is.character(x$source)) {
            stop("invalid class of column source in x")
        }
        if (any(is.factor(x$destination), is.integer(x$destination))) {
            x$destination <- as.character(x$destination)
        } else if (!is.character(x$destination)) {
            stop("invalid class of column destination in x")
        }
        if (any(is.character(x$t), is.factor(x$t))) {
            x$t <- as.Date(x$t)
        }
        if (!identical(class(x$t), "Date")) {
            stop("invalid class of column t in x")
        }
        if (any(is.na(x$t))) {
            stop("t in x contains NA")
        }
        ## Duplicate movements carry no extra information.
        x <- unique(x[, c("source", "destination", "t")])
        ## --- validate root --------------------------------------------
        if (missing(root)) {
            stop("Missing root in call to NetworkSummary")
        }
        if (any(is.factor(root), is.integer(root))) {
            root <- as.character(root)
        } else if (is.numeric(root)) {
            ## Non-integer numeric roots are rejected.
            ## NOTE(review): the parenthesis placement differs from the
            ## analogous 'days' check below (max of a logical vector);
            ## the result is equivalent, but presumably
            ## max(abs(root - rootr)) > 1e-07 was intended — confirm.
            rootr <- round(root)
            if (any(max(abs(root - rootr) > 1e-07))) {
                stop("'root' must be an integer or character")
            }
            root <- as.character(rootr)
        } else if (!is.character(root)) {
            stop("invalid class of root")
        }
        ## --- expand (tEnd, days) OR take explicit interval bounds -----
        if (all(!is.null(tEnd), !is.null(days))) {
            if (!all(is.null(inBegin), is.null(inEnd),
                     is.null(outBegin), is.null(outEnd))) {
                stop("Use either tEnd and days or inBegin, inEnd, ",
                     "outBegin and outEnd in call to NetworkSummary")
            }
            if (any(is.character(tEnd), is.factor(tEnd))) {
                tEnd <- as.Date(tEnd)
            }
            if (!identical(class(tEnd), "Date")) {
                stop("'tEnd' must be a Date vector")
            }
            daysr <- round(days)
            if (any(is.na(days) | (days < 0)) ||
                max(abs(days - daysr)) > 1e-07) {
                stop("'days' must be nonnegative and integer")
            }
            days <- daysr
            ## Cross all unique roots with all unique (tEnd, days) pairs;
            ## in and out windows coincide in this mode.
            root <- unique(root)
            tEnd <- unique(tEnd)
            days <- unique(days)
            n.root <- length(root)
            n.tEnd <- length(tEnd)
            n.days <- length(days)
            n <- n.root * n.tEnd * n.days
            root <- rep(root, each = n.tEnd * n.days, length.out = n)
            inEnd <- rep(tEnd, each = n.days, length.out = n)
            inBegin <- inEnd - rep(days, each = 1, length.out = n)
            outEnd <- inEnd
            outBegin <- inBegin
        } else if (all(!is.null(inBegin), !is.null(inEnd),
                       !is.null(outBegin), !is.null(outEnd))) {
            if (!all(is.null(tEnd), is.null(days))) {
                stop("Use either tEnd and days or inBegin, inEnd, ",
                     "outBegin and outEnd in call to NetworkSummary")
            }
        } else {
            stop("Use either tEnd and days or inBegin, inEnd, ",
                 "outBegin and outEnd in call to NetworkSummary")
        }
        ## --- validate the four interval-bound vectors -----------------
        if (any(is.character(inBegin), is.factor(inBegin))) {
            inBegin <- as.Date(inBegin)
        }
        if (!identical(class(inBegin), "Date")) {
            stop("'inBegin' must be a Date vector")
        }
        if (any(is.na(inBegin))) {
            stop("inBegin contains NA")
        }
        if (any(is.character(inEnd), is.factor(inEnd))) {
            inEnd <- as.Date(inEnd)
        }
        if (!identical(class(inEnd), "Date")) {
            stop("'inEnd' must be a Date vector")
        }
        if (any(is.na(inEnd))) {
            stop("inEnd contains NA")
        }
        if (any(is.character(outBegin), is.factor(outBegin))) {
            outBegin <- as.Date(outBegin)
        }
        if (!identical(class(outBegin), "Date")) {
            stop("'outBegin' must be a Date vector")
        }
        if (any(is.na(outBegin))) {
            stop("outBegin contains NA")
        }
        if (any(is.character(outEnd), is.factor(outEnd))) {
            outEnd <- as.Date(outEnd)
        }
        if (!identical(class(outEnd), "Date")) {
            stop("'outEnd' must be a Date vector")
        }
        if (any(is.na(outEnd))) {
            stop("outEnd contains NA")
        }
        if (any(inEnd < inBegin)) {
            stop("inEnd < inBegin")
        }
        if (any(outEnd < outBegin)) {
            stop("outEnd < outBegin")
        }
        if (!identical(length(unique(c(length(root),
                                       length(inBegin),
                                       length(inEnd),
                                       length(outBegin),
                                       length(outEnd)))),
                       1L)) {
            stop("root, inBegin, inEnd, outBegin and ",
                 "outEnd must have equal length")
        }
        ## --- native computation ---------------------------------------
        ## Node labels are mapped to integer codes via a shared factor so
        ## the C routine works on integer ids; dates are passed as Julian
        ## day numbers.
        nodes <- as.factor(unique(c(x$source,
                                    x$destination,
                                    root)))
        contact_chain <- .Call(
            "networkSummary",
            as.integer(factor(x$source, levels = levels(nodes))),
            as.integer(factor(x$destination, levels = levels(nodes))),
            as.integer(julian(x$t)),
            as.integer(factor(root, levels = levels(nodes))),
            as.integer(julian(inBegin)),
            as.integer(julian(inEnd)),
            as.integer(julian(outBegin)),
            as.integer(julian(outEnd)),
            length(nodes),
            PACKAGE = "EpiContactTrace")
        ## One row per (root, interval) combination.
        data.frame(
            root = root,
            inBegin = inBegin,
            inEnd = inEnd,
            inDays = as.integer(inEnd - inBegin),
            outBegin = outBegin,
            outEnd = outEnd,
            outDays = as.integer(outEnd - outBegin),
            inDegree = contact_chain[["inDegree"]],
            outDegree = contact_chain[["outDegree"]],
            ingoingContactChain = contact_chain[["ingoingContactChain"]],
            outgoingContactChain = contact_chain[["outgoingContactChain"]],
            stringsAsFactors = FALSE)
    }
)
GeoRarefaction_MultiTaxa <- function(nLocCut=3, OccMatrix, TaxaStart, LongPos=1, LatPos=2,
                                     iter=10, CellSize=5, longBounds=c(-180,180),
                                     latBounds=c(-90,90), steps=c(1,50,40,30,20,10,5),
                                     replacePts=FALSE) {
  ## Run GeoRarefaction_SingleTaxon() for every taxon column of an
  ## occurrence matrix, skipping taxa with fewer than nLocCut distinct
  ## localities (those get an NA placeholder).
  ##
  ## nLocCut:   minimum number of collapsed (unique) localities required.
  ## OccMatrix: occurrence matrix/data frame; coordinates in columns
  ##            LongPos/LatPos, taxa in columns TaxaStart..ncol.
  ## Remaining arguments are passed through to GeoRarefaction_SingleTaxon().
  ##
  ## Returns the list of rarefied results for taxa that passed the cutoff,
  ## or (when none did) the full list of placeholders, after a message.
  n_cols <- ncol(OccMatrix)                 # was length(OccMatrix[1,])
  n_taxa <- n_cols - (TaxaStart - 1)
  results <- vector("list", n_taxa)         # was named `c`, shadowing base::c
  rarefied <- logical(n_taxa)               # preallocated; was a grown vector
  for (k in TaxaStart:n_cols) {
    idx <- k - (TaxaStart - 1)
    taxon_id <- colnames(OccMatrix)[k]
    placeholder <- list(TaxonName=taxon_id, MST_mat=NA, CH_mat=NA, GCD_mat=NA,
                        LatRg_mat=NA, LonRg_mat=NA, CellCount_mat=NA)
    occ_rows <- which(OccMatrix[, k] != 0)
    if (length(occ_rows) == 0) {
      results[[idx]] <- placeholder
      next
    }
    ## Collapse duplicate coordinates to count distinct localities.
    unique_coords <- CoordCollapse(OccMatrix[occ_rows, LongPos],
                                   OccMatrix[occ_rows, LatPos])
    print(paste(taxon_id, idx))             # progress, as before
    if (nrow(unique_coords) >= nLocCut) {
      results[[idx]] <- GeoRarefaction_SingleTaxon(TName=taxon_id, OccMatrix=OccMatrix,
        LongPos=LongPos, LatPos=LatPos, iter=iter, CellSize=CellSize,
        longBounds=longBounds, latBounds=latBounds, steps=steps, replacePts=replacePts)
      rarefied[idx] <- TRUE
    } else {
      results[[idx]] <- placeholder
    }
  }
  if (any(rarefied)) {
    return(results[rarefied])
  }
  print("")
  print("No taxa above nLocCut")
  results
}
context("map_utilities")

## invoke_method() must return the widget with the JS call queued.
test_that("invoke_method works", {
  base_map <- mapdeck(token = 'abc')
  result <- invoke_method(base_map, 'add_layer')
  expect_true(all(attr(result, 'class') == c("mapdeck", "htmlwidget")))
  expect_true(result$x$calls[[1]]$functions == 'add_layer')
})

## layerId() falls back to a layer-specific default and otherwise
## passes the user-supplied id through unchanged.
test_that("layer_ids are set", {
  expect_true(mapdeck:::layerId(NULL, "arc") == "arc-defaultLayerId")
  expect_true(mapdeck:::layerId("myLayer") == "myLayer")
  expect_true(mapdeck:::layerId("myLayer", "grid") == "myLayer")
  expect_true(mapdeck:::layerId("myLayer", "scatterplot") == "myLayer")
})
## Bare character constant — presumably the name/alias of a dataset
## documented elsewhere (e.g. an Rd stub); verify against the package docs.
"matLD"
`acf.fnc` <-
function(dat, group="Subject", time="Trial", x = "RT", plot=TRUE, ...) {
    ## Per-group autocorrelation function of a response column.
    ##
    ## dat:   data frame with one time series per level of `group`.
    ## group: name of the grouping column (factor; character is now coerced).
    ## time:  name of the column giving within-group ordering.
    ## x:     name of the numeric response column.
    ## plot:  TRUE returns a lattice trellis plot of the per-group ACFs
    ##        with +/- ci reference lines; FALSE returns the data frame.
    ## ...:   passed on to lattice::xyplot().
    ##
    ## Returns a data frame with columns Lag, Acf, Subject and ci, where
    ## ci = -1/n + 2/sqrt(n) is the approximate 95% confidence bound.
    if (!is.factor(dat[, group])) {
        ## Previously a character grouping column made levels() return NULL
        ## and the function fail; coerce so both factor and character work.
        dat[, group] <- factor(dat[, group])
    }
    dat <- dat[order(dat[, group], dat[, time]), ]
    groups <- levels(dat[, group])
    civec <- numeric(length(groups))
    pieces <- vector("list", length(groups))  # preallocate: no rbind-in-loop
    for (k in seq_along(groups)) {
        tmp <- dat[dat[, group] == groups[k], ]
        a <- stats::acf(tmp[, x], plot = FALSE)$acf[, , 1]
        n <- nrow(tmp)
        ci <- -(1 / n) + 2 / sqrt(n)
        civec[k] <- ci
        pieces[[k]] <- data.frame(Lag = 0:(length(a) - 1), Acf = a,
                                  Subject = rep(groups[k], length(a)),
                                  ci = rep(ci, length(a)))
    }
    dfr <- do.call(rbind, pieces)
    if (plot == TRUE) {
        lattice::xyplot(Acf ~ Lag | Subject, type = "h", data = dfr, col.line = "black",
            panel = function(...) {
                ## Reference lines at the group's +/- confidence bound.
                lattice::panel.abline(h = civec[lattice::panel.number()], col.line = "grey")
                lattice::panel.abline(h = -civec[lattice::panel.number()], col.line = "grey")
                lattice::panel.xyplot(...)
            },
            strip = lattice::strip.custom(bg = "grey90"),
            par.strip.text = list(cex = 0.8), ...)
    } else {
        return(dfr)
    }
}
## Test covariate effects on latent-class membership with a multinomial
## logistic regression (nnet::multinom), using one class as reference.
##
## dat:          data frame holding the covariate columns.
## covnames:     names of the covariates to test.
## membership.1: data frame of class-membership columns plus a 'label'
##               column of the form "Latent Class <k>".
## num.cluster:  number of latent classes.
## ref.clsuter:  which class is the reference: "MAX" (largest), "MIN"
##               (smallest), "FIRST" or "LAST".
##               NOTE(review): the argument name is a typo for
##               'ref.cluster', but it is part of the public interface.
## partition:    "hard" fits an unweighted model; anything else weights each
##               observation by its maximum class-membership probability.
##
## Returns list(test_results = per-contrast coefficient tables with
## Wald z tests, multinom_raw = the fitted nnet::multinom object).
test_multinomial = function(dat, covnames, membership.1 , num.cluster, ref.clsuter = "MAX",
                            partition )
{
  ## Bind memberships and covariates; restore covariate names lost by cbind.
  U.cov = data.frame(cbind(membership.1, dat[,covnames]))
  names(U.cov)[(ncol(membership.1)+1):ncol(U.cov)] = covnames
  if(ref.clsuter=="MAX"){
    Ref.Cluster = which.max(table(membership.1$label))
  }else if(ref.clsuter=="MIN"){
    Ref.Cluster = which.min(table(membership.1$label))
  }else if(ref.clsuter=="FIRST"){
    Ref.Cluster = 1
  }else if(ref.clsuter=="LAST"){
    Ref.Cluster = num.cluster
  }
  ## Reorder the factor levels so the reference class comes first
  ## (multinom uses the first level as baseline).
  U.cov$label = factor(U.cov$label, c(paste0("Latent Class ", Ref.Cluster),
                                      paste0("Latent Class ", setdiff(1:num.cluster, Ref.Cluster ))),
                       c(paste0("Latent Class ", Ref.Cluster),
                         paste0("Latent Class ", setdiff(1:num.cluster, Ref.Cluster ))))
  formula.lm= stats::as.formula(paste("label ~", paste(covnames, collapse = " + ")))
  if(partition == "hard"){
    test.lm <- nnet::multinom(formula.lm, data = U.cov,
                              trace = FALSE)
  }else {
    ## Soft partition: weight each row by its largest membership probability.
    wt = apply(U.cov[,grep("Class", names(U.cov))], 1, function(x) x[which.max(x)])
    test.lm <- nnet::multinom(formula.lm, data = U.cov, weights = wt,
                              trace = FALSE)
  }
  test_results = list()
  if(num.cluster==2){
    ## With 2 classes multinom returns plain vectors, not matrices.
    test_result_tem = cbind(summary(test.lm)$coefficients,
                            summary(test.lm)$standard.errors)
    test_result_tem = data.frame(test_result_tem)
    test_result_tem$V3 = test_result_tem[,1]/test_result_tem[,2]
    ## Two-sided Wald test: z = estimate / SE.
    test_result_tem$V4 = (1 - stats::pnorm(abs(test_result_tem$V3), 0, 1)) * 2
    names(test_result_tem) = c("Estimate", "Std.error", "z value", "Pr(>|z|)")
    test_results[[1]] = test_result_tem
  }else{
    ## One coefficient table per non-reference class.
    for(kt in 1:(num.cluster-1))
    {
      test_result_tem = cbind(summary(test.lm)$coefficients[kt,],
                              summary(test.lm)$standard.errors[kt,])
      test_result_tem = data.frame(test_result_tem)
      test_result_tem$V3 = test_result_tem[,1]/test_result_tem[,2]
      test_result_tem$V4 = (1 - stats::pnorm(abs(test_result_tem$V3), 0, 1)) * 2
      names(test_result_tem) = c("Estimate", "Std.error", "z value", "Pr(>|z|)")
      rownames(test_result_tem) = dimnames(summary(test.lm)$coefficients)[[2]]
      test_results[[kt]] = test_result_tem
    }
  }
  ## Name each table "<class> / <reference class>".
  label.level = levels(U.cov$label)
  names(test_results) = paste0(label.level[-1]," / ", label.level[1])
  return(list(test_results= test_results,
              multinom_raw= test.lm))
}
skip_on_cran()
## Choose the Stan backend for this test file. Use cmdstanr only when the
## package is installed AND a CmdStan toolchain is actually available;
## otherwise fall back to rstan. Previously, when cmdstanr was installed
## but CmdStan was not, `backend` was never assigned and every later
## brm() call failed with "object 'backend' not found".
if (!requireNamespace("cmdstanr", quietly = TRUE)) {
  backend <- "rstan"
  skip_on_os("windows")
} else {
  if (isFALSE(is.null(cmdstanr::cmdstan_version(error_on_NA = FALSE)))) {
    backend <- "cmdstanr"
  } else {
    backend <- "rstan"
  }
}
## Simulate multilevel bernoulli data: 100 groups x 20 observations, with
## correlated random intercepts and slopes (standardized, then given the
## target covariance via a Cholesky factor, then shifted to means -2.5 / 1).
dlogit <- withr::with_seed(
  seed = 12345, code = {
    nGroups <- 100
    nObs <- 20
    theta.location <- matrix(rnorm(nGroups * 2), nrow = nGroups, ncol = 2)
    theta.location[, 1] <- theta.location[, 1] - mean(theta.location[, 1])
    theta.location[, 2] <- theta.location[, 2] - mean(theta.location[, 2])
    theta.location[, 1] <- theta.location[, 1] / sd(theta.location[, 1])
    theta.location[, 2] <- theta.location[, 2] / sd(theta.location[, 2])
    theta.location <- theta.location %*% chol(matrix(c(1.5, -.25, -.25, .5^2), 2))
    theta.location[, 1] <- theta.location[, 1] - 2.5
    theta.location[, 2] <- theta.location[, 2] + 1
    d <- data.table(
      x = rep(rep(0:1, each = nObs / 2), times = nGroups))
    d[, ID := rep(seq_len(nGroups), each = nObs)]
    for (i in seq_len(nGroups)) {
      d[ID == i, y := rbinom(
        n = nObs,
        size = 1,
        prob = plogis(theta.location[i, 1] + theta.location[i, 2] * x))
      ]
    }
    copy(d)
  })
## Sample benchmark: mean of per-group means of y, by x.
res.samp <- dlogit[, .(M = mean(y)), by = .(ID, x)][, .(M = mean(M)), by = x]
## Random intercept + slope logistic model (short chains; test fixture only).
suppressWarnings(
  mlogit <- brms::brm(
    y ~ 1 + x + (1 + x | ID), family = "bernoulli",
    data = dlogit, iter = 1000, warmup = 500, seed = 1234,
    chains = 2, backend = backend, save_pars = save_pars(all = TRUE),
    silent = 2, refresh = 0)
)
preddat <- data.frame(y = c(0, 0), x = c(0, 1), ID = 999)
## Posterior predictions integrating out the random effects, plus the
## x=1 minus x=0 contrast (average marginal effect).
res.integrate <- withr::with_seed(
  seed = 1234, {
    test0 <- prediction(object = mlogit, data = preddat[1, ], posterior = TRUE,
                        effects = "integrateoutRE", k = 100L, CI = 0.95, CIType = "ETI")
    test1 <- prediction(object = mlogit, data = preddat[2, ], posterior = TRUE,
                        effects = "integrateoutRE", k = 100L, CI = 0.95, CIType = "ETI")
    ame <- list(Summary = NULL, Posterior = test1$Posterior - test0$Posterior)
    ame$Summary <- bsummary(ame$Posterior, CI = 0.95, CIType = "ETI")
    list(
      Summary = rbind(
        test0$Summary, test1$Summary, ame$Summary),
      Posterior = cbind(
        test0$Posterior, test1$Posterior, ame$Posterior))
  })
## Same predictions using fixed effects only (no RE integration).
res.fixedonly <- withr::with_seed(
  seed = 1234, {
    test0 <- prediction(object = mlogit, data = preddat[1, ], posterior = TRUE,
                        effects = "fixedonly", CI = 0.95, CIType = "ETI")
    test1 <- prediction(object = mlogit, data = preddat[2, ], posterior = TRUE,
                        effects = "fixedonly", CI = 0.95, CIType = "ETI")
    ame <- list(Summary = NULL, Posterior = test1$Posterior - test0$Posterior)
    ame$Summary <- bsummary(ame$Posterior, CI = 0.95, CIType = "ETI")
    list(
      Summary = rbind(
        test0$Summary, test1$Summary, ame$Summary),
      Posterior = cbind(
        test0$Posterior, test1$Posterior, ame$Posterior))
  })
## Integrated-out RE predictions must be probabilities and close to the
## sample benchmark (within .01).
test_that(".predict works to integrate out random effects in multilevel logistic models", {
  expect_type(res.integrate, "list")
  expect_equal(
    c(ndraws(mlogit), 3L),
    dim(res.integrate$Posterior))
  expect_true(all(
    res.integrate$Posterior[, 1:2] >= 0 &
    res.integrate$Posterior[, 1:2] <= 1))
  expect_true(all(
    res.integrate$Summary$M >= 0 &
    res.integrate$Summary$M <= 1))
  expect_true(abs(res.integrate$Summary$M[1] - res.samp$M[1]) < .01)
  expect_true(abs(res.integrate$Summary$M[2] - res.samp$M[2]) < .01)
  expect_true(abs(res.integrate$Summary$M[3] -
                  (res.samp$M[2] - res.samp$M[1])) < .01)
})
## Fixed-effects-only predictions are attenuated relative to integrated ones
## (logit link + random effects), hence the strict inequalities.
test_that(".predict works with fixed effects only in multilevel logistic models", {
  expect_type(res.fixedonly, "list")
  expect_equal(
    c(ndraws(mlogit), 3L),
    dim(res.fixedonly$Posterior))
  expect_true(all(
    res.fixedonly$Posterior[, 1:2] >= 0 &
    res.fixedonly$Posterior[, 1:2] <= 1))
  expect_true(all(
    res.fixedonly$Summary$M >= 0 &
    res.fixedonly$Summary$M <= 1))
  expect_true(res.fixedonly$Summary$M[1] < res.integrate$Summary$M[1])
  expect_true(res.fixedonly$Summary$M[2] < res.integrate$Summary$M[2])
})
## AME of x via numerical derivative: step h with contrast (-1/h, 1/h).
h <- .001
ames <- brmsmargins(
  object = mlogit,
  add = data.frame(x = c(0, h)),
  contrasts = cbind("AME time" = c(-1 / h, 1 / h)),
  effects = "integrateoutRE",
  k = 100L,
  seed = 1234
)
test_that("brmsmargins works with random slope logit models", {
  expect_type(ames, "list")
  expect_equal(
    ndraws(mlogit),
    nrow(ames$Posterior))
  expect_true(all(
    ames$Posterior[, 1:2] >= 0 &
    ames$Posterior[, 1:2] <= 1))
  expect_true(all(
    ames$ContrastSummary$M >= 0 &
    ames$ContrastSummary$M <= 1))
  expect_true(abs(ames$ContrastSummary$M - 0.11) < .02)
})
## Same AME check with an intercept-only random-effects model.
suppressWarnings(
  mlogit.intonly <- brms::brm(
    y ~ 1 + x + (1 | ID), family = "bernoulli",
    data = dlogit, seed = 1234,
    chains = 2, backend = backend, save_pars = save_pars(all = TRUE),
    silent = 2, refresh = 0)
)
h <- .001
ames <- brmsmargins(
  object = mlogit.intonly,
  add = data.frame(x = c(0, h)),
  contrasts = cbind("AME time" = c(-1 / h, 1 / h)),
  effects = "integrateoutRE",
  k = 100L,
  seed = 1234
)
test_that("brmsmargins works with intercept only logit models", {
  expect_type(ames, "list")
  expect_equal(
    ndraws(mlogit.intonly),
    nrow(ames$Posterior))
  expect_true(all(
    ames$Posterior[, 1:2] >= 0 &
    ames$Posterior[, 1:2] <= 1))
  expect_true(all(
    ames$ContrastSummary$M >= 0 &
    ames$ContrastSummary$M <= 1))
  expect_true(abs(ames$ContrastSummary$M - 0.11) < .02)
})
## Generics for querying time-series model objects. The optional
## 'convention' argument selects the parameterization (BJ, SP, BD or a
## filter/model class) in which orders, coefficients and polynomials
## are reported.
setGeneric("modelOrder",
           function(object, convention, ...){ standardGeneric("modelOrder") })
setGeneric("modelCoef" ,
           function(object, convention, component, ...){ standardGeneric("modelCoef") })
setGeneric("modelPoly",
           function(object, convention, ...){ standardGeneric("modelPoly") })
## lag_0 is excluded from dispatch via an explicit signature.
setGeneric("modelPolyCoef",
           function(object, convention, lag_0 = TRUE, ...){
               standardGeneric("modelPolyCoef")
           },
           signature = c("object", "convention")
)
## Accessors for innovation variance, centering, intercept, the number of
## unit roots, and stationarity of a model.
setGeneric("sigmaSq", def = function(object){ standardGeneric("sigmaSq") })
setGeneric("modelCenter", def = function(object){ standardGeneric("modelCenter") })
setGeneric("modelIntercept", def = function(object){ standardGeneric("modelIntercept") })
setGeneric("nUnitRoots", def = function(object){ standardGeneric("nUnitRoots") })
setGeneric("isStationaryModel",
           def = function(object){ standardGeneric("isStationaryModel") })
## Convention marker classes: Box-Jenkins (BJ), sign-plus (SP) and
## Brockwell-Davis (BD) parameterizations, used only for dispatch.
setClass("BJ", slots = c(dummy = "character"))
setClass("SP", slots = c(dummy = "character"))
setClass("BD", slots = c(dummy = "character"))
## Virtual building blocks of the model hierarchy.
setClass("VirtualMeanModel", contains = c("VIRTUAL"))
setClass("VirtualAutocovarianceModel", contains = c("VIRTUAL"))
setClass("VirtualAutocorelationModel",
         contains = c("VirtualAutocovarianceModel", "VIRTUAL"))
setClass("VirtualPartialAutocovarianceModel", contains = "VIRTUAL")
setClass("VirtualPartialAutocorelationModel",
         contains = c("VirtualPartialAutocovarianceModel", "VIRTUAL"))
setClass("VirtualStationaryModel",
         contains = c("VirtualAutocovarianceModel", "VirtualMeanModel"))
setClass("VirtualWhiteNoiseModel", contains = c("VirtualStationaryModel", "VIRTUAL"))
setClass("VirtualFilterModel", contains = c("VIRTUAL"))
## ARMA and its AR/MA specializations; integrated (SARIMA-type) models.
setClass("VirtualArmaModel",
         contains = c("VirtualFilterModel", "VirtualStationaryModel", "VIRTUAL")
)
setClass("VirtualArModel", contains = c("VirtualArmaModel", "VIRTUAL") )
setClass("VirtualMaModel", contains = c("VirtualArmaModel", "VIRTUAL") )
setClass("VirtualIntegratedModel", contains = c("VirtualFilterModel", "VIRTUAL") )
setClass("VirtualSarimaModel", contains = c("VirtualIntegratedModel", "VIRTUAL") )
setClass("VirtualArimaModel", contains = c("VirtualSarimaModel", "VIRTUAL") )
setClass("VirtualAriModel", contains = c("VirtualArimaModel", "VIRTUAL") )
setClass("VirtualImaModel", contains = c("VirtualArimaModel", "VIRTUAL") )
setClass("VirtualAutocovarianceSpec", contains = "VIRTUAL",
         slots = c(acvf = "ANY")
)
setClass("AutocovarianceSpec",
         slots = c(acvf = "numeric")
)
## Holds centering, intercept and innovation variance shared by concrete
## model specs; sigma2 defaults to NA (unknown).
setClass("InterceptSpec",
         slots = c(center = "numeric", intercept = "numeric", sigma2 = "numeric"),
         prototype = list(center = 0, intercept = 0, sigma2 = NA_real_)
)
## Trivial slot accessors.
setMethod("sigmaSq", "InterceptSpec", function(object) object@sigma2)
setMethod("modelCenter", "InterceptSpec", function(object) object@center)
setMethod("modelIntercept", "InterceptSpec", function(object) object@intercept)
## ARMA specification: an ARMA filter plus center/intercept/sigma2.
setClass("ArmaSpec", contains = c("ArmaFilter", "InterceptSpec"))
setMethod("initialize", "ArmaSpec",
    ## 'mean' is a convenience alternative to 'center'; it is only accepted
    ## when neither center nor intercept was set. 'ar'/'ma' coefficients are
    ## converted to the BJ / SP filter conventions respectively. With
    ## check = TRUE the AR/MA polynomial roots are inspected (solve() on a
    ## polynomial returns its roots) and warnings are issued for
    ## non-stable / non-invertible models.
    function(.Object, ..., ar, ma, mean, check = TRUE){
        .Object <- callNextMethod(.Object, ...)
        if(!missing(mean)){
            if(.Object@center == 0 && .Object@intercept == 0)
                .Object@center <- mean
            else
                stop(paste0("Use argument 'mean' only when 'center' and 'intercept' ",
                            "are missing or zero"))
        }
        if(!missing(ar)) .Object@ar <- as(ar, "BJFilter")
        if(!missing(ma)) .Object@ma <- as(ma, "SPFilter")
        if(check){
            ## NOTE(review): the AR test uses <= 1 while the MA test uses
            ## < 1 — presumably deliberate (roots on the unit circle are
            ## non-stationary but borderline-acceptable for MA); confirm.
            if(any(abs(solve(filterPoly(.Object@ar))) <= 1))
                warning("The AR polynomial is not stable.")
            if(any(abs(solve(filterPoly(.Object@ma))) < 1))
                warning("The model is not invertible.")
        }
        .Object
    }
)
setClass("AutocovarianceModel",
         contains = c("VirtualAutocovarianceModel", "AutocovarianceSpec")
)
## Concrete ARMA model and its AR-only / MA-only restrictions.
setClass("ArmaModel",
         contains = c("ArmaSpec", "VirtualArmaModel")
)
setClass("ArModel", contains = c("VirtualArModel", "ArmaModel"))
setClass("MaModel", contains = c("VirtualMaModel", "ArmaModel"))
## ArModel/MaModel run validObject() after construction to enforce that the
## "other" polynomial is trivial (see the validity methods below).
setMethod("initialize", "ArModel",
    function(.Object, ...){
        .Object <- callNextMethod()
        validObject(.Object)
        .Object
    }
)
setMethod("initialize", "MaModel",
    function(.Object, ...){
        .Object <- callNextMethod()
        validObject(.Object)
        .Object
    }
)
## Thin constructor wrappers around new().
ArmaModel <- function(...){
    new("ArmaModel", ...)
}
ArModel <- function(...){
    new("ArModel", ...)
}
MaModel <- function(...){
    new("MaModel", ...)
}
## Downcasts from ArmaModel: only valid when the dropped part is absent.
setAs("ArmaModel", "ArModel",
    function(from){
        if(modelOrder(from)$ma > 0)
            stop("Cannot convert 'model' to AR since it contains MA terms")
        obj <- new("ArModel")
        as(obj, "ArmaModel") <- from
        obj
    })
setAs("ArmaModel", "MaModel",
    function(from){
        if(modelOrder(from)$ar > 0)
            stop("Cannot convert 'model' to MA since it contains AR terms")
        obj <- new("MaModel")
        as(obj, "ArmaModel") <- from
        obj
    })
## Validity: an ArModel has no MA terms, a MaModel has no AR terms.
setValidity("ArModel", function(object){
    if(object@ma@order > 0)
        "Moving average terms found in ArModel object."
    else
        TRUE
})
setValidity("MaModel", function(object){
    if(object@ar@order > 0)
        "Autoregressive terms found in MaModel object."
    else
        TRUE
})
## SARIMA specification/model: a seasonal ARIMA filter plus
## center/intercept/sigma2.
setClass("SarimaSpec", contains = c("SarimaFilter", "InterceptSpec"))
setClass("SarimaModel", contains = c("VirtualSarimaModel", "SarimaSpec"))
## Unit roots: iorder ordinary differences plus siorder seasonal
## differences, each seasonal difference contributing nseasons roots.
setMethod("nUnitRoots", "SarimaSpec",
    function(object){
        if(is.na(object@nseasons))
            object@iorder
        else
            object@iorder + object@nseasons * object@siorder
    }
)
## A SARIMA spec is stationary exactly when it has no differencing.
setMethod("isStationaryModel", "SarimaSpec",
    function(object){
        object@iorder == 0 && object@siorder == 0
    }
)
## Generic fallback for integrated models: stationary iff no unit roots.
setMethod("isStationaryModel", "VirtualIntegratedModel",
    function(object){
        nUnitRoots(object) == 0
    }
)
## Stationary models have no unit roots by definition.
setMethod("nUnitRoots", "VirtualStationaryModel", function(object){ 0 })
setMethod("isStationaryModel", "VirtualStationaryModel", function(object){ TRUE })
## Method table for modelPoly / modelPolyCoef / modelCoef / modelOrder.
## The common dispatch pattern: a missing 'convention' delegates to the
## filter-level accessor; a character 'convention' is instantiated with
## new() and re-dispatched — unless it names the object's own class, in
## which case the missing-convention method is used directly.
setMethod("modelPoly", c("VirtualMonicFilter", "missing"),
    function(object){
        filterPoly(object)
    }
)
setMethod("modelPoly", c("VirtualFilterModel", "character"),
    function(object, convention){
        if(class(object) == convention)
            modelPoly(object)
        else{
            convention <- new(convention)
            modelPoly(object, convention = convention )
        }
    }
)
setMethod("modelPolyCoef", c("VirtualMonicFilter", "missing"),
    function(object, lag_0 = TRUE){
        filterPolyCoef(object, lag_0 = lag_0)
    }
)
setMethod("modelPolyCoef", c("VirtualFilterModel", "character"),
    function(object, convention){
        if(class(object) == convention)
            modelPolyCoef(object)
        else{
            convention <- new(convention)
            modelPolyCoef(object, convention = convention )
        }
    }
)
## SARIMA viewed as a plain ARMA: expand differencing and seasonal parts
## into the full AR/MA polynomials.
setMethod("modelPoly", c("SarimaModel", "ArmaFilter"),
    function(object, convention){
        wrk <- filterPoly(object)
        list(ar = wrk$fullarpoly,
             ma = wrk$fullmapoly )
    }
)
setMethod("modelPolyCoef", c("SarimaModel", "ArmaFilter"),
    function(object, convention, lag_0 = TRUE){
        wrk <- modelPoly(object, convention)
        if(lag_0)
            list(ar = coef(wrk$ar), ma = coef(wrk$ma) )
        else
            list(ar = coef(wrk$ar)[-1], ma = coef(wrk$ma)[-1] )
    }
)
setMethod("modelCoef", c("VirtualFilterModel", "missing", "missing"),
    function(object){
        filterCoef(object)
    }
)
setMethod("modelCoef", c("VirtualFilterModel", "character", "missing"),
    function(object, convention){
        if(class(object) == convention)
            modelCoef(object)
        else{
            convention <- new(convention)
            modelCoef(object, convention = convention )
        }
    }
)
## Convention conversions: BJ negates the MA part, SP negates the AR part,
## BD is the native (ArmaModel/ArmaFilter) parameterization.
setMethod("modelCoef", c("VirtualFilterModel", "BJ", "missing"),
    function(object, convention){
        if(class(object) == "ArmaModel")
            filt <- modelCoef(object)
        else{
            filt <- modelCoef(object, convention = "ArmaModel" )
        }
        list(ar = filt$ar, ma = -filt$ma)
    }
)
setMethod("modelCoef", c("VirtualFilterModel", "SP", "missing"),
    function(object, convention){
        if(class(object) == "ArmaModel")
            filt <- modelCoef(object)
        else{
            filt <- modelCoef(object, convention = "ArmaModel" )
        }
        list(ar = - filt$ar, ma = filt$ma)
    }
)
setMethod("modelCoef", c("VirtualFilterModel", "BD", "missing"),
    function(object, convention){
        if(class(object) == "ArmaModel")
            modelCoef(object)
        else{
            modelCoef(object, convention = "ArmaFilter" )
        }
    }
)
setMethod("modelCoef", c("ArmaModel", "ArmaFilter", "missing"),
    function(object, convention){
        filterCoef(object)
    }
)
setMethod("modelCoef", c("SarimaModel", "SarimaFilter", "missing"),
    function(object, convention){
        filterCoef(object)
    }
)
setMethod("modelCoef", c("SarimaModel", "ArmaFilter", "missing"),
    function(object, convention){
        wrk <- filterPolyCoef(object, lag_0 = FALSE)
        list(ar = - wrk$fullarpoly, ma = wrk$fullmapoly)
    }
)
## AR-only / MA-only views of a SARIMA model: error out if the other part
## is non-trivial.
setMethod("modelCoef", c("SarimaModel", "ArFilter", "missing"),
    function(object, convention){
        wrk <- filterPolyCoef(object, lag_0 = FALSE)
        if(length(wrk$fullmapoly) > 0)
            stop("Model not Ar-like (has non-trivial moving average part)")
        list(ar = - wrk$fullarpoly, ma = numeric(0))
    }
)
setMethod("modelCoef", c("SarimaModel", "MaFilter", "missing"),
    function(object, convention){
        wrk <- filterPolyCoef(object, lag_0 = FALSE)
        if(length(wrk$fullarpoly) > 0)
            stop("Model not MA-like (has non-trivial autoregressive part)")
        list(ar = numeric(0), ma = wrk$fullmapoly)
    }
)
setMethod("modelCoef", c("SarimaModel", "ArModel", "missing"),
    function(object, convention){
        wrk <- filterPolyCoef(object, lag_0 = FALSE)
        if(length(wrk$fullmapoly) > 0)
            stop("Model not Ar-like (has non-trivial moving average part)")
        list(ar = - wrk$fullarpoly, ma = numeric(0))
    }
)
setMethod("modelCoef", c("SarimaModel", "MaModel", "missing"),
    function(object, convention){
        wrk <- filterPolyCoef(object, lag_0 = FALSE)
        if(length(wrk$fullarpoly) > 0)
            stop("Model not MA-like (has non-trivial autoregressive part)")
        list(ar = numeric(0), ma = wrk$fullmapoly)
    }
)
setMethod("modelOrder", c("VirtualFilterModel", "missing"),
    function(object){
        filterOrder(object)
    }
)
setMethod("modelOrder", c("VirtualFilterModel", "character"),
    function(object, convention){
        if(class(object) == convention)
            modelOrder(object)
        else{
            convention <- new(convention)
            modelOrder(object, convention = convention )
        }
    }
)
setMethod("modelOrder", c("ArmaModel", "ArFilter"),
    function(object, convention){
        wrk <- filterOrder(object)
        if(wrk$ma != 0)
            stop("Non-zero moving average order")
        wrk
    }
)
setMethod("modelOrder", c("ArmaModel", "MaFilter"),
    function(object, convention){
        wrk <- filterOrder(object)
        if(wrk$ar != 0)
            stop("Non-zero autoregressive order")
        wrk
    }
)
## SARIMA order expanded to the equivalent ARMA order (differences and
## seasonal terms folded into the AR/MA orders).
setMethod("modelOrder", c("SarimaModel", "ArmaFilter"),
    function(object, convention){
        wrk <- modelOrder(object)
        if(is.na(wrk$nseasons))
            wrk$nseasons <- 0
        with(wrk, list(ar = ar + iorder + (sar + siorder) * nseasons,
                       ma = ma + sma * nseasons ) )
    }
)
setMethod("modelOrder", c("SarimaModel", "ArFilter"),
    function(object, convention){
        wrk <- modelOrder(object, "ArmaFilter")
        if(wrk$ma != 0)
            stop("Non-zero moving average order")
        wrk
    }
)
setMethod("modelOrder", c("SarimaModel", "MaFilter"),
    function(object, convention){
        wrk <- modelOrder(object, "ArmaFilter")
        if(wrk$ar != 0)
            stop("Non-zero autoregressive order")
        wrk
    }
)
## As above, but additionally requires a stationary model (no differencing).
setMethod("modelOrder", c("SarimaModel", "ArmaModel"),
    function(object, convention){
        wrk <- modelOrder(object)
        if(is.na(wrk$nseasons))
            wrk$nseasons <- 0
        with(wrk, {stopifnot(iorder == 0, siorder == 0)
                   list(ar = ar + sar * nseasons,
                        ma = ma + sma * nseasons )
        })
    }
)
setMethod("modelOrder", c("SarimaModel", "ArModel"),
    function(object, convention){
        wrk <- modelOrder(object, "ArmaModel")
        if(wrk$ma != 0)
            stop("Non-zero moving average order")
        wrk
    }
)
setMethod("modelOrder", c("SarimaModel", "MaModel"),
    function(object, convention){
        wrk <- modelOrder(object, "ArmaModel")
        if(wrk$ar != 0)
            stop("Non-zero autoregressive order")
        wrk
    }
)
## Convert an S4 object into a plain named list, one element per slot,
## preserving slot order and names.
.slots2list <- function(object){
    slot_names <- slotNames(object)
    values <- lapply(slot_names, function(nm) slot(object, nm))
    names(values) <- slot_names
    values
}
## Coerce models to plain lists: slots become list elements, with filter
## slots replaced by their coefficient vectors.
setAs("SarimaModel", "list",
    function(from){
        res <- .slots2list(from)
        res$ar <- filterCoef(res$ar)
        res$sar <- filterCoef(res$sar)
        res$ma <- filterCoef(res$ma)
        res$sma <- filterCoef(res$sma)
        res
    }
)
## S3 as.list() methods delegating to the S4 coercions above.
as.list.SarimaModel <- function(x, ...){
    as(x, "list")
}
setAs("ArmaSpec", "list",
    function(from){
        res <- .slots2list(from)
        res$ar <- filterCoef(res$ar)
        res$ma <- filterCoef(res$ma)
        res
    }
)
as.list.ArmaModel <- function(x, ...){ as(x, "list") }
as.list.ArmaSpec <- function(x, ...){ as(x, "list") }
## Copy the InterceptSpec slots (center, intercept, sigma2) from one
## object to another and return the updated target.
.copy_cis <- function(from, to){
    for (nm in c("center", "intercept", "sigma2")) {
        slot(to, nm) <- slot(from, nm)
    }
    to
}
## Expand a seasonal filter to the equivalent non-seasonal ARMA filter by
## multiplying out the seasonal and differencing polynomials.
setAs("SarimaFilter", "ArmaFilter",
    function(from){
        filt <- filterPolyCoef(from, lag_0 = FALSE)
        new("ArmaFilter", ar = filt$fullarpoly, ma = filt$fullmapoly)
    })
## A SARIMA model can only be viewed as an ARMA model when it has no unit
## roots; the intercept-spec slots are carried over.
setAs("VirtualSarimaModel", "ArmaModel",
    function(from){
        if(!isStationaryModel(from))
            stop("This SARIMA model is not stationary.")
        filt <- as(from, "ArmaFilter")
        to <- new("ArmaModel", filt)
        to <- .copy_cis(from, to)
        to
    })
## Print center/intercept/sigma2. With unconditional = FALSE, zero values
## are suppressed (except that a zero intercept is still shown when the
## center is zero, so at least one of the two always appears).
.print_cis <- function(object, unconditional = FALSE){
    intercept <- modelIntercept(object)
    center <- modelCenter(object)
    if(unconditional || center != 0)
        cat("Center: ", center, "\n")
    if(unconditional || intercept != 0 || center == 0)
        cat("Intercept: ", intercept, "\n")
    cat("SigmaSq: ", object@sigma2, "\n")
}
## Print the mean/intercept summary for a stationary model. The mean is
## recovered from intercept + phi(1) * center, where phi(1) is the AR
## polynomial evaluated at 1 (sum of 1 and the AR polynomial coefficients).
.print_mis <- function(object){
    intercept <- modelIntercept(object)
    center <- modelCenter(object)
    if(is.na(intercept) || is.na(center))
        cat("mean: ", NA, "\n")
    else{
        pofzero <- sum(c(1, filterPolyCoef(object)$ar))
        fullintercept <- intercept + pofzero * center
        mean <- fullintercept / pofzero
        if(intercept == 0)
            cat("mean: ", center, "\n")
        else if(center == 0)
            cat("intercept: ", intercept, "\n")
        else{
            cat("mean: ", mean, "\n")
            cat("intercept: ", intercept, "(full intercept: ", fullintercept, ")", "\n")
        }
    }
    cat("sigmaSq: ", sigmaSq(object), "\n")
}
## Print a symbolic backshift-operator formula of the model, e.g.
## "Model: (1-B)Phi(B)(X(t) - center) = intercept + Theta(B)e(t)",
## including only the factors whose orders are non-zero.
.print_formula <- function(object){
    order <- modelOrder(object)
    ar <- sar <- ma <- sma <- d <- ds <- intercept <- ""
    if(order$iorder > 0){
        d <- "(1-B)"
        if(order$iorder > 1)
            d <- paste0(d, "^", order$iorder)
    }
    if(order$siorder > 0){
        ds <- "(1-B^s)"
        if(order$siorder > 1)
            ds <- paste0(ds, "^", order$siorder)
    }
    if(order$ar > 0) ar <- "Phi(B)"
    if(order$sar > 0) sar <- "Phi_s(B)"
    if(order$ma > 0) ma <- "Theta(B)"
    if(order$sma > 0) sma <- "Theta_s(B)"
    arall <- paste0(d, ds, ar, sar)
    maall <- paste0(ma, sma)
    x <- "X(t)"
    if(object@center != 0){
        ## Parenthesize "X(t) - center" only when an AR-side factor
        ## multiplies it.
        x <- paste0(x, " - ", "center")
        if(nchar(arall) > 0)
            x <- paste0("(", x, ")")
    }
    ## NULL when intercept is zero; cat() silently drops NULL arguments.
    intercept <- if(object@intercept != 0)
                     "intercept + "
    e <- "e(t)"
    cat("Model: ", arall, x, " = ", intercept, maall, e, "\n", sep = "")
}
setMethod("show",
signature(object = "InterceptSpec"),
function (object){
.reportClassName(object, "InterceptSpec")
.print_cis(object, unconditional = TRUE)
invisible(NULL)
}
)
setMethod("show",
signature(object = "ArmaModel"),
function (object){
.reportClassName(object, "ArmaModel")
.print_mis(object)
callNextMethod()
invisible(NULL)
}
)
setMethod("show",
signature(object = "ArModel"),
function (object){
.reportClassName(object, "ArModel")
callNextMethod()
}
)
setMethod("show",
signature(object = "MaModel"),
function (object)
{
.reportClassName(object, "MaModel")
callNextMethod()
}
)
setMethod("show",
signature(object = "SarimaModel"),
function (object){
.reportClassName(object, "SarimaModel")
.print_formula(object)
cat("\n")
if(isStationaryModel(object))
.print_mis(object)
else
.print_cis(object)
callNextMethod()
}
)
## summary() method for "SarimaSpec": print the deterministic part
## (intercept, center, innovation variance), then the filter summary.
## NOTE(review): `...` is accepted but not forwarded to
## summary.SarimaFilter() — confirm whether that is intentional.
summary.SarimaSpec <- function(object, ...){
    cat("Intercept: ", object@intercept, "\n")
    cat("Center: ", object@center, "\n")
    cat("Innovation variance: ", object@sigma2, "\n")
    summary.SarimaFilter(object)
}
## summary() method for "SarimaModel": class banner plus the spec summary.
summary.SarimaModel <- function(object, ...){
    .reportClassName(object, "SarimaModel")
    summary.SarimaSpec(object)
}
## Convert US (moneyline) odds to Malay odds as -100 / x.
##
## Valid US odds satisfy |x| >= 100; any other value (including NA) yields
## NA in the result. Names/dim attributes of the input are preserved.
##
## @param x numeric vector of US odds
## @return numeric vector of Malay odds, same shape/attributes as `x`
odds.us2malay <- function (x) {
    malay <- x            # keep attributes (names, dim) of the input
    malay[] <- NA_real_   # invalid or missing odds stay NA
    ## FIX: the valid-index set was computed twice; hoist it. which() is
    ## deliberate: it drops NA positions, which logical indexing in an
    ## assignment would not tolerate.
    valid <- which(x <= -100 | x >= 100)
    malay[valid] <- -100 / x[valid]
    malay
}
## Expand ("melt") a separator-delimited column into one row per item.
##
## `melt` names the column to split and `sepchar` the separator (a regex,
## as passed to strsplit). Rows whose value holds several items are
## replicated once per item (items trimmed of surrounding whitespace);
## empty or NA values keep the original row unchanged. Shows a text
## progress bar. Relies on rename_column() (project helper); returns NULL
## when that helper returns NULL.
melt_cs_field <- function(data,melt,sepchar=","){
  tdata <- rename_column(data, melt, 'pri')
  if(is.null(tdata)){
    return(NULL)
  }
  tdata$pri <- as.character(tdata$pri)
  n <- nrow(tdata)
  ## FIX: collect per-row chunks in a pre-allocated list and rbind once at
  ## the end — the original grew the result with rbind() inside the loop,
  ## which is O(n^2) in the number of rows.
  chunks <- vector("list", n)
  pb <- txtProgressBar(min = 0, max = n, initial = 0)
  on.exit(close(pb), add = TRUE)  # FIX: the progress bar was never closed
  for(i in seq_len(n)){
    crec <- tdata[i, , drop = FALSE]
    ## FIX: scalar condition — use && and test is.na() first
    if(!is.na(tdata$pri[i]) && tdata$pri[i] != ""){
      items <- strsplit(tdata$pri[i], sepchar)[[1]]
      ## one copy of the row per item, with the trimmed item as the value
      rows <- crec[rep(1L, length(items)), , drop = FALSE]
      rows$pri <- trimws(items)
      chunks[[i]] <- rows
    } else {
      chunks[[i]] <- crec
    }
    setTxtProgressBar(pb, i)
  }
  retdat <- do.call(rbind, chunks)
  retdat <- as.data.frame(retdat)
  retdat <- rename_column(retdat, 'pri', melt)
  rownames(retdat) <- NULL
  return(retdat)
}
## Reference fixture: expected placement values (theta plus the X and Y
## placement vectors) for the markers "ndka", "wfns" and "s100b", each in
## "forward" and "reverse" direction. Hard-coded regression baselines —
## do not edit the numbers by hand.
expected.placements <- list(
  "ndka" = list(
    "forward" = structure(list(theta = 0.611957994579946, X = c(0.805555555555556,
0.625, 0.680555555555556, 0.763888888888889, 0.0416666666666667,
0.277777777777778, 0.513888888888889, 0.930555555555556, 0.972222222222222,
0.930555555555556, 0.930555555555556, 0.402777777777778, 0.722222222222222,
0.861111111111111, 0.166666666666667, 0.25, 0.708333333333333,
0.0138888888888889, 1, 0.861111111111111, 0.430555555555556,
0.513888888888889, 0.555555555555556, 0.708333333333333, 0.888888888888889,
0.645833333333333, 0.861111111111111, 0.25, 0.986111111111111,
0.756944444444444, 0.0416666666666667, 0.340277777777778, 0.861111111111111,
0.0416666666666667, 0.333333333333333, 0.930555555555556, 0.527777777777778,
0.722222222222222, 0.958333333333333, 0.513888888888889, 0.763888888888889
), Y = c(1, 0.829268292682927, 0.878048780487805, 0.707317073170732,
0.902439024390244, 0.402439024390244, 0.902439024390244, 0.317073170731707,
0.51219512195122, 0.317073170731707, 0.341463414634146, 0.0731707317073171,
0.804878048780488, 0.902439024390244, 0.585365853658537, 0.902439024390244,
0.902439024390244, 0.024390243902439, 0.560975609756098, 0.75609756097561,
0.75609756097561, 0.317073170731707, 0.975609756097561, 0.878048780487805,
0.902439024390244, 0.768292682926829, 0.536585365853659, 0.878048780487805,
0.414634146341463, 0.0975609756097561, 0.878048780487805, 0.902439024390244,
0.585365853658537, 0.804878048780488, 0.878048780487805, 0.536585365853659,
0.195121951219512, 0.219512195121951, 0.731707317073171, 0.609756097560976,
0.0487804878048781, 0.902439024390244, 0.341463414634146, 0.75609756097561,
0.707317073170732, 0.707317073170732, 0.414634146341463, 0.975609756097561,
0.341463414634146, 0.585365853658537, 0.804878048780488, 0.707317073170732,
0.317073170731707, 0.707317073170732, 0.609756097560976, 0.878048780487805,
0.75609756097561, 0.707317073170732, 0.195121951219512, 0.829268292682927,
0.585365853658537, 0.548780487804878, 0.0975609756097561, 0.634146341463415,
0.585365853658537, 0.804878048780488, 0.195121951219512, 0.731707317073171,
0.463414634146341, 0.219512195121951, 0.902439024390244, 0.51219512195122
)), .Names = c("theta", "X", "Y")),
    "reverse" = structure(list(theta = 0.388042005420054, X = c(0, 0.170731707317073,
0.121951219512195, 0.292682926829268, 0.0975609756097561, 0.597560975609756,
0.0975609756097561, 0.682926829268293, 0.48780487804878, 0.682926829268293,
0.658536585365854, 0.926829268292683, 0.195121951219512, 0.0975609756097561,
0.414634146341463, 0.0975609756097561, 0.0975609756097561, 0.975609756097561,
0.439024390243902, 0.24390243902439, 0.24390243902439, 0.682926829268293,
0.024390243902439, 0.121951219512195, 0.0975609756097561, 0.231707317073171,
0.463414634146341, 0.121951219512195, 0.585365853658537, 0.902439024390244,
0.121951219512195, 0.0975609756097561, 0.414634146341463, 0.195121951219512,
0.121951219512195, 0.463414634146341, 0.804878048780488, 0.780487804878049,
0.268292682926829, 0.390243902439024, 0.951219512195122, 0.0975609756097561,
0.658536585365854, 0.24390243902439, 0.292682926829268, 0.292682926829268,
0.585365853658537, 0.024390243902439, 0.658536585365854, 0.414634146341463,
0.195121951219512, 0.292682926829268, 0.682926829268293, 0.292682926829268,
0.390243902439024, 0.121951219512195, 0.24390243902439, 0.292682926829268,
0.804878048780488, 0.170731707317073, 0.414634146341463, 0.451219512195122,
0.902439024390244, 0.365853658536585, 0.414634146341463, 0.195121951219512,
0.804878048780488, 0.268292682926829, 0.536585365853659, 0.780487804878049,
0.0975609756097561, 0.48780487804878), Y = c(0.194444444444444,
0.375, 0.319444444444444, 0.236111111111111, 0.958333333333333,
0.722222222222222, 0.486111111111111, 0.0694444444444444, 0.0277777777777778,
0.0694444444444444, 0.0694444444444444, 0.597222222222222, 0.277777777777778,
0.138888888888889, 0.833333333333333, 0.75, 0.291666666666667,
0.986111111111111, 0, 0.138888888888889, 0.569444444444444, 0.486111111111111,
0.444444444444444, 0.291666666666667, 0.111111111111111, 0.354166666666667,
0.138888888888889, 0.75, 0.0138888888888889, 0.243055555555556,
0.958333333333333, 0.659722222222222, 0.138888888888889, 0.958333333333333,
0.666666666666667, 0.0694444444444444, 0.472222222222222, 0.277777777777778,
0.0416666666666667, 0.486111111111111, 0.236111111111111)), .Names = c("theta",
"X", "Y"))
  ),
  "wfns" = list(
    "forward" = structure(list(theta = 0.823678861788618, X = c(0.8125, 0.652777777777778,
0.888888888888889, 0.972222222222222, 0.972222222222222, 0.972222222222222,
0.972222222222222, 0.652777777777778, 0.972222222222222, 0.652777777777778,
0.256944444444444, 0.652777777777778, 0.972222222222222, 0.888888888888889,
0.888888888888889, 0.652777777777778, 0.972222222222222, 0.972222222222222,
0.972222222222222, 0.652777777777778, 0.972222222222222, 0.652777777777778,
0.972222222222222, 0.652777777777778, 0.888888888888889, 0.972222222222222,
0.888888888888889, 0.888888888888889, 0.256944444444444, 0.888888888888889,
0.888888888888889, 0.652777777777778, 0.652777777777778, 0.972222222222222,
0.972222222222222, 0.652777777777778, 0.652777777777778, 0.972222222222222,
0.972222222222222, 0.972222222222222, 0.972222222222222), Y = c(0.975609756097561,
0.975609756097561, 0.975609756097561, 0.975609756097561, 0.219512195121951,
0.975609756097561, 0.804878048780488, 0.804878048780488, 0.975609756097561,
0.804878048780488, 0.804878048780488, 0.975609756097561, 0.975609756097561,
0.975609756097561, 0.804878048780488, 0.975609756097561, 0.804878048780488,
0.975609756097561, 0.975609756097561, 0.646341463414634, 0.536585365853659,
0.975609756097561, 0.536585365853659, 0.975609756097561, 0.219512195121951,
0.219512195121951, 0.975609756097561, 0.975609756097561, 0.804878048780488,
0.536585365853659, 0.975609756097561, 0.975609756097561, 0.804878048780488,
0.804878048780488, 0.975609756097561, 0.804878048780488, 0.975609756097561,
0.804878048780488, 0.804878048780488, 0.975609756097561, 0.975609756097561,
0.975609756097561, 0.975609756097561, 0.804878048780488, 0.975609756097561,
0.804878048780488, 0.975609756097561, 0.975609756097561, 0.646341463414634,
0.804878048780488, 0.536585365853659, 0.804878048780488, 0.975609756097561,
0.536585365853659, 0.975609756097561, 0.975609756097561, 0.219512195121951,
0.536585365853659, 0.804878048780488, 0.804878048780488, 0.975609756097561,
0.975609756097561, 0.975609756097561, 0.646341463414634, 0.975609756097561,
0.804878048780488, 0.804878048780488, 0.536585365853659, 0.536585365853659,
0.975609756097561, 0.975609756097561, 0.975609756097561)), .Names = c("theta",
"X", "Y")),
    "reverse" = structure(list(theta = 0.176321138211382, X = c(0.024390243902439,
0.024390243902439, 0.024390243902439, 0.024390243902439, 0.780487804878049,
0.024390243902439, 0.195121951219512, 0.195121951219512, 0.024390243902439,
0.195121951219512, 0.195121951219512, 0.024390243902439, 0.024390243902439,
0.024390243902439, 0.195121951219512, 0.024390243902439, 0.195121951219512,
0.024390243902439, 0.024390243902439, 0.353658536585366, 0.463414634146341,
0.024390243902439, 0.463414634146341, 0.024390243902439, 0.780487804878049,
0.780487804878049, 0.024390243902439, 0.024390243902439, 0.195121951219512,
0.463414634146341, 0.024390243902439, 0.024390243902439, 0.195121951219512,
0.195121951219512, 0.024390243902439, 0.195121951219512, 0.024390243902439,
0.195121951219512, 0.195121951219512, 0.024390243902439, 0.024390243902439,
0.024390243902439, 0.024390243902439, 0.195121951219512, 0.024390243902439,
0.195121951219512, 0.024390243902439, 0.024390243902439, 0.353658536585366,
0.195121951219512, 0.463414634146341, 0.195121951219512, 0.024390243902439,
0.463414634146341, 0.024390243902439, 0.024390243902439, 0.780487804878049,
0.463414634146341, 0.195121951219512, 0.195121951219512, 0.024390243902439,
0.024390243902439, 0.024390243902439, 0.353658536585366, 0.024390243902439,
0.195121951219512, 0.195121951219512, 0.463414634146341, 0.463414634146341,
0.024390243902439, 0.024390243902439, 0.024390243902439), Y = c(0.1875,
0.347222222222222, 0.111111111111111, 0.0277777777777778, 0.0277777777777778,
0.0277777777777778, 0.0277777777777778, 0.347222222222222, 0.0277777777777778,
0.347222222222222, 0.743055555555556, 0.347222222222222, 0.0277777777777778,
0.111111111111111, 0.111111111111111, 0.347222222222222, 0.0277777777777778,
0.0277777777777778, 0.0277777777777778, 0.347222222222222, 0.0277777777777778,
0.347222222222222, 0.0277777777777778, 0.347222222222222, 0.111111111111111,
0.0277777777777778, 0.111111111111111, 0.111111111111111, 0.743055555555556,
0.111111111111111, 0.111111111111111, 0.347222222222222, 0.347222222222222,
0.0277777777777778, 0.0277777777777778, 0.347222222222222, 0.347222222222222,
0.0277777777777778, 0.0277777777777778, 0.0277777777777778, 0.0277777777777778
)), .Names = c("theta", "X", "Y"))
  ),
  "s100b" = list(
    "forward" = structure(list(theta = 0.731368563685637, X = c(0.5625, 0.4375,
0.715277777777778, 0.541666666666667, 0.902777777777778, 1, 0.972222222222222,
0.180555555555556, 0.854166666666667, 0.347222222222222, 0.180555555555556,
0.888888888888889, 0.875, 0.965277777777778, 1, 0.819444444444444,
1, 1, 1, 0.180555555555556, 1, 0.833333333333333, 0, 0.347222222222222,
0.805555555555556, 1, 0.819444444444444, 1, 0.263888888888889,
0.819444444444444, 0.5625, 0.4375, 0.513888888888889, 0.805555555555556,
1, 0.611111111111111, 1, 0.840277777777778, 1, 1, 0.902777777777778
), Y = c(0.707317073170732, 0.670731707317073, 0.804878048780488,
0.975609756097561, 0.341463414634146, 0.634146341463415, 0.804878048780488,
0.804878048780488, 0.975609756097561, 0.890243902439024, 0.975609756097561,
0.853658536585366, 0.768292682926829, 0.939024390243902, 0.634146341463415,
0.939024390243902, 0.768292682926829, 0.707317073170732, 0.634146341463415,
0.975609756097561, 0.646341463414634, 0.670731707317073, 0.439024390243902,
0.853658536585366, 0.292682926829268, 0.341463414634146, 0.646341463414634,
0.939024390243902, 0.390243902439024, 0.341463414634146, 0.768292682926829,
0.890243902439024, 0.853658536585366, 0.890243902439024, 0.975609756097561,
0.707317073170732, 0.804878048780488, 0.670731707317073, 0.658536585365854,
0.975609756097561, 0.975609756097561, 0.853658536585366, 0.975609756097561,
0.853658536585366, 0.890243902439024, 0.646341463414634, 0.853658536585366,
0.890243902439024, 0.451219512195122, 0.768292682926829, 0.51219512195122,
0.939024390243902, 0.804878048780488, 0.475609756097561, 0.939024390243902,
0.975609756097561, 0.585365853658537, 0.414634146341463, 0.804878048780488,
0.658536585365854, 0.890243902439024, 0.670731707317073, 0.804878048780488,
0.939024390243902, 0.975609756097561, 0.634146341463415, 0.658536585365854,
0.341463414634146, 0.634146341463415, 0.658536585365854, 0.292682926829268,
0.329268292682927)), .Names = c("theta", "X", "Y")),
    "reverse" = structure(list(theta = 0.268631436314363, X = c(0.292682926829268,
0.329268292682927, 0.195121951219512, 0.024390243902439, 0.658536585365854,
0.365853658536585, 0.195121951219512, 0.195121951219512, 0.024390243902439,
0.109756097560976, 0.024390243902439, 0.146341463414634, 0.231707317073171,
0.0609756097560976, 0.365853658536585, 0.0609756097560976, 0.231707317073171,
0.292682926829268, 0.365853658536585, 0.024390243902439, 0.353658536585366,
0.329268292682927, 0.560975609756098, 0.146341463414634, 0.707317073170732,
0.658536585365854, 0.353658536585366, 0.0609756097560976, 0.609756097560976,
0.658536585365854, 0.231707317073171, 0.109756097560976, 0.146341463414634,
0.109756097560976, 0.024390243902439, 0.292682926829268, 0.195121951219512,
0.329268292682927, 0.341463414634146, 0.024390243902439, 0.024390243902439,
0.146341463414634, 0.024390243902439, 0.146341463414634, 0.109756097560976,
0.353658536585366, 0.146341463414634, 0.109756097560976, 0.548780487804878,
0.231707317073171, 0.48780487804878, 0.0609756097560976, 0.195121951219512,
0.524390243902439, 0.0609756097560976, 0.024390243902439, 0.414634146341463,
0.585365853658537, 0.195121951219512, 0.341463414634146, 0.109756097560976,
0.329268292682927, 0.195121951219512, 0.0609756097560976, 0.024390243902439,
0.365853658536585, 0.341463414634146, 0.658536585365854, 0.365853658536585,
0.341463414634146, 0.707317073170732, 0.670731707317073), Y = c(0.4375,
0.5625, 0.284722222222222, 0.458333333333333, 0.0972222222222222,
0, 0.0277777777777778, 0.819444444444444, 0.145833333333333,
0.652777777777778, 0.819444444444444, 0.111111111111111, 0.125,
0.0347222222222222, 0, 0.180555555555556, 0, 0, 0, 0.819444444444444,
0, 0.166666666666667, 1, 0.652777777777778, 0.194444444444444,
0, 0.180555555555556, 0, 0.736111111111111, 0.180555555555556,
0.4375, 0.5625, 0.486111111111111, 0.194444444444444, 0, 0.388888888888889,
0, 0.159722222222222, 0, 0, 0.0972222222222222)), .Names = c("theta",
"X", "Y"))
  ),
  ## NOTE(review): the bare `list` below adds the base function `list`
  ## itself as a fourth, unnamed element of this fixture — almost certainly
  ## a leftover/typo; verify against the consuming tests before removing.
  list
  )
## Download NLDAS (FORA0125_H.002) hourly forcing for one site from the
## hydro1 THREDDS/OPeNDAP server and write one CF-style netCDF file per
## year of the requested range.
##
## @param outfolder base output directory (the site id is appended)
## @param start_date,end_date POSIXct-coercible date range (UTC); NLDAS
##        coverage starts in 1980
## @param site_id numeric site identifier (encoded into the folder name)
## @param lat.in,lon.in site coordinates in decimal degrees
## @param overwrite accepted for interface compatibility (currently unused)
## @param verbose passed through to ncdf4 calls
## @return (invisibly) a data.frame describing the files written
download.NLDAS <- function(outfolder, start_date, end_date, site_id, lat.in, lon.in,
                           overwrite = FALSE, verbose = FALSE, ...) {
  start_date <- as.POSIXlt(start_date, tz = "UTC")
  end_date <- as.POSIXlt(end_date, tz = "UTC")
  start_year <- lubridate::year(start_date)
  end_year <- lubridate::year(end_date)
  site_id <- as.numeric(site_id)
  ## encode the site id as "<prefix>-<suffix>" in the folder name
  outfolder <- paste0(outfolder, "_site_", paste0(site_id %/% 1e+09, "-", site_id %% 1e+09))
  NLDAS_start <- 1980
  if (start_year < NLDAS_start) {
    PEcAn.logger::logger.severe(sprintf('Input year range (%d:%d) exceeds the NLDAS range (%d:present)',
                                        start_year, end_year,
                                        NLDAS_start))
  }
  lat.in <- as.numeric(lat.in)
  lon.in <- as.numeric(lon.in)
  dap_base <- "http://hydro1.sci.gsfc.nasa.gov/thredds/dodsC/NLDAS_FORA0125_H.002"
  dir.create(outfolder, showWarnings = FALSE, recursive = TRUE)
  ylist <- seq(start_year, end_year, by = 1)
  rows <- length(ylist)
  ## one result row per year downloaded
  results <- data.frame(file = character(rows),
                        host = character(rows),
                        mimetype = character(rows),
                        formatname = character(rows),
                        startdate = character(rows),
                        enddate = character(rows),
                        dbfile.name = "NLDAS",
                        stringsAsFactors = FALSE)
  ## mapping: DAP variable name / its time-dimension rank / CF name / units
  var <- data.frame(DAP.name = c("N2-m_above_ground_Temperature", "LW_radiation_flux_downwards_surface",
                                 "Pressure", "SW_radiation_flux_downwards_surface", "N10-m_above_ground_Zonal_wind_speed",
                                 "N10-m_above_ground_Meridional_wind_speed", "N2-m_above_ground_Specific_humidity", "Precipitation_hourly_total"),
                    DAP.dim = c(2, 1, 1, 1, 2, 2, 2, 1),
                    CF.name = c("air_temperature", "surface_downwelling_longwave_flux_in_air",
                                "air_pressure", "surface_downwelling_shortwave_flux_in_air", "eastward_wind", "northward_wind",
                                "specific_humidity", "precipitation_flux"),
                    units = c("Kelvin", "W/m2", "Pascal", "W/m2", "m/s", "m/s", "g/g", "kg/m2/s"))
  time.stamps <- seq(0, 2300, by = 100)
  for (i in seq_len(rows)) {
    year <- ylist[i]
    nday <- PEcAn.utils::days_in_year(year)
    ## figure out which days of this year fall inside the requested range
    ## (FIX: scalar conditions now use &&)
    if (rows > 1 && i != 1 && i != rows) {
      days.use <- 1:nday
    } else if (rows == 1) {
      day1 <- lubridate::yday(start_date)
      day2 <- lubridate::yday(end_date)
      days.use <- day1:day2
      nday <- length(days.use)
    } else if (i == 1) {
      day1 <- lubridate::yday(start_date)
      days.use <- day1:nday
      nday <- length(days.use)
    } else if (i == rows) {
      day2 <- lubridate::yday(end_date)
      days.use <- 1:day2
      nday <- length(days.use)
    }
    ntime <- nday * 24  # hourly data
    loc.file <- file.path(outfolder, paste("NLDAS", year, "nc", sep = "."))
    lat <- ncdf4::ncdim_def(name = "latitude", units = "degree_north", vals = lat.in, create_dimvar = TRUE)
    lon <- ncdf4::ncdim_def(name = "longitude", units = "degree_east", vals = lon.in, create_dimvar = TRUE)
    ## NOTE(review): units say "sec" but the scaling uses * 24 * 360 rather
    ## than * 3600 — looks off by a factor of 10; kept as-is, verify.
    time <- ncdf4::ncdim_def(name = "time", units = "sec",
                             vals = seq((min(days.use) + 1 - 1 / 24) * 24 * 360, (max(days.use) + 1 - 1/24) * 24 * 360, length.out = ntime),
                             create_dimvar = TRUE,
                             unlim = TRUE)
    dim <- list(lat, lon, time)
    var.list <- list()
    dat.list <- list()
    for (j in 1:nrow(var)) {
      var.list[[j]] <- ncdf4::ncvar_def(name = as.character(var$CF.name[j]),
                                        units = as.character(var$units[j]),
                                        dim = dim,
                                        missval = -999,
                                        verbose = verbose)
      dat.list[[j]] <- array(NA, dim = c(length(lat.in), length(lon.in), ntime))
    }
    names(var.list) <- names(dat.list) <- var$CF.name
    for (j in seq_along(days.use)) {
      date.now <- as.Date(days.use[j], origin = as.Date(paste0(year - 1, "-12-31")))
      mo.now <- stringr::str_pad(lubridate::month(date.now), 2, pad = "0")
      day.mo <- stringr::str_pad(lubridate::day(date.now), 2, pad = "0")
      doy <- stringr::str_pad(days.use[j], 3, pad = "0")
      for (h in seq_along(time.stamps)) {
        hr <- stringr::str_pad(time.stamps[h], 4, pad = "0")
        dap_file <- paste0(dap_base, "/", year, "/", doy, "/", "NLDAS_FORA0125_H.A", year,
                           mo.now, day.mo, ".", hr, ".002.grb.ascii?")
        ## fetch the lat/lon axes once per file to locate the grid cell
        ## NOTE(review): these textConnection()s are never closed (leaked),
        ## and fileEncoding = "\n" looks unintentional — verify.
        latlon <- RCurl::getURL(paste0(dap_file, "lat[0:1:223],lon[0:1:463]"))
        lat.ind <- gregexpr("lat", latlon)
        lon.ind <- gregexpr("lon", latlon)
        lats <- as.vector(utils::read.table(con <- textConnection(substr(latlon, lat.ind[[1]][3],
                                                                         lon.ind[[1]][3] - 1)), sep = ",", fileEncoding = "\n", skip = 1))
        lons <- as.vector(utils::read.table(con <- textConnection(substr(latlon, lon.ind[[1]][3],
                                                                         nchar(latlon))), sep = ",", fileEncoding = "\n", skip = 1))
        lat.use <- which(lats - 0.125 / 2 <= lat.in & lats + 0.125 / 2 >= lat.in)
        lon.use <- which(lons - 0.125 / 2 <= lon.in & lons + 0.125 / 2 >= lon.in)
        ## build one DAP query string covering all variables
        dap_query <- ""
        for (v in seq_len(nrow(var))) {
          time.string <- ""
          ## FIX: loop variable renamed from `i` to `dd` — the original
          ## reused `i` here and clobbered the outer year index, so the
          ## per-year bookkeeping below (results$file[i], ...) wrote to
          ## the wrong row.
          for (dd in seq_len(var$DAP.dim[v])) {
            time.string <- paste0(time.string, "[0:1:0]")
          }
          dap_query <- paste(dap_query,
                             paste0(var$DAP.name[v], time.string, "[", lat.use, "][", lon.use, "]"), sep = ",")
        }
        dap_query <- substr(dap_query, 2, nchar(dap_query))  # drop leading ","
        dap.out <- RCurl::getURL(paste0(dap_file, dap_query))
        for (v in seq_len(nrow(var))) {
          var.now <- var$DAP.name[v]
          ind.1 <- gregexpr(paste(var.now, var.now, sep = "."), dap.out)
          end.1 <- gregexpr(paste(var.now, "time", sep = "."), dap.out)
          dat.list[[v]][, , j * 24 - 24 + h] <-
            utils::read.delim(con <- textConnection(substr(dap.out,
                                                           ind.1[[1]][1], end.1[[1]][2])), sep = ",", fileEncoding = "\n")[1, 1]
        }
      }
    }
    ## NLDAS precip is an hourly total (kg/m2); convert to a flux (kg/m2/s)
    dat.list[["precipitation_flux"]] <- dat.list[["precipitation_flux"]] / 3600
    loc <- ncdf4::nc_create(filename = loc.file, vars = var.list, verbose = verbose)
    for (j in seq_len(nrow(var))) {
      ncdf4::ncvar_put(nc = loc, varid = as.character(var$CF.name[j]), vals = dat.list[[j]])
    }
    ncdf4::nc_close(loc)
    results$file[i] <- loc.file
    results$host[i] <- PEcAn.remote::fqdn()
    results$startdate[i] <- paste0(year, "-01-01 00:00:00")
    results$enddate[i] <- paste0(year, "-12-31 23:59:59")
    results$mimetype[i] <- "application/x-netcdf"
    results$formatname[i] <- "CF Meteorology"
  }
  return(invisible(results))
}
## Tokenize tweet text into unigrams with counts.
## Cleans retweet markers, HTML entities, URLs and punctuation, tokenizes
## with tidytext, drops stop words, and returns a word/n data frame sorted
## by frequency (descending).
##
## NOTE(review): the third str_replace_all below has a visibly truncated
## `pattern = "` string (lines 24-25 of this function) — the source appears
## corrupted at that point and will not parse as-is; recover the original
## pattern from version control before use.
unigram <- function(DataFrame) {
  if (!is.data.frame(DataFrame)) {
    stop("The input for this function is a data frame.")
  }
  ## quosures so mutate()/unnest_tokens() refer to columns, not globals
  text <- dplyr::quo(text)
  word <- dplyr::quo(word)
  ## URLs and HTML-entity leftovers commonly found in tweets
  wu <- "https://t.co/[A-Za-z\\d]+|http://[A-Za-z\\d]+|&|<|>|RT|https"
  TD_Unigram <- DataFrame %>%
    dplyr::mutate(
      text = stringr::str_replace_all(
        string = text,
        pattern = "RT",
        replacement = ""),
      text = stringr::str_replace_all(
        string = text,
        pattern = "&",
        replacement = ""),
      text = stringr::str_replace_all(
        string = text,
        pattern = wu,
        replacement = ""),
      text = stringr::str_replace_all(
        string = text,
        pattern = "
        replacement = ""),
      text = stringr::str_replace_all(
        string = text,
        pattern = "[:punct:]",
        replacement = ""),
      text = stringr::str_replace_all(
        string = text,
        pattern = "[^[:alnum:]///' ]",
        replacement = "")) %>%
    tidytext::unnest_tokens(
      output = word,
      input = text) %>%
    dplyr::filter(!word %in% c(tidytext::stop_words$word, "[0-9]+")) %>%
    dplyr::count(word, sort = TRUE)
  return(TD_Unigram)
}
## Build the block cross-tabulation matrix for a list of factors (one block
## per factor pair, NA counted as its own level). Rows/columns are labelled
## "factorName:level" (plus "factorName:NA"), and the result carries two
## attributes: 'reps' (levels per factor + 1, for block boundaries) and
## 'nPt' (the common factor length, i.e. number of patients).
.getTextMatPt <- function(lstM) {
  ## FIX: the original grew `textMat` with cbind() guarded by
  ## exists('textMat'); exists() searches enclosing environments too, so a
  ## global `textMat` would silently be cbind-ed into the result. Collect
  ## column blocks in a pre-allocated list and bind once instead.
  colBlocks <- vector("list", length(lstM))
  for (i in seq_along(lstM)) {
    aggList <- lapply(lstM, function(x) table(x, lstM[[i]], useNA='always'))
    colBlocks[[i]] <- do.call(rbind, aggList)
  }
  textMat <- do.call(cbind, colBlocks)
  reps <- unlist(lapply(lstM, function(x) length(levels(x))+1))
  labels <- paste(rep(names(lstM),reps), unlist(lapply(lstM, function(x) c(levels(x),NA))),sep=":")
  rownames(textMat) <- labels
  colnames(textMat) <- labels
  attr(textMat,'reps') <- reps
  attr(textMat, 'nPt') <- unique(unlist(lapply(lstM, length)))
  return(textMat)
}
## Scale a count matrix (as built by .getTextMatPt) to proportions by
## dividing every column by the number of patients nPt.
## Note: apply() rebuilds the matrix, so custom attributes ('reps', 'nPt')
## are intentionally absent from the result.
.getValueMatPt <- function(textMat, nPt=attr(textMat,'nPt')) {
  fractions <- apply(textMat, 2, function(counts) counts / nPt)
  fractions
}
## Pairwise association p-values for a list of factors: for every factor
## pair, cross-tabulate and run .checkDimAndTest() (Fisher or simulated
## chi-squared), collecting the p-values into a square matrix.
.getValueMatP <- function(lstM) {
  ## FIX: same exists('valueMat') grow-by-cbind defect as .getTextMatPt —
  ## a global `valueMat` could leak into the result; preallocate instead.
  colBlocks <- vector("list", length(lstM))
  for (i in seq_along(lstM)) {
    aggList <- lapply(lstM, function(x) .checkDimAndTest(table(x, lstM[[i]])))
    colBlocks[[i]] <- matrix(unlist(lapply(aggList, function(x) x$p.value)), ncol=1)
  }
  valueMat <- do.call(cbind, colBlocks)
  return(valueMat)
}
## Pairwise association "direction" matrix for a list of factors: TRUE when
## the test estimate (Fisher odds ratio) exceeds 1, NA-derived FALSE/NA when
## no estimate exists (chi-squared has none). Row/col names are the factor
## names.
.getTextMatP <- function(lstM) {
  ## FIX: same exists('textMat') grow-by-cbind defect as .getTextMatPt;
  ## preallocate column blocks and bind once.
  colBlocks <- vector("list", length(lstM))
  for (i in seq_along(lstM)) {
    aggList <- lapply(lstM, function(x) .checkDimAndTest(table(x, lstM[[i]])))
    ## if/else instead of scalar ifelse(); the estimate is length-1 anyway
    colBlocks[[i]] <- matrix(unlist(lapply(aggList, function(x) if (!is.null(x$estimate)) x$estimate else NA)), ncol=1)
  }
  textMat <- do.call(cbind, colBlocks) > 1
  rownames(textMat) <- names(lstM)
  colnames(textMat) <- names(lstM)
  return(textMat)
}
## Run the appropriate association test on a contingency table: Fisher's
## exact test for 2x2 tables, otherwise a chi-squared test with simulated
## p-value. The seed is fixed so the simulated p-value is reproducible
## (NOTE: this mutates the global RNG state).
.checkDimAndTest <- function(crosstab) {
  if (identical(dim(crosstab), c(2L, 2L))) {
    fisher.test(crosstab)
  } else {
    set.seed(1)
    ## FIX: TRUE spelled out (T is a reassignable binding)
    chisq.test(crosstab, simulate.p.value = TRUE)
  }
}
## Blank out the strictly-lower-triangular entries of a (square) matrix,
## keeping the diagonal — used so image() only draws each pair once.
.maskImageDiag <- function(mat) {
  ## FIX: the original hand-built the mask via sapply()/rep(T,...) — this is
  ## exactly lower.tri() (diag excluded). Assumes `mat` is square, as all
  ## callers in this file guarantee.
  mat[lower.tri(mat)] <- NA
  return(mat)
}
## Heatmap visualisation of a "Table 1" association structure for a list of
## factors. Three methods:
##   AssociationByP  — factor-level cells: '+'/'-' direction symbols for
##                     significant (p <= 0.10) pairs, coloured by p-value
##   CrosstableByP   — level-level cells: counts shown, coloured by the
##                     pairwise p-value of the parent factors
##   CrosstableByN   — level-level cells: counts shown, coloured by count
##                     proportion
## `drawRaster` (default depends on method) toggles block grid lines; `...`
## is passed on to image(). Relies on blue2yellow() (project palette) and
## the .get*Mat* helpers above. Called for its plotting side effect.
plotTable1Heatmap <- function(factorList, method=c('AssociationByP', 'CrosstableByP', 'CrosstableByN')[1], drawRaster=NULL, ...) {
  ## cell "text" source: estimates (direction) or raw counts
  textMat <- switch(method,
                    AssociationByP = .getTextMatP(factorList),
                    CrosstableByP = .getTextMatPt(factorList),
                    CrosstableByN = .getTextMatPt(factorList)
  )
  ## cell colour source: p-values or count proportions
  valueMat <- switch(method,
                     AssociationByP = .getValueMatP(factorList),
                     CrosstableByP = .getValueMatP(factorList),
                     CrosstableByN = .getValueMatPt(textMat)
  )
  par(mar=c(15.1,15.1,2.1,5.1))  # wide margins for long factor labels
  vm <- valueMat
  vt <- .maskImageDiag(textMat)  # show each pair only once (upper triangle)
  if (method %in% c('AssociationByP', 'CrosstableByP' )) {
    if (is.null(drawRaster)) {
      drawRaster <- F
    }
    ## same dimensions: factor-by-factor display (AssociationByP)
    if (all(dim(vm) == dim(vt))) {
      vt[vm > 0.10] <- NA          # only mark pairs with p <= 0.10
      txt <- c('-','+')[factor(vt, levels=c(F,T))]
      axlab <- rownames(textMat)
      axtick <- 1:nrow(valueMat)-0.5
      setcex=3
      verticallines <- 1:nrow(vm)
      horizontallines <- 1:nrow(vm)
    }
    ## different dimensions: level-level cells coloured by the parent
    ## factors' p-value — expand vm so each factor-pair p-value covers its
    ## whole block of level cells
    if (! all(dim(vm) == dim(vt))) {
      axlab <- rownames(textMat)
      axtick <- 1:nrow(textMat)-0.5
      setcex=1
      rowP <- list()
      for (i in 1:nrow(vm)) {
        rowP[[i]] <- vm[,i]
      }
      rowP <- lapply(rowP, function(x) rep(x, times=attr(vt,'reps')))
      rowP <- lapply(1:length(rowP), function(x) matrix(rep(rowP[[x]], times=attr(vt,'reps')[x]), ncol=attr(vt,'reps')[x]))
      setcex <- 1
      vm <- do.call(cbind, rowP)
      txt <- vt
      verticallines <- cumsum(attr(vt, 'reps'))
      horizontallines <- cumsum(attr(vt, 'reps'))
    }
    ## low p = orange, rest on a blue-yellow ramp; white text on dark cells
    colFills <- c('orange',rev(blue2yellow(19)))
    textCols <- c('black','white')[factor(vm>0.6)]
  }
  if (method=='CrosstableByN') {
    ## this method only makes sense on the level-level (Pt) matrices
    if (! all(dim(vm) == dim(vt))) {
      stop('Pt invalid option')
    }
    if (is.null(drawRaster)) {
      drawRaster <- T
    }
    txt <- vt
    colFills=c(blue2yellow(19),'orange')
    textCols <- c('black','white')[factor(vm<0.4)]
    setcex=1
    axlab <- rownames(textMat)
    axtick <- 1:nrow(textMat)-0.5
    verticallines <- cumsum(attr(vt, 'reps'))
    horizontallines <- cumsum(attr(vt, 'reps'))
  }
  vm <- .maskImageDiag(vm)
  image(z=vm, x=(0 : nrow(vm)), y=c(0 : ncol(vm)),
        col=colFills, zlim=c(0,1),
        xaxt='n', yaxt='n', xlab='',ylab='', ...)
  if (drawRaster) {
    abline(v=verticallines, h=horizontallines)
  }
  axis(1, at=axtick, labels=axlab, las=2)
  axis(2, at=axtick, labels=axlab, las=2)
  ## overlay the cell text (counts or +/- symbols)
  ys <- rep(1:nrow(vm), each=ncol(vm))-0.5
  xs <- rep(1:nrow(vm), times=ncol(vm))-0.5
  text(x=xs, y=ys, labels=txt, cex=setcex,
       col=textCols)
  legend('bottomright', fill=colFills,
         legend=levels(cut(0:1, breaks=c(0, seq(0.05,1,by=0.05)), include.lowest=T)), cex=0.90,
         bg='white')
  box()
}
## Read a whitespace-separated numeric table (3D point data) from `input`.
## Returns the data.frame on success; on any read error prints
## "Error in file!" and returns 0 (callers rely on the 0 sentinel).
ReadFromFile3D <- function (input)
{
    ## FIX: `<-` instead of `=`; dropped a stray trailing empty argument
    ## that relied on R's empty-arg/default fallback.
    vector <- try(read.table(input, header = FALSE, dec = "."))
    ## FIX: inherits() instead of class(x) == "try-error" (class() can be
    ## length > 1, making == unreliable).
    if (inherits(vector, "try-error")) {
        print("Error in file!")
        return(0)
    }
    return(vector)
}
## Evaluate function `x` with the named entries of `variables` injected
## into a fresh environment that becomes x's enclosure. Extra arguments in
## `...` are passed to x; `parent_env`, when given, becomes the parent of
## the injection environment (controls what else x can see).
evalFunctionOnList <-
    function(x,variables=list(),...,parent_env){
        sandbox <- new.env()
        if (!missing(parent_env)) {
            parent.env(sandbox) <- parent_env
        }
        ## bind each named variable inside the sandbox (mapply keeps the
        ## original's error behaviour for unnamed `variables`)
        mapply(assign, x = names(variables), value = variables,
               MoreArgs = list(envir = sandbox))
        environment(x) <- sandbox
        x(...)
    }
## Mark the training examples covered by `rule` (SDIGA post-processing).
## Runs .fitnessFunction() in mark-mode to get the coverage vector and the
## rule confidence, then counts how many *newly* covered examples the rule
## adds relative to dataset[["covered"]].
## Returns: list(cubreNuevos = covers new examples?, covered = raw fitness
## output, porCubrir = remaining examples to cover, confidence).
.markExamples <- function(rule, dataset, targetClass, nVars, maxRule, to_cover, nLabels, Objectives = c(.LocalSupport, .confidence, NA, FALSE), Weights = c(0.7,0.3,0), DNFRules = FALSE, cate, num){
  cover <- .fitnessFunction(rule = rule, dataset = dataset, noClass = matrix(unlist(.separate(dataset)), nrow = length(dataset[[2]]) - 1, ncol = length(dataset[[7]])), targetClass = targetClass, to_cover = to_cover, n_Vars = nVars,nLabels = nLabels, maxRule = maxRule , mark = TRUE, Objectives = Objectives, Weights = Weights, DNFRules = DNFRules, fuzzy = Objectives[[4]], cate = cate, num = num)
  ## examples covered now minus those already covered before this rule
  sumNews <- sum(cover[[1]]) - sum(dataset[["covered"]])
  confi <- cover[[2]]
  to_cover <- to_cover - sumNews
  return( list(cubreNuevos = sumNews > 0, covered = cover , porCubrir = to_cover, confidence = confi) )
}
## Pick a single best rule out of a GA result object.
## When several individuals tie on the best fitness value, prefer the one
## with the fewest participating gene values (i.e. the most general rule).
.getBestRule <- function(result){
  bestFitness <- result@fitnessValue
  draws <- which(result@fitness == bestFitness)
  if(length(draws) > 1){
    if(!result@DNFRules){
      ## CAN representation: count genes differing from the
      ## "no participation" value stored in result@max
      lista <- apply(X = result@population[draws, ], MARGIN = 1, FUN = function(x, max) sum(x != max), result@max )
    } else{
      ## DNF representation: count active (non-zero) genes belonging to
      ## variables that actually participate in the rule
      lista <- apply(X = result@population[draws, ], MARGIN = 1, FUN = function(x, max){
        particip <- .getParticipants(rule = x, maxRule = max, DNFRules = TRUE)
        val <- numeric(0)
        for(i in 2:length(max)){
          if(particip[i-1])
            val <- c(val, (max[i-1]+1):max[i])
        }
        sum(x[val] != 0)
      } , result@max )
    }
    ## NOTE(review): `orden` indexes the *subset* lista[lista > 0] but is
    ## then used to index result@population directly (not via draws[...]);
    ## tied rules with lista == 0 are also silently dropped. This looks
    ## suspicious — verify against the algorithm's intent.
    orden <- order(lista[which(lista > 0)])
    return(result@population[ orden[1] , ])
  } else {
    return(result@population[ draws , ])
  }
}
## GA mutation operator for one variable of a chromosome.
## Randomly picks one of two mutation types: (1) "eliminate" — set the
## variable to its no-participation value (CAN) or zero out its gene block
## (DNF); (2) "random" — draw a fresh random value (CAN) or random 0/1
## genes (DNF). RNG call order matches the original exactly.
.mutate <- function(chromosome, variable, maxVariablesValue, DNF_Rule){
  op <- sample(x = 1:2, size = 1)
  if (DNF_Rule) {
    ## DNF: maxVariablesValue holds cumulative block ends; locate the
    ## gene range for this variable
    variable <- variable + 1
    genes <- (maxVariablesValue[variable - 1] + 1):maxVariablesValue[variable]
    chromosome[genes] <- if (op == 1) {
      0
    } else {
      sample(x = 0:1, size = length(genes), replace = TRUE)
    }
  } else {
    ## CAN: a single gene per variable
    chromosome[variable] <- if (op == 1L) {
      maxVariablesValue[variable]
    } else {
      sample(x = 0:(maxVariablesValue[variable]), size = 1)
    }
  }
  chromosome
}
SDIGA <- function(parameters_file = NULL,
training = NULL,
test = NULL,
output = c("optionsFile.txt", "rulesFile.txt", "testQM.txt"),
seed = 0,
nLabels = 3,
nEval = 10000,
popLength = 100,
mutProb = 0.01,
RulesRep = "can",
Obj1 = "CSUP",
w1 = 0.7,
Obj2 = "CCNF",
w2 = 0.3,
Obj3 = "null",
w3 = 0,
minConf = 0.6,
lSearch = "yes",
targetVariable = NA,
targetClass = "null")
{
if(is.null(parameters_file)){
if(is.null(training))
stop("Not provided a 'test' or 'training' file and neither a parameter file. Aborting...")
if(is.null(test))
test <- training
if(class(training) != "SDEFSR_Dataset" | class(test) != "SDEFSR_Dataset")
stop("'training' or 'test' parameters is not a SDEFSR_Dataset class")
if(training[[1]] != test[[1]] )
stop("datasets ('training' and 'test') does not have the same relation name.")
if(length(output) != 3 )
stop("You must specify three files to save the results.")
parameters <- list(seed = seed,
algorithm = "SDIGA",
outputData = output,
nEval = nEval,
popLength = popLength,
nLabels = nLabels,
mutProb = mutProb,
RulesRep = RulesRep,
Obj1 = Obj1,
w1 = w1,
Obj2 = Obj2,
w2 = w2,
Obj3 = Obj3,
w3 = w3,
lSearch = lSearch,
minConf = minConf,
targetClass = targetClass,
targetVariable = if(is.na(targetVariable)) training$attributeNames[length(training$attributeNames)] else targetVariable)
} else {
parameters <- .read.parametersFile2(file = parameters_file)
if(parameters$algorithm != "SDIGA")
stop("Parameters file is not for \"SDIGA\"")
training <- read.dataset(file = parameters$inputData[1])
if(is.na(parameters$inputData[2])){
test <- training
} else {
test <- read.dataset(file = parameters$inputData[2])
}
}
if(is.na(parameters$targetVariable))
parameters$targetVariable <- training$attributeNames[length(training$attributeNames)]
training <- changeTargetVariable(training, parameters$targetVariable)
test <- changeTargetVariable(test, parameters$targetVariable)
if(training$attributeTypes[length(training$attributeTypes)] != 'c' | test$attributeTypes[length(test$attributeTypes)] != 'c')
stop("Target variable is not categorical.")
training <- modifyFuzzyCrispIntervals(training, parameters$nLabels)
training$sets <- .giveMeSets(data_types = training$attributeTypes, max = training$max, n_labels = parameters$nLabels)
test <- modifyFuzzyCrispIntervals(test, parameters$nLabels)
test$sets <- .giveMeSets(data_types = test$attributeTypes, max = test$max, n_labels = parameters$nLabels)
training$covered <- logical(training$Ns)
test$covered <- logical(test$Ns)
if(tolower(parameters$RulesRep) == "can"){
DNF = FALSE
} else {
DNF = TRUE
}
Objectives <- .parseObjectives(parameters = parameters, "SDIGA", DNF)
if(all(is.na(Objectives[1:3]))) stop("No objetive values selected. You must select, at least, one objective value. Aborting...")
Weights <- c(parameters$w1, parameters$w2, parameters$w3)
if(sum(Weights) == 0) stop("Sum of weigths must be a value greater than zero.")
Best <- TRUE
rules <- list()
.show_parameters(params = parameters, train = training, test = test)
contador <- 0
cate <- training[["attributeTypes"]][- length(training[["attributeTypes"]])] == 'c'
num <- training[["attributeTypes"]][- length(training[["attributeTypes"]])] == 'r' | training[["attributeTypes"]][- length(training[["attributeTypes"]])] == 'e'
if(parameters$targetClass != "null"){
targetClass <- parameters$targetClass
if(! any(training$class_names == targetClass)) stop("No valid target value provided.")
message("\n\nSearching rules for only one value of the target class...\n\n")
message(paste(" - Target value:", targetClass ), appendLF = T)
if(!is.na(parameters$outputData[2]))
cat("\n - Target value:", targetClass , file = parameters$outputData[2], sep = " ", fill = TRUE, append = TRUE)
first_rule <- TRUE
Best = TRUE
to_cover = training$examplesPerClass[[targetClass]]
while(Best){
Best <- FALSE
rule <- .executeGA(algorithm = "SDIGA", dataset = training, targetClass = targetClass, n_vars = training$nVars, to_cover = to_cover, nLabels = parameters$nLabels, N_evals = parameters$nEval, tam_pob = parameters$popLength, p_mut = parameters$mutProb, seed = parameters$seed, Objectives = Objectives, Weights = Weights, DNFRules = DNF, cate = cate, num = num)
maxRule <- if(!DNF) training$sets else c(0, Reduce(f = '+', x = training[["sets"]], accumulate = TRUE))
values <- .fitnessFunction(rule = rule, dataset = training, noClass = matrix(unlist(.separate(training)), nrow = length(training[[2]]) - 1, ncol = length(training[[7]])), targetClass = targetClass, to_cover = to_cover, n_Vars = training$nVars, nLabels = parameters$nLabels, maxRule = maxRule, mark = TRUE, Objectives = Objectives, Weights = Weights, DNFRules = DNF, fuzzy = Objectives[[4]], test = TRUE, cate = cate, num = num)[[2]]
if(tolower(parameters$lSearch) == "yes") rule <- .localSearch(att_obj = targetClass, rule = rule, DNF_Rules = DNF, dataset = training , minimumConfidence = parameters$minConf, x = values, maxRule = maxRule, to_cover = to_cover, nLabels = parameters$nLabels, Objectives = Objectives, cate = cate, num = num)
x <- .markExamples(rule = rule, dataset = training, targetClass = targetClass, nVars = training$nVars, maxRule = maxRule, to_cover = to_cover, nLabels = parameters$nLabels, Objectives = Objectives, Weights = Weights, DNFRules = DNF, cate = cate, num = num)
if(x$cubreNuevos && x$confidence > parameters$minConf || first_rule){
first_rule <- FALSE
Best <- TRUE
contador <- contador + 1
training$covered <- x$covered[[1]]
message(paste("\n GENERATED RULE", contador, ":"),appendLF = T)
if(!is.na(parameters$outputData[2]))
cat("\n"," GENERATED RULE", contador, ":",file = parameters$outputData[2], sep = " ", fill = TRUE, append = TRUE)
.print.rule(rule = rule, max = training$sets, names = training$attributeNames, consecuent = targetClass, types = training$attributeTypes,fuzzySets = training$fuzzySets, categoricalValues = training$categoricalValues, DNF, rulesFile = parameters$outputData[2])
rule[length(rule) + 1] <- targetClass
rules[[contador]] <- rule
to_cover <- x$porCubrir
if(to_cover <= 0) Best <- FALSE
} else {
message("\n GENERATED RULE :", appendLF = T)
message("
if(!is.na(parameters$outputData[2]))
cat("\n GENERATED RULE", ":", "\n",
"
}
}
} else {
message("\n\nSearching rules for all values of the target class...\n\n")
for(i in seq_len(length(training[["class_names"]]))) {
targetClass <- training[["class_names"]][i]
message(paste(" - Target value:", targetClass,"\n"))
if(!is.na(parameters$outputData[2]))
cat(" \n - Target value:", targetClass , file = parameters$outputData[2], sep = " ", fill = TRUE, append = TRUE)
first_rule <- TRUE
Best = TRUE
to_cover = training$examplesPerClass[[i]]
while(Best){
Best <- FALSE
rule <- .executeGA(algorithm = "SDIGA", dataset = training, targetClass = targetClass, n_vars = training$nVars, to_cover = to_cover, nLabels = parameters$nLabels, N_evals = parameters$nEval, tam_pob = parameters$popLength, p_mut = parameters$mutProb, seed = parameters$seed, Objectives = Objectives, Weights = Weights, DNFRules = DNF, cate = cate, num = num)
maxRule <- if(!DNF) training$sets else c(0, Reduce(f = '+', x = training[["sets"]], accumulate = TRUE))
values <- .fitnessFunction(rule = rule, dataset = training, noClass = matrix(unlist(.separate(training)), nrow = length(training[[2]]) - 1, ncol = length(training[[7]])), targetClass = targetClass, to_cover = to_cover, n_Vars = training$nVars, nLabels = parameters$nLabels, maxRule = maxRule, mark = TRUE, Objectives = Objectives, Weights = Weights, DNFRules = DNF, fuzzy = Objectives[[4]], test = TRUE, cate = cate, num = num)[[2]]
if(tolower(parameters$lSearch) == "yes"){
rule <- .localSearch(att_obj = targetClass, rule = rule, DNF_Rules = DNF, dataset = training , minimumConfidence = parameters$minConf, x = values, maxRule = maxRule, to_cover = to_cover, nLabels = parameters$nLabels, Objectives = Objectives, cate = cate, num = num)
}
x <- .markExamples(rule = rule, dataset = training, targetClass = targetClass, nVars = training$nVars, maxRule = maxRule, to_cover = to_cover, nLabels = parameters$nLabels, Objectives = Objectives, Weights = Weights, DNFRules = DNF, cate = cate, num = num)
if(x$cubreNuevos && x$confidence > parameters$minConf || first_rule){
first_rule <- FALSE
Best <- TRUE
contador <- contador + 1
training$covered <- x$covered[[1]]
message(paste("\n GENERATED RULE", contador, ":\n"))
if(!is.na(parameters$outputData[2]))
cat("\n"," GENERATED RULE", contador, ":",file = parameters$outputData[2], sep = " ", fill = TRUE, append = TRUE)
.print.rule(rule = rule, max = training$sets, names = training$attributeNames, consecuent = targetClass, types = training$attributeTypes,fuzzySets = training$fuzzySets, categoricalValues = training$categoricalValues, DNF, rulesFile = parameters$outputData[2])
rule[length(rule) + 1] <- targetClass
rules[[contador]] <- rule
to_cover <- x$porCubrir
if(to_cover <= 0) Best <- FALSE
} else {
message(" GENERATED RULE:\n")
message("
if(!is.na(parameters$outputData[2]))
cat("\n GENERATED RULE", ":", "\n",
"
}
}
}
}
message("\n\nTesting rules...\n\n")
sumNvars <- 0
sumCov <- 0
sumFsup <- 0
sumCsup <- 0
sumCconf <- 0
sumFconf <- 0
sumUnus <- 0
sumSign <- 0
sumAccu <- 0
sumTpr <- 0
sumFpr <- 0
n_rules <- length(rules)
rulesToReturn <- vector(mode = "list", length = n_rules)
for(i in 1:n_rules){
val <- .proveRule(rule = rules[[i]][-length(rules[[i]])], testSet = test, targetClass = rules[[i]][length(rules[[i]])], numRule = i, parameters = parameters, Objectives = Objectives, Weights = Weights, cate = cate, num = num, DNF = DNF)
test[["covered"]] <- val[["covered"]]
sumNvars <- sumNvars + val[["nVars"]]
sumCov <- sumCov + val[["coverage"]]
sumFsup <- sumFsup + val[["fsupport"]]
sumCconf <- sumCconf + val[["cconfidence"]]
sumFconf <- sumFconf + val[["fconfidence"]]
sumUnus <- sumUnus + val[["unusualness"]]
sumSign <- sumSign + val[["significance"]]
sumAccu <- sumAccu + val[["accuracy"]]
sumTpr <- sumTpr + val[["tpr"]]
sumFpr <- sumFpr + val[["fpr"]]
names(val[["significance"]]) <- NULL
rulesToReturn[[i]] <- list( rule = createHumanReadableRule(rules[[i]], training, DNF),
qualityMeasures = list(nVars = val[["nVars"]],
Coverage = val[["coverage"]],
Unusualness = val[["unusualness"]],
Significance = val[["significance"]],
FuzzySupport = val[["fsupport"]],
Support = val[["csupport"]],
FuzzyConfidence = val[["fconfidence"]],
Confidence = val[["cconfidence"]],
TPr = val[["tpr"]],
FPr = val[["fpr"]]))
}
message("Global:\n")
message(paste(paste("\t - N_rules:", length(rules), sep = " "),
paste("\t - N_vars:", round(sumNvars / n_rules, 6), sep = " "),
paste("\t - Coverage:", round(sumCov / n_rules, 6), sep = " "),
paste("\t - Significance:", round(sumSign / n_rules, 6), sep = " "),
paste("\t - Unusualness:", round(sumUnus / n_rules, 6), sep = " "),
paste("\t - Accuracy:", round(sumAccu / n_rules, 6), sep = " "),
paste("\t - CSupport:", round(sum(test[["covered"]] / test[["Ns"]]), 6), sep = " "),
paste("\t - FSupport:", round(sumFsup / n_rules, 6), sep = " "),
paste("\t - FConfidence:", round(sumFconf / n_rules, 6), sep = " "),
paste("\t - CConfidence:", round(sumCconf / n_rules, 6), sep = " "),
paste("\t - True Positive Rate:", round(sumTpr / n_rules, 6), sep = " "),
paste("\t - False Positive Rate:", round(sumFpr / n_rules, 6), sep = " "),
sep = "\n")
)
if(!is.na(parameters$outputData[3])){
cat( "Global:",
paste("\t - N_rules:", length(rules), sep = " "),
paste("\t - N_vars:", round(sumNvars / n_rules, 6), sep = " "),
paste("\t - Coverage:", round(sumCov / n_rules, 6), sep = " "),
paste("\t - Significance:", round(sumSign / n_rules, 6), sep = " "),
paste("\t - Unusualness:", round(sumUnus / n_rules, 6), sep = " "),
paste("\t - Accuracy:", round(sumAccu / n_rules, 6), sep = " "),
paste("\t - CSupport:", round(sum(test[["covered"]] / test[["Ns"]]), 6), sep = " "),
paste("\t - FSupport:", round(sumFsup / n_rules, 6), sep = " "),
paste("\t - FConfidence:", round(sumFconf / n_rules, 6), sep = " "),
paste("\t - CConfidence:", round(sumCconf / n_rules, 6), sep = " "),
paste("\t - True Positive Rate:", round(sumTpr / n_rules, 6), sep = " "),
paste("\t - False Positive Rate:", round(sumFpr / n_rules, 6), sep = " "),
file = parameters$outputData[3], sep = "\n", append = TRUE
)
}
class(rulesToReturn) <- "SDEFSR_Rules"
rulesToReturn
}
# Evaluate one learned rule on the test set and report its quality measures.
#
# rule        - encoded rule (CAN or DNF representation) WITHOUT the class value.
# testSet     - an 'SDEFSR_Dataset' object the rule is evaluated against.
# targetClass - class value predicted by the rule (its consequent).
# numRule     - rule index, used only in the printed report.
# parameters  - algorithm parameter list (uses nLabels and outputData[3]).
# Objectives / Weights - objective configuration forwarded to .fitnessFunction().
# cate / num  - logical masks marking categorical / numerical attributes.
# DNF         - TRUE when the rule uses the DNF (binary) representation.
#
# Returns a list with the updated example-coverage vector and the rule's
# quality measures (coverage, significance, unusualness, accuracy, supports,
# confidences, tpr, fpr, number of variables).
.proveRule <- function(rule, testSet, targetClass, numRule, parameters, Objectives, Weights, cate, num, DNF = FALSE){
  # inherits() is the canonical S3 class test; `class(x) == "..."` breaks when
  # class() returns more than one element.
  stopifnot(inherits(testSet, "SDEFSR_Dataset"))
  maxRule <- .giveMeSets(data_types = testSet[[3]], max = testSet[[5]], n_labels = parameters$nLabels)
  # For DNF rules the per-variable set counts become cumulative gene offsets.
  if(DNF) maxRule <- c(0, Reduce(f = '+', x = maxRule, accumulate = TRUE))
  p <- .fitnessFunction(rule = rule, dataset = testSet, noClass = matrix(unlist(.separate(testSet)), nrow = length(cate)), targetClass = targetClass, to_cover = testSet$examplesPerClass[[targetClass]], n_Vars = testSet$nVars, nLabels = parameters$nLabels, maxRule = maxRule, mark = TRUE, Objectives = Objectives, Weights = Weights, DNFRules = DNF, fuzzy = Objectives[[4]], test = TRUE, cate = cate, num = num)
  values <- p[[2]]
  # Accumulate coverage: an example counts as covered if any rule covers it.
  testSet[["covered"]] <- testSet[["covered"]] | p[[1]]
  Cov <- round(.coverage(values), 6)
  sig <- round(.significance(values), 6)
  unus <- round(.unusualness(values), 6)
  acc <- round(.accuracy(values), 6)
  Csup <- round(.Csupport(values), 6)
  Fsup <- round(.Fsupport(values), 6)
  Ccnf <- round(.confidence(values), 6)
  Fcnf <- round(.fuzzyConfidence(values), 6)
  tpr <- round(p[[2]]$tpr, 6)
  fpr <- round(p[[2]]$fpr, 6)
  # Number of participating variables (+1 for the consequent).
  if(DNF) {
    participants <- .getParticipants(rule = rule, maxRule = maxRule, DNFRules = TRUE)
    nVars <- sum(participants) + 1
  } else {
    # In CAN encoding a gene equal to its max value means "don't care".
    nVars <- sum(rule < testSet[["sets"]]) + 1
  }
  message(paste("Rule", numRule, ":\n"))
  message(paste(paste("\t - N_vars:", nVars, sep = " "),
                paste("\t - Coverage:", Cov, sep = " "),
                paste("\t - Significance:", sig, sep = " "),
                paste("\t - Unusualness:", unus, sep = " "),
                paste("\t - Accuracy:", acc, sep = " "),
                paste("\t - CSupport:", Csup, sep = " "),
                paste("\t - FSupport:", Fsup, sep = " "),
                paste("\t - CConfidence:", Ccnf, sep = " "),
                paste("\t - FConfidence:", Fcnf, sep = " "),
                paste("\t - True Positive Rate:", tpr, sep = " "),
                paste("\t - False Positive Rate:", fpr, "\n", sep = " "),
                sep = "\n")
  )
  # Mirror the report into the measures output file when one is configured.
  if(!is.na(parameters$outputData[3])){
    cat(paste("Rule", numRule, ":"),
        paste("\t - N_vars:", nVars, sep = " "),
        paste("\t - Coverage:", Cov, sep = " "),
        paste("\t - Significance:", sig, sep = " "),
        paste("\t - Unusualness:", unus, sep = " "),
        paste("\t - Accuracy:", acc, sep = " "),
        paste("\t - CSupport:", Csup, sep = " "),
        paste("\t - FSupport:", Fsup, sep = " "),
        paste("\t - CConfidence:", Ccnf, sep = " "),
        paste("\t - FConfidence:", Fcnf, sep = " "),
        paste("\t - True Positive Rate:", tpr, sep = " "),
        paste("\t - False Positive Rate:", fpr, "\n", sep = " "),
        file = parameters$outputData[3], sep = "\n", append = TRUE
    )
  }
  list( covered = testSet[["covered"]],
        nVars = nVars,
        coverage = Cov,
        significance = sig,
        unusualness = unus,
        accuracy = acc,
        csupport = Csup,
        fsupport = Fsup,
        cconfidence = Ccnf,
        fconfidence = Fcnf,
        tpr = tpr,
        fpr = fpr)
}
# Remove a variable from an encoded rule (make it a non-participant).
#
# CAN representation: a variable is erased by setting its gene to the
# "don't care" value, which equals the variable's maximum value.
# DNF representation: a variable spans a run of binary genes delimited by
# the cumulative offsets in `maxVariablesValue`; all of them are cleared.
#
# Returns the modified rule vector.
.eraseGene <- function(rule, variable, maxVariablesValue, DNF_Rules){
  if (DNF_Rules) {
    genes <- (maxVariablesValue[variable] + 1):maxVariablesValue[variable + 1]
    rule[genes] <- 0
  } else {
    rule[variable] <- maxVariablesValue[variable]
  }
  rule
}
# Hill-climbing local search used by SDIGA: repeatedly try to drop one
# participating variable from the rule, keeping the removal whenever it
# strictly improves significance without lowering confidence.
#
# att_obj  - target class value of the rule.
# rule     - encoded rule to improve.
# DNF_Rules - TRUE for the DNF (binary) representation.
# dataset  - training SDEFSR_Dataset.
# minimumConfidence - confidence the improved rule must reach to be accepted.
# x        - quality-measure list of the original rule (from .fitnessFunction).
# maxRule  - per-variable maxima (CAN) or cumulative gene offsets (DNF).
# to_cover, nLabels, Objectives, cate, num - forwarded to .fitnessFunction().
#
# Returns the improved rule, or the original rule when no improvement
# reaching `minimumConfidence` was found.
.localSearch <- function(att_obj, rule, DNF_Rules, dataset, minimumConfidence, x, maxRule, to_cover, nLabels, Objectives, cate, num){
bestRule <- rule
ruleSignificance <- .significance(x)
participants <- .getParticipants(rule = rule, maxRule = maxRule, DNFRules = DNF_Rules)
# Nothing to do if significance is already maximal or only one variable
# participates (removing it would empty the antecedent).
if(ruleSignificance == 1 || sum(participants) == 1 ){
return(rule)
}
bestSignificance <- ruleSignificance
best = TRUE
# Number of variables: for DNF, maxRule holds length(vars) + 1 offsets.
len = if(DNF_Rules) length(maxRule) - 1 else length(maxRule)
while(best){
best = FALSE
participants <- .getParticipants(rule = bestRule, maxRule = maxRule, DNFRules = DNF_Rules)
for(i in seq_len(len) ){
if(participants[i]){
# Candidate rule with variable i erased.
regla_m <- .eraseGene(rule = bestRule, variable = i, maxVariablesValue = maxRule, DNF_Rules = DNF_Rules)
x1 <- .fitnessFunction(rule = regla_m, dataset = dataset, noClass = matrix(unlist(.separate(dataset)), nrow = length(dataset[[2]]) - 1, ncol = length(dataset[[7]])), targetClass = att_obj, to_cover = to_cover, n_Vars = dataset$nVars, nLabels = nLabels, maxRule = maxRule, mark = TRUE, Objectives = Objectives, DNFRules = DNF_Rules, fuzzy = Objectives[[4]], test = TRUE, cate = cate, num = num)
# A length-1 result presumably signals an invalid/empty rule; treat
# its significance as 0 -- TODO confirm against .fitnessFunction.
if(length(x1) > 1){
x1 <- x1[[2]]
supp1 <- .significance(x1)
} else {
supp1 <- 0
}
if( supp1 >= bestSignificance ){
c1 <- .confidence(x1)
# NOTE(review): confidence is compared against the ORIGINAL rule's
# measures `x`, not the current best -- looks intentional, confirm.
c2 <- .confidence(x)
if( (supp1 > bestSignificance) && c1 >= c2 ){
bestSignificance <- supp1
bestRule <- regla_m
best = TRUE
}
}
}
}
}
# Accept the improved rule only if it still meets the confidence threshold.
x1 <- .fitnessFunction(rule = bestRule, dataset = dataset, noClass = matrix(unlist(.separate(dataset)), nrow = length(dataset[[2]]) - 1, ncol = length(dataset[[7]])), targetClass = att_obj, to_cover = to_cover, n_Vars = dataset$nVars, nLabels = nLabels, maxRule = maxRule, mark = TRUE, Objectives = Objectives, DNFRules = DNF_Rules, fuzzy = Objectives[[4]], test = TRUE, cate = cate, num = num)[[2]]
if(.confidence(x1) >= minimumConfidence){
bestRule
} else {
rule
}
}
# Build a classed condition object describing a failed callr subprocess.
#
# out - result list of the subprocess run; fields used: timeout (logical),
#       status, stdout, stderr.
# msg - optional extra message appended after the generic prefix.
#
# Returns a condition inheriting from "callr_timeout_error" (on timeout) or
# "callr_status_error", plus "callr_error", with the process status and
# captured output attached for later inspection.
new_callr_error <- function(out, msg = NULL) {
  error_msg <- paste0(
    if (out$timeout) "callr timed out" else "callr subprocess failed",
    # With an explicit message append ": <msg>"; otherwise keep a trailing
    # ":" for status errors only (timeout messages end without punctuation).
    if (!is.null(msg)) paste0(": ", msg) else if (!out$timeout) ":"
  )
  # Dropped the redundant paste() wrapper: error_msg is already a scalar string.
  cond <- new_error(error_msg)
  class(cond) <- c(
    if (out$timeout) "callr_timeout_error" else "callr_status_error",
    "callr_error",
    class(cond))
  cond$status <- out$status
  cond$stdout <- out$stdout
  cond$stderr <- out$stderr
  cond
}
# S3 print method for callr errors: delegate formatting to the embedded
# rlib error printer. `err` is an object defined elsewhere in the package
# (not a local) -- presumably the standalone errors environment; confirm.
# Returns the condition invisibly, per print-method convention.
print.callr_error <- function(x, ...) {
err$.internal$print_rlib_error_2_0(x)
invisible(x)
}
# Compute a provincial capital-stock series using one of two backends.
#
# prv     - province identifier, forwarded to the backend.
# method  - "ZJ" or "CP", selecting the CompK_ZJ or CompK_CP estimator.
# startyr - first year of the series (used by the "CP" backend only).
# yr, invest, InvestPrice, depr - optional data vectors overriding defaults.
# delta   - depreciation rate.
# bt      - base year for the investment price index.
#
# Returns whatever the selected CompK_* backend returns.
CompK <- function(prv, method = 'ZJ', startyr = 1996, yr = NULL, invest = NULL, InvestPrice = NULL,
                  depr = NULL, delta = 0.096, bt = 1952){
  # Validate `method` up front: the original fell through for unknown values
  # and died later with an uninformative "object 'ans' not found" error.
  method <- match.arg(method, c('ZJ', 'CP'))
  if (method == 'ZJ'){
    ans <- CompK_ZJ(prv = prv, yr = yr, invest = invest, InvestPrice = InvestPrice,
                    delta = delta, bt = bt)
  } else {
    ans <- CompK_CP(prv = prv, startyr = startyr, yr = yr, invest = invest, InvestPrice = InvestPrice,
                    depr = depr, delta = delta, bt = bt)
  }
  return(ans)
}
# Elliptical k-means on the torus: fit an approximate mixture of ellipsoids.
#
# data    - n x p matrix of angles on the p-torus.
# centers - number of clusters, or a matrix of initial centers.
# type    - covariance structure: "homogeneous-circular" (identity),
#           "heterogeneous-circular" (scaled identity per cluster),
#           "ellipsoids" (per-cluster covariance, one pass), or "general"
#           (per-cluster covariance refined iteratively until convergence).
# init    - initialization: "kmeans" (kmeans.torus) or "hierarchical"
#           (hcluster.torus; needs a "dist" object d, computed if NULL).
# d       - optional "dist" object for hierarchical initialization.
# additional.condition - if TRUE, replace near-singular covariance estimates
#           by a scaled identity before the final 1e-6*I fallback.
# THRESHOLD - singularity / convergence tolerance.
# maxiter - iteration cap for the "general" refinement loop.
# verbose - print progress messages.
# ...     - forwarded to the chosen initializer.
#
# Returns a list (mu, Sigmainv, c) of cluster centers, inverse covariances
# and log-weight constants; for type == "general" also loglkhd and the
# indices of clusters whose covariance collapsed to the singular fallback.
ellip.kmeans.torus <- function(data, centers = 10,
                               type = c("homogeneous-circular",
                                        "heterogeneous-circular",
                                        "ellipsoids",
                                        "general"),
                               init = c("kmeans", "hierarchical"),
                               d = NULL,
                               additional.condition = TRUE,
                               THRESHOLD = 1e-10, maxiter = 200,
                               verbose = TRUE, ...){
  type <- match.arg(type)
  init <- match.arg(init)
  p <- ncol(data)
  n <- nrow(data)
  if (init == "hierarchical" && is.null(d)) {
    d <- ang.pdist(data)
  }
  sphere.param <- list(mu = NULL, Sigmainv = NULL, c = NULL)
  # Count of covariance estimates that needed regularization. FIX: the
  # original read this counter without ever initializing it, which raised
  # "object 'cnt.singular' not found" in the "ellipsoids" branch.
  cnt.singular <- 0
  if (init == "kmeans"){
    kmeans.out <- kmeans.torus(data, centers, ...)
  } else {
    # `centers` may be a count or a matrix of centers; derive J accordingly.
    J <- if (is.null(ncol(centers))) centers else ncol(centers)
    # inherits() instead of class(d) == "dist" (class() may return a vector).
    if (!inherits(d, "dist")) {stop("invalid d: d must be a distance matrix (dist object).")}
    kmeans.out <- hcluster.torus(data, J = centers, d = d, ...)
  }
  centroid <- kmeans.out$centers
  J <- nrow(centroid)
  sphere.param$mu <- centroid
  sphere.param$c <- rep(0, J)
  # Default ("homogeneous-circular"): identity inverse covariance everywhere.
  for(j in 1:J){
    sphere.param$Sigmainv[[j]] <- diag(p)
  }
  if (type == "heterogeneous-circular"){
    # Per-cluster isotropic variance estimated from the within-cluster SS.
    for(j in 1:J){
      nj <- kmeans.out$size[j]
      pi_j <- nj / n
      sigma_j <- ifelse(kmeans.out$size[j] <= 1,
                        1e-6, kmeans.out$withinss[j] / (nj * p))
      sphere.param$c[j] <- 2 * log(pi_j) - p * log(sigma_j)
      sphere.param$Sigmainv[[j]] <- diag(p) / sigma_j
    }
  }
  else if (type == "ellipsoids") {
    # One-pass per-cluster covariance with a three-stage singularity guard:
    # diagonalize, then (optionally) scaled identity, then 1e-6 * identity.
    for (j in 1:J){
      nj <- kmeans.out$size[j]
      pi_j <- nj / n
      dat.j <- data[kmeans.out$membership == j, ]
      z <- tor.minus(dat.j, sphere.param$mu[j, ])
      S <- t(z) %*% z / nrow(z)
      if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
        S <- diag(diag(S))
      }
      if (additional.condition){
        if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
          cnt.singular <- cnt.singular + 1
          S <- sum(S) / p * diag(p)
        }
      }
      if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
        S <- 1e-6 * diag(p)
      }
      sphere.param$Sigmainv[[j]] <- solve(S)
      pi_j <- ifelse(sum(kmeans.out$membership == j) == 0,
                     1e-6, sum(kmeans.out$membership == j) / n)
      sphere.param$c[j] <- 2 * log(pi_j) - log(det(S))
    }
  }
  else if (type == "general"){
    # Same one-pass estimate as "ellipsoids" ...
    for (j in 1:J){
      nj <- kmeans.out$size[j]
      pi_j <- nj / n
      z <- tor.minus(data[kmeans.out$membership == j, ], sphere.param$mu[j, ])
      S <- t(z) %*% z / nrow(z)
      if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
        S <- diag(diag(S))
      }
      if (additional.condition){
        if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
          S <- sum(S) / p * diag(p)
        }
      }
      if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
        S <- 1e-6 * diag(p)
      }
      sphere.param$Sigmainv[[j]] <- solve(S)
      pi_j <- ifelse(sum(kmeans.out$membership == j) == 0,
                     1e-6, sum(kmeans.out$membership == j) / n)
      sphere.param$c[j] <- 2 * log(pi_j) - log(det(S))
    }
    # ... followed by iterative hard-assignment refinement (this loop runs
    # for type == "general" only, matching the original control flow).
    param.seq <- unlist(sphere.param)
    if (verbose){
      cat("ellip.kmeans.torus: fitting appr. mixture, J = ", J, ", option = ", type, ".", sep = "")
    }
    cnt <- 1
    wmat <- ehatj <- matrix(0, n, J)
    while(TRUE){
      cnt <- cnt + 1
      if(verbose){if (cnt %% 5 == 0){cat(".")}}
      # Hard assignment: each point goes to the cluster maximizing ehat.
      ehatj <- ehat.eval(data, sphere.param)
      for(j in 1:J){ wmat[, j] <- max.col(ehatj, ties.method = "first") == j }
      # Recompute cluster means (angular weighted means; empty cluster -> 0).
      wmat.mul <- apply(wmat, 2, function(x){
        dat.j <- data[x == 1, ]
        nj <- length(dat.j) / p
        if(nj > 0){
          return(wtd.stat.ang(dat.j, w = rep(1, nj) / nj)$Mean)
        } else { return(rep(0, p)) }
      })
      sphere.param$mu <- t(wmat.mul)
      # Re-estimate covariances with the same singularity guards as above.
      for (j in 1:J){
        z <- tor.minus(data[wmat[, j] == 1, ], sphere.param$mu[j, ])
        S <- t(z) %*% z / nrow(z)
        if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
          S <- diag(diag(S))
        }
        if (additional.condition){
          if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
            S <- sum(S) / p * diag(p)
          }
        }
        if (det(S) < THRESHOLD || sum(is.na(S)) != 0){
          S <- 1e-6 * diag(p)
        }
        sphere.param$Sigmainv[[j]] <- solve(S)
        pi_j <- ifelse(sum(wmat[, j]) == 0, 1e-6, sum(wmat[, j]) / n)
        sphere.param$c[j] <- 2 * log(pi_j) - log(det(S))
      }
      # Stop on parameter convergence or when the iteration cap is reached.
      diff <- sum((param.seq - unlist(sphere.param))^2, na.rm = TRUE)
      param.seq <- unlist(sphere.param)
      if (cnt >= maxiter | diff < THRESHOLD){
        if(verbose){
          cat("Done")
          cat("\n")
        }
        break}
    }
    # Hard-assignment log-likelihood of the fitted approximate mixture.
    sphere.param$loglkhd <- 0.5 * sum(do.call(pmax,
                                              as.data.frame(ehat.eval(data, sphere.param)))) - n * p * log(2 * pi) / 2
    # Clusters that fell back to the 1e-6*I covariance have Sigmainv = 1e6*I;
    # the exact float comparison works because 1e6 is assigned verbatim.
    sphere.param$singular <- c()
    for (j in 1:J){
      if (sphere.param$Sigmainv[[j]][1, 1] == 1e+6){
        sphere.param$singular <- c(sphere.param$singular, j)
      }
    }
  }
  return(sphere.param)
}
# Precompute trinomial probability lookup tables for all (x12, x21) pairs.
#
# N     - total number of trials.
# int   - grid of nuisance-parameter values at which probabilities are
#         evaluated (passed as both p12 and p21 to trinom()).
# delta - difference parameter forwarded to trinom().
#
# Returns a list of length N + 1; element x12 + 1 is an (N + 1) x length(int)
# matrix whose row (x21 + 1) holds trinom(x12, x21, N, int, int, delta),
# with rows for infeasible x21 (> N - x12) left as zeros.
trinomCalc <-
function(N, int, delta) {
  # Preallocate the outer list instead of growing it with c() in the loop.
  lookupArray <- vector("list", N + 1)
  for (x12 in 0:N){
    lookupArrayTemp <- matrix(0, nrow = N + 1, ncol = length(int))
    for (x21 in 0:(N - x12)) {
      # Row index x21 + 1 replaces the original manually-incremented counter.
      lookupArrayTemp[x21 + 1, ] <- trinom(x12 = x12, x21 = x21, n = N, p12 = int, p21 = int, delta = delta)
    }
    lookupArray[[x12 + 1]] <- lookupArrayTemp
  }
  return(lookupArray)
}
# Internal plot method for multlcmm objects: draw the estimated link
# function of every outcome on a common latent-process axis, each with its
# own (colored) y-axis. Extra graphical parameters arrive through `...`
# and are dispatched to matplot/axis/legend as appropriate.
#
# x          - a fitted "multlcmm" object (must have converged: conv 1/2/3).
# legend.loc - legend placement keyword, as accepted by legend().
# legend     - legend labels; NULL suppresses the legend.
# add        - if TRUE, add lines to an existing plot instead of creating one.
.plotlinkfunctionmult <- function(x,legend.loc="topleft",legend=x$Ynames,add=FALSE,...)
{
if(missing(x)) stop("The argument x should be specified")
if (!inherits(x, "multlcmm")) stop("use only with \"multlcmm\" objects")
if(is.na(as.logical(add))) stop("add should be TRUE or FALSE")
# Only plot when the estimation converged (conv codes 1, 2, 3).
if(x$conv %in% c(1,2,3))
{
ny <- length(x$Ynames)
# Split `...` into axis switches and the remaining graphical parameters.
dots <- list(...)
plot.axes <- list(axes=TRUE,yaxt="s",xaxt="s")
plot.axes[names(dots[c("axes","yaxt","xaxt")])] <- dots[c("axes","yaxt","xaxt")]
if(plot.axes$axes==FALSE) plot.axes[c("yaxt","xaxt")] <- "n"
dots <- dots[setdiff(names(dots),c("ylim","ylab","yaxt","x","y","log","xaxt","axes"))]
# Each recognized parameter is consumed from `dots` and given a default.
if(length(list(...)$main))
{
title1 <- as.character(list(...)$main)
dots <- dots[setdiff(names(dots),"main")]
}
else title1 <- "Estimated link functions"
if(length(list(...)$col))
{
color <- as.vector(list(...)$col)
dots <- dots[-which(names(dots)=="col")]
}
else color <- rainbow(ny)
color <- rep(color,length.out=ny)
if(length(list(...)$type))
{
type1 <- list(...)$type
dots <- dots[-which(names(dots)=="type")]
}
else type1 <- "l"
if(length(list(...)$lty))
{
lty1 <- dots$lty
dots <- dots[-which(names(dots)=="lty")]
}
else lty1 <- 1
if(length(list(...)$xlab))
{
xlab1 <- as.character(list(...)$xlab)
dots <- dots[setdiff(names(dots),"xlab")]
}
else xlab1 <- "Latent process"
if(length(list(...)$frame.plot))
{
frame.plot1 <- list(...)$frame.plot
dots <- dots[setdiff(names(dots),"frame.plot")]
}
else frame.plot1 <- FALSE
if(length(list(...)$box.lty))
{
box.lty1 <- as.integer(list(...)$box.lty)
dots <- dots[setdiff(names(dots),"box.lty")]
}
else box.lty1 <- 0
if(length(list(...)$inset))
{
inset1 <- list(...)$inset
dots <- dots[setdiff(names(dots),"inset")]
}
else inset1 <- c(0.05,0.05)
if(length(list(...)$cex.axis))
{
cex.axis1 <- list(...)$cex.axis
dots <- dots[setdiff(names(dots),"cex.axis")]
}
else cex.axis1 <- 0.8
if(length(list(...)$mar))
{
mar1 <- list(...)$mar
dots <- dots[setdiff(names(dots),"mar")]
}
else
{
# Widen the left/right margins when multiple y-axes will be drawn.
if(plot.axes$yaxt!="n" )
mar1 <- c(5,ny+1,2,ny+1)+0.2
else
mar1 <- c(5,4,2,4)+0.2
}
# Vertical plotting range: number of estimation points when creating a new
# plot; recovered from the current device coordinates when add = TRUE
# (the 26/27 factors presumably undo the default 4% axis expansion of
# yaxs = "r" -- TODO confirm).
if(!isTRUE(add))
{
nsim <- length(x$estimlink[,1])
}
else
{
if(par("yaxs")=="r")
{
a <- (26*par("usr")[3])/27 + par("usr")[4]/27
b <- (par("usr")[3]+26*par("usr")[4])/27
nsim <- b-a+1
}
if(par("yaxs")=="i")
{
nsim <- par("usr")[4]-par("usr")[3]+1
}
}
# Map a value of outcome yk onto the shared [1, nsim] plotting scale.
loc.grad <- function(y.grad,yk)
{
(nsim*(y.grad-min(x$linknodes[1:nbnodes[yk],yk]))-y.grad+max(x$linknodes[1:nbnodes[yk],yk]))/(max(x$linknodes[1:nbnodes[yk],yk])-min(x$linknodes[1:nbnodes[yk],yk]))
}
# Two nodes per outcome except for spline links (linktype 2).
nbnodes <- rep(2,ny)
nbnodes[which(x$linktype==2)] <- x$nbnodes
# Restore the caller's margins on exit.
oldmar <- par("mar")
on.exit(par(mar=oldmar))
par(mar=mar1)
# Whitelist of parameters forwarded to matplot/matlines.
names.plot <- c("adj","ann","asp","axes","bg","bty","cex","cex.axis","cex.lab","cex.main","cex.sub","col","col.axis",
"col.lab","col.main","col.sub","crt","err","family","fig","fin","font","font.axis","font.lab","font.main","font.sub",
"frame.plot","lab","las","lend","lheight","ljoin","lmitre","lty","lwd","mai","main","mar","mex","mgp","mkh","oma",
"omd","omi","pch","pin","plt","ps","pty","smo","srt","sub","tck","tcl","type","usr","xaxp","xaxs","xaxt","xlab",
"xlim","xpd","yaxp","yaxs","yaxt","ylab","ylbias","ylim")
dots.plot <- dots[intersect(names(dots),names.plot)]
# Odd columns of estimlink hold the outcome values, even columns the
# latent-process values; rescale each outcome to the common y range.
loc.y <- x$estimlink[,2*(1:ny)-1,drop=FALSE]
for(yk in 1:ny)
{
loc.y[,yk] <- loc.grad(x$estimlink[,2*yk-1],yk)
}
if(!isTRUE(add))
{
do.call("matplot",c(dots.plot,list(x=x$estimlink[,2*(1:ny)],y=loc.y,type=type1,col=color,axes=FALSE,ylim=c(1,nsim),xlab=xlab1,ylab="",main=title1,lty=lty1)))
names.axis <- c("lwd","lwd.ticks","hadj","padj","cex.axis","font.axis",
"xaxp","yaxp","tck","tcl","las","xpd","cex.axis")
dots.axis <- dots[intersect(names(dots),names.axis)]
if(plot.axes$xaxt=="s") do.call("axis",c(dots.axis,list(side=1,col=1,cex.axis=cex.axis1)))
# First outcome's axis on the left, in its line color.
y.grad <- pretty(min(x$linknodes[1:nbnodes[1],1]):max(x$linknodes[1:nbnodes[1],1]))
y.grad[1] <- round(min(x$linknodes[1:nbnodes[1],1]),2)
y.grad[length(y.grad)] <- round(max(x$linknodes[1:nbnodes[1],1]),2)
if(plot.axes$yaxt=="s") do.call("axis",c(dots.axis,list(side=2,at=loc.grad(y.grad,1),labels=y.grad,col=color[1],col.axis=color[1],cex.axis=cex.axis1)))
if(ny>1)
{
# Remaining outcomes alternate right/left, pushed outward by `line`.
for (i in 2:ny)
{
y.grad <- pretty(min(x$linknodes[1:nbnodes[i],i]):max(x$linknodes[1:nbnodes[i],i]))
y.grad[1] <- round(min(x$linknodes[1:nbnodes[i],i]),2)
y.grad[length(y.grad)] <- round(max(x$linknodes[1:nbnodes[i],i]),2)
if(plot.axes$yaxt=="s") do.call("axis",c(dots.axis,list(side=ifelse(i%%2==0,4,2),at=loc.grad(y.grad,i),labels=y.grad,col=color[i],col.axis=color[i],cex.axis=cex.axis1,line=(round((i+0.1)/2)-1)*2)))
}
}
}
else
{
do.call("matlines",c(dots.plot,list(x=x$estimlink[,2*(1:ny)],y=loc.y,type=type1,col=color,axes=FALSE,lty=lty1)))
}
# Whitelist of parameters forwarded to legend().
names.legend <- c("fill","border","lty","lwd","pch","angle","density","bg","box.lwd",
"box.lty","box.col","pt.bg","cex","pt.cex","pt.lwd","xjust","yjust","x.intersp","y.intersp","adj","text.width",
"text.col","text.font","merge","trace","plot","ncol","horiz","title","xpd","title.col","title.adj","seg.len")
dots.leg <- dots[intersect(names(dots),names.legend)]
if(!is.null(legend)) do.call("legend",c(dots.leg,list(x=legend.loc,legend=legend,col=color,box.lty=box.lty1,inset=inset1,lty=lty1)))
}
else
{
cat("Output can not be produced since the program stopped abnormally. \n")
}
}
# Regression test: every generated data name has a fetcher function in the
# icd namespace, and -- when the cached data is available and we are online
# -- the fetcher returns a data frame. Skipped entirely on fast test runs.
context("data fun")
test_that("download and parse generated data functions", {
skip_slow("checking all data fetch/download/get")
# NOTE(review): `ns` appears unused below -- possibly leftover; confirm.
ns <- asNamespace("icd")
for (b in c(
.data_names,
"icd10cm_latest"
)) {
inf <- paste("Data fun name:", b)
# A fetcher function must exist for every data name.
expect_true(.exists_in_ns(.get_fetcher_name(b)), info = inf)
# Never download during tests: require the item to already be cached.
if (.offline() || !.exists_in_cache(b)) {
skip(paste(
"Regardless of interactivity, don't download during tests and",
inf, "is not in the cache."
))
}
f <- .get_fetcher_fun(b)
expect_is(f(), "data.frame", info = inf)
}
})
# Select the decision rule minimizing the weighted misclassification risk.
#
# Obj    - an S4 object carrying slots Z (binary outcome indicator),
#          FNR.FPR (matrix with per-rule false-negative / false-positive
#          rates in columns 1 and 2) and Rules (matrix of lower/upper
#          cutoffs), plus the slots copied into the returned "Output".
# lambda - weight on false negatives relative to false positives.
#
# Prints the optimal cutoffs and invisibly returns an "Output" object.
#
# NOTE(review): slot accesses were mangled to "[email protected]" in the
# received source (email auto-linking of Obj@FNR.FPR); restored here.
# A dead `if(0){...}` debugging block was also removed.
OptimalRule <- function(Obj, lambda){
  p <- mean(Obj@Z, na.rm = TRUE)  # event prevalence
  fnr <- Obj@FNR.FPR[, 1]
  fpr <- Obj@FNR.FPR[, 2]
  # Weighted risk of each candidate rule; the minimizer is the optimal rule.
  risk <- fnr * p * lambda + fpr * (1 - p) * (1 - lambda)
  index <- which.min(risk)
  opt.rule <- Obj@Rules[index, ]
  names(opt.rule) <- c("lower.cutoff", "upper.cutoff")
  result <- new("Output", phi = Obj@phi, Z = Obj@Z, S = Obj@S, Rules = Obj@Rules,
                Nonparametric = Obj@Nonparametric, FNR.FPR = Obj@FNR.FPR, OptRule = opt.rule)
  message(paste0(utils::capture.output(opt.rule), collapse = "\n"))
  return(invisible(result))
}
# Visualize Numbers Needed for Change: plot the control-group outcome
# density (CER region) and the shifted experimental density (EER region),
# annotated with the effect size arrow and event/no-event bands.
#
# cerDataSeq - an 'erDataSeq' object (density sequence with attributes
#              'er', 'threshold', 'mean', 'sd', 'eventIfHigher').
# d          - Cohen's d effect size; NULL or 0 draws the control curve only.
# r          - correlation used to attenuate d before plotting.
# Remaining arguments control labels, colors, alphas and layout.
# Returns a ggplot object.
#
# NOTE(review): many default color arguments (and one geom_hline color near
# the end) are truncated in this source -- the hex color string literals
# beginning with '#' were presumably lost in extraction (e.g.
# `cerColor = '`). Restore them from the upstream package; the code as
# shown here is not syntactically valid. Left byte-identical on purpose.
ggNNC <- function(cerDataSeq, d = NULL,
eventDesirable = TRUE,
r = 1,
xlab = "Continuous outcome",
plotTitle = c("Numbers Needed for Change = ", ""),
theme=ggplot2::theme_bw(),
lineSize=1,
cerColor = '
eerColor = "
cerLineColor = "
eerLineColor = "
dArrowColor = "
cerAlpha = .66,
eerAlpha = .66,
xLim = NULL,
xLimAutoDensityTolerance = .001,
showLegend = TRUE,
verticalLineColor = "
desirableColor = "
desirableAlpha = .2,
undesirableColor = "
undesirableAlpha = .2,
desirableTextColor = "
undesirableTextColor = "
dArrowDistance = .04 * max(cerDataSeq$density),
dLabelDistance = .08 * max(cerDataSeq$density)) {
if (!('erDataSeq' %in% class(cerDataSeq))) {
stop("As 'erDataSeq', you must pass an object of class 'erDataSeq', such as ",
"the result of a call to function 'erDataSeq' (see ?erDataSeq for help).");
}
# Pull the experiment configuration out of the erDataSeq attributes.
eventIfHigher <- attr(cerDataSeq, 'eventIfHigher');
cer <- attr(cerDataSeq, 'er');
# Attenuate d by the reliability/correlation r, then derive the EER.
if (!is.null(d)) d <- ufs::convert.r.to.d(ufs::convert.d.to.r(d) * r);
if (!is.null(d)) {
eer <- ufs::convert.d.to.eer(d, cer,
eventDesirable=eventDesirable, eventIfHigher=eventIfHigher);
} else {
eer <- cer;
}
if (is.null(d)) d <- 0;
sd <- attr(cerDataSeq, 'sd');
cerValue <- attr(cerDataSeq, 'threshold');
meanValue <- attr(cerDataSeq, 'mean');
# The experimental density is the control density shifted by d * sd.
eerDataSeq <- cerDataSeq;
eerDataSeq$x <- eerDataSeq$x + d * sd;
newMeanValue <- meanValue + d * sd;
cerValueDensity <- cerDataSeq[cerDataSeq$x == max(cerDataSeq[cerDataSeq$x < cerValue, 'x']), 'density'];
eerValueDensity <- eerDataSeq[eerDataSeq$x == max(eerDataSeq[eerDataSeq$x < cerValue, 'x']), 'density'];
cerLabel <- paste0("CER = ", round(100*cer, 2), ifelse(d != 0, "% ", "%"));
eerLabel <- paste0("EER = ", round(100*eer, 2), "%");
nnc <- nnc(d = d, cer = cer,
eventDesirable=eventDesirable, eventIfHigher=eventIfHigher,
plot=FALSE);
if (!is.null(plotTitle)) {
if (length(plotTitle) == 2) {
plotTitle <- paste0(plotTitle[1], ceiling(nnc), " (exact: ", round(nnc, 2), ")", plotTitle[2]);
} else {
plotTitle <- paste0(plotTitle, collapse="");
}
}
# Auto-derive x limits: trim where both densities become negligible.
densityTolerance <- xLimAutoDensityTolerance * max(cerDataSeq$density);
if (meanValue < newMeanValue) {
lowestXWithDensity <- floor(max(cerDataSeq[cerDataSeq$density < densityTolerance & cerDataSeq$x < meanValue, 'x']));
highestXWithDensity <- ceiling(min(eerDataSeq[eerDataSeq$density < densityTolerance & eerDataSeq$x > newMeanValue, 'x']));
} else {
lowestXWithDensity <- floor(max(eerDataSeq[eerDataSeq$density < densityTolerance & eerDataSeq$x < newMeanValue, 'x']));
highestXWithDensity <- ceiling(min(cerDataSeq[cerDataSeq$density < densityTolerance & cerDataSeq$x > meanValue, 'x']));
}
if (is.null(xLim)) xLim <- c(lowestXWithDensity,
highestXWithDensity);
basePlot <- ggplot2::ggplot() + theme + ggplot2::xlim(xLim);
# Build the CER/EER ribbons, outlines and threshold segments; which side of
# the threshold is filled depends on eventIfHigher.
if (eventIfHigher) {
cerFill <- ggplot2::geom_ribbon(data = cerDataSeq[cerDataSeq$x > cerValue, ],
ggplot2::aes_string(x='x', ymax='density', ymin=0, fill='cerLabel'), alpha=cerAlpha);
} else {
cerFill <- ggplot2::geom_ribbon(data = cerDataSeq[cerDataSeq$x < cerValue, ],
ggplot2::aes_string(x="x", ymax="density", ymin=0, fill='cerLabel'), alpha=cerAlpha);
}
cerOutline <- ggplot2::geom_line(data=cerDataSeq,
ggplot2::aes_string(x="x", y="density"), size=lineSize, color=cerLineColor,
na.rm=TRUE);
cerLine <- ggplot2::geom_segment(ggplot2::aes_string(x="cerValue", xend="cerValue", y=0, yend="cerValueDensity"),
size=lineSize, color=verticalLineColor);
if (eventIfHigher) {
eerFill <- ggplot2::geom_ribbon(data = eerDataSeq[eerDataSeq$x > cerValue, ],
ggplot2::aes_string(x="x", ymax="density", ymin=0, fill="eerLabel"),
alpha=eerAlpha);
} else {
eerFill <- ggplot2::geom_ribbon(data = eerDataSeq[eerDataSeq$x < cerValue, ],
ggplot2::aes_string(x="x", ymax="density", ymin=0, fill="eerLabel"),
alpha=eerAlpha);
}
eerOutline <- ggplot2::geom_line(data=eerDataSeq,
ggplot2::aes_string(x="x", y="density"),
size=lineSize, color=eerLineColor,
na.rm=TRUE);
eerLine <- ggplot2::geom_segment(ggplot2::aes_string(x="cerValue", xend="cerValue", y=0, yend="eerValueDensity"),
size=lineSize, color=verticalLineColor);
# Effect-size annotation: arrow from the control mean to the shifted mean.
dArrow <- ggplot2::geom_segment(ggplot2::aes_(x = meanValue, xend = newMeanValue,
y = max(cerDataSeq$density) + dArrowDistance,
yend = max(cerDataSeq$density) + dArrowDistance),
arrow=ggplot2::arrow(length = ggplot2::unit(.02, 'npc'), ends='last', type='closed', angle=20),
size=lineSize, color=dArrowColor);
dText <- ggplot2::geom_text(ggplot2::aes_(x = mean(c(meanValue, newMeanValue)),
y = max(cerDataSeq$density) + dLabelDistance),
hjust=.5, label=paste0("d = ", round(d, 2)));
# Swap color roles depending on whether the event is the desirable outcome.
if (eventDesirable) {
eventColor <- desirableColor;
eventAlpha <- desirableAlpha;
eventTextColor <- desirableTextColor;
noEventColor <- undesirableColor;
noEventAlpha <- undesirableAlpha;
noEventTextColor <- undesirableTextColor;
} else {
eventColor <- undesirableColor;
eventAlpha <- undesirableAlpha;
eventTextColor <- undesirableTextColor;
noEventColor <- desirableColor;
noEventAlpha <- desirableAlpha;
noEventTextColor <- desirableTextColor;
}
# Colored bands below y = 0 labeling the event / no-event regions.
if (eventIfHigher) {
eventBarNoEvent <- ggplot2::geom_rect(ggplot2::aes(xmin = -Inf, xmax = cerValue, ymax = 0, ymin = -Inf),
fill=noEventColor, alpha=noEventAlpha);
eventBarEvent <- ggplot2::geom_rect(ggplot2::aes(xmin = cerValue, xmax = Inf, ymax = 0, ymin = -Inf),
fill=eventColor, alpha=eventAlpha);
eventBarNoEventText <- ggplot2::geom_text(ggplot2::aes(x = mean(c(lowestXWithDensity, cerValue)),
y = -.5*dArrowDistance,
label=paste0('No event (< ', round(cerValue, 2), ")")),
vjust=1, color = noEventTextColor);
eventBarEventText <- ggplot2::geom_text(ggplot2::aes(x = mean(c(highestXWithDensity, cerValue)),
y = -.5*dArrowDistance,
label=paste0('Event (> ', round(cerValue, 2), ")")),
vjust=1, color = eventTextColor);
} else {
eventBarNoEvent <- ggplot2::geom_rect(ggplot2::aes(xmin = cerValue, xmax = Inf, ymax = 0, ymin = -Inf),
fill=noEventColor, alpha=noEventAlpha);
eventBarEvent <- ggplot2::geom_rect(ggplot2::aes(xmin = -Inf, xmax = cerValue, ymax = 0, ymin = -Inf),
fill=eventColor, alpha=eventAlpha);
eventBarNoEventText <- ggplot2::geom_text(ggplot2::aes(x = mean(c(highestXWithDensity, cerValue)),
y = -.5*dArrowDistance,
label=paste0('No event (< ', round(cerValue, 2), ")")),
vjust=1, color = noEventTextColor);
eventBarEventText <- ggplot2::geom_text(ggplot2::aes(x = mean(c(lowestXWithDensity, cerValue)),
y = -.5*dArrowDistance,
label=paste0('Event (> ', round(cerValue, 2), ")")),
vjust=1, color = eventTextColor);
}
zeroLine <- ggplot2::geom_hline(ggplot2::aes(yintercept=0), color="
# Assemble layers; with d == 0 only the control curve is drawn.
basePlot <- basePlot +
eventBarNoEvent + eventBarEvent +
eventBarNoEventText + eventBarEventText;
if (d == 0) {
basePlot <- basePlot + cerFill + cerOutline + cerLine + zeroLine +
ggplot2::scale_fill_manual(values = c(cerColor), name="");
} else if (d>0) {
basePlot <- basePlot + eerFill + eerOutline + eerLine +
cerFill + cerOutline + cerLine;
basePlot <- basePlot + dArrow + dText + zeroLine +
ggplot2::scale_fill_manual(values = c(cerColor, eerColor), name="");
} else {
basePlot <- basePlot + eerFill + eerOutline + eerLine +
cerFill + cerOutline + cerLine;
basePlot <- basePlot + dArrow + dText + zeroLine +
ggplot2::scale_fill_manual(values = c(cerColor, eerColor), name="");
}
if (showLegend && d!=0) {
basePlot <- basePlot +
ggplot2:: guides(fill=ggplot2::guide_legend(override.aes=list(color=c(cerLineColor, eerLineColor), size=lineSize))) +
ggplot2::theme(legend.position="top");
} else {
basePlot <- basePlot + ggplot2::theme(legend.position="none");
}
if (!is.null(plotTitle) && (d!=0)) {
basePlot <- basePlot + ggplot2::ggtitle(plotTitle);
}
return(basePlot + ggplot2::xlab(xlab) + ggplot2::ylab('Density'));
}
# Build a named list of example arguments, each of which is an acceptable
# way to specify a 2-dimensional general decomposition operator
# (constant matrix, time-dependent matrix function, time-indexed lists,
# and the various DecompOp classes).
example.2DGeneralDecompOpArgs <- function() {
  # Constant 2x2 diagonal decomposition-rate matrix.
  constantMatrix <- matrix(nrow = 2, byrow = TRUE, c(-0.1, 0, 0, -0.2))
  # Time-dependent variant: the second pool's rate oscillates with t.
  timeMatrixFunc <- function(t) {
    matrix(nrow = 2, byrow = TRUE, c(-0.1, 0, 0, -0.2 * (sin(t) + 2)))
  }
  list(
    DO_matrix            = constantMatrix,
    DO_matrix_func       = timeMatrixFunc,
    DO_list_times_Array  = example.Time3DArrayList(),
    DO_list_times_Matrice = example.nestedTime2DMatrixList(),
    DO_TimeMap           = TimeMap(example.Time3DArrayList()),
    DO_ConstlinDecompOp  = example.ConstlinDecompOpFromMatrix(),
    DO_BoundLinDecompOp  = example.2DBoundLinDecompOpFromFunction(),
    DO_UnBoundLinDecompOp = example.2DUnBoundLinDecompOpFromFunction()
  )
}
refnr <- function(.data, formulas) {
  # Evaluate each formula (a character expression) in the environment of
  # `.data` and collect the results as columns of a new data frame.
  #
  # .data    -- data frame whose columns the formulas may reference.
  # formulas -- data frame with exactly the columns "Name" and "Formula";
  #             each row defines one output column.
  #
  # Returns a data frame with nrow(.data) rows and one column per formula
  # that evaluated successfully; failures are reported via message() and
  # skipped.
  stopifnot(names(formulas) == c("Name", "Formula"))
  res <- data.frame(matrix(vector(), nrow(.data), 0),
                    stringsAsFactors = FALSE)
  # seq_len() (not 1:nrow) so a zero-row `formulas` yields no iterations
  # instead of the 1:0 trap, which evaluated a bogus NA formula.
  for (i in seq_len(nrow(formulas))) {
    tryCatch({
      refined <- eval(parse(text = as.character(formulas[i, "Formula"])),
                      envir = .data)
      res[as.character(formulas[i, "Name"])] <- refined
    }, error = function(e) {
      # conditionMessage() prints the bare message with a trailing newline;
      # message(e) on an error condition omits the newline.
      message(conditionMessage(e))
    })
  }
  return(res)
}
skip_on_cran()
# Verify that base_url() both reads and sets the service URL, then restore
# the original (or point at a local filebin when LOCAL_FILEBIN is set).
test_that("set base URL", {
  original_base_url <- base_url()
  expect_equal(original_base_url, "https://filebin.net")
  # Fix: the original bound the string to a local variable named `base_url`,
  # shadowing the accessor function of the same name. Use a distinct name.
  custom_url <- "https://my-custom-filebin.net"
  expect_equal(base_url(custom_url), custom_url)
  expect_equal(base_url(), custom_url)
  if (LOCAL_FILEBIN) {
    base_url("http://localhost:8080")
  } else {
    base_url(original_base_url)
  }
})
td_create <- function(provider = getOption("taxadb_default_provider", "itis"),
                      schema = c("dwc", "common"),
                      version = latest_version(),
                      overwrite = TRUE,
                      lines = 1e5,
                      dbdir = taxadb_dir(),
                      db = td_connect(dbdir)
                      ){
  # Create (or refresh) the local taxonomic database by importing the
  # provider's archived tables into the database connection `db`.
  #
  # provider/schema/version select which archives tl_import() fetches;
  # lines is the chunk size for streaming; dbdir is returned invisibly.
  dest <- tl_import(provider, schema, version)
  tablenames <- names(dest)
  # Silence readr's progress bar during the bulk load, and restore the
  # user's setting via on.exit so it is reinstated even when unark()
  # errors part-way (the original only restored it on the success path).
  progress <- getOption("readr.show_progress")
  options(readr.show_progress = FALSE)
  on.exit(options(readr.show_progress = progress), add = TRUE)
  suppress_msg({
    arkdb::unark(dest,
                 tablenames = tablenames,
                 db_con = db,
                 lines = lines,
                 streamable_table = arkdb::streamable_readr_tsv(),
                 overwrite = overwrite,
                 # read every column as character; typing is handled later
                 col_types = readr::cols(.default = "c"))
  })
  invisible(dbdir)
}
NULL
# Backwards-compatibility wrapper: legacy name for checking whether a
# driver/connection/result handle is still usable; delegates to the DBI
# generic dbIsValid().
isIdCurrent <- function(obj) {
  dbIsValid(obj)
}
# Assert that `obj` (a DBI handle) is still valid. Returns TRUE when it is;
# otherwise stops with an "Expired <class>" error (no call in the message).
checkValid <- function(obj) {
  if (!dbIsValid(obj)) {
    stop("Expired ", class(obj), call. = FALSE)
  }
  TRUE
}
# dbIsValid() methods: ask the C layer whether the underlying MySQL handle
# is still open/usable. The driver has a single global handle; connections
# and results are identified by their @Id slot, which is passed down.
setMethod("dbIsValid", "MySQLDriver", function(dbObj) {
  .Call(rmysql_driver_valid)
})
setMethod("dbIsValid", "MySQLConnection", function(dbObj) {
  .Call(rmysql_connection_valid, dbObj@Id)
})
setMethod("dbIsValid", "MySQLResult", function(dbObj) {
  .Call(rmysql_result_valid, dbObj@Id)
})
# Clopper-Pearson ("exact") confidence interval for binomial proportions,
# via the Beta-distribution quantile formulation.
#
# dat        -- two-column structure: successes in column 1, trials in
#               column 2 (vectorized over rows).
# conf.level -- two-sided confidence level, e.g. 0.95.
#
# Returns a data.frame with columns est, lower, upper.
zclopperpearson <- function(dat, conf.level){
  successes <- dat[, 1]
  trials <- dat[, 2]
  estimate <- successes / trials
  # Split the error probability equally between the two tails.
  tailProb <- (1 - conf.level) / 2
  lower <- stats::qbeta(tailProb, successes, trials - successes + 1)
  upper <- stats::qbeta(1 - tailProb, successes + 1, trials - successes)
  data.frame(est = estimate, lower = lower, upper = upper)
}
# Plot the ordinal response curves of an ordinal logistic biplot object.
#
# olb      -- fitted ordinal biplot (provides RowCoordinates,
#             ColumnParameters, Communalities, Ncats, CategoryNames).
# A1, A2   -- which two dimensions (columns of the coordinates) to use.
# inf, sup -- plotting range passed to OrdVarCoordinates.
# Legend   -- whether each panel draws a legend.
# WhatVars -- indices of the variables to plot; defaults to all of them.
#
# Draws one panel per selected variable on a grid; graphics state is
# restored on exit via on.exit(par(oldpar)).
PlotOrdinalResponses <- function(olb, A1=1, A2=2, inf = -12, sup = 12, Legend=TRUE, WhatVars=NULL){
  oldpar <- par(no.readonly = TRUE)
  on.exit(par(oldpar))
  # Row scores and column (variable) loadings on the two chosen dimensions.
  A = olb$RowCoordinates[, c(A1, A2)]
  B = olb$ColumnParameters$coefficients[, c(A1, A2)]
  names=rownames(B)
  thresholds=olb$ColumnParameters$thresholds
  n = dim(A)[1]
  p = dim(olb$ColumnParameters$coefficients)[1]
  if (is.null(WhatVars)) WhatVars=1:p
  # Grid layout: roughly square, nf rows x nc columns.
  nf = ceiling(sqrt(length(WhatVars)))
  nc = ceiling(length(WhatVars)/nf)
  olb$Communalities=round(olb$Communalities, digits=3)
  op <- par(mfrow=c(nf,nc))
  # OrdVarCoordinates() is called for its plotting side effect
  # (plotresponse=T); the returned coordinates are not used afterwards.
  # NOTE(review): label text "Comunality" is a typo in the user-facing
  # axis label -- left unchanged here to preserve output byte-for-byte.
  for (j in WhatVars)
    OrCoor=OrdVarCoordinates(tr=thresholds[j,1:(olb$Ncats[j]-1)], c(B[j, 1], B[j, 2]),
           inf = inf, sup = sup, plotresponse=T, label=names[j], labx=paste("Comunality =", olb$Communalities[j]),
           catnames=olb$CategoryNames[[j]], Legend=Legend)
}
# S3 generic: produce a ggplot2-based plot for an object; dispatches on class.
autoplot <- function (object, ...) UseMethod("autoplot")
# autoplot method for 'ten' (time/event/number-at-risk) objects.
# Builds a Kaplan-Meier-style survival plot (g1) plus a "number at risk"
# table rendered as a ggplot (g2), returned together as a "tableAndPlot"
# list. Confidence bands can be drawn as dashed step lines (type="CI") or
# shaded rectangles (type="fill"); censored times are marked with points.
autoplot.ten <- function(object,
                         ...,
                         title="Marks show times with censoring",
                         type=c("single", "CI", "fill"),
                         alpha=0.05,
                         ciLine=10,
                         censShape=3,
                         palette=c("Dark2", "Set2", "Accent", "Paired",
                                   "Pastel1", "Pastel2", "Set1", "Set3"),
                         jitter=c("none", "noEvents", "all"),
                         tabTitle="Number at risk by time",
                         xLab="Time",
                         timeTicks=c("major", "minor", "days", "months", "custom"),
                         times=NULL,
                         yLab="Survival",
                         yScale=c("perc", "frac"),
                         legend=TRUE,
                         legTitle="Group",
                         legLabs=NULL,
                         legOrd=NULL,
                         titleSize=15,
                         axisTitleSize=15,
                         axisLabSize=10,
                         survLineSize=0.5,
                         censSize=5,
                         legTitleSize=10,
                         legLabSize=10,
                         fillLineSize=0.05,
                         tabTitleSize=15,
                         tabLabSize=5,
                         nRiskSize=5) {
    stopifnot(inherits(object, "ten"))
    stopifnot(alpha > 0 & alpha < 1)
    # nc() adds the number-censored column to `object` by reference.
    nc(object)
    # Survival estimates + CIs; drop variance columns not needed for plotting.
    dt1 <- data.table::copy(ci(object))
    dt1[, c("Sv", "SCV") := NULL]
    dt1 <- merge(object[, list(cg, t, nc)],
                 dt1,
                 all.x=FALSE,
                 all.y=FALSE,
                 by=c("cg", "t"))
    if (!is.null(legOrd)) {
        stopifnot(length(unique(legOrd))==length(unique(dt1[, cg])))
        stopifnot(all(legOrd %in% dt1[, seq.int(length(cg))]))
    }
    # Prepend a synthetic row per group at t=0, S=1 so each step curve
    # starts at the origin. NOTE(review): the first row is duplicated here
    # (same expression twice) -- presumably intentional for the step
    # geometry; confirm against upstream survMisc.
    dt2 <- data.table::rbindlist(list(dt1[, .SD[1, ], by=cg],
                                      dt1[, .SD[1, ], by=cg]))
    dt2[, c("S", "lower", "upper") := list(1), by=cg]
    dt2[seq.int(unique(dt2$cg)), c("t", "nc") := list(0L)]
    dt1 <- data.table::rbindlist(list(dt2, dt1))
    # Optional jitter so overlapping curves remain distinguishable.
    jitter <- match.arg(jitter)
    if (jitter=="noEvents") {
        dt1[, s1 := sum(n), by=list(cg)]
        dt1[s1==0, S := S + (stats::runif(1, 0.01, 0.05)), by=cg]
    }
    if(jitter=="all"){
        dt1[, S := S + (stats::runif(1, 0.01, 0.05)), by=cg]
        dt1[, t := abs(jitter(t, factor=0.5))]
    }
    # Group labels: abbreviated ids or long names, depending on how the
    # 'ten' object was built.
    if (attr(object, "abbNames")) {
        na1 <- attr(object, "longNames")[, id]
        abbFn <- identity
    } else {
        na1 <- attr(object, "longNames")[, longName]
        abbFn <- as.integer
    }
    if (is.null(legLabs)) {
        dt1[, "cg" := factor(cg, labels=na1)]
    } else {
        stopifnot(length(legLabs)==length(unique(object$cg)))
        dt1[, "cg" := factor(cg, labels=legLabs)]
    }
    if (is.null(legOrd)) legOrd <- dt1[, seq.int(levels(cg))]
    # Main survival step plot.
    g1 <- ggplot(data=dt1, aes(group=cg, color=cg, fill=cg)) +
        geom_step(aes(x=t, y=S), direction="hv", size=survLineSize)
    type <- match.arg(type)
    if (type=="CI") {
        g1 <- g1 +
            geom_step(aes(x=t, y=upper),
                      direction="hv", linetype=ciLine, alpha=alpha) +
            geom_step(aes(x=t, y=lower),
                      direction="hv", linetype=ciLine, alpha=alpha)
    }
    if (type=="fill") {
        # One rectangle per (S, group) horizontal segment, spanning from
        # this segment's start time (minT) to the next segment's start (maxT).
        dt2 <- dt1[, list(l=unique(lower),
                          u=unique(upper),
                          minT=as.numeric(min(t)),
                          t=as.numeric(t)
                          ), by=list(S, cg)]
        dt2[, "maxT" := c(minT[2:length(minT)], Inf), by=cg]
        dt1 <- merge(dt1, dt2, by=c("t", "S", "cg"), all.y=TRUE)
        dt1 <- dt1[order(cg)]
        g1 <- g1 + geom_rect(data=dt1, aes(x=NULL, y=NULL,
                                           ymax=S, ymin=l,
                                           xmax=maxT, xmin=minT,
                                           color=cg, group=cg, fill=cg),
                             alpha=alpha, size=fillLineSize) +
            geom_rect(data=dt1, aes(x=NULL, y=NULL,
                                    ymax=u, ymin=S,
                                    xmax=maxT, xmin=minT,
                                    color=cg, group=cg, fill=cg),
                      alpha=alpha, size=fillLineSize)
    }
    # Mark times at which one or more observations were censored.
    if (any(dt1[, nc >= 1])) {
        g1 <- g1 + geom_point(data=dt1[nc >= 1, ],
                              aes(x=t, y=S),
                              shape=censShape, size=censSize)
    }
    palette <- match.arg(palette)
    g1 <- g1 + scale_color_brewer(type="qual",
                                  breaks=dt1[, levels(cg)[legOrd]],
                                  palette=palette,
                                  guide=guide_legend(
                                      title=legTitle)) +
        scale_fill_brewer(type="qual",
                          breaks=dt1[, levels(cg)[legOrd]],
                          palette=palette,
                          guide=guide_legend(
                              title=legTitle))
    g1 <- g1 + ggtitle(title)
    yScale <- match.arg(yScale)
    if (yScale=="frac") {
        g1 <- g1 + scale_y_continuous(yLab)
    } else {
        # Percent labels derived from the built plot's major breaks.
        # NOTE(review): reaches into ggplot_build() internals; fragile
        # across ggplot2 versions.
        y1 <- ggplot_build(g1)$panel$ranges[[1L]]$y.major_source
        g1 <- g1 + scale_y_continuous(yLab,
                                      breaks=y1,
                                      labels=paste0(y1 * 100, "%"))
    }
    timeTicks <- match.arg(timeTicks)
    x1 <- get("range", envir=get("range", envir=layer_scales(g1)$x))
    # x-axis tick positions; "custom" signals NaN so `times` is used below.
    times1 <- switch(EXPR=timeTicks,
                     major=ggplot_build(g1)$layout$panel_ranges[[1]]$x.major_source,
                     minor=ggplot_build(g1)$layout$panel_ranges[[1]]$x.minor_source,
                     custom=NaN,
                     days=seq(from=min(x1), to=max(x1), by=7L),
                     months=seq(from=min(x1), to=max(x1), by=12L))
    if (is.nan(times1[1])) times1 <- times
    g1 <- g1 +
        scale_x_continuous(name=xLab,
                           breaks=times1)
    g1 <- g1 +
        theme(title=element_text(size=titleSize),
              legend.text=element_text(size=legLabSize),
              legend.title=element_text(size=legTitleSize),
              axis.text=element_text(size=axisLabSize),
              axis.title=element_text(size=axisTitleSize))
    # Number-at-risk at each tick time, per group, via a rolling join.
    dt3 <- data.table::data.table("t"=times1)
    cg1 <- seq.int(attr(object, "ncg"))
    tnc1 <- lapply(cg1, FUN=function(cg1) {
        r1 <- data.table::setkey(object[abbFn(cg)==cg1, ncg, by=t], t)
        r1[dt3, roll=-Inf][, ncg]
    })
    tnc1 <- data.table::data.table(
        "t"=rep(times1, attr(object, "ncg")),
        "n"=unlist(tnc1),
        "cg"=as.factor(rep(na1, each=length(times1))))
    # Risk table rendered as text on a ggplot so it aligns with g1.
    # NOTE(review): "keywidht" is a misspelled argument name (silently
    # absorbed by guide_legend's ...); kept as-is to preserve behavior.
    g2 <- ggplot(data=tnc1, aes(x=t, y=cg, shape=cg)) +
        geom_point(size=0) +
        geom_text(aes(label=n), color=1, size=nRiskSize) +
        scale_x_continuous(name=xLab,
                           limits=c(0, max(dt1[, t])),
                           breaks=times1) +
        scale_y_discrete(name=legTitle,
                         breaks=levels(tnc1$cg),
                         labels=levels(tnc1$cg)) +
        ggtitle(tabTitle) +
        theme(axis.text=element_text(size=axisLabSize),
              axis.title=element_text(size=axisTitleSize),
              plot.title=element_text(size=tabTitleSize),
              legend.title=element_text(size=tabLabSize),
              legend.text=element_text(size=tabLabSize)) +
        guides(shape=guide_legend(title=legTitle,
                                  keywidht=tabLabSize,
                                  keyheight=tabLabSize))
    if (!legend) {
        g1 <- g1 + theme(legend.position="none")
        g2 <- g2 + theme(legend.position="none")
    }
    res1 <- list("table"=g2,
                 "plot"=g1)
    class(res1) <- c("tableAndPlot", "list")
    return(res1)
}
# autoplot method for stratified 'ten' objects: calls autoplot() on each
# stratum and titles each resulting plot with the stratum's name (or a
# user-supplied title recycled across strata). Returns a
# "stratTableAndPlot" list of tableAndPlot objects.
autoplot.stratTen <- function(object,
                              ...,
                              title=NULL,
                              type=c("single", "CI", "fill"),
                              alpha=0.05,
                              ciLine=10,
                              censShape=3,
                              palette=c("Dark2", "Set2", "Accent", "Paired",
                                        "Pastel1", "Pastel2", "Set1", "Set3"),
                              jitter=c("none", "noEvents", "all"),
                              tabTitle="Number at risk by time",
                              xLab="Time",
                              timeTicks=c("major", "minor", "days", "months", "custom"),
                              times=NULL,
                              yLab="Survival",
                              yScale=c("perc", "frac"),
                              legend=TRUE,
                              legTitle="Group",
                              legLabs=NULL,
                              legOrd=NULL,
                              titleSize=15,
                              axisTitleSize=15,
                              axisLabSize=10,
                              survLineSize=0.5,
                              censSize=5,
                              legTitleSize=10,
                              legLabSize=10,
                              fillLineSize=0.05,
                              tabTitleSize=15,
                              tabLabSize=5,
                              nRiskSize=5) {
    ## Bug fix: censShape and legend are now forwarded to autoplot();
    ## previously they were hard-coded to 3 and TRUE respectively, so the
    ## user's arguments were silently ignored.
    res1 <- lapply(object,
                   autoplot,
                   title=title,
                   type=type,
                   alpha=alpha,
                   ciLine=ciLine,
                   censShape=censShape,
                   palette=palette,
                   jitter=jitter,
                   tabTitle=tabTitle,
                   xLab=xLab,
                   timeTicks=timeTicks,
                   times=times,
                   yLab=yLab,
                   yScale=yScale,
                   legend=legend,
                   legTitle=legTitle,
                   legLabs=legLabs,
                   legOrd=legOrd,
                   titleSize=titleSize,
                   axisTitleSize=axisTitleSize,
                   axisLabSize=axisLabSize,
                   survLineSize=survLineSize,
                   censSize=censSize,
                   legTitleSize=legTitleSize,
                   legLabSize=legLabSize,
                   fillLineSize=fillLineSize,
                   tabTitleSize=tabTitleSize,
                   tabLabSize=tabLabSize,
                   nRiskSize=nRiskSize)
    ## Default titles come from the stratum names (abbreviated or long,
    ## matching how the object was built); otherwise recycle the given one.
    if (is.null(title)) {
        if (attr(object, "abbNames")) {
            title <- attr(object, "longNames")[, id]
        } else {
            title <- attr(object, "longNames")[, longName]
        }
    } else {
        title <- rep(title, length(object))
    }
    ## Element [[2]] of each result is the survival plot ("plot").
    for (i in seq.int(length(object))){
        res1[[i]][[2]] <- res1[[i]][[2]] + ggplot2::ggtitle(title[i])
    }
    data.table::setattr(res1, "class", c("stratTableAndPlot", class(res1)))
    return(res1)
}
# autoplot method for survival::survfit objects. Mirrors autoplot.ten:
# returns a "tableAndPlot" list holding the survival curve plot ("plot")
# and a number-at-risk table rendered with ggplot ("table"). Optionally
# annotates a log-rank test p-value (pVal=TRUE).
autoplot.survfit <- function(object,
                             ...,
                             title="Marks show times with censoring",
                             type=c("single", "CI", "fill"),
                             alpha=0.05,
                             ciLine=10,
                             censShape=3,
                             palette=c("Dark2", "Set2", "Accent", "Paired",
                                       "Pastel1", "Pastel2", "Set1", "Set3"),
                             jitter=c("none", "noEvents", "all"),
                             tabTitle="Number at risk by time",
                             xLab="Time",
                             timeTicks=c("major", "minor", "weeks", "months", "custom"),
                             times=NULL,
                             yLab="Survival",
                             yScale=c("perc", "frac"),
                             legend=TRUE,
                             legLabs=NULL,
                             legOrd=NULL,
                             legTitle="Group",
                             titleSize=15,
                             axisTitleSize=15,
                             axisLabSize=10,
                             survLineSize=0.5,
                             censSize=5,
                             legTitleSize=10,
                             legLabSize=10,
                             fillLineSize=0.05,
                             tabTitleSize=15,
                             tabLabSize=5,
                             nRiskSize=5,
                             pVal=FALSE,
                             sigP=1,
                             pX=0.1,
                             pY=0.1) {
    stopifnot(inherits(object, "survfit"))
    # legLabs must match the strata (when strata exist).
    if (!is.null(legLabs) &! length(object$strata)==0){
        stopifnot(length(legLabs)==length(object$strata))
    }
    if (is.null(legLabs)) {
        stNames <- names(object$strata)
    } else {
        stNames <- legLabs
    }
    # Build the per-observation group vector: a single group when there
    # are no strata, otherwise repeat each stratum name for its length.
    if (is.null(object$strata)) {
        if (is.null(legLabs)) {
            st1 <- as.factor(rep(1, length(object$time)))
        } else {
            stopifnot(length(legLabs)==1)
            st1 <- as.factor(rep(legLabs, length(object$time)))
        }
    } else {
        st1 <- unlist(sapply(1:length(object$strata),
                             function (i) rep(stNames[i], object$strata[i])))
    }
    # Flatten the survfit object into one data.table.
    dt1 <- data.table::data.table(time=object$time,
                                  n.risk=object$n.risk,
                                  n.event=object$n.event,
                                  n.censor=object$n.censor,
                                  surv=object$surv,
                                  upper=object$upper,
                                  lower=object$lower,
                                  cg=as.factor(st1))
    # Prepend synthetic time-0 rows (surv = 1) so the step curves start at
    # the origin. NOTE(review): the first row per group is taken twice --
    # presumably intentional for the step geometry; confirm upstream.
    dt2 <- data.table::rbindlist(list(dt1[, .SD[1, ], by=cg],
                                      dt1[, .SD[1, ], by=cg]))
    dt2[, c("n.event", "n.censor") := list(0), by=cg]
    dt2[, c("surv", "upper", "lower") := list(1), by=cg]
    dt2[seq(length(unique(dt2$cg))), "time" := (0L) ]
    data.table::setcolorder(dt2, names(dt1))
    dt1 <- data.table::rbindlist(list(dt2, dt1))
    if (is.null(legOrd)) legOrd <- dt1[, seq.int(levels(cg))]
    # Optional jitter so overlapping curves remain distinguishable.
    jitter <- match.arg(jitter)
    if (jitter=="noEvents") {
        dt1[, s1 := sum(n.event), by=list(cg)]
        dt1[s1==0, surv := surv+(runif(1, 0.01, 0.05)), by=cg]
    }
    if(jitter=="all"){
        dt1[, surv := surv+(runif(1, 0.01, 0.05)), by=cg]
    }
    dt1 <- dt1[order(cg)]
    # Main survival step plot.
    g1 <- ggplot(data=dt1, aes(group=cg, colour=cg, fill=cg)) +
        geom_step(aes(x=time, y=surv), direction="hv", size=survLineSize)
    type <- match.arg(type)
    if (type=="CI"){
        g1 <- g1 +
            geom_step(aes(x=time, y=upper),
                      direction="hv", linetype=ciLine, alpha=alpha) +
            geom_step(aes(x=time, y=lower),
                      direction="hv", linetype=ciLine, alpha=alpha)
    }
    if (type=="fill"){
        # One rectangle per horizontal segment of each curve, from the
        # segment's start (minT) to the next segment's start (maxT).
        dt2 <- dt1[, list(l=unique(lower),
                          u=unique(upper),
                          minT=as.numeric(min(time)),
                          time=as.numeric(time)
                          ), by=list(surv, cg)]
        dt2[, "maxT" := c(minT[2:length(minT)], NA), by=cg]
        dt1 <- merge(dt1, dt2, by=c("time", "surv", "cg"), all.y=TRUE)
        dt1 <- dt1[order(cg)]
        g1 <- g1 + geom_rect(data=dt1, aes(ymax=surv, ymin=l,
                                           xmax=maxT, xmin=minT,
                                           colour=cg, group=cg, fill=cg),
                             alpha=alpha, size=fillLineSize) +
            geom_rect(data=dt1, aes(ymax=u, ymin=surv,
                                    xmax=maxT, xmin=minT,
                                    colour=cg, group=cg, fill=cg),
                      alpha=alpha, size=fillLineSize)
    }
    # Mark censoring times.
    if (any(dt1$n.censor >= 1)) {
        g1 <- g1 + geom_point(data=dt1[n.censor>=1, ],
                              aes(x=time, y=surv),
                              shape=censShape, size=censSize)
    }
    palette <- match.arg(palette)
    g1 <- g1 + scale_color_brewer(type="qual",
                                  breaks=dt1[, levels(cg)[legOrd]],
                                  palette=palette,
                                  guide=guide_legend(
                                      title=legTitle))
    g1 <- g1 + scale_fill_brewer(type="qual",
                                 breaks=dt1[, levels(cg)[legOrd]],
                                 palette=palette,
                                 guide=guide_legend(
                                     title=legTitle))
    g1 <- g1 + ggtitle(title)
    yScale <- match.arg(yScale)
    if (yScale=="frac") {
        g1 <- g1 + scale_y_continuous(yLab)
    } else {
        # Percent labels from the built plot's major breaks.
        # NOTE(review): relies on ggplot_build() internals; fragile across
        # ggplot2 versions.
        y1 <- ggplot_build(g1)$panel$ranges[[1L]]$y.major_source
        g1 <- g1 + scale_y_continuous(yLab,
                                      breaks=y1,
                                      labels=paste0(y1 * 100, "%"))
    }
    timeTicks <- match.arg(timeTicks)
    x1 <- get("range", envir=get("range", envir=layer_scales(g1)$x))
    # x-axis tick positions; "custom" signals NaN so `times` is used below.
    times1 <- switch(EXPR=timeTicks,
                     major=ggplot_build(g1)$layout$panel_ranges[[1]]$x.major_source,
                     minor=ggplot_build(g1)$layout$panel_ranges[[1]]$x.minor_source,
                     custom=NaN,
                     weeks=seq(from=min(x1), to=max(x1), by=7L),
                     months=seq(from=min(x1), to=max(x1), by=12L))
    if (is.nan(times1[1])) times1 <- times
    g1 <- g1 +
        scale_x_continuous(name=xLab,
                           breaks=times1)
    g1 <- g1 +
        theme(title=element_text(size=titleSize),
              legend.text=element_text(size=legLabSize),
              legend.title=element_text(size=legTitleSize),
              axis.text = element_text(size=axisLabSize),
              axis.title = element_text(size=axisTitleSize))
    if(!legend) g1 <- g1 + theme(legend.position="none")
    # Optional log-rank test annotation; refits via the stored call.
    if (pVal & !is.null(object$strata)) {
        sd1 <- survival::survdiff(eval(object$call$formula),
                                  data=eval(object$call$data))
        p1 <- stats::pchisq(sd1$chisq,
                            length(sd1$n) - 1,
                            lower.tail=FALSE)
        p1txt <- ifelse(p1 < 0.0001,
                        "Log-rank test \n p < 0.0001",
                        paste("Log-rank test \n p =", signif(p1, sigP)))
        g1 <- g1 + annotate(geom="text",
                            x=pX * max(dt1$time),
                            y=pY,
                            label=p1txt,
                            size=legLabSize)
    }
    # Number at risk at each tick time, via summary(survfit, times=...).
    dt3 <- data.table::data.table(
        time=summary(object, times = times1, extend = TRUE)$time,
        n.risk=summary(object, times = times1, extend = TRUE)$n.risk)
    if (is.null(object$strata)) {
        dt3[, "cg" := as.factor(rep(1, length(times1)))]
    } else {
        dt3[, "cg" := summary(object, times=times1, extend=TRUE)$strata]
    }
    if(!is.null(legLabs)) dt3[, "cg" := factor(cg, labels=legLabs) ]
    # Risk table as text on a ggplot so it aligns with g1.
    # NOTE(review): "keywidht" is a misspelled argument name (silently
    # absorbed by guide_legend's ...); kept to preserve behavior.
    g2 <- ggplot(data=dt3, aes(x=time, y=cg, shape=cg)) +
        geom_point(size=0) +
        geom_text(aes(label=n.risk), colour=1, size=nRiskSize) +
        scale_x_continuous(name=xLab,
                           limits=c(0, max(object$time)),
                           breaks=times1) +
        scale_y_discrete(name=legTitle,
                         breaks=levels(dt3$cg),
                         labels=levels(dt3$cg)) +
        ggtitle(tabTitle) +
        theme(axis.text = element_text(size=axisLabSize),
              axis.title = element_text(size=axisTitleSize),
              plot.title = element_text(size=tabTitleSize),
              legend.title = element_text(size=tabLabSize),
              legend.text = element_text(size=tabLabSize)) +
        guides(shape = guide_legend(title=legTitle,
                                    keywidht=tabLabSize,
                                    keyheight=tabLabSize))
    if(!legend) g2 <- g2 + theme(legend.position = "none")
    res1 <- list("table"=g2,
                 "plot"=g1)
    class(res1) <- c("tableAndPlot", "list")
    return(res1)
}
# Declare the column names used inside data.table expressions above as
# NULL bindings so R CMD check does not flag them as undefined globals.
surv <- n.risk <- n.censor <- n.event <- upper <- lower <- NULL
.SD <- st1 <- stNames <- st <- s1 <- minT <- l <- maxT <- u <- NULL
# Compute the vector of association measures (row/column marginal logits,
# log-odds-ratio-type interactions, and their rank-based transform) for a
# log-linear probability model, and optionally the Jacobian w.r.t. the
# parameter vector `the`.
#
# the   -- parameter vector; cell probabilities are p = softmax(G %*% the).
# Model -- list of design/contrast matrices (G, R0, R1, C0, C1, J**, Cmg,
#          Cjn) and settings (lev, la = power-divergence exponent, k).
# der   -- if TRUE, also return the Jacobian Hdis.
#
# Returns list(eta, hdis) and, when der=TRUE, $Hdis.
PraD <-function(the,Model,der=FALSE){
G = Model$G; R0 = Model$R0; R1 = Model$R1; C0 = Model$C0; C1 = Model$C1
J00 = Model$J00; J01 = Model$J01; J10 = Model$J10; J11 = Model$J11
lev = Model$lev; la = Model$la; k = Model$k
Cmg = Model$Cmg; Cjn = Model$Cjn
# Cell probabilities via the log-linear parameterization (softmax of G*the).
p = c(exp(G%*%the)); p = p/sum(p);
# Row and column marginal log-odds.
Lr = log(R1%*%p)-log(R0%*%p); Lc=log(C1%*%p)-log(C0%*%p)
if(la==0){
# la == 0: classical log-odds-ratio interactions.
f11 = c(J11%*%p); f10 = c(J10%*%p); f01 = c(J01%*%p); f00 = c(J00%*%p)
Int = log(f11)-log(f10)-log(f01)+log(f00)
}else{
# la != 0: Box-Cox/power family of interaction contrasts on the
# probabilities normalized by the product of their margins.
d11 = 1/c((R1%*%p)%x%(C1%*%p)); d10 = 1/c((R1%*%p)%x%(C0%*%p))
d01 = 1/c((R0%*%p)%x%(C1%*%p)); d00 = 1/c((R0%*%p)%x%(C0%*%p))
f11 = c(J11%*%p)*d11; f10 = c(J10%*%p)*d10; f01 = c(J01%*%p)*d01; f00 = c(J00%*%p)*d00
Int = (f11^la-f10^la-f01^la+f00^la)/la;
}
eta = c(Lr,Lc,Int)
# Linear combinations of the marginal and interaction measures.
hm = Cmg%*%rbind(Lr,Lc)
hj = Cjn%*%Int
if(der){
# NOTE(review): der="TRUE" passes a character, not a logical, to Drank();
# presumably Drank() treats any non-FALSE value as TRUE -- confirm there.
out = Drank(Int,lev,k,der="TRUE")
hrk = out$fr; Drk = out$Dfr
}else{
hrk = Drank(Int,lev,k)$fr
}
hdis = c(hm,hj,hrk)
if(der){
# Jacobians of the marginal logits w.r.t. p.
Mg = rbind(R1/c(R1%*%p)-R0/c(R0%*%p),
C1/c(C1%*%p)-C0/c(C0%*%p))
if(la==0){
Dj = J11/f11-J10/f10-J01/f01+J00/f00
}else{
# Chain rule through the power transform and the margin normalization.
g11 = f11^(la-1); g10 = f10^(la-1); g01 = f01^(la-1); g00 = f00^(la-1)
D11 = d11*J11-c((J11%*%p)*d11^2)*(R1%x%c(C1%*%p)+c(R1%*%p)%x%C1)
D10 = d10*J10-c((J10%*%p)*d10^2)*(R1%x%c(C0%*%p)+c(R1%*%p)%x%C0)
D01 = d01*J01-c((J01%*%p)*d01^2)*(R0%x%c(C1%*%p)+c(R0%*%p)%x%C1)
D00 = d00*J00-c((J00%*%p)*d00^2)*(R0%x%c(C0%*%p)+c(R0%*%p)%x%C0)
Dj = g11*D11-g10*D10-g01*D01+g00*D00
}
# Final Jacobian: d p / d the = diag(p)G - p p' G (softmax derivative).
Hdis = rbind(Cmg%*%Mg, Cjn%*%Dj, Drk%*%Dj)%*%(p*G-(p%o%p)%*%G)
}
out = list(eta=eta,hdis=hdis)
if(der) out$Hdis = Hdis
return(out)
}
# Version string constant (consumers of this constant are not visible here).
version_strftime <- "0.9.2"
# Fit a multiplicative hazard (Cox-type) model in which regression
# coefficients may vary with time. Nonparametric terms get cumulative
# coefficient estimates; terms wrapped in const() are treated as
# time-constant (semiparametric fit via semicox()). Returns an object of
# class "timecox" with cumulative coefficients, variances, optional
# resampling-based tests, and the original call/design stored as
# attributes.
timecox<-function(formula=formula(data),data, weights, subset, na.action,
start.time=0,max.time=NULL,id=NULL,clusters=NULL,
n.sim=1000,residuals=0,robust=1,Nit=20,bandwidth=0.5,
method="basic",weighted.test=0,degree=1,covariance=0)
{
# sim flags: simulations require robust variance estimation below.
sim2<-0; if (n.sim==0) sim<-0 else sim<-1;
if (method!="basic") stop("Only runs the default method at the moment\n");
# Covariance, simulations and residuals all depend on robust=1; downgrade
# the corresponding options (with a note) when robust is switched off.
if (covariance==1 & robust==0) {
cat("When robust=0 no covariance computed \n");
cat("covariance set to 0\n");
covariance<-0;}
if (sim==1 & robust==0) {
cat("When robust=0, No simulations \n");
cat("n.sim set to 0\n");
n.sim<-0;}
if (residuals==1 & robust==0) {
cat("When robust=0, no martingale residuals \n");
cat("residuals set to 0\n");
residuals<-0;}
if (n.sim>0 & n.sim<50) {n.sim<-50 ; cat("Minimum 50 simulations\n");}
# Rebuild a model frame from the matched call (standard survival-package
# idiom), honouring the const()/cluster() specials.
call <- match.call()
indx <- match(c("formula", "data", "weights", "subset", "na.action",
"id"), names(call), nomatch=0)
if (indx[1] ==0) stop ("a formula argument is required")
temp <- call[c(1, indx)]
temp[[1L]] <- quote(stats::model.frame)
special <- c("const","cluster")
temp$formula <- if(missing(data)) terms(formula, special)
else terms(formula, special, data=data)
m <- eval(temp, parent.frame())
mt <- attr(m, "terms")
intercept<-attr(mt, "intercept")
Y <- model.response(m,)
if (!inherits(Y, "Surv"))
stop("Response must be a survival object")
id <- model.extract(m, "(id)")
weights <- model.weights(m)
if (!is.null(weights)) stop("timecox does not support case weights")
Terms <- terms(m)
# Split the design into nonparametric (X) and const() (Z) parts.
des<-read.design(m,Terms)
X<-des$X; Z<-des$Z; npar<-des$npar; px<-des$px; pz<-des$pz;
covnamesX<-des$covnamesX; covnamesZ<-des$covnamesZ
if(is.null(clusters)) clusters <- des$clusters
if (is.null(Z)==TRUE) XZ<-X else XZ<-cbind(X,Z);
# Breslow-style fit drops the explicit intercept column.
if (method=="breslow" && intercept==1) {
covnamesX<-covnamesX[-1]; X<-as.matrix(X[,-1]); XZ<-as.matrix(XZ[,-1]);
colnames(X)<-covnamesX; px<-px-1;}
pxz <- px + pz;
# Survival layout: entry/exit times, status, cluster structure.
survs<-read.surv(m,id,npar,clusters,start.time,max.time,model="timecox")
times<-survs$times;id<-id.call<-survs$id.cal;
clusters<-cluster.call<-survs$clusters;
time2<-survs$stop; time<-survs$start
status<-survs$status; Ntimes<-sum(status);
ldata<-list(start=survs$start,stop=survs$stop,
antpers=survs$antpers,antclust=survs$antclust);
# Evaluation grid: start.time plus the ordered event times, truncated at
# max.time (or just beyond the last event when max.time is NULL).
times<-c(start.time,time2[status==1]); times<-sort(times);
Ntimes <- Ntimes+1;
if (is.null(max.time)==TRUE) maxtimes<-max(times)+0.1 else maxtimes<-max.time;
times<-times[times<maxtimes];
# bandwidth is given as a fraction of the observation window.
bandwidth<-(maxtimes-start.time)*bandwidth;
# Initial values from an ordinary Cox fit.
if (method=="breslow")
beta<-coxph(Surv(time,time2,status)~XZ)$coef
else if (method=="basic" && intercept==1)
beta<-coxph(Surv(time,time2,status)~XZ[,-1])$coef
else beta<-coxph(Surv(time,time2,status)~XZ)$coef;
beta0<-c(0,0,beta)
if (method=="basic" && intercept==0) beta0<-c(0,beta);
# bhat: one row per grid time; column 1 holds the time itself.
bhat<-matrix(beta0,length(times),length(beta0),byrow=TRUE);
timerange<-range(times);
bhat[,1]<-times;
# Crude baseline-rate starting value.
if (method=="breslow" || intercept==1) {
bhat[,2]<-sum(status)/sum(ldata$stop-ldata$start);
if (method=="basic") bhat[,2]<-log(bhat[,2]);
}
if (npar==TRUE) {
# Fully nonparametric model: all effects time-varying.
ud<-timecoxBase(times,ldata,X,status,id,bhat,
sim=sim,antsim=n.sim,degree=degree,robust=robust,
band=bandwidth,it=Nit,method=method,retur=residuals,sim2=sim2,
weighted.test=weighted.test,covariance=covariance);
if (method=="breslow") covnamesX<-c("Cumulative Baseline",covnamesX);
# Attach human-readable dimnames to all returned components.
colnames(ud$cum)<-colnames(ud$var.cum)<-c("time",covnamesX)
if (robust==1) colnames(ud$robvar.cum)<-c("time",covnamesX)
if (sim==1) {
colnames(ud$test.procBeqC)<- c("time",covnamesX)
names(ud$conf.band)<-names(ud$pval.testBeq0)<-
names(ud$pval.testBeqC)<- names(ud$pval.testBeqC.is)<-
names(ud$obs.testBeqC.is)<-
names(ud$obs.testBeq0)<- names(ud$obs.testBeqC)<- covnamesX;
colnames(ud$sim.testBeq0)<- colnames(ud$sim.testBeqC)<-
colnames(ud$sim.testBeqC.is)<- covnamesX;
# Raw simulation paths are dropped to keep the object small.
ud$sim.testBeqC.is<-ud$sim.testBeqC<-NULL;
if (method=="breslow" && sim2==1)
names(ud$pval.testBeqC.is1)<-names(ud$pval.testBeqC.is2)<-
names(ud$obs.testBeqC.is1)<-names(ud$obs.testBeqC.is2)<- covnamesX;
}
}
else {
# Semiparametric model: const() terms get scalar estimates (gamma).
if (px==0) { stop("No nonparametric terms (needs one!)"); }
if (method=="breslow") {
gamma<-bhat[1,(px+3):(pxz+2)]; bhat<-bhat[,1:(px+2)]; }
else {
gamma<-bhat[1,(px+2):(pxz+1)]; bhat<-bhat[,1:(px+1)] }
ud<-semicox(times,ldata,X,Z,
status,id,bhat,gamma=gamma,sim=sim,antsim=n.sim,
band=bandwidth,it=Nit,method=method,retur=residuals,robust=robust,
degree=degree,weighted.test=weighted.test,covariance=covariance)
if (px>0) {
if (method=="breslow")
colnames(ud$cum)<- colnames(ud$var.cum)<-
c("time","Cumulative Baseline",covnamesX)
else
colnames(ud$cum)<- colnames(ud$var.cum)<- c("time",covnamesX)
if (robust==1) {
if (method=="breslow") colnames(ud$robvar.cum)<-
c("time","Cumulative Baseline",covnamesX) else
colnames(ud$robvar.cum)<-c("time",covnamesX); }
if (sim>=1) {
if (method=="breslow") name<-
c("time","Cumulative Baseline",covnamesX) else name<-c("time",covnamesX)
colnames(ud$test.procBeqC)<- name;
names(ud$conf.band)<- names(ud$pval.testBeq0)<-
names(ud$pval.testBeqC)<-
names(ud$pval.testBeqC.is)<- names(ud$obs.testBeqC.is)<-
names(ud$obs.testBeq0)<- names(ud$obs.testBeqC)<-
colnames(ud$sim.testBeq0)<- colnames(ud$sim.testBeqC.is)<-
colnames(ud$sim.testBeqC)<- name[-1];
ud$sim.testBeqC.is<-ud$sim.testBeqC<-NULL;
}
}
rownames(ud$gamma)<-c(covnamesZ);
colnames(ud$gamma)<-"estimate";
colnames(ud$var.gamma)<-c(covnamesZ);
rownames(ud$var.gamma)<-c(covnamesZ);
colnames(ud$robvar.gamma)<-c(covnamesZ);
rownames(ud$var.gamma)<-c(covnamesZ);
}
# Stash call/design information for the print/summary/plot methods.
ud$method<-method
attr(ud,"Call")<-call;
class(ud)<-"timecox"
attr(ud,"Formula")<-formula;
attr(ud,"id")<-id.call;
attr(ud,"cluster")<-cluster.call;
attr(ud,"start.time") <- start.time
attr(ud,"start")<- time;
attr(ud,"stop")<- time2;
attr(ud,"status")<-status;
attr(ud,"time2")<-time2;
attr(ud,"residuals")<-residuals;
attr(ud,"max.time")<-max.time;
attr(ud,"stratum")<-0;
ud$call<-call
return(ud);
}
# plot() method for "timecox" fits. By default draws the cumulative
# coefficient estimates with the requested confidence bands (delegating to
# plot.cums()); with score=TRUE it plots the observed score processes
# against resampled ones instead (plotScore()).
"plot.timecox" <- function (x,..., pointwise.ci=1,
hw.ci=0, sim.ci=0, robust.ci=0, col=NULL, specific.comps=FALSE,level=0.05,
start.time = 0,
stop.time = 0, add.to.plot=FALSE, mains=TRUE, xlab="Time",
ylab ="Cumulative coefficients",score=FALSE)
{
object <- x; rm(x);
if (!inherits(object,'timecox') ) stop ("Must be output
from Cox-Aalen function")
if (score==FALSE) plot.cums(object,
pointwise.ci=pointwise.ci,
hw.ci=hw.ci,
sim.ci=sim.ci, robust.ci=robust.ci,col=col,
specific.comps=specific.comps,level=level,
start.time = start.time, stop.time = stop.time,
add.to.plot=add.to.plot,
mains=mains, xlab=xlab, ylab =ylab)
else plotScore(object, specific.comps=specific.comps,
mains=mains,
xlab=xlab,ylab =ylab);
}
# print() method for "timecox" fits: a short description listing the
# nonparametric terms, any const() (parametric) terms, and the call.
"print.timecox"<-
function (x,...)
{
timecox.object <- x; rm(x);
if (!inherits(timecox.object, 'timecox'))
stop ("Must be an timecox.object")
# A semiparametric fit is recognized by the presence of $gamma.
if (is.null(timecox.object$gamma)==TRUE) semi<-FALSE else semi<-TRUE
cat("Multiplicative Hazard Model \n\n")
cat(" Nonparametric terms : "); cat(colnames(timecox.object$cum)[-1]);
cat(" \n");
if (semi) {
cat(" Parametric terms :  "); cat(rownames(timecox.object$gamma));
cat(" \n"); }
cat("   \n");
cat("  Call: \n")
dput(attr(timecox.object, "Call"))
cat("\n")
}
# summary() method for "timecox" fits: prints the time-varying-effect
# tests (via timetest()), extra baseline-correction tests for
# nonparametric breslow fits, the parametric coefficient table for
# semiparametric fits, and the original call.
"summary.timecox" <-
function (object,..., digits = 3)
{
timecox.object <- object; rm(object);
obj<-timecox.object
if (!inherits(timecox.object, 'timecox'))
stop ("Must be an timecox.object")
# Semiparametric fits carry a $gamma component.
if (is.null(timecox.object$gamma)==TRUE) semi<-FALSE else semi<-TRUE
cat("Multiplicative Hazard Model \n\n")
timetest(obj,digits=digits);
# Extra sup/integrated tests (without baseline correction) exist only for
# nonparametric breslow fits with the secondary simulations enabled.
if (obj$method=="breslow" && (!semi) && (obj$obs.testBeqC.is1!=FALSE)) {
testsupBL<-cbind(obj$obs.testBeqC.is1,obj$pval.testBeqC.is1)
testssBL<-cbind(obj$obs.testBeqC.is2,obj$pval.testBeqC.is2)
cat("Tests without baseline correction\n")
cat("BL(t) = int_0^t lambda_0(t) b(t) dt, L(t) = int_0^t lambda_0(t) dt \n")
colnames(testsupBL)<-c("sup| BL(t) - (t/tau)B(tau) L(t)|","p-value H_0: B(t)=b t")
colnames(testssBL)<-c("int (BL(t)-(t/tau)B(tau) L(t))^2dt","p-value H_0: B(t)=b t")
prmatrix(signif(testsupBL,digits))
prmatrix(signif(testssBL,digits)) }
if (semi) {
cat("Parametric terms :  ");
}
cat("   \n");
if (semi) {
out=coef.timecox(timecox.object,digits=digits);
out=signif(out,digits=digits)
print(out)
}
cat("   \n");
cat("  Call: \n")
dput(attr(timecox.object, "Call"))
cat("\n")
}
# coef() method for "timecox" fits: delegates to the shared coefBase()
# helper, which builds the coefficient/SE/p-value table.
coef.timecox<- function(object,..., digits=3) {
coefBase(object,digits=digits)
}
# Map numeric values to colors and draw a color-key legend with image().
#
# x      -- numeric vector or matrix of values.
# col    -- vector of colors; for the `breaks` path its length must equal
#           the number of intervals produced by cut().
# range  -- optional c(lo, hi); values outside are clamped to the bounds.
# breaks -- optional cut points; when NA, colors are assigned by ranking x
#           against an evenly spaced grid over its observed range.
# cex.axis, las, ... -- passed to the legend axis / image().
#
# Returns a character matrix of colors with the same shape (and row names)
# as x; drawing the key is a side effect.
colByValue <- function(x, col, range=NA, breaks=NA, cex.axis=2, las=1, ...) {
  if (is.vector(x)) {
    x <- as.matrix(x)
  }
  # Clamp to the requested range (original code had an empty no-op branch
  # for the NA case; inverted into a direct guard).
  if (!is.na(range[1])) {
    x[x < range[1]] <- range[1]
    x[x > range[2]] <- range[2]
  }
  if (is.na(breaks[1])) {
    # Continuous mapping: rank each value against an even grid over the
    # data range; the rank picks the color index.
    ff <- seq(min(x, na.rm=TRUE), max(x, na.rm=TRUE), length=length(col))
    bg2 <- apply(as.matrix(as.numeric(unlist(x))), 1,
                 function(x) rank(c(ff, x), ties.method="min")[length(col) + 1])
    dens <- matrix(bg2, nrow(x), ncol(x))
    result <- matrix(col[dens], nrow=nrow(x), ncol=ncol(x))
    row.names(result) <- row.names(x)
    # Continuous color key.
    image(x=1:2, y=as.matrix(ff), z=t(ff), col=col, xaxt="n", ylab="",
          las=las, xlab="", xlim=c(1, 4), bty="n", ...)
    return(result)
  } else {
    # Discrete mapping via cut(); one color per interval.
    temp <- cut(as.numeric(unlist(x)), breaks=breaks, include.lowest=TRUE)
    if (length(col) != length(levels(temp))) {
      stop("length:col != length: cut result")
    }
    result <- matrix(col[as.numeric(temp)], nrow=nrow(x), ncol=ncol(x))
    row.names(result) <- row.names(x)
    # Discrete color key with interval labels.
    image(x=1:2, y=as.matrix(1:(length(breaks) - 1)),
          z=t(1:(length(breaks) - 1)), col=col, xaxt="n", yaxt="n",
          ylab="", xlab="", xlim=c(0, 3), ...)
    axis(2, at=1:(length(breaks) - 1), labels=levels(temp), las=las,
         cex.axis=cex.axis)
    return(result)
  }
}
setMethodS3("createUniqueCdf", "AffymetrixCdfFile", function(this, chipType=getChipType(this), tags="unique", path=NULL, units=NULL, ..., ram=NULL, verbose=TRUE) {
rearrangeCells <- function(units, offset=0, hasGroups=TRUE, ncols, ...) {
rearrangeGroup <- function(group, idxs, ...) {
y = (idxs-1) %/% ncols
x = (idxs-1) - ncols*y
group$y <- y
group$x <- x
group$indices <- idxs
group
}
nbrOfCells <- lapply(units, FUN=function(unit) .subset2(unit, "ncells"))
nbrOfCells <- sum(unlist(nbrOfCells, use.names=FALSE))
cells <- seq(from=offset+1, to=offset+nbrOfCells)
verbose && printf(verbose, "Units: ")
if (hasGroups) {
for (kk in seq_along(units)) {
if (verbose) {
if (kk %% 1000 == 0) {
printf(verbose, "%d, ", kk)
} else if (kk %% 100 == 0) {
cat(".")
}
}
groups <- .subset2(.subset2(units, kk), "groups")
for (ll in seq_along(groups)) {
group <- .subset2(groups, ll)
nindices <- length(.subset2(group, "indices"))
head <- 1:nindices
idxs <- .subset(cells, head)
cells <- .subset(cells, (nindices+1):length(cells))
groups[[ll]] <- rearrangeGroup(group, idxs)
}
units[[kk]]$groups <- groups
}
} else {
for (kk in seq_along(units)) {
if (verbose) {
if (kk %% 1000 == 0) {
printf(verbose, "%d, ", kk)
} else if (kk %% 100 == 0) {
cat(".")
}
}
group <- .subset2(units, kk)
nindices <- length(.subset2(group, "indices"))
head <- 1:nindices
idxs <- .subset(cells, head)
cells <- .subset(cells, (nindices+1):length(cells))
group <- rearrangeGroup(group, idxs)
units[[kk]] <- group
}
}
verbose && printf(verbose, "\n")
units
}
ram <- getRam(aromaSettings, ram)
verbose <- Arguments$getVerbose(verbose)
if (verbose) {
pushState(verbose)
on.exit(popState(verbose))
}
verbose2 <- as.integer(verbose)-1;
verbose2 <- 2
if(is.null(units)) {
units <- seq_len(nbrOfUnits(this))
} else {
units <- Arguments$getIndices(units, max=nbrOfUnits(this))
}
if (isUniqueCdf(this)) {
return(this)
}
verbose && enter(verbose, "Creating unique CDF")
verbose && cat(verbose, "Chip type: ", getChipType(this))
src <- getPathname(this)
src <- Arguments$getReadablePathname(src)
if (is.null(path)) {
mainChipType <- gsub("[,].*", "", chipType)
path <- filePath("annotationData", "chipTypes", mainChipType)
}
name <- paste(c(chipType, tags), collapse=",")
filename <- sprintf("%s.CDF", name)
pathname <- Arguments$getWritablePathname(filename, path=path)
pathname <- AffymetrixFile$renameToUpperCaseExt(pathname)
pathname <- Arguments$getWritablePathname(pathname, mustNotExist=TRUE)
pathnameT <- pushTemporaryFile(pathname, verbose=verbose)
if (identical(src, pathnameT)) {
throw("Cannot not create CDF file. Destination is same as source: ", src)
}
verbose && enter(verbose, "Reading CDF group names")
nbrOfGroupsPerUnit <- .readCdfGroupNames(src)
verbose && exit(verbose)
names(nbrOfGroupsPerUnit) <- NULL
gc <- gc()
verbose && print(verbose, gc)
nbrOfGroupsPerUnit <- sapply(nbrOfGroupsPerUnit, FUN=length)
gc <- gc()
verbose && print(verbose, gc)
cdfLite <- .readCdf(src, units=units,
readXY=FALSE, readBases=FALSE,
readIndexpos=FALSE, readAtoms=FALSE,
readUnitType=FALSE, readUnitDirection=FALSE,
readUnitNumber=FALSE, readUnitAtomNumbers=FALSE,
readGroupAtomNumbers=FALSE, readGroupDirection=FALSE,
readIndices=TRUE, readIsPm=FALSE,
stratifyBy=c("nothing", "pmmm", "pm", "mm"), verbose=0)
nbrOfCellsPerUnit <- sapply(cdfLite, FUN=function(unit) {
length(unlist(unit, use.names=FALSE))
})
verbose && cat(verbose, "Number of cells per unit:")
verbose && summary(verbose, nbrOfCellsPerUnit)
cdfLite <- NULL;
nbrOfCells <- sum(nbrOfCellsPerUnit)
nbrOfUnits <- length(nbrOfCellsPerUnit)
verbose && enter(verbose, "Reading CDF QC units")
destQcUnits <- .readCdfQc(src)
verbose && exit(verbose)
nbrOfQcUnits <- length(destQcUnits)
nbrOfCellsPerQcUnit <- lapply(destQcUnits, FUN=.subset2, "ncells")
nbrOfCellsPerQcUnit <- unlist(nbrOfCellsPerQcUnit, use.names=FALSE)
nbrOfQcCells <- sum(nbrOfCellsPerQcUnit)
verbose && printf(verbose,
"Number of QC cells: %d in %d QC units (%s)\n",
nbrOfQcCells, nbrOfQcUnits,
hsize(object.size(destQcUnits), digits = 2L, standard = "IEC"))
totalNbrOfCells <- nbrOfCells + nbrOfQcCells
verbose && printf(verbose, "Total number of cells: %d\n", totalNbrOfCells)
side <- as.integer(floor(sqrt(totalNbrOfCells)))
nrows <- ncols <- side
if (nrows*ncols < totalNbrOfCells) {
nrows <- as.integer(nrows + 1)
if (nrows*ncols < totalNbrOfCells) {
ncols <- as.integer(ncols + 1)
}
}
verbose && printf(verbose, "Best array dimension: %dx%d (=%d cells, i.e. %d left-over cells)\n", nrows, ncols, nrows*ncols, nrows*ncols - totalNbrOfCells)
verbose && enter(verbose, "Creating CDF header with source CDF as template")
verbose && enter(verbose, "Setting up header")
verbose && enter(verbose, "Reading CDF header")
destHeader <- .readCdfHeader(src)
verbose && exit(verbose)
destHeader$nrows <- nrows
destHeader$ncols <- ncols
verbose && enter(verbose, "Reading CDF unit names")
unitNames <- .readCdfUnitNames(src)
verbose && exit(verbose)
if (nbrOfUnits > 0) {
unitLengths <- 20 + 82*nbrOfGroupsPerUnit + 14*nbrOfCellsPerUnit
avgUnitLength <- mean(unitLengths)
} else {
unitLengths <- NULL
}
if (nbrOfQcUnits > 0) {
qcUnitLengths <- 6 + 7*nbrOfCellsPerQcUnit
} else {
qcUnitLengths <- NULL
}
verbose && exit(verbose)
verbose && enter(verbose, "Writing")
verbose && cat(verbose, "destHeader:")
verbose && str(verbose, destHeader)
verbose && cat(verbose, "unitNames:")
verbose && str(verbose, unitNames)
verbose && cat(verbose, "qcUnitLengths:")
verbose && str(verbose, qcUnitLengths)
verbose && cat(verbose, "unitLengths:")
verbose && str(verbose, unitLengths)
gc <- gc()
verbose && print(verbose, gc)
con <- file(pathnameT, open = "wb")
on.exit({
if (!is.null(con))
close(con)
con <- NULL
})
.writeCdfHeader(con=con, destHeader, unitNames=unitNames,
qcUnitLengths=qcUnitLengths, unitLengths=unitLengths,
verbose=verbose2)
destHeader <- unitNames <- qcUnitLengths <- unitLengths <- NULL
gc <- gc()
verbose && print(verbose, gc)
verbose && exit(verbose)
verbose && exit(verbose)
verbose && enter(verbose, "Writing QC units")
verbose && enter(verbose, "Rearranging QC unit cell indices")
destQcUnits <- rearrangeCells(destQcUnits, offset=nbrOfCells, ncols=ncols,
hasGroups=FALSE, verbose=verbose)
gc <- gc()
verbose && exit(verbose)
.writeCdfQcUnits(con=con, destQcUnits, verbose=verbose2)
destQcUnits <- NULL;
gc <- gc()
verbose && print(verbose, gc)
verbose && exit(verbose)
verbose && printf(verbose, "Number of units: %d\n", nbrOfUnits)
verbose && printf(verbose, "Argument 'ram': %f\n", ram)
verbose && printf(verbose, "Average unit length: %f bytes\n", avgUnitLength)
if (nbrOfUnits > 0) {
scale <- 200/avgUnitLength
} else {
scale <- 1
}
nbrOfUnitsPerChunk <- as.integer(ram * scale * 20000)
nbrOfChunks <- ceiling(nbrOfUnits / nbrOfUnitsPerChunk)
verbose && printf(verbose, "Number of chunks: %d (%d units/chunk)\n",
nbrOfChunks, nbrOfUnitsPerChunk)
verbose && enter(verbose, "Reading, extracting, and writing units")
fields <- c("pbase", "tbase", "atom", "indexpos")
head <- 1:nbrOfUnitsPerChunk
count <- 1
idxOffset <- as.integer(0)
unitsToDo <- 1:nbrOfUnits
while (length(unitsToDo) > 0) {
if (length(unitsToDo) < nbrOfUnitsPerChunk) {
head <- 1:length(unitsToDo)
}
units <- unitsToDo[head]
verbose && printf(verbose, "Chunk
count, nbrOfChunks, length(units))
verbose && cat(verbose, "Units:")
verbose && str(verbose, units)
unitsToDo <- unitsToDo[-head]
verbose && enter(verbose, "Reading CDF list structure")
srcUnits <- .readCdf(src, units=units, readGroupDirection=TRUE)
verbose && exit(verbose)
if (is.null(srcUnits)) {
throw(sprintf("Failed to read %d units from CDF file. This could be because you are running out of memory. Try decreasing argument 'ram': %s", length(units), src))
}
if (length(srcUnits) != length(units)) {
throw("Number of read CDF units does not equal number of requested units: ", length(srcUnits), " != ", length(units))
}
if (verbose && isVisible(verbose)) {
printf(verbose, " => RAM: %s\n", hsize(object.size(srcUnits), digits = 2L, standard = "IEC"))
}
if (length(srcUnits) == 0) {
throw("Internal error: While creating unique CDF, an empty list of CDF units was read.")
}
srcUnits <- lapply(srcUnits, FUN=function(unit) {
groups <- .subset2(unit, "groups")
groups <- lapply(groups, FUN=function(group) {
nThisGroup <- length(.subset2(group,"pbase"))
idxs <- idxOffset + seq_len(nThisGroup)
idxs1 <- as.integer(idxs-1);
y <- idxs1 %/% ncols
group$y <- y
group$x <- idxs1 - ncols*y
idxOffset <<- idxOffset + nThisGroup
group
})
unit$groups <- groups
unit
})
if (length(srcUnits) == 0) {
throw("Internal error: While creating unique CDF, an empty list of CDF units is requested to be written.")
}
.writeCdfUnits(con=con, srcUnits, verbose=verbose2)
srcUnits <- units <- NULL;
count <- count + 1
}
unitsToDo <- head <- fields <- count <- NULL
gc <- gc()
verbose && print(verbose, gc)
verbose && exit(verbose)
close(con)
con <- NULL
verbose && cat(verbose, "Temporary CDF file created:")
verbose && cat(verbose, "Output pathname: ", pathnameT)
verbose && print(verbose, file.info(pathnameT))
gc <- gc()
verbose && print(verbose, gc)
verbose && enter(verbose, "Verifying the written CDF")
header <- .readCdfHeader(pathnameT)
if ((header$nrows != nrows) || (header$ncols != ncols)) {
throw(sprintf("Failed to create a valid unique-cell CDF: The dimension of the written CDF does not match the intended one: (%d,%d) != (%d,%d)", header$nrows, header$ncols, nrows, ncols))
}
nbrOfUnits <- header$probesets
chunkSize <- 10000*ram
nbrOfChunks <- ceiling(nbrOfUnits / chunkSize)
for (kk in 1:nbrOfChunks) {
verbose && printf(verbose, "Chunk %d of %d\n", kk, nbrOfChunks)
from <- (kk-1)*chunkSize+1
to <- min(from+chunkSize, nbrOfUnits)
cells <- .readCdfCellIndices(pathnameT, units=from:to)
cells <- unlist(cells, use.names=FALSE)
cells <- diff(cells)
cells <- unique(cells)
udcells <- as.integer(cells)
if (!identical(udcells, 1:1)) {
throw("Failed to create a valid unique-cell CDF: The cell indices are not contiguous: ", paste(udcells, collapse=", "))
}
cells <- udcells <- NULL
}
verbose && exit(verbose)
gc <- gc()
verbose && print(verbose, gc)
popTemporaryFile(pathnameT, verbose=verbose)
verbose && cat(verbose, "File pathname: ", pathname)
verbose && print(verbose, file.info(pathname))
verbose && exit(verbose)
verbose && exit(verbose)
cdfU <- newInstance(this, pathname)
verbose && enter(verbose, "Final set of sanity checks")
verbose && cat(verbose, "Number of units")
stopifnot(nbrOfUnits(cdfU) == nbrOfUnits(this))
verbose && cat(verbose, "Number of groups per unit")
stopifnot(identical(nbrOfGroupsPerUnit(cdfU), nbrOfGroupsPerUnit(this)))
verbose && cat(verbose, "Groups names per unit")
stopifnot(identical(.readCdfGroupNames(getPathname(cdfU)), .readCdfGroupNames(getPathname(this))))
verbose && cat(verbose, "Number of cells per unit group")
stopifnot(identical(nbrOfCellsPerUnitGroup(cdfU), nbrOfCellsPerUnitGroup(this)))
verbose && cat(verbose, "Consecutive ordering of cell indices")
cells <- getCellIndices(cdfU, unlist=TRUE, useNames=FALSE)
stopifnot(length(cells) <= nbrOfCells(cdfU))
stopifnot(identical(unique(diff(cells)), 1L))
cdfUZ <- getChecksumFile(cdfU)
verbose && exit(verbose)
cdfU
}, private=TRUE)
setMethodS3("getUniqueCdf", "AffymetrixCdfFile", function(this, ..., verbose=FALSE) {
  # Retrieve the "unique-cell" counterpart of this CDF, creating it on
  # demand via createUniqueCdf() when it cannot be located on disk.
  # Returns 'this' unchanged when it is already a unique CDF.
  verbose <- Arguments$getVerbose(verbose)
  if (verbose) {
    pushState(verbose)
    on.exit(popState(verbose))
  }

  # Already the unique variant? Then there is nothing to look up.
  if (isUniqueCdf(this)) {
    return(this)
  }

  verbose && enter(verbose, "Retrieving unique CDF")
  uniqueChipType <- sprintf("%s,unique", getChipType(this))
  verbose && cat(verbose, "Unique chip type: ", uniqueChipType)

  # Try to find an existing unique CDF for this chip type.
  verbose && enter(verbose, "Locating unique CDF")
  pathname <- findByChipType(this, chipType=uniqueChipType, ...)
  verbose && cat(verbose, "Pathname: ", pathname)
  verbose && exit(verbose)

  if (!is.null(pathname)) {
    # Found on disk: instantiate it.
    res <- byChipType(this, chipType=uniqueChipType, ...)
  } else {
    # Not found: build it from this CDF.
    verbose && enter(verbose, "Could not locate unique CDF. Will create one for chip type")
    res <- createUniqueCdf(this, ..., verbose=less(verbose))
    verbose && exit(verbose)
  }
  verbose && exit(verbose)

  res
}, protected=TRUE)
setMethodS3("isUniqueCdf", "AffymetrixCdfFile", function(this, ...) {
  # TRUE when this CDF carries the "unique" filename tag (the tag appended
  # to the chip type of unique-cell CDFs), otherwise FALSE.
  res <- hasTag(this, "unique")
  res
})
setMethodS3("getUnitGroupCellMapWithUnique", "AffymetrixCdfFile", function(this, units=NULL, ..., ugcMapM=NULL, verbose=FALSE) {
  # Build the (unit, group, cell) map of 'this' CDF and append, as column
  # 'cellM', the matching cell indices from its unique-cell counterpart.
  #
  # units:   optional unit indices to map; sorted and deduplicated below.
  # ugcMapM: optional precomputed UnitGroupCellMap of the unique CDF; when
  #          NULL it is computed from getUniqueCdf(this).
  # Returns the merged map (the map of 'this' plus column 'cellM').

  # This method only makes sense on the non-unique CDF.
  if (isUniqueCdf(this)) {
    throw("Argument 'this' is already a unique CDF: ", getPathname(this))
  }

  if (is.null(ugcMapM)) {
    cdfM <- getUniqueCdf(this)
  } else if (!isUnitGroupCellMap(ugcMapM)) {
    throw("Argument 'ugcMapM' is not a UnitGroupCellMap")
  }

  if (!is.null(units))
    units <- sort(unique(units))

  ugcMap <- list()
  ugcMap[[1]] <- getUnitGroupCellMap(this, units=units, verbose=verbose)
  if (is.null(ugcMapM)) {
    ugcMap[[2]] <- getUnitGroupCellMap(cdfM, units=units, verbose=verbose)
  } else {
    ugcMap[[2]] <- ugcMapM
  }

  # Encode each (unit, group) pair as a single number so the two maps can be
  # aligned with one match() call.  NOTE(review): this assumes every unit has
  # fewer than MAXGROUP (10000) groups; more would cause hash collisions --
  # confirm this bound holds for all supported chip types.
  MAXGROUP <- 10000
  ugHashcode <- lapply(ugcMap, FUN=function(map) {
    MAXGROUP * map[, "unit"] + map[, "group"]
  })
  rr <- match(ugHashcode[[1]], ugHashcode[[2]])
  ugHashcode <- NULL

  # For each row of the first map, pull the cell index of the matching
  # (unit, group) row in the unique-CDF map.
  mergedMap <- cbind(ugcMap[[1]], cellM=ugcMap[[2]][rr, "cell"])
  ugcMap <- NULL

  mergedMap
}, protected=TRUE)
alt_mcvis = function(mcvis_result,
    eig_max = 1L,
    var_max = ncol(mcvis_result$MC))
{
    # Bipartite multi-collinearity plot: variables (bottom axis) are linked
    # to tau statistics (top axis); colour/size/alpha of each segment encode
    # the strength of the MC relationship.
    #
    # mcvis_result: mcvis result object holding the MC matrix ($MC).
    # eig_max:      number of eigenvalue rows to display.
    # var_max:      number of variable columns to display.
    # Returns a ggplot object.
    MC_ordered = make_MC_ordered(
        mcvis_result = mcvis_result,
        eig_max = eig_max,
        var_max = var_max)
    taup = rownames(MC_ordered)[1]
    p = ncol(mcvis_result$MC)

    # Long format: one row per (tau, variable) pair.
    melt_MC = reshape2::melt(
        MC_ordered,
        varnames = c("taus", "cols"),
        value.name = "weights")

    # Larger weight => weaker link; subtract the uninformative 1/p baseline.
    thickness = 1 - melt_MC$weights
    thickness = thickness - (1/p)
    ggplot_size_cat = dplyr::case_when(
        thickness <= 0.2 ~ "Small",
        thickness <= 0.3 ~ "Medium",
        TRUE ~ "Strong")
    ggplot_size_cat = factor(
        ggplot_size_cat,
        levels = c("Small", "Medium", "Strong"))

    plotdf = dplyr::mutate(melt_MC, thickness, ggplot_size_cat)
    plotdf$cols_norm = rangeTransform(as.integer(plotdf$cols))
    plotdf$taus_norm = rangeTransform(as.integer(plotdf$taus))
    plotdf$y1 = 0
    plotdf$y2 = 1
    # Highlight the first (top) tau with a solid line; others are dotted.
    plotdf$linetype = ifelse(plotdf$taus == taup, taup, "others")

    # NOTE(review): four manual values for a three-level factor -- the extra
    # entry looks like a leftover from a four-category scale; confirm.
    ggplot_size_manual = c(0, 0.5, 1, 2)
    ggplot_alpha_manual = c(0, 0.5, 1, 1)

    # Axis label positions on the normalized [0, 1] coordinate system.
    axis_1 = data.frame(x=rangeTransform(as.integer(unique(plotdf$cols))),
        y=0, label=as.character(unique(plotdf$cols)))
    axis_2 = data.frame(x=rangeTransform(as.integer(unique(plotdf$taus))),
        y=1, label=as.character(unique(plotdf$taus)))
    linetype_manual = c("dotted","solid")
    names(linetype_manual) = c("others", taup)

    gg = ggplot2::ggplot(data=plotdf) +
        geom_segment(aes(
            x=.data$cols_norm, xend=.data$taus_norm,
            y=.data$y1, yend=.data$y2,
            colour = .data$ggplot_size_cat,
            size = .data$ggplot_size_cat,
            alpha = .data$ggplot_size_cat,
            linetype = .data$linetype)) +
        geom_text(data=axis_1, aes(label=.data$label, x=.data$x, y=.data$y - 0.075)) +
        geom_text(data=axis_2, aes(label=.data$label, x=.data$x, y=.data$y + 0.075)) +
        geom_segment(data=axis_1, aes(x=.data$x, xend=.data$x, y=.data$y, yend=.data$y-0.025), size=0.7) +
        geom_segment(data=axis_2, aes(x=.data$x, xend=.data$x, y=.data$y, yend=.data$y+0.025), size=0.7) +
        geom_segment(x=0, xend=1, y=0, yend=0, size=0.7) +
        geom_segment(x=0, xend=1, y=1, yend=1, size=0.7) +
        scale_colour_brewer(palette = "Set1", drop = FALSE, direction = -1) +
        scale_size_manual(values = ggplot_size_manual, drop = FALSE) +
        scale_alpha_manual(values = ggplot_alpha_manual, drop = FALSE) +
        scale_linetype_manual(values = linetype_manual, drop = FALSE) +
        scale_y_continuous(limits=c(-0.2, 1.2), expand=c(0, 0)) +
        labs(title = "Multi-collinearity plot") +
        guides(
            colour = guide_legend(title = "Strength of MC"),
            size = "none",
            linetype = "none",
            alpha = "none") +
        theme_bw() +
        theme(axis.title=element_blank(),
            axis.text=element_blank(),
            axis.ticks=element_blank(),
            panel.grid=element_blank())

    # (A stray bare 'gg' expression used to precede this return; removed.)
    return(gg)
}
CV <- function(x, dec = 3){
  # Coefficient of variation in percent, NAs dropped, rounded to 'dec'
  # digits and formatted as e.g. "50 %".  If the value cannot be computed
  # (e.g. all-NA input), the numeric NA is returned unformatted.
  ratio <- sd(x, na.rm = TRUE) / mean(x, na.rm = TRUE)
  result <- round(100 * ratio, dec)
  if (!is.na(result)) {
    result <- paste(result, "%")
  }
  return(result)
}
meant <- function(x, dec = 3){
  # Arithmetic mean with NAs dropped, rounded to 'dec' decimals.
  round(mean(x, na.rm = TRUE), dec)
}
mediant <- function(x, dec = 3){
  # Median with NAs dropped, rounded to 'dec' decimals.
  round(median(x, na.rm = TRUE), dec)
}
sdt <- function(x, dec = 3){
  # Sample standard deviation with NAs dropped, rounded to 'dec' decimals.
  round(sd(x, na.rm = TRUE), dec)
}
datamean <- function(data, inicio = 5){
  # Per-station mean: groups by site/longitude/latitude and applies meant()
  # (rounded mean) to every column from 'inicio' through the last column.
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, meant), by = .(site, longitude, latitude), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datasd <- function(data, inicio = 5){
  # Per-station standard deviation: groups by site/longitude/latitude and
  # applies sdt() to every column from 'inicio' through the last column.
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, sdt), by = .(site, longitude, latitude), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datamedian <- function(data, inicio = 5){
  # Per-station median: groups by site/longitude/latitude and applies
  # mediant() to every column from 'inicio' through the last column.
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, mediant), by = .(site, longitude, latitude), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datacv <- function(data, inicio = 5){
  # Per-station coefficient of variation: groups by site/longitude/latitude
  # and applies CV() to every column from 'inicio' through the last column.
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, CV), by = .(site, longitude, latitude), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datamean2 <- function(data, inicio = 5){
  # Variant of datamean() for data keyed by Nombre/Latitud/Longitud
  # (the column names used by ChileClimateData()).
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, meant), by = .(Nombre, Latitud, Longitud), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datasd2 <- function(data, inicio = 5){
  # Variant of datasd() for data keyed by Nombre/Latitud/Longitud
  # (the column names used by ChileClimateData()).
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, sdt), by = .(Nombre, Latitud, Longitud), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datamedian2 <- function(data, inicio = 5){
  # Variant of datamedian() for data keyed by Nombre/Latitud/Longitud
  # (the column names used by ChileClimateData()).
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, mediant), by = .(Nombre, Latitud, Longitud), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
datacv2 <- function(data, inicio = 5){
  # Variant of datacv() for data keyed by Nombre/Latitud/Longitud
  # (the column names used by ChileClimateData()).
  lastCol <- length(data)
  dt <- data.table::data.table(data)
  res <- dt[, lapply(.SD, CV), by = .(Nombre, Latitud, Longitud), .SDcols = inicio:lastCol]
  as.data.frame(res)
}
comparFunction <- function(data){
  # Map human-readable parameter names ("Temperatura", "Humedad", ...) to
  # the value-column names of the downloaded data; "Viento" expands to both
  # wind direction and wind speed columns.
  #
  # data: character vector of parameter names.
  # Returns a character vector of column names (NULL when nothing matches);
  # the result is also printed as a side effect, as before.
  obs <- data
  # Fix: the original built this data.frame with '<-' instead of '=',
  # which leaked 'par'/'nom' into the function frame and mangled the
  # column names (access below is positional, so behavior is unchanged).
  comparar <- data.frame(
    par = c("Temperatura", "PuntoRocio", "Humedad", "PresionQFE", "PresionQFF", "dd_Valor", "ff_Valor"),
    nom = c("Ts_Valor", "Td_Valor", "HR_Valor", "QFE_Valor", "QFF_Valor", "dd_Valor", "ff_Valor")
  )
  a <- NULL
  # seq_along()/seq_len() instead of 1:length(): safe on empty input.
  for(i in seq_along(obs)){
    aux <- obs[i]
    if(aux == "Viento"){
      a <- c(a, "dd_Valor", "ff_Valor")
    }else{
      for(j in seq_len(nrow(comparar))){
        aux1 <- comparar[j, 1]
        aux2 <- comparar[j, 2]
        if(aux == aux1){
          a <- c(a, aux2)
        }
      }
    }
  }
  print(a)
  return(a)
}
# Render station locations on an interactive OpenStreetMap via plotly's
# scattermapbox trace, with a hover tooltip showing station and site.
#
# NOTE(review): the defaults look swapped -- 'latitud' defaults to
# data$Longitud and 'longitud' to data$Latitud.  The station tables built
# elsewhere in this file appear to store latitudes in columns named
# "Longitud" (and vice versa), so this may be deliberate compensation for
# mislabeled columns -- confirm before "fixing" either side.
#
# data:     data.frame with columns Estacion, Ciudad and coordinate columns.
# latitud:  numeric vector of latitudes (see NOTE above).
# longitud: numeric vector of longitudes (see NOTE above).
# centro:   c(lon, lat) map centre; default is near Santiago.
# Returns a plotly figure object.
siteplot<-function(data, latitud = data$Longitud, longitud = data$Latitud, centro = c(-70.6, -33.4)){
  fig<-plotly::plot_ly(data,
    lat = latitud,
    lon = longitud,
    marker = list(color = "red"),
    # Tooltip: station name and site code.
    hovertext = ~paste("Estacion:", data$Estacion,"<br />", "Site:", data$Ciudad),
    type = 'scattermapbox'
  )
  # Overlay on OSM tiles, zoomed and centred per 'centro'.
  fig<- plotly::layout(
    p = fig,
    mapbox = list(
      style = 'open-street-map',
      zoom =9,
      center = list(lon = centro[1], lat = centro[2])
    )
  )
  return(fig)
}
ChileClimateData <- function(Estaciones = "INFO", Parametros, inicio, fin, Region = FALSE){
  # Download hourly observations from the Chilean Meteorological Directorate
  # (climatologia.meteochile.gob.cl) for the requested stations, parameters
  # and year range, and assemble them into a single data.frame.
  #
  # Estaciones: station codes (column 1 of the catalogue), or region codes
  #             when Region = TRUE, or "INFO" to return the catalogue.
  # Parametros: any of "Temperatura", "PuntoRocio", "Humedad", "Viento",
  #             "PresionQFE", "PresionQFF".
  # inicio/fin: first and last year to download (inclusive).
  # Region:     if TRUE, match 'Estaciones' against the Region column.
  #
  # Returns a data.frame with columns date, Nombre, Latitud, Longitud and
  # one column per downloaded measurement (coerced to numeric).  Rows with
  # a missing date or station name are dropped.
  #
  # NOTE(review): needs network access and download method "curl";
  # temporary .zip/.csv files are created in (and removed from) the
  # working directory.
  tablaEstaciones <- data.frame(
    "Codigo Nacional" = c("180005","200006","220002","230001","270001","270008","290004","320041",
                          "320051","330007","330019","330020","330021","330030","330031","330066",
                          "330077","330111","330112","330113","340031","360011","360019","360042",
                          "370033","380013","380029","390006","400009","410005","420004","420014",
                          "430002","430004","430009","450001","450004","450005","460001","470001",
                          "510005","520006","530005","550001","950001","950002","950003"),
    "Codigo OMM" = c("85406","85418","85432","85442","85469","85467","85488","85556","85539",
                     "85560","85580","85577","85574","85586","85585","85584","85594","85571",
                     "85593","85569","85629","85672","85682","85671","85703","85743","85744",
                     "85766","85782","85799","85830","85824","85832","85836","85837","85862",
                     "85864","85874","85886","85892","85920","85934","85940","85968","89056",
                     "89057","89059"),
    "Codigo OACI" = c("SCAR","SCDA","SCCF","SCFA","SCIP","SCAT","SCSE","SCVM","","SCRD","SCTB",
                      "SCQN","SCEL","SCSN","","SCIR","","","","","SCIC","SCCH","SCIE","","SCGE",
                      "SCTC","SCQP","SCVD","SCJO","SCTE","SCTN","SCPQ","SCFT","SCAP","SCMK","SCAS",
                      "SCCY","SCBA","SCCC","SCHR","SCNT","SCCI","SCFM","SCGZ","SCRM","SCBP","SCBO"),
    "Nombre" = c("Chacalluta Arica Ap.","Diego Aracena Iquique Ap.","El Loa Calama Ad.",
                 "Cerro Moreno Antofagasta Ap.","Mataveri Isla de Pascua Ap.",
                 "Desierto de Atacama Caldera Ad.","La Florida La Serena Ad.",
                 "Vina del Mar Ad. (Torquemada)","Los Libertadores","Rodelillo Ad.",
                 "Eulogio Sanchez Tobalaba Ad.","Quinta Normal Santiago","Pudahuel Santiago",
                 "Santo Domingo Ad.","Juan Fernandez Estacion Meteorologica.",
                 "La Punta Juan Fernandez Ad.","El Colorado","Lo Prado Cerro San Francisco",
                 "San Jose Guayacan","El Paico","General Freire Curico Ad.",
                 "General Bernardo O'Higgins Chillan Ad.","Carriel Sur Concepcion Ap.",
                 "Termas de Chillan","Maria Dolores Los Angeles Ad.","Maquehue Temuco Ad.",
                 "La Araucania Ad.","Pichoy Valdivia Ad.","Canal Bajo Osorno Ad.",
                 "El Tepual Puerto Montt Ap.","Chaiten Ad.","Mocopulli Ad.","Futaleufu Ad.",
                 "Alto Palena Ad.","Melinka Ad.","Puerto Aysen Ad.","Teniente Vidal Coyhaique Ad.",
                 "Balmaceda Ad.","Chile Chico Ad.","Lord Cochrane Ad.",
                 "Teniente Gallardo Puerto Natales Ad.","Carlos Ibanez Punta Arenas Ap.",
                 "Fuentes Martinez Porvenir Ad.","Guardiamarina Zanartu Pto Williams Ad.",
                 "C.M.A. Eduardo Frei Montalva Antartica","Base Antartica Arturo Prat",
                 "Base Antartica Bernardo O`Higgins"),
    "Latitud" = c("-18.35555","-20.54917","-22.49806","-23.45361","-27.15889","-27.25444",
                  "-29.91444","-32.94944","-32.84555","-33.06528","-33.45528","-33.44500",
                  "-33.37833","-33.65611","-33.63583","-33.66639","-33.35000","-33.45806",
                  "-33.61528","-33.70639","-34.96944","-36.58583","-36.78055","-36.90361",
                  "-37.39694","-38.76778","-38.93444","-39.65667","-40.61444","-41.44750",
                  "-42.93028","-42.34667","-43.18889","-43.61167","-43.89778","-45.39944",
                  "-45.59083","-45.91833","-46.58500","-47.24389","-51.66722","-53.00167",
                  "-53.25361","-54.93167","-62.19194","-62.47861","-63.32083"),
    "Longitud" = c("-70.33889","-70.16944","-68.89805","-70.44056","-109.42361","-70.77944",
                   "-71.20333","-71.47444","-70.11861","-71.55917","-70.54222","-70.67778",
                   "-70.79639","-71.61000","-78.83028","-78.93194","-70.28805","-70.94889",
                   "-70.35583","-71.00000","-71.22028","-72.03389","-73.05083","-71.40667",
                   "-72.42361","-72.62694","-72.66083","-73.08472","-73.05083","-73.08472",
                   "-72.71167","-73.71167","-71.86417","-71.81333","-73.74555","-72.67778",
                   "-72.10167","-71.67778","-71.69472","-72.57611","-72.52528","-70.84722",
                   "-70.32194","-67.61000","-58.98278","-59.66083","-57.89805"),
    "Region" = c("XV","I","II","II","V","III","VI","V","V","V","RM","RM","RM","V","V","V",
                 "RM","RM","RM","RM","VII","XVI","VII","XVI","VIII","IX","IX","XIV","X","X",
                 "X","X","X","X","XI","XI","XI","XI","XI","XI","XII","XII","XII","XII","XII",
                 "XII","XII")
  )
  # Catalogue requested: nothing to download.
  if(Estaciones[1] == "INFO"){
    return(tablaEstaciones)
  }
  # Fail fast on an inverted year range.  (Fix: a bare no-argument print()
  # call used to precede this stop() and itself raised an unrelated error.)
  if(fin < inicio){
    stop("Verificar fechas de inicio y fin")
  }
  url1 <- "https://climatologia.meteochile.gob.cl/application/datos/getDatosSaclim/"
  parametros_list <- c("Temperatura", "PuntoRocio", "Humedad",
                       "Viento", "PresionQFE", "PresionQFF")
  intervalo <- inicio:fin
  lenInEstaciones <- length(Estaciones)
  lenInParametros <- length(Parametros)
  lenEstaciones <- nrow(tablaEstaciones)
  lenParametros <- length(parametros_list)
  lendate <- length(intervalo)
  # Full hourly timestamp grid spanning the requested period.
  start <- as.POSIXct(strptime(paste("01-01-", inicio, "00:00:00", sep =""), format = "%d-%m-%Y %H:%M:%S"))
  end <- as.POSIXct(strptime(paste("31-12-", fin, "23:00:00", sep =""), format = "%d-%m-%Y %H:%M:%S"))
  date <- seq(start, end, by = "hour")
  date <- format(date, format = "%d-%m-%Y %H:%M:%S")
  df <- NULL
  df2 <- NULL
  data_total <- data.frame()
  # Column of the catalogue the user-supplied codes are matched against:
  # 7 = Region, 1 = national code.
  if(Region == TRUE){
    r <- 7
  }else{
    r <- 1
  }
  for(i in seq_len(lenInEstaciones)){
    for(j in seq_len(lenEstaciones)){
      if(Estaciones[i] == tablaEstaciones[j, r]){
        estacion_var <- tablaEstaciones[j, 1]
        # (Fix: redundant scalar Latitud/Longitud assignments that were
        # immediately overwritten by the rep() calls below were removed.)
        Nombre <- rep(tablaEstaciones[j, 4], length(date))
        Latitud <- rep(tablaEstaciones[j, 5], length(date))
        Longitud <- rep(tablaEstaciones[j, 6], length(date))
        data <- data.frame(date, Nombre, Latitud, Longitud)
        data.table::setDT(data)
        for(k in seq_len(lenInParametros)){
          for(l in seq_len(lenParametros)){
            if(Parametros[k] == parametros_list[l]){
              for(m in seq_len(lendate)){
                url3 <- paste(url1, estacion_var,"_",intervalo[m], "_", parametros_list[l], "_", sep = "")
                print(url3)
                filename <- paste(estacion_var,"_",intervalo[m],"_", parametros_list[l], ".zip", sep = "")
                csvname <- paste(estacion_var,"_",intervalo[m],"_", parametros_list[l], "_.csv", sep = "")
                CSV <- NULL
                # Best-effort download; on any failure CSV stays NULL and an
                # all-empty placeholder year is generated below instead.
                try({
                  download.file(url3, destfile = filename, method = "curl")
                  suppressWarnings({
                    unzip(zipfile = filename)
                    try({
                      CSV <- read.csv(csvname, sep = ";", dec = ".", encoding = "UTF-8")
                    }, silent = TRUE)
                  })
                }, silent = TRUE)
                if(is.null(CSV) || length(CSV) == 0){
                  # Build an empty-valued frame with the right columns so the
                  # final join keeps a complete hourly grid for this year.
                  momento1 <- as.POSIXct(strptime(paste("01-01-", intervalo[m], "00:00:00", sep =""), format = "%d-%m-%Y %H:%M:%S"))
                  momento2 <- as.POSIXct(strptime(paste("31-12-", intervalo[m], "23:00:00", sep =""), format = "%d-%m-%Y %H:%M:%S"))
                  momento <- seq(momento1, momento2, by = "hour")
                  CodigoNacional <- rep("", length(momento))
                  momento <- format(momento, format = "%d-%m-%Y %H:%M:%S")
                  if(parametros_list[l] == "Temperatura"){
                    Ts_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, Ts_Valor)
                  }else if(parametros_list[l] == "PuntoRocio"){
                    Td_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, Td_Valor)
                  }else if(parametros_list[l] == "Humedad"){
                    HR_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, HR_Valor)
                  }else if(parametros_list[l] == "Viento"){
                    dd_Valor <- rep("", length(momento))
                    ff_Valor <- rep("", length(momento))
                    VRB_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, dd_Valor, ff_Valor, VRB_Valor)
                  }else if(parametros_list[l] == "PresionQFE"){
                    QFE_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, QFE_Valor)
                  }else if(parametros_list[l] == "PresionQFF"){
                    QFF_Valor <- rep("", length(momento))
                    CSV <- data.frame(CodigoNacional, momento, QFF_Valor)
                  }
                }
                df <- rbind(df, CSV)
                suppressWarnings({
                  file.remove(filename)
                  file.remove(csvname)
                })
              }
              # Keep only the timestamp and value columns before joining
              # onto the hourly grid ("Viento" carries two value columns
              # plus the variable-wind flag).
              if(parametros_list[l] == "Viento"){
                df2 <- data.frame(df[2], df[3], df[4], df[5])
              }else{
                df2 <- data.frame(df[2], df[3])
              }
              data.table::setDT(df2)
              data <- data[df2, on = c("date" = "momento")]
              df <- NULL
              df2 <- NULL
            }
          }
        }
        # data_total starts as an empty data.frame, so rbind() is a no-op on
        # the first station and appends thereafter.  (Fix: the original
        # guarded on is.null(data_total), which could never be TRUE.)
        data_total <- rbind(data_total, data)
      }
    }
  }
  data_total$date <- format(as.POSIXct(strptime(data_total$date, format = "%d-%m-%Y %H:%M:%S")), format = "%d/%m/%Y %H:%M")
  data_total <- data_total[!(is.na(data_total$date)),]
  data_total <- data_total[!(is.na(data_total$Nombre)),]
  data_total <- as.data.frame(data_total)
  # Coerce every column after Nombre (Latitud, Longitud and all measurement
  # columns) from character to numeric.
  for(i in 3:ncol(data_total)){
    data_total[[i]] <- as.numeric(data_total[[i]])
  }
  return(data_total)
}
ChileAirQuality <- function(Comunas = "INFO", Parametros, fechadeInicio, fechadeTermino, Site = FALSE, Curar = TRUE){
estationMatrix <- data.frame(
"Ciudad" = c("SA","CE1","CE","CN","EB","IN","LF","LC","PU","PA","QU","QU1","AH","AR","TE","TEII",
"TEIII","PLCI","PLCII","LU","LR","MAI","MAII","MAIII","VA","VAII","OS","OSII","PMI",
"PMII","PMIII","PMIV","PV","COI","COII","PAR"),
"cod" = c("RM/D14","RM/D16","RM/D31","RM/D18","RM/D17","RM/D11","RM/D12","RM/D13","RM/D15",
"RM/D27","RM/D30","RM/D19","RI/117","RXV/F01","RIX/901","RIX/905","RIX/904","RIX/903",
"RIX/902","RXIV/E04","RXIV/E06","RXIV/E01","RXIV/E05","RXIV/E02","RXIV/E03","RXIV/E08",
"RX/A01","RX/A04","RX/A08","RX/A07","RX/A02","RX/A03","RX/A09","RXI/B03","RXI/B04","RXII/C05"),
"Longitud" = c("-33.450819","-33.479515","-33.482411","-33.419725","-33.533626","-33.40892","-33.503288",
"-33.363453","-33.424439","-33.577948","-33.33632","-33.352539","-20.290467","-18.476839",
"-38.748699","-38.727003","-38.725302","-38.772463","-38.764767","-40.286857","-40.321282",
"-39.665626","-39.542346","-39.719218","-39.831316","-39.805429","-40.584479","-40.683736",
"-41.39917","-41.479507","-41.510342","-41.18765","-41.328935","-45.57993636","-45.57904645",
"-53.158295"),
"Latitud" = c("-70.6604476","-70.719064","-70.703947","-70.73179","-70.665906","-70.650886","-70.587916",
"-70.523024","-70.749876","-70.594184","-70.723583","-70.747952","-70.100192","-70.287911",
"-72.620788","-72.580002","-72.571193","-72.595024","-72.598796","-73.07671","-72.471895",
"-72.953729","-72.925205","-73.128677","-73.228513","-73.25873","-73.11872","-72.596399",
"-72.899523","-72.968756","-73.065294","-73.08804","-72.968209","-72.0610848","-72.04996681",
"-70.921497"),
"Estacion" = c("P. O'Higgins","Cerrillos 1","Cerrillos","Cerro Navia","El Bosque","Independecia","La Florida",
"Las Condes","Pudahuel","Puente Alto","Quilicura","Quilicura 1","Alto Hospicio","Arica",
"Las Encinas Temuco","Nielol Temuco","Museo Ferroviario Temuco","Padre Las Casas I",
"Padre Las Casas II","La Union","CESFAM Lago Ranco","Mafil","Fundo La Ribera",
"Vivero Los Castanos","Valdivia I","Valdivia II","Osorno","Entre Lagos","Alerce","Mirasol",
"Trapen Norte","Trapen Sur","Puerto Varas","Coyhaique I","Coyhaique II","Punta Arenas"),
"Region" = c("RM","RM","RM","RM","RM","RM","RM","RM","RM","RM","RM","RM","I","XV","IX","IX","IX","IX",
"IX","XIV","XIV","XIV","XIV","XIV","XIV","XIV","X","X","X","X","X","X","X","XI","XI","XII")
)
if(Comunas[1] == "INFO"){
return((estationMatrix))
}else{
fi <- paste(fechadeInicio,"1:00")
ft <- paste(fechadeTermino,"23:00")
Fecha_inicio <- as.POSIXct(strptime(fi, format = "%d/%m/%Y %H:%M"))
Fecha_termino<- as.POSIXct(strptime(ft, format = "%d/%m/%Y %H:%M"))
Fecha_inicio_para_arana <- as.character(Fecha_inicio, format("%y%m%d"))
Fecha_termino_para_arana <- as.character(Fecha_termino, format("%y%m%d"))
id_fecha <- gsub(" ","",paste("from=", Fecha_inicio_para_arana, "&to=", Fecha_termino_para_arana))
horas <- (as.numeric(Fecha_termino)/3600-as.numeric(Fecha_inicio)/3600)
urlSinca <- "https://sinca.mma.gob.cl/cgi-bin/APUB-MMA/apub.tsindico2.cgi?outtype=xcl¯o=./"
urlSinca2 <- "&path=/usr/airviro/data/CONAMA/&lang=esp&rsrc=¯opath="
date = NULL
date <- seq(Fecha_inicio, Fecha_termino, by = "hour")
date <- format(date, format = "%d/%m/%Y %H:%M")
data <- data.frame(date)
data_total <- data.frame()
for (i in 1:length(Comunas)) {
try({
inEstation <- Comunas[i]
for(j in 1:nrow(estationMatrix)){
mSite <- estationMatrix[j, 1]
mCod <- estationMatrix[j, 2]
mLon <- estationMatrix[j, 3]
mLat <- estationMatrix[j, 4]
mEstation <- estationMatrix[j, 5]
if(Site){
aux <- mSite
}else{
aux <- mEstation
}
if(inEstation == aux){
try({
site <- rep(mSite, horas + 1)
longitude <- rep(mLat, horas + 1)
latitude <- rep(mLon, horas + 1)
data <- data.frame(date, site, longitude, latitude)
{
for(p in 1:length(Parametros))
{
inParametro <- Parametros[p]
if(inParametro == "PM10" |inParametro == "pm10" |
inParametro == "pM10" |inParametro == "Pm10")
{
codParametro <- "/Cal/PM10//PM10.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
PM10_Bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
PM10_col1 <- PM10_Bruto$Registros.validados
PM10_col2 <- PM10_Bruto$Registros.preliminares
PM10_col3 <- PM10_Bruto$Registros.no.validados
PM10 <- gsub("NA","",gsub(" ", "",paste(PM10_col1,PM10_col2,PM10_col3)))
if(length(PM10) == 0){PM10 <- rep("", horas + 1)}
data <- data.frame(data,PM10)
print(paste(inParametro,inEstation))
}
,silent = T)
} else if(inParametro == "PM25" |inParametro == "pm25" |
inParametro == "pM25" |inParametro == "Pm25")
{
codParametro <- "/Cal/PM25//PM25.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca,
mCod, codParametro, id_fecha,
urlSinca2))
try(
{
PM25_Bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
PM25_col1 <- PM25_Bruto$Registros.validados
PM25_col2 <- PM25_Bruto$Registros.preliminares
PM25_col3 <- PM25_Bruto$Registros.no.validados
PM25 <- gsub("NA","",gsub(" ", "",paste(PM25_col1,PM25_col2,PM25_col3)))
if(length(PM25) == 0){PM25 <- rep("",horas + 1)}
data <- data.frame(data,PM25)
print(paste(inParametro, inEstation))
}
, silent = TRUE)
} else if(inParametro == "O3")
{
codParametro <- "/Cal/0008//0008.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro,
id_fecha, urlSinca2))
try(
{
O3_Bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
O3_col1 <- O3_Bruto$Registros.validados
O3_col2 <- O3_Bruto$Registros.preliminares
O3_col3 <- O3_Bruto$Registros.no.validados
O3 <- gsub("NA","",gsub(" ", "",paste(O3_col1, O3_col2, O3_col3)))
if(length(O3) == 0){O3 <- rep("",horas + 1)}
data <- data.frame(data, O3)
print(paste(inParametro,inEstation))
}
, silent = TRUE)
} else if(inParametro == "CO"| inParametro == "co"|
inParametro == "Co"| inParametro == "cO")
{
codParametro <- "/Cal/0004//0004.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro,
id_fecha, urlSinca2))
try(
{
CO_Bruto <- read.csv(url, dec =",", sep= ";",na.strings = "")
CO_col1 <- CO_Bruto$Registros.validados
CO_col2 <- CO_Bruto$Registros.preliminares
CO_col3 <- CO_Bruto$Registros.no.validados
CO <- gsub("NA","",gsub(" ", "",paste(CO_col1,CO_col2,CO_col3)))
if(length(O3) == 0){O3 <- rep("",horas + 1)}
data <- data.frame(data,CO)
print(paste(inParametro, inEstation))
}
, silent = TRUE)
} else if(inParametro == "NO"| inParametro == "no"|
inParametro == "No"| inParametro == "nO")
{
codParametro <- "/Cal/0002//0002.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro,
id_fecha, urlSinca2))
try(
{
NO_Bruto <- read.csv(url, dec = ",", sep = ";",na.strings = "")
NO_col1 <- NO_Bruto$Registros.validados
NO_col2 <- NO_Bruto$Registros.preliminares
NO_col3 <- NO_Bruto$Registros.no.validados
NO <- gsub("NA", "", gsub(" ", "", paste(NO_col1, NO_col2, NO_col3)))
if(length(NO) == 0){NO <- rep("", horas + 1)}
data <- data.frame(data, NO)
print(paste(inParametro, inEstation))
}
,silent = T)
}else if(inParametro == "NO2"| inParametro == "no2"|
inParametro == "No2"| inParametro == "nO2")
{
codParametro <- "/Cal/0003//0003.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
NO2_Bruto <- read.csv(url, dec =",", sep= ";", na.strings= "")
NO2_col1 <- NO2_Bruto$Registros.validados
NO2_col2 <- NO2_Bruto$Registros.preliminares
NO2_col3 <- NO2_Bruto$Registros.no.validados
NO2 <- gsub("NA","",gsub(" ", "",paste(NO2_col1,NO2_col2,NO2_col3)))
if(length(NO2) == 0){NO2 <- rep("",horas + 1)}
data <- data.frame(data, NO2)
print(paste(inParametro,inEstation))
}
, silent = TRUE)
}else if(inParametro == "NOX"|inParametro == "NOx"|
inParametro == "nOX"|inParametro == "NoX"|
inParametro == "Nox"|inParametro == "nOx"|
inParametro == "nox"|inParametro == "noX")
{
codParametro <- "/Cal/0NOX//0NOX.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
NOX_Bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
NOX_col1 <- NOX_Bruto$Registros.validados
NOX_col2 <- NOX_Bruto$Registros.preliminares
NOX_col3 <- NOX_Bruto$Registros.no.validados
NOX <- gsub("NA", "", gsub(" ", "", paste(NOX_col1, NOX_col2, NOX_col3)))
if(length(NOX) == 0){NOX <- rep("", horas + 1)}
data <- data.frame(data, NOX)
print(paste(inParametro, inEstation))
}
, silent = TRUE)
}else if(inParametro == "SO2"| inParametro == "so2"|
inParametro == "sO2"| inParametro == "So2")
{
codParametro <- "/Cal/0001//0001.horario.horario.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
SO2_Bruto <- read.csv(url, dec =",", sep= ";", na.strings= "")
SO2_col1 <- SO2_Bruto$Registros.validados
SO2_col2 <- SO2_Bruto$Registros.preliminares
SO2_col3 <- SO2_Bruto$Registros.no.validados
SO2 <- gsub("NA","",gsub(" ", "",paste(SO2_col1, SO2_col2, SO2_col3)))
if(length(SO2) == 0){SO2 <- rep("",horas + 1)}
data <- data.frame(data, SO2)
print(paste(inParametro, inEstation))
}
, silent = TRUE)
}else if(inParametro == "tEMP" |inParametro == "TeMP"|inParametro == "TEmP" |inParametro == "TEMp"
|inParametro == "TEmp"|inParametro == "TeMp"|inParametro == "TemP"|inParametro == "tEMp"
|inParametro == "tEmP"|inParametro == "teMP"|inParametro == "temp"|inParametro == "TEMP"
|inParametro == "temP"|inParametro == "teMp"|inParametro == "tEmp"|inParametro == "Temp")
{
codParametro <- "/Met/TEMP//horario_000.ic&"
url <- gsub(" ", "", paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
temp_bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
temp_col1 <- temp_bruto$X
temp <- gsub("NA","",gsub(" ", "",temp_col1))
if(length(temp) == 0){temp <- rep("",horas + 1)}
data <- data.frame(data, temp)
print(paste(inParametro, inEstation))
}
, silent = TRUE)
} else if(inParametro == "HR"| inParametro == "hr"|
inParametro == "hR"| inParametro == "Hr")
{
codParametro <- "/Met/RHUM//horario_000.ic&"
url <- gsub(" ", "",paste(urlSinca,
mCod, codParametro, id_fecha,
urlSinca2))
try(
{
HR_bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
HR_col1 <- HR_bruto$X
HR <- gsub("NA","",gsub(" ", "",HR_col1))
if(length(HR) == 0){HR <- rep("",horas + 1)}
data <- data.frame(data,HR)
print(paste(inParametro,inEstation))
}
, silent = TRUE)
} else if(inParametro == "wd"| inParametro == "WD"|
inParametro == "Wd"| inParametro == "wD")
{
codParametro <- "/Met/WDIR//horario_000_spec.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
wd_bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
wd_col1 <- wd_bruto$X
wd <- gsub("NA","",gsub(" ", "",wd_col1))
if(length(wd) == 0 ){wd <- rep("",horas + 1)}
data <- data.frame(data,wd)
print(paste(inParametro,inEstation))
}
, silent = TRUE)
} else if(inParametro == "ws"| inParametro == "WS"|
inParametro == "Ws"| inParametro == "wS")
{
codParametro <- "/Met/WSPD//horario_000.ic&"
url <- gsub(" ", "",paste(urlSinca, mCod, codParametro, id_fecha, urlSinca2))
try(
{
ws_bruto <- read.csv(url,dec =",", sep= ";",na.strings= "")
ws_col1 <- ws_bruto$X
ws <- gsub("NA","",gsub(" ", "",ws_col1))
if(length(ws) == 0){ws <- rep("",horas + 1)}
data <- data.frame(data,ws)
print(paste(inParametro,inEstation))
}
, silent = TRUE)
} else
{
print(paste("Contaminante",inParametro,"no soportado en el Software"))
}
}
try(
{
data_total <- rbind(data_total, data)
}
, silent = T)
}
}
, silent = T)
}
}
}, silent = T)
}
if(Curar){
len = nrow(data_total)
try({
for (i in 1:len)
{
try(
{
if((as.numeric(data_total$NO[i]) + as.numeric(data_total$NO2[i])) > as.numeric(data_total$NOX[i]) * 1.001){
data_total$NO[i] = ""
data_total$NO2[i] = ""
data_total$NOX[i] = ""
}
}
, silent = T)
}
}, silent = T)
try({
for (i in 1:len)
{
try(
{
if(as.numeric(data_total$PM25[i]) > as.numeric(data_total$PM10[i])*1.001){
data_total$PM10[i] = ""
data_total$PM25[i] = ""
}
}
,silent = T)
}
}, silent = T)
try({
for (i in 1:len)
{
try({
if(as.numeric(data_total$wd[i]) > 360||as.numeric(data_total$wd[i]) < 0){
data_total$wd[i] = ""
}
}, silent = T)
}
}, silent = T)
try({
i =NULL
for (i in 1:len)
{
try(
{
if(as.numeric(data_total$HR[i]) > 100||as.numeric(data_total$HR[i]) <0){
data_total$HR[i] = ""
}
}, silent = T)
}
}, silent = T)
}
for(i in 3:ncol(data_total)){
data_total[[i]] <- as.numeric(data_total[[i]])
}
print("Datos Capturados!")
return(data_total)
}
} |
context("geocode")

# Exercises geocode() against every supported backend, checking that both the
# data.frame and list output forms have the documented column layout and types
# for a resolvable query as well as for one that cannot be matched.
test_that("geocode output is consistent", {
  for (src in c("pickpoint", "dsk", "google")) {
    df_good <- geocode("wolfville, ns", output = "data.frame", source = src)
    df_bad <- geocode("don't you dare geocode this", output = "data.frame", source = src)
    expect_is(df_good, "data.frame")
    expect_is(df_bad, "data.frame")

    required_columns <- c("query", "source", "status", "rank", "lon", "lat", "address",
                          "bbox_n", "bbox_e", "bbox_s", "bbox_w")
    n_required <- length(required_columns)
    expect_equal(names(df_good)[1:n_required], required_columns)
    # a failed lookup may return fewer columns, but never in a different order
    expect_equal(names(df_bad), required_columns[1:ncol(df_bad)])

    required_types <- c("character", "character", "character", "integer", "numeric",
                        "numeric", "character", "numeric", "numeric", "numeric", "numeric")
    expect_equivalent(vapply(df_good[1:n_required], class, character(1)),
                      required_types)
    expect_equivalent(vapply(df_bad, class, character(1)),
                      required_types[1:ncol(df_bad)])

    expect_equal(nrow(df_good), 1)
    expect_equal(nrow(df_bad), 1)

    list_good <- geocode("wolfville, ns", output = "list", source = src)
    list_bad <- geocode("don't you dare geocode this", output = "list", source = src)
    expect_is(list_good, "list")
    expect_is(list_bad, "list")
    expect_length(list_good, 1)
    expect_length(list_bad, 1)
  }
})
# Zero-length and missing inputs should produce zero-row / single-row results
# that keep the same column layout as a successful query; NA and "" behave the
# same apart from the query column itself.
test_that("emptys and nulls produce the correct output", {
  empty_df <- geocode(character(0), output = "data.frame")
  expect_is(empty_df, "data.frame")
  expect_equal(nrow(empty_df), 0)

  empty_list <- geocode(character(0), output = "list")
  expect_is(empty_list, "list")
  expect_length(empty_list, 0)

  expect_silent(geocode(NA_character_, output = "data.frame"))
  na_df <- geocode(NA_character_, output = "data.frame")
  expect_is(na_df, "data.frame")
  expect_equal(nrow(na_df), 1)

  na_list <- geocode(NA_character_, output = "list")
  expect_is(na_list, "list")
  expect_length(na_list, 1)

  # drop the query column before comparing NA against ""
  expect_identical(na_df[-1], geocode("", output = "data.frame")[-1])
  expect_identical(na_list, geocode("", output = "list"))

  good_df <- geocode("wolfville, ns", output = "data.frame")
  expect_equal(names(na_df), names(good_df)[1:ncol(na_df)])
})
# Vectorized input should yield exactly one result per query, for every
# supported backend.
test_that("output is vectorized with correct lengths", {
  for (source in c("pickpoint", "google", "dsk")) {
    cities <- c("wolfville, ns", "halifax, ns", "calgary, ab", "auckland, nz", "middlebury, vt",
                "ottawa, on")
    # Bug fix: the loop variable was previously ignored and "pickpoint" was
    # hard-coded, so the google and dsk backends were never actually tested.
    df <- geocode(cities, output = "data.frame", source = source)
    expect_equal(length(cities), nrow(df))
    alist <- geocode(cities, output = "list", source = source)
    expect_equal(length(cities), length(alist))
  }
})
# A failing backend must fail quietly rather than raise, both for scalar and
# vectorized input.
test_that("errors in the geocode function don't stop execution", {
  expect_silent(geocode("something", source = "error_source"))
  expect_silent(geocode(rep("something", 10), source = "error_source", progress = "none"))
})

# A rejected API key is surfaced through the status column, not an error.
test_that("invalid pickpoint API key results in correct error message", {
  result <- geocode("wolfville ns", source = "pickpoint", key = "notavalidkey")
  expect_equal(result$status, "Invalid API key for pickpoint: notavalidkey")
})

# Argument validation: each malformed argument maps to a specific condition.
test_that("invalid parameters are detected", {
  expect_error(geocode("something", quiet = NULL),
               "'quiet' must be TRUE or FALSE")
  expect_error(geocode("something", source = "not a source"),
               "Unrecognized geocode source: not a source")
  expect_error(geocode("something", messaging = "not a logical"),
               "'messaging' must be TRUE or FALSE")
  expect_message(geocode("something", messaging = TRUE),
                 "Parameter 'messaging' is deprecated. Use 'quiet = FALSE' instead")
  expect_error(geocode("something", limit = "not a number"),
               "'limit' must be numeric")
  expect_error(geocode("something", output = "a fish"),
               "'arg' should be one of")
})
# Guessed defaults (API key, type coercion) should be announced via message();
# supplying an explicit key keeps the call silent.
# Fix: corrected the typo "mesages" -> "messages" in the test description.
test_that("messages are printed when defaults are guessed", {
  expect_message(geocode("something"),
                 "Using default API key for pickpoint.io.")
  expect_message(geocode(factor("something")),
                 "Coercing argument 'location' from 'factor' to 'character'")
  expect_silent(geocode("something", key = "yxsN6E8EYYHFF_xsa_uL", source = "pickpoint"))
})
# The default backend is controlled by the prettymapr.geosource option;
# restore the previous value afterwards so other tests are unaffected.
test_that("default source can be set from options", {
  previous <- options(prettymapr.geosource = "google")
  result <- geocode("wolfville ns")
  expect_equal(result$source, "google")
  options(prettymapr.geosource = previous$prettymapr.geosource)
})

# HTTP failures should produce a warning (not an error) when quiet = FALSE.
test_that("non 200 status codes throw warning when quiet = FALSE", {
  expect_warning(geocode("something", key = "not a key", quiet = FALSE))
})
# An empty string mixed into a vector of queries must not shift the alignment
# of the query/source columns of the remaining rows.
test_that("vectors that contain zero-length input don't screw up the query / source columns", {
  cities <- c("wolfville, ns", "halifax, ns", "calgary, ab", "auckland, nz", "middlebury, vt",
              "ottawa, on")
  df1 <- geocode(cities)
  list1 <- geocode(cities, output = "list")
  cities[2] <- ""
  df2 <- geocode(cities)
  list2 <- geocode(cities, output = "list")
  expect_identical(nrow(df1), nrow(df2))
  # Bug fix: the result of a bare identical() call was silently discarded, so
  # the row-wise comparison was never actually asserted.
  expect_identical(df1[-2, ], df2[-2, ])
})
# Progress output should appear only for multi-element input under the default
# progress setting; single queries and progress = "none" stay silent.
test_that("progress bar is hidden when appropriate", {
  cities <- c("wolfville, ns", "halifax, ns")
  expect_output(geocode(cities, key = "yxsN6E8EYYHFF_xsa_uL"))
  expect_silent(geocode(cities, key = "yxsN6E8EYYHFF_xsa_uL", progress = "none"))
  expect_silent(geocode(cities[1], key = "yxsN6E8EYYHFF_xsa_uL"))
})

# set_default_geocoder()/get_default_geocoder() round-trip; passing NULL
# restores the packaged default ("pickpoint").
test_that("setting the default source changes the source", {
  original_source <- get_default_geocoder()
  expect_equal(original_source, "pickpoint")
  expect_equal(geocode("wolfville ns")$source, original_source)

  replacement <- "google"
  set_default_geocoder(replacement)
  expect_equal(get_default_geocoder(), replacement)
  expect_equal(geocode("wolfville ns")$source, replacement)

  set_default_geocoder(NULL)
  expect_equal(get_default_geocoder(), "pickpoint")
  expect_equal(geocode("wolfville ns")$source, "pickpoint")
})
# Simulate mean trait evolution on a phylogeny under a primary model.
#
# phy:      a phylo object
# map:      optional list of per-branch regime maps; ignored (and rebuilt as a
#           single-regime map covering each branch) when every element of
#           `pars` is scalar
# model:    name of the evolutionary model, forwarded to sim_pet()
# pars:     named vector/list of model parameters
# sampling: kept for interface compatibility; not used in this function
# bounds:   lower/upper bounds on the trait, forwarded to sim_pet()
#
# Returns the simulated trait means produced by sim_pet().
sim_mte <- function(phy, map = NULL, model = "OU", pars = c(root = 2, theta = 1, sigma_sq = 0.1, alpha = 1),
                    sampling = c(1, 7), bounds = c(-Inf, Inf)){
  n_tips <- length(phy$tip.label)
  # With only scalar parameters there is a single regime: give every branch a
  # one-element map, named "1", spanning its full length.
  if (all(vapply(pars, length, integer(1)) == 1)) {
    regime_map <- lapply(phy$edge.length, function(len) c('1' = len))
  } else {
    regime_map <- map
  }
  sim_pet(phy, regime_map, model, pars, n_tips, bounds)
}
# PipeOp performing kernel PCA (kernlab::kpca) as a preprocessing step.
# At training time the numeric/integer features are replaced by their
# projection onto the kernel principal components; at prediction time new
# data is projected using the stored fit.
PipeOpKernelPCA = R6Class("PipeOpKernelPCA",
  inherit = PipeOpTaskPreproc,
  public = list(
    # Construct the PipeOp.
    #
    # id:         identifier of this PipeOp within a graph
    # param_vals: initial hyperparameter values
    #
    # Parameters tagged "kpca" are forwarded verbatim to kernlab::kpca().
    initialize = function(id = "kernelpca", param_vals = list()) {
      ps = ParamSet$new(params = list(
        # kernel function used by kpca; see kernlab for each kernel's meaning
        ParamFct$new("kernel", default = "rbfdot", levels = c("rbfdot", "polydot",
          "vanilladot", "tanhdot", "laplacedot", "besseldot", "anovadot", "splinedot"), tags = c("train", "kpca")),
        # list of kernel hyperparameters (passed through untyped)
        ParamUty$new("kpar", tags = c("train", "kpca")),
        # number of components to return; 0 means "as many as needed"
        ParamInt$new("features", default = 0, lower = 0, tags = c("train", "kpca")),
        # eigenvalue threshold below which components are dropped
        ParamDbl$new("th", default = 1e-04, lower = 0, tags = c("train", "kpca")),
        # how to handle missing values before decomposition
        ParamUty$new("na.action", default = stats::na.omit, tags = c("train", "kpca"))
      ))
      super$initialize(id, param_set = ps, param_vals = param_vals,
        packages = "kernlab", feature_types = c("numeric", "integer"))
    }
  ),
  private = list(
    # Fit kpca on the training features and return the rotated (projected)
    # training data as the transformed task data.
    .train_dt = function(dt, levels, target) {
      pcr = invoke(kernlab::kpca, as.matrix(dt), .args = self$param_set$get_values(tags = "kpca"))
      self$state$pcr = pcr
      # Blank the rotated-data slot before storing the fit so the (potentially
      # large) projected training data is not kept in the state; it is
      # returned separately below.
      self$state$pcr@rotated = matrix(numeric(0))
      kernlab::rotated(pcr)
    },
    # Project new data onto the stored kernel principal components.
    .predict_dt = function(dt, levels) {
      kernlab::predict(self$state$pcr, as.matrix(dt))
    }
  )
)
# Register the PipeOp under the key "kernelpca" in the mlr3pipelines dictionary.
mlr_pipeops$add("kernelpca", PipeOpKernelPCA)
# Run the burn-in phase of the jomo MCMC sampler for single-level joint
# imputation of categorical variables only, recording the full chains of
# parameter draws (rather than returning multiple imputations).
#
# Y.cat:       data frame/matrix of categorical variables with missing values
# Y.numcat:    number of categories of each column of Y.cat
# X:           covariate (design) matrix; defaults to an intercept-only column
# beta.start:  starting fixed-effect coefficients, one column per latent-normal
#              dimension, i.e. sum(Y.numcat) - length(Y.numcat) columns
# l1cov.start: starting level-1 covariance matrix
# l1cov.prior: prior scale matrix for the level-1 covariance
# start.imp:   optional starting value for the latent-normal imputed data
# nburn:       number of burn-in iterations to run and record
# output:      1 to print posterior summaries at the end, anything else quiet
# out.iter:    progress is printed every out.iter iterations
#
# Returns a list: finimp (original + one imputed dataset), collectbeta and
# collectomega (chains of beta / covariance draws), and finimp.latnorm (the
# final latent-normal data).
jomo1cat.MCMCchain <-
  function(Y.cat, Y.numcat, X=NULL, beta.start=NULL, l1cov.start=NULL, l1cov.prior=NULL, start.imp=NULL, nburn=100, output=1, out.iter=10) {
    # Default design matrix and starting values sized to the latent-normal
    # dimension (one column per non-reference category).
    if (is.null(X)) X=matrix(1,nrow(Y.cat),1)
    if (is.null(beta.start)) beta.start=matrix(0,ncol(X),((sum(Y.numcat)-length(Y.numcat))))
    if (is.null(l1cov.start)) l1cov.start=diag(1,ncol(beta.start))
    if (is.null(l1cov.prior)) l1cov.prior=diag(1,ncol(beta.start))
    # Recode every categorical column to integer levels 1..k, remembering the
    # original labels so they can be restored on output.
    previous_levels<-list()
    Y.cat<-data.frame(Y.cat)
    for (i in 1:ncol(Y.cat)) {
      Y.cat[,i]<-factor(Y.cat[,i])
      previous_levels[[i]]<-levels(Y.cat[,i])
      levels(Y.cat[,i])<-1:nlevels(Y.cat[,i])
    }
    # Build the missingness-pattern matrix (one row per distinct pattern).
    # A single-column Y.cat has exactly two possible patterns; otherwise the
    # patterns are derived via mice's md.pattern.
    if (any(is.na(Y.cat))) {
      if (ncol(Y.cat)==1) {
        miss.pat<-matrix(c(0,1),2,1)
        n.patterns<-2
      } else {
        miss.pat<-md.pattern.mice(Y.cat, plot=F)
        miss.pat<-miss.pat[,colnames(Y.cat)]
        n.patterns<-nrow(miss.pat)-1
      }
    } else {
      miss.pat<-matrix(0,2,ncol(Y.cat)+1)
      n.patterns<-nrow(miss.pat)-1
    }
    # Assign each row of Y.cat the index of its missingness pattern.
    miss.pat.id<-rep(0,nrow(Y.cat))
    for (i in 1:nrow(Y.cat)) {
      k <- 1
      flag <- 0
      while ((k <= n.patterns) & (flag == 0)) {
        if (all(!is.na(Y.cat[i,])==miss.pat[k,1:(ncol(miss.pat))])) {
          miss.pat.id[i] <- k
          flag <- 1
        } else {
          k <- k + 1
        }
      }
    }
    # Covariates must be numeric for the C sampler; factors are coerced to
    # their underlying integer codes.
    for (i in 1:ncol(X)) {
      if (is.factor(X[,i])) X[,i]<-as.numeric(X[,i])
    }
    # Sanity-check all dimensions agree before handing anything to C.
    stopifnot( nrow(beta.start)==ncol(X), ncol(beta.start)==((sum(Y.numcat)-length(Y.numcat))),nrow(l1cov.start)==ncol(l1cov.start), nrow(l1cov.start)==ncol(beta.start), nrow(l1cov.prior)==ncol(l1cov.prior),nrow(l1cov.prior)==nrow(l1cov.start))
    # Deep-copy the starting values so the C code cannot mutate caller inputs.
    betait=matrix(0,nrow(beta.start),ncol(beta.start))
    for (i in 1:nrow(beta.start)) {
      for (j in 1:ncol(beta.start)) betait[i,j]=beta.start[i,j]
    }
    covit=matrix(0,nrow(l1cov.start),ncol(l1cov.start))
    for (i in 1:nrow(l1cov.start)) {
      for (j in 1:ncol(l1cov.start)) covit[i,j]=l1cov.start[i,j]
    }
    # Only one "imputation" (the state after burn-in) is stored by this function.
    nimp=1;
    colnamycat<-colnames(Y.cat)
    colnamx<-colnames(X)
    Y.cat<-data.matrix(Y.cat)
    storage.mode(Y.cat) <- "numeric"
    X<-data.matrix(X)
    storage.mode(X) <- "numeric"
    # Missing covariates are not supported.
    stopifnot(!any(is.na(X)))
    Y=cbind(Y.cat)
    # Yi is the latent-normal representation: one column per non-reference
    # category; cells are NA wherever the corresponding categorical value is
    # missing.
    Yi=cbind(matrix(0,nrow(Y.cat),(sum(Y.numcat)-length(Y.numcat))))
    h=1
    for (i in 1:length(Y.numcat)) {
      for (j in 1:nrow(Y)) {
        if (is.na(Y.cat[j,i])) {
          Yi[j,h:(h+Y.numcat[i]-2)]=NA
        }
      }
      h=h+Y.numcat[i]-1
    }
    # Suppress progress printing by pushing the report interval past nburn.
    if (output!=1) out.iter=nburn+2
    # imp stacks the original data (rows 1..n) on top of the imputed data
    # (rows n+1..2n), with X, an id column and an Imputation indicator.
    imp=matrix(0,nrow(Y)*(nimp+1),ncol(Y)+ncol(X)+2)
    imp[1:nrow(Y),1:ncol(Y)]=Y
    imp[1:nrow(X), (ncol(Y)+1):(ncol(Y)+ncol(X))]=X
    imp[1:nrow(X), (ncol(Y)+ncol(X)+1)]=c(1:nrow(Y))
    Yimp=Yi
    Yimp2=matrix(Yimp, nrow(Yimp),ncol(Yimp))
    imp[(nrow(X)+1):(2*nrow(X)),(ncol(Y)+1):(ncol(Y)+ncol(X))]=X
    imp[(nrow(X)+1):(2*nrow(X)), (ncol(Y)+ncol(X)+1)]=c(1:nrow(Y))
    imp[(nrow(X)+1):(2*nrow(X)), (ncol(Y)+ncol(X)+2)]=1
    # Chains of draws, filled in-place by the C sampler.
    betapost<- array(0, dim=c(nrow(beta.start),ncol(beta.start),nburn))
    omegapost<- array(0, dim=c(nrow(l1cov.start),ncol(l1cov.start),nburn))
    meanobs<-colMeans(Yi,na.rm=TRUE)
    # Validate a user-supplied starting imputation; fall back to column means
    # of the observed latent data when it is unusable or absent.
    if (!is.null(start.imp)) {
      start.imp<-as.matrix(start.imp)
      if ((nrow(start.imp)!=nrow(Yimp2))||(ncol(Yimp2)>ncol(start.imp))) {
        cat("start.imp dimensions incorrect. Not using start.imp as starting value for the imputed dataset.\n")
        start.imp=NULL
      } else {
        if ((nrow(start.imp)==nrow(Yimp2))&(ncol(Yimp2)<ncol(start.imp))) {
          Yimp2<-start.imp[,1:ncol(Yimp2)]
          cat("NOTE: start.imp has more columns than needed. Dropping unnecessary columns.\n")
        } else {
          Yimp2<-start.imp
        }
      }
    }
    if (is.null(start.imp)) {
      for (i in 1:nrow(Yi)) for (j in 1:ncol(Yi)) if (is.na(Yimp[i,j])) Yimp2[i,j]=meanobs[j]
    }
    # Run the Gibbs sampler in C; Y.cat, Yimp2, betapost and omegapost are
    # updated in place.
    .Call("jomo1C", Y, Yimp, Yimp2, Y.cat, X,betait,betapost,covit,omegapost, nburn, l1cov.prior,Y.numcat, 0, out.iter, 1, miss.pat.id, n.patterns, PACKAGE = "jomo")
    # Lower half of imp receives the imputed categorical data.
    imp[(nrow(Y)+1):(2*nrow(Y)),1:ncol(Y)]=Y.cat
    imp<-data.frame(imp)
    # Restore the original factor labels on the output columns.
    for (i in 1:ncol(Y)) {
      imp[,i]<-as.factor(imp[,i])
      levels(imp[,i])<-previous_levels[[i]]
    }
    if (is.null(colnamycat)) colnamycat=paste("Y", 1:ncol(Y.cat), sep = "")
    if (is.null(colnamx)) colnamx=paste("X", 1:ncol(X), sep = "")
    colnames(imp)<-c(colnamycat,colnamx,"id","Imputation")
    # Names for the latent-normal columns: "<var>.<category index>".
    cnycatcomp<-rep(NA,(sum(Y.numcat)-length(Y.numcat)))
    count=0
    for ( j in 1:ncol(Y.cat)) {
      for (k in 1:(Y.numcat[j]-1)) {
        cnycatcomp[count+k]<-paste(colnamycat[j],k,sep=".")
      }
      count=count+Y.numcat[j]-1
    }
    cnamycomp<-c(cnycatcomp)
    dimnames(betapost)[1] <- list(colnamx)
    dimnames(betapost)[2] <- list(cnamycomp)
    dimnames(omegapost)[1] <- list(cnamycomp)
    dimnames(omegapost)[2] <- list(cnamycomp)
    dimnames(Yimp2)[2] <- list(cnamycomp)
    betapostmean<-data.frame(apply(betapost, c(1,2), mean))
    omegapostmean<-data.frame(apply(omegapost, c(1,2), mean))
    if (output==1) {
      cat("The posterior mean of the fixed effects estimates is:\n")
      print(t(betapostmean))
      cat("\nThe posterior covariance matrix is:\n")
      print(omegapostmean)
    }
    return(list("finimp"=imp,"collectbeta"=betapost,"collectomega"=omegapost, "finimp.latnorm" = Yimp2))
  }
# Simulate example data: linear dose-response with three cohorts.
set.seed(888)
data <- dreamer_data_linear(
  n_cohorts = c(20, 20, 20), # subjects per dose arm
  dose = c(0, 3, 10),        # doses administered
  b1 = 1,                    # intercept
  b2 = 3,                    # slope
  sigma = 5                  # residual standard deviation
)
# Bayesian model averaging over a linear and a quadratic dose-response model,
# each with prior weight 1/2.
output <- dreamer_mcmc(
  data = data,
  n_adapt = 1e3,
  n_burn = 1e2,
  n_iter = 1e3,
  n_chains = 2,
  silent = TRUE,
  mod_linear = model_linear(
    mu_b1 = 0,
    sigma_b1 = 1,
    mu_b2 = 0,
    sigma_b2 = 1,
    shape = 1,
    rate = .001,
    w_prior = 1 / 2
  ),
  mod_quad = model_quad(
    mu_b1 = 0,
    sigma_b1 = 1,
    mu_b2 = 0,
    sigma_b2 = 1,
    mu_b3 = 0,
    sigma_b3 = 1,
    shape = 1,
    rate = .001,
    w_prior = 1 / 2
  )
)
# Posterior model weights and the averaged dose-response curve.
output$w_post
plot(output)

library(ggplot2)
# Longitudinal example: linear dose-response observed over time under an
# "itp" (interpolating) longitudinal model.
set.seed(889)
data_long <- dreamer_data_linear(
  n_cohorts = c(10, 10, 10, 10),
  # NOTE(review): `doses` here vs `dose` above -- both resolve to the same
  # argument via R partial matching; confirm the intended argument name.
  doses = c(.25, .5, .75, 1.5),
  b1 = 0,
  b2 = 2,
  sigma = .5,
  longitudinal = "itp",
  times = c(0, 12, 24, 52),
  t_max = 52,
  a = .5,
  c1 = .1
)
\dontrun{
ggplot(data_long, aes(time, response, group = dose, color = factor(dose))) +
  geom_point()
}
# Fit the same two dose-response models, each paired with a longitudinal
# sub-model (itp for the linear model, linear-in-time for the quadratic).
output_long <- dreamer_mcmc(
  data = data_long,
  n_adapt = 1e3,
  n_burn = 1e2,
  n_iter = 1e3,
  n_chains = 2,
  silent = TRUE,
  mod_linear = model_linear(
    mu_b1 = 0,
    sigma_b1 = 1,
    mu_b2 = 0,
    sigma_b2 = 1,
    shape = 1,
    rate = .001,
    w_prior = 1 / 2,
    longitudinal = model_longitudinal_itp(
      mu_a = 0,
      sigma_a = 1,
      a_c1 = 0,
      b_c1 = 1,
      t_max = 52
    )
  ),
  mod_quad = model_quad(
    mu_b1 = 0,
    sigma_b1 = 1,
    mu_b2 = 0,
    sigma_b2 = 1,
    mu_b3 = 0,
    sigma_b3 = 1,
    shape = 1,
    rate = .001,
    w_prior = 1 / 2,
    longitudinal = model_longitudinal_linear(
      mu_a = 0,
      sigma_a = 1,
      t_max = 52
    )
  )
)
\dontrun{
# Plot the averaged fit and the quadratic model alone, over time and at the
# final time point.
plot(output_long, data = data_long)
plot(output_long$mod_quad, data = data_long)
plot(output_long, data = data_long, times = 52)
plot(output_long$mod_quad, data = data_long, times = 52)
}
# knitr chunk defaults for the vignette.
# Fix: the `comment` option was a garbled, unterminated string ("), which made
# this chunk syntactically invalid; restored to the conventional "#>" prefix.
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>",
  out.width = "220px",
  fig.align = 'center'
)
library(png)
library(nvctr)
# Figure for Example 1.
knitr::include_graphics("ex1img.png")
# Example 1: delta vector from position A to B, expressed in A's local
# north-east-down (NED) frame, plus line-of-sight distance, elevation and
# azimuth.
lat_EA <- rad(1)
lon_EA <- rad(2)
z_EA <- 3
lat_EB <- rad(4)
lon_EB <- rad(5)
z_EB <- 6
(n_EA_E <- lat_lon2n_E(lat_EA, lon_EA))
(n_EB_E <- lat_lon2n_E(lat_EB, lon_EB))
(p_AB_E <- n_EA_E_and_n_EB_E2p_AB_E(n_EA_E, n_EB_E, z_EA, z_EB))
(R_EN <- n_E2R_EN(n_EA_E))
# Rotate the ECEF delta into A's NED frame.
(p_AB_N <- base::t(R_EN) %*% p_AB_E %>%
    as.vector())
(los_distance <- norm(p_AB_N, type = "2"))
(elevation <- atan2(-p_AB_N[3], p_AB_N[2]) %>% deg())
(azimuth <- atan2(p_AB_N[2], p_AB_N[1]) %>%
    deg())

# Example 2: position C from a delta measured in the body frame of vehicle B,
# given B's position/depth and yaw-pitch-roll attitude.
knitr::include_graphics("ex2img.png")
p_BC_B <- c(3000, 2000, 100)
(n_EB_E <- unit(c(1, 2, 3)))
z_EB <- -400
(R_NB <- zyx2R(rad(10),rad(20),rad(30)))
# WGS-72 ellipsoid parameters (semi-major axis and flattening).
a <- 6378135
f <- 1 / 298.26
(R_EN <- n_E2R_EN(n_EB_E))
(R_EB <- R_EN %*% R_NB)
(p_BC_E <- R_EB %*% p_BC_B)
l <- n_EA_E_and_p_AB_E2n_EB_E(n_EB_E, p_BC_E, z_EB, a, f)
(n_EB_E <- l[['n_EB_E']])
(z_EB <- l[['z_EB']])
lat_lon_EB <- n_E2lat_lon(n_EB_E)
(latitude <- lat_lon_EB[1])
(longitude <- lat_lon_EB[2])
(height <- -z_EB)

# Example 3: ECEF position vector -> latitude, longitude, height.
knitr::include_graphics("ex3img.png")
(p_EB_E <- 6371e3 * c(0.9, -1, 1.1))
l <- p_EB_E2n_EB_E(p_EB_E)
(n_EB_E <- l[['n_EB_E']])
(z_EB <- l[['z_EB']])
lat_lon_EB <- n_E2lat_lon(n_EB_E)
(latEB <- lat_lon_EB[1])
(lonEB <- lat_lon_EB[2])
(hEB <- -z_EB)

# Example 4: latitude, longitude, height -> ECEF position vector.
knitr::include_graphics("ex4img.png")
lat_EB <- rad(1)
lon_EB <- rad(2)
h_EB <- 3
(n_EB_E <- lat_lon2n_E(lat_EB, lon_EB))
(p_EB_E <- n_EB_E2p_EB_E(n_EB_E, -h_EB))

# Example 5: surface (great-circle) and Euclidean (chord) distance A to B,
# cross-checked against geosphere.
knitr::include_graphics("ex5img.png")
n_EA_E <- lat_lon2n_E(rad(88), rad(0));
n_EB_E <- lat_lon2n_E(rad(89), rad(-170))
r_Earth <- 6371e3
(s_AB <- (atan2(base::norm(pracma::cross(n_EA_E, n_EB_E), type = "2"),
                pracma::dot(n_EA_E, n_EB_E)) * r_Earth))
(d_AB <- base::norm(n_EB_E - n_EA_E, type = "2") * r_Earth)
geosphere::distGeo(c(0, 88), c(-170, 89))

# Example 6: interpolate a position between two timed fixes.
knitr::include_graphics("ex6img.png")
n_EB_E_t0 <- lat_lon2n_E(rad(89.9), rad(-150))
n_EB_E_t1 <- lat_lon2n_E(rad(89.9), rad(150))
t0 <- 10
t1 <- 20
ti <- 16
t_frac <- (ti - t0) / (t1 - t0)
(n_EB_E_ti <- unit(n_EB_E_t0 + t_frac * (n_EB_E_t1 - n_EB_E_t0) ))
(l <- n_E2lat_lon(n_EB_E_ti) %>% deg())
(latitude <- l[1])
(longitude <- l[2])

# Example 7: mean (geographical midpoint) of three positions.
knitr::include_graphics("ex7img.png")
n_EA_E <- lat_lon2n_E(rad(90), rad(0))
n_EB_E <- lat_lon2n_E(rad(60), rad(10))
n_EC_E <- lat_lon2n_E(rad(50), rad(-20))
(n_EM_E <- unit(n_EA_E + n_EB_E + n_EC_E))
(l <- n_E2lat_lon(n_EM_E) %>% deg())
(latitude <- l[1])
(longitude <- l[2])

# Example 8: destination point given start, azimuth and surface distance.
knitr::include_graphics("ex8img.png")
n_EA_E <- lat_lon2n_E(rad(80),rad(-90))
azimuth <- rad(200)
s_AB <- 1000
r_Earth <- 6371e3
# Local east and north unit vectors at A, built from the Earth's rotation axis.
k_east_E <- unit(pracma::cross(base::t(R_Ee()) %*% c(1, 0, 0) %>% as.vector(), n_EA_E))
k_north_E <- pracma::cross(n_EA_E, k_east_E)
d_E <- k_north_E * cos(azimuth) + k_east_E * sin(azimuth)
n_EB_E <- n_EA_E * cos(s_AB / r_Earth) + d_E * sin(s_AB / r_Earth)
(l <- n_E2lat_lon(n_EB_E) %>% deg())
(latitude <- l[1])
(longitude <- l[2])

# Example 9: intersection of two great-circle paths A1->A2 and B1->B2; the
# sign() picks the intersection nearest A1 of the two antipodal candidates.
knitr::include_graphics("ex9img.png")
n_EA1_E <- lat_lon2n_E(rad(50), rad(180))
n_EA2_E <- lat_lon2n_E(rad(90), rad(180))
n_EB1_E <- lat_lon2n_E(rad(60), rad(160))
n_EB2_E <- lat_lon2n_E(rad(80), rad(-140))
n_EC_E_tmp <- unit(pracma::cross(
  pracma::cross(n_EA1_E, n_EA2_E),
  pracma::cross(n_EB1_E, n_EB2_E)))
n_EC_E <- sign(pracma::dot(n_EC_E_tmp, n_EA1_E)) * n_EC_E_tmp
(l <- n_E2lat_lon(n_EC_E) %>% deg())
(latitude <- l[1])
(longitude <- l[2])

# Example 10: cross-track distance of B from the great circle through A1, A2
# (great-circle and Euclidean variants).
knitr::include_graphics("ex10img.png")
n_EA1_E <- lat_lon2n_E(rad(0), rad(0))
n_EA2_E <- lat_lon2n_E(rad(10),rad(0))
n_EB_E <- lat_lon2n_E(rad(1), rad(0.1))
r_Earth <- 6371e3
c_E <- unit(pracma::cross(n_EA1_E, n_EA2_E))
knitr::include_graphics("solution10img.png")
(s_xt <- (acos(pracma::dot(c_E, n_EB_E)) - pi / 2) * r_Earth)
(d_xt <- -pracma::dot(c_E, n_EB_E) * r_Earth)

# Example 11: closest point on a great circle to a given position B.
knitr::include_graphics("ex11img.png")
n_EA1_E <- lat_lon2n_E(rad(0), rad(3))
n_EA2_E <- lat_lon2n_E(rad(0),rad(10))
n_EB_E <- lat_lon2n_E(rad(-1), rad(-1))
n_EN_E <- unit(pracma::cross(n_EA1_E, n_EA2_E))
n_EC_E_tmp <- unit(
  pracma::cross(
    n_EN_E,
    pracma::cross(n_EN_E, n_EB_E)
  )
)
n_EC_E <- sign(pracma::dot(n_EC_E_tmp, n_EB_E)) * n_EC_E_tmp
(l <- n_E2lat_lon(n_EC_E) %>% deg())
(latitude <- l[1])
(longitude <- l[2])
# field_mask() should serialize nested lists using the Google API field-mask
# grammar: siblings comma-separated, a single nested path written with dots,
# and multiple children of one parent grouped in parentheses.
test_that("field_mask works", {
  expect_equal(field_mask(list(a = "A")), "a")
  expect_equal(field_mask(list(a = "A", b = "B")), "a,b")
  expect_equal(field_mask(list(a = list(b = "B", c = "C"))), "a(b,c)")
  expect_equal(field_mask(list(a = "A", b = list(c = "C"))), "a,b.c")
  expect_equal(
    field_mask(list(a = "A", b = list(c = "C", d = list(e = "E")))),
    "a,b.c,b.d.e"
  )
  expect_equal(
    field_mask(list(a = "A", b = list(c = "C", d = "D", e = list(f = "F")))),
    "a,b(c,d),b.e.f"
  )
})
NULL
# S3 print method for IsothermalGrowth objects.
#
# Writes a short human-readable summary (model name and primary-model
# parameters) to the console.
#
# x:   an IsothermalGrowth object with elements $model and $pars
# ...: ignored; present for S3 method compatibility
#
# Returns `x` invisibly, following the standard print-method convention, so
# the object keeps flowing through pipes. (Previously the method leaked the
# return value of print(unlist(x$pars)) instead.)
print.IsothermalGrowth <- function(x, ...) {
  cat("Growth prediction based on primary models\n\n")
  cat(paste("Growth model:", x$model, "\n\n"))
  cat("Parameters of the primary model:\n")
  print(unlist(x$pars))
  invisible(x)
}
# S3 plot method for IsothermalGrowth objects.
#
# Draws the predicted growth curve (logN versus time) stored in x$simulation
# as a ggplot2 line plot with cowplot theming.
#
# x:         an IsothermalGrowth object whose $simulation data frame contains
#            `time` and `logN` columns
# y:         ignored; present for S3 method compatibility
# ...:       ignored
# line_col:  colour of the line
# line_size: width of the line (passed to geom_line's `size`)
# line_type: line type (solid, dashed, ...)
#
# Returns a ggplot object (so further layers can be added by the caller).
plot.IsothermalGrowth <- function(x, y=NULL, ...,
                                  line_col = "black",
                                  line_size = 1,
                                  line_type = "solid") {
  ggplot(x$simulation) +
    geom_line(aes(x = .data$time, y = .data$logN),
              col = line_col,
              size = line_size,
              linetype = line_type) +
    theme_cowplot()
}
# S3 fitted() method for ellipsefitlist objects.
#
# Assembles a single data frame holding, for every fitted ellipse, the
# identifying columns of object$Estimates (everything before the "b.x"
# column) replicated once per predicted point, together with the fitted
# input (pred.x) and output (pred.y) values.
#
# object: an ellipsefitlist with $Estimates (data frame whose leading columns
#         identify each model) and $models (list of fits, each carrying
#         pred.x / pred.y vectors)
# ...:    ignored; present for S3 method compatibility
#
# Returns a data.frame: the id columns plus "input" and "output".
fitted.ellipsefitlist <- function(object, ...) {
  id_cols <- object$Estimates[, 1:(which(colnames(object$Estimates) == "b.x") - 1)]
  # One replicate of each model's id row per predicted point.
  # Fix: the previous mapply()-built index used 1:length() and returned a
  # list (not an index vector) whenever models had differing numbers of
  # predictions, breaking the row subset below; rep(..., times = ...) handles
  # both the equal- and unequal-length cases correctly.
  n_pred <- vapply(object$models, function(m) length(m$pred.x), integer(1))
  id_cols <- id_cols[rep(seq_along(object$models), times = n_pred), ]
  fitted_x <- unlist(lapply(object$models, function(m) m$pred.x))
  fitted_y <- unlist(lapply(object$models, function(m) m$pred.y))
  data.frame(id_cols, "input" = fitted_x, "output" = fitted_y)
}
# Private environment holding the shared state used by the
# c212.interim.BB.indep sampler and its helper functions defined below.
Mi1 <- new.env()
# RCS revision string identifying the source this module derives from.
Mi1$Id <- "$Id: c212.interim.BB.hier3.lev0.R,v 1.14 2018/10/03 15:40:56 clb13102 Exp clb13102 $"
# Fit the c212 interim-analysis Berry & Berry (point-mass mixture) model with
# independent intervals (hierarchy level 0) by MCMC, delegating the sampling
# to compiled C code.
#
# trial.data:        adverse-event count data by interval/body-system/AE
# sim_type:          sampler type for the non-theta parameters ("SLICE"/"MH")
# burnin, iter:      burn-in length and total iterations per chain
# nchains:           number of MCMC chains
# theta_algorithm:   sampling algorithm for theta ("MH", "BB2004", "Adapt",
#                    "Indep"); mapped to an integer code for the C layer
# global.sim.params: default proposal/slice widths per parameter and sampler
# sim.params:        optional per-parameter overrides
# monitor:           which variables to record (1 = record)
# initial_values:    optional starting values; random inits otherwise
# hyper_params:      fixed hyper-parameters of the hierarchical priors
# global.pm.weight / pm.weights: point-mass proposal weights for theta
# adapt_phase:       adaption phase indicator passed to C
# memory_model:      "HIGH"/"LOW" memory strategy in the C sampler
#
# Returns a list of recorded chains and metadata, with attr "model" set to
# "BB_pois_indep", or NULL if the inputs fail validation.
c212.interim.BB.indep <- function(trial.data, sim_type = "SLICE", burnin = 10000, iter = 60000, nchains = 5,
	theta_algorithm = "MH",
	global.sim.params = data.frame(type = c("MH", "MH", "MH", "MH", "SLICE", "SLICE", "SLICE"),
	param = c("sigma_MH_alpha", "sigma_MH_beta", "sigma_MH_gamma", "sigma_MH_theta",
	"w_alpha", "w_beta", "w_gamma"),
	value = c(3, 3, 0.2, 0.25, 1, 1, 1), control = c(0, 0, 0, 0, 6, 6, 6),
	stringsAsFactors = FALSE),
	sim.params = NULL,
	monitor = data.frame(variable = c("theta", "gamma", "mu.gamma", "mu.theta",
	"sigma2.theta", "sigma2.gamma",
	"mu.theta.0", "mu.gamma.0", "tau2.theta.0", "tau2.gamma.0",
	"pi", "alpha.pi", "beta.pi"),
	monitor = c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
	stringsAsFactors = FALSE),
	initial_values = NULL,
	hyper_params = list(mu.gamma.0.0 = 0, tau2.gamma.0.0 = 10,
	mu.theta.0.0 = 0, tau2.theta.0.0 = 10, alpha.gamma.0.0 = 3, beta.gamma.0.0 = 1, alpha.theta.0.0 = 3,
	beta.theta.0.0 = 1, alpha.gamma = 3, beta.gamma = 1, alpha.theta = 3, beta.theta = 1, lambda.alpha = 1.0,
	lambda.beta = 1.0),
	global.pm.weight = 0.5,
	pm.weights = NULL,
	adapt_phase=1, memory_model = "HIGH")
{
	# Load/validate the interim data into the shared Mi1 environment; this also
	# initialises the chain starting values.
	interim = M_global$INTERIMdata(Mi1, trial.data, iter, nchains, burnin, initial_values)
	if (is.null(interim)) {
		return(NULL)
	}
	trial.data = interim$trial.data
	cntrl.data = interim$cntrl.data
	Mi1$Algo <- theta_algorithm
	Mi1$sim_type <- sim_type
	# The requested sampler type must have entries in the parameter table,
	# and all proposal/slice widths must be positive.
	if (nrow(global.sim.params[global.sim.params$type == sim_type,]) == 0) {
		print("Missing simulation parametetrs");  # (sic - message text kept as-is)
		return(NULL)
	}
	if (!all(global.sim.params$value > 0)) {
		print("Invalid simulation parameter value");
		return(NULL)
	}
	Mi1$global.sim.params <- global.sim.params
	# Level 0 = independent intervals (no hierarchy across intervals).
	Mi1$Level = 0
	# Merge per-parameter overrides and point-mass weights.
	sp = M_global$INTERIM_sim_paramsBB_3(Mi1, sim.params, pm.weights, sim_type, trial.data, cntrl.data)
	sim.params = sp$sim.params
	pm.weights = sp$pm.weights
	monitor = M_global$INTERIM_monitor_BB_3(monitor)
	# Copy the fixed hyper-parameters into the shared environment for the C call.
	Mi1$mu.gamma.0.0 <- hyper_params$mu.gamma.0.0
	Mi1$tau2.gamma.0.0 <- hyper_params$tau2.gamma.0.0
	Mi1$alpha.gamma <- hyper_params$alpha.gamma
	Mi1$beta.gamma <- hyper_params$beta.gamma
	Mi1$alpha.gamma.0.0 <- hyper_params$alpha.gamma.0.0
	Mi1$beta.gamma.0.0 <- hyper_params$beta.gamma.0.0
	Mi1$mu.theta.0.0 <- hyper_params$mu.theta.0.0
	Mi1$tau2.theta.0.0 <- hyper_params$tau2.theta.0.0
	Mi1$alpha.theta <- hyper_params$alpha.theta
	Mi1$beta.theta <- hyper_params$beta.theta
	Mi1$alpha.theta.0.0 <- hyper_params$alpha.theta.0.0
	Mi1$beta.theta.0.0 <- hyper_params$beta.theta.0.0
	Mi1$lambda.alpha <- hyper_params$lambda.alpha
	Mi1$lambda.beta <- hyper_params$lambda.beta
	# Map the theta algorithm name to the integer code expected by the C layer
	# (1 = BB2004 and the fallback, 2 = MH, 3 = Adapt, 4 = Indep).
	algo = 1
	if (Mi1$Algo == "BB2004") {
		algo <- 1;
	} else {
		if (Mi1$Algo == "MH") {
			algo <- 2;
		} else {
			if (Mi1$Algo == "Adapt") {
				algo <- 3;
			} else {
				if (Mi1$Algo == "Indep") {
					algo <- 4;
				} else {
					algo <- 1;
				}
			}
		}
	}
	# Run the sampler in C. Arrays are passed transposed (aperm) to match the
	# C-side memory layout. The return value is unused: samples are fetched via
	# the getter entry points below.
	Ret2 = .Call("c212BB_poisson_mc_exec", as.integer(nchains), as.integer(burnin),
	as.integer(iter), Mi1$sim_type,
	memory_model, Mi1$global.sim.params,
	sim.params,
	as.numeric(global.pm.weight),
	pm.weights,
	monitor,
	as.integer(Mi1$numIntervals), as.integer(Mi1$Level),
	Mi1$maxBs, as.integer(Mi1$numB), as.integer(Mi1$maxAEs),
	as.integer(t(Mi1$nAE)), as.integer(aperm(Mi1$x)), as.integer(aperm(Mi1$y)),
	as.numeric(aperm(Mi1$C)),
	as.numeric(aperm(Mi1$T)),
	as.numeric(aperm(Mi1$theta)),
	as.numeric(aperm(Mi1$gamma)),
	as.numeric(Mi1$mu.gamma.0.0),
	as.numeric(Mi1$tau2.gamma.0.0),
	as.numeric(Mi1$mu.theta.0.0),
	as.numeric(Mi1$tau2.theta.0.0),
	as.numeric(Mi1$alpha.gamma.0.0),
	as.numeric(Mi1$beta.gamma.0.0),
	as.numeric(Mi1$alpha.theta.0.0),
	as.numeric(Mi1$beta.theta.0.0),
	as.numeric(Mi1$alpha.gamma),
	as.numeric(Mi1$beta.gamma),
	as.numeric(Mi1$alpha.theta),
	as.numeric(Mi1$beta.theta),
	as.numeric(aperm(Mi1$mu.gamma.0)),
	as.numeric(aperm(Mi1$tau2.gamma.0)),
	as.numeric(aperm(Mi1$mu.theta.0)),
	as.numeric(aperm(Mi1$tau2.theta.0)),
	as.numeric(aperm(Mi1$mu.gamma)),
	as.numeric(aperm(Mi1$mu.theta)),
	as.numeric(aperm(Mi1$sigma2.gamma)),
	as.numeric(aperm(Mi1$sigma2.theta)),
	as.numeric(aperm(Mi1$pi)),
	as.numeric(aperm(Mi1$alpha.pi)),
	as.numeric(aperm(Mi1$beta.pi)),
	as.numeric(Mi1$lambda.alpha),
	as.numeric(Mi1$lambda.beta),
	as.integer(algo),
	as.integer(adapt_phase))
	# Retrieve each monitored chain from the C side; aperm undoes the C-order
	# transposition. Acceptance rates are fetched for the MH-sampled variables.
	mu.gamma.0_samples = NULL
	if (monitor[monitor$variable == "mu.gamma.0", ]$monitor == 1) {
		mu.gamma.0_samples <- .Call("getMuGamma0SamplesInterimAll")
		mu.gamma.0_samples = aperm(mu.gamma.0_samples)
	}
	mu.theta.0_samples = NULL
	if (monitor[monitor$variable == "mu.theta.0", ]$monitor == 1) {
		mu.theta.0_samples <- .Call("getMuTheta0SamplesInterimAll")
		mu.theta.0_samples = aperm(mu.theta.0_samples)
	}
	tau2.gamma.0_samples = NULL
	if (monitor[monitor$variable == "tau2.gamma.0", ]$monitor == 1) {
		tau2.gamma.0_samples <- .Call("getTau2Gamma0SamplesInterimAll")
		tau2.gamma.0_samples = aperm(tau2.gamma.0_samples)
	}
	tau2.theta.0_samples = NULL
	if (monitor[monitor$variable == "tau2.theta.0", ]$monitor == 1) {
		tau2.theta.0_samples <- .Call("getTau2Theta0SamplesInterimAll")
		tau2.theta.0_samples = aperm(tau2.theta.0_samples)
	}
	mu.theta_samples = NULL
	if (monitor[monitor$variable == "mu.theta", ]$monitor == 1) {
		mu.theta_samples <- .Call("getMuThetaSamplesInterimAll")
		mu.theta_samples <- aperm(mu.theta_samples)
	}
	mu.gamma_samples = NULL
	if (monitor[monitor$variable == "mu.gamma", ]$monitor == 1) {
		mu.gamma_samples <- .Call("getMuGammaSamplesInterimAll")
		mu.gamma_samples <- aperm(mu.gamma_samples)
	}
	sigma2.theta_samples = NULL
	if (monitor[monitor$variable == "sigma2.theta", ]$monitor == 1) {
		sigma2.theta_samples <- .Call("getSigma2ThetaSamplesInterimAll")
		sigma2.theta_samples <- aperm(sigma2.theta_samples)
	}
	sigma2.gamma_samples = NULL
	if (monitor[monitor$variable == "sigma2.gamma", ]$monitor == 1) {
		sigma2.gamma_samples <- .Call("getSigma2GammaSamplesInterimAll")
		sigma2.gamma_samples <- aperm(sigma2.gamma_samples)
	}
	pi_samples = NULL
	if (monitor[monitor$variable == "pi", ]$monitor == 1) {
		pi_samples = .Call("getPiSamplesInterimAll")
		pi_samples <- aperm(pi_samples)
	}
	alpha.pi_samples = NULL
	alpha.pi_acc = NULL
	if (monitor[monitor$variable == "alpha.pi", ]$monitor == 1) {
		alpha.pi_samples = .Call("getAlphaPiSamplesInterimAll")
		alpha.pi_samples = aperm(alpha.pi_samples)
		alpha.pi_acc = .Call("getAlphaPiAcceptInterimAll")
		alpha.pi_acc = aperm(alpha.pi_acc)
	}
	beta.pi_samples = NULL
	beta.pi_acc = NULL
	if (monitor[monitor$variable == "beta.pi", ]$monitor == 1) {
		beta.pi_samples = .Call("getBetaPiSamplesInterimAll")
		beta.pi_samples = aperm(beta.pi_samples)
		beta.pi_acc = .Call("getBetaPiAcceptInterimAll")
		beta.pi_acc = aperm(beta.pi_acc)
	}
	gamma_samples = NULL
	gamma_acc = NULL
	if (monitor[monitor$variable == "gamma", ]$monitor == 1) {
		gamma_samples = .Call("getGammaSamplesInterimAll")
		gamma_samples = aperm(gamma_samples)
		gamma_acc = .Call("getGammaAcceptInterimAll")
		gamma_acc <- aperm(gamma_acc)
	}
	theta_samples = NULL
	theta_acc = NULL
	if (monitor[monitor$variable == "theta", ]$monitor == 1) {
		theta_samples = .Call("getThetaSamplesInterimAll")
		theta_samples = aperm(theta_samples)
		theta_acc = .Call("getThetaAcceptInterimAll")
		theta_acc <- aperm(theta_acc)
	}
	# Free the C-side sample buffers now that everything has been copied out.
	.C("Release_Interim")
	# Assemble the fitted-model object: run metadata plus all recorded chains.
	model_fit = list(id = Mi1$Id, sim_type = Mi1$sim_type, chains = nchains, nIntervals = Mi1$numIntervals,
	Intervals = Mi1$Intervals, nBodySys = Mi1$numB, maxBs = Mi1$maxBs,
	maxAEs = Mi1$maxAEs, nAE = Mi1$nAE, AE=Mi1$AE, B = Mi1$B,
	burnin = burnin, iter = iter,
	monitor = monitor,
	gamma = gamma_samples,
	theta = theta_samples,
	mu.gamma = mu.gamma_samples,
	mu.theta = mu.theta_samples,
	sigma2.gamma = sigma2.gamma_samples,
	sigma2.theta = sigma2.theta_samples,
	pi = pi_samples,
	alpha.pi = alpha.pi_samples,
	beta.pi = beta.pi_samples,
	alpha.pi_acc = alpha.pi_acc,
	beta.pi_acc = beta.pi_acc,
	mu.gamma.0 = mu.gamma.0_samples,
	mu.theta.0 = mu.theta.0_samples,
	tau2.gamma.0 = tau2.gamma.0_samples,
	tau2.theta.0 = tau2.theta.0_samples,
	gamma_acc = gamma_acc,
	theta_acc = theta_acc)
	# Tag the result so downstream c212 functions can dispatch on model type.
	attr(model_fit, "model") = "BB_pois_indep"
	return(model_fit)
}
# Reset every model-state field in the Mi1 environment to its "unset" value
# (empty vector, NA scalar, or empty array) before new trial data is loaded.
Mi1$initVars = function() {
	# Fields accumulated incrementally start as empty vectors.
	for (field in c("B", "nAE",
		"mu.gamma.0", "tau2.gamma.0", "mu.theta.0", "tau2.theta.0")) {
		Mi1[[field]] <- c()
	}
	# Scalar counts and hyper-parameters are unknown until data arrives.
	for (field in c("numB", "numIntervals", "maxAEs",
		"mu.gamma.0.0", "tau2.gamma.0.0", "mu.theta.0.0", "tau2.theta.0.0",
		"alpha.gamma.0.0", "beta.gamma.0.0", "alpha.theta.0.0", "beta.theta.0.0",
		"alpha.gamma", "beta.gamma", "alpha.theta", "beta.theta",
		"lambda.alpha", "lambda.beta", "alpha.pi", "beta.pi", "pi")) {
		Mi1[[field]] <- NA
	}
	# Data and chain-state arrays are re-dimensioned later by the loaders.
	for (field in c("x", "C", "y", "T",
		"mu.gamma", "mu.theta", "sigma2.gamma", "sigma2.theta",
		"theta", "gamma")) {
		Mi1[[field]] <- array()
	}
}
# Randomly initialise chain `c` by drawing every model parameter from broad
# uniform distributions (chain 1 instead receives data-derived starting values
# in initialiseChains()). Arrays are indexed [chain, interval, body-system, AE].
Mi1$initChains = function(c) {
	for (i in 1:Mi1$numIntervals) {
		numB = Mi1$numB[i]
		for (b in 1:numB) {
			# Per-AE log baseline rates (gamma) and log rate increments (theta).
			Mi1$gamma[c, i, b, 1:Mi1$nAE[i, b]] <- runif(Mi1$nAE[i, b], -10, 10)
			Mi1$theta[c, i, b, 1:Mi1$nAE[i, b]] <- runif(Mi1$nAE[i, b], -10, 10)
			# Defensive clamping of any non-finite entries to -10 (mirrors the
			# guards used for the data-derived chain-1 initial values).
			Mi1$theta[c, i, b, ][is.infinite(Mi1$theta[c, i, b, ])] = -10
			Mi1$gamma[c, i, b, ][is.infinite(Mi1$gamma[c, i, b, ])] = -10
			Mi1$theta[c, i, b, ][is.nan(Mi1$theta[c, i, b, ])] = -10
			Mi1$gamma[c, i, b, ][is.nan(Mi1$gamma[c, i, b, ])] = -10
		}
		# Body-system level means/variances and point-mass probabilities.
		Mi1$mu.gamma[c, i, 1:numB] = runif(numB, -10, 10)
		Mi1$mu.theta[c, i, 1:numB] = runif(numB, -10, 10)
		Mi1$sigma2.gamma[c, i, 1:numB] = runif(numB, 5, 20)
		Mi1$sigma2.theta[c, i, 1:numB] = runif(numB, 5, 20)
		Mi1$pi[c, i, 1:numB] = runif(numB, 0, 1)
		# Interval-level hyper-parameters; alpha.pi/beta.pi kept > 1.
		Mi1$mu.gamma.0[c, i] = runif(1, -10, 10)
		Mi1$tau2.gamma.0[c, i] = runif(1, 5, 20)
		Mi1$mu.theta.0[c, i] = runif(1, -10, 10)
		Mi1$tau2.theta.0[c, i] = runif(1, 5, 20)
		Mi1$alpha.pi[c, i] = runif(1, 1.25, 100)
		Mi1$beta.pi[c, i] = runif(1, 1.25, 100)
	}
}
# Initialise the full MCMC state for `nchains` chains.  When
# `initial_values` is NULL, chain 1 is seeded from the observed data
# (empirical log rates) and chains 2..nchains via initChains(); otherwise
# all values are read from the `initial_values` data frames, matched by
# chain / interval / body-system (B) / adverse event (AE).
Mi1$initialiseChains = function(initial_values, nchains) {
  Mi1$theta = array(0, dim=c(nchains, Mi1$numIntervals, Mi1$maxBs, Mi1$maxAEs))
  Mi1$gamma = array(0, dim=c(nchains, Mi1$numIntervals, Mi1$maxBs, Mi1$maxAEs))
  if (is.null(initial_values)) {
    # Chain 1: empirical initial values.
    # gamma = log(control count / control exposure);
    # theta = log(treatment count / treatment exposure) - gamma.
    for (i in 1:Mi1$numIntervals) {
      numB = Mi1$numB[i]
      for (b in 1:numB) {
        Mi1$gamma[1, i, b, ] <- log(Mi1$x[i, b,]/Mi1$C[i, b, ])
        Mi1$theta[1, i, b, ] <- log(Mi1$y[i, b,]/Mi1$T[i, b, ]) - Mi1$gamma[1, i, b, ]
        # Zero counts produce -Inf/NaN; pin those entries to -10.
        Mi1$theta[1, i, b, ][is.infinite(Mi1$theta[1, i, b, ])] = -10
        Mi1$gamma[1, i, b, ][is.infinite(Mi1$gamma[1, i, b, ])] = -10
        Mi1$theta[1, i, b, ][is.nan(Mi1$theta[1, i, b, ])] = -10
        Mi1$gamma[1, i, b, ][is.nan(Mi1$gamma[1, i, b, ])] = -10
      }
    }
    # Default hyper-parameter starting values for all chains; chains
    # beyond the first are then over-dispersed via initChains().
    Mi1$mu.gamma <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$mu.theta <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$sigma2.gamma <- array(10, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$sigma2.theta <- array(10, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$pi <- array(0.5, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$mu.gamma.0 <- array(0, dim = c(nchains, Mi1$numIntervals))
    Mi1$tau2.gamma.0 <- array(10, dim = c(nchains, Mi1$numIntervals))
    Mi1$mu.theta.0 <- array(0, dim = c(nchains, Mi1$numIntervals))
    Mi1$tau2.theta.0 <- array(10, dim = c(nchains, Mi1$numIntervals))
    Mi1$alpha.pi <- array(1.5, dim = c(nchains, Mi1$numIntervals))
    Mi1$beta.pi <- array(1.5, dim = c(nchains, Mi1$numIntervals))
    if (nchains > 1) {
      for (c in 2:nchains) {
        Mi1$initChains(c)
      }
    }
  }
  else {
    # User-supplied initial values: allocate, then fill every level of
    # the hierarchy by matching rows of the initial_values data frames.
    Mi1$mu.gamma.0 <- array(0, dim = c(nchains, Mi1$numIntervals))
    Mi1$tau2.gamma.0 <- array(10, dim = c(nchains, Mi1$numIntervals))
    Mi1$mu.theta.0 <- array(0, dim = c(nchains, Mi1$numIntervals))
    Mi1$tau2.theta.0 <- array(10, dim = c(nchains, Mi1$numIntervals))
    Mi1$alpha.pi <- array(10, dim = c(nchains, Mi1$numIntervals))
    Mi1$beta.pi <- array(10, dim = c(nchains, Mi1$numIntervals))
    # Interval-level hyper-parameters.
    for (c in 1:nchains) {
      for (i in 1:Mi1$numIntervals) {
        interval = Mi1$Intervals[i]
        data = initial_values$mu.gamma.0[initial_values$mu.gamma.0$chain == c &
                                           initial_values$mu.gamma.0$Interval == interval, ]
        Mi1$mu.gamma.0[c, i] = data$value
        data = initial_values$mu.theta.0[initial_values$mu.theta.0$chain == c &
                                           initial_values$mu.theta.0$Interval == interval, ]
        Mi1$mu.theta.0[c, i] = data$value
        data = initial_values$tau2.gamma.0[initial_values$tau2.gamma.0$chain == c &
                                             initial_values$tau2.gamma.0$Interval == interval, ]
        Mi1$tau2.gamma.0[c, i] = data$value
        data = initial_values$tau2.theta.0[initial_values$tau2.theta.0$chain == c &
                                             initial_values$tau2.theta.0$Interval == interval, ]
        Mi1$tau2.theta.0[c, i] = data$value
        data = initial_values$alpha.pi[initial_values$alpha.pi$chain == c &
                                         initial_values$alpha.pi$Interval == interval, ]
        Mi1$alpha.pi[c, i] = data$value
        data = initial_values$beta.pi[initial_values$beta.pi$chain == c &
                                        initial_values$beta.pi$Interval == interval, ]
        Mi1$beta.pi[c, i] = data$value
      }
    }
    # Body-system level parameters.
    Mi1$mu.gamma <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$mu.theta <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$sigma2.gamma <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$sigma2.theta <- array(0, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    Mi1$pi <- array(0.5, dim = c(nchains, Mi1$numIntervals, Mi1$maxBs))
    for (c in 1:nchains) {
      for (i in 1:Mi1$numIntervals) {
        interval = Mi1$Intervals[i]
        for (b in 1:Mi1$numB[i]) {
          data = initial_values$mu.gamma[initial_values$mu.gamma$chain == c &
                                           initial_values$mu.gamma$Interval == interval
                                         & initial_values$mu.gamma$B == Mi1$B[i, b],]
          Mi1$mu.gamma[c, i, b] = data$value
          data = initial_values$mu.theta[initial_values$mu.theta$chain == c &
                                           initial_values$mu.theta$Interval == interval
                                         & initial_values$mu.theta$B == Mi1$B[i, b],]
          Mi1$mu.theta[c, i, b] = data$value
          data = initial_values$sigma2.gamma[initial_values$sigma2.gamma$chain == c &
                                               initial_values$sigma2.gamma$Interval == interval
                                             & initial_values$sigma2.gamma$B == Mi1$B[i, b],]
          Mi1$sigma2.gamma[c, i, b] = data$value
          data = initial_values$sigma2.theta[initial_values$sigma2.theta$chain == c &
                                               initial_values$sigma2.theta$Interval == interval
                                             & initial_values$sigma2.theta$B == Mi1$B[i, b],]
          Mi1$sigma2.theta[c, i, b] = data$value
          data = initial_values$pi[initial_values$pi$chain == c &
                                     initial_values$pi$Interval == interval
                                   & initial_values$pi$B == Mi1$B[i, b],]
          Mi1$pi[c, i, b] = data$value
        }
      }
    }
    # AE-level parameters (gamma/theta), matched additionally on AE name.
    for (c in 1:nchains) {
      for (i in 1:Mi1$numIntervals) {
        interval = Mi1$Intervals[i]
        for (b in 1:Mi1$numB[i]) {
          for (j in 1:Mi1$nAE[i, b]) {
            ae = Mi1$AE[i, b, j]
            data = initial_values$gamma[initial_values$gamma$chain == c
                                        & initial_values$gamma$Interval == interval
                                        & initial_values$gamma$B == Mi1$B[i, b]
                                        & initial_values$gamma$AE == ae,]
            Mi1$gamma[c, i, b, j] = data$value
            data = initial_values$theta[initial_values$theta$chain == c
                                        & initial_values$theta$Interval == interval
                                        & initial_values$theta$B == Mi1$B[i, b]
                                        & initial_values$theta$AE == ae,]
            Mi1$theta[c, i, b, j] = data$value
          }
        }
      }
    }
  }
}
# Example script: sensitivity analysis of the territory-turnover
# parameter, followed by parameter optimisation against observed data.
parm <- "terr.turnover"
# Candidate values for the perturbed parameter.
par.range = seq(from=0.45, to=0.55, by=0.05)
sens.results <- NULL
# Package data: song measurements and global simulation parameters.
data("song.data")
data("glo.parms")
years=2005-1969
iteration=5
timestep=1
n.territories <- glo.parms$n.territories
# Observed starting trait values (PRBO population, 1969), padded with
# draws from the observed distribution to fill all territories.
starting.trait <- subset(song.data, Population=="PRBO" & Year==1969)$Trill.FBW
starting.trait2 <- c(starting.trait, rnorm(n.territories-length(starting.trait),
                                           mean=mean(starting.trait), sd=sd(starting.trait)))
# One individual per territory, with random coordinates inside the
# study-area bounding box.
init.inds <- data.frame(id = seq(1:n.territories), age = 2, trait = starting.trait2)
init.inds$x1 <- round(runif(n.territories, min=-122.481858, max=-122.447270), digits=8)
init.inds$y1 <- round(runif(n.territories, min=37.787768, max=37.805645), digits=8)
# Fixed simulation settings; terr.turnover here is overridden by the
# sensitivity analysis below.
extra_parms <- list(init.inds = init.inds,
                    timestep = 1,
                    n.territories = nrow(init.inds),
                    learning.method = "integrate",
                    integrate.dist = 0.1,
                    lifespan = NA,
                    terr.turnover = 0.5,
                    mate.comp = FALSE,
                    prin = FALSE,
                    all = TRUE)
# Merge in any global parameters not already set above.
global_parms_key <- which(!names(glo.parms) %in% names(extra_parms))
extra_parms[names(glo.parms[global_parms_key])]=glo.parms[global_parms_key]
# Run the sensitivity analysis over par.range.
par.sens1 <- par.sens(parm = parm, par.range = par.range,
                      iteration = iteration, steps = years, mate.comp = FALSE,
                      fixed_parms=extra_parms[names(extra_parms)!=parm], all = TRUE)
# Observed target distribution (PRBO, 2005) used for optimisation.
target.data <- subset(song.data, Population=="PRBO" & Year==2005)$Trill.FBW
ts <- years
par.opt1 <- par.opt(sens.results=par.sens1$sens.results, ts=ts,
                    target.data=target.data, par.range=par.range)
par.opt1$Residuals
par.opt1$Target.match
\donttest{
# Diagnostic plots: fit of each candidate parameter value to the target.
plot(par.range, par.opt1$Target.match[,1], type="l",
     xlab="Parameter range", ylab="Difference in means (Hz)")
plot(par.range, par.opt1$Prop.contained, type="l",
     xlab="Parameter range", ylab="Proportion contained")
# Residuals of the trait MEAN: per-parameter average and 95% quantiles.
res.mean.means <- apply(par.opt1$Residuals[, , 1], MARGIN=1,
                        mean, na.rm=TRUE)
res.mean.quants <- apply (par.opt1$Residuals[, , 1], MARGIN=1,
                          quantile, probs=c(0.975, 0.025), R=600, na.rm=TRUE)
plot(par.range, res.mean.means, col="orange",
     ylim=range(par.opt1$Residuals[,,1], na.rm=TRUE),
     type="b",
     xlab="Parameter value (territory turnover rate)",
     ylab="Residual of trait mean (trill bandwidth, Hz)")
points(par.range, res.mean.quants[1,], col="orange")
points(par.range, res.mean.quants[2,], col="orange")
lines(par.range, res.mean.quants[1,], col="orange", lty=2)
lines(par.range, res.mean.quants[2,], col="orange", lty=2)
# Residuals of the trait VARIANCE: same summaries.
res.var.mean <- apply(par.opt1$Residuals[, , 2], MARGIN=1,
                      mean, na.rm=TRUE)
res.var.quants <- apply (par.opt1$Residuals[, , 2], MARGIN=1,
                         quantile, probs=c(0.975, 0.025), R=600, na.rm=TRUE)
plot(par.range, res.var.mean, col="purple",
     ylim=range(par.opt1$Residuals[,,2], na.rm=TRUE),
     type="b",
     xlab="Parameter value (territory turnover rate)",
     ylab="Residual of trait variance (trill bandwidth, Hz)")
points(par.range, res.var.quants[1,], col="purple")
points(par.range, res.var.quants[2,], col="purple")
lines(par.range, res.var.quants[1,], col="purple", lty=2)
lines(par.range, res.var.quants[2,], col="purple", lty=2)
# One panel per parameter value: simulated trajectories (grey), their
# mean (blue), 90% quantile band (dashed blue), and bootstrap CIs for
# the observed 1969 and 2005 means.
par(mfcol=c(3,2),
    mar=c(4.1, 4.1, 1, 1),
    cex=1.2)
for(i in 1:length(par.range)){
  plot(par.sens1$sens.results[ , , "trait.pop.mean", ],
       xlab="Year", ylab="Bandwidth (Hz)",
       xaxt="n", type="n",
       xlim=c(-0.5, years), ylim=range(par.sens1$sens.results[ , , "trait.pop.mean", ], na.rm=TRUE))
  for(p in 1:iteration){
    lines(par.sens1$sens.results[p, , "trait.pop.mean", i], col="light gray")
  }
  freq.mean <- apply(par.sens1$sens.results[, , "trait.pop.mean", i], 2, mean, na.rm=TRUE)
  lines(freq.mean, col="blue")
  axis(side=1, at=seq(0, 35, by=5), labels=seq(1970, 2005, by=5))
  quant.means <- apply (par.sens1$sens.results[, , "trait.pop.mean", i], MARGIN=2,
                        quantile, probs=c(0.95, 0.05), R=600, na.rm=TRUE)
  lines(quant.means[1,], col="blue", lty=2)
  lines(quant.means[2,], col="blue", lty=2)
  # Basic bootstrap CI for the observed starting (1969) mean.
  library("boot")
  sample.mean <- function(d, x) {
    mean(d[x])
  }
  boot_hist <- boot(starting.trait, statistic=sample.mean, R=100)
  ci.hist <- boot.ci(boot_hist, conf=0.95, type="basic")
  low <- ci.hist$basic[4]
  high <- ci.hist$basic[5]
  points(0, mean(starting.trait), pch=20, cex=0.6, col="black")
  library("Hmisc")
  errbar(x=0, y=mean(starting.trait), high, low, add=TRUE)
  # Basic bootstrap CI for the observed current (2005) mean.
  boot_curr <- boot(target.data, statistic=sample.mean, R=100)
  ci.curr <- boot.ci(boot_curr, conf=0.95, type="basic")
  low <- ci.curr$basic[4]
  high <- ci.curr$basic[5]
  points(years, mean(target.data), pch=20, cex=0.6, col="black")
  errbar(x=years, y=mean(target.data), high, low, add=TRUE)
  # Label the panel with the parameter value it shows.
  text(x=3, y=max(par.sens1$sens.results[ , , "trait.pop.mean", ], na.rm=TRUE)-100,
       labels=paste("Par = ", par.range[i], sep=""))
}
}
# Add the recommended golem runtime dependencies to the package DESCRIPTION.
#
# pkg:         path to the golem package root (defaults to the current
#              golem working directory).
# recommended: character vector of package names to declare as dependencies.
#
# Returns the value of cat_green_tick(), invoked for its side effect.
use_recommended_deps <- function(
  pkg = get_golem_wd(),
  recommended = c("shiny","DT","attempt","glue","htmltools","golem")
){
  # Work from the package root; restore the caller's directory on exit.
  old <- setwd(path_abs(pkg))
  on.exit(setwd(old))
  # Register each dependency in alphabetical order; try() ensures one
  # failing package does not abort the rest.
  lapply(
    sort(recommended),
    function(dep) try(use_package(dep))
  )
  cat_green_tick("Dependencies added")
}
# Add the recommended golem test skeleton to the package.
#
# pkg:        path to the golem package root.
# spellcheck: also add a spelling test via use_spell_check()?
# vignettes, lang, error: forwarded to use_spell_check().
#
# Side effects: creates tests/testthat if missing, declares a processx
# dependency, and copies test-golem-recommended.R into tests/testthat.
use_recommended_tests <- function (
  pkg = get_golem_wd(),
  spellcheck = TRUE,
  vignettes = TRUE,
  lang = "en-US",
  error = FALSE
){
  # BUG FIX: paths below were previously built from `old`, but setwd()
  # returns the PREVIOUS working directory, so they only pointed at the
  # package when the caller was already inside it.  Use the package path
  # consistently (L7433 already did).
  pkg_path <- path_abs(pkg)
  old <- setwd(pkg_path)
  on.exit(setwd(old), add = TRUE)
  # Bootstrap the testthat infrastructure if it does not exist yet.
  if (!dir.exists(
    path(pkg_path, "tests")
  )){
    without_warning(use_testthat)()
  }
  # The recommended tests require processx at test time.
  if (requireNamespace("processx")){
    capture.output(use_package("processx"))
  } else {
    stop("Please install the {processx} package to add these tests.")
  }
  # Refuse to overwrite an existing recommended-tests file.
  stop_if(
    path(pkg_path, "tests", "testthat", "test-golem-recommended.R"),
    file_exists,
    "test-golem-recommended.R already exists. \nPlease remove it first if you need to reinsert it."
  )
  file_copy(
    golem_sys("utils", "test-golem-recommended.R"),
    path(pkg_path, "tests", "testthat"),
    overwrite = TRUE
  )
  if (spellcheck){
    use_spell_check(
      vignettes = vignettes,
      lang = lang,
      error = error
    )
  }
  cat_green_tick("Tests added")
}
# Generic accessor for the estimated residual standard deviation
# ("sigma hat") of a fitted model.
sigmaHat <- function(object) {
  UseMethod("sigmaHat")
}

# Default: take the `sigma` component reported by summary().
sigmaHat.default <- function(object) {
  summary(object)$sigma
}

# Linear models use the default behaviour.
sigmaHat.lm <- function(object) {
  sigmaHat.default(object)
}

# GLMs: summary() reports the dispersion, so sigma is its square root.
sigmaHat.glm <- function(object) {
  sqrt(summary(object)$dispersion)
}
# Emulates Excel's RANDBETWEEN(): draws `number` integers uniformly from
# the inclusive range [bottom, top].
#
# bottom, top: integer bounds of the range (inclusive).
# number:      how many draws to generate (default 1).
#
# Returns a numeric vector of `number` integer-valued draws.
RANDBETWEEN <-
  function(bottom, top, number = 1) {
    # Draw on [bottom, top + 1) and truncate, so each integer in
    # [bottom, top] has equal probability.
    draws <- runif(number, min = bottom, max = top + 1)
    floor(draws)
  }
# Post-process ENVI outputs so their headers carry meaningful metadata:
# renames GDAL's default ".envi" files to the configured ENVI extension,
# replaces generic "Band N" names with Sentinel-2 band names, and appends
# wavelength/FWHM (TOA/BOA) or class tables (SCL) to the .hdr file.
#
# infiles: character vector of ENVI file paths (with or without extension).
fix_envi_format <- function(infiles) {
  # Extension registered for the ENVI driver in the package settings.
  gdal_formats <- fromJSON(
    system.file("extdata/settings/gdal_formats.json",package="sen2r")
  )$drivers
  envi_ext <- gdal_formats[gdal_formats$name=="ENVI","ext"][1]
  # Band metadata for both Sentinel-2 missions (A/B).  Element 8 holds a
  # pair (B8 / B8a): which one applies depends on the product resolution.
  s2_bands <- list(
    "TOA" = list("A" = list(), "B" = list()),
    "BOA" = list("A" = list(), "B" = list())
  )
  s2_bands[["TOA"]][["A"]][["bandname"]] <- s2_bands[["TOA"]][["B"]][["bandname"]] <- list(
    "B1 Aerosol",
    "B2 Blue",
    "B3 Green",
    "B4 Red",
    "B5 Red-edge 1",
    "B6 Red-edge 2",
    "B7 Red-edge 3",
    c("B8 NIR", "B8a narrow NIR"),
    "B9 Water vapour",
    "B10 Cirrus",
    "B11 SWIR1",
    "B12 SWIR2"
  )
  # BOA products lack band 10 (cirrus), hence the c(1:9,11:12) subset.
  s2_bands[["BOA"]][["A"]][["bandname"]] <- s2_bands[["BOA"]][["B"]][["bandname"]] <-
    s2_bands[["TOA"]][["B"]][["bandname"]][c(1:9,11:12)]
  # Central wavelengths (micrometres), per mission.
  s2_bands[["TOA"]][["A"]][["wavelength"]] <- list(
    0.4427, 0.4924, 0.5598, 0.6646, 0.7041, 0.7405, 0.7828, c(0.8328, 0.8647),
    0.9451, 1.3735, 1.6137, 2.2024
  )
  s2_bands[["TOA"]][["B"]][["wavelength"]] <- list(
    0.4422, 0.4921, 0.5590, 0.6649, 0.7038, 0.7391, 0.7797, c(0.8329, 0.8640),
    0.9432, 1.3769, 1.6104, 2.1857
  )
  s2_bands[["BOA"]][["A"]][["wavelength"]] <- s2_bands[["TOA"]][["A"]][["wavelength"]][c(1:9,11:12)]
  s2_bands[["BOA"]][["B"]][["wavelength"]] <- s2_bands[["TOA"]][["B"]][["wavelength"]][c(1:9,11:12)]
  # Full width at half maximum (micrometres), per mission.
  s2_bands[["TOA"]][["A"]][["fwhm"]] <- list(
    0.021, 0.066, 0.036, 0.031, 0.015, 0.015, 0.020, c(0.106, 0.021),
    0.020, 0.031, 0.091, 0.175
  )
  s2_bands[["TOA"]][["B"]][["fwhm"]] <- list(
    0.021, 0.066, 0.036, 0.031, 0.016, 0.015, 0.020, c(0.106, 0.022),
    0.021, 0.030, 0.094, 0.185
  )
  s2_bands[["BOA"]][["A"]][["fwhm"]] <- s2_bands[["TOA"]][["A"]][["fwhm"]][c(1:9,11:12)]
  s2_bands[["BOA"]][["B"]][["fwhm"]] <- s2_bands[["TOA"]][["B"]][["fwhm"]][c(1:9,11:12)]
  s2_bands[["RGB"]] <- c("Red","Green","Blue")
  for (infile in infiles) {
    infile_meta <- sen2r_getElements(infile)
    # Canonical data-file path: append the ENVI extension if missing.
    infile.dat <- if (grepl(paste0(envi_ext,"$"), infile)) {
      infile
    } else {
      paste0(infile,".",envi_ext)
    }
    # GDAL may have written the file with a ".envi" extension instead;
    # rename it (and its .aux.xml sidecar) to the configured extension.
    infile.envi <- gsub(paste0("\\.",envi_ext,"$"),".envi",infile.dat)
    if (!file.exists(infile.dat) & file.exists(infile.envi)) {
      file.rename(infile.envi, infile.dat)
      file.rename(paste0(infile.envi,".aux.xml"), paste0(infile.dat,".aux.xml"))
    }
    infile.hdr <- gsub(paste0("\\.",envi_ext,"$"),".hdr",infile.dat)
    if (file.exists(infile.hdr)) {
      hdr_content <- readLines(infile.hdr)
      # rn_0: the "band names = {" opening line; rns: the generic
      # "Band N," / "Band N}" entries; filename_rn: lines still
      # referencing the old ".envi" file name.
      rn_0 <- grep("^ *band names ?= ?\\{ *$", hdr_content)
      rns <- grep("^ *Band [0-9]+\\ *[\\,\\}]$", hdr_content)
      filename_rn <- grep(infile.envi, hdr_content, fixed = TRUE)
      if (infile_meta$prod_type %in% c("TOA","BOA")) {
        sel_s2_bands <- s2_bands[[infile_meta$prod_type]][[infile_meta$mission]]
        # At 10m, position 8 is B8 (wide NIR); otherwise B8a (narrow).
        if (infile_meta$res == "10m") {
          sel_s2_bands[["bandname"]][[8]] <- sel_s2_bands[["bandname"]][[8]][1]
          sel_s2_bands[["wavelength"]][[8]] <- sel_s2_bands[["wavelength"]][[8]][1]
          sel_s2_bands[["fwhm"]][[8]] <- sel_s2_bands[["fwhm"]][[8]][1]
        } else {
          sel_s2_bands[["bandname"]][[8]] <- sel_s2_bands[["bandname"]][[8]][2]
          sel_s2_bands[["wavelength"]][[8]] <- sel_s2_bands[["wavelength"]][[8]][2]
          sel_s2_bands[["fwhm"]][[8]] <- sel_s2_bands[["fwhm"]][[8]][2]
        }
        sel_s2_bands[["bandname"]] <- unlist(sel_s2_bands[["bandname"]])
        sel_s2_bands[["wavelength"]] <- unlist(sel_s2_bands[["wavelength"]])
        sel_s2_bands[["fwhm"]] <- unlist(sel_s2_bands[["fwhm"]])
        rns_length <- switch(infile_meta$prod_type,TOA=12,BOA=11)
      } else if (grepl("^RGB", infile_meta$prod_type)) {
        sel_s2_bands <- list("bandname" = s2_bands[["RGB"]])
        rns_length <- 3
      } else {
        # Single-band products: use the product type as the band name.
        sel_s2_bands <- list("bandname" = infile_meta$prod_type)
        rns_length <- 1
      }
      # Only rewrite if the "Band N" lines form a contiguous block right
      # after "band names = {" and their count matches the product.
      check_rn <- all(
        all(diff(c(rn_0,rns))==1),
        length(rns)==rns_length
      )
      if (check_rn) {
        # Keep the original indentation and trailing ","/"}" around the
        # new band name (captured groups \1 and \2).
        hdr_content[rns] <- paste0(
          gsub("^( *)Band [0-9]+(\\ *[\\,\\}])$", "\\1", hdr_content[rns]),
          sel_s2_bands[["bandname"]],
          gsub("^( *)Band [0-9]+(\\ *[\\,\\}])$", "\\2", hdr_content[rns])
        )
        hdr_content[filename_rn] <- gsub(infile.envi, infile.dat, hdr_content[filename_rn], fixed = TRUE)
        if (infile_meta$prod_type %in% c("TOA","BOA")) {
          # Append spectral metadata and a natural-colour default view.
          hdr_content <- c(
            hdr_content,
            paste0("wavelength = {",paste(sel_s2_bands[["wavelength"]], collapse=", "),"}"),
            paste0("fwhm = {",paste(sel_s2_bands[["fwhm"]], collapse=", "),"}"),
            "wavelength units = {um}",
            "default bands = {8, 4, 3}"
          )
        } else if (infile_meta$prod_type == "SCL") {
          # Scene classification: append the 12-class colour table and
          # class names used by the Sen2Cor SCL product.
          hdr_content <- c(
            hdr_content,
            "classes = 12",
            "class lookup = {",
            paste0(" 0, 0, 0, 255, 0, 0, 66, 65, 66, 99, 52, 0, 41, ",
                   "243, 41, 255, 255, 0, 0, 0, 255, 123, 125, 123, 189, 190, ",
                   "189, 255, 255, 255, 99, 203, 255, 255, 154, 255}"),
            "class names = {",
            paste0("No_data, Saturated or defective, Dark area pixels, Cloud shadows, ",
                   "Vegetation, Not vegetated, Water, Unclassified, Cloud (medium ",
                   "probability), Cloud (high probability), Thin cirrus, Snow}")
          )
        }
        writeLines(hdr_content, infile.hdr)
      }
    }
  }
}
NULL
# Silence R CMD check "no visible binding for global variable" NOTEs for
# column names used via non-standard evaluation elsewhere in the package.
if(getRversion() >= "2.15.1") utils::globalVariables(c(".",
                                                       "Lower.CI",
                                                       "Lower_CI",
                                                       "Inv_Lower_CI",
                                                       "HR",
                                                       "Upper.CI",
                                                       "Upper_CI",
                                                       "Inv_Upper_CI",
                                                       "Inv_HR",
                                                       "p",
                                                       "breakByYears",
                                                       "breakByHalfYear",
                                                       "breakByQuarterYear",
                                                       "scaleByMonths",
                                                       "scaleByYears",
                                                       "label",
                                                       "factor.name",
                                                       "factor.value",
                                                       "factor.id",
                                                       "endpoint",
                                                       "subgroup_n",
                                                       "breakAfter",
                                                       "ordered_index",
                                                       "breaks",
                                                       "endpointLabel",
                                                       "factorLabel",
                                                       "x",
                                                       "n_string",
                                                       "HR_string",
                                                       "CI_string",
                                                       "p_string",
                                                       "subgroup_n_string",
                                                       "Likelihood ratio test p",
                                                       "Score (logrank) test p",
                                                       "Wald test p"))
# Ensure a species object has background points, drawing them if needed.
#
# species:   enmtools.species object (validated via check.species()).
# env:       environmental raster layers (used for CRS checks and as a
#            background source when nothing else is available).
# nback:     number of background points to draw.
# bg.source: "points", "range", "env", or "default" (auto-detect).
# verbose:   emit progress messages?
# bias:      optional raster of sampling bias used as sampling weights.
#
# Returns the species object with `background.points` populated.
check.bg <- function(species, env = NA, nback = 1000, bg.source = "default", verbose = FALSE, bias = NA){
  species <- check.species(species)
  if(!inherits(species$presence.points, "data.frame")){
    stop("Species presence.points do not appear to be an object of class data.frame")
  }
  # Weighted sampling is used whenever a bias raster is supplied.
  with.bias <- FALSE
  if(inherits(bias, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
    with.bias <- TRUE
  }
  # Resolve the background source: honour an explicit choice, otherwise
  # fall back through points -> range -> env.
  if(bg.source %in% c("points", "range", "env")){
    if(verbose == TRUE){message(paste("Pulling background points from", bg.source))}
  } else {
    if(inherits(species$background.points, "data.frame")){
      bg.source = "points"
      if(verbose == TRUE){message("\n\nDrawing background from species background points.\n\n")}
    } else if(inherits(species$range, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
      bg.source = "range"
      if(verbose == TRUE){message("\n\nNo background points provided, drawing background from range raster.\n\n")}
    } else if(inherits(env, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))) {
      if(verbose == TRUE){message("\nNo background points or range raster, drawing background from environmental layers.\n\n")}
      bg.source = "env"
    } else {
      stop("No background points, range raster, or environmental data of appropriate type!")
    }
  }
  if(bg.source == "points"){
    if(!inherits(species$background.points, "data.frame")){
      stop("bg.source set to points, but species background.points does not contain a data frame!")
    }
    return(species)
  }
  if(bg.source == "range"){
    if(!inherits(species$range, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
      stop("bg.source set to range, but species does not have a recognizable range raster!")
    }
    # NOTE(review): this assumes env was supplied when bg.source is
    # "range"; with the default env = NA the crs() call will fail here.
    if(!raster::compareCRS(crs(env), crs(species$range))){
      stop("CRS mismatch between species range raster and environmental rasters!")
    }
    if(with.bias == FALSE){
      # Uniform sampling (with replacement) of cells inside the range.
      species$background.points <- as.data.frame(rasterToPoints(species$range)[,1:2])
      inds <- sample(seq_len(nrow(species$background.points)), size = nback, replace = TRUE)
      species$background.points <- species$background.points[inds,]
    } else {
      if(!inherits(bias, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
        stop("Bias layer was provided, but it is not a raster!")
      }
      if(!raster::compareCRS(bias, crs(species$range))){
        stop("CRS mismatch between species range raster and bias raster!")
      }
      # Adding the range raster propagates its NA cells into the bias
      # layer, so masking restricts sampling to the range.
      sample.raster <- raster::mask(bias, bias + species$range)
      species$background.points <- as.data.frame(rasterToPoints(sample.raster))
      # Third column holds the bias values, used as sampling weights.
      inds <- sample(seq_len(nrow(species$background.points)),
                     size = nback,
                     prob = species$background.points[,3],
                     replace = TRUE)
      species$background.points <- species$background.points[inds,1:2]
    }
    colnames(species$background.points) <- colnames(species$presence.points)
    return(species)
  }
  if(bg.source == "env"){
    if(!inherits(env, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
      stop("bg.source set to env, but env layers were not recognized!")
    }
    if(with.bias == FALSE){
      # Uniform sampling from all non-NA cells of the first env layer.
      species$background.points <- as.data.frame(rasterToPoints(env[[1]])[,1:2])
      inds <- sample(seq_len(nrow(species$background.points)), size = nback, replace = TRUE)
      species$background.points <- species$background.points[inds,]
    } else {
      if(!inherits(bias, c("raster", "RasterLayer", "RasterStack", "RasterBrick"))){
        stop("Bias layer was provided, but it is not a raster!")
      }
      # BUG FIX: this branch compares the bias layer against env, so the
      # message previously blaming the "species range raster" was wrong.
      if(!raster::compareCRS(bias, crs(env))){
        stop("CRS mismatch between environmental rasters and bias raster!")
      }
      sample.raster <- raster::mask(bias, bias + env)
      species$background.points <- as.data.frame(rasterToPoints(sample.raster))
      inds <- sample(seq_len(nrow(species$background.points)),
                     size = nback,
                     prob = species$background.points[,3],
                     replace = TRUE)
      species$background.points <- species$background.points[inds,1:2]
    }
    colnames(species$background.points) <- colnames(species$presence.points)
    return(species)
  }
}
# Cartesian coordinates with x and y flipped: a constructor returning a
# ggproto coordinate object built on CoordFlip.
#
# xlim, ylim: limits for the (pre-flip) x and y axes.
# expand:     expand the scales to pad data away from the axes?
# clip:       should drawing be clipped to the plot panel ("on"/"off")?
coord_flip <- function(xlim = NULL, ylim = NULL, expand = TRUE, clip = "on") {
  ggproto(NULL, CoordFlip,
    limits = list(x = xlim, y = ylim),
    expand = expand,
    clip = clip
  )
}
# ggproto object behind coord_flip(): delegates the real work to
# CoordCartesian after swapping every x-prefixed aesthetic/parameter
# with its y-prefixed counterpart (see flip_axis_labels()).
CoordFlip <- ggproto("CoordFlip", CoordCartesian,
  # Swap x/y in the data, then apply the ordinary Cartesian transform.
  transform = function(data, panel_params) {
    data <- flip_axis_labels(data)
    CoordCartesian$transform(data, panel_params)
  },
  # range() already reports flipped values, so back-transforming is just
  # reading the range.
  backtransform_range = function(self, panel_params) {
    self$range(panel_params)
  },
  # Report the panel ranges with x and y exchanged.
  range = function(self, panel_params) {
    un_flipped_range <- ggproto_parent(CoordCartesian, self)$range(panel_params)
    list(x = un_flipped_range$y, y = un_flipped_range$x)
  },
  # Build Cartesian panel params, then flip their x/y components.
  setup_panel_params = function(self, scale_x, scale_y, params = list()) {
    parent <- ggproto_parent(CoordCartesian, self)
    panel_params <- parent$setup_panel_params(scale_x, scale_y, params)
    flip_axis_labels(panel_params)
  },
  # Axis titles etc. are swapped before Cartesian label handling.
  labels = function(labels, panel_params) {
    CoordCartesian$labels(flip_axis_labels(labels), panel_params)
  },
  # Exchange which scale applies to rows vs columns in the layout.
  setup_layout = function(layout, params) {
    layout[c("SCALE_X", "SCALE_Y")] <- layout[c("SCALE_Y", "SCALE_X")]
    layout
  },
  # NOTE(review): scale_flip_axis() appears to rely on scales being
  # mutable (ggproto) objects — the lapply() results are discarded, so
  # the position change must happen in place.
  modify_scales = function(scales_x, scales_y) {
    lapply(scales_x, scale_flip_axis)
    lapply(scales_y, scale_flip_axis)
  }
)
# Move a scale's axis position to where it lands after a coordinate
# flip (top <-> right, bottom <-> left); any other position is kept.
# The scale is modified and returned invisibly.
scale_flip_axis <- function(scale) {
  flipped <- c(
    top = "right",
    bottom = "left",
    left = "bottom",
    right = "top"
  )
  pos <- scale$position
  scale$position <- if (pos %in% names(flipped)) flipped[[pos]] else pos
  invisible(scale)
}
# Swap the leading "x"/"y" of each element name, leaving the remainder
# untouched, so e.g. "x.range" becomes "y.range" and "ymin" becomes
# "xmin".  Names starting with neither letter are preserved.
flip_axis_labels <- function(x) {
  nms <- names(x)
  head_chr <- substr(nms, 1L, 1L)
  tail_chr <- substring(nms, 2L)
  swapped <- ifelse(head_chr == "x", "y",
                    ifelse(head_chr == "y", "x", head_chr))
  setNames(x, paste0(swapped, tail_chr))
}
# Extract the variable type from an attribute or model tag: the text
# before the first separator (e.g. "P" from "P_ann_tot", or "Temp" from
# "Temp-har26-wgen" with sep = "-").
get.varType <- function(attrib = NULL,
                        sep = "_") {
  pieces <- strsplit(x = attrib, split = sep)[[1]]
  pieces[1]
}
# Validate the user's attribute specification for an experiment space.
#
# attHold / attPerturb: attributes held at historical levels / perturbed.
# attTargetsFile:       optional csv of explicit targets (alternative to
#                       the sampling/bounds arguments below).
# attPerturbSamp:       number of samples per perturbed attribute.
# attPerturbBy:         step size per perturbed attribute.
# attPerturbMin/Max:    bounds per perturbed attribute.
# attribute.funcs:      named list of attribute functions; its names are
#                       the set of recognised attribute identifiers.
#
# Stops with an informative message on any invalid input; otherwise
# returns invisible(NULL).
check_attributes <- function(attHold = NULL,
                             attPerturb = NULL,
                             attTargetsFile = NULL,
                             attPerturbSamp = NULL,
                             attPerturbBy = NULL,
                             attPerturbMin = NULL,
                             attPerturbMax = NULL,
                             attribute.funcs = NULL
) {
  attributelist <- names(attribute.funcs)
  attSel <- c(attPerturb, attHold)
  if (is.null(attPerturb)) {
    stop("No attributes nominated for perturbation")
  }
  if (is.null(attHold)) {
    message("Note: There are no attributes held at historical levels")
  }
  if (anyDuplicated(attSel) != 0) {
    stop("There are multiple entries of the same attribute")
  }
  # Every requested attribute must be a recognised identifier.
  if (!is.null(attHold)) {
    for (i in seq_along(attHold)) {
      if (sum(attHold[i] %in% attributelist) == 0) {
        stop(paste0("attHold [", i, "] unrecognised"))
      }
    }
  }
  for (i in seq_along(attPerturb)) {
    if (sum(attPerturb[i] %in% attributelist) == 0) {
      stop(paste0("attPerturb [", i, "] unrecognised"))
    }
  }
  if (!is.null(attTargetsFile)) {
    if (!is.character(attTargetsFile)) { stop("attTargetsFile should be the path of the csv file with targets") }
  }
  if (is.character(attTargetsFile)) {
    # Targets file route: its columns must match attPerturb + attHold
    # exactly (same set, same count).
    targetMat <- read.table(file = attTargetsFile, sep = ",", header = TRUE)
    att_frmFile <- names(targetMat)
    for (i in seq_along(att_frmFile)) {
      if (sum(att_frmFile[i] %in% attSel) == 0) {
        stop("There is a mismatch in attributes specified in attPerturb & attHold and attTargetsFile")
      }
    }
    if (length(att_frmFile) != length(attSel)) {
      stop("Ensure that targets for attPerturb & attHold are specified in attTargetsFile")
    }
  } else {
    # Sampling/bounds route.
    # (A verbatim duplicate of the attPerturbSamp length/numeric checks
    # was removed here; this single copy performs all the checks.)
    if (!is.null(attPerturbSamp)) {
      if (length(attPerturb) != length(attPerturbSamp)) {
        stop("attPerturbSamp should be specified for each attribute in attPerturb")
      }
      if (!is.numeric(attPerturbSamp)) stop("Enter numeric values for attPerturbSamp")
      if (any(attPerturbSamp < 0) | !all((attPerturbSamp %% 1) == 0)) stop("Enter positive integers for attPerturbSamp")
    }
    if (!is.null(attPerturbBy)) {
      if (length(attPerturb) != length(attPerturbBy)) {
        stop("attPerturbby should be specified for each attribute in attPerturb")
      }
      if (!is.numeric(attPerturbBy)) stop("Enter numeric values for attPerturbBy")
      if (any(attPerturbBy < 0)) stop("Enter positive values for attPerturbBy")
    }
    # Bounds are mandatory on this route, one pair per attPerturb entry.
    if (length(attPerturb) != length(attPerturbMin)) {
      stop("attPerturbMin should be specified for each attribute in attPerturb")
    }
    if (length(attPerturb) != length(attPerturbMax)) {
      stop("attPerturbMax should be specified for each attribute in attPerturb")
    }
    if (!is.numeric(attPerturbMin)) stop("Enter numeric values for attPerturbMin")
    if (!is.numeric(attPerturbMax)) stop("Enter numeric values for attPerturbMax")
    if (!all(attPerturbMin <= attPerturbMax)) {
      stop("attPerturbMin should be less than or equal to attPerturbMax")
    }
  }
  return(invisible(NULL))
}
# Cross-check the requested attributes, penalty attributes and model
# tags against each other and against the observed data, logging every
# failure to `file` before stopping.
#
# obs:       observed data frame (year/month/day plus climate variables).
# attSel:    all selected attributes (perturbed + held).
# attPrim:   primary (penalty) attributes.
# attHold:   attributes held at historical levels.
# attPerturb: attributes being perturbed.
# modelTag:  model tags, one per variable ("Simple-ann" = simple scaling).
# optimArgs: optimiser settings; lambda.mult holds penalty weights.
# file:      log-file handle/path for logfile()/warn()/progress().
check_duplicates_mismatch <- function(obs = NULL,
                                      attSel = NULL,
                                      attPrim = NULL,
                                      attHold = NULL,
                                      attPerturb = NULL,
                                      modelTag = NULL,
                                      optimArgs = NULL,
                                      file
) {
  names <- names(obs)
  # Drop the date columns; what remains are the climate variables.
  names <- names[!names %in% c("year", "month", "day")]
  if (modelTag[1] == "Simple-ann") {
    # Simple scaling supports neither held nor penalty attributes, and
    # needs exactly one attribute per variable.
    if (length(attHold) != 0) {
      logfile("Error: Invalid - Simple scaling cannot hold attributes constant", file)
      logfile("Program terminated", file)
      stop("Simple scaling cannot hold attributes constant")
    }
    if (length(attPrim) != 0) {
      logfile("Error: Simple scaling uses no primary attributes", file)
      logfile("Program terminated", file)
      stop("Simple scaling uses no primary attributes")
    }
    if (length(attPerturb) != length(names)) {
      logfile("Error: There is a mismatch between number of variables and number of attributes. These should be the same for simple scaling, which only has multiplicative or additive changes", file)
      logfile("Program terminated", file)
      stop("There is a mismatch between number of variables and number of attributes. These should be the same for simple scaling, which only has multiplicative or additive changes")
    }
  } else {
    if (is.null(attHold)) {
      warn("No attributes held at historical levels", file)
    }
    if (anyDuplicated(attPrim) != 0) {
      logfile("Error: There are multiple entries of the same primary attribute", file)
      logfile("Program terminated", file)
      stop("There are multiple entries of the same primary attribute")
    }
    for (i in seq_along(modelTag)) {
      if (sum(modelTag[i] %in% modelTaglist) == 0) {
        logfile("Error: modelTag unrecognised", file)
        logfile("Program terminated", file)
        stop(paste0("modelTag ", i, " unrecognised"))
      }
    }
    # At most one model per variable (variable = text before "-").
    modelVars <- sapply(modelTag, get.varType, USE.NAMES = FALSE, sep = "-")
    if (anyDuplicated(modelVars) != 0) {
      logfile("Error: There are multiple entries of a model type for one variable", file)
      logfile("Program terminated", file)
      stop("There are multiple entries of a model type for one variable")
    }
    # Extra observed variables without a model are dropped (with notice).
    if (length(which((names %in% modelVars) == FALSE)) > 0) {
      message("reference contains more variables than the specified attributes or models. Stochastic series will only be produced for the specified settings.")
      array <- c("year", "month", "day", modelVars)
      obs = obs[array]
    }
  }
  if (anyDuplicated(modelTag) != 0) {
    logfile("Error: There are multiple entries of the same model tag", file)
    logfile("Program terminated", file)
    stop("There are multiple entries of the same model tag")
  }
  # Every penalty attribute must be one of the selected attributes.
  if (!is.null(attPrim)) {
    for (i in seq_along(attPrim)) {
      if (sum(attPrim[i] %in% attSel) == 0) {
        logfile(paste0("contolFile: penaltyAttribute [", i, "] does not exist in the expSpace"), file)
        logfile("Program terminated", file)
        stop(paste0("contolFile: penaltyAttribute [", i, "] does not exist in the expSpace"))
      }
    }
  }
  # BUG FIX: this previously read `length(attPrim!=0)`, i.e. the length
  # of a comparison vector — accidentally truth-equivalent, but the
  # intended test is that attPrim is non-empty.
  if ((length(attPrim) != 0) && (length(attPrim) != length(which(optimArgs$lambda.mult > 0)))) {
    warn("contolFile: There are specified penaltyAttributes with a lambda value of zero", file)
  }
  if (length(attPrim) > length(optimArgs$lambda.mult)) {
    warn("There are more specified penaltyAttributes than lambda values", file)
    logfile("Error: check number of supplied lambda values", file)
    logfile("Program terminated", file)
    stop("Ensure a lambda value is entered for each Primary attribute")
  } else {
    note = paste0("Lambda(", attPrim, "): ", optimArgs$lambda.mult, collapse = ", ")
    progress(note, file)
    logfile(note, file)
  }
  return(invisible())
}
# Verify that the chosen model tags can actually perturb/hold the
# selected attributes, logging the configuration to `file`.
#
# names:   column names of the observed data (first three are the date
#          columns year/month/day).
# attSel:  all selected attributes.
# attPrim: primary (penalty) attributes.
# modelTag: model tags, one per variable.
# file:    log-file handle/path for logfile()/progress().
check_models_attributes <- function(names=NULL,
                                    attSel=NULL,
                                    attPrim=NULL,
                                    modelTag=NULL,
                                    file
) {
  # NOTE(review): `nam` is computed but never used below — possibly a
  # leftover from an earlier revision.
  nam <- names[-c(1:3)]
  modelVars <- sapply(modelTag, get.varType, USE.NAMES=FALSE, sep="-")
  # Wet/dry-conditioned temperature or PET models need a rainfall ("P")
  # model to define wet and dry days.
  if (sum("P" %in% modelVars)==0) {
    if (sum("Temp-har26-wgen-wd" %in% modelTag)==1) {
      logfile("Error: Cannot simulate stochastic wet/dry dependent temperature without a rainfall model", file)
      stop("Cannot simulate stochastic wet/dry dependent temperature without a rainfall model")
    } else if (sum("PET-har26-wgen-wd" %in% modelTag)==1) {
      logfile("Error: Cannot simulate stochastic wet/dry dependent PET without a rainfall model", file)
      stop("Cannot simulate stochastic wet/dry dependent PET without a rainfall model")
    }
  }
  if (modelTag[1]=="Simple-ann") {
    # Simple scaling supports only its own restricted attribute set.
    validAtts <- get.attribute.info(modelTag = "Simple-ann")
    if (sum(attSel %in% validAtts)!=length(attSel)) {
      logfile("Error: Simple scaling cannot perturb selected attributes", file)
      logfile("Program terminated", file)
      stop("Simple scaling cannot perturb selected attributes. Choose a stochastic model")
    }
  } else {
    # Collect the attributes supported by the chosen model combination.
    # (Seeded with a dummy entry that is removed again below.)
    validAtts=("temp")
    for (i in 1:length(modelVars)) {
      temp=get.attribute.info(modelTag=modelTag[i])
      validAtts=append(validAtts, temp)
    }
    validAtts=validAtts[-1]
    if (sum(attSel %in% validAtts)!=length(attSel)) {
      logfile("Error: Model combinations cannot perturb selected attributes", file)
      logfile("Program terminated", file)
      stop("Model combinations cannot perturb or hold selected attributes. Change attPerturb or attHold selection.")
    }
    # Record the accepted configuration in the log.
    progress("You have selected the following penalty attributes:", file)
    logfile(attPrim, file)
    progress("These attributes will be perturbed with model types:", file)
    logfile(modelTag, file)
    progress("The scenarios will include the following attributes in the objective function:", file)
    logfile(attSel, file)
  }
  return(invisible())
}
# Fixture script: fit a Bayesian logistic regression with JAGS (via
# runjags) on the packaged simulated data and cache the result for tests.
data("sim_data")
datjags <- as.list(sim_data)
datjags$N <- length(datjags$Y)
# Bernoulli likelihood with a logit link; vague Normal(0, precision
# 0.001) priors on the intercept and two slopes.
model_string <- "
model{
for(i in 1:N){
Y[i] ~ dbern(p[i])
logit(p[i]) <- mu[i]
mu[i] <- b[1] +
b[2] * X1[i] +
b[3] * X2[i]
}
for(j in 1:3){
b[j] ~ dnorm(0, 0.001)
}
}
"
# Independent random starting values for the two chains.
inits1 <- list("b" = rnorm(3))
inits2 <- list("b" = rnorm(3))
inits <- list(inits1, inits2)
set.seed(123)
runjags_logit <- run.jags(model = model_string, monitor = "b", data = datjags,
                          n.chains = 2, sample = 2000, burnin = 1000,
                          inits = inits)
# Persist the fitted object as a test fixture.
saveRDS(runjags_logit, "tests/testdata/runjags-logit.rds")
# S4 generics for the split machinery; methods are defined per Split
# subclass elsewhere in the package.
# Raw (unlabelled) split values for `spl` applied to `df`.
setGeneric(".applysplit_rawvals",
           function(spl, df) standardGeneric(".applysplit_rawvals"))
# Partition `df` into one data subset per value in `vals`.
setGeneric(".applysplit_datapart",
           function(spl, df, vals) standardGeneric(".applysplit_datapart"))
# Per-value extra arguments to pass to analysis functions.
setGeneric(".applysplit_extras",
           function(spl, df, vals) standardGeneric(".applysplit_extras"))
# Display labels for each split value.
setGeneric(".applysplit_partlabels",
           function(spl, df, vals, labels) standardGeneric(".applysplit_partlabels"))
# Validate that `spl` can be applied to `df` (errors otherwise).
setGeneric("check_validsplit",
           function(spl, df) standardGeneric("check_validsplit"))
# Logical per value: is it the reference ("baseline") column?
setGeneric(".applysplit_ref_vals",
           function(spl, df, vals) standardGeneric(".applysplit_ref_vals"))
NULL
# Normalise a partinfo list (values / datasplit / labels / extras) after
# a split: fills in missing labels/values, wraps plain values into
# SplitValue objects (folding the extras in), and aligns names across
# all components.  Returns the fixed-up partinfo list.
.fixupvals = function(partinfo) {
  if(is.factor(partinfo$labels))
    partinfo$labels = as.character(partinfo$labels)
  vals = partinfo$values
  # Factors are converted to their level strings.
  if(is.factor(vals))
    vals = levels(vals)[vals]
  extr = partinfo$extras
  dpart = partinfo$datasplit
  labels = partinfo$labels
  # Derive labels from the first component that carries names.
  if(is.null(labels)) {
    if(!is.null(names(vals)))
      labels = names(vals)
    else if(!is.null(names(dpart)))
      labels = names(dpart)
    else if (!is.null(names(extr)))
      labels = names(extr)
  }
  # With no values but extras present, fall back to positional values.
  if(is.null(vals) && !is.null(extr))
    vals = seq_along(extr)
  if(length(vals) == 0) {
    # Empty split: extras must be empty too; nothing else to fix.
    stopifnot(length(extr) == 0)
    return(partinfo)
  }
  if(are(vals, "SplitValue") && !are(vals, "LevelComboSplitValue")) {
    # Already wrapped: extras should have been folded in previously.
    if(!is.null(extr)) {
      warning("Got a partinfo list with values that are ",
              "already SplitValue objects and non-null extras ",
              "element. This shouldn't happen")
    }
  } else {
    # Wrap raw values into SplitValue objects, attaching the extras.
    if(is.null(extr))
      extr = rep(list(list()), length(vals))
    vals = make_splvalue_vec(vals, extr, labels = labels)
  }
  # Extras now live inside the SplitValue objects.
  partinfo$extras = NULL
  vnames <- value_names(vals)
  names(vals) = vnames
  partinfo$values = vals
  # Keep datasplit names in sync with the value names.
  if(!identical(names(dpart), vnames)) {
    names(dpart) = vnames
    partinfo$datasplit = dpart
  }
  partinfo$labels = labels
  # Invariant: all components describe the same number of partitions.
  stopifnot(length(unique(sapply(partinfo, NROW))) == 1)
  partinfo
}
# For baseline-comparison splits: attach reference-column information to the
# partinfo extras.  Each facet gets `.in_ref_col` (is this the reference
# column?) and `.ref_full` (the full reference data subset).
.add_ref_extras <- function(spl, df, partinfo) {
    refvals <- .applysplit_ref_vals(spl, df, partinfo$values)
    ref_ind <- which(unlist(refvals))
    # exactly one facet must be the reference column
    stopifnot(length(ref_ind) == 1)
    vnames <- value_names(partinfo$values)
    if(is.null(partinfo$extras)) {
        names(refvals) <- vnames
        partinfo$extras <- refvals
    } else {
        # merge the reference info into the existing per-facet extras
        newextras <- mapply(function(old, incol, ref_full)
            c(old, list(.in_ref_col = incol,
                        .ref_full = ref_full)),
            old = partinfo$extras,
            incol = unlist(refvals),
            MoreArgs = list(ref_full = partinfo$datasplit[[ref_ind]]),
            SIMPLIFY = FALSE)
        names(newextras) <- vnames
        partinfo$extras <- newextras
    }
    partinfo
}
# Does `fun` accept an argument named `argname`?  When `truefordots` is
# TRUE, a `...` formal also counts as accepting the argument.
func_takes <- function(fun, argname, truefordots = FALSE) {
  arg_names <- names(formals(fun))
  if (argname %in% arg_names) {
    TRUE
  } else {
    truefordots && "..." %in% arg_names
  }
}
# Main entry point for applying a split to data.  Validates the split,
# dispatches to a custom split function if one is attached (passing
# .spl_context only when the function accepts it), otherwise uses the
# default .apply_split_inner; then post-processes the result.
do_split = function(spl, df, vals = NULL, labels = NULL, trim = FALSE, spl_context) {
    check_validsplit(spl, df)
    if(!is.null(splfun<-split_fun(spl))) {
        if(func_takes(splfun, ".spl_context")) {
            ret <- splfun(df, spl, vals, labels, trim = trim, .spl_context = spl_context)
        } else {
            ret <- splfun(df, spl, vals, labels, trim = trim)
        }
    } else {
        ret <- .apply_split_inner(df = df, spl = spl, vals = vals, labels = labels, trim = trim)
    }
    # baseline-comparison splits additionally need reference-column extras
    if(is(spl, "VarLevWBaselineSplit"))
        ret <- .add_ref_extras(spl, df, ret)
    # normalize values/labels/names regardless of which path produced ret
    ret <- .fixupvals(ret)
    ret
}
# Default split implementation: compute facet values, the per-facet data
# subsets, extras, and labels, optionally trim empty facets, and order
# facets according to the split's declared child order.
.apply_split_inner = function(spl, df, vals = NULL, labels = NULL, trim = FALSE) {
    if(is.null(vals))
        vals = .applysplit_rawvals(spl, df)
    extr = .applysplit_extras(spl, df, vals)
    # values may still be unknown but extras imply a facet count
    if(is.null(vals) && length(extr) > 0) {
        vals = seq_along(extr)
        names(vals) = names(extr)
    }
    # nothing to split into: return an empty partinfo
    if(is.null(vals)) {
        return(list(values = list(),
                    datasplit = list(),
                    labels = list(),
                    extras = list()))
    }
    dpart = .applysplit_datapart(spl, df, vals)
    if(is.null(labels))
        labels = .applysplit_partlabels(spl, df, vals, labels)
    else
        stopifnot(names(labels)== names(vals))
    # optionally drop facets whose data subset is empty
    if(trim) {
        hasdata = sapply(dpart, function(x) nrow(x) >0)
        if(nrow(df) > 0 && length(dpart) > sum(hasdata)) {
            dpart = dpart[hasdata]
            vals = vals[hasdata]
            extr = extr[hasdata]
            labels = labels[hasdata]
        }
    }
    # order facets by the split's declared child order when available
    if(is.null(spl_child_order(spl)) || is(spl, "AllSplit")) {
        vord = seq_along(vals)
    } else {
        vord = match(spl_child_order(spl),
                     vals)
        vord = vord[!is.na(vord)]
    }
    ret = list(values = vals[vord],
               datasplit = dpart[vord],
               labels = labels[vord],
               extras = extr[vord])
    ret
}
# Verify that every variable the split needs is a column of `df`;
# stop with a message listing the missing ones otherwise.
.checkvarsok <- function(spl, df) {
  needed <- spl_payload(spl)
  missing_vars <- setdiff(needed, names(df))
  if (length(missing_vars) > 0) {
    stop(" variable(s) [",
         paste(missing_vars, collapse = ", "),
         "] not present in data. (",
         class(spl), ")")
  }
  invisible(NULL)
}
# Variable-based splits are valid when their payload variables exist in df.
setMethod("check_validsplit", "VarLevelSplit",
          function(spl, df) {
              .checkvarsok(spl, df)
          })
setMethod("check_validsplit", "MultiVarSplit",
          function(spl, df) {
              .checkvarsok(spl, df)
          })
# Analysis splits may have an NA payload (nothing to check then).
setMethod("check_validsplit", "VAnalyzeSplit",
          function(spl, df) {
              if(!is.na(spl_payload(spl))) {
                  .checkvarsok(spl, df)
              } else {
                  TRUE
              }
          })
# A compound split is valid when each of its constituent splits is valid.
# BUG FIX: the original called sapply(spl_payload(spl), df), passing the
# data.frame `df` where sapply expects a function; recurse into
# check_validsplit for each constituent split instead.
setMethod("check_validsplit", "CompoundSplit",
          function(spl, df) {
              all(sapply(spl_payload(spl), check_validsplit, df = df))
          })
# Default: splits are valid unless a more specific method says otherwise.
setMethod("check_validsplit", "Split",
          function(spl, df)
              invisible(NULL))
# Raw facet values for a variable-level split: the factor levels, or the
# unique observed values for non-factor columns.
setMethod(".applysplit_rawvals", "VarLevelSplit",
          function(spl, df) {
              varvec = df[[spl_payload(spl)]]
              if(is.factor(varvec))
                  levels(varvec)
              else
                  unique(varvec)
          })
# Multi-variable split: one facet per declared variable.
setMethod(".applysplit_rawvals", "MultiVarSplit",
          function(spl, df) {
              spl_varnames(spl)
          })
# Degenerate splits produce a single fixed value.
setMethod(".applysplit_rawvals", "AllSplit",
          function(spl, df) obj_name(spl))
# Manual split: the user-specified facet levels.
setMethod(".applysplit_rawvals", "ManualSplit",
          function(spl, df) spl@levels)
setMethod(".applysplit_rawvals", "NULLSplit",
          function(spl, df) "")
setMethod(".applysplit_rawvals", "VAnalyzeSplit",
          function(spl, df) spl_payload(spl))
# Static-cut split: one facet per declared cut label.
setMethod(".applysplit_rawvals", "VarStaticCutSplit",
          function(spl, df) {
              spl_cutlabels(spl)
          })
# Data partition for a variable-level split: one subset of df per value.
setMethod(".applysplit_datapart", "VarLevelSplit",
          function(spl, df, vals) {
              if(!(spl_payload(spl) %in% names(df))) {
                  stop("Attempted to split on values of column (", spl_payload(spl), ") not present in the data")
              }
              ret = lapply(seq_along(vals), function(i) {
                  df[df[[spl_payload(spl)]] == vals[[i]],]
              })
              names(ret) = as.character(vals)
              ret
          })
# Multi-variable split: the full df is repeated once per selected variable
# (each facet later analyzes a different column of the same data).
setMethod(".applysplit_datapart", "MultiVarSplit",
          function(spl, df, vals) {
              allvnms <- spl_varnames(spl)
              if(!is.null(vals) && !identical(allvnms, vals)) {
                  incl <- match(vals, allvnms)
              } else {
                  incl <- seq_along(allvnms)
              }
              vars <- spl_payload(spl)[incl]
              ret <- rep(list(df), length(vars))
              names(ret) = vals
              ret
          })
# AllSplit: single facet holding all of df.
setMethod(".applysplit_datapart", "AllSplit",
          function(spl, df, vals) list(df))
# Manual split: every facet sees the full df.
setMethod(".applysplit_datapart", "ManualSplit",
          function(spl, df, vals) rep(list(df), times = length(vals)))
# NULLSplit: a single empty (zero-row) facet.
setMethod(".applysplit_datapart", "NULLSplit",
          function(spl, df, vals) list(df[FALSE,]))
# Static-cut split: partition rows by cutting the payload variable at the
# declared cut points (empty intervals kept, drop = FALSE).
setMethod(".applysplit_datapart", "VarStaticCutSplit",
          function(spl, df, vals) {
              var = spl_payload(spl)
              varvec = df[[var]]
              cts = spl_cuts(spl)
              cfct = cut(varvec, cts, include.lowest = TRUE)
              split(df, cfct, drop = FALSE)
          })
# Sentinel marking "no extra argument supplied at this position"; entries
# equal to the sentinel are filtered out below.
setClass("NullSentinel", contains = "NULL")
nullsentinel = new("NullSentinel")
noarg = function() nullsentinel
# Distribute the split's extra arguments across facets: for each facet
# position, take the position-th element of each extras vector (or the
# sentinel when the vector is too short, which is then dropped).
setMethod(".applysplit_extras", "Split",
          function(spl, df, vals) {
              splex <- split_exargs(spl)
              # NOTE(review): nextr is computed but never used here.
              nextr <- length(splex)
              nvals <- length(vals)
              lapply(seq_len(nvals), function(vpos) {
                  one_ex <- lapply(splex, function(arg) {
                      if(length(arg) >= vpos)
                          arg[[vpos]]
                      else
                          noarg()
                  })
                  names(one_ex) <- names(splex)
                  one_ex <- one_ex[!sapply(one_ex, is, "NullSentinel")]
                  one_ex
              })
          })
# Default: ordinary splits carry no reference-column information.
setMethod(".applysplit_ref_vals", "Split",
          function(spl, df, vals) rep(list(NULL), length(vals)))
# Baseline split: flag each facet with whether it IS the reference column
# (facet value equals the declared ref-group level).
# Removed unused local `bldata` (df subset on the baseline level) that was
# computed but never referenced.
setMethod(".applysplit_ref_vals", "VarLevWBaselineSplit",
          function(spl, df, vals) {
              var <- spl_payload(spl)
              bl_level <- spl@ref_group_value
              vnames <- value_names(vals)
              ret <- lapply(vnames, function(vl) {
                  list(.in_ref_col = vl == bl_level)
              })
              names(ret) <- vnames
              ret
          })
# Default: labels are just the character form of the facet values.
setMethod(".applysplit_partlabels", "Split",
          function(spl, df, vals, labels) as.character(vals))
# Variable-level split: labels may come from a separate label variable;
# each facet value must map to at most one label value.
setMethod(".applysplit_partlabels", "VarLevelSplit",
          function(spl, df, vals, labels) {
              varname <- spl_payload(spl)
              vlabelname <- spl_labelvar(spl)
              varvec = df[[varname]]
              if(is.null(vals)) {
                  vals = if(is.factor(varvec))
                             levels(varvec)
                         else
                             unique(varvec)
              }
              if(is.null(labels)) {
                  # label variable == split variable: values are the labels
                  if(varname == vlabelname) {
                      labels = vals
                  } else {
                      labfact <- is.factor(df[[vlabelname]])
                      lablevs <- if(labfact) levels(df[[vlabelname]]) else NULL
                      labels = sapply(vals, function(v) {
                          vlabel = unique(df[varvec == v,
                                             vlabelname, drop = TRUE])
                          # a facet value must not map to multiple labels
                          stopifnot(length(vlabel) < 2)
                          if(length(vlabel) == 0)
                              vlabel = ""
                          else if(labfact)
                              vlabel <- lablevs[vlabel]
                          vlabel
                      })
                  }
              }
              names(labels) = as.character(vals)
              labels
          })
# Multi-variable split: use the labels declared on the split itself.
setMethod(".applysplit_partlabels", "MultiVarSplit",
          function(spl, df, vals, labels) value_labels(spl))
# Wrap parallel vectors of values, extras and labels into a list of
# SplitValue objects (empty input passes through unchanged).
make_splvalue_vec <- function(vals, extrs = list(list()), labels = vals) {
  if (length(vals) == 0) {
    return(vals)
  }
  # unwrap AsIs so the element-wise mapping sees the plain list
  if (is(extrs, "AsIs")) {
    extrs <- unclass(extrs)
  }
  Map(SplitValue, val = vals, extr = extrs, label = labels)
}
# Factory for a custom split function that removes the rows belonging to
# the given levels `excl` (and drops those levels from the factor) before
# performing the default split.
remove_split_levels <- function(excl) {
  stopifnot(is.character(excl))
  function(df, spl, vals = NULL, labels = NULL, trim = FALSE) {
    var <- spl_payload(spl)
    kept <- df[!(df[[var]] %in% excl), ]
    if (is.factor(kept[[var]])) {
      kept_levels <- setdiff(levels(kept[[var]]), excl)
      kept[[var]] <- factor(kept[[var]], levels = kept_levels)
    }
    .apply_split_inner(spl, kept, vals = vals,
                       labels = labels,
                       trim = trim)
  }
}
# Factory for a custom split function that keeps only the given levels
# `only` (optionally reordering the factor to match), erroring if any
# requested level is absent from the factor.
keep_split_levels = function(only, reorder = TRUE) {
    function(df, spl, vals = NULL, labels = NULL, trim = FALSE) {
        var = spl_payload(spl)
        varvec = df[[var]]
        if(is.factor(varvec) && !all(only %in% levels(varvec)))
            stop("Attempted to keep invalid factor level(s) in split ", setdiff(only, levels(varvec)))
        df2 = df[df[[var]] %in% only,]
        if(reorder)
            df2[[var]] = factor(df2[[var]], levels = only)
        # update the local copy of the split so the kept levels become the
        # declared child order used by .apply_split_inner
        spl_child_order(spl) <- only
        .apply_split_inner(spl, df2, vals = only,
                           labels = labels,
                           trim = trim)
    }
}
# Custom split function: drop unobserved factor levels of the split
# variable before performing the default split.
drop_split_levels <- function(df, spl, vals = NULL, labels = NULL, trim = FALSE) {
  split_var <- spl_payload(spl)
  reduced <- df
  reduced[[split_var]] <- factor(df[[split_var]])
  .apply_split_inner(spl, reduced, vals = vals,
                     labels = labels,
                     trim = trim)
}
# Factory for a custom split function that removes rows for the levels in
# `excl` and then drops all unobserved levels of the split variable.
drop_and_remove_levels <- function(excl) {
  stopifnot(is.character(excl))
  function(df, spl, vals = NULL, labels = NULL, trim = FALSE) {
    split_var <- spl_payload(spl)
    kept <- df[!(df[[split_var]] %in% excl), ]
    kept[[split_var]] <- factor(kept[[split_var]])
    .apply_split_inner(
      spl,
      kept,
      vals = vals,
      labels = labels,
      trim = trim
    )
  }
}
# Factory for a custom split function imposing a new level order (and
# matching labels).  With drlevels = TRUE, levels absent from the data are
# dropped (and their labels removed); otherwise unmentioned observed
# levels are appended after `neworder`.
reorder_split_levels = function(neworder, newlabels = neworder, drlevels = TRUE) {
    if(length(neworder) != length(newlabels)) {
        stop("Got mismatching lengths for neworder and newlabels.")
    }
    function(df, spl, trim, ...) {
        df2 <- df
        valvec <- df2[[spl_payload(spl)]]
        vals <- if(is.factor(valvec)) levels(valvec) else unique(valvec)
        if(!drlevels)
            neworder <- c(neworder, setdiff(vals, neworder))
        df2[[spl_payload(spl)]] = factor(valvec, levels = neworder)
        if(drlevels) {
            # keep only labels whose levels survived droplevels()
            orig_order <- neworder
            df2[[spl_payload(spl)]] <- droplevels(df2[[spl_payload(spl)]] )
            neworder <- levels(df2[[spl_payload(spl)]])
            newlabels <- newlabels[orig_order %in% neworder]
        }
        # record the final order on the (local copy of the) split
        spl_child_order(spl) <- neworder
        .apply_split_inner(spl, df2, vals = neworder, labels = newlabels, trim = trim)
    }
}
# Factory for a custom split function that, after splitting (optionally
# dropping unobserved outer levels), also drops unobserved levels of the
# inner variable `innervar` within each facet's data subset.
trim_levels_in_group = function(innervar, drop_outlevs = TRUE) {
    myfun = function(df, spl, vals = NULL, labels = NULL, trim = FALSE) {
        if(!drop_outlevs)
            ret <- .apply_split_inner(spl, df, vals = vals, labels = labels, trim = trim)
        else
            ret <- drop_split_levels(df = df, spl = spl, vals = vals, labels = labels, trim = trim)
        # re-level the inner variable within each facet
        ret$datasplit = lapply(ret$datasplit, function(x) {
            coldat = x[[innervar]]
            if(is(coldat, "character")) {
                # character columns: coerce to factor with observed (or
                # user-supplied) levels
                if(!is.null(vals))
                    lvs = vals
                else
                    lvs = unique(coldat)
                coldat = factor(coldat, levels = lvs)
            } else {
                coldat = droplevels(coldat)
            }
            x[[innervar]] = coldat
            x
        })
        ret$labels <- as.character(ret$labels)
        ret
    }
    myfun
}
# Insert one combo facet (a facet representing a combination of levels)
# into an existing partinfo list, either before (`first = TRUE`) or after
# the current facets.  All four parallel components are extended.
.add_combo_part_info <- function(part, df, valuename, levels, label, extras, first = TRUE) {
  combo_val <- LevelComboSplitValue(valuename, extras, combolevels = levels, label = label)
  new_split <- setNames(list(df), valuename)
  new_value <- setNames(list(combo_val), valuename)
  new_extra <- setNames(list(extras), valuename)
  new_label <- setNames(label, valuename)
  if (first) {
    part$datasplit <- c(new_split, part$datasplit)
    part$values <- c(new_value, part$values)
    part$labels <- c(new_label, part$labels)
    part$extras <- c(new_extra, part$extras)
  } else {
    part$datasplit <- c(part$datasplit, new_split)
    part$values <- c(part$values, new_value)
    part$labels <- c(part$labels, new_label)
    part$extras <- c(part$extras, new_extra)
  }
  part
}
# Convenience wrapper around add_combo_levels() that adds a single
# "Overall" facet combining all levels of the split variable.
add_overall_level = function(valname = "Overall", label = valname, extra_args = list(), first = TRUE, trim = FALSE) {
    combodf <- data.frame(valname = valname,
                          label = label,
                          levelcombo = I(list(select_all_levels)),
                          exargs = I(list(extra_args)),
                          stringsAsFactors = FALSE)
    add_combo_levels(combodf,
                     trim = trim, first = first)
}
# Sentinel value meaning "all levels of the split variable" in a combo
# specification (see add_overall_level / add_combo_levels).
setClass("AllLevelsSentinel", contains = "character")
select_all_levels = new("AllLevelsSentinel")
# Factory for a custom split function that appends combo facets described
# by the rows of `combosdf` (columns: valname, label, levelcombo, exargs)
# to the default split result, optionally restricting to `keep_levels`.
add_combo_levels = function(combosdf, trim = FALSE, first = FALSE, keep_levels = NULL) {
    myfun = function(df, spl, vals = NULL, labels = NULL, ...) {
        ret = .apply_split_inner(spl, df, vals = vals, labels = labels, trim = trim)
        for(i in 1:nrow(combosdf)) {
            lcombo = combosdf[i, "levelcombo", drop = TRUE][[1]]
            spld = spl_payload(spl)
            if(is(lcombo, "AllLevelsSentinel"))
                subdf = df
            else if (is(spl, "VarLevelSplit")) {
                subdf = df[df[[spld]] %in% lcombo,]
            } else {
                # NOTE(review): `ret$vals` is always NULL here (the list
                # element is named `values`), so these two uses look like
                # they were meant to be the facet value names — confirm
                # against upstream before relying on this branch.
                stopifnot(all(lcombo %in% c(ret$labels, ret$vals)))
                subdf = do.call(rbind, ret$datasplit[names(ret$datasplit) %in% lcombo |
                                                     ret$vals %in% lcombo])
            }
            ret = .add_combo_part_info(ret, subdf,
                                       combosdf[i, "valname", drop=TRUE],
                                       lcombo,
                                       combosdf[i,"label", drop = TRUE],
                                       combosdf[i, "exargs", drop = TRUE][[1]],
                                       first)
        }
        # optionally keep only a subset of the resulting facets
        if(!is.null(keep_levels)) {
            keep_inds <- value_names(ret$values) %in% keep_levels
            ret <- lapply(ret, function(x) x[keep_inds])
        }
        ret
    }
    myfun
}
# Factory for a custom split function that restricts the split variable
# (and, within each facet, the other variables appearing in `map`) to the
# combinations enumerated in the data.frame `map`.  Outer splits already
# applied (from .spl_context) filter `map` down first.
trim_levels_to_map <- function(map = NULL) {
    if (is.null(map) || any(sapply(map, class) != "character"))
        stop("No map dataframe was provided or not all of the columns are of type character.")
    myfun <- function(df, spl, vals = NULL, labels = NULL, trim = FALSE, .spl_context) {
        allvars <- colnames(map)
        splvar <- spl_payload(spl)
        # map columns corresponding to splits already applied above us
        allvmatches <- match(.spl_context, allvars)
        outvars <- allvars[na.omit(allvmatches)]
        # map columns for variables nested inside this split
        invars <- intersect(setdiff(allvars, c(outvars, splvar)),
                            names(df))
        if(length(outvars) > 0) {
            # build and apply a filter like "A == 'a1' & B == 'b2'" from
            # the observed values of the outer splits
            indfilters <- vapply(outvars, function(ivar) {
                obsval <- .spl_context$value[match(ivar, .spl_context$split)]
                sprintf("%s == '%s'", ivar, obsval)
            }, "")
            allfilters <- paste(indfilters, collapse = " & ")
            map <- map[eval(parse(text = allfilters), envir = map),]
        }
        map_splvarpos <- which(names(map) == splvar)
        nondup <- !duplicated(map[[splvar]])
        ksl_fun <- keep_split_levels(only = map[[splvar]][nondup], reorder = TRUE)
        ret <- ksl_fun(df, spl, vals, labels, trim = trim)
        if(length(ret$datasplit) == 0) {
            msg <- paste(sprintf("%s[%s]", .spl_context$split, .spl_context$value),
                         collapse = "->")
            stop("map does not allow any values present in data for split variable ", splvar, " under the following parent splits:\n\t", msg)
        }
        # within each facet, keep only the inner-variable values the map
        # allows for that facet's level of the split variable
        ret$datasplit <- lapply(ret$values, function(splvar_lev) {
            df3 <- ret$datasplit[[splvar_lev]]
            curmap <- map[map[[map_splvarpos]] == splvar_lev,]
            for (iv in invars) {
                iv_lev <- df3[[iv]]
                levkeep <- as.character(unique(curmap[[iv]]))
                if (is.factor(iv_lev) && !all(levkeep %in% levels(iv_lev)))
                    stop("Attempted to keep invalid factor level(s) in split ", setdiff(levkeep, levels(iv_lev)))
                df3 <- df3[iv_lev %in% levkeep, , drop = FALSE]
                if (is.factor(iv_lev))
                    df3[[iv]] <- factor(as.character(df3[[iv]]), levels = levkeep)
            }
            df3
        })
        names(ret$datasplit) <- ret$values
        ret
    }
    myfun
} |
# Overlay a regular grid of `ncol` cells over `x` and, unless
# `intersection = FALSE`, return the intersection of `x` with the grid.
# NOTE(review): `nrow` is currently unused — kept for interface stability.
zb_quadrat <- function(x, ncol, nrow = NULL, intersection = TRUE) {
  grid_cells <- sf::st_make_grid(x = x, n = ncol)
  if (intersection) {
    sf::st_intersection(x, grid_cells)
  } else {
    grid_cells
  }
}
"quick.glm.fit" <-
function (x, y,
weights = rep(1, length(y)),
offset = rep(0, length(y)),
family = gaussian(),
eliminate = 0,
nIter = 2,
verbose = FALSE)
{
if (eliminate == 0)
return(suppressWarnings(glm.fit(x, y, weights = weights,
offset = offset,
family = family)$coef))
xElim <- x[ , seq(eliminate), drop = FALSE]
if (eliminate < ncol(x))
xNotElim <- cbind(1, x[ , (eliminate + 1):ncol(x), drop = FALSE])
else
xNotElim <- matrix(1, nrow(x), 1)
os.by.level <- numeric(eliminate)
model <- suppressWarnings(glm.fit(xNotElim, y,
weights = weights,
offset = offset,
family = family,
control = glm.control(maxit = 1)))
for (i in 1:nIter) {
if (verbose) cat("quick.glm.fit iteration", i,
"deviance =", deviance(model), "\n")
w <- xElim * model$weights
wz <- w * model$residuals
os.by.level <- os.by.level + colSums(wz)/colSums(w) + coef(model)[1]
os.vec <- offset + colSums(os.by.level * t(xElim))
model <- suppressWarnings(glm.fit(xNotElim, y,
weights = weights,
offset = os.vec,
etastart = model$linear.predictors,
family = family,
control = glm.control(maxit = 2)))
}
structure(c(os.by.level + coef(model)[1], coef(model)[-1]),
names = colnames(x))
} |
## Toggle the internal "trace the tracing code" debug state (stored in this
## function's own environment via .assignOverBinding) and return the flag.
.doTraceTrace <- function(on) {
    .assignOverBinding(".traceTraceState", on,
                       environment(.doTraceTrace), FALSE)
    on
}
## When TRUE, .TraceWithMethods() stops in browser() at key points.
.traceTraceState <- FALSE
## Language constructs / primitives that must never be traced.
.InvalidTracedFunctions <- c("if", "where", "for", "repeat", "(", "{",
                             "next", "break", ".Call", ".Internal", ".Primitive")
## Workhorse behind trace()/untrace(): resolve what is being (un)traced
## (plain function, generic, method for a signature, or reference-class
## method), build a "traceable" replacement object, and install it over the
## original binding (or via setMethod for signature-specific tracing).
##
## Args mirror trace(): `tracer`/`exit` are entry/exit trace expressions,
## `at` gives body positions for in-body tracing, `edit` enables editing
## the definition, `untrace` reverses a previous trace, `classMethod`
## selects class-level (vs object-level) reference methods.
## Returns `what` (the function name), invisibly usable by callers.
##
## FIX(review): restored `newFun@.Data <- def` which had been mangled into
## `[email protected] <- def` (matches base R methods:::.TraceWithMethods).
.TraceWithMethods <- function(what, tracer = NULL, exit = NULL, at = numeric(),
                              print = TRUE, signature = NULL,
                              where = .GlobalEnv, edit = FALSE, from = NULL,
                              untrace = FALSE, classMethod = FALSE) {
    ## a function as `where`: trace in that function's environment
    fromPackage <-
        if(is.function(where)) {
            where <- if(is(where, "genericFunction"))
                parent.env(environment(where))
            else
                environment(where)
            getPackageName(where)
        } else ""
    doEdit <- !isFALSE(edit)
    whereF <- NULL
    pname <- character()
    def <- NULL
    tracingWhere <- "in package"
    refCase <- isS4(where) && (is(where, "envRefClass") ||
                               is(where, "refClassRepresentation"))
    if(refCase) {
        ## -- reference-class method tracing --
        if(!is.null(signature))
            stop("argument 'signature' is not meaningful for tracing reference methods")
        .where <- where
        if(is(.where, "refGeneratorSlot") && !classMethod)
            .where <- .where$def
        if(is(.where, "refClassRepresentation")) {
            pname <- .where@className
            .where <- .where@refMethods
            tracingWhere <- "for class"
        }
        else {
            tracingWhere <- "for object from class"
            pname <- class(.where)
        }
        def <- eval(substitute(.dollarForEnvRefClass(.where, what)))
        if(!is(def, "refMethodDef")) {
            thisName <- substitute(what)
            stop(gettextf("%s is not a method for reference class %s",
                          sQuote(as.character(if(is.symbol(thisName)) thisName
                                              else what)),
                          dQuote(class(where))),
                 domain = NA)
        }
        what <- def@name
        whereF <- .where
    }
    else if(is.function(what)) {
        ## -- `what` given as a function object --
        def <- what
        if(is(def, "genericFunction")) {
            what <- def@generic
            whereF <- .genEnv(what, where)
            pname <- def@package
        }
        else {
            ## recover the name: a symbol, pkg::name or pkg:::name call
            fname <- substitute(what)
            if(is.name(fname)) {
                what <- as.character(fname)
                temp <- .findFunEnvAndName(what, where)
                whereF <- temp$whereF
                pname <- temp$pname
            }
            else if(is.call(fname) && identical(fname[[1L]], as.name("::"))) {
                whereF <- as.character(fname[[2L]])
                require(whereF, character.only = TRUE)
                whereF <- as.environment(paste0("package:", whereF))
                pname <- fname[[2L]]
                what <- as.character(fname[[3L]])
            }
            else if(is.call(fname) && identical(fname[[1L]], as.name(":::"))) {
                pname <- paste(fname[[2L]], "(not-exported)")
                whereF <- loadNamespace(as.character(fname[[2L]]))
                what <- as.character(fname[[3L]])
            }
            else
                stop("argument 'what' should be the name of a function")
        }
    }
    else {
        ## -- `what` given as character; vectorized case recurses --
        what <- as(what, "character")
        if(length(what) != 1) {
            for(f in what) {
                if(nargs() == 1)
                    trace(f)
                else
                    Recall(f, tracer, exit, at, print, signature, where, edit, from, untrace)
            }
            return(what)
        }
        temp <- .findFunEnvAndName(what, where, signature)
        whereF <- temp$whereF
        pname <- temp$pname
        fname <- what
    }
    if(what %in% .InvalidTracedFunctions)
        stop(gettextf("tracing the internal function %s is not allowed",
                      sQuote(what)), domain = NA)
    if(.traceTraceState) {
        message(".TraceWithMethods: after computing what, whereF", domain = NA)
        browser()
    }
    ## single-argument call: fall back on the primitive trace mechanism
    if(nargs() == 1)
        return(if(untrace) .primUntrace(what) else .primTrace(what))
    if(is.null(whereF)) {
        allWhere <- findFunction(what, where = where)
        if(length(allWhere)==0)
            stop(gettextf("no function definition for %s found", sQuote(what)),
                 domain = NA)
        whereF <- as.environment(allWhere[[1L]])
    }
    ## default tracer: an empty braced expression (still triggers browser-free trace)
    if(is.null(tracer) && is.null(exit) && isFALSE(edit))
        tracer <- quote({})
    if(is.null(def))
        def <- getFunction(what, where = whereF)
    if(is(def, "traceable") && isFALSE(edit) && !untrace)
        def <- .untracedFunction(def)
    if(!is.null(signature)) {
        ## method-level tracing: select the method for this signature
        fdef <- if (!is(def, "genericFunction"))
            getGeneric(as.character(fname), TRUE, where)
        else def
        def <- selectMethod(what, signature, fdef = fdef, optional = TRUE)
        if(isRematched(def)) {
            ## rematched methods: trace the inner .local function instead
            expr <- substitute(trace(.local, tracer = tr, at = at,
                                     exit = ex, print = pr,
                                     edit = ed,
                                     where = sys.frame(sys.nframe())),
                               list( tr = substitute(tracer),
                                    ex = exit, at = at, pr = print,
                                    ed = edit))
            at <- 3L
            tracer <- expr
            print <- FALSE
        }
        if(is.null(def)) {
            warning(gettextf("cannot untrace method for %s; no method defined for this signature: %s",
                             sQuote(what),
                             paste(signature, collapse = ", ")),
                    domain = NA)
            return(def)
        }
        signature <- def@target
    }
    if(untrace) {
        ## -- remove tracing: recover the original definition --
        if(.traceTraceState) {
            message(".TraceWithMethods: untrace case", domain = NA)
            browser()
        }
        if(is.null(signature)) {
            if(is(def, "traceable")) {
                newFun <- .untracedFunction(def)
            }
            else {
                .primUntrace(what)
                return(what)
            }
        }
        else {
            if(is(def, "traceable"))
                newFun <- .untracedFunction(def)
            else {
                warning(gettextf("the method for %s for this signature was not being traced",
                                 sQuote(what)),
                        domain = NA)
                return(what)
            }
        }
    }
    else {
        ## -- install tracing: build the traceable replacement object --
        ## bare functions as tracer/exit become calls to those functions
        if(!is.null(exit)) {
            if(is.function(exit)) {
                tname <- substitute(exit)
                if(is.name(tname))
                    exit <- tname
                exit <- substitute(TRACE(), list(TRACE=exit))
            }
        }
        if(!is.null(tracer)) {
            if(is.function(tracer)) {
                tname <- substitute(tracer)
                if(is.name(tname))
                    tracer <- tname
                tracer <- substitute(TRACE(), list(TRACE=tracer))
            }
        }
        original <- .untracedFunction(def)
        traceClass <- .traceClassName(class(original))
        if(is.null(getClassDef(traceClass)))
            traceClass <- .makeTraceClass(traceClass, class(original))
        if(doEdit && is.environment(edit)) {
            ## re-install a previously edited definition from `edit`
            def <- .findNewDefForTrace(what, signature, edit, fromPackage)
            environment(def) <- environment(original)
            if(is.null(c(tracer, exit))) {
                newFun <- new(traceClass, original)
                newFun@.Data <- def
            }
            else {
                newFun <- new(traceClass, def = def, tracer = tracer, exit = exit, at = at, print = print, doEdit = FALSE)
                newFun@original <- original
            }
            newFun@source <- edit
        }
        else
            newFun <- new(traceClass,
                          def = if(doEdit) def else original, tracer = tracer, exit = exit, at = at,
                          print = print, doEdit = edit)
    }
    global <- identical(whereF, .GlobalEnv)
    if(.traceTraceState) {
        message(".TraceWithMethods: about to assign or setMethod", domain = NA)
        browser()
    }
    if(is.null(signature)) {
        ## whole-function tracing: overwrite the binding (handling locked
        ## bindings, importing packages, and registered S3 methods)
        if(bindingIsLocked(what, whereF))
            .assignOverBinding(what, newFun, whereF, global)
        else
            assign(what, newFun, whereF)
        if (length(pname) != 0) {
            spname <- sub("^namespace:", "", pname)
            ipkgs <- tryCatch(getNamespaceUsers(spname), error=function(e){c()})
            for(importingPkg in ipkgs) {
                .updateInImportsEnv(what, newFun, importingPkg)
            }
        }
        if(length(grep("[^.]+[.][^.]+", what)) > 0) {
            ## looks like an S3 method name: update the S3 methods table too
            S3MTableName <- ".__S3MethodsTable__."
            if(!is.null(tbl <- get0(S3MTableName, envir = whereF, inherits = FALSE))) {
                if(exists(what, envir = tbl, inherits = FALSE)) {
                    tracedFun <- get(what, envir = whereF, inherits = TRUE)
                    assign(what, tracedFun, envir = tbl)
                }
            }
        }
    }
    else {
        ## method tracing: register the traced method for the signature
        if(untrace && is(newFun, "MethodDefinition") &&
           !identical(newFun@target, newFun@defined))
            newFun <- NULL
        setMethod(fdef, signature, newFun, where = baseenv())
    }
    if(!global) {
        ## report what was (un)traced and where
        action <- if(untrace)"Untracing" else "Tracing"
        nameSpaceCase <- FALSE
        location <- if(.identC(fromPackage, "")) {
            if(length(pname)==0 && !is.null(whereF))
                pname <- getPackageName(whereF)
            nameSpaceCase <- isNamespace(whereF) &&
                !is.na(match(pname, loadedNamespaces())) &&
                identical(whereF, getNamespace(pname))
            if(length(pname)==0)
                ""
            else {
                if(nameSpaceCase)
                    paste0(" in environment <namespace:", pname, ">")
                else
                    paste0(" ", tracingWhere, " \"", pname, "\"")
            }
        }
        else paste0(" as seen from package \"", fromPackage, "\"")
        object <- if(refCase) "reference method"
                  else if(is.null(signature)) "function"
                  else "specified method for function"
        object <- paste0(" ", object, " \"", what, "\" ")
        .message(action, object, location)
        if(nameSpaceCase && !untrace && is.null(signature) && exists(what, envir = .GlobalEnv)) {
            untcall <- paste0("untrace(\"", what,
                              "\", where = getNamespace(\"", pname, "\"))")
            .message("Warning: Tracing only in the namespace; to untrace you will need:\n    ",
                     untcall, "\n")
        }
    }
    what
}
## Build the actual traced version of a function: wrap primitives into
## closures, optionally let the user edit the definition, and splice
## .doTrace() calls into the body at entry, at `at` positions, and/or on
## exit.  Returns the modified function.
.makeTracedFunction <- function(def, tracer, exit, at, print, doEdit) {
    ## primitives cannot carry a modified body; wrap them in a closure
    switch(typeof(def),
           builtin = {
               fBody <- substitute({.prim <- DEF; .prim(...)},
                                   list(DEF = def))
               def <- eval(function(...)NULL)
               body(def, envir = .GlobalEnv) <- fBody
           },
           special = {
               fBody <- substitute({do.call(DEF, list(...))},
                                   list(DEF = def))
               def <- eval(function(...)NULL)
               body(def, envir = .GlobalEnv) <- fBody
               warning("making a traced version of a special; arguments may be altered")
           }
           )
    ## `doEdit` may be TRUE/FALSE, an editor name, or an editor function
    if(!isFALSE(doEdit)) {
        if(is.character(doEdit) || is.function(doEdit)) {
            editor <- doEdit
            doEdit <- TRUE
        }
        else
            editor <- getOption("editor")
    }
    if(doEdit) {
        ## interactive path: user edits the (pre-traced) definition; only
        ## body changes are allowed, not the formal arguments
        if(is(def, "traceable"))
            def <- as(def, "function")
        if(is(editor, "character") && !is.na(match(editor, c("emacs","xemacs")))) {
            ## these editors want a file with an .R suffix
            file <- tempfile("emacs")
            file <- sub('..$', ".R", file)
        }
        else
            file <- ""
        if(!(is.null(tracer) && is.null(exit) && length(at)==0))
            def <- Recall(def, tracer, exit, at, print, FALSE)
        def2 <- utils::edit(def, editor = editor, file = file)
        if(!is.function(def2))
            stop(gettextf("the editing in trace() can only change the body of the function; got an object of class %s",
                          dQuote(class(def2))),
                 domain = NA)
        if(!identical(args(def), args(def2)))
            stop("the editing in trace() can only change the body of the function, not the arguments or defaults")
        fBody <- body(def2)
    }
    else {
        ## programmatic path: splice .doTrace() calls into the body
        def <- .untracedFunction(def)
        fBody <- body(def)
        if(length(at) > 0) {
            ## trace at specific positions inside a braced body
            if(is.null(tracer))
                stop("cannot use 'at' argument without a trace expression")
            else if(!inherits(fBody, "{"))
                stop("cannot use 'at' argument unless the function body has the form '{ ... }'")
            for(i in at) {
                fBody[[i]] <-
                    if(print)
                        substitute({.doTrace(TRACE, MSG); EXPR},
                                   list(TRACE = tracer,
                                        MSG = paste("step",paste(i, collapse=",")),
                                        EXPR = fBody[[i]]))
                    else
                        substitute({.doTrace(TRACE); EXPR},
                                   list(TRACE=tracer, EXPR = fBody[[i]]))
            }
        }
        else if(!is.null(tracer)){
            ## trace on entry
            fBody <-
                if(print)
                    substitute({.doTrace(TRACE, MSG); EXPR},
                               list(TRACE = tracer, MSG = paste("on entry"), EXPR = fBody))
                else
                    substitute({.doTrace(TRACE); EXPR},
                               list(TRACE=tracer, EXPR = fBody))
        }
        if(!is.null(exit)) {
            ## trace on exit via on.exit()
            exit <-
                if(print)
                    substitute(.doTrace(EXPR, MSG),
                               list(EXPR = exit, MSG = paste("on exit")))
                else
                    substitute(.doTrace(EXPR),
                               list(EXPR = exit))
            fBody <- substitute({on.exit(TRACE); BODY},
                                list(TRACE=exit, BODY=fBody))
        }
    }
    body(def, envir = environment(def)) <- fBody
    def
}
## Peel off any number of tracing wrappers, returning the original
## (untraced) function underneath.
.untracedFunction <- function(f) {
    repeat {
        if (!is(f, "traceable"))
            return(f)
        f <- f@original
    }
}
## One-time setup of the tracing class hierarchy in `envir`: the virtual
## "traceable" class, one "<cls>WithTrace" subclass per function-like
## class, the "sourceEnvironment" class used by evalSource(), plus the
## initialize/show methods.  All created classes are sealed.
.InitTraceFunctions <- function(envir) {
    setClass("traceable", representation(original = "PossibleMethod", source = "environment"), contains = "VIRTUAL",
             where = envir); clList <- "traceable"
    for(cl in c("function", "MethodDefinition", "MethodWithNext", "genericFunction",
                "standardGeneric", "nonstandardGeneric", "groupGenericFunction",
                "derivedDefaultMethod")) {
        .makeTraceClass(.traceClassName(cl), cl, FALSE)
        clList <- c(clList, .traceClassName(cl))
    }
    setClass("sourceEnvironment", contains = "environment",
             representation(packageName = "character", dateCreated = "POSIXt", sourceFile = "character"),
             prototype = prototype( packageName = "", dateCreated = Sys.time(), sourceFile = ""))
    clList <- c(clList, "sourceEnvironment")
    assign(".SealedClasses", c(get(".SealedClasses", envir), clList), envir)
    setMethod("initialize", "traceable",
              function(.Object, ...) .initTraceable(.Object, ...),
              where = envir)
    if(!isGeneric("show", envir))
        setGeneric("show", where = envir, simpleInheritanceOnly = TRUE)
    setMethod("show", "traceable", .showTraceable, where = envir)
    setMethod("show", "sourceEnvironment", .showSource, where = envir)
}
## Should assignments into a source environment be cached?  Unset means
## TRUE; otherwise use the stored flag.
cacheOnAssign <- function(env) {
    flag <- env$.cacheOnAssign
    is.null(flag) || flag
}
## Store the caching flag (normalized to exactly TRUE or FALSE) in `env`.
setCacheOnAssign <- function(env, onOff = cacheOnAssign(env)) {
    env$.cacheOnAssign <- if (onOff) TRUE else FALSE
}
## show() method for "traceable" objects: display the original definition
## (no attached source environment) or the edited source version.
## FIX(review): restored two string literals truncated at "#" by a
## comment-stripping artifact, and `object@.Data` which had been mangled
## into `[email protected]` (matches base R methods:::.showTraceable).
.showTraceable <- function(object) {
    if(identical(object@source, emptyenv())) {
        cat("Object with tracing code, class \"", class(object),
            "\"\nOriginal definition: \n", sep="")
        callGeneric(object@original)
        cat("\n## (to see the tracing code, look at body(object))\n")
    }
    else {
        cat("Object of class \"", class(object),
            "\", from source\n", sep = "")
        callGeneric(object@.Data)
        cat("\n## (to see current definition, look at body(object))\n")
    }
}
## initialize() method for "traceable" objects: record the original
## definition and, when tracing arguments are supplied, install the traced
## function as the object's .Data part.
## FIX(review): restored `def@.Data` and `.Object@.Data`, which had been
## mangled into `[email protected]` (matches base R methods:::.initTraceable).
.initTraceable <- function(.Object, def, tracer, exit, at, print, doEdit) {
    .Object@source <- emptyenv()
    if(missing(def))
        return(.Object)
    oldClass <- class(def)
    oldClassDef <- getClass(oldClass)
    ## copy over any slots the untraced class defines
    if(!is.null(oldClassDef) && length(oldClassDef@slots) > 0)
        as(.Object, oldClass) <- def
    .Object@original <- def
    if(nargs() > 2) {
        if(!is.null(elNamed(getSlots(getClass(class(def))), ".Data")))
            def <- def@.Data
        .Object@.Data <- .makeTracedFunction(def, tracer, exit, at, print, doEdit)
    }
    .Object
}
## show() method for "sourceEnvironment" objects: class, creation time,
## and (when known) the package and source file.
.showSource <- function(object) {
    pkg <- object@packageName
    src <- object@sourceFile
    cat("Object of class \"", class(object), "\"\n", sep = "")
    cat("Source environment created ", format(object@dateCreated), "\n")
    if (nzchar(pkg))
        cat("For package \"", pkg, "\"\n", sep = "")
    if (nzchar(src))
        cat("From source file \"", src, "\"\n", sep = "")
}
## Print the standard tracing message: the (possibly abbreviated)
## deparsed call being traced, followed by an optional message.
.doTracePrint <- function(msg = "") {
    caller <- deparse(sys.call(sys.parent(1)))
    if (length(caller) > 1)
        caller <- paste(caller[[1L]], "....")
    cat("Tracing", caller, msg, "\n")
}
## Name of the traced counterpart of a class: "<class>WithTrace".
## Uses `[] <-` so names and other attributes (e.g. package) survive.
.traceClassName <- function(className) {
    res <- className
    res[] <- paste0(res, "WithTrace")
    res
}
## Assign `value` to symbol `what` in environment `where`, even when the
## binding is locked (temporarily unlocking it).  For function values the
## binding in the function's own (namespace) environment is updated too,
## so namespace-internal calls see the new definition.  Each successful
## assignment invalidates the S4 method cache.
.assignOverBinding <- function(what, value, where, verbose = TRUE) {
    if(verbose) {
        pname <- getPackageName(where)
        msg <-
            gettextf("assigning over the binding of symbol %s in environment/package %s",
                     sQuote(what), sQuote(pname))
        message(strwrap(msg), domain = NA)
    }
    ## suppress unlock/lock warnings; restore warn option on exit
    warnOpt <- options(warn= -1)
    on.exit(options(warnOpt))
    if(is.function(value)) {
        ## also update the copy in the function's own environment
        fenv <- environment(value)
        if(is.null(fenv))
            fenv <- baseenv()
        if(!identical(fenv, where) && exists(what, envir = fenv, inherits = FALSE
                                             ) && bindingIsLocked(what, fenv)) {
            unlockBinding(what, fenv)
            assign(what, value, fenv)
            invalidateS4Cache(paste(".assignOverBinding(",sQuote(what),").1",sep=""))
            lockBinding(what, fenv)
        }
    }
    if(exists(what, envir = where, inherits = FALSE) && bindingIsLocked(what, where)) {
        unlockBinding(what, where)
        assign(what, value, where)
        invalidateS4Cache(paste(".assignOverBinding(",sQuote(what),").2",sep=""))
        lockBinding(what, where)
    }
    else {
        assign(what, value, where)
        invalidateS4Cache(paste(".assignOverBinding(",sQuote(what),").3",sep=""))
    }
}
## Like setMethod(), but works when the binding of `what` in `where` is
## locked; when a generic already exists there, the method is set both in
## `where` and in the environment where the generic was found.
## FIX(review): restored the missing `else` before `hasFunction <- FALSE`;
## the original unconditionally overwrote hasFunction, making the
## generic-aware branch unreachable (matches base R
## methods:::.setMethodOverBinding).
.setMethodOverBinding <- function(what, signature, method, where, verbose = TRUE) {
    if(verbose)
        warning(gettextf("setting a method over the binding of symbol %s in environment/package %s",
                         sQuote(what),
                         sQuote(getPackageName(where))),
                domain = NA)
    if(exists(what, envir = where, inherits = FALSE)) {
        fdef <- get(what, envir = where)
        hasFunction <- is(fdef, "genericFunction")
    }
    else
        hasFunction <- FALSE
    if(hasFunction) {
        ## set the method where the generic function lives as well
        where2 <- findFunction(what, where = environment(fdef))[[1L]]
        unlockBinding(what, where)
        setMethod(what, signature, method, where = where)
        lockBinding(what, where)
        unlockBinding(what, where2)
        setMethod(what, signature, method, where = where2)
        lockBinding(what, where2)
    }
    else {
        setMethod(what, signature, method, where = where)
    }
}
## Locate the "imports:<pkg>" environment on the parent chain of a
## package's namespace, or NULL when no such environment exists.
.getImportsEnv <- function(pkg) {
    target <- paste0("imports:", pkg)
    env <- asNamespace(pkg)
    while (!identical(env, emptyenv())) {
        if (identical(attr(env, "name"), target))
            return(env)
        env <- parent.env(env)
    }
    NULL
}
## Propagate a (traced) replacement function into the imports environment
## of a package that imports it, so the importer sees the new definition.
.updateInImportsEnv <- function(what, newFun, importingPkg) {
    imp <- .getImportsEnv(importingPkg)
    if (!is.null(imp) && what %in% names(imp)) {
        .assignOverBinding(what, newFun, imp, FALSE)
    }
}
## Search-path-style name for a namespace environment: "namespace:<pkg>".
.searchNamespaceNames <- function(env) {
    sprintf("namespace:%s", getNamespaceName(env))
}
## Find the environment holding function `what` (generic lookup when a
## signature is given) plus a printable package name for it.  Returns
## list(pname=, whereF=); falls back to baseenv() with an empty name when
## the function cannot be found.
.findFunEnvAndName <- function(what, where, signature = NULL) {
    pname <- character()
    if(is.null(signature)) {
        whereF <- findFunction(what, where = where)
        if(length(whereF)>0)
            whereF <- whereF[[1L]]
        else return(list(pname = pname, whereF = baseenv()))
    } else
        whereF <- .genEnv(what, where)
    ## derive a display name from the environment's "name" attribute
    ## (e.g. "package:stats") or from the namespace name
    if("name" %in% names(attributes(whereF)))
        pname <- gsub("^.*:", "", attr(whereF, "name"))
    else if(isNamespace(whereF))
        pname <- .searchNamespaceNames(whereF)
    list(pname = pname, whereF = whereF)
}
## Create (if needed) the "<class>WithTrace" class extending both the
## original class and "traceable".  Falls back to the global environment
## when the class's own environment is locked.  Returns the class name.
.makeTraceClass <- function(traceClassName, className, verbose = TRUE) {
    if(isClass(as.character(traceClassName)))
        return(as.character(traceClassName))
    if(verbose)
        message(sprintf("Constructing traceable class %s", dQuote(traceClassName)),
                domain = NA)
    env <- .classEnv(className)
    if(environmentIsLocked(env)) {
        message(gettextf("Environment of class %s is locked; using global environment for new class",
                         dQuote(className)),
                domain = NA)
        env <- .GlobalEnv
        packageSlot(traceClassName) <- NULL
    }
    setClass(traceClassName,
             contains = c(className, "traceable"), where = env)
    ## inherit a class-specific show() method when one exists
    if(existsMethod("show", className, env))
        setMethod("show", traceClassName, .showTraceable)
    traceClassName
}
## `fdef` is referenced below without a visible binding (kept as in base
## R); declare it to silence R CMD check.
utils::globalVariables("fdef")
## Stand-in for setMethod() used when evaluating package source in a
## sandbox (see .setEnvForSource): instead of registering a method it just
## assigns the definition under a synthetic "f#sig1#sig2" name in `where`.
.dummySetMethod <- function(f, signature = character(), definition,
                            where = topenv(parent.frame()), valueClass = NULL,
                            sealed = FALSE)
{
    if(is.function(f) && is(f, "genericFunction"))
        f <- fdef@generic
    else if(is.function(f)) {
        if(is.primitive(f))
            f <- .primname(f)
        else
            stop("a function for argument 'f' must be a generic function")
    } else
        ## mirror setMethod()'s aliasing of as.double to as.numeric
        f <- switch(f, "as.double" = "as.numeric", f)
    assign(.dummyMethodName(f, signature), definition, envir = where)
}
## Methods-package calls that are neutralized when sourcing package code
## into a sandbox environment.
.functionsOverriden <- c("setClass", "setClassUnion", "setGeneric", "setIs", "setMethod", "setValidity")
## Override the functions above inside `env`: setMethod becomes
## .dummySetMethod, everything else a no-op returning its first argument.
.setEnvForSource <- function(env) {
    doNothing <- function(x, ...)x
    for(f in .functionsOverriden)
        assign(f, switch(f, setMethod = .dummySetMethod, doNothing),
               envir = env)
    env
}
## Synthetic object name for a method stored by .dummySetMethod():
## the generic name and signature joined by "#".
## FIX(review): restored the `"#")` terminator truncated by a
## comment-stripping artifact (matches base R methods:::.dummyMethodName).
.dummyMethodName <- function(f, signature)
    paste(c(f,signature), collapse="#")
## Guess which package a sandboxed source environment belongs to by
## looking up where its objects are currently found, preferring the
## package in which most objects live (ignoring .GlobalEnv when there are
## alternatives).  Errors when nothing can be located.
.guessPackageName <- function(env) {
    allObjects <- names(env)
    ## ignore the setMethod/setClass/... overrides installed by
    ## .setEnvForSource
    allObjects <- allObjects[is.na(match(allObjects, .functionsOverriden))]
    possible <- sort(table(unlist(lapply(allObjects, utils::find))),
                     decreasing = TRUE)
    if(length(possible) == 0)
        stop("none of the objects in the source code could be found: need to attach or specify the package")
    else if(length(possible) > 1L) {
        global <- match(".GlobalEnv", names(possible), 0)
        if(global > 0) {
            possible <- possible[-global]
        }
        if(length(possible) > 1L)
            warning(gettextf("objects found in multiple packages: using %s and ignoring %s",
                             sQuote(names(possible[[1L]])),
                             paste(sQuote(names(possible[-1L])),
                                   collapse = ", ")),
                    domain = NA)
    }
    ## strip the "package:" prefix from the winning search-path name
    .rmpkg(names(possible[1L]))
}
## Evaluate source files (or a connection) into a new "sourceEnvironment"
## whose parent is the target package's attached environment or namespace,
## so the sourced code can see the package's objects.  Optionally locks
## the result.  Returns the source environment.
evalSource <- function(source, package = "", lock = TRUE, cache = FALSE) {
    if(!nzchar(package))
        envp <- .GlobalEnv
    else {
        ## prefer the attached "package:<pkg>" environment; fall back on
        ## the namespace when the package is loaded but not attached
        pstring <- paste0("package:",package)
        packageIsVisible <- pstring %in% search()
        if(packageIsVisible) {
            envp <- as.environment(pstring)
        }
        else {
            envp <- tryCatch(asNamespace(package), error = function(cond) NULL)
        }
        if(is.null(envp))
            stop(gettextf("package %s is not attached and no namespace found for it",
                          sQuote(package)),
                 domain = NA)
    }
    env <- new("sourceEnvironment", new.env(parent = envp),
               packageName = package,
               sourceFile = (if(is.character(source)) source else ""))
    env$.packageName <- package
    setCacheOnAssign(env, cache)
    if(is(source, "character"))
        for(text in source) sys.source(text, envir = env)
    else if(is(source, "connection")) sys.source(source, envir = env)
    else if(!is(source, "environment"))
        stop(gettextf("invalid 'source' argument: expected file names or a connection but got an object of class %s",
                      dQuote(class(source)[[1L]])),
             domain = NA)
    if(lock)
        lockEnvironment(env, bindings = TRUE)
    env
}
insertSource <- function(source, package = "",
                     functions = allPlainObjects(),
                     methods = (if(missing(functions)) allMethodTables() else NULL)
                     , force = missing(functions) && missing(methods)
                     ){
    ## Insert revised definitions of functions and S4 method tables found in
    ## 'source' (file names, a connection, or an environment, typically from
    ## evalSource()) into the corresponding package environment and/or
    ## namespace, using trace() so the originals can later be restored.
    MPattern <- .TableMetaPattern()
    CPattern <- .ClassMetaPattern()
    ## NOTE: these helpers (used lazily by the argument defaults) rely on
    ## 'allObjects', which is assigned below before the defaults are forced.
    allPlainObjects <- function()
        allObjects[!(grepl(MPattern, allObjects) | grepl(CPattern, allObjects) | ".cacheOnAssign" == allObjects)]
    allMethodTables <- function()
        allObjects[grepl(MPattern, allObjects)]
    ## Two functions differ when either the body or the formals differ.
    differs <- function(f1, f2)
        !(identical(body(f1), body(f2)) && identical(args(f1), args(f2)))
    ## Infer the package name from the source environment when not supplied.
    if(is.environment(source) && !nzchar(package)) {
        if(is(source, "sourceEnvironment"))
            package <- source@packageName
        else if(exists(".packageName", envir = source, inherits = FALSE))
            package <- get(".packageName", envir = source)
    }
    if(is(source, "environment"))
        env <- source
    else
        env <- evalSource(source, package, FALSE)
    envPackage <- getPackageName(env, FALSE)
    envp <- parent.env(env)
    if(identical(envp, .GlobalEnv) || !nzchar(envPackage)) {
        if(!nzchar(package))
            package <- .guessPackageName(env)
        if(identical(package, ".GlobalEnv"))
            envns <- NULL
        else {
            pname <- paste0("package:", package)
            ## Locate the attached package environment; retrying the same
            ## call cannot succeed, so fail directly when it is absent.
            envp <- tryCatch(as.environment(pname), error = function(cond)NULL)
            if(is.null(envp))
                stop(gettextf(
                    "cannot find an environment corresponding to package name '%s'",
                    package), domain = NA)
            envns <- tryCatch(asNamespace(package), error = function(cond)NULL)
        }
        if(nzchar(package))
            assign(".packageName", package, envir = env)
    }
    else {
        if(isNamespace(envp))
            envns <- envp
        else
            envns <- tryCatch(asNamespace(package), error = function(cond)NULL)
    }
    if(nzchar(envPackage) && envPackage != package)
        warning(gettextf("supplied package, %s, differs from package inferred from source, %s",
                         sQuote(package), sQuote(envPackage)),
                domain = NA)
    packageSlot(env) <- package
    allObjects <- names(env)
    if(!missing(functions)) {
        ## Warn about requested objects that the source does not define.
        notThere <- is.na(match(functions, allObjects))
        if(any(notThere)) {
            warning(gettextf("cannot insert these (not found in source): %s",
                             paste0('"', functions[notThere], '"',
                                    collapse = ", ")),
                    domain = NA)
        }
    }
    .mnames <- allMethodTables()
    if(length(methods) > 0) {
        ## Keep only methods whose dispatch table appears in the source.
        notThere <- vapply(methods, function(fname)
            length(grep(fname, .mnames, fixed = TRUE)) == 0, NA)
        if(any(notThere)) {
            warning(gettextf("cannot insert methods for these functions (methods table not found in source): %s",
                             paste0('"', methods[notThere], '"',
                                    collapse = ", ")),
                    domain = NA)
            methods <- methods[!notThere]
        }
        methodNames <- vapply(methods, function(fname)
            .mnames[[grep(fname, .mnames, fixed = TRUE)[[1]]]], "")
    }
    else {
        methodNames <- .mnames
        methods <- sub(.TableMetaPrefix(), "", methodNames)
        methods <- sub(":.*","",methods)
    }
    notTraceable <- newObjects <- objectsDone <- character()
    for(i in seq_along(functions)) {
        this <- functions[[i]]
        thisWhere <- NULL
        ## Find the current definition: prefer the attached package
        ## environment, otherwise fall back to the namespace.
        if(is.null(envns) ||
           exists(this, envir = envp, inherits = FALSE)) {
            envwhere <- envp
            thisWhere <- get(this, envir = envp)
        }
        else {
            envwhere <- envns
            if(is.environment(envns) &&
               exists(this, envir = envns, inherits = FALSE))
                thisWhere <- get(this, envir = envns)
        }
        thisObj <- get(this, envir = env)
        if(is.function(thisObj) && is.function(thisWhere)
           && differs(thisObj, thisWhere)) {
            ## trace()-based insertion keeps the original recoverable.
            suppressMessages(
                .TraceWithMethods(this, where = envwhere, edit = env))
            objectsDone <- c(objectsDone, this)
        }
        else if(force)
            assign(this, thisObj, envir = envwhere)
        else if(!is.function(thisObj))
            notTraceable <- c(notTraceable, this)
        else if(is.null(thisWhere))
            newObjects <- c(newObjects, this)
    }
    if(length(notTraceable) > 0)
        message(gettextf("Non-function objects are not currently inserted (not traceable): %s",
                         paste(notTraceable, collapse = ", ")), domain = NA)
    if(length(newObjects) > 0)
        message(gettextf("New functions are not currently inserted (not traceable): %s",
                         paste(newObjects, collapse = ", ")), domain = NA)
    if(length(objectsDone) > 0)
        message(gettextf("Modified functions inserted through trace(): %s",
                         paste(objectsDone, collapse = ", ")), domain = NA)
    for(i in seq_along(methods)) {
        .copyMethods(methods[[i]], methodNames[[i]], env, envp)
    }
    if(!is.environment(source)) {
        lockEnvironment(env, bindings = TRUE)
        invisible(env)
    }
    else
        invisible(source)
}
.copyMethods <- function(f, tableName, env, envwhere) {
    ## Insert methods for generic 'f' whose table 'tableName' in 'env'
    ## differs from the currently dispatched definitions, via trace().
    ##
    ## BUG FIX: the original compared body(o2) with itself, which is always
    ## TRUE, so a method whose body changed was never considered different.
    ## Compare o1 against o2, mirroring differs() in insertSource().
    differs <- function(o1, o2)
        !(is.function(o2) &&
          identical(body(o1), body(o2)) && identical(args(o1), args(o2)))
    table <- get(tableName, envir=env)
    fdef <- getGeneric(f, where = envwhere)
    if(!is(fdef, "genericFunction")) {
        message(gettextf("%s() is not a generic function in the target environment -- methods will not be inserted",
                         f), domain = NA)
        return(NULL)
    }
    curTable <- getMethodsForDispatch(fdef)
    allObjects <- sort(names(table))
    if(length(allObjects) > 0) {
        ## For each signature in the source table, insert when it differs
        ## from (or is missing in) the current dispatch table.
        methodsInserted <- as.character(Filter(function(this) {
            def <- get(this, envir = table)
            curdef <- curTable[[this]]
            if(differs(def, curdef)) {
                suppressMessages(
                    .TraceWithMethods(f, signature = this, where = envwhere,
                                      edit = env))
                TRUE
            } else
                FALSE
        }, allObjects))
        if(length(methodsInserted) > 0)
            message(gettextf("Methods inserted for function %s(): %s",
                             f, paste(methodsInserted, collapse =", ")),
                    domain = NA)
    }
}
## Placeholder for class insertion: currently only reports the class name;
## no class metadata is actually copied into the target environment.
.copyClass <- function(class, env, envwhere) {
    message("Pretend we inserted class ", class, domain = NA)
}
.findNewDefForTrace <- function(what, signature, env, package) {
    ## Look up the edited definition for 'what' in the tracing environment
    ## 'env': a plain function when 'signature' is NULL, otherwise the method
    ## for 'signature' inside the corresponding S4 methods table.
    ##
    ## BUG FIX: the stop() call below previously passed a stray, undefined
    ## 'source' argument to gettextf(), which would itself raise an error
    ## before the intended message could be signalled.
    if(is.null(signature)) {
        if(exists(what, envir = env, inherits = FALSE))
            newObject <- get(what, envir = env)
        else
            stop(gettextf("no definition for object %s found in tracing environment",
                          sQuote(what)),
                 domain = NA)
    }
    else {
        table <- .TableMetaName(what, "")
        allObjects <- sort(names(env))
        i <- grep(table, allObjects, fixed = TRUE)
        if(length(i) == 1)
            table <- env[[allObjects[[i]]]]
        else if(length(i) > 1) {
            ## Ambiguous match: take the first table and warn.
            table <- allObjects[[i[[1]]]]
            warning(gettextf("multiple generics match pattern, using table %s", table)
                    , domain = NA)
            table <- env[[table]]
        }
        else
            stop(gettextf("does not seem to be a method table for generic %s in tracing environment",
                          sQuote(what)),
                 domain = NA)
        if(exists(signature, envir = table, inherits = FALSE))
            newObject <- get(signature, envir = table)
        else
            stop(gettextf("no method in methods table for %s for signature %s",
                          sQuote(what),
                          sQuote(signature)),
                 domain = NA)
    }
    newObject
}
# Plot dependent-variable (e.g. concentration) vs time profiles with ggplot2.
# Returns a single linear-scale plot (onlyLin), a single log-scale plot
# (onlyLog), or both panels arranged side by side via gridExtra::arrangeGrob.
# Arguments: df = data, xvar/yvar = column names, obsLog = back-transform
# log-scale observations via exp(), color/group = optional aesthetic column
# names, STRATX/STRATY = facet_grid formula parts, my*Br/my*lim = axis breaks
# and limits for the linear and log panels, title = optional overall title.
dv.plot <- function(df,
                    xvar = "Time",
                    yvar = "Conc",
                    obsLog = FALSE,
                    myXlab = "Time",
                    myYlab = "Concentration",
                    color = NULL,
                    group = NULL,
                    guide = TRUE,
                    onlyLin = FALSE,
                    onlyLog = FALSE,
                    XYlog = FALSE,
                    STRATY = ".",
                    STRATX = ".",
                    myYBr = waiver(),
                    myXBr = waiver(),
                    myYBrLog = waiver(),
                    myXBrLog = waiver(),
                    myYlim = NULL,
                    myXlim = NULL,
                    myYlimLog = NULL,
                    myXlimLog = NULL,
                    title=NULL){
  # Dummy-assign-then-remove trick to silence R CMD check "no visible
  # binding" notes for ggplot2/grid/gridExtra symbols used below.
  "ID" <- "Time" <- "Conc" <- "theme" <- "unit" <- "element_text" <- "xlab" <- "ylab" <- "geom_line" <- "aes_string" <- "geom_point" <- "ggplot" <- "facet_wrap" <- "scale_y_log10" <- "arrangeGrob" <- "textGrob" <- "gpar" <- "packageVersion" <- NULL
  rm(list=c("ID","Time","Conc","theme","unit","element_text","xlab","ylab","geom_line","aes_string","geom_point","ggplot","facet_wrap","scale_y_log10","arrangeGrob","textGrob","gpar","packageVersion"))
  # Coerce plotting columns to numeric (via character to handle factors).
  df[,xvar] <- as.numeric(as.character(df[,xvar]))
  df[,yvar] <- as.numeric(as.character(df[,yvar]))
  # Observations recorded on the log scale are back-transformed.
  if(obsLog) df[,yvar] <- exp(df[,yvar])
  # Build the base plot, wiring up optional color and/or group aesthetics.
  if(is.null(color)){
    if(is.null(group)){
      p01 <- ggplot(df,aes_string(x=xvar,y=yvar,group=1))
    }else{
      p01 <- ggplot(df,aes_string(x=xvar,y=yvar,group=group))
    }
  }else{
    # Wrap the color column in factor() so discrete colors are used.
    color <- paste0("factor(",color,")")
    if(is.null(group)){
      p01 <- ggplot(df,aes_string(x=xvar,y=yvar,color=color))
    }else{
      p01 <- ggplot(df,aes_string(x=xvar,y=yvar,group=group,color=color))
    }
  }
  p01 <- p01 + geom_line(alpha = 0.5)
  p01 <- p01 + geom_point()
  p01 <- p01 + xlab(myXlab)
  p01 <- p01 + ylab(myYlab)
  p01 <- p01 + theme(legend.position = "none",
                     axis.text.x = element_text(size=10),
                     axis.text.y = element_text(size=10),
                     strip.text.x = element_text(size=10),
                     title = element_text(size=10))
  # Optional stratification; '. ~ .' means no faceting requested.
  facets <- paste(STRATY, '~', STRATX)
  if (facets != '. ~ .') p01 <- p01 + facet_grid(facets, scales = "free")
  # p02 is the log-scale variant of the same plot.
  p02 <- p01 + scale_y_log10(breaks=myYBrLog,labels=myYBrLog)
  if(XYlog) p02 <- p02 + scale_x_log10(breaks=myXBrLog,labels=myXBrLog)
  p01 <- p01 + scale_x_continuous(breaks=myXBr, labels=myXBr)
  p01 <- p01 + scale_y_continuous(breaks=myYBr, labels=myYBr)
  # Apply axis limits (linear panel) only for the combinations supplied.
  if (!is.null(myYlim) & is.null(myXlim)){
    p01 <- p01 + coord_cartesian(ylim=myYlim)
  }else if (is.null(myYlim) & !is.null(myXlim)){
    p01 <- p01 + coord_cartesian(xlim=myXlim)
  }else if (!is.null(myYlim) & !is.null(myXlim)){
    p01 <- p01 + coord_cartesian(xlim=myXlim, ylim=myYlim)
  }
  # Same for the log panel.
  if (!is.null(myYlimLog) & is.null(myXlimLog)){
    p02 <- p02 + coord_cartesian(ylim=myYlimLog)
  }else if (is.null(myYlimLog) & !is.null(myXlimLog)){
    p02 <- p02 + coord_cartesian(xlim=myXlimLog)
  }else if (!is.null(myYlimLog) & !is.null(myXlimLog)){
    p02 <- p02 + coord_cartesian(xlim=myXlimLog, ylim=myYlimLog)
  }
  if(onlyLin){
    if(!is.null(title)) p01 <- p01 + ggtitle(title)
    return(p01)
  }
  if(onlyLog){
    if(!is.null(title)) p02 <- p02 + ggtitle(title)
    return(p02)
  }
  # Default: both panels side by side with shared axis titles in the margins.
  if (!onlyLin & !onlyLog){
    df$type1 <- "Linear"
    df$type2 <- "Log"
    # %+% replaces the plot's data; facet_wrap adds a "Linear"/"Log" strip.
    p01 <- p01 %+% xlab("") %+% ylab("") %+% df + facet_wrap(~type1)
    p02 <- p02 %+% xlab("") %+% ylab("") %+% df + facet_wrap(~type2)
    Label <- NULL
    if(!is.null(title)) Label <- textGrob(paste0(title,"\n"),vjust=1,hjust=0, x=0, gp=gpar(cex=1.2))
    plot_args <- list(p01,p02,ncol=2,
                      top=Label,
                      left=textGrob(myYlab,gp=gpar(cex=1),rot=90,vjust=1),
                      bottom=textGrob(myXlab,gp=gpar(cex=1),vjust = 0))
    # Older gridExtra used main/sub instead of top/bottom; rename for those.
    if(packageVersion("gridExtra") < "0.9.2"){
      arg_names <- names(plot_args)
      arg_names <- sub("top","main",arg_names)
      arg_names <- sub("bottom","sub",arg_names)
      names(plot_args) <- arg_names
    }
    gdr <- suppressMessages(suppressWarnings(do.call(arrangeGrob,plot_args)))
    return(gdr)
  }
}
library(testthat)
# Tests for create_credential_local(): it should create a credential file,
# creating intermediate directories when needed, and refuse to overwrite.
test_that("existing directory", {
  # tempdir() already exists, so only the file itself must be created.
  path_demo <- base::tempfile(pattern="temp", fileext=".credentials")
  on.exit(base::unlink(path_demo))
  success <- create_credential_local(path_demo)
  expect_true(success)
  expect_true(base::file.exists(path_demo))
})
test_that("new directory", {
  # The "new-dir/" component does not exist yet; the function must create it.
  path_demo <- base::tempfile(pattern="new-dir/temp", fileext=".credentials")
  on.exit(base::unlink(path_demo))
  success <- create_credential_local(path_demo)
  expect_true(success)
  expect_true(base::file.exists(path_demo))
})
test_that("overwrite-fail", {
  # A second call on the same path should error rather than clobber the file.
  expected_message <- "^A credential file already exists at .+?\\.credentials`\\.$"
  path_demo <- base::tempfile(pattern="temp", fileext=".credentials")
  on.exit(base::unlink(path_demo))
  success <- create_credential_local(path_demo)
  expect_true(success)
  expect_error(
    regexp = expected_message,
    create_credential_local(path_demo)
  )
})
#' Write a Raven .rvi template file for a known hydrologic model structure.
#'
#' @param modelname One of the supported model templates (see known_templates).
#' @param filename Output file; defaults to "<modelname>_template.rvi" and a
#'   missing ".rvi" extension is appended with a warning.
#' @param overwrite Allow replacing an existing file (default TRUE).
#' @param writeheader Write a Raven file header via rvn_write_Raven_newfile().
#' @param filetype,author,description Header fields.
#' @return TRUE invisibly on success (stops on error).
rvn_rvi_write_template <- function(modelname="UBCWM", filename=NULL,
                                   overwrite=TRUE, writeheader=TRUE,
                                   filetype="rvi ASCII Raven", author="RavenR",
                                   description=NULL) {
  known_templates <- c("UBCWM", "HBV-EC", "HBV-Light", "GR4J", "CdnShield", "MOHYSE", "HMETS", "HYPR", "HYMOD")
  # BUG FIX: use short-circuit `||` here. With `|`, a NULL modelname made the
  # condition `TRUE | logical(0)` == logical(0), so if() errored with
  # "argument is of length zero" instead of giving the intended message.
  if (is.null(modelname) || modelname %notin% known_templates) {
    stop("modelname must be one of the available model templates, see function details")
  }
  if (is.null(filename)) {
    filename <- sprintf("%s_template.rvi", modelname)
  } else if (rvn_substrRight(filename,4) != ".rvi") {
    warning("filename should end in .rvi, extension will be added to filename")
    filename <- sprintf("%s.rvi", filename)
  }
  # Scalar condition: use && rather than elementwise &.
  if (!overwrite && file.exists(filename)) {
    stop(sprintf("Filename %s already exists and overwrite set as FALSE",filename))
  }
  if (writeheader) {
    if (is.null(description)) {
      description <- sprintf("File template for %s model, written using RavenR::rvn_rvi_write_template", modelname)
    }
    rvn_write_Raven_newfile(filename=filename, description=description, filetype=filetype, author=author)
  }
  # Verbatim .rvi bodies for each supported model structure.
  model_templates <- list(
    "UBCWM"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :Interpolation           INTERP_NEAREST_NEIGHBOR
  :Routing             ROUTE_NONE
  :CatchmentRoute      ROUTE_DUMP
  :Evaporation         PET_MONTHLY_FACTOR
  :OW_Evaporation      PET_MONTHLY_FACTOR
  :SWRadiationMethod   SW_RAD_UBCWM
  :SWCloudCorrect      SW_CLOUD_CORR_UBCWM
  :SWCanopyCorrect     SW_CANOPY_CORR_UBCWM
  :LWRadiationMethod   LW_RAD_UBCWM
  :WindspeedMethod     WINDVEL_UBCWM
  :RainSnowFraction    RAINSNOW_UBCWM
  :PotentialMeltMethod POTMELT_UBCWM
  :OroTempCorrect      OROCORR_UBCWM
  :OroPrecipCorrect    OROCORR_UBCWM2
  :OroPETCorrect       OROCORR_UBCWM
  :CloudCoverMethod    CLOUDCOV_UBCWM
  :PrecipIceptFract    PRECIP_ICEPT_USER
  :MonthlyInterpolationMethod MONTHINT_LINEAR_21
  :SoilModel           SOIL_MULTILAYER 6
  :SnapshotHydrograph
  :Alias TOP_SOIL   SOIL[0]
  :Alias INT_SOIL   SOIL[1]
  :Alias SHALLOW_GW SOIL[2]
  :Alias DEEP_GW    SOIL[3]
  :Alias INT_SOIL2  SOIL[4]
  :Alias INT_SOIL3  SOIL[5]
  :HydrologicProcesses
    :SnowAlbedoEvolve    SNOALB_UBCWM
    :SnowBalance         SNOBAL_UBCWM    MULTIPLE        MULTIPLE
    :Flush               RAVEN_DEFAULT   PONDED_WATER    INT_SOIL2
      :-->Conditional HRU_TYPE IS GLACIER
    :GlacierMelt         GMELT_UBC       GLACIER_ICE     PONDED_WATER
    :Precipitation       PRECIP_RAVEN    ATMOS_PRECIP    MULTIPLE
    :SoilEvaporation     SOILEVAP_UBC    MULTIPLE        ATMOSPHERE
    :Infiltration        INF_UBC         PONDED_WATER    MULTIPLE
    :Flush               RAVEN_DEFAULT   SURFACE_WATER   INT_SOIL2
    :GlacierInfiltration GINFIL_UBCWM    PONDED_WATER    MULTIPLE
    :Percolation         PERC_LINEAR_ANALYTIC INT_SOIL   INT_SOIL2
    :Percolation         PERC_LINEAR_ANALYTIC INT_SOIL2  INT_SOIL3
    :Baseflow            BASE_LINEAR     INT_SOIL3       SURFACE_WATER
    :Baseflow            BASE_LINEAR     SHALLOW_GW      SURFACE_WATER
    :Baseflow            BASE_LINEAR     DEEP_GW         SURFACE_WATER
    :GlacierRelease      GRELEASE_LINEAR GLACIER         SURFACE_WATER
  :EndHydrologicProcesses
",
    "HBV-EC"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :Interpolation           INTERP_NEAREST_NEIGHBOR
  :Routing             	ROUTE_NONE
  :CatchmentRoute      	ROUTE_TRI_CONVOLUTION
  :Evaporation         	PET_FROMMONTHLY
  :OW_Evaporation      	PET_FROMMONTHLY
  :SWRadiationMethod   	SW_RAD_DEFAULT
  :SWCloudCorrect      	SW_CLOUD_CORR_NONE
  :SWCanopyCorrect     	SW_CANOPY_CORR_NONE
  :LWRadiationMethod   	LW_RAD_DEFAULT
  :RainSnowFraction    	RAINSNOW_HBV
  :PotentialMeltMethod 	POTMELT_HBV
  :OroTempCorrect      	OROCORR_HBV
  :OroPrecipCorrect    	OROCORR_HBV
  :OroPETCorrect       	OROCORR_HBV
  :CloudCoverMethod    	CLOUDCOV_NONE
  :PrecipIceptFract    	PRECIP_ICEPT_USER
  :MonthlyInterpolationMethod MONTHINT_LINEAR_21
  :SoilModel           	SOIL_MULTILAYER 3
  :Alias       FAST_RESERVOIR SOIL[1]
  :Alias       SLOW_RESERVOIR SOIL[2]
  :LakeStorage SLOW_RESERVOIR
  :HydrologicProcesses
    :SnowRefreeze      FREEZE_DEGREE_DAY  SNOW_LIQ        SNOW
    :Precipitation     PRECIP_RAVEN       ATMOS_PRECIP    MULTIPLE
    :CanopyEvaporation CANEVP_ALL         CANOPY          ATMOSPHERE
    :CanopySnowEvap    CANEVP_ALL         CANOPY_SNOW     ATMOSPHERE
    :SnowBalance       SNOBAL_SIMPLE_MELT SNOW            SNOW_LIQ
      :-->Overflow     RAVEN_DEFAULT      SNOW_LIQ        PONDED_WATER
    :Flush             RAVEN_DEFAULT      PONDED_WATER    GLACIER
      :-->Conditional HRU_TYPE IS GLACIER
    :GlacierMelt       GMELT_HBV          GLACIER_ICE     GLACIER
    :GlacierRelease    GRELEASE_HBV_EC    GLACIER         SURFACE_WATER
    :Infiltration      INF_HBV            PONDED_WATER    MULTIPLE
    :Flush             RAVEN_DEFAULT      SURFACE_WATER   FAST_RESERVOIR
      :-->Conditional HRU_TYPE IS_NOT GLACIER
    :SoilEvaporation   SOILEVAP_HBV       SOIL[0]         ATMOSPHERE
    :CapillaryRise     CRISE_HBV          FAST_RESERVOIR 	SOIL[0]
    :LakeEvaporation   LAKE_EVAP_BASIC    SLOW_RESERVOIR  ATMOSPHERE
    :Percolation       PERC_CONSTANT      FAST_RESERVOIR 	SLOW_RESERVOIR
    :Baseflow          BASE_POWER_LAW     FAST_RESERVOIR  SURFACE_WATER
    :Baseflow          BASE_LINEAR        SLOW_RESERVOIR  SURFACE_WATER
  :EndHydrologicProcesses
  :AggregatedVariable FAST_RESERVOIR AllHRUs
  :AggregatedVariable SLOW_RESERVOIR AllHRUs
",
    "HBV-Light"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :SoilModel             SOIL_MULTILAYER 3
  :Routing               ROUTE_NONE
  :CatchmentRoute        ROUTE_TRI_CONVOLUTION
  :Evaporation           PET_DATA
  :RainSnowFraction      RAINSNOW_HBV
  :PotentialMeltMethod   POTMELT_DEGREE_DAY
  :OroTempCorrect        OROCORR_HBV
  :OroPrecipCorrect      OROCORR_HBV
  :OroPETCorrect         OROCORR_HBV
  :CloudCoverMethod      CLOUDCOV_NONE
  :PrecipIceptFract      PRECIP_ICEPT_USER
  :Alias       TOPSOIL        SOIL[0]
  :Alias       FAST_RESERVOIR SOIL[1]
  :Alias       SLOW_RESERVOIR SOIL[2]
  :HydrologicProcesses
    :SnowRefreeze      FREEZE_DEGREE_DAY  SNOW_LIQ        SNOW
    :Precipitation     PRECIP_RAVEN       ATMOS_PRECIP    MULTIPLE
    :SnowBalance       SNOBAL_SIMPLE_MELT SNOW            SNOW_LIQ
      :-->Overflow     RAVEN_DEFAULT      SNOW_LIQ        PONDED_WATER
    :Infiltration      INF_HBV            PONDED_WATER    MULTIPLE
    :Flush             RAVEN_DEFAULT      SURFACE_WATER   FAST_RESERVOIR
    :SoilEvaporation   SOILEVAP_HBV       TOPSOIL         ATMOSPHERE
    :CapillaryRise     RISE_HBV           FAST_RESERVOIR  TOPSOIL
    :Percolation       PERC_CONSTANT      FAST_RESERVOIR  SLOW_RESERVOIR
    :Baseflow          BASE_POWER_LAW     FAST_RESERVOIR  SURFACE_WATER
    :Baseflow          BASE_THRESH_POWER  FAST_RESERVOIR  SURFACE_WATER
    :Baseflow          BASE_LINEAR        SLOW_RESERVOIR  SURFACE_WATER
  :EndHydrologicProcesses
",
    "GR4J"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :Interpolation           INTERP_NEAREST_NEIGHBOR
  :Routing                 ROUTE_NONE
  :CatchmentRoute          ROUTE_DUMP
  :Evaporation             PET_DATA
  :RainSnowFraction        RAINSNOW_DINGMAN
  :PotentialMeltMethod     POTMELT_DEGREE_DAY
  :OroTempCorrect          OROCORR_SIMPLELAPSE
  :OroPrecipCorrect        OROCORR_SIMPLELAPSE
  :SoilModel               SOIL_MULTILAYER 4
  :Alias PRODUCT_STORE      SOIL[0]
  :Alias ROUTING_STORE      SOIL[1]
  :Alias TEMP_STORE         SOIL[2]
  :Alias GW_STORE           SOIL[3]
  :HydrologicProcesses
    :Precipitation            PRECIP_RAVEN             ATMOS_PRECIP    MULTIPLE
    :SnowTempEvolve           SNOTEMP_NEWTONS          SNOW_TEMP
    :SnowBalance              SNOBAL_CEMA_NEIGE        SNOW            PONDED_WATER
    :OpenWaterEvaporation     OPEN_WATER_EVAP          PONDED_WATER    ATMOSPHERE
    :Infiltration             INF_GR4J                 PONDED_WATER    MULTIPLE
    :SoilEvaporation          SOILEVAP_GR4J            PRODUCT_STORE   ATMOSPHERE
    :Percolation              PERC_GR4J                PRODUCT_STORE   TEMP_STORE
    :Flush                    RAVEN_DEFAULT            SURFACE_WATER   TEMP_STORE
    :Split                    RAVEN_DEFAULT            TEMP_STORE      CONVOLUTION[0] CONVOLUTION[1] 0.9
    :Convolve                 CONVOL_GR4J_1            CONVOLUTION[0]  ROUTING_STORE
    :Convolve                 CONVOL_GR4J_2            CONVOLUTION[1]  TEMP_STORE
    :Percolation              PERC_GR4JEXCH            ROUTING_STORE   GW_STORE
    :Percolation              PERC_GR4JEXCH2           TEMP_STORE      GW_STORE
    :Flush                    RAVEN_DEFAULT            TEMP_STORE      SURFACE_WATER
    :Baseflow                 BASE_GR4J                ROUTING_STORE   SURFACE_WATER
  :EndHydrologicProcesses
", "CdnShield"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :InterpolationMethod     NEAREST_NEIGHBOR
  :SoilModel               SOIL_MULTILAYER 3
  :Routing                 ROUTE_DIFFUSIVE_WAVE
  :CatchmentRoute          ROUTE_TRI_CONVOLUTION
  :Evaporation             PET_HARGREAVES_1985
  :OW_Evaporation          PET_HARGREAVES_1985
  :SWCanopyCorrect         SW_CANOPY_CORR_STATIC
  :RainSnowFraction        RAINSNOW_DINGMAN
  :PotentialMeltMethod     POTMELT_DEGREE_DAY
  :PrecipIceptFract        PRECIP_ICEPT_LAI
  :MonthlyInterpolationMethod MONTHINT_LINEAR_MID
  :Alias SOIL0 SOIL[0]
  :Alias SOIL1 SOIL[1]
  :Alias SOIL2 SOIL[2]
  :HydrologicProcesses
    :SnowRefreeze           FREEZE_DEGREE_DAY   SNOW_LIQ      SNOW
    :Precipitation          PRECIP_RAVEN        ATMOS_PRECIP  MULTIPLE
    :CanopyEvaporation      CANEVP_MAXIMUM      CANOPY        ATMOSPHERE
    :CanopySnowEvap         CANEVP_MAXIMUM      CANOPY_SNOW   ATMOSPHERE
    :SnowBalance            SNOBAL_TWO_LAYER    MULTIPLE      MULTIPLE
    :Abstraction            ABST_FILL           PONDED_WATER  DEPRESSION
    :OpenWaterEvaporation   OPEN_WATER_EVAP     DEPRESSION    ATMOSPHERE
    :Infiltration           INF_HBV             PONDED_WATER  MULTIPLE
    :Baseflow               BASE_POWER_LAW      SOIL1         SURFACE_WATER
    :Baseflow               BASE_POWER_LAW      SOIL2         SURFACE_WATER
    :Interflow              INTERFLOW_PRMS      SOIL0         SURFACE_WATER
    :Percolation            PERC_GAWSER         SOIL0         SOIL1
    :Percolation            PERC_GAWSER         SOIL1         SOIL2
    :SoilEvaporation        SOILEVAP_ROOT       SOIL0         ATMOSPHERE
  :EndHydrologicProcesses
", "MOHYSE"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Method                  ORDERED_SERIES
  :Routing                 ROUTE_NONE
  :CatchmentRoute          ROUTE_GAMMA_CONVOLUTION
  :PotentialMeltMethod     POTMELT_DEGREE_DAY
  :Evaporation             PET_MOHYSE
  :RainSnowFraction        RAINSNOW_DATA
  :DirectEvaporation
  :SoilModel               SOIL_TWO_LAYER
  :HydrologicProcesses
    :SoilEvaporation  SOILEVAP_LINEAR    SOIL[0]            ATMOSPHERE
    :SnowBalance      SNOBAL_SIMPLE_MELT SNOW PONDED_WATER
    :Precipitation    RAVEN_DEFAULT      ATMOS_PRECIP       MULTIPLE
    :Infiltration     INF_HBV            PONDED_WATER       SOIL[0]
    :Baseflow         BASE_LINEAR        SOIL[0]            SURFACE_WATER
    :Percolation      PERC_LINEAR        SOIL[0]            SOIL[1]
    :Baseflow         BASE_LINEAR        SOIL[1]            SURFACE_WATER
  :EndHydrologicProcesses
", "HMETS"="
  :StartDate           1953-01-01 00:00:00
  :EndDate             2009-12-31 00:00:00
  :TimeStep            24:00:00
  :PotentialMeltMethod POTMELT_HMETS
  :RainSnowFraction    RAINSNOW_DATA
  :Evaporation         PET_OUDIN
  :CatchmentRoute      ROUTE_DUMP
  :Routing             ROUTE_NONE
  :SoilModel           SOIL_TWO_LAYER
  :HydrologicProcesses
    :SnowBalance     SNOBAL_HMETS    MULTIPLE     MULTIPLE
    :Precipitation   RAVEN_DEFAULT   ATMOS_PRECIP MULTIPLE
    :Infiltration    INF_HMETS       PONDED_WATER MULTIPLE
      :Overflow      OVERFLOW_RAVEN  SOIL[0]      CONVOLUTION[1]
    :Baseflow        BASE_LINEAR     SOIL[0]      SURFACE_WATER
    :Percolation     PERC_LINEAR     SOIL[0]      SOIL[1]
      :Overflow      OVERFLOW_RAVEN  SOIL[1]      CONVOLUTION[1]
    :SoilEvaporation SOILEVAP_ALL    SOIL[0]      ATMOSPHERE
    :Convolve        CONVOL_GAMMA    CONVOLUTION[0] SURFACE_WATER
    :Convolve        CONVOL_GAMMA_2  CONVOLUTION[1] SURFACE_WATER
    :Baseflow        BASE_LINEAR     SOIL[1]      SURFACE_WATER
  :EndHydrologicProcesses
",
    "HYPR"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :CatchmentRoute          TRIANGULAR_UH
  :Evaporation             PET_FROMMONTHLY
  :OW_Evaporation          PET_FROMMONTHLY
  :SWRadiationMethod       SW_RAD_DEFAULT
  :LWRadiationMethod       LW_RAD_DEFAULT
  :RainSnowFraction        RAINSNOW_HBV
  :PotentialMeltMethod     POTMELT_HBV
  :PrecipIceptFract        PRECIP_ICEPT_USER
  :MonthlyInterpolationMethod MONTHINT_LINEAR_21
  :SoilModel               SOIL_MULTILAYER 3
  :Alias       FAST_RESERVOIR SOIL[1]
  :Alias       SLOW_RESERVOIR SOIL[2]
  :HydrologicProcesses
    :SnowRefreeze      FREEZE_DEGREE_DAY  SNOW_LIQ        SNOW
    :Precipitation     PRECIP_RAVEN       ATMOS_PRECIP    MULTIPLE
    :CanopyEvaporation CANEVP_ALL         CANOPY          ATMOSPHERE
    :CanopySnowEvap    CANEVP_ALL         CANOPY_SNOW     ATMOSPHERE
    :SnowBalance       SNOBAL_SIMPLE_MELT SNOW            PONDED_WATER
    :Infiltration      INF_HBV            PONDED_WATER    MULTIPLE
    :Flush             RAVEN_DEFAULT      SURFACE_WATER   PONDED_WATER
    :Abstraction       ABST_PDMROF        PONDED_WATER    DEPRESSION
    :Flush             RAVEN_DEFAULT      SURFACE_WATER   FAST_RESERVOIR
    :SoilEvaporation   SOILEVAP_HYPR      MULTIPLE        ATMOSPHERE
    :Baseflow          BASE_LINEAR        FAST_RESERVOIR  SURFACE_WATER
    :Baseflow          BASE_THRESH_STOR   FAST_RESERVOIR  SURFACE_WATER
  :EndHydrologicProcesses
",
    "HYMOD"="
  :StartDate               2000-01-01 00:00:00
  :Duration                365
  :TimeStep                1.0
  :Routing                 ROUTE_NONE
  :CatchmentRoute          ROUTE_RESERVOIR_SERIES
  :Evaporation             PET_HAMON
  :OW_Evaporation          PET_HAMON
  :SWRadiationMethod       SW_RAD_NONE
  :LWRadiationMethod       LW_RAD_NONE
  :CloudCoverMethod        CLOUDCOV_NONE
  :RainSnowFraction        RAINSNOW_THRESHOLD
  :PotentialMeltMethod     POTMELT_DEGREE_DAY
  :PrecipIceptFract        PRECIP_ICEPT_NONE
  :SoilModel               SOIL_MULTILAYER 2
  :HydrologicProcesses
    :Precipitation    PRECIP_RAVEN       ATMOS_PRECIP  MULTIPLE
    :SnowBalance      SNOBAL_SIMPLE_MELT SNOW          PONDED_WATER
    :Infiltration     INF_PDM            PONDED_WATER  MULTIPLE
    :Flush            RAVEN_DEFAULT      SURFACE_WATER SOIL[1] 0.5
    :SoilEvaporation  SOILEVAP_PDM       SOIL[0]       ATMOSPHERE
    :Baseflow         BASE_LINEAR        SOIL[1]       SURFACE_WATER
  :EndHydrologicProcesses
"
  )
  # Append after the header when one was written; otherwise start fresh.
  if (writeheader) {
    fc <- file(filename, open="a+")
  } else {
    fc <- file(filename, open="w+")
  }
  writeLines(model_templates[[modelname]], con=fc)
  close(fc)
  return (TRUE)
}
# Aesthetic names that ggplot2 associates with the x and y continuous scales;
# used later to decide which axis a given aesthetic belongs to.
x_aes <- scale_x_continuous()$aesthetics
y_aes <- scale_y_continuous()$aesthetics
# Tidy-evaluate `expr` against `data`, returning NULL instead of failing.
# The result is rejected (NULL) when it is a function/environment/symbol/
# language object, or when its length matches neither 1 nor nrow(data).
safe_eval <- function(expr, data) {
  evaluated <- tryCatch(eval_tidy(expr, data), error = function(e) numeric())
  unusable_type <- is.function(evaluated) || is.environment(evaluated) ||
    is.symbol(evaluated) || is.language(evaluated)
  usable_length <- length(evaluated) == 1 || length(evaluated) == nrow(data)
  if (unusable_type || !usable_length) {
    return(NULL)
  }
  evaluated
}
# Fail fast with a readable message when a required quosure argument was not
# supplied by the caller (`name` is the user-facing argument label).
require_quo <- function(expr, name) {
  if (quo_is_missing(expr)) {
    stop(name, ' must be provided', call. = FALSE)
  }
}
# Does the (quoted) expression `x` contain a call to stat() anywhere?
# Recursively walks the call tree; non-call leaves can never match.
require_stat <- function(x) {
  if (!is.call(x)) {
    return(FALSE)
  }
  if (identical(x[[1]], quote(stat))) {
    return(TRUE)
  }
  # Recurse into the call's function and arguments.
  any(vapply(x, require_stat, logical(1)))
}
# Infix fallback operator: yields `r` when the left operand is missing,
# otherwise the left operand itself.
`%?%` <- function(l, r) if (missing(l)) r else l
# Read the pixel dimensions of a PNG file from its header without decoding
# the image. Returns c(height, width) taken from the IHDR chunk (bytes
# 17-20 = width, 21-24 = height, big-endian), after validating the 8-byte
# PNG signature.
png_dim <- function(file) {
  if (!file.exists(file)) {
    stop('Provided file does not exist', call. = FALSE)
  }
  header <- as.integer(readBin(file, n = 24L, what = "raw"))
  png_signature <- c(137, 80, 78, 71, 13, 10, 26, 10)
  if (any(header[1:8] != png_signature)) {
    stop('Provided file does not appear to be a png', call. = FALSE)
  }
  # Assemble a 32-bit big-endian integer from four header bytes.
  be_word <- function(idx) sum(header[idx] * 2^c(24, 16, 8, 0))
  c(be_word(21:24), be_word(17:20))
}
# Vignette script: robust vs classical mixed-effects fits on the (contaminated)
# Penicillin data using robustlmm::rlmer() versus lme4::lmer().
options(width = 60, str = strOptions(vec.len = 1.4), prompt = 'R> ', continue = '+ ')
require("robustlmm")
# PenicillinC (contaminated copy of Penicillin) is created by this script.
source(system.file("doc/Penicillin.R", package = "robustlmm"))
str(PenicillinC)
# Classical and robust fits of the same crossed random-effects model.
fm <- lmer(diameter ~ (1|plate) + (1|sample), PenicillinC)
rfm <- rlmer(diameter ~ (1|plate) + (1|sample), PenicillinC)
summary(rfm)
require(ggplot2)
theme_set(theme_bw())
# Diagnostic plots for the robust fit (residuals, random effects, weights).
print(plot(rfm, which = 1)[[1]] + scale_color_continuous(guide = "none"))
print(plot(rfm, which = 2)[[1]] + scale_color_continuous(guide = "none"))
print(plot(rfm, which = 3)[[1]] + theme(legend.position = "bottom"))
# Re-fit with more efficient (proposal II) rho functions, k = 2.28.
rfm2 <- update(rfm, rho.sigma.e = psi2propII(smoothPsi, k = 2.28),
               rho.sigma.b = psi2propII(smoothPsi, k = 2.28))
# Different tuning per variance component (plate vs sample).
rsb <- list(psi2propII(smoothPsi), psi2propII(smoothPsi, k = 2.28))
rfm3 <- update(rfm2, rho.sigma.b = rsb)
oldopts <- options(width = 90)
# Reference fit on the uncontaminated data for the comparison table.
fmUncontam <- update(fm, data = Penicillin)
compare(fmUncontam, fm, rfm, rfm2, rfm3, show.rho.functions = FALSE)
options(oldopts)
require(reshape2)
# Visual comparison of the Huber and smoothed psi-functions on [0, 3].
xs <- seq.int(0, 3, length.out = 100)
data <- data.frame(x = xs, Huber = huberPsiRcpp@psi(xs),
                   "Smoothed" = smoothPsi@psi(xs))
print(ggplot(melt(data, 1), aes(x, value, color = variable,
                                linetype = variable)) + geom_line() +
      scale_colour_hue(expression(paste(psi, "-function"))) +
      scale_linetype_discrete(expression(paste(psi, "-function"))) +
      ylab(expression(psi(x))) +
      theme(legend.position = "bottom", legend.box = "horizontal"))
# S3 generic: shift a time-based index forward by a period string
# (e.g. "1 month", "2 days"). Dispatches on the class of `index`.
add_time <- function(index, period) {
  UseMethod("add_time", index)
}
# Character input: parse to a date/datetime first, then re-dispatch.
add_time.character <- function(index, period) {
  index <- try_parse_date_time(index)
  add_time(index, period)
}
add_time.POSIXt <- function(index, period) {
  time_adder(index, period)
}
add_time.Date <- function(index, period) {
  time_adder(index, period)
}
# zoo::yearmon indexes are converted to Date (with a message) before adding.
add_time.yearmon <- function(index, period) {
  index <- lubridate::as_date(index)
  message("Converting to date class")
  time_adder(index, period)
}
# zoo::yearqtr indexes are converted to Date (with a message) before adding.
add_time.yearqtr <- function(index, period) {
  index <- lubridate::as_date(index)
  message("Converting to date class")
  time_adder(index, period)
}
# Plain numerics are rejected: calendar arithmetic needs a time-based class.
add_time.numeric <- function(index, period) {
  stop("Index must be a non-numeric time-based class.")
}
add_time.default <- function(index, period) {
  rlang::abort(paste0("No method for class ", class(index)[[1]], "."))
}
# S3 generic: shift a time-based index backward by a period string
# (e.g. "1 month", "2 days"). Mirrors add_time().
subtract_time <- function(index, period) {
  UseMethod("subtract_time", index)
}
# Character input: parse to a date/datetime first, then re-dispatch.
subtract_time.character <- function(index, period) {
  index <- try_parse_date_time(index)
  subtract_time(index, period)
}
subtract_time.POSIXt <- function(index, period) {
  time_subtracter(index, period)
}
subtract_time.Date <- function(index, period) {
  time_subtracter(index, period)
}
# zoo::yearmon indexes are converted to Date (with a message) first.
subtract_time.yearmon <- function(index, period) {
  index <- lubridate::as_date(index)
  message("Converting to date class")
  time_subtracter(index, period)
}
# zoo::yearqtr indexes are converted to Date (with a message) first.
subtract_time.yearqtr <- function(index, period) {
  index <- lubridate::as_date(index)
  message("Converting to date class")
  time_subtracter(index, period)
}
# Plain numerics are rejected: calendar arithmetic needs a time-based class.
subtract_time.numeric <- function(index, period) {
  stop("Index must be a non-numeric time-based class.")
}
subtract_time.default <- function(index, period) {
  rlang::abort(paste0("No method for class ", class(index)[[1]], "."))
}
# Infix alias for add_time(): index %+time% "1 month".
`%+time%` <- function(index, period) {
  add_time(index, period)
}
# Infix alias for subtract_time(): index %-time% "1 month".
`%-time%` <- function(index, period) {
  subtract_time(index, period)
}
# Shift `index` forward by a lubridate period parsed from `period`.
# Quarter periods are rejected up front; impossible dates become NA
# with a warning.
time_adder <- function(index, period) {
  check_quarter(period)
  shifted <- index + lubridate::period(period)
  if (any(is.na(shifted))) warning("Missing values created during time addition. This can happen if dates do not exist.")
  shifted
}
# Shift `index` backward by a lubridate period parsed from `period`.
time_subtracter <- function(index, period) {
  check_quarter(period)
  shifted <- index - lubridate::period(period)
  if (any(is.na(shifted))) warning("Missing values created during time subtraction. This can happen if dates do not exist.")
  shifted
}
# Abort when a "quarter" period is requested; lubridate periods do not
# support quarters, so users should express them as 3-month increments.
check_quarter <- function(period) {
  mentions_quarter <- stringr::str_detect(tolower(period), "quarter")
  if (mentions_quarter) {
    rlang::abort("`quarter` detected. Try using `3 month` increments instead.")
  }
}
# Concentric-spheres noise pattern: value depends on the distance of each
# point from the origin in frequency-scaled coordinates. Peaks (1) occur on
# integer radii and troughs away from them; absent axes contribute 0.
gen_spheres <- function(x, y = NULL, z = NULL, t = NULL, frequency = 1, ...) {
  axis_sq <- function(v) ((v %||% 0) * frequency)^2
  radius <- sqrt(axis_sq(x) + axis_sq(y) + axis_sq(z) + axis_sq(t))
  # Distance to the nearest integer shell, from below and from above.
  to_lower_shell <- radius - floor(radius)
  to_upper_shell <- 1 - to_lower_shell
  1 - pmin(to_lower_shell, to_upper_shell) * 4
}
# Smoke test: list_languages() should run without error.
# expect_error(expr, NA) asserts the *absence* of an error.
test_that("list_languages", {
  testthat::expect_error(list_languages(), NA)
})
context("Testing 'esearch()'")
# These tests hit the live NCBI Entrez service, so they only run when the
# package option reutils.test.remote is enabled.
if (getOption('reutils.test.remote')) {
  # 'a' uses the Entrez history server (usehistory = TRUE: query key + WebEnv
  # instead of UIDs); 'b' retrieves the UIDs directly.
  a <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               retmax = 6, usehistory = TRUE)
  b <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               retmax = 6, usehistory = FALSE)
  test_that("esearch() returns an 'esearch' object", {
    expect_is(a, "esearch")
    expect_is(b, "esearch")
  })
  test_that("'content()' returns a character vector or an XMLInternalDocument", {
    expect_that(content(a, "text"), is_a("character"))
    expect_that(content(b, "text"), is_a("character"))
    expect_that(content(a, "xml"), is_a("XMLInternalDocument"))
    expect_that(content(b, "xml"), is_a("XMLInternalDocument"))
    # Default retmode is xml, so requesting json must fail.
    expect_that(content(a, "json"), throws_error("Cannot return data of retmode.+"))
    expect_that(content(b, "json"), throws_error("Cannot return data of retmode.+"))
    expect_that(content(a, 'parsed'), is_a("entrez_uid"))
    expect_that(content(b, 'parsed'), is_a("entrez_uid"))
  })
  test_that("Subsetting an 'esearch' returns an 'esearch' object", {
    expect_that(a[1:2], is_a("entrez_uid"))
    expect_that(b[1:2], is_a("entrez_uid"))
    expect_that(length(b[1:2]), equals(2))
  })
  test_that("'querykey', 'webenv', and 'database' return the appropriate results", {
    # History-server results carry a query key and WebEnv; direct results do not.
    expect_equal(querykey(a), 1)
    expect_match(webenv(a), "NCID_+")
    expect_equal(database(a), "pubmed")
    expect_equal(querykey(b), NA_integer_)
    expect_equal(webenv(b), NA_character_)
    expect_equal(database(b), 'pubmed')
  })
  test_that("'rettype', and 'retmode' return the appropriate results", {
    expect_equal(rettype(a), "uilist")
    expect_match(retmode(a), "xml")
    expect_equal(rettype(b), "uilist")
    expect_match(retmode(b), "xml")
  })
  test_that("'uid' returns a character vector for esearch objdect", {
    # With usehistory = TRUE no UIDs are returned, only the WebEnv reference.
    expect_equal(uid(a), NA_character_)
    expect_is(uid(b), "character")
    expect_equal(length(uid(b)), 6)
  })
  # Same pair of queries, but requesting JSON responses.
  a <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               retmax = 6, usehistory = TRUE, retmode = 'json')
  b <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               retmax = 6, usehistory = FALSE, retmode = 'json')
  test_that("'content()' returns a character vector or a json object", {
    expect_that(content(a, "text"), is_a("character"))
    expect_that(content(b, "text"), is_a("character"))
    expect_that(content(a, "xml"), throws_error("Cannot return data of retmode.+"))
    expect_that(content(b, "xml"), throws_error("Cannot return data of retmode.+"))
    expect_that(content(a, "json"), is_a("json"))
    expect_that(content(b, "json"), is_a("json"))
    expect_that(content(a, 'parsed'), is_a("entrez_uid"))
    expect_that(content(b, 'parsed'), is_a("entrez_uid"))
  })
  test_that("'retmode' returns the appropriate results", {
    expect_match(retmode(a), "json")
    expect_match(retmode(b), "json")
  })
  # rettype = "count" returns only the number of matching records.
  a <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               rettype = "count", retmode = 'xml')
  b <- esearch(term = "cancer", db = "pubmed", reldate = 60, datetype = "edat",
               rettype = "count", retmode = 'json')
  test_that("'content()' returns a numeric vector", {
    expect_that(content(a, "parsed"), is_a("numeric"))
    expect_that(content(b, "parsed"), is_a("numeric"))
    expect_that(content(a, "xml"), is_a("XMLInternalDocument"))
    expect_that(content(b, "json"), is_a("json"))
  })
}
# Create an ff (disk-backed) vector of length n filled with random draws from
# rfun, generated chunk-wise so memory use stays bounded for very large n.
ffrandom <- function(n, rfun = runif, ..., vmode = NULL) {
  # Initialise with a single draw so ff can derive a suitable vmode
  # when none is given explicitly.
  r <- ff(rfun(1), length=n, vmode=vmode)
  for (i in chunk(r)) {
    Log$chunk(i)
    # Number of elements covered by this chunk's index range.
    ni <- diff(range(i))+1
    r[i] <- rfun(ni, ...)
  }
  r
}
library(MittagLeffleR)
context("Does dml() integrate to pml()?")
# Consistency check: numerically integrating the density dml() between
# consecutive quantiles qml() should reproduce the probability increments,
# for a grid of tail and scale parameters and both distribution types.
tailvec <- seq(0.4, 0.9, 0.1)
scalevec <- 10^seq(-3,3,1)
pvec <- seq(0.1, 0.9, 0.1)
# Tolerance for the numeric integration vs the exact increment.
tol <- 0.01
test_that("dml() integrates to pml() for Type 1", {
  for (tail in tailvec){
    for (scale in scalevec){
      ml_dens <- function(x) {
        dml(x = x, tail = tail, scale = scale, second.type = FALSE)
      }
      qvec <- qml(pvec, tail, scale, second.type = FALSE)
      for (i in 1:(length(qvec)-1)){
        # Integral of the density over [q_i, q_{i+1}] should equal
        # p_{i+1} - p_i (= 0.1 for this grid).
        p2 <- integrate(ml_dens, qvec[i], qvec[i+1])$value
        expect_equal(object = p2, expected = pvec[i+1]-pvec[i], tol=tol,
                     info = paste("tail=", toString(tail),
                                  "scale=", toString(scale),
                                  "q=", toString(qvec[i]),
                                  "p=", toString(pvec[i]),
                                  "p2=", toString(p2)))
      }
    }
  }
})
test_that("dml() integrates to pml() for Type 2", {
  for (tail in tailvec){
    for (scale in scalevec){
      qvec <- qml(pvec, tail, scale, second.type = TRUE)
      ml_dens <- function(x) {
        dml(x = x, tail = tail, scale = scale, second.type = TRUE)
      }
      for (i in 1:(length(qvec)-1)){
        # integrate() may warn about accuracy for the second type;
        # only the value is checked here.
        p2 <- suppressWarnings(integrate(ml_dens, qvec[i], qvec[i+1])$value)
        expect_equal(object = p2, expected = pvec[i+1]-pvec[i], tol=tol,
                     info = paste("tail=", toString(tail),
                                  "scale=", toString(scale),
                                  "q=", toString(qvec[i]),
                                  "p=", toString(pvec[i]),
                                  "p2=", toString(p2)))
      }
    }
  }
})
context("Check break_down_uncertainty() function")
library("DALEX")
library("iBreakDown")
library("randomForest")
set.seed(1313)
# Classification example (HR data): attribution along a fixed numeric path.
model <- randomForest(status ~ . , data = HR)
new_observation <- HR_test[1,]
explainer_rf <- explain(model,
                        data = HR[1:1000,1:5],
                        y = HR$status[1:1000])
bd_rf_A <- break_down_uncertainty(explainer_rf,
                                  new_observation,
                                  path = c(3,2,4,1,5))
pl_A <- plot(bd_rf_A)
# Regression example (apartments data): path given by variable names.
model <- randomForest(m2.price ~ . , data = apartments)
explainer_rf <- explain(model,
                        data = apartments_test[1:1000,2:6],
                        y = apartments_test$m2.price[1:1000])
bd_rf_B <- break_down_uncertainty(explainer_rf,
                                  apartments_test[1,],
                                  path = c("floor", "no.rooms", "district", "construction.year", "surface"))
pl_B <- plot(bd_rf_B)
# SHAP attributions with and without the sampling distribution retained.
bd_rf_C <- shap(explainer_rf,
                apartments_test[1,])
pl_C <- plot(bd_rf_C, show_boxplots = FALSE)
bd_rf_D <- shap(explainer_rf,
                apartments_test[1,], keep_distribution = FALSE)
pl_D <- plot(bd_rf_D, show_boxplots = FALSE)
test_that("Output format", {
  expect_is(bd_rf_A, "break_down_uncertainty")
  expect_is(bd_rf_B, "break_down_uncertainty")
  expect_is(bd_rf_C, "break_down_uncertainty")
  expect_is(pl_A, "ggplot")
  expect_is(pl_B, "ggplot")
  expect_is(pl_C, "ggplot")
})
# ProUCL-style automatic distribution choice: test the data in `y` for
# normality, a gamma distribution, and lognormality (in that order of
# preference) and report the first family that is not rejected.
#
# Args:
#   y      numeric vector of observations.
#   alpha  significance level; must be exactly 0.01, 0.05, or 0.10.
#   data.name, parent.of.data, subset.expression
#          passed through to gofTest() for labelling its output.
#
# Returns a list containing the decision ("Normal", "Gamma", "Lognormal",
# or "Nonparametric" when every family is rejected), the fitted parameters
# for the chosen family, and the full set of goodness-of-fit results.
#
# NOTE(review): the normal/lognormal tests return numeric p-values, while
# the ProUCL gamma tests return interval *strings* such as "< 0.01"; the
# string sets below must match gofTest()'s categories exactly.
prouclDistChoose <-
function (y, alpha = 0.05, data.name = NULL, parent.of.data = NULL,
    subset.expression = NULL)
{
    if (!is.numeric(alpha) || length(alpha) != 1 || !(alpha %in%
        c(0.01, 0.05, 0.1)))
        stop("The argument 'alpha' must be a numeric scalar equal to 0.01, 0.05, or 0.10")
    string <- paste("at ", 100 * alpha, "% Significance Level",
        sep = "")
    decision <- ""
    distribution.parameters <- NULL
    estimation.method <- NULL
    sample.size <- length(y)
    # ---- Normality: Shapiro-Wilk and Lilliefors ----
    norm.sw.list <- gofTest(y, distribution = "norm", test = "sw",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    norm.sw.p <- norm.sw.list$p.value
    norm.lillie.list <- gofTest(y, distribution = "norm", test = "lillie",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    norm.lillie.p <- norm.lillie.list$p.value
    if (all(c(norm.sw.p, norm.lillie.p) < alpha)) {
        norm.text <- paste("Data Not Normal", string)
    }
    else {
        if (any(c(norm.sw.p, norm.lillie.p) < alpha)) {
            norm.text <- paste("Data appear Approximate Normal",
                string)
        }
        else {
            norm.text <- paste("Data appear Normal", string)
        }
        # An "approximate" fit still counts: normality wins whenever at
        # least one of the two tests does not reject.
        decision <- "Normal"
        distribution.parameters <- norm.sw.list$distribution.parameters
        estimation.method <- norm.sw.list$estimation.method
    }
    # ---- Gamma: ProUCL Anderson-Darling and Kolmogorov-Smirnov ----
    gamma.ad.list <- gofTest(y, distribution = "gamma", test = "proucl.ad.gamma",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    gamma.ad.p <- gamma.ad.list$p.value
    gamma.ks.list <- gofTest(y, distribution = "gamma", test = "proucl.ks.gamma",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    gamma.ks.p <- gamma.ks.list$p.value
    # Which p-value categories count as a rejection depends on alpha.
    # This table replaces three near-identical copy-pasted branches in the
    # original. (For alpha = 0.01 the original compared with `==`; `%in%`
    # is identical for these non-NA category strings.)
    gamma.reject.levels <- switch(as.character(alpha),
        "0.01" = "< 0.01",
        "0.05" = c("< 0.01", "0.01 <= p < 0.05"),
        c("< 0.01", "0.01 <= p < 0.05", "0.05 <= p < 0.10"))
    gamma.p <- c(gamma.ad.p, gamma.ks.p)
    if (all(gamma.p %in% gamma.reject.levels)) {
        gamma.text <- paste("Data Not Gamma Distributed",
            string)
    }
    else {
        if (any(gamma.p %in% gamma.reject.levels)) {
            gamma.text <- paste("Data appear Approximate Gamma Distributed",
                string)
        }
        else {
            gamma.text <- paste("Data appear Gamma Distributed",
                string)
        }
        # Gamma is only recorded if normality was rejected above.
        if (decision == "") {
            decision <- "Gamma"
            distribution.parameters <- gamma.ad.list$distribution.parameters
            estimation.method <- gamma.ad.list$estimation.method
        }
    }
    # ---- Lognormality: Shapiro-Wilk and Lilliefors ----
    lnorm.sw.list <- gofTest(y, distribution = "lnorm", test = "sw",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    lnorm.sw.p <- lnorm.sw.list$p.value
    lnorm.lillie.list <- gofTest(y, distribution = "lnorm", test = "lillie",
        keep.data = FALSE, data.name = data.name, parent.of.data = parent.of.data,
        subset.expression = subset.expression)
    lnorm.lillie.p <- lnorm.lillie.list$p.value
    if (all(c(lnorm.sw.p, lnorm.lillie.p) < alpha)) {
        lnorm.text <- paste("Data Not Lognormal", string)
        # Every family was rejected: fall back to a nonparametric decision.
        if (decision == "")
            decision <- "Nonparametric"
    }
    else {
        if (any(c(lnorm.sw.p, lnorm.lillie.p) < alpha)) {
            lnorm.text <- paste("Data appear Approximate Lognormal",
                string)
        }
        else {
            lnorm.text <- paste("Data appear Lognormal", string)
        }
        if (decision == "") {
            decision <- "Lognormal"
            distribution.parameters <- lnorm.sw.list$distribution.parameters
            estimation.method <- lnorm.sw.list$estimation.method
        }
    }
    # ---- Assemble results; element names come from each test's method label ----
    test.list <- list(norm = list(norm.sw.list, norm.lillie.list,
        text = norm.text), gamma = list(gamma.ad.list, gamma.ks.list,
        text = gamma.text), lnorm = list(lnorm.sw.list, lnorm.lillie.list,
        text = lnorm.text))
    names(test.list$norm) <- c(norm.sw.list$method, norm.lillie.list$method,
        "text")
    names(test.list$gamma) <- c(gamma.ad.list$method, gamma.ks.list$method,
        "text")
    names(test.list$lnorm) <- c(lnorm.sw.list$method, lnorm.lillie.list$method,
        "text")
    ret.list <- list(choices = c("Normal", "Gamma", "Lognormal"),
        method = "ProUCL", decision = decision, alpha = alpha,
        distribution.parameters = distribution.parameters, estimation.method = estimation.method,
        sample.size = sample.size, test.results = test.list)
    ret.list
}
# Driver script for running reverse-dependency checks with revdepcheck.
# Preconditions: an X display (some revdeps need one) and ample temp space.
if (Sys.getenv("DISPLAY") == "") stop("Run with xvfb-run")
free <- system(paste0("df --output=avail ", tempdir(), " | tail -n 1"), intern = TRUE)
# `df --output=avail` reports 1K blocks, so 1e8 blocks is roughly 100 GB.
if (as.numeric(free) < 1e8) stop("Set TMPDIR to a location with at least 100 GB free space")
# Assumes the working directory is the package checkout, named after the package.
package <- basename(getwd())
library(revdepcheck)
dir_setup(getwd())
# revdepcheck internals (:::): create the results DB on first run only.
if (!revdepcheck:::db_exists(getwd())) {
  revdepcheck:::db_setup(getwd())
}
if (length(revdep_todo()) == 0) {
  # Seed the todo list with direct strong revdeps (Depends/Imports), CRAN + Bioc.
  import_revdeps <- revdepcheck:::cran_revdeps(package = package, dependencies = c("Depends", "Imports"), bioc = TRUE)
  import_revdeps <- setdiff(import_revdeps, package)
  todo_import_revdeps <- import_revdeps
  # NOTE(review): transitive-revdep expansion is deliberately disabled via
  # `FALSE &&`, and the body also `break`s after one pass; apparently kept
  # around for manual experimentation — confirm before deleting.
  while (FALSE && length(todo_import_revdeps) > 0) {
    print(length(todo_import_revdeps))
    print(todo_import_revdeps)
    print(Sys.time())
    new_import_revdeps <- unlist(purrr::map(todo_import_revdeps, revdepcheck:::cran_revdeps, dependencies = c("Depends", "Imports"), bioc = TRUE))
    todo_import_revdeps <- setdiff(new_import_revdeps, import_revdeps)
    import_revdeps <- union(import_revdeps, new_import_revdeps)
    print(new_import_revdeps)
    break
  }
  # Weak revdeps (Suggests/Enhances/LinkingTo) are queued as well.
  weak_revdeps <- revdepcheck:::cran_revdeps(package = package, dependencies = c("Suggests", "Enhances", "LinkingTo"), bioc = TRUE)
  print(weak_revdeps)
  revdep_add(".", c(import_revdeps, weak_revdeps))
}
# Re-run the checker until the todo queue drains or N attempts are used;
# try() keeps the loop alive across timeouts/crashes of individual runs.
N <- 100
for (i in seq_len(N)) {
  try(
    revdepcheck::revdep_check(
      bioc = TRUE,
      dependencies = character(),
      quiet = FALSE,
      num_workers = 24,
      timeout = as.difftime(60, units = "mins")
    )
  )
  if (length(revdep_todo()) == 0) break
}
# Write the CRAN-facing report, then commit and push the results.
withr::with_output_sink(
  "revdep/cran.md",
  revdep_report_cran()
)
system("git add revdep/*.md")
system("git commit -m 'update revdep results'")
system("git push -u origin HEAD")
# Tests that dplyr verbs preserve the "tabxplor_tab" class on an ungrouped
# cross-table built with tab(); each test applies one verb and checks the
# class survives the operation.
tabs <- tab(forcats::gss_cat, race, marital)
testthat::test_that("dplyr::rowwise preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::rowwise(tabs), "tabxplor_tab")
})
testthat::test_that("dplyr::mutate preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::mutate(tabs, Married = sum(Married)), "tabxplor_tab")
})
testthat::test_that("dplyr::transmute preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::transmute(tabs, race = race, Married = sum(Married)),
                            "tabxplor_tab")
})
testthat::test_that("dplyr::filter preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::filter(tabs, is_totrow(Married)), "tabxplor_tab")
})
testthat::test_that("dplyr::slice preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::slice(tabs, 1:2), "tabxplor_tab")
})
testthat::test_that("dplyr::arrange preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::arrange(tabs, Married), "tabxplor_tab")
})
testthat::test_that("dplyr::distinct preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::distinct(tabs), "tabxplor_tab")
})
testthat::test_that("dplyr::select preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::select(tabs, race, Married), "tabxplor_tab")
})
testthat::test_that("dplyr::rename, rename_with and relocate preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::relocate   (tabs, Divorced  , .after = Married),
                            "tabxplor_tab")
  testthat::expect_s3_class(dplyr::rename     (tabs, new_name = race), "tabxplor_tab")
  testthat::expect_s3_class(dplyr::rename_with(tabs, toupper), "tabxplor_tab")
})
# Subassignment must keep the class too; [[2, 1]] writes a single cell.
testthat::test_that("[<- and [[<- preserves class tabxplor_tab", {
  tabs[4] <- dplyr::mutate(tabs[4], dplyr::across(.fns = ~ set_display(., "ctr")))
  tabs[[2]] <- tabs[[2]] %>% set_digits(3)
  tabs[[2, 1]] <- factor("White")
  testthat::expect_s3_class(tabs, "tabxplor_tab")
})
# Same verb-preservation tests as above, but on a *grouped* table (third
# variable `year` creates the grouping): the expected class is
# "tabxplor_grouped_tab", except for ungroup()/summarise() which drop to
# the ungrouped "tabxplor_tab".
grouped_tabs <- forcats::gss_cat %>%
  dplyr::filter(year %in% c(2000, 2014)) %>%
  tab(race, marital, year)
testthat::test_that("dplyr::ungroup preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::ungroup(grouped_tabs), "tabxplor_tab")
})
testthat::test_that("dplyr::summarise, preserves class tabxplor_tab", {
  testthat::expect_s3_class(dplyr::summarise (grouped_tabs, Married = sum(Married)),
                            "tabxplor_tab")
})
testthat::test_that("dplyr::rowwise preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::rowwise(grouped_tabs), "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::mutate preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::mutate(grouped_tabs, Married = sum(Married)),
                            "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::transmute preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::transmute(grouped_tabs, year = year, race = race,
                                             Married = sum(Married)), "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::filter preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::filter(grouped_tabs, is_totrow(Married)),
                            "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::slice preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::slice(grouped_tabs, 1:2), "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::arrange preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::arrange(grouped_tabs, Married), "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::distinct preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::distinct(grouped_tabs), "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::select preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::select(grouped_tabs, year, race, Married),
                            "tabxplor_grouped_tab")
})
testthat::test_that("dplyr::rename, rename_with and relocate preserves class tabxplor_grouped_tab", {
  testthat::expect_s3_class(dplyr::relocate   (grouped_tabs, Divorced  , .after = Married),
                            "tabxplor_grouped_tab")
  testthat::expect_s3_class(dplyr::rename     (grouped_tabs, new_name = year),
                            "tabxplor_grouped_tab")
  testthat::expect_s3_class(dplyr::rename_with(grouped_tabs, toupper), "tabxplor_grouped_tab")
})
# Subassignment on a grouped table, including a factor-level recode.
testthat::test_that("[<- and [[<- preserves class tabxplor_grouped_tab", {
  grouped_tabs[4] <- dplyr::mutate(grouped_tabs[4],
                                   dplyr::across(.fns = ~ set_display(., "ctr")))
  grouped_tabs[[2]] <- grouped_tabs[[2]] %>% forcats::fct_recode("kéké" = "Black")
  grouped_tabs[[2,2]] <- factor("White")
  testthat::expect_s3_class(grouped_tabs, "tabxplor_grouped_tab")
})
context("test-compare_solutions.R")
# Fit latent profile models with 2-4 profiles on the four numeric iris
# columns; shared fixture for the comparison test below.
x <- estimate_profiles(iris[, 1:4], n_profiles = 2:4)
test_that("compare_solutions works with iris data when we use compare_solutions()
          outside of a pipe", {
  # Warnings from model comparison are expected; only the class is checked.
  expect_s3_class(suppressWarnings(compare_solutions(x)), "bestLPA")
})
# knitr chunk defaults for this vignette. The original source contained an
# unterminated string literal (`comment = "` with no closing quote), which
# made the file unparsable; restored to the conventional knitr output
# prefix "#>" with message/warning suppression.
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>",
  message = FALSE,
  warning = FALSE
)
library(chessR)
library(ggplot2)
library(dplyr)
library(stringr)
library(lubridate)
# Pull the full Chess.com game archive for one user (all available months).
chessdotcom_game_data_all_months <- get_raw_chessdotcom(usernames = "JaseZiv")
glimpse(chessdotcom_game_data_all_months)
# Restrict the extract to specific months (YYYYMM range).
chessdotcom_hikaru_recent <- get_raw_chessdotcom(usernames = "Hikaru", year_month = c(202104:202105))
glimpse(chessdotcom_hikaru_recent)
# Lichess game export for a single user.
lichess_game_data <- get_raw_lichess("Georges")
glimpse(lichess_game_data)
# Pre-processed analysis data for a single Chess.com user.
chess_analysis_single <- get_game_data("JaseZiv")
glimpse(chess_analysis_single)
# Current Chess.com leaderboard for daily games.
daily_leaders <- chessdotcom_leaderboard(game_type = "daily")
glimpse(daily_leaders)
# Derive per-game features: number of moves, how the game ended, and winner.
chessdotcom_game_data_all_months$nMoves <- return_num_moves(moves_string = chessdotcom_game_data_all_months$Moves)
head(chessdotcom_game_data_all_months[, c("Moves", "nMoves")])
# mapply() iterates over the three columns row-by-row (get_game_ending is
# presumably not vectorised — confirm against the package docs).
chessdotcom_game_data_all_months$Ending <- mapply(get_game_ending,
                       termination_string = chessdotcom_game_data_all_months$Termination,
                       white = chessdotcom_game_data_all_months$White,
                       black = chessdotcom_game_data_all_months$Black)
head(chessdotcom_game_data_all_months[, c("Termination", "White", "Black", "Ending")])
chessdotcom_game_data_all_months$Winner <- get_winner(result_column = chessdotcom_game_data_all_months$Result,
                       white = chessdotcom_game_data_all_months$White,
                       black = chessdotcom_game_data_all_months$Black)
head(chessdotcom_game_data_all_months[, c("White", "Black", "Result", "Winner")])
# Bar chart: number of games played per time class for the user.
chessdotcom_game_data_all_months %>%
  count(TimeClass) %>%
  ggplot(aes(x= reorder(TimeClass,n), y= n)) +
  geom_col(fill = "steelblue", colour = "grey40", alpha = 0.7) +
  labs(x= "Game Style", y= "Number of Games") +
  ggtitle("WHICH TIME CLASSES ARE PLAYED MOST BY USER") +
  coord_flip() +
  theme_minimal() +
  theme(panel.grid.major.y = element_blank())
# Line chart: share of games won per calendar month, with a 0.5 reference line.
chessdotcom_game_data_all_months %>%
  mutate(MonthEnd = paste(year(EndDate), str_pad(lubridate::month(ymd(EndDate)), 2, side = "left", pad = "0"), sep = "-")) %>%
  mutate(UserResult = ifelse(Winner == Username, "Win", ifelse(Winner == "Draw", "Draw", "Loss"))) %>%
  group_by(MonthEnd, UserResult) %>%
  summarise(n = n()) %>%
  mutate(WinPercentage = n / sum(n)) %>%
  filter(UserResult == "Win") %>%
  ggplot(aes(x= MonthEnd, y= WinPercentage, group=1)) +
  geom_line(colour= "steelblue", size=1) +
  geom_hline(yintercept = 0.5, linetype = 2, colour = "grey40") +
  scale_y_continuous(limits = c(0,1)) +
  labs(x= "Month Game Ended", y= "Win %") +
  ggtitle("MONTHLY WINNING %") +
  theme_minimal()
# Monthly average ELO for blitz/daily games; the user's own rating is taken
# from whichever colour they played in each game.
chessdotcom_game_data_all_months %>%
  filter(TimeClass %in% c("blitz", "daily")) %>%
  mutate(UserELO = as.numeric(ifelse(Username == White, WhiteElo, BlackElo))) %>%
  mutate(MonthEnd = paste(year(EndDate), str_pad(lubridate::month(ymd(EndDate)), 2, side = "left", pad = "0"), sep = "-")) %>%
  group_by(MonthEnd, TimeClass) %>%
  summarise(AverageELO = mean(UserELO, na.rm = T)) %>%
  ggplot(aes(x= MonthEnd, y= AverageELO, group=1)) +
  geom_line(colour= "steelblue", size=1) +
  labs(x= "Month Game Ended", y= "Average ELO") +
  ggtitle("MONTHLY AVERAGE ELO RATING") +
  facet_wrap(~ TimeClass, scales = "free_y", ncol = 1) +
  theme_minimal()
# Density of opponent ELO, split by the user's game outcome.
chessdotcom_game_data_all_months %>%
  mutate(OpponentELO = as.numeric(ifelse(Username == White, BlackElo, WhiteElo)),
         UserResult = ifelse(Winner == Username, "Win", ifelse(Winner == "Draw", "Draw", "Loss"))) %>%
  filter(TimeClass %in% c("blitz", "daily")) %>%
  ggplot(aes(x= OpponentELO, fill = UserResult)) +
  geom_density(alpha = 0.3) +
  ggtitle("HOW DO WE FARE AGAINST DIFFERENT ELOs?") +
  facet_wrap(~ TimeClass, scales = "free", ncol = 1) +
  theme_minimal()
# Download a raster layer from the IGN Geoportail WMS ("https://wxs.ign.fr")
# covering `shape`, at approximately `resolution` meters per pixel.
#
# Args:
#   shape      sf/sfc object giving the area of interest (any CRS).
#   apikey     IGN Geoportail API key (defaults to the open "altimetrie" key).
#   layer_name WMS layer to request.
#   resolution target cell size in meters (pixel count capped by width_height()).
#   filename   if NULL, the raster is streamed via GDAL's /vsicurl without a
#              local copy; otherwise it is downloaded to filename + extension.
#   version, format, styles: forwarded as WMS GetMap query parameters.
#
# Returns: a stars raster object.
get_wms_raster <- function(shape,
                           apikey = "altimetrie",
                           layer_name = "ELEVATION.ELEVATIONGRIDCOVERAGE",
                           resolution = 10,
                           filename = NULL,
                           version = "1.3.0",
                           format = "image/geotiff",
                           styles = "") {
   # Repair invalid geometries first, then work in EPSG:4326 as required by
   # the bbox/length computations below.
   shape <- st_make_valid(shape) %>%
      st_transform(4326)
   width_height <- width_height(shape, resolution)
   url <- modify_url("https://wxs.ign.fr",
                     path = paste0(apikey, "/geoportail/r/wms"),
                     query = list(version = version,
                                  request = "GetMap",
                                  format = format,
                                  layers = layer_name,
                                  styles = styles,
                                  width = width_height[1],
                                  height = width_height[2],
                                  crs = "EPSG:4326",
                                  bbox = format_bbox_wms(shape)))
   if (is.null(filename)) {
      # Stream through GDAL's virtual curl filesystem: no file on disk.
      url_rgdal_option <- paste0("/vsicurl/", url)
      res <- try(read_stars(url_rgdal_option, normalize_path = FALSE),
                 silent = TRUE)
      # FIX: detect a failed try() with inherits() instead of grepping the
      # printed value for "Error" — string matching can both miss failures
      # and false-positive on legitimate raster metadata.
      if (inherits(res, "try-error")) {
         stop("\n 1. Please check that ", layer_name,
              " exists at shape location\n",
              " 2. If yes, rgdal does not support this resource. ",
              "To overcome this, you must save the resource ",
              "by using the filename argument.\n")
      }
   } else {
      # Append the file extension matching the requested WMS format.
      filename <- paste0(filename,
                         switch(
                            format,
                            "image/jpeg" = ".jpg",
                            "image/png" = ".png",
                            "image/tiff" = ".tif",
                            "image/geotiff" = ".tif",
                            stop("Bad format, please check ",
                                 "`?get_wms_raster()`")
                         ))
      # mode = "wb" is required for binary downloads (notably on Windows).
      download.file(url = url,
                    method = "auto",
                    mode = "wb",
                    destfile = filename)
      message("The layer is saved at : ", file.path(getwd(), filename))
      res <- read_stars(filename)
   }
   return(res)
}
# Build the WMS GetMap bbox string for `shape`. WMS 1.3.0 with EPSG:4326
# uses lat,lon axis order, hence ymin,xmin,ymax,xmax.
format_bbox_wms <- function(shape = NULL) {
  corners <- st_bbox(shape)
  paste(corners[c("ymin", "xmin", "ymax", "xmax")], collapse = ",")
}
# Compute the WMS request size in pixels for `shape` at the requested
# `resolution` (meters per pixel), capped at 2048 pixels per axis.
# Returns the pixel counts invisibly and messages the effective cell size.
width_height <- function(shape, resolution = NULL) {
  bbox <- st_bbox(shape)
  # Geodesic edge lengths of the bbox, in meters (crs = 4326).
  # NOTE(review): the "width" segment varies along the y axis and the
  # "height" segment along the x axis — this matches the lat,lon order used
  # in format_bbox_wms() for WMS 1.3.0 EPSG:4326, but confirm against the
  # server's interpretation before relying on the names.
  width <- st_linestring(rbind(c(bbox[1], bbox[2]),
                               c(bbox[1], bbox[4])))
  height <- st_linestring(rbind(c(bbox[1], bbox[2]),
                                c(bbox[3], bbox[2])))
  width_height <- st_length(st_sfc(list(width, height), crs = 4326))
  names(width_height) <- c("width", "height")
  # Default/maximum request size when no resolution is given.
  nb_pixel <- c(2048, 2048)
  if (!is.null(resolution)) {
    nb_pixel <- as.numeric(ceiling(width_height / resolution))
    nb_pixel <- ifelse(nb_pixel > 2048, 2048, nb_pixel)
  }
  # Effective cell size after capping (may be coarser than requested).
  resolution <- width_height / nb_pixel
  if (sum(nb_pixel == 2048) >= 1) {
    message("The resolution is too high (or set to NULL) so the ",
            "maximum resolution is used. Reducing the resolution ",
            "allows to speed up calculations on raster.")
  }
  message(paste(c("x", "\ny"), "cell_size :", round(resolution, 3), "[m]"))
  invisible(nb_pixel)
}
# roxygen2 extension points for the @concept tag: parse the tag's raw value,
# turn it into an rd_section, and render that section. The helpers
# tag_value(), rd_section() and format_rd() are defined elsewhere.
roxy_tag_parse.roxy_tag_concept <- function(x) tag_value(x)
roxy_tag_rd.roxy_tag_concept <- function(x, base_path, env) {
  # Section name comes from the tag itself ("concept").
  rd_section(x$tag, x$val)
}
format.rd_section_concept <- function(x, ...) {
  # Delegates to the shared multi-value Rd formatter.
  format_rd(x, ...)
}
# roxygen2 extension points for the @docType tag. Parsed with tag_name()
# (a single name, unlike the free-form tag_value() used for @concept).
roxy_tag_parse.roxy_tag_docType <- function(x) tag_name(x)
roxy_tag_rd.roxy_tag_docType <- function(x, base_path, env) {
  rd_section("docType", x$val)
}
format.rd_section_docType <- function(x, ...) {
  # Delegates to format_first() (defined elsewhere) rather than format_rd().
  format_first(x, ...)
}
# roxygen2 extension points for the @encoding tag: free-form value parsed
# with tag_value(), rendered through format_first() like @docType.
roxy_tag_parse.roxy_tag_encoding <- function(x) tag_value(x)
roxy_tag_rd.roxy_tag_encoding <- function(x, base_path, env) {
  rd_section(x$tag, x$val)
}
format.rd_section_encoding <- function(x, ...) {
  format_first(x, ...)
}
# roxygen2 extension points for the @keywords tag. One tag may carry several
# whitespace-separated keywords; note the Rd section name is the singular
# "keyword", not the tag name "keywords".
roxy_tag_parse.roxy_tag_keywords <- function(x) tag_value(x)
roxy_tag_rd.roxy_tag_keywords <- function(x, base_path, env) {
  # Split the tag's value on runs of whitespace into individual keywords.
  rd_section("keyword", str_split(x$val, "\\s+")[[1]])
}
format.rd_section_keyword <- function(x, ...) {
  format_rd(x, ...)
}
# Round `x` to a "nice" number: a value of the form {1, 2, 5, 10} * 10^k
# (Heckbert's "nice numbers" algorithm for axis labelling, Graphics Gems).
#
# With round = TRUE the nearest nice mantissa is chosen (cut points 1.5, 3, 7);
# with round = FALSE the smallest nice mantissa >= the mantissa of x is chosen
# (cut points 1, 2, 5).
#
# Args:
#   x     positive numeric value(s); the function is now vectorised over x
#         (scalar behaviour is unchanged).
#   round logical flag selecting nearest (TRUE) vs ceiling-like (FALSE) mode.
# Returns: numeric vector of nice numbers, same length as x.
NiceNumber <- function(x = 6, round = TRUE) {
  expon <- floor(log10(x))   # decimal exponent of x
  frac <- x / 10^expon       # mantissa in [1, 10)
  # Thresholds between the candidate mantissas 1, 2, 5, 10; findInterval()
  # reproduces the original strict-< comparison chain element-wise.
  cutpoints <- if (round) c(1.5, 3, 7) else c(1, 2, 5)
  nice_frac <- c(1, 2, 5, 10)[findInterval(frac, cutpoints) + 1]
  nice_frac * 10^expon
}