otp_traveltime <- function(otpcon = NA,
path_data = NULL,
fromPlace = NA,
toPlace = NA,
fromID = NULL,
toID = NULL,
mode = "CAR",
date_time = Sys.time(),
arriveBy = FALSE,
maxWalkDistance = 1000,
numItineraries = 3,
routeOptions = NULL,
ncores = 1,
timezone = otpcon$timezone) {
if (is.null(timezone)) {
warning("otpcon is missing the timezone variaible, assuming local timezone")
timezone <- Sys.timezone()
}
RcppSimdJsonVersion <- try(utils::packageVersion("RcppSimdJson") >= "0.1.2", silent = TRUE)
  if (inherits(RcppSimdJsonVersion, "try-error")) {
RcppSimdJsonVersion <- FALSE
}
if (!RcppSimdJsonVersion) {
message("NOTE: You do not have 'RcppSimdJson' >= 0.1.2 installed")
stop("This feature is not supported")
}
checkmate::assert_subset(timezone, choices = OlsonNames(tzdir = NULL))
checkmate::assert_class(otpcon, "otpconnect")
mode <- toupper(mode)
checkmate::assert_subset(mode,
choices = c(
"TRANSIT", "WALK", "BICYCLE",
"CAR", "BUS", "RAIL", "SUBWAY",
"TRAM", "FERRY"
),
empty.ok = FALSE
)
checkmate::assert_posixct(date_time)
date <- format(date_time, "%m-%d-%Y", tz = timezone)
time <- tolower(format(date_time, "%I:%M%p", tz = timezone))
checkmate::assert_numeric(maxWalkDistance, lower = 0, len = 1)
checkmate::assert_numeric(numItineraries, lower = 1, len = 1)
  checkmate::assert_character(fromID, null.ok = TRUE)
  checkmate::assert_character(toID, null.ok = TRUE)
checkmate::assert_logical(arriveBy)
if (!is.null(routeOptions)) {
routeOptions <- otp_validate_routing_options(routeOptions)
}
fromPlace <- otp_clean_input(fromPlace, "fromPlace")
if (!is.null(fromID)) {
if (length(fromID) != nrow(fromPlace)) {
stop("The length of fromID and fromPlace are not the same")
}
}
if (!is.null(toID)) {
if (length(toID) != nrow(toPlace)) {
stop("The length of toID and toPlace are not the same")
}
}
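  # Reduce the destinations to bare geometry and register them with OTP as a
  # randomly named pointset, so each surface can be evaluated in a single call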
toPlace <- sf::st_sf(data.frame(geometry = sf::st_geometry(toPlace)))
pointsetname <- paste(sample(LETTERS, 10, TRUE), collapse = "")
otp_pointset(toPlace, pointsetname, path_data)
fromPlacelst <- split(fromPlace[,2:1], seq_len(nrow(fromPlace)))
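  # Evaluate one travel-time surface per origin, optionally across a cluster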
  if (ncores > 1) {
cl <- parallel::makeCluster(ncores, outfile = "otp_parallel_log.txt")
parallel::clusterExport(
cl = cl,
varlist = c("otpcon", "pointsetname"),
envir = environment()
)
parallel::clusterEvalQ(cl, {
loadNamespace("opentripplanner")
})
pbapply::pboptions(use_lb = TRUE)
res <- pbapply::pblapply(fromPlacelst,
otp_traveltime_internal,
otpcon = otpcon,
pointsetname = pointsetname,
mode = mode,
date_time = date_time,
arriveBy = arriveBy,
maxWalkDistance = maxWalkDistance,
routeOptions = routeOptions,
cl = cl)
parallel::stopCluster(cl)
rm(cl)
} else {
res <- pbapply::pblapply(fromPlacelst,
otp_traveltime_internal,
otpcon = otpcon,
pointsetname = pointsetname,
mode = mode,
date_time = date_time,
arriveBy = arriveBy,
maxWalkDistance = maxWalkDistance,
routeOptions = routeOptions)
}
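  # Name results by origin, drop origins whose surface failed, then assemble a
  # data frame with one column per origin (fromID) and one row per destination (toID)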
names(res) <- fromID
res <- res[lengths(res) > 0]
res <- list2df(res)
rownames(res) <- toID
return(res)
}
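# --- Example (sketch): many-to-many travel times ----------------------------
# A minimal usage sketch, assuming a running OTP server with analyst features
# enabled and two sf point objects `origins` and `destinations`. All object,
# column, and path names here are illustrative, not part of the package.
if (FALSE) {
  otpcon <- otp_connect()
  ttm <- otp_traveltime(
    otpcon    = otpcon,
    path_data = "otp/graphs/default", # OTP data folder the pointset is written to
    fromPlace = origins,
    toPlace   = destinations,
    fromID    = origins$id,
    toID      = destinations$id,
    mode      = "CAR",
    ncores    = 4                     # origins are processed in parallel
  )
  # Result: a data frame of travel times, one row per destination
  # and one column per origin.
}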
otp_traveltime_internal <- function(fromPlace,
otpcon,
pointsetname,
mode,
date_time,
arriveBy,
maxWalkDistance,
                                    routeOptions) {
surface <- try(otp_make_surface(otpcon = otpcon,
fromPlace = fromPlace,
mode = mode,
date_time = date_time,
arriveBy = arriveBy,
maxWalkDistance = maxWalkDistance,
routeOptions = routeOptions), silent = TRUE)
if ("try-error" %in% class(surface)) {
warning("Failed to create surface for: ",paste(fromPlace, collapse = ", "))
return(NULL)
}
times <- try(otp_surface(otpcon, surface, pointsetname, get_data = FALSE),
silent = TRUE)
if ("try-error" %in% class(times)) {
warning("Failed to evaluate surface for: ",paste(fromPlace, collapse = ", "))
return(NULL)
}
return(times$times)
}
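# ----------------------------------------------------------------------------
# WhiteboxTools wrappers (geomorphometry / terrain analysis), following the
# wrapper pattern used by the whitebox R package. Each function below builds a
# "--flag=value" argument string from its parameters and hands it to
# wbt_run_tool(), which invokes the WhiteboxTools command-line binary.
# Inputs and outputs are file paths on disk, not R objects.
# ----------------------------------------------------------------------------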
wbt_accumulation_curvature <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "accumulation_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
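# --- Example (sketch): calling a wrapper ------------------------------------
# File names are illustrative; the WhiteboxTools binary must be installed
# (e.g. via whitebox::install_whitebox()) for the call to do anything.
if (FALSE) {
  wbt_accumulation_curvature(
    dem = "dem.tif",       # input DEM raster on disk
    output = "accum.tif",  # raster the tool writes; nothing is returned to R
    log = TRUE             # log-transform the curvature values
  )
}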
wbt_aspect <- function(dem, output, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "aspect"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_assess_route <- function(routes, dem, output, length="", dist=20, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--routes=", routes))
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(length)) {
args <- paste(args, paste0("--length=", length))
}
if (!is.null(dist)) {
args <- paste(args, paste0("--dist=", dist))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "assess_route"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_average_normal_vector_angular_deviation <- function(dem, output, filter=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "average_normal_vector_angular_deviation"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_circular_variance_of_aspect <- function(dem, output, filter=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "circular_variance_of_aspect"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_contours_from_points <- function(input, output, field=NULL, use_z=FALSE, max_triangle_edge_length=NULL, interval=10.0, base=0.0, smooth=5, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(field)) {
args <- paste(args, paste0("--field=", field))
}
if (use_z) {
args <- paste(args, "--use_z")
}
if (!is.null(max_triangle_edge_length)) {
args <- paste(args, paste0("--max_triangle_edge_length=", max_triangle_edge_length))
}
if (!is.null(interval)) {
args <- paste(args, paste0("--interval=", interval))
}
if (!is.null(base)) {
args <- paste(args, paste0("--base=", base))
}
if (!is.null(smooth)) {
args <- paste(args, paste0("--smooth=", smooth))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "contours_from_points"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_contours_from_raster <- function(input, output, interval=10.0, base=0.0, smooth=9, tolerance=10.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(interval)) {
args <- paste(args, paste0("--interval=", interval))
}
if (!is.null(base)) {
args <- paste(args, paste0("--base=", base))
}
if (!is.null(smooth)) {
args <- paste(args, paste0("--smooth=", smooth))
}
if (!is.null(tolerance)) {
args <- paste(args, paste0("--tolerance=", tolerance))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "contours_from_raster"
wbt_run_tool(tool_name, args, verbose_mode)
}
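# --- Example (sketch): vector contours from a DEM ---------------------------
# Unlike most tools here, the output is a vector file; names are illustrative.
if (FALSE) {
  wbt_contours_from_raster(
    input = "dem.tif",
    output = "contours.shp",
    interval = 5,   # contour spacing in z units
    base = 0,       # datum the intervals are measured from
    smooth = 9      # smoothing filter size (odd integer)
  )
}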
wbt_curvedness <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "curvedness"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_dev_from_mean_elev <- function(dem, output, filterx=11, filtery=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "dev_from_mean_elev"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_diff_from_mean_elev <- function(dem, output, filterx=11, filtery=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "diff_from_mean_elev"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_difference_curvature <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "difference_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_directional_relief <- function(dem, output, azimuth=0.0, max_dist=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "directional_relief"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_downslope_index <- function(dem, output, drop=2.0, out_type="tangent", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(drop)) {
args <- paste(args, paste0("--drop=", drop))
}
if (!is.null(out_type)) {
args <- paste(args, paste0("--out_type=", out_type))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "downslope_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_edge_density <- function(dem, output, filter=11, norm_diff=5.0, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(norm_diff)) {
args <- paste(args, paste0("--norm_diff=", norm_diff))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "edge_density"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_elev_above_pit <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "elev_above_pit"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_elev_percentile <- function(dem, output, filterx=11, filtery=11, sig_digits=2, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(sig_digits)) {
args <- paste(args, paste0("--sig_digits=", sig_digits))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "elev_percentile"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_elev_relative_to_min_max <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "elev_relative_to_min_max"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_elev_relative_to_watershed_min_max <- function(dem, watersheds, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--watersheds=", watersheds))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "elev_relative_to_watershed_min_max"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_embankment_mapping <- function(dem, road_vec, output, search_dist=2.5, min_road_width=6.0, typical_width=30.0, max_height=2.0, max_width=60.0, max_increment=0.05, spillout_slope=4.0, remove_embankments=FALSE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--road_vec=", road_vec))
args <- paste(args, paste0("--output=", output))
if (!is.null(search_dist)) {
args <- paste(args, paste0("--search_dist=", search_dist))
}
if (!is.null(min_road_width)) {
args <- paste(args, paste0("--min_road_width=", min_road_width))
}
if (!is.null(typical_width)) {
args <- paste(args, paste0("--typical_width=", typical_width))
}
if (!is.null(max_height)) {
args <- paste(args, paste0("--max_height=", max_height))
}
if (!is.null(max_width)) {
args <- paste(args, paste0("--max_width=", max_width))
}
if (!is.null(max_increment)) {
args <- paste(args, paste0("--max_increment=", max_increment))
}
if (!is.null(spillout_slope)) {
args <- paste(args, paste0("--spillout_slope=", spillout_slope))
}
if (remove_embankments) {
args <- paste(args, "--remove_embankments")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "embankment_mapping"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_exposure_towards_wind_flux <- function(dem, output, azimuth="", max_dist="", zfactor="", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "exposure_towards_wind_flux"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_feature_preserving_smoothing <- function(dem, output, filter=11, norm_diff=15.0, num_iter=3, max_diff=0.5, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(norm_diff)) {
args <- paste(args, paste0("--norm_diff=", norm_diff))
}
if (!is.null(num_iter)) {
args <- paste(args, paste0("--num_iter=", num_iter))
}
if (!is.null(max_diff)) {
args <- paste(args, paste0("--max_diff=", max_diff))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "feature_preserving_smoothing"
wbt_run_tool(tool_name, args, verbose_mode)
}
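# --- Example (sketch): denoise a DEM while keeping breaks of slope ----------
# A sketch with hypothetical file names; defaults are shown explicitly.
if (FALSE) {
  wbt_feature_preserving_smoothing(
    dem = "dem.tif",
    output = "dem_smooth.tif",
    filter = 11,     # kernel size in grid cells
    norm_diff = 15   # max normal-vector difference (degrees) to smooth across
  )
}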
wbt_fetch_analysis <- function(dem, output, azimuth=0.0, hgt_inc=0.05, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(hgt_inc)) {
args <- paste(args, paste0("--hgt_inc=", hgt_inc))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "fetch_analysis"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_fill_missing_data <- function(input, output, filter=11, weight=2.0, no_edges=TRUE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(weight)) {
args <- paste(args, paste0("--weight=", weight))
}
if (no_edges) {
args <- paste(args, "--no_edges")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "fill_missing_data"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_find_ridges <- function(dem, output, line_thin=TRUE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (line_thin) {
args <- paste(args, "--line_thin")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "find_ridges"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_gaussian_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "gaussian_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_gaussian_scale_space <- function(dem, output, output_zscore, output_scale, points=NULL, sigma=0.5, step=0.5, num_steps=10, lsp="Slope", z_factor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
args <- paste(args, paste0("--output_zscore=", output_zscore))
args <- paste(args, paste0("--output_scale=", output_scale))
if (!is.null(points)) {
args <- paste(args, paste0("--points=", points))
}
if (!is.null(sigma)) {
args <- paste(args, paste0("--sigma=", sigma))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(lsp)) {
args <- paste(args, paste0("--lsp=", lsp))
}
if (!is.null(z_factor)) {
args <- paste(args, paste0("--z_factor=", z_factor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "gaussian_scale_space"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_generating_function <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "generating_function"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_geomorphons <- function(dem, output, search=50, threshold=0.0, tdist=0, forms=TRUE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(search)) {
args <- paste(args, paste0("--search=", search))
}
if (!is.null(threshold)) {
args <- paste(args, paste0("--threshold=", threshold))
}
if (!is.null(tdist)) {
args <- paste(args, paste0("--tdist=", tdist))
}
if (forms) {
args <- paste(args, "--forms")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "geomorphons"
wbt_run_tool(tool_name, args, verbose_mode)
}
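# --- Example (sketch): geomorphon landform classification -------------------
# File names are illustrative.
if (FALSE) {
  wbt_geomorphons(
    dem = "dem.tif",
    output = "landforms.tif",
    search = 50,   # lookup distance in grid cells
    forms = TRUE   # collapse patterns into the ten common landform classes
  )
}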
wbt_hillshade <- function(dem, output, azimuth=315.0, altitude=30.0, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(altitude)) {
args <- paste(args, paste0("--altitude=", altitude))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "hillshade"
wbt_run_tool(tool_name, args, verbose_mode)
}
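# --- Example (sketch): a basic hillshade ------------------------------------
# File names are illustrative; the defaults shown are the function's own.
if (FALSE) {
  wbt_hillshade(
    dem = "dem.tif",
    output = "hillshade.tif",
    azimuth = 315,  # illumination direction (degrees clockwise from north)
    altitude = 30   # sun elevation angle above the horizon (degrees)
  )
}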
wbt_horizon_angle <- function(dem, output, azimuth=0.0, max_dist=100.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "horizon_angle"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_horizontal_excess_curvature <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "horizontal_excess_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_hypsometric_analysis <- function(inputs, output, watershed=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--inputs=", inputs))
args <- paste(args, paste0("--output=", output))
if (!is.null(watershed)) {
args <- paste(args, paste0("--watershed=", watershed))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "hypsometric_analysis"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_hypsometrically_tinted_hillshade <- function(dem, output, altitude=45.0, hs_weight=0.5, brightness=0.5, atmospheric=0.0, palette="atlas", reverse=FALSE, zfactor=NULL, full_mode=FALSE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(altitude)) {
args <- paste(args, paste0("--altitude=", altitude))
}
if (!is.null(hs_weight)) {
args <- paste(args, paste0("--hs_weight=", hs_weight))
}
if (!is.null(brightness)) {
args <- paste(args, paste0("--brightness=", brightness))
}
if (!is.null(atmospheric)) {
args <- paste(args, paste0("--atmospheric=", atmospheric))
}
if (!is.null(palette)) {
args <- paste(args, paste0("--palette=", palette))
}
if (reverse) {
args <- paste(args, "--reverse")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (full_mode) {
args <- paste(args, "--full_mode")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "hypsometrically_tinted_hillshade"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_local_hypsometric_analysis <- function(input, out_mag, out_scale, min_scale=4, step=1, num_steps=10, step_nonlinearity=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(step_nonlinearity)) {
args <- paste(args, paste0("--step_nonlinearity=", step_nonlinearity))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "local_hypsometric_analysis"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_local_quadratic_regression <- function(dem, output, filter=3, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "local_quadratic_regression"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_map_off_terrain_objects <- function(dem, output, max_slope=40.0, min_size=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(max_slope)) {
args <- paste(args, paste0("--max_slope=", max_slope))
}
if (!is.null(min_size)) {
args <- paste(args, paste0("--min_size=", min_size))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "map_off_terrain_objects"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_anisotropy_dev <- function(dem, out_mag, out_scale, max_scale, min_scale=3, step=2, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_anisotropy_dev"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_anisotropy_dev_signature <- function(dem, points, output, max_scale, min_scale=1, step=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--points=", points))
args <- paste(args, paste0("--output=", output))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_anisotropy_dev_signature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_branch_length <- function(dem, output, log=FALSE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_branch_length"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_difference_from_mean <- function(dem, out_mag, out_scale, min_scale, max_scale, step=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
args <- paste(args, paste0("--min_scale=", min_scale))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_difference_from_mean"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_downslope_elev_change <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_downslope_elev_change"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_elev_dev_signature <- function(dem, points, output, min_scale, max_scale, step=10, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--points=", points))
args <- paste(args, paste0("--output=", output))
args <- paste(args, paste0("--min_scale=", min_scale))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_elev_dev_signature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_elevation_deviation <- function(dem, out_mag, out_scale, min_scale, max_scale, step=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
args <- paste(args, paste0("--min_scale=", min_scale))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_elevation_deviation"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_max_upslope_elev_change <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "max_upslope_elev_change"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_maximal_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "maximal_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_mean_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "mean_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_min_downslope_elev_change <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "min_downslope_elev_change"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_minimal_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "minimal_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multidirectional_hillshade <- function(dem, output, altitude=45.0, zfactor=NULL, full_mode=FALSE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(altitude)) {
args <- paste(args, paste0("--altitude=", altitude))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (full_mode) {
args <- paste(args, "--full_mode")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multidirectional_hillshade"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_elevation_percentile <- function(dem, out_mag, out_scale, sig_digits=3, min_scale=4, step=1, num_steps=10, step_nonlinearity=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
if (!is.null(sig_digits)) {
args <- paste(args, paste0("--sig_digits=", sig_digits))
}
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(step_nonlinearity)) {
args <- paste(args, paste0("--step_nonlinearity=", step_nonlinearity))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_elevation_percentile"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_roughness <- function(dem, out_mag, out_scale, max_scale, min_scale=1, step=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_roughness"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_roughness_signature <- function(dem, points, output, max_scale, min_scale=1, step=1, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--points=", points))
args <- paste(args, paste0("--output=", output))
args <- paste(args, paste0("--max_scale=", max_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_roughness_signature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_std_dev_normals <- function(dem, out_mag, out_scale, min_scale=1, step=1, num_steps=10, step_nonlinearity=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--out_mag=", out_mag))
args <- paste(args, paste0("--out_scale=", out_scale))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(step_nonlinearity)) {
args <- paste(args, paste0("--step_nonlinearity=", step_nonlinearity))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_std_dev_normals"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_std_dev_normals_signature <- function(dem, points, output, min_scale=1, step=1, num_steps=10, step_nonlinearity=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--points=", points))
args <- paste(args, paste0("--output=", output))
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(step)) {
args <- paste(args, paste0("--step=", step))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(step_nonlinearity)) {
args <- paste(args, paste0("--step_nonlinearity=", step_nonlinearity))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_std_dev_normals_signature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_multiscale_topographic_position_image <- function(local, meso, broad, output, lightness=1.2, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--local=", local))
args <- paste(args, paste0("--meso=", meso))
args <- paste(args, paste0("--broad=", broad))
args <- paste(args, paste0("--output=", output))
if (!is.null(lightness)) {
args <- paste(args, paste0("--lightness=", lightness))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "multiscale_topographic_position_image"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_num_downslope_neighbours <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "num_downslope_neighbours"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_num_upslope_neighbours <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "num_upslope_neighbours"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_openness <- function(input, pos_output, neg_output, dist=20, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--pos_output=", pos_output))
args <- paste(args, paste0("--neg_output=", neg_output))
if (!is.null(dist)) {
args <- paste(args, paste0("--dist=", dist))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "openness"
wbt_run_tool(tool_name, args, verbose_mode)
}
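# --- Example (sketch): a tool with two required outputs ---------------------
# Openness writes both rasters in one pass; names are illustrative.
if (FALSE) {
  wbt_openness(
    input = "dem.tif",
    pos_output = "openness_pos.tif",  # positive (above-surface) openness
    neg_output = "openness_neg.tif",  # negative (below-surface) openness
    dist = 20                         # search distance in grid cells
  )
}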
wbt_pennock_landform_class <- function(dem, output, slope=3.0, prof=0.1, plan=0.0, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(slope)) {
args <- paste(args, paste0("--slope=", slope))
}
if (!is.null(prof)) {
args <- paste(args, paste0("--prof=", prof))
}
if (!is.null(plan)) {
args <- paste(args, paste0("--plan=", plan))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "pennock_landform_class"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_percent_elev_range <- function(dem, output, filterx=3, filtery=3, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "percent_elev_range"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_plan_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "plan_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_profile <- function(lines, surface, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--lines=", lines))
args <- paste(args, paste0("--surface=", surface))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "profile"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_profile_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "profile_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_relative_aspect <- function(dem, output, azimuth=0.0, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(azimuth)) {
args <- paste(args, paste0("--azimuth=", azimuth))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "relative_aspect"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_relative_topographic_position <- function(dem, output, filterx=11, filtery=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "relative_topographic_position"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_remove_off_terrain_objects <- function(dem, output, filter=11, slope=15.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(slope)) {
args <- paste(args, paste0("--slope=", slope))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "remove_off_terrain_objects"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_ring_curvature <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "ring_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_rotor <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "rotor"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_ruggedness_index <- function(dem, output, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "ruggedness_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_sediment_transport_index <- function(sca, slope, output, sca_exponent=0.4, slope_exponent=1.3, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--sca=", sca))
args <- paste(args, paste0("--slope=", slope))
args <- paste(args, paste0("--output=", output))
if (!is.null(sca_exponent)) {
args <- paste(args, paste0("--sca_exponent=", sca_exponent))
}
if (!is.null(slope_exponent)) {
args <- paste(args, paste0("--slope_exponent=", slope_exponent))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "sediment_transport_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_shadow_animation <- function(input, output, palette="atlas", max_dist="", date="21/06/2021", interval=15, location="43.5448/-80.2482/-4", height=600, delay=250, label="", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(palette)) {
args <- paste(args, paste0("--palette=", palette))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(date)) {
args <- paste(args, paste0("--date=", date))
}
if (!is.null(interval)) {
args <- paste(args, paste0("--interval=", interval))
}
if (!is.null(location)) {
args <- paste(args, paste0("--location=", location))
}
if (!is.null(height)) {
args <- paste(args, paste0("--height=", height))
}
if (!is.null(delay)) {
args <- paste(args, paste0("--delay=", delay))
}
if (!is.null(label)) {
args <- paste(args, paste0("--label=", label))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "shadow_animation"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_shadow_image <- function(input, output, palette="soft", max_dist="", date="21/06/2021", time="1300", location="43.5448/-80.2482/-4", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(palette)) {
args <- paste(args, paste0("--palette=", palette))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(date)) {
args <- paste(args, paste0("--date=", date))
}
if (!is.null(time)) {
args <- paste(args, paste0("--time=", time))
}
if (!is.null(location)) {
args <- paste(args, paste0("--location=", location))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "shadow_image"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_shape_index <- function(dem, output, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "shape_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_slope <- function(dem, output, zfactor=NULL, units="degrees", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(units)) {
args <- paste(args, paste0("--units=", units))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "slope"
wbt_run_tool(tool_name, args, verbose_mode)
}
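# --- Example (sketch): slope with non-default units -------------------------
# File names are illustrative; units defaults to "degrees".
if (FALSE) {
  wbt_slope(
    dem = "dem.tif",
    output = "slope.tif",
    units = "percent"  # report slope as percent rise rather than degrees
  )
}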
wbt_slope_vs_aspect_plot <- function(input, output, bin_size=2.0, min_slope=0.1, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(bin_size)) {
args <- paste(args, paste0("--bin_size=", bin_size))
}
if (!is.null(min_slope)) {
args <- paste(args, paste0("--min_slope=", min_slope))
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "slope_vs_aspect_plot"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_slope_vs_elevation_plot <- function(inputs, output, watershed=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--inputs=", inputs))
args <- paste(args, paste0("--output=", output))
if (!is.null(watershed)) {
args <- paste(args, paste0("--watershed=", watershed))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "slope_vs_elevation_plot"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_smooth_vegetation_residual <- function(input, output, max_scale=30, dev_threshold=1.0, scale_threshold=5, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(max_scale)) {
args <- paste(args, paste0("--max_scale=", max_scale))
}
if (!is.null(dev_threshold)) {
args <- paste(args, paste0("--dev_threshold=", dev_threshold))
}
if (!is.null(scale_threshold)) {
args <- paste(args, paste0("--scale_threshold=", scale_threshold))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "smooth_vegetation_residual"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_spherical_std_dev_of_normals <- function(dem, output, filter=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(filter)) {
args <- paste(args, paste0("--filter=", filter))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "spherical_std_dev_of_normals"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_standard_deviation_of_slope <- function(input, output, zfactor=NULL, filterx=11, filtery=11, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(filterx)) {
args <- paste(args, paste0("--filterx=", filterx))
}
if (!is.null(filtery)) {
args <- paste(args, paste0("--filtery=", filtery))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "standard_deviation_of_slope"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_stream_power_index <- function(sca, slope, output, exponent=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--sca=", sca))
args <- paste(args, paste0("--slope=", slope))
args <- paste(args, paste0("--output=", output))
if (!is.null(exponent)) {
args <- paste(args, paste0("--exponent=", exponent))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "stream_power_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_surface_area_ratio <- function(dem, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "surface_area_ratio"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_tangential_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "tangential_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_time_in_daylight <- function(dem, output, lat, long, az_fraction=10.0, max_dist=100.0, utc_offset="0000", start_day=1, end_day=365, start_time="000000", end_time="235959", wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
args <- paste(args, paste0("--lat=", lat))
args <- paste(args, paste0("--long=", long))
if (!is.null(az_fraction)) {
args <- paste(args, paste0("--az_fraction=", az_fraction))
}
if (!is.null(max_dist)) {
args <- paste(args, paste0("--max_dist=", max_dist))
}
if (!is.null(utc_offset)) {
args <- paste(args, paste0("--utc_offset=", utc_offset))
}
if (!is.null(start_day)) {
args <- paste(args, paste0("--start_day=", start_day))
}
if (!is.null(end_day)) {
args <- paste(args, paste0("--end_day=", end_day))
}
if (!is.null(start_time)) {
args <- paste(args, paste0("--start_time=", start_time))
}
if (!is.null(end_time)) {
args <- paste(args, paste0("--end_time=", end_time))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "time_in_daylight"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_topographic_position_animation <- function(input, output, palette="bl_yl_rd", min_scale=1, num_steps=100, step_nonlinearity=1.5, height=600, delay=250, label="", dev_max=FALSE, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--input=", input))
args <- paste(args, paste0("--output=", output))
if (!is.null(palette)) {
args <- paste(args, paste0("--palette=", palette))
}
if (!is.null(min_scale)) {
args <- paste(args, paste0("--min_scale=", min_scale))
}
if (!is.null(num_steps)) {
args <- paste(args, paste0("--num_steps=", num_steps))
}
if (!is.null(step_nonlinearity)) {
args <- paste(args, paste0("--step_nonlinearity=", step_nonlinearity))
}
if (!is.null(height)) {
args <- paste(args, paste0("--height=", height))
}
if (!is.null(delay)) {
args <- paste(args, paste0("--delay=", delay))
}
if (!is.null(label)) {
args <- paste(args, paste0("--label=", label))
}
if (dev_max) {
args <- paste(args, "--dev_max")
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "topographic_position_animation"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_total_curvature <- function(dem, output, log=FALSE, zfactor=NULL, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "total_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_unsphericity <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "unsphericity"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_vertical_excess_curvature <- function(dem, output, log=FALSE, zfactor=1.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (log) {
args <- paste(args, "--log")
}
if (!is.null(zfactor)) {
args <- paste(args, paste0("--zfactor=", zfactor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "vertical_excess_curvature"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_viewshed <- function(dem, stations, output, height=2.0, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--stations=", stations))
args <- paste(args, paste0("--output=", output))
if (!is.null(height)) {
args <- paste(args, paste0("--height=", height))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "viewshed"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_visibility_index <- function(dem, output, height=2.0, res_factor=2, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--dem=", dem))
args <- paste(args, paste0("--output=", output))
if (!is.null(height)) {
args <- paste(args, paste0("--height=", height))
}
if (!is.null(res_factor)) {
args <- paste(args, paste0("--res_factor=", res_factor))
}
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "visibility_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
wbt_wetness_index <- function(sca, slope, output, wd=NULL, verbose_mode=FALSE, compress_rasters=FALSE) {
wbt_init()
args <- ""
args <- paste(args, paste0("--sca=", sca))
args <- paste(args, paste0("--slope=", slope))
args <- paste(args, paste0("--output=", output))
if (!is.null(wd)) {
args <- paste(args, paste0("--wd=", wd))
}
if (compress_rasters) {
args <- paste(args, "--compress_rasters")
}
tool_name <- "wetness_index"
wbt_run_tool(tool_name, args, verbose_mode)
}
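# Hedged usage sketch for the wrappers above (file paths are hypothetical;
# assumes the WhiteboxTools backend that wbt_init() locates is installed):
# compute a topographic wetness index from precomputed specific catchment
# area and slope rasters.
if (FALSE) {
wbt_wetness_index(sca = "sca.tif", slope = "slope.tif", output = "twi.tif", verbose_mode = TRUE)
}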
|
write4D <- function(scene, outfile, fnames=NULL,
captions=NULL, writefiles=TRUE, reprint=TRUE, ...){
nrois <- length(scene)
nfiles <- length(fnames)
stopifnot(nfiles == nrois)
formats <- sapply(strsplit(fnames, split="\\."), function(x) x[length(x)])
formats <- toupper(formats)
if (!all(formats %in% c("PLY", "STL", "OBJ"))){
stop("Formats are not PLY,OBJ, or STL!")
}
roi_names <- names(scene)
if (is.null(roi_names)) {
tmp <- tolower(fnames)
tmp <- gsub(".ply", "", tmp, fixed=TRUE)
tmp <- gsub(".stl", "", tmp, fixed=TRUE)
tmp <- gsub(".obj", "", tmp, fixed=TRUE)
roi_names <- tmp
}
stopifnot(all(!is.na(roi_names)))
if (is.null(captions)) captions <- roi_names
lfnames <- opacity <- colors <-NULL
iroi <- 1
classes <- sapply(scene, class)
outdir <- dirname(outfile)
write_output <- function(outdir, fname, fmt, reprint=FALSE, ...){
filename <- file.path(outdir, basename(fname))
fcn <- paste0("write", fmt)
if (fmt %in% "STL" & !reprint) fcn <- paste0("writeTriangles", fmt)
do.call(fcn, list(con=filename, ...))
}
getBase <- function(x, ind=1){
sapply(strsplit(x, split="\\."), function(xx) paste(xx[1:(length(xx)-ind)], collapse=".", sep=""))
}
for (iroi in 1:nrois) {
if (reprint) {
pars <- par3d()
wrect <- pars$windowRect
} else {
wrect = c(0L, 44L, 256L, 300L)
}
irgl <- scene[[iroi]]
fname <- fnames[iroi]
fmt <- formats[iroi]
fname = basename(fname)
if (class(irgl) == "Triangles3D"){
lfname <- fname
obj.colors <- irgl$color
obj.opac <- irgl$alpha
if (fmt %in% "STL" & !reprint){
if (!writefiles){
stop("Specified no reprinting but no writing files - not sure what to do")
}
write_output(outdir, fname, fmt, reprint=reprint, scene=list(irgl))
} else {
drawScene.rgl(irgl)
if (writefiles) write_output(outdir, fname, fmt, reprint=reprint)
}
}
if (class(irgl) == "list"){
obj.colors <- sapply(irgl, function(x) x$color)
obj.opac <- sapply(irgl, function(x) x$alpha)
stub <- getBase(fname, 1)
nsubrois <- length(irgl)
getfmt <- floor(log(nsubrois, 10)) + 1
nums <- sapply(1:nsubrois, sprintf, fmt=paste0("%0", getfmt, ".0f"))
lfname <- paste0(stub, "_", nums, ".", tolower(fmt))
for (isroi in 1:nsubrois){
iirgl <- irgl[[isroi]]
sfname <- paste0(stub, "_", nums[isroi], ".", tolower(fmt))
if (fmt %in% "STL" & !reprint){
if (!writefiles){
stop("Specified no reprinting but no writing files - not sure what to do")
}
write_output(outdir, sfname, fmt, reprint=reprint,
scene=list(iirgl))
} else {
drawScene.rgl(iirgl)
if (writefiles) {
write_output(outdir, sfname, fmt, reprint=reprint )
}
}
}
}
stopifnot(class(irgl) %in% c("list", "Triangles3D"))
opacity <- c(opacity, list(obj.opac))
colors <- c(colors, list(obj.colors))
lfnames <- c(lfnames, list(lfname))
}
if (inherits(scene[[1]], "Triangles3D")) vscale <- max(scene[[1]]$v1)
if (inherits(scene[[1]], "list")) vscale <- max(scene[[1]][[1]]$v1)
fnames <- lfnames
write4D.file(outfile=outfile, fnames=lfnames, captions=captions,
colors=colors, opacity=opacity, scene=scene, ...)
return(invisible(NULL))
}
|
check_bool <- function(x, name = NULL, general = NULL, specific = NULL,
supplement = NULL, ...) {
if (is.null(name)) {
name <- deparse(substitute(x))
}
check_content(x, c(TRUE, FALSE), name, general, specific, supplement, ...)
}
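# Minimal sketch of check_bool() guarding a flag argument (assumes the
# erify-style check_content() defined elsewhere in this file is available):
if (FALSE) {
greet <- function(loud = FALSE) {
check_bool(loud)
message(if (loud) "HELLO" else "hello")
}
greet(loud = "yes")  # errors: `loud` must be TRUE or FALSE
}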
|
fitParabola <- function(x, y = NULL, searchAngle = c(-pi/2, pi/2), ...){
xy <- xy.coords(x, y)
xy <- cbind(xy$x, xy$y)
# find the rotation angle minimizing the least-squares parabola cost
bar2 <- optimize(costparabxy, searchAngle, xy = xy)
theta <- bar2$minimum
finalcost <- costparab(theta, xy)
coeffs <- finalcost$coeffs
# vertex of y = c1 + c2*x + c3*x^2 in the rotated frame, mapped back
xv <- -coeffs[2]/2/coeffs[3]
vertex <- xyrot(xv, coeffs[1] + coeffs[2]*xv + coeffs[3]*xv^2, theta)
costhet <- cos(theta)
sinthet <- -sin(theta)
# coefficients of the fitted parabola in implicit (general-conic) form
parA <- coeffs[3]*costhet^2
parA[2] <- 2*coeffs[3]*costhet*sinthet
parA[3] <- coeffs[3]*sinthet^2
parA[4] <- sinthet + coeffs[2]*costhet
parA[5] <- coeffs[2]*sinthet - costhet
parA[6] <- coeffs[1]
return(list(vertex = vertex, theta = theta, parA = parA, parQ = coeffs, cost = finalcost$thecost))
}
costparab <- function(theta,xy){
rxy <-xyrot(xy, theta = -theta)
lmout <- lm(rxy[,2] ~ I(rxy[,1]) + I(rxy[,1]^2) )
normres <- norm(as.matrix(lmout$residuals),'F')
return(list(thecost=normres, coeffs=lmout$coefficients))
}
costparabxy <- function(theta,xy) costparab(theta,xy)$thecost
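# Worked sketch for fitParabola() on synthetic data (assumes xyrot(), the
# coordinate-rotation helper used above, comes from the same package):
if (FALSE) {
set.seed(1)
x <- seq(-3, 3, length.out = 100)
y <- 0.5 * x^2 + rnorm(100, sd = 0.05)
fit <- fitParabola(x, y, searchAngle = c(-pi/4, pi/4))
fit$theta   # near 0 for this untilted parabola
fit$vertex  # near the origin
}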
|
expected <- eval(parse(text="structure(list(), .Names = character(0), row.names = integer(0), class = \"data.frame\")"));
test(id=0, code={
argv <- eval(parse(text="list(structure(list(), .Names = character(0), row.names = integer(0), class = \"data.frame\"))"));
do.call(`(`, argv);
}, o=expected);
|
context("range dataset")
test_succeeds("range_dataset creates a dataset", {
dataset <- range_dataset(from = 1, to = 11) %>% dataset_batch(10)
batch <- next_batch(dataset)
res <- if (tf$executing_eagerly()) {
as.array(batch)
} else {
with_session(function (sess) {
sess$run(batch)
})
}
expect_equal(res, array(1L:10L))
})
test_succeeds("random_integer_dataset creates a dataset", {
ds1 <- random_integer_dataset(seed=4L) %>% dataset_take(10)
ds2 <- random_integer_dataset(seed=4L) %>% dataset_take(10)
r1 <- reticulate::iterate(ds1, as.numeric) %>% unlist()
r2 <- reticulate::iterate(ds2, as.numeric) %>% unlist()
expect_equal(r1, r2)
})
|
FmakeDB<-function(LF2, kind =1, Iendian=1, BIGLONG=FALSE)
{
if(missing(kind)) { kind =1 }
if(missing(Iendian)) { Iendian=1 }
if(missing(BIGLONG)) { BIGLONG=FALSE }
ADB = list(fn="",
yr=0,
jd=0,
hr=0,
mi=0,
sec=0,
dur=0,
t1=0,
t2=0,
sta="",
comp="")
attr(ADB, "origyr")<- 1972
N = 0
if(length(kind)==1) kind = rep(kind, times=length(LF2) )
if(length(Iendian)==1) Iendian = rep(Iendian, times=length(LF2) )
if(length(BIGLONG)==1) BIGLONG = rep(BIGLONG, times=length(LF2) )
for(i in 1:length(LF2))
{
sinfo = GET.seis(LF2[i], kind=kind[i], Iendian=Iendian[i], BIGLONG=BIGLONG[i] , HEADONLY=TRUE , PLOT=-1)
for(j in 1:length(sinfo))
{
REC = sinfo[[j]]
if(is.null(REC$DATTIM[[ 'msec' ]] )) REC$DATTIM$msec=0
if(is.null(REC$DATTIM[['dt']] )) REC$DATTIM$dt=REC$dt
N = N + 1
ADB$fn[N] = REC$fn
ADB$sta[N] = REC$sta
ADB$comp[N] = REC$comp
ADB$yr[N] = REC$DATTIM$yr
ADB$jd[N] = REC$DATTIM$jd
ADB$hr[N] = REC$DATTIM$hr
ADB$mi[N] = REC$DATTIM$mi
ADB$sec[N] = REC$DATTIM$sec+REC$DATTIM$msec/1000
ADB$dur[N] = REC$DATTIM$dt*REC$N
}
}
origyr = min(ADB$yr, na.rm =TRUE )
if(is.na(origyr) ) { origyr = 2000 }
if(is.null(origyr) ) { origyr = 2000 }
eday = EPOCHday(ADB$yr, jd = ADB$jd, origyr = origyr)
ADB$t1 = eday$jday + ADB$hr/24 + ADB$mi/(24 * 60) + ADB$sec/(24 *
3600)
ADB$t2 = ADB$t1 + ADB$dur/(24 * 3600)
attr(ADB, "origyr")<- origyr
attr(ADB, "kind")=kind
attr(ADB, "Iendian")=Iendian
attr(ADB, "BIGLONG")=BIGLONG
invisible(ADB)
}
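# Usage sketch for FmakeDB() (hypothetical directory; GET.seis() and
# EPOCHday() are assumed to come from RSEIS):
if (FALSE) {
files <- list.files("seismic_data", full.names = TRUE)
DB <- FmakeDB(files, kind = 1, Iendian = 1, BIGLONG = FALSE)
cbind(DB$sta, DB$comp)  # station and component per scanned header
}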
|
library(dplyr)
library(posterior)
data(RankCorr, package = "ggdist")
RankCorr_s = as_draws_rvars(RankCorr[[1]][1:10,])
i_labels = c("a", "b", "c")
RankCorr_i = recover_types(RankCorr_s, list(i = factor(i_labels)))
i_labels = c("a", "b", "c")
j_labels = c("A", "B", "C", "D")
RankCorr_ij = recover_types(RankCorr_s, list(i = factor(i_labels), j = factor(j_labels)))
test_that("spread_rvars correctly rejects missing variables", {
data("RankCorr", package = "ggdist")
expect_error(spread_rvars(RankCorr, c(a, b)),
"The variable .* was not found in the model")
expect_error(spread_rvars(RankCorr, a[b]),
"The variable .* was not found in the model")
expect_error(spread_rvars(RankCorr, c(a, x)[b]),
"The variable .* was not found in the model")
})
test_that("spread_rvars works on a simple variable with no dimensions", {
ref = tibble(
typical_r = RankCorr_s$typical_r
)
expect_equal(spread_rvars(RankCorr_s, typical_r), ref)
set.seed(1234)
RankCorr_draws = as_draws(RankCorr_s)
RankCorr_subsample = RankCorr_draws %>%
weight_draws(rep(1, ndraws(RankCorr_draws))) %>%
resample_draws(ndraws = 5)
subsample_ref = tibble(
typical_r = RankCorr_subsample$typical_r
)
expect_equal(spread_rvars(RankCorr_s, typical_r, ndraws = 5, seed = 1234), subsample_ref)
})
test_that("spread_rvars works on two variables with no dimensions and multiple chains", {
data(line, package = "coda")
line = as_draws_rvars(line)
ref = tibble(
alpha = line$alpha,
beta = line$beta
)
expect_equal(spread_rvars(line, alpha, beta), ref)
expect_equal(spread_rvars(line, c(alpha, beta)), ref)
expect_equal(spread_rvars(line, alpha[], beta[]), ref)
})
test_that("spread_rvars works on a variable with one unnamed index", {
ref = tibble(
i = 1:3,
tau = RankCorr_s$tau
)
expect_equal(spread_rvars(RankCorr_s, tau[i]) %>% arrange(i), ref)
})
test_that("spread_rvars works on a variable with one named index", {
ref = tibble(
i = factor(c("a","b","c")),
tau = RankCorr_s$tau
)
expect_equal(spread_rvars(RankCorr_i, tau[i]) %>% arrange(i), ref)
})
test_that("spread_rvars works on a variable with one index left wide", {
ref = tibble(
tau = t(RankCorr_s$tau)
)
expect_equal(spread_rvars(RankCorr_s, tau[]), ref)
})
test_that("spread_rvars works on a variable with one named wide index", {
tau = t(RankCorr_s$tau)
dimnames(tau) = list(NULL, c("a","b","c"))
ref = tibble(
tau = tau
)
RankCorr_i_abc = RankCorr_i
names(RankCorr_i_abc$tau) = c("a","b","c")
expect_equal(spread_rvars(RankCorr_i_abc, tau[]), ref)
})
test_that("spread_rvars works on a variable with two named dimensions", {
i = rep(1:3, 4)
j = rep(1:4, each = 3)
ref = tibble(
i = factor(i_labels[i]),
j = factor(j_labels[j]),
b = RankCorr_ij$b[cbind(i,j)]
)
expect_equal(spread_rvars(RankCorr_ij, b[i, j]) %>% arrange(j, i), ref)
})
test_that("spread_rvars works on a variable with one named index and one wide index", {
ref = tibble(
i = factor(i_labels),
b = RankCorr_i$b
)
expect_equivalent(spread_rvars(RankCorr_i, b[i, ]) %>% arrange(i), ref)
})
test_that("spread_rvars allows extraction of two variables simultaneously with a wide index", {
ref = tibble(
tau = t(RankCorr_i$tau),
u_tau = t(RankCorr_i$u_tau)
)
expect_equal(spread_rvars(RankCorr_s, c(tau, u_tau)[]), ref)
})
test_that("spread_rvars correctly extracts multiple variables simultaneously", {
ref = tibble(
i = factor(i_labels),
tau = RankCorr_i$tau,
u_tau = RankCorr_i$u_tau
)
expect_equal(spread_rvars(RankCorr_i, c(tau, u_tau)[i]), ref)
expect_equal(spread_rvars(RankCorr_i, cbind(tau, u_tau)[i]), ref)
expect_equal(spread_rvars(RankCorr_i, cbind(tau)[i]), ref[-3])
})
test_that("spread_rvars correctly extracts multiple variables simultaneously when those variables have no dimensions", {
RankCorr_t = RankCorr_s
RankCorr_t$tr2 = RankCorr_t$tau[[1]]
ref = tibble(
typical_r = RankCorr_t$typical_r,
tr2 = RankCorr_t$tr2
)
expect_equal(spread_rvars(RankCorr_t, c(typical_r, tr2)), ref)
})
test_that("spread_rvars multispec syntax joins results correctly", {
i_int = rep(1:3, each = 4)
v = rep(1:4, 3)
ref = tibble(
typical_r = RankCorr_ij$typical_r,
i = factor(i_int, labels = i_labels),
tau = RankCorr_ij$tau[i_int],
v = v,
b = RankCorr_ij$b[cbind(i_int,v)]
)
expect_equal(spread_rvars(RankCorr_ij, typical_r, tau[i], b[i, v]) %>% arrange(i,v), ref)
})
|
predict.pcLasso <- function(object, xnew, ...) {
if (object$overlap) {
beta <- object$origbeta
} else {
beta <- object$beta
}
out <- t(object$a0 + t(xnew %*% beta))
if (object$family == "binomial") {
out <- 1 / (1 + exp(-out))
}
return(out)
}
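# Hedged usage sketch for the predict method above (assumes a fit from
# pcLasso::pcLasso(); argument names follow that package's documentation):
if (FALSE) {
set.seed(1)
x <- matrix(rnorm(100 * 10), 100, 10)
y <- rnorm(100)
fit <- pcLasso(x, y, ratio = 0.8)
head(predict(fit, xnew = x))  # one column of fitted values per lambda
}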
|
urstab<-function(n,alpha,beta,sigma,mu,param)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,-1<=beta,beta<=1,length(beta)==1,0<=sigma,length(param)==1, param %in% 0:1)
theta<-runif(n,-pi/2,pi/2)
theta0<-atan(beta*tan(pi*alpha/2))/alpha
x<-c()
w<-rexp(n,1)
# simulate in the 0-parameterization (CMS method) first, then shift
# when the 1-parameterization is requested
if (alpha==1)
{
x<-sigma*2/pi*((pi/2+beta*theta)*tan(theta)-beta*log((pi/2*w*cos(theta))/(pi/2+beta*theta)))+2/pi*beta*sigma*log(sigma)+mu
}
else
{
x<-sigma*sin(alpha*(theta0+theta))/(cos(alpha*theta0)*cos(theta))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*theta)/w)^((1-alpha)/alpha)+mu
}
if (param==1)
{
if (alpha!=1)
{
x<-x-beta*sigma*tan(pi*alpha/2)
}
else
{
x<-x-2/pi*beta*sigma*log(sigma)
}
}
return(x)
}
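# Quick self-contained check of urstab(): for alpha = 2 the stable law is
# Gaussian with standard deviation sigma * sqrt(2).
if (FALSE) {
set.seed(1)
x <- urstab(1e4, alpha = 2, beta = 0, sigma = 1, mu = 0, param = 0)
sd(x)  # roughly sqrt(2)
}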
urstab.trunc<-function(n,alpha,beta,sigma,mu,a,b,param)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,-1<beta,beta<1,length(beta)==1,0<sigma,length(sigma)==1,a<b)
y<-c()
if (alpha==1)
{
if (param==0)
{
y<-c()
for (i in 1:n)
{
w<-rexp(1)
l<-uniroot(function(theta){2/pi*((pi/2+beta*theta)*tan(theta)-beta*log((pi/2*cos(theta))/(pi/2+beta*theta)))-(a-mu)/sigma-2/pi*beta*log(w)},lower=-pi/2,upper=pi/2)$root
u<-uniroot(function(theta){2/pi*((pi/2+beta*theta)*tan(theta)-beta*log((pi/2*cos(theta))/(pi/2+beta*theta)))-(b-mu)/sigma-2/pi*beta*log(w)},lower=-pi/2,upper=pi/2)$root
tc<-runif(1,l,u)
y[i]<-sigma*2/pi*((pi/2+beta*tc)*tan(tc)-beta*log((pi/2*w*cos(tc))/(pi/2+beta*tc)))+mu
}
}
else
{
for (i in 1:n)
{
w<-rexp(1)
l<-uniroot(function(theta){2/pi*((pi/2+beta*theta)*tan(theta)-beta*log((pi/2*cos(theta))/(pi/2+beta*theta)))-(a-mu-beta*2/pi*sigma*log(sigma))/sigma-2/pi*beta*log(w)},lower=-pi/2,upper=pi/2)$root
u<-uniroot(function(theta){2/pi*((pi/2+beta*theta)*tan(theta)-beta*log((pi/2*cos(theta))/(pi/2+beta*theta)))-(b-mu-beta*2/pi*sigma*log(sigma))/sigma-2/pi*beta*log(w)},lower=-pi/2,upper=pi/2)$root
tc<-runif(1,l,u)
y[i]<-sigma*2/pi*((pi/2+beta*tc)*tan(tc)-beta*log((pi/2*w*cos(tc))/(pi/2+beta*tc)))+mu+beta*2/pi*sigma*log(sigma)
}
}
}
else
{
theta0<-atan(beta*tan(pi*alpha/2))/alpha
if (param==1)
{
for(i in 1:n)
{
w<-rexp(1)
l<-uniroot(function(theta){sin(alpha*(theta0+theta))/(cos(alpha*theta0)*cos(theta))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*theta))^((1-alpha)/alpha)-(a-mu)/(sigma*w^((alpha-1)/alpha))},lower=-pi/2,upper=pi/2)$root
u<-uniroot(function(theta){sin(alpha*(theta0+theta))/(cos(alpha*theta0)*cos(theta))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*theta))^((1-alpha)/alpha)-(b-mu)/(sigma*w^((alpha-1)/alpha))},lower=-pi/2,upper=pi/2)$root
uu<-runif(1,l,u)
y[i]<-sigma*sin(alpha*(theta0+uu))/(cos(alpha*theta0)*cos(uu))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*uu)/w)^((1-alpha)/alpha)+mu
}
}
else
{
for(i in 1:n)
{
w<-rexp(1)
l<-uniroot(function(theta){sin(alpha*(theta0+theta))/(cos(alpha*theta0)*cos(theta))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*theta))^((1-alpha)/alpha)-(a-mu+sigma*beta*tan(pi*alpha/2))/(sigma*w^((alpha-1)/alpha))},lower=-pi/2,upper=pi/2)$root
u<-uniroot(function(theta){sin(alpha*(theta0+theta))/(cos(alpha*theta0)*cos(theta))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*theta))^((1-alpha)/alpha)-(b-mu+sigma*beta*tan(pi*alpha/2))/(sigma*w^((alpha-1)/alpha))},lower=-pi/2,upper=pi/2)$root
uu<-runif(1,l,u)
y[i]<-sigma*sin(alpha*(theta0+uu))/(cos(alpha*theta0)*cos(uu))^(1/alpha)*(cos(alpha*theta0+(alpha-1)*uu)/w)^((1-alpha)/alpha)+mu-sigma*beta*tan(pi*alpha/2)
}
}
}
return(y)
}
mrstab.elliptical<-function(n,alpha,Sigma,Mu)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,dim(Sigma)[1]==dim(Sigma)[2],length(Mu)==dim(Sigma)[1])
d<-dim(Sigma)[1]
x<-matrix(0, nrow=n, ncol=d)
for(i in 1:n)
{
x[i,]<-suppressWarnings(Mu+sqrt(rstable(1,alpha/2,1,cos(pi*alpha/4)^(2/alpha),0,1))*rmvnorm(1,c(rep(0,d)),Sigma))
}
return(x)
}
mrstab<-function(n,m,alpha,Gamma,Mu)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,length(Gamma)==m,length(Mu)==2)
x<-matrix(0,nrow=n,ncol=2)
S<-L<-matrix(2*m,nrow=2,ncol=m)
for (j in 1:m)
{
S[1,j]<-cos(2*(j-1)*pi/m)
S[2,j]<-sin(2*(j-1)*pi/m)
}
for (i in 1:n)
{
for (j in 1:m)
{
L[,j]<-(Gamma[j])^(1/alpha)*(rstable(1,alpha,1,1,0,1)*S[,j])
}
x[i,]<-apply(L,1,sum)+Mu
}
return(x)
}
udstab<-function(x,alpha,beta,sigma,mu,param)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,-1<=beta,beta<=1,length(beta)==1,0<=sigma,length(param)==1, param %in% 0:1)
k<-seq(1,150)
xi<--beta*sigma*tan(pi*alpha/2)
eta<--beta*tan(pi*alpha/2)
r<-(1+eta^2)^(1/(2*alpha))
i<-150
if (x==mu)
{
pdf<-suppressWarnings(dstable(x,alpha,beta,sigma,mu,param))
}
else
{
if(alpha==1)
{
pdf<-suppressWarnings(dstable(x,1,beta,sigma,mu,param))
}
else
{
if (param==0)
{
L1<--sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+xi-alpha/2
L2<-sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+xi+alpha/2
U1<--sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+xi+2*alpha
U2<-sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+xi-2*alpha
if(x<L1 || x>L2)
{
pdf<-1/(pi*abs(x-mu-xi))*sum((-1)^(k-1)*exp(lgamma(alpha*k+1)-lgamma(k+1))*(abs(x-mu-xi)/(sigma*r))^(-alpha*k)*sin(k*pi/2*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu-xi))))
}
else
{
if(x<U2 && x>U1)
{
pdf<-1/(pi*abs(x-mu-xi))*sum((-1)^(k-1)*exp(lgamma(k/alpha+1)-lgamma(k+1))*(abs(x-mu-xi)/(sigma*r))^k*sin(k*pi*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu-xi))/(2*alpha)))
}
else
{
pdf<-suppressWarnings(dstable(x,alpha,beta,sigma,mu,0))
}
}
}
else
{
L1<--sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu-alpha
L2<-sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+alpha
U1<--sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+2*alpha
U2<-sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu-2*alpha
if(x<L1 || x>L2)
{
pdf<-1/(pi*abs(x-mu))*sum((-1)^(k-1)*exp(lgamma(alpha*k+1)-lgamma(k+1))*(abs(x-mu)/(sigma*r))^(-alpha*k)*sin(k*pi/2*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu))))
}
else
{
if(x<U2 && x>U1)
{
pdf<-1/(pi*abs(x-mu))*sum((-1)^(k-1)*exp(lgamma(k/alpha+1)-lgamma(k+1))*(abs(x-mu)/(sigma*r))^k*sin(k*pi*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu))/(2*alpha)))
}
else
{
pdf<-suppressWarnings(dstable(x,alpha,beta,sigma,mu,1))
}
}
}
}
}
return(pdf)
}
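# Hedged check of udstab() against the numeric density (dstable() from the
# stabledist package, which the fallback branches above already call):
if (FALSE) {
library(stabledist)
udstab(0.5, alpha = 1.5, beta = 0.2, sigma = 1, mu = 0, param = 0)
dstable(0.5, alpha = 1.5, beta = 0.2, gamma = 1, delta = 0, pm = 0)
}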
mdstab.elliptical<-function(x,alpha,Sigma,Mu)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,length(Mu)==length(x),dim(Sigma)[1]==dim(Sigma)[2],length(Mu)==dim(Sigma)[1])
if(dim(Sigma)[1]!=dim(Sigma)[2]){message("matrix Sigma must be square")}
if(length(Mu)!=dim(Sigma)[1]){message("matrix Sigma and Mu must be of the same dimensions")}
if(length(Mu)!=length(x)){message("vector x and Mu must be of the same dimensions")}
if(min(eigen(Sigma)$values)<0){message("matrix Sigma is not positive definite")}
d<-length(Mu)
dd<-(x-Mu)%*%solve(Sigma)%*%cbind(x-Mu)/2
k<-150
j<-seq(1,150)
r<-2*(exp(lgamma(alpha*k/2+alpha/2+1)+lgamma(alpha*k/2+alpha/2+d/2)-lgamma(alpha*k/2+1)-lgamma(alpha*k/2+d/2))/(k+1))^(2/alpha)
if (dd>r)
{
pdf<-.5*suppressWarnings(1/((2*pi)^(d/2)*pi*sqrt(det(Sigma)))*sum(2^(alpha*j/2+d/2)*(-1)^(j-1)*dd^(-alpha*j/2-d/2)*exp(lgamma(alpha*j/2+1)+lgamma(alpha*j/2+d/2)-lgamma(j+1))*sin(j*pi*alpha*.5)))
}
else
{
rr<-rstable(5000,alpha/2,1,(cos(pi*alpha/4))^(2/alpha),0,1)
pdf<-suppressWarnings(mean(exp(-dd/(4*rr))/((4*pi*rr)^(d/2)*sqrt(det(Sigma)))))
}
return(pdf)
}
upstab<-function(x,alpha,beta,sigma,mu,param)
{
stopifnot(0<alpha,alpha<=2,length(alpha)==1,-1<=beta,beta<=1,length(beta)==1,0<=sigma,length(param)==1, param %in% 0:1)
k<-seq(1,150)
xi<--beta*sigma*tan(pi*alpha/2)
eta<--beta*tan(pi*alpha/2)
r<-(1+eta^2)^(1/(2*alpha))
i<-150
if (x==mu)
{
cdf<-suppressWarnings(pstable(x,alpha,beta,sigma,mu,param))
}
else
{
if(alpha==1)
{
cdf<-suppressWarnings(pstable(x,1,beta,sigma,mu,param))
}
else
{
if (param==0)
{
L1<--sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+xi-alpha/2
L2<-sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+xi+alpha/2
U1<--sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+xi+2*alpha
U2<-sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+xi-2*alpha
if(x<L1 || x>L2)
{
cdf<-(1+sign(x-mu-xi))/2+sign(x-mu-xi)/(pi)*sum((-1)^(k)*exp(lgamma(alpha*k+1)-lgamma(k+1))*(abs(x-mu-xi)/(sigma*r))^(-alpha*k)/(alpha*k)*sin(k*pi/2*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu-xi))))
}
else
{
if(x<U2 && x>U1)
{
cdf<-(1/2-atan(beta*tan(pi*alpha/2))/(alpha*pi))-sign(x-mu-xi)/pi*sum((-1)^(k)*exp(lgamma(k/alpha+1)-lgamma(k+1))*(abs(x-mu-xi)/(sigma*r))^(k)/(k)*sin(k*pi*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu-xi))/(2*alpha)))
}
else
{
cdf<-suppressWarnings(pstable(x,alpha,beta,sigma,mu,0))
}
}
}
else
{
L1<--sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu-alpha
L2<-sigma*r*(alpha*exp(lgamma(alpha*i+alpha)-lgamma(alpha*i+1)))^(1/alpha)+mu+alpha
U1<--sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu+2*alpha
U2<-sigma*r*alpha*exp(lgamma(i/alpha+1)-lgamma(i/alpha+1/alpha))+mu-2*alpha
if(x<L1 || x>L2)
{
cdf<-(1+sign(x-mu))/2+sign(x-mu)/(pi)*sum((-1)^(k)*exp(lgamma(alpha*k+1)-lgamma(k+1))*(abs(x-mu)/(sigma*r))^(-alpha*k)/(alpha*k)*sin(k*pi/2*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu))))
}
else
{
if(x<U2 && x>U1)
{
cdf<-(1/2-atan(beta*tan(pi*alpha/2))/(alpha*pi))-sign(x-mu)/pi*sum((-1)^(k)*exp(lgamma(k/alpha+1)-lgamma(k+1))*(abs(x-mu)/(sigma*r))^(k)/(k)*sin(k*pi*(alpha+2/pi*atan(beta*tan(pi*alpha/2))*sign(x-mu))/(2*alpha)))
}
else
{
cdf<-suppressWarnings(pstable(x,alpha,beta,sigma,mu,1))
}
}
}
}
}
return(cdf)
}
mfitstab.ustat<-function(u,m,method=1)
{
stopifnot(length(u[1,])>1,length(u[,1])>2,m %in% 2:40, method %in% 1:2)
S<-matrix(2*m,nrow=2,ncol=m)
for (j in 1:m)
{
S[1,j]<-cos(2*(j-1)*pi/m)
S[2,j]<-sin(2*(j-1)*pi/m)
}
T<-t(S)
u<-t(u)
n<-length(u[1,])
mass<-W<-V<-c()
L<-matrix(m*m,nrow=m,ncol=m)
if(method=="1")
{
s<-0
for (i in 1:(n-1))
{
for (j in (i+1):n)
{
s1<-sqrt(sum(u[,i]^2))
s2<-sqrt(sum(u[,j]^2))
s3<-sqrt(sum((u[,i]+u[,j])^2))
s<-s+(log(s3)-1/2*(log(s1)+log(s2)))/log(2)
}
}
alpha.hat<-n*(n-1)/(2*s)
for (i in 1:m)
{
for (j in 1:m)
{
q<-T[i,1]*S[1,j]+T[i,2]*S[2,j]
L[i,j]<--(abs(q))^(alpha.hat)*(1-sign(q)*tan(pi*alpha.hat/2))
}
}
for (i in 1:m)
{
hh<-T[i,1]*u[1,]+T[i,2]*u[2,]
V[i]<-complex(real=mean(cos(hh)),imaginary=mean(sin(hh)))
W[i]<-Re(log(V[i]))+Im(log(V[i]))
}
for (i in 1:m)
{
mass[i]<-((nnls(-L,-W)[1])$x[i])
}
}
else
{
d1<-0;
d2<-0;
for (j2 in 1:n)
{
d1[j2]<-u[1,j2]
d2[j2]<-u[2,j2]
}
y<-0
j1=1:(n/2)
y[j1]<-log(((u[1,2*j1]+u[1,2*j1-1])^2+(u[2,2*j1]+u[2,2*j1-1])^2)^.5)
xx<-0
j1=1:n
xx[j1]<-log((u[1,j1]^2+u[2,j1]^2)^.5)
alpha.hat<-log(2)/(mean(y)-mean(xx))
for (i in 1:m)
{
for (j in 1:m)
{
q<-T[i,1]*S[1,j]+T[i,2]*S[2,j]
L[i,j]<--(abs(q))^(alpha.hat)*(1-sign(q)*tan(pi*alpha.hat/2))
}
}
for (i in 1:m)
{
hh<-T[i,1]*u[1,]+T[i,2]*u[2,]
V[i]<-complex(real=mean(cos(hh)),imaginary=mean(sin(hh)))
W[i]<-Re(log(V[i]))+Im(log(V[i]))
}
for (i in 1:m)
{
mass[i]<-((nnls(-L,-W)[1])$x[i])
}
}
return(list(alpha=alpha.hat,mass=mass))
}
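# Usage sketch for mfitstab.ustat() (assumes nnls::nnls(), called above, is
# attached): for isotropic bivariate Cauchy data alpha.hat should be near 1.
if (FALSE) {
library(nnls)
set.seed(1)
u <- matrix(rcauchy(400), ncol = 2)
mfitstab.ustat(u, m = 4, method = 1)
}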
mfitstab.elliptical<-function(yy,alpha0,Sigma0,Mu0){
s0<-Sigma0
a0<-alpha0
m0<-Mu0
b<-1;
N<-2000;
n<-length(yy[,1]);
d<-length(yy[1,]);
mm<-150;
nn<-120;
jj<-1;
Sigma.matrix<-array(0,dim=c(d,ncol=d,mm))
mu.matrix<-matrix(0,ncol=d,nrow=mm)
alpha.matrix<-c()
Sigma.matrix[,,1]<-s0
alpha.matrix[1]<-a0
mu.matrix[1,]<-m0
sm<-c()
ww<-matrix(0,ncol=d,nrow=n)
for (r in 2:mm)
{
ee<-sqrt(rexp(n,1))
k<-round(min(160,160/a0))
vv<-0.5+(exp(lgamma(a0*k/2+a0/2+1)+lgamma(a0*k/2+a0/2+d/2+1)-lgamma(a0*k/2+1)-lgamma(a0*k/2+d/2+1))/((k+1)))^(2/a0)
for (i in 1:n)
{
dis<-(yy[i,]-m0)%*%solve(s0)%*%cbind(yy[i,]-m0)
if (dis>vv)
{
s1<-s2<-0;
for (j in 1:k)
{
s1<-s1+(-1)^(j)*dis^(-a0*j/2-d/2-1)*exp(lgamma(a0*j/2+1)+lgamma(a0*j/2+d/2+1)-lgamma(j+1))*sin(j*pi*a0*.75)
s2<-s2+(-1)^(j)*dis^(-a0*j/2-d/2)*exp(lgamma(a0*j/2+1)+lgamma(a0*j/2+d/2)-lgamma(j+1))*sin(j*pi*a0*.75)
}
sm[i]<-s1/s2
}
else
{
rr<-(rstable(N,a0/2,1,(cos(pi*a0/4))^(2/a0),0,1))
ss1<-sum(rr^(-d/2-1)*exp(-.5*dis/rr),na.rm=TRUE)
ss2<-sum(rr^(-d/2)*exp(-.5*dis/rr),na.rm=TRUE)
sm[i]<-ss1/ss2
}
}
ee<-sqrt(rexp(n,1))
for (j in 1:d)
{
mu.matrix[r,j]<-suppressWarnings(sum(yy[,j]*sm,na.rm=TRUE)/sum(sm,na.rm=TRUE))
ww[,j]<-yy[,j]-m0[j]
}
m0<-mu.matrix[r,]
y<-ww/ee
Z<-c()
for (i in 1:n)
{
num<-y[i,]%*%solve(s0)%*%y[i,]
up<-d^(d/2)*exp(-d/2)/num^(d/2)
j<-1
while (j<2)
{
w<-rweibull(1,a0,1)
ex<-exp(-.5*num*w^2)
if (runif(1)<w^d*ex/up)
{
Z[i]<-w
j<-j+1
}
}
}
f<-function(p)
{
sum(-log(p[1])-(p[1]-1)*log(Z)+Z^p[1])
}
a0<-suppressWarnings(nlm(f,p<-c(a0),hessian=TRUE)$estimate)
sum<-0
for (i in 1:n)
{
sum<-sum+cbind(y[i,])%*%rbind(y[i,])*Z[i]^2
}
s0<-sum/n
Sigma.matrix[,,r]<-s0
if (a0>2)
{
a0<-1.99
}
alpha.matrix[r]<-a0
}
a1<-matrix(0,nrow=(mm-nn+1),ncol=1)
a1<-alpha.matrix[(mm-nn):mm]
s1<-matrix(0,nrow=d,ncol=d)
for (i in 1:d)
{
for (j in 1:d)
{
s1[i,j]<-mean(Sigma.matrix[i,j,(mm-nn):(mm)])
}
}
Sigma<-s1
alpha=mean(a1)
mu=apply(mu.matrix[(mm-nn):mm,],2,mean)
suppressWarnings(return(list(alpha=alpha,Sigma=Sigma,Mu=mu)))
}
ufitstab.sym<-function(yy,alpha0,sigma0,mu0)
{
n<-length(yy)
m<-120
N<-2000
alphahat<-c()
sigmahat<-c()
muhat<-c()
m0<-mu0
a<-matrix(m*3,nrow=m,ncol=3)
a[1,1]<-alpha0
a[1,2]<-sigma0
a[1,3]<-m0
a0<-alpha0
s0<-sigma0
ss<-sm<-Z1<-Z<-c()
for (r in 1:(m-1))
{
k<-round(min(165,165/a0))
ee<-sqrt(rexp(n,1))
vv<-1+2*s0*(exp(lgamma(a0*k/2+a0/2+1)+lgamma(a0*k/2+a0/2+1/2)-lgamma(a0*k/2+1)-lgamma(a0*k/2+1/2))/((k+1)))^(1/a0)
for (i in 1:n)
{
d<-abs(yy[i]-m0)
if (d>vv)
{
s1<-s2<-0
for (j in 1:k)
{
s1<-s1+(2*s0)^(a0*j+2)/(abs(d)^(a0*j+3))*(-1)^(j-1)*exp(lgamma(a0*j/2+1)+lgamma(a0*j/2+3/2)-lgamma(j+1))*sin(j*pi*a0*.75)/(pi^1.5)
s2<-s2+(2*s0)^(a0*j)/(abs(d)^(a0*j+1))*(-1)^(j-1)*exp(lgamma(a0*j/2+1)+lgamma(a0*j/2+1/2)-lgamma(j+1))*sin(j*pi*a0*.75)/(pi^1.5)
}
sm[i]<-s1/s2
}
else
{
rr<-rstable(N,a0/2,1,(cos(pi*a0/4))^(2/a0),0,1)
sm[i]<-sum(1/(rr^(1.5))*exp(-(d^2/(2*sqrt(rr)*s0)^2)),na.rm=TRUE)/sum(1/(rr^(0.5))*exp(-(d^2/(2*sqrt(rr)*s0)^2)),na.rm=TRUE)
}
}
m0<-sum(yy*sm,na.rm=TRUE)/sum(sm,na.rm=TRUE)
y<-(yy-m0)/ee
for (i in 1:n)
{
y0<-y[i]
j<-1
while (j<2)
{
tt<-rweibull(1,a0,1)
ra<-exp(-.5)/(sqrt(2*pi)*abs(y0))
u<-runif(1)
if (u<dnorm(y0,0,sqrt(2)*s0/tt)/ra)
{
Z1[j]<-tt
j<-j+1
}
}
Z[i]<-Z1
}
f<-function(p){sum(-log(p[1])-(p[1]-1)*log(Z)+Z^p[1])}
out<-suppressWarnings(nlm(f, p<-c(a0), hessian=FALSE))
a0<-out$estimate[]
s0<-sqrt(sum(y^2*Z^2)/(2*n))
a[r+1,3]<-m0
a[r+1,2]<-s0
if (a0>2){a0<-1.95}
a[r+1,1]<-a0
}
return((list(alpha=mean(a[(m-30):m,1]),sigma=mean(a[(m-30):m,2]),mu=mean(a[(m-30):m,3]))))
}
ufitstab.sym.mix<-function(yy,k,omega0,alpha0,sigma0,mu0)
{
n<-length(yy)
m<-150
N<-4000
mu.matrix<-matrix(m*k,ncol=k,nrow=m)
sigma.matrix<-matrix(m*k,ncol=k,nrow=m)
alpha.matrix<-matrix(m*k,ncol=k,nrow=m)
p.matrix<-matrix(m*k,ncol=k,nrow=m)
tau.matrix<-matrix(n*k,ncol=k,nrow=n)
d<-matrix(n*k,ncol=k,nrow=n)
sm<-matrix(n*k,ncol=k,nrow=n)
ss<-matrix(n*k,ncol=k,nrow=n)
clustering<-rep(0,length(yy))
vv<-c()
mu.matrix[1,]<-mu0
p.matrix[1,]<-omega0
alpha.matrix[1,]<-alpha0
sigma.matrix[1,]<-sigma0
p0<-p.matrix[1,]
a0<-alpha.matrix[1,]
s0<-sigma.matrix[1,]
m0<-mu.matrix[1,]
a11<-matrix(0,ncol=5,nrow=k)
a12<-matrix(0,ncol=5,nrow=k)
for (r in 2:m)
{
for (j in 1:n)
{
for (ii in 1:k)
{
kk<-round(min(168,168/a0[ii]))
vv[ii]<-2+2*s0[ii]*(exp(lgamma(a0[ii]*kk/2+a0[ii]/2+1)+lgamma(a0[ii]*kk/2+a0[ii]/2+1/2)-lgamma(a0[ii]*kk/2+1)-lgamma(a0[ii]*kk/2+1/2))/((kk+1)))^(1/a0[ii])
d<-abs(yy[j]-m0[ii])
if (d>vv[ii])
{
s.1<-s.2<-0
for (jj in 1:kk)
{
s.1<-s.1+(2*s0[ii])^(a0[ii]*jj+2)/(abs(d)^(a0[ii]*jj+3))*(-1)^(jj-1)*exp(lgamma(a0[ii]*jj/2+1)+lgamma(a0[ii]*jj/2+3/2)-lgamma(jj+1))*sin(jj*pi*a0[ii]*.75)/(pi^1.5)
s.2<-s.2+(2*s0[ii])^(a0[ii]*jj+0)/(abs(d)^(a0[ii]*jj+1))*(-1)^(jj-1)*exp(lgamma(a0[ii]*jj/2+1)+lgamma(a0[ii]*jj/2+1/2)-lgamma(jj+1))*sin(jj*pi*a0[ii]*.75)/(pi^1.5)
}
sm[j,ii]<-s.1/s.2
}
else
{
rr<-rstable(N,a0[ii]/2,1,(cos(pi*a0[ii]/4))^(2/a0[ii]),0,1)
ss1<-sum(1/rr^(1.5)*exp(-(yy[j]-m0[ii])^2/(2*sqrt(rr)*s0[ii])^2),na.rm=TRUE)
ss2<-sum(1/rr^(0.5)*exp(-(yy[j]-m0[ii])^2/(2*sqrt(rr)*s0[ii])^2),na.rm=TRUE)
sm[j,ii]<-ss1/ss2
}
s.pdf<-0
for (mm in 1:k)
{
s.pdf<-s.pdf+p0[mm]*dstable(yy[j],a0[mm],0,s0[mm],m0[mm],1)
}
tau.matrix[j,ii]<-p0[ii]*dstable(yy[j],a0[ii],0,s0[ii],m0[ii],1)/s.pdf
}
}
for (ii in 1:k)
{
mu.matrix[r,ii]<-sum(yy*sm[,ii]*tau.matrix[,ii],na.rm=TRUE)/sum(sm[,ii]*tau.matrix[,ii],na.rm=TRUE)
m0[ii]<-mu.matrix[r,ii]
p0[ii]<-sum(tau.matrix[,ii])/n
p.matrix[r,ii]<-p0[ii]
}
z<-matrix(0,ncol=k,nrow=n)
for (j in 1:n)
{
max<-tau.matrix[j,1]
tt<-1
for (ii in 2:k)
{
if (tau.matrix[j,ii]> max)
{
max<-tau.matrix[j,ii]
tt<-ii
}
}
z[j,tt]<-1
}
for (bb in 1:k)
{
for (rrr in 1:5)
{
n00<-length(yy[z[,bb]==1])
y00<-(yy[z[,bb]==1]-m0[bb])/sqrt(rexp(n00,1))
Z<-c()
for (i in 1:n00)
{
up<-exp(-.5)/(sqrt(2*pi)*abs(y00[i]))
j<-1
while (j<2)
{
w<-rweibull(1,a0[bb],1)
ex<-dnorm(y00[i],0,sqrt(2)*s0[bb]/w)
if (runif(1)<ex/up)
{
Z[i]<-w
j<-j+1
}
if (ex==0)
{
Z[i]<-sqrt(2)*s0[bb]/abs(y00[i])
j<-j+1
}
}
}
f<-function(v){sum(-log(v[1])-(v[1]-1)*log(Z)+Z^v[1])}
out<-suppressWarnings(nlm(f,v<-c(a0[bb]),hessian=FALSE))
a11[bb,rrr]<-out$estimate[]
if (a11[bb,rrr]>2)
{
a11[bb,rrr]<-1.99
}
a12[bb,rrr]<-sqrt(sum(y00^2*Z^2,na.rm=TRUE)/(2*n00))
}
alpha.matrix[r,bb]<-mean(a11[bb,])
sigma.matrix[r,bb]<-mean(a12[bb,])
a0[bb]<-alpha.matrix[r,bb]
s0[bb]<-sigma.matrix[r,bb]
}
}
for (i in 1:length(yy)){clustering[i]<-which(z[i,]==1)[1]}
return(list(omega=apply(p.matrix[(m-50):m,],2,mean),alpha=apply(alpha.matrix[(m-50):m,],2,mean),sigma=apply(sigma.matrix[(m-50):m,],2,mean),mu=apply(mu.matrix[(m-50):m,],2,mean),cluster=clustering))
}
ufitstab.cauchy<-function(y,beta0,sigma0,mu0,param)
{
stopifnot(-1<=beta0,beta0<=1,length(beta0)==1,0<=sigma0,length(mu0)==1,length(param)==1,param %in% 0:1)
n<-length(y)
ep11<-ep1<-ep2<-ep22<-ep222<-ep11<-ep0<-m<-k<-t1<-t2<-c()
t2[1]<-sigma0*beta0
t1[1]<-max(sigma0*(1-abs(beta0)),.001)
m[1]<-mu0
nn<-1000;mm<-950
for(j in 1:nn)
{
for(i in 1:n)
{
p2<-rstable(3000,1,1,1,0,1)
k<-(y[i]-m[j]-t2[j]*p2)/t1[j]
tt<-0;r<-0;
dy<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[j])*mean(p2^r/(1+k^2)^(tt/2+1))
tt<-2;r<-2;
ep22[i]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[j])*mean(p2^r/(1+k^2)^(tt/2+1))/dy
tt<-2;r<-1;
ep2[i]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[j])*mean(p2^r/(1+k^2)^(tt/2+1))/dy
tt<-2;r<-0;
ep1[i]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[j])*mean(p2^r/(1+k^2)^(tt/2+1))/dy
}
m[j]<-(sum(y*ep1,na.rm=TRUE)-t2[j]*sum(ep2,na.rm=TRUE))/sum(ep1,na.rm=TRUE)
m[j+1]<-m[j]
t1[j]<-sqrt((sum((y-m[j])^2*ep1)+t2[j]^2*sum(ep22)-2*t2[j]*sum((y-m[j])*ep2))/n)
t1[j+1]<-t1[j]
t2[j]<-sum((y-m[j])*ep2)/sum(ep22)
t2[j+1]<-t2[j]
}
beta.hat<-uniroot(function(p) mean(t2[mm:nn])/mean(t1[mm:nn])-p/(1-abs(p)),c(-.999999,.999999))$root
sigma.hat<-min(c(mean(t2[mm:nn])/beta.hat, mean(t1[mm:nn])/(1-abs(beta.hat))))
mu.hat<-mean(m[mm:nn])
if (param==0)
{
return(list(beta=beta.hat,sigma=sigma.hat,mu=mu.hat))
}
else
{
return(list(beta=beta.hat,sigma=sigma.hat,mu=(mu.hat-2/pi*beta.hat*sigma.hat*log(sigma.hat))))
}
}
ufitstab.cauchy.mix<-function(y,k,omega0,beta0,sigma0,mu0)
{
stopifnot(-1<=beta0,beta0<=1,length(beta0)==k,0<=sigma0,length(mu0)==k,length(sigma0)==k,sum(omega0)==1,0<omega0,omega0<1)
n <- length(y)
MM <- 1300
NN <- 1500
m <- 1500
N <- 2000
estim.matrix <- array(0, dim = c(4, ncol = k, m))
mu.matrix <- matrix(m * k, ncol = k, nrow = m)
t1.matrix <- matrix(m * k, ncol = k, nrow = m)
t2.matrix <- matrix(m * k, ncol = k, nrow = m)
p.matrix <- matrix(m * k, ncol = k, nrow = m)
tau.matrix <- matrix(m * k, ncol = k, nrow = n)
e1ij <- matrix(m * k, ncol = k, nrow = n)
e2ij <- matrix(m * k, ncol = k, nrow = n)
e3ij <- matrix(m * k, ncol = k, nrow = n)
e4ij <- matrix(m * k, ncol = k, nrow = n)
mu.matrix[1, ] <- mu0
p.matrix[1, ] <- omega0
t2.matrix[1, ] <- sigma0 * beta0
t1.matrix[1, ] <- sigma0 * (1 - abs(beta0))
p0 <- p.matrix[1, ]
t1 <- t1.matrix[1, ]
t2 <- t2.matrix[1, ]
m0 <- mu.matrix[1, ]
b0 <- s0 <- dy <- c()
clustering<-rep(0,n)
for (r in 2:m)
{
for (i in 1:n)
{
for (bb in 1:k)
{
p2<-rstable(N,1,1,1,0,1)
t1[bb]<-ifelse (abs(t1[bb])< 0.000001,t1[bb]<-.000001,t1[bb])
kk<-(y[i]-m0[bb]-t2[bb]*p2)/t1[bb]
tt<-0;rr<-0;
dy[bb]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[bb])*mean(p2^rr/(1+kk^2)^(tt/2+1),na.rm=TRUE)
tt<-2;rr<-2;
e4ij[i,bb]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[bb])*mean(p2^rr/(1+kk^2)^(tt/2+1),na.rm=TRUE)/dy[bb]
tt<-2;rr<-1;
e3ij[i,bb]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[bb])*mean(p2^rr/(1+kk^2)^(tt/2+1),na.rm=TRUE)/dy[bb]
tt<-2;rr<-0;
e2ij[i,bb]<-2^(tt/2)*gamma(tt/2+1)/(pi*t1[bb])*mean(p2^rr/(1+kk^2)^(tt/2+1),na.rm=TRUE)/dy[bb]
}
for (aa in 1:k)
{
e1ij[i,aa]<-p0[aa]*dy[aa]/sum(p0*dy,na.rm=TRUE)
}
}
for (ii in 1:k)
{
mu.matrix[r,ii]<-(sum(y*e2ij[,ii]*e1ij[,ii],na.rm=TRUE)-t2[ii]*sum(e3ij[,ii]*e1ij[,ii],na.rm=TRUE))/
sum(e1ij[,ii]*e2ij[,ii],na.rm=TRUE)
m0[ii]<-mu.matrix[r,ii]
t1.matrix[r,ii]<-sqrt((sum((y-m0[ii])^2*e2ij[,ii]*e1ij[,ii],na.rm=TRUE)-2*t2[ii]*
sum((y-m0[ii])*e3ij[,ii]*e1ij[,ii],na.rm=TRUE)+t2[ii]^2*sum(e4ij[,ii]*e1ij[,ii],na.rm=TRUE))/sum(e1ij[,ii],na.rm=TRUE))
t1[ii]<-t1.matrix[r,ii]
t2.matrix[r,ii]<-sum((y-m0[ii])*e3ij[,ii]*e1ij[,ii],na.rm=TRUE)/sum(e4ij[,ii]*e1ij[,ii],na.rm=TRUE)
t2[ii]<-t2.matrix[r,ii]
p.matrix[r,ii]<-sum(e1ij[,ii])/n
p0[ii]<-p.matrix[r,ii]
}
z<-matrix(0,ncol=k,nrow=n)
for (j in 1:n)
{
max<-e1ij[j,1]
uu<-1
for (ii in 2:k)
{
if (e1ij[j,ii]> max)
{
max<-e1ij[j,ii]
uu<-ii
}
}
z[j,uu]<-1
}
for (aa in 1:k)
{
b0[aa]<-suppressWarnings(uniroot(function(p) t2[aa]/t1[aa]-p/(1-abs(p)),c(-.9999999,.9999999))$root)
s0[aa]<-t1[aa]/(1-abs(b0[aa]))
}
estim.matrix[1,,r]<-p0
estim.matrix[2,,r]<-b0
estim.matrix[3,,r]<-s0
estim.matrix[4,,r]<-m0
}
estim.matrix[1,,1]<-omega0
estim.matrix[2,,1]<-beta0
estim.matrix[3,,1]<-sigma0
estim.matrix[4,,1]<-mu0
for (i in 1:length(y)){clustering[i]<-which(z[i,]==1)[1]}
return(list(omega=apply(estim.matrix[1,,(MM:NN)],1,mean),beta=apply(estim.matrix[2,,(MM:NN)],1,mean),sigma=apply(estim.matrix[3,,(MM:NN)],1,mean),mu=apply(estim.matrix[4,,(MM:NN)],1,mean),cluster=clustering))
}
ufitstab.skew<-function(y,alpha0,beta0,sigma0,mu0,param)
{
stopifnot(length(y)>=4,0<alpha0,alpha0<=2,alpha0!=1,length(alpha0)==1,-1<=beta0,beta0<=1,length(beta0)==1,0<=sigma0,length(sigma0)==1,length(param)==1, param %in% 0:1)
n<-length(y)
M<-100;N0<-100;N<-120;
sss<-m<-s<-a<-b<-ep11<-ep1<-ep2<-ep3<-a.estim<-c()
m[1]<-mu0;s[1]<-sigma0;b[1]<-beta0;a[1]<-alpha0
for(j in 1:N)
{
pi1<-matrix(suppressWarnings(rstable(M^2,a[j]/2,1,(cos(pi*a[j]/4))^(2/a[j]),0,1)),M,M)
pi2<-matrix(suppressWarnings(rstable(M^2,a[j],1,1,0,1)),M,M)
for(i in 1:n)
{
ss<-dnorm(y[i],m[j]-s[j]*b[j]*tan(pi*a[j]/2)+sign(b[j])*abs(b[j])^(1/a[j])*s[j]*pi2,sd=sqrt(2*pi1)*s[j]*(1-abs(b[j]))^(1/a[j]))
ep11[i]<-mean(ss,na.rm=TRUE)
dy<-ep11[i]
ep1[i]<-mean(ss/pi1,na.rm=TRUE)/dy
ep2[i]<-mean(ss*pi2/pi1,na.rm=TRUE)/dy
ep3[i]<-mean((ss*pi2^2/pi1),na.rm=TRUE)/dy
}
m[j+1]<-(sum((y+s[j]*b[j]*tan(pi*a[j]/2))*ep1,na.rm=TRUE)-s[j]*sign(b[j])*abs(b[j])^(1/a[j])*sum(ep2,na.rm=TRUE))/sum(ep1,na.rm=TRUE)
fs<-function(p){.5*sum((y-m[j])^2*ep1)/(abs(p)^3*(1-abs(b[j]))^(2/a[j]))+.5*b[j]*(tan(pi*a[j]/2))*sum((y-m[j])*ep1)/(p^2*(1-abs(b[j]))^(2/a[j]))-.5*sign(b[j])*abs(b[j])^(1/a[j])*sum((y-m[j])*ep2)/(p^2*(1-abs(b[j]))^(2/a[j]))-n/abs(p)}
s[j+1]<-suppressWarnings(uniroot(fs,c(0.000000001,10000000))$root)
fb<-function(p){.25*sum((y-m[j])^2*ep1)/(s[j]^2*(1-abs(p))^(2/a[j]))+.25*p^2*(tan(pi*a[j]/2))^2*sum(ep1)/((1-abs(p))^(2/a[j]))+.25*abs(p)^(2/a[j])*sum(ep3)/((1-abs(p))^(2/a[j]))+.5*p*(tan(pi*a[j]/2))*sum((y-m[j])*ep1)/(s[j]*(1-abs(p))^(2/a[j]))-.5*abs(p)^(1+1/a[j])*(tan(pi*a[j]/2))*sum(ep2)/((1-abs(p))^(2/a[j]))-.5*sign(p)*abs(p)^(1/a[j])*sum((y-m[j])*ep2)/(s[j]*(1-abs(p))^(2/a[j]))+sum(1/a[j]*(log(1-abs(p))))}
b[j+1]<-suppressWarnings(optimize(fb,lower=-.999999,upper=.999999))$minimum[[1]]
st<-suppressWarnings(rstable(n,a[j],1,1,0,1))
sss[j]<-s[j]*(1+abs(b[j]))^(1/a[j])
yy<-(y-m[j]-s[j]*sign(b[j])*(abs(b[j]))^(1/a[j])*st)/sss[j]
for (ii in 1:20)
{
nn<-length(yy)
Z1<-c()
Z<-c()
yyy<-(yy)/(sqrt(rexp(nn,1)))
for (i in 1:nn)
{
y0<-yyy[i]
jj<-1
while (jj<2)
{
tt<-rweibull(1,a[j],1)
ra<-exp(-.5)/(sqrt(2*pi)*abs(y0))
u<-runif(1)
if (u<dnorm(y0,0,sqrt(2)/tt)/ra)
{
Z1[jj]<-tt
jj<-jj+1
}
}
Z[i]<-Z1
}
f<-function(p){sum(-log(p[1])-(p[1]-1)*log(Z)+Z^p[1])}
a.hat<-suppressWarnings(nlm(f, p<-c(a[j])))$estimate[]
if (a.hat>1.99){a.hat<-1.98}
a.estim[ii]<-a.hat
}
a[j+1]<-mean(a.estim[10:20])
}
alpha.hat<-mean(a[N0:N])
mu.hat<-mean(m[N0:N])
sigma.hat<-mean(s[N0:N])
w<-function(p){-sum(log(dstable(y,alpha.hat,p,sigma.hat,mu.hat,0)))}
beta.hat<-suppressWarnings(optimize(w,c(-1,1)))$minimum
if(param==0)
{
return(list(alpha=alpha.hat,beta=beta.hat,sigma=sigma.hat,mu=mu.hat))
}
else
{
return(list(alpha=alpha.hat,beta=beta.hat,sigma=sigma.hat,mu=mu.hat-beta.hat*sigma.hat*tan(pi*alpha.hat/2)))
}
}
ufitstab.ustat<-function(x)
{
n<-length(x)
s1<-s2<-0
for (i in 1:(n-1))
{
for (j in (i+1):n)
{
s1<-s1+(log(abs(x[i]+x[j]))-(log(abs(x[i]))+log(abs(x[j])))/2)/log(2)
s2<-s2+(1+0.57721566/log(2))*(log(abs(x[i]))+log(abs(x[j])))/2-0.57721566/log(2)*log(abs(x[i]+x[j]))+0.57721566
}
}
return(list(alpha=n*(n-1)/(2*s1),sigma=exp(2*s2/(n*(n-1)))))
}
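# Self-contained check of ufitstab.ustat(): standard Cauchy data is stable
# with alpha = 1 and sigma = 1, so both estimates should be close to 1.
if (FALSE) {
set.seed(1)
ufitstab.ustat(rcauchy(300))
}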
|
library(googleAnalyticsR)
library(plotly)
library(scales)
library(dplyr)
library(purrr)
library(ggplot2)
data_f <- function(view_id,
date_range = c(Sys.Date() - 365, Sys.Date() - 1),
page_filter_regex = ".*", ...) {
page_filter_object <- dim_filter("pagePath",
operator = "REGEXP",
expressions = page_filter_regex)
page_filter <- filter_clause_ga4(list(page_filter_object), operator = "AND")
google_analytics(
viewId = view_id,
date_range = date_range,
metrics = "uniquePageviews",
dimensions = c("date", "pagePath"),
dim_filters = page_filter,
max = 10000,
order = order_type("uniquePageviews", "DESCENDING"),
anti_sample = FALSE)
}
model_f <- function(
ga_data,
first_day_pageviews_min = 1,
total_unique_pageviews_cutoff = 100,
days_live_range = 365, ...) {
normalize_date_start <- function(page) {
ga_data_single_page <- ga_data %>%
filter(pagePath == page)
first_live_row <- min(which(ga_data_single_page$uniquePageviews > first_day_pageviews_min))
ga_data_single_page <- ga_data_single_page[first_live_row:nrow(ga_data_single_page), ]
normalized_results <- data.frame(
date = seq.Date(from = min(ga_data_single_page$date),
to = max(ga_data_single_page$date),
by = "day"),
days_live = seq(min(ga_data_single_page$date):max(ga_data_single_page$date)), page = page) %>%
left_join(ga_data_single_page, by = "date") %>%
mutate(uniquePageviews = ifelse(is.na(uniquePageviews),
0,
uniquePageviews)) %>%
mutate(cumulative_uniquePageviews = cumsum(uniquePageviews)) %>%
dplyr::select(page, days_live, uniquePageviews, cumulative_uniquePageviews)
}
pages_list <- ga_data %>%
group_by(pagePath) %>%
summarise(total_traffic = sum(uniquePageviews)) %>%
filter(total_traffic > total_unique_pageviews_cutoff)
ga_data_normalized <- map_dfr(pages_list$pagePath, normalize_date_start)
ga_data_normalized %>% filter(days_live <= days_live_range)
}
output_f <- function(ga_data_normalized, ...) {
gg <- ggplot(ga_data_normalized, mapping = aes(x = days_live, y = cumulative_uniquePageviews,
color = page)) + geom_line() + scale_y_continuous(labels = comma) + labs(title = "Unique Pageviews by Day from Launch",
x = "
theme_light() + theme(panel.grid = element_blank(), panel.border = element_blank(),
legend.position = "none", panel.grid.major.y = element_line(color = "gray80"),
axis.ticks = element_blank())
ggplotly(gg)
}
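# End-to-end usage sketch (view ID is hypothetical; assumes prior
# googleAnalyticsR authentication via ga_auth()):
if (FALSE) {
ga_auth()
ga_data <- data_f(view_id = 123456, page_filter_regex = "^/blog/")
ga_data_normalized <- model_f(ga_data, total_unique_pageviews_cutoff = 500)
output_f(ga_data_normalized)
}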
|
library(testthat)
library(rly)
context("Attempt to define a rule named 'error'")
Parser <- R6::R6Class("Parser",
public = list(
tokens = c('NAME','NUMBER', 'PLUS','MINUS','TIMES','DIVIDE','EQUALS', 'LPAREN','RPAREN'),
precedence = list(c('left','PLUS','MINUS'),
c('left','TIMES','DIVIDE','MINUS'),
c('right','UMINUS')),
names = new.env(hash=TRUE),
p_statement_assign = function(doc='statement : NAME EQUALS expression', p) {
self$names[[as.character(p$get(2))]] <- p$get(4)
},
p_statement_expr = function(doc='statement : expression', p) {
cat(p$get(2))
cat('\n')
},
p_expression_binop = function(doc='expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression', p) {
if(p$get(3) == 'PLUS') p$set(1, p$get(2) + p$get(4))
else if(p$get(3) == 'MINUS') p$set(1, p$get(2) - p$get(4))
else if(p$get(3) == 'TIMES') p$set(1, p$get(2) * p$get(4))
else if(p$get(3) == 'DIVIDE') p$set(1, p$get(2) / p$get(4))
},
p_expression_uminus = function(doc='expression : MINUS expression %prec UMINUS', p) {
p$set(1, -p$get(3))
},
p_expression_group = function(doc='expression : LPAREN expression RPAREN', p) {
p$set(1, p$get(3))
},
p_expression_number = function(doc='expression : NUMBER', p) {
p$set(1, p$get(2))
},
p_expression_name = function(doc='expression : NAME', p) {
p$set(1, self$names[[as.character(p$get(2))]])
},
p_error_handler = function(doc='error : NAME', p) { },
p_error = function(p) { }
)
)
test_that("error", {
expect_output(expect_error(rly::yacc(Parser), "\\[YaccError\\]Unable to build parser"),
"ERROR .* \\[GrammarError\\]p_error_handler: Illegal rule name error. Already defined as a token")
})
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
fig.height = 5,
fig_width = 8
)
library(avocado)
library(dplyr)
library(ggplot2)
data('hass_region')
dplyr::glimpse(hass_region)
hass_region %>%
mutate(
year = lubridate::year(week_ending)
) %>%
filter(year == 2019) %>%
group_by(year, region) %>%
summarize(
total_revenue = sum((avg_price_nonorg)*(plu4046 + plu4225 + plu4770 + small_nonorg_bag + large_nonorg_bag + xlarge_nonorg_bag) + (avg_price_org)*(plu94046 + plu94225 + plu94770 + small_org_bag + large_org_bag + xlarge_org_bag)),
.groups = 'drop'
) %>%
slice(which.max(total_revenue))
hass_region %>%
mutate(
year = lubridate::year(week_ending)
) %>%
filter(year == 2019) %>%
group_by(week_ending, region) %>%
summarize(
total_revenue = sum((avg_price_nonorg)*(plu4046 + plu4225 + plu4770 + small_nonorg_bag + large_nonorg_bag + xlarge_nonorg_bag) + (avg_price_org)*(plu94046 + plu94225 + plu94770 + small_org_bag + large_org_bag + xlarge_org_bag)),
.groups = 'drop'
) %>%
ggplot(aes(x = week_ending)) +
geom_line(aes(y = total_revenue, color = region)) +
labs(x = 'Month/Year', y = 'Revenue (US$)', title = 'Total Revenue for 2019 Across Regions', caption = 'Source: Hass Avocado Board\nNot adjusted for inflation') +
scale_color_manual(name = 'Region', values = c('California' = 'orange', 'Great Lakes' = 'blue', 'Midsouth' = 'yellow', 'Northeast' = 'steelblue', 'Plains' = 'darkgreen', 'South Central' = 'red', 'Southeast' = 'magenta', 'West' = 'darkgray')) +
theme(plot.background = element_rect(fill = "grey20"),
plot.title = element_text(color = "#FFFFFF"),  # hex color truncated in the source; white assumed
axis.title = element_text(color = "#FFFFFF"),  # hex color truncated in the source; white assumed
axis.text.x = element_text(color = 'grey50', angle = 45, hjust = 1),
axis.text.y = element_text(color = 'grey50'),
plot.caption = element_text(color = 'grey75'),
panel.background = element_blank(),
panel.grid.major = element_line(color = "grey50", size = 0.2),
panel.grid.minor = element_line(color = "grey50", size = 0.2),
legend.background = element_rect(fill = 'grey20'),
legend.key = element_rect(fill = 'grey20'),
legend.title = element_text(color = 'grey75'),
legend.text = element_text(color = 'grey75')
)
hass_region %>%
group_by(region) %>%
summarize(
total_revenue = sum((avg_price_nonorg)*(plu4046 + plu4225 + plu4770 + small_nonorg_bag + large_nonorg_bag + xlarge_nonorg_bag) + (avg_price_org)*(plu94046 + plu94225 + plu94770 + small_org_bag + large_org_bag + xlarge_org_bag)),
.groups = 'drop'
) %>%
arrange(desc(region)) %>%
mutate(
prop = round(total_revenue / sum(total_revenue) * 100),
ypos = cumsum(prop) - (0.5*prop),
txt = paste0(region, ': ', prop,'%')
) %>%
ggplot(aes(x = "", y = prop, fill = region)) +
geom_bar(stat = 'identity', width = 1, color = 'white') +
coord_polar(theta = 'y') +
theme_void() +
ggrepel::geom_label_repel(aes(y = ypos, label = txt), show.legend = FALSE, color = 'black', size = 3, nudge_x = 1) +
labs(title = 'Revenue Proportion by Region', caption = 'Source: Hass Avocado Board') +
theme(
plot.title = element_text(color = "#FFFFFF"),  # hex color truncated in the source; white assumed
plot.caption = element_text(color = 'grey75'),
panel.background = element_blank(),
legend.position = 'none'
)
|
context("parse_text")
describe("parse expressions like parse(text = ...)", {
it("can parse sinlge expression", {
expr <- "1+1"
res <- as.character(parse(text = expr))
expect_identical(as.character(parse_text(expr)), res)
})
it("can parse multiple expressions", {
expr <- "1+1; ls()"
res <- as.character(parse(text = expr))
expr <- c("1+1", "ls()")
res <- as.character(parse(text = expr))
})
it("produces NA on incomplete expression and try-error on wrong expression", {
expr <- "1 +"
expect_true(is.na(parse_text(expr)))
expr <- "1+)"
expect_is(parse_text(expr), "try-error")
})
it("captures the error message for a wrong expression", {
get_error_msg <- function(text) {
res <- try(parse(text = text), silent = TRUE)
if (inherits(res, "try-error")) {
res <- sub("^.*<text>:", "", as.character(res))
res <- sub("\n$", "", res)
return(res)
} else return("")
}
expr <- "1+)"
})
})
|
create.tags <-
function(mat){
n <- NROW(mat)
k <- NCOL(mat)
cnms <- colnames(mat)
resind <- matrix(0, nrow=k*(k-1)/2, ncol=2)
ind1 <- 1:n
j0 <- 1
for(i in 1:(k-1)){
ind2 <- ind1 + n
for(j in (i+1):k){
j0 <- j0+1
resind[j0/2, 1] <- i
resind[j0/2, 2] <- j
j0 <- j0 + 1
ind2 <- ind2 + n
}
ind1 <- ind1 + n
}
col.means <- colMeans(mat)
col.norms <- sqrt(colSums(mat*mat))
list(tags=resind,col.means=col.means, col.norms=col.norms)
}
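# Small self-contained example for create.tags(): the tags matrix indexes
# every unordered pair of columns of the input.
if (FALSE) {
m <- matrix(1:12, nrow = 4, dimnames = list(NULL, c("a", "b", "c")))
create.tags(m)$tags  # rows (1,2), (1,3), (2,3)
}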
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
error = TRUE
)
library(erify)
emphasize <- function(what, n) {
for (i in 1:n) {
cat(what, "\n")
}
}
emphasize("You're beautiful!", 3)
emphasize(c, 3)
emphasize <- function(what, n) {
check_type(what, "character")
for (i in 1:n) {
cat(what, "\n")
}
}
emphasize(c, 3)
emphasize <- function(what, n) {
check_type(what, "character")
check_length(what, 1)
for (i in 1:n) {
cat(what, "\n")
}
}
emphasize(c("apple", "orange"), 3)
emphasize <- function(what, n) {
check_type(what, "character")
check_length(what, c(0, NA))
for (i in 1:n) {
cat(what, "\n")
}
}
emphasize(character(0), 3)
emphasize("You're beautiful again!", 3)
arg <- "I'm invalid."
check_content(arg, c("yes", "no"))
check_content(arg, c("yes", "no"), general = "You are wrong.")
check_content(arg, c("yes", "no"), specific = "You are wrong.")
supplement <- c(x = "You're wrong.", i = "But you're beautiful.")
check_content(arg, c("yes", "no"), supplement = supplement)
general <- "You're beautiful."
specifics <- c(
i = "Your eyes are big.",
i = "Your hair is long.",
x = "But you broke my heart."
)
throw(general, specifics)
throw(general, specifics, as = "message")
check_positive <- function(x) {
check_type(x, c("integer", "double"))
check_length(x, 1)
if (is.na(x) || x <= 0) {
general <- "`x` must be a positive number."
specifics <- "`x` is `{x}`."
throw(general, specifics, env = list(x = x))
}
}
check_positive(-2)
x <- c("Pink Floyd", "Pink Freud", "Pink Florida")
join(x, "and")
cat(back_quote(x))
back_quote(c(1, 2, NA))
arg <- "Pink Florence"
check_content(arg, x)
is_rmd()
where()
is_rstudio()
is_jupyter()
|
context("asR test")
test_that("test of the asR functions", {
skip_on_cran()
julia <- julia_setup(installJulia = TRUE)
expect_equal(as.double(julia_eval("1//2")), 0.5)
expect_equal(as.double(julia_eval("[1//2, 3//4]")), c(0.5, 0.75))
})
|
context("Testing bootstrap functions")
test_that("auc boot functions", {
set.seed(123)
n <- 100
p <- 1
X <- data.frame(matrix(rnorm(n*p), nrow = n, ncol = p))
Y <- rbinom(n, 1, plogis(0.2 * X[,1]))
boot1 <- boot_auc(Y = Y, X = X, B = 10)
boot2 <- boot_auc(Y = Y, X = X, B = 10, correct632 = TRUE)
lpo <- lpo_auc(Y = Y, X = X, max_pairs = 10)
expect_true(is.numeric(boot1$auc))
expect_true(is.numeric(boot2$auc))
expect_true(is.numeric(lpo$auc))
expect_true(boot1$auc >= 0 & boot1$auc <= 1)
expect_true(boot2$auc >= 0 & boot2$auc <= 1)
expect_true(lpo$auc >= 0 & lpo$auc <= 1)
})
test_that("scrnp boot functions", {
set.seed(123)
n <- 100
p <- 1
X <- data.frame(matrix(rnorm(n*p), nrow = n, ncol = p))
Y <- rbinom(n, 1, plogis(0.2 * X[,1]))
boot1 <- boot_scrnp(Y = Y, X = X, B = 10)
boot2 <- boot_scrnp(Y = Y, X = X, B = 10, correct632 = TRUE)
expect_true(is.numeric(boot1$scrnp))
expect_true(is.numeric(boot2$scrnp))
})
|
family.hdlm <-
function(object, ...) { gaussian() }
|
weaver <- function()
{
list(setup = weaverLatexSetup,
runcode = weaverRuncode,
writedoc = RweaveLatexWritedoc,
finish = weaverLatexFinish,
checkopts = RweaveLatexOptions)
}
weaverLatexSetup <-
function(file, syntax,
output=NULL, quiet=FALSE, debug=FALSE, echo=TRUE,
eval=TRUE, keep.source=FALSE, split=FALSE, stylepath=TRUE,
pdf=TRUE, eps=TRUE, use.cache=TRUE)
{
if (!quiet)
cat("Working dir:", getwd(), "\n")
log_debug(paste("Working dir:", getwd()))
res <- RweaveLatexSetup(file, syntax, output=output, quiet=quiet,
debug=debug, echo=echo, eval=eval,
keep.source=keep.source,
split=split, stylepath=stylepath, pdf=pdf,
eps=eps)
res$options[["use.cache"]] <- use.cache
res$options[["cache"]] <- FALSE
res$options <- RweaveLatexOptions(res$options)
res
}
resetStorage <- function(fun) {
storage <- environment(fun)
storage[["hashDeps"]] <- new.env(parent=emptyenv())
storage[["sym2hash"]] <- new.env(parent=emptyenv())
}
weaverRemoveOrphans <- function(object, options) {
if(!options$use.cache || !options$cache)
return(NULL)
chunk <- options$label
cachedir <- file.path(get_cache_dir(CACHE_DIR),
get_chunk_id(chunk, options$chunknr))
curhashes <- sort(ls(environment(cache_expr)$hashDeps))
expPat1 <- paste(".*\\", CACHE_EXT, "$", sep="")
expPat2 <- paste("\\", CACHE_EXT, sep="")
hashfiles <- list.files(cachedir, pattern=expPat1)
hashfiles <- sort(sub(expPat2, "", hashfiles))
orphans <- hashfiles[!hashfiles %in% curhashes]
if (length(orphans)) {
if (!object$quiet)
cat(" Removing orphaned cache files:\n")
for (orph in orphans) {
if (!object$quiet)
cat(paste(" ", orph, ".RData", sep=""), "\n")
orph <- paste(cachedir, "/", orph, CACHE_EXT, sep="")
tryCatch(file.remove(orph), error=function(e) NULL)
}
}
}
weaverLatexFinish <- function(object, error=FALSE) {
resetStorage(cache_expr)
RweaveLatexFinish(object, error)
}
weaverRuncode <- makeRweaveLatexCodeRunner(evalFunc=weaverEvalWithOpt)
weaverEvalWithOpt <- function (expr, options, quiet=FALSE){
res <- NULL
if(options$eval){
label <- options$label
chunkNum <- options$chunknr
if(options$use.cache && options$cache) {
expr <- substitute(cache_expr(e, chunk.name=n, chunk.num=i,
quiet=q),
list(e=expr, n=label, i=chunkNum, q=quiet))
}
res <- try(withVisible(eval(expr, .GlobalEnv)), silent=TRUE)
if(inherits(res, "try-error")) return(res)
if(options$print | (options$term & res$visible))
print(res$value)
}
return(res)
}
|
is.dendro <- function(dm.data)
{
nm <- deparse(substitute(dm.data))
if(!is.data.frame(dm.data)) {
warning(paste("'", nm, "' is not a data frame", sep = ""))
return(FALSE)
}
tst.timestamp.data <- try(is(as.POSIXct(rownames(dm.data)),"POSIXct"), silent = TRUE)
if(!isTRUE(tst.timestamp.data)){
warning(paste("rownames of '", nm, "' is not a timestamp or contains errors", sep = ""))
return(FALSE)
}
if(length(rownames(dm.data)) != length(unique(rownames(dm.data)))) {
warning(paste("the date-time stamp of '", nm, "' contains non-unique values", sep = ""))
return(FALSE)
}
row.nm <- row.names(dm.data)
row.diff <- diff(as.POSIXct(row.nm, tz = "GMT"))
if(units(row.diff) != "hours") {
units(row.diff) <- "hours"
}
resolution <- unique(row.diff)
if(length(resolution) != 1) {
warning(paste("the temporal resolution of '", nm, "' is not constant", sep = ""))
return(FALSE)
}
tst.data.numeric <- sapply(dm.data, is.numeric)
if(FALSE %in% tst.data.numeric) {
warning(paste("the columns of '", nm, "' should contain numeric dendrometer data only", sep = ""))
return(FALSE)
}
return(TRUE)
}
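# Quick self-check of is.dendro() on a toy hourly series (column names and
# values are made up for illustration):
if (FALSE) {
  dm <- data.frame(sensor1 = rnorm(24), sensor2 = rnorm(24))
  rownames(dm) <- format(seq(as.POSIXct("2020-01-01 00:00", tz = "GMT"),
                             by = "hour", length.out = 24),
                         "%Y-%m-%d %H:%M:%S")
  is.dendro(dm)  # TRUE: data frame, unique hourly timestamps, numeric columns
}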
|
filter.conflictive.snps <- function(sum.stat, allele.info, options){
msg <- paste("Removing SNPs with conflictive allele information:", date())
if(options$print) message(msg)
sum.info <- sum.stat$stat
nstudy <- length(sum.info)
foo <- function(x){
paste(sort(x), collapse = '')
}
ref.allele <- apply(allele.info[, c('RefAllele', 'EffectAllele')], 1, foo)
names(ref.allele) <- allele.info$SNP
exc.snps <- NULL
for(k in 1:nstudy){
ord.allele <- apply(sum.info[[k]][, c('RefAllele', 'EffectAllele')], 1, foo)
rs <- names(ord.allele)
id <- which(ord.allele != ref.allele[rs])
if(length(id) > 0){
exc.snps <- c(exc.snps, rs[id])
}
}
exc.snps <- unique(exc.snps)
exc.snps
}
|
context("Bootstrapping")
library(finalfit)
test_that("ff_newdata gives dataframe", {
expect_is(ff_newdata(colon_s, explanatory = c("age.factor", "extent.factor"), newdata = list(
c("<40 years", "Submucosa"),
c("<40 years", "Submucosa"))) -> newdata,
"data.frame")
})
test_that("ff_newdata gives dataframe", {
expect_is(ff_newdata(colon_s, explanatory = c("nodes", "extent.factor", "perfor.factor"), rowwise = FALSE,
newdata = list(
rep(seq(0, 30), 4),
c(rep("Muscle", 62), rep("Adjacent structures", 62)),
c(rep("No", 31), rep("Yes", 31), rep("No", 31), rep("Yes", 31))
)) -> newdata,
"data.frame")
})
test_that("ff_newdata gives dataframe", {
expect_is(colon_s %>%
glmmulti("mort_5yr", c("age.factor", "extent.factor")) %>%
boot_predict(newdata = ff_newdata(colon_s, explanatory = c("age.factor", "extent.factor"),
newdata = list(
c("<40 years", "Submucosa"),
c("<40 years", "Submucosa"))),
estimate_name = "Predicted probability of death",
compare_name = "Absolute risk difference", R=40, digits = c(2,3)),
"data.frame")
})
test_that("ff_newdata gives dataframe", {
expect_is(colon_s %>%
glmmulti("mort_5yr", c("age.factor", "extent.factor")) %>%
boot_predict(newdata = ff_newdata(colon_s, explanatory = c("age.factor", "extent.factor"),
newdata = list(
c("<40 years", "Submucosa"),
c("<40 years", "Submucosa"))),
condense = FALSE,
comparison = "ratio", R=40, digits = c(2,3)),
"data.frame")
})
|
.CVXR.options <- list(idCounter = 0L, np = NULL, sp = NULL, mosekglue = NULL)
setIdCounter <- function(value = 0L) {
.CVXR.options$idCounter <- value
assignInMyNamespace(".CVXR.options", .CVXR.options)
.CVXR.options
}
resetOptions <- function() {
assignInMyNamespace(".CVXR.options", list(idCounter = 0L, np = NULL, sp = NULL, mosekglue = NULL))
.CVXR.options
}
get_id <- function() {
id <- .CVXR.options$idCounter <- .CVXR.options$idCounter + 1L
assignInMyNamespace(".CVXR.options", .CVXR.options)
id
}
get_sp <- function() {
sp <- .CVXR.options$sp
if (is.null(sp)) {
stop("Scipy not available")
}
sp
}
get_np <- function() {
np <- .CVXR.options$np
if (is.null(np)) {
stop("Numpy not available")
}
np
}
flatten_list <- function(x) {
  y <- list()
  rapply(x, function(el) y <<- c(y, el))  # accumulate leaves in the enclosing scope
  y
}
setClass("Rdict", representation(keys = "list", values = "list"), prototype(keys = list(), values = list()),
validity = function(object) {
if(length(object@keys) != length(object@values))
return("Number of keys must match number of values")
             if(length(unique(object@keys)) != length(object@keys))
               return("Keys must be unique")
return(TRUE)
})
Rdict <- function(keys = list(), values = list()) {
new("Rdict", keys = keys, values = values)
}
setMethod("$", signature(x = "Rdict"), function(x, name) {
if(name == "items") {
items <- rep(list(list()), length(x))
for(i in 1:length(x)) {
tmp <- list(key = x@keys[[i]], value = x@values[[i]])
items[[i]] <- tmp
}
return(items)
} else
slot(x, name)
})
setMethod("length", signature(x = "Rdict"), function(x) { length(x@keys) })
setMethod("is.element", signature(el = "ANY", set = "Rdict"), function(el, set) {
for(k in set@keys) {
if(identical(k, el))
return(TRUE)
}
return(FALSE)
})
setMethod("[", signature(x = "Rdict"), function(x, i, j, ..., drop = TRUE) {
for(k in 1:length(x@keys)) {
if(length(x@keys[[k]]) == length(i) && all(x@keys[[k]] == i))
return(x@values[[k]])
}
stop("key ", i, " was not found")
})
setMethod("[<-", signature(x = "Rdict"), function(x, i, j, ..., value) {
if(is.element(i, x))
x@values[[i]] <- value
else {
x@keys <- c(x@keys, list(i))
x@values <- c(x@values, list(value))
}
return(x)
})
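# Small usage sketch for the Rdict class and methods defined above:
if (FALSE) {
  d <- Rdict()
  d["alpha"] <- 1        # appends a new key/value pair via "[<-"
  d["beta"] <- 2
  d["alpha"]             # 1, retrieved via "["
  length(d)              # 2
  is.element("beta", d)  # TRUE
}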
setClass("Rdictdefault", representation(default = "function"), contains = "Rdict")
Rdictdefault <- function(keys = list(), values = list(), default) {
new("Rdictdefault", keys = keys, values = values, default = default)
}
setMethod("[", signature(x = "Rdictdefault"), function(x, i, j, ..., drop = TRUE) {
if(length(x@keys) > 0) {
for(k in 1:length(x@keys)) {
if(length(x@keys[[k]]) == length(i) && all(x@keys[[k]] == i))
return(x@values[[k]])
}
}
stop("Unimplemented: For now, user must manually create key and set its value to default(key)")
x@keys <- c(x@keys, list(i))
x@values <- c(x@values, list(x@default(i)))
return(x@values[[length(x@values)]])
})
|
expected <- eval(parse(text="FALSE"));
test(id=0, code={
argv <- eval(parse(text="list(list(structure(3.14159265358979, class = structure(\"3.14159265358979\", class = \"testit\"))), \"try-error\", FALSE)"));
.Internal(inherits(argv[[1]], argv[[2]], argv[[3]]));
}, o=expected);
|
"Certolizumabdat"
|
if(FALSE)
{
library(fitdistrplus)
n <- 100
set.seed(12345)
x <- rbeta(n, 3, 3/4)
psi <- function(x) digamma(x)
grbetalnl <- function(x, a, b)
c(log(x)-psi(a)+psi(a+b), log(1-x)-psi(b)+psi(a+b))
grlnL <- function(par, obs, ...)
-rowSums(sapply(obs, function(x) grbetalnl(x, a=par[1], b=par[2])))
constrOptim2 <- function(par, fn, gr=NULL, ui, ci, ...)
constrOptim(theta=unlist(par), f=fn, grad=gr, ui=ui, ci=ci, ...)
ctr <- list(trace=3, REPORT=1, maxit=1000) # verbose alternative
ctr <- list(trace=0, REPORT=1, maxit=1000) # silent run; overrides the line above
bfgs_gr$time <- system.time(bfgs_gr <- mledist(x, dist="beta", optim.method="BFGS", gr=grlnL, control=ctr))[3]
bfgs <- mledist(x, dist="beta", optim.method="BFGS", control=ctr)
lbfgs_gr <- mledist(x, dist="beta", optim.method="L-BFGS-B", gr=grlnL, control=ctr, lower=c(0,0))
lbfgs <- mledist(x, dist="beta", optim.method="L-BFGS-B", control=ctr, lower=c(0,0))
cg_gr <- mledist(x, dist="beta", optim.method="CG", gr=grlnL, control=ctr)
cg <- mledist(x, dist="beta", optim.method="CG", control=ctr)
nm_gr <- mledist(x, dist="beta", optim.method="Nelder", gr=grlnL, control=ctr)
nm <- mledist(x, dist="beta", optim.method="Nelder", control=ctr)
constr_nm_gr <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="Nelder", gr=grlnL, control=ctr)
constr_nm <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="Nelder", control=ctr)
constr_bfgs_gr <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="BFGS", gr=grlnL, control=ctr)
constr_bfgs <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="BFGS", control=ctr)
constr_cg_gr <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="CG", gr=grlnL, control=ctr)
constr_cg <- mledist(x, dist="beta", custom.optim=constrOptim2,
ui = diag(2), ci = c(0, 0), optim.method="CG", control=ctr)
lnL <- function(par, fix.arg, obs, ddistnam)
{
fitdistrplus:::loglikelihood(par, fix.arg, obs, ddistnam, weights = rep(1, NROW(obs)))
}
constrOptim2(c(shape1=1, shape2=1), lnL, obs=x, fix.arg=NULL, ddistnam="dbeta",
ui = diag(2), ci = c(0, 0))
dbeta3 <- function(x, shape1, shape2)
dbeta(x, shape1, shape2)
dbeta2 <- function(x, shape1, shape2, log)
dbeta(x, exp(shape1), exp(shape2), log=log)
pbeta2 <- function(q, shape1, shape2, log.p)
pbeta(q, exp(shape1), exp(shape2), log.p=log.p)
bfgs2 <- mledist(x, dist="beta2", optim.method="BFGS", control=ctr,
start=list(shape1=0, shape2=0))
bfgs3 <- mledist(x, dist="beta3", optim.method="BFGS", control=ctr,
start=list(shape1=1, shape2=1))
getval <- function(x)
c(x$estimate, loglik=x$loglik, x$counts)
getval2 <- function(x)
c(exp(x$estimate), loglik=x$loglik, x$counts)
cbind(trueval=c(3, 3/4, lnL(c(3, 3/4), NULL, x, "dbeta"), NA, NA),
NM=getval(nm), NMgrad=getval(nm_gr),
constr_NM=getval(constr_nm), constr_NMgrad=getval(constr_nm_gr),
CG=getval(cg), CGgrad=getval(cg_gr),
constr_CG=getval(constr_cg), constr_CGgrad=getval(constr_cg_gr),
BFGS=getval(bfgs), BFGSgrad=getval(bfgs_gr),
constr_BFGS=getval(constr_bfgs), constr_BFGSgrad=getval(constr_bfgs_gr),
BFGS_exp=getval2(bfgs2), BFGS_nolog=getval(bfgs3))
llsurface(min.arg=c(0.1, 0.1), max.arg=c(7, 3), plot.arg=c("shape1", "shape2"),
lseq=50, data=x, distr="beta")
points(bfgs$estimate[1], bfgs$estimate[2], pch="+", col="red")
points(3, 3/4, pch="x", col="green")
}
|
"Marijuana"
|
ml_g <- function(formula, data) {
mf <- model.frame(formula, data)
y <- model.response(mf, "numeric")
X <- model.matrix(formula, data = data)
if (any(is.na(cbind(y, X)))) stop("Some data are missing.")
jll.normal <- function(params, X, y) {
p <- length(params)
beta <- params[-p]
sigma <- exp(params[p])
linpred <- X %*% beta
sum(dnorm(y, mean = linpred, sd = sigma, log = TRUE))
}
ls.reg <- lm(y ~ X - 1)
beta.hat.ls <- coef(ls.reg)
sigma.hat.ls <- sd(residuals(ls.reg))
  start <- c(beta.hat.ls, log(sigma.hat.ls))  # sigma is optimised on the log scale
fit <- optim(start,
jll.normal,
X = X,
y = y,
control = list(
fnscale = -1,
maxit = 10000),
hessian = TRUE
)
if (fit$convergence > 0) {
print(fit)
stop("optim failed to converge!")
}
beta.hat <- fit$par
se.beta.hat <- sqrt(diag(solve(-fit$hessian)))
results <- list(fit = fit,
X = X,
y = y,
call = match.call(),
beta.hat = beta.hat,
se.beta.hat = se.beta.hat,
sigma.hat = exp(beta.hat[length(beta.hat)]))
class(results) <- c("ml_g_fit","lm")
return(results)
}
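# Usage sketch for ml_g(); mtcars ships with base R, so this is runnable as-is:
if (FALSE) {
  fit <- ml_g(mpg ~ hp + wt, data = mtcars)
  fit$beta.hat     # ML coefficients, with log(sigma) as the final element
  fit$se.beta.hat  # Wald SEs from the inverted negative Hessian
  fit$sigma.hat    # residual SD back-transformed to the original scale
}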
|
create_arm <- function(size,
accr_time,
accr_dist = "pieceuni",
accr_interval = c(0, accr_time),
accr_param = NA,
surv_cure = 0,
surv_interval = c(0, Inf),
surv_shape=1,
surv_scale,
loss_shape=1,
loss_scale,
follow_time = Inf,
total_time = Inf) {
if (! accr_dist %in% c("pieceuni", "truncexp")) {
stop("Please specify a valid accrual distribution.", call.=F)
}
accr_interval = sort(unique(c(0, accr_interval, accr_time)))
if (min(accr_interval) < 0 | max(accr_interval) > accr_time) {
stop("accr_interval is out of range.", call.=F)
}
if (accr_dist == "pieceuni") {
if (length(accr_param) != length(accr_interval) - 1) {
stop("Number of accrual intervals (accr_interval) does not match number of
accrual parameters (accr_param).", call.=F)
}
if (length(accr_interval) > 2 & sum(accr_param) != 1) {
stop("accr_param must sum to 1.", call.=F)
}
  } else if (length(accr_param) > 1 || is.na(accr_param)) {
stop("Truncated exponential is a one-parameter family distribution.", call.=F)
}
surv_interval = sort(unique(c(0, surv_interval, Inf)))
if (min(surv_interval) < 0) {
stop("surv_interval is out of range.", call.=F)
}
if (surv_shape != 1 & length(surv_scale) > 1) {
surv_shape = 1
warning("Piecewise Weibull is not supported. surv_shape defaulted to 1.", call.=F)
}
if (length(surv_scale) != length(surv_interval) - 1) {
stop("Number of survival intervals (surv_interval) does not match number of
piecewise hazards (surv_scale).", call.=F)
}
if (length(loss_shape) > 1 | length(loss_scale) > 1) {
loss_shape = loss_shape[1]
loss_scale = loss_scale[1]
warning("Only Weibull loss to follow-up is supported. First number in loss_shape
and loss_scale are considered. The rest are ignored.", call.=F)
}
if (is.infinite(follow_time) & is.infinite(total_time)) {
total_time = 1e6
follow_time = total_time - accr_time
warning("Neither follow_time nor total_time were defined. Therefore, total_time is
defaulted to max value.", call.=F)
} else if (!is.infinite(follow_time) & !is.infinite(total_time) & accr_time+follow_time != total_time) {
total_time = accr_time + follow_time
warning("follow_time and total_time were inconsistently defined.
total_time will be ignored.", call.=F)
} else if (is.infinite(follow_time)) {
follow_time = total_time - accr_time
} else {
total_time = accr_time + follow_time
}
arm <- list(size = size,
accr_time = accr_time,
accr_dist = accr_dist,
accr_interval = accr_interval,
accr_param = accr_param,
surv_cure = surv_cure,
surv_interval = surv_interval,
surv_shape = surv_shape,
surv_scale = surv_scale,
loss_shape = loss_shape,
loss_scale = loss_scale,
follow_time = follow_time,
total_time = total_time)
if (length(accr_param)==1 &
length(surv_interval)==2 & surv_shape==1 &
loss_shape==1) {
class(arm) <- append(class(arm), "lachin")
}
class(arm) <- append(class(arm), "arm")
return(arm)
}
create_arm_lachin <- function(size,
accr_time,
accr_dist = "pieceuni",
accr_param = NA,
surv_median = NA,
surv_exphazard = NA,
surv_milestone = NA,
loss_median = NA,
loss_exphazard = NA,
loss_milestone = NA,
follow_time = Inf,
total_time = Inf) {
if (accr_dist == "pieceuni" & !is.na(accr_param)) {
accr_param = NA
warning("accr_param is ignored.", call.=F)
}
if (sum(!is.na(c(surv_median, surv_exphazard, surv_milestone[1]))) > 1) {
stop("Please specify just one of surv_median, surv_exphazard, or surv_milestone.", call.=F)
} else if (!is.na(surv_median)) {
surv_scale = per2haz(surv_median)
} else if (!is.na(surv_exphazard)) {
surv_scale = surv_exphazard
} else {
surv_scale = per2haz(x=surv_milestone[1], per=1-surv_milestone[2])
}
if (sum(!is.na(c(loss_median, loss_exphazard, loss_milestone[1]))) > 1) {
stop("Please specify just one of loss_median, loss_exphazard, or loss_milestone.", call.=F)
} else if (!is.na(loss_median)) {
loss_scale = per2haz(loss_median)
} else if (!is.na(loss_exphazard)) {
loss_scale = loss_exphazard
} else {
loss_scale = per2haz(x=loss_milestone[1], per=1-loss_milestone[2])
}
arm <- create_arm(size = size,
accr_time = accr_time,
accr_dist = accr_dist,
accr_param = accr_param,
surv_scale = surv_scale,
loss_scale = loss_scale,
follow_time = follow_time,
total_time = total_time)
return(arm)
}
per2haz <- function(x, per=0.5) {
-log(1-per)/x
}
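# per2haz() maps a survival percentile to the matching exponential hazard,
# e.g. a median of 12 implies -log(1 - 0.5)/12, roughly 0.0578:
if (FALSE) {
  per2haz(12)             # hazard matching a median survival time of 12
  per2haz(24, per = 0.25) # hazard such that 25% of events occur by t = 24
}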
|
NULL
setClass("DiffSummary",
slots=c(
max.lines="integer", width="integer", etc="Settings",
diffs="matrix", all.eq="character",
scale.threshold="numeric"
),
validity=function(object) {
if(
!is.integer(object@diffs) &&
!identical(rownames(object@diffs), c("match", "delete", "add"))
)
return("Invalid diffs object")
TRUE
}
)
setMethod("summary", "Diff",
function(
object, scale.threshold=0.1, max.lines=50L, width=getOption("width"), ...
) {
if(!is.int.1L(max.lines) || max.lines < 1L)
stop("Argument `max.lines` must be integer(1L) and strictly positive")
max.lines <- as.integer(max.lines)
if(!is.int.1L(width) || width < 0L)
stop("Argument `width` must be integer(1L) and positive")
if(width < 10L) width <- 10L
if(
!is.numeric(scale.threshold) || length(scale.threshold) != 1L ||
is.na(scale.threshold) || !scale.threshold %bw% c(0, 1)
)
stop("Argument `scale.threshold` must be numeric(1L) between 0 and 1")
diffs.c <- count_diffs_detail(object@diffs)
match.seq <- rle(!!diffs.c["match", ])
match.keep <- unlist(
lapply(
match.seq$lengths,
function(x) if(x == 2L) c(TRUE, FALSE) else TRUE
) )
diffs <- diffs.c[, match.keep, drop=FALSE]
all.eq <- all.equal(object@target, object@current)
new(
"DiffSummary", max.lines=max.lines, width=width, etc=object@etc,
diffs=diffs, all.eq=if(isTRUE(all.eq)) character(0L) else all.eq,
scale.threshold=scale.threshold
)
}
)
setMethod("finalizeHtml", c("DiffSummary"),
function(x, x.chr, ...) {
js <- ""
callNextMethod(x, x.chr, js=js, ...)
} )
setMethod("as.character", "DiffSummary",
function(x, ...) {
etc <- x@etc
style <- etc@style
hunks <- sum(!x@diffs["match", ])
res <- c(apply(x@diffs, 1L, sum))
scale.threshold <- [email protected]
res <- if(!hunks || !sum(x@diffs[c("delete", "add"), ])) {
style@summary@body(
if(length([email protected])) {
eq.txt <- paste0("- ", [email protected])
paste0(
c(
"No visible differences, but objects are not `all.equal`:",
eq.txt
),
collapse=style@[email protected]
)
} else {
"Objects are `all.equal`"
} )
} else {
pad <- 2L
width <- x@width - pad
head <- paste0(
paste0(
strwrap(
sprintf(
"Found differences in %d hunk%s:", hunks, if(hunks != 1L) "s" else ""
),
width=width
),
collapse=style@[email protected]
),
style@summary@detail(
paste0(
strwrap(
sprintf(
"%d insertion%s, %d deletion%s, %d match%s (lines)",
res[["add"]], if(res[["add"]] == 1L) "" else "s",
res[["delete"]], if(res[["delete"]] == 1L) "" else "s",
res[["match"]], if(res[["match"]] == 1L) "" else "es"
),
width=width
),
collapse=style@[email protected]
)
),
collapse=""
)
max.chars <- [email protected] * width
diffs <- x@diffs
scale.threshold <- [email protected]
scale_err <- function(orig, scaled, threshold, width) {
if((width - sum(scaled)) / width > threshold) {
TRUE
} else {
zeroes <- !orig
orig.nz <- orig[!zeroes]
scaled.nz <- scaled[!zeroes]
orig.norm <- orig.nz / max(orig.nz)
scaled.norm <- scaled.nz / max(scaled.nz)
any(abs(orig.norm - scaled.norm) > threshold)
}
}
diffs.gz <- diffs > 1L
diffs.nz <- diffs[diffs.gz]
safety <- 10000L
tol <- width / 4
diffs.scale <- diffs
lo.bound <- lo <- length(diffs.nz)
hi.bound <- hi <- sum(diffs.nz)
if(sum(diffs.scale) > width) {
repeat {
mp <- ceiling((hi.bound - lo.bound) / 2) + lo.bound
safety <- safety - 1L
if(safety < 0L)
stop("Logic Error: likely infinite loop; contact maintainer.")
diffs.nz.s <- pmax(
round(diffs.nz * (mp - lo) / (hi - lo)), 1L
)
diffs.scale[diffs.gz] <- diffs.nz.s
scale.err <- scale_err(diffs, diffs.scale, scale.threshold, width)
break.cond <- floor(mp / width) <= floor(lo.bound / width) ||
mp >= hi.bound
if(scale.err) {
lo.bound <- mp
} else {
if(break.cond) break
hi.bound <- mp
}
}
}
diffs.fin <- diffs.scale
scale.one <- diffs.scale == 1
scale.gt.one <- diffs.scale > 1
s.o.txt <- if(any(scale.one)) {
s.o.r <- unique(range(diffs[scale.one]))
if(length(s.o.r) == 1L)
sprintf("%d:1 for single chars", s.o.r)
else
sprintf("%d-%d:1 for single chars", s.o.r[1L], s.o.r[2L])
}
s.gt.o.txt <- if(any(scale.gt.one)) {
s.gt.o.r <- unique(
range(round(diffs[scale.gt.one] / diffs.scale[scale.gt.one]))
)
if(length(s.gt.o.r) == 1L)
sprintf("%d:1 for char seqs", s.gt.o.r)
else
sprintf("%d-%d:1 for char seqs", s.gt.o.r[1L], s.gt.o.r[2L])
}
          map.txt <- sprintf(
            "Diff map (line:char scale is %s%s%s):",
            if(!is.null(s.o.txt)) s.o.txt else "",
            if(!is.null(s.o.txt) && !is.null(s.gt.o.txt)) ", " else "",
            if(!is.null(s.gt.o.txt)) s.gt.o.txt else ""
          )
body <- if(style@wrap) strwrap(map.txt, width=x@width) else map.txt
diffs.txt <- character(length(diffs.fin))
attributes(diffs.txt) <- attributes(diffs.fin)
symb <- c(match=".", add="I", delete="D")
use.ansi <- FALSE
for(i in names(symb)) {
test <- diffs.txt[i, ] <- vapply(
diffs.fin[i, ],
function(x) paste0(rep(symb[[i]], x), collapse=""),
character(1L)
)
}
txt <- do.call(paste0, as.list(c(diffs.txt)))
txt <- substr2(txt, 1, max.chars, [email protected])
txt.w <- unlist(
if(style@wrap) wrap(txt, width, [email protected])
else txt
)
if(is(style, "StyleAnsi")) {
old.crayon.opt <- options(crayon.enabled=TRUE)
on.exit(options(old.crayon.opt), add=TRUE)
}
s.f <- style@funs
txt.w <- gsub(
symb[["add"]], [email protected](symb[["add"]]),
gsub(
symb[["delete"]], [email protected](symb[["delete"]]),
txt.w, fixed=TRUE
),
fixed=TRUE
)
extra <- if(sum(diffs.fin) > max.chars) {
diffs.omitted <- diffs.fin
diffs.under <- cumsum(diffs.omitted) <= max.chars
diffs.omitted[diffs.under] <- 0L
res.om <- apply(diffs.omitted, 1L, sum)
sprintf(
paste0(
"omitting %d deletion%s, %d insertion%s, and %d matche%s; ",
"increase `max.lines` to %d to show full map"
),
res.om[["delete"]], if(res.om[["delete"]] != 1L) "s" else "",
res.om[["add"]], if(res.om[["add"]] != 1L) "s" else "",
res.om[["match"]], if(res.om[["match"]] != 1L) "s" else "",
ceiling(sum(diffs.scale) / width)
)
} else character(0L)
map <- txt.w
if(length(extra) && style@wrap) extra <- strwrap(extra, width=width)
c(
style@summary@body(
paste0(
c(head, body),
collapse=style@[email protected]
) ),
style@summary@map(c(map, extra))
)
}
fin <- style@funs@container(style@summary@container(res))
finalize(
fin, x,
length(unlist(gregexpr(style@[email protected], fin, fixed=TRUE))) +
length(fin)
)
}
)
setMethod("show", "DiffSummary",
function(object) {
show_w_pager(as.character(object), object@etc@style@pager)
invisible(NULL)
}
)
|
.Pars <- c(
"xlog", "ylog",
"adj", "ann", "ask", "bg", "bty",
"cex", "cex.axis", "cex.lab", "cex.main", "cex.sub", "cin",
"col", "col.axis", "col.lab", "col.main", "col.sub",
"cra", "crt", "csi","cxy", "din", "err", "family",
"fg", "fig", "fin",
"font", "font.axis", "font.lab", "font.main", "font.sub",
"lab", "las", "lend", "lheight", "ljoin", "lmitre", "lty", "lwd",
"mai", "mar", "mex", "mfcol", "mfg", "mfrow", "mgp", "mkh",
"new", "oma", "omd", "omi", "page", "pch", "pin", "plt", "ps", "pty",
"smo", "srt", "tck", "tcl", "usr",
"xaxp", "xaxs", "xaxt", "xpd",
"yaxp", "yaxs", "yaxt", "ylbias"
)
par <- function (..., no.readonly = FALSE)
{
.Pars.readonly <- c("cin","cra","csi","cxy","din","page")
single <- FALSE
args <- list(...)
if (!length(args))
args <- as.list(if (no.readonly) .Pars[-match(.Pars.readonly, .Pars)]
else .Pars)
else {
if (all(unlist(lapply(args, is.character))))
args <- as.list(unlist(args))
if (length(args) == 1) {
if (is.list(args[[1L]]) | is.null(args[[1L]]))
args <- args[[1L]]
else
if(is.null(names(args)))
single <- TRUE
}
}
value <- .External2(C_par, args)
if(single) value <- value[[1L]]
if(!is.null(names(args))) invisible(value) else value
}
clip <- function(x1, x2, y1, y2)
invisible(.External.graphics(C_clip, x1, x2, y1, y2))
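# Typical query/set/restore cycle for par():
if (FALSE) {
  old <- par(mfrow = c(2, 1), mar = c(4, 4, 2, 1)) # set, saving old values
  plot(1:10)
  plot(rnorm(10))
  par(old)                                         # restore previous settings
}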
|
sncFun.04110 <- function(p_snc, h){
h <- abs(h)
alf <- p_snc[1];
bet <- p_snc[2];
h0 <- tail(p_snc, 1);
ah <- alf*h
ah0 <- alf*10^h0
gnc <- ifelse(ah >= 1, (bet+log(1/alf))/log(10) - bet/log(10)*ah^(-1/bet) - log10(h) , 0)
gnc0 <- ifelse(ah0 >= 1, (bet+log(1/alf))/log(10) - bet/log(10)*ah0^(-1/bet)- h0, 0)
snc <- 1 - gnc/gnc0
return(list("snc" = snc))
}
|
RSKC <-
function(d,ncl,alpha,L1=12,nstart=200,silent=TRUE,scaling=FALSE,correlation = FALSE){
if (alpha > 1 | alpha < 0) stop("alpha must be between 0 and 1")
if (!is.null(L1)) if (L1<1) stop("L1 value must be greater or equal to 1 or NULL!")
if (is.data.frame(d)) d <- as.matrix(d)
r.ncl <- round(ncl)
if (ncl != r.ncl) ncl <- r.ncl
if (ncl <= 1) stop("ncl must be positive integer > 1! but ncl=",ncl)
if (scaling) d=scale(d)
if (correlation) d = t(scale(t(d)))
if (is.null(L1)) sparse<-FALSE else{ sparse<-TRUE}
n<-nrow(d);Nout<-floor(n*alpha)
f<-ncol(d);g<-f+1
W<-rep(1,f);sumW<-f
if( sum(is.na(d))==0 )
{
miss<-FALSE
if(sparse){
Result<-RSKC.a1.a2.b(d,L1,ncl,nstart,alpha,n,f,g,Nout,silent)
}else{
Result<-RSKC.trimkmeans(d,ncl,trim=alpha,runs=nstart,maxit=10000)
}
}else{
d[is.nan(d) ] <- NA
miss<-TRUE
if (sparse){
Result<-RSKC.a1.a2.b.missing(d,L1,ncl,nstart,alpha,n,f,g,Nout,silent)
}else{
Result<-RSKC.trimkmeans.missing(d=d,ncl=ncl,w=W,trim=alpha,runs=nstart,points=Inf,maxit=10000)
}
}
if(sparse)
{
Result$oW<-sort(Result$oW)
if(Nout==0){
Result$oW<-Result$oE<-"undefined"
}
}else{
Result<-modified.result.nonsparse(Result,ncl,f)
if(Nout==0){
Result<-modified.result.kmean(Result)
}
}
Result$disttom<-Result$ropt<-Result$trim<-Result$scaling<-Result$centers<-
Result$criterion<-Result$classification<-Result$means<-Result$ropt<-Result$k<-Result$runs<-NULL
if (!is.null(colnames(d))) names(Result$weights) <- colnames(d)
Input<-list(N=n,p=f,ncl=ncl,L1=L1,nstart=nstart,alpha=alpha,
scaling=scaling,correlation=correlation,missing=miss)
r2<-c(Input,Result)
class(r2)<-"rskc"
return(r2)
}
modified.result.nonsparse<-function(Result,ncl,f){
Result$centers<-Result$means;
Result$oW<-which(Result$classification==ncl+1)
Result$oE<-"undefined";
Result$weights<-rep(1,f)
return(Result)
}
modified.result.kmean<-function(Result){
Result$oE<-Result$oW<-"undefined"
Result$labels<-Result$classification
return(Result)
}
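# Usage sketch for RSKC() on simulated two-cluster data (parameter values are
# illustrative, not recommendations):
if (FALSE) {
  set.seed(1)
  d <- rbind(matrix(rnorm(100, mean = 0), ncol = 10),
             matrix(rnorm(100, mean = 3), ncol = 10))
  res <- RSKC(d, ncl = 2, alpha = 0.1, L1 = 2, nstart = 20)
  res$labels   # cluster assignments
  res$weights  # feature weights from the sparse step
}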
|
vcov.PLADMM <- function(object, ...) {
if ("vcov" %in% names(object)) return(object$vcov)
alpha <- object$tilde_pi
nrankings <- nrow(object$orderings)
weights <- object$weights
beta <- coef(object)[-1]
ncoef <- length(beta)
H <- matrix(0, ncoef, ncoef, dimnames = list(names(beta), names(beta)))
Xalpha <- object$x[, -1, drop = FALSE] * alpha
for (r in seq(nrankings)){
nitems <- sum(object$orderings[r,] != 0)
nchoices <- nitems - 1
for (i in seq(nchoices)){
items <- object$orderings[r, i:nitems]
a <- sum(alpha[items])
xa <- colSums(Xalpha[items, , drop = FALSE])
H <- H + weights[r] * (
tcrossprod(xa)/a^2 -
crossprod(object$x[items, -1, drop = FALSE],
Xalpha[items, , drop = FALSE])/a)
}
}
solve(-H)
}
|
source("ESEUR_config.r")
library("simex")
pal_col=rainbow(3)
maint_all=read.csv(paste0(ESEUR_dir, "regression/10-1002_maint-task-data.csv.xz"), as.is=TRUE)
maint_all$ins_up=maint_all$INSERT+maint_all$UPDATE
maint_all$lins_up=log(maint_all$ins_up)
maint=subset(maint_all, EFFORT > 0.1)
maint=subset(maint, ins_up > 0.0)
plot(maint$ins_up, maint$EFFORT, log="xy", col=pal_col[2],
xlab="Lines added+updated", ylab="Effort (days)\n")
maint_mod=glm(EFFORT ~ lins_up, data=maint, family=gaussian(link="log"),
x=TRUE, y=TRUE)
x_vals=exp(seq(1e-2, log(max(maint$ins_up)), length.out=20))
pred=predict(maint_mod, newdata=data.frame(lins_up=log(x_vals)), se.fit=TRUE)
lines(x_vals, exp(pred$fit), col=pal_col[1])
y_err=simex(maint_mod, SIMEXvariable="lins_up", measurement.error=maint$lins_up/10, asymptotic=FALSE)
pred=predict(y_err, newdata=data.frame(lins_up=log(x_vals)), se.fit=TRUE)
lines(x_vals, exp(pred$fit), col=pal_col[3])
|
setClass("ResidualFitIndices",
slots = c(sampleMoments = "list",
impliedMoments = "list",
RMR = "ResidualFitIndex",
SRMR = "ResidualFitIndex",
CRMR = "ResidualFitIndex"))
setMethod(f = "initialize", signature = "ResidualFitIndices",
definition = function(.Object) {
.Object@sampleMoments = list(yBar = matrix(NA_real_), S = matrix(NA_real_))
.Object@impliedMoments = list(muHat = matrix(NA_real_), SigmaHat = matrix(NA_real_))
.Object@RMR = new("ResidualFitIndex")
.Object@SRMR = new("ResidualFitIndex")
.Object@CRMR = new("ResidualFitIndex")
return(.Object)
}
)
setMethod(f = "print",
signature = "ResidualFitIndices",
definition = function(x, ...) {
cat("Residual Fit Indices\n",
" RMR: ", round(x@RMR@index$total, 3), "\n",
" SRMR: ", round(x@SRMR@index$total, 3), "\n",
" CRMR: ", round(x@CRMR@index$total, 3), "\n")
})
setMethod(f = "show",
signature = "ResidualFitIndices",
definition = function(object) {
cat("Residual Fit Indices\n",
" RMR: ", round(object@RMR@index$total, 3), "\n",
" SRMR: ", round(object@SRMR@index$total, 3), "\n",
" CRMR: ", round(object@CRMR@index$total, 3), "\n")
})
setGeneric(name = "details", def = function(object, comp = c("Total", "Covariance", "Variance", "Mean", "Total")) {
standardGeneric("details")
}
)
setMethod(f = "details",
signature = "ResidualFitIndices",
function(object, comp = c("Total", "Covariance", "Variance", "Mean", "Total")) {
if ("Total" %in% comp) {
cat("Total\n")
cat(" RMR: ", round(object@RMR@index$total, 3), "\n")
cat(" SRMR: ", round(object@SRMR@index$total, 3), "\n")
cat(" CRMR: ", round(object@CRMR@index$total, 3), "\n")
}
if ("Covariance" %in% comp) {
cat("Covariance (off-diagonal)\n")
cat(" RMR: ", round(object@RMR@index$cov, 3), "\n")
cat(" SRMR: ", round(object@SRMR@index$cov, 3), "\n")
cat(" CRMR: ", round(object@CRMR@index$cov, 3), "\n")
}
if ( !("Variance" %in% comp) |
all(diag(object@sampleMoments$S) == diag(object@impliedMoments$SigmaHat))) {
cat("Variances not included \n\n")
} else if("Variance" %in% comp) {
cat("Variance\n")
cat(" RMR: ", round(object@RMR@index$var, 3), "\n")
cat(" SRMR: ", round(object@SRMR@index$var, 3), "\n")
}
if ("Mean" %in% comp & !is.null(object@sampleMoments$yBar)) {
cat("Mean\n")
cat(" RMR: ", round(object@RMR@index$mean, 3), "\n")
cat(" SRMR: ", round(object@SRMR@index$mean, 3), "\n")
cat(" CRMR: ", round(object@CRMR@index$mean, 3), "\n\n")
} else if ("Mean" %in% comp & is.null(object@sampleMoments$yBar)) {
cat("Means not specified \n\n")
}
})
|
GetSpecialOdds <-
function(sportid,
leagueids = NULL,
since = NULL,
oddsformat = 'AMERICAN',
tableformat = 'clean',
force = TRUE){
CheckTermsAndConditions()
if(missing(sportid)) {
cat('No Sports Selected, choose one:\n')
ViewSports()
sportid <- readline('Selection (id): ')
}
message(
Sys.time(),
'| Pulling Specials Odds for - sportid: ', sportid,
if (!is.null(leagueids)) sprintf(' leagueids: %s', paste(leagueids, collapse = ', ')),
if (!is.null(since)) sprintf(' since: %s', since),
' oddsformat: ', oddsformat,
' tableformat: ', tableformat
)
r <-
sprintf('%s/v1/odds/special', .PinnacleAPI$url) %>%
modify_url(query = list(sportId = sportid,
leagueIds = if(!is.null(leagueids)) paste(leagueids,collapse=',') else NULL,
since = since)) %>%
httr::GET(add_headers(Authorization= authorization(),
"Content-Type" = "application/json")) %>%
content(type="text", encoding = "UTF-8")
if(identical(r, '')) return(data.frame())
r %>%
jsonlite::fromJSON(flatten = TRUE) %>%
as.data.table %>%
    {
      if(all(sapply(., is.atomic))) . else expandListColumns(.)
    } %>%
{
if(tableformat == 'long') SpreadsAndTotalsLong(.)
else if(tableformat == 'wide') SpreadsAndTotalsWide(.)
else if(tableformat == 'subtables') .
else if(tableformat == 'clean') expandListColumns(.)
else stop("Undefined value for tableFormat, options are 'mainlines','long','wide', and 'subtables'")
} %>%
as.data.frame()
}
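# Call sketch (the sport id is hypothetical; Pinnacle credentials must be
# configured first, e.g. via SetCredentials()):
if (FALSE) {
  GetSpecialOdds(sportid = 29, oddsformat = 'DECIMAL', tableformat = 'clean')
}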
|
coef.pot <- function(object, ...){
if (!inherits(object, "pot"))
stop("Use only with 'pot' objects")
return(object$fitted.values)
}
|
plot.TLMoments <- function(x, ...) {
if (!inherits(x, "TLMoments")) stop("First argument has to be of class TLMoments. ")
  if (!all(c(3, 4) %in% attr(x, "order"))) stop("Object must contain T3 and T4. ")
UseMethod("plot.TLMoments", x$lambdas)
}
plot.TLMoments.numeric <- function(x, distr = "all", add_center = FALSE, use_internal = TRUE, ...) {
lmrdiagram(x$ratios[3], x$ratios[4],
trim = c(attr(x, "leftrim"), attr(x, "rightrim")),
distr = distr, add_center = add_center, use_internal = use_internal)
}
plot.TLMoments.matrix <- function(x, distr = "all", add_center = TRUE, use_internal = TRUE, ...) {
lmrdiagram(x$ratios[3, ], x$ratios[4, ],
trim = c(attr(x, "leftrim"), attr(x, "rightrim")),
distr = distr, add_center = add_center, use_internal = use_internal)
}
plot.TLMoments.list <- function(x, distr = "all", add_center = TRUE, use_internal = TRUE, ...) {
lmrdiagram(vapply(x$ratios, getElement, "T3", FUN.VALUE = numeric(1)),
vapply(x$ratios, getElement, "T4", FUN.VALUE = numeric(1)),
trim = c(attr(x, "leftrim"), attr(x, "rightrim")),
distr = distr, add_center = add_center, use_internal = use_internal)
}
plot.TLMoments.data.frame <- function(x, distr = "all", add_center = TRUE, use_internal = TRUE, ...) {
lmrdiagram(x$ratios$T3, x$ratios$T4,
trim = c(attr(x, "leftrim"), attr(x, "rightrim")),
distr = distr, add_center = add_center, use_internal = use_internal)
}
lmrdiagram <- function(t3, t4, trim = c(0, 0), distr = c("all"), add_center = TRUE, use_internal = TRUE) {
d_lines <- c("gev", "gpd", "ln3", "pe3", "glo")
d_points <- c("gum", "exp", "norm")
if (length(distr) == 1 && distr == "all")
distr <- c(d_lines, d_points)
if (length(distr) == 1 && distr == "only-lines")
distr <- d_lines
if (length(distr) == 1 && distr == "only-points")
distr <- d_points
if (use_internal) {
tlmr <- tlmomentratios[tlmomentratios$leftrim == trim[1] & tlmomentratios$rightrim == trim[2], ]
}
if (use_internal && nrow(tlmr) == 0) {
warning("No internal data available for this trimming. Using calculated values. ")
use_internal <- FALSE
}
if (!use_internal) {
tlmr <- getTLMomsByDistr(distr, trim)
}
tlmr_points <- tlmr[tlmr$distr %in% intersect(d_points, distr), ]
tlmr_lines <- tlmr[tlmr$distr %in% intersect(d_lines, distr), ]
lab_pref <- ifelse(all(trim == c(0, 0)), "L", paste0("TL(", paste(trim, collapse = ","), ")"))
p <- ggplot2::ggplot(
data.frame(T3 = t3, T4 = t4),
ggplot2::aes_(~T3, ~T4)
) +
ggplot2::labs(x = paste(lab_pref, "skewness"), y = paste(lab_pref, "kurtosis")) +
ggplot2::coord_cartesian(xlim = range(t3), ylim = range(t4)) +
ggplot2::geom_point() +
ggplot2::geom_line(ggplot2::aes_(~T3, ~T4, colour = ~distr, linetype = ~distr), data = tlmr_lines) +
ggplot2::geom_point(ggplot2::aes_(~T3, ~T4, shape = ~distr), data = tlmr_points, size = 3)
if (add_center) {
p + ggplot2::annotate("point", mean(t3), mean(t4), shape = 4)
} else {
p
}
}
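# Plotting sketch for the methods above (assumes the TLMoments package, whose
# defaults compute orders 1-4 as required by plot.TLMoments):
if (FALSE) {
  library(TLMoments)
  tlm <- TLMoments(rnorm(100))
  plot(tlm, distr = "only-lines")
}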
|
if (suppressWarnings(require("testthat") && require("ggeffects"))) {
data(mtcars)
mtcars$cyl2 <- factor(mtcars$cyl)
m1 <- lm(mpg ~ hp + factor(cyl) + gear, data = mtcars)
m2 <- lm(mpg ~ hp + cyl2 + gear, data = mtcars)
pr1 <- ggpredict(m1, "gear")
pr2 <- ggpredict(m2, "gear")
test_that("ggpredict, lm", {
expect_equal(pr1$conf.high, c(24.5107, 25.8074, 28.1194), tolerance = 1e-3)
expect_equal(pr2$conf.high, c(24.5107, 25.8074, 28.1194), tolerance = 1e-3)
expect_equal(pr1$conf.high, pr2$conf.high, tolerance = 1e-3)
})
pr1 <- ggpredict(m1, "gear", vcov.fun = "vcovHC")
pr2 <- ggpredict(m2, "gear", vcov.fun = "vcovHC")
test_that("ggpredict, lm", {
expect_equal(pr1$conf.high, c(24.1337, 25.913, 28.5737), tolerance = 1e-3)
expect_equal(pr2$conf.high, c(24.1337, 25.913, 28.5737), tolerance = 1e-3)
expect_equal(pr1$conf.high, pr2$conf.high, tolerance = 1e-3)
})
}
|
setOldClass("gam")
.predictSmooth <- function(dm, X, beta, pseudotime, gene, nPoints, tidy){
nCurves <- length(grep(x = colnames(dm), pattern = "t[1-9]"))
for (jj in seq_len(nCurves)) {
df <- .getPredictRangeDf(dm, jj, nPoints = nPoints)
Xdf <- predictGAM(lpmatrix = X,
df = df,
pseudotime = pseudotime)
if (jj == 1) Xall <- Xdf
if (jj > 1) Xall <- rbind(Xall, Xdf)
}
if (tidy) out <- list()
for (jj in seq_len(nCurves)) {
df <- .getPredictRangeDf(dm, jj, nPoints = nPoints)
Xdf <- predictGAM(lpmatrix = X,
df = df,
pseudotime = pseudotime)
if (jj == 1) Xall <- Xdf
if (jj > 1) Xall <- rbind(Xall, Xdf)
if (tidy) out[[jj]] <- data.frame(lineage = jj, time = df[, paste0("t",jj)])
}
if (tidy) outAll <- do.call(rbind,out)
yhatMat <- matrix(NA, nrow = length(gene), ncol = nCurves * nPoints)
rownames(yhatMat) <- gene
pointNames <- paste(rep(seq_len(nCurves), each = nPoints), seq_len(nPoints),
sep = "_")
colnames(yhatMat) <- paste0("lineage", pointNames)
for (jj in 1:length(gene)) {
yhat <- c(exp(t(Xall %*% t(beta[as.character(gene[jj]), ,
drop = FALSE])) +
df$offset[1]))
yhatMat[jj, ] <- yhat
}
if(!tidy){
return(yhatMat)
} else {
outList <- list()
for(gg in seq_len(length(gene))){
curOut <- outAll
curOut$gene <- gene[gg]
curOut$yhat <- yhatMat[gg,]
outList[[gg]] <- curOut
}
return(do.call(rbind, outList))
}
}
.predictSmooth_conditions <- function(dm, X, beta, pseudotime, gene, nPoints,
conditions, tidy){
nCurves <- length(grep(x = colnames(dm), pattern = "t[1-9]"))
nConditions <- nlevels(conditions)
if (tidy) out <- list()
for (jj in seq_len(nCurves)) {
if (tidy) out_cond <- list()
for(kk in seq_len(nConditions)){
df <- .getPredictRangeDf(dm, lineageId = jj, conditionId = kk,
nPoints = nPoints)
Xdf <- predictGAM(lpmatrix = X,
df = df,
pseudotime = pseudotime,
conditions = conditions)
if(kk == 1) XallCond <- Xdf
if(kk > 1) XallCond <- rbind(XallCond, Xdf)
if (tidy) {
out_cond[[kk]] <- data.frame(lineage = jj, time = df[, paste0("t",jj)],
condition = levels(conditions)[kk])
}
}
if (jj == 1) Xall <- XallCond
if (jj > 1) Xall <- rbind(Xall, XallCond)
if (tidy) out[[jj]] <- do.call(rbind, out_cond)
}
if (tidy) outAll <- do.call(rbind, out)
yhatMat <- matrix(NA, nrow = length(gene), ncol = nCurves * nConditions * nPoints)
rownames(yhatMat) <- gene
pointNames <- expand.grid(1:nCurves, 1:nConditions)
baseNames <- paste0("lineage", pointNames[,1], "_condition",
levels(conditions)[pointNames[,2]])
colnames(yhatMat) <- c(sapply(baseNames, paste0, "_point",1:nPoints))
for (jj in 1:length(gene)) {
yhat <- c(exp(t(Xall %*% t(beta[as.character(gene[jj]), ,
drop = FALSE])) +
df$offset[1]))
yhatMat[jj, ] <- yhat
}
if (!tidy) {
return(yhatMat)
} else {
outList <- list()
for (gg in seq_len(length(gene))){
curOut <- outAll
curOut$gene <- gene[gg]
curOut$yhat <- yhatMat[gg,]
outList[[gg]] <- curOut
}
return(do.call(rbind, outList))
}
}
setMethod(f = "predictSmooth",
signature = c(models = "SingleCellExperiment"),
definition = function(models,
gene,
nPoints = 100,
tidy = TRUE){
if (is(gene, "character")) {
if (!all(gene %in% rownames(models))) {
stop("Not all gene IDs are present in the models object.")
}
id <- match(gene, rownames(models))
} else id <- gene
dm <- colData(models)$tradeSeq$dm
X <- colData(models)$tradeSeq$X
slingshotColData <- colData(models)$crv
pseudotime <- slingshotColData[,grep(x = colnames(slingshotColData),
pattern = "pseudotime")]
if (is.null(dim(pseudotime))) pseudotime <- matrix(pseudotime, ncol = 1)
betaMat <- rowData(models)$tradeSeq$beta[[1]]
beta <- as.matrix(betaMat[id,])
rownames(beta) <- gene
condPresent <- suppressWarnings({
!is.null(SummarizedExperiment::colData(models)$tradeSeq$conditions)
})
if(!condPresent){
yhatMat <- .predictSmooth(dm = dm,
X = X,
beta = beta,
pseudotime = pseudotime,
gene = gene,
nPoints = nPoints,
tidy = tidy)
} else if(condPresent){
conditions <- SummarizedExperiment::colData(models)$tradeSeq$conditions
yhatMat <- .predictSmooth_conditions(dm = dm,
X = X,
beta = beta,
pseudotime = pseudotime,
gene = gene,
nPoints = nPoints,
conditions = conditions,
tidy = tidy)
}
return(yhatMat)
}
)
setMethod(f = "predictSmooth",
signature = c(models = "list"),
definition = function(models,
gene,
nPoints = 100
){
if (is(gene, "character")) {
if (!all(gene %in% names(models))) {
stop("Not all gene IDs are present in the models object.")
}
id <- which(names(models) %in% gene)
} else id <- gene
m <- .getModelReference(models)
dm <- m$model[, -1]
X <- predict(m, type = "lpmatrix")
pseudotime <- dm[, grep(x = colnames(dm), pattern = "t[1-9]")]
if (is.null(dim(pseudotime))) pseudotime <- matrix(pseudotime, ncol = 1)
nCurves <- length(grep(x = colnames(dm), pattern = "t[1-9]"))
for (jj in seq_len(nCurves)) {
df <- .getPredictRangeDf(dm, jj, nPoints = nPoints)
if (jj == 1) dfall <- df
if (jj > 1) dfall <- rbind(dfall, df)
}
pointNames <- expand.grid(1:nPoints, 1:nCurves)[, 2:1]
rownames(dfall) <- paste0("lineage", apply(pointNames, 1, paste,
collapse = "_"))
yhatMat <- t(sapply(models[id], function(m) {
predict(m, newdata = dfall)
}))
rownames(yhatMat) <- gene
return(exp(yhatMat))
}
)
|
get_mapbox_token <- function ()
{
e <- Sys.getenv()
e <- e [grep ("mapbox|mapscan", names (e), ignore.case = TRUE)]
tok <- unique (as.character (e))
if (all (tok == ""))
stop0 ()
else if (length (tok) > 1)
{
e <- e [grep ("mapscan", names (e), ignore.case = TRUE)]
tok <- unique (as.character (e))
if (length (tok) == 0)
stop0 ()
else if (length (tok) > 1)
stop ("Found multiple potential tokens named [",
paste0 (names (e), collapse = ","), "];\nplease specify ",
"only one environnmental variable which includes the ",
"name\n'mapscan', and contains a personal API key for ",
"mapbox services.")
}
return (tok)
}
stop0 <- function ()
{
stop ("Map generation requires a mapbox API key to be set with ",
"Sys.setenv\nor the package's 'set_mapbox_token' function, ",
"using a token name that\nincludes either the strings ",
"'mapbox' or 'mapscanner'. Tokens can be obtained\nfrom ",
"https://docs.mapbox.com/api/overview/",
call. = FALSE)
}
set_mapbox_token <- function (token)
{
chk <- Sys.setenv ("mapscanner" = token)
if (chk)
message ("Token successfully set")
else
warning ("Unable to set token")
}
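# Token setup sketch ("pk.example" is a placeholder, not a real key):
if (FALSE) {
  set_mapbox_token("pk.example") # stored in the 'mapscanner' env var
  get_mapbox_token()             # retrieved for subsequent map requests
}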
|
setMethodS3("compileRnw", "default", function(filename, path=NULL, ..., type=NULL, verbose=FALSE) {
pathname <- if (is.null(path)) filename else file.path(path, filename)
if (!isUrl(pathname)) {
pathname <- Arguments$getReadablePathname(pathname)
}
if (!is.null(type)) {
type <- Arguments$getCharacter(type)
}
verbose <- Arguments$getVerbose(verbose)
if (verbose) {
pushState(verbose)
on.exit(popState(verbose))
}
verbose && enter(verbose, "Compiling Rnw document")
if (isUrl(pathname)) {
verbose && enter(verbose, "Downloading URL")
url <- pathname
verbose && cat(verbose, "URL: ", url)
pathname <- downloadFile(url, verbose=less(verbose,50))
verbose && cat(verbose, "Local file: ", pathname)
verbose && exit(verbose)
}
if (is.null(type)) {
type <- typeOfRnw(pathname)
}
verbose && cat(verbose, "Type of Rnw file: ", type)
if (type == "application/x-sweave") {
pathnameR <- compileSweave(filename, path=path, ..., verbose=verbose)
} else if (type == "application/x-knitr") {
pathnameR <- compileKnitr(filename, path=path, ..., verbose=verbose)
} else if (type == "application/x-asciidoc-noweb") {
pathnameR <- compileAsciiDocNoweb(filename, path=path, ..., verbose=verbose)
} else {
throw("Unknown value of argument 'type': ", type)
}
verbose && exit(verbose)
pathnameR
})
|
source("scripts/update_data_funs.R")
convert_official_qc <- function() {
dat <- Covid19CanadaData::dl_dataset("3b93b663-4b3f-43b4-a23d-cbf6d149d2c5")
dat2 <- Covid19CanadaData::dl_dataset("b78d46c8-9a56-4b75-94c5-4ace36e014f5")
dat <- dat %>%
filter(Date != "Date inconnue") %>%
mutate(Date = as.Date(Date)) %>%
rename(
date = Date
)
dat2 <- dat2[24:nrow(dat2), 1:5] %>%
rename(
date = 1,
hosp = 2,
icu = 3,
hosp_old = 4,
samples_analyzed = 5
) %>%
mutate(date = as.Date(date, "%d/%m/%Y")) %>%
mutate(across(c(hosp, icu, hosp_old, samples_analyzed), as.integer))
qc_testing_datasets_prov <- dat %>%
filter(Regroupement == "Région" & Nom == "Ensemble du Québec") %>%
select(date, psi_cum_tes_n, psi_cum_pos_n, psi_cum_inf_n, psi_quo_pos_n,
psi_quo_inf_n, psi_quo_tes_n, psi_quo_pos_t) %>%
mutate(province = "Quebec") %>%
rename(
date = date,
cumulative_unique_people_tested = psi_cum_tes_n,
cumulative_unique_people_tested_positive = psi_cum_pos_n,
cumulative_unique_people_tested_negative = psi_cum_inf_n,
unique_people_tested_positive = psi_quo_pos_n,
unique_people_tested_negative = psi_quo_inf_n,
unique_people_tested = psi_quo_tes_n,
unique_people_tested_positivity_percent = psi_quo_pos_t
) %>%
full_join(
dat2 %>%
select(date, samples_analyzed),
by = "date"
) %>%
replace_na(list(
samples_analyzed = 0
)) %>%
mutate(unique_people_tested_positivity_percent = as.numeric(ifelse(unique_people_tested_positivity_percent == " . ", 0, unique_people_tested_positivity_percent))) %>%
arrange(date) %>%
mutate(cumulative_samples_analyzed = cumsum(samples_analyzed)) %>%
select(
date, province,
cumulative_unique_people_tested, cumulative_unique_people_tested_positive, cumulative_unique_people_tested_negative,
unique_people_tested, unique_people_tested_positive, unique_people_tested_negative, unique_people_tested_positivity_percent,
samples_analyzed, cumulative_samples_analyzed
)
convert_dates("qc_testing_datasets_prov",
date_format_out = "%d-%m-%Y")
write.csv(qc_testing_datasets_prov, "official_datasets/qc/qc_testing_datasets_prov.csv")
}
convert_phac_testing_prov <- function() {
ds <- Covid19CanadaData::dl_dataset("f7db31d0-6504-4a55-86f7-608664517bdb")
dat <- Covid19CanadaDataProcess::process_dataset(
uuid = "f7db31d0-6504-4a55-86f7-608664517bdb",
val = "testing",
fmt = "prov_ts",
testing_type = "n_tests_completed",
ds = ds
) %>%
dplyr::select(-.data$name) %>%
dplyr::rename(c("n_tests_completed" = "value")) %>%
dplyr::group_by(.data$province) %>%
dplyr::mutate(n_tests_completed_daily = c(0, diff(.data$n_tests_completed)))
write.csv(dat, "official_datasets/can/phac_n_tests_performed_timeseries_prov.csv", row.names = FALSE)
}
update_nt_subhr <- function(update_date, archive_date = NULL) {
if (!is.null(archive_date)) {
update_date <- as.Date(as.character(archive_date))
ds <- Covid19CanadaData::dl_archive("9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48",
date = as.character(update_date))[[1]]
} else {
ds <- Covid19CanadaData::dl_dataset("9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48")
}
nt_cases_subhr <- Covid19CanadaDataProcess::process_dataset(
uuid = "9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48",
val = "cases",
fmt = "subhr_cum_current_residents_nonresidents",
ds = ds
)
if (identical(nt_cases_subhr, NA)) {
Sys.sleep(15)
ds <- Covid19CanadaData::dl_dataset("9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48")
nt_cases_subhr <- Covid19CanadaDataProcess::process_dataset(
uuid = "9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48",
val = "cases",
fmt = "subhr_cum_current_residents_nonresidents",
ds = ds
)
}
if (!is.null(archive_date)) {
nt_cases_subhr$date <- update_date
}
if (identical(nt_cases_subhr, NA)) {
stop("Failed to download ds: 9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48.")
}
nt_cases_subhr_old <- Covid19CanadaETL::sheets_load(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
"cases_timeseries_subhr"
) %>% dplyr::mutate(date = as.Date(date))
nt_cases_subhr_old <- nt_cases_subhr_old %>%
dplyr::filter(date == as.Date(update_date) - 1)
nt_cases_subhr$value_daily <- nt_cases_subhr$value - as.integer(nt_cases_subhr_old$value)
if (sum(is.na(nt_cases_subhr$value)) > 0 | nrow(nt_cases_subhr) == 0) {
stop("Failed to process ds: 9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48.")
} else {
if (update_date %in% nt_cases_subhr_old$date) {
nt_cases_subhr_old <- nt_cases_subhr_old %>%
dplyr::filter(date != update_date)
sheet_write(
data = nt_cases_subhr_old,
ss = "1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
sheet = "cases_timeseries_subhr")
}
googlesheets4::sheet_append(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
nt_cases_subhr,
"cases_timeseries_subhr"
)
}
nt_active_subhr <- Covid19CanadaDataProcess::process_dataset(
uuid = "9ed0f5cd-2c45-40a1-94c9-25b0c9df8f48",
val = "active",
fmt = "subhr_current",
ds = ds
)
if (!is.null(archive_date)) {
nt_active_subhr$date <- update_date
}
nt_active_subhr_old <- Covid19CanadaETL::sheets_load(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
"active_timeseries_subhr"
) %>% dplyr::mutate(date = as.Date(date))
if (update_date %in% nt_active_subhr_old$date) {
nt_active_subhr_old <- nt_active_subhr_old %>%
dplyr::filter(date != update_date)
sheet_write(
data = nt_active_subhr_old,
ss = "1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
sheet = "active_timeseries_subhr")
}
nt_active_subhr_old <- nt_active_subhr_old %>%
dplyr::filter(date == as.Date(update_date) - 1)
nt_active_subhr$value_daily <- nt_active_subhr$value - as.integer(nt_active_subhr_old$value)
googlesheets4::sheet_append(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0",
nt_active_subhr,
"active_timeseries_subhr"
)
nt_cases_timeseries_subhr <- Covid19CanadaETL::sheets_load(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0", "cases_timeseries_subhr")
nt_active_timeseries_subhr <- Covid19CanadaETL::sheets_load(
"1RSy3qAqA4jdC4QUVTcSBogIerP7-rNic0H3L5F8_uE0", "active_timeseries_subhr")
write.csv(nt_cases_timeseries_subhr, "official_datasets/nt/nt_cases_timeseries_subhr.csv", row.names = FALSE)
write.csv(nt_active_timeseries_subhr, "official_datasets/nt/nt_active_timeseries_subhr.csv", row.names = FALSE)
}
convert_official_sk_new_hr <- function() {
dat <- Covid19CanadaData::dl_dataset("61cfdd06-7749-4ae6-9975-d8b4f10d5651")
dat <- dat %>%
mutate(Date = as.Date(Date, "%Y/%m/%d")) %>%
mutate(province = "Saskatchewan") %>%
rename(
date = Date,
health_region = Region,
cases = New.Cases,
cumulative_cases = Total.Cases,
active_cases = Active.Cases,
hosp = Inpatient.Hospitalizations,
icu = ICU.Hospitalizations,
recovered = Recovered.Cases,
cumulative_deaths = Deaths
) %>%
group_by(date, province, health_region) %>%
summarize(
cases = sum(cases, na.rm = TRUE),
across(c(cumulative_cases, active_cases, hosp,
icu, recovered, cumulative_deaths),
function(x) {
ifelse(
all(is.na(x)), 0, max(x, na.rm = TRUE)
)
}),
.groups = "drop"
) %>%
arrange(province, health_region, date)
cases_timeseries_hr <- dat %>%
rename(date_report = date) %>%
select(
province, health_region, date_report,
cases, cumulative_cases
)
cases_timeseries_prov <- cases_timeseries_hr %>%
select(-health_region) %>%
group_by(province, date_report) %>%
summarize(
cases = sum(cases),
cumulative_cases = sum(cumulative_cases),
.groups = "drop"
)
mortality_timeseries_hr <- dat %>%
rename(date_death_report = date) %>%
group_by(province, health_region) %>%
mutate(deaths = c(0, diff(cumulative_deaths))) %>%
ungroup %>%
select(
province, health_region, date_death_report,
deaths, cumulative_deaths
)
mortality_timeseries_prov <- mortality_timeseries_hr %>%
select(-health_region) %>%
group_by(province, date_death_report) %>%
summarize(
deaths = sum(deaths),
cumulative_deaths = sum(cumulative_deaths),
.groups = "drop"
)
convert_dates("cases_timeseries_hr", "cases_timeseries_prov",
"mortality_timeseries_hr", "mortality_timeseries_prov",
date_format_out = "%d-%m-%Y")
write.csv(cases_timeseries_hr, "official_datasets/sk/timeseries_hr/sk_new_cases_timeseries_hr.csv", row.names = FALSE)
write.csv(cases_timeseries_prov, "official_datasets/sk/timeseries_prov/sk_new_cases_timeseries_prov.csv", row.names = FALSE)
write.csv(mortality_timeseries_hr, "official_datasets/sk/timeseries_hr/sk_new_mortality_timeseries_hr.csv", row.names = FALSE)
write.csv(mortality_timeseries_prov, "official_datasets/sk/timeseries_prov/sk_new_mortality_timeseries_prov.csv", row.names = FALSE)
}
combine_ccodwg_official_sk_new_hr <- function(stat = c("cases", "mortality"), loc = c("prov", "hr")) {
match.arg(stat,
choices = c("cases", "mortality"),
several.ok = FALSE)
match.arg(loc,
choices = c("prov", "hr"),
several.ok = FALSE)
switch(
paste(stat, loc),
"cases prov" = {path_ccodwg <- "timeseries_prov/cases_timeseries_prov.csv"},
"cases hr" = {path_ccodwg <- "timeseries_hr/cases_timeseries_hr.csv"},
"mortality prov" = {path_ccodwg <- "timeseries_prov/mortality_timeseries_prov.csv"},
"mortality hr" = {path_ccodwg <- "timeseries_hr/mortality_timeseries_hr.csv"}
)
dat_ccodwg <- read.csv(path_ccodwg,
stringsAsFactors = FALSE)
switch(
paste(stat, loc),
"cases prov" = {path_official <- "official_datasets/sk/timeseries_prov/sk_new_cases_timeseries_prov.csv"; var_date <- "date_report"},
"cases hr" = {path_official <- "official_datasets/sk/timeseries_hr/sk_new_cases_timeseries_hr.csv"; var_date <- "date_report"},
"mortality prov" = {path_official <- "official_datasets/sk/timeseries_prov/sk_new_mortality_timeseries_prov.csv"; var_date <- "date_death_report"},
"mortality hr" = {path_official <- "official_datasets/sk/timeseries_hr/sk_new_mortality_timeseries_hr.csv"; var_date <- "date_death_report"}
)
dat_official <- read.csv(path_official,
stringsAsFactors = FALSE)
convert_dates("dat_ccodwg", "dat_official",
date_format_out = "%Y-%m-%d")
date_official_min <- min(dat_official[, var_date])
dat_combined <- bind_rows(
dat_ccodwg %>%
filter(province != "Saskatchewan"),
dat_official
) %>%
filter(!!sym(var_date) >= date_official_min)
if (loc == "prov") {
dat_combined <- dat_combined %>%
arrange(province, !!sym(var_date))
} else if (loc == "hr") {
dat_combined <- dat_combined %>%
arrange(province, health_region, !!sym(var_date))
}
convert_dates("dat_combined",
date_format_out = "%d-%m-%Y")
out_name <- paste0("timeseries_hr_sk_new/sk_new_", stat, "_timeseries_", loc, "_combined.csv")
write.csv(dat_combined, out_name, row.names = FALSE)
}
|
person_formats_female_fr_ch = c(
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names}}',
'{{first_names_female}} {{last_names1}}-{{last_names2}}'
)
person_formats_male_fr_ch = c(
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names}}',
'{{first_names_male}} {{last_names1}}-{{last_names2}}'
)
person_formats_fr_ch = c(person_formats_male_fr_ch, person_formats_female_fr_ch)
person_first_names_male_fr_ch = c(
"Alain",
"Albert",
"Alexandre",
"Andr\u00e9",
"Antonio",
"Arthur",
"Bernard",
"Bruno",
"Charles",
"Christian",
"Christophe",
"Claude",
"Daniel",
"David",
"Eric",
"Ethan",
"Florian",
"Fran\u00e7ois",
"Fr\u00e9d\u00e9ric",
"Gabriel",
"Georges",
"Gilbert",
"Guillaume",
"G\u00e9rard",
"Henri",
"Hugo",
"Jacques",
"Jean",
"Jean-Claude",
"Jean-Pierre",
"Jonathan",
"Jos\u00e9",
"Julien",
"Kevin",
"Laurent",
"Louis",
"Lo\u00efc",
"Luca",
"Lucas",
"L\u00e9o",
"Manuel",
"Marcel",
"Mathieu",
"Matteo",
"Maurice",
"Maxime",
"Michael",
"Michel",
"Nathan",
"Nicolas",
"Noah",
"Nolan",
"Olivier",
"Pascal",
"Patrick",
"Paul",
"Philippe",
"Pierre",
"Raymond",
"Ren\u00e9",
"Robert",
"Roger",
"Roland",
"Romain",
"Samuel",
"St\u00e9phane",
"S\u00e9bastien",
"Thierry",
"Thomas",
"Th\u00e9o",
"Vincent"
)
person_first_names_female_fr_ch = c(
"Alice",
"Alicia",
"Ana",
"Anna",
"Anne",
"Aur\u00e9lie",
"Camille",
"Caroline",
"Catherine",
"Chantal",
"Charlotte",
"Chlo\u00e9",
"Christiane",
"Christine",
"Clara",
"Claudine",
"Corinne",
"C\u00e9line",
"Danielle",
"Denise",
"Eliane",
"Elisa",
"Elisabeth",
"Elodie",
"Emilie",
"Emma",
"Eva",
"Fabienne",
"Fran\u00e7oise",
"Georgette",
"Germaine",
"H\u00e9l\u00e8ne",
"Isabelle",
"Jacqueline",
"Jeanne",
"Jessica",
"Josiane",
"Julie",
"Laetitia",
"Lara",
"Laura",
"Laurence",
"Liliane",
"Lisa",
"Lucie",
"L\u00e9a",
"Madeleine",
"Manon",
"Marcelle",
"Marguerite",
"Maria",
"Marianne",
"Marie",
"Mathilde",
"Monique",
"M\u00e9lanie",
"Nathalie",
"Nelly",
"Nicole",
"Odette",
"Patricia",
"Sandra",
"Sandrine",
"Sara",
"Sarah",
"Simone",
"Sophie",
"St\u00e9phanie",
"Suzanne",
"Sylvie",
"Th\u00e9r\u00e8se",
"Val\u00e9rie",
"Vanessa",
"V\u00e9ronique",
"Yvette",
"Yvonne",
"Zo\u00e9"
)
person_first_names_fr_ch = c(person_first_names_male_fr_ch, person_first_names_female_fr_ch)
person_last_names_fr_ch = c(
"Aebi",
"Aeby",
"Alber",
"Babey",
"Badan",
"Badel",
"Bahon",
"Balmat",
"Barbey",
"Barillon",
"Barman",
"Bavaud",
"Beguin",
"Berberat",
"Bernasconi",
"Besan\u00e7on",
"Besen\u00e7on",
"Besse",
"Beuchat",
"Beuret",
"Beurret",
"Blanc",
"Bochud",
"Boechat",
"Boichat",
"Boillat",
"Bonvin",
"Bonvini",
"Botteron",
"Bourquard",
"Bourquin",
"Bouvier",
"Bovet",
"Brahier",
"Brandt",
"Broquet",
"Bugnon",
"Bujard",
"B\u00e9guelin",
"Candaux",
"Carraud",
"Carraux",
"Carron",
"Cattin",
"Chappuis",
"Chapuis",
"Charpi\u00e9",
"Chatriand",
"Chatriant",
"Chaudet",
"Chenaux",
"Chevalley",
"Chevrolet",
"Chopard",
"Coigny",
"Comman",
"Comment",
"Comte",
"Conrad",
"Corbat",
"Corboz",
"Cornut",
"Cornuz",
"Corpataux",
"Cosandey",
"Cosendey",
"Cossy",
"Courvoisier",
"Cousin",
"Cretton",
"Crevoisier",
"Crivelli",
"Curdy",
"de Dardel",
"Delado\u00eby",
"Del\u00e8ze",
"Deshusses",
"Diesbach",
"Droz",
"Dubey",
"Duroux",
"Duvanel",
"D\u00e9l\u00e8ze",
"Ev\u00e9quoz",
"Fonjallaz",
"Francillon",
"Galland",
"Georges",
"Gilli\u00e8ron",
"Gilli\u00e9ron",
"Godet",
"Grand",
"Grojean",
"Grosjean",
"Gub\u00e9ran",
"Humbert",
"Isella",
"Jacot-Descombes",
"Jacot-Guillarmod",
"Joly",
"Jomini",
"Joye",
"Julliard",
"Maire",
"Marti",
"Martin",
"Marty",
"Masseron",
"Matile",
"Mayor",
"Menthonnex",
"Mercier",
"Meyer",
"Monnard",
"Monnet",
"Monnet",
"Monney",
"Montandon",
"Morand",
"Morard",
"Mottet",
"Mottiez",
"Muriset",
"Musy",
"M\u00fcller",
"Niquille",
"Nussl\u00e9",
"N\u00fcsslin",
"Paccot",
"Pachoud",
"Paschoud",
"Pasquier",
"Peitrequin",
"Pellet",
"Piccand",
"Polla",
"Privet",
"Quartier",
"Rapin",
"Rappaz",
"Rapraz",
"Rey",
"Robadey",
"Robert",
"Romanens",
"Rosselat",
"Rosselet",
"Rossellat",
"Sandoz",
"Sansonnens",
"Saudan",
"Thorens",
"Th\u00e9raulaz",
"Tinguely",
"Treboux",
"Uldry",
"Vall\u00e9lian",
"Vermeil",
"Vienne",
"Vonlanthen",
"Vuille",
"Wicht"
)
person_fr_ch <- list(
first_names = person_first_names_fr_ch,
first_names_male = person_first_names_male_fr_ch,
first_names_female = person_first_names_female_fr_ch,
last_names = person_last_names_fr_ch
)
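# Sampling sketch built on the tables above, mirroring the formats at the top
# of this file:
if (FALSE) {
  set.seed(1)
  paste(sample(person_fr_ch$first_names_female, 1),
        sample(person_fr_ch$last_names, 1))
}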
|
rm(list=ls())
library(crs)
set.seed(42)
n <- 1000
x <- runif(n)
neval <- 100
c <- 3
z <- as.integer(cut(runif(n),breaks=qunif(seq(0,1,length=c+1))))-1
dgp <- cos(4*pi*x)+z
dgp <- dgp/sd(dgp)
y <- dgp + rnorm(n,sd=.25)
z <- factor(z)
model <- crs(y ~ x + z)
data.eval <- expand.grid(x = seq(min(x), max(x), length = neval),
z = levels(z))
library(ggplot2)
data.eval$y <- predict(model, newdata=data.eval)
qplot(x, y, colour=z) + geom_line(data=data.eval)
data.eval$ucl <- attr(data.eval$y,"upr")
data.eval$lcl <- attr(data.eval$y,"lwr")
qplot(x, y, colour=z) + geom_smooth(aes(ymin = lcl, ymax = ucl), data=data.eval, stat="identity")
|
setIs("Exp", "Gammad",
coerce = function(obj){new("Gammad", shape = 1, scale = 1/rate(obj))},
replace = function(obj, value)
{new("Gammad", shape = value@shape, scale = value@scale)}
)
setIs("Exp", "Weibull",
coerce = function(obj) {new("Weibull", shape = 1, scale = 1/rate(obj))},
replace = function(obj, value)
{new("Weibull", shape = value@shape,
scale = value@scale)}
)
setIs("Chisq", "Gammad", test = function(obj) isTRUE(all.equal(ncp(obj), 0)),
coerce = function(obj) {new("Gammad", shape = df(obj)/2, scale = 2)},
replace = function(obj, value)
{new("Gammad", shape = value@shape, scale = value@scale)}
)
setIs("Cauchy", "Td", test = function(obj)
{isTRUE(all.equal(location(obj),0)) &&
isTRUE(all.equal(scale(obj),1))},
coerce = function(obj) {new("Td")},
replace = function(obj, value)
{new("Td", df = value@df, ncp = value@ncp)}
)
setIs("Unif", "Beta", test = function(obj)
{isTRUE(all.equal(Min(obj),0)) &&
isTRUE(all.equal(Max(obj),1))},
coerce = function(obj) {new("Beta", shape1 = 1, shape2 = 1)},
replace = function(obj, value) {new("Beta", shape1 = value@shape1,
shape2 = value@shape2, ncp = value@ncp)}
)
setAs("DiscreteDistribution", "LatticeDistribution",
function(from){
if(!.is.vector.lattice(from@support))
return(from)
else{ to <- new("LatticeDistribution")
slotNames <- slotNames(from)
lst <- sapply(slotNames, function(x) slot(from,x))
names(lst) <- slotNames
lst$lattice <- .make.lattice.es.vector(from@support)
for (i in 1: length(lst))
slot(to, name = names(lst)[i]) <- lst[[i]]
return(to)}
})
setAs("AffLinDiscreteDistribution", "LatticeDistribution",
function(from){
if(!.is.vector.lattice(from@support))
return(from)
else{ to <- new("AffLinLatticeDistribution")
slotNames <- slotNames(from)
lst <- sapply(slotNames, function(x) slot(from,x))
names(lst) <- slotNames
lst$lattice <- .make.lattice.es.vector(from@support)
for (i in 1: length(lst))
slot(to, name = names(lst)[i]) <- lst[[i]]
return(to)}
})
|
if (electricShine::get_os() != "unix") {
context("test-long_running_tests")
tmp <- file.path(tempdir(), "space path")
dir.create(tmp)
tmp <- file.path(tempdir(), "space path", "build_git_install")
dir.create(tmp)
tmp <- normalizePath(tmp, "/")
repo <- system.file("demoApp", package = "electricShine")
repos <- "https://cran.r-project.org/"
installed_r <- electricShine::install_r(cran_like_url = "https://cran.r-project.org",
app_root_path = tmp,
mac_url = "https://mac.r-project.org/el-capitan/R-devel/R-devel-el-capitan-sa-x86_64.tar.gz",
permission_to_install = TRUE)
test_that("install_r works", {+
testthat::skip_on_os("linux")
expect_identical(basename(installed_r),
"bin")
expect_true(any(file.exists(installed_r, pattern = "Rscript")))
})
temp <- file.path(tempdir(),
"space path",
"deletemetesting")
dir.create(temp)
temp <- normalizePath(temp, "/")
nodejs_version <- "10.16.0"
getnode <- electricShine::install_nodejs(node_url = "https://nodejs.org/dist/",
nodejs_path = temp,
force_install = FALSE,
nodejs_version = nodejs_version,
permission_to_install = TRUE)
test_that(".check_node_works provides message", {
testthat::skip_on_os("linux")
expect_message(electricShine:::.check_node_works(node_top_dir = getnode,
expected_version = nodejs_version))
})
test_that(".check_npm_works provides message", {
testthat::skip_on_os("linux")
expect_message(electricShine:::.check_npm_works(node_top_dir = getnode))
})
node_exists <- electricShine:::.check_node_works(node_top_dir = tempdir(),
expected_version = nodejs_version)
npm_exists <- electricShine:::.check_npm_works(node_top_dir = tempdir())
test_that(".check_node_works gives false ", {
testthat::skip_on_os("linux")
expect_false(node_exists)
})
test_that(".check_npm_works gives false", {
testthat::skip_on_os("linux")
expect_false(npm_exists)
})
node_exists <- electricShine:::.check_node_works(node_top_dir = getnode,
expected_version = nodejs_version)
npm_exists <- electricShine:::.check_npm_works(node_top_dir = getnode)
test_that(".check_node_works ", {
testthat::skip_on_os("linux")
expect_true(file.exists(node_exists))
expect_equal(tools::file_path_sans_ext(basename(node_exists)),
"node")
})
test_that(".check_npm_works ", {
testthat::skip_on_os("linux")
expect_true(file.exists(npm_exists))
expect_equal(tools::file_path_sans_ext(basename(npm_exists)),
"npm")
})
}
|
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
library(tilemaps)
library(sf)
library(dplyr)
library(ggplot2)
governors <- governors %>%
mutate(tile_map = generate_map(geometry, square = FALSE, flat_topped = TRUE))
ggplot(governors) +
geom_sf(aes(geometry = tile_map)) +
geom_sf_text(aes(geometry = tile_map, label = abbreviation),
fun.geometry = function(x) st_centroid(x)) +
theme_void()
all_states <- governors %>%
add_row(abbreviation = "AK", party = "Republican",
tile_map = create_island(governors$tile_map, "lower left")) %>%
add_row(abbreviation = "HI", party = "Democrat",
tile_map = create_island(governors$tile_map, c(-12050000, 3008338)))
ggplot(all_states) +
geom_sf(aes(geometry = tile_map)) +
geom_sf_text(aes(geometry = tile_map, label = abbreviation),
fun.geometry = function(x) st_centroid(x)) +
theme_void()
all_states <- all_states %>%
mutate(party = factor(party, c("Republican", "Democrat")))
ggplot(all_states) +
geom_sf(aes(geometry = tile_map, fill = party)) +
geom_sf_text(aes(geometry = tile_map, label = abbreviation),
fun.geometry = function(x) st_centroid(x)) +
scale_fill_brewer(palette = "Set1") +
ggtitle("Party Affiliation of United States Governors (2020)") +
theme_void()
us_maps <- many_maps(governors$geometry, governors$abbreviation,
prop = c(0, 0.1), interpolate = c(0.5, 1),
smoothness = c(0, 20), shift = list(c(0,0), c(0,0.5)))
governors <- governors %>%
mutate(square_map = us_maps$map[[1]])
ggplot(governors) +
geom_sf(aes(geometry = square_map)) +
geom_sf_text(aes(geometry = square_map, label = abbreviation)) +
theme_void()
neighbors <- st_touches(governors$geometry)
crs <- st_crs(governors$geometry)
R <- length(governors$geometry)
A <- sum(st_area(governors$geometry))
s <- as.numeric(sqrt(A/R))
centroids <- tilemaps:::transform_centroids(governors$geometry, neighbors, crs, s, prop = 0.1)
interpolated_centroids <- tilemaps:::interpolate_centroids(centroids$noisy_centroids,
centroids$transformed_centroids,
crs, interpolate = 0.75)
centroids_df <- data.frame(st_coordinates(c(centroids$noisy_centroids,
centroids$transformed_centroids,
interpolated_centroids)))
centroids_df <- centroids_df %>%
mutate(centroids = c(rep("noisy", nrow(governors)),
rep("fully-transformed", nrow(governors)),
rep("interpolated", nrow(governors))),
region = rep(governors$abbreviation, 3))
centroids_df$centroids <- factor(centroids_df$centroids, c("noisy", "interpolated",
"fully-transformed"))
ggplot(governors) +
geom_sf(aes(geometry = geometry)) +
geom_point(data = centroids_df, aes(X, Y, color = centroids)) +
geom_line(data = centroids_df, aes(X,Y, group = region)) +
scale_color_brewer(palette = "YlGnBu") +
theme_void()
transformed_boundary <- tilemaps:::transform_boundary(governors$geometry, centroids$noisy_centroids,
interpolated_centroids)
smoothed_boundary <- smoothr::smooth(transformed_boundary, method = "ksmooth",
smoothness = 20)
transformed_coords <- data.frame(st_coordinates(st_boundary(transformed_boundary)))
smoothed_coords <- data.frame(st_coordinates(st_boundary(smoothed_boundary)))
original_coords <- data.frame(st_coordinates(st_boundary(st_union(governors$geometry))))
legend_order <- c("original", "transformed", "smoothed")
ggplot() +
geom_path(data = original_coords, aes(X,Y, group = L1, color = "original")) +
geom_path(data = transformed_coords, aes(X,Y, group = L1, color = "transformed")) +
geom_path(data = smoothed_coords, aes(X,Y, group = L1, color = "smoothed")) +
theme_void() +
scale_color_discrete(name = "boundary", breaks = legend_order)
tiles <- tilemaps:::fit_tiles(smoothed_boundary, nrow(governors),
s, shift = c(0.5,0.5))
permutation <- tilemaps:::assign_regions(interpolated_centroids, sf::st_centroid(tiles))
final_map <- tiles[order(permutation)]
governors <- governors %>%
mutate(map = final_map)
ggplot(governors) +
geom_sf(aes(geometry = map)) +
geom_sf_text(aes(geometry = map, label = abbreviation)) +
theme_void()
|
test_that("works as expected", {
expect_equal(
ddiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2019' ) ),
31
)
expect_equal(
ddiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2019' ), unit = 'month' ),
1
)
expect_equal(
mdiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2019' ) ),
1
)
expect_equal(
ddiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ), unit = 'month' ),
13
)
expect_equal(
mdiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ) ),
13
)
expect_equal(
ddiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ), unit = 'year' ),
1
)
expect_equal(
ydiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ) ),
1
)
expect_equal(
ddiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ), unit = 'quarter' ),
4
)
expect_equal(
qdiff( lubridate::mdy( '1/1/2019' ), lubridate::mdy( '2/1/2020' ) ),
4
)
})
|
orwg <- function(object, ...) UseMethod("orwg")
orwg.table <- function(object, ...)
{
stopifnot(length(dim(object)) == 3)
z <- apply(object, 3, function(x)
c(between= sum(x) - sum(diag(x)),
within=sum(diag(x)))
)
offdiag <- z[ col(z) != row(z) ]
prod(diag(z)) / prod(offdiag)
}
orwg.igraph <- function(object, vattr, ...)
{
m <- mixingm(object, rattr=vattr, full=TRUE)
orwg(m, ...)
}
orwg.default <- function(object, ...)
{
orwg.table( as.table(object), ... )
}
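## Illustrative usage (not part of the original source): orwg() expects a
## three-dimensional mixing table -- group x group x tie indicator -- as
## returned by mixingm(..., full = TRUE). The counts below are made up.
toy <- array(c(40, 25, 25, 60,   # layer 1: ties absent, by group pair
               12,  3,  3, 20),  # layer 2: ties present, by group pair
             dim = c(2, 2, 2))
orwg(toy)  # odds ratio of a within-group tie relative to a between-group tie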
|
context("AA class behaviour")
test_that("Construction of aa vector works", {
test <- aa("ACGT", "RWSAG", "QQQ-")
expect_s3_class(test, "bioseq_aa")
expect_type(test, "character")
expect_length(test, 3)
})
test_that("Non IUPAC character are changed to X", {
test <- suppressWarnings(aa("AcgT", "RWSAG", "QQQ", "K@"))
expect_equal(as.character(test[1]), "ACGT")
expect_equal(as.character(test[2]), "RWSAG")
expect_equal(as.character(test[3]), "QQQ")
expect_equal(as.character(test[4]), "KX")
})
test_that("Lower case changed to upper case", {
test <- aa("AcgT", "RWSAG", "QqQ")
expect_equal(as.character(test[1]), "ACGT")
expect_equal(as.character(test[2]), "RWSAG")
expect_equal(as.character(test[3]), "QQQ")
})
test_that("Names are correctly returned", {
test <- aa("AcgT", "RWSAG", "QqQ")
expect_null(names(test))
test <- aa(A = "AcgT", B = "RWSAG", C = "QqQ")
expect_equal(names(test), c("A", "B", "C"))
})
|
test_that('REQUIRE TEST poissonbayes Monte Carlo', {
z <- zpoissonbayes$new()
test.poissonbayes <- z$mcunit(minx=1, nsim = 2000, ci=0.99, plot = FALSE)
expect_true(test.poissonbayes)
})
|
context("monoisotopicPeaks")
p <- createMassPeaks(mass=995:1005,
intensity=c(100, 10, 30, 10, 40, 550, 330, 110, 10, 5, 15))
m <- createMassPeaks(mass=1000, intensity=550)
test_that("monoisotopicPeaks", {
expect_equal(monoisotopicPeaks(p), m)
})
test_that("detectPeaks works with list of MassPeaks objects", {
expect_error(monoisotopicPeaks(list(x=1, y=1)),
"no list of MALDIquant::MassPeaks objects")
  expect_error(monoisotopicPeaks(list(createMassSpectrum(1, 1),
                                      createMassSpectrum(1, 1))),
               "no list of MALDIquant::MassPeaks objects")
expect_equal(monoisotopicPeaks(list(p, p)), list(m, m))
})
|
"diamonds"
"economics"
"economics_long"
"midwest"
"mpg"
"msleep"
"presidential"
"seals"
"faithfuld"
"luv_colours"
"txhousing"
|
require("emmeans")
require("ggplot2")
options(show.signif.stars = FALSE)
knitr::opts_chunk$set(fig.width = 4.5, class.output = "ro")
nutr.lm <- lm(gain ~ (age + group + race)^2, data = nutrition)
car::Anova(nutr.lm)
emmeans(nutr.lm, ~ group * race, calc = c(n = ".wgt."))
with(nutrition, table(race, age))
summary(emmeans(nutr.lm, pairwise ~ group | race, at = list(age = "3")),
by = NULL)
framing <- mediation::framing
levels(framing$educ) <- c("NA","Ref","< HS", "HS", "> HS","Coll +")
framing.glm <- glm(cong_mesg ~ age + income + educ + emo + gender * factor(treat),
family = binomial, data = framing)
emmip(framing.glm, treat ~ educ | gender, type = "response")
emmip(framing.glm, treat ~ educ | gender, type = "response",
cov.reduce = emo ~ treat*gender + age + educ + income)
sapply(c("equal", "prop", "outer", "cells", "flat"), function(w)
predict(emmeans(nutr.lm, ~ race, weights = w)))
mtcars.lm <- lm(mpg ~ factor(cyl)*am + disp + hp + drat + log(wt) + vs +
factor(gear) + factor(carb), data = mtcars)
rg.usual <- ref_grid(mtcars.lm)
rg.usual
nrow(rg.usual@linfct)
rg.nuis = ref_grid(mtcars.lm, non.nuisance = "cyl")
rg.nuis
nrow(rg.nuis@linfct)
emmeans(rg.usual, ~ cyl * am)
emmeans(rg.nuis, ~ cyl * am)
predict(emmeans(mtcars.lm, ~ cyl * am, non.nuis = c("cyl", "am"),
wt.nuis = "prop"))
predict(emmeans(mtcars.lm, ~ cyl * am, weights = "outer"))
emmeans(mtcars.lm, ~ gear | am, non.nuis = quote(all.vars(specs)))
ref_grid(mtcars.lm, rg.limit = 200)
summary(emmeans(nutr.lm, pairwise ~ group | race, submodel = ~ age + group*race),
by = NULL)
emmeans(nutr.lm, ~ group * race, submodel = "minimal")
joint_tests(nutr.lm, submodel = "type2")
cows <- data.frame (
route = factor(rep(c("injection", "oral"), c(5, 9))),
drug = factor(rep(c("Bovineumab", "Charloisazepam",
"Angustatin", "Herefordmycin", "Mollycoddle"), c(3,2, 4,2,3))),
resp = c(34, 35, 34, 44, 43, 36, 33, 36, 32, 26, 25, 25, 24, 24)
)
cows.lm <- lm(resp ~ route + drug, data = cows)
cows.rg <- ref_grid(cows.lm)
cows.rg
route.emm <- emmeans(cows.rg, "route")
route.emm
drug.emm <- emmeans(cows.rg, "drug")
drug.emm
pairs(route.emm, reverse = TRUE)
pairs(drug.emm, by = "route", reverse = TRUE)
emmip(cows.rg, ~ drug | route)
require(ggplot2)
emmip(cows.rg, ~ drug) + facet_wrap(~ route, scales = "free_x")
plot(drug.emm, PIs = TRUE) +
facet_wrap(~ route, nrow = 2, scales = "free_y")
|
gmm_generate <- function(input_model,
samples,
seed=NA,
verbose=FALSE) {
IO_RestoreSettings("GMM Sample Generator")
IO_SetParamGMMPtr("input_model", input_model)
IO_SetParamInt("samples", samples)
if (!identical(seed, NA)) {
IO_SetParamInt("seed", seed)
}
if (verbose) {
IO_EnableVerbose()
} else {
IO_DisableVerbose()
}
IO_SetPassed("output")
gmm_generate_mlpackMain()
out <- list(
"output" = IO_GetParamMat("output")
)
IO_ClearSettings()
return(out)
}
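## Hypothetical usage sketch (assumes the mlpack bindings are attached and a
## mixture model was fitted beforehand, e.g. with gmm_train(); "X" is a
## placeholder data matrix, not defined here):
# model <- gmm_train(input = X, gaussians = 2)$output_model
# draws <- gmm_generate(input_model = model, samples = 100, seed = 1)$output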
|
mc.crisk2.bart <- function(
x.train = matrix(0,0,0), y.train=NULL,
x.train2 = x.train, y.train2=NULL,
times=NULL, delta=NULL, K=NULL,
x.test = matrix(0,0,0), x.test2 = x.test,
sparse=FALSE, theta=0, omega=1,
a=0.5, b=1, augment=FALSE, rho=NULL, rho2=NULL,
xinfo=matrix(0,0,0), xinfo2=matrix(0,0,0), usequants=FALSE,
rm.const=TRUE, type='pbart',
ntype=as.integer(
factor(type, levels=c('wbart', 'pbart', 'lbart'))),
k = 2,
power = 2, base = 0.95,
offset = NULL, offset2 = NULL,
tau.num=c(NA, 3, 6)[ntype],
ntree = 50L, numcut = 100L,
ndpost = 1000L, nskip = 250L,
keepevery = 10L,
printevery=100L,
id=NULL,
seed = 99L, mc.cores = 2L, nice=19L
)
{
if(.Platform$OS.type!='unix')
stop('parallel::mcparallel/mccollect do not exist on windows')
RNGkind("L'Ecuyer-CMRG")
set.seed(seed)
parallel::mc.reset.stream()
if(is.na(ntype) || ntype==1)
stop("type argument must be set to either 'pbart' or 'lbart'")
x.train2 <- bartModelMatrix(x.train2)
x.test2 <- bartModelMatrix(x.test2)
x.train <- bartModelMatrix(x.train)
x.test <- bartModelMatrix(x.test)
if(length(y.train)==0) {
pre <- surv.pre.bart(times, delta, x.train, x.test, K=K)
pre2 <- surv.pre.bart(times, delta, x.train2, x.test2, K=K)
y.train <- pre$y.train
x.train <- pre$tx.train
x.test <- pre$tx.test
y.train2 <- 1*(y.train[y.train>0]==1)
x.train2 <- cbind(pre2$tx.train[y.train>0, ])
x.test2 <- pre2$tx.test
y.train <- 1*(y.train>0)
times <- pre$times
K <- pre$K
}
else {
if(length(x.train)==0 | length(x.train2)==0)
stop('both x.train and x.train2 must be provided')
times <- unique(sort(x.train[ , 1]))
K <- length(times)
}
H <- 1
Mx <- 2^31-1
Nx <- 2*max(nrow(x.train), nrow(x.test))
if(Nx>Mx%/%ndpost) {
H <- ceiling(ndpost / (Mx %/% Nx))
ndpost <- ndpost %/% H
}
mc.cores.detected <- detectCores()
if(mc.cores>mc.cores.detected) {
message('The number of cores requested, ', mc.cores,
',\n exceeds the number of cores detected via detectCores() ',
'reducing to ', mc.cores.detected)
mc.cores <- mc.cores.detected
}
mc.ndpost <- ceiling(ndpost/mc.cores)
post.list <- list()
for(h in 1:H) {
for(i in 1:mc.cores) {
parallel::mcparallel({psnice(value=nice);
crisk2.bart(x.train=x.train, y.train=y.train,
x.train2=x.train2, y.train2=y.train2,
x.test=x.test, x.test2=x.test2,
sparse=sparse, theta=theta, omega=omega,
a=a, b=b, augment=augment,
rho=rho, rho2=rho2,
xinfo=xinfo, xinfo2=xinfo2, usequants=usequants,
rm.const=rm.const, type=type,
k=k, power=power, base=base,
offset=offset, offset2=offset2, tau.num=tau.num,
ntree=ntree, numcut=numcut,
ndpost=mc.ndpost, nskip=nskip,
keepevery = keepevery,
printevery=printevery)},
silent=(i!=1))
}
post.list[[h]] <- parallel::mccollect()
}
if((H==1 & mc.cores==1) |
attr(post.list[[1]][[1]], 'class')!='crisk2bart')
return(post.list[[1]][[1]])
else {
for(h in 1:H) for(i in mc.cores:1) {
if(h==1 & i==mc.cores) {
post <- post.list[[1]][[mc.cores]]
post$ndpost <- H*mc.cores*mc.ndpost
p <- ncol(x.train[ , post$rm.const])
old.text <- paste0(as.character(mc.ndpost), ' ',
as.character(ntree), ' ', as.character(p))
old.stop <- nchar(old.text)
post$treedraws$trees <- sub(old.text,
paste0(as.character(post$ndpost),
' ', as.character(ntree),
' ', as.character(p)),
post$treedraws$trees)
p <- ncol(x.train2[ , post$rm.const2])
old.text <- paste0(as.character(mc.ndpost), ' ',
as.character(ntree), ' ', as.character(p))
old.stop2 <- nchar(old.text)
post$treedraws2$trees <- sub(old.text,
paste0(as.character(post$ndpost),
' ', as.character(ntree),
' ', as.character(p)),
post$treedraws2$trees)
}
else {
if(length(x.test)>0) {
post$yhat.test <- rbind(post$yhat.test,
post.list[[h]][[i]]$yhat.test)
post$yhat.test2 <- rbind(post$yhat.test2,
post.list[[h]][[i]]$yhat.test2)
post$prob.test <- rbind(post$prob.test,
post.list[[h]][[i]]$prob.test)
post$prob.test2 <- rbind(post$prob.test2,
post.list[[h]][[i]]$prob.test2)
post$cif.test <- rbind(post$cif.test,
post.list[[h]][[i]]$cif.test)
post$cif.test2 <- rbind(post$cif.test2,
post.list[[h]][[i]]$cif.test2)
post$surv.test <- rbind(post$surv.test,
post.list[[h]][[i]]$surv.test)
}
post$varcount <- rbind(post$varcount,
post.list[[h]][[i]]$varcount)
post$varcount2 <- rbind(post$varcount2,
post.list[[h]][[i]]$varcount2)
post$varprob <- rbind(post$varprob,
post.list[[h]][[i]]$varprob)
post$varprob2 <- rbind(post$varprob2,
post.list[[h]][[i]]$varprob2)
post$treedraws$trees <- paste0(post$treedraws$trees,
substr(post.list[[h]][[i]]$treedraws$trees, old.stop+2,
nchar(post.list[[h]][[i]]$treedraws$trees)))
post$treedraws2$trees <- paste0(post$treedraws2$trees,
substr(post.list[[h]][[i]]$treedraws2$trees, old.stop2+2,
nchar(post.list[[h]][[i]]$treedraws2$trees)))
}
post.list[[h]][[i]] <- NULL
}
if(length(x.test)>0) {
post$prob.test.mean <- apply(post$prob.test, 2, mean)
post$prob.test2.mean <- apply(post$prob.test2, 2, mean)
post$cif.test.mean <- apply(post$cif.test, 2, mean)
post$cif.test2.mean <- apply(post$cif.test2, 2, mean)
post$surv.test.mean <- apply(post$surv.test, 2, mean)
}
post$varcount.mean <- apply(post$varcount, 2, mean)
post$varcount2.mean <- apply(post$varcount2, 2, mean)
post$varprob.mean <- apply(post$varprob, 2, mean)
post$varprob2.mean <- apply(post$varprob2, 2, mean)
attr(post, 'class') <- 'crisk2bart'
return(post)
}
}
|
LKrigSetupLattice <- function(object, ...){
UseMethod("LKrigSetupLattice")
}
LKrigSetupLattice.default<- function( object,...){
stop("LKGeometry needs to be specified, e.g. LKRectangle")
}
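## Minimal sketch (illustrative, not from the package): geometry-specific
## behaviour is supplied by defining an S3 method for the geometry class,
## which UseMethod() above dispatches to. "LKToy" is a made-up class name.
LKrigSetupLattice.LKToy <- function(object, ...) {
  list(nodes = seq(object$a, object$b, length.out = object$n))
}
toy <- structure(list(a = 0, b = 1, n = 5), class = "LKToy")
LKrigSetupLattice(toy)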
|
CreateMap <-
function(xy1.1,xy2.1,
plotgrid = F,
costfn = Cost.area,
nondecreasingos=F,
verbose=F,
insertopposites=T)
{
impliedpoints <- InsertIntersections(xy1.1,xy2.1,insertopposites=insertopposites)
xy1 <- impliedpoints[[1]]
xy2 <- impliedpoints[[2]]
l1 <- nrow(xy1)
l2 <- nrow(xy2)
l1.b <- 2*l1-1
l2.b <- 2*l2-1
l1keya <-c(rep(1:(l1-1),each=2),l1)
l1keyb <-c(1,rep(2:(l1),each=2))
l2keya <-c(rep(1:(l2-1),each=2),l2)
l2keyb <-c(1,rep(2:(l2),each=2))
if(verbose) print("Computing matches")
matches <- matrix(0,l1.b,l2.b)
for(i in 1:l1.b)
{
if(verbose) print(paste(i,"of", l1.b,l2.b))
for(j in 1:l2.b)
{
if(odd(i) & even(j))
{
intprop <- IntersectPoint(unlist(xy2[l2keya[j],]),unlist(xy2[l2keyb[j],]),unlist(xy1[l1keya[i],]))
if(intprop<0)
{
matches[i,j]<- 0
}else if (intprop>1)
{
matches[i,j]<-1
}
else
{
matches[i,j]<- intprop
}
if(i>1 & nondecreasingos)
{
matches[i,j] <- max(matches[i,j],matches[i-2,j])
}
}
if(even(i) & odd(j))
{
intprop <- IntersectPoint(unlist(xy1[l1keya[i],]),unlist(xy1[l1keyb[i],]),unlist(xy2[l2keya[j],]))
if(intprop<0)
{
matches[i,j]<-0
}else if(intprop>1)
{
matches[i,j]<- 1
}else{
matches[i,j]<- intprop
}
if(j>1&nondecreasingos)
{
matches[i,j] <- max(matches[i,j],matches[i,j-2])
}
}
}
}
pathEnvelope <- matrix(F,l1.b,l2.b)
pathEnvelope[1,1] <- T
if(plotgrid)
{
par(mfrow=c(1,1))
PlotGrid(l1,l2)
}
leastcost <-matrix(0,nrow=l1.b,ncol=l2.b)
linkcost <- matrix(0,nrow=l1.b,ncol=l2.b)
if(verbose)print("Computing linkage costs")
for(i in 1:l1.b)
{
if(verbose)cat(".")
for(j in 1:l2.b)
{
linkcost[i,j] <- LinkCost(xy1,xy2,i,j)
}
}
if(verbose)cat("\n")
chain <- matrix(0,nrow=2*(l1+l2))
bestpath <- array(0,c(l1.b,l2.b,2),dimnames=list(NULL,NULL, c("x","y")))
if(verbose) print("Computing paths:")
id <- 1
for(i in 1:(l1.b))
{
if(verbose)print(paste(i, "of",l1.b))
for(j in 1:(l2.b))
{
if(i==1 & j==1)
{
tmpcosts <- 0
bestpath[i,j,] <- c(1,1)
leastcost[i,j]<-min(tmpcosts,na.rm=T)
} else if(odd(i) & odd(j))
{
pair1 <- c(i,j-1)
pair2 <- c(i-1,j)
pair3 <- c(i-2,j-2)
if(all(pair1>0))
{
path1 <- leastcost[pair1[1],pair1[2]]
} else {
path1 <-Inf
}
cost1 <- Cost(xy1,xy2,i,j,i,j-1,matches,costfn=costfn)
if(all(pair2>0))
{
path2 <- leastcost[pair2[1],pair2[2]]
}else{
path2 <- Inf
}
cost2 <- Cost(xy1,xy2,i,j,i-1,j,matches,costfn=costfn)
if(all(pair3>0))
{
path3 <- leastcost[pair3[1],pair3[2]]
}else{
path3 <- Inf
}
cost3 <- Cost(xy1,xy2,i,j,i-2,j-2,matches,costfn=costfn)
          opts <- c(path1+cost1, path2+cost2, path3+cost3)
          choices <- which((opts-min(opts))<.00001)
          leastcost[i,j] <- min(opts)
if(length(choices)>1)
{
choice <- which.min(c(1,3,2)[choices])
}else{
choice <- choices
}
bestpath[i,j,] <-rbind(c(i,j-1),c(i-1,j),c(i-2,j-2))[choice,]
if(plotgrid)
{
points(j-.5,l1.b-i+1,pch=16,col="white",cex=3)
text(j-.5,l1.b-i+1,round(cost1,2),cex=.8,col="black")
points(j,l1.b-i+1.5,pch=16,col="white",cex=3)
text(j,l1.b-i+1.5,round(cost2,2),cex=.8,col="black")
points(j-.5,l1.b-i+1.5,pch=16,col="white",cex=3)
text( j-.5,l1.b-i+1.5,round(cost3,2),cex=.8,col="black")
points(j,l1.b-i+1,pch=16,col="grey",cex=3)
text(j,l1.b-i+1,round(leastcost[i,j],2),cex=.8,col="black")
}
}else if(even(i) & odd(j))
{
cost1 <- Cost(xy1,xy2,i,j,i-1,j,matches,costfn=costfn)
path1 <- cost1 + leastcost[i-1,j]
cost2 <- Cost(xy1,xy2,i,j,i,j-2,matches,costfn=costfn)
path2 <- cost2 + leastcost[i,j-2]
opts <- c(path1,path2)
choices <- which((opts-min(opts))<.00001)
if(length(choices)>1)
{
choice <- which.min(c(1,2)[choices])
} else {
choice <- choices
}
            leastcost[i,j] <- min(path1, path2)
bestpath[i,j,] <- rbind(c(i-1,j),c(i,j-2))[choice,]
if(plotgrid)
{
points(j,l1.b-i+1+.5,pch=16,col="white",cex=3)
text(j,l1.b-i+1+.5,round(cost1,2),cex=.8,col="black")
points(j-.5,l1.b-i+1,pch=16,col="white",cex=3)
text( j-.5,l1.b-i+1,round(cost2,2),col="black",cex=.8)
points(j,l1.b-i+1,pch=16,col="grey",cex=3)
text(j,l1.b-i+1,round(leastcost[i,j],2),cex=.8,col="black")
}
}else if(odd(i) & even(j))
{
cost1 <- Cost(xy1,xy2,i,j,i-2,j,matches,costfn=costfn)
prev1 <- ifelse(i<3,0,leastcost[i-2,j])
path1 <- cost1 + prev1
cost2 <- Cost(xy1,xy2,i,j,i,j-1,matches,costfn=costfn)
prev2 <- ifelse(j<2,0,leastcost[i,j-1])
path2 <- cost2 + prev2
opts <- c(path2,path1)
choices <- which((opts-min(opts))<.00001)
if(length(choices)>1)
{
choice <- which.min(c(1,2)[choices])
} else {
choice <- choices
}
leastcost[i,j] <- min(path1,path2)
bestpath[i,j,] <- rbind(c(i,j-1),c(i-2,j))[choice,]
if(plotgrid)
{
points(j-.5,l1.b-i+1,pch=16,col="white",cex=3)
text(j-.5, l1.b-i+1,round(cost2,2),col="black",cex=.8)
points(j,l1.b-i+1+.5,pch=16,col="white",cex=3)
text(j, l1.b-i+1+.5,round(cost1,2),col="black",cex=.8)
points(j,l1.b-i+1,pch=16,col="grey",cex=3)
text( j,l1.b-i+1,round(leastcost[i,j],2),col="black",cex=.8)
}
}
}
}
if(plotgrid)
{
i <- l1.b
j <- l2.b
path <- c(i,j)
previ <- i
prevj <- j
while(i>1 | j >1)
{
points(j,l1.b-i+1,cex=3.1,col="red")
previ <- i
prevj <- j
nexti <- bestpath[i,j,1]
nextj <- bestpath[i,j,2]
i <- nexti
j <- nextj
}
}
return (list(path1 = xy1,
path2 = xy2,
origpath1 = xy1.1,
origpath2 = xy2.1,
key1 = impliedpoints[[3]],
key2 = impliedpoints[[4]],
linkcost = linkcost,
leastcost = leastcost,
bestpath =bestpath,
minmap = FALSE,
opposite = matches,
deviation=leastcost[nrow(leastcost),ncol(leastcost)])
)
}
|
rpart.rules.table<-function(object)
{
rules<-rpart.rules(object)
ff<-object$frame
ff$rules<-unlist(rules[as.numeric(row.names(ff))])
ruleList<-lapply(row.names(ff),function (name) setNames(data.frame(name,
(strsplit(ff[name,'rules'],split=',')),
ff[name,'var']=="<leaf>"
),
c("Rule","Subrule","Leaf")))
combinedRules<-Reduce(rbind,ruleList)
return(combinedRules)
}
|
ggsaveKmg2 <- function(
filename = default_name(plot), plot = last_plot(), device = default_device(filename),
path = NULL, scale = 1, width = par("din")[1], height = par("din")[2], units = c("in", "cm", "mm"),
dpi = 300, ...) {
if (!inherits(plot, "ggplot") && !inherits(plot, "recordedplot"))
stop("plot should be a ggplot2 plot or a recordedplot plot")
eps <- ps <- function(..., width, height)
grDevices::postscript(..., width = width, height = height, onefile = FALSE,
horizontal = FALSE, paper = "special")
tex <- function(..., width, height)
grDevices::pictex(..., width = width, height = height)
pdf <- function(..., version = "1.4")
grDevices::pdf(..., version = version)
svg <- function(...)
grDevices::svg(...)
wmf <- function(..., width, height)
grDevices::win.metafile(..., width = width, height = height)
png <- function(..., width, height)
grDevices::png(..., width = width, height = height, res = dpi, units = "in")
jpg <- jpeg <- function(..., width, height)
grDevices::jpeg(..., width = width, height = height, res = dpi, units = "in")
bmp <- function(..., width, height)
grDevices::bmp(..., width = width, height = height, res = dpi, units = "in")
tiff <- function(..., width, height)
grDevices::tiff(..., width = width, height = height, res = dpi, units = "in")
default_name <- function(plot) {
paste("default_plot.pdf", sep = "")
}
default_device <- function(filename) {
pieces <- strsplit(filename, "\\.")[[1]]
ext <- tolower(pieces[length(pieces)])
match.fun(ext)
}
units <- match.arg(units)
convert_to_inches <- function(x, units) {
x <- switch(units,
`in` = x,
cm = x / 2.54,
mm = x / 2.54 /10
)
}
convert_from_inches <- function(x, units) {
x <- switch(units,
`in` = x,
cm = x * 2.54,
mm = x * 2.54 * 10
)
}
if (!missing(width)) {
width <- convert_to_inches(width, units)
}
if (!missing(height)) {
height <- convert_to_inches(height, units)
}
if (missing(width) || missing(height)) {
message("Saving ", prettyNum(convert_from_inches(width * scale, units), digits = 3),
" x ", prettyNum(convert_from_inches(height * scale, units), digits = 3),
" ", units, " image")
}
width <- width * scale
height <- height * scale
if (!is.null(path)) {
filename <- file.path(path, filename)
}
device(file = filename, width = width, height = height, ...)
on.exit(capture.output(dev.off()))
print(plot)
invisible()
}
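## Example usage (a sketch; assumes ggplot2 is installed and writes a file
## into tempdir()):
if (requireNamespace("ggplot2", quietly = TRUE)) {
  p <- ggplot2::ggplot(mtcars, ggplot2::aes(wt, mpg)) + ggplot2::geom_point()
  ggsaveKmg2(file.path(tempdir(), "mtcars.pdf"), p, width = 5, height = 4)
}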
|
predict.io.fi <- function(object, newdata=NULL, compute=FALSE, int.range=NULL,
                          integrate=FALSE, ...){
model <- object
width <- model$meta.data$width
point <- model$meta.data$point
if(is.null(newdata)){
newdata <- model$mr$data
}else{
if(!("observer" %in% names(newdata))){
stop("newdata does not contain a column named \"observer\"")
}
}
newdata$offsetvalue <- 0
GAM <- FALSE
if("gam" %in% class(model$mr)){
GAM <- TRUE
}
if(!integrate){
fitted <- predict(model$mr,newdata,type="response")
p1 <- fitted[newdata$observer==1]
p2 <- fitted[newdata$observer==2]
fitted <- p1+p2-p1*p2
names(fitted) <- newdata$object[newdata$observer==1]
return(list(fitted = fitted,
p1 = p1,
p2 = p2))
}else{
left <- model$meta.data$left
formula <- paste("~",as.character(model$mr$formula)[3],collapse="")
if("gam" %in% class(model$mr)){
integral.numeric <- TRUE
}else{
integral.numeric <- is.linear.logistic(newdata,formula,
length(coef(model$mr)),width)
}
models <- list(g0model = formula,
scalemodel = NULL,
fullscalemodel = NULL)
if(is.null(int.range)){
pdot.list <- pdot.dsr.integrate.logistic(width, width, model$mr$coef,
newdata, integral.numeric, FALSE, models,GAM, point=point)
}else{
pdot.list <- pdot.dsr.integrate.logistic(int.range,width, model$mr$coef,
newdata, integral.numeric, FALSE, models,GAM, point=point)
}
if(left !=0){
pdot.list$pdot <- pdot.list$pdot -
pdot.dsr.integrate.logistic(left, width, model$mr$coef,
newdata, integral.numeric, FALSE, models,
GAM, point=point)$pdot
}
fitted <- pdot.list$pdot
names(fitted) <- newdata$object[newdata$observer==1]
return(list(fitted=fitted))
}
}
|
toptfit <- function(Ea, Hd, kopt, Tleaf, Topt) {
param = kopt * (Hd * exp((Ea * (Tleaf - Topt) /
((Tleaf + 273.15) *
(Topt + 273.15) * 0.008314)))) /
(Hd - Ea * (1 - exp((Hd * (Tleaf - Topt) /
((Tleaf + 273.15) *
(Topt + 273.15) * 0.008314)))))
}
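## Quick illustrative check: at Tleaf = Topt both exponents vanish, so the
## response equals kopt. Ea and Hd are assumed to be in kJ mol^-1, matching
## the gas constant 0.008314 used above.
vals <- toptfit(Ea = 50, Hd = 200, kopt = 1, Tleaf = c(15, 25, 35), Topt = 25)
vals  # the middle element is exactly kopt = 1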
|
summarize.em <- function(x, thresholds){
if("fastLink.EM" %in% class(x)){
em.out <- x
EM <- data.frame(em.out$patterns.w)
EM$zeta.j <- em.out$zeta.j
EM <- EM[order(EM[, "weights"]), ]
n1 <- em.out$nobs.a; n2 <- em.out$nobs.b
}else{
em.out <- x$EM
EM <- data.frame(em.out$patterns.w)
EM$zeta.j <- em.out$zeta.j
EM <- EM[order(EM[, "weights"]), ]
n1 <- x$nobs.a; n2 <- x$nobs.b
}
count <- min(n1, n2)
tmc <- rep(NA, length(thresholds))
tpc <- rep(NA, length(thresholds))
fpc <- rep(NA, length(thresholds))
fnc <- rep(NA, length(thresholds))
for(i in 1:length(thresholds)){
tmc[i] <- sum(EM$counts[EM$zeta.j >= thresholds[i]] * EM$zeta.j[EM$zeta.j >= thresholds[i]])
tpc[i] <- min(sum(EM$counts[EM$zeta.j >= thresholds[i]]), min(n1, n2))
fpc[i] <- sum(EM$counts[EM$zeta.j >= thresholds[i]] * (1 - EM$zeta.j[EM$zeta.j >= thresholds[i]]))
fnc[i] <- sum(EM$counts[EM$zeta.j < thresholds[i]] * (EM$zeta.j[EM$zeta.j < thresholds[i]]))
}
exp.match <- sum(EM$counts * EM$zeta.j)
gamma.ind <- grep("gamma.[[:digit:]]", names(EM))
exact.match.ind <- which(rowSums(EM[,gamma.ind]) == length(gamma.ind)*2)
if(length(exact.match.ind) == 0){
exact.matches <- 0
}else{
exact.matches <- EM$counts[exact.match.ind]
}
out <- data.frame(t(c(count, tmc, tpc, fpc, fnc, exp.match, exact.matches, n1, n2)))
names(out) <- c("count", paste0("tmc.", thresholds*100), paste0("tpc.", thresholds*100), paste0("fpc.", thresholds*100),
paste0("fnc.", thresholds*100), "exp.match", "exact.matches", "nobs.a", "nobs.b")
return(out)
}
summarize.agg <- function(x, num.comparisons, weighted){
s.calc <- function(y){
matches <- 100 * (y[,grep("tmc.", names(y))]) / min(y$nobs.a, y$nobs.b)
matches.E <- 100 * (y$exact.matches) / min(y$nobs.a, y$nobs.b)
matches <- cbind(matches, matches.E)
colnames(matches) <- c(names(y)[grep("tmc.", names(y))], "matches.E")
matchcount <- y[,grep("tpc.", names(y))]
matchcount.E <- y$exact.matches
matchcount <- cbind(matchcount, matchcount.E)
colnames(matchcount) <- c(names(y)[grep("tpc.", names(y))], "matchcount.E")
fdr <- 100 * (y[,grep("fpc.", names(y))]) * 1 / (y[,grep("tpc.", names(y))])
names(fdr) <- names(y)[grep("fpc.", names(y))]
fnr <- 100 * (y[,grep("fnc.", names(y))]) / y$exp.match
names(fnr) <- names(y)[grep("fnc.", names(y))]
return(list(fdr = fdr, fnr = fnr, matches = matches, matchcount = matchcount))
}
if(class(x) == "data.frame"){
out <- s.calc(x)
}else{
out <- list()
out[["within"]] <- s.calc(x[["within"]])
out[["across"]] <- s.calc(x[["across"]])
matches <- 100 * (x$within[,grep("tmc.", names(x$within))] + x$across[,grep("tmc.", names(x$across))]) /
min(x$within$nobs.a, x$within$nobs.b)
matches.E <- 100 * (x$within$exact.matches + x$across$exact.matches) / min(x$within$nobs.a, x$within$nobs.b)
matches <- cbind(matches, matches.E)
colnames(matches) <- c(names(x$within)[grep("tmc.", names(x$within))], "matches.E")
matchcount <- out$within$matchcount + out$across$matchcount
fdr <- 100 * (x$within[,grep("fpc.", names(x$across))] + x$across[,grep("fpc.", names(x$across))]) /
(x$within[,grep("tpc.", names(x$within))] + x$across[,grep("tpc.", names(x$across))])
names(fdr) <- names(x$within)[grep("fpc.", names(x$within))]
fnr <- 100 * (x$within[,grep("fnc.", names(x$across))] + (x$across[,grep("fnc.", names(x$across))] / num.comparisons)) /
x$within$exp.match
names(fnr) <- names(x$within)[grep("fnc.", names(x$within))]
out[["pooled"]] <- list(fdr = fdr, fnr = fnr, matches = matches, matchcount = matchcount)
if(weighted){
wm <- 100 * (x$within[,grep("tmc.", names(x$within))]) /
min(x$within$nobs.a, x$within$nobs.b)
wm.E <- 100 * (x$within$exact.matches) / min(x$within$nobs.a, x$within$nobs.b)
out$within$matches <- cbind(wm, wm.E)
wm <- 100 * (x$across[,grep("tmc.", names(x$across))]) /
min(x$within$nobs.a, x$within$nobs.b)
wm.E <- 100 * (x$across$exact.matches) / min(x$within$nobs.a, x$within$nobs.b)
out$across$matches <- cbind(wm, wm.E)
fdr.a <- 100 * (x$across[, grep("fpc.", names(x$across))]) /
(x$across[,grep("tmc.", names(x$across))] + x$within[, grep("tmc.", names(x$within))])
      names(fdr.a) <- names(x$across)[grep("fpc.", names(x$across))]
out$across$fdr <- fdr.a
fdr.w <- 100 * (x$within[, grep("fpc.", names(x$within))]) /
(x$across[,grep("tpc.", names(x$across))] + x$within[, grep("tpc.", names(x$within))])
      names(fdr.w) <- names(x$within)[grep("fpc.", names(x$within))]
out$within$fdr <- fdr.w
fnr.a <- 100 * (x$across[,grep("fnc.", names(x$across))] / num.comparisons) /
x$within$exp.match
names(fnr.a) <- names(x$across)[grep("fnc.", names(x$across))]
out$across$fnr <- fnr.a
fnr.w <- 100 * (x$within[,grep("fnc.", names(x$across))]) /
x$within$exp.match
names(fnr.w) <- names(x$within)[grep("fnc.", names(x$within))]
out$within$fnr <- fnr.w
}
}
return(out)
}
summary.fastLink <- function(object, num.comparisons = 1, thresholds = c(.95, .85, .75), weighted = TRUE, digits = 3, ...){
round.pct <- function(x){
a <- unlist(x)
b <- round(a, digits)
c <- paste0(b, "%")
return(c)
}
if("fastLink.agg" %in% class(object) & !("across.geo" %in% names(object))){
out <- as.data.frame(do.call(rbind, lapply(object, function(x){summarize.em(x, thresholds = thresholds)})))
out <- data.frame(t(colSums(out)))
out.agg <- summarize.agg(out, num.comparisons = num.comparisons, weighted = weighted)
}else if("fastLink.agg" %in% class(object) & "across.geo" %in% names(object)){
out.w <- as.data.frame(do.call(rbind, lapply(object[["within.geo"]], function(x){summarize.em(x, thresholds = thresholds)})))
out.a <- as.data.frame(do.call(rbind, lapply(object[["across.geo"]], function(x){summarize.em(x, thresholds = thresholds)})))
out <- list(within = data.frame(t(colSums(out.w))), across = data.frame(t(colSums(out.a))))
out.agg <- summarize.agg(out, num.comparisons = num.comparisons, weighted = weighted)
}else if("fastLink" %in% class(object) | "fastLink.EM" %in% class(object)){
out <- summarize.em(object, thresholds = thresholds)
out.agg <- summarize.agg(out, num.comparisons = num.comparisons, weighted = weighted)
}
if("fastLink.agg" %in% class(object) & "across.geo" %in% names(object)){
tab <- as.data.frame(
rbind(c(out.agg$pooled$matchcount), c(out.agg$within$matchcount),
c(out.agg$across$matchcount),
round.pct(out.agg$pooled$matches), round.pct(out.agg$within$matches),
round.pct(out.agg$across$matches),
c(round.pct(out.agg$pooled$fdr), ""), c(round.pct(out.agg$within$fdr), ""),
c(round.pct(out.agg$across$fdr), ""),
c(round.pct(out.agg$pooled$fnr), ""), c(round.pct(out.agg$within$fnr), ""),
c(round.pct(out.agg$across$fnr), ""))
)
tab <- cbind(rep(c("All", "Within-State", "Across-State"), 4), tab)
tab <- cbind(c("Match Count", "", "", "Match Rate", "", "", "FDR", "", "", "FNR", "", ""), tab)
colnames(tab) <- c("", "", paste0(thresholds * 100, "%"), "Exact")
}else{
tab <- as.data.frame(
rbind(out.agg$matchcount, round.pct(out.agg$matches), c(round.pct(out.agg$fdr), ""), c(round.pct(out.agg$fnr), ""))
)
tab <- cbind(c("Match Count", "Match Rate", "FDR", "FNR"), tab)
colnames(tab) <- c("", paste0(thresholds * 100, "%"), "Exact")
}
return(tab)
}
aggregateEM <- function(em.list, within.geo = NULL){
if(is.null(within.geo)){
out <- em.list
}else{
if(length(within.geo) != length(em.list)){
stop("If provided, within.geo should be the same length as em.list.")
}
wg <- vector(mode = "list", length = sum(within.geo))
ag <- vector(mode = "list", length = length(within.geo) - sum(within.geo))
ind.within <- which(within.geo == TRUE)
ind.across <- which(within.geo == FALSE)
for(i in 1:length(ind.within)){
wg[[i]] <- em.list[[ind.within[i]]]
}
for(i in 1:length(ind.across)){
ag[[i]] <- em.list[[ind.across[i]]]
}
out <- list(within.geo = wg, across.geo = ag)
}
class(out) <- c("fastLink", "fastLink.agg")
return(out)
}
|
library("mvtnorm")
(cor1 <- toeplitz(c(1, 1/4, -1/8)))
(up1 <- c(1/4, 7/4, 5/8))
d <- length(up1)
pmvt.. <- function(df, algorithm)
vapply(df, function(df) pmvt(upper=up1, corr=cor1, df=df, algorithm=algorithm),
numeric(1))
dfs <- 1:9
pmvt_TV.7 <- replicate(7, pmvt..(dfs, TVPACK()))
stopifnot(pmvt_TV.7 == pmvt_TV.7[,1])
(pmvt.TV. <- pmvt_TV.7[,1])
(pmvt.TV <- pmvt..(dfs, TVPACK(1e-14)))
all.equal(max(abs(pmvt.TV - pmvt.TV.)), 0)
set.seed(47)
pmvt_7 <- replicate(7, vapply(dfs, function(df) pmvt(df=df, upper=up1, corr=cor1), numeric(1)))
relE <- 1 - pmvt_7 / pmvt.TV
rng.rE <- range(abs(relE))
stopifnot(1e-6 < rng.rE[1], rng.rE[2] < 7e-4)
stopifnot(all.equal(
colMeans(abs(relE)),
c(88, 64, 105, 73, 52, 90, 87)*1e-6, tol= 1e-3))
set.seed(29)
corr <- cov2cor(crossprod(matrix(runif(9,-1,1),3,3))+diag(3))
df <- rpois(1,3)+1
ctrl <- GenzBretz(maxpts = 2500000, abseps = 0.000001, releps = 0)
upper <- rexp(3,1)
pmvt(upper=upper, corr=corr, df = df, algorithm = ctrl)
pmvt(upper=upper, corr=corr, df = df, algorithm = TVPACK())
lower <- -rexp(3,1)
pmvt(lower=lower, upper=rep(Inf,3), corr=corr, df = df, algorithm = ctrl)
pmvt(lower=lower, upper=rep(Inf,3), corr=corr, df = df, algorithm = TVPACK())
delt <- rexp(3,1/10)
upper <- delt+runif(3)
ctrl <- GenzBretz(maxpts = 2500000, abseps = 0.000001, releps = 0)
pmvt(upper=upper, corr=corr, df = df, algorithm = ctrl, delta = delt)
tools::assertError(pmvt(upper=upper, corr=corr, df = df, algorithm = TVPACK(), delta = delt))
upper <- rexp(3,1)
pmvnorm(upper=upper, corr=corr, algorithm = ctrl)
pmvnorm(upper=upper, corr=corr, algorithm = TVPACK())
lower <- rexp(3,5)
pmvnorm(lower=lower,upper=rep(Inf, 3), corr=corr, algorithm = ctrl)
pmvnorm(lower=lower,upper=rep(Inf, 3), corr=corr, algorithm = TVPACK())
delt <- rexp(3,1/10)
upper <- delt+rexp(3,1)
pmvnorm(upper=upper, corr=corr, algorithm = ctrl, mean = delt)
pmvnorm(upper=upper, corr=corr, algorithm = TVPACK(), mean = delt)
corr <- cov2cor(crossprod(matrix(runif(4,-1,1),2,2))+diag(2))
upper <- rexp(2,1)
df <- rpois(1, runif(1, 0, 20))
pmvt(upper=upper, corr=corr, df = df, algorithm = ctrl)
pmvt(upper=upper, corr=corr, df = df, algorithm = TVPACK())
pmvt(lower=-upper, upper=rep(Inf, 2), corr=corr, df = df, algorithm = ctrl)
pmvt(lower=-upper, upper=rep(Inf, 2), corr=corr, df = df, algorithm = TVPACK())
delt <- rexp(2,1/5)
upper <- delt+rexp(2,1)
pmvnorm(upper=upper, corr=corr, algorithm = ctrl, mean = delt)
pmvnorm(upper=upper, corr=corr, algorithm = TVPACK(), mean = delt)
corr <- cov2cor(crossprod(matrix(runif(4,-1,1),2,2))+diag(2))
upper <- rexp(2, 1)
pmvnorm(upper=upper, corr=corr, algorithm = Miwa(steps=128))
pmvnorm(upper=upper, corr=corr, algorithm = TVPACK())
corr <- cov2cor(crossprod(matrix(runif(9,-1,1),3,3))+diag(3))
upper <- rexp(3, 1)
ctrl <- Miwa(steps=128)
pmvnorm(upper=upper, corr=corr, algorithm = ctrl)
pmvnorm(upper=upper, corr=corr, algorithm = TVPACK())
S <- toeplitz(c(1, 1/2, 1/4))
set.seed(11)
P0 <- pmvnorm(lower=c(-Inf, 0, 0), upper=Inf, corr=S)
P1 <- pmvnorm(lower=c(-Inf, 0, 0), upper=Inf, corr=S, algorithm = TVPACK())
P2 <- pmvnorm(lower=c(-Inf, 0, 0), upper=Inf, corr=S, algorithm = Miwa())
P2a<- pmvnorm(lower=c(-Inf, 0, 0), upper=Inf, corr=S, algorithm = Miwa(512))
P2.<- pmvnorm(lower=c(-Inf, 0, 0), upper=Inf, corr=S, algorithm = Miwa(2048))
stopifnot(all.equal(1/3, c(P0), tol=1e-14)
, all.equal(1/3, c(P1), tol=1e-14)
, all.equal(1/3, c(P2), tol=1e-9 )
, all.equal(1/3, c(P2a),tol=4e-12)
, all.equal(1/3, c(P2.),tol=2e-12)
)
set.seed(11)
Ptdef <- replicate(20, c(pmvt(lower=c(-Inf, 1, 2), upper=Inf, df=2, corr=S)))
unique(Ptdef)
Pt1 <- pmvt(lower=c(-Inf, 1, 2), upper=Inf, df=2, corr=S, algorithm = TVPACK())
P. <- 0.0570404044526986
stopifnot(exprs = {
all.equal(P., c(Pt1), tol = 1e-14)
abs(P. - Ptdef) < 1e-15
})
|
Ops.lfactor <- function(e1,e2) {
e10 <- e1
if(.Generic %in% c("<", "<=", ">=", ">")) {
if(inherits(e1, "lfactor")) {
e1 <- as.numeric(e1)
} else {
if(inherits(e1, "character")) {
e2l <- levels(e2)
if(e1 %in% e2l) {
e1 <- as.numeric(llevels(e2)[e2l==e1])
} else {
return(rep(FALSE,length(e2)))
}
}
}
if(inherits(e2, "lfactor")) {
e2 <- as.numeric(e2)
} else {
if(inherits(e2, "character")) {
e1l <- levels(e10)
if(e2 %in% e1l) {
e2 <- as.numeric(llevels(e10)[e1l==e2])
} else {
return(rep(FALSE,length(e1)))
}
}
}
if(inherits(e1, "numeric") & inherits(e2, "numeric")) {
return(eval(call(.Generic,e1,e2)))
}
}
if(! .Generic %in% c("==", "!=")) {
return(NextMethod(e1,e2))
}
e2 <- as.character(e2)
lvl <- levels(e1)
llvl <- llevels(e1)
e1 <- factor(e1)
for(oli in 1:length(llvl)) {
e2i <- e2 %in% llvl[oli]
e2[e2i] <- lvl[oli]
}
return(NextMethod(e1,e2))
}
|
ggsom_aes <- function(object_som, class) {
assertthat::assert_that(is.kohonen(object_som))
model_som_values <-
data.table::data.table(object_som$data[[1]],
unit.class = object_som$unit.classif,
class,
id = (1:nrow(object_som$data[[1]]))) %>%
.[,sum:=.(.N), by="unit.class"]
model_som_pts <-
data.table::data.table(object_som$grid$pts,
unit.class = 1:nrow(object_som$grid$pts))
model_som_values <- model_som_pts[model_som_values, on = 'unit.class']
return(model_som_values)
}
|
find_rprofile <- function(all = FALSE) {
pathnames <- c(Sys.getenv("R_PROFILE_USER"), "./.Rprofile", "~/.Rprofile")
pathnames <- drop_user_files_during_check(pathnames)
find_files(pathnames, all = all)
}
find_renviron <- function(all = FALSE) {
pathnames <- c(Sys.getenv("R_ENVIRON_USER"), "./.Renviron", "~/.Renviron")
pathnames <- drop_user_files_during_check(pathnames)
find_files(pathnames, all = all)
}
find_rprofile_d <- function(sibling = FALSE, all = FALSE) {
if (sibling) {
pathnames <- find_rprofile(all = all)
} else {
pathnames <- c(Sys.getenv("R_PROFILE_USER"), "~/.Rprofile", "./.Rprofile")
pathnames <- drop_user_files_during_check(pathnames)
}
pathnames <- pathnames[nzchar(pathnames)]
paths <- sprintf("%s.d", pathnames)
paths_d <- find_d_dirs(paths, all = all)
if (length(paths_d) == 0) {
logf("Found no corresponding startup directory %s.",
paste(squote(paths), collapse = ", "))
} else {
logf("Found startup directory %s.", paste(squote(paths_d), collapse = ", "))
}
paths_d
}
find_renviron_d <- function(sibling = FALSE, all = FALSE) {
if (sibling) {
pathnames <- find_renviron(all = all)
} else {
pathnames <- c(Sys.getenv("R_ENVIRON_USER"), "~/.Renviron", "./.Renviron")
pathnames <- drop_user_files_during_check(pathnames)
}
pathnames <- pathnames[nzchar(pathnames)]
paths <- sprintf("%s.d", pathnames)
paths_d <- find_d_dirs(paths, all = all)
if (length(paths_d) == 0) {
logf("Found no corresponding startup directory %s.",
paste(squote(paths), collapse = ", "))
} else {
logf("Found startup directory %s.", paste(squote(paths_d), collapse = ", "))
}
paths_d
}
find_files <- function(pathnames, all = FALSE) {
pathnames <- pathnames[file.exists(pathnames)]
pathnames <- pathnames[!file.info(pathnames)$isdir]
if (!all) {
pathnames <- if (length(pathnames) == 0) character(0L) else pathnames[1]
}
pathnames
}
find_d_dirs <- function(paths, all = FALSE) {
if (length(paths) == 0) return(character(0))
paths <- paths[file.exists(paths)]
paths <- paths[file.info(paths)$isdir]
if (!all) {
paths <- if (length(paths) == 0) character(0L) else paths[1]
}
paths
}
list_d_files <- function(paths, recursive = TRUE, filter = NULL) {
ol <- Sys.getlocale("LC_COLLATE")
on.exit(Sys.setlocale("LC_COLLATE", ol))
Sys.setlocale("LC_COLLATE", "C")
paths <- paths[file.exists(paths)]
if (length(paths) == 0) return(character(0L))
files <- NULL
for (path in paths) {
files <- c(files, dir(path = path, pattern = "[^~]$",
recursive = recursive, all.files = TRUE,
full.names = TRUE))
}
files <- files[!grepl("^
ignores <- c(".Rhistory", ".RData")
files <- files[!is.element(basename(files), ignores)]
ignores <- c(".DS_Store", ".Spotlight-V100", ".TemporaryItems",
".VolumeIcon.icns", ".apDisk", ".fseventsd")
files <- files[!is.element(basename(files), ignores)]
files <- grep("[/\\\\](__MACOSX|[.]Trash|[.]Trashes)[/\\\\]", files, value = TRUE,
fixed = FALSE, invert = TRUE)
hidden <- grep("._", basename(files), fixed = TRUE, value = FALSE)
if (length(hidden) > 0) {
hidden_files <- files[hidden]
hidden_names <- sub("^[.]_", "", basename(hidden_files))
hidden_siblings <- file.path(dirname(hidden_files), hidden_names)
hidden_siblings <- normalizePath(hidden_siblings, mustWork = FALSE)
files_normalized <- normalizePath(files, mustWork = FALSE)
drop <- is.element(hidden_siblings, files_normalized)
hidden_files <- hidden_files[drop]
files <- setdiff(files, hidden_files)
}
files <- grep("([.]md|[.]txt|~)$", files, value = TRUE, invert = TRUE)
files <- grep("(^|/|\\\\)[.][.]", files, value = TRUE, invert = TRUE)
if (length(files) == 0) return(character(0))
files <- files[file.exists(files)]
files <- files[!file.info(files)$isdir]
if (length(files) == 0) return(character(0))
files_normalized <- normalizePath(files, winslash = "/")
files <- files[!duplicated(files_normalized)]
if (is.function(filter)) {
files <- filter(files)
}
files
}
drop_user_files_during_check <- function(pathnames) {
if (!nzchar(Sys.getenv("R_CMD"))) return(pathnames)
grep("~", pathnames, value = TRUE, invert = TRUE)
}
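## Illustrative call (safe to run): find_files() keeps only paths that exist
## and are not directories, so this returns character(0) when no user
## .Rprofile is present.
find_files(c("~/.Rprofile", "./.Rprofile"), all = TRUE)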
|
if (httr::status_code(
httr::GET("https://clinicaltrials.gov/ct2/search",
httr::timeout(5))) != 200L
) exit_file("Reason: CTGOV not working")
expect_equal(
suppressMessages(
ctrLoadQueryIntoDb(
queryterm = "2010-024264-18",
register = "CTGOV",
only.count = TRUE))[["n"]], 1L)
expect_message(
tmpTest <- suppressWarnings(
ctrLoadQueryIntoDb(
queryterm = "2010-024264-18",
register = "CTGOV",
con = dbc)),
"Imported or updated 1 trial")
expect_equal(tmpTest$n, 1L)
expect_equal(tmpTest$success, "NCT01471782")
expect_true(length(tmpTest$failed) == 0L)
expect_message(
suppressWarnings(
ctrLoadQueryIntoDb(
queryterm = "NCT01471782",
register = "CTGOV",
con = dbc)),
"Imported or updated 1 trial")
expect_error(
suppressWarnings(
suppressMessages(
ctrLoadQueryIntoDb(
queryterm = paste0(
"https://clinicaltrials.gov/ct2/results?cond=Cancer&type=Intr&phase=0",
"&strd_s=01%2F02%2F2005&strd_e=12%2F31%2F2017"),
con = dbc))),
"more than 10,000) trials")
expect_message(
suppressWarnings(
ctrLoadQueryIntoDb(
querytoupdate = "last",
verbose = TRUE,
con = dbc)),
"No trials or number of trials could not be determined")
expect_error(
suppressWarnings(
ctrLoadQueryIntoDb(
querytoupdate = 999L,
con = dbc)),
"'querytoupdate': specified number not found")
q <- paste0("https://clinicaltrials.gov/ct2/results?",
"term=osteosarcoma&type=Intr&phase=0&age=0&lup_e=")
expect_message(
tmpTest <- suppressWarnings(
ctrLoadQueryIntoDb(
queryterm = paste0(q, "12%2F31%2F2008"),
con = dbc)),
"Imported or updated ")
hist <- suppressWarnings(dbQueryHistory(con = dbc))
hist[nrow(hist), "query-term"] <-
sub("(.*&lup_e=).*", "\\112%2F31%2F2009", hist[nrow(hist), "query-term"])
json <- jsonlite::toJSON(list("queries" = hist))
expect_equal(
nodbi::docdb_update(
src = dbc,
key = dbc$collection,
value = as.character(json),
query = '{"_id": "meta-info"}'), 1L)
expect_message(
tmpTest <- suppressWarnings(
ctrLoadQueryIntoDb(
querytoupdate = "last",
con = dbc)),
"Imported or updated")
expect_true(tmpTest$n > 2L)
expect_true(length(tmpTest$success) > 2L)
expect_true(length(tmpTest$failed) == 0L)
expect_message(
suppressWarnings(
ctrLoadQueryIntoDb(
queryterm = "someQueryForErrorTriggering",
register = "CTGOV",
verbose = TRUE,
only.count = TRUE,
con = dbc)),
"term=someQueryForErrorTriggering")
result <- suppressMessages(
suppressWarnings(
dbGetFieldsIntoDf(
fields = c(
"clinical_results.baseline.analyzed_list.analyzed.count_list.count",
"clinical_results.baseline.group_list.group",
"clinical_results.baseline.analyzed_list.analyzed.units",
"clinical_results.outcome_list.outcome",
"study_design_info.allocation",
"location.facility.name",
"location"),
con = dbc)
))
expect_equal(
sapply(
result[["location"]],
function(x) length(x[["facility"]][["name"]])),
c(1, 1, 1, 30))
expect_true("character" == class(result[[
"study_design_info.allocation"]]))
expect_true(
any(grepl(" / ", result[["location.facility.name"]])))
expect_true(
length(unlist(strsplit(
result[["location.facility.name"]], " / "))) >= 32L)
expect_true("list" == class(result[[
"clinical_results.baseline.group_list.group"]]))
expect_true(
sum(nchar(
suppressWarnings(
dfListExtractKey(
result,
list(c("location", "name"))
))[["value"]]),
na.rm = TRUE) > 1000L)
df <- suppressMessages(
dfTrials2Long(
df = result
))
expect_identical(
names(df),
c("_id", "identifier", "name", "value")
)
expect_true(
nrow(df) > 800L
)
df2 <- dfName2Value(
df = df,
valuename = paste0(
"clinical_results.*category_list.category.measurement_list.measurement.value|",
"clinical_results.outcome_list.outcome.measure.units"
),
wherename = "clinical_results.outcome_list.outcome.measure.title",
wherevalue = "duration of response"
)
expect_true(
any("NCT01471782" %in% df2[["_id"]])
)
expect_true(
all(grepl("^0.5", df2[["identifier"]][ df2[["_id"]] == "NCT01471782" ]))
)
expect_error(
suppressWarnings(
suppressMessages(
ctrLoadQueryIntoDb(
queryterm = "term=ET743OVC3006",
register = "CTGOV",
annotation.text = "something",
annotation.mode = "WRONG",
con = dbc))),
"'annotation.mode' incorrect")
|
f.genpar <- function(x,xi,alfa,k) {
if ((k > -0.0000001) & (k < 0.0000001)) {
y <- (x - xi)/alfa
}
else {
y <- -k^(-1) * log(1 - k*(x - xi)/alfa)
}
f <- alfa^(-1) * exp(-(1 - k)*y)
return(f)
}
F.genpar <- function(x,xi,alfa,k) {
if ((k > -0.0000001) & (k < 0.0000001)) {
y <- (x - xi)/alfa
}
else {
y <- -k^(-1) * log(1 - k*(x - xi)/alfa)
}
F <- 1 - exp(-y)
return(F)
}
invF.genpar <- function(F,xi,alfa,k) {
if ((k > -0.0000001) & (k < 0.0000001)) {
x <- xi - alfa*log(1 - F)
}
else {
x <- xi + alfa*(1 - (1 - F)^k)/k
}
return(x)
}
Lmom.genpar <- function(xi,alfa,k) {
quanti <- length(k)
lambda1 <- rep(NA,quanti)
lambda2 <- rep(NA,quanti)
tau3 <- rep(NA,quanti)
tau4 <- rep(NA,quanti)
for (i in 1:quanti) {
if (k[i] <= -1) {
stop("L-moments are defined for k>-1")
}
lambda1[i] <- xi[i] + alfa[i]/(1 + k[i])
lambda2[i] <- alfa[i]/((1 + k[i])*(2 + k[i]))
tau3[i] <- (1 - k[i])/(3 + k[i])
tau4[i] <- (1 - k[i])*(2 - k[i])/((3 + k[i])*(4 + k[i]))
}
output <- list(lambda1=lambda1, lambda2=lambda2, tau3=tau3, tau4=tau4)
return(output)
}
par.genpar <- function(lambda1,lambda2,tau3) {
k <- (1 - 3*tau3)/(1 + tau3)
alfa <- (1 + k)*(2 + k)*lambda2
xi <- lambda1 - (2 + k)*lambda2
output <- list(xi=xi, alfa=alfa, k=k)
return(output)
}
rand.genpar <- function(numerosita,xi,alfa,k) {
F <- runif(numerosita, min=0.0000000001, max=0.9999999999)
x <- invF.genpar(F,xi,alfa,k)
return(x)
}
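## Round-trip sketch (illustrative): recover GPD parameters from L-moments,
## draw a few values, and check that invF.genpar() inverts F.genpar().
gp <- par.genpar(lambda1 = 10, lambda2 = 3, tau3 = 0.2)
x <- rand.genpar(5, gp$xi, gp$alfa, gp$k)
all.equal(x, invF.genpar(F.genpar(x, gp$xi, gp$alfa, gp$k),
                         gp$xi, gp$alfa, gp$k))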
|
read_lcm_raw <- function(fname, ft, fs, ref, extra) {
in_nmid <- FALSE
con <- file(fname, "rb")
while (length(line <- readLines(con, n = 1, warn = FALSE)) > 0) {
if (endsWith(line, "$NMID")) {
in_nmid <- TRUE
} else if (endsWith(line, "$END") && in_nmid) {
      # peek at the first data line to infer the Fortran record format,
      # then rewind to the saved offset before reading the full block
      fp <- seek(con, origin = 'current')
      l1 <- readLines(con, n = 1, warn = FALSE)
      fpn <- seek(con, origin = 'start', where = fp)
      tokens <- strsplit(trimws(l1), "[[:space:]]+")[[1]]
      cols <- length(tokens)
      width <- ceiling(nchar(l1) / cols)
      fmt <- sprintf("%dF%d.0", cols, width)
      x <- utils::read.fortran(con, fmt)
break
}
}
close(con)
data <- as.vector(t(as.matrix(x)))
N <- length(data)/2
data <- data[seq(1, 2 * N, 2)] +
1i * data[seq(2, 2 * N, 2)]
dim(data) <- c(1, 1, 1, 1, 1, 1, N)
res <- c(NA, NA, NA, NA, 1, NA, 1 / fs)
freq_domain <- rep(FALSE, 7)
nuc <- def_nuc()
mrs_data <- mrs_data(data = data, ft = ft, resolution = res, ref = ref,
nuc = nuc, freq_domain = freq_domain, affine = NULL,
meta = NULL, extra = extra)
return(mrs_data)
}
|
mrf_nnetar_forecast = function(UnivariateData, Horizon, Aggregation,
Threshold="hard", Lambda = 0.05){
if(!is.vector(UnivariateData)){
message("Data must be of type vector")
return()
}
if(!is.vector(Aggregation)){
message("agg_per_lvl must be of type vector")
return()
}
dec_res <- wavelet_decomposition(UnivariateData, Aggregation,Threshold,Lambda)
  if (!requireNamespace('forecast', quietly = TRUE)) {
    message(
      "Package forecast is missing in function mrf_nnetar_forecast
      No computations are performed.
      Please install the packages which are defined in 'Suggests'"
    )
    return()
  }else{
Cut = mrf_requirement(UnivariateData=UnivariateData,
CoefficientCombination = rep(0,length(Aggregation)+1),
Aggregation = Aggregation)
Cut = Cut$StartTraining
LenData = length(UnivariateData)
NumLevels = length(Aggregation)+1
Forecast = rbind()
for(i in 1:NumLevels){
currForecast = 0
if(i < NumLevels){
model = forecast::nnetar(y = stats::as.ts(dec_res$WaveletCoefficients[i,]))
tmpForecast = forecast::forecast(model,h=Horizon)
currForecast = as.vector(tmpForecast$mean)
}else{
model = forecast::nnetar(y = as.ts(dec_res$SmoothCoefficients[i-1,]))
tmpForecast = forecast::forecast(model,h=Horizon)
currForecast = as.vector(tmpForecast$mean)
}
Forecast = rbind(Forecast, currForecast)
}
FinalForecast = colSums(Forecast)
return(FinalForecast)
}
}
|
library("tmle.npvi")
rootPath <- "geneData"
rootPath <- Arguments$getReadablePath(rootPath)
dataSet <- "tcga2012brca"
path <- file.path(rootPath, dataSet)
path <- Arguments$getReadablePath(path)
files <- list.files(path, pattern=".*chr17,.*.xdr")
idxs <- 1:150
if (FALSE) {
files <- list.files(path, pattern=".*chr21,.*.xdr")
idxs <- seq(along=files)
}
filenames <- files[idxs]
pathnames <- file.path(path, filenames)
obsList <- lapply(pathnames, loadObject)
snames <- gsub("\\.xdr", "", filenames)
names(obsList) <- snames
str(obsList)
tcga2012brca <- obsList
save(tcga2012brca, file="tcga2012brca.rda")
if (FALSE) {
opath <- "data"
opath <- Arguments$getWritablePath(opath)
for (ff in seq(along=idxs)) {
filename <- files[ff]
pathname <- file.path(path, filename)
obs <- loadObject(pathname)
sname <- gsub("\\.xdr", "", filename)
ofilename <- sprintf("%s,%s.txt", dataSet, sname)
opathname <- file.path(opath, ofilename)
write.table(obs, opathname, quote=FALSE, row.names=FALSE)
}
}
|
plot.var <- function(x, ...) {
  # NULL assignments silence R CMD check notes about non-standard evaluation
  obs <- NULL
  V1 <- NULL
  lower <- NULL
  type <- NULL
  upper <- NULL
if (any(class(x) == 'bekkFit')) {
if(is.null(x$portfolio_weights)) {
if (inherits(x$bekk$data, "ts")) {
autoplot(x$VaR) + theme_bw() + ylab('VaR')
} else {
x$VaR$obs <- 1:nrow(x$VaR)
VaR <- melt(x$VaR, id = 'obs')
ggplot(VaR) + geom_line(aes(x = obs, y = value)) + theme_bw() + xlab('') + ylab('VaR') + facet_wrap(~variable, scales = 'free_y', ncol = 1)
}
} else {
if (inherits(x$bekk$data, "ts")) {
autoplot(x$VaR) + theme_bw() + ylab('VaR') + ggtitle('Portfolio VaR')
} else {
ggplot(x$VaR) + geom_line(aes(x = 1:nrow(x$VaR), y = V1)) + theme_bw() + xlab('') + ylab('VaR') + ggtitle('Portfolio VaR')
}
}
} else if (any(class(x) == 'bekkForecast')) {
if(is.null(x$portfolio_weights)) {
sample <- x$VaR[1:(nrow(x$VaR)-x$n.ahead),]
forc <- x$VaR[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),]
cb_lower <- x$VaR_lower[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),]
cb_upper <- x$VaR_upper[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),]
sample$obs <- as.character(1:nrow(sample))
forc$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
cb_lower$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
cb_upper$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
sample <- sample[(nrow(sample)-4*x$n.ahead):nrow(sample),]
sample$type <- as.factor('Sample')
forc$type <- as.factor('Forecast')
cb_lower$type <- as.factor('Forecast')
cb_upper$type <- as.factor('Forecast')
cb_l <- melt(cb_lower, id = c('obs', 'type'))
cb_u <- melt(cb_upper, id = c('obs', 'type'))
cb <- cbind(cb_l, cb_u$value)
colnames(cb)[4:5] <- c('lower', 'upper')
total <- rbind(sample, forc)
VaR <- melt(total, id = c('obs', 'type'))
cc <- merge(VaR, cb, all.x = TRUE, all.y = TRUE)
if (x$n.ahead > 1) {
ggplot(cc, aes(x = obs, y = value)) +
geom_line(aes(y = lower, group = type, color = type, linetype = type), na.rm = TRUE, color = 'red') +
geom_line(aes(y = upper, group = type, color = type, linetype = type), na.rm = TRUE, color = 'red') +
geom_line(aes(group = type, color = type)) +
geom_point(aes(shape = type)) +
theme_bw() + xlab('') + ylab('VaR') +
scale_color_manual(values = c('black', 'blue')) +
facet_wrap(~variable, scales = 'free_y', ncol = 1) +
theme(legend.position="bottom", legend.title = element_blank())
} else {
ggplot(cc, aes(x = obs, y = value)) +
geom_line(data = cc[cc$type == 'Sample',], aes(x = obs, y = value, group = type)) +
geom_errorbar( aes(ymin=lower, ymax=upper), width=.2, color = 'red') +
geom_point(aes(x = obs, y = value, shape = type), size = 2.5) +
theme_bw() + xlab('') + ylab('VaR') +
scale_color_manual(values = c('black', 'blue')) +
facet_wrap(~variable, scales = 'free_y', ncol = 1) +
theme(legend.position="bottom", legend.title = element_blank())
}
} else {
sample <- as.data.frame(x$VaR[1:(nrow(x$VaR)-x$n.ahead),])
forc <- as.data.frame(x$VaR[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),])
cb_lower <- as.data.frame(x$VaR_lower[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),])
cb_upper <- as.data.frame(x$VaR_upper[(nrow(x$VaR)-x$n.ahead+1):nrow(x$VaR),])
sample$obs <- as.character(1:nrow(sample))
forc$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
cb_lower$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
cb_upper$obs <- as.character((nrow(sample)+1):(nrow(sample)+x$n.ahead))
sample <- sample[(nrow(sample)-4*x$n.ahead):nrow(sample),]
sample$type <- as.factor('Sample')
forc$type <- as.factor('Forecast')
colnames(sample)[1] <- colnames(forc)[1] <- colnames(cb_lower)[1] <- colnames(cb_upper)[1] <- 'V1'
cb_lower$type <- as.factor('Forecast')
cb_upper$type <- as.factor('Forecast')
cb_l <- melt(cb_lower, id = c('obs', 'type'))
cb_u <- melt(cb_upper, id = c('obs', 'type'))
cb <- cbind(cb_l, cb_u$value)
colnames(cb)[4:5] <- c('lower', 'upper')
total <- rbind(sample, forc)
VaR <- melt(total, id = c('obs', 'type'))
cc <- merge(VaR, cb, all.x = TRUE, all.y = TRUE)
if (x$n.ahead > 1) {
ggplot(cc, aes(x = obs, y = value)) +
geom_line(aes(y = lower, group = type, linetype = type), color = 'red', na.rm = TRUE) +
geom_line(aes(y = upper, group = type, linetype = type), color = 'red', na.rm = TRUE) +
geom_line(aes(group = type, color = type)) +
geom_point(aes(shape = type)) +
theme_bw() + xlab('') + ylab('VaR') +
scale_color_manual(values = c('black', 'blue')) +
theme(legend.position="bottom", legend.title = element_blank()) + ggtitle('Portfolio VaR')
} else {
ggplot(cc, aes(x = obs, y = value)) +
geom_line(data = cc[cc$type == 'Sample',], aes(x = obs, y = value, group = type)) +
geom_errorbar( aes(ymin=lower, ymax=upper), width=.2, color = 'red') +
geom_point(aes(x = obs, y = value, shape = type), size = 2.5) +
theme_bw() + xlab('') + ylab('VaR') +
scale_color_manual(values = c('black', 'blue')) +
theme(legend.position="bottom", legend.title = element_blank()) + ggtitle('Portfolio VaR')
}
}
}
}
|
mass_gis <- function(layer = "contours250k") {
dir <- tempdir()
url <- paste0("http://download.massgis.digital.mass.gov/shapefiles/state/", layer, ".zip")
lcl_zip <- file.path(dir, basename(url))
utils::download.file(url, destfile = lcl_zip)
lcl_shp <- file.path(dir, layer)
utils::unzip(lcl_zip, exdir = lcl_shp)
sf::st_read(lcl_shp) %>%
sf::st_transform(4326)
}
macleish_intersect <- function(x) {
sf::st_intersection(macleish::macleish_layers[["boundary"]], x)
}
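## Hedged usage sketch (not part of the original source): mass_gis()
## downloads and unzips a MassGIS shapefile, and macleish_intersect()
## clips any sf layer to the MacLeish boundary. The layer name below is
## the function's own default; internet access is assumed.
if (FALSE) {
  contours <- mass_gis("contours250k")
  macleish_contours <- macleish_intersect(contours)
  plot(macleish_contours)
}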
|
k.points.max.cpe <-
function(formula, cat.var, data, range, k, l.s.points = 100 , min.p.cat=1) {
point=NULL
mat=matrix(ncol=2,nrow=k)
colnames(mat)<- c("point","cpe")
for (i in 1:k){
cpes <- select.cutpoint.cpe(formula = formula, cat.var = cat.var, data = data, range = range, point = point, l.s.points = l.s.points, min.p.cat = min.p.cat)
pos <- which(cpes[,2]==max(cpes[,2],na.rm=TRUE))
if(length(pos)> 1 & i > 1) {
if (cpes[pos[1],1] <= mat[i-1,1]) {
pos <- max(pos)
} else {
pos = min(pos)
}
} else {
pos = pos[1]
}
mat[i,1] <- cpes[pos,1]
mat[i,2] <- cpes[pos,2]
point <- c(point,mat[i,1])
}
mat
}
|
expect_equal(ore_subst("\\d+","no","2 dogs"), "no dogs")
expect_equal(ore_subst("\\d+","no","2 dogs",start=3L), "2 dogs")
expect_equal(ore_subst("(\\d+)","\\1\\1","2 dogs"), "22 dogs")
expect_equal(ore_subst("\\d+",function(i) as.numeric(i)^2,"2 dogs"), "4 dogs")
expect_equal(ore_subst("\\d+",function(i) max(as.numeric(i)), "2, 4, 6 or 8 dogs?", all=TRUE), "8, 8, 8 or 8 dogs?")
expect_equal(ore_repl("\\d+",function(i) max(as.numeric(i)), "2, 4, 6 or 8 dogs?", all=TRUE), "2, 4, 6 or 8 dogs?")
expect_equal(ore_subst("(?<numbers>\\d+)","\\k<numbers>+\\k<numbers>","2 dogs"), "2+2 dogs")
expect_equal(ore_subst("\\d+",function(i) c("no","all the"),c("2 dogs","some dogs")), c("no dogs","some dogs"))
expect_equal(ore_repl("\\d+",function(i) c("no","all the"),c("2 dogs","some dogs")), list(c("no dogs","all the dogs"), "some dogs"))
expect_equal(ore_subst("\\d+",c("no","some"),c("2 dogs","3 cats")), c("no dogs","no cats"))
expect_equal(ore_repl("\\d+",c("no","some"),c("2 dogs","3 cats")), list(c("no dogs","some dogs"), c("no cats","some cats")))
expect_equal(ore_subst("\\d+",c("no","some"),"2 dogs and 3 cats",all=TRUE), "some dogs and some cats")
expect_equal(ore_repl("\\d+",c("no","some"),"2 dogs and 3 cats",all=TRUE), c("no dogs and no cats","some dogs and some cats"))
text <- readLines("drink.txt", encoding="UTF-8")
switched <- ore_subst("(\\w)(\\w)", "\\2\\1", text, all=TRUE)
expect_equal(Encoding(switched), "UTF-8")
expect_error(ore_subst("\\d+",character(0),"2 dogs"), "No replacement")
expect_error(ore_subst("\\d+","\\k<name>","2 dogs"))
expect_error(ore_subst("\\d+","\\1","2 dogs"))
expect_equal(ore_subst("\\d+",function(i) NULL,"2 dogs"), " dogs")
expect_equal(ore_split("[\\s\\-()]+","(801) 234-5678"), c("","801","234","5678"))
|
rp.plots2pdf <- function(rp.object,
file="rp_plots.pdf",
groups=NULL,
page.breaks=NULL,
bw=FALSE
) {
if(!methods::is(rp.object,"ResponsePatterns"))
stop("The object is not of class ResponsePatterns")
grDevices::pdf(file=file)
for(i in 1:rp.object@n.obs)
rp.plot(rp.object, obs=i, plot=TRUE, text.output=FALSE, groups=groups, page.breaks=page.breaks, bw=bw)
grDevices::dev.off()
message("Plots saved to ",file)
}
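## Hedged usage sketch: writing one PDF page per observation. It assumes
## 'rp' is a ResponsePatterns object, e.g. as returned by rp.indices()
## from the same package (the constructor name is an assumption here).
if (FALSE) {
  rp <- rp.indices(my_survey_data)
  rp.plots2pdf(rp, file = "rp_plots.pdf", bw = TRUE)
}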
|
commonArgs <- function(par, fn, ctrl, rho) {
rho$n <- n <- length(rho$par <- as.double(par))
stopifnot(all(is.finite(par)),
is.function(fn),
length(formals(fn)) >= 1)
rho$.feval. <- integer(1)
cc <- do.call(function(npt = min(n+2L, 2L * n), rhobeg = NA,
rhoend = NA, iprint = 0L, maxfun=10000L,
obstop=TRUE, force.start=FALSE,...) {
if (length(list(...))>0) warning("unused control arguments ignored")
list(npt = npt, rhobeg = rhobeg, rhoend = rhoend,
iprint = iprint, maxfun = maxfun, obstop = obstop,
force.start = force.start)
}, ctrl)
ctrl <- new.env(parent = emptyenv())
lapply(names(cc), function(nm) assign(nm, cc[[nm]], envir = ctrl))
ctrl$npt <- as.integer(max(n + 2L, min(ctrl$npt, ((n+1L)*(n+2L)) %/% 2L)))
if (ctrl$npt > (2 * n + 1))
warning("Setting npt > 2 * length(par) + 1 is not recommended.")
if (is.na(ctrl$rhobeg))
ctrl$rhobeg <- min(0.95, 0.2 * max(abs(par)))
if (is.na(ctrl$rhoend)) ctrl$rhoend <- 1.0e-6 * ctrl$rhobeg
stopifnot(0 < ctrl$rhoend, ctrl$rhoend <= ctrl$rhobeg)
if (ctrl$maxfun < 10 * n^2)
warning("maxfun < 10 * length(par)^2 is not recommended.")
ctrl
}
bobyqa <- function(par, fn, lower = -Inf, upper = Inf, control = list(), ...)
{
nn <- names(par)
ctrl <- commonArgs(par, fn, control, environment())
n <- length(par)
fn1 <- function(x) {
names(x) <- nn
fn(x, ...)
}
checkObj <- fn1(par)
if(length(checkObj) > 1 || !is.numeric(checkObj))
stop("Objective function must return a single numeric value.")
lower <- as.double(lower); upper <- as.double(upper)
if (length(lower) == 1) lower <- rep(lower, n)
if (length(upper) == 1) upper <- rep(upper, n)
stopifnot(length(lower) == n, length(upper) == n, all(lower < upper))
if (any(par < lower | par > upper)) {
if (ctrl$obstop)
stop("Starting values violate bounds")
else {
par <- pmax(lower, pmin(par, upper))
warning("Some parameters adjusted to nearest bound")
}
}
rng <- upper - lower
if (any(rng < 2 * ctrl$rhobeg)) {
warning("All upper - lower must be >= 2*rhobeg. Changing rhobeg")
ctrl$rhobeg <- 0.2 * min(rng)
}
verb <- 1 < (ctrl$iprint <- as.integer(ctrl$iprint))
if (all(is.finite(upper)) && all(is.finite(lower)) &&
all(par >= lower) && all(par <= upper) ) {
if (verb) cat("ctrl$force.start = ", ctrl$force.start,"\n")
if (!ctrl$force.start) {
i <- par - lower < ctrl$rhobeg
if (any(i)) {
par[i] <- lower[i] + ctrl$rhobeg
warning("Some parameters adjusted away from lower bound")
}
i <- upper - par < ctrl$rhobeg
if (any(i)) {
par[i] <- upper[i] - ctrl$rhobeg
warning("Some parameters adjusted away from upper bound")
}
}
}
if (verb) {
cat("npt =", ctrl$npt, ", n = ",n,"\n")
cat("rhobeg = ", ctrl$rhobeg,", rhoend = ", ctrl$rhoend, "\n")
}
if(ctrl$iprint > 0)
cat("start par. = ", par, "fn = ", checkObj, "\n")
retlst<- .Call(bobyqa_cpp, par, lower, upper, ctrl, fn1)
if (retlst$ierr > 0){
if (retlst$ierr == 10) {
retlst$ierr<-2
retlst$msg<-"bobyqa -- NPT is not in the required interval"
} else if (retlst$ierr == 320) {
retlst$ierr<-5
retlst$msg<-"bobyqa detected too much cancellation in denominator"
} else if (retlst$ierr == 390) {
retlst$ierr<-1
retlst$msg<-"bobyqa -- maximum number of function evaluations exceeded"
} else if (retlst$ierr == 430) {
retlst$ierr<-3
retlst$msg<-"bobyqa -- a trust region step failed to reduce q"
} else if (retlst$ierr == 20) {
retlst$ierr<-4
retlst$msg<-"bobyqa -- one of the box constraint ranges is too small (< 2*RHOBEG)"
}
} else {
retlst$msg<-"Normal exit from bobyqa"
}
retlst
}
newuoa <- function(par, fn, control = list(), ...)
{
nn <- names(par)
ctrl <- commonArgs(par + 0, fn, control, environment())
n <- length(par)
fn1 <- function(x) {
names(x) <- nn
fn(x, ...)
}
checkObj <- fn1(par)
if(length(checkObj) > 1 || !is.numeric(checkObj))
stop("Objective function must return a single numeric value.")
verb <- 1 < (ctrl$iprint <- as.integer(ctrl$iprint))
if (verb) {
cat("npt =", ctrl$npt, ", n = ",n,"\n")
cat("rhobeg = ", ctrl$rhobeg,", rhoend = ", ctrl$rhoend, "\n")
}
if(ctrl$iprint > 0)
cat("start par. = ", par, "fn = ", checkObj, "\n")
retlst<-.Call(newuoa_cpp, par, ctrl, fn1)
if (retlst$ierr > 0){
if (retlst$ierr == 10) {
retlst$ierr<-2
retlst$msg<-"newuoa -- NPT is not in the required interval"
} else if (retlst$ierr == 320) {
retlst$ierr<-5
retlst$msg<-"newuoa detected too much cancellation in denominator"
} else if (retlst$ierr == 390) {
retlst$ierr<-1
retlst$msg<-"newuoa -- maximum number of function evaluations exceeded"
} else if (retlst$ierr == 3701) {
retlst$ierr<-3
retlst$msg<-"newuoa -- a trust region step failed to reduce q"
}
} else {
retlst$msg<-"Normal exit from newuoa"
}
retlst
}
uobyqa <- function(par, fn, control = list(), ...)
{ nn <- names(par)
ctrl <- commonArgs(par + 0, fn, control, environment())
n <- length(par)
fn1 <- function(x) {
names(x) <- nn
fn(x, ...)
}
checkObj <- fn1(par)
if(length(checkObj) > 1 || !is.numeric(checkObj))
stop("Objective function must return a single numeric value.")
verb <- 1 < (ctrl$iprint <- as.integer(ctrl$iprint))
if (verb) {
cat("npt =", ctrl$npt, ", n = ",n,"\n")
cat("rhobeg = ", ctrl$rhobeg,", rhoend = ", ctrl$rhoend, "\n")
}
if(ctrl$iprint > 0)
cat("start par. = ", par, "fn = ", checkObj, "\n")
retlst<-.Call(uobyqa_cpp, par, ctrl, fn1)
if (retlst$ierr > 0){
if (retlst$ierr == 390) {
retlst$ierr<-1
retlst$msg<-"uobyqa -- maximum number of function evaluations exceeded"
} else if (retlst$ierr == 2101) {
retlst$ierr<-3
retlst$msg<-"uobyqa -- a trust region step failed to reduce q"
}
} else {
retlst$msg<-"Normal exit from uobyqa"
}
retlst
}
print.minqa <- function(x, digits = max(3, getOption("digits") - 3), ...)
{
cat("parameter estimates:", toString(x$par), "\n")
cat("objective:", toString(x$fval), "\n")
cat("number of function evaluations:", toString(x$feval), "\n")
invisible(x)
}
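## Hedged usage sketch: minimising the two-dimensional Rosenbrock
## function with bobyqa(). This assumes the compiled bobyqa_cpp routine
## is available, i.e. the package is installed rather than merely sourced.
if (FALSE) {
  fr <- function(x) 100 * (x[2] - x[1]^2)^2 + (1 - x[1])^2
  fit <- bobyqa(c(-1.2, 1), fr, lower = c(-2, -2), upper = c(2, 2))
  print(fit)  # dispatches to print.minqa()
}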
|
context("ft_search")
test_that("ft_search returns...", {
skip_on_cran()
vcr::use_cassette("ft_search", {
aa <- ft_search(query = 'ecology', from = 'plos')
flds <- c('id','author','eissn','journal','counter_total_all','alm_twitterCount')
bb <- ft_search(query = 'climate change', from = 'plos', plosopts = list(fl = flds))
Sys.sleep(1)
cc <- ft_search(query = 'ecology', from = 'crossref')
dd <- ft_search(query = 'owls', from = 'biorxiv')
}, preserve_exact_body_bytes = TRUE)
expect_is(aa, "ft")
expect_is(bb, "ft")
expect_is(cc, "ft")
expect_is(dd, "ft")
expect_is(aa$plos, "ft_ind")
expect_is(aa$bmc, "ft_ind")
expect_is(bb$plos, "ft_ind")
expect_is(cc$crossref, "ft_ind")
expect_is(dd$biorxiv, "ft_ind")
expect_is(aa$plos$found, "integer")
expect_is(aa$plos$license, "list")
expect_is(aa$plos$opts, "list")
expect_is(aa$plos$data, "data.frame")
expect_is(aa$plos$data$id, "character")
expect_equal(
sort(names(bb$plos$data)),
sort(c("id", "alm_twitterCount", "counter_total_all", "journal", "eissn", "author")))
expect_is(cc$crossref$data, "data.frame")
expect_true(cc$crossref$opts$filter[[1]])
expect_is(dd$biorxiv$data, "data.frame")
expect_match(dd$biorxiv$data$url[1], "http")
})
test_that("ft_search works with scopus", {
skip_on_cran()
vcr::use_cassette("ft_search_scopus", {
aa <- ft_search(query = 'ecology', from = 'scopus')
})
expect_is(aa, "ft")
expect_is(aa$scopus, "ft_ind")
expect_is(aa$scopus$opts, "list")
expect_equal(aa$scopus$source, "scopus")
expect_type(aa$scopus$found, "double")
expect_is(aa$scopus$data, "data.frame")
res <- ft_search(query = '[TITLE-ABS-KEY (("Chen caeculescens atlantica") AND (demograph* OR model OR population) AND (climate OR "climatic factor" OR "climatic driver" OR precipitation OR rain OR temperature))]', from = 'scopus')
expect_is(res, "ft")
expect_equal(NROW(res$scopus$data), 0)
})
test_that("ft_search works for larger requests", {
skip_on_cran()
skip_on_ci()
vcr::use_cassette("ft_search_entrez", {
res_entrez <- ft_search(query = 'ecology', from = 'entrez', limit = 200)
})
expect_is(res_entrez, "ft")
expect_is(res_entrez$entrez, "ft_ind")
expect_equal(NROW(res_entrez$entrez$data), 200)
vcr::use_cassette("ft_search_plos", {
res_plos <- ft_search(query = 'ecology', from = 'plos', limit = 200)
})
expect_is(res_plos, "ft")
expect_is(res_plos$plos, "ft_ind")
expect_equal(NROW(res_plos$plos$data), 200)
vcr::use_cassette("ft_search_crossref", {
res_cr <- ft_search(query = 'ecology', from = 'crossref', limit = 200)
})
expect_is(res_cr, "ft")
expect_is(res_cr$crossref, "ft_ind")
expect_equal(NROW(res_cr$crossref$data), 200)
})
test_that("ft_search fails well", {
skip_on_cran()
vcr::use_cassette("ft_search_fails_well_entrez_limit2large", {
expect_error(ft_search(query = 'ecology', from = 'entrez', limit = 2000))
})
expect_error(ft_search(query = 'ecology', from = 'crossref', limit = 2000),
"limit parameter must be 1000 or less")
expect_error(ft_search(from = 'plos'), "argument \"query\" is missing")
expect_error(ft_search("foobar", from = 'stuff'), "'arg' should be one of")
vcr::use_cassette("ft_search_fails_well_plos_no_results", {
plos_no_data <- ft_search(5, from = 'plos')
})
expect_equal(NROW(plos_no_data$plos$data), 0)
expect_equal(plos_no_data$plos$found, 0)
vcr::use_cassette("ft_search_fails_well_biorxiv_no_results", {
expect_error(biorxiv_search("asdfasdfasdfasfasfd"),
"no results found in Biorxiv")
})
})
test_that("ft_search curl options work", {
skip_on_cran()
expect_error(
ft_search(query='ecology', from='plos', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search(query='ecology', from='bmc', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search(query='ecology', from='crossref', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search(query='ecology', from='biorxiv', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search(query='ecology', from='europmc', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search(query='ecology', from='scopus', timeout_ms = 1),
"[Tt]ime")
expect_error(
ft_search("Y='19'...", from='microsoft',
maopts = list(key = Sys.getenv("MICROSOFT_ACADEMIC_KEY")),
timeout_ms = 1),
"[Tt]ime")
})
|
chk_lt <- function(x, value = 0, x_name = NULL) {
if (vld_lt(x, value)) {
return(invisible(x))
}
if (is.null(x_name)) x_name <- deparse_backtick_chk(substitute(x))
if (length(x) == 1L) {
abort_chk(x_name, " must be less than ", cc(value), ", not ", cc(x), x = x, value = value)
}
abort_chk(x_name, " must have values less than ", cc(value), x = x, value = value)
}
vld_lt <- function(x, value = 0) all(x[!is.na(x)] < value)
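## Hedged usage sketch: vld_lt() is the vectorised predicate behind
## chk_lt(); chk_lt() returns its input invisibly on success and calls
## abort_chk() otherwise.
if (FALSE) {
  vld_lt(c(-1, -2))     # TRUE: all values below the default bound of 0
  vld_lt(3, value = 2)  # FALSE
  chk_lt(-1)            # returns -1 invisibly
  chk_lt(3)             # errors: `3` must be less than 0
}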
|
library(readr)
suppressPackageStartupMessages(library(dplyr))
library(tidyr)
library(ggplot2)
gap_dat_orig <- read_tsv("04_gap-merged.tsv")
(china <- gap_dat_orig %>%
filter(country == "China"))
china <- china %>%
filter(year %% 5 == 2)
china_tidy <- china %>%
gather(key = "variable", value = "value",
pop, lifeExp, gdpPercap)
ggplot(china_tidy, aes(x = year, y = value)) +
facet_wrap(~ variable, scales="free_y") +
geom_point() + geom_line() +
scale_x_continuous(breaks = seq(1950, 2011, 15))
china_gdp_fit <- lm(gdpPercap ~ year, china, subset = year <= 1982)
summary(china_gdp_fit)
(china_gdp_1952 <- china_gdp_fit %>%
predict(data.frame(year = 1952)) %>%
round(6))
china_pop_fit <- lm(pop ~ year, china)
summary(china_pop_fit)
(china_pop_1952 <- china_pop_fit %>%
predict(data.frame(year = 1952)) %>%
as.integer())
china_lifeExp_1952 <- 44
gap_dat_new <- rbind(gap_dat_orig,
data.frame(country = 'China', year = 1952,
pop = china_pop_1952, continent = 'Asia',
lifeExp = china_lifeExp_1952,
gdpPercap = china_gdp_1952))
gap_dat_new <- gap_dat_new %>%
arrange(country, year)
china_tidy <- gap_dat_new %>%
filter(country == "China") %>%
gather(key = "variable", value = "value",
pop, lifeExp, gdpPercap)
ggplot(china_tidy, aes(x = year, y = value)) +
facet_wrap(~ variable, scales="free_y") +
geom_point() + geom_line() +
scale_x_continuous(breaks = seq(1950, 2011, 15))
write_tsv(gap_dat_new, "05_gap-merged-with-china-1952.tsv")
devtools::session_info()
|
pi0est <- function(p, lambda = seq(0.05, 0.95, by = 0.01), dof = 3) {
p <- sort(p)
len <- length(lambda)
prob <- numeric(len)
for (i in 1:len) prob[i] <- mean( p > lambda[i] ) / (1 - lambda[i])
spi0 <- smooth.spline(lambda, prob, df = dof)
min( predict(spi0, x = lambda[len])$y, 1 )
}
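## Hedged usage sketch: estimating the null proportion from a mixture of
## uniform (null) and right-skewed (alternative) p-values; the true pi0
## in this simulation is 0.8, so the estimate should land near it.
if (FALSE) {
  set.seed(1)
  p <- c(runif(8000), rbeta(2000, 0.5, 10))
  pi0est(p)
}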
|
beval <- function(dset, blist, outdim="std", w = .5, dpparam=c(.05,2,2,0), bpparam=c(.01,1,1.5,0), justparams=FALSE){
if(justparams){return(list(weta = w,dimpparams = dpparam, boxpparams = bpparam))}
penal <- function(n,hdisp=2,expo=2,vdisp=0){
return(pmax(0, (n-hdisp)^expo - vdisp))
}
if (outdim=="std"){
outdim <- ncol(dset)
}
tpts <- nrow(dset)
this <- sum(dset[,outdim])
dpscaler <- dpparam[1]
bpscalar <- bpparam[1]
B <- length(blist)
bdims <- rep(0,B)
for (i in 1:B){
bdims[i] <- length(blist[[i]][[1]])
}
bincvecs <- matrix(TRUE,nrow=tpts,ncol=B)
dnet <- c()
for (b in 1:B){
dimvect <- blist[[b]][[1]]
dnet <- c(dnet,dimvect)
bmat <- blist[[b]][[2]]
incvecs <- !logical(length=tpts)
for (i in 1:length(dimvect)){
di <- dimvect[i]
incvecs <- incvecs & (dset[,di] >= bmat[i,1])
incvecs <- incvecs & (dset[,di] < bmat[i,2])
}
bincvecs[,b] <- incvecs
}
masvecs <- logical(length=nrow(dset))
for (b in 1:B){
masvecs <- masvecs | bincvecs[,b]
}
dleft <- dset[masvecs,]
hisinset <- sum(dleft[,outdim])
tinset <- nrow(dleft)
dens <- hisinset/tinset
cov <- hisinset/this
dimpenal <- dpscaler*sum(penal(bdims,dpparam[2],dpparam[3],dpparam[4]))
bpenal <- bpscalar*penal(length(blist),bpparam[2],bpparam[3],bpparam[4])
dct <- (dens)^w*(cov)^(1-w)
obj <- dimpenal + bpenal - dct
bindim <- rep("-",outdim-1)
bindim[unique(dnet)] <- "X"
return(c(dens,cov,dct,obj,length(blist),min(bdims),max(bdims),sum(bdims),length(unique(dnet)),bindim))
}
|
context("TEST MODELTIME WORKFLOW VS MODELS")
m750 <- m4_monthly %>% filter(id == "M750")
splits <- initial_time_split(m750, prop = 0.9)
model_fit_no_boost <- arima_reg() %>%
set_engine(engine = "auto_arima") %>%
fit(log(value) ~ date, data = training(splits))
test_that("Auto ARIMA (Parsnip)", {
model_table <- modeltime_table(model_fit_no_boost)
expect_s3_class(model_table, "mdl_time_tbl")
expect_true(all(c(".model_id", ".model", ".model_desc") %in% names(model_table)))
calibrated_tbl <- model_table %>%
modeltime_calibrate(testing(splits))
expect_s3_class(calibrated_tbl, "mdl_time_tbl")
expect_equal(nrow(calibrated_tbl), 1)
expect_true(".calibration_data" %in% names(calibrated_tbl))
expect_message({
calibrated_tbl %>%
modeltime_forecast()
})
forecast_tbl <- calibrated_tbl %>%
modeltime_forecast(testing(splits))
expect_equal(nrow(forecast_tbl), nrow(testing(splits)))
accuracy_tbl <- calibrated_tbl %>%
modeltime_accuracy(metric_set = metric_set(rsq, yardstick::mae))
expect_equal(nrow(accuracy_tbl), 1)
expect_true(all(c("rsq", "mae") %in% names(accuracy_tbl)))
expect_false(any(c("mape", "mase", "smape", "rmse") %in% names(accuracy_tbl)))
future_forecast_tbl <- calibrated_tbl %>%
modeltime_refit(data = m750) %>%
modeltime_forecast(h = "3 years")
expect_equal(future_forecast_tbl$.index[1], ymd("2015-07-01"))
})
wflw_fit_arima <- workflow() %>%
add_model(
spec = arima_reg() %>%
set_engine("auto_arima")
) %>%
add_recipe(
recipe = recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month") %>%
step_log(value)
) %>%
fit(training(splits))
test_that("Auto ARIMA (Workflow)", {
model_table <- modeltime_table(wflw_fit_arima)
expect_s3_class(model_table, "mdl_time_tbl")
expect_true(all(c(".model_id", ".model", ".model_desc") %in% names(model_table)))
calibrated_tbl <- model_table %>%
modeltime_calibrate(testing(splits))
expect_s3_class(calibrated_tbl, "mdl_time_tbl")
expect_equal(nrow(calibrated_tbl), 1)
expect_true(".calibration_data" %in% names(calibrated_tbl))
forecast_tbl <- calibrated_tbl %>%
modeltime_forecast(testing(splits))
expect_equal(nrow(forecast_tbl), nrow(testing(splits)))
accuracy_tbl <- calibrated_tbl %>%
modeltime_accuracy(metric_set = metric_set(rsq, yardstick::mae))
expect_equal(nrow(accuracy_tbl), 1)
expect_true(all(c("rsq", "mae") %in% names(accuracy_tbl)))
expect_false(any(c("mape", "mase", "smape", "rmse") %in% names(accuracy_tbl)))
future_forecast_tbl <- calibrated_tbl %>%
modeltime_refit(data = m750) %>%
modeltime_forecast(h = "3 years")
expect_equal(future_forecast_tbl$.index[1], ymd("2015-07-01"))
})
test_that("Models for Mega Test", {
skip_on_cran()
model_fit_boosted <- arima_boost(
non_seasonal_ar = 0,
non_seasonal_differences = 1,
non_seasonal_ma = 1,
seasonal_ar = 1,
seasonal_differences = 1,
seasonal_ma = 1
) %>%
set_engine(engine = "arima_xgboost") %>%
fit(log(value) ~ date + as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_fit_ets <- exp_smoothing() %>%
set_engine("ets") %>%
fit(log(value) ~ date + as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- exp_smoothing(
error = "multiplicative",
trend = "additive",
season = "multiplicative") %>%
set_engine("ets")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_log(value)
wflw_fit_ets <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_fit_lm <- linear_reg() %>%
set_engine("lm") %>%
fit(log(value) ~ as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- linear_reg() %>%
set_engine("lm")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month") %>%
step_log(value)
wflw_fit_lm <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_fit_mars <- mars(mode = "regression") %>%
set_engine("earth") %>%
fit(log(value) ~ as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- mars(mode = "regression") %>%
set_engine("earth")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month", ordinal = FALSE) %>%
step_mutate(date_num = as.numeric(date)) %>%
step_normalize(date_num) %>%
step_rm(date) %>%
step_log(value)
wflw_fit_mars <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_fit_svm <- svm_rbf(mode = "regression") %>%
set_engine("kernlab") %>%
fit(log(value) ~ as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- svm_rbf(mode = "regression") %>%
set_engine("kernlab")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month") %>%
step_rm(date) %>%
step_dummy(all_nominal()) %>%
step_log(value)
wflw_fit_svm <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_fit_randomForest <- rand_forest(mode = "regression") %>%
set_engine("randomForest") %>%
fit(log(value) ~ as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- rand_forest() %>%
set_engine("randomForest")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month") %>%
step_mutate(date_num = as.numeric(date)) %>%
step_rm(date) %>%
step_dummy(all_nominal()) %>%
step_log(value)
wflw_fit_randomForest <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_fit_xgboost <- boost_tree(mode = "regression") %>%
set_engine("xgboost", objective = "reg:squarederror") %>%
fit(log(value) ~ as.numeric(date) + month(date, label = TRUE),
data = training(splits))
model_spec <- boost_tree() %>%
set_engine("xgboost", objective = "reg:squarederror")
recipe_spec <- recipe(value ~ date, data = training(splits)) %>%
step_date(date, features = "month") %>%
step_mutate(date_num = as.numeric(date)) %>%
step_rm(date) %>%
step_dummy(all_nominal()) %>%
step_log(value)
wflw_fit_xgboost <- workflow() %>%
add_recipe(recipe_spec) %>%
add_model(model_spec) %>%
fit(training(splits))
model_table <- modeltime_table(
model_fit_no_boost,
wflw_fit_arima,
model_fit_boosted,
model_fit_ets,
wflw_fit_ets,
model_fit_lm,
wflw_fit_lm,
model_fit_mars,
wflw_fit_mars,
model_fit_svm,
wflw_fit_svm,
model_fit_randomForest,
wflw_fit_randomForest,
model_fit_xgboost,
wflw_fit_xgboost
)
expect_error(modeltime_table("a"))
expect_s3_class(model_table, "mdl_time_tbl")
expect_equal(ncol(model_table), 3)
expect_error(modeltime_accuracy(1))
accuracy_tbl <- model_table %>%
modeltime_calibrate(testing(splits)) %>%
modeltime_accuracy()
expect_s3_class(accuracy_tbl, "tbl_df")
expect_true(all(!is.na(accuracy_tbl$mae)))
expect_error(modeltime_forecast(1))
forecast_tbl <- model_table %>%
modeltime_calibrate(testing(splits)) %>%
modeltime_forecast(
new_data = testing(splits),
actual_data = bind_rows(training(splits), testing(splits))
)
expect_s3_class(forecast_tbl, "tbl_df")
expect_equal(
nrow(forecast_tbl),
nrow(model_table) * nrow(testing(splits)) + nrow(bind_rows(training(splits), testing(splits)))
)
model_table_refit <- model_table %>%
modeltime_calibrate(testing(splits)) %>%
modeltime_refit(data = m750)
expect_s3_class(model_table_refit, "mdl_time_tbl")
forecast_tbl <- model_table_refit %>%
filter(!.model_id %in% c(8)) %>%
modeltime_forecast(
new_data = future_frame(m750, .length_out = "3 years"),
actual_data = m750
)
expect_s3_class(forecast_tbl, "tbl_df")
actual_tbl <- forecast_tbl %>% filter(.model_desc == "ACTUAL")
future_predictions_tbl <- forecast_tbl %>% filter(.model_desc != "ACTUAL")
expect_true(all(tail(actual_tbl$.index, 1) < future_predictions_tbl$.index))
})
|
Q1 <- c(-0.1677489, -0.7369231, -0.3682588, 0.5414703)
Q2 <- c(-0.8735598, 0.1145235, -0.2093062, 0.4242270)
Q3 <- c(0.426681700, -0.20287610, 0.43515810, -0.76643420)
Q4 <- matrix(c(-0.1677489, -0.7369231, -0.3682588, 0.5414703,
-0.8735598, 0.1145235, -0.2093062, 0.4242270,
0.426681700, -0.20287610, 0.43515810, -0.76643420),3,4,byrow=TRUE)
EV1 <- c(-0.1995301, -0.8765382, -0.4380279, 114.4324)
EV2 <- c(-9.646669e-001, 1.264676e-001, -2.311356e-001, 1.297965e+002)
EV3 <- c(6.642793e-001, -3.158476e-001, 6.774757e-001, 2.800695e+002)
EV4 <- matrix(c(-1.995301e-001, -8.765382e-001, -4.380280e-001, 1.144324e+002,
-9.646669e-001, 1.264676e-001, -2.311356e-001, 1.297965e+002,
6.642793e-001, -3.158476e-001, 6.774757e-001, 2.800695e+002),3,4,byrow=TRUE)
DCM1 <- matrix(c(-0.3573404, -0.1515663, 0.9215940, 0.6460385, 0.6724915, 0.3610947, -0.6744939, 0.7244189, -0.1423907),3,3,byrow=TRUE)
DCM2 <- matrix(c(0.88615060, -0.3776729, 0.2685150,-0.02249957, -0.6138316, -0.7891163,0.46285090, 0.6932344, -0.5524447),3,3,byrow=TRUE)
DCM3 <- matrix(c(0.5389574, -0.8401672, 0.06036564,0.4939131, 0.2571603, -0.83061330,0.6823304, 0.4774806, 0.55356800),3,3,byrow=TRUE)
DCM4 <- array(c(-0.35734040, 0.64603850, -0.67449390, -0.15156630, 0.67249150, 0.72441890,
0.92159400, 0.36109470, -0.14239070, 0.88615060, -0.02249957, 0.46285090,
-0.37767290, -0.61383160, 0.69323440, 0.26851500, -0.78911630, -0.55244470,
0.53895740, 0.49391310, 0.68233040, -0.84016720, 0.25716030, 0.47748060,
0.06036564, -0.83061330, 0.55356800),dim=c(3,3,3))
EAxyx <- matrix(c(0.3734309, 1.427920, 2.3205212,-1.2428130, 0.985502, 0.9821003,-1.4982479, 2.157439, 0.6105777),3,3,byrow=TRUE)
EAxyz <- matrix(c( 2.07603606, -0.7402790, -1.376712,-0.02538478, 0.4812086, -0.897943, 0.74181498, 0.7509456, -2.429857),3,3,byrow=TRUE)
EAxzy <- matrix(c( -2.9199163, -0.8101911, -1.3627437,-0.5515727, -0.7659671, 0.6973821,-1.8678238, -0.4977850, 2.2523838),3,3,byrow=TRUE)
EAyzx <- matrix(c( 1.4175036, 0.3694416, 2.3762543,0.4524244, -0.9093689, -0.0366379,3.0329736, -0.9802081, 2.0508342),3,3,byrow=TRUE)
EAyxz <- matrix(c( -2.0579912, 0.70238301, 2.6488233, 0.4813408, -0.02250147, -0.9096943, 0.9022582, 0.51658433, -1.8710396),3,3,byrow=TRUE)
EAzxy <- matrix(c( -2.3190385, -0.1521527, 1.9406908,-0.8460724, -0.3872818, 0.2942186,-2.0648275, -0.9975913, 0.1115396),3,3,byrow=TRUE)
EAzyx <- matrix(c( 1.1951869, 1.17216717, -2.7404413,-0.9600165, 0.27185113, -0.4028824,-2.1586537, 0.06040236, -1.0004280),3,3,byrow=TRUE)
EAxzx <- matrix(c( -1.197365, 1.427920, -2.391868,-2.813609, 0.985502, 2.552897,-3.069044, 2.157439, 2.181374),3,3,byrow=TRUE)
EAyxy <- matrix(c( 1.777046, 2.3083664, 0.5096786,2.069637, 0.9098912, -1.5993010,2.624796, 1.8308788, -1.0343298),3,3,byrow=TRUE)
EAyzy <- matrix(c( -2.935343, 2.3083664, -1.061118,-2.642752, 0.9098912, 3.113088,-2.087593, 1.8308788, -2.605126),3,3,byrow=TRUE)
EAzxz <- matrix(c( -0.8069433, 1.9362151, -1.733798, 1.6193689, 0.4818253, -2.523541, 0.9442344, 1.0015975, -3.069866),3,3,byrow=TRUE)
EAzyz <- matrix(c( -2.3777396, 1.9362151, -0.1630019, 0.0485726, 0.4818253, -0.9527442,-0.6265619, 1.0015975, -1.4990700),3,3,byrow=TRUE)
EAall <- rbind(EAxyx,EAxyz,EAxzy,EAyzx,EAyxz,EAzxy,EAzyx,EAxzx,EAyxy,EAyzy,EAzxz,EAzyz)
EAvct <- c('xyx','xyz','xzy','yzx','yxz','zxy','zyx','xzx','yxy','yzy','zxz','zyz')
print('Qnorm and Qnormalize')
Qnormalize(Q1)
Qnormalize(Q4)
Qnorm(Q1)
Qnorm(Q4)
print('Q2EV')
Q2EV(Q1)
Q2EV(Q2)
Q2EV(Q3)
Q2EV(Q4)
print('EV2Q')
EV2Q(EV1,1e-7)
EV2Q(EV2,1e-7)
EV2Q(EV3,1e-7)
EV2Q(EV4,1e-7)
print('DCM2Q')
DCM2Q(DCM1)
DCM2Q(DCM2)
DCM2Q(DCM3)
DCM2Q(DCM4)
print('Q2DCM')
Q2DCM(Q1)
Q2DCM(Q2)
Q2DCM(Q3)
Q2DCM(Q4)
print('Q2EA')
for (EAv in EAvct) { print (Q2EA(Q1,EAv));print (Q2EA(Q2,EAv));print (Q2EA(Q3,EAv)) }
for (EAv in EAvct) print (Q2EA(Q4,EAv))
print('EA2Q')
n <- 1;for (EAv in rep(EAvct,each=3)) { print (EA2Q(EAall[n,],EAv));n <- n+1 }
n <- 1;for (EAv in EAvct) { print (EA2Q(EAall[n:(n+2),],EAv));n <- n+3 }
print('DCM2EV')
DCM2EV(DCM1)
DCM2EV(DCM2)
DCM2EV(DCM3)
DCM2EV(DCM4)
print('EV2DCM')
EV2DCM(EV1,1e-7)
EV2DCM(EV2,1e-7)
EV2DCM(EV3,1e-7)
EV2DCM(EV4,1e-7)
print('DCM2EA')
for (EAv in EAvct) { print (DCM2EA(DCM1,EAv));print (DCM2EA(DCM2,EAv));print (DCM2EA(DCM3,EAv)) }
for (EAv in EAvct) print (DCM2EA(DCM4,EAv))
print('EA2DCM')
print(EA2DCM(EAxyx,'xyx',1e-7))
print(EA2DCM(EAxyz,'xyz',1e-7))
print(EA2DCM(EAxzy,'xzy',1e-7))
print(EA2DCM(EAyzx,'yzx',1e-7))
print(EA2DCM(EAyxz,'yxz',1e-7))
print(EA2DCM(EAzxy,'zxy',1e-7))
print(EA2DCM(EAzyx,'zyx',1e-7))
print(EA2DCM(EAxzx,'xzx',1e-7))
print(EA2DCM(EAyxy,'yxy',1e-7))
print(EA2DCM(EAyzy,'yzy',1e-7))
print(EA2DCM(EAzxz,'zxz',1e-7))
print(EA2DCM(EAzyz,'zyz',1e-7))
print('EA2EV')
print(EA2EV(EAxyx,'xyx',1e-7))
print(EA2EV(EAxyz,'xyz',1e-7))
print(EA2EV(EAxzy,'xzy',1e-7))
print(EA2EV(EAyzx,'yzx',1e-7))
print(EA2EV(EAyxz,'yxz',1e-7))
print(EA2EV(EAzxy,'zxy',1e-7))
print(EA2EV(EAzyx,'zyx',1e-7))
print(EA2EV(EAxzx,'xzx',1e-7))
print(EA2EV(EAyxy,'yxy',1e-7))
print(EA2EV(EAyzy,'yzy',1e-7))
print(EA2EV(EAzxz,'zxz',1e-7))
print(EA2EV(EAzyz,'zyz',1e-7))
print('EV2EA')
for (EAv in EAvct) { print (EV2EA(EV1,EAv,1e-7));print (EV2EA(EV2,EAv,1e-7));print (EV2EA(EV3,EAv,1e-7)) }
for (EAv in EAvct) print (EV2EA(EV4,EAv,1e-7))
print('Q2GL')
Q2GL(Q1)
Q2GL(Q2)
Q2GL(Q3)
Q2GL(Q4)
print('EA2EA')
EA2EA(EAxyx,'xyx','xyz')
EAxyz
EA2EA(EAxyx,'xyx','xzy')
EAxzy
EA2EA(EAxyx,'xyx','yzx')
EAyzx
EA2EA(EAxyx,'xyx','yxz')
EAyxz
EA2EA(EAxyx,'xyx','zxy')
EAzxy
EA2EA(EAxyx,'xyx','zyx')
EAzyx
EA2EA(EAxyx,'xyx','xzx')
EAxzx
EA2EA(EAxyx,'xyx','yxy')
EAyxy
EA2EA(EAxyx,'xyx','yzy')
EAyzy
EA2EA(EAxyx,'xyx','zxz')
EAzxz
EA2EA(EAxyx,'xyx','zyz')
EAzyz
|
test_that("scan_data works with dittodb-mocked Postgres database connection", {
skip_on_cran()
skip_on_ci()
dittodb::with_mock_db({
con <- DBI::dbConnect(
drv = RPostgres::Postgres(),
dbname = "trade_statistics",
user = "guest",
password = "",
host = "tradestatistics.io",
port = 5432
)
yrpc <- DBI::dbGetQuery(con, "SELECT * FROM hs07_yrpc LIMIT 100")
scan_results <- expect_warning(scan_data(yrpc))
DBI::dbDisconnect(con)
expect_is(scan_results, "examination_page")
expect_is(scan_results, "shiny.tag.list")
expect_is(scan_results, "list")
})
})
|
blr_multi_model_fit_stats <- function(model, ...) UseMethod("blr_multi_model_fit_stats")
blr_multi_model_fit_stats.default <- function(model, ...) {
blr_check_model(model)
k <- list(model, ...)
j <- lapply(k, blr_model_fit_stats)
n <- length(j)
names(j) <- seq_len(n)
for (i in seq_len(n)) {
class(j[[i]]) <- "list"
}
output <- setDF(rbindlist(j))
result <- list(mfit = output)
class(result) <- "blr_multi_model_fit_stats"
return(result)
}
print.blr_multi_model_fit_stats <- function(x, ...) {
df <- x$mfit[c(-7, -13)]
measures <- c(
"Log-Lik Intercept Only", "Log-Lik Full Model", "Deviance",
"LR", "Prob > LR", "MCFadden's R2", "McFadden's Adj R2",
"ML (Cox-Snell) R2", "Cragg-Uhler(Nagelkerke) R2",
"McKelvey & Zavoina's R2", "Efron's R2", "Count R2",
"Adj Count R2", "AIC", "BIC"
)
model_id <- seq_len(nrow(x$mfit))
col_names <- c(paste("Model", model_id))
print(multi_fit_stats_table(df, measures, col_names))
}
multi_fit_stats_table <- function(df, measures, col_names) {
y <- round(t(df), 3)
colnames(y) <- col_names
cbind(data.frame(Measures = measures), y)
}
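## Hedged usage sketch: comparing fit statistics of two nested logistic
## regressions. The hsb2 data set is assumed to be available (it ships
## with blorr, from which these functions appear to come).
if (FALSE) {
  m1 <- glm(honcomp ~ female + read, data = hsb2, family = binomial())
  m2 <- glm(honcomp ~ female + read + science, data = hsb2, family = binomial())
  blr_multi_model_fit_stats(m1, m2)
}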
|
chk_not_subset <- function(x, values, x_name = NULL) {
if (vld_not_subset(x, values)) {
return(invisible(x))
}
values <- sort(unique(values), na.last = TRUE)
if (is.null(x_name)) x_name <- deparse_backtick_chk(substitute(x))
if (length(x) == 1L) {
abort_chk(x_name, " must not match ", cc(unique(c(x, values)), " or "), x = x, values = values)
}
abort_chk(x_name, " must not have any values matching ", cc(values, " or "), x = x, values = values)
}
vld_not_subset <- function(x, values) !any(x %in% values) || !length(x)
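## Hedged usage sketch: vld_not_subset() is TRUE when x shares no values
## with 'values' (or is empty); chk_not_subset() errors otherwise.
if (FALSE) {
  vld_not_subset(1:3, 4:6)          # TRUE: no overlap
  vld_not_subset(1:3, 3:6)          # FALSE: 3 appears in both
  chk_not_subset("a", c("b", "c"))  # returns "a" invisibly
}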
|
.checkModel <- function(fileName){
filename <- .check_file(fileName)
covsection <- .getModelSection(filename, section = "COVARIATE", block = "DEFINITION")
if (!is.null(covsection)) {
if (any(grepl("distribution", covsection))) {
stop(
"Invalid model file. Definition of distributions for covariates in the [COVARIATE] block is not supported anymore. \n",
"Instead, generate the covariates in your R script and pass them as a data.frame to the 'parameter' argument of simulx. \n",
"See 'http://simulx.lixoft.com/definition/model/' for model definition.",
call. = F
)
}
if (any(grepl("P\\(([a-zA-Z0-9]|\\s|=)*\\)", covsection))) {
stop(
"Invalid model file. Definition of distributions for covariates in the [COVARIATE] block is not supported anymore. \n",
"Instead, generate the covariates in your R script and pass them as a data.frame to the 'parameter' argument of simulx. \n",
"See 'http://simulx.lixoft.com/definition/model/' for model definition.",
call. = F
)
}
}
popsection <- .getModelSection(filename, section = "POPULATION", block = "DEFINITION")
if (!is.null(popsection)) {
if (any(grepl("distribution", popsection))) {
stop(
"Definition of distributions for population parameters in the [POPULATION] block is not supported anymore. \n",
"Instead, generate the population parameters in your R script and pass them as a data.frame to the 'parameter' argument of simulx. \n",
"See 'http://simulx.lixoft.com/definition/model/' for model definition.",
call. = F
)
}
if (any(grepl("P\\(([a-zA-Z0-9]|\\s|=)*\\)", popsection))) {
stop(
"Definition of distributions for population parameters in the [POPULATION] block is not supported anymore. \n",
"Instead, generate the population parameters in your R script and pass them as a data.frame to the 'parameter' argument of simulx. \n",
"See 'http://simulx.lixoft.com/definition/model/' for model definition.",
call. = F
)
}
}
return(invisible(TRUE))
}
.checkModelOutputSection <- function(fileName){
lines <- suppressWarnings(readLines(con = fileName, n = -1))
bIsOUT <- any(grepl(pattern = 'OUTPUT:', x = lines, fixed = TRUE))
return(bIsOUT)
}
.checkParameter <- function(parameter){
if(!is.null(parameter)){
if(!(is.vector(parameter)||(is.data.frame(parameter))))
stop("Invalid paramerer. It must be a vector or a data.frame.", call. = F)
}
return(parameter)
}
.checkMissingParameters <- function(parameter, expectedParameters, frommlx = FALSE) {
diff <- setdiff(expectedParameters, parameter)
ismissing <- FALSE
if (length(diff)) {
ismissing <- TRUE
if (length(diff) == 1) {
message <- paste0(" '", diff, "' has not been specified. It ")
} else {
message <- paste0(" '", paste(diff, collapse = "', '"), "' have not been specified. They ")
}
if (frommlx) {
message <- paste0(message, "will be set to the value estimated by Monolix.")
} else {
message <- paste0(message, "will be set to 1.")
}
warning(message, call. = F)
}
return(ismissing)
}
.checkExtraParameters <- function(parameter, expectedParameters) {
diff <- setdiff(parameter, expectedParameters)
isextra <- FALSE
if (length(diff)) {
isextra <- TRUE
if (length(diff) == 1) {
warning("Found extra parameters. '", diff, "' is not in the model.", call. = F)
} else {
warning("Found extra parameters. '", paste(diff, collapse = "', '"), "' are not in the model.", call. = F)
}
}
return(isextra)
}
.checkUnitaryList <- function(inputList, mandatoryNames, defaultNames, listName){
if (is.null(inputList)) {
return(inputList)
}
if (!(is.data.frame(inputList) | is.vector(inputList))) {
stop("Invalid ", listName, ". It must be a vector with at least the following fields: ", paste(mandatoryNames, collapse = ', '), ".", call. = F)
}
namesList <- names(inputList)
for (indexName in seq_along(namesList)) {
if (! (is.list(inputList[[indexName]]) || is.vector(inputList[[indexName]]) || is.factor(inputList[[indexName]]))) {
stop("Invalid field ", namesList[indexName], " in '", listName, "'. It must be a vector.", call. = F)
}
}
if (!is.null(mandatoryNames)) {
missingName <- setdiff(mandatoryNames, namesList)
if (length(missingName) > 0) {
message <- paste0("Mandatory fields are missing in '", listName, "', ")
if (length(missingName) == 1) {
message <- paste0(message, "'", missingName,"' is not defined. \n")
}else{
message <- paste0(message, "('", paste(missingName, collapse = "', '"), "') are not defined. \n")
}
message <- paste0(message, "Skip '", listName, "'. ")
if (grepl("output", listName) & any(missingName %in% c("name", "time"))) {
message <- paste0(message, "If it is a parameter, note that simulx function now returns all parameters by default.")
}
warning(message, call. = F)
inputList <- NULL
}
}
extraName <- setdiff(namesList, union(mandatoryNames,defaultNames))
if (length(extraName) > 0) {
message <- paste0("Invalid fields will be ignored in '", listName, "'.")
if( length(extraName) == 1){
message <- paste0(message, " ", extraName," will be ignored.")
}else{
message <- paste0(message, " (", paste(extraName, collapse = ','),") will be ignored.")
}
warning(message, call. = F)
inputList <- inputList[! namesList %in% extraName]
}
return(inputList)
}
.checkUnitaryTreatment<- function(treatment, listName = "treatment"){
mandatoryNames <- c("time", "amount")
defaultNames <- c("tinf", "rate", "type")
if (is.data.frame(treatment)) {
indexID <- which(names(treatment) == 'id')
if (length(indexID)) mandatoryNames <- c(mandatoryNames, names(treatment)[indexID])
}
names(treatment)[names(treatment) == "adm"] <- "type"
names(treatment)[names(treatment) == "amt"] <- "amount"
if (is.element("target", names(treatment))) {
stop("Invalid field 'target' for 'treatment' argument. You must use 'adm=' instead.", call. = FALSE)
}
treatment <- .checkUnitaryList(
treatment,
mandatoryNames = mandatoryNames,
defaultNames = defaultNames,
listName = listName
)
if (!is.null(treatment)) {
if (is.element("type", names(treatment))) {
.check_strict_pos_integer(treatment$type, "treatment type")
} else{
treatment$type = 1
}
.check_vector_of_double(treatment$time, "treatment times")
nbTimePoint = length(treatment$time)
.check_vector_of_double(treatment$amount, "treatment amount")
if (length(treatment$amount) > 1) {
.check_vectors_length(treatment$amount, treatment$time, "treatment amount", "treatment times")
} else {
treatment$amount <- rep(treatment$amount, nbTimePoint)
}
if (length(treatment$type) > 1) {
.check_vectors_length(treatment$type, treatment$time, "treatment type", "treatment times")
}
if (is.element("rate", names(treatment))) {
.check_vector_of_pos_double(treatment$rate, "treatment rate")
if (length(treatment$rate) > 1) {
.check_vectors_length(treatment$rate, treatment$time, "treatment rate", "treatment times")
} else {
treatment$rate <- rep(treatment$rate, nbTimePoint)
}
}
if (is.element("tinf", names(treatment))) {
.check_vector_of_pos_double(treatment$tinf, "treatment infusion duration")
if (length(treatment$tinf) > 1) {
.check_vectors_length(treatment$tinf, treatment$time, "treatment infusion duration", "treatment times")
} else{
treatment$tinf <- rep(treatment$tinf, nbTimePoint)
}
}
} else {
treatment <- as.list(treatment)
}
return(treatment)
}
.checkTreatment <- function(treatment){
if (is.element("time", names(treatment))) {
treatment <- list(treatment)
}
for (itreat in seq_along(treatment)) {
treatementValue <- treatment[[itreat]]
if (length(treatment) > 1) {
listName <- paste0("treatment ", itreat)
} else {
listName <- "treatment"
}
treatment[[itreat]] <- .checkUnitaryTreatment(treatementValue, listName = listName)
}
treatment <- treatment[sapply(treatment, function(e) length(e) > 0)]
return(treatment)
}
.checkUnitaryOutput<- function(output, listName = "output"){
mandatoryNames <- c("name", "time")
defaultNames <- c("lloq", "uloq", "limit")
if (!is.null(output)) {
if (is.data.frame(output)) {
indexID <- which(names(output) == 'id')
if (length(indexID)) defaultNames <- c(defaultNames, names(output)[indexID], names(output))
}
output <- .checkUnitaryList(
output,
mandatoryNames = mandatoryNames,
defaultNames = defaultNames,
listName = listName
)
if (is.element("name", names(output)))
.check_vector_of_char(output$name, "output name")
if (is.element("time", names(output))) {
if (is.data.frame(output$time)) {
if (! is.element("time", names(output$time))) {
stop("When output time is a dataframe, it must contains at least a time column.", call. = FALSE)
}
} else {
if (length(output$time) > 1) {
.check_vector_of_double(output$time, "output time")
} else {
if (output$time != "none") .check_vector_of_double(output$time, "output time")
}
}
}
}
if (is.null(output)) {
output <- as.list(output)
}
return(output)
}
.checkOutput <- function(output){
if (is.data.frame(output) | is.element("name", names(output))) {
output <- list(output)
}
for (iout in seq_along(output)) {
outputValue <- output[[iout]]
if (length(output) > 1) {
listName <- paste0("output ", iout)
} else {
listName <- "output"
}
output[[iout]] <- .checkUnitaryOutput(outputValue, listName = listName)
}
output <- output[sapply(output, function(e) length(e) > 0)]
if (!length(output)) output <- NULL
return(output)
}
.checkOutputDirectory <- function(directory) {
if(!is.null(directory)){
if(!dir.exists(directory))
stop("Directory '", directory, "' does not exist.", call. = FALSE)
}
return(invisible(TRUE))
}
.checkDelimiter <- function(delimiter, argname) {
if (is.null(delimiter)) return(invisible(TRUE))
if (! is.element(delimiter, c("\t", " ", ";", ",")))
stop("'", argname, "' must be one of: {\"",
paste(c("\\t", " ", ";", ","), collapse="\", \""), "\"}.",
call. = FALSE)
return(invisible(TRUE))
}
.checkExtension <- function(ext, argname) {
if (is.null(ext)) return(invisible(TRUE))
if (! is.element(ext, c("csv", "txt")))
stop("'", argname, "' must be one of: {\"",
paste(c("csv", "txt"), collapse="\", \""), "\"}.",
call. = FALSE)
return(invisible(TRUE))
}
.checkUnitaryRegressor<- function(regressor, listName = "regressor") {
mandatoryNames <- c('name', 'time', 'value')
defaultNames <- NULL
if (is.data.frame(regressor)) {
indexID <- which(names(regressor) == 'id')
mandatoryNames <- c("time")
if (length(indexID)) defaultNames <- c(names(regressor)[indexID], names(regressor))
}
regressor <- .checkUnitaryList(
regressor,
mandatoryNames = mandatoryNames,
defaultNames = defaultNames,
listName = listName
)
if (is.element("name", names(regressor))) {
if (length(regressor$name) > 1)
stop("The regressor name must have only one value.", call. = F)
.check_char(regressor$name, "regressor name")
}
if(is.element("value", names(regressor))) {
.check_vector_of_double(regressor$value, "regressor value")
}
if (is.element("time", names(regressor))) {
.check_vector_of_double(regressor$time, "regressor time")
}
if (is.null(regressor)) regressor <- as.list(regressor)
return(regressor)
}
.checkRegressor <- function(regressor){
if (is.data.frame(regressor) | is.element("name", names(regressor))) {
regressor <- list(regressor)
}
for (ireg in seq_along(regressor)) {
regValue <- regressor[[ireg]]
if (length(regressor) > 1) {
listName <- paste0("regressor ", ireg)
} else {
listName <- "regressor"
}
regressor[[ireg]] <- .checkUnitaryRegressor(regValue, listName = listName)
}
regressor <- regressor[sapply(regressor, function(e) length(e) > 0)]
return(regressor)
}
.checkUnitaryGroup<- function(group, listName){
if (is.element("size", names(group))) {
if (length(group$size) > 1) {
stop("group size must a strictly positive integer of size 1", call. = F)
}
.check_strict_pos_integer(group$size, "treatment size")
} else {
}
if (is.element("parameter", names(group))) {
group$parameter <- .transformParameter(parameter=group$parameter)
group$parameter <- .checkParameter(parameter=group$parameter)
}
if (is.element("output", names(group))) {
group$output <- .checkOutput(output = group$output)
}
if (is.element("treatment", names(group))) {
group$treatment <- .checkTreatment(treatment = group$treatment)
group$treatment <- .splitTreatment(group$treatment)
}
if (is.element("regressor", names(group))) {
group$regressor <- .checkRegressor(regressor = group$regressor)
group$regressor <- .transformRegressor(group$regressor)
}
return(group)
}
.checkGroup <- function(group){
allowedNames <- c("size", "parameter", "output", "treatment", "regressor", "level")
if (any(is.element(allowedNames, names(group))) | !all(sapply(group, .is_list_or_named_vector))) {
group <- list(group)
}
for (igroup in seq_along(group)) {
gValue <- group[[igroup]]
if (length(group) > 1) {
listName <- paste0("group ", igroup)
} else {
listName <- "group"
}
group[[igroup]] <- .checkUnitaryGroup(gValue, listName = listName)
}
return(group)
}
.checkSimpopParameter <- function(parameter) {
if (is.null(parameter)) return(parameter)
if (!is.data.frame(parameter)) stop("parameter must be a dataframe object.", call. = FALSE)
.check_in_vector(names(parameter), "'parameter names'", c("pop.param", "sd", "trans", "lim.a", "lim.b"))
if (! all(is.element(c("pop.param", "sd"), names(parameter))))
stop("You must specified at least 'pop.param' and 'sd' in 'parameter' argument")
paramName <- row.names(parameter)
if (is.null(parameter$trans)) {
parameter$trans <- "N"
i.omega <- c(grep("^omega_", paramName), grep("^omega2_", paramName))
i.corr <- unique(c(grep("^r_", paramName), grep("^corr_", paramName)))
parameter$trans[i.omega] <- "L"
parameter$trans[i.corr] <- "R"
}
.check_in_vector(parameter$trans, "'parameter trans'", c("N", "L", "G", "P", "R"))
if (is.null(parameter$lim.a)) {
parameter$lim.a <- NA
parameter$lim.a[parameter$trans == "G"] <- 0
}
if (is.null(parameter$lim.b)) {
parameter$lim.b <- NA
parameter$lim.b[parameter$trans == "G"] <- 1
}
if (! all(is.na(parameter[parameter$trans != "G", c("lim.a", "lim.b")])))
stop("lim.a and lim.b must be specified for logit transformations only (trans = G). For other transformations, set lim.a and lim.b to NaN.", call. = FALSE)
lima <- parameter[parameter$trans == "G",]$lim.a
limb <- parameter[parameter$trans == "G",]$lim.b
if (!all(lima < limb))
stop("lim.a must be strictly inferior to lim.b.", call. = FALSE)
parameter$lim.a[parameter$trans == "N"] <- - Inf
parameter$lim.b[parameter$trans == "N"] <- Inf
.check_in_range(parameter$pop.param[parameter$trans == "L"], "pop.param of lognormal distribution",
lowerbound = 0, includeBound = FALSE)
parameter$lim.a[parameter$trans == "L"] <- 0
parameter$lim.b[parameter$trans == "L"] <- Inf
.check_in_range(parameter$pop.param[parameter$trans == "G"], "pop.param of logit distribution",
lowerbound = parameter$lim.a[parameter$trans == "G"],
upperbound = parameter$lim.b[parameter$trans == "G"],
includeBound = FALSE)
.check_in_range(parameter$pop.param[parameter$trans == "R"], "pop.param of r distribution",
lowerbound = -1, upperbound = 1, includeBound = TRUE)
parameter$lim.a[parameter$trans == "R"] <- -1
parameter$lim.b[parameter$trans == "R"] <- 1
.check_in_range(parameter$pop.param[parameter$trans == "P"], "pop.param of probit normal distribution",
lowerbound = 0, upperbound = 1, includeBound = TRUE)
parameter$lim.a[parameter$trans == "P"] <- 0
parameter$lim.b[parameter$trans == "P"] <- 1
return(parameter)
}
.checkSimpopCorr <- function(corr, nbParams) {
if (nrow(corr) != nbParams | ncol(corr) != nbParams)
stop("'corr' must be a matrix of shape (", nbParams, " x ", nbParams, ").", call. = FALSE)
.check_in_range(corr[! is.na(corr)], "corr elements", lowerbound = -1 - 1e-4, upperbound = 1 + 1e-4, includeBound = TRUE)
return(corr)
}
.check_file <- function(filename, fileType = "File") {
if (!file.exists(filename))
stop(fileType, " ", filename, " does not exist.", call. = FALSE)
filename <- normalizePath(filename)
return(filename)
}
.check_integer <- function(int, argname = NULL) {
if(!(is.double(int)||is.integer(int)))
stop("Invalid ", argname, ". It must be an integer.", call. = F)
if (!all(as.integer(int) == int))
stop("Invalid ", argname, ". It must be an integer.", call. = F)
return(int)
}
.check_strict_pos_integer <- function(int, argname) {
if(!(is.double(int)||is.integer(int)))
stop("Invalid ", argname, ". It must be a strictly positive integer.", call. = F)
if ((int <= 0) || (!as.integer(int) == int))
stop("Invalid ", argname, ". It must be a strictly positive integer.", call. = F)
return(int)
}
.check_pos_integer <- function(int, argname) {
if(!(is.double(int)||is.integer(int)))
stop("Invalid ", argname, ". It must be a positive integer.", call. = F)
if ((int < 0) || (!as.integer(int) == int))
stop("Invalid ", argname, ". It must be a positive integer.", call. = F)
return(int)
}
.check_double <- function(d, argname) {
if(!(is.double(d)||is.integer(d)))
stop("Invalid ", argname, ". It must be a double.", call. = F)
return(d)
}
.check_vector_of_double <- function(v, argname) {
if(!(is.double(v)||is.integer(v)))
stop("Invalid ", argname, ". It must be a vector of doubles.", call. = F)
return(v)
}
.check_pos_double <- function(d, argname) {
if(!(is.double(d)||is.integer(d)) || (d < 0))
stop("Invalid ", argname, ". It must be a positive double.", call. = F)
return(d)
}
.check_vector_of_pos_double <- function(v, argname) {
if(!(is.double(v)||is.integer(v)) || any(v < 0))
stop("Invalid ", argname, ". It must be a vector of positive doubles.", call. = F)
return(v)
}
.check_strict_pos_double <- function(d, argname) {
if(!(is.double(d)||is.integer(d)) || (d <= 0))
stop("Invalid ", argname, ". It must be a strictly positive double.", call. = F)
return(d)
}
.check_vector_of_strict_pos_double <- function(v, argname) {
if(!(is.double(v)||is.integer(v)) || any(v <= 0))
stop("Invalid ", argname, ". It must be a vector of strictly positive doubles.", call. = F)
return(v)
}
.check_char <- function(str, argname = NULL) {
if (!is.character(str)) {
stop("Invalid ", argname, ". It must be a string", call. = F)
}
return(str)
}
.check_vector_of_char <- function(v, argname) {
if (!is.character(v)) {
stop("Invalid ", argname, ". It must be a vector of strings", call. = F)
}
return(v)
}
.check_bool <- function(bool, argname) {
if (!is.logical(bool))
stop("Invalid ", argname, ". It must be logical", call. = F)
return(bool)
}
.check_vectors_length <- function(v1, v2, argname1, argname2) {
if (length(v1) != length(v2))
stop(argname1, " vector and ", argname2, " vector must have the same length.", call. = F)
return(invisible(TRUE))
}
.check_in_vector <- function(arg, argname, vector) {
if (is.null(arg)) return(invisible(TRUE))
if (! all(is.element(arg, vector))) {
stop(argname, " must be in {'", paste(vector, collapse="', '"), "'}.", call. = F)
}
return(invisible(TRUE))
}
.check_in_range <- function(arg, argname, lowerbound = -Inf, upperbound = Inf, includeBound = TRUE) {
if (includeBound) {
if (! all(arg >= lowerbound) | ! all(arg <= upperbound)) {
stop(argname, " must be in [", lowerbound, ", ", upperbound, "].", call. = F)
}
} else {
if (! all(arg > lowerbound) | ! all(arg < upperbound)) {
stop(argname, " must be in ]", lowerbound, ", ", upperbound, "[.", call. = F)
}
}
return(invisible(TRUE))
}
is.string <- function(str) {
isStr <- TRUE
if (!is.null(names(str))) {
isStr <- FALSE
} else if (length(str) > 1) {
isStr <- FALSE
} else if (! is.character(str)) {
isStr <- FALSE
}
return(isStr)
}
|
context("Burr functions")
test_that("Density, distribution function, quantile function, raw moments and random generation for the Burr distribution work correctly", {
expect_equal(2, qburr(pburr(2, log.p = TRUE), log.p = TRUE))
expect_equal(pburr(2), mburr(truncation = 2))
x <- rburr(1e5, shape2 = 3)
expect_equal(mean(x), mburr(r = 1, shape2 = 3, lower.tail = FALSE), tolerance = 1e-1)
expect_equal(sum(x[x > quantile(x, 0.1)]) / length(x), mburr(r = 1, shape2 = 3, truncation = as.numeric(quantile(x, 0.1)), lower.tail = FALSE), tolerance = 1e-1)
})
test_that("Comparing probabilites of power-transformed variables", {
coeff <- burr_plt(shape1 = 2, shape2 = 3, scale = 1, a = 5, b = 7)$coefficients
expect_equal(pburr(3, shape1 = 2, shape2 = 3, scale = 1), pburr(5 * 3^7, shape1 = coeff[["shape1"]], shape2 = coeff[["shape2"]], scale = coeff[["scale"]]))
coeff <- burr_plt(shape1 = 2, shape2 = 3, scale = 1, a = 5, b = 7, inv = TRUE)$coefficients
expect_equal(pburr(0.9, shape1 = coeff[["shape1"]], shape2 = coeff[["shape2"]], scale = coeff[["scale"]]), pburr(5 * 0.9^7, shape1 = 2, shape2 = 3, scale = 1))
x <- rburr(1e5, shape1 = 2, shape2 = 3, scale = 1)
coeff <- burr_plt(shape1 = 2, shape2 = 3, scale = 1, a = 2, b = 0.5)$coefficients
y <- rburr(1e5, shape1 = coeff[["shape1"]], shape2 = coeff[["shape2"]], scale = coeff[["scale"]])
expect_equal(mean(2 * x^0.5), mean(y), tolerance = 1e-1)
expect_equal(mean(2 * x^0.5), mburr(r = 1, shape1 = coeff[["shape1"]], shape2 = coeff[["shape2"]], scale = coeff[["scale"]], lower.tail = FALSE), tolerance = 1e-1)
expect_equal(mean(y), mburr(r = 1, shape1 = coeff[["shape1"]], shape2 = coeff[["shape2"]], scale = coeff[["scale"]], lower.tail = FALSE), tolerance = 1e-1)
})
|
predict.lvm.mixture <- function(object,p=coef(object,full=TRUE),model="normal",predict.fun=NULL,...) {
p0 <- coef(object,full=FALSE)
pp <- p[seq_along(p0)]
pr <- p[length(p0)+seq(length(p)-length(p0))];
if (length(pr)<object$k) pr <- c(pr,1-sum(pr))
myp <- modelPar(object$multigroup,p=pp)$p
logff <- sapply(seq(object$k), function(j) (logLik(object$multigroup$lvm[[j]],p=myp[[j]],data=object$data,indiv=TRUE,model=model)))
logplogff <- t(apply(logff,1, function(y) y+log(pr)))
zmax <- apply(logplogff,1,max)
logsumpff <- log(rowSums(exp(logplogff-zmax)))+zmax
gamma <- exp(apply(logplogff,2,function(y) y - logsumpff))
M <- 0; V <- 0
x <- lava::vars(object$model)
for (i in seq(object$k)) {
m <- Model(object$multigroup)[[i]]
P <- predict(m,data=object$data,p=myp[[i]],x=x)
if (!is.null(predict.fun)) {
M <- M+gamma[,i]*predict.fun(P,as.vector(attributes(P)$cond.var), ...)
} else {
M <- M+gamma[,i]*P
V <- V+gamma[,i]*as.vector(attributes(P)$cond.var) + gamma[,i]*P^2
}
}
V <- V-M^2
return(M)
}
|
.survival.default <- function(x, alpha, beta){
num <- beta * (.zeta_x(alpha, x + 1))
den <- (VGAM::zeta(alpha) - (1 - beta)*(.zeta_x(alpha, x + 1)))
return(num/den)
}
smoezipf <- function(x, alpha, beta, show.plot=F){
values <- sapply(x, .survival.default, alpha = alpha, beta = beta, simplify = T)
if(show.plot){
graphics::barplot(values)
}
return(values)
}
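## Hedged usage sketch: survival probabilities of a Marshall-Olkin
## extended Zipf distribution. Requires VGAM (for zeta()) and the
## internal .zeta_x() helper from the same package.
if (FALSE) {
  smoezipf(1:10, alpha = 2.5, beta = 1.3)
  smoezipf(1:10, alpha = 2.5, beta = 1.3, show.plot = TRUE)
}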
|
geometricboot <-
function(j=NULL,wr1,wr2,x.pred,y.pred,n,cbb,joint){
if (is.numeric(cbb)) {
xresid2 <- c(wr1,wr1)
yresid2 <- c(wr2,wr2)
k <- n/cbb
xblocks <- sample(1:n,k,replace=TRUE)
if (joint==FALSE) yblocks <- sample(1:n,k,replace=TRUE)
else yblocks <- xblocks
xressamp <- c(t(outer(xblocks,0:(cbb-1),FUN="+")))
yressamp <- c(t(outer(yblocks,0:(cbb-1),FUN="+")))
y.boot<-yresid2[yressamp]+y.pred
x.boot<-xresid2[xressamp]+x.pred
}
else {
if (joint==FALSE) {
rx <- sample(wr1,n,replace=TRUE)
ry <- sample(wr2,n,replace=TRUE)
}
else {
resid.sampler <- sample(1:n,n,replace=TRUE)
rx <- wr1[resid.sampler]
ry <- wr2[resid.sampler]
}
x.boot<-rx + x.pred
y.boot<-ry + y.pred
}
x <- x.boot
y <- y.boot
start <- direct(x,y)
ti<-n
for (i in 1:length(x)) {
x0<-x[i]
y0<-y[i]
zmin1<-optimize(ellipsespot,c(0,pi),"x0"=x0,"y0"=y0,"cx"=start$vals["cx"],"cy"=start$vals["cy"],"semi.major"=start$vals["semi.major"],"semi.minor"=start$vals["semi.minor"],"rote.rad"=start$vals["theta"])
zmin2<-optimize(ellipsespot,c(pi,2*pi),"x0"=x0,"y0"=y0,"cx"=start$vals["cx"],"cy"=start$vals["cy"],"semi.major"=start$vals["semi.major"],"semi.minor"=start$vals["semi.minor"],"rote.rad"=start$vals["theta"])
ti[i]<-ifelse(zmin1$objective < zmin2$objective, zmin1, zmin2)[[1]]
}
pred.x<-start$vals["cx"] +start$vals["semi.major"]*cos(start$vals["theta"])*cos(ti)-start$vals["semi.minor"]*sin(start$vals["theta"])*sin(ti)
pred.y<-start$vals["cy"] +start$vals["semi.major"]*sin(start$vals["theta"])*cos(ti)+start$vals["semi.minor"]*cos(start$vals["theta"])*sin(ti)
model <- list("period.time"=ti,"values"=c("cx"=as.vector(start$vals["cx"]),"cy"=as.vector(start$vals["cy"]),
"semi.major"=as.vector(start$vals["semi.major"]),"semi.minor"=as.vector(start$vals["semi.minor"]),
"rote.rad"=as.vector(start$vals["theta"])),"x"=x,"y"=y)
results <- geom_ellipse(model,1.001)
cx <- as.vector(results$values[4]); cy <- as.vector(results$values[5]);
theta <- as.vector(results$values[1]); semi.major <- as.vector(results$values[2]);
semi.minor <- as.vector(results$values[3]);
z <- c("cx"=cx,"cy"=cy,"theta"=theta,"semi.major"=semi.major,"semi.minor"=semi.minor,"theta.deg"=theta*180/pi,"phase.angle"=ti[1])
z
}
|
teamBowlingScorecardAllOppnAllMatches <- function(matches, theTeam) {
  ## NULL assignments silence R CMD check notes about dplyr's
  ## non-standard evaluation
  noBalls = wides = team = runs = bowler = wicketKind = wicketPlayerOut = NULL
  team = bowler = ball = wides = noballs = runsConceded = overs = NULL
  over = wickets = maidens = NULL
  ## Deliveries bowled against theTeam
  a <- filter(matches, team == theTeam)
  ## Strip the innings prefix (e.g. "1st.") from the ball id, then drop the
  ## delivery number, leaving just the over number
  a1 <- unlist(strsplit(a$ball[1], "\\."))
  a2 <- paste(a1[1], "\\.", sep = "")
  b <- a %>%
    select(bowler, ball, noballs, wides, runs, wicketKind, wicketPlayerOut) %>%
    mutate(over = gsub(a2, "", ball)) %>%
    mutate(over = gsub("\\.\\d+", "", over))
  ## Sum runs, wides and no-balls per bowler per over
  c <- summarise(group_by(b, bowler, over), sum(runs, wides, noballs))
  names(c) <- c("bowler", "over", "runsConceded")
  d <- summarize(group_by(c, bowler), maidens = sum(runsConceded == 0))
  e <- summarize(group_by(c, bowler), runs = sum(runsConceded))
  f <- select(c, bowler, over)
  g <- summarise(group_by(f, bowler), overs = length(unique(over)))
  ## Wickets: deliveries on which a batsman was dismissed
  h <- b %>%
    select(bowler, wicketKind, wicketPlayerOut) %>%
    filter(wicketPlayerOut != "nobody")
  i <- summarise(group_by(h, bowler), wickets = length(wicketPlayerOut))
  ## Assemble the scorecard: overs, maidens, runs, wickets
  j <- full_join(g, d, by = "bowler")
  k <- full_join(j, e, by = "bowler")
  l <- full_join(k, i, by = "bowler")
  if (sum(is.na(l$wickets)) != 0) {
    l[is.na(l$wickets), ]$wickets = 0 # bowlers who took no wickets
  }
  l <- arrange(l, desc(wickets), desc(runs), maidens)
  l
}
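## Minimal self-contained check on fabricated deliveries (not real match
## data; in practice `matches` comes from yorkr match data frames). Column
## names and the "innings.over.delivery" ball-id format follow what the
## function itself expects.
library(dplyr)
matches_demo <- data.frame(
  team = "India", bowler = "Bumrah",
  ball = paste0("1st.", rep(0:1, each = 6), ".", rep(1:6, 2)),
  noballs = 0, wides = 0,
  runs = c(0, 0, 0, 0, 0, 0, 1, 4, 0, 2, 1, 0),
  wicketKind = "caught", wicketPlayerOut = c(rep("nobody", 11), "Smith"),
  stringsAsFactors = FALSE
)
teamBowlingScorecardAllOppnAllMatches(matches_demo, "India")
## expected: Bumrah with overs = 2, maidens = 1, runs = 8, wickets = 1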
|
parseMiniSEED <- function(buffer) {
  ## Hand the raw miniSEED buffer to the compiled C parsing routine
  result <- .Call("parseMiniSEED", buffer)
  return(result)
}
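## Hypothetical usage: read an entire miniSEED file into a raw vector and
## hand it to the parser (the file name below is illustrative only).
fname <- "example.mseed" # hypothetical path
buffer <- readBin(fname, what = "raw", n = file.info(fname)$size)
traces <- parseMiniSEED(buffer)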
|
orderm <- function(base = NULL, frontier = NULL,
                   noutput = 1, orientation = 1, M = 25, B = 500) {
  ## Order-m frontier efficiency (Cazals-Florens-Simar), estimated by
  ## Monte Carlo: orientation 1 = input-oriented, 2 = output-oriented
  if (is.null(frontier))
    frontier <- base
  if (!is.null(base) & !is.null(frontier)) {
    base <- as.matrix(base)
    frontier <- as.matrix(frontier)
  }
  if (ncol(base) != ncol(frontier))
    stop("Number of columns in base matrix and frontier matrix should be the same!")
  s <- noutput        # number of outputs (first s columns)
  m <- ncol(base) - s # number of inputs (remaining columns)
  n <- nrow(base)
  nf <- nrow(frontier)
  front.Y <- t(frontier[, 1:s, drop = FALSE])
  front.X <- t(frontier[, (s + 1):(s + m), drop = FALSE])
  base.Y <- t(base[, 1:s, drop = FALSE])
  base.X <- t(base[, (s + 1):(s + m), drop = FALSE])
  re <- data.frame(matrix(0, nrow = n, ncol = 1))
  names(re) <- c("eff")
  for (i in 1:n) {
    if (orientation == 1) {
      eff <- list()
      x0 <- base.X[, i]
      y0 <- base.Y[, i]
      for (b in 1:B) {
        ## Reference units producing at least y0
        front.idx.y <- apply(front.Y >= y0, 2, prod) == 1
        front.idx <- which(front.idx.y == 1)
        ## Draw M of them; indexing via sample.int avoids sample()'s
        ## surprising behaviour when front.idx has length 1
        front.idx.m <- front.idx[sample.int(length(front.idx), M, replace = TRUE)]
        mat <- matrix(front.X[, front.idx.m] / x0, nrow = m, ncol = length(front.idx.m))
        eff[[b]] <- min(apply(mat, 2, max))
      }
    }
    if (orientation == 2) {
      eff <- list()
      x0 <- base.X[, i]
      y0 <- base.Y[, i]
      for (b in 1:B) {
        ## Reference units using at most x0
        front.idx.x <- apply(front.X <= x0, 2, prod) == 1
        front.idx <- which(front.idx.x == 1)
        front.idx.m <- front.idx[sample.int(length(front.idx), M, replace = TRUE)]
        mat <- matrix(front.Y[, front.idx.m] / y0, nrow = s, ncol = length(front.idx.m))
        eff[[b]] <- max(apply(mat, 2, min))
      }
    }
    re[i, 1] <- mean(do.call(c, eff))
  }
  return(re)
}
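## Smoke test on fabricated data: one output (column 1) and one input
## (column 2), matching the column layout the function expects.
set.seed(42)
dat_demo <- data.frame(y = runif(50, 5, 10), x = runif(50, 1, 4))
res_demo <- orderm(base = dat_demo, noutput = 1, orientation = 1, M = 25, B = 100)
head(res_demo$eff) # input-oriented order-m efficiency scores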
|