print.xtable.bpca <- function(x,
hline.after=getOption("xtable.hline.after", NULL),
include.colnames=getOption("xtable.include.colnames", FALSE),
add.to.row=getOption("xtable.add.to.row", NULL),
sanitize.text.function=getOption("xtable.sanitize.text.function", NULL),
sanitize.rownames.function=getOption("xtable.sanitize.rownames.function", sanitize.text.function),
sanitize.colnames.function=getOption("xtable.sanitize.colnames.function", sanitize.text.function), ...)
{
aux_attr <- attr(x,'align')
attr(x,'align') <- c('l', aux_attr)
if(is.null(sanitize.rownames.function)){
  morerow <- function(x) paste("&", x, collapse='')
  sanitizerownamesfunction <- morerow
}else{
  morerow <- function(x) paste("&", sanitize.rownames.function(x), collapse='')
  sanitizerownamesfunction <- morerow
}
if(is.null(sanitize.colnames.function)){
sanitize.colnames.function <- function(x) x
}
if(is.null(add.to.row)){
variables <- rownames(x)[1:(length(rownames(x))-3)]
nvariables <- length(variables)
components <- dimnames(x)[[2]]
ncomponents <- length(components)
whatcomponents <- as.numeric(gsub("[A-Za-z]*","",components))
label_eigenvec <- unique(gsub("(\\\\_[\\s\\S]*)",
"",
variables,
perl=TRUE))
label_eigenval <- rownames(x)[length(rownames(x))-2]
label_variance <- rownames(x)[-(1:(nvariables+1))]
newvariables <- gsub(paste(label_eigenvec,
"\\\\_",
sep=""),
"",
variables)
head1 <- paste("&&\\multicolumn{",
ncomponents,
"}{c}{",
sanitize.colnames.function(label_eigenval),
"} \\\\ \\cline{3-",
length(aux_attr)+1,
"}\n",
sep="")
aux_head21 <- c("&& ",
rep("",
ncomponents-1))
aux_head22 <- paste(components,
" $(\\lambda_",
whatcomponents,
"=",
round(as.numeric(x[nvariables+1,]),
attr(x,
'digits')[2]),
")$",
sep='')
aux_head23 <- paste(aux_head21,
sanitize.colnames.function(aux_head22),
collapse='&')
head2 <- paste(aux_head23,
"\\\\ \n ",
collapse="")
label_eigenvec <- if(is.null(sanitize.rownames.function)) label_eigenvec else sanitize.rownames.function(label_eigenvec)
firstvariablerow <- if(is.null(sanitize.rownames.function)) newvariables[1] else sanitize.rownames.function(newvariables[1])
aux_com1 <- paste(paste("\\hline \n \\multirow{",
nvariables,
"}{*}{",
label_eigenvec,
"}",
sep=''),
firstvariablerow,
sep='&')
aux_com11 <- gsub("(&\\s)",
"",
aux_com1,
perl=TRUE)
aux_com2 <- paste(round(x[1,],
attr(x,
'digits')[2]),
collapse='&')
if(include.colnames){
add.to.row <- list(pos=list(0, 0, 0, 0),
command=NULL)
aux_head01 <- paste("&",
colnames(x))
aux_head02 <- paste(aux_head01,
collapse="")
head0 <- paste("&",
aux_head02,
"\\\\ \n")
command <- c(head0,
head1,
head2,
paste(paste(aux_com11,
aux_com2,
sep='&'),
'\\\\ \n'))
} else {
add.to.row <- list(pos=list(0, 0, 0),
command=NULL)
command <- c(head1,
head2,
paste(paste(aux_com11,
aux_com2,
sep='&'),
'\\\\ \n'))
}
add.to.row$command <- command
}
rownames(x) <- c(newvariables,
label_eigenval,
label_variance)
if(is.null(hline.after)){
hline.after <- c(-1,
nrow(x[-c(1,nvariables+1),])-2,
nrow(x[-c(1,nvariables+1),]))
}
print.xtable(x[-c(1,nvariables+1),],
hline.after=hline.after,
include.colnames=FALSE,
sanitize.rownames.function=sanitizerownamesfunction,
add.to.row=add.to.row,
...)
}
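## --- Usage sketch (editor's addition, not part of the original source) ---
## Assuming the bpca package provides an xtable() method that returns an object
## of class "xtable.bpca" (as the print method above implies), a call would look
## roughly like the commented, hypothetical sketch below; data and options are
## illustrative only:
# library(bpca)
# library(xtable)
# bp <- bpca(iris[, 1:4])
# print(xtable(bp), include.colnames = TRUE)   # dispatches to print.xtable.bpca()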
test_that("GetQualificationScore", {
skip_if_not(CheckAWSKeys())
SearchQualificationTypes(must.be.owner = TRUE, verbose = FALSE) -> quals
quals$QualificationTypeId[[1]] -> qual1
AssignQualification(workers = "A3LXJ76P1ZZPMC", qual = qual1)
GetQualificationScore(as.factor(qual1), as.factor("A3LXJ76P1ZZPMC")) -> result
expect_type(result, "list")
GetQualificationScore(qual1, c("A3LXJ76P1ZZPMC","A3LXJ76P1ZZPMC")) -> result
expect_type(result, "list")
try(GetQualificationScore(c(qual1,qual1), "A3LXJ76P1ZZPMC"), TRUE) -> result
expect_s3_class(result, 'try-error')
RevokeQualification(qual1, "A3LXJ76P1ZZPMC")
}) |
suppressPackageStartupMessages({
library("geojsonio")
library("sf", quietly = TRUE)
})
poly_geo_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[{\"type\":\"Feature\",\"geometry\":{\"type\":\"Polygon\",\"coordinates\":[[[-76.3,-49.68],[-75.53,-51.13],[-74.71,-56.89],[-84.11,-57.09],[-77.9,-50.62],[-84.12,-49.59],[-76.3,-49.68]]]},\"properties\":{\"x\": -78, \"y\": -53}},{\"type\":\"Feature\",\"geometry\":{\"type\":\"Polygon\",\"coordinates\":[[[-68.77,69.82],[-66.26,62.96],[-74.22,60.87],[-74.12,65.22],[-74.55,65.81],[-75.66,67.03],[-68.77,69.82]]]},\"properties\":{\"x\": -71, \"y\": 65}},{\"type\":\"Feature\",\"geometry\":{\"type\":\"Polygon\",\"coordinates\":[[[136.27,65.8],[137.78,64.03],[140.03,59.56],[139.48,56.48],[133.64,62.44],[129.67,69.6],[136.27,65.8]]]},\"properties\":{\"x\": 135, \"y\": 65}}]}", class = c("json", "geo_json"))
poly_geo_list <- geojson_list(poly_geo_json)
poly_spdf <- geojson_sp(poly_geo_json)
poly_sp <- as(poly_spdf, "SpatialPolygons")
poly_sf <- st_as_sf(poly_spdf)
poly_sfc <- st_geometry(poly_sf)
test_that("ms_points works with defaults", {
expected_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-78.4154562738861,-53.95000746272258]},\"properties\":{\"x\":-78,\"y\":-53,\"rmapshaperid\":0}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-70.8687480648099,65.19505422895163]},\"properties\":{\"x\":-71,\"y\":65,\"rmapshaperid\":1}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[135.65518268439885,63.10517782011297]},\"properties\":{\"x\":135,\"y\":65,\"rmapshaperid\":2}}\n]}", class = c("json", "geo_json"))
expected_sp <- geojson_sp(expected_json)
expected_sp <- expected_sp[, setdiff(names(expected_sp), "rmapshaperid")]
expect_is(ms_points(poly_geo_json), "geo_json")
expect_is(ms_points(unclass(poly_geo_json)), "geo_json")
expect_equivalent(ms_points(poly_geo_list), geojson_list(expected_json))
expect_equivalent(ms_points(poly_spdf), expected_sp)
expect_equivalent(ms_points(poly_sp), as(expected_sp, "SpatialPoints"))
skip_if_not(has_sys_mapshaper())
expect_is(ms_points(poly_geo_json, sys = TRUE), "geo_json")
expect_is(ms_points(unclass(poly_geo_json), sys = TRUE), "geo_json")
expect_is(ms_points(poly_geo_list, sys = TRUE), "geo_list")
expect_is(ms_points(poly_spdf, sys = TRUE), "SpatialPointsDataFrame")
expect_is(ms_points(poly_sp, sys = TRUE), "SpatialPoints")
})
test_that("ms_points works with defaults with sf", {
expected_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-78.4154562738861,-53.95000746272258]},\"properties\":{\"x\":-78,\"y\":-53,\"rmapshaperid\":0}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-70.8687480648099,65.19505422895163]},\"properties\":{\"x\":-71,\"y\":65,\"rmapshaperid\":1}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[135.65518268439885,63.10517782011297]},\"properties\":{\"x\":135,\"y\":65,\"rmapshaperid\":2}}\n]}", class = c("json", "geo_json"))
expected_sf <- st_read(expected_json, quiet = TRUE, stringsAsFactors = FALSE)[1:2]
expect_equivalent(ms_points(poly_sf), expected_sf)
expect_equivalent(ms_points(poly_sfc), st_geometry(expected_sf))
skip_if_not(has_sys_mapshaper())
expect_is(ms_points(poly_sf, sys = TRUE), "sf")
expect_is(ms_points(poly_sfc, sys = TRUE), "sfc")
})
test_that("ms_points works with location=centroid", {
expected_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-78.4154562738861,-53.95000746272258]},\"properties\":{\"x\":-78,\"y\":-53,\"rmapshaperid\":0}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-70.8687480648099,65.19505422895163]},\"properties\":{\"x\":-71,\"y\":65,\"rmapshaperid\":1}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[135.65518268439885,63.10517782011297]},\"properties\":{\"x\":135,\"y\":65,\"rmapshaperid\":2}}\n]}", class = c("json", "geo_json"))
expected_sp <- geojson_sp(expected_json)
expected_sp <- expected_sp[, setdiff(names(expected_sp), "rmapshaperid")]
expect_equivalent(ms_points(poly_geo_json, location = "centroid"), ms_points(poly_geo_json))
expect_is(ms_points(poly_geo_json, location = "centroid"), "geo_json")
expect_equivalent(ms_points(poly_geo_list, location = "centroid"), geojson_list(expected_json))
expect_equivalent(ms_points(poly_spdf, location = "centroid"), expected_sp)
expected_sf <- st_read(expected_json, quiet = TRUE, stringsAsFactors = FALSE)[1:2]
expect_equivalent(ms_points(poly_sf, location = "centroid"), expected_sf)
expect_equivalent(ms_points(poly_sfc, location = "centroid"), st_geometry(expected_sf))
})
test_that("ms_points works with location=inner", {
expected_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-77.94495627388609,-54.35054796472695]},\"properties\":{\"x\":-78,\"y\":-53,\"rmapshaperid\":0}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-70.7792242552861,65.38990758263705]},\"properties\":{\"x\":-71,\"y\":65,\"rmapshaperid\":1}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[135.73366753288371,63.20605469121952]},\"properties\":{\"x\":135,\"y\":65,\"rmapshaperid\":2}}\n]}", class = c("json", "geo_json"))
expected_sp <- geojson_sp(expected_json)
expected_sp <- expected_sp[, setdiff(names(expected_sp), "rmapshaperid")]
expect_is(ms_points(poly_geo_json, location = "inner"), "geo_json")
expect_equivalent(ms_points(poly_geo_list, location = "inner"), geojson_list(expected_json))
expect_equivalent(ms_points(poly_spdf, location = "inner"), expected_sp)
expected_sf <- st_read(expected_json, quiet = TRUE, stringsAsFactors = FALSE)[1:2]
expect_equivalent(ms_points(poly_sf, location = "inner"), expected_sf, tolerance = 0.0001)
expect_equivalent(ms_points(poly_sfc, location = "inner"), st_geometry(expected_sf), tolerance = 0.0001)
})
test_that("ms_points works with x and y", {
expected_json <- structure("{\"type\":\"FeatureCollection\",\"features\":[\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-78,-53]},\"properties\":{\"x\":-78,\"y\":-53,\"rmapshaperid\":0}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-71,65]},\"properties\":{\"x\":-71,\"y\":65,\"rmapshaperid\":1}},\n{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[135,65]},\"properties\":{\"x\":135,\"y\":65,\"rmapshaperid\":2}}\n]}", class = c("json", "geo_json"))
expected_sp <- geojson_sp(expected_json)
expected_sp <- expected_sp[, setdiff(names(expected_sp), "rmapshaperid")]
expect_is(ms_points(poly_geo_json, x = "x", y = "y"), "geo_json")
expect_equivalent(ms_points(poly_geo_list, x = "x", y = "y"), geojson_list(expected_json))
expect_equivalent(ms_points(poly_spdf, x = "x", y = "y"), expected_sp)
expect_equivalent(ms_points(poly_sf, x = "x", y = "y"),
st_read(expected_json, quiet = TRUE, stringsAsFactors = FALSE)[1:2])
})
test_that("ms_points fails correctly", {
expect_error(ms_points(poly_geo_json, location = "foo"), "location must be 'centroid' or 'inner'")
expect_error(ms_points(poly_geo_json, location = "inner", x = "x", y = "y"),
"You have specified both a location and x/y for point placement")
expect_error(ms_points(poly_geo_json, location = "inner", x = "x"),
"You have specified both a location and x/y for point placement")
expect_error(ms_points(poly_geo_json, location = "inner", y = "y"),
"You have specified both a location and x/y for point placement")
expect_error(ms_points(poly_geo_json, x = "x"), "Only one of x/y pair found")
expect_error(ms_points(poly_geo_json, y = "y"), "Only one of x/y pair found")
expect_error(ms_points(poly_geo_json, force_FC = "true"),
"force_FC must be TRUE or FALSE")
})
library(hamcrest)
expected <- structure(list(breaks = c(0x1.18p+4, 0x1.2p+4, 0x1.28p+4, 0x1.3p+4,
0x1.38p+4, 0x1.4p+4, 0x1.48p+4, 0x1.5p+4, 0x1.58p+4, 0x1.6p+4,
0x1.68p+4, 0x1.7p+4, 0x1.78p+4, 0x1.8p+4, 0x1.88p+4, 0x1.9p+4,
0x1.98p+4, 0x1.ap+4, 0x1.a8p+4, 0x1.bp+4, 0x1.b8p+4, 0x1.cp+4,
0x1.c8p+4, 0x1.dp+4, 0x1.d8p+4, 0x1.ep+4, 0x1.e8p+4, 0x1.fp+4
), counts = c(2L, 3L, 4L, 7L, 26L, 14L, 25L, 41L, 58L, 62L, 85L,
80L, 81L, 76L, 76L, 85L, 58L, 60L, 49L, 28L, 31L, 22L, 9L, 6L,
7L, 4L, 1L), density = c(0x1.0624dd2f1a9fcp-8, 0x1.89374bc6a7efap-8,
0x1.0624dd2f1a9fcp-7, 0x1.cac083126e979p-7, 0x1.a9fbe76c8b439p-5,
0x1.cac083126e979p-6, 0x1.999999999999ap-5, 0x1.4fdf3b645a1cbp-4,
0x1.db22d0e560419p-4, 0x1.fbe76c8b43958p-4, 0x1.5c28f5c28f5c3p-3,
0x1.47ae147ae147bp-3, 0x1.4bc6a7ef9db23p-3, 0x1.374bc6a7ef9dbp-3,
0x1.374bc6a7ef9dbp-3, 0x1.5c28f5c28f5c3p-3, 0x1.db22d0e560419p-4,
0x1.eb851eb851eb8p-4, 0x1.916872b020c4ap-4, 0x1.cac083126e979p-5,
0x1.fbe76c8b43958p-5, 0x1.6872b020c49bap-5, 0x1.26e978d4fdf3bp-6,
0x1.89374bc6a7efap-7, 0x1.cac083126e979p-7, 0x1.0624dd2f1a9fcp-7,
0x1.0624dd2f1a9fcp-9), mids = c(0x1.1cp+4, 0x1.24p+4, 0x1.2cp+4,
0x1.34p+4, 0x1.3cp+4, 0x1.44p+4, 0x1.4cp+4, 0x1.54p+4, 0x1.5cp+4,
0x1.64p+4, 0x1.6cp+4, 0x1.74p+4, 0x1.7cp+4, 0x1.84p+4, 0x1.8cp+4,
0x1.94p+4, 0x1.9cp+4, 0x1.a4p+4, 0x1.acp+4, 0x1.b4p+4, 0x1.bcp+4,
0x1.c4p+4, 0x1.ccp+4, 0x1.d4p+4, 0x1.dcp+4, 0x1.e4p+4, 0x1.ecp+4
), xname = "c(26.5634283971954, 21.0732303298413, 24.3554456026418, 27.8262270160784, 21.0516594829065, 26.1227544800997, 21.9320839800539, 22.6745110977843, 25.3143811489893, 26.091516231568, 26.9439992510412, 22.8730734382734, 19.8777627054095, 21.7797482238853, 25.2923154723437, 24.1340208803617, 23.8839182840953, 22.657972895955, 22.3000552045009, 26.6738355797256, 18.6248888365263, 23.9565194361072, 26.4891838109457, 26.0517318962984, 24.7894878538271, 27.1548166896301, 23.7468747561327, 22.598123733951, \n 20.0035376393095, 26.0457176060119, 21.8760575904575, 23.5488556573365, 22.4658360307562, 20.9100279697375, 22.7509638057389, 27.0942650627132, 24.3760938003495, 26.7187187459996, 25.5755198092905, 22.9937517105827, 21.5380328429234, 25.0051238417917, 23.6822896606452, 21.6701022575063, 26.2435005300501, 22.0107068368343, 25.4547700327182, 23.1979176448434, 23.1408849494699, 28.2688740511134, 26.6169867557461, 20.9728060366406, 28.8029792847255, 26.0476085170716, 21.8890908680693, 20.8118884990165, \n 27.8423586684695, 19.1674722361506, 26.4586123434204, 26.9755634044577, 24.4977525211131, 19.9532248628515, 22.6973470814496, 21.9508340282816, 19.7592764595584, 21.2605845214684, 23.4624749530086, 21.5478978392629, 23.8108202342591, 24.587379394677, 22.6469309965652, 26.9319932966287, 22.4869010823946, 24.7935710575211, 27.3676541094555, 24.6779813351123, 24.8538266893032, 22.9424947956967, 23.7048967627905, 21.4975722818542, 24.3231544468967, 23.2252312655677, 24.1701429792189, 23.7550829900611, \n 24.6816604203094, 25.3468469434056, 20.6994536501591, 23.485446862901, 21.144679751462, 20.9111964214449, 25.0215173002148, 21.9629725842547, 29.567328307732, 24.8059180623643, 24.4884674152798, 26.3278575586487, 21.794075741216, 24.1250978170988, 23.2482343935872, 21.1732825957225, 26.3338377502362, 21.3736040465611, 23.5773593162406, 24.688057446777, 27.1072939945814, 21.0831549631409, 29.1234326568092, 22.93481422306, 22.6288238189562, 24.2453085216833, 22.3212406289117, 30.0857027518018, \n 24.5166356898763, 23.9815782350249, 26.6076513608488, 22.6127095692847, 22.8053132600314, 22.4035873387531, 24.1970354021746, 24.8328075337292, 21.5560214057481, 28.3526147935468, 21.8135736457659, 23.1517956162491, 26.2115084597216, 19.992081068339, 21.6426695811513, 24.7812037714352, 21.1175898866619, 22.4272693443429, 23.1215988294815, 24.2115176027199, 21.7506195172551, 21.8822284095886, 24.5037060824213, 20.0595241540384, 23.8675324858033, 23.1116176860322, 24.8936495177902, 22.3603465133677, \n 23.7921672313761, 28.2427609323447, 23.6886867004164, 24.6831147536639, 28.1046523295704, 23.6084802739167, 27.2735686041502, 25.7984189637527, 22.7053029447642, 25.2539693537766, 21.6398655515835, 25.4342120229012, 20.9585210461234, 27.3679095044265, 23.5817829794965, 23.2255036687432, 26.5314392514959, 22.4235396733226, 24.0536598159755, 28.5360263716425, 24.3932526451565, 21.9661507858564, 29.5483048763506, 29.9914975074989, 25.2195310106299, 21.8848965290227, 26.4892544320462, 23.2663079406252, \n 20.8497337065484, 25.4531711214783, 25.5375386822068, 22.6826060685614, 23.8916529346259, 21.3728521238244, 21.389706956775, 28.1219938557674, 25.6573425695554, 24.6356611992215, 26.3500506924287, 23.4039149692232, 19.6621002949442, 21.2947020985557, 27.8689634738125, 24.9665308234181, 22.8186088293961, 25.4062418695928, 27.1223415750006, 25.5702929224546, 24.9907324561226, 26.560435654452, 25.7425231322824, 24.1687342538822, 24.6654750451077, 21.604096610578, 22.7591681909341, 23.8632703785382, \n 
19.5440863032035, 23.6232023015585, 22.5323588325701, 23.5425595666018, 26.7093906672381, 26.1122774914759, 21.3714442132983, 21.4643140295544, 23.1960756066878, 24.9408626130395, 22.3093881407066, 27.2321183061534, 26.4616082328802, 23.1416139218221, 26.6275655607694, 28.1287306933453, 23.3091369997718, 22.5112080902619, 20.7165231397068, 22.055645533488, 28.0016418918253, 20.6224416896435, 27.6281610876212, 21.0350859774375, 24.2152210357361, 24.2571020847297, 21.0444114095305, 18.54397947643, \n 26.9514958719466, 22.065128806995, 20.0814952150032, 19.6049844152586, 22.823984420168, 27.2064513016542, 23.4668550808944, 22.7829935136588, 24.469794987964, 23.837855222599, 20.3810910670664, 29.3481535892034, 25.1475474099985, 25.4984780287715, 23.340381556253, 21.312969446815, 24.6806106716021, 23.2214528583199, 24.0999158155781, 25.6725767167379, 25.9452278681514, 22.1392297148956, 19.7854759528805, 26.3160013460668, 26.6015603233607, 25.8176394324703, 21.9294634549587, 25.1372486946257, \n 26.1414067413676, 29.706296270733, 21.2880725958292, 26.2354654342106, 24.7781839829827, 24.2709309533463, 27.6672146060926, 27.5583537175524, 26.7221580311687, 24.3383103973917, 22.270277401351, 20.9715102837017, 21.5223586537717, 22.4808791467864, 21.8794690184451, 29.0090455704904, 23.8151470604463, 27.7777112818669, 23.2487156520984, 22.9840541217216, 19.973727062915, 22.7370124691751, 25.1359380700723, 23.5840839827061, 28.5848834806789, 25.1423142806661, 22.1641095213771, 26.3273974935475, \n 24.5816537244499, 24.6543701353563, 20.5144880510104, 24.3538730680223, 23.9180550280429, 24.9028118687796, 28.5468890514803, 24.6002441355419, 26.3307482807768, 22.9524722634568, 22.5450369239019, 17.9817479840404, 21.970926924623, 25.4693969691064, 26.0481389474156, 24.4306284041542, 23.242371664385, 24.6120658064488, 27.8174644311152, 22.5602057761231, 26.3967607835294, 23.7614062961394, 24.5755517112864, 25.7768459217238, 22.6073076363744, 22.2412855668329, 24.5841905784085, 20.7202588310117, \n 27.6737563307463, 25.9569842632704, 21.0320966548509, 25.4164535292377, 25.2883083986181, 21.1577711495333, 24.9711718700263, 26.6866446020177, 27.0992017429549, 22.5764226008885, 21.1719567031447, 23.2219778076753, 24.0460250000324, 18.4981417581494, 23.9439668249728, 24.1929749628773, 23.7559809624958, 22.525042442832, 22.3581638908985, 23.3213016664574, 19.4773934111103, 19.1166456338309, 23.2536201311992, 25.0950154817921, 23.8931795683684, 21.357181897142, 24.2206604296785, 21.5402129133873, \n 22.285027641256, 23.0068215364517, 23.3741112076432, 22.987601700407, 24.4409575230942, 24.636266339704, 21.5187376273904, 22.6045995909268, 22.5774373086906, 27.8711662343074, 22.7824907413673, 25.3392661434498, 24.9072665432779, 21.9922807919374, 23.2056642988679, 21.3795010244688, 20.3497261261301, 20.8379115234591, 26.8660899597012, 25.9539621827104, 23.6866099665691, 25.0837747624573, 24.2792165265899, 24.2702761529985, 30.0102610513062, 23.1894316244674, 27.0767022843712, 26.1657361803649, \n 22.0217603189933, 26.439668581923, 19.9916729530495, 23.4742679798028, 23.7098486574149, 21.9654616898997, 26.5907574663293, 23.0237402901549, 26.586648946961, 21.5348582999215, 21.1061221081134, 23.8198704383698, 24.2343307891475, 23.6825923496769, 25.5288904425722, 23.6264885270279, 26.631166759848, 22.4052285686297, 24.7262273152108, 27.8232383500613, 21.5380517700776, 21.1638981679782, 26.9099662966898, 22.3320808748272, 26.3633392374981, 24.3731436664634, 26.671351089662, 27.4582806881087, \n 30.1689226353668, 
22.8764719299464, 28.1619956042683, 24.8455727990596, 23.583071527743, 28.3464070944829, 25.8433855407612, 26.6175562691536, 24.8454103844791, 25.8463964726319, 19.9196364903211, 23.4258018537922, 25.8887221369348, 22.0449947750251, 21.6645978726653, 22.1413725187933, 20.3355229136676, 24.7157757315696, 26.859120759473, 25.4176327750736, 25.254018956424, 25.7532627640903, 20.0060424424387, 25.9206281900106, 28.0191648007246, 24.950597506294, 22.9727681959203, 20.5324479706537, \n 22.7154597393261, 24.7981743790455, 25.984837986009, 24.3113180458192, 22.8707205041202, 22.4418179496629, 25.9363810828132, 26.4437760621611, 25.8470094832384, 25.5256014534884, 19.7428248942555, 23.0715763026653, 24.8954764088303, 26.9780299534038, 21.8292398776181, 25.6819782486414, 25.0637637310052, 24.1344575537888, 24.00060948085, 25.069374557971, 21.1986095201603, 22.6571038187071, 19.8208256182977, 26.2719027144583, 25.6074600247158, 27.5756667325763, 21.5113514395419, 26.5242870229295, \n 22.0513200164999, 23.1740334694696, 22.9904348574036, 20.0998044103757, 24.9288864916274, 20.9580015739141, 24.24946497418, 22.7395078292951, 22.7550802328992, 21.5992742738146, 25.099891878079, 24.9756018671258, 22.096442009012, 21.9467580500133, 23.1397790614472, 21.9382199192968, 22.4593218710819, 24.3325378512405, 23.8756845675336, 26.7505253181644, 19.9770464851688, 26.4794146967844, 27.9413862994718, 23.8430526163154, 25.5891647120319, 24.1743261013497, 23.6496489537902, 23.0959838639878, \n 24.8380900829203, 27.3342558048319, 25.2426358879942, 28.3190216540313, 26.7125952906417, 21.4548690416608, 26.1780790232281, 25.0829801960578, 21.293876547643, 23.8349995931875, 23.6911657543771, 22.5078698782894, 28.0247356799713, 17.6816270286467, 21.5720042810599, 25.375546208931, 25.6897207014249, 25.1998342629035, 22.3278687641803, 27.5116051108853, 25.7052145996477, 23.0493062483877, 26.5670098404839, 27.3993782171349, 25.9314682266343, 21.3792602973178, 26.3278590904395, 24.4472856054508, \n 29.2843860896378, 23.324501281981, 27.6702118547006, 25.0255191072433, 25.2495711492846, 27.8042222602807, 23.2026112393731, 24.5124742769602, 22.0665161399578, 21.6417915208774, 20.6453017420573, 24.4376258118278, 21.3036819039984, 26.9896718145863, 24.7418888624842, 24.829791668678, 23.2751534118296, 27.3301276380751, 19.7868261081813, 21.1117749907534, 26.5404046114989, 24.2732617514889, 23.3843328686874, 28.4565582926934, 24.46127036944, 25.3435712315635, 25.1067015584489, 22.4892239369189, \n 22.6000914102395, 22.6733754458662, 22.8812272391277, 22.7241998420069, 22.6312529928692, 23.4144670844057, 24.8351226027159, 21.1708901138809, 22.1934990126851, 26.2537757548884, 27.4627908144283, 25.275426152079, 25.7724368902489, 23.2309883685293, 27.6674066830493, 21.8189599988483, 21.9170347704899, 21.2752224553219, 25.9351752171701, 23.5405585938551, 21.6413548893655, 30.6287473964446, 23.6431928106079, 23.3624086170381, 25.9824323448879, 28.6243795680063, 23.8639503485922, 26.2773066490527, \n 25.881949157093, 21.9106483981102, 24.0542885755205, 28.3261287927778, 27.7963160735113, 26.2788129762865, 25.0875463545416, 23.551938096821, 22.838238085894, 22.9021493167457, 28.3816303534718, 21.7607670074606, 25.5575848543263, 24.257900931864, 22.9149821467977, 22.7732328405303, 23.679190771918, 24.2513335202716, 27.9384923896715, 20.2900777064019, 20.2306319030994, 22.5390422223776, 21.9624320922553, 24.8321291111257, 24.996805282917, 22.0107538415363, 19.2914832722059, 25.6273944431386, \n 22.9570801132346, 23.9960447489532, 
25.5020216935304, 18.3099370705069, 29.9933692754142, 25.9760545825852, 26.8497756221746, 22.0406161499476, 25.913300728844, 25.4093720320473, 27.8237750136642, 22.0848870035383, 29.8754940909266, 21.6867272929041, 23.4102817781251, 20.1090660291217, 27.9894804869024, 24.6456027249959, 25.9789639220761, 23.9978663600008, 21.9072032507657, 25.7389675149585, 25.3518990742003, 21.2284626410734, 23.390782181051, 25.0182353287487, 20.4694886710561, 19.6360405571098, \n 24.9368447622744, 22.8205339214704, 26.4005124028578, 24.0511025520614, 23.525600851542, 25.1743873689904, 26.4806190532242, 22.8998611937701, 26.5516937862225, 23.5673652796074, 26.639048858761, 21.5745420024702, 28.9475428496201, 25.3153222267514, 23.3488764297466, 24.4664683655516, 28.299754666797, 23.790044058116, 22.2307759731708, 23.3469498310161, 22.037969531844, 24.3740275533, 20.925201392473, 28.6754253117741, 27.3073749272837, 23.2714747197539, 24.4927695046729, 22.1988431917047, 23.5965764121592, \n 19.8138406501969, 25.0513360546574, 23.9582917402353, 21.6916338287961, 25.3546848562, 25.1977414336034, 24.6874403818326, 27.0016624276981, 24.3589780178887, 23.1162372916603, 25.4441796000712, 27.0097657049526, 28.1611905882701, 19.3738862433222, 20.1187737804154, 27.9215448424686, 19.8544459148178, 25.159020670023, 24.6189246619307, 27.1130050103581, 22.7029551710707, 23.5076316350596, 24.4872492082985, 23.8998126689201, 26.9240151699162, 22.4117362927173, 24.8537092476711, 23.2643812996508, \n 23.9174921202309, 25.0841688216421, 26.3678400191815, 27.3914977842565, 27.7440038099587, 21.8665984440952, 24.8805525815408, 23.9213493737035, 23.2862968278405, 21.8543851800902, 25.9943461788566, 24.5527860572242, 23.8044865740885, 21.7896535816723, 24.0311617052197, 24.8215771532518, 19.5397297476005, 22.9916320422905, 24.4710383817209, 21.7630353588042, 24.6954713767283, 19.796505115429, 21.1791664501926, 26.5849440263196, 23.2450606054596, 22.9581917150431, 23.3749375224851, 23.0330869263201, \n 25.49208088571, 22.7570874086676, 23.9901063174694, 18.4246711360262, 22.6543218850404, 26.1423834475921, 25.4394416560856, 26.5081213630383, 24.9035956538498, 21.7670553334791, 22.5036178159817, 25.2514386120467, 23.4857729204392, 25.4929546126909, 20.8435068331162, 26.8560579705235, 23.268641853099, 25.9454520694199, 24.4453070750036, 25.3486823044699, 23.1382409092641, 23.93340126397, 24.7119269314022, 29.0583065243365, 23.9339752797036, 23.6040516540994, 27.5557700488596, 23.5952381737614, \n 27.6057097757062, 26.6072724641605, 26.3244539233051, 21.0254240467319, 25.8165074116337, 24.7403179374837, 26.4511679174019, 22.217786924037, 22.9685855054566, 24.9945528315602, 27.7101572120264, 22.3650098668962, 22.118481380273, 25.2157499340857, 27.5724772107284, 25.7135872189084, 22.1704515562857, 23.7245638034752, 26.6333013725585, 19.6980520297813, 18.7474422032699, 22.1240920047113, 24.6484350668, 25.7165631907785, 22.3806187125695, 23.4300570436704, 26.2356231321507, 26.385136209525, \n 25.5261037695985, 26.0728903697749, 24.0152124991513, 26.4274482569992, 30.2119047089405, 24.1111315454093, 26.2763365503225, 25.6887608135529, 20.8566578850385, 22.6479602002123, 25.1019022847843, 25.0518921576019, 23.7032742870655, 25.1020413781864, 23.7630187014503, 25.8813329402164, 25.0247497000233, 24.2297202543522, 22.6340068868981, 20.7741301082975, 27.9578582823744, 24.8821248031445, 23.0935463204422, 23.8959290369998, 24.1469258077714, 25.2597519549526, 23.4405706537068, 22.7066644837194, \n 26.4670595642611, 23.5285131748049, 
19.9224301618832, 25.5584448795195, 24.5068470035028, 25.8511804465425, 22.4600139668265, 26.5089263650298, 21.7045503686009, 22.1380341308443, 25.3521680198717, 25.1236463046856, 23.3241494400977, 24.6174685708879, 23.4969773188412, 21.862510568765, 29.0551451771861, 23.1824750418095, 23.8059971521198, 24.474606931859, 22.219833146741, 22.1154357965997, 21.3621608440128, 24.6234615883934, 22.0989568918066, 26.3337625348145, 22.1830704422695, 23.1226425092941, \n 24.5769163924795, 23.8952003963413, 23.2475394949589, 22.4490922231268, 24.1024298896872, 21.1899119152235, 22.1836364965676, 25.8367965382769, 19.0456282734481, 26.8179673766035, 25.3477495541446, 24.2592203446349, 24.4851839667287, 20.739220032011, 26.5925259136753, 27.3053719911648, 23.7066086896106, 24.7058214632509, 26.4129040669372, 28.0940459101359, 26.2815182338726, 24.1919481324191, 22.6288266133665, 26.3887388088857, 25.3372858525515, 27.6546328078419, 24.1136317825588, 25.5545360210688, \n 23.8789836439294, 25.1677457626152, 23.1965799505867, 25.39030872513, 25.1046304023901, 27.0755181858625, 26.1659022210627, 22.8986870733777, 23.9114086285139, 24.5693555791871, 29.7601656954633, 24.1895646424408, 23.3802232370839, 23.255273581044, 23.4708067791094, 24.1766127691121, 22.5344007138147, 24.076575648311, 23.6570861354488, 23.308368187864, 25.3010607854632, 23.1735193525107, 23.6345255089233, 23.4100829136008, 25.2110724891184, 22.8196466956189, 26.3244964100435, 23.9207213560876, \n 24.5542368169015, 26.2806019288174, 26.7757645878796, 24.677687867002, 20.4607698164252, 22.8734100961968, 25.0707690497281, 28.9061904898661, 22.6662495205923, 23.2196893687754, 25.0660007809778, 26.411194277651, 23.83713029892, 26.6267465365056, 23.9808371577527, 25.1479956041541, 27.3253396269586, 24.0656218114766, 26.5542443214263, 21.914103974471, 28.7830890187457, 19.6668931672601, 25.489998388083, 24.9023295964964, 25.3897910600611, 22.2905529619216, 23.0552693241267, 25.2708971342974, \n 22.6144031260341, 26.0607924997767, 23.5977477806018, 23.1841244327187, 25.0361357428259, 23.1418683775904, 27.0087097673401, 22.3356463565599, 22.431579546978, 26.873447286005, 20.950769318336, 21.8895504641497, 23.1695670316979, 19.843351729665, 24.0091768851652, 25.511929033368, 24.153202725394, 26.5097328405245, 23.1572322557906, 25.2414207625711, 21.9069535797744, 25.5010593353589, 24.5571764795224, 22.7311988510744, 27.3075168589764, 25.910783701021, 25.4185352503815, 26.3983683687876, \n 27.468042772834, 28.488995807628, 22.8364462728617, 22.1342017264734, 26.0561184464917, 22.969641287474, 19.2948934099151, 24.4445698006693, 28.2404029603619, 22.3368271738386, 25.5197615974556, 26.3297374017116, 24.1094593820154, 24.6903775379685, 20.5543525817611, 22.5656598572146, 24.4419692091639, 22.8758272397932, 25.1888547991348, 20.7063310916429, 27.0946654643658, 22.2795041047242, 21.3377108399547, 25.9069282654005, 24.096107466175, 21.4333210061792, 25.0084744809184, 25.6061595347796, \n 21.0545145818748, 22.936267081493, 19.8010714731226, 23.6366401304632, 22.8779100984057, 24.5832930751233, 22.9506578414776, 26.3786453562785, 24.1159373523935, 20.884796909607, 24.741280788899, 23.3745271507587, 22.0746433476025, 21.6210509231585, 27.6479048147371, 23.8373958554498, 18.8262258052966, 25.469464151379, 22.4780917112772, 26.1483770416268, 23.0905769630516, 25.3440103154198, 23.5348094808692, 23.7949040032871, 26.2287327987848, 27.5968353689264, 22.7379029061818, 22.7956387763123, \n 25.1482746284191, 25.5094394421, 25.3967822369732, 28.2230382502412, 
24.2696215153262, 23.5476539951416, 22.4857354719217, 22.9789243207698, 26.9277526138439, 25.0710477858558, 22.6274020213151, 24.6372713516139, 24.0626196602817, 23.242116542589, 25.4738663203844, 28.1386278101733, 19.9277815999832, 22.8510002938672, 24.0309986726417)",
equidist = TRUE), .Names = c("breaks", "counts", "density",
"mids", "xname", "equidist"), class = "histogram")
assertThat(graphics:::hist.default(breaks=20,main="Breaks=20",plot=FALSE,x=c(26.5634283971954, 21.0732303298413, 24.3554456026418, 27.8262270160784,
21.0516594829065, 26.1227544800997, 21.9320839800539, 22.6745110977843,
25.3143811489893, 26.091516231568, 26.9439992510412, 22.8730734382734,
19.8777627054095, 21.7797482238853, 25.2923154723437, 24.1340208803617,
23.8839182840953, 22.657972895955, 22.3000552045009, 26.6738355797256,
18.6248888365263, 23.9565194361072, 26.4891838109457, 26.0517318962984,
24.7894878538271, 27.1548166896301, 23.7468747561327, 22.598123733951,
20.0035376393095, 26.0457176060119, 21.8760575904575, 23.5488556573365,
22.4658360307562, 20.9100279697375, 22.7509638057389, 27.0942650627132,
24.3760938003495, 26.7187187459996, 25.5755198092905, 22.9937517105827,
21.5380328429234, 25.0051238417917, 23.6822896606452, 21.6701022575063,
26.2435005300501, 22.0107068368343, 25.4547700327182, 23.1979176448434,
23.1408849494699, 28.2688740511134, 26.6169867557461, 20.9728060366406,
28.8029792847255, 26.0476085170716, 21.8890908680693, 20.8118884990165,
27.8423586684695, 19.1674722361506, 26.4586123434204, 26.9755634044577,
24.4977525211131, 19.9532248628515, 22.6973470814496, 21.9508340282816,
19.7592764595584, 21.2605845214684, 23.4624749530086, 21.5478978392629,
23.8108202342591, 24.587379394677, 22.6469309965652, 26.9319932966287,
22.4869010823946, 24.7935710575211, 27.3676541094555, 24.6779813351123,
24.8538266893032, 22.9424947956967, 23.7048967627905, 21.4975722818542,
24.3231544468967, 23.2252312655677, 24.1701429792189, 23.7550829900611,
24.6816604203094, 25.3468469434056, 20.6994536501591, 23.485446862901,
21.144679751462, 20.9111964214449, 25.0215173002148, 21.9629725842547,
29.567328307732, 24.8059180623643, 24.4884674152798, 26.3278575586487,
21.794075741216, 24.1250978170988, 23.2482343935872, 21.1732825957225,
26.3338377502362, 21.3736040465611, 23.5773593162406, 24.688057446777,
27.1072939945814, 21.0831549631409, 29.1234326568092, 22.93481422306,
22.6288238189562, 24.2453085216833, 22.3212406289117, 30.0857027518018,
24.5166356898763, 23.9815782350249, 26.6076513608488, 22.6127095692847,
22.8053132600314, 22.4035873387531, 24.1970354021746, 24.8328075337292,
21.5560214057481, 28.3526147935468, 21.8135736457659, 23.1517956162491,
26.2115084597216, 19.992081068339, 21.6426695811513, 24.7812037714352,
21.1175898866619, 22.4272693443429, 23.1215988294815, 24.2115176027199,
21.7506195172551, 21.8822284095886, 24.5037060824213, 20.0595241540384,
23.8675324858033, 23.1116176860322, 24.8936495177902, 22.3603465133677,
23.7921672313761, 28.2427609323447, 23.6886867004164, 24.6831147536639,
28.1046523295704, 23.6084802739167, 27.2735686041502, 25.7984189637527,
22.7053029447642, 25.2539693537766, 21.6398655515835, 25.4342120229012,
20.9585210461234, 27.3679095044265, 23.5817829794965, 23.2255036687432,
26.5314392514959, 22.4235396733226, 24.0536598159755, 28.5360263716425,
24.3932526451565, 21.9661507858564, 29.5483048763506, 29.9914975074989,
25.2195310106299, 21.8848965290227, 26.4892544320462, 23.2663079406252,
20.8497337065484, 25.4531711214783, 25.5375386822068, 22.6826060685614,
23.8916529346259, 21.3728521238244, 21.389706956775, 28.1219938557674,
25.6573425695554, 24.6356611992215, 26.3500506924287, 23.4039149692232,
19.6621002949442, 21.2947020985557, 27.8689634738125, 24.9665308234181,
22.8186088293961, 25.4062418695928, 27.1223415750006, 25.5702929224546,
24.9907324561226, 26.560435654452, 25.7425231322824, 24.1687342538822,
24.6654750451077, 21.604096610578, 22.7591681909341, 23.8632703785382,
19.5440863032035, 23.6232023015585, 22.5323588325701, 23.5425595666018,
26.7093906672381, 26.1122774914759, 21.3714442132983, 21.4643140295544,
23.1960756066878, 24.9408626130395, 22.3093881407066, 27.2321183061534,
26.4616082328802, 23.1416139218221, 26.6275655607694, 28.1287306933453,
23.3091369997718, 22.5112080902619, 20.7165231397068, 22.055645533488,
28.0016418918253, 20.6224416896435, 27.6281610876212, 21.0350859774375,
24.2152210357361, 24.2571020847297, 21.0444114095305, 18.54397947643,
26.9514958719466, 22.065128806995, 20.0814952150032, 19.6049844152586,
22.823984420168, 27.2064513016542, 23.4668550808944, 22.7829935136588,
24.469794987964, 23.837855222599, 20.3810910670664, 29.3481535892034,
25.1475474099985, 25.4984780287715, 23.340381556253, 21.312969446815,
24.6806106716021, 23.2214528583199, 24.0999158155781, 25.6725767167379,
25.9452278681514, 22.1392297148956, 19.7854759528805, 26.3160013460668,
26.6015603233607, 25.8176394324703, 21.9294634549587, 25.1372486946257,
26.1414067413676, 29.706296270733, 21.2880725958292, 26.2354654342106,
24.7781839829827, 24.2709309533463, 27.6672146060926, 27.5583537175524,
26.7221580311687, 24.3383103973917, 22.270277401351, 20.9715102837017,
21.5223586537717, 22.4808791467864, 21.8794690184451, 29.0090455704904,
23.8151470604463, 27.7777112818669, 23.2487156520984, 22.9840541217216,
19.973727062915, 22.7370124691751, 25.1359380700723, 23.5840839827061,
28.5848834806789, 25.1423142806661, 22.1641095213771, 26.3273974935475,
24.5816537244499, 24.6543701353563, 20.5144880510104, 24.3538730680223,
23.9180550280429, 24.9028118687796, 28.5468890514803, 24.6002441355419,
26.3307482807768, 22.9524722634568, 22.5450369239019, 17.9817479840404,
21.970926924623, 25.4693969691064, 26.0481389474156, 24.4306284041542,
23.242371664385, 24.6120658064488, 27.8174644311152, 22.5602057761231,
26.3967607835294, 23.7614062961394, 24.5755517112864, 25.7768459217238,
22.6073076363744, 22.2412855668329, 24.5841905784085, 20.7202588310117,
27.6737563307463, 25.9569842632704, 21.0320966548509, 25.4164535292377,
25.2883083986181, 21.1577711495333, 24.9711718700263, 26.6866446020177,
27.0992017429549, 22.5764226008885, 21.1719567031447, 23.2219778076753,
24.0460250000324, 18.4981417581494, 23.9439668249728, 24.1929749628773,
23.7559809624958, 22.525042442832, 22.3581638908985, 23.3213016664574,
19.4773934111103, 19.1166456338309, 23.2536201311992, 25.0950154817921,
23.8931795683684, 21.357181897142, 24.2206604296785, 21.5402129133873,
22.285027641256, 23.0068215364517, 23.3741112076432, 22.987601700407,
24.4409575230942, 24.636266339704, 21.5187376273904, 22.6045995909268,
22.5774373086906, 27.8711662343074, 22.7824907413673, 25.3392661434498,
24.9072665432779, 21.9922807919374, 23.2056642988679, 21.3795010244688,
20.3497261261301, 20.8379115234591, 26.8660899597012, 25.9539621827104,
23.6866099665691, 25.0837747624573, 24.2792165265899, 24.2702761529985,
30.0102610513062, 23.1894316244674, 27.0767022843712, 26.1657361803649,
22.0217603189933, 26.439668581923, 19.9916729530495, 23.4742679798028,
23.7098486574149, 21.9654616898997, 26.5907574663293, 23.0237402901549,
26.586648946961, 21.5348582999215, 21.1061221081134, 23.8198704383698,
24.2343307891475, 23.6825923496769, 25.5288904425722, 23.6264885270279,
26.631166759848, 22.4052285686297, 24.7262273152108, 27.8232383500613,
21.5380517700776, 21.1638981679782, 26.9099662966898, 22.3320808748272,
26.3633392374981, 24.3731436664634, 26.671351089662, 27.4582806881087,
30.1689226353668, 22.8764719299464, 28.1619956042683, 24.8455727990596,
23.583071527743, 28.3464070944829, 25.8433855407612, 26.6175562691536,
24.8454103844791, 25.8463964726319, 19.9196364903211, 23.4258018537922,
25.8887221369348, 22.0449947750251, 21.6645978726653, 22.1413725187933,
20.3355229136676, 24.7157757315696, 26.859120759473, 25.4176327750736,
25.254018956424, 25.7532627640903, 20.0060424424387, 25.9206281900106,
28.0191648007246, 24.950597506294, 22.9727681959203, 20.5324479706537,
22.7154597393261, 24.7981743790455, 25.984837986009, 24.3113180458192,
22.8707205041202, 22.4418179496629, 25.9363810828132, 26.4437760621611,
25.8470094832384, 25.5256014534884, 19.7428248942555, 23.0715763026653,
24.8954764088303, 26.9780299534038, 21.8292398776181, 25.6819782486414,
25.0637637310052, 24.1344575537888, 24.00060948085, 25.069374557971,
21.1986095201603, 22.6571038187071, 19.8208256182977, 26.2719027144583,
25.6074600247158, 27.5756667325763, 21.5113514395419, 26.5242870229295,
22.0513200164999, 23.1740334694696, 22.9904348574036, 20.0998044103757,
24.9288864916274, 20.9580015739141, 24.24946497418, 22.7395078292951,
22.7550802328992, 21.5992742738146, 25.099891878079, 24.9756018671258,
22.096442009012, 21.9467580500133, 23.1397790614472, 21.9382199192968,
22.4593218710819, 24.3325378512405, 23.8756845675336, 26.7505253181644,
19.9770464851688, 26.4794146967844, 27.9413862994718, 23.8430526163154,
25.5891647120319, 24.1743261013497, 23.6496489537902, 23.0959838639878,
24.8380900829203, 27.3342558048319, 25.2426358879942, 28.3190216540313,
26.7125952906417, 21.4548690416608, 26.1780790232281, 25.0829801960578,
21.293876547643, 23.8349995931875, 23.6911657543771, 22.5078698782894,
28.0247356799713, 17.6816270286467, 21.5720042810599, 25.375546208931,
25.6897207014249, 25.1998342629035, 22.3278687641803, 27.5116051108853,
25.7052145996477, 23.0493062483877, 26.5670098404839, 27.3993782171349,
25.9314682266343, 21.3792602973178, 26.3278590904395, 24.4472856054508,
29.2843860896378, 23.324501281981, 27.6702118547006, 25.0255191072433,
25.2495711492846, 27.8042222602807, 23.2026112393731, 24.5124742769602,
22.0665161399578, 21.6417915208774, 20.6453017420573, 24.4376258118278,
21.3036819039984, 26.9896718145863, 24.7418888624842, 24.829791668678,
23.2751534118296, 27.3301276380751, 19.7868261081813, 21.1117749907534,
26.5404046114989, 24.2732617514889, 23.3843328686874, 28.4565582926934,
24.46127036944, 25.3435712315635, 25.1067015584489, 22.4892239369189,
22.6000914102395, 22.6733754458662, 22.8812272391277, 22.7241998420069,
22.6312529928692, 23.4144670844057, 24.8351226027159, 21.1708901138809,
22.1934990126851, 26.2537757548884, 27.4627908144283, 25.275426152079,
25.7724368902489, 23.2309883685293, 27.6674066830493, 21.8189599988483,
21.9170347704899, 21.2752224553219, 25.9351752171701, 23.5405585938551,
21.6413548893655, 30.6287473964446, 23.6431928106079, 23.3624086170381,
25.9824323448879, 28.6243795680063, 23.8639503485922, 26.2773066490527,
25.881949157093, 21.9106483981102, 24.0542885755205, 28.3261287927778,
27.7963160735113, 26.2788129762865, 25.0875463545416, 23.551938096821,
22.838238085894, 22.9021493167457, 28.3816303534718, 21.7607670074606,
25.5575848543263, 24.257900931864, 22.9149821467977, 22.7732328405303,
23.679190771918, 24.2513335202716, 27.9384923896715, 20.2900777064019,
20.2306319030994, 22.5390422223776, 21.9624320922553, 24.8321291111257,
24.996805282917, 22.0107538415363, 19.2914832722059, 25.6273944431386,
22.9570801132346, 23.9960447489532, 25.5020216935304, 18.3099370705069,
29.9933692754142, 25.9760545825852, 26.8497756221746, 22.0406161499476,
25.913300728844, 25.4093720320473, 27.8237750136642, 22.0848870035383,
29.8754940909266, 21.6867272929041, 23.4102817781251, 20.1090660291217,
27.9894804869024, 24.6456027249959, 25.9789639220761, 23.9978663600008,
21.9072032507657, 25.7389675149585, 25.3518990742003, 21.2284626410734,
23.390782181051, 25.0182353287487, 20.4694886710561, 19.6360405571098,
24.9368447622744, 22.8205339214704, 26.4005124028578, 24.0511025520614,
23.525600851542, 25.1743873689904, 26.4806190532242, 22.8998611937701,
26.5516937862225, 23.5673652796074, 26.639048858761, 21.5745420024702,
28.9475428496201, 25.3153222267514, 23.3488764297466, 24.4664683655516,
28.299754666797, 23.790044058116, 22.2307759731708, 23.3469498310161,
22.037969531844, 24.3740275533, 20.925201392473, 28.6754253117741,
27.3073749272837, 23.2714747197539, 24.4927695046729, 22.1988431917047,
23.5965764121592, 19.8138406501969, 25.0513360546574, 23.9582917402353,
21.6916338287961, 25.3546848562, 25.1977414336034, 24.6874403818326,
27.0016624276981, 24.3589780178887, 23.1162372916603, 25.4441796000712,
27.0097657049526, 28.1611905882701, 19.3738862433222, 20.1187737804154,
27.9215448424686, 19.8544459148178, 25.159020670023, 24.6189246619307,
27.1130050103581, 22.7029551710707, 23.5076316350596, 24.4872492082985,
23.8998126689201, 26.9240151699162, 22.4117362927173, 24.8537092476711,
23.2643812996508, 23.9174921202309, 25.0841688216421, 26.3678400191815,
27.3914977842565, 27.7440038099587, 21.8665984440952, 24.8805525815408,
23.9213493737035, 23.2862968278405, 21.8543851800902, 25.9943461788566,
24.5527860572242, 23.8044865740885, 21.7896535816723, 24.0311617052197,
24.8215771532518, 19.5397297476005, 22.9916320422905, 24.4710383817209,
21.7630353588042, 24.6954713767283, 19.796505115429, 21.1791664501926,
26.5849440263196, 23.2450606054596, 22.9581917150431, 23.3749375224851,
23.0330869263201, 25.49208088571, 22.7570874086676, 23.9901063174694,
18.4246711360262, 22.6543218850404, 26.1423834475921, 25.4394416560856,
26.5081213630383, 24.9035956538498, 21.7670553334791, 22.5036178159817,
25.2514386120467, 23.4857729204392, 25.4929546126909, 20.8435068331162,
26.8560579705235, 23.268641853099, 25.9454520694199, 24.4453070750036,
25.3486823044699, 23.1382409092641, 23.93340126397, 24.7119269314022,
29.0583065243365, 23.9339752797036, 23.6040516540994, 27.5557700488596,
23.5952381737614, 27.6057097757062, 26.6072724641605, 26.3244539233051,
21.0254240467319, 25.8165074116337, 24.7403179374837, 26.4511679174019,
22.217786924037, 22.9685855054566, 24.9945528315602, 27.7101572120264,
22.3650098668962, 22.118481380273, 25.2157499340857, 27.5724772107284,
25.7135872189084, 22.1704515562857, 23.7245638034752, 26.6333013725585,
19.6980520297813, 18.7474422032699, 22.1240920047113, 24.6484350668,
25.7165631907785, 22.3806187125695, 23.4300570436704, 26.2356231321507,
26.385136209525, 25.5261037695985, 26.0728903697749, 24.0152124991513,
26.4274482569992, 30.2119047089405, 24.1111315454093, 26.2763365503225,
25.6887608135529, 20.8566578850385, 22.6479602002123, 25.1019022847843,
25.0518921576019, 23.7032742870655, 25.1020413781864, 23.7630187014503,
25.8813329402164, 25.0247497000233, 24.2297202543522, 22.6340068868981,
20.7741301082975, 27.9578582823744, 24.8821248031445, 23.0935463204422,
23.8959290369998, 24.1469258077714, 25.2597519549526, 23.4405706537068,
22.7066644837194, 26.4670595642611, 23.5285131748049, 19.9224301618832,
25.5584448795195, 24.5068470035028, 25.8511804465425, 22.4600139668265,
26.5089263650298, 21.7045503686009, 22.1380341308443, 25.3521680198717,
25.1236463046856, 23.3241494400977, 24.6174685708879, 23.4969773188412,
21.862510568765, 29.0551451771861, 23.1824750418095, 23.8059971521198,
24.474606931859, 22.219833146741, 22.1154357965997, 21.3621608440128,
24.6234615883934, 22.0989568918066, 26.3337625348145, 22.1830704422695,
23.1226425092941, 24.5769163924795, 23.8952003963413, 23.2475394949589,
22.4490922231268, 24.1024298896872, 21.1899119152235, 22.1836364965676,
25.8367965382769, 19.0456282734481, 26.8179673766035, 25.3477495541446,
24.2592203446349, 24.4851839667287, 20.739220032011, 26.5925259136753,
27.3053719911648, 23.7066086896106, 24.7058214632509, 26.4129040669372,
28.0940459101359, 26.2815182338726, 24.1919481324191, 22.6288266133665,
26.3887388088857, 25.3372858525515, 27.6546328078419, 24.1136317825588,
25.5545360210688, 23.8789836439294, 25.1677457626152, 23.1965799505867,
25.39030872513, 25.1046304023901, 27.0755181858625, 26.1659022210627,
22.8986870733777, 23.9114086285139, 24.5693555791871, 29.7601656954633,
24.1895646424408, 23.3802232370839, 23.255273581044, 23.4708067791094,
24.1766127691121, 22.5344007138147, 24.076575648311, 23.6570861354488,
23.308368187864, 25.3010607854632, 23.1735193525107, 23.6345255089233,
23.4100829136008, 25.2110724891184, 22.8196466956189, 26.3244964100435,
23.9207213560876, 24.5542368169015, 26.2806019288174, 26.7757645878796,
24.677687867002, 20.4607698164252, 22.8734100961968, 25.0707690497281,
28.9061904898661, 22.6662495205923, 23.2196893687754, 25.0660007809778,
26.411194277651, 23.83713029892, 26.6267465365056, 23.9808371577527,
25.1479956041541, 27.3253396269586, 24.0656218114766, 26.5542443214263,
21.914103974471, 28.7830890187457, 19.6668931672601, 25.489998388083,
24.9023295964964, 25.3897910600611, 22.2905529619216, 23.0552693241267,
25.2708971342974, 22.6144031260341, 26.0607924997767, 23.5977477806018,
23.1841244327187, 25.0361357428259, 23.1418683775904, 27.0087097673401,
22.3356463565599, 22.431579546978, 26.873447286005, 20.950769318336,
21.8895504641497, 23.1695670316979, 19.843351729665, 24.0091768851652,
25.511929033368, 24.153202725394, 26.5097328405245, 23.1572322557906,
25.2414207625711, 21.9069535797744, 25.5010593353589, 24.5571764795224,
22.7311988510744, 27.3075168589764, 25.910783701021, 25.4185352503815,
26.3983683687876, 27.468042772834, 28.488995807628, 22.8364462728617,
22.1342017264734, 26.0561184464917, 22.969641287474, 19.2948934099151,
24.4445698006693, 28.2404029603619, 22.3368271738386, 25.5197615974556,
26.3297374017116, 24.1094593820154, 24.6903775379685, 20.5543525817611,
22.5656598572146, 24.4419692091639, 22.8758272397932, 25.1888547991348,
20.7063310916429, 27.0946654643658, 22.2795041047242, 21.3377108399547,
25.9069282654005, 24.096107466175, 21.4333210061792, 25.0084744809184,
25.6061595347796, 21.0545145818748, 22.936267081493, 19.8010714731226,
23.6366401304632, 22.8779100984057, 24.5832930751233, 22.9506578414776,
26.3786453562785, 24.1159373523935, 20.884796909607, 24.741280788899,
23.3745271507587, 22.0746433476025, 21.6210509231585, 27.6479048147371,
23.8373958554498, 18.8262258052966, 25.469464151379, 22.4780917112772,
26.1483770416268, 23.0905769630516, 25.3440103154198, 23.5348094808692,
23.7949040032871, 26.2287327987848, 27.5968353689264, 22.7379029061818,
22.7956387763123, 25.1482746284191, 25.5094394421, 25.3967822369732,
28.2230382502412, 24.2696215153262, 23.5476539951416, 22.4857354719217,
22.9789243207698, 26.9277526138439, 25.0710477858558, 22.6274020213151,
24.6372713516139, 24.0626196602817, 23.242116542589, 25.4738663203844,
28.1386278101733, 19.9277815999832, 22.8510002938672, 24.0309986726417
))[-5]
, identicalTo( expected[-5] ) )
asymmetry_factor <- function(rt, int) {
gauge <- 0.1
H <- max(int)
x_H <- which(int == H)[1]
tR <- rt[x_H]
rt1 <- rt[1:x_H]
int1 <- int[1:x_H]
W1 <- approx(int1, rt1, H*gauge, method = "linear", 0, 0, rule = 2, f = 0, ties = mean)
a <- tR - W1[[2]]
N_Seg <- length(rt)
rt2 <- rt[x_H:N_Seg]
int2 <- int[x_H:N_Seg]
W2 <- approx(int2, rt2, H*gauge, method = "linear", 0, 0, rule = 2, f = 0, ties = mean)
b <- W2[[2]] - tR
return(b/a)
}
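## --- Usage sketch (editor's addition): the asymmetry (tailing) factor of a
## chromatographic peak at 10% of peak height, on a small synthetic peak.
## The signal below is made up purely for illustration.
rt  <- seq(0, 10, by = 0.05)
int <- exp(-(rt - 4)^2 / 0.3) + 0.4 * exp(-(rt - 4.8)^2 / 1.5)  # tailing peak
asymmetry_factor(rt, int)  # > 1 indicates tailing, < 1 fronting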
account_comment_count <-
function(account = 'me',
...){
if(!"token" %in% names(list(...)) && account == 'me')
stop("This operation can only be performed for account 'me' using an OAuth token.")
out <- imgurGET(paste0('account/', account, '/comments/count'), ...)
structure(out, class = 'imgur_basic')
}
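## --- Usage sketch (editor's addition): requires the imgur API wrapper this
## function belongs to (imgurGET()) plus a valid OAuth token; the token helper
## named below is hypothetical and only illustrates how a token would be passed.
# tok <- imgur_login()                                     # hypothetical token setup
# account_comment_count(account = "me", token = tok)
# account_comment_count(account = "some_public_account")   # no token needed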
format_error <- function(message, .envir = parent.frame()) {
if (is.null(names(message)) || names(message)[1] == "") {
names(message)[1] <- "1"
}
message[1] <- paste0("Error: ", message[1])
rsconsole <- c("rstudio_console", "rstudio_console_starting")
if (rstudio_detect()$type %in% rsconsole) {
oldopt <- options(cli.width = console_width() - 15L)
} else {
oldopt <- options(
cli.width = getOption("cli.condition_width") %||% getOption("cli.width")
)
}
on.exit(options(oldopt), add = TRUE)
formatted1 <- fmt((function() {
cli_div(class = "cli_rlang cli_abort", theme = cnd_theme())
cli_bullets(message, .envir = .envir)
})(), collapse = TRUE, strip_newline = TRUE)
formatted1[1] <- sub("Error:[ ]?", "", formatted1[1])
update_rstudio_color(formatted1)
}
format_warning <- function(message, .envir = parent.frame()) {
if (is.null(names(message)) || names(message)[1] == "") {
names(message)[1] <- "1"
}
oldopt <- options(
cli.width = getOption("cli.condition_width") %||% getOption("cli.width")
)
on.exit(options(oldopt), add = TRUE)
formatted1 <- fmt((function() {
cli_div(class = "cli_rlang cli_warn", theme = cnd_theme())
cli_bullets(message, .envir = .envir)
})(), collapse = TRUE, strip_newline = TRUE)
update_rstudio_color(formatted1)
}
format_message <- function(message, .envir = parent.frame()) {
oldopt <- options(
cli.width = getOption("cli.condition_width") %||% getOption("cli.width")
)
on.exit(options(oldopt), add = TRUE)
formatted1 <- fmt((function() {
cli_div(class = "cli_rlang cli_inform", theme = cnd_theme())
cli_bullets(message, .envir = .envir)
})(), collapse = TRUE, strip_newline = TRUE)
update_rstudio_color(formatted1)
}
update_rstudio_color <- function(message) {
rscol <- get_rstudio_fg_color()
if (!is.null(rscol)) {
message[] <- rscol(message)
} else {
message <- paste0(style_bold(""), message)
}
message
}
get_rstudio_fg_color <- function() {
tryCatch(
get_rstudio_fg_color0(),
error = function(e) NULL
)
}
get_rstudio_fg_color0 <- function() {
rs <- rstudio_detect()
oktypes <- c("rstudio_console", "rstudio_console_starting")
if (! rs$type %in% oktypes) return(NULL)
if (rs$num_colors == 1) return(NULL)
colstr <- rstudioapi::getThemeInfo()$foreground
if (is.null(colstr)) return(NULL)
colstr0 <- substr(colstr, 5, nchar(colstr) - 1)
rgbnum <- scan(text = colstr0, sep = ",", quiet = TRUE)
rgb <- grDevices::rgb(rgbnum[1]/255, rgbnum[2]/255, rgbnum[3]/255)
make_ansi_style(rgb)
}
rstudio_detect <- function() {
rstudio$detect()
}
cnd_theme <- function() {
list(
".cli_rlang .bullets .bullet-v" = list(
before = function(x) paste0(col_green(cnd_symb("tick")), " ")
),
".bullets .bullet-x" = list(
before = function(x) paste0(col_red(cnd_symb("cross")), " ")
),
".bullets .bullet-i" = list(
before = function(x) paste0(col_cyan(cnd_symb("info")), " ")
),
".bullets .bullet-*" = list(
before = function(x) paste0(col_cyan(cnd_symb("bullet")), " ")
),
".bullets .bullet->" = list(
before = function(x) paste0(cnd_symb("arrow_right"), " ")
)
)
}
cnd_symb <- function(name) {
opt <- getOption("cli.condition_unicode_bullets", NULL)
if (isTRUE(opt)) {
symbol_utf8[[name]]
} else if (isFALSE(opt)) {
symbol_ascii[[name]]
} else {
symbol[[name]]
}
}
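## --- Usage sketch (editor's addition): these helpers mirror cli's condition
## formatters. Assuming the cli internals they call (fmt(), cli_div(),
## cli_bullets(), rstudio_detect(), the symbol tables) are available, a typical
## call formats an rlang-style named bullet message:
# format_error(c("Can't open the file.",
#                "i" = "Check that the path exists.",
#                "x" = "Permission was denied."))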
test_that("typical usage of `max_gap_streak()`", {
expect_equal(max_gap_streak('Val'), 0L)
expect_equal(max_gap_streak(c('Val', '-')), 1L)
expect_equal(max_gap_streak(c('Val', '-', '-')), 2L)
})
test_that("picking the longest streak of gaps`", {
expect_equal(max_gap_streak(c('-', 'Val', '-', '-')), 2L)
expect_equal(max_gap_streak(c('-', 'Val', '-', '-', 'Pro', '-', '-', '-')), 3L)
})
test_that("empty inputs", {
expect_equal(max_gap_streak(character()), 0L)
expect_equal(max_gap_streak(NA_character_), 0L)
expect_equal(max_gap_streak(c(NA_character_, NA_character_)), 0L)
})
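## --- Reference implementation (editor's addition): max_gap_streak() itself is
## not defined in this file. One possible implementation consistent with the
## expectations above (a "gap" is the literal string "-"; NA counts as no gap):
max_gap_streak <- function(x) {
  is_gap <- !is.na(x) & x == "-"
  if (!any(is_gap)) return(0L)
  runs <- rle(is_gap)
  max(runs$lengths[runs$values])
}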
jacobi.p.quadrature <- function( functn, rule, alpha=0, beta=0, lower=-1, upper=1,
weighted=TRUE, ... )
{
if ( !is.function( functn ) )
stop( "functn argument is not an R function" )
if ( !is.data.frame( rule ) )
stop( "rule argument is not a data frame" )
if ( is.infinite( lower ) )
stop( "lower bound is infinite" )
if ( is.infinite( upper ) )
stop( "lower bound is infinite" )
if ( weighted ) {
ff <-
if ( length( list( ... ) ) && length( formals( functn ) ) > 1 )
function( x, alpha, beta ) { functn( x, ... ) }
else
function( x, alpha, beta ) { functn( x ) }
}
else {
ff <-
if ( length( list( ... ) ) && length( formals( functn ) ) > 1 )
function( x, alpha, beta ) { functn( x, ... ) / jacobi.p.weight( x, alpha, beta ) }
else
function( x, alpha, beta ) { functn( x ) / jacobi.p.weight( x, alpha, beta ) }
}
lambda <- ( upper - lower ) / ( 2 )
mu <- ( lower + upper ) / ( 2 )
y <- lambda * rule$x + mu
w <- rule$w
return( lambda * sum( w * ff(y, alpha, beta ) ) )
}
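## --- Usage sketch (editor's addition): assuming the companion
## jacobi.p.quadrature.rules() and jacobi.p.weight() helpers from the same
## (gaussquad-style) package are available, integrate f(x) = x^2 on [-1, 1]
## with alpha = beta = 0, where the Jacobi weight reduces to 1 and the exact
## answer is 2/3:
# rules <- jacobi.p.quadrature.rules(5, alpha = 0, beta = 0)
# jacobi.p.quadrature(function(x) x^2, rules[[5]])   # approximately 0.6667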
nametonumber<-function(x,ts,cs,idvars,noms,ords,logs,sqrts,lgstc,lags,leads)
{
listconvert<-function(opt) {
junk.seq<-1:ncol(x)
junk.names<-dimnames(x)[[2]]
for (i in 1:length(opt)) {
mat<-opt[i]==junk.names
if (sum(mat) == 0)
return(NA)
opt[i]<-junk.seq[mat]
}
return(as.numeric(opt))
}
code<-0
mess<-paste("One of the variable names in the options list does not match a variable name in the data.")
if (class(ts)=="character")
ts<-listconvert(ts)
if (class(cs)=="character")
cs<-listconvert(cs)
if (class(idvars)=="character")
idvars<-listconvert(idvars)
if (class(noms)=="character")
noms<-listconvert(noms)
if (class(ords)=="character")
ords<-listconvert(ords)
if (class(logs)=="character")
logs<-listconvert(logs)
if (class(sqrts)=="character")
sqrts<-listconvert(sqrts)
if (class(lgstc)=="character")
lgstc<-listconvert(lgstc)
if (class(lags)=="character")
lags<-listconvert(lags)
if (class(leads)=="character")
leads<-listconvert(leads)
output<-list(code=code,ts=ts,cs=cs,idvars=idvars,noms=noms,
ords=ords,logs=logs,sqrts=sqrts,lgstc=lgstc,
lags=lags,leads=leads,mess=mess)
if (any(is.na(output)))
output$code<-1
return(output)
}
amtransform<-function(x,logs,sqrts,lgstc) {
logs<-unique(logs)
sqrts<-unique(sqrts)
lgstc<-unique(lgstc)
xmin<-c()
if (!is.null(logs)) {
for (i in 1:length(logs)) {
j<-logs[i]
xmin<-c(xmin,min(c(0,min(x[,j],na.rm=TRUE))))
x[,j]<-log(x[,j]-xmin[i]+1)
}
}
if (!is.null(sqrts))
for (i in sqrts)
x[,i]<-sqrt(x[,i])
if (!is.null(lgstc))
for (i in lgstc)
x[,i]<-log(x[,i]/(1-x[,i]))
return(list(x=x,xmin=xmin))
}
untransform<-function(x.imp,logs,xmin,sqrts,lgstc) {
logs<-unique(logs)
sqrts<-unique(sqrts)
lgstc<-unique(lgstc)
if (!is.null(logs)) {
for (i in 1:length(logs)) {
j<-logs[[i]]
x.imp[,j]<-exp(x.imp[,j])+xmin[[i]]
}
}
if (!is.null(sqrts))
for (i in sqrts)
x.imp[,i]<-(x.imp[,i])^2
if (!is.null(lgstc))
for (i in lgstc)
x.imp[,i]<-exp(x.imp[,i])/(1 + exp(x.imp[,i]))
return(x.imp)
}
frame.to.matrix<-function(x,idvars) {
char.vars<-which(sapply(x,class)=="character")
if (length(char.vars) > 0)
for (i in char.vars)
if (is.na(match(i,idvars)))
x[,i]<-as.factor(x[,i])
else
x[,i]<-1
return(data.matrix(x))
}
amsubset<-function(x,idvars,p2s,ts,cs,priors=NULL,
polytime=NULL,splinetime=NULL,intercs=FALSE,lags=NULL,
leads=NULL,noms=NULL,bounds=NULL, overimp = NULL) {
lags <- unique(lags)
leads <- unique(leads)
noms <- unique(noms)
idvars <- unique(idvars)
index <- c(1:ncol(x))
theta.names <- colnames(x)
if (!is.null(idvars)) {
index <- index[-idvars]
theta.names <- theta.names[-idvars]
}
if (is.data.frame(x))
x <- frame.to.matrix(x,idvars)
overvalues <- NULL
if (!is.null(overimp)) {
whole.vars <- overimp[overimp[,1] == 0, 2]
whole.vars <- as.matrix(expand.grid(1:nrow(x), whole.vars))
overimp <- overimp[overimp[,1] != 0,]
overimp <- rbind(overimp, whole.vars)
if (!is.matrix(overimp))
overimp <- t(as.matrix(overimp))
overvalues <- x[overimp]
is.na(x) <- overimp
}
AMmiss <- is.na(x)
if (!is.null(lags)) {
if (!identical(cs,NULL)) {
tsarg<-list(x[,cs],x[,ts])
} else {
tsarg<-list(x[,ts])
}
tssort<-do.call("order",tsarg)
x.sort<-x[tssort,]
for (i in lags) {
lagged<-c(NA,x.sort[1:(nrow(x)-1),i])
if (!identical(cs,NULL)) {
        for (i2 in 2:nrow(x.sort))
          if (x.sort[i2,cs]!=x.sort[i2-1,cs])
            is.na(lagged)<-i2
}
x.sort<-cbind(x.sort,lagged)
x<-cbind(x,1)
index<-c(index,-.5)
theta.names <- c(theta.names, paste("lag",colnames(x)[i],sep="."))
}
x[tssort,]<-x.sort
}
if (!is.null(leads)){
if (!identical(cs,NULL)) {
tsarg<-list(x[,cs],x[,ts])
} else {
tsarg<-list(x[,ts])
}
tssort<-do.call("order",tsarg)
x.sort<-x[tssort,]
for (i in leads) {
led<-x.sort[2:nrow(x),i]
led<-c(led,NA)
if (!identical(cs,NULL)) {
        for (i2 in 1:(nrow(x.sort)-1))
          if (x.sort[i2,cs]!=x.sort[i2+1,cs])
            is.na(led)<-i2
}
x.sort<-cbind(x.sort,led)
x<-cbind(x,1)
index<-c(index,.5)
theta.names <- c(theta.names, paste("lead",colnames(x)[i],sep="."))
}
x[tssort,]<-x.sort
}
if (!is.null(ts)) {
theta.names <- theta.names[index != ts]
index<-index[index!=ts]
idvars<-c(idvars,ts)
}
if (!is.null(cs)) {
theta.names <- theta.names[index != cs]
index<-index[index!=cs]
idvars<-c(idvars,cs)
}
if (!is.null(noms)) {
for (i in noms) {
values<-unique(na.omit(x[,i]))
newx<-matrix(0,nrow=nrow(x),ncol=length(values)-1)
theta.names <- theta.names[index != i]
index<-index[index!=i]
for (j in 2:length(values)) {
newx[,j-1]<-ifelse(x[,i] == values[j],1,0)
index<-c(index,-i)
theta.names <- c(theta.names, paste("noms",colnames(x)[i],j,sep="."))
}
x<-cbind(x,newx)
idvars<-c(idvars,i)
}
}
if (!identical(polytime,NULL) | !identical(splinetime,NULL) ){
if (!identical(splinetime,NULL)){
time<-x[,ts]
knot<-rep(0,5)
if(splinetime>3){
knot[1:(splinetime-1)]<-seq(from=min(time),to=max(time),length=(splinetime-1))
}
timebasis<-cbind(1,time,time^2,time^3,pmax(time-knot[2],0)^3,pmax(time-knot[3],0)^3,pmax(time-knot[4],0)^3)
timebasis<-timebasis[,1:(splinetime+1),drop=FALSE]
}
if (!identical(polytime,NULL)){
time<-x[,ts]
timebasis<-cbind(1,time,time^2,time^3)
timebasis<-timebasis[,1:(polytime+1) ,drop=FALSE]
}
cstypes<-unique(x[,cs])
timevars<-matrix(0,nrow(x),1)
if (intercs){
for (i in cstypes){
dummy<-as.numeric(x[,cs]==i)
timevars<-cbind(timevars,dummy*timebasis)
}
timevars<-timevars[,c(-1,-2), drop = FALSE]
} else {
timevars<-cbind(timevars,timebasis)
timevars<-timevars[,-c(1,2), drop = FALSE]
}
x<-cbind(x,timevars)
if (ncol(timevars)) {
for (i in 1:ncol(as.matrix(timevars))) {
index<-c(index,0)
theta.names <- c(theta.names, paste("time",i,sep="."))
}
}
} else {
if (intercs) {
cstypes <- unique(x[,cs])
timevars <- matrix(0, nrow(x), 1)
for (i in cstypes) {
dummy <- as.numeric(x[,cs] == i)
timevars <- cbind(timevars, dummy)
}
timevars <- timevars[,-c(1,2)]
x<-cbind(x,timevars)
if (ncol(timevars)) {
for (i in 1:ncol(as.matrix(timevars))) {
index<-c(index,0)
theta.names <- c(theta.names, paste("time",i,sep="."))
}
}
}
}
if (!identical(idvars,NULL))
x<-x[,-idvars, drop = FALSE]
if (p2s == 2) {
cat("Variables used: ", theta.names,"\n")
}
AMr1 <- is.na(x)
flag <- rowSums(AMr1)==ncol(x)
if (max(flag) == 1){
blanks <- which(flag)
x <- x[!flag,]
if (!is.null(priors)) {
priors <- priors[!(priors[,1] %in% blanks),]
if (length(blanks) == 1) {
row.adjust <- 1 * (priors[, 1, drop = FALSE] > blanks)
} else {
row.adjust <- colSums(sapply(priors[, 1, drop = FALSE],">",blanks))
}
priors[,1] <- priors[,1,drop=FALSE] - row.adjust
}
if (p2s) cat("Warning: There are observations in the data that are completely missing.","\n",
" These observations will remain unimputed in the final datasets.","\n")
} else {
blanks<-NULL
}
  if (!is.null(priors)) {
    priors[,2] <- match(priors[,2], index)
  }
  if (!is.null(bounds)) {
    bounds[,1] <- match(bounds[,1], index)
  }
if (is.null(dim(x))) {
x <- matrix(x, ncol = 1)
}
return(list(x=x,index=index,idvars=idvars,blanks=blanks,priors=priors,bounds=bounds,theta.names=theta.names,missMatrix=AMmiss,overvalues=overvalues))
}
unsubset <- function(x.orig, x.imp, blanks, idvars, ts, cs, polytime,
splinetime, intercs, noms, index, ords) {
if (is.data.frame(x.orig)) {
oldidvars <- idvars[-match(c(cs, noms), idvars)]
x.orig <- frame.to.matrix(x.orig, oldidvars)
}
AMr1.orig <- is.na(x.orig)
if (identical(blanks, NULL)) {blanks <- -(1:nrow(x.orig))}
if (identical(idvars, NULL)) {idvars <- -(1:ncol(x.orig))}
if (!is.null(noms)) {
for (i in noms) {
y <- runif(nrow(x.imp))
dums <- x.imp[, which(index == -i)]
p <- dums * (dums > 0) * (dums < 1) + ((dums - 1) >= 0)
psub <- rowSums(as.matrix(p))
psub <- (psub <= 1) + (psub) * (psub > 1)
p <- p / psub
pzero <- 1 - rowSums(as.matrix(p))
p <- cbind(pzero, p)
pk <- ncol(p)
utri.mat <- matrix(0, nrow = pk, ncol = pk)
utri.mat <- utri.mat + upper.tri(utri.mat, diag = TRUE)
cump <- p %*% utri.mat
cump.shift <- cbind(matrix(0, nrow(cump), 1), cump[, 1:(ncol(cump) - 1)])
yy <- (y < cump) * (y > cump.shift)
renom <- (yy %*% unique(na.omit(x.orig[, i])))
x.orig[-blanks, i] <- renom
}
}
if (!is.null(ords)) {
ords <- unique(ords)
impords <- match(ords,index)
x <- x.imp[, impords] * AMr1.orig[-blanks, ords]
minmaxords <- matrix(0, length(ords), 2)
for(jj in 1:length(ords)) {
tempords <- x.orig[AMr1.orig[, ords[jj]] == 0 , ords[jj]]
minmaxords[jj,1] <- min(tempords)
minmaxords[jj,2] <- max(tempords)
}
minord <- minmaxords[,1]
maxord <- minmaxords[,2]
ordrange <- maxord - minord
p <- t((t(x) - minord) / ordrange) * AMr1.orig[-blanks, ords]
p <- p * (p > 0) * (p < 1) + ((p - 1) >= 0)
newimp <- matrix(0, nrow(x.imp), length(ords))
for (k in 1:length(ords)) {
reordnl <- rbinom(nrow(x.imp), ordrange[k], p[, k])
newimp[, k] <- reordnl + minord[k] * AMr1.orig[-blanks, ords[k]]
}
for(jj in 1:length(ords)){
x.imp[, impords[jj]] <- round(x.imp[, impords[jj]])
x.imp[AMr1.orig[-blanks, ords[jj]] == 1, impords[jj]] <- newimp[AMr1.orig[-blanks, ords[jj]] == 1, jj]
}
}
if (!identical(c(blanks, idvars), c(NULL, NULL))) {
x.orig[-blanks, -idvars] <- x.imp[, 1:ncol(x.orig[, -idvars, drop = FALSE])]
} else {
x.orig <- x.imp[, 1:ncol(x.orig)]
}
return(x.orig)
}
scalecenter<-function(x,priors=NULL,bounds=NULL){
AMn<-nrow(x)
ones<-matrix(1,AMn,1)
meanx<-colMeans(x,na.rm=TRUE)
stdvx<-apply(x,2,sd,na.rm=TRUE)
no.obs <- colSums(!is.na(x)) == 0
if (!is.null(priors)) {
meanx[no.obs] <- 0
stdvx[no.obs] <- 1
}
x.ztrans<-(x-(ones %*% meanx))/(ones %*% stdvx)
if (!is.null(priors)){
priors[,3]<-(priors[,3]-meanx[priors[,2]])/stdvx[priors[,2]]
priors[,4]<- (priors[,4]/stdvx[priors[,2]])^2
}
if (!is.null(bounds)) {
bounds[,2] <- (bounds[,2]-meanx[bounds[,1]])/stdvx[bounds[,1]]
bounds[,3] <- (bounds[,3]-meanx[bounds[,1]])/stdvx[bounds[,1]]
}
return(list(x=x.ztrans,mu=meanx,sd=stdvx,priors=priors,bounds=bounds))
}
unscale<-function(x,mu,sd){
AMn<-nrow(x)
ones<-matrix(1,AMn,1)
x.unscale<-(x * (ones %*% sd)) + (ones %*% mu)
return(x.unscale)
}
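## Sanity check on simulated data (my own): unscale() applied to the
## scalecenter() output reproduces the original matrix for complete data.
if (FALSE) {
  set.seed(1)
  toy <- matrix(rnorm(20, mean = 5, sd = 2), nrow = 10, ncol = 2)
  sc  <- scalecenter(toy)
  max(abs(unscale(sc$x, mu = sc$mu, sd = sc$sd) - toy))  # effectively 0
}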
amstack<-function(x,colorder=TRUE,priors=NULL,bounds=NULL){
AMp<-ncol(x)
AMr1<-is.na(x)
if (colorder){
p.order <- order(colSums(AMr1))
AMr1<-AMr1[,p.order, drop = FALSE]
} else {
p.order<-1:ncol(x)
}
n.order <- do.call("order", as.data.frame(AMr1[,AMp:1]))
AMr1<- AMr1[n.order,, drop = FALSE]
x<- x[n.order,p.order, drop = FALSE]
if (!identical(priors,NULL)){
priors[,1]<-match(priors[,1],n.order)
priors[,2]<-match(priors[,2],p.order)
}
if (!identical(bounds,NULL))
bounds[,1]<-match(bounds[,1],p.order)
return(list(x=x,n.order=n.order,p.order=p.order,priors=priors,bounds=bounds))
}
amunstack<-function(x,n.order,p.order){
x.unstacked<-matrix(0,nrow=nrow(x),ncol=ncol(x))
x.unstacked[n.order,p.order]<-x
return(x.unstacked)
}
generatepriors<-function(AMr1,empri=NULL,priors=NULL){
if (!identical(priors,NULL)) {
if (ncol(priors) == 5){
new.priors<-matrix(NA, nrow = nrow(priors), ncol = 4)
new.priors[,1:2]<-priors[,1:2]
new.priors[,3]<-priors[,3] + ((priors[,4] - priors[,3])/2)
new.priors[,4]<-(priors[,4]-priors[,3])/(2*qnorm(1-(1-priors[,5])/2))
} else {
new.priors <-priors
}
zeros <- which(new.priors[,1]==0)
if (length(zeros) > 0) {
varPriors <- new.priors[zeros,2]
missCells <- which(AMr1[,varPriors,drop=FALSE], arr.ind=TRUE)
addedPriors <- matrix(NA, nrow=nrow(missCells), ncol=4)
addedPriors[,1] <- missCells[,1]
addedPriors[,2] <- varPriors[missCells[,2]]
addedPriors[,-c(1,2)] <- new.priors[zeros[missCells[,2]],-c(1,2)]
new.priors <- new.priors[-zeros,,drop=FALSE]
new.priors <- rbind(new.priors,addedPriors)
new.priors <- new.priors[!duplicated(new.priors[,1:2]),]
}
return(new.priors)
}
}
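## Worked example with made-up numbers: a 5-column prior row
## (observation, column, lower, upper, confidence) is converted to the
## 4-column mean/sd form; a 95% band from 0 to 4 becomes mean 2 and
## sd = 2 / qnorm(0.975), roughly 1.02.
if (FALSE) {
  miss <- matrix(c(TRUE, FALSE, FALSE, TRUE), nrow = 2)  # toy missingness matrix
  pr   <- matrix(c(1, 2, 0, 4, 0.95), nrow = 1)
  generatepriors(AMr1 = miss, priors = pr)  # 1 x 4 matrix: 1, 2, 2, ~1.02
}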
combine.output <- function(...) {
cl <- match.call()
cool <- unlist(lapply(cl, function(x) is.null(eval(x,parent.frame())$amelia.args)))
if (max(cool[-1])==1)
stop("One of the arguments is not an Amelia output list.")
ms <- unlist(lapply(cl,function(x) eval(x, parent.frame())$amelia.args$m))
m <- sum(ms)
new.out <- vector("list", 2*m+1)
names(new.out)[[2*m+1]] <- "amelia.args"
new.out[[2*m+1]] <- eval(cl[[2]])$amelia.args
new.out$amelia.args$m <- m
count <- 1
for (i in 1:length(ms)) {
for (j in 1:ms[i]) {
new.out[[count]] <- eval(cl[[1+i]])[[j]]
new.out[[m+count]] <- eval(cl[[1+i]])[[ms[i]+j]]
new.out$amelia.args[[count+19]] <- eval(cl[[1+i]])$amelia.args[[j+19]]
names(new.out)[count] <- paste("m", count, sep="")
names(new.out)[m+count] <- paste("theta", count, sep="")
names(new.out$amelia.args)[count+19] <- paste("iter.hist", count, sep="")
count <- count + 1
}
}
return(new.out)
}
amelia.prep <- function(x,m=5,p2s=1,frontend=FALSE,idvars=NULL,logs=NULL,
ts=NULL,cs=NULL,empri=NULL,
tolerance=0.0001,polytime=NULL,splinetime=NULL,startvals=0,lags=NULL,
leads=NULL,intercs=FALSE,sqrts=NULL,
lgstc=NULL,noms=NULL,incheck=TRUE,ords=NULL,collect=FALSE,
arglist=NULL, priors=NULL,var=NULL,autopri=0.05,bounds=NULL,
max.resample=NULL, overimp = NULL, emburn=NULL, boot.type=NULL) {
code <- 1
if (!identical(arglist,NULL)) {
if (!("ameliaArgs" %in% class(arglist))) {
error.code <- 46
error.mess <- paste("The argument list you provided is invalid.")
return(list(code=error.code, message=error.mess))
}
idvars <- arglist$idvars
empri <- arglist$empri
ts <- arglist$ts
cs <- arglist$cs
tolerance <- arglist$tolerance
polytime <- arglist$polytime
splinetime<- arglist$splinetime
lags <- arglist$lags
leads <- arglist$leads
logs <- arglist$logs
sqrts <- arglist$sqrts
lgstc <- arglist$lgstc
intercs <- arglist$intercs
noms <- arglist$noms
startvals <- arglist$startvals
ords <- arglist$ords
priors <- arglist$priors
autopri <- arglist$autopri
empri <- arglist$empri
bounds <- arglist$bounds
overimp <- arglist$overimp
emburn <- arglist$emburn
boot.type <- arglist$boot.type
max.resample <- arglist$max.resample
}
if (is.data.frame(x)) x <- as.data.frame(x)
numopts<-nametonumber(x=x,ts=ts,cs=cs,idvars=idvars,noms=noms,ords=ords,
logs=logs,sqrts=sqrts,lgstc=lgstc,lags=lags,leads=leads)
if (numopts$code == 1) {
return(list(code=44,message=numopts$mess))
}
if (incheck) {
checklist<-amcheck(x = x, m = m, idvars = numopts$idvars, priors =
priors, empri = empri, ts = numopts$ts, cs = numopts$cs,
tolerance = tolerance, polytime =
polytime, splinetime = splinetime, lags = numopts$lags, leads = numopts$leads, logs
= numopts$logs, sqrts = numopts$sqrts, lgstc
=numopts$lgstc, p2s = p2s, frontend = frontend,
intercs = intercs, noms = numopts$noms,
startvals = startvals, ords = numopts$ords, collect =
collect, bounds=bounds,
max.resample=max.resample, overimp = overimp, emburn=emburn,
boot.type=boot.type)
if (!is.null(checklist$code)) {
return(list(code=checklist$code,message=checklist$mess))
}
m <- checklist$m
priors <- checklist$priors
}
priors <- generatepriors(AMr1 = is.na(x),empri = empri, priors = priors)
archv <- match.call(expand.dots=TRUE)
archv[[1]] <- NULL
archv <- list(idvars=numopts$idvars, logs=numopts$logs, ts=numopts$ts, cs=numopts$cs,
empri=empri, tolerance=tolerance,
polytime=polytime, splinetime=splinetime, lags=numopts$lags, leads=numopts$leads,
intercs=intercs, sqrts=numopts$sqrts, lgstc=numopts$lgstc,
noms=numopts$noms, ords=numopts$ords,
priors=priors, autopri=autopri, bounds=bounds,
max.resample=max.resample, startvals=startvals,
overimp = overimp, emburn=emburn, boot.type=boot.type)
if (p2s==2) {
cat("beginning prep functions\n")
flush.console()
}
d.trans<-amtransform(x,logs=numopts$logs,sqrts=numopts$sqrts,lgstc=numopts$lgstc)
d.subset<-amsubset(d.trans$x,idvars=numopts$idvars,p2s=p2s,ts=numopts$ts,cs=numopts$cs,polytime=polytime,splinetime=splinetime,intercs=intercs,noms=numopts$noms,priors=priors,bounds=bounds,
lags=numopts$lags, leads=numopts$leads, overimp=overimp)
d.scaled<-scalecenter(d.subset$x,priors=d.subset$priors,bounds=d.subset$bounds)
d.stacked<-amstack(d.scaled$x,colorder=TRUE,priors=d.scaled$priors,bounds=d.scaled$bounds)
if (incheck) {
realAMp <- ncol(d.stacked$x)
realAMn <- nrow(d.stacked$x)
if (!identical(empri,NULL)) {
if (realAMp*2 > realAMn+empri) {
error.code<-34
error.mess<-paste("The number of observations in too low to estimate the number of \n",
"parameters. You can either remove some variables, reduce \n",
"the order of the time polynomial, or increase the empirical prior.")
return(list(code=error.code,message=error.mess))
}
if (realAMp*4 > realAMn +empri) {
warning("You have a small number of observations, relative to the number, of variables in the imputation model. Consider removing some variables, or reducing the order of time polynomials to reduce the number of parameters.")
}
} else {
if (realAMp*2 > realAMn) {
error.code<-34
error.mess<-paste("The number of observations is too low to estimate the number of \n",
"parameters. You can either remove some variables, reduce \n",
"the order of the time polynomial, or increase the empirical prior.")
return(list(code=error.code,message=error.mess))
}
if (realAMp*4 > realAMn) {
warning("You have a small number of observations, relative to the number, of variables in the imputation model. Consider removing some variables, or reducing the order of time polynomials to reduce the number of parameters.")
}
}
}
return(list(
x = d.stacked$x,
code = code,
priors = d.stacked$priors,
n.order = d.stacked$n.order,
p.order = d.stacked$p.order,
scaled.mu = d.scaled$mu,
scaled.sd = d.scaled$sd,
trans.x = d.trans$x,
blanks = d.subset$blanks,
idvars = d.subset$idvars,
ts = numopts$ts,
cs = numopts$cs,
noms = numopts$noms,
index = d.subset$index,
ords = numopts$ords,
m = m,
logs = numopts$logs,
archv = archv,
xmin = d.trans$xmin,
sqrts = numopts$sqrts,
lgstc = numopts$lgstc,
subset.index = d.subset$index,
autopri = autopri,
bounds = d.stacked$bounds,
theta.names = d.subset$theta.names,
missMatrix = d.subset$missMatrix,
overvalues = d.subset$overvalues,
empri = empri,
tolerance = tolerance))
} |
phe_dsr <- function(data, x, n, stdpop = esp2013, stdpoptype = "vector",
type = "full", confidence = 0.95, multiplier = 100000) {
if (missing(data)|missing(x)|missing(n)) {
stop("function phe_dsr requires at least 3 arguments: data, x, n")
}
if (n_distinct(select(ungroup(count(data)),n)) != 1) {
stop("data must contain the same number of rows for each group")
}
if (!(stdpoptype %in% c("vector","field"))) {
stop("valid values for stdpoptype are vector and field")
} else if (stdpoptype == "vector") {
if (pull(slice(select(ungroup(count(data)),n),1)) != length(stdpop)) {
stop("stdpop length must equal number of rows in each group within data")
}
data <- mutate(data,stdpop_calc = stdpop)
} else if (stdpoptype == "field") {
if (deparse(substitute(stdpop)) %in% colnames(data)) {
data <- mutate(data,stdpop_calc = {{ stdpop }} )
} else stop("stdpop is not a field name from data")
}
if (any(pull(data, {{ x }}) < 0, na.rm=TRUE)) {
stop("numerators must all be greater than or equal to zero")
} else if (any(pull(data, {{ n }}) <= 0)) {
stop("denominators must all be greater than zero")
} else if (!(type %in% c("value", "lower", "upper", "standard", "full"))) {
stop("type must be one of value, lower, upper, standard or full")
} else if (length(confidence) >2) {
stop("a maximum of two confidence levels can be provided")
} else if (length(confidence) == 2) {
if (!(confidence[1] == 0.95 & confidence[2] == 0.998)) {
stop("two confidence levels can only be produced if they are specified as 0.95 and 0.998")
}
} else if ((confidence < 0.9)|(confidence > 1 & confidence < 90)|(confidence > 100)) {
stop("confidence level must be between 90 and 100 or between 0.9 and 1")
}
if (length(confidence) == 2) {
conf1 <- confidence[1]
conf2 <- confidence[2]
phe_dsr <- data %>%
mutate(wt_rate = na.zero({{ x }}) * stdpop_calc / ({{ n }}),
             sq_rate = na.zero({{ x }}) * (stdpop_calc / ({{ n }}))^2) %>%
summarise(total_count = sum({{ x }},na.rm=TRUE),
total_pop = sum({{ n }}),
value = sum(wt_rate) / sum(stdpop_calc) * multiplier,
vardsr = 1/sum(stdpop_calc)^2 * sum(sq_rate),
lower95_0cl = value + sqrt((vardsr/sum({{ x }}, na.rm=TRUE)))*
(byars_lower(sum({{ x }}, na.rm=TRUE), conf1) - sum({{ x }}, na.rm=TRUE)) * multiplier,
upper95_0cl = value + sqrt((vardsr/sum({{ x }}, na.rm=TRUE)))*
(byars_upper(sum({{ x }}, na.rm=TRUE), conf1) - sum({{ x }}, na.rm=TRUE)) * multiplier,
lower99_8cl = value + sqrt((vardsr/sum({{ x }}, na.rm=TRUE)))*
(byars_lower(sum({{ x }}, na.rm=TRUE), conf2) - sum({{ x }}, na.rm=TRUE)) * multiplier,
upper99_8cl = value + sqrt((vardsr/sum({{ x }}, na.rm=TRUE)))*
(byars_upper(sum({{ x }}, na.rm=TRUE), conf2) - sum({{ x }}, na.rm=TRUE)) * multiplier,
.groups = "keep") %>%
select(-vardsr) %>%
mutate(confidence = "95%, 99.8%",
statistic = paste("dsr per",format(multiplier,scientific=F)),
method = "Dobson")
phe_dsr$value[phe_dsr$total_count < 10] <- NA
phe_dsr$upper95_0cl[phe_dsr$total_count < 10] <- NA
phe_dsr$lower95_0cl[phe_dsr$total_count < 10] <- NA
phe_dsr$upper99_8cl[phe_dsr$total_count < 10] <- NA
phe_dsr$lower99_8cl[phe_dsr$total_count < 10] <- NA
phe_dsr$statistic[phe_dsr$total_count < 10] <- "dsr NA for total count < 10"
if (type == "lower") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -value, -upper95_0cl, -upper99_8cl,
-confidence, -statistic, -method)
} else if (type == "upper") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -value, -lower95_0cl, -lower99_8cl,
-confidence, -statistic, -method)
} else if (type == "value") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -lower95_0cl, -lower99_8cl, -upper95_0cl, -upper99_8cl,
-confidence, -statistic, -method)
} else if (type == "standard") {
phe_dsr <- phe_dsr %>%
select(-confidence, -statistic, -method)
}
} else {
if (confidence >= 90) {
confidence <- confidence/100
}
phe_dsr <- data %>%
mutate(wt_rate = na.zero({{ x }}) * stdpop_calc / ({{ n }}),
             sq_rate = na.zero({{ x }}) * (stdpop_calc / ({{ n }}))^2) %>%
summarise(total_count = sum({{ x }},na.rm=TRUE),
total_pop = sum({{ n }}),
value = sum(wt_rate) / sum(stdpop_calc) * multiplier,
vardsr = 1/sum(stdpop_calc)^2 * sum(sq_rate),
lowercl = value + sqrt((vardsr/sum({{ x }},na.rm=TRUE)))*(byars_lower(sum({{ x }},na.rm=TRUE),
confidence)-sum({{ x }},na.rm=TRUE)) * multiplier,
uppercl = value + sqrt((vardsr/sum({{ x }},na.rm=TRUE)))*(byars_upper(sum({{ x }},na.rm=TRUE),
confidence)-sum({{ x }},na.rm=TRUE)) * multiplier,
.groups = "keep") %>%
select(-vardsr) %>%
mutate(confidence = paste(confidence*100,"%",sep=""),
statistic = paste("dsr per",format(multiplier,scientific=F)),
method = "Dobson")
phe_dsr$value[phe_dsr$total_count < 10] <- NA
phe_dsr$uppercl[phe_dsr$total_count < 10] <- NA
phe_dsr$lowercl[phe_dsr$total_count < 10] <- NA
phe_dsr$statistic[phe_dsr$total_count < 10] <- "dsr NA for total count < 10"
if (type == "lower") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -value, -uppercl, -confidence, -statistic, -method)
} else if (type == "upper") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -value, -lowercl, -confidence, -statistic, -method)
} else if (type == "value") {
phe_dsr <- phe_dsr %>%
select(-total_count, -total_pop, -lowercl, -uppercl, -confidence, -statistic, -method)
} else if (type == "standard") {
phe_dsr <- phe_dsr %>%
select(-confidence, -statistic, -method)
}
}
return(phe_dsr)
} |
SelectV <- function(data,grouping,Selmethod=c("ExpHC","HC","Fdr","Fair","fixedp"),
NullDist=c("locfdr","Theoretical"),uselocfdr=c("onlyHC","always"),
minlocfdrp=200,comvar=TRUE,Fdralpha=0.5,ExpHCalpha=0.5,HCalpha0=0.1,
maxp=ncol(data),tol=1E-12,...)
{
Selmethod <- match.arg(Selmethod)
NullDist <-match.arg(NullDist)
uselocfdr <- match.arg(uselocfdr)
if (NullDist != "locfdr" && uselocfdr == "always")
stop("Error: uselocfdr argument can only be used when NullDist is set to locfdr")
p <- ncol(data)
if (p < minlocfdrp) NullDist <- "Theoretical"
nk <- table(grouping)
k <- nrow(nk)
nk <- as.vector(nk)
n <- sum(nk)
if (Selmethod=="Fair" && k!=2)
stop("Fair method can only be used with two-group classification problems.\n")
if (k==2) {
tscr <- tscores(data,grouping,n,nk,comvar=comvar)
scores <- abs(tscr$st)
pvalues <- 2*pt(scores,tscr$df,lower.tail=FALSE)
}
else {
fscr <- fscores(data,grouping,n,nk,k)
scores <- fscr$st
pvalues <- pf(scores,k-1,fscr$df,lower.tail=FALSE)
}
pvalues[pvalues<tol] <- tol
pvalues[pvalues>1-tol] <- 1 - tol
if (Selmethod=="fixedp") {
sortedpv <- sort(pvalues,index.return=TRUE)
return(list(nvkpt=maxp,vkptInd=sort(sortedpv$ix[1:maxp])))
}
if (NullDist=="locfdr" && uselocfdr == "always") pvalues <- locfdrpval(pvalues)
if (Selmethod=="ExpHC" || Selmethod=="Fdr")
{
sortedpv <- sort(pvalues,index.return=TRUE)
if (Selmethod=="ExpHC") usefullpv <- sortedpv$x[sortedpv$x<ExpHCalpha*1:p/(p*sum(1/1:p))]
else usefullpv <- sortedpv$x[sortedpv$x<Fdralpha*1:p/p]
if (length(usefullpv)==0) Fdrnvar <- 1
else {
maxpv <- max(usefullpv)
Fdrnvar <- min(maxp,which(sortedpv$x==maxpv))
}
}
if (Selmethod=="Fair") {
Stddata <- Fairstdbygrps(data,grouping,nk,n,p)
if (n-k>p) Fairres <- Fair(scores^2,p,nk,R=t(Stddata)%*%Stddata)
else Fairres <- Fair(scores^2,p,nk,StdDt=Stddata)
names(Fairres$m) <- NULL
return(list(nvkpt=Fairres$m,vkptInd=Fairres$vkptInd))
}
if (NullDist=="locfdr" && uselocfdr == "onlyHC") pvalues <- locfdrpval(pvalues)
{
if (Selmethod == "ExpHC" || Selmethod == "HC") {
if (Selmethod== "ExpHC") minvar <- Fdrnvar
else minvar <- 1
HCres <- HC(p,pvalues,minvkpt=minvar,alpha0=min(HCalpha0,maxp/p))
names(HCres$nkptvar) <- NULL
return(list(nvkpt=HCres$nkptvar,vkptInd=HCres$varkept))
}
else {
if (Selmethod == "Fdr") nkptvar = Fdrnvar
names(nkptvar) <- NULL
return(list(nvkpt=nkptvar,vkptInd=sortedpv$ix[1:nkptvar]))
}
}
}
tscores <- function(data,grouping,n,nk,comvar)
{
Xbark <- apply(data,2,grpmeans,grp=grouping)
vark <- apply(data,2,grpvar,grp=grouping)
if (comvar==TRUE) {
df <- n-2
denom <- sqrt( (1/nk[1]+1/nk[2]) * ((nk[1]-1)*vark[1,]+(nk[2]-1)*vark[2,]) / df )
}
else {
tmp1 <- vark[1,]/nk[1]
tmp2 <- vark[2,]/nk[2]
tmps <- tmp1 + tmp2
df <- round( tmps^2/ ( tmp1^2/(nk[1]-1)+tmp2^2/(nk[2]-1) ) )
denom <- sqrt(tmps)
}
list(st=(Xbark[1,]-Xbark[2,])/denom,df=df)
}
fscores <- function(data,grouping,n,nk,k)
{
df <- n - k
vark <- apply(data,2,grpvar,grp=grouping)
W <- apply((nk-1)*vark,2,sum)
B <- (n-1)*apply(data,2,var) - W
list(st=(B/(k-1))/(W/df),df=df)
}
locfdrpval <- function(pvalues)
{
zscores <- qnorm(pvalues)
empnull <- mylocfdr(zscores,plot=0,silently=TRUE)
if (class(empnull)=="error1") empnull <- mylocfdr(zscores,plot=0,nulltype=2,silently=TRUE)
if (class(empnull)=="error3") empnull <- mylocfdr(zscores,plot=0,nulltype=1,silently=TRUE)
if (class(empnull)!="error2") {
zscores <- (zscores-empnull$fp0[3,1])/empnull$fp0[3,2]
pvalues <- pnorm(zscores)
}
return(pvalues)
}
HC <- function(p,pvalues,HCplus=FALSE,minvkpt=1,alpha0=0.1)
{
sortedpv <- sort(pvalues,index.return=TRUE)
   if (HCplus) p0 <- max(minvkpt,length(sortedpv$x[sortedpv$x<=1/p])+1)
else p0 <- minvkpt
p1 <- floor(alpha0*p)
if (p0 >= p1) nkptvar <- p0
else {
unifq <- (p0:p1)/p
HC <- p * (unifq-sortedpv$x[p0:p1]) / sqrt( (p0:p1)*(1-unifq) )
if (max(HC)>0.) nkptvar <- which.max(HC)+p0-1
else nkptvar <- p0
}
XPind <- sort(sortedpv$ix[1:nkptvar])
list(threshold=sortedpv$x[nkptvar],varkept=XPind,nkptvar=nkptvar)
}
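## Toy illustration with simulated p-values (my own): the higher-criticism rule
## keeps the variables whose ordered p-values fall furthest below the uniform
## quantiles, so a handful of very small p-values among uniform noise should be
## retained (possibly with a stray extra).
if (FALSE) {
  set.seed(42)
  pv <- c(runif(5, 0, 1e-4), runif(95))  # 5 planted signals among 100 variables
  HC(p = length(pv), pvalues = pv)       # $varkept should cover indices 1:5
}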
Fair <- function(T2,p,nk,R=NULL,StdDt=NULL,ivar=FALSE,blocksize=25,maxblrun=7,maxp=p)
{
maxeigvl <- function(m,M) {
indices <- srtdT2$ix[1:m]
return(eigen(M[indices,indices],symmetric=TRUE,only.values=TRUE)$values[1])
}
maxsingvl <- function(m,M) rghtsngv(M[,srtdT2$ix[1:m],drop=FALSE],nv=0)$d[1]
n <- nk[1]+nk[2]
n1n2 <- nk[1]*nk[2]
n1minusn2 <- nk[1]-nk[2]
T2sum <- array(dim=blocksize)
srtdT2 <- sort(T2,decreasing=TRUE,index.return=TRUE)
bestval <- pT2sum <- pcrtval <- 0.
run <- 0
for (a in 1:(floor(maxp/blocksize)+1)) {
m0 <- (a-1)*blocksize
if (m0==maxp) break
blocksize <- min(blocksize,maxp-m0)
indices <- (m0+1):(m0+blocksize)
T2sum[1] <- pT2sum + srtdT2$x[m0+1]
if (blocksize>1) for (b in 2:blocksize) T2sum[b] <- T2sum[b-1] + srtdT2$x[m0+b]
pT2sum <- T2sum[blocksize]
if (ivar==FALSE) {
if (!is.null(StdDt)) lambdamax <- sapply(indices,maxsingvl,M=StdDt)^2/(nrow(StdDt)-2)
else lambdamax <- sapply(indices,maxeigvl,M=R)
crtval <- n*(T2sum + indices*n1minusn2/n)^2/(lambdamax*n1n2*(indices+T2sum))
}
else crtval <- n*(T2sum + indices*n1minusn2/n)^2/(n1n2*(indices+T2sum))
m1 <- which.max(crtval)
if (crtval[m1] > bestval) {
m <- m0+m1
bestval <- crtval[m1]
}
else if (crtval[m1] > pcrtval) run <- 0
else run <- run+1
if (run > maxblrun) break
pcrtval <- crtval[m1]
}
if (m > maxp) m <- maxp
return(list(m=m,vkptInd=srtdT2$ix[1:m],threshold=sqrt(srtdT2$x[m])))
}
Fairstdbygrps <- function(data,grouping,nk=NULL,n=NULL,p=NULL)
{
if (is.null(nk)) nk <- as.vector(table(grouping))
if (is.null(n)) n <- nk[1] + nk[2]
if (is.null(p)) p <- ncol(data)
grplevels <- levels(grouping)
vark <- apply(data,2,grpvar,grp=grouping)
meank <- apply(data,2,grpmeans,grp=grouping)
globals <- sqrt((vark[1,]+vark[2,])/2)
for (i in 1:2) data[grouping==grplevels[i],] <- scale(data[grouping==grplevels[i],],center=meank[i,],scale=globals)
data
} |
genhaplopairs <- function(n) {
two2n <- 2^n
bmat <- matrix(0,two2n,n)
bmat[1,1] <- 0
bmat[2,1] <- 1
two2i <- 1
for (i in 2:n) {
two2i <- two2i*2
bmat[1:two2i,i] <- 0
bmat[two2i+(1:two2i),i] <- 1
bmat[two2i+(1:two2i),1:(i-1)] <- bmat[1:two2i,1:(i-1)]
}
np <- sum(choose(n,1:n)*2^(0:(n-1)))
haplopairs <- matrix(0, np, 2)
nstart <- 1
nend <- 0
g1indextable <- matrix(0,two2n,2)
for (i in 2:two2n) {
g1indextable[i,1] <- nstart
j <- sum(bmat[i,])
two2j <- 2^(j-1)
g1indextable[i,2] <- two2j
nend <- nend + two2j
jj <- which(bmat[i,]==1) - 1
haplopairs[nstart:nend, 1] <- bmat[1:two2j,1:j]%*%2^jj
haplopairs[nstart:nend, 2] <- sum(2^jj) - haplopairs[nstart:nend, 1]
nstart <- nstart + two2j
}
list(g1tbl=g1indextable,hpair=haplopairs)
} |
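## Small check (my own): for n = 2 loci the routine enumerates, for every
## non-null genotype mask, the ordered haplotype pairs consistent with it;
## g1tbl holds the start index and count of each block inside hpair.
if (FALSE) {
  hp <- genhaplopairs(2)
  hp$g1tbl   # 4 x 2: start index and number of pairs per genotype mask
  hp$hpair   # 4 x 2: complementary haplotype codes
}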
test_results <- read_stats(system.file("test_data/anova_glm.json",
package = "tidystats"))
tolerance <- 0.001
counts <- c(18,17,15,20,10,20,25,13,12)
outcome <- gl(3,1,9)
treatment <- gl(3,3)
d.AD <- data.frame(treatment, outcome, counts)
glm.D93 <- glm(counts ~ outcome + treatment, family = poisson())
glm.D93a <- update(glm.D93, ~treatment * outcome)
test_that("anova.glm works", {
model <- anova(glm.D93)
tidy_model <- tidy_stats(model)
tidy_model_test <- test_results$anova_glm
tidy_model$package$version <- NULL
tidy_model_test$package$version <- NULL
expect_equal(tidy_model, tidy_model_test, tolerance = tolerance)
})
test_that("cp anova.glm works", {
model <- anova(glm.D93, test = "Cp")
tidy_model <- tidy_stats(model)
tidy_model_test <- test_results$anova_glm_cp
tidy_model$package$version <- NULL
tidy_model_test$package$version <- NULL
expect_equal(tidy_model, tidy_model_test, tolerance = tolerance)
})
test_that("chisq anova.glm works", {
model <- anova(glm.D93, test = "Chisq")
tidy_model <- tidy_stats(model)
tidy_model_test <- test_results$anova_glm_chisq
tidy_model$package$version <- NULL
tidy_model_test$package$version <- NULL
expect_equal(tidy_model, tidy_model_test, tolerance = tolerance)
})
test_that("rao anova.glm works", {
model <- anova(glm.D93, glm.D93a, test = "Rao")
tidy_model <- tidy_stats(model)
tidy_model_test <- test_results$anova_glm_rao
tidy_model$package$version <- NULL
tidy_model_test$package$version <- NULL
expect_equal(tidy_model, tidy_model_test, tolerance = tolerance)
}) |
gauspuls <- function(t, fc = 1e3, bw = 0.5) {
if (!isPosscal(fc))
stop("fc must be a non-negative real scalar")
if (!isPosscal(bw) || bw <= 0)
stop("bw must be a positive real scalar")
fv <- - (bw^2 * fc^2) / (8 * log(10 ^ (-6 / 20)))
tv <- 1 / (4 * pi^2 * fv)
y <- exp(-t * t / (2 * tv)) * cos(2 * pi * fc * t)
y
} |
specify.prior <- function(roots = NULL, leaves = NULL, nodes, indices = FALSE){
if(missing(nodes)){
stop("Please specify a value for the 'nodes' argument!")
}
if(!is.null(roots)){
blacklist_root <- specify_root(roots, nodes, indices)
} else{
blacklist_root <- NULL
}
if(!is.null(leaves)){
blacklist_leaf <- specify_leaf(leaves, nodes, indices)
} else{
blacklist_leaf <- NULL
}
rbind(blacklist_root, blacklist_leaf)
}
specify_root <- function(root, nodes, indices = FALSE){
if(!all(root %in% nodes)){
msg <- sprintf("The list of root nodes must be contained within the full list of nodes!")
stop(msg)
}
lists <- lapply(root, function(r) t(sapply(nodes, function(x) c(x, r), USE.NAMES = FALSE)))
lists <- do.call("rbind", lists)
if(!indices){
lists
} else{
matrix(match(lists, nodes), ncol = 2)
}
}
specify_leaf <- function(leaf, nodes, indices = FALSE){
if(!all(leaf %in% nodes)){
msg <- sprintf("The list of leaf nodes must be contained within the full list of nodes!")
stop(msg)
}
lists <- lapply(leaf, function(r) t(sapply(nodes, function(x) c(r, x), USE.NAMES = FALSE)))
lists <- do.call("rbind", lists)
if(!indices){
lists
} else{
matrix(match(lists, nodes), ncol = 2)
}
} |
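## Example call with toy node names (mine): declaring "A" a root blacklists all
## arcs into "A", and declaring "C" a leaf blacklists all arcs out of "C";
## the first column is the arc's origin and the second its destination.
if (FALSE) {
  nodes <- c("A", "B", "C")
  specify.prior(roots = "A", leaves = "C", nodes = nodes)
  specify.prior(roots = "A", nodes = nodes, indices = TRUE)  # same arcs as indices
}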
f.aggregate <- function(data){
.nlines <- dim(data)[1]
.tag <- f.create.tag(data)
.lines <- tapply(1:.nlines, .tag, function(x)x)
.freq <- f.groupsum(X = rep(1, .nlines), INDICES = .tag)
.unique <- !duplicated(.tag)
.tag.unique <- .tag[.unique]
.data.agg <- dframe(data, freq = .freq)
.data.agg <- .data.agg[.unique, , drop = F]
.lines <- .lines[.tag.unique]
attr(.data.agg, "orig.lines") <- .lines
return(.data.agg)
} |
mmpcAnm<-function(data){
fitG=mmpc(data,test="mi-g-sh")
fitG=amat(fitG)
n=ncol(fitG)
pb <- txtProgressBar(0,sum(fitG==1)/2,style = 3)
count=0
for(i in 1:n){
for(j in 1:n){
if(fitG[i,j]==1&&fitG[j,i]==1){
count=count+1
setTxtProgressBar(pb,count)
X=data[,c(i,j)]
fit<-getParents(X=X,method = "bivariateANM")
if(fit[1,2]==1){
fitG[j,i]=0
}else if(fit[2,1]==1){
fitG[i,j]=0
}else{
fitG[j,i]=0
fitG[i,j]=0
}
}
}
}
return(fitG)
} |
rskellam <- function(n, lambda1, lambda2=lambda1){
if (missing(n)|missing(lambda1)) stop("first 2 arguments are required")
if (length(n)>1) n <- length(n)
lambda1 <- rep(lambda1,length.out=n)
lambda2 <- rep(lambda2,length.out=n)
oops <- !(is.finite(lambda1)&(lambda1>=0)&is.finite(lambda2)&(lambda2>=0))
if(any(oops)) warning("NaNs produced")
ret <- rep(NaN,length.out=n)
n <- n-sum(oops)
ret[!oops] <- stats::rpois(n,lambda1[!oops])-stats::rpois(n,lambda2[!oops])
ret
} |
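## Simulation check (my own): a Skellam(lambda1, lambda2) draw is the difference
## of two independent Poissons, so the sample mean should be near
## lambda1 - lambda2 and the sample variance near lambda1 + lambda2.
if (FALSE) {
  set.seed(123)
  x <- rskellam(1e5, lambda1 = 4, lambda2 = 1.5)
  c(mean(x), var(x))  # approximately 2.5 and 5.5
}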
"egypt" |
context("Tests regression code (rrreg.predictor)")
rm(list=ls())
p <- 2/3
p1 <- 1/6
p0 <- 1/6
nigeria$cov.age.10 <- nigeria$cov.age/10
nigeria$cov.age.10sq <- nigeria$cov.age.10^2
test_that("rrreg works", {
skip_on_cran()
set.seed(3)
rr.q1.reg.obj1 <- rrreg(rr.q1 ~ cov.asset.index + cov.married +
cov.age.10 + cov.age.10sq + cov.education + cov.female,
data = nigeria, p = p, p1 = p1, p0 = p0,
design = "forced-known")
summary(rr.q1.reg.obj1)
set.seed(3)
rr.q1.pred.obj1 <- rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
print(rr.q1.pred.obj1)
summary(rr.q1.pred.obj1)
coef(rr.q1.pred.obj1)
vcov(rr.q1.pred.obj1)
set.seed(3)
rr.q1.pred.obj2 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, fit.sens = "glm",
p = p, p1 = p1, p0 = p0, design = "forced-known")
print(rr.q1.pred.obj2)
summary(rr.q1.pred.obj2)
coef(rr.q1.pred.obj2)
vcov(rr.q1.pred.obj2)
set.seed(3)
rr.q1.pred.obj3 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, fit.outcome = "glm",
p = p, p1 = p1, p0 = p0, design = "forced-known")
print(rr.q1.pred.obj3)
summary(rr.q1.pred.obj3)
coef(rr.q1.pred.obj3)
vcov(rr.q1.pred.obj3)
set.seed(3)
rr.q1.pred.obj4 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, bstart = coef(rr.q1.reg.obj1),
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj5 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, tstart =c(coef(rr.q1.reg.obj1), .02),
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(1)
rr.q1.pred.obj6 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = TRUE, estconv = TRUE,
data = nigeria, verbose = FALSE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj7 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, maxIter = 10500,
p = p, p1 = p1, p0 = p0, design = "forced-known")
summary(rr.q1.pred.obj7)
print(rr.q1.pred.obj7)
set.seed(3)
rr.q1.pred.obj8 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = TRUE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj9 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, optim = TRUE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj10 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, em.converge = 10^(-3),
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj11 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE, glmMaxIter = 21000,
p = p, p1 = p1, p0 = p0, design = "forced-known")
set.seed(3)
rr.q1.pred.obj12 <-
rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = FALSE,
data = nigeria, verbose = FALSE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
rr.q1.pred.pred1 <- predict(rr.q1.pred.obj1,
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10000)
rr.q1.pred.pred2 <- predict(rr.q1.pred.obj1, alpha = .1,
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10000)
rr.q1.pred.pred1
rr.q1.pred.pred2
rr.q1.pred.pred3 <- predict(rr.q1.pred.obj1,
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10500)
rr.q1.pred.pred4 <- predict(rr.q1.pred.obj1,
avg = FALSE, quasi.bayes = TRUE,
n.sims = 10000)
rr.q1.pred.pred5 <- predict(rr.q1.pred.obj1, newdata = nigeria,
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10000)
rr.q1.pred.pred6 <- predict(rr.q1.pred.obj1,
avg = TRUE, quasi.bayes = FALSE,
n.sims = 10000)
rr.q1.pred.pred7 <- predict(rr.q1.pred.obj1, fix.z = .4,
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10000)
rr.q1.pred.pred8 <- predict(rr.q1.pred.obj1, fix.z = rep(.4, nrow(rr.q1.pred.obj1$data)),
avg = TRUE, quasi.bayes = TRUE,
n.sims = 10000)
})
test_that("output from rrreg.predictor gives us unscaled data, x", {
set.seed(3)
rr.q1.pred.obj1 <- rrreg.predictor(civic ~ cov.asset.index + cov.married + cov.age.10 + I(cov.age.10^2) +
cov.education + cov.female + rr.q1,
rr.item = "rr.q1", parstart = FALSE, estconv = TRUE,
data = nigeria, verbose = FALSE,
p = p, p1 = p1, p0 = p0, design = "forced-known")
rrpX <- rr.q1.pred.obj1$x
x <- cbind(1, nigeria[, c("civic", "rr.q1", "cov.asset.index", "cov.married", "cov.age.10",
"cov.age.10sq", "cov.education", "cov.female")])
x <- as.matrix(x[complete.cases(x),])
x <- x[, c(1, "cov.asset.index", "cov.married", "cov.age.10",
"cov.age.10sq", "cov.education", "cov.female")]
expect_equivalent(rrpX, x)
}) |
label_switch <- function(x, m){
N <- nrow(x)
g <- ncol(x)
avg <- colMeans(x[1:m, ])
s2 <- apply(x[1:m, ], 2, sd)
perm <- rbind(c(1:g), allPerms(g))
true_perm <- matrix(nrow = N - m, ncol = g)
for(i in (m+1):N){
true_perm[i-m, ] <- c(1:g)
d <- sum((x[i, ] - avg)^2 / s2)
for(p in 2:nrow(perm)){
dd <- sum((x[i, ] - avg[perm[p, ]])^2 / s2[perm[p, ]])
if(dd < d){
d <- dd
true_perm[i - m, ] <- perm[p, ]
}
}
x[i, ] <- x[i, true_perm[i - m, ]]
avgg <- avg
avg <- (i - 1)/i * avgg + 1/i * x[i, ]
for(k in 1:g){
s2[k] <- (i - 1) / i * s2[k] +
(i - 1) / i *(avgg[k] - avg[k])^2 +
1 / i * (x[i, k] - avg[k])^2
}
}
list(x = x, true_perm = true_perm)
} |
lvscore <- function (x, W, option = 1)
{
nobs <- nrow(x)
nlv <- ncol(W)
ctz <- x - matrix(1,nobs,1)%*%apply(x,2,mean)
covz <- (t(ctz)%*%ctz)/nobs
Dstz <- sqrt(diag(diag(covz)))
UstdW <- solve(Dstz,W)
if ( option == 1 ) {
sumUstdW <- apply(UstdW,2,sum)
rUstdW <- UstdW
for (j in 1:nlv) {
rUstdW[,j] <- UstdW[,j]/sumUstdW[j]
}
lvscore <- x%*%rUstdW
} else if ( option == 2 ) {
lvscore <- x%*%UstdW
}
lvscore
} |
colSD <- function(x, ...) {apply(X=x, MARGIN=2, FUN=sd, ...)}
item_dmacs <- function (LambdaR, ThreshR,
LambdaF, ThreshF,
MeanF, VarF,
SD, categorical = FALSE,
stepsize = .001) {
if (length(ThreshR) != length(ThreshF)) stop("Item must have same number of thresholds in both reference and focal group")
if (LambdaR == 0) {return(NA)}
if (length(ThreshR) > 1) { categorical <- TRUE}
z <- seq(-5, 5, stepsize)
integrand <- (expected_value(LambdaF, ThreshF, MeanF+z*sqrt(VarF), categorical) -
expected_value(LambdaR, ThreshR, MeanF+z*sqrt(VarF), categorical))^2 * dnorm(z)
sqrt(sum(integrand*stepsize*sqrt(VarF)))/SD
}
delta_mean_item <- function (LambdaR, ThreshR,
LambdaF, ThreshF,
MeanF, VarF,
categorical = FALSE, stepsize = .001) {
if (length(ThreshR) != length(ThreshF)) stop("Item must have same number of thresholds in both reference and focal group")
if(LambdaR == 0) {return(NA)}
if (length(ThreshR) > 1) { categorical <- TRUE}
z <- seq(-5, 5, stepsize)
integrand <- (expected_value(LambdaF, ThreshF, MeanF+z*sqrt(VarF), categorical) -
expected_value(LambdaR, ThreshR, MeanF+z*sqrt(VarF), categorical)) * dnorm(z)
sum(integrand*stepsize*sqrt(VarF))
}
delta_var <- function (LambdaR, LambdaF, VarF, categorical = FALSE) {
if(categorical) {
warning("Delta variance can only be computed for linear models, not for categorical ones")
return(NULL)
}
delta_cov_mat <- matrix(nrow=length(LambdaR), ncol=length(LambdaR))
for (i in 1:length(LambdaR)) {
for (j in 1:length(LambdaR)) {
      delta_cov_mat[i,j] <- LambdaR[[j]]*(LambdaF[[i]]-LambdaR[[i]])*VarF +
        LambdaR[[i]]*(LambdaF[[j]]-LambdaR[[j]])*VarF +
        (LambdaF[[i]]-LambdaR[[i]])*(LambdaF[[j]]-LambdaR[[j]])*VarF
}
}
sum(delta_cov_mat)
}
expected_value <- function (Lambda, Thresh, Theta, categorical = FALSE) {
if (length(Thresh) > 1) { categorical <- TRUE }
if (categorical) {
max <- length(Thresh)
Thresh[max+1] <- 9999999
expected <- 0
for (i in 1:max) {expected <- expected + i*(pnorm(Lambda*(Theta-Thresh[i]))-pnorm(Lambda*(Theta-Thresh[i+1])))}
expected
} else {
Thresh+Lambda*Theta
}
} |
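## Numerical illustration with invented loadings/intercepts: for a linear item,
## item_dmacs() integrates the squared gap between the focal and reference
## expected item scores over the focal latent distribution, while
## delta_mean_item() integrates the signed gap.
if (FALSE) {
  item_dmacs(LambdaR = 0.8, ThreshR = 0, LambdaF = 1.0, ThreshF = 0.2,
             MeanF = 0, VarF = 1, SD = 1)
  delta_mean_item(LambdaR = 0.8, ThreshR = 0, LambdaF = 1.0, ThreshF = 0.2,
                  MeanF = 0, VarF = 1)  # ~0.2 for this linear item
}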
Likelihoodfun <-
function(p1m, p2f, p1f, n1m, n0m, n2f, n1f, n0f)
{
if (n1m != 0 & n0m != 0){
LH <- log(p1m) * n1m + log(1 - p1m) * n0m + log(p2f) * n2f + log(p1f) * n1f + log(1 - p2f - p1f) * n0f
return(LH)
}
else{
LH <- log(p2f) * n2f + log(p1f) * n1f + log(1 - p2f - p1f) * n0f
return(LH)
}
} |
"epi.mh" <- function(ev.trt, n.trt, ev.ctrl, n.ctrl, names, method = "odds.ratio", alternative = c("two.sided", "less", "greater"), conf.level = 0.95)
{
k <- length(names)
a.i <- ev.trt
b.i <- n.trt - ev.trt
c.i <- ev.ctrl
d.i <- n.ctrl - ev.ctrl
N. <- 1 - ((1 - conf.level) / 2)
z <- qnorm(N., mean = 0, sd = 1)
for(i in 1:k){
if(a.i[i] < 1 | b.i[i] < 1 | c.i[i] < 1 | d.i[i] < 1){
a.i[i] <- a.i[i] + 0.5; b.i[i] <- b.i[i] + 0.5; c.i[i] <- c.i[i] + 0.5; d.i[i] <- d.i[i] + 0.5
}
}
n.1i <- a.i + b.i
n.2i <- c.i + d.i
N.i <- a.i + b.i + c.i + d.i
R <- sum((a.i * d.i) / N.i)
S <- sum((b.i * c.i) / N.i)
E <- sum(((a.i + d.i) * a.i * d.i) / N.i^2)
F. <- sum(((a.i + d.i) * b.i * c.i) / N.i^2)
G <- sum(((b.i + c.i) * a.i * d.i) / N.i^2)
H <- sum(((b.i + c.i) * b.i * c.i) / N.i^2)
P <- sum(((n.1i * n.2i * (a.i + c.i)) - (a.i * c.i * N.i)) / N.i^2)
R. <- sum((a.i * n.2i) / N.i)
S. <- sum((c.i * n.1i) / N.i)
if(method == "odds.ratio"){
OR.i <- (a.i * d.i) / (b.i * c.i)
lnOR.i <- log(OR.i)
SE.lnOR.i <- sqrt(1/a.i + 1/b.i + 1/c.i + 1/d.i)
SE.OR.i <- exp(SE.lnOR.i)
lower.lnOR.i <- lnOR.i - (z * SE.lnOR.i)
upper.lnOR.i <- lnOR.i + (z * SE.lnOR.i)
lower.OR.i <- exp(lower.lnOR.i)
upper.OR.i <- exp(upper.lnOR.i)
w.i <- (b.i * c.i) / N.i
w.iv.i <- 1 / (SE.lnOR.i)^2
OR.mh <- sum(w.i * OR.i) / sum(w.i)
        lnOR.mh <- log(OR.mh)
G <- a.i * d.i / N.i
H <- b.i * c.i / N.i
P <- (a.i + d.i) / N.i
Q <- (b.i + c.i) / N.i
GQ.HP <- G * Q + H * P
sumG <- sum(G)
sumH <- sum(H)
sumGP <- sum(G * P)
        sumGH <- sumG * sumH
sumHQ <- sum(H * Q)
sumGQ <- sum(G * Q)
sumGQ.HP <- sum(GQ.HP)
var.lnOR.mh <- sumGP / (2 * sumG^2) + sumGQ.HP/(2 * sumGH) + sumHQ/(2 * sumH^2)
SE.lnOR.mh <- sqrt(var.lnOR.mh)
SE.OR.mh <- exp(SE.lnOR.mh)
lower.OR.mh <- exp(lnOR.mh - z * SE.lnOR.mh)
upper.OR.mh <- exp(lnOR.mh + z * SE.lnOR.mh)
Q <- sum(w.iv.i * (lnOR.i - lnOR.mh)^2)
df <- k - 1
p.heterogeneity <- 1 - pchisq(Q, df)
Hsq <- Q / (k - 1)
lnHsq <- log(Hsq)
if(Q > k) {
           lnHsq.se <- 0.5 * (log(Q) - log(k - 1)) / (sqrt(2 * Q) - sqrt(2 * k - 3))
}
if(Q <= k) {
lnHsq.se <- sqrt((1/(2 * (k - 2))) * (1 - (1 / (3 * (k - 2)^2))))
}
lnHsq.l <- lnHsq - (z * lnHsq.se)
lnHsq.u <- lnHsq + (z * lnHsq.se)
Hsq.l <- exp(lnHsq.l)
Hsq.u <- exp(lnHsq.u)
Isq <- ((Hsq - 1) / Hsq) * 100
Isq.l <- ((Hsq.l - 1) / Hsq.l) * 100
Isq.u <- ((Hsq.u - 1) / Hsq.u) * 100
effect.z <- lnOR.mh / SE.lnOR.mh
alternative <- match.arg(alternative)
p.effect <- switch(alternative, two.sided = 2 * pnorm(abs(effect.z), lower.tail = FALSE), less = pnorm(effect.z), greater = pnorm(effect.z, lower.tail = FALSE))
OR <- data.frame(OR.i, lower.OR.i, upper.OR.i)
names(OR) <- c("est", "lower", "upper")
OR.summary <- data.frame(OR.mh, lower.OR.mh, upper.OR.mh)
names(OR.summary) <- c("est", "lower", "upper")
weights <- data.frame(w.i, w.iv.i)
names(weights) <- c("raw", "inv.var")
Hsq <- data.frame(Hsq, Hsq.l, Hsq.u)
names(Hsq) <- c("est", "lower", "upper")
Isq <- data.frame(Isq, Isq.l, Isq.u)
names(Isq) <- c("est", "lower", "upper")
rval <- list(OR = OR, OR.summary = OR.summary, weights = weights,
heterogeneity = c(Q = Q, df = df, p.value = p.heterogeneity),
Hsq = Hsq,
Isq = Isq,
effect = c(z = effect.z, p.value = p.effect))
}
else
if(method == "risk.ratio"){
RR.i <- (a.i / n.1i) / (c.i / n.2i)
lnRR.i <- log(RR.i)
SE.lnRR.i <- sqrt(1/a.i + 1/c.i - 1/n.1i - 1/n.2i)
SE.RR.i <- exp(SE.lnRR.i)
lower.lnRR.i <- lnRR.i - (z * SE.lnRR.i)
upper.lnRR.i <- lnRR.i + (z * SE.lnRR.i)
lower.RR.i <- exp(lower.lnRR.i)
upper.RR.i <- exp(upper.lnRR.i)
w.i <- (c.i * n.1i) / N.i
w.iv.i <- 1 / (SE.lnRR.i)^2
RR.mh <- sum(w.i * RR.i) / sum(w.i)
lnRR.mh <- log(RR.mh)
SE.lnRR.mh <- sqrt(P / (R. * S.))
SE.RR.mh <- exp(SE.lnRR.mh)
lower.lnRR.mh <- log(RR.mh) - (z * SE.lnRR.mh)
upper.lnRR.mh <- log(RR.mh) + (z * SE.lnRR.mh)
lower.RR.mh <- exp(lower.lnRR.mh)
upper.RR.mh <- exp(upper.lnRR.mh)
Q <- sum(w.iv.i * (lnRR.i - lnRR.mh)^2)
df <- k - 1
p.heterogeneity <- 1 - pchisq(Q, df)
Hsq <- Q / (k - 1)
lnHsq <- log(Hsq)
if(Q > k) {
           lnHsq.se <- 0.5 * (log(Q) - log(k - 1)) / (sqrt(2 * Q) - sqrt(2 * k - 3))
}
if(Q <= k) {
lnHsq.se <- sqrt((1/(2 * (k - 2))) * (1 - (1 / (3 * (k - 2)^2))))
}
lnHsq.l <- lnHsq - (z * lnHsq.se)
lnHsq.u <- lnHsq + (z * lnHsq.se)
Hsq.l <- exp(lnHsq.l)
Hsq.u <- exp(lnHsq.u)
Isq <- ((Hsq - 1) / Hsq) * 100
Isq.l <- ((Hsq.l - 1) / Hsq.l) * 100
Isq.u <- ((Hsq.u - 1) / Hsq.u) * 100
effect.z <- log(RR.mh) / SE.lnRR.mh
alternative <- match.arg(alternative)
p.effect <- switch(alternative, two.sided = 2 * pnorm(abs(effect.z), lower.tail = FALSE), less = pnorm(effect.z), greater = pnorm(effect.z, lower.tail = FALSE))
RR <- data.frame(RR.i, lower.RR.i, upper.RR.i)
names(RR) <- c("est", "lower", "upper")
RR.summary <- data.frame(RR.mh, lower.RR.mh, upper.RR.mh)
names(RR.summary) <- c("est", "lower", "upper")
weights <- data.frame(w.i, w.iv.i)
names(weights) <- c("raw", "inv.var")
Hsq <- data.frame(Hsq, Hsq.l, Hsq.u)
names(Hsq) <- c("est", "lower", "upper")
Isq <- data.frame(Isq, Isq.l, Isq.u)
names(Isq) <- c("est", "lower", "upper")
rval <- list(RR = RR, RR.summary = RR.summary, weights = weights,
heterogeneity = c(Q = Q, df = df, p.value = p.heterogeneity),
Hsq = Hsq,
Isq = Isq,
effect = c(z = effect.z, p.value = p.effect))
}
return(rval)
} |
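## Worked call on fabricated trial counts (illustration only): a Mantel-Haenszel
## pooled odds ratio for three small trials; the returned list carries per-trial
## and pooled estimates plus the Q, H^2 and I^2 heterogeneity summaries.
if (FALSE) {
  epi.mh(ev.trt = c(12, 8, 20), n.trt = c(50, 40, 100),
         ev.ctrl = c(18, 11, 31), n.ctrl = c(50, 40, 100),
         names = c("trial1", "trial2", "trial3"),
         method = "odds.ratio", conf.level = 0.95)
}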
greySet <- function(xlim, ylim, xtick = NA, ytick = NA, nx = 1, ny = 1,
xaxs = "i", yaxs = "i", xarg = list(tick.ratio = 0.5),
yarg = list(tick.ratio = 0.5, las = 1), v = T, inverse = F,
abbr = "", skip = 0, targ = list(col = "white", lwd = 2),
rarg = list(border = NA, col = "grey85"))
{
opar <- par("xaxs","yaxs")
on.exit(do.call(par,opar))
par(xaxs = xaxs, yaxs = yaxs)
plot.new()
plot.window(xlim,ylim)
usr <- par("usr")
if(is.na(xtick)){
xpar <- par("xaxp")
xtick <- abs(xpar[2] - xpar[1])/xpar[3]
}
xra <- usr[c(1,2)]
xra <- encase(xra[1], xra[2], xtick)
if(xlim[1] < xlim[2]){
xt <- seq(from = xra[1], to = xra[2], by = xtick)
} else if (xlim[1] > xlim[2]) {
xt <- seq(from = xra[1], to = xra[2], by = -xtick)
} else {stop("The two first elements of xlim must be different numbers")}
if(is.na(ytick)){
ypar <- par("yaxp")
ytick <- abs(ypar[2] - ypar[1])/ypar[3]
}
yra <- usr[c(3,4)]
yra <- encase(yra[1], yra[2], ytick)
if(ylim[1] < ylim[2]){
yt <- seq(from = yra[1], to = yra[2], by = ytick)
} else if (ylim[1] > ylim[2]) {
yt <- seq(from = yra[1] ,to = yra[2],by = -ytick)
} else {stop("The two first elements of ylim must be different numbers")}
if(v){
xy <- xt
cor <- xtick
dt <- yt
} else {
xy <- yt
cor <- ytick
dt <- xt
}
xy <- c((min(xy) - cor), sort(xy), (max(xy) + cor))
if(inverse){
xy <- xy + cor
}
xy1 <- every_nth(xy, 2, empty = F)
xy2 <- every_nth(xy, 2, empty = F, inverse = T)
cx <- max(length(xy1),length(xy2))
usr <- par("usr")
tdt <- every_nth(dt, skip + 1, empty = FALSE, inverse = TRUE)
ltdt <- length(tdt)
ldt <- (c(tdt[-1], 2*tdt[ltdt] - tdt[ltdt-1]) + tdt)/2
if(v){
for(i in seq_len(cx))
{
pos <- c(xy1[i], xy2[i])
if(!is.null(rarg)){
lr <- merge_list(list(xleft = pos[1], ybottom = usr[3],
xright = pos[2], ytop= usr[4]),
rarg, list(border = NA, col = "grey85"))
do.call(rect, lr)
}
if(!is.null(targ)){
lt <- merge_list(list(x = mean(pos), y = tdt, labels = abbr),
targ, list(col = "white", lwd = 2))
ll <- merge_list(list(x = pos[1], y = ldt, labels = pos[1]),
targ, list(col = "white", lwd = 2, srt = 90,
adj = c(NA,-0.5)))
do.call(text, lt)
do.call(text, ll)
}
}
} else {
for(i in seq_len(cx))
{
pos <- c(xy1[i], xy2[i])
if(!is.null(rarg)){
lr <- merge_list(list(xleft = usr[1], ybottom = pos[1],
xright = usr[2], ytop= pos[2]),
rarg, list(border = NA, col = "grey85"))
do.call(rect, lr)
}
if(!is.null(targ)){
lt <- merge_list(list(x = tdt, y = mean(pos), labels = abbr),
targ, list(col = "white", lwd = 2))
ll <- merge_list(list(x = ldt, y = pos[1], labels = pos[1]),
targ, list(col = "white", lwd = 2, adj = c(NA,1)))
do.call(text, lt)
do.call(text, ll)
}
}
}
if(!is.null(xarg)){
lx <- merge_list(xarg, list(side = 1, n = nx, at.maj = xt),
list(tick.ratio = 0.5))
do.call(minorAxis, lx)
}
if(!is.null(yarg)){
ly <- merge_list(yarg, list(side = 2, n = ny, at.maj = yt),
list(tick.ratio = 0.5, las = 1))
do.call(minorAxis, ly)
}
} |
ebayes_EM<-function(x,z,y,EMB.tau,EMB.omega)
{
tau<-EMB.tau;omega<-EMB.omega
n<-nrow(z);k<-ncol(z)
if(abs(min(eigen(crossprod(x,x))$values))<1e-6)
b<-solve(crossprod(x,x)+diag(ncol(x))*0.01)%*%crossprod(x,y)
else
b<-solve(crossprod(x,x))%*%crossprod(x,y)
v0<-as.numeric(crossprod((y-x%*%b),(y-x%*%b))/n)
u<-matrix(rep(0,k),k,1)
v<-matrix(rep(0,k),k,1)
s<-matrix(rep(0,k),k,1)
for(i in 1:k)
{
zz<-z[,i]
s[i]<-((crossprod(zz,zz)+1e-100)^(-1))*v0
u[i]<-s[i]*crossprod(zz,(y-x%*%b))/v0
v[i]<-u[i]^2+s[i]
}
vv<-matrix(rep(0,n*n),n,n);
for(i in 1:k)
{
zz<-z[,i]
vv<-vv+tcrossprod(zz,zz)*v[i]
}
vv<-vv+diag(n)*v0
iter<-0;err<-1000;iter_max<-500;err_max<-1e-8
while((iter<iter_max)&&(err>err_max))
{
iter<-iter+1
v01<-v0
v1<-v
b1<-b
vi<-solve(vv)
xtv<-crossprod(x,vi)
if(ncol(x)==1)
{
b<-((xtv%*%x)^(-1))*(xtv%*%y)
}else
{
if(abs(min(eigen(xtv%*%x)$values))<1e-6){
b<-solve((xtv%*%x)+diag(ncol(x))*0.01)%*%(xtv%*%y)
}
else{
b<-solve(xtv%*%x)%*%(xtv%*%y)
}
}
r<-y-x%*%b
ss<-matrix(rep(0,n),n,1)
for(i in 1:k)
{
zz<-z[,i]
zztvi<-crossprod(zz,vi)
u[i]<-v[i]*zztvi%*%r
s[i]<-v[i]*(1-zztvi%*%zz*v[i])
v[i]<-(u[i]^2+s[i]+omega)/(tau+3)
ss<-ss+zz*u[i]
}
v0<-as.numeric(crossprod(r,(r-ss))/n)
vv<-matrix(rep(0,n*n),n,n)
for(i in 1:k)
{
zz<-z[,i]
vv<-vv+tcrossprod(zz,zz)*v[i]
}
vv<-vv+diag(n)*v0
err<-(crossprod((b1-b),(b1-b))+(v01-v0)^2+crossprod((v1-v),(v1-v)))/(2+k)
beta<-t(b)
sigma2<-v0
}
return (u)
}
multinormal<-function(y,mean,sigma)
{
   pdf_value<-(1/sqrt(2*pi*sigma))*exp(-(y-mean)*(y-mean)/(2*sigma));
return (pdf_value)
}
likelihood<-function(xxn,xxx,yn,bbo)
{
nq<-ncol(xxx)
ns<-nrow(yn)
at1<-0
ww1<-as.matrix(which(abs(bbo)>1e-5))
at1<-dim(ww1)[1]
lod<-matrix(rep(0,nq),nq,1)
if(at1>0.5)
ad<-cbind(xxn,xxx[,ww1])
else
ad<-xxn
if(abs(min(eigen(crossprod(ad,ad))$values))<1e-6)
bb<-solve(crossprod(ad,ad)+diag(ncol(ad))*0.01)%*%crossprod(ad,yn)
else
bb<-solve(crossprod(ad,ad))%*%crossprod(ad,yn)
vv1<-as.numeric(crossprod((yn-ad%*%bb),(yn-ad%*%bb))/ns);
ll1<-sum(log(abs(multinormal(yn,ad%*%bb,vv1))))
sub<-1:ncol(ad);
if(at1>0.5)
{
for(i in 1:at1)
{
ij<-which(sub!=sub[i+ncol(xxn)])
ad1<-ad[,ij]
if(abs(min(eigen(crossprod(ad1,ad1))$values))<1e-6)
bb1<-solve(crossprod(ad1,ad1)+diag(ncol(ad1))*0.01)%*%crossprod(ad1,yn)
else
bb1<-solve(crossprod(ad1,ad1))%*%crossprod(ad1,yn)
vv0<-as.numeric(crossprod((yn-ad1%*%bb1),(yn-ad1%*%bb1))/ns);
ll0<-sum(log(abs(multinormal(yn,ad1%*%bb1,vv0))))
lod[ww1[i]]<--2.0*(ll0-ll1)/(2.0*log(10))
}
}
return (lod)
}
PCG <- function(G,b,m.marker,sigma.k2,sigma.e2,tol,miter){
tau <- c(sigma.k2,sigma.e2)
k <- 0
x <- matrix(0,length(b),1)
r <- b
M <- 1/(tau[1]*(1/m.marker)*rowSums(G^2) + tau[2])
p <- M*r
min.tol <- sqrt(sum(p^2))
while((min.tol > tol) && (k < miter)){
Ap <- tau[1]*(1/m.marker)*tcrossprod(G,crossprod(p,G)) + tau[2]*p
alpha <- diag(crossprod(r,p))/diag(crossprod(p,Ap))
x1 <- x + p*alpha
r1 <- r - Ap*alpha
min.tol <- sqrt(colSums(r1^2))
if(min.tol < tol){
x <- x1
r <- r1
break
}
p1 <- M*r1
beta <- diag(crossprod(p1,r1))/diag(crossprod(p,r))
p1 <- p1 + p*beta
x <- x1
r <- r1
p <- p1
k <- k + 1
}
return (x)
}
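## Cross-check on random toy matrices (my own): PCG() solves
## (sigma.k2/m * G G' + sigma.e2 * I) x = b with Jacobi-preconditioned conjugate
## gradients, so on a small problem it should agree with a dense solve().
if (FALSE) {
  set.seed(7)
  n <- 50; m <- 200
  G <- matrix(rnorm(n * m), n, m)
  b <- matrix(rnorm(n), n, 1)
  V <- 0.6 * tcrossprod(G) / m + 0.4 * diag(n)
  max(abs(PCG(G, b, m.marker = m, sigma.k2 = 0.6, sigma.e2 = 0.4,
              tol = 1e-8, miter = 500) - solve(V, b)))  # tiny, on the order of tol
}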
ScoreEB <- function(genofile, phenofile, popfile = NULL, trait.num = 1, EMB.tau = 0, EMB.omega = 0, B.Moment = 20, tol.pcg = 1e-4, iter.pcg = 100, bin = 100, lod.cutoff = 3.0, seed.num = 10000, dir_out)
{
t.start <- proc.time()
geno <- fread(genofile, header = TRUE)
pheno <- fread(phenofile, header = TRUE)
pheno <- as.matrix(pheno[,-1])
n.geno <- dim(geno)[2]
X <- t(geno[,5:n.geno])
X.scale <- scale(X, center = TRUE, scale = TRUE)
n.sample <- dim(X)[1]
m.marker <- dim(X)[2]
F.fix <- as.matrix(rep(1, n.sample))
if(is.null(popfile) == FALSE){
popstr <- fread(popfile, header = TRUE)
popstr <- popstr[-1,]
popstr <- popstr[,-1]
F.fix <- cbind(F.fix, popstr)
}
result.total <- NULL
for(jj in 1:trait.num){
result <- NULL
result.final <- NULL
Y <- as.matrix(pheno[,jj])
Y.center <- scale(Y, center = TRUE, scale = FALSE)
set.seed(seed.num)
B <- B.Moment
Zb <- matrix(0,n.sample,B)
for(i in 1:B)
{
Zb[,i] <- rnorm(n.sample,0,1)
}
XX.Zb <- X.scale%*%(t(X.scale)%*%Zb)
XX.Fix.Zb <- X.scale%*%(t(X.scale)%*%(F.fix%*%(solve(crossprod(F.fix, F.fix))%*%crossprod(F.fix, Zb))))
Minus.Tmp <- XX.Zb - XX.Fix.Zb
Zb.Minus <- matrix(0,B,1)
Minus.Minus <- matrix(0,B,1)
for(i in 1:B){
Zb.Minus[i] <- crossprod(as.matrix(Zb[,i]),as.matrix(Minus.Tmp[,i]))
Minus.Minus[i] <- crossprod(as.matrix(Minus.Tmp[,i]),as.matrix(Minus.Tmp[,i]))
}
Trace.KV <- (1/B)*(1/m.marker)*sum(Zb.Minus)
Trace.KVKV <- (1/B)*(1/m.marker)^2*sum(Minus.Minus)
N.C <- n.sample - dim(F.fix)[2]
Coef.Var <- matrix(c(Trace.KVKV, Trace.KV, Trace.KV, N.C), 2, 2, byrow = TRUE)
YY <- crossprod(Y.center, Y.center)
Y.Fix.Y <- t(Y.center)%*%(F.fix%*%(solve(crossprod(F.fix,F.fix))%*%crossprod(F.fix, Y.center)))
YVY <- as.numeric(YY) - as.numeric(Y.Fix.Y)
XY <- crossprod(X.scale, Y.center)
X.Fix.Y <- t(X.scale)%*%(F.fix%*%(solve(crossprod(F.fix,F.fix))%*%crossprod(F.fix, Y.center)))
Minus.Y <- XY - X.Fix.Y
Y.VKV.Y <- (1/m.marker)*as.numeric(crossprod(Minus.Y, Minus.Y))
Predict.Vec <- as.matrix(c(Y.VKV.Y, YVY))
var.com <- solve(Coef.Var)%*%Predict.Vec
sigma.k2 <- abs(var.com[1])
sigma.e2 <- abs(var.com[2])
if(n.sample <= 1000){
M0 <- sigma.k2*tcrossprod(X, X)/m.marker + sigma.e2*diag(n.sample)
M0Y <- solve(M0)%*%Y
M0F <- solve(M0)%*%F.fix
}else{
M0Y <- PCG(X,Y,m.marker,sigma.k2,sigma.e2,tol.pcg,iter.pcg)
M0F <- PCG(X,F.fix,m.marker,sigma.k2,sigma.e2,tol.pcg,iter.pcg)
}
FtM0F <- solve(crossprod(F.fix, M0F))
FtM0Y <- crossprod(F.fix, M0Y)
right.part <- M0F%*%FtM0F%*%FtM0Y
PY <- M0Y - right.part
tmp <- crossprod(X, PY)
t.score <- 0.5*tmp^2
result <- cbind(as.matrix(jj, m.marker), as.matrix(1:m.marker), geno[,2:3], as.matrix(t.score))
if((m.marker%%bin)==0){
group <- m.marker/bin
}else{
group <- floor(m.marker/bin) + 1
}
find.bin.max <- NULL
for(i in 1:(group-1)){
tmp <- (1+(i-1)*bin):(i*bin)
max.score <- result[(i-1)*bin + max(which(result[tmp, 5] == max(result[tmp, 5]))),]
find.bin.max <- rbind(find.bin.max, matrix(max.score,1,))
}
tmp.last <- (1+(group-1)*bin):m.marker
max.score.last <- result[(group-1)*bin + max(which(result[tmp.last, 5] == max(result[tmp.last, 5]))),]
find.bin.max <- rbind(find.bin.max, matrix(max.score.last,1,))
find.bin.max <- find.bin.max[order(as.numeric(find.bin.max[,5]), decreasing = TRUE),]
nrow.find.bin.max <- dim(find.bin.max)[1]
nrow.select <- min(n.sample, nrow.find.bin.max)
find.bin.max <- find.bin.max[1:nrow.select,]
geno.bayes <- X[,as.numeric(find.bin.max[,2])]
b.bayes <- ebayes_EM(F.fix,geno.bayes,Y,EMB.tau,EMB.omega)
lod <- likelihood(F.fix,geno.bayes,Y,b.bayes)
result.final <- cbind(find.bin.max, as.matrix(b.bayes), as.matrix(lod))
select.final <- which(result.final[,7]>=lod.cutoff)
if(length(select.final)==0){
print(paste0("There is no SNP identified in Trait", jj, "!"))
next
}else if(length(select.final)==1){
result.final <- matrix(unlist(result.final[select.final,]),1,7)
}else{
result.final <- result.final[select.final,]
}
p.value <- as.matrix(pchisq(as.numeric(result.final[,7])*4.605,1,lower.tail = FALSE))
result.final <- cbind(result.final, p.value)
result.total <- rbind(result.total,result.final)
}
colnames(result.total) <- c("Trait", "Id", "Chr", "Pos", "Score", "Beta", "Lod", "Pvalue")
t.end <- proc.time()
t.use <- t.end - t.start
time.use <- as.matrix(c(t.use[[1]], t.use[[2]], t.use[[3]]))
rownames(time.use) <- c("User", "System", "Elapse")
write.table(time.use, paste0(dir_out, "/", "ScoreEB.time.csv"), sep = ",", quote = FALSE, row.names = TRUE, col.names = FALSE)
write.table(result.total, paste0(dir_out, "/", "ScoreEB.Result.csv"), sep = ",", quote = FALSE, row.names = FALSE, col.names = TRUE)
return (result.total)
} |
h <- function(R, k, r, sigma) 1/sigma*g(R/sigma, k, r/sigma)
g <- function(x, k, lambda) 2*x*dchisq(x^2, k, lambda^2)
inthr <- function(R, k, sigma) intglam(R/sigma, k)
intglam <- function(x, k) {
y <- x^2/2
res <- NA
tryCatch({res <- sqrt(pi)*exp(-y)*y^((k-1)/2)/gamma(k/2)*
myKummerM(1/2, k/2, y)},
error = function(ex) {
cat('Did not manage to compute kummerM,',
'numerical integration used instead.\n')
}, finally = {if(is.na(res)) res <- numintglam(x, k)})
return(res)
}
intrhr <- function(R, k, sigma) {
x <- R/sigma
y <- x^2/2
res <- NA
tryCatch({res <- sigma*sqrt(2)*y^((k-1)/2)*exp(-y)/gamma(k/2)*
myKummerM(1, k/2, y)},
error = function(ex) {
cat('Did not manage to compute kummerM,',
'numerical integration used instead.\n')
}, finally = {if(is.na(res)) res <- numintrhr(R, k, sigma)})
return(res)
}
intr2hr <- function(R, k, sigma) {
x <- R/sigma
y <- x^2/2
res <- NA
tryCatch({res <- sigma^2*sqrt(pi)*y^((k-1)/2)*exp(-y)/gamma(k/2)*
myKummerM(3/2, k/2, y)},
error = function(ex) {
cat('Did not manage to compute kummerM,',
'numerical integration used instead.\n')
}, finally = {if(is.na(res)) res <- numintr2hr(R, k, sigma)})
return(res)
}
upintlim <- function(x, k) sqrt(qchisq(1 - 1e-5, df = k, ncp = x^2))
numintglam <- function(x, k) {
integrate(function(lam) {
g(x, k, lam)
}, 0, upintlim(x, k))$value
}
numintrhr <- function(R, k, sigma) {
integrate(function(r) {
r*h(R, k, r, sigma)
}, 0, upintlim(R/sigma, k))$value
}
numintr2hr <- function(R, k, sigma) {
integrate(function(r) {
r^2*h(R, k, r, sigma)
}, 0, upintlim(R/sigma, k))$value
} |
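
# Illustrative example (a sketch, not from the original source): the numerical
# fallbacks need only base R; intglam() additionally requires the internal
# myKummerM() helper, which is not defined in this file.
numintglam(2, 3)        # integral of g(2, 3, lambda) over lambda
numintrhr(4, 3, 2)      # integral of r * h(R, k, r, sigma) over r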
zexact <- function(dat, conf.level){
alpha <- 1 - conf.level
alpha2 <- 0.5 * alpha
a <- dat[,1]
n <- dat[,2]
p <- a / n
a1 <- a == 0
a2 <- a == n
lb <- ub <- a
lb[a1] <- 1
ub[a2] <- n[a2] - 1
low <- 1 - qbeta(1 - alpha2, n + 1 - a, lb)
upp <- 1 - qbeta(alpha2, n - ub, a + 1)
if (any(a1))
low[a1] <- rep(0, sum(a1))
if (any(a2))
upp[a2] <- rep(1, sum(a2))
rval <- data.frame(est = p, lower = low, upper = upp)
rval
} |
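
# Illustrative example (a sketch, not from the original source): exact
# Clopper-Pearson-style limits for a successes out of n trials, two-column input.
dat <- cbind(a = c(0, 5, 10), n = c(10, 10, 10))
zexact(dat, conf.level = 0.95)   # data.frame with columns est, lower, upper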
context("load secuTrial validation overview")
skip_on_cran()
val_ovv_location <- system.file("extdata", "sT_exports", "BMD", "bmd_validation_overview.xlsx", package = "secuTrialR")
val_ovv <- read_validation_overview(data_dir = val_ovv_location)
test_that("Validation Overview loaded correctly.", {
expect_equal(dim(val_ovv), c(5, 12))
expect_equal(unique(val_ovv$Column), c("bmd", "grouping", "age"))
})
not_xlsx <- "a_file.xls"
test_that("xls exception correctly triggered.", {
expect_error(read_validation_overview(data_dir = not_xlsx))
}) |
new_keys <- function(x = list()) {
if (!is.list(x)) {
x <- as.list(as.character(x))
}
new_list_of(x, character(), class = "dm_keys")
}
vec_ptype2.dm_keys.dm_keys <- function(x, y, ...) new_keys()
vec_cast.dm_keys.dm_keys <- function(x, to, ...) x
vec_ptype_abbr.dm_keys <- function(x) {
"keys"
}
vec_proxy_compare.dm_keys <- function(x, ...) {
x_raw <- vec_data(x)
n <- max(vapply(x_raw, length, integer(1)))
full <- lapply(x_raw, function(x) c(x, rep("", n - length(x))))
as.data.frame(do.call(rbind, full))
}
pillar_shaft.dm_keys <- function(x) {
x <- map_chr(x, commas, max_commas = 3)
pillar::pillar_shaft(x)
}
format.dm_keys <- function(x, ...) {
map_chr(x, commas, max_commas = Inf)
}
get_key_cols <- function(x) {
stopifnot(length(x) == 1)
x[[1]]
} |
rep_index <- function(index,num){
rep(index, num)
} |
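
# Illustrative example: rep_index() simply repeats an index value.
rep_index(3L, 5)   # 3 3 3 3 3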
gaussian.CARanova <- function(formula, data=NULL, W, burnin, n.sample, thin=1, prior.mean.beta=NULL, prior.var.beta=NULL, prior.nu2=NULL, prior.tau2=NULL, rho.S=NULL, rho.T=NULL, verbose=TRUE)
{
a <- common.verbose(verbose)
frame.results <- common.frame(formula, data, "gaussian")
N.all <- frame.results$n
p <- frame.results$p
X <- frame.results$X
X.standardised <- frame.results$X.standardised
X.sd <- frame.results$X.sd
X.mean <- frame.results$X.mean
X.indicator <- frame.results$X.indicator
offset <- frame.results$offset
Y <- frame.results$Y
which.miss <- frame.results$which.miss
n.miss <- frame.results$n.miss
Y.DA <- Y
if(is.null(rho.S))
{
rho <- runif(1)
fix.rho.S <- FALSE
}else
{
rho <- rho.S
fix.rho.S <- TRUE
}
if(!is.numeric(rho)) stop("rho.S is fixed but is not numeric.", call.=FALSE)
if(rho<0 ) stop("rho.S is outside the range [0, 1].", call.=FALSE)
if(rho>1 ) stop("rho.S is outside the range [0, 1].", call.=FALSE)
if(is.null(rho.T))
{
lambda <- runif(1)
fix.rho.T <- FALSE
}else
{
lambda <- rho.T
fix.rho.T <- TRUE
}
if(!is.numeric(lambda)) stop("rho.T is fixed but is not numeric.", call.=FALSE)
if(lambda<0 ) stop("rho.T is outside the range [0, 1].", call.=FALSE)
if(lambda>1 ) stop("rho.T is outside the range [0, 1].", call.=FALSE)
W.quants <- common.Wcheckformat.leroux(W)
K <- W.quants$n
N <- N.all / K
W <- W.quants$W
W.triplet <- W.quants$W.triplet
W.n.triplet <- W.quants$n.triplet
W.triplet.sum <- W.quants$W.triplet.sum
n.neighbours <- W.quants$n.neighbours
W.begfin <- W.quants$W.begfin
if(is.null(prior.mean.beta)) prior.mean.beta <- rep(0, p)
if(is.null(prior.var.beta)) prior.var.beta <- rep(100000, p)
if(is.null(prior.tau2)) prior.tau2 <- c(1, 0.01)
if(is.null(prior.nu2)) prior.nu2 <- c(1, 0.01)
prior.beta.check(prior.mean.beta, prior.var.beta, p)
prior.var.check(prior.tau2)
prior.var.check(prior.nu2)
common.burnin.nsample.thin.check(burnin, n.sample, thin)
mod.glm <- glm(Y~X.standardised-1, offset=offset)
beta.mean <- mod.glm$coefficients
beta.sd <- sqrt(diag(summary(mod.glm)$cov.scaled))
beta <- rnorm(n=length(beta.mean), mean=beta.mean, sd=beta.sd)
res.temp <- Y - X.standardised %*% beta - offset
res.sd <- sd(res.temp, na.rm=TRUE)/5
phi <- rnorm(n=K, mean=0, sd = res.sd)
delta <- rnorm(n=N, mean=0, sd = res.sd)
tau2.phi <- var(phi)/10
tau2.delta <- var(delta)/10
nu2 <- runif(1, 0, res.sd)
offset.mat <- matrix(offset, nrow=K, ncol=N, byrow=FALSE)
regression.mat <- matrix(X.standardised %*% beta, nrow=K, ncol=N, byrow=FALSE)
phi.mat <- matrix(rep(phi, N), byrow=F, nrow=K)
delta.mat <- matrix(rep(delta, K), byrow=T, nrow=K)
fitted <- as.numeric(offset.mat + regression.mat + phi.mat + delta.mat)
n.keep <- floor((n.sample - burnin)/thin)
samples.beta <- array(NA, c(n.keep, p))
samples.phi <- array(NA, c(n.keep, K))
samples.delta <- array(NA, c(n.keep, N))
samples.nu2 <- array(NA, c(n.keep, 1))
samples.tau2 <- array(NA, c(n.keep, 2))
colnames(samples.tau2) <- c("tau2.phi", "tau2.delta")
if(!fix.rho.S) samples.rho <- array(NA, c(n.keep, 1))
if(!fix.rho.T) samples.lambda <- array(NA, c(n.keep, 1))
samples.fitted <- array(NA, c(n.keep, N.all))
samples.loglike <- array(NA, c(n.keep, N.all))
if(n.miss>0) samples.Y <- array(NA, c(n.keep, n.miss))
accept <- rep(0,4)
proposal.sd.rho <- 0.02
proposal.sd.lambda <- 0.02
tau2.phi.shape <- prior.tau2[1] + K/2
tau2.delta.shape <- prior.tau2[1] + N/2
nu2.shape <- prior.nu2[1] + N*K/2
if(!fix.rho.S)
{
Wstar <- diag(apply(W,1,sum)) - W
Wstar.eigen <- eigen(Wstar)
Wstar.val <- Wstar.eigen$values
det.Q.W <- 0.5 * sum(log((rho * Wstar.val + (1-rho))))
}else
{}
D <-array(0, c(N,N))
for(i in 1:N)
{
for(j in 1:N)
{
if(abs((i-j))==1) D[i,j] <- 1
}
}
D.triplet <- c(NA, NA, NA)
for(i in 1:N)
{
for(j in 1:N)
{
if(D[i,j]>0)
{
D.triplet <- rbind(D.triplet, c(i,j, D[i,j]))
}else{}
}
}
D.triplet <- D.triplet[-1, ]
D.n.triplet <- nrow(D.triplet)
D.triplet.sum <- tapply(D.triplet[ ,3], D.triplet[ ,1], sum)
D.neighbours <- tapply(D.triplet[ ,3], D.triplet[ ,1], length)
D.begfin <- array(NA, c(N, 2))
temp <- 1
for(i in 1:N)
{
D.begfin[i, ] <- c(temp, (temp + D.neighbours[i]-1))
temp <- temp + D.neighbours[i]
}
if(!fix.rho.T)
{
Dstar <- diag(apply(D,1,sum)) - D
Dstar.eigen <- eigen(Dstar)
Dstar.val <- Dstar.eigen$values
det.Q.D <- 0.5 * sum(log((lambda * Dstar.val + (1-lambda))))
}else
{}
data.precision.beta <- t(X.standardised) %*% X.standardised
if(length(prior.var.beta)==1)
{
prior.precision.beta <- 1 / prior.var.beta
}else
{
prior.precision.beta <- solve(diag(prior.var.beta))
}
W.list<- mat2listw(W)
W.nb <- W.list$neighbours
W.islands <- n.comp.nb(W.nb)
islands <- W.islands$comp.id
n.islands <- max(W.islands$nc)
if(rho==1) tau2.phi.shape <- prior.tau2[1] + 0.5 * (K-n.islands)
if(lambda==1) tau2.delta.shape <- prior.tau2[1] + 0.5 * (N-1)
if(verbose)
{
cat("Generating", n.keep, "post burnin and thinned (if requested) samples.\n", sep = " ")
progressBar <- txtProgressBar(style = 3)
percentage.points<-round((1:100/100)*n.sample)
}else
{
percentage.points<-round((1:100/100)*n.sample)
}
for(j in 1:n.sample)
{
if(n.miss>0)
{
Y.DA[which.miss==0] <- rnorm(n=n.miss, mean=fitted[which.miss==0], sd=sqrt(nu2))
}else
{}
Y.DA.mat <- matrix(Y.DA, nrow=K, ncol=N, byrow=FALSE)
nu2.offset <- as.numeric(Y.DA.mat - offset.mat - regression.mat - phi.mat - delta.mat)
nu2.scale <- prior.nu2[2] + sum(nu2.offset^2)/2
nu2 <- 1 / rgamma(1, nu2.shape, scale=(1/nu2.scale))
fc.precision <- prior.precision.beta + data.precision.beta / nu2
fc.var <- solve(fc.precision)
beta.offset <- as.numeric(Y.DA.mat - offset.mat - phi.mat - delta.mat)
beta.offset2 <- t(X.standardised) %*% beta.offset / nu2 + prior.precision.beta %*% prior.mean.beta
fc.mean <- fc.var %*% beta.offset2
chol.var <- t(chol(fc.var))
beta <- fc.mean + chol.var %*% rnorm(p)
regression.mat <- matrix(X.standardised %*% beta, nrow=K, ncol=N, byrow=FALSE)
phi.offset <- Y.DA.mat - offset.mat - regression.mat - delta.mat
phi.offset2 <- apply(phi.offset,1, sum, na.rm=TRUE)
temp1 <- gaussiancarupdate(W.triplet, W.begfin, W.triplet.sum, K, phi, tau2.phi, nu2, phi.offset2, rho, N)
phi <- temp1
if(rho<1)
{
phi <- phi - mean(phi)
}else
{
phi[which(islands==1)] <- phi[which(islands==1)] - mean(phi[which(islands==1)])
}
phi.mat <- matrix(rep(phi, N), byrow=F, nrow=K)
delta.offset <- Y.DA.mat - offset.mat - regression.mat - phi.mat
delta.offset2 <- apply(delta.offset,2, sum, na.rm=TRUE)
temp2 <- gaussiancarupdate(D.triplet, D.begfin, D.triplet.sum, N, delta, tau2.delta, nu2, delta.offset2, lambda, K)
delta <- temp2
delta <- delta - mean(delta)
delta.mat <- matrix(rep(delta, K), byrow=T, nrow=K)
temp2.phi <- quadform(W.triplet, W.triplet.sum, W.n.triplet, K, phi, phi, rho)
tau2.phi.scale <- temp2.phi + prior.tau2[2]
tau2.phi <- 1 / rgamma(1, tau2.phi.shape, scale=(1/tau2.phi.scale))
temp2.delta <- quadform(D.triplet, D.triplet.sum, D.n.triplet, N, delta, delta, lambda)
tau2.delta.scale <- temp2.delta + prior.tau2[2]
tau2.delta <- 1 / rgamma(1, tau2.delta.shape, scale=(1/tau2.delta.scale))
if(!fix.rho.S)
{
proposal.rho <- rtruncnorm(n=1, a=0, b=1, mean=rho, sd=proposal.sd.rho)
temp3 <- quadform(W.triplet, W.triplet.sum, W.n.triplet, K, phi, phi, proposal.rho)
det.Q.proposal <- 0.5 * sum(log((proposal.rho * Wstar.val + (1-proposal.rho))))
logprob.current <- det.Q.W - temp2.phi / tau2.phi
logprob.proposal <- det.Q.proposal - temp3 / tau2.phi
hastings <- log(dtruncnorm(x=rho, a=0, b=1, mean=proposal.rho, sd=proposal.sd.rho)) - log(dtruncnorm(x=proposal.rho, a=0, b=1, mean=rho, sd=proposal.sd.rho))
prob <- exp(logprob.proposal - logprob.current + hastings)
if(prob > runif(1))
{
rho <- proposal.rho
det.Q.W <- det.Q.proposal
accept[1] <- accept[1] + 1
}else
{}
accept[2] <- accept[2] + 1
}else
{}
if(!fix.rho.T)
{
proposal.lambda <- rtruncnorm(n=1, a=0, b=1, mean=lambda, sd=proposal.sd.lambda)
temp3 <- quadform(D.triplet, D.triplet.sum, D.n.triplet, N, delta, delta, proposal.lambda)
det.Q.proposal <- 0.5 * sum(log((proposal.lambda * Dstar.val + (1-proposal.lambda))))
logprob.current <- det.Q.D - temp2.delta / tau2.delta
logprob.proposal <- det.Q.proposal - temp3 / tau2.delta
hastings <- log(dtruncnorm(x=lambda, a=0, b=1, mean=proposal.lambda, sd=proposal.sd.lambda)) - log(dtruncnorm(x=proposal.lambda, a=0, b=1, mean=lambda, sd=proposal.sd.lambda))
prob <- exp(logprob.proposal - logprob.current + hastings)
if(prob > runif(1))
{
lambda <- proposal.lambda
det.Q.D <- det.Q.proposal
accept[3] <- accept[3] + 1
}else
{}
accept[4] <- accept[4] + 1
}else
{}
fitted <- as.numeric(offset.mat + regression.mat + phi.mat + delta.mat)
loglike <- dnorm(Y, mean = fitted, sd = rep(sqrt(nu2),N.all), log=TRUE)
if(j > burnin & (j-burnin)%%thin==0)
{
ele <- (j - burnin) / thin
samples.beta[ele, ] <- beta
samples.phi[ele, ] <- phi
samples.delta[ele, ] <- delta
if(!fix.rho.S) samples.rho[ele, ] <- rho
if(!fix.rho.T) samples.lambda[ele, ] <- lambda
samples.nu2[ele, ] <- nu2
samples.fitted[ele, ] <- fitted
samples.tau2[ele, ] <- c(tau2.phi, tau2.delta)
samples.loglike[ele, ] <- loglike
if(n.miss>0) samples.Y[ele, ] <- Y.DA[which.miss==0]
}else
{}
if(ceiling(j/100)==floor(j/100) & j < burnin)
{
if(!fix.rho.S) proposal.sd.rho <- common.accceptrates2(accept[1:2], proposal.sd.rho, 40, 50, 0.5)
if(!fix.rho.T) proposal.sd.lambda <- common.accceptrates2(accept[3:4], proposal.sd.lambda, 40, 50, 0.5)
accept <- rep(0,4)
}else
{}
if(j %in% percentage.points & verbose)
{
setTxtProgressBar(progressBar, j/n.sample)
}
}
if(verbose)
{
cat("\nSummarising results.")
close(progressBar)
}else
{}
if(!fix.rho.S)
{
accept.rho <- 100 * accept[1] / accept[2]
}else
{
accept.rho <- NA
}
if(!fix.rho.T)
{
accept.lambda <- 100 * accept[3] / accept[4]
}else
{
accept.lambda <- NA
}
accept.final <- c(rep(100,3), accept.rho, accept.lambda)
names(accept.final) <- c("beta", "phi", "delta", "rho.S", "rho.T")
mean.phi <- apply(samples.phi, 2, mean)
mean.delta <- apply(samples.delta, 2, mean)
mean.phi.mat <- matrix(rep(mean.phi, N), byrow=F, nrow=K)
mean.delta.mat <- matrix(rep(mean.delta, K), byrow=T, nrow=K)
mean.beta <- apply(samples.beta,2,mean)
regression.mat <- matrix(X.standardised %*% mean.beta, nrow=K, ncol=N, byrow=FALSE)
fitted.mean <- as.numeric(offset.mat + regression.mat + mean.phi.mat + mean.delta.mat)
nu2.mean <- mean(samples.nu2)
deviance.fitted <- -2 * sum(dnorm(Y, mean = fitted.mean, sd = rep(sqrt(nu2.mean),N.all), log = TRUE), na.rm=TRUE)
modelfit <- common.modelfit(samples.loglike, deviance.fitted)
fitted.values <- apply(samples.fitted, 2, mean)
response.residuals <- as.numeric(Y) - fitted.values
pearson.residuals <- response.residuals /sqrt(nu2.mean)
residuals <- data.frame(response=response.residuals, pearson=pearson.residuals)
samples.beta.orig <- common.betatransform(samples.beta, X.indicator, X.mean, X.sd, p, FALSE)
samples.beta.orig <- mcmc(samples.beta.orig)
summary.beta <- t(apply(samples.beta.orig, 2, quantile, c(0.5, 0.025, 0.975)))
summary.beta <- cbind(summary.beta, rep(n.keep, p), rep(100,p), effectiveSize(samples.beta.orig), geweke.diag(samples.beta.orig)$z)
rownames(summary.beta) <- colnames(X)
colnames(summary.beta) <- c("Median", "2.5%", "97.5%", "n.sample", "% accept", "n.effective", "Geweke.diag")
summary.hyper <- array(NA, c(5, 7))
rownames(summary.hyper) <- c("tau2.S", "tau2.T", "nu2", "rho.S", "rho.T")
summary.hyper[1,1:3] <- quantile(samples.tau2[ ,1], c(0.5, 0.025, 0.975))
summary.hyper[2,1:3] <- quantile(samples.tau2[ ,2], c(0.5, 0.025, 0.975))
summary.hyper[3,1:3] <- quantile(samples.nu2, c(0.5, 0.025, 0.975))
summary.hyper[1, 4:7] <- c(n.keep, 100, effectiveSize(mcmc(samples.tau2[ ,1])), geweke.diag(mcmc(samples.tau2[ ,1]))$z)
summary.hyper[2, 4:7] <- c(n.keep, 100, effectiveSize(mcmc(samples.tau2[ ,2])), geweke.diag(mcmc(samples.tau2[ ,2]))$z)
summary.hyper[3, 4:7] <- c(n.keep, 100, effectiveSize(mcmc(samples.nu2)), geweke.diag(mcmc(samples.nu2))$z)
if(!fix.rho.S)
{
summary.hyper[4,1:3] <- quantile(samples.rho, c(0.5, 0.025, 0.975))
summary.hyper[4, 4:7] <- c(n.keep, accept.rho, effectiveSize(mcmc(samples.rho)), geweke.diag(mcmc(samples.rho))$z)
}else
{
summary.hyper[4, 1:3] <- c(rho, rho, rho)
summary.hyper[4, 4:7] <- rep(NA, 4)
}
if(!fix.rho.T)
{
summary.hyper[5, 1:3] <- quantile(samples.lambda, c(0.5, 0.025, 0.975))
summary.hyper[5, 4:7] <- c(n.keep, accept.lambda, effectiveSize(samples.lambda), geweke.diag(samples.lambda)$z)
}else
{
summary.hyper[5, 1:3] <- c(lambda, lambda, lambda)
summary.hyper[5, 4:7] <- rep(NA, 4)
}
summary.results <- rbind(summary.beta, summary.hyper)
summary.results[ , 1:3] <- round(summary.results[ , 1:3], 4)
summary.results[ , 4:7] <- round(summary.results[ , 4:7], 1)
if(fix.rho.S & fix.rho.T)
{
samples.rhoext <- NA
}else if(fix.rho.S & !fix.rho.T)
{
samples.rhoext <- samples.lambda
names(samples.rhoext) <- "rho.T"
}else if(!fix.rho.S & fix.rho.T)
{
samples.rhoext <- samples.rho
names(samples.rhoext) <- "rho.S"
}else
{
samples.rhoext <- cbind(samples.rho, samples.lambda)
colnames(samples.rhoext) <- c("rho.S", "rho.T")
}
if(n.miss==0) samples.Y = NA
colnames(samples.tau2) <- c("tau2.S", "tau2.T")
samples <- list(beta=mcmc(samples.beta.orig), phi=mcmc(samples.phi), delta=mcmc(samples.delta), tau2=mcmc(samples.tau2), nu2=mcmc(samples.nu2), rho=mcmc(samples.rhoext), fitted=mcmc(samples.fitted), Y=mcmc(samples.Y))
model.string <- c("Likelihood model - Gaussian (identity link function)", "\nLatent structure model - spatial and temporal main effects\n")
results <- list(summary.results=summary.results, samples=samples, fitted.values=fitted.values, residuals=residuals, modelfit=modelfit, accept=accept.final, localised.structure=NULL, formula=formula, model=model.string, X=X)
class(results) <- "CARBayesST"
if(verbose)
{
b<-proc.time()
cat("Finished in ", round(b[3]-a[3], 1), "seconds.\n")
}else
{}
return(results)
} |
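
# Illustrative example (a sketch, not from CARBayesST's documentation). It
# assumes the CARBayesST package and its dependencies (spdep, truncnorm, coda)
# are available so that the internal helpers used above (common.frame(),
# gaussiancarupdate(), quadform(), ...) can be found. Toy data: K = 10 areas on
# a line graph observed at N = 5 time points, ordered with all areas for time 1
# first, then time 2, and so on.
K <- 10; N <- 5
W <- matrix(0, K, K)
W[cbind(1:(K - 1), 2:K)] <- 1
W <- W + t(W)                                   # symmetric binary adjacency
x <- rnorm(K * N)
y <- 2 + 0.5 * x + rnorm(K * N)
fit <- gaussian.CARanova(formula = y ~ x, data = data.frame(y = y, x = x),
                         W = W, burnin = 200, n.sample = 1000, verbose = FALSE)
# fit$summary.results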
validate.summary.input <- function(summary.files, pathway, family, reference, lambda,
ncases, ncontrols, nsamples){
if(!is.null(summary.files)){
validate.summary.files(summary.files)
}
if(!is.null(pathway)){
validate.pathway.definition(pathway)
}
if(!is.null(family)){
validate.family(family)
}
if(!is.null(reference)){
validate.reference(reference)
}
if(!is.null(summary.files) && !is.null(lambda)){
validate.lambda.summaryData(summary.files, lambda)
}
if(!is.null(family) && !is.null(lambda) && !is.null(ncases) && !is.null(ncontrols) && !is.null(nsamples)){
validate.sample.size(family, lambda, ncases, ncontrols, nsamples)
}
} |
ordY<-function(mp, cat, y) {
if(min(mp)<=0 | max(mp)>=1) {
stop("Marginal probabilities must be between 0 and 1.")
}
if(sum(mp)>(1+.Machine$double.eps^0.5) | sum(mp)<(1-.Machine$double.eps^0.5)) {
stop('Marginal probabilities must sum to 1.')
}
if(length(mp)!=length(cat)) {
stop('There must be a corresponding probability for each given category.')
}
cp<-mps2cps(mps=list(mp))
ocats<-data.frame(n=cat, pmin=c(0,cp[[1]]), pmax=c(cp[[1]],1))
ocats<-ocats[which(ocats$pmin!=ocats$pmax),]
ocats$min<-quantile(y, ocats$pmin)
ocats$max<-quantile(y, ocats$pmax)
y.df<-data.frame(y=y, x=rep(NA, length(y)))
for(i in ocats$n) {
min<-ocats[which(ocats$n==i),'min']
max<-ocats[which(ocats$n==i),'max']
if(i==min(ocats$n)) {
y.df[which(y.df[,'y']==min), 'x']<-i
}
y.df[which(y.df$y>min & y.df$y<=max), 'x']<-i
}
return(y.df)
} |
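
# Illustrative example (a sketch, not from the original source). Assumes the
# package's internal mps2cps() helper (marginal-to-cumulative probabilities)
# is available on the search path.
set.seed(1)
y <- rnorm(100)
head(ordY(mp = c(0.2, 0.5, 0.3), cat = 1:3, y = y))
# data.frame with the original y and its ordinal category x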
library(testthat)
library(chunked)
test_check("chunked") |
ggEdit <- function(input, output, session, obj, verbose=TRUE, showDefaults=FALSE, width="auto", height="auto") {
TEMPLIST <- new.env()
shiny::observe({
TEMPLIST$objList.new <- list()
p.in <- obj()
if (is.ggplot(p.in)) {
p.in <- list(p.in)
}
if (is.null(names(p.in))) {
names(p.in) <- as.character(1:length(p.in))
}
if (!all(unlist(lapply(p.in, is.ggplot)))) {
stop("'object' is not a valid ggplot object")
}
TEMPLIST$objList.new <- p.in
TEMPLIST$obj.theme <- vector("list", length(TEMPLIST$objList.new))
TEMPLIST$nonLayers <- vector("list", length(TEMPLIST$objList.new))
TEMPLIST$nonLayersTxt <- vector("list", length(TEMPLIST$objList.new))
})
output$activePlot <- shiny::renderUI({
ns <- session$ns
nm <- factor(names(TEMPLIST$objList.new), ordered = TRUE, levels = names(TEMPLIST$objList.new))
shiny::selectInput(ns("activePlot"), "Choose Plot:", choices = split(1:length(nm), nm), selected = 1)
})
baseLayerVerbose <- shiny::eventReactive(input$activePlot, {
p.in <- obj()
if (is.ggplot(p.in)) {
p.in <- list(p.in)
}
if (is.null(names(p.in))) {
names(p.in) <- as.character(1:length(p.in))
}
lapply(p.in, function(x) lapply(x$layers, function(y) cloneLayer(y, verbose = TRUE, showDefaults = showDefaults)))
})
plotIdx <- shiny::eventReactive(input$activePlot, {
if (is.null(input$activePlot)) {
1
} else {
as.numeric(input$activePlot)
}
})
shiny::observe(TEMPLIST$obj.new <- TEMPLIST$objList.new[[plotIdx()]])
theme.now <- ggplot2::theme_get()
shiny::observeEvent(input$activePlot, {
output$layers <- shiny::renderUI({
ns <- session$ns
shiny::radioButtons(ns("geoms"), "Choose layer(s):", choices = geom_list(TEMPLIST$obj.new), selected = geom_list(TEMPLIST$obj.new)[1], inline = TRUE)
})
TEMPLIST$obj.theme <- lapply(TEMPLIST$objList.new, function(p) {
if (length(p$theme) > 0) {
theme.now <- theme.now %+replace% p$theme
}
themeFetch(theme.now)
})
})
update.Layer <- shiny::eventReactive(input$sendElem, {
TEMPLIST$obj.new <- TEMPLIST$objList.new[[as.numeric(input$activePlot)]]
layer.idx <- which(geom_list(TEMPLIST$obj.new) == input$geoms)
numElem <- unlist(lapply(TEMPLIST$obj.Elems[[layer.idx]], function(x) length(x$val[[1]])))
for (item in names(TEMPLIST$obj.Elems[[layer.idx]])) {
if (numElem[item] == 1) {
newLayer <- cloneLayer(TEMPLIST$obj.new$layers[[layer.idx]])
newLayer$aes_params[[item]] <- eval(parse(text = paste0("input$pop", toupper(item))))
TEMPLIST$obj.new$layers[[layer.idx]] <- newLayer
} else {
if (TEMPLIST$obj.Elems[[layer.idx]][[item]][["class"]][[1]] == "numeric") {
if (input[[paste0("pop", toupper(item), "fixedPal")]] != "Manual") {
palItem <- paste0("'", input[[paste0("pop", toupper(item), "fixedPal")]], "'")
palTxt <- paste0("scale_", item, "_gradientn(colours=scales::brewer_pal(palette=", palItem, ",direction=-1)(9)[1:5])")
TEMPLIST$nonLayersTxt[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_gradientn")]] <- palTxt
suppressMessages({
nL <- eval(parse(text = palTxt))
})
TEMPLIST$nonLayers[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_gradientn")]] <- nL
suppressMessages({
eval(parse(text = paste0("TEMPLIST$obj.new <- TEMPLIST$obj.new + ", palTxt)))
})
} else {
LowCol <- paste0("'", input[[paste0("pop", input$pop, toupper(item), "Low")]], "'")
HighCol <- paste0("'", input[[paste0("pop", input$pop, toupper(item), "High")]], "'")
ColTxt <- paste0("scale_", item, "_gradient(low=", LowCol, ",high=", HighCol, ")")
TEMPLIST$nonLayersTxt[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_gradient")]] <- ColTxt
suppressMessages({
nL <- eval(parse(text = ColTxt))
})
TEMPLIST$nonLayers[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_gradient")]] <- nL
suppressMessages({
eval(parse(text = paste0("TEMPLIST$obj.new <- TEMPLIST$obj.new + ", ColTxt)))
})
}
} else {
vals <- unlist(lapply(names(input)[grepl(paste0("pop", toupper(item), "[1-9]"), names(input))], function(x) input[[x]]))
if (!item %in% c("size", "shape", "linetype")) {
vals <- paste0("'", vals, "'")
}
if (item == "linetype") {
vals <- match(vals, c("0", scales::linetype_pal()(6))) - 1
}
vals <- paste0(vals, collapse = ",")
TEMPLIST$nonLayersTxt[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_manual")]] <- paste0("scale_", item, "_manual(values=c(", vals, "))")
suppressMessages({
nL <- eval(parse(text = paste0("scale_", item, "_manual(values=c(", vals, "))")))
})
TEMPLIST$nonLayers[[as.numeric(input$activePlot)]][[paste0("scale_", item, "_manual")]] <- nL
suppressMessages(eval(parse(text = paste0("TEMPLIST$obj.new <- TEMPLIST$obj.new + scale_", item, "_manual(values=c(", vals, "))"))))
}
}
}
TEMPLIST$objList.new[[as.numeric(input$activePlot)]] <- TEMPLIST$obj.new
return(TEMPLIST$objList.new)
})
output$popElems <- shiny::renderUI({
ns <- session$ns
if (is.null(input$activePlot)) {
aP <- 1
} else {
aP <- as.numeric(input$activePlot)
}
TEMPLIST$obj.new <- TEMPLIST$objList.new[[aP]]
TEMPLIST$obj.Elems <- fetch_aes_ggplotBuild(TEMPLIST$obj.new, geom_list(TEMPLIST$obj.new))
if (is.null(input$geoms)) {
gIdx <- 1
} else {
gIdx <- input$geoms
}
obj.elems <- TEMPLIST$obj.Elems[[gIdx]]
obj.elems <- obj.elems[!names(obj.elems) %in% c("family")]
obj.elemsL <- list()
for (item in names(obj.elems)) {
item_class <- obj.elems[[item]]$class[[1]]
if (item %in% c("colour", "color", "fill")) {
divName <- "divColor"
if (is.null(obj.elemsL[[divName]])) {
obj.elemsL[[divName]] <- list()
}
} else {
if (item_class == "data.frame") {
divName <- "divSlide"
if (is.null(obj.elemsL[[divName]])) {
obj.elemsL[[divName]] <- list()
}
}
if (item_class %in% c("character", "factor")) {
divName <- "divSelect"
if (is.null(obj.elemsL[[divName]])) {
obj.elemsL[[divName]] <- list()
}
}
}
obj.elemsL[[divName]][[item]] <- obj.elems[[item]]
}
shinyBS::bsModal(
id = ns("updateElemPopup"), title = "Update Plot Layer", trigger = ns("updateElem"), size = "large",
shiny::fluidRow(
lapply(obj.elemsL, function(objItem) {
shiny::column(
4,
lapply(names(objItem), FUN = function(item) {
list(
lapply(arg.value(item, objItem, session), function(x) {
do.call(what = x[["type"]], args = x[["args"]])
})
)
})
)
})
),
shiny::div(align = "right", shiny::actionButton(ns("sendElem"), "Update Layer"))
)
})
update.Theme <- shiny::eventReactive(input$sendTheme, {
TEMPLIST$obj.new <- TEMPLIST$objList.new[[as.numeric(input$activePlot)]]
strThemeCallList <- lapply(names(TEMPLIST$obj.theme[[plotIdx()]]), function(item) {
themeNewVal(TEMPLIST$obj.theme[[plotIdx()]][item], TEMPLIST$obj.new, input)
})
strThemeCall <- paste0("TEMPLIST$obj.new <- TEMPLIST$obj.new + theme(", paste0(unlist(strThemeCallList), collapse = ","), ")")
eval(parse(text = strThemeCall))
TEMPLIST$objList.new[[as.numeric(input$activePlot)]] <- TEMPLIST$obj.new
TEMPLIST$themeUpdate <- lapply(TEMPLIST$objList.new, function(p) p$theme)
return(TEMPLIST$objList.new)
})
shiny::observeEvent(input$SetThemeGlobal, {
if (length(TEMPLIST$obj.new$theme) > 0) {
theme.now <- theme.now + TEMPLIST$obj.new$theme
}
ggplot2::theme_set(theme_get() %+replace% theme.now)
})
update.ThemeGrid <- shiny::eventReactive(input$SetThemeGrid, {
p.now <- TEMPLIST$objList.new[[as.numeric(input$activePlot)]]
if (length(p.now$theme) > 0) {
theme.now <- theme.now + p.now$theme
}
for (i in 1:length(TEMPLIST$objList.new)) {
TEMPLIST$objList.new[[i]] <- TEMPLIST$objList.new[[i]] + theme.now
TEMPLIST$themeUpdate[[i]] <- TEMPLIST$objList.new[[i]]$theme
}
return(TEMPLIST$objList.new)
})
shiny::observeEvent(input$activePlot, {
output$popTheme <- shiny::renderUI({
ns <- session$ns
shinyBS::bsModal(
id = ns("updateThemePopup"), title = shiny::HTML('Update Plot Theme <a href="http://docs.ggplot2.org/0.9.3.1/theme.html" target="_blank">(help)</a>'), trigger = ns("updateTheme"), size = "large",
do.call(
shiny::tabsetPanel,
unlist(lapply(1:length(TEMPLIST$obj.theme[[plotIdx()]]), FUN = function(j) {
if (themeListDepth(TEMPLIST$obj.theme[[plotIdx()]][j]) > 2) {
list(themeMakePanel(TEMPLIST$obj.theme[[plotIdx()]][j], session = session))
} else {
unlist(lapply(j, function(i) {
themeMakePanel(TEMPLIST$obj.theme[[plotIdx()]][i], session = session)
}), recursive = FALSE)
}
}), recursive = FALSE)
),
shiny::hr(),
shiny::div(align = "right", shiny::actionButton(ns("sendTheme"), "Set Theme"))
)
})
})
output$Plot <- shiny::renderPlot({
as.ggedit(TEMPLIST$objList.new)
}, width = width, height = height)
shiny::observeEvent(input$updateElem, {
output$Plot <- shiny::renderPlot({
if (input$sendElem == 0) {
as.ggedit(TEMPLIST$objList.new)
} else {
pList.out <- update.Layer()
as.ggedit(pList.out)
}
}, width = width, height = height)
})
shiny::observeEvent(input$updateTheme, {
output$Plot <- shiny::renderPlot({
if (input$sendTheme == 0) {
as.ggedit(TEMPLIST$objList.new)
} else {
pList.out <- update.Theme()
as.ggedit(pList.out)
}
}, width = width, height = height)
})
shiny::observeEvent(input$SetThemeGrid, {
pList.out <- update.ThemeGrid()
output$Plot <- shiny::renderPlot({
as.ggedit(pList.out)
}, width = width, height = height)
})
simTxt <- shiny::reactive({
LayerVerbose <- lapply(TEMPLIST$objList.new, function(p) lapply(p$layer, function(item) cloneLayer(l = item, verbose = T, showDefaults = showDefaults)))
if (is.null(input$activePlot)) {
aP <- 1
} else {
aP <- as.numeric(input$activePlot)
}
if (is.null(input$geoms)) {
l <- 1
} else {
l <- which(geom_list(TEMPLIST$obj.new) == input$geoms)
}
a <- input$updateElem
a1 <- input$updateElemPopup
if (length(l) == 0) {
l <- 1
}
strNew <- strBase <- ""
if (length(LayerVerbose) > 0) {
strNew <- LayerVerbose[[aP]][[l]]
}
if (length(baseLayerVerbose()) > 0) {
strBase <- baseLayerVerbose()[[aP]][[l]]
}
return(list(Original = strBase, Edited = strNew))
})
output$SimPrint <- shiny::renderUI({
ns <- session$ns
junk <- ""
if (length(simTxt()) > 0) {
junk <- textConnection(utils::capture.output(simTxt()))
}
toace <- paste0(readLines(junk), collapse = "\n")
if (input$viewVerbose %% 2 == 1) {
if (Sys.info()[1] == "Windows") {
output$codeout <- shiny::renderText({
toace
})
shiny::verbatimTextOutput("codeout")
} else {
shinyAce::aceEditor(
outputId = "codeout",
value = toace,
mode = "r",
theme = "chrome",
height = "100px",
fontSize = 12
)
}
}
})
Out <- shiny::eventReactive({
c(input$sendTheme, input$sendElem)
}, {
if (!is.null(input$sendTheme)) {
if (input$sendTheme > 0) {
junk1 <- update.Theme()
}
}
if (!is.null(input$sendElem)) {
if (input$sendElem > 0) {
junk1 <- update.Layer()
}
}
ggeditOut <- list()
ggeditOut$UpdatedPlots <- TEMPLIST$objList.new
class(ggeditOut$UpdatedPlots) <- c("ggedit", class(ggeditOut$UpdatedPlots))
ggeditOut$UpdatedLayers <- layersListObj(obj = TEMPLIST$objList.new, lbl = names(TEMPLIST$objList.new))
ggeditOut$UpdatedLayersElements <- layersList(TEMPLIST$objList.new)
if (verbose) {
ggeditOut$UpdatedLayerCalls <- lapply(TEMPLIST$objList.new, function(p) lapply(p$layer, function(item) cloneLayer(l = item, verbose = TRUE, showDefaults = showDefaults)))
}
names(TEMPLIST$nonLayers) <- names(TEMPLIST$nonLayersTxt) <- names(TEMPLIST$objList.new)
ggeditOut$updatedScales <- TEMPLIST$nonLayers
if (verbose) {
ggeditOut$UpdatedScalesCalls <- TEMPLIST$nonLayersTxt
}
if ("themeUpdate" %in% names(TEMPLIST)) {
ggeditOut$UpdatedThemes <- TEMPLIST$themeUpdate
if (verbose) {
ggeditOut$UpdatedThemeCalls <- lapply(names(TEMPLIST$objList.new), function(lp, input) {
p <- TEMPLIST$objList.new[[lp]]
if (length(p$theme) > 0) {
if (!showDefaults) {
themeBase <- ggplot2::theme_get()
if (length(TEMPLIST$obj[[lp]]$theme) > 0) {
themeBase <- themeBase + TEMPLIST$obj[[lp]]$theme
}
compare(p$theme, themeBase, verbose = TRUE)
} else {
x.theme <- themeFetch(p$theme)
x <- lapply(names(x.theme), function(item) {
themeNewVal(x.theme[item], p, input)
})
paste0("theme(", paste0(unlist(x), collapse = ","), ")")
}
} else {
c("list()")
}
}, input)
names(ggeditOut$UpdatedThemeCalls) <- names(TEMPLIST$objList.new)
}
}
class(ggeditOut) <- c("ggedit", class(ggeditOut))
return(ggeditOut)
})
return(Out)
} |
NULL
dashboardPage <- bs4DashPage
dashboardHeader <- bs4DashNavbar
dashboardBrand <- bs4DashBrand
dashboardUser <- bs4UserMenu
dropdownMenu <- bs4DropdownMenu
dashboardControlbar <- bs4DashControlbar
dashboardFooter <- bs4DashFooter
dashboardSidebar <- bs4DashSidebar
updateSidebar <- updatebs4Sidebar
sidebarHeader <- bs4SidebarHeader
sidebarMenu <- bs4SidebarMenu
sidebarUserPanel <- bs4SidebarUserPanel
menuItem <- bs4SidebarMenuItem
menuSubItem <- bs4SidebarMenuSubItem
dashboardBody <- bs4DashBody
tabItems <- bs4TabItems
tabItem <- bs4TabItem
box <- bs4Card
userBox <- bs4UserCard
userDescription <- bs4UserDescription
tabBox <- bs4TabCard
infoBox <- bs4InfoBox
infoBoxOutput <- bs4InfoBoxOutput
renderInfoBox <- renderbs4InfoBox
valueBox <- bs4ValueBox
valueBoxOutput <- bs4ValueBoxOutput
renderValueBox <- renderbs4ValueBox
updateTabItems <- updatebs4TabItems
cardSidebar <- bs4CardSidebar
boxSidebar <- bs4CardSidebar
updateCardSidebar <- updatebs4CardSidebar
updateBoxSidebar <- updatebs4CardSidebar
updateCard <- updatebs4Card
updateBox <- updatebs4Card
boxDropdown <- cardDropdown
boxDropdownItem <- cardDropdownItem
cardLabel <- bs4CardLabel
boxLabel <- bs4CardLabel
boxProfile <- cardProfile
boxProfileItem <- cardProfileItem
socialBox <- bs4SocialCard
boxComment <- cardComment
boxPad <- cardPad
starBlock <- bs4Stars
timelineBlock <- bs4Timeline
timelineLabel <- bs4TimelineLabel
timelineItem <- bs4TimelineItem
timelineItemMedia <- bs4TimelineItemMedia
timelineStart <- bs4TimelineStart
timelineEnd <- bs4TimelineEnd
dashboardBadge <- bs4Badge
carousel <- bs4Carousel
carouselItem <- bs4CarouselItem
progressBar <- bs4ProgressBar
multiProgressBar <- bs4MultiProgressBar
accordion <- bs4Accordion
accordionItem <- bs4AccordionItem
sortable <- bs4Sortable
blockQuote <- bs4Quote
jumbotron <- bs4Jumbotron
listGroup <- bs4ListGroup
listGroupItem <- bs4ListGroupItem
callout <- bs4Callout
loadingState <- bs4Loading
ribbon <- bs4Ribbon
boxLayout <- bs4CardLayout |
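
# Illustrative example (a sketch; argument details may differ across bs4Dash
# versions). The aliases above let shinydashboard-style code build a bs4Dash UI:
ui <- dashboardPage(
  header  = dashboardHeader(title = dashboardBrand(title = "Demo")),
  sidebar = dashboardSidebar(sidebarMenu(menuItem("Home", tabName = "home"))),
  body    = dashboardBody(tabItems(tabItem(tabName = "home", box(title = "Hello"))))
)
# shiny::shinyApp(ui, server = function(input, output) {})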
library(ggplot2)
gg_pta <- function(data = data.frame(),
theme = theme_light,
lettermark = NULL,
lettermarksize = 30,
xlab = "Frequency in Hertz (Hz)",
ylab = "Hearing Levels in Decibels (dB)",
xlim = c(125, 8000),
xbreaks = c(125, 250, 500, 1000, 2000, 4000, 8000),
minor_xbreaks = c(750, 1500, 3000),
x_base_lwd = 1.0,
xlabels = c("125","250", "500", "1000", "2000",
"4000", "8000"),
ylim = c(120,-10),
yposition = "left")
{
p <- ggplot(data) +
theme() +
scale_x_continuous(name = xlab,
position="top",
trans = "log2",
breaks = xbreaks,
minor_breaks = minor_xbreaks,
labels = xlabels,
limits = xlim) +
scale_y_reverse(name = ylab,
breaks=seq(150,-30,-10),
minor_breaks = NULL,
limits = ylim,
position = yposition) +
coord_fixed(ratio =.05)
if(x_base_lwd > 0)
p <- p + geom_hline(yintercept = 0, lwd = x_base_lwd)
if("R" %in% lettermark)
p <- p + geom_text(data=data.frame(0),
mapping=aes(x=250, y=90), label="R", alpha=.2, size=lettermarksize)
if("L" %in% lettermark)
p <- p + geom_text(data=data.frame(0),
mapping=aes(x=4e3, y=90), label="L", alpha=.2, size=lettermarksize)
return(p)
} |
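
# Illustrative example (a sketch, not from the original source): an empty
# audiogram grid marked for the right ear. `thresholds` in the commented line
# is a hypothetical data frame of measured values, not defined here.
p <- gg_pta(lettermark = "R")
print(p)
# p + geom_point(data = thresholds, aes(x = frequency, y = threshold))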
CNOT5_20 <- function(a){
cnot5_20=TensorProd(CNOT4_20(diag(16)),diag(2))
result = cnot5_20 %*% a
result
} |
ridgeVAR1fused <- function(Y,
id,
lambdaA=0,
lambdaF=0,
lambdaP=0,
targetA=matrix(0, dim(Y)[1], dim(Y)[1]),
targetP=matrix(0, dim(Y)[1], dim(Y)[1]),
targetPtype="none",
fitA="ml",
zerosA=matrix(nrow=0, ncol=2),
zerosAfit="sparse",
zerosP=matrix(nrow=0, ncol=2),
cliquesP=list(),
separatorsP=list(),
unbalanced=matrix(nrow=0, ncol=2),
diagP=FALSE,
efficient=TRUE,
nInit=100,
nInitA=5,
minSuccDiff=0.001,
minSuccDiffA=0.001){
if (!is(Y, "array")){
stop("Input (Y) is of wrong class.")
}
if (length(dim(Y)) != 3){
stop("Input (Y) is of wrong dimensions: either covariate, time or sample dimension is missing.")
}
if (!is(id, "numeric") & !is(id, "integer")){
stop("Input (id) is of wrong class.")
}
if (length(id) != dim(Y)[3]){
stop("Input (id) is of wrong length: should equal sample dimension of Y.")
}
if (!is(lambdaA, "numeric")){
stop("Input (lambdaA) is of wrong class.")
}
if (length(lambdaA) != 1){
stop("Input (lambdaA) is of wrong length.")
}
if (is.na(lambdaA)){
stop("Input (lambdaA) is not a non-negative number.")
}
    if (lambdaA < 0){
        stop("Input (lambdaA) is not a non-negative number.")
    }
if (!is(lambdaF, "numeric")){
stop("Input (lambdaF) is of wrong class.")
}
if (length(lambdaF) != 1){
stop("Input (lambdaF) is of wrong length.")
}
if (is.na(lambdaF)){
stop("Input (lambdaF) is not a non-negative number.")
}
if (lambdaF < 0){
stop("Input (lambdaF) is not a non-negative number.")
}
if (!is(lambdaP, "numeric")){
stop("Input (lambdaP) is of wrong class.")
}
if (length(lambdaP) != 1){
stop("Input (lambdaP) is of wrong length.")
}
if (is.na(lambdaP)){
stop("Input (lambdaP) is not a non-negative number.")
}
if (lambdaP < 0){
stop("Input (lambdaP) is not a non-negative number.")
}
if (!is.null(unbalanced) & !is(unbalanced, "matrix")){
stop("Input (unbalanced) is of wrong class.")
}
if (!is.null(unbalanced)){
if(ncol(unbalanced) != 2){
stop("Wrong dimensions of the matrix unbalanced.")
}
}
if (!is(zerosAfit, "character")){
stop("Input (zerosAfit) is of wrong class.")
}
    if (is(zerosAfit, "character")){
if (!(zerosAfit %in% c("dense", "sparse"))){
stop("Input (zerosAfit) ill-specified.")
}
}
if (!is(diagP, "logical")){
stop("Input (diagP) is of wrong class.")
}
if (!is(efficient, "logical")){
stop("Input (efficient) is of wrong class.")
}
if (!is(nInit, "numeric") & !is(nInit, "logical")){
stop("Input (nInit) is of wrong class.")
}
if (length(nInit) != 1){
stop("Input (nInit) is of wrong length.")
}
if (is.na(nInit)){
stop("Input (nInit) is not a positive integer.")
}
if (nInit < 0){
stop("Input (nInit) is not a positive integer.")
}
if (!is(nInitA, "numeric") & !is(nInitA, "logical")){
stop("Input (nInitA) is of wrong class.")
}
if (length(nInitA) != 1){
stop("Input (nInitA) is of wrong length.")
}
if (is.na(nInitA)){
stop("Input (nInitA) is not a positive integer.")
}
if (nInitA < 0){
stop("Input (nInitA) is not a positive integer.")
}
if (!is(minSuccDiff, "numeric")){
stop("Input (minSuccDiff) is of wrong class.")
}
if (length(minSuccDiff) != 1){
stop("Input (minSuccDiff) is of wrong length.")
}
if (is.na(minSuccDiff)){
stop("Input (minSuccDiff) is not a positive number.")
}
    if (minSuccDiff <= 0){
        stop("Input (minSuccDiff) is not a positive number.")
    }
if (!is(minSuccDiffA, "numeric")){
stop("Input (minSuccDiffA) is of wrong class.")
}
if (length(minSuccDiffA) != 1){
stop("Input (minSuccDiffA) is of wrong length.")
}
if (is.na(minSuccDiffA)){
stop("Input (minSuccDiffA) is not a positive number.")
}
if (minSuccDiffA <= 0){
stop("Input (minSuccDiffA) is not a positive number.")
}
if (!is.null(targetA) & !is(targetA, "matrix")){
stop("Input (targetA) is of wrong class.")
}
if (is.null(targetP)){
targetP <- "Null"
}
if (!is.null(targetP) & (!is(targetP, "matrix") & !is(targetP, "character"))){
stop("Input (targetP) is of wrong class.")
}
if (!is.null(targetP) & is(targetP, "matrix")){
if(!isSymmetric(targetP)){
stop("Non-symmetrical target for the precision matrix provided")
}
}
if (diagP & !is.null(targetP) & is(targetP, "matrix")){
if(max(abs(upper.tri(targetP))) != 0){
stop("Inconsistent input (targetP v. diagP) provided")
}
}
if (!is.null(targetP) & is(targetP, "character")){
if( length(intersect(targetP, c("DAIE", "DIAES", "DUPV", "DAPV", "DCPV", "DEPV", "Null"))) != 1 ){
stop("Wrong default target for the precision matrix provided: see default.target for the options.")
}
}
if (!is.null(targetA)){
if (dim(Y)[1] != nrow(targetA)){
stop("Dimensions of input (targetA) do not match that of other input (Y).")
}
}
if (!is.null(targetA)){
if (dim(Y)[1] != ncol(targetA)){
stop("Dimensions of input (targetA) do not match that of other input (Y).")
}
}
    if (!is.null(targetP) & is(targetP, "matrix")){
if (dim(Y)[1] != nrow(targetP)){
stop("Dimensions of input (targetP) do not match that of other input (Y).")
}
}
if (!is.null(zerosA) & !is(zerosA, "matrix")){
stop("Input (zerosA) is of wrong class.")
}
if (!is.null(zerosA)){
if(ncol(zerosA) != 2){
stop("Wrong dimensions of the (zerosA) matrix.")
}
}
if (!is.null(zerosA)){
zerosA <- zerosA[order(zerosA[,2], zerosA[,1]),]
}
if (!is.null(zerosP) & !is(zerosP, "matrix")){
stop("Input (zerosP) is of wrong class.")
}
if (!is.null(zerosP)){
if(ncol(zerosP) != 2){
stop("Wrong dimensions of the (zerosP).")
}
}
if (!is.null(zerosP)){
zerosP <- zerosP[order(zerosP[,2], zerosP[,1]),]
}
p <- nrow(Y)
targetA <- lambdaA * targetA;
if (nrow(zerosP) == 0){
VAR1hat <- .armaVAR1fused_ridgeML(Y,
id,
lambdaA,
lambdaF,
lambdaP,
targetA,
targetP,
targetPtype,
fitA,
unbalanced,
diagP,
efficient,
zerosA[,1],
zerosA[,2],
zerosAfit,
nInit,
nInitA,
minSuccDiff,
minSuccDiffA);
Phat <- VAR1hat$P;
Ahats <- VAR1hat$As;
LL <- VAR1hat$LL;
}
if (nrow(zerosP) > 0){
if (fitA == "ss"){
if (!is.null(unbalanced)){
Y <- .armaVAR_array2cube_withMissing(Y,
unbalanced[,1],
unbalanced[,2]);
}
VARYs <- COVYs <- Ahats <- matrix(nrow=0, ncol=dim(Y)[1])
for (g in 0:max(id)){
VARYs <- rbind(VARYs, .armaVAR1_VARYhat(Y[,,which(id == g), drop=FALSE],
efficient,
unbalanced));
COVYs <- rbind(COVYs, .armaVAR1_COVYhat(Y[,,which(id == g), drop=FALSE]));
Ahats <- rbind(Ahats, .armaVAR1_Ahat_ridgeSS(VARYs[c((p*g+1):(p*(g+1))),],
COVYs[c((p*g+1):(p*(g+1))),],
lambdaA,
targetA));
}
eigDecomps <- .armaEigenDecomp_stackedCovariances(VARYs);
Ahats <- .armaVAR1fused_Ahat(Ahats,
diag(nrow(Y)),
COVYs,
eigDecomps[[2]],
eigDecomps[[1]],
lambdaA,
lambdaF,
targetA,
fitA,
zerosA[,1],
zerosA[,2],
zerosAfit,
nInitA,
minSuccDiffA);
Se <- .armaVAR1fused_Shat_ML(Y, Ahats, id);
if (length(cliquesP)==0){
supportPinfo <- support4ridgeP(zeros=zerosP,
nNodes=dim(Y)[1]);
cliquesP <- supportPinfo$cliques;
separatorsP <- supportPinfo$separators;
zerosP <- supportPinfo$zeros;
}
if (is.character(targetP)){
target <- .armaP_defaultTarget(Se,
targetType=targetPtype,
fraction=0.0001,
multiplier=0);
} else {
target <- targetP;
}
Phat <- ridgePchordal(Se,
lambda=lambdaP,
target=target,
zeros=zerosP,
cliques=cliquesP,
separators=separatorsP,
type="Alt",
verbose=FALSE);
}
if (fitA == "ml"){
if (!is.null(unbalanced)){
Y <- .armaVAR_array2cube_withMissing(Y, unbalanced[,1], unbalanced[,2]);
}
VARYs <- COVYs <- Ahats <- matrix(nrow=0, ncol=dim(Y)[1]);
for (g in 0:max(id)){
VARYs <- rbind(VARYs, .armaVAR1_VARYhat(Y[,,which(id == g), drop=FALSE],
efficient,
unbalanced));
COVYs <- rbind(COVYs, .armaVAR1_COVYhat(Y[,,which(id == g), drop=FALSE]));
Ahats <- rbind(Ahats, .armaVAR1_Ahat_ridgeSS(VARYs[c((p*g+1):(p*(g+1))),],
COVYs[c((p*g+1):(p*(g+1))),],
lambdaA,
targetA));
}
eigDecomps <- .armaEigenDecomp_stackedCovariances(VARYs);
Se <- .armaVAR1fused_Shat_ML(Y, Ahats, id);
if (length(cliquesP)==0){
supportPinfo <- support4ridgeP(zeros=zerosP,
nNodes=dim(Y)[1]);
cliquesP <- supportPinfo$cliques;
separatorsP <- supportPinfo$separators;
zerosP <- supportPinfo$zeros;
}
if (is.character(targetP)){
target <- .armaP_defaultTarget(Se,
targetType=targetPtype,
fraction=0.0001,
multiplier=0);
} else {
target <- targetP;
}
Phat <- ridgePchordal(Se,
lambda=lambdaP,
target=target,
zeros=zerosP,
cliques=cliquesP,
separators=separatorsP,
type="Alt",
verbose=FALSE);
for (u in 1:nInit){
Aprevs <- Ahats;
Pprev <- Phat;
Ahats <- .armaVAR1fused_Ahat(Ahats,
Phat,
COVYs,
eigDecomps[[2]],
eigDecomps[[1]],
lambdaA,
lambdaF,
targetA,
fitA,
zerosA[,1],
zerosA[,2],
zerosAfit,
nInitA,
minSuccDiffA);
Se <- .armaVAR1fused_Shat_ML(Y, Ahats, id);
if (is.character(targetP)){
target <- .armaP_defaultTarget(Se,
targetType=targetPtype,
fraction=0.0001,
multiplier=0)
} else {
target <- targetP
}
Phat <- ridgePchordal(Se,
lambda=lambdaP,
target=target,
zeros=zerosP,
cliques=cliquesP,
separators=separatorsP,
type="Alt",
verbose=FALSE);
if (.armaVAR1fused_convergenceEvaluation(Ahats, Aprevs, Phat, Pprev) < minSuccDiff){
break
}
}
}
}
LL <- 0
for (g in 0:max(id)){
Se <- .armaVAR1_Shat_ML(Y[, , which(id == g), drop=FALSE],
Ahats[c((p*g+1):(p*(g+1))),, drop=FALSE]);
LL <- LL + (dim(Y)[2] - 1) * sum(id==g) * (determinant(Phat)$modulus - sum(Se * Phat)) / 2;
}
return(list(As=Ahats, P=Phat, LL=LL, lambdaA=lambdaA, lambdaF=lambdaF, lambdaP=lambdaP))
} |
get_pull_requests <- function(base_url, api_key, owner, repo){
if (missing(base_url)) {
stop("Please add a valid URL")
} else if (missing(api_key)) {
stop("Please add a valid API token")
} else if (missing(owner)) {
stop("Please add a valid owner")
} else if (missing(repo)) {
stop("Please add a valid repository")
}
base_url <- sub("/$", "", base_url)
gitea_url <-
file.path(base_url, "api/v1", sub("^/", "", "/repos"),
owner, repo, "pulls")
authorization <- paste("token", api_key)
r <- tryCatch(
GET(
gitea_url,
add_headers(Authorization = authorization),
accept_json()
),
error = function(cond) {
"Failure"
}
)
  if (!inherits(r, "response")) {
stop(paste0("Error consulting the url: ", gitea_url))
}
stop_for_status(r)
content_pull_req <- fromJSON(content(r, as = "text"))
content_pull_req <- as.data.frame(content_pull_req)
return(content_pull_req)
} |
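
# Illustrative example (a sketch; the URL, token, owner and repository below are
# placeholders, not real values). Assumes httr and jsonlite are attached, which
# supply GET(), add_headers(), accept_json(), stop_for_status(), content() and
# fromJSON() used above.
# pulls <- get_pull_requests(base_url = "https://gitea.example.org",
#                            api_key  = "<personal-access-token>",
#                            owner    = "some-owner",
#                            repo     = "some-repo")
# head(pulls)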
plot.spdppc = function(x, type='envelope', nsample=NULL, interval=0.90, obs.lwd=1.5,obs.col='black',sim.col='lightgrey',alpha=1,envelope.col='lightgrey',positive.col='red',negative.col='blue',calendar='BP', ...)
{
if (!type%in%c('spaghetti','envelope')) {stop("The argument 'type' should be either 'spaghetti' or 'envelope'.")}
if (is.null(nsample)) {nsample = ncol(x$simmatrix)}
if (type=='spaghetti' & nsample > ncol(x$simmatrix))
{
    warning(paste0('nsample larger than the number of posterior simulations. Running with nsample=',ncol(x$simmatrix)))
nsample = ncol(x$simmatrix)
}
if (calendar=="BP"){
plotyears <- x$obs$calBP
xlabel <- "Years cal BP"
xlim <- c(max(plotyears),min(plotyears))
} else if (calendar=="BCAD"){
plotyears <- BPtoBCAD(x$obs$calBP)
xlabel <- "Years BC/AD"
if (all(range(plotyears)<0)){xlabel <- "Years BC"}
if (all(range(plotyears)>0)){xlabel <- "Years AD"}
xlim <- c(min(plotyears),max(plotyears))
} else {
stop("Unknown calendar type")
}
if (type=='envelope')
{
lo = apply(x$simmatrix,1,quantile,prob=(1-interval)/2)
hi = apply(x$simmatrix,1,quantile,prob=1-(1-interval)/2)
obs = x$obs$PrDens
ylim = c(0,max(hi,x$obs$PrDens))
booms <- which(obs>hi)
busts <- which(obs<lo)
baseline <- rep(NA,length(obs))
colpts = rep('grey',length(obs))
colpts[booms] = 'red'
colpts[busts] = 'blue'
boomPlot <- baseline
if (length(booms)>0){ boomPlot[booms]=obs[booms] }
bustPlot <- baseline
if (length(busts)>0){ bustPlot[busts]=obs[busts] }
boomBlocks <- vector("list")
counter <- 0
state <- "off"
for (i in 1:length(boomPlot)){
if (!is.na(boomPlot[i])&state=="off"){
counter <- counter+1
boomBlocks <- c(boomBlocks,vector("list",1))
boomBlocks[[counter]] <- vector("list",2)
boomBlocks[[counter]][[1]] <- boomPlot[i]
boomBlocks[[counter]][[2]] <- plotyears[i]
state <- "on"
}
if (state=="on"){
if (!is.na(boomPlot[i])){
boomBlocks[[counter]][[1]] <- c(boomBlocks[[counter]][[1]],boomPlot[i])
boomBlocks[[counter]][[2]] <- c(boomBlocks[[counter]][[2]],plotyears[i])
}
if (is.na(boomPlot[i])){
state <- "off"
}
}
}
bustBlocks <- vector("list")
counter <- 0
state <- "off"
for (i in 1:length(bustPlot)){
if (!is.na(bustPlot[i])&state=="off"){
counter <- counter+1
bustBlocks <- c(bustBlocks,vector("list",1))
bustBlocks[[counter]] <- vector("list",2)
bustBlocks[[counter]][[1]] <- bustPlot[i]
bustBlocks[[counter]][[2]] <- plotyears[i]
state <- "on"
}
if (state=="on"){
if (!is.na(bustPlot[i])){
bustBlocks[[counter]][[1]] <- c(bustBlocks[[counter]][[1]],bustPlot[i])
bustBlocks[[counter]][[2]] <- c(bustBlocks[[counter]][[2]],plotyears[i])
}
if (is.na(bustPlot[i])){
state <- "off"
}
}
}
plot(0, 0, xlim=xlim, ylim=ylim, type="n", col="white", ylab='Probability', xlab=xlabel, xaxt="n", ...)
polygon(c(plotyears,rev(plotyears)),c(lo,rev(hi)),col=envelope.col,border=NA)
if (length(booms)>0){
for (i in 1:length(boomBlocks)){
bbb = unique(boomBlocks[[i]][[2]])
index = which(plotyears%in%bbb)
polygon(c(bbb,rev(bbb)),c(x$obs$PrDens[index],rev(hi[index])),border=NA,col=positive.col)
}
}
if (length(busts)>0){
for (i in 1:length(bustBlocks)){
bbb = unique(bustBlocks[[i]][[2]])
index = which(plotyears%in%bbb)
polygon(c(bbb,rev(bbb)),c(x$obs$PrDens[index],rev(lo[index])),border=NA,col=negative.col)
}
}
lines(plotyears,x$obs$PrDens,lwd=obs.lwd,col=obs.col)
}
if (type=='spaghetti')
{
simmat = x$simmatrix[,sample(1:ncol(x$simmatrix),size=nsample)]
ylim=c(0,max(x$obs$PrDens,simmat))
plot(0, 0, xlim=xlim, ylim=ylim, type="n", col="white", ylab='Probability', xlab=xlabel, xaxt="n", ...)
sim.col=col2rgb(sim.col)
sim.col = rgb(sim.col[1,]/255,sim.col[2,]/255,sim.col[3,]/255,alpha=alpha)
apply(simmat,2,lines,x=plotyears,col=sim.col)
lines(plotyears,x$obs$PrDens,lwd=obs.lwd,col=obs.col)
}
if (calendar=="BP"){
rr <- range(pretty(plotyears))
axis(side=1,at=seq(rr[2],rr[1],-100),labels=NA,tck = -.01)
axis(side=1,at=pretty(plotyears),labels=abs(pretty(plotyears)))
} else if (calendar=="BCAD"){
yy <- plotyears
rr <- range(pretty(yy))
prettyTicks <- seq(rr[1],rr[2],+100)
prettyTicks[which(prettyTicks>=0)] <- prettyTicks[which(prettyTicks>=0)]-1
axis(side=1,at=prettyTicks, labels=NA,tck = -.01)
py <- pretty(yy)
pyShown <- py
if (any(pyShown==0)){pyShown[which(pyShown==0)]=1}
py[which(py>1)] <- py[which(py>1)]-1
axis(side=1,at=py,labels=abs(pyShown))
}
} |
city_prefixes_en_gb <- c('North', 'East', 'West', 'South', 'New', 'Lake', 'Port')
city_suffixes_en_gb <- c(
'town',
'ton',
'land',
'ville',
'berg',
'burgh',
'borough',
'bury',
'view',
'port',
'mouth',
'stad',
'furt',
'chester',
'fort',
'haven',
'side',
'shire'
)
building_number_formats_en_gb <- c('#', '##', '###')
street_suffixes_en_gb <- c(
'alley',
'avenue',
'branch',
'bridge',
'brook',
'brooks',
'burg',
'burgs',
'bypass',
'camp',
'canyon',
'cape',
'causeway',
'center',
'centers',
'circle',
'circles',
'cliff',
'cliffs',
'club',
'common',
'corner',
'corners',
'course',
'court',
'courts',
'cove',
'coves',
'creek',
'crescent',
'crest',
'crossing',
'crossroad',
'curve',
'dale',
'dam',
'divide',
'drive',
'drives',
'estate',
'estates',
'expressway',
'extension',
'extensions',
'fall',
'falls',
'ferry',
'field',
'fields',
'flat',
'flats',
'ford',
'fords',
'forest',
'forge',
'forges',
'fork',
'forks',
'fort',
'freeway',
'garden',
'gardens',
'gateway',
'glen',
'glens',
'green',
'greens',
'grove',
'groves',
'harbor',
'harbors',
'haven',
'heights',
'highway',
'hill',
'hills',
'hollow',
'inlet',
'island',
'islands',
'isle',
'junction',
'junctions',
'key',
'keys',
'knoll',
'knolls',
'lake',
'lakes',
'land',
'landing',
'lane',
'light',
'lights',
'loaf',
'lock',
'locks',
'lodge',
'loop',
'mall',
'manor',
'manors',
'meadow',
'meadows',
'mews',
'mill',
'mills',
'mission',
'motorway',
'mount',
'mountain',
'mountains',
'neck',
'orchard',
'oval',
'overpass',
'park',
'parks',
'parkway',
'parkways',
'pass',
'passage',
'path',
'pike',
'pine',
'pines',
'place',
'plain',
'plains',
'plaza',
'point',
'points',
'port',
'ports',
'prairie',
'radial',
'ramp',
'ranch',
'rapid',
'rapids',
'rest',
'ridge',
'ridges',
'river',
'road',
'roads',
'route',
'row',
'rue',
'run',
'shoal',
'shoals',
'shore',
'shores',
'skyway',
'spring',
'springs',
'spur',
'spurs',
'square',
'squares',
'station',
'stravenue',
'stream',
'street',
'streets',
'summit',
'terrace',
'throughway',
'trace',
'track',
'trafficway',
'trail',
'tunnel',
'turnpike',
'underpass',
'union',
'unions',
'valley',
'valleys',
'via',
'viaduct',
'view',
'views',
'village',
'villages',
'ville',
'vista',
'walk',
'walks',
'wall',
'way',
'ways',
'well',
'wells'
)
postcode_formats_en_gb <- c(
'AN NEE',
'ANN NEE',
'PN NEE',
'PNN NEE',
'ANC NEE',
'PND NEE'
)
POSTAL_ZONES_ONE_CHAR <- c("B", "E", "G", "L", "M", "N", "S", "W")
POSTAL_ZONES_TWO_CHARS <- c(
'AB', 'AL', 'BA', 'BB', 'BD', 'BH', 'BL', 'BN', 'BR',
'BS', 'BT', 'CA', 'CB', 'CF', 'CH', 'CM', 'CO', 'CR', 'CT',
'CV', 'CW', 'DA', 'DD', 'DE', 'DG', 'DH', 'DL', 'DN', 'DT',
'DY', 'EC', 'EH', 'EN', 'EX', 'FK', 'FY', 'GL',
'GY', 'GU', 'HA', 'HD', 'HG', 'HP', 'HR', 'HS', 'HU', 'HX',
'IG', 'IM', 'IP', 'IV', 'JE', 'KA', 'KT', 'KW', 'KY',
'LA', 'LD', 'LE', 'LL', 'LN', 'LS', 'LU', 'ME', 'MK',
'ML', 'NE', 'NG', 'NN', 'NP', 'NR', 'NW', 'OL', 'OX',
'PA', 'PE', 'PH', 'PL', 'PO', 'PR', 'RG', 'RH', 'RM',
'SA', 'SE', 'SG', 'SK', 'SL', 'SM', 'SN', 'SO', 'SP', 'SR',
'SS', 'ST', 'SW', 'SY', 'TA', 'TD', 'TF', 'TN', 'TQ', 'TR',
'TS', 'TW', 'UB', 'WA', 'WC', 'WD', 'WF', 'WN', 'WR',
'WS', 'WV', 'YO', 'ZE'
)
locale_data_en_gb <- list(
postcode_sets = list(
' ' = ' ',
'N' = 0:9,
'A' = POSTAL_ZONES_ONE_CHAR,
'B' = strsplit('ABCDEFGHKLMNOPQRSTUVWXY', '')[[1]],
'C' = strsplit('ABCDEFGHJKSTUW', '')[[1]],
'D' = strsplit('ABEHMNPRVWXY', '')[[1]],
'E' = strsplit('ABDEFGHJLNPQRSTUWXYZ', '')[[1]],
'P' = POSTAL_ZONES_TWO_CHARS
)
)
city_formats_en_gb <- c(
'{{city_prefix}} {{first_name}}{{city_suffix}}',
'{{city_prefix}} {{first_name}}',
'{{first_name}}{{city_suffix}}',
'{{last_name}}{{city_suffix}}'
)
street_name_formats_en_gb <- c(
'{{first_name}} {{street_suffix}}',
'{{last_name}} {{street_suffix}}'
)
street_address_formats_en_gb <- c(
'{{building_number}} {{street_name}}',
'{{secondary_address}}\n{{street_name}}'
)
address_formats_en_gb <- "{{street_address}}\n{{city}}\n{{postcode}}"
secondary_address_formats_en_gb <- c('Flat #', 'Flat ##', 'Flat ##?',
                                     'Studio #', 'Studio ##', 'Studio ##?')
setClass("pubClass", representation("numeric", id = "integer"))
setClass("privCl", representation(x = "numeric", id = "integer"))
.showMe <- function(object)
cat("show()ing object of class ", class(object),
" and slots named\n\t",
paste(slotNames(object), collapse=", "), "\n")
setMethod("show", "pubClass", .showMe)
setMethod("show", "privCl", .showMe)
setMethod("plot", "pubClass", function(x, ...) plot(as(x, "numeric"), ...))
setMethod("plot", "privCl", function(x, ...) plot(x@x, ...))
assertError <- function(expr)
stopifnot(inherits(try(expr, silent = TRUE), "try-error"))
assertWarning <- function(expr)
stopifnot(inherits(tryCatch(expr, warning = function(w)w), "warning"))
if(isGeneric("colSums")) {
stop("'colSums' is already generic -- need new example in test ...")
} else {
setGeneric("colSums")
stopifnot(isGeneric("colSums"))
}
assertError(setGeneric("pubGenf"))
setGeneric("pubGenf", function(x,y) standardGeneric("pubGenf"))
setGeneric("myGenf", function(x,y){ standardGeneric("myGenf") })
setMethod("myGenf", "pubClass", function(x, y) 2*x)
assertError(setMethod("pubGenf", "pubClass", function(x, ...) { 10*x } ))
setMethod("pubGenf", c(x="pubClass"), function(x, y) { 10*x } )
setGeneric("pubfn",
function(filename,
dimLengths,
dimSteps,
dimStarts,
likeTemplate,
likeFile) {
function(x,y) standardGeneric("pubfn")
})
setMethod("pubfn", signature=
signature(filename="character",
dimLengths="numeric",
dimSteps="numeric",
dimStarts="numeric"),
function(filename=filename,
dimLengths=NULL,
dimSteps=NULL, dimStarts=NULL) {
sys.call()
})
setClassUnion("atomicVector",
members = c("logical", "integer", "numeric",
"complex", "raw", "character"))
setClassUnion("array_or_vector",
members = c("array", "matrix", "atomicVector"))
setClass("M", contains = "VIRTUAL",
slots = c(Dim = "integer", Dimnames = "list"),
prototype = prototype(Dim = integer(2), Dimnames = list(NULL,NULL)))
setClass("dM", contains = c("M", "VIRTUAL"), slots = c(x = "numeric"))
setClass("diagM", contains = c("M", "VIRTUAL"), slots = c(diag = "character"))
setClass("ddiM", contains = c("diagM", "dM"))
setClassUnion("mM", members = c("matrix", "M")) |
context("Unstructured Data")
test_that("array inputs", {
mnist <- dataset_mnist()
train_X <- list(x=array(mnist$train$x,
c(dim(mnist$train$x),1))
)
subset <- 1:200
train_X[[1]]<- train_X[[1]][subset,,,,drop=FALSE]
train_y <- to_categorical(mnist$train$y[subset])
conv_mod <- function(x) x %>%
layer_conv_2d(filters = 16, kernel_size = c(3,3),
activation= "relu",
input_shape = shape(NULL, NULL, 1)) %>%
layer_global_average_pooling_2d() %>%
layer_dense(units = 10)
simple_mod <- function(x) x %>%
layer_dense(units = 4, activation = "relu") %>%
layer_dense(units = 1, activation = "linear")
z <- rnorm(length(subset))
fac <- gl(4, length(subset)/4)
m <- runif(length(z))
list_as_input <- append(train_X, (data.frame(z=z, fac=fac, m=m)))
mod <- deepregression(y = train_y, list_of_formulas =
list(logit = ~ 1 + simple_mod(z) + fac + conv_mod(x)),
data = list_as_input,
list_of_deep_models = list(simple_mod = simple_mod,
conv_mod = conv_mod),
family = "multinoulli")
cvres <- mod %>% cv(epochs = 2, cv_folds = 2, batch_size=100)
expect_is(cvres, "drCV")
lapply(cvres, function(x) {
expect_true(is.numeric(x$metrics$loss))
expect_true(is.numeric(x$metrics$val_loss))
expect_true(!any(is.nan(x$metrics$loss)))
})
expect_equal(dim(coef(mod)[[1]]), c(4, 10))
mod %>% fit(epochs = 2,
batch_size=100,
view_metrics=FALSE,
validation_split = NULL)
expect_is(mod, "deepregression")
expect_true(!any(is.nan(unlist(coef(mod)))))
})
context("Deep Specification")
test_that("deep specification", {
set.seed(24)
n <- 200
b0 <- 1
x <- runif(n) %>% as.matrix()
z <- runif(n)
fac <- gl(10, n/10)
true_mean_fun <- function(xx) sin(10*xx) + b0
y <- true_mean_fun(x) + rnorm(n = n, mean = 0, sd = 2)
k <- rnorm(length(x))
data = data.frame(x = x, fac = fac, z = z)
data$k <- k
deep_model <- function(x) x %>%
layer_dense(units = 4, activation = "relu") %>%
layer_dense(units = 1, activation = "linear")
another_deep_model <- function(x) x %>%
layer_dense(units = 4, activation = "relu") %>%
layer_dense(units = 1, activation = "linear")
third_model <- function(x) x %>%
layer_dense(units = 4, activation = "relu") %>%
layer_dense(units = 1, activation = "linear")
formulae <- c(
"~ d(x,z) + k",
"~ d(x,z,k)",
"~ d(x) + d(z)",
"~ deep_model(x) + another_deep_model(z)",
"~ deep_model(x,z) + another_deep_model(k)",
"~ deep_model(x) + another_deep_model(z) + third_model(k)"
)
list_models <- list(deep_model = deep_model,
another_deep_model = another_deep_model,
third_model = third_model)
list_models_wo_name <- list(deep_model, another_deep_model)
use <- list(1,1,1:2,1:2,1:2,1:3)
for (i in seq_len(length(formulae))) {
form <- formulae[i]
usei <- use[[i]]
this_list <- list_models[usei]
if (i %in% 1:3) {
use_list <- list_models_wo_name[use[[i]]]
if(i==3) use_list <- use_list[1]
} else {
use_list <- list_models[use[[i]]]
}
suppressWarnings(
mod <- deepregression(
y = y,
data = data,
list_of_formulas = list(loc = as.formula(form), scale = ~1),
list_of_deep_models = use_list
)
)
suppressWarnings(
res <- mod %>% fit(epochs=2, verbose = FALSE, view_metrics = FALSE)
)
expect_is(mod, "deepregression")
expect_true(!any(is.nan(unlist(coef(mod)))))
expect_true(!any(is.nan(fitted(mod))))
suppressWarnings(res <- mod %>% predict(data))
expect_true(is.numeric(res))
expect_true(!any(is.nan(res)))
}
}) |
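## Read the cross-table in gargsVSclass.csv and display it with table.value()
## (see the library() calls below for the assumed packages).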
library(grid)
library(adegraphics)  # assumed: table.value() with p*-style parameters comes from 'adegraphics'
t <- read.csv("gargsVSclass.csv", sep = ",", header = TRUE, check.names = FALSE)
row.names(t) <- t[, 1]
t <- t[, -1]
t[is.na(t)] <- 0
table.value(t, plegend.drawKey = FALSE, ppoints.cex = 0.2, symbol = "circle", axis.text = list(cex = 0.7), pgrid.draw = TRUE,
ptable.margin = list(bottom = 5, left = 15, top = 15, right = 5),
ptable.x = list(tck = 5), ptable.y = list(tck = 5, srt = 45, pos = "left"))
|
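## rdrm: simulate 'nosim' dose-response data sets. Dose values are taken from 'xerror'
## when it is numeric, otherwise drawn from the distribution named by 'xerror'/'xpar';
## mean responses come from fct$fct() at parameters 'mpar', and responses are generated
## either as binomial counts (yerror = "rbinom", with sizes 'ypar') or by adding noise
## from the distribution named by 'yerror'/'ypar'. The signature appears to mirror drc::rdrm.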
"rdrm" <- function(nosim, fct, mpar, xerror, xpar = 1, yerror = "rnorm", ypar = c(0, 1),
onlyY = FALSE)
{
if (is.numeric(xerror))
{
x <- xerror
} else {
evalStr1 <- paste(xerror, "(", paste(xpar, sep = ",", collapse = ","), ")")
x <- eval(parse(text = evalStr1))
}
lenx <- length(x)
x <- sort(x)
x <- rep(x, nosim)
xMat <- matrix(x, nosim, lenx, byrow = TRUE)
meanVec <- fct$fct(x, matrix(mpar, lenx*nosim, length(mpar), byrow = TRUE))
if (yerror == "rbinom")
{
if (length(ypar) == 1)
{
ypar <- rep(ypar, lenx*nosim)
wMat <- matrix(ypar, nosim, lenx, byrow = TRUE)
} else {
wMat <- matrix(ypar, nosim, lenx, byrow = TRUE)
}
evalStr2 <- paste(deparse(substitute(yerror)), "(", lenx*nosim, ", ypar, meanVec)")
errorVec <- eval(parse(text = evalStr2))
yMat <- matrix(errorVec, nosim, lenx, byrow = TRUE)
if (onlyY)
{
return(list(y = yMat))
} else {
return(list(x = xMat, w = wMat, y = yMat))
}
} else {
evalStr2 <- paste(yerror, "(", lenx*nosim, ",",
paste(ypar, sep = ",", collapse = ","), ")")
errorVec <- eval(parse(text = evalStr2))
yMat <- matrix(meanVec, nosim, lenx, byrow = TRUE) + errorVec
if (onlyY)
{
return(list(y = yMat))
} else {
return(list(x = xMat, y = yMat))
}
}
} |
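## parJagsModel: initialize JAGS models in parallel on a cluster (one chain per worker)
## using dclone's parDosa() machinery; model, data and initial values are shipped to the
## workers via the 'cldata' environment and the resulting model objects are stored on
## each worker under 'name'.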
parJagsModel <-
function(cl, name, file, data = sys.frame(sys.parent()),
inits, n.chains = 1, n.adapt = 1000, quiet = FALSE)
{
requireNamespace("rjags")
cl <- evalParallelArgument(cl, quit=TRUE)
if (!inherits(cl, "cluster"))
stop("cl must be of class 'cluster'")
clusterEvalQ(cl, requireNamespace("rjags"))
if (length(cl) < n.chains)
stop("length(cl) < n.chains")
if (is.function(file) || inherits(file, "custommodel")) {
if (is.function(file))
file <- match.fun(file)
if (inherits(cl, "SOCKcluster")) {
file <- write.jags.model(file)
on.exit(try(clean.jags.model(file)))
}
}
n.clones <- dclone::nclones.list(as.list(data))
if ("lecuyer" %in% list.modules()) {
mod <- parListModules(cl)
for (i in 1:length(mod)) {
if (!("lecuyer" %in% mod[[i]]))
stop("'lecuyer' module must be loaded on workers")
}
}
inits <- if (missing(inits))
parallel.inits(n.chains=n.chains) else parallel.inits(inits, n.chains)
if (!is.character(name))
name <- as.character(name)
    cldata <- list(file=file, data=as.list(data), inits=inits,
        n.adapt=n.adapt, name=name, quiet=quiet,
        n.clones=n.clones)
jagsparallel <- function(i) {
cldata <- pullDcloneEnv("cldata", type = "model")
res <- rjags::jags.model(file=cldata$file, data=cldata$data,
inits=cldata$inits[[i]], n.chains=1,
n.adapt=cldata$n.adapt, quiet=cldata$quiet)
if (!is.null(n.clones) && n.clones > 1) {
attr(res, "n.clones") <- n.clones
}
pushDcloneEnv(cldata$name, res, type = "results")
NULL
}
dir <- if (inherits(cl, "SOCKcluster"))
getwd() else NULL
parDosa(cl, 1:n.chains, jagsparallel, cldata,
lib = c("dclone", "rjags"), balancing = "none", size = 1,
rng.type = getOption("dcoptions")$RNG,
cleanup = TRUE,
        dir = dir,
unload=FALSE)
} |
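## Windows bitmap devices: .geometry() converts width/height from the requested units to
## pixels using 'res'; png(), bmp(), jpeg() and tiff() then open either a cairo-based
## device (C_devCairo) or the native Windows GDI device (C_devga), depending on 'type'.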
.geometry <- function(width, height, units, res)
{
units <- match.arg(units, c("in", "px", "cm", "mm"))
if(units != "px" && is.na(res))
stop("'res' must be specified unless 'units = \"px\"'")
width <- switch(units,
"in" = res,
"cm" = res/2.54,
"mm" = res/25.4,
"px" = 1) * width
height <- switch(units,
"in" = res,
"cm" = res/2.54,
"mm" = res/25.4,
"px" = 1) * height
list(width = width, height = height)
}
png <-
function(filename = "Rplot%03d.png",
width = 480, height = 480, units = "px", pointsize = 12,
bg = "white", res = NA, family = "sans",
restoreConsole = TRUE, type = c("windows", "cairo", "cairo-png"),
antialias = c("default", "none", "cleartype", "gray", "subpixel"),
symbolfamily="default")
{
if(!checkIntFormat(filename)) stop("invalid 'filename'")
g <- .geometry(width, height, units, res)
if(match.arg(type) == "cairo") {
antialias <- match(match.arg(antialias), aa.cairo)
invisible(.External(C_devCairo, filename, 2L,
g$width, g$height, pointsize,
bg, res, antialias, 100L,
if(nzchar(family)) family else "sans", 300,
chooseSymbolFont(symbolfamily)))
} else if(match.arg(type) == "cairo-png") {
antialias <- match(match.arg(antialias), aa.cairo)
invisible(.External(C_devCairo, filename, 5L,
g$width, g$height, pointsize,
bg, res, antialias, 100L,
if(nzchar(family)) family else "sans", 300,
chooseSymbolFont(symbolfamily)))
} else {
new <- if (!missing(antialias)) {
list(bitmap.aa.win = match.arg(antialias, aa.win))
} else list()
antialias <-
check.options(new = new, envir = .WindowsEnv,
name.opt = ".Windows.Options",
reset = FALSE, assign.opt = FALSE)$bitmap.aa.win
invisible(.External(C_devga, paste0("png:", filename),
g$width, g$height, pointsize, FALSE, 1L,
NA_real_, NA_real_, bg, 1,
as.integer(res), NA_integer_, FALSE, .PSenv, NA,
restoreConsole, "", FALSE, TRUE,
family, match(antialias, aa.win)))
}
}
bmp <-
function(filename = "Rplot%03d.bmp",
width = 480, height = 480, units = "px", pointsize = 12,
bg = "white", res = NA, family = "sans",
restoreConsole = TRUE, type = c("windows", "cairo"),
antialias = c("default", "none", "cleartype", "gray", "subpixel"),
symbolfamily="default")
{
if(!checkIntFormat(filename)) stop("invalid 'filename'")
g <- .geometry(width, height, units, res)
if(match.arg(type) == "cairo") {
antialias <- match(match.arg(antialias), aa.cairo)
invisible(.External(C_devCairo, filename,
9L, g$width, g$height, pointsize,
bg, res, antialias, 100L,
if(nzchar(family)) family else "sans", 300,
chooseSymbolFont(symbolfamily)))
} else {
new <- if (!missing(antialias)) {
list(bitmap.aa.win = match.arg(antialias, aa.win))
} else list()
antialias <-
check.options(new = new, envir = .WindowsEnv,
name.opt = ".Windows.Options",
reset = FALSE, assign.opt = FALSE)$bitmap.aa.win
invisible(.External(C_devga, paste0("bmp:", filename),
g$width, g$height, pointsize, FALSE, 1L,
NA_real_, NA_real_, bg, 1,
as.integer(res), NA_integer_, FALSE, .PSenv, NA,
restoreConsole, "", FALSE, TRUE,
family, match(antialias, aa.win)))
}
}
jpeg <-
function(filename = "Rplot%03d.jpg",
width = 480, height = 480, units = "px", pointsize = 12,
quality = 75, bg = "white", res = NA, family = "sans",
restoreConsole = TRUE, type = c("windows", "cairo"),
antialias = c("default", "none", "cleartype", "gray", "subpixel"),
symbolfamily="default")
{
if(!checkIntFormat(filename)) stop("invalid 'filename'")
g <- .geometry(width, height, units, res)
if(match.arg(type) == "cairo") {
antialias <- match(match.arg(antialias), aa.cairo)
invisible(.External(C_devCairo, filename, 3L, g$width, g$height, pointsize,
bg, res, antialias, quality,
if(nzchar(family)) family else "sans", 300,
chooseSymbolFont(symbolfamily)))
} else {
new <- if (!missing(antialias)) {
list(bitmap.aa.win = match.arg(antialias, aa.win))
} else list()
antialias <-
check.options(new = new, envir = .WindowsEnv,
name.opt = ".Windows.Options",
reset = FALSE, assign.opt = FALSE)$bitmap.aa.win
invisible(.External(C_devga,
paste0("jpeg:", quality, ":",filename),
g$width, g$height, pointsize, FALSE, 1L,
NA_real_, NA_real_, bg, 1,
as.integer(res), NA_integer_, FALSE, .PSenv, NA,
restoreConsole, "", FALSE, TRUE,
family, match(antialias, aa.win)))
}
}
tiff <-
function(filename = "Rplot%03d.tif",
width = 480, height = 480, units = "px", pointsize = 12,
compression = c("none", "rle", "lzw", "jpeg", "zip",
"lzw+p", "zip+p"),
bg = "white", res = NA, family = "sans",
restoreConsole = TRUE, type = c("windows", "cairo"),
antialias = c("default", "none", "cleartype", "gray", "subpixel"),
symbolfamily="default")
{
if(!checkIntFormat(filename)) stop("invalid 'filename'")
g <- .geometry(width, height, units, res)
comp <-
switch(match.arg(compression),
"none" = 1L, "rle" = 2L, "lzw" = 5L, "jpeg" = 7L, "zip" = 8L,
"lzw+p" = 15L, "zip+p" = 18L)
if(match.arg(type) == "cairo") {
antialias <- match(match.arg(antialias), aa.cairo)
invisible(.External(C_devCairo, filename, 8L,
g$width, g$height, pointsize,
bg, res, antialias, comp,
if(nzchar(family)) family else "sans", 300,
chooseSymbolFont(symbolfamily)))
} else {
new <- if (!missing(antialias)) {
list(bitmap.aa.win = match.arg(antialias, aa.win))
} else list()
antialias <-
check.options(new = new, envir = .WindowsEnv,
name.opt = ".Windows.Options",
reset = FALSE, assign.opt = FALSE)$bitmap.aa.win
invisible(.External(C_devga,
paste0("tiff:", comp, ":", filename),
g$width, g$height, pointsize, FALSE, 1L,
NA_real_, NA_real_, bg, 1,
as.integer(res), NA_integer_, FALSE, .PSenv, NA,
restoreConsole, "", FALSE, TRUE,
family, match(antialias, aa.win)))
}
}
grSoftVersion <- function() {
bm <- .Call(C_bmVersion)
if(nzchar(bm[3L])) bm[3L] <- strsplit(bm[3L], "\n")[[1L]][1L]
c(cairo = cairoVersion(), cairoFT = cairoFT(), pango = pangoVersion(), bm)
}
chooseSymbolFont <- function(family) {
if (family == "default") {
if (grSoftVersion()["cairoFT"] == "yes") {
cairoSymbolFont("Standard Symbols L")
} else {
cairoSymbolFont("Symbol")
}
} else {
checkSymbolFont(family)
}
} |
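## fit_hbd_pdr_on_grid: fit a homogeneous birth-death model to a time-calibrated tree by
## maximizing the likelihood of its branching ages with respect to the pulled
## diversification rate (PDR) defined on an age grid and the product rholambda0
## (sampling fraction times the speciation rate at age0); confidence intervals can be
## obtained via parametric bootstrapping.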
fit_hbd_pdr_on_grid = function( tree,
oldest_age = NULL,
age0 = 0,
age_grid = NULL,
min_PDR = -Inf,
max_PDR = +Inf,
min_rholambda0 = 1e-10,
max_rholambda0 = +Inf,
guess_PDR = NULL,
guess_rholambda0 = NULL,
fixed_PDR = NULL,
fixed_rholambda0 = NULL,
splines_degree = 1,
condition = "auto",
relative_dt = 1e-3,
Ntrials = 1,
Nbootstraps = 0,
Ntrials_per_bootstrap = NULL,
Nthreads = 1,
max_model_runtime = NULL,
fit_control = list(),
verbose = FALSE,
verbose_prefix = ""){
if(verbose) cat(sprintf("%sChecking input variables..\n",verbose_prefix))
original_Ntips = length(tree$tip.label)
if(tree$Nnode<1) return(list(success = FALSE, error="Input tree is too small"));
if(age0<0) return(list(success = FALSE, error="age0 must be non-negative"));
root_age = get_tree_span(tree)$max_distance
if(is.null(oldest_age)) oldest_age = root_age;
if(root_age<age0) return(list(success=FALSE, error=sprintf("age0 (%g) is older than the root age (%g)",age0,root_age)));
if(oldest_age<age0) return(list(success=FALSE, error=sprintf("age0 (%g) is older than the oldest considered age (%g)",age0,oldest_age)));
if((!is.null(age_grid)) && (length(age_grid)>1) && ((age_grid[1]>age0) || (tail(age_grid,1)<oldest_age))) return(list(success = FALSE, error=sprintf("Provided age-grid range (%g - %g) does not cover entire required age range (%g - %g)",age_grid[1],tail(age_grid,1),age0,oldest_age)));
if((!(condition %in% c("crown","stem","auto"))) && (!startsWith(condition,"stem")) && (!startsWith(condition,"crown"))) return(list(success = FALSE, error = sprintf("Invalid condition '%s': Expected 'stem', 'stem2', 'stem<N>', 'crown', 'crown<N>', or 'auto'.",condition)));
if(condition=="auto") condition = (if(abs(oldest_age-root_age)<=1e-10*root_age) "crown" else (if(oldest_age>root_age) "stem2" else "stem"))
if(age0>0){
if(verbose) cat(sprintf("%sTrimming tree at age0=%g..\n",verbose_prefix,age0))
tree = trim_tree_at_height(tree,height=root_age-age0)$tree
if(tree$Nnode<1) return(list(success = FALSE, error=sprintf("Tree is too small after trimming at age0 (%g)",age0)));
if(!is.null(oldest_age)) oldest_age = oldest_age - age0
if(!is.null(age_grid)) age_grid = age_grid - age0
root_age = root_age - age0
}
if(verbose) cat(sprintf("%sPrecomputing some stats about the tree..\n",verbose_prefix))
LTT0 = length(tree$tip.label);
lineage_counter = count_lineages_through_time(tree, Ntimes=max(3,log2(LTT0)), include_slopes=TRUE, ultrametric=TRUE)
sorted_node_ages = sort(get_all_branching_ages(tree));
root_age = tail(sorted_node_ages,1);
age_epsilon = 1e-4*mean(tree$edge.length);
if(Ntrials<1) return(list(success = FALSE, error = sprintf("Ntrials must be at least 1")))
if(is.null(age_grid)){
if((!is.null(guess_PDR)) && (length(guess_PDR)>1)) return(list(success = FALSE, error = sprintf("Invalid number of guessed PDRs; since no age grid was provided, you must provide a single (constant) guess_PDR or none at all")));
age_grid = 0
NG = 1
}else{
NG = length(age_grid)
if((!is.null(guess_PDR)) && (length(guess_PDR)!=1) && (length(guess_PDR)!=NG)) return(list(success = FALSE, error = sprintf("Invalid number of guessed PDRs (%d); since an age grid of size %d was provided, you must either provide one or %d PDRs",length(guess_PDR),NG,NG)));
if((length(age_grid)>1) && (age_grid[NG]>oldest_age-1e-5*(age_grid[NG]-age_grid[NG-1]))) age_grid[NG] = max(age_grid[NG],oldest_age);
if((length(age_grid)>1) && (age_grid[1]<1e-5*(age_grid[2]-age_grid[1]))) age_grid[1] = min(age_grid[1],0);
}
if(is.null(max_model_runtime)) max_model_runtime = 0;
	if(!(splines_degree %in% c(0,1,2,3))) return(list(success = FALSE, error = sprintf("Invalid splines_degree: Expected one of 0,1,2,3.")));
if(NG==1) splines_degree = 1;
if(verbose) cat(sprintf("%sPreparing for fitting..\n",verbose_prefix))
min_rholambda0 = max(0,min_rholambda0);
max_rholambda0 = max(0,max_rholambda0);
if(length(min_PDR)==1) min_PDR = rep(min_PDR,times=NG);
if(length(max_PDR)==1) max_PDR = rep(max_PDR,times=NG);
if(is.null(guess_rholambda0)) guess_rholambda0 = NA;
if(is.null(fixed_rholambda0)) fixed_rholambda0 = NA;
if(is.null(guess_PDR)){
guess_PDR = rep(NA,times=NG);
}else if(length(guess_PDR)==1){
guess_PDR = rep(guess_PDR,times=NG);
}
if(is.null(fixed_PDR)){
fixed_PDR = rep(NA,times=NG);
}else if(length(fixed_PDR)==1){
fixed_PDR = rep(fixed_PDR,times=NG);
}
if((!is.na(fixed_rholambda0)) && ((fixed_rholambda0<min_rholambda0) || (fixed_rholambda0>max_rholambda0))){
return(list(success = FALSE, error=sprintf("Fixed rholambda0 (%g) is outside of the requested bounds (%g - %g)",fixed_rholambda0,min_rholambda0,max_rholambda0)));
}
if(any(fixed_PDR[!is.na(fixed_PDR)]<min_PDR[!is.na(fixed_PDR)]) || any(fixed_PDR[!is.na(fixed_PDR)]>max_PDR[!is.na(fixed_PDR)])){
return(list(success = FALSE, error=sprintf("Some fixed PDRs are outside of the requested bounds")));
}
default_guess_PDR = mean(lineage_counter$relative_slopes);
guess_PDR[is.na(guess_PDR)] = default_guess_PDR;
if(is.na(guess_rholambda0)) guess_rholambda0 = tail(lineage_counter$relative_slopes[lineage_counter$relative_slopes>0],1)
if(is.null(guess_rholambda0) || (length(guess_rholambda0)==0) || (!is.finite(guess_rholambda0)) || (guess_rholambda0==0)) guess_rholambda0 = log(LTT0)/root_age
guess_PDR = pmin(max_PDR, pmax(min_PDR, guess_PDR));
guess_rholambda0 = min(max_rholambda0, max(min_rholambda0, guess_rholambda0))
fixed_param_values = c(fixed_PDR, fixed_rholambda0);
fitted_params = which(is.na(fixed_param_values))
fixed_params = which(!is.na(fixed_param_values))
guess_param_values = c(guess_PDR, guess_rholambda0);
guess_param_values[fixed_params] = fixed_param_values[fixed_params]
min_param_values = c(min_PDR,min_rholambda0);
max_param_values = c(max_PDR,max_rholambda0);
NP = length(fixed_param_values)
NFP = length(fitted_params);
scale_PDR = abs(guess_PDR); scale_PDR[scale_PDR==0] = mean(scale_PDR);
scale_rholambda0 = abs(guess_rholambda0);
if(scale_rholambda0==0) scale_rholambda0 = log2(LTT0)/root_age;
param_scales = c(scale_PDR,scale_rholambda0);
if(is.null(fit_control)) fit_control = list()
if(is.null(fit_control$step.min)) fit_control$step.min = 0.001
if(is.null(fit_control$x.tol)) fit_control$x.tol = 1e-8
if(is.null(fit_control$iter.max)) fit_control$iter.max = 1000
if(is.null(fit_control$eval.max)) fit_control$eval.max = 2 * fit_control$iter.max * NFP
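	# Objective: negative log-likelihood as a function of the fitted (rescaled) parameters.
	# Fixed parameters are substituted back in, and a constant PDR is expanded to a
	# two-point grid spanning [0, oldest_age] when no age grid was provided.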
objective_function = function(fparam_values){
param_values = fixed_param_values; param_values[fitted_params] = fparam_values * param_scales[fitted_params];
if(any(is.nan(param_values)) || any(is.infinite(param_values))) return(Inf);
PDRs = param_values[1:NG];
rholambda0 = param_values[NG+1];
if(length(age_grid)==1){
input_age_grid = c(0,oldest_age);
input_PDRs = c(PDRs, PDRs);
}else{
input_age_grid = age_grid;
input_PDRs = PDRs
}
results = HBD_PDR_loglikelihood_CPP(branching_ages = sorted_node_ages,
oldest_age = oldest_age,
rholambda0 = rholambda0,
age_grid = input_age_grid,
PDRs = input_PDRs,
splines_degree = splines_degree,
condition = condition,
relative_dt = relative_dt,
runtime_out_seconds = max_model_runtime,
diff_PDR = numeric(),
diff_PDR_degree = 0)
if(!results$success) return(Inf)
LL = results$loglikelihood
if(is.na(LL) || is.nan(LL)) return(Inf)
return(-LL)
}
fitted_grid_params = fitted_params[fitted_params!=(NG+1)]
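	# Analytical gradient, only used when splines_degree==1: HBD_PDR_loglikelihood_CPP
	# returns dLL/dPDR and dLL/drholambda0 when given the derivatives of the PDR curve
	# with respect to the fitted grid values (diff_PDR).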
gradient_function = function(fparam_values){
if(splines_degree!=1) return(NaN);
param_values = fixed_param_values; param_values[fitted_params] = fparam_values * param_scales[fitted_params];
if(any(is.nan(param_values)) || any(is.infinite(param_values))) return(Inf);
PDRs = param_values[1:NG];
rholambda0 = param_values[NG+1];
if(NG==1){
input_age_grid = c(0,oldest_age);
input_PDRs = c(PDRs, PDRs);
}else{
input_age_grid = age_grid;
input_PDRs = PDRs
}
diff_PDR_degree = 1
if(NG==1){
diff_PDR = (if(is.na(fixed_PDR)) c(1,0,1,0) else numeric())
}else{
diff_PDR_all = derivatives_of_grid_curve_CPP(Xgrid=age_grid, Ygrid=PDRs)
diff_PDR = unlist(lapply(fitted_grid_params, FUN=function(p) diff_PDR_all[((NG+p-1)*NG*(diff_PDR_degree+1) + 1):((NG+p-1)*NG*(diff_PDR_degree+1) + NG*(diff_PDR_degree+1))]))
}
results = HBD_PDR_loglikelihood_CPP(branching_ages = sorted_node_ages,
oldest_age = oldest_age,
rholambda0 = rholambda0,
age_grid = input_age_grid,
PDRs = input_PDRs,
splines_degree = splines_degree,
condition = condition,
relative_dt = relative_dt,
runtime_out_seconds = max_model_runtime,
diff_PDR = diff_PDR,
diff_PDR_degree = diff_PDR_degree);
if(!results$success) return(rep(Inf,times=NFP));
gradient_full = rep(NA, times=NP)
gradient_full[NG+1] = results$dLL_drholambda0
gradient_full[fitted_grid_params] = results$dLL_dPDR
gradient = gradient_full[fitted_params] * param_scales[fitted_params]
return(-gradient);
}
fit_single_trial = function(trial){
scales = param_scales[fitted_params]
lower_bounds = min_param_values[fitted_params]
upper_bounds = max_param_values[fitted_params]
if(trial==1){
start_values = guess_param_values[fitted_params]
}else{
start_values = get_random_params(defaults=guess_param_values[fitted_params], lower_bounds=lower_bounds, upper_bounds=upper_bounds, scales=scales, orders_of_magnitude=4)
}
start_LL = objective_function(start_values/scales)
if(!is.finite(start_LL)) return(list(objective_value=NA, fparam_values = rep(NA,times=NFP), converged=FALSE, Niterations=0, Nevaluations=1));
if(is.null(fit_control$rel.tol)) fit_control$rel.tol = max(1e-30,min(1e-5,0.0001/abs(start_LL)))
fit = stats::nlminb(start_values/scales,
objective = objective_function,
gradient = (if(splines_degree==1) gradient_function else NULL),
lower = lower_bounds/scales,
upper = upper_bounds/scales,
control = fit_control)
return(list(objective_value=fit$objective, fparam_values = fit$par*scales, converged=(fit$convergence==0), Niterations=fit$iterations, Nevaluations=fit$evaluations[1]));
}
if((Ntrials>1) && (Nthreads>1) && (.Platform$OS.type!="windows")){
if(verbose) cat(sprintf("%sFitting %d model parameters (%d trials, parallelized)..\n",verbose_prefix,NFP,Ntrials))
fits = parallel::mclapply( 1:Ntrials,
FUN = function(trial) fit_single_trial(trial),
mc.cores = min(Nthreads, Ntrials),
mc.preschedule = FALSE,
mc.cleanup = TRUE);
}else{
if(verbose) cat(sprintf("%sFitting %d model parameters (%s)..\n",verbose_prefix,NFP,(if(Ntrials==1) "1 trial" else sprintf("%d trials",Ntrials))))
fits = sapply(1:Ntrials,function(x) NULL)
for(trial in 1:Ntrials){
fits[[trial]] = fit_single_trial(trial)
}
}
objective_values = unlist_with_nulls(sapply(1:Ntrials, function(trial) fits[[trial]]$objective_value))
valids = which((!is.na(objective_values)) & (!is.nan(objective_values)) & (!is.null(objective_values)) & (!is.infinite(objective_values)));
if(length(valids)==0) return(list(success=FALSE, error=sprintf("Fitting failed for all trials")));
best = valids[which.min(sapply(valids, function(i) objective_values[i]))]
objective_value = -fits[[best]]$objective_value;
loglikelihood = objective_value
fitted_param_values = fixed_param_values; fitted_param_values[fitted_params] = fits[[best]]$fparam_values;
fitted_PDR = fitted_param_values[1:NG]
fitted_rholambda0 = fitted_param_values[NG+1]
if(is.null(objective_value) || any(is.na(fitted_param_values)) || any(is.nan(fitted_param_values))) return(list(success=FALSE, error=sprintf("Some fitted parameters are NaN")));
age_grid = age_grid + age0
oldest_age = oldest_age + age0
root_age = root_age + age0
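	# Parametric bootstrapping: convert the fitted PDR into a pulled speciation rate (PSR),
	# simulate trees with the same number of tips under that PSR, refit each simulated
	# tree, and summarize the refitted parameters into standard errors and CI quantiles.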
if(Nbootstraps>0){
if(verbose) cat(sprintf("%sEstimating confidence intervals using %d parametric bootstraps..\n",verbose_prefix,Nbootstraps))
if(verbose) cat(sprintf("%s Calculating pulled speciation rate from PDR, for simulating trees..\n",verbose_prefix))
sim_age_grid = age_grid
sim_PDR = fitted_PDR
if(tail(sim_age_grid,1)<root_age){
sim_age_grid = c(sim_age_grid, root_age*1.01)
sim_PDR = c(sim_PDR, tail(sim_PDR,1));
}
if(sim_age_grid[1]>0){
sim_age_grid = c(0,sim_age_grid)
sim_PDR = c(sim_PDR[1],sim_PDR)
}
sim = get_PSR_from_PDR_HBD( age0 = age0,
oldest_age = tail(sim_age_grid,1),
age_grid = sim_age_grid,
PDR = sim_PDR,
rholambda0 = fitted_rholambda0,
splines_degree = splines_degree,
relative_dt = relative_dt,
include_nLTT0 = TRUE);
if(!sim$success) return(list(success=FALSE, error=sprintf("Bootstrapping failed: Could not calculate PSR corresponding to fitted PDR: %s",sim$error), age_grid=age_grid, fitted_PDR=fitted_PDR, fitted_rholambda0=fitted_rholambda0, loglikelihood=loglikelihood));
if(is.null(Ntrials_per_bootstrap)) Ntrials_per_bootstrap = max(1,Ntrials)
bootstrap_params = matrix(NA,nrow=Nbootstraps,ncol=NG+1)
NBsucceeded = 0
for(b in 1:Nbootstraps){
if(verbose) cat(sprintf("%s Bootstrap
bootstrap = castor::generate_tree_hbd_reverse( Ntips = LTT0/sim$nLTT0,
crown_age = root_age,
age_grid = sim$ages,
PSR = sim$PSR,
splines_degree = 1,
relative_dt = relative_dt)
if(!bootstrap$success) return(list(success=FALSE, error=sprintf("Bootstrapping failed: Could not generate tree for the fitted PDR: %s",bootstrap$error), age_grid=age_grid, fitted_PDR=fitted_PDR, fitted_rholambda0=fitted_rholambda0, loglikelihood=loglikelihood));
bootstrap_tree = bootstrap$trees[[1]]
fit = fit_hbd_pdr_on_grid( tree = bootstrap_tree,
oldest_age = oldest_age,
age0 = age0,
age_grid = age_grid,
min_PDR = min_PDR,
max_PDR = max_PDR,
min_rholambda0 = min_rholambda0,
max_rholambda0 = max_rholambda0,
guess_PDR = guess_PDR,
guess_rholambda0 = guess_rholambda0,
fixed_PDR = fixed_PDR,
fixed_rholambda0 = fixed_rholambda0,
splines_degree = 1,
condition = condition,
relative_dt = relative_dt,
Ntrials = Ntrials_per_bootstrap,
Nbootstraps = 0,
Nthreads = Nthreads,
max_model_runtime = max_model_runtime,
fit_control = fit_control,
verbose = verbose,
verbose_prefix = paste0(verbose_prefix," "))
if(!fit$success){
if(verbose) cat(sprintf("%s WARNING: Fitting failed for this bootstrap: %s\n",verbose_prefix,fit$error))
}else{
bootstrap_params[b,] = c(fit$fitted_PDR, fit$fitted_rholambda0)
NBsucceeded = NBsucceeded + 1
}
}
standard_errors_flat = sqrt(pmax(0, colMeans(bootstrap_params^2, na.rm=TRUE) - colMeans(bootstrap_params, na.rm=TRUE)^2))
standard_errors = list(PDR=standard_errors_flat[1:NG], rholambda0=standard_errors_flat[NG+1])
quantiles = sapply(1:ncol(bootstrap_params), FUN=function(p) quantile(bootstrap_params[,p], probs=c(0.25, 0.75, 0.025, 0.975, 0.5), na.rm=TRUE, type=8))
CI50lower = list(PDR=quantiles[1,1:NG], rholambda0=quantiles[1,NG+1])
CI50upper = list(PDR=quantiles[2,1:NG], rholambda0=quantiles[2,NG+1])
CI95lower = list(PDR=quantiles[3,1:NG], rholambda0=quantiles[3,NG+1])
CI95upper = list(PDR=quantiles[4,1:NG], rholambda0=quantiles[4,NG+1])
medians = list(PDR=quantiles[5,1:NG], rholambda0=quantiles[5,NG+1])
bootstrap_estimates = list(PDR=bootstrap_params[,1:NG], rholambda0=bootstrap_params[,NG+1])
}
return(list(success = TRUE,
objective_value = objective_value,
objective_name = "loglikelihood",
loglikelihood = loglikelihood,
fitted_PDR = fitted_PDR,
fitted_rholambda0 = fitted_rholambda0,
guess_PDR = guess_param_values[1:NG],
guess_rholambda0 = guess_param_values[NG+1],
age_grid = age_grid,
NFP = NFP,
AIC = 2*NFP - 2*loglikelihood,
BIC = log(sum((sorted_node_ages<=oldest_age) & (sorted_node_ages>=age0)))*NFP - 2*loglikelihood,
converged = fits[[best]]$converged,
Niterations = fits[[best]]$Niterations,
Nevaluations = fits[[best]]$Nevaluations,
bootstrap_estimates = (if(Nbootstraps>0) bootstrap_estimates else NULL),
standard_errors = (if(Nbootstraps>0) standard_errors else NULL),
medians = (if(Nbootstraps>0) medians else NULL),
CI50lower = (if(Nbootstraps>0) CI50lower else NULL),
CI50upper = (if(Nbootstraps>0) CI50upper else NULL),
CI95lower = (if(Nbootstraps>0) CI95lower else NULL),
CI95upper = (if(Nbootstraps>0) CI95upper else NULL)))
} |
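## yuima helpers: drift.term(), diffusion.term() and measure.term() evaluate the model's
## symbolic drift, diffusion and jump coefficients at the observed states in env$X for
## the supplied parameter values, returning an n x d matrix (drift) or d x r x n arrays.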
drift.term <- function(yuima, theta, env){
r.size <- yuima@[email protected]
d.size <- yuima@[email protected]
modelstate <- yuima@[email protected]
DRIFT <- yuima@model@drift
n <- dim(env$X)[1]
drift <- matrix(0,n,d.size)
tmp.env <- new.env(parent = env)
assign(yuima@[email protected], env$time, envir=tmp.env)
for(i in 1:length(theta)){
assign(names(theta)[i],theta[[i]], envir=tmp.env)
}
for(d in 1:d.size){
assign(modelstate[d], env$X[,d], envir=tmp.env)
}
for(d in 1:d.size){
drift[,d] <- eval(DRIFT[d], envir=tmp.env)
}
return(drift)
}
diffusion.term <- function(yuima, theta, env){
r.size <- yuima@[email protected]
d.size <- yuima@[email protected]
modelstate <- yuima@[email protected]
DIFFUSION <- yuima@model@diffusion
n <- dim(env$X)[1]
tmp.env <- new.env(parent = env)
assign(yuima@[email protected], env$time, envir=tmp.env)
diff <- array(0, dim=c(d.size, r.size, n))
for(i in 1:length(theta)){
assign(names(theta)[i],theta[[i]],envir=tmp.env)
}
for(d in 1:d.size){
assign(modelstate[d], env$X[,d], envir=tmp.env)
}
for(r in 1:r.size){
for(d in 1:d.size){
diff[d, r, ] <- eval(DIFFUSION[[d]][r], envir=tmp.env)
}
}
return(diff)
}
measure.term <- function(yuima, theta, env){
r.size <- yuima@[email protected]
d.size <- yuima@[email protected]
modelstate <- yuima@[email protected]
n <- dim(env$X)[1]
tmp.env <- new.env(parent = env)
assign(yuima@[email protected], env$time, envir =tmp.env)
JUMP <- yuima@[email protected]
measure <- array(0, dim=c(d.size, r.size, n))
for(i in 1:length(theta)){
assign(names(theta)[i],theta[[i]],envir=tmp.env)
}
for(d in 1:d.size){
assign(modelstate[d], env$X[,d],envir=tmp.env)
}
for(r in 1:r.size){
if(d.size==1){
measure[1,r,] <- eval(JUMP[[r]],envir=tmp.env)
}else{
for(d in 1:d.size){
measure[d,r,] <- eval(JUMP[[d]][r],envir=tmp.env)
}
}
}
return(measure)
}
is.Poisson <- function(obj){
if(is(obj,"yuima"))
return(is(obj@model, "yuima.poisson"))
if(is(obj,"yuima.model"))
return(is(obj, "yuima.poisson"))
return(FALSE)
}
is.CARMA <- function(obj){
if(is(obj,"yuima"))
return(is(obj@model, "yuima.carma"))
if(is(obj,"yuima.model"))
return(is(obj, "yuima.carma"))
return(FALSE)
}
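## qmle: quasi-maximum likelihood estimation for yuima models. COGARCH, PPR, CARMA and
## Poisson models are dispatched to dedicated routines or handled with extra bookkeeping;
## for generic (jump-)diffusions the drift/diffusion and jump parts are estimated in
## separate stages (see below).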
qmle <- function(yuima, start, method="L-BFGS-B", fixed = list(), print=FALSE, envir=globalenv(),
lower, upper, joint=FALSE, Est.Incr="NoIncr",aggregation=TRUE, threshold=NULL,rcpp=FALSE, ...){
if(Est.Incr=="Carma.Inc"){
Est.Incr<-"Incr"
}
if(Est.Incr=="Carma.Par"){
Est.Incr<-"NoIncr"
}
if(Est.Incr=="Carma.IncPar"){
Est.Incr<-"IncrPar"
}
if(is(yuima@model, "yuima.carma")){
NoNeg.Noise<-FALSE
cat("\nStarting qmle for carma ... \n")
}
if(is.CARMA(yuima)&& length(yuima@model@[email protected])!=0){
method<-"L-BFGS-B"
}
call <- match.call()
if( missing(yuima))
yuima.stop("yuima object is missing.")
if(is.COGARCH(yuima)){
if(missing(lower))
lower <- list()
if(missing(upper))
upper <- list()
res <- NULL
if("grideq" %in% names(as.list(call)[-(1:2)])){
res <- PseudoLogLik.COGARCH(yuima, start, method=method, fixed = list(),
lower, upper, Est.Incr, call, aggregation = aggregation, ...)
}else{
res <- PseudoLogLik.COGARCH(yuima, start, method=method, fixed = list(),
lower, upper, Est.Incr, call, grideq = FALSE, aggregation = aggregation,...)
}
return(res)
}
if(is.PPR(yuima)){
if(missing(lower))
lower <- list()
if(missing(upper))
upper <- list()
res <- quasiLogLik.PPR(yuimaPPR = yuima, parLambda = start, method=method, fixed = list(),
lower, upper, call, ...)
return(res)
}
orig.fixed <- fixed
orig.fixed.par <- names(orig.fixed)
if(is.Poisson(yuima))
threshold <- 0
if( missing(start) )
yuima.stop("Starting values for the parameters are missing.")
if(length(fixed) > 0 && !is.Poisson(yuima) && !is.CARMA(yuima) && !is.COGARCH(yuima)) {
new.yuima.list <- changeFixedParametersToConstant(yuima, fixed)
new.yuima <- new.yuima.list$new.yuima
qmle.env <- new.yuima.list$env
new.start = start[!is.element(names(start), names(fixed))]
new.lower = lower[!is.element(names(lower), names(fixed))]
new.upper = upper[!is.element(names(upper), names(fixed))]
res <- qmle(new.yuima, start = new.start, method = method, fixed = list(), print = print, envir = qmle.env,
lower = new.lower, upper = new.upper, joint = joint, Est.Incr = Est.Incr, aggregation = aggregation, threshold = threshold, rcpp = rcpp, ...)
res@call <- match.call()
res@model <- yuima@model
fixed.res <- fixed
mode(fixed.res) <- "numeric"
res@fullcoef <- c(res@fullcoef,fixed.res)
res@fixed <- fixed.res
return(res)
}
yuima.nobs <- as.integer(max(unlist(lapply(get.zoo.data(yuima),length))-1,na.rm=TRUE))
diff.par <- yuima@model@parameter@diffusion
if(is.CARMA(yuima) && length(diff.par)==0
&& length(yuima@model@parameter@jump)!=0){
diff.par<-yuima@model@parameter@jump
}
if(is.CARMA(yuima) && length(yuima@model@parameter@jump)!=0){
CPlist <- c("dgamma", "dexp")
codelist <- c("rIG", "rgamma")
if(yuima@[email protected]=="CP"){
      tmp <- regexpr("\\(", yuima@model@measure$df$expr)[1]
      measurefunc <- substring(yuima@model@measure$df$expr, 1, tmp-1)
      if(!is.na(match(measurefunc,CPlist))){
        yuima.warn("carma(p,q): the qmle for a carma(p,q) driven by a Compound Poisson with non-negative random size")
        NoNeg.Noise<-TRUE
        if((yuima@model@info@q+1)==(yuima@model@info@p)){
start[["mean.noise"]]<-1
}
}
}
if(yuima@[email protected]=="code"){
      if(inherits(yuima@model@measure$df, "yuima.law")){
measurefunc <- "yuima.law"
}
else{
        tmp <- regexpr("\\(", yuima@model@measure$df$expr)[1]
        measurefunc <- substring(yuima@model@measure$df$expr, 1, tmp-1)
      }
      if(!is.na(match(measurefunc,codelist))){
        yuima.warn("carma(p,q): the qmle for a carma(p,q) driven by a non-negative Levy process will be implemented as soon as possible")
        NoNeg.Noise<-TRUE
        if((yuima@model@info@q+1)==(yuima@model@info@p)){
start[["mean.noise"]]<-1
}
}
}
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])>0){
yuima.warn("carma(p,q): the case of lin.par will be implemented as soon as")
return(NULL)
}
drift.par <- yuima@model@parameter@drift
if(is.CARMA(yuima)){
xinit.par <- yuima@model@parameter@xinit
}
jump.par <- NULL
if(is.CARMA(yuima)){
jump.par <- yuima@model@parameter@jump
measure.par <- yuima@model@parameter@measure
} else {
if(length(yuima@model@parameter@jump)!=0){
measure.par <- unique(c(yuima@model@parameter@measure,yuima@model@parameter@jump))
} else {
measure.par <- yuima@model@parameter@measure
}
}
common.par <- yuima@model@parameter@common
JointOptim <- joint
if(is.CARMA(yuima) && length(yuima@model@parameter@jump)!=0){
if(any((match(jump.par, drift.par)))){
JointOptim <- TRUE
yuima.warn("Drift and diffusion parameters must be different. Doing
joint estimation, asymptotic theory may not hold true.")
}
}
if(length(common.par)>0){
JointOptim <- TRUE
yuima.warn("Drift and diffusion parameters must be different. Doing
joint estimation, asymptotic theory may not hold true.")
}
if(!is.list(start))
yuima.stop("Argument 'start' must be of list type.")
fullcoef <- NULL
if(length(diff.par)>0)
fullcoef <- diff.par
if(length(drift.par)>0)
fullcoef <- c(fullcoef, drift.par)
if(is.CARMA(yuima) &&
(length(yuima@model@[email protected])!=0)){
fullcoef<-c(fullcoef, yuima@model@[email protected])
}
if(is.CARMA(yuima) && (NoNeg.Noise==TRUE)){
if((yuima@model@info@q+1)==yuima@model@info@p){
mean.noise<-"mean.noise"
fullcoef<-c(fullcoef, mean.noise)
}
}
fullcoef<-c(fullcoef, measure.par)
if(is.CARMA(yuima)){
if(length(yuima@model@parameter@xinit)>1){
condIniCarma<-!(yuima@model@parameter@xinit%in%fullcoef)
if(sum(condIniCarma)>0){
NamesInitial<- yuima@model@parameter@xinit[condIniCarma]
start <- as.list(unlist(start)[!names(unlist(start))%in%(NamesInitial)])
}
}
}
npar <- length(fullcoef)
fixed.par <- names(fixed)
fixed.carma=NULL
if(is.CARMA(yuima) && (length(measure.par) > 0)){
if(!missing(fixed)){
      if(any(names(fixed) %in% measure.par)){
idx.fixed.carma<-match(names(fixed),measure.par)
idx.fixed.carma<-idx.fixed.carma[!is.na(idx.fixed.carma)]
if(length(idx.fixed.carma)!=0){
fixed.carma<-as.numeric(fixed[measure.par[idx.fixed.carma]])
names(fixed.carma)<-measure.par[idx.fixed.carma]
}
}
}
upper.carma=NULL
if(!missing(upper)){
      if(any(names(upper) %in% measure.par)){
idx.upper.carma<-match(names(upper),measure.par)
idx.upper.carma<-idx.upper.carma[!is.na(idx.upper.carma)]
if(length(idx.upper.carma)!=0){
upper.carma<-as.numeric(upper[measure.par[idx.upper.carma]])
names(upper.carma)<-measure.par[idx.upper.carma]
}
}
}
lower.carma=NULL
if(!missing(lower)){
      if(any(names(lower) %in% measure.par)){
idx.lower.carma<-match(names(lower),measure.par)
idx.lower.carma<-idx.lower.carma[!is.na(idx.lower.carma)]
if(length(idx.lower.carma)!=0){
lower.carma<-as.numeric(lower[measure.par[idx.lower.carma]])
names(lower.carma)<-measure.par[idx.lower.carma]
}
}
}
for( j in c(1:length(measure.par))){
if(is.na(match(measure.par[j],names(fixed)))){
fixed.par <- c(fixed.par,measure.par[j])
fixed[measure.par[j]]<-start[measure.par[j]]
}
}
}
if (any(!(fixed.par %in% fullcoef)))
yuima.stop("Some named arguments in 'fixed' are not arguments to the supplied yuima model")
nm <- names(start)
oo <- match(nm, fullcoef)
if(any(is.na(oo)))
yuima.stop("some named arguments in 'start' are not arguments to the supplied yuima model")
start <- start[order(oo)]
nm <- names(start)
idx.diff <- match(diff.par, nm)
idx.drift <- match(drift.par, nm)
idx.measure <- match(measure.par, nm)
if(is.CARMA(yuima)){
idx.xinit <- as.integer(na.omit(match(xinit.par,nm)))
}
idx.fixed <- match(fixed.par, nm)
orig.idx.fixed <- idx.fixed
tmplower <- as.list( rep( -Inf, length(nm)))
names(tmplower) <- nm
if(!missing(lower)){
idx <- match(names(lower), names(tmplower))
if(any(is.na(idx)))
yuima.stop("names in 'lower' do not match names fo parameters")
tmplower[ idx ] <- lower
}
lower <- tmplower
tmpupper <- as.list( rep( Inf, length(nm)))
names(tmpupper) <- nm
if(!missing(upper)){
idx <- match(names(upper), names(tmpupper))
if(any(is.na(idx)))
yuima.stop("names in 'lower' do not match names fo parameters")
tmpupper[ idx ] <- upper
}
upper <- tmpupper
d.size <- yuima@[email protected]
if (is.CARMA(yuima)){
d.size <-1
}
n <- length(yuima)[1]
env <- new.env(parent = envir)
assign("X", as.matrix(onezoo(yuima)), envir=env)
assign("deltaX", matrix(0, n-1, d.size), envir=env)
assign("Cn.r", numeric(n-1), envir=env)
if(length(yuima@[email protected]) == 0)
threshold <- 0
if (is.CARMA(yuima)){
env$X<-as.matrix(env$X[,1])
env$deltaX<-as.matrix(env$deltaX[,1])
assign("time.obs",length(env$X),envir=env)
assign("p", yuima@model@info@p, envir=env)
assign("q", yuima@model@info@q, envir=env)
assign("V_inf0", matrix(diag(rep(1,env$p)),env$p,env$p), envir=env)
}
assign("time", as.numeric(index(yuima@[email protected][[1]])), envir=env)
for(t in 1:(n-1)){
env$deltaX[t,] <- env$X[t+1,] - env$X[t,]
if(!is.CARMA(yuima))
env$Cn.r[t] <- ((sqrt( env$deltaX[t,] %*% env$deltaX[t,])) <= threshold)
}
if(length(yuima@[email protected]) == 0)
env$Cn.r <- rep(1, length(env$Cn.r))
assign("h", deltat(yuima@[email protected][[1]]), envir=env)
if(length(yuima@[email protected]) > 0 && yuima@[email protected] == "CP"){
    if(inherits(yuima@model@measure$df, "yuima.law")){
args <- yuima@model@parameter@measure
}else{
args <- unlist(strsplit(suppressWarnings(sub("^.+?\\((.+)\\)", "\\1",yuima@model@measure$df$expr,perl=TRUE)), ","))
}
idx.intensity <- numeric(0)
if(length(measure.par) > 0){
for(i in 1:length(measure.par)){
if(sum(grepl(measure.par[i],yuima@model@measure$intensity)))
idx.intensity <- append(idx.intensity,i)
}
}
assign("idx.intensity", idx.intensity, envir=env)
assign("measure.var", args[1], envir=env)
}
f <- function(p) {
mycoef <- as.list(p)
if(!is.CARMA(yuima)){
if(length(c(idx.fixed,idx.measure))>0)
names(mycoef) <- nm[-c(idx.fixed,idx.measure)]
else
names(mycoef) <- nm
} else {
if(length(idx.fixed)>0)
names(mycoef) <- nm[-idx.fixed]
else
names(mycoef) <- nm
}
mycoef[fixed.par] <- fixed
minusquasilogl(yuima=yuima, param=mycoef, print=print, env,rcpp=rcpp)
}
fpsi <- function(p){
mycoef <- as.list(p)
idx.cont <- c(idx.diff,idx.drift)
if(length(c(idx.fixed,idx.cont))>0)
names(mycoef) <- nm[-c(idx.fixed,idx.cont)]
else
names(mycoef) <- nm
mycoef[fixed.par] <- fixed
minusquasipsi(yuima=yuima, param=mycoef, print=print, env=env)
}
fj <- function(p) {
mycoef <- as.list(p)
if(!is.CARMA(yuima)){
idx.fixed <- orig.idx.fixed
if(length(c(idx.fixed,idx.measure))>0)
names(mycoef) <- nm[-c(idx.fixed,idx.measure)]
else
names(mycoef) <- nm
} else {
names(mycoef) <- nm
mycoef[fixed.par] <- fixed
}
mycoef[fixed.par] <- fixed
minusquasilogl(yuima=yuima, param=mycoef, print=print, env,rcpp=rcpp)
}
oout <- NULL
HESS <- matrix(0, length(nm), length(nm))
colnames(HESS) <- nm
rownames(HESS) <- nm
HaveDriftHess <- FALSE
HaveDiffHess <- FALSE
HaveMeasHess <- FALSE
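  ## Staged optimization: with joint=TRUE all continuous parameters are optimized at once;
  ## otherwise the diffusion parameters are estimated first (drift held at its starting
  ## values), then the drift parameters with the diffusion estimates fixed, and finally
  ## (for non-CARMA models) the jump/measure parameters via minusquasipsi.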
if(length(start)){
if(JointOptim){
old.fixed <- fixed
new.start <- start
old.start <- start
if(!is.CARMA(yuima)){
if(length(c(idx.fixed,idx.measure))>0)
new.start <- start[-c(idx.fixed,idx.measure)]
}
if(length(new.start)>1){
if(is.CARMA(yuima) && (NoNeg.Noise==TRUE))
if(mean.noise %in% names(lower)){lower[mean.noise]<-10^-7}
oout <- optim(new.start, fj, method = method, hessian = TRUE, lower=lower, upper=upper)
if(length(fixed)>0)
oout$par[fixed.par]<- unlist(fixed)[fixed.par]
if(is.CARMA(yuima)){
HESS <- oout$hessian
} else {
HESS[names(new.start),names(new.start)] <- oout$hessian
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])!=0){
b0<-paste0(yuima@model@[email protected],"0",collapse="")
idx.b0<-match(b0,rownames(HESS))
HESS<-HESS[-idx.b0,]
HESS<-HESS[,-idx.b0]
}
if(is.CARMA(yuima)&& length(fixed)>0 && length(yuima@model@parameter@measure)==0){
for(i in c(1:length(fixed.par))){
indx.fixed<-match(fixed.par[i],rownames(HESS))
HESS<-HESS[-indx.fixed,]
HESS<-HESS[,-indx.fixed]
}
}
if(is.CARMA(yuima) && length(yuima@model@parameter@measure)!=0){
for(i in c(1:length(fixed.par))){
indx.fixed<-match(fixed.par[i],rownames(HESS))
HESS<-HESS[-indx.fixed,]
HESS<-HESS[,-indx.fixed]
}
if(is.CARMA(yuima) && (NoNeg.Noise==TRUE)){
idx.noise<-(match(mean.noise,rownames(HESS)))
HESS<-HESS[-idx.noise,]
HESS<-HESS[,-idx.noise]
}
}
HaveDriftHess <- TRUE
HaveDiffHess <- TRUE
} else {
opt1 <- optimize(f, lower = lower[[names(new.start)]],
upper = upper[[names(new.start)]], ...)
oout <- list(par = opt1$minimum, value = opt1$objective)
names(oout$par) <- names(new.start)
}
theta1 <- oout$par[diff.par]
theta2 <- oout$par[drift.par]
} else {
theta1 <- NULL
old.fixed <- fixed
old.start <- start
if(length(idx.diff)>0){
old.fixed <- fixed
old.start <- start
old.fixed.par <- fixed.par
new.start <- start[idx.diff]
new.fixed <- fixed
if(length(idx.drift)>0)
new.fixed[nm[idx.drift]] <- start[idx.drift]
fixed <- new.fixed
fixed.par <- names(fixed)
idx.fixed <- match(fixed.par, nm)
names(new.start) <- nm[idx.diff]
mydots <- as.list(call)[-(1:2)]
mydots$print <- NULL
mydots$rcpp <- NULL
mydots$fixed <- NULL
mydots$fn <- as.name("f")
mydots$start <- NULL
mydots$par <- unlist(new.start)
mydots$hessian <- FALSE
mydots$upper <- as.numeric(unlist( upper[ nm[idx.diff] ]))
mydots$lower <- as.numeric(unlist( lower[ nm[idx.diff] ]))
mydots$joint <- NULL
mydots$aggregation <- NULL
mydots$threshold <- NULL
mydots$envir <- NULL
mydots$Est.Incr <- NULL
mydots$print <- NULL
mydots$aggregation <- NULL
mydots$rcpp <- NULL
if((length(mydots$par)>1) | any(is.infinite(c(mydots$upper,mydots$lower)))){
mydots$method<-method
oout <- do.call(optim, args=mydots)
} else {
mydots$f <- mydots$fn
mydots$fn <- NULL
mydots$par <- NULL
mydots$hessian <- NULL
mydots$interval <- as.numeric(c(unlist(lower[diff.par]),unlist(upper[diff.par])))
mydots$lower <- NULL
mydots$upper <- NULL
mydots$method<- NULL
mydots$envir <- NULL
mydots$Est.Incr <- NULL
mydots$print <- NULL
mydots$aggregation <- NULL
mydots$rcpp <- NULL
opt1 <- do.call(optimize, args=mydots)
theta1 <- opt1$minimum
names(theta1) <- diff.par
oout <- list(par = theta1, value = opt1$objective)
}
theta1 <- oout$par
fixed <- old.fixed
start <- old.start
fixed.par <- old.fixed.par
}
theta2 <- NULL
if(length(idx.drift)>0){
fixed <- old.fixed
start <- old.start
old.fixed.par <- fixed.par
new.start <- start[idx.drift]
new.fixed <- fixed
new.fixed[names(theta1)] <- theta1
fixed <- new.fixed
fixed.par <- names(fixed)
idx.fixed <- match(fixed.par, nm)
names(new.start) <- nm[idx.drift]
mydots <- as.list(call)[-(1:2)]
mydots$print <- NULL
mydots$rcpp <- NULL
mydots$fixed <- NULL
mydots$fn <- as.name("f")
mydots$threshold <- NULL
mydots$start <- NULL
mydots$par <- unlist(new.start)
mydots$hessian <- FALSE
mydots$upper <- unlist( upper[ nm[idx.drift] ])
mydots$lower <- unlist( lower[ nm[idx.drift] ])
mydots$joint <- NULL
mydots$aggregation <- NULL
mydots$envir <- NULL
mydots$Est.Incr <- NULL
mydots$print <- NULL
mydots$aggregation <- NULL
mydots$rcpp <- NULL
if(length(mydots$par)>1 | any(is.infinite(c(mydots$upper,mydots$lower)))){
if(is.CARMA(yuima)){
if(NoNeg.Noise==TRUE){
if((yuima@model@info@q+1)==yuima@model@info@p){
            mydots$lower[mean.noise]<-10^(-7)
}
}
if(length(yuima@model@[email protected])!=0){
name_b0<-paste0(yuima@model@[email protected],"0",collapse="")
index_b0<-match(name_b0,nm)
mydots$lower[index_b0]<-1
mydots$upper[index_b0]<-1+10^(-7)
}
if (length(yuima@model@[email protected])!=0){
mydots$upper <- unlist( upper[ nm ])
mydots$lower <- unlist( lower[ nm ])
idx.tot<-unique(c(idx.drift,idx.xinit))
new.start <- start[idx.tot]
names(new.start) <- nm[idx.tot]
mydots$par <- unlist(new.start)
}
}
mydots$method <- method
oout1 <- do.call(optim, args=mydots)
} else {
mydots$f <- mydots$fn
mydots$fn <- NULL
mydots$par <- NULL
mydots$hessian <- NULL
mydots$method<-NULL
mydots$interval <- as.numeric(c(lower[drift.par],upper[drift.par]))
mydots$envir <- NULL
mydots$Est.Incr <- NULL
mydots$print <- NULL
mydots$aggregation <- NULL
mydots$rcpp <- NULL
opt1 <- do.call(optimize, args=mydots)
theta2 <- opt1$minimum
names(theta2) <- drift.par
oout1 <- list(par = theta2, value = as.numeric(opt1$objective))
}
theta2 <- oout1$par
fixed <- old.fixed
start <- old.start
old.fixed.par <- fixed.par
}
oout1 <- list(par= c(theta1, theta2))
if (! is.CARMA(yuima)){
      if(length(c(diff.par, drift.par))>0)
names(oout1$par) <- c(diff.par,drift.par)
}
oout <- oout1
}
} else {
    oout <- list(par = numeric(0L), value = f(start))
}
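  ## Helper objectives used to fill in the Hessian blocks that the staged optimization did
  ## not return: fDrift() and fDiff() profile the quasi-likelihood in the drift or diffusion
  ## parameters (the others held at 'coef'), while fMeas() does the same for the jump/measure part.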
fMeas <- function(p) {
mycoef <- as.list(p)
minusquasipsi(yuima=yuima, param=mycoef, print=print, env=env)
}
fDrift <- function(p) {
mycoef <- as.list(p)
if(! is.CARMA(yuima)){
names(mycoef) <- drift.par
mycoef[diff.par] <- coef[diff.par]
}
minusquasilogl(yuima=yuima, param=mycoef, print=print, env,rcpp=rcpp)
}
fDiff <- function(p) {
mycoef <- as.list(p)
if(! is.CARMA(yuima)){
names(mycoef) <- diff.par
mycoef[drift.par] <- coef[drift.par]
}
minusquasilogl(yuima=yuima, param=mycoef, print=print, env,rcpp=rcpp)
}
theta3 <- NULL
if(length(idx.measure)>0 & !is.CARMA(yuima)){
idx.cont <- c(idx.drift,idx.diff)
fixed <- old.fixed
start <- old.start
old.fixed.par <- fixed.par
new.fixed <- fixed
new.start <- start[idx.measure]
new.fixed <- fixed
new.fixed[names(theta1)] <- theta1
new.fixed[names(theta2)] <- theta2
fixed <- new.fixed
fixed.par <- names(fixed)
idx.fixed <- match(fixed.par, nm)
names(new.start) <- nm[idx.measure]
mydots <- as.list(call)[-(1:2)]
mydots$threshold <- NULL
mydots$fixed <- NULL
mydots$fn <- as.name("fpsi")
mydots$start <- NULL
mydots$threshold <- NULL
mydots$envir <- NULL
mydots$Est.Incr <- NULL
mydots$print <- NULL
mydots$aggregation <- NULL
mydots$rcpp <- NULL
mydots$par <- unlist(new.start)
mydots$hessian <- TRUE
mydots$joint <- NULL
mydots$upper <- unlist( upper[ nm[idx.measure] ])
mydots$lower <- unlist( lower[ nm[idx.measure] ])
mydots$method <- method
oout3 <- do.call(optim, args=mydots)
theta3 <- oout3$par
HESS[measure.par,measure.par] <- oout3$hessian
HaveMeasHess <- TRUE
fixed <- old.fixed
start <- old.start
fixed.par <- old.fixed.par
}
if(!is.CARMA(yuima)){
oout4 <- list(par= c(theta1, theta2, theta3))
names(oout4$par) <- c(diff.par,drift.par,measure.par)
oout <- oout4
}
coef <- oout$par
control=list()
par <- coef
if(!is.CARMA(yuima)){
names(par) <- unique(c(diff.par, drift.par,measure.par))
nm <- unique(c(diff.par, drift.par,measure.par))
} else {
names(par) <- unique(c(diff.par, drift.par))
nm <- unique(c(diff.par, drift.par))
}
if(is.CARMA(yuima) && length(yuima@model@parameter@measure)!=0){
nm <-c(nm,measure.par)
if((NoNeg.Noise==TRUE)){nm <-c(nm,mean.noise)}
nm<-unique(nm)
}
if(is.CARMA(yuima) && (length(yuima@model@[email protected])!=0)){
nm <-unique(c(nm,yuima@model@[email protected]))
}
conDrift <- list(trace = 5, fnscale = 1,
parscale = rep.int(5, length(drift.par)),
ndeps = rep.int(0.001, length(drift.par)), maxit = 100L,
abstol = -Inf, reltol = sqrt(.Machine$double.eps), alpha = 1,
beta = 0.5, gamma = 2, REPORT = 10, type = 1, lmm = 5,
factr = 1e+07, pgtol = 0, tmax = 10, temp = 10)
conDiff <- list(trace = 5, fnscale = 1,
parscale = rep.int(5, length(diff.par)),
ndeps = rep.int(0.001, length(diff.par)), maxit = 100L,
abstol = -Inf, reltol = sqrt(.Machine$double.eps), alpha = 1,
beta = 0.5, gamma = 2, REPORT = 10, type = 1, lmm = 5,
factr = 1e+07, pgtol = 0, tmax = 10, temp = 10)
conMeas <- list(trace = 5, fnscale = 1,
parscale = rep.int(5, length(measure.par)),
ndeps = rep.int(0.001, length(measure.par)), maxit = 100L,
abstol = -Inf, reltol = sqrt(.Machine$double.eps), alpha = 1,
beta = 0.5, gamma = 2, REPORT = 10, type = 1, lmm = 5,
factr = 1e+07, pgtol = 0, tmax = 10, temp = 10)
if(is.CARMA(yuima) && length(yuima@model@[email protected])!=0 ){
conDrift <- list(trace = 5, fnscale = 1,
parscale = rep.int(5, length(c(drift.par,yuima@model@[email protected]))),
ndeps = rep.int(0.001, length(c(drift.par,yuima@model@[email protected]))),
maxit = 100L,
abstol = -Inf, reltol = sqrt(.Machine$double.eps), alpha = 1,
beta = 0.5, gamma = 2, REPORT = 10, type = 1, lmm = 5,
factr = 1e+07, pgtol = 0, tmax = 10, temp = 10)
conDiff <- list(trace = 5, fnscale = 1,
parscale = rep.int(5, length(diff.par)),
ndeps = rep.int(0.001, length(diff.par)), maxit = 100L,
abstol = -Inf, reltol = sqrt(.Machine$double.eps), alpha = 1,
beta = 0.5, gamma = 2, REPORT = 10, type = 1, lmm = 5,
factr = 1e+07, pgtol = 0, tmax = 10, temp = 10)
}
if(!HaveDriftHess & (length(drift.par)>0)){
if(!is.CARMA(yuima)){
hess2 <- optimHess(coef[drift.par], fDrift, NULL, control=conDrift)
HESS[drift.par,drift.par] <- hess2
} else{
names(coef) <- c(drift.par,yuima@model@[email protected])
hess2 <- optimHess(coef, fDrift, NULL, control=conDrift)
HESS <- hess2
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])!=0){
b0<-paste0(yuima@model@[email protected],"0",collapse="")
idx.b0<-match(b0,rownames(HESS))
HESS<-HESS[-idx.b0,]
HESS<-HESS[,-idx.b0]
}
}
if(!HaveDiffHess & (length(diff.par)>0)){
hess1 <- optimHess(coef[diff.par], fDiff, NULL, control=conDiff)
HESS[diff.par,diff.par] <- hess1
}
oout$hessian <- HESS
if(!HaveMeasHess & (length(measure.par) > 0) & !is.CARMA(yuima)){
hess1 <- optimHess(coef[measure.par], fMeas, NULL, control=conMeas)
oout$hessian[measure.par,measure.par] <- hess1
}
vcov <- matrix(NA, length(coef), length(coef))
if (length(coef)) {
rrr <- try(solve(oout$hessian), TRUE)
if(class(rrr)[1] != "try-error")
vcov <- rrr
}
mycoef <- as.list(coef)
if(!is.CARMA(yuima)){
names(mycoef) <- nm
}
idx.fixed <- orig.idx.fixed
mycoef.cont <- mycoef
  if(length(c(idx.fixed,idx.measure))>0)
mycoef.cont <- mycoef[-c(idx.fixed,idx.measure)]
min.diff <- 0
min.jump <- 0
if(length(c(diff.par,drift.par))>0 & !is.CARMA(yuima)){
min.diff <- minusquasilogl(yuima=yuima, param=mycoef[c(diff.par,drift.par)], print=print, env,rcpp=rcpp)
}else{
if(length(c(diff.par,drift.par))>0 & is.CARMA(yuima)){
min.diff <- minusquasilogl(yuima=yuima, param=mycoef, print=print, env,rcpp=rcpp)
}
}
if(length(c(measure.par))>0 & !is.CARMA(yuima))
min.jump <- minusquasipsi(yuima=yuima, param=mycoef[measure.par], print=print, env=env)
min <- min.diff + min.jump
if(min==0)
min <- NA
dummycov<-matrix(0,length(coef),length(coef))
rownames(dummycov)<-names(coef)
colnames(dummycov)<-names(coef)
dummycov[rownames(vcov),colnames(vcov)]<-vcov
vcov<-dummycov
if(!is.CARMA(yuima)){
if(length(yuima@[email protected]) > 0 && yuima@[email protected] == "CP"){
final_res<-new("yuima.CP.qmle",
Jump.times=env$time[env$Cn.r==0],
Jump.values=env$deltaX[env$Cn.r==0,],
X.values=env$X[env$Cn.r==0,],
model=yuima@model,
call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs, threshold=threshold)
} else {
final_res<-new("yuima.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs, model=yuima@model)
}
} else {
if( Est.Incr=="IncrPar" || Est.Incr=="Incr" ){
final_res<-new("yuima.carma.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs, logL.Incr = NULL)
}else{
if(Est.Incr=="NoIncr"){
final_res<-new("yuima.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs , model=yuima@model)
return(final_res)
}else{
yuima.warn("The variable Est.Incr is not correct. See qmle documentation for the allowed values ")
final_res<-new("mle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs)
return(final_res)
}
}
}
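  ## CARMA post-processing: recover the increments of the driving Levy process from the
  ## fitted CARMA(p,q) parameters via yuima.CarmaNoise(), optionally aggregate them to unit
  ## time, and (for Est.Incr "IncrPar"/"Carma.IncPar") estimate the Levy measure parameters
  ## from those increments.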
if(!is.CARMA(yuima)){
return(final_res)
}else {
param<-coef(final_res)
observ<-yuima@data
model<-yuima@model
info<-model@info
numb.ar<-info@p
name.ar<-paste([email protected],c(numb.ar:1),sep="")
ar.par<-param[name.ar]
numb.ma<-info@q
name.ma<-paste([email protected],c(0:numb.ma),sep="")
ma.par<-param[name.ma]
loc.par=NULL
if (length([email protected])!=0){
loc.par<-param[[email protected]]
}
scale.par=NULL
if (length([email protected])!=0){
scale.par<-param[[email protected]]
}
lin.par=NULL
if (length([email protected])!=0){
lin.par<-param[[email protected]]
}
if(min(yuima.PhamBreton.Alg(ar.par[numb.ar:1]))>=0){
cat("\n Stationarity condition is satisfied...\n Starting Estimation Increments ...\n")
}else{
yuima.warn("Insert constraints in Autoregressive parameters for enforcing stationarity" )
cat("\n Starting Estimation Increments ...\n")
}
ttt<[email protected][[1]]
tt<-index(ttt)
y<-coredata(ttt)
if(NoNeg.Noise==TRUE && (info@p==(info@q+1))){final_res@coef[mean.noise]<-mean(y)/tail(ma.par,n=1)*ar.par[1]}
levy<-yuima.CarmaNoise(y,tt,ar.par,ma.par, loc.par, scale.par, lin.par, NoNeg.Noise)
inc.levy<-NULL
if (!is.null(levy)){
inc.levy<-diff(t(levy))
}
if(Est.Incr=="Carma.Inc"||Est.Incr=="Incr"){
inc.levy.fin<-zoo(inc.levy,tt[(1+length(tt)-length(inc.levy)):length(tt)])
carma_final_res<-new("yuima.carma.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, Incr.Lev = inc.levy.fin,
model = yuima@model, nobs=yuima.nobs, logL.Incr = NULL)
return(carma_final_res)
}
cat("\nStarting Estimation parameter Noise ...\n")
dummycovCarmapar<-vcov[unique(c(drift.par,diff.par)),unique(c(drift.par,diff.par))]
if(!is.null(loc.par)){
dummycovCarmapar<-vcov[unique(c(drift.par,diff.par,[email protected])),
unique(c(drift.par,diff.par,[email protected]))]
}
dummycovCarmaNoise<-vcov[unique(measure.par),unique(c(measure.par))]
dummycoeffCarmapar<-coef[unique(c(drift.par,diff.par))]
if(!is.null(loc.par)){
dummycoeffCarmapar<-coef[unique(c(drift.par,diff.par,[email protected]))]
}
dummycoeffCarmaNoise<-coef[unique(c(measure.par))]
coef<-NULL
coef<-c(dummycoeffCarmapar,dummycoeffCarmaNoise)
names.par<-c(unique(c(drift.par,diff.par)),unique(c(measure.par)))
if(!is.null(loc.par)){
names.par<-c(unique(c(drift.par,diff.par,[email protected])),unique(c(measure.par)))
}
names(coef)<-names.par
cov<-NULL
cov<-matrix(0,length(names.par),length(names.par))
rownames(cov)<-names.par
colnames(cov)<-names.par
if(is.null(loc.par)){
cov[unique(c(drift.par,diff.par)),unique(c(drift.par,diff.par))]<-dummycovCarmapar
}else{
cov[unique(c(drift.par,diff.par,[email protected])),unique(c(drift.par,diff.par,[email protected]))]<-dummycovCarmapar
}
cov[unique(c(measure.par)),unique(c(measure.par))]<-dummycovCarmaNoise
if(length([email protected])!=0){
if([email protected]=="CP"){
name.func.dummy <- as.character(model@measure$df$expr[1])
name.func<- substr(name.func.dummy,1,(nchar(name.func.dummy)-1))
names.measpar<-as.vector(strsplit(name.func,', '))[[1]][-1]
valuemeasure<-as.numeric(names.measpar)
name.int.dummy <- as.character(model@measure$intensity)
valueintensity<-as.numeric(name.int.dummy)
NaIdx<-which(!is.na(c(valueintensity,valuemeasure)))
if(length(NaIdx)!=0){
yuima.warn("the constrained MLE for levy increment will be implemented as soon as possible")
carma_final_res<-new("yuima.carma.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, Incr.Lev = inc.levy,
model = yuima@model, logL.Incr = NULL)
return(carma_final_res)
}
if(aggregation==TRUE){
if(floor(yuima@sampling@n/yuima@sampling@Terminal)!=yuima@sampling@n/yuima@sampling@Terminal){
yuima.stop("the n/Terminal in sampling information is not an integer. Set Aggregation=FALSE")
}
inc.levy1<-diff(cumsum(c(0,inc.levy))[seq(from=1,
to=yuima@sampling@n[1],
by=(yuima@sampling@n/yuima@sampling@Terminal)[1]
)])
}else{
inc.levy1<-inc.levy
}
names.measpar<-c(name.int.dummy, names.measpar)
if(measurefunc=="dnorm"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
if(measurefunc=="dgamma"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
if(measurefunc=="dexp"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
Inc.Parm<-result.Lev$estLevpar
IncVCOV<-result.Lev$covLev
names(Inc.Parm)[NaIdx]<-measure.par
rownames(IncVCOV)[NaIdx]<-as.character(measure.par)
colnames(IncVCOV)[NaIdx]<-as.character(measure.par)
coef<-NULL
coef<-c(dummycoeffCarmapar,Inc.Parm)
names.par<-names(coef)
cov<-NULL
cov<-matrix(0,length(names.par),length(names.par))
rownames(cov)<-names.par
colnames(cov)<-names.par
if(is.null(loc.par)){
cov[unique(c(drift.par,diff.par)),unique(c(drift.par,diff.par))]<-dummycovCarmapar
}else{
cov[unique(c(drift.par,diff.par,[email protected])),unique(c(drift.par,diff.par,[email protected]))]<-dummycovCarmapar
}
cov[names(Inc.Parm),names(Inc.Parm)]<-IncVCOV
}
if(yuima@[email protected]=="code"){
      if(inherits(model@measure$df, "yuima.law")){
valuemeasure <- "yuima.law"
NaIdx<-NULL
}else{
name.func.dummy <- as.character(model@measure$df$expr[1])
name.func<- substr(name.func.dummy,1,(nchar(name.func.dummy)-1))
names.measpar<-as.vector(strsplit(name.func,', '))[[1]][-1]
valuemeasure<-as.numeric(names.measpar)
NaIdx<-which(!is.na(valuemeasure))
}
if(length(NaIdx)!=0){
yuima.warn("the constrained MLE for levy increment will be implemented as soon as possible")
carma_final_res<-new("yuima.carma.qmle", call = call, coef = coef, fullcoef = unlist(mycoef),
vcov = vcov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, Incr.Lev = inc.levy,
model = yuima@model, logL.Incr = NULL)
return(carma_final_res)
}
if(aggregation==TRUE){
if(all(floor(yuima@sampling@n/yuima@sampling@Terminal)!=yuima@sampling@n/yuima@sampling@Terminal)){
yuima.stop("the n/Terminal in sampling information is not an integer. Aggregation=FALSE is recommended")
}
inc.levy1<-diff(cumsum(c(0,inc.levy))[seq(from=1,
to=yuima@sampling@n[1],
by=(yuima@sampling@n/yuima@sampling@Terminal)[1]
)])
}else{
inc.levy1<-inc.levy
}
if(measurefunc=="yuima.law"){
dummyParMeas<-c(coef[measure.par],1)
names(dummyParMeas)<-c(measure.par,yuima@[email protected])
cond <- length(dens(yuima@model@measure$df,x=as.numeric(inc.levy1),param=as.list(dummyParMeas)))
if(cond==0){
result.Lev <- list(estLevpar=coef[measure.par],
covLev=matrix(NA,
length(coef[measure.par]),
length(coef[measure.par]))
)
yuima.warn("Levy measure parameters cannot be estimated.")
}else{
dummyMyfunMeas<-function(par, Law, Data, time, param.name, name.time){
dummyParMeas<-c(par,time)
names(dummyParMeas)<-c(param.name,name.time)
v <- log(pmax(na.omit(dens(Law,x=Data,param=as.list(dummyParMeas))), 10^(-40)))
v1 <- v[!is.infinite(v)]
return(-sum(v1))
}
if(aggregation){
mytime<-1
}else{
mytime<-yuima@sampling@delta
inc.levy1<- as.numeric(inc.levy1)
}
prova <- optim(fn = dummyMyfunMeas, par = coef[measure.par],
method = method,Law=yuima@model@measure$df,
Data=inc.levy1,
time=mytime, param.name=measure.par,
name.time = yuima@[email protected])
Heeee<-optimHess(fn = dummyMyfunMeas, par = coef[measure.par],
Law=yuima@model@measure$df,
Data=inc.levy1,
time=mytime, param.name=measure.par,
name.time = yuima@[email protected])
result.Lev <- list(estLevpar=prova$par,covLev=solve(Heeee))
}
}
if(measurefunc=="rIG"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
if(measurefunc=="rNIG"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
if(measurefunc=="rbgamma"){
result.Lev<-list(estLevpar=coef[ names.measpar],
covLev=matrix(NA,
length(coef[ names.measpar]),
length(coef[ names.measpar]))
)
}
if(measurefunc=="rvgamma"){
result.Lev<-yuima.Estimation.Lev(Increment.lev=inc.levy1,
param0=coef[ names.measpar],
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measurefunc,
[email protected],
dt=env$h,
aggregation=aggregation)
}
Inc.Parm<-result.Lev$estLevpar
IncVCOV<-result.Lev$covLev
names(Inc.Parm)[NaIdx]<-measure.par
rownames(IncVCOV)[NaIdx]<-as.character(measure.par)
colnames(IncVCOV)[NaIdx]<-as.character(measure.par)
coef<-NULL
coef<-c(dummycoeffCarmapar,Inc.Parm)
names.par<-names(coef)
cov<-NULL
cov<-matrix(0,length(names.par),length(names.par))
rownames(cov)<-names.par
colnames(cov)<-names.par
if(is.null(loc.par)){
cov[unique(c(drift.par,diff.par)),unique(c(drift.par,diff.par))]<-dummycovCarmapar
}else{
cov[unique(c(drift.par,diff.par,[email protected])),unique(c(drift.par,diff.par,[email protected]))]<-dummycovCarmapar
}
cov[names(Inc.Parm),names(Inc.Parm)]<-IncVCOV
}
}
if(Est.Incr=="Carma.IncPar"||Est.Incr=="IncrPar"){
inc.levy.fin<-zoo(inc.levy,tt[(1+length(tt)-length(inc.levy)):length(tt)])
carma_final_res<-new("yuima.carma.qmle", call = call, coef = coef, fullcoef = unlist(coef),
vcov = cov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, Incr.Lev = inc.levy.fin,
model = yuima@model, nobs=yuima.nobs,
logL.Incr = tryCatch(-result.Lev$value,error=function(theta){NULL}))
}else{
if(Est.Incr=="Carma.Par"||Est.Incr=="NoIncr"){
carma_final_res<-new("mle", call = call, coef = coef, fullcoef = unlist(coef),
vcov = cov, min = min, details = oout, minuslogl = minusquasilogl,
method = method, nobs=yuima.nobs)
}
}
return(carma_final_res)
}
}
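# minusquasipsi: negative quasi-log-likelihood of the jump component. For each
# time index classified as a jump (the complement of Cn.r) it rescales the
# observed increment by the jump coefficient, evaluates intensity * Levy
# density at that point and accumulates the logarithm; finally the intensity
# integrated over [Initial, Terminal] is subtracted.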
minusquasipsi <- function(yuima, param, print=FALSE, env){
idx.intensity <- env$idx.intensity
fullcoef <- yuima@model@parameter@all
measurecoef <- param[unique(c(yuima@model@parameter@measure,yuima@model@parameter@jump))]
npar <- length(fullcoef)
nm <- names(param)
oo <- match(nm, fullcoef)
if(any(is.na(oo)))
yuima.stop("some named arguments in 'param' are not arguments to the supplied yuima model")
param <- param[order(oo)]
h <- env$h
Dn.r <- !env$Cn.r
d.size <- yuima@[email protected]
n <- length(yuima)[1]
myidx <- which(Dn.r)[-n]
measure <- measure.term(yuima, param, env)
QL <- 0
dx <- env$deltaX
measure.var <- env$measure.var
for(i in 1:length(measurecoef))
assign(names(measurecoef)[i],measurecoef[i][[1]], envir=env)
if(is.null(dim(measure[,,1]))){
for(t in myidx){
iC <- 1/measure[, , t]
assign(measure.var,iC%*%dx[t,],envir=env)
assign(yuima@[email protected], env$time[t], envir=env)
intensity <- eval(yuima@model@measure$intensity, envir=env)
dF <- intensity*eval(yuima@model@measure$df$expr,envir=env)/iC
logpsi <- 0
if(dF>0)
logpsi <- log(dF)
QL <- QL + logpsi
}
} else {
for(t in myidx){
iC <- solve(measure[, , t])
assign(measure.var,iC%*%dx[t,], envir=env)
assign(yuima@[email protected], env$time[t], envir=env)
intensity <- eval(yuima@model@measure$intensity, envir=env)
dF <- intensity*eval(yuima@model@measure$df$expr,envir=env)*det(iC)
logpsi <- 0
if(dF>0)
logpsi <- log(dF)
QL <- QL + logpsi
}
}
myf <- function(x) {
f1 <- function(u){
assign(yuima@[email protected], u, envir=env)
intensity <- eval(yuima@model@measure$intensity, envir=env)
}
sapply(x, f1)
}
myint <- integrate(f=myf, lower=yuima@sampling@Initial, upper=yuima@sampling@Terminal,subdivisions=100)$value
QL <- QL -myint
if(!is.finite(QL)){
yuima.warn("quasi likelihood is too small to calculate.")
return(1e10)
}
if(print==TRUE){
yuima.warn(sprintf("NEG-QL: %f, %s", -QL, paste(names(param),param,sep="=",collapse=", ")))
}
if(is.infinite(QL)) return(1e10)
return(as.numeric(-QL))
}
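# quasilogl: user-level wrapper returning the quasi-log-likelihood with a
# positive sign. It prepares the evaluation environment (observations,
# increments deltaX, step size h, observation times and, for CARMA models,
# p, q and the initial state variance) and calls minusquasilogl.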
quasilogl <- function(yuima, param, print=FALSE,rcpp=FALSE){
d.size <- yuima@[email protected]
if (is(yuima@model, "yuima.carma")){
d.size <-1
}
n <- length(yuima)[1]
env <- new.env()
assign("X", as.matrix(onezoo(yuima)), envir=env)
assign("deltaX", matrix(0, n-1, d.size), envir=env)
assign("Cn.r", rep(1,n-1), envir=env)
if(is.CARMA(yuima)){
env$X<-as.matrix(env$X[,1])
env$deltaX<-as.matrix(env$deltaX[,1])
env$time.obs<-length(env$X)
assign("p", yuima@model@info@p, envir=env)
assign("q", yuima@model@info@q, envir=env)
assign("V_inf0", matrix(diag(rep(1,env$p)),env$p,env$p), envir=env)
}
for(t in 1:(n-1))
env$deltaX[t,] <- env$X[t+1,] - env$X[t,]
assign("h", deltat(yuima@[email protected][[1]]), envir=env)
assign("time", as.numeric(index(yuima@[email protected][[1]])), envir=env)
-minusquasilogl(yuima=yuima, param=param, print=print, env,rcpp=rcpp)
}
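# minusquasilogl: negative quasi-log-likelihood. CARMA(p,q) models are handled
# through the Kalman filter (yuima.carma.loglik1); other diffusions use the
# local Gaussian approximation of the transition density, computed either in
# plain R or, when rcpp=TRUE, through the compiled routine likndim.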
minusquasilogl <- function(yuima, param, print=FALSE, env,rcpp=FALSE){
diff.par <- yuima@model@parameter@diffusion
drift.par <- yuima@model@parameter@drift
if(is.CARMA(yuima)){
if(length(yuima@model@[email protected])!=0){
xinit.par <- yuima@model@parameter@xinit
}
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])==0
&& length(yuima@model@parameter@jump)!=0){
diff.par<-yuima@model@parameter@jump
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])==0
&& length(yuima@model@parameter@measure)!=0){
measure.par<-yuima@model@parameter@measure
}
if(is.CARMA(yuima) && length(yuima@model@[email protected])>0 ){
yuima.warn("carma(p,q): the case of lin.par will be implemented as soon as possible")
return(NULL)
}
if(is.CARMA(yuima)){
xinit.par <- yuima@model@parameter@xinit
}
drift.par <- yuima@model@parameter@drift
fullcoef <- NULL
if(length(diff.par)>0)
fullcoef <- diff.par
if(length(drift.par)>0)
fullcoef <- c(fullcoef, drift.par)
if(is.CARMA(yuima)){
if(length(xinit.par)>0)
fullcoef <- c(fullcoef, xinit.par)
}
if(is.CARMA(yuima) && (length(yuima@model@parameter@measure)!=0))
fullcoef<-c(fullcoef, measure.par)
if(is.CARMA(yuima)){
if("mean.noise" %in% names(param)){
mean.noise<-"mean.noise"
fullcoef <- c(fullcoef, mean.noise)
NoNeg.Noise<-TRUE
}
}
npar <- length(fullcoef)
nm <- names(param)
oo <- match(nm, fullcoef)
if(any(is.na(oo)))
yuima.stop("some named arguments in 'param' are not arguments to the supplied yuima model")
param <- param[order(oo)]
nm <- names(param)
idx.diff <- match(diff.par, nm)
idx.drift <- match(drift.par, nm)
if(is.CARMA(yuima)){
idx.xinit <-as.integer(na.omit(match(xinit.par, nm)))
}
h <- env$h
Cn.r <- env$Cn.r
theta1 <- unlist(param[idx.diff])
theta2 <- unlist(param[idx.drift])
n.theta1 <- length(theta1)
n.theta2 <- length(theta2)
n.theta <- n.theta1+n.theta2
if(is.CARMA(yuima)){
theta3 <- unlist(param[idx.xinit])
n.theta3 <- length(theta3)
n.theta <- n.theta1+n.theta2+n.theta3
}
d.size <- yuima@[email protected]
n <- length(yuima)[1]
if (is.CARMA(yuima)){
d.size <-1
prova<-as.numeric(param)
names(prova)<-names(param)
param<-prova[c(length(prova):1)]
time.obs<-env$time.obs
y<-as.numeric(env$X)
u<-env$h
p<-env$p
q<-env$q
ar.par <- yuima@model@[email protected]
name.ar<-paste0(ar.par, c(1:p))
ma.par <- yuima@model@[email protected]
name.ma<-paste0(ma.par, c(0:q))
if (length(yuima@model@[email protected])==0){
a<-param[name.ar]
b<-param[name.ma]
if(length(yuima@model@[email protected])!=0){
if(length(b)==1){
b<-1
} else{
indx_b0<-paste0(yuima@model@[email protected],"0",collapse="")
b[indx_b0]<-1
}
sigma<-tail(param,1)
}else {sigma<-1}
NoNeg.Noise<-FALSE
if(is.CARMA(yuima)){
if("mean.noise" %in% names(param)){
NoNeg.Noise<-TRUE
}
}
if(NoNeg.Noise==TRUE){
if (length(b)==p){
mean.y<-mean(y)
}else{
mean.y<-0
}
y<-y-mean.y
}
V_inf0<-env$V_inf0
p<-env$p
q<-env$q
strLog<-yuima.carma.loglik1(y, u, a, b, sigma,time.obs,V_inf0,p,q)
}else{
a<-param[name.ar]
name.ma<-paste0(ma.par, c(0:q))
b<-param[name.ma]
if(length(yuima@model@[email protected])!=0){
if(length(b)==1){
b<-1
} else{
indx_b0<-paste0(yuima@model@[email protected],"0",collapse="")
b[indx_b0]<-1
}
scale.par <- yuima@model@[email protected]
sigma <- param[scale.par]
} else{sigma <- 1}
loc.par <- yuima@model@[email protected]
mu <- param[loc.par]
NoNeg.Noise<-FALSE
if(is.CARMA(yuima)){
if("mean.noise" %in% names(param)){
NoNeg.Noise<-TRUE
}
}
if(is.CARMA(yuima)&&(NoNeg.Noise==TRUE)){
if (length(b)==p){
mean.noise<-param[mean.noise]
mean.y<-mean(y-mu)
}else{
mean.y<-0
}
y<-y-mean.y
}
y.start <- y-mu
V_inf0<-env$V_inf0
p<-env$p
q<-env$q
strLog<-yuima.carma.loglik1(y.start, u, a, b, sigma,time.obs,V_inf0,p,q)
}
QL<-strLog$loglikCdiag
} else if (!rcpp) {
drift <- drift.term(yuima, param, env)
diff <- diffusion.term(yuima, param, env)
QL <- 0
pn <- 0
vec <- env$deltaX-h*drift[-n,]
K <- -0.5*d.size * log( (2*pi*h) )
dimB <- dim(diff[, , 1])
if(is.null(dimB)){
for(t in 1:(n-1)){
yB <- diff[, , t]^2
logdet <- log(yB)
pn <- Cn.r[t]*(K - 0.5*logdet-0.5*vec[t, ]^2/(h*yB))
QL <- QL+pn
}
} else {
for(t in 1:(n-1)){
yB <- diff[, , t] %*% t(diff[, , t])
logdet <- log(det(yB))
if(is.infinite(logdet) ){
pn <- log(1)
yuima.warn("singular diffusion matrix")
return(1e10)
}else{
pn <- (K - 0.5*logdet +
((-1/(2*h))*t(vec[t, ])%*%solve(yB)%*%vec[t, ]))*Cn.r[t]
QL <- QL+pn
}
}
}
} else {
drift_name <- yuima@model@drift
diffusion_name <- yuima@model@diffusion
data <- matrix(0,length(yuima@[email protected][[1]]),d.size)
for(i in 1:d.size) data[,i] <- as.numeric(yuima@[email protected][[i]])
env$data <- data
thetadim <- length(yuima@model@parameter@all)
noise_number <- yuima@[email protected]
assign(yuima@[email protected],env$time[-length(env$time)],envir = env)
for(i in 1:d.size) assign(yuima@[email protected][i], data[-length(data[,1]),i],envir = env)
for(i in 1:thetadim) assign(names(param)[i], param[[i]],envir = env)
d_b <- NULL
for(i in 1:d.size){
if(length(eval(drift_name[[i]],envir = env))==(length(data[,1])-1)){
d_b[[i]] <- drift_name[[i]]
}
else{
if(is.na(c(drift_name[[i]][2]))){
drift_name[[i]] <- parse(text=paste(sprintf("(%s)", drift_name[[i]])))[[1]]
}
d_b[[i]] <- parse(text=paste("(",drift_name[[i]][2],")*rep(1,length(data[,1])-1)",sep=""))
}
}
v_a<-matrix(list(NULL),d.size,noise_number)
for(i in 1:d.size){
for(j in 1:noise_number){
if(length(eval(diffusion_name[[i]][[j]],envir = env))==(length(data[,1])-1)){
v_a[[i,j]] <- diffusion_name[[i]][[j]]
}
else{
if(is.na(c(diffusion_name[[i]][[j]][2]))){
diffusion_name[[i]][[j]] <- parse(text=paste(sprintf("(%s)", diffusion_name[[i]][[j]])))[[1]]
}
v_a[[i,j]] <- parse(text=paste("(",diffusion_name[[i]][[j]][2],")*rep(1,length(data[,1])-1)",sep=""))
}
}
}
dx_set <- as.matrix((data-rbind(numeric(d.size),as.matrix(data[-length(data[,1]),])))[-1,])
drift_set <- diffusion_set <- NULL
for(i in 1:d.size) drift_set <- cbind(drift_set,eval(d_b[[i]],envir = env))
for(i in 1:noise_number){
for(j in 1:d.size) diffusion_set <- cbind(diffusion_set,eval(v_a[[j,i]],envir = env))
}
QL <- (likndim(dx_set,drift_set,diffusion_set,env$h)*(-0.5) + (n-1)*(-0.5*d.size * log( (2*pi*env$h) )))
}
if(!is.finite(QL)){
yuima.warn("quasi likelihood is too small to calculate.")
return(1e10)
}
if(print==TRUE){
yuima.warn(sprintf("NEG-QL: %f, %s", -QL, paste(names(param),param,sep="=",collapse=", ")))
}
if(is.infinite(QL)) return(1e10)
return(as.numeric(-QL))
}
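# MatrixA: companion matrix of the CARMA autoregressive polynomial, i.e. a
# p x p matrix with an identity block in the upper-right corner and
# -a_p, ..., -a_1 in the last row. For example, MatrixA(c(1.5, 0.7)) gives
# rbind(c(0, 1), c(-0.7, -1.5)).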
MatrixA<-function (a)
{
pp = length(a)
af = cbind(rep(0, pp - 1), diag(pp - 1))
af = rbind(af, -a[pp:1])
return(af)
}
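# carma.kalman: Gaussian log-likelihood of the observed CARMA process via the
# Kalman filter. The state transition is discretised with the matrix
# exponential expm(A*u), the stationary state variance comes from V0inf and
# the filtering recursion is delegated to the compiled routine "Cycle_Carma".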
carma.kalman<-function(y, u, p, q, a,bvector, sigma, times.obs, V_inf0){
A<-MatrixA(a)
expA<-expm(A*u,method="Pade",order=6, trySym=FALSE, do.sparseMsg = FALSE)
V_inf<-V0inf(a,p,sigma)
expAT<-t(expA)
Qmatr <- V_inf - expA %*% V_inf %*% expAT
statevar<-numeric(length=p)
SigMatr <- V_inf+0
sd_2<-0
Result<-numeric(length=2)
Kgain<-numeric(length=p)
dum_zc<-numeric(length=p)
Mat22int<-numeric(length=(p*p))
loglstar<- .Call("Cycle_Carma", y, statevar, expA, as.integer(length(y)),
as.integer(p), Qmatr, SigMatr, bvector, Result, Kgain,
dum_zc, Mat22int,
PACKAGE="yuima")
return(list(loglstar=loglstar[1]-0.5*log(2*pi)*times.obs,s2hat=loglstar[2]))
}
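# V0inf: stationary variance matrix of the CARMA state vector for noise scale
# sigma. The diagonal solves a linear system built from the AR coefficients;
# the off-diagonal entries follow from the symmetry relations of the
# stationary solution.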
V0inf<-function(a,p,sigma){
B<-matrix(0,nrow=p,ncol=p)
aa <- -rev(a)
for(i in 1:p){
for(j in 1:p){
if ((2*j-i) %in% c(1:p)){
B[i,j]<-(-1)^(j-i)*aa[2*j-i]
}
if((2*j-i)==(p+1)){
B[i,j]<-(-1)^(j-i-1)
}
}
}
Vdiag <- -solve(B)[,p]*0.5*sigma^2
if(length(Vdiag)>1){
V <- diag(Vdiag)
}else{V <- as.matrix(Vdiag)}
for(i in seq_len(p-1)){
for(j in (i+1):p){
if((i+j) %% 2 == 0){
V[i,j]=(-1)^((i-j)/2)*V[(i+j)/2,(i+j)/2]
V[j,i]=V[i,j]
}
}
}
return(V)
}
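# yuima.carma.loglik1: pads the MA coefficients with zeros up to length p and
# runs carma.kalman, returning the log-likelihood and the estimated innovation
# variance s2hat.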
yuima.carma.loglik1<-function (y, u, a, b, sigma,time.obs,V_inf0,p,q)
{
bvector <- c(b, rep(0, p - q-1))
xxalt<-carma.kalman(y, u, p, q, a,bvector,sigma,time.obs,V_inf0)
list(loglikCdiag = xxalt$loglstar,s2hat=xxalt$s2hat)
}
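# quasiloglvec: same local Gaussian approximation used in minusquasilogl for
# diffusions, but returning the vector of the n-1 individual log-likelihood
# contributions instead of their negated sum.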
quasiloglvec <- function(yuima, param, print=FALSE, env){
diff.par <- yuima@model@parameter@diffusion
drift.par <- yuima@model@parameter@drift
fullcoef <- NULL
if(length(diff.par)>0)
fullcoef <- diff.par
if(length(drift.par)>0)
fullcoef <- c(fullcoef, drift.par)
npar <- length(fullcoef)
nm <- names(param)
oo <- match(nm, fullcoef)
if(any(is.na(oo)))
yuima.stop("some named arguments in 'param' are not arguments to the supplied yuima model")
param <- param[order(oo)]
nm <- names(param)
idx.diff <- match(diff.par, nm)
idx.drift <- match(drift.par, nm)
h <- env$h
theta1 <- unlist(param[idx.diff])
theta2 <- unlist(param[idx.drift])
n.theta1 <- length(theta1)
n.theta2 <- length(theta2)
n.theta <- n.theta1+n.theta2
d.size <- yuima@[email protected]
n <- length(yuima)[1]
drift <- drift.term(yuima, param, env)
diff <- diffusion.term(yuima, param, env)
QL <- numeric(n-1)
pn <- 0
vec <- env$deltaX-h*drift[-n,]
K <- -0.5*d.size * log( (2*pi*h) )
dimB <- dim(diff[, , 1])
if(is.null(dimB)){
for(t in 1:(n-1)){
yB <- diff[, , t]^2
logdet <- log(yB)
pn <- K - 0.5*logdet-0.5*vec[t, ]^2/(h*yB)
QL[t] <- pn
}
} else {
for(t in 1:(n-1)){
yB <- diff[, , t] %*% t(diff[, , t])
logdet <- log(det(yB))
if(is.infinite(logdet) ){
pn <- log(1)
yuima.warn("singular diffusion matrix")
return(1e10)
}else{
pn <- K - 0.5*logdet +
((-1/(2*h))*t(vec[t, ])%*%solve(yB)%*%vec[t, ])
QL[t] <- pn
}
}
}
return(QL)
}
setMethod("summary", "yuima.qmle",
function (object, ...)
{
cmat <- cbind(Estimate = object@coef, `Std. Error` = sqrt(diag(object@vcov)))
m2logL <- 2 * object@min
Additional.Info <- list()
if(is(object@model,"yuima.carma")){
Additional.Info <-list(Stationarity = Diagnostic.Carma(object))
}
tmp <- new("summary.yuima.qmle", call = object@call, coef = cmat,
m2logL = m2logL,
model = object@model,
Additional.Info = Additional.Info
)
tmp
}
)
setMethod("show", "summary.yuima.qmle",
function (object)
{
cat("Quasi-Maximum likelihood estimation\n\nCall:\n")
print(object@call)
cat("\nCoefficients:\n")
print(coef(object))
cat("\n-2 log L:", object@m2logL, "\n")
if(length([email protected])>0){
if(is(object@model,"yuima.carma")){
Dummy<-paste0("\nCarma(",object@model@info@p,",",object@model@info@q,")",
collapse = "")
if([email protected]$Stationarity){
cat(Dummy,"model: Stationarity conditions are satisfied.\n")
}else{
cat(Dummy,"model: Stationarity conditions are not satisfied.\n")
}
}
}
}
)
setMethod("plot",signature(x="yuima.CP.qmle"),
function(x, ...){
t <- [email protected]
X <- [email protected]
points(x=t,y=X, ...)
}
)
setMethod("summary", "yuima.CP.qmle",
function (object, ...)
{
cmat <- cbind(Estimate = object@coef, `Std. Error` = sqrt(diag(object@vcov)))
m2logL <- 2 * object@min
x <- [email protected]
j <- [email protected]
t <- [email protected]
tmp <- new("summary.yuima.CP.qmle", call = object@call, coef = cmat,
m2logL = m2logL, NJ = length(t),
MeanJ = mean(j,na.rm=TRUE),
SdJ = sd(j,na.rm=TRUE),
MeanT = mean(diff(t),na.rm=TRUE),
X.values = x,
Jump.values = j,
Jump.times = t,
model = object@model,
threshold=object@threshold
)
tmp
}
)
setMethod("show", "summary.yuima.CP.qmle",
function (object)
{
cat("Quasi-Maximum likelihood estimation\n\nCall:\n")
print(object@call)
cat("\nCoefficients:\n")
print(coef(object))
cat("\n-2 log L:", object@m2logL, "\n")
cat(sprintf("\n\nNumber of estimated jumps: %d\n",object@NJ))
cat(sprintf("\nAverage inter-arrival times: %f\n",object@MeanT))
cat(sprintf("\nAverage jump size: %f\n",object@MeanJ))
cat(sprintf("\nStandard Dev. of jump size: %f\n",object@SdJ))
cat(sprintf("\nJump Threshold: %f\n",object@threshold))
cat("\nSummary statistics for jump times:\n")
print(summary([email protected]))
cat("\nSummary statistics for jump size:\n")
print(summary([email protected],na.rm=TRUE))
cat("\n")
}
)
setMethod("summary", "yuima.carma.qmle",
function (object, ...)
{
cmat <- cbind(Estimate = object@coef, `Std. Error` = sqrt(diag(object@vcov)))
m2logL <- 2 * object@min
data<-Re(coredata([email protected]))
data<- data[!is.na(data)]
Additional.Info <- list()
if(is(object@model,"yuima.carma")){
Additional.Info <-list(Stationarity = Diagnostic.Carma(object))
}
tmp <- new("summary.yuima.carma.qmle", call = object@call, coef = cmat,
m2logL = m2logL,
MeanI = mean(data),
SdI = sd(data),
logLI = [email protected],
TypeI = object@[email protected],
NumbI = length(data),
StatI = summary(data),
Additional.Info = Additional.Info,
model = object@model
)
tmp
}
)
setMethod("show", "summary.yuima.carma.qmle",
function (object)
{
cat("Two Stage Quasi-Maximum likelihood estimation\n\nCall:\n")
print(object@call)
cat("\nCoefficients:\n")
print(coef(object))
cat("\n-2 log L:", object@m2logL, "\n")
cat(sprintf("\n\nNumber of increments: %d\n",object@NumbI))
cat(sprintf("\nAverage of increments: %f\n",object@MeanI))
cat(sprintf("\nStandard Dev. of increments: %f\n",object@SdI))
if(!is.null(object@logLI)){
cat(sprintf("\n\n-2 log L of increments: %f\n",-2*object@logLI))
}
cat("\nSummary statistics for increments:\n")
print(object@StatI)
cat("\n")
if(length([email protected])>0){
if(is(object@model,"yuima.carma")){
Dummy<-paste0("\nCarma(",object@model@info@p,",",object@model@info@q,")",
collapse = "")
if([email protected]$Stationarity){
cat(Dummy,"model: Stationarity conditions are satisfied.\n")
}else{
cat(Dummy,"model: Stationarity conditions are not satisfied.\n")
}
}
}
}
)
setMethod("plot",signature(x="yuima.carma.qmle"),
function(x, ...){
Time<-index([email protected])
Incr.L<-coredata([email protected])
if(is.complex(Incr.L)){
yuima.warn("Complex increments. We plot only the real part")
Incr.L<-Re(Incr.L)
}
plot(x=Time,y=Incr.L, ...)
}
)
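# dCPN, dCPExp and dCPGam: densities of the increment of a compound Poisson
# process with normal, exponential and gamma jump sizes. Each inverts the
# characteristic function exp(lambda*(phi_jump(t) - 1)) on a grid of 2^10
# points via FFT and interpolates the resulting density at the requested x.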
dCPN<-function(x,lambda,mu,sigma){
a<-min(mu-100*sigma,min(x)-1)
b<-max(mu+100*sigma,max(x)+1)
ChFunToDens.CPN <- function(n, a, b, lambda, mu, sigma) {
i <- 0:(n-1)
dx <- (b-a)/n
x <- a + i * dx
dt <- 2*pi / ( n * dx )
c <- -n/2 * dt
d <- n/2 * dt
t <- c + i * dt
charact.CPN<-function(t,lambda,mu,sigma){
normal.y<-exp(1i*t*mu-sigma^2*t^2/2)
y<-exp(lambda*(normal.y-1))
}
phi_t <- charact.CPN(t,lambda,mu,sigma)
X <- exp( -(0+1i) * i * dt * a ) * phi_t
Y <- fft(X)
density <- dt / (2*pi) * exp( - (0+1i) * c * x ) * Y
data.frame(
i = i,
t = t,
characteristic_function = phi_t,
x = x,
density = Re(density)
)
}
invFFT<-ChFunToDens.CPN(lambda=lambda,mu=mu,sigma=sigma,n=2^10,a=a,b=b)
dens<-approx(invFFT$x,invFFT$density,x)
return(dens$y)
}
dCPExp<-function(x,lambda,rate){
a<-10^-6
b<-max(1/rate*10 +1/rate^2*10 ,max(x[!is.na(x)])+1)
ChFunToDens.CPExp <- function(n, a, b, lambda, rate) {
i <- 0:(n-1)
dx <- (b-a)/n
x <- a + i * dx
dt <- 2*pi / ( n * dx )
c <- -n/2 * dt
d <- n/2 * dt
t <- c + i * dt
charact.CPExp<-function(t,lambda,rate){
normal.y<-(rate/(1-1i*t))
y<-exp(lambda*(normal.y-1))
}
phi_t <- charact.CPExp(t,lambda,rate)
X <- exp( -(0+1i) * i * dt * a ) * phi_t
Y <- fft(X)
density <- dt / (2*pi) * exp( - (0+1i) * c * x ) * Y
data.frame(
i = i,
t = t,
characteristic_function = phi_t,
x = x,
density = Re(density)
)
}
invFFT<-ChFunToDens.CPExp(lambda=lambda,rate=rate,n=2^10,a=a,b=b)
dens<-approx(invFFT$x[!is.na(invFFT$density)],invFFT$density[!is.na(invFFT$density)],x)
return(dens$y[!is.na(dens$y)])
}
dCPGam<-function(x,lambda,shape,scale){
a<-10^-6
b<-max(shape*scale*10 +shape*scale^2*10 ,max(x[!is.na(x)])+1)
ChFunToDens.CPGam <- function(n, a, b, lambda, shape,scale) {
i <- 0:(n-1)
dx <- (b-a)/n
x <- a + i * dx
dt <- 2*pi / ( n * dx )
c <- -n/2 * dt
d <- n/2 * dt
t <- c + i * dt
charact.CPGam<-function(t,lambda,shape,scale){
normal.y<-(1-1i*t*scale)^(-shape)
y<-exp(lambda*(normal.y-1))
}
phi_t <- charact.CPGam(t,lambda,shape,scale)
X <- exp( -(0+1i) * i * dt * a ) * phi_t
Y <- fft(X)
density <- dt / (2*pi) * exp( - (0+1i) * c * x ) * Y
data.frame(
i = i,
t = t,
characteristic_function = phi_t,
x = x,
density = Re(density)
)
}
invFFT<-ChFunToDens.CPGam(lambda=lambda,shape=shape,scale=scale,n=2^10,a=a,b=b)
dens<-approx(invFFT$x[!is.na(invFFT$density)],invFFT$density[!is.na(invFFT$density)],x)
return(dens$y[!is.na(dens$y)])
}
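# minusloglik.Lev: negative log-likelihood of the reconstructed Levy
# increments. Depending on env$measure.type and env$measure it uses the NIG,
# variance gamma or inverse Gaussian density ("code" type) or one of the
# compound Poisson densities above ("CP" type), drops non-finite terms and
# returns the negated sum of the log-densities.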
minusloglik.Lev <- function(par,env){
if(env$measure.type=="code"){
if(env$measure=="rNIG"){
alpha<-par[1]
beta<-par[2]
delta<-par[3]
mu<-par[4]
f<-dNIG(env$data,alpha,beta,delta,mu)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}else{
if(env$measure=="rvgamma"){
lambda<-par[1]
alpha<-par[2]
beta<-par[3]
mu<-par[4]
f<-dvgamma(env$data,lambda,alpha,beta,mu)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}else{
if(env$measure=="rIG"){
delta<-par[1]
gamma<-par[2]
f<-dIG(env$data,delta,gamma)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}
}
}
}else{
if(env$measure=="dnorm"){
lambda<-par[1]
mu<-par[2]
sigma<-par[3]
f<-dCPN(env$data,lambda,mu,sigma)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}else{
if(env$measure=="dexp"){
lambda<-par[1]
rate<-par[2]
f<-dCPExp(env$data,lambda,rate)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}else{
if(env$measure=="dgamma"){
lambda<-par[1]
shape<-par[2]
scale<-par[3]
f<-dCPGam(env$data,lambda,shape,scale)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
-sum(v1)
}
}
}
}
}
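# Lev.hessian: covariance of the Levy parameter estimates, computed as minus
# the inverse of the numerical Hessian of the log-likelihood. When the
# increments are not aggregated to unit time, the diagonal matrix Matr.dum
# (with 1/dt entries for the time-scaled parameters) maps the covariance back
# to the per-unit-time parametrisation.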
Lev.hessian<-function (params,env){
logLik.Lev <- function(params){
if(env$measure.type=="code"){
if(env$measure=="rNIG"){
alpha<-params[1]
beta<-params[2]
delta<-params[3]
mu<-params[4]
f<-dNIG(env$data,alpha,beta,delta,mu)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
return(sum(v1))
}else{
if(env$measure=="rvgamma"){
lambda<-params[1]
alpha<-params[2]
beta<-params[3]
mu<-params[4]
f<-dvgamma(env$data,lambda,alpha,beta,mu)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
return(sum(v1))
}else{
if(env$measure=="rIG"){
delta<-params[1]
gamma<-params[2]
f<-dIG(env$data,delta,gamma)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
return(sum(v1))
}else{
if(env$measure=="rgamma"){
shape<-params[1]
rate<-params[2]
f<-dgamma(env$data,shape,rate)
v<-log(as.numeric(na.omit(f)))
v1<-v[!is.infinite(v)]
return(sum(v1))
}
}
}
}
}else{
if(env$measure=="dnorm"){
lambda<-params[1]
mu<-params[2]
sigma<-params[3]
return(sum(log(dCPN(env$data,lambda,mu,sigma))))
}else{
if(env$measure=="dexp"){
lambda<-params[1]
rate<-params[2]
return(sum(log(dCPExp(env$data,lambda,rate))))
}else{
if(env$measure=="dgamma"){
lambda<-params[1]
shape<-params[2]
scale<-params[3]
return(sum(log(dCPGam(env$data,lambda,shape,scale))))
}
}
}
}
}
hessian<-tryCatch(optimHess(par=params, fn=logLik.Lev),
error=function(theta){matrix(NA,env$lengpar,env$lengpar)})
if(env$aggregation==FALSE){
if(env$measure.type=="CP"){
Matr.dum<-diag(c(1/env$dt, rep(1, (length(params)-1))))
}else{
if(env$measure=="rNIG"){
Matr.dum<-diag(c(1,1,1/env$dt,1/env$dt))
}else{
if(env$measure=="rvgamma"){
Matr.dum<-diag(c(1/env$dt,1,1,1/env$dt))
}else{
if(env$measure=="rIG"){
Matr.dum<-diag(c(1/env$dt,1))
}else{
if(env$measure=="rgamma"){
Matr.dum<-diag(c(1/env$dt,1))
}
}
}
}
}
cov<--Matr.dum%*%solve(hessian)%*%Matr.dum
}else{
cov<--solve(hessian)
}
return(cov)
}
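# yuima.Estimation.Lev: constrained ML estimation of the Levy increment
# parameters. Starting values are rescaled by dt when aggregation=FALSE, the
# admissible region plus optional lower/upper bounds and fixed parameters are
# encoded as linear constraints (ui, ci), constrOptim is run on
# minusloglik.Lev, the covariance comes from Lev.hessian and the time scaling
# is undone before returning.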
yuima.Estimation.Lev<-function(Increment.lev,param0,
fixed.carma=fixed.carma,
lower.carma=lower.carma,
upper.carma=upper.carma,
measure=measure,
measure.type=measure.type,
dt=env$h,
aggregation=aggregation){
env<-new.env()
env$data<-Increment.lev
env$measure<-measure
env$measure.type<-measure.type
env$dt<-dt
if(aggregation==FALSE){
if(measure.type=="code"){
if(env$measure=="rNIG"){
param0[3]<-param0[3]*dt
param0[4]<-param0[4]*dt
}else{
if(env$measure=="rvgamma"){
param0[1]<-param0[1]*dt
param0[4]<-param0[4]*dt
}else{
if(env$measure=="rIG"){
param0[1]<-param0[1]*dt
}else{
if(env$measure=="rgamma"){
param0[1]<-param0[1]*dt
}
}
}
}
}else{
param0[1]<-param0[1]*dt
}
}
if(measure.type=="code"){
if(measure=="rNIG"){
ui<-rbind(c(1, -1, 0, 0),c(1, 1, 0, 0),c(1, 0, 0, 0),c(0, 0, 1, 0))
ci<-c(0,0,0,10^(-6))
}else{
if(measure=="rvgamma"){
ui<-rbind(c(1,0, 0, 0),c(0, 1, 1, 0),c(0, 1,-1, 0),c(0, 1,0, 0))
ci<-c(10^-6,10^-6,10^(-6), 0)
}else{
if(measure=="rIG"){
ui<-rbind(c(1,0),c(0, 1))
ci<-c(10^-6,10^-6)
}else{
if(measure=="rgamma"){
ui<-rbind(c(1,0),c(0, 1))
ci<-c(10^-6,10^-6)
}
}
}
}
}else{
if(measure=="dnorm"){
ui<-rbind(c(1,0,0),c(0,0,1))
ci<-c(10^-6,10^-6)
}else{
if(measure=="dexp"){
ui<-rbind(c(1,0),c(0,1))
ci<-c(10^-6,10^-6)
}else{
if(measure=="dgamma"){
ui<-rbind(c(1,0,0),c(0,1,0),c(0,0,1))
ci<-c(10^-6,10^-6,10^-6)
}
}
}
}
if(!is.null(lower.carma)){
lower.con<-matrix(0,length(lower.carma),length(param0))
rownames(lower.con)<-names(lower.carma)
colnames(lower.con)<-names(param0)
numb.lower<-length(lower.carma)
lower.con[names(lower.carma),names(lower.carma)]<-1*diag(numb.lower)
dummy.lower.names<-paste0(names(lower.carma),".lower")
rownames(lower.con)<-dummy.lower.names
names(lower.carma)<-dummy.lower.names
ui<-rbind(ui,lower.con)
ci<-c(ci,lower.carma)
}
if(!is.null(upper.carma)){
upper.con<-matrix(0,length(upper.carma),length(param0))
rownames(upper.con)<-names(upper.carma)
colnames(upper.con)<-names(param0)
numb.upper<-length(upper.carma)
upper.con[names(upper.carma),names(upper.carma)]<--1*diag(numb.upper)
dummy.upper.names<-paste0(names(upper.carma),".upper")
rownames(upper.con)<-dummy.upper.names
names(upper.carma)<-dummy.upper.names
ui<-rbind(ui,upper.con)
ci<-c(ci,-upper.carma)
}
if(!is.null(fixed.carma)){
names.fixed<-names(fixed.carma)
numb.fixed<-length(fixed.carma)
fixed.con<-matrix(0,length(fixed.carma),length(param0))
rownames(fixed.con)<-names(fixed.carma)
colnames(fixed.con)<-names(param0)
fixed.con.bis<-fixed.con
fixed.con[names(fixed.carma),names(fixed.carma)]<--1*diag(numb.fixed)
fixed.con.bis[names(fixed.carma),names(fixed.carma)]<-1*diag(numb.fixed)
dummy.fixed.names<-paste0(names(fixed.carma),".fixed.u")
dummy.fixed.bis.names<-paste0(names(fixed.carma),".fixed.l")
rownames(fixed.con)<-dummy.fixed.names
rownames(fixed.con.bis)<-dummy.fixed.bis.names
names(fixed.carma)<-dummy.fixed.names
ui<-rbind(ui,fixed.con,fixed.con.bis)
ci<-c(ci,-fixed.carma-10^-6,fixed.carma-10^-6)
}
lengpar<-length(param0)
paramLev<-NA*c(1:length(lengpar))
env$lengpar<-lengpar
firs.prob<-tryCatch(constrOptim(theta=param0,
f=minusloglik.Lev,grad=NULL,ui=ui,ci=ci,env=env),
error=function(theta){NULL})
if(!is.null(firs.prob)){
paramLev<-firs.prob$par
names(paramLev)<-names(param0)
if(!is.null(fixed.carma)){
paramLev[names.fixed]<-fixed.carma
names(paramLev)<-names(param0)
}
}else{warning("the start value for levy measure is outside of the admissible region")}
env$aggregation<-aggregation
if(is.na(paramLev[1])){
covLev<-matrix(0,length(paramLev),length(paramLev))
}else{
covLev<-Lev.hessian(params=paramLev,env)
rownames(covLev)<-names(paramLev)
if(!is.null(fixed.carma)){
covLev[names.fixed,]<-matrix(0,numb.fixed,lengpar)
}
colnames(covLev)<-names(paramLev)
if(!is.null(fixed.carma)){
covLev[,names.fixed]<-matrix(0,lengpar,numb.fixed)
}
}
if(aggregation==FALSE){
if(measure.type=="code"){
if(env$measure=="rNIG"){
paramLev[3]<-paramLev[3]/dt
paramLev[4]<-paramLev[4]/dt
}else{
if(env$measure=="rvgamma"){
paramLev[1]<-paramLev[1]/dt
paramLev[4]<-paramLev[4]/dt
}else{
if(env$measure=="rIG"){
paramLev[1]<-paramLev[1]/dt
}else{
if(env$measure=="rgamma"){
paramLev[1]<-paramLev[1]/dt
}
}
}
}
}else{
paramLev[1]<-paramLev[1]/dt
}
}
results<-list(estLevpar=paramLev,covLev=covLev, value=firs.prob$value)
return(results)
} |
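# prop.gt: estimates a common prevalence p from group-testing data. At each
# iteration the individual statuses are imputed by the compiled sampler
# "gbsonedhom_c" (nburn burn-in draws plus ngit retained draws) and p is
# updated from the average imputed status until |p1 - p0| <= tol or maxit is
# reached; optionally a variance estimate is obtained by inverting the
# information returned by "cvondknachom_c".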
prop.gt <- function(p0,gtData,covariance=FALSE,nburn=2000,ngit=5000,maxit=200,tol=1e-03,tracing=TRUE,conf.level=0.95){
Memb <- gtData[ ,-(1:5)]
N <- max(Memb)
maxAssign <- max(as.numeric(table(Memb[Memb > 0])))
ytm <- matrix(-9,N,maxAssign)
tmp <- as.matrix( Memb )
vec <- 1:nrow(gtData)
for(d in 1:N){
tid <- tmp==d
store <- NULL
for(i in 1:ncol(tmp)){
store <- c(store,vec[tid[ ,i]])
}
ytm[d,1:length(store)] <- sort(store)
}
Yt <- stats::rbinom(N,1,p0)
Ytmat <- cbind(Yt,rowSums(ytm>0),ytm)
Ycol <- ncol(Ytmat)
SeSp <- gtData[ ,3:4]
Z <- gtData[ ,-(3:5)]
Zrow <- nrow(Z)
Zcol <- ncol(Z)
GI <- ngit + nburn
p1 <- p0
p0 <- p0 + 2*tol
s <- 1
convergence <- 0
while(abs(p1-p0) > tol){
p0 <- p1
U <- matrix(stats::runif(N*GI),nrow=N,ncol=GI)
res <- .Call("gbsonedhom_c",as.double(p0),as.integer(Ytmat),
as.integer(Z),as.integer(N),as.double(SeSp),as.integer(Ycol),
as.integer(Zrow),as.integer(Zcol),as.double(U),as.integer(GI),
as.integer(nburn), PACKAGE="groupTesting")
temp <- sum( res )/ngit
p1 <- temp/N
if(s >= maxit){
convergence <- 1
break
}
s <- s + 1
if(tracing){
print(c(s-1,p1))
}
}
covr2 <- NULL
if(covariance){
U <- matrix(stats::runif(N*GI),nrow=N,ncol=GI)
Info <- .Call("cvondknachom_c",as.double(p1),as.integer(Ytmat),
as.integer(Z),as.integer(N),as.double(SeSp),as.integer(Ycol),
as.integer(Zrow),as.integer(Zcol),as.double(U),as.integer(GI),
as.integer(nburn), PACKAGE="groupTesting")
covr2 <- solve( Info )
}
pHat <- p1
if(covariance){
se <- sqrt(covr2)
alternative <- "two.sided"
z <- qnorm(ifelse(alternative=="two.sided",
(1+conf.level)/2, conf.level))
CI <- c(pHat-z*se, pHat+z*se)
res <- data.frame(round(pHat, 3), round(se, 3),
round(CI[1],3), round(CI[2],3) )
}else{
res <- data.frame(round(pHat, 3), NA, NA, NA)
}
rownames(res) <- colnames(res) <- NULL
rownames(res) <- "prop"
colnames(res) <- c("Estimate", "StdErr",
paste(conf.level*100, "%lower", sep=""),
paste(conf.level*100, "%upper", sep=""))
list("param" = p1,
"covariance" = covr2,
"iterUsed" = s-1,
"convergence" = convergence,
"summary" = res
)
} |
expected <- eval(parse(text="c(-1.6, -0.9)"));
test(id=0, code={
argv <- eval(parse(text="list(c(-1.6, -0.9))"));
do.call(`unclass`, argv);
}, o=expected); |
NULL
RemoveRowsWithPoundSign <- function(df, file) {
pound.chrom.idx <- which(df$CHROM == "#CHROM")
if (length(pound.chrom.idx) > 0) {
warning("Removing ", length(pound.chrom.idx),
" rows with #CHROM in the CHROM column from file ", file)
df1 <- df[-pound.chrom.idx, ]
return(df1)
} else {
return(df)
}
}
RemoveRowsWithPoundSignNew <- function(df, name.of.VCF = NULL) {
pound.chrom.idx <- which(df$CHROM == "#CHROM")
if (length(pound.chrom.idx) > 0) {
warning("In VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" ", length(pound.chrom.idx), " rows out of ",
nrow(df), " had value #CHROM in the CHROM column and were removed. ",
"See discarded.variants in the return value for more details")
df1 <- df[-pound.chrom.idx, ]
df1.to.remove <- df[pound.chrom.idx, ]
df1.to.remove$discarded.reason <- 'Chromosome name is "#CHROM"'
return(list(df = df1, discarded.variants = df1.to.remove))
} else {
return(list(df = df))
}
}
RemoveRowsWithDuplicatedCHROMAndPOS <- function(df, file) {
dups <- which(duplicated(df[, c("CHROM", "POS")]))
if (length(dups) > 0) {
dups2 <- which(duplicated(df[ , c("CHROM", "POS")], fromLast = TRUE))
warning("In ", file, " ", 2 * length(dups), " rows out of ",
nrow(df), " had duplicate CHROM and POS and were removed: ",
dups2, " ", dups)
df1 <- df[-c(dups, dups2), ]
return(df1)
} else {
return(df)
}
}
RemoveRowsWithDuplicatedCHROMAndPOSNew <- function(df, name.of.VCF = NULL) {
discarded.variants <- df[0, ]
dups <- which(duplicated(df[, c("CHROM", "POS", "REF", "ALT")]))
if (length(dups) > 0) {
warning("In VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" ", 2 * length(dups), " rows out of ",
nrow(df), " had the same CHROM, POS, REF and ALT; only one copy of each was kept. ",
"See discarded.variants in the return value for more details")
df.to.remove <- df[dups, ]
df.to.remove$discarded.reason <- "Variant with same CHROM, POS, REF and ALT as another variant"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df.to.remove)
df1 <- df[-dups, ]
} else {
df1 <- df
}
dups2 <- which(duplicated(df1[, c("CHROM", "POS", "REF")]))
if (length(dups2) > 0) {
warning("In VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" ", 2 * length(dups2), " rows out of ",
nrow(df), " had the same CHROM, POS and REF but different ALT and were removed. ",
"See discarded.variants in the return value for more details")
dups3 <- which(duplicated(df1[, c("CHROM", "POS", "REF")], fromLast = TRUE))
df1.to.remove <- df1[c(dups2, dups3), ]
df1.to.remove$discarded.reason <- "Variant with same CHROM, POS, REF but different ALT"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df1.to.remove)
df2 <- df1[-c(dups2, dups3), ]
} else {
df2 <- df1
}
if (nrow(discarded.variants) > 0) {
return(list(df = df2, discarded.variants = discarded.variants))
} else {
return(list(df = df2))
}
}
RenameColumnsWithNameStrand <- function(df) {
if ("strand" %in% colnames(df)) {
colnames(df)[which(colnames(df) == "strand")] <- "strand_old"
warning('There is a column in the VCF named "strand"; ',
'it has been renamed to "strand_old" so as ',
'not to conflict with code in other parts of the ICAMS package.')
}
return(df)
}
RenameColumnsWithNameVAF <- function(df) {
if ("VAF" %in% colnames(df)) {
colnames(df)[which(colnames(df) == "VAF")] <- "VAF_old"
warning('There is a column in the VCF named "VAF"; ',
'it has been renamed to "VAF_old" so as ',
'not to conflict with code in other parts of the ICAMS package.')
}
return(df)
}
ReadStrelkaSBSVCF <- function(file, name.of.VCF = NULL) {
df <- MakeDataFrameFromVCF(file)
if (is.null(name.of.VCF)) {
vcf.name <- tools::file_path_sans_ext(basename(file))
} else {
vcf.name <- name.of.VCF
}
if (nrow(df) == 0) {
return(df)
} else {
df1 <- GetStrelkaVAF(vcf = df, name.of.VCF = vcf.name)
return(df1)
}
}
MakeDataFrameFromVCF <- function(file, name.of.VCF = NULL) {
if (is.null(name.of.VCF)) {
vcf.name <- basename(file)
} else {
vcf.name <- name.of.VCF
}
tryCatch({
df1 <-
suppressWarnings(data.table::fread(file, na.strings = "",
skip = "#CHROM"))
if (nrow(df1) == 0) {
return(df1)
}
required.col.names <- c("#CHROM", "POS", "REF", "ALT")
col.names.exist <- required.col.names %in% colnames(df1)
col.names.not.available <- required.col.names[!col.names.exist]
if (!all(col.names.exist)) {
stop("some columns required in VCF are not available ",
paste(col.names.not.available, collapse = " "))
}
},
error = function(err.info) {
if (!is.null(err.info$message)) {
stop(vcf.name, " does not appear to be a VCF file.\nDetails: ",
err.info$message)
}
})
names <- c("CHROM", colnames(df1)[-1])
colnames(df1) <- names
df1$CHROM <- as.character(df1$CHROM)
df1 <- RenameColumnsWithNameStrand(df1)
df1 <- RenameColumnsWithNameVAF(df1)
return(df1)
}
ReadStrelkaIDVCF <- function(file, name.of.VCF = NULL) {
df1 <- MakeDataFrameFromVCF(file)
if (is.null(name.of.VCF)) {
vcf.name <- tools::file_path_sans_ext(basename(file))
} else {
vcf.name <- name.of.VCF
}
if (nrow(df1) == 0) {
return(df1)
}
if (!("TUMOR" %in% names(df1)) ||
!("FORMAT" %in% names(df1))) {
stop("\nVCF ", dQuote(vcf.name),
" does not appear to be a Strelka VCF, column names are \n",
paste(colnames(df1), collapse=" "))
}
control <- unique(df1[["FORMAT"]])
stopifnot(length(control) == 1)
colnames <- unlist(strsplit(control, split=":", fixed=TRUE))
each.base.col <- c("AU", "CU", "GU", "TU")
if (all(each.base.col %in% colnames)) {
stop("\nVCF ", dQuote(vcf.name),
" does not appear to be a Strelka ID VCF, ",
"the value of column FORMAT is \n",
control)
}
return(df1)
}
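# GetStrelkaVAF: variant allele frequencies for a Strelka SBS VCF. For every
# variant the TUMOR column is parsed according to FORMAT, the tier-1 counts
# are taken from the AU/CU/GU/TU fields, and VAF is the alternative-allele
# tier-1 count divided by the total tier-1 read count (also returned as
# read.depth).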
GetStrelkaVAF <-function(vcf, name.of.VCF = NULL) {
stopifnot("data.frame" %in% class(vcf))
if (!("TUMOR" %in% names(vcf)) ||
!("FORMAT" %in% names(vcf))) {
stop("\nVCF ",
ifelse(is.null(name.of.VCF), "", paste0(dQuote(name.of.VCF), " ")),
"does not appear to be a Strelka VCF, column names are \n",
paste(colnames(vcf), collapse=" "))
}
TUMOR <- vcf[["TUMOR"]]
control <- unique(vcf[["FORMAT"]])
alt <- vcf[["ALT"]]
stopifnot(length(control) == 1)
colnames <- unlist(strsplit(control, split=":", fixed=TRUE))
values <- strsplit(TUMOR, split=":", fixed=TRUE)
vaf <- numeric(nrow(vcf))
read.depth <- integer(nrow(vcf))
each.base.col <- c("AU", "CU", "GU", "TU")
if (!all(each.base.col %in% colnames)) {
stop("\nVCF ",
ifelse(is.null(name.of.VCF), "", paste0(dQuote(name.of.VCF), " ")),
"does not appear to be a Strelka SBS VCF, ",
"the value of column FORMAT is \n",
control)
}
for (i in 1:length(vaf)) {
row.i <- values[[i]]
names(row.i) <- colnames
all.read.counts <- row.i[each.base.col]
x <- strsplit(all.read.counts, split=",", fixed=TRUE)
tier1.counts <- lapply(X = x, FUN = function(x) x[1])
tier1.counts <- as.numeric(unlist(tier1.counts))
names(tier1.counts) <- each.base.col
total.read.count <- sum(tier1.counts)
alt.count <- tier1.counts[paste0(alt[i], "U")]
vaf[i] <- alt.count/total.read.count
read.depth[i] <- total.read.count
}
return(cbind(vcf, VAF = vaf, read.depth = read.depth))
}
ReadMutectVCF <-
function(file, name.of.VCF = NULL, tumor.col.name = NA) {
df <- MakeDataFrameFromVCF(file)
if (is.null(name.of.VCF)) {
vcf.name <- tools::file_path_sans_ext(basename(file))
} else {
vcf.name <- name.of.VCF
}
if (nrow(df) == 0) {
return(df)
} else {
df1 <- GetMutectVAF(vcf = df, name.of.VCF = vcf.name,
tumor.col.name = tumor.col.name)
return(df1)
}
}
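# GetMutectVAF: VAF and read depth for a Mutect VCF, computed from the
# F1R2/F2R1 (or REF_F1R2/ALT_F1R2/REF_F2R1/ALT_F2R1) read-count fields of the
# tumor sample column; if neither set of fields is present the function warns
# and returns NA columns.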
GetMutectVAF <- function(vcf, name.of.VCF = NULL, tumor.col.name = NA) {
stopifnot("data.frame" %in% class(vcf))
if (nrow(vcf) == 0) {
return(vcf)
}
type1 <- c("F1R2", "F2R1")
type2 <- c("REF_F1R2", "ALT_F1R2", "REF_F2R1", "ALT_F2R1")
vcf.format <- unlist(stringi::stri_split_fixed(vcf$FORMAT[1], ":"))
is.type1 <- all(type1 %in% vcf.format)
is.type2 <- all(type2 %in% vcf.format)
if (!is.type1 && !is.type2) {
warning("\nVCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" does not appear to be a Mutect VCF, please check the data")
vcf$VAF <- NA
vcf$read.depth <- NA
return(vcf)
}
if (!is.na(tumor.col.name)) {
if (!tumor.col.name %in% colnames(vcf)) {
stop("\n", dQuote(tumor.col.name),
" is not one of the column names in vcf ",
ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)))
}
}
GetTumorColumn <- function(vcf, tumor.col.name) {
if (is.na(tumor.col.name)) {
return(vcf[[10]])
} else {
return(vcf[[tumor.col.name]])
}
}
tumor.col <- GetTumorColumn(vcf, tumor.col.name)
ExtractVAFAndReadDepth <- function(tumor.col, type) {
format.info <- stringi::stri_split_fixed(vcf$FORMAT, ":")
read.counts.idx <- lapply(format.info, function(x) match(type, x))
tumor.info.list <- stringi::stri_split_fixed(tumor.col, ":")
Extract <- function(idx, tumor.info.list, read.counts.idx) {
x <- tumor.info.list[[idx]]
idx <- read.counts.idx[[idx]]
as.integer(unlist(strsplit(x[idx], ",")))
}
num <- nrow(vcf)
read.counts.info <- lapply(1:num, FUN = Extract,
tumor.info.list = tumor.info.list,
read.counts.idx = read.counts.idx)
vafs <- sapply(read.counts.info, function(x) {
vaf <- sum(x[c(2, 4)]) / sum(x)
})
read.depth <- sapply(read.counts.info, function(x) sum(x))
return(data.frame(VAF = vafs, read.depth = read.depth))
}
vafs <- NULL
if (is.type1) {
vafs <- ExtractVAFAndReadDepth(tumor.col, type1)
} else if (is.type2) {
vafs <- ExtractVAFAndReadDepth(tumor.col, type2)
}
CheckAndReturnVAFs <- function(vafs) {
idx.zero.vaf <- which(vafs$VAF == 0)
if(length(idx.zero.vaf) == 0) {
return(cbind(vcf, vafs))
} else {
zero.vaf.row <- length(idx.zero.vaf)
total.vaf.row <- nrow(vafs)
warning("\nThere are ", zero.vaf.row, " rows out of ", total.vaf.row,
" with zero VAF in VCF ",
ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)), "\n",
"Please check the data and if necessary, specify the correct ",
"column name for tumor sample using argument 'tumor.col.name'")
return(cbind(vcf, vafs))
}
}
CheckAndReturnVAFs(vafs)
}
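# GetFreebayesVAF: VAF and read depth for a freebayes VCF, computed from the
# SRF, SRR, SAF and SAR counts in the INFO column as
# VAF = (SAF + SAR) / (SRF + SRR + SAF + SAR).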
GetFreebayesVAF <- function(vcf, name.of.VCF = NULL) {
key.words <- c("SRF", "SRR", "SAF", "SAR")
if(!all(sapply(key.words, FUN = grepl, x = vcf$INFO[1]))) {
stop("\nVCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" does not appear to be a freebayes VCF, please check the data")
}
info.list <- strsplit(vcf$INFO, split = ";")
CalculateVAF <- function(vector, key.words) {
idx <- sapply(key.words, FUN = grep, x = vector)
info <- vector[idx]
info1 <- unlist(strsplit(info, split = "="))
info2 <- as.integer(info1[c(2, 4, 6, 8)])
names(info2) <- key.words
return(data.frame(VAF = sum(info2[3:4]) / sum(info2),
read.depth = sum(info2)))
}
list1 <- lapply(info.list, FUN = CalculateVAF, key.words = key.words)
vafs <- do.call("rbind", list1)
return(cbind(vcf, vafs))
}
GetConsensusVAF <- function(vcf, mc.cores = 1) {
info <- vcf$INFO
tmp <- stringi::stri_split_fixed(info, ";")
alt.counts <- parallel::mclapply(tmp, FUN = function(x) {
idx <- grep("t_alt_count", x, fixed = TRUE)
if (length(idx) == 0) {
return(as.integer(NA))
} else {
alt.info <- x[idx]
alt.count <- gsub("t_alt_count=", "", alt.info)
return(as.integer(alt.count))
}
}, mc.cores = mc.cores)
alt.counts1 <- unlist(alt.counts)
ref.counts <- parallel::mclapply(tmp, FUN = function(x) {
idx <- grep("t_ref_count", x, fixed = TRUE)
if (length(idx) == 0) {
return(as.integer(NA))
} else {
ref.info <- x[idx]
ref.count <- gsub("t_ref_count=", "", ref.info)
return(as.integer(ref.count))
}
}, mc.cores = mc.cores)
ref.counts1 <- unlist(ref.counts)
read.depth <- alt.counts1 + ref.counts1
vaf <- alt.counts1/read.depth
vcf$VAF <- vaf
vcf$read.depth <- read.depth
return(vcf)
}
ReadVCF <-
function(file, variant.caller = "unknown", name.of.VCF = NULL, tumor.col.name = NA,
filter.status = NULL, get.vaf.function = NULL, ...) {
df0 <- MakeDataFrameFromVCF(file, name.of.VCF = name.of.VCF)
if (nrow(df0) == 0) {
return(df0)
}
if (is.null(filter.status)) {
df1 <- df <- df0
} else {
df1 <- df <- df0[FILTER == filter.status]
}
if (nrow(df) == 0) {
return(df)
}
df1$VAF <- as.numeric(NA)
df1$read.depth <- as.numeric(NA)
if (variant.caller == "unknown") {
if (is.null(get.vaf.function)) {
return(df1)
} else {
df2 <- get.vaf.function(df, ...)
return(df2)
}
}
if (!variant.caller %in% c("strelka", "mutect", "freebayes")) {
stop(paste0("\nVariant caller ", variant.caller, " is not supported by",
" ICAMS, please specify either ", dQuote("strelka"), ", ",
dQuote("mutect"), " or ", dQuote("freebayes")))
}
if (is.null(name.of.VCF)) {
vcf.name <- tools::file_path_sans_ext(basename(file))
} else {
vcf.name <- name.of.VCF
}
if (variant.caller == "strelka") {
if (!("TUMOR" %in% names(df)) ||
!("FORMAT" %in% names(df))) {
stop("\nVCF ", dQuote(vcf.name),
" does not appear to be a Strelka VCF, column names are \n",
paste(colnames(df), collapse=" "))
}
SBS.idx0 <- which(nchar(df$REF) == 1 & nchar(df$ALT) == 1)
SBS.multiple.alt <-
which(nchar(df$REF) == 1 & grepl(",", df$ALT, fixed = TRUE))
SBS.idx <- c(SBS.idx0, SBS.multiple.alt)
if (length(SBS.idx) == 0) {
return(df)
} else {
SBS.df <- df[SBS.idx, ]
SBS.df1 <- GetStrelkaVAF(vcf = SBS.df, name.of.VCF = vcf.name)
df1[SBS.idx, ]$VAF <- SBS.df1$VAF
df1[SBS.idx, ]$read.depth <- SBS.df1$read.depth
return(df1)
}
}
if (variant.caller == "mutect") {
df2 <- GetMutectVAF(vcf = df, name.of.VCF = vcf.name,
tumor.col.name = tumor.col.name)
return(df2)
}
if (variant.caller == "freebayes") {
SBS.idx0 <- which(nchar(df$REF) == 1 & nchar(df$ALT) == 1)
SBS.multiple.alt <-
which(nchar(df$REF) == 1 & grepl(",", df$ALT, fixed = TRUE))
SBS.idx <- c(SBS.idx0, SBS.multiple.alt)
if (length(SBS.idx) == 0) {
return(df)
} else {
SBS.df <- df[SBS.idx, ]
SBS.df1 <- GetFreebayesVAF(vcf = SBS.df, name.of.VCF = vcf.name)
df1[SBS.idx, ]$VAF <- SBS.df1$VAF
df1[SBS.idx, ]$read.depth <- SBS.df1$read.depth
return(df1)
}
}
}
ReadVCFs <- function(files, variant.caller = "unknown", num.of.cores = 1,
names.of.VCFs = NULL,
tumor.col.names = NA, filter.status = NULL,
get.vaf.function = NULL, ...) {
num.of.cores <- AdjustNumberOfCores(num.of.cores)
if (is.null(names.of.VCFs)) {
vcfs.names <- tools::file_path_sans_ext(basename(files))
} else {
CheckNamesOfVCFs(files, names.of.VCFs)
vcfs.names <- names.of.VCFs
}
num.of.files <- length(files)
if (all(is.na(tumor.col.names))) {
tumor.col.names <- rep(NA, num.of.files)
}
ReadVCF1 <- function(idx, files, variant.caller, vector1, vector2) {
ReadVCF(file = files[idx], variant.caller = variant.caller,
name.of.VCF = vector1[idx], tumor.col.name = vector2[idx],
filter.status = filter.status, get.vaf.function = get.vaf.function,
...)
}
vcfs <- parallel::mclapply(1:num.of.files, FUN = ReadVCF1, files = files,
variant.caller = variant.caller,
vector1 = vcfs.names, vector2 = tumor.col.names,
mc.cores = num.of.cores)
names(vcfs) <- vcfs.names
return(vcfs)
}
CheckAndRemoveDiscardedVariants <- function(vcf, name.of.VCF = NULL) {
if (nrow(vcf) == 0) {
return(list(df = vcf))
}
discarded.variants <- vcf[0, ]
idx <- which(vcf$REF == vcf$ALT)
if (length(idx) > 0) {
df.to.remove <- vcf[idx, ]
df.to.remove$discarded.reason <- "Variant with same REF and ALT"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df.to.remove)
vcf <- vcf[-idx, ]
}
retval <- RemoveRowsWithPoundSignNew(df = vcf, name.of.VCF = name.of.VCF)
df1 <- retval$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval$discarded.variants)
retval1 <-
RemoveRowsWithDuplicatedCHROMAndPOSNew(df = df1, name.of.VCF = name.of.VCF)
df2 <- retval1$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval1$discarded.variants)
retval2 <- StandardChromNameNew(df = df2, name.of.VCF = name.of.VCF)
df3 <- retval2$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval2$discarded.variants)
multiple.alt <- grep(",", df3$ALT, fixed = TRUE)
if (length(multiple.alt) > 0) {
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has variants with multiple alternative alleles; these were ",
"discarded. See discarded.variants in the return value for more ",
"details.")
df4 <- df3[-multiple.alt, ]
df4.to.remove <- df3[multiple.alt, ]
df4.to.remove$discarded.reason <- "Variant with multiple alternative alleles"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df4.to.remove)
} else {
df4 <- df3
}
other.df <- which(nchar(df4$REF) > 2 & nchar(df4$ALT) == nchar(df4$REF))
if (length(other.df) > 0) {
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has variants involving three or more nucleotides; these were ",
"discarded. See discarded.variants in the return value for more ",
"details.")
df5 <- df4[-other.df, ]
df5.to.remove <- df4[other.df, ]
df5.to.remove$discarded.reason <- "Variant involves three or more nucleotides"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df5.to.remove)
} else {
df5 <- df4
}
complex.indels <- which((nchar(df5$REF) != nchar(df5$ALT)) &
(substr(df5$REF, 1, 1) != substr(df5$ALT, 1, 1)))
if (length(complex.indels) > 0) {
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has complex indels; these were discarded. See discarded.variants ",
"in the return value for more details.")
df6 <- df5[-complex.indels, ]
df6.to.remove <- df5[complex.indels, ]
df6.to.remove$discarded.reason <- "Complex indel"
discarded.variants <-
dplyr::bind_rows(discarded.variants, df6.to.remove)
} else {
df6 <- df5
}
wrong.DBS.type1 <- dplyr::filter(df6, nchar(REF) == 2, nchar(ALT) == 2,
substr(REF, 1, 1) == substr(ALT, 1, 1))
wrong.DBS.type2 <- dplyr::filter(df6, nchar(REF) == 2, nchar(ALT) == 2,
substr(REF, 2, 2) == substr(ALT, 2, 2))
wrong.DBS <- dplyr::bind_rows(wrong.DBS.type1, wrong.DBS.type2)
if (nrow(wrong.DBS) > 0) {
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has DBS variants whose REF and ALT share a base; these were discarded. See discarded.variants ",
"in the return value for more details.")
wrong.DBS.pos <- wrong.DBS$POS
wrong.DBS$discarded.reason <- "Wrong DBS variant"
discarded.variants <-
dplyr::bind_rows(discarded.variants, wrong.DBS)
df7 <- dplyr::filter(df6, !POS %in% wrong.DBS.pos)
} else {
df7 <- df6
}
if (nrow(discarded.variants) == 0) {
return(list(df = df7))
} else {
return(list(df = df7, discarded.variants = discarded.variants))
}
}
SplitOneMutectVCF <- function(vcf.df, name.of.VCF = NULL) {
if (nrow(vcf.df) == 0) {
return(list(SBS = vcf.df, DBS = vcf.df, ID = vcf.df))
}
discarded.variants <- vcf.df[0, ]
retval <-
CheckAndRemoveDiscardedVariants(vcf = vcf.df, name.of.VCF = name.of.VCF)
df <- retval$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval$discarded.variants)
SBS.df <- df[nchar(df$REF) == 1 & nchar(df$ALT) == 1, ]
DBS.df <- df[nchar(df$REF) == 2 & nchar(df$ALT) == 2, ]
ID.df <- df[nchar(df$REF) != nchar(df$ALT), ]
if (nrow(discarded.variants) == 0) {
return(list(SBS = SBS.df, DBS = DBS.df, ID = ID.df))
} else {
return(list(SBS = SBS.df, DBS = DBS.df, ID = ID.df,
discarded.variants = discarded.variants))
}
}
SplitListOfMutectVCFs <-
function(list.of.vcfs,
suppress.discarded.variants.warnings = TRUE) {
names.of.VCFs <- names(list.of.vcfs)
GetSplitMutectVCFs <- function(idx, list.of.vcfs) {
split.vcfs <- SplitOneMutectVCF(list.of.vcfs[[idx]],
name.of.VCF = names(list.of.vcfs)[idx])
return(split.vcfs)
}
num.of.vcfs <- length(list.of.vcfs)
if (suppress.discarded.variants.warnings == TRUE) {
v1 <- suppressWarnings(lapply(1:num.of.vcfs, GetSplitMutectVCFs,
list.of.vcfs = list.of.vcfs))
} else {
v1 <- lapply(1:num.of.vcfs, GetSplitMutectVCFs,
list.of.vcfs = list.of.vcfs)
}
names(v1) <- names.of.VCFs
SBS <- lapply(v1, function(x) x$SBS)
DBS <- lapply(v1, function(x) x$DBS)
ID <- lapply(v1, function(x) x$ID)
discarded.variants <- lapply(v1, function(x) x$discarded.variants)
discarded.variants1 <- Filter(Negate(is.null), discarded.variants)
if (length(discarded.variants1) == 0) {
return(list(SBS = SBS, DBS = DBS, ID = ID))
} else {
return(list(SBS = SBS, DBS = DBS, ID = ID,
discarded.variants = discarded.variants1))
}
}
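# SplitSBSVCF: separates single-base substitution calls into true SBSs and
# DBSs. Calls at adjacent positions on the same chromosome whose VAFs differ
# by at most max.vaf.diff are merged; runs of length two become DBS records
# (via MakeVCFDBSdf) while longer runs are reported in discarded.variants.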
SplitSBSVCF <- function(vcf.df, max.vaf.diff = 0.02, name.of.VCF = NULL) {
stopifnot("data.frame" %in% class(vcf.df))
if (nrow(vcf.df) == 0) {
return(list(SBS.vcf = vcf.df, DBS.vcf = vcf.df))
}
discarded.variants <- vcf.df[0, ]
retval <-
CheckAndRemoveDiscardedVariants(vcf = vcf.df, name.of.VCF = name.of.VCF)
vcf.df <- retval$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval$discarded.variants)
num.in <- nrow(vcf.df)
vcf.dt <- data.table(vcf.df)
vcf.dt[, POS.plus.one := POS + 1]
dt2 <- merge(vcf.dt, vcf.dt,
by.x = c("CHROM", "POS"),
by.y = c("CHROM", "POS.plus.one"))
dt2[, HIGH := POS]
dt2[, LOW := POS.y]
non.SBS <- dt2[abs(VAF.x - VAF.y) <= max.vaf.diff]
rm(dt2)
if (nrow(non.SBS) == 0) {
empty <- vcf.df[-(1:nrow(vcf.df)), ]
if (nrow(discarded.variants) == 0) {
return(list(SBS.vcf = vcf.df, DBS.vcf = empty))
} else {
return(list(SBS.vcf = vcf.df, DBS.vcf = empty,
discarded.variants = discarded.variants))
}
}
pairs.to.remove <-
data.frame(non.SBS[, .(CHROM, POS = HIGH)])
pairs.to.remove <-
rbind(pairs.to.remove,
data.frame(non.SBS[, .(CHROM, POS = LOW)]))
dt.rm <- data.table(pairs.to.remove)
dt.rm$delete.flag = TRUE
out.SBS.dt <- merge(vcf.dt, dt.rm, by = c("CHROM", "POS"), all.x = TRUE)
out.SBS.dt2 <- out.SBS.dt[is.na(delete.flag)]
out.SBS.df <-
as.data.frame(out.SBS.dt2[, c("POS.plus.one", "delete.flag") := NULL])
num.SBS.out <- nrow(out.SBS.df)
non.SBS <- non.SBS[, c("CHROM", "LOW", "HIGH")]
ranges <-
GenomicRanges::GRanges(non.SBS$CHROM,
IRanges::IRanges(start = non.SBS$LOW, end = non.SBS$HIGH))
rranges <- GenomicRanges::reduce(ranges)
DBS.plus <- as.data.frame(rranges)
if ((sum(DBS.plus$width) + num.SBS.out) != num.in) {
if ((sum(DBS.plus$width) + num.SBS.out) > num.in) {
stop("Possible programming error or input problem: too many SBS")
} else {
warning("Possible site with multiple variant alleles involved in a DBS\n")
}
}
DBSx <- DBS.plus[DBS.plus$width == 2, c("seqnames", "start", "end"), ]
colnames(DBSx) <- c("CHROM", "LOW", "HIGH")
DBSx$CHROM <- as.character(DBSx$CHROM)
DBS.vcf.df <- MakeVCFDBSdf(DBSx, vcf.dt)
num.DBS.out <- nrow(DBS.vcf.df)
other.ranges <- DBS.plus[DBS.plus$width > 2, ]
if (nrow(other.ranges) > 0) {
colnames(other.ranges)[1:3] <- c("CHROM", "LOW.POS", "HIGH.POS")
other.ranges$discarded.reason <- "Variants that do not represent SBS or DBS"
if (nrow(discarded.variants) == 0) {
discarded.variants <- other.ranges
} else {
discarded.variants <- dplyr::bind_rows(discarded.variants, other.ranges)
}
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has variants involving three or more nucleotides; these were ",
"discarded. See discarded.variants in the return value for more ",
"details.")
}
num.other <- sum(other.ranges$width)
if ((num.SBS.out + 2 * num.DBS.out + num.other) != num.in) {
warning("Counts are off: ", num.SBS.out, " + 2*", num.DBS.out, " + ", num.other, " vs ", num.in, "\n")
}
if (nrow(discarded.variants) == 0) {
return(list(SBS.vcf = out.SBS.df, DBS.vcf = DBS.vcf.df))
} else {
return(list(SBS.vcf = out.SBS.df, DBS.vcf = DBS.vcf.df,
discarded.variants = discarded.variants))
}
}
SplitOneVCF <- function(vcf.df, max.vaf.diff = 0.02, name.of.VCF = NULL) {
if (nrow(vcf.df) == 0) {
return(list(SBS = vcf.df, DBS = vcf.df, ID = vcf.df))
}
discarded.variants <- vcf.df[0, ]
retval <-
CheckAndRemoveDiscardedVariants(vcf = vcf.df, name.of.VCF = name.of.VCF)
df <- retval$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval$discarded.variants)
SBS.df0 <- df[nchar(df$REF) == 1 & nchar(df$ALT) == 1, ]
split.dfs <- SplitSBSVCF(vcf.df = SBS.df0, max.vaf.diff = max.vaf.diff,
name.of.VCF = name.of.VCF)
SBS.df <- split.dfs$SBS.vcf
DBS.df0 <- split.dfs$DBS.vcf
discarded.variants <-
dplyr::bind_rows(discarded.variants, split.dfs$discarded.variants)
DBS.df1 <- df[nchar(df$REF) == 2 & nchar(df$ALT) == 2, ]
DBS.df <- dplyr::bind_rows(DBS.df0, DBS.df1)
ID.df <- df[nchar(df$REF) != nchar(df$ALT), ]
if (nrow(discarded.variants) == 0) {
return(list(SBS = SBS.df, DBS = DBS.df, ID = ID.df))
} else {
return(list(SBS = SBS.df, DBS = DBS.df, ID = ID.df,
discarded.variants = discarded.variants))
}
}
SplitListOfVCFs <-function(list.of.vcfs,
variant.caller,
max.vaf.diff = 0.02,
num.of.cores = 1,
suppress.discarded.variants.warnings = TRUE) {
names.of.VCFs <- names(list.of.vcfs)
GetSplitVCFs <- function(idx, list.of.vcfs, variant.caller) {
if (variant.caller == "mutect") {
split.vcfs <- SplitOneMutectVCF(vcf.df = list.of.vcfs[[idx]],
name.of.VCF = names(list.of.vcfs)[idx])
} else {
split.vcfs <- SplitOneVCF(list.of.vcfs[[idx]],
max.vaf.diff = max.vaf.diff,
name.of.VCF = names(list.of.vcfs)[idx])
}
return(split.vcfs)
}
num.of.vcfs <- length(list.of.vcfs)
if (suppress.discarded.variants.warnings == TRUE) {
v1 <- suppressWarnings(parallel::mclapply(1:num.of.vcfs, GetSplitVCFs,
list.of.vcfs = list.of.vcfs,
variant.caller = variant.caller,
mc.cores = num.of.cores))
} else {
v1 <- parallel::mclapply(1:num.of.vcfs, GetSplitVCFs,
list.of.vcfs = list.of.vcfs,
variant.caller = variant.caller,
mc.cores = num.of.cores)
}
names(v1) <- names.of.VCFs
SBS <- lapply(v1, function(x) x$SBS)
DBS <- lapply(v1, function(x) x$DBS)
ID <- lapply(v1, function(x) x$ID)
discarded.variants <- lapply(v1, function(x) x$discarded.variants)
discarded.variants1 <- Filter(Negate(is.null), discarded.variants)
if (length(discarded.variants1) == 0) {
return(list(SBS = SBS, DBS = DBS, ID = ID))
} else {
return(list(SBS = SBS, DBS = DBS, ID = ID,
discarded.variants = discarded.variants1))
}
}
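# AddSeqContext: adds a column with the reference sequence surrounding each
# variant (seq.context.width bases on either side, 21 bases in total by
# default), extracted from ref.genome with BSgenome::getSeq.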
AddSeqContext <-
function(df, ref.genome, seq.context.width = 10, name.of.VCF = NULL) {
if (0 == nrow(df)) return(df)
ref.genome <- NormalizeGenomeArg(ref.genome)
chr.names <- CheckAndFixChrNames(vcf.df = df,
ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
Ranges <-
GenomicRanges::GRanges(chr.names,
IRanges::IRanges(start = df$POS - seq.context.width,
end = df$POS + seq.context.width)
)
df$extracted.seq <- BSgenome::getSeq(ref.genome, Ranges, as.character = TRUE)
names(df)[names(df) == "extracted.seq"] <-
paste0("seq.", 2 * seq.context.width + 1, "bases")
return(df)
}
AddTranscript <-
function(df, trans.ranges = NULL, ref.genome, name.of.VCF = NULL) {
if (nrow(df) == 0) {
return(df)
}
if (is.null(trans.ranges)) {
return(data.table(df))
}
ref.genome <- NormalizeGenomeArg(ref.genome = ref.genome)
new.chr.names <-
CheckAndFixChrNamesForTransRanges(trans.ranges = trans.ranges,
vcf.df = df,
ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
trans.ranges$chrom <- new.chr.names
if (!data.table::haskey(trans.ranges)) {
data.table::setkeyv(trans.ranges, c("chrom", "start", "end"))
}
df1 <- data.table(df)
df1[, POS2 := POS]
dt <- data.table::foverlaps(df1, trans.ranges,
by.x = c("CHROM", "POS", "POS2"),
type = "within", mult = "all")
dt1 <- dt %>% dplyr::group_by(CHROM, ALT, POS) %>%
dplyr::mutate(bothstrand = "+" %in% strand && "-" %in% strand)
data.table::setDT(dt1)
dt2 <- dt1 %>% dplyr::group_by(CHROM, ALT, POS) %>% dplyr::mutate(count = dplyr::n())
data.table::setDT(dt2)
dt3 <- dt2[strand == "-", c("end", "start") := .(start, end)]
df.colnames <- colnames(df)
trans.ranges.colnames <- colnames(trans.ranges)[-1]
data.table::setcolorder(dt3, neworder = c(df.colnames, trans.ranges.colnames))
data.table::setnames(dt3,
old = c("start", "end", "strand", "Ensembl.gene.ID", "gene.symbol"),
new = c("trans.start.pos", "trans.end.pos", "trans.strand",
"trans.Ensembl.gene.ID", "trans.gene.symbol"))
dt4 <- dt3[, POS2 := NULL]
return(dt4)
}
MakeVCFDBSdf <- function(DBS.range.df, SBS.vcf.dt) {
tmpvcf <- SBS.vcf.dt
DBS.range.dt <- as.data.table(DBS.range.df)
tmp1 <- merge(DBS.range.dt, tmpvcf,
by.x = c("CHROM", "LOW"),
by.y = c("CHROM", "POS"))
tmp2 <- merge(tmp1, tmpvcf,
by.x = c("CHROM", "HIGH"),
by.y = c("CHROM", "POS"))
tmp2[, read.depth := pmin(read.depth.x, read.depth.y)]
tmp2[, VAF := rowMeans(cbind(VAF.x, VAF.y))]
tmp2[, POS := LOW]
tmp2[, remark.for.DBS := "From merged SBSs"]
tmp2[, REF := paste0(REF.x, REF.y)]
tmp2[, ALT := paste0(ALT.x, ALT.y)]
tmp2[, c("read.depth.x", "read.depth.y", "VAF.x", "VAF.y", "LOW", "HIGH",
"REF.x", "REF.y", "ALT.x", "ALT.y", "POS.plus.one.x",
"POS.plus.one.y") := NULL]
old.col.names <- setdiff(colnames(SBS.vcf.dt), "POS.plus.one")
col.names.order1 <-
c("CHROM", "POS", "REF", "ALT", "VAF", "read.depth", "remark.for.DBS")
col.names.order2 <- setdiff(old.col.names, col.names.order1)
for (name in col.names.order2) {
name1 <- paste0(name, c(".x", ".y"))
GetUniqueInformation <- function(x) {
y <- paste(unique(unlist(x)), collapse = ",")
class(y) <- class(unique(unlist(x)))
return(y)
}
tmp2[, (name) := apply(X = .SD, MARGIN = 1,
FUN = GetUniqueInformation), .SDcols = name1]
tmp2[, (name1) := NULL]
}
col.names.order <- c(col.names.order1, col.names.order2)
return(tmp2[, ..col.names.order])
}
SplitStrelkaSBSVCF <- function(vcf.df, max.vaf.diff = 0.02, name.of.VCF = NULL) {
stopifnot("data.frame" %in% class(vcf.df))
if (nrow(vcf.df) == 0) {
return(list(SBS.vcf = vcf.df, DBS.vcf = vcf.df))
}
discarded.variants <- vcf.df[0, ]
retval <-
CheckAndRemoveDiscardedVariants(vcf = vcf.df, name.of.VCF = name.of.VCF)
vcf.df <- retval$df
discarded.variants <-
dplyr::bind_rows(discarded.variants, retval$discarded.variants)
num.in <- nrow(vcf.df)
vcf.dt <- data.table(vcf.df)
vcf.dt[, POS.plus.one := POS + 1]
dt2 <- merge(vcf.dt, vcf.dt,
by.x = c("CHROM", "POS"),
by.y = c("CHROM", "POS.plus.one"))
dt2[, HIGH := POS]
dt2[, LOW := POS.y]
non.SBS <- dt2[abs(VAF.x - VAF.y) <= max.vaf.diff]
rm(dt2)
if (nrow(non.SBS) == 0) {
empty <- vcf.df[-(1:nrow(vcf.df)), ]
if (nrow(discarded.variants) == 0) {
return(list(SBS.vcf = vcf.df, DBS.vcf = empty))
} else {
return(list(SBS.vcf = vcf.df, DBS.vcf = empty,
discarded.variants = discarded.variants))
}
}
pairs.to.remove <-
data.frame(non.SBS[, .(CHROM, POS = HIGH)])
pairs.to.remove <-
rbind(pairs.to.remove,
data.frame(non.SBS[, .(CHROM, POS = LOW)]))
dt.rm <- data.table(pairs.to.remove)
  dt.rm$delete.flag <- TRUE
out.SBS.dt <- merge(vcf.dt, dt.rm, by = c("CHROM", "POS"), all.x = TRUE)
out.SBS.dt2 <- out.SBS.dt[is.na(delete.flag)]
out.SBS.df <-
as.data.frame(out.SBS.dt2[, c("POS.plus.one", "delete.flag") := NULL])
num.SBS.out <- nrow(out.SBS.df)
non.SBS <- non.SBS[, c("CHROM", "LOW", "HIGH")]
ranges <-
GenomicRanges::GRanges(non.SBS$CHROM,
IRanges::IRanges(start = non.SBS$LOW, end = non.SBS$HIGH))
rranges <- GenomicRanges::reduce(ranges)
DBS.plus <- as.data.frame(rranges)
if ((sum(DBS.plus$width) + num.SBS.out) != num.in) {
if ((sum(DBS.plus$width) + num.SBS.out) > num.in) {
stop("Possible programming error or input problem: too many SBS")
} else {
warning("Possible site with multiple variant alleles involved in a DBS\n")
}
}
DBSx <- DBS.plus[DBS.plus$width == 2, c("seqnames", "start", "end"), ]
colnames(DBSx) <- c("CHROM", "LOW", "HIGH")
DBSx$CHROM <- as.character(DBSx$CHROM)
DBS.vcf.df <- MakeVCFDBSdf(DBSx, vcf.dt)
num.DBS.out <- nrow(DBS.vcf.df)
other.ranges <- DBS.plus[DBS.plus$width > 2, ]
if (nrow(other.ranges) > 0) {
colnames(other.ranges)[1:3] <- c("CHROM", "LOW.POS", "HIGH.POS")
other.ranges$discarded.reason <- "Variants that do not represent SBS or DBS"
if (nrow(discarded.variants) == 0) {
discarded.variants <- other.ranges
} else {
discarded.variants <- dplyr::bind_rows(discarded.variants, other.ranges)
}
warning("VCF ", ifelse(is.null(name.of.VCF), "", dQuote(name.of.VCF)),
" has variants involving three or more nucleotides and were ",
"discarded. See discarded.variants in the return value for more ",
"details.")
}
num.other <- sum(other.ranges$width)
if ((num.SBS.out + 2 * num.DBS.out + num.other) != num.in) {
warning("Counts are off:", num.SBS.out, 2*num.DBS.out, num.other, "vs", num.in, "\n")
}
if (nrow(discarded.variants) == 0) {
return(list(SBS.vcf = out.SBS.df, DBS.vcf = DBS.vcf.df))
} else {
return(list(SBS.vcf = out.SBS.df, DBS.vcf = DBS.vcf.df,
discarded.variants = discarded.variants))
}
}
SplitListOfStrelkaSBSVCFs <-
function(list.of.vcfs, suppress.discarded.variants.warnings = TRUE) {
names.of.VCFs <- names(list.of.vcfs)
GetSplitStrelkaSBSVCFs <- function(idx, list.of.vcfs) {
split.vcfs <- SplitStrelkaSBSVCF(list.of.vcfs[[idx]],
name.of.VCF = names(list.of.vcfs)[idx])
return(split.vcfs)
}
num.of.vcfs <- length(list.of.vcfs)
if (suppress.discarded.variants.warnings == TRUE) {
split.vcfs <-
suppressWarnings(lapply(1:num.of.vcfs, GetSplitStrelkaSBSVCFs,
list.of.vcfs = list.of.vcfs))
} else {
split.vcfs <- lapply(1:num.of.vcfs, GetSplitStrelkaSBSVCFs,
list.of.vcfs = list.of.vcfs)
}
names(split.vcfs) <- names.of.VCFs
SBS.vcfs <- lapply(split.vcfs, function(x) x$SBS.vcf)
DBS.vcfs <- lapply(split.vcfs, function(x) x$DBS.vcf)
discarded.variants <- lapply(split.vcfs, function(x) x$discarded.variants)
discarded.variants1 <- Filter(Negate(is.null), discarded.variants)
if (length(discarded.variants1) == 0) {
return(list(SBS.vcfs = SBS.vcfs, DBS.vcfs = DBS.vcfs))
} else {
return(list(SBS.vcfs = SBS.vcfs, DBS.vcfs = DBS.vcfs,
discarded.variants = discarded.variants1))
}
}
CheckSeqContextInVCF <- function(vcf, column.to.use) {
if (0 == nrow(vcf)) return()
stopifnot(nchar(vcf$REF) == nchar(vcf$ALT))
stopifnot(!any(vcf$REF == '-'))
stopifnot(!any(vcf$ALT == '-'))
vcf <- data.table::as.data.table(vcf)
cut.pos <- 1 + (nchar(unlist(vcf[, ..column.to.use])) - 1) / 2
stopifnot(cut.pos == round(cut.pos))
cut.from.ref <- substr(unlist(vcf[, ..column.to.use]), cut.pos,
(cut.pos + nchar(vcf$REF)) - 1)
error.rows <- which(vcf$REF != cut.from.ref)
  if (length(error.rows) > 0) {
temp <- tempfile(fileext = ".csv")
write.csv(vcf[error.rows, ], file = temp)
stop("Seqence context of reference allele is inconsistent,",
"see file ", temp)
}
}
ReadStrelkaSBSVCFs <- function(files, names.of.VCFs = NULL) {
vcfs <-
lapply(files, FUN = ReadStrelkaSBSVCF, name.of.VCF = names.of.VCFs)
if (is.null(names.of.VCFs)) {
names(vcfs) <- tools::file_path_sans_ext(basename(files))
} else {
CheckNamesOfVCFs(files, names.of.VCFs)
names(vcfs) <- names.of.VCFs
}
return(vcfs)
}
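# Hedged usage sketch (not part of the original source): ReadStrelkaSBSVCFs
# takes a character vector of Strelka SBS VCF paths and returns a named list
# of per-sample data frames. The file paths below are hypothetical.
if (FALSE) {
  strelka.vcfs <- ReadStrelkaSBSVCFs(
    files = c("tumor1.vcf", "tumor2.vcf"),
    names.of.VCFs = c("sample1", "sample2"))
}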
ReadMutectVCFs <-
function(files, names.of.VCFs = NULL, tumor.col.names = NA) {
if (is.null(names.of.VCFs)) {
vcfs.names <- tools::file_path_sans_ext(basename(files))
} else {
CheckNamesOfVCFs(files, names.of.VCFs)
vcfs.names <- names.of.VCFs
}
num.of.files <- length(files)
if (all(is.na(tumor.col.names))) {
tumor.col.names <- rep(NA, num.of.files)
}
GetMutectVCFs <- function(idx, files, names.of.VCFs, tumor.col.names) {
ReadMutectVCF(file = files[idx], name.of.VCF = names.of.VCFs[idx],
tumor.col.name = tumor.col.names[idx])
}
vcfs <- lapply(1:num.of.files, FUN = GetMutectVCFs,
files = files, names.of.VCFs = vcfs.names,
tumor.col.names = tumor.col.names)
names(vcfs) <- vcfs.names
return(vcfs)
}
AnnotateSBSVCF <- function(SBS.vcf, ref.genome,
trans.ranges = NULL, name.of.VCF = NULL) {
SBS.vcf <- AddSeqContext(df = SBS.vcf, ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
CheckSeqContextInVCF(SBS.vcf, "seq.21bases")
trans.ranges <- InferTransRanges(ref.genome, trans.ranges)
if (!is.null(trans.ranges)) {
SBS.vcf <- AddTranscript(df = SBS.vcf, trans.ranges = trans.ranges,
ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
}
return(as.data.table(SBS.vcf))
}
AddSBSClass <- function(vcf) {
col.names <- colnames(vcf)
vcf$SBS1536.class <- paste0(substr(vcf$seq.21bases, 9, 13), vcf$ALT)
vcf$SBS1536.class <- PyrPenta(vcf$SBS1536.class)
vcf$SBS96.class <- paste0(substr(vcf$SBS1536.class, 2, 4),
substr(vcf$SBS1536.class, 6, 6))
vcf$SBS192.class <- NA
idx <- which(!is.na(vcf$trans.strand) & (vcf$bothstrand == FALSE))
vcf$SBS192.class[idx] <- vcf$SBS96.class[idx]
idx1 <- which(vcf$trans.strand == "-" & (vcf$bothstrand == FALSE))
vcf$SBS192.class[idx1] <- RevcSBS96(vcf$SBS192.class[idx1])
new.col.names <- c(col.names, "SBS96.class", "SBS192.class", "SBS1536.class")
data.table::setDT(vcf)
setcolorder(vcf, new.col.names)
return(vcf)
}
CheckSBSClassInVCF <- function(vcf, mat, sample.id) {
if (nrow(mat) %in% c(96, 1536)) {
df <- dplyr::distinct(vcf, CHROM, ALT, POS, .keep_all = TRUE)
if (nrow(df) != colSums(mat)) {
stop("In sample ", sample.id, ", the number of SBS", nrow(mat),
" variants in the annotated VCF is not the same as the total ",
"counts in mutation matrix.")
}
} else {
df1 <- vcf[!is.na(trans.strand), ]
df2 <- df1[bothstrand == FALSE, ]
df3 <- dplyr::distinct(df2, CHROM, ALT, POS, .keep_all = TRUE)
if (nrow(df3) != colSums(mat)) {
stop("In sample ", sample.id, ", the number of SBS", nrow(mat),
" variants in the annotated VCF is not the same as the total ",
"counts in mutation matrix.")
}
}
}
AddAndCheckSBSClassInVCF <-
function(vcf, mat96, mat1536, mat192 = NULL, sample.id) {
vcf1 <- AddSBSClass(vcf)
CheckSBSClassInVCF(vcf1, mat96, sample.id)
CheckSBSClassInVCF(vcf1, mat1536, sample.id)
if (!is.null(mat192)) {
CheckSBSClassInVCF(vcf1, mat192, sample.id)
}
return(vcf1)
}
CheckAndReturnSBSMatrix <-
function(vcf, discarded.variants, mat96, mat1536, mat192 = NULL,
return.annotated.vcf = FALSE, sample.id = "counts") {
if (nrow(discarded.variants) == 0) {
if (is.null(mat192)) {
if (return.annotated.vcf == FALSE) {
return(list(catSBS96 = mat96, catSBS1536 = mat1536))
} else {
vcf.SBS.class <-
AddAndCheckSBSClassInVCF(vcf, mat96, mat1536, mat192, sample.id)
return(list(catSBS96 = mat96, catSBS1536 = mat1536,
annotated.vcf = vcf.SBS.class))
}
} else {
if (return.annotated.vcf == FALSE) {
return(list(catSBS96 = mat96, catSBS192 = mat192,
catSBS1536 = mat1536))
} else {
vcf.SBS.class <-
AddAndCheckSBSClassInVCF(vcf, mat96, mat1536, mat192, sample.id)
return(list(catSBS96 = mat96, catSBS192 = mat192, catSBS1536 = mat1536,
annotated.vcf = vcf.SBS.class))
}
}
} else {
if (is.null(mat192)) {
if (return.annotated.vcf == FALSE) {
return(list(catSBS96 = mat96, catSBS1536 = mat1536,
discarded.variants = discarded.variants))
} else {
vcf.SBS.class <-
AddAndCheckSBSClassInVCF(vcf, mat96, mat1536, mat192, sample.id)
return(list(catSBS96 = mat96, catSBS1536 = mat1536,
annotated.vcf = vcf.SBS.class,
discarded.variants = discarded.variants))
}
} else {
if (return.annotated.vcf == FALSE) {
return(list(catSBS96 = mat96, catSBS192 = mat192,
catSBS1536 = mat1536,
discarded.variants = discarded.variants))
} else {
vcf.SBS.class <-
AddAndCheckSBSClassInVCF(vcf, mat96, mat1536, mat192, sample.id)
return(list(catSBS96 = mat96, catSBS192 = mat192, catSBS1536 = mat1536,
annotated.vcf = vcf.SBS.class,
discarded.variants = discarded.variants))
}
}
}
}
CreateOneColSBSMatrix <- function(vcf, sample.id = "count",
return.annotated.vcf = FALSE) {
CheckForEmptySBSVCF <- function(vcf, return.annotated.vcf) {
if (0 == nrow(vcf)) {
catSBS96 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$SBS96), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$SBS96, sample.id))
catSBS192 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$SBS192), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$SBS192, sample.id))
catSBS1536 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$SBS1536), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$SBS1536, sample.id))
if (return.annotated.vcf == FALSE) {
return(list(catSBS96 = catSBS96, catSBS192 = catSBS192,
catSBS1536 = catSBS1536))
} else {
return(list(catSBS96 = catSBS96, catSBS192 = catSBS192,
catSBS1536 = catSBS1536, annotated.vcf = vcf))
}
} else {
return(FALSE)
}
}
ret1 <- CheckForEmptySBSVCF(vcf = vcf,
return.annotated.vcf = return.annotated.vcf)
if (!is.logical(ret1)) {
return(ret1)
}
stopifnot(nchar(vcf$ALT) == 1)
stopifnot(nchar(vcf$REF) == 1)
stopifnot(vcf$ALT != vcf$REF)
discarded.variants <- vcf[0]
mismatches <- which(vcf$REF != substr(vcf$seq.21bases, 11, 11))
if (length(mismatches) != 0) {
discarded.variants <- rbind(discarded.variants, vcf[mismatches, ])
discarded.variants$discarded.reason <-
paste0('SBS variant whose reference base in ref.genome does not match the',
' reference base in the VCF file.')
message("In sample ", sample.id, " ", length(mismatches), " row out of ",
nrow(vcf), " had reference base in ref.genome that does not match the ",
"reference base in the VCF file.\n",
"Please check the ref.genome argument.\n",
"See discarded.variants in the return value for more details")
vcf <- vcf[-mismatches, ]
}
idx <- grep("N", substr(vcf$seq.21bases, 9, 13))
  if (length(idx) != 0) {
discarded.variants <- rbind(discarded.variants, vcf[idx, ])
discarded.variants$discarded.reason <-
'SBS variant whose pentanucleotide context contains "N"'
vcf <- vcf[-idx, ]
warning(
'Variants in the SBS vcf ', sample.id,
' whose pentanucleotide context contains "N" ',
'have been deleted so as not to conflict with downstream processing. ',
'See discarded.variants in the return value for more details.')
}
ret2 <- CheckForEmptySBSVCF(vcf = vcf,
return.annotated.vcf = return.annotated.vcf)
if (!is.logical(ret2)) {
return(ret2)
}
vcf0 <- vcf
context <- substr(vcf$seq.21bases, 9, 13)
vcf$mutation <- paste0(context, vcf$ALT)
vcf$pyr.mut <- PyrPenta(vcf$mutation)
vcf1 <- vcf %>% dplyr::group_by(CHROM, ALT, POS) %>%
dplyr::summarise(REF = REF[1], pyr.mut = pyr.mut[1])
tab1536 <- table(vcf1[, "pyr.mut"])
stopifnot(setequal(
setdiff(names(tab1536), ICAMS::catalog.row.order$SBS1536),
c()))
dt1536 <- data.table(tab1536)
colnames(dt1536) <- c("rn", "count")
d <- data.table(rn = ICAMS::catalog.row.order$SBS1536)
stopifnot(length(ICAMS::catalog.row.order$SBS1536) == 1536)
x <- merge(d, dt1536, by = "rn", all.x = TRUE)
x[is.na(count), count := 0]
stopifnot(sum(x$count) == nrow(vcf1))
mat1536 <- matrix(x$count)
rownames(mat1536) <- x$rn
mat1536 <- mat1536[ICAMS::catalog.row.order$SBS1536, , drop = FALSE]
colnames(mat1536) <- sample.id
x[, nrn := paste0(substr(rn, 2, 4), substr(rn, 6, 6))]
dt96 <- x[, sum(count), by = nrn]
stopifnot(nrow(dt96) == 96)
mat96 <- matrix(dt96$V1)
rownames(mat96) <- dt96$nrn
mat96 <- mat96[ICAMS::catalog.row.order$SBS96, , drop = FALSE]
colnames(mat96) <- sample.id
if (is.null(vcf$trans.strand)) {
retval <-
CheckAndReturnSBSMatrix(vcf = vcf0, discarded.variants = discarded.variants,
mat96 = mat96, mat1536 = mat1536, mat192 = NULL,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
return(retval)
}
vcf2 <- vcf[bothstrand == FALSE, ]
vcf3 <- vcf2 %>% dplyr::group_by(CHROM, ALT, POS) %>%
dplyr::summarise(REF = REF[1], mutation = mutation[1], trans.strand = trans.strand[1])
if (nrow(vcf3) == 0) {
mat192 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$SBS192), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$SBS192, sample.id))
retval <-
CheckAndReturnSBSMatrix(vcf = vcf0, discarded.variants = discarded.variants,
mat96 = mat96, mat1536 = mat1536, mat192 = mat192,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
return(retval)
}
tab192 <- table(paste0(substr(vcf3$mutation, 2, 4),
substr(vcf3$mutation, 6, 6)),
vcf3$trans.strand,
useNA = "ifany")
stopifnot(sum(tab192) == nrow(vcf3))
dt192 <- as.data.table(tab192)
colnames(dt192) <- c("rn", "trans.strand", "count")
dt192 <- dt192[!is.na(trans.strand)]
dt192[trans.strand == "-", rn := RevcSBS96(rn)]
dt192 <- dt192[ , .(count = sum(count)), by = rn]
x192 <- data.table(rn = ICAMS::catalog.row.order$SBS192)
x <- merge(x192, dt192, by = "rn", all.x = TRUE)
x[is.na(count), count := 0]
mat192 <- matrix(x[, count])
rownames(mat192) <- unlist(x[, 1])
mat192 <- mat192[ICAMS::catalog.row.order$SBS192, , drop = FALSE]
colnames(mat192) <- sample.id
CheckAndReturnSBSMatrix(vcf = vcf0, discarded.variants = discarded.variants,
mat96 = mat96, mat1536 = mat1536, mat192 = mat192,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
}
AnnotateDBSVCF <- function(DBS.vcf, ref.genome,
trans.ranges = NULL, name.of.VCF = NULL) {
DBS.vcf <- AddSeqContext(df = DBS.vcf, ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
CheckSeqContextInVCF(DBS.vcf, "seq.21bases")
trans.ranges <- InferTransRanges(ref.genome, trans.ranges)
if (!is.null(trans.ranges)) {
DBS.vcf <- AddTranscript(df = DBS.vcf, trans.ranges = trans.ranges,
ref.genome = ref.genome,
name.of.VCF = name.of.VCF)
}
return(as.data.table(DBS.vcf))
}
AddDBSClass <- function(vcf) {
vcf$DBS78.class <- CanonicalizeDBS(vcf$REF, vcf$ALT)
vcf$DBS136.class <- CanonicalizeQUAD(substr(vcf$seq.21bases, 10, 13))
vcf$DBS144.class <- NA
idx <- which(!is.na(vcf$trans.strand) & (vcf$bothstrand == FALSE))
vcf$DBS144.class[idx] <- paste0(vcf$REF[idx], vcf$ALT[idx])
idx1 <- which(vcf$trans.strand == "-" & (vcf$bothstrand == FALSE))
vcf$DBS144.class[idx1] <- RevcDBS144(vcf$DBS144.class[idx1])
return(vcf)
}
CheckDBSClassInVCF <- function(vcf, mat, sample.id) {
if (nrow(mat) %in% c(78, 136)) {
df <- dplyr::distinct(vcf, CHROM, ALT, POS, .keep_all = TRUE)
if (nrow(df) != colSums(mat)) {
stop("In sample ", sample.id, ", the number of DBS", nrow(mat),
" variants in the annotated VCF is not the same as the total ",
"counts in mutation matrix.")
}
} else {
df1 <- vcf[!is.na(trans.strand), ]
df2 <- df1[bothstrand == FALSE, ]
df3 <- dplyr::distinct(df2, CHROM, ALT, POS, .keep_all = TRUE)
if (nrow(df3) != colSums(mat)) {
stop("In sample ", sample.id, ", the number of DBS", nrow(mat),
" variants in the annotated VCF is not the same as the total ",
"counts in mutation matrix.")
}
}
}
AddAndCheckDBSClassInVCF <-
function(vcf, mat78, mat136, mat144 = NULL, sample.id) {
vcf1 <- AddDBSClass(vcf)
CheckDBSClassInVCF(vcf1, mat78, sample.id)
CheckDBSClassInVCF(vcf1, mat136, sample.id)
if (!is.null(mat144)) {
CheckDBSClassInVCF(vcf1, mat144, sample.id)
}
return(vcf1)
}
CheckAndReturnDBSMatrix <-
function(vcf, discarded.variants, mat78, mat136, mat144 = NULL,
return.annotated.vcf = FALSE, sample.id = "counts") {
if (nrow(discarded.variants) == 0) {
if (is.null(mat144)) {
if (return.annotated.vcf == FALSE) {
return(list(catDBS78 = mat78, catDBS136 = mat136))
} else {
vcf.DBS.class <-
AddAndCheckDBSClassInVCF(vcf, mat78, mat136, mat144, sample.id)
return(list(catDBS78 = mat78, catDBS136 = mat136,
annotated.vcf = vcf.DBS.class))
}
} else {
if (return.annotated.vcf == FALSE) {
return(list(catDBS78 = mat78, catDBS144 = mat144,
catDBS136 = mat136))
} else {
vcf.DBS.class <-
AddAndCheckDBSClassInVCF(vcf, mat78, mat136, mat144, sample.id)
return(list(catDBS78 = mat78, catDBS144 = mat144, catDBS136 = mat136,
annotated.vcf = vcf.DBS.class))
}
}
} else {
if (is.null(mat144)) {
if (return.annotated.vcf == FALSE) {
return(list(catDBS78 = mat78, catDBS136 = mat136,
discarded.variants = discarded.variants))
} else {
vcf.DBS.class <-
AddAndCheckDBSClassInVCF(vcf, mat78, mat136, mat144, sample.id)
return(list(catDBS78 = mat78, catDBS136 = mat136,
annotated.vcf = vcf.DBS.class,
discarded.variants = discarded.variants))
}
} else {
if (return.annotated.vcf == FALSE) {
return(list(catDBS78 = mat78, catDBS144 = mat144,
catDBS136 = mat136,
discarded.variants = discarded.variants))
} else {
vcf.DBS.class <-
AddAndCheckDBSClassInVCF(vcf, mat78, mat136, mat144, sample.id)
return(list(catDBS78 = mat78, catDBS144 = mat144, catDBS136 = mat136,
annotated.vcf = vcf.DBS.class,
discarded.variants = discarded.variants))
}
}
}
}
CreateOneColDBSMatrix <- function(vcf, sample.id = "count",
return.annotated.vcf = FALSE) {
CheckForEmptyDBSVCF <- function(vcf, return.annotated.vcf) {
if (0 == nrow(vcf)) {
catDBS78 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$DBS78), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$DBS78, sample.id))
catDBS136 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$DBS136), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$DBS136, sample.id))
catDBS144 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$DBS144), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$DBS144, sample.id))
if (return.annotated.vcf == FALSE) {
return(list(catDBS78 = catDBS78, catDBS136 = catDBS136,
catDBS144 = catDBS144))
} else {
return(list(catDBS78 = catDBS78, catDBS136 = catDBS136,
catDBS144 = catDBS144, annotated.vcf = vcf))
}
} else {
return(FALSE)
}
}
ret1 <- CheckForEmptyDBSVCF(vcf = vcf,
return.annotated.vcf = return.annotated.vcf)
if (!is.logical(ret1)) {
return(ret1)
}
stopifnot(nchar(vcf$ALT) == 2)
stopifnot(nchar(vcf$REF) == 2)
discarded.variants <- vcf[0]
idx <- grep("N", substr(vcf$seq.21bases, 10, 13))
  if (length(idx) != 0) {
discarded.variants <- rbind(discarded.variants, vcf[idx, ])
discarded.variants$discarded.reason <-
'DBS variant whose tetranucleotide context contains "N"'
vcf <- vcf[-idx, ]
warning(
'Variants in the DBS vcf ', sample.id,
' whose tetranucleotide context contains "N" ',
'have been deleted so as not to conflict with downstream processing. ',
'See discarded.variants in the return value for more details.')
}
ret2 <- CheckForEmptyDBSVCF(vcf = vcf,
return.annotated.vcf = return.annotated.vcf)
if (!is.logical(ret2)) {
return(ret2)
}
vcf1 <- vcf %>% dplyr::group_by(CHROM, ALT, POS) %>%
dplyr::summarise(REF = REF[1], seq.21bases = seq.21bases[1])
canon.DBS.78 <- CanonicalizeDBS(vcf1$REF, vcf1$ALT)
tab.DBS.78 <- table(canon.DBS.78)
row.order.78 <- data.table(rn = ICAMS::catalog.row.order$DBS78)
DBS.dt.78 <- as.data.table(tab.DBS.78)
DBS.dt.78.2 <-
merge(row.order.78, DBS.dt.78,
by.x = "rn", by.y = "canon.DBS.78", all = TRUE)
DBS.dt.78.2[is.na(N), N := 0]
stopifnot(DBS.dt.78.2$rn == ICAMS::catalog.row.order$DBS78)
DBS.mat.78 <- as.matrix(DBS.dt.78.2[, 2])
rownames(DBS.mat.78) <- DBS.dt.78.2$rn
  colnames(DBS.mat.78) <- sample.id
canon.DBS.136 <- CanonicalizeQUAD(substr(vcf1$seq.21bases, 10, 13))
tab.DBS.136 <- table(canon.DBS.136)
row.order.136 <- data.table(rn = ICAMS::catalog.row.order$DBS136)
DBS.dt.136 <- as.data.table(tab.DBS.136)
DBS.dt.136.2 <-
merge(row.order.136, DBS.dt.136,
by.x = "rn", by.y = "canon.DBS.136", all = TRUE)
DBS.dt.136.2[is.na(N), N := 0]
stopifnot(DBS.dt.136.2$rn == ICAMS::catalog.row.order$DBS136)
DBS.mat.136 <- as.matrix(DBS.dt.136.2[, 2])
rownames(DBS.mat.136) <- DBS.dt.136.2$rn
  colnames(DBS.mat.136) <- sample.id
if (is.null(vcf$trans.strand)) {
retval <-
CheckAndReturnDBSMatrix(vcf = vcf, discarded.variants = discarded.variants,
mat78 = DBS.mat.78, mat136 = DBS.mat.136,
mat144 = NULL,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
return(retval)
}
vcf2 <- vcf[bothstrand == FALSE, ]
vcf3 <- vcf2 %>% dplyr::group_by(CHROM, ALT, POS) %>%
dplyr::summarise(REF = REF[1], trans.strand = trans.strand[1])
if (nrow(vcf3) == 0) {
DBS.mat.144 <-
matrix(0, nrow = length(ICAMS::catalog.row.order$DBS144), ncol = 1,
dimnames = list(ICAMS::catalog.row.order$DBS144, sample.id))
retval <-
CheckAndReturnDBSMatrix(vcf = vcf, discarded.variants = discarded.variants,
mat78 = DBS.mat.78, mat136 = DBS.mat.136,
mat144 = DBS.mat.144,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
return(retval)
}
tab.DBS.144 <-
table(paste0(vcf3$REF, vcf3$ALT), vcf3$trans.strand, useNA = "ifany")
stopifnot(sum(tab.DBS.144) == nrow(vcf3))
DBS.dt.144 <- as.data.table(tab.DBS.144)
colnames(DBS.dt.144) <- c("rn", "trans.strand", "count")
DBS.dt.144 <- DBS.dt.144[!is.na(trans.strand)]
DBS.dt.144[trans.strand == "-", rn := RevcDBS144(rn)]
DBS.dt.144 <- DBS.dt.144[, .(count = sum(count)), by = rn]
row.order.144 <- data.table(rn = ICAMS::catalog.row.order$DBS144)
DBS.dt.144.2 <- merge(row.order.144, DBS.dt.144, by = "rn", all.x = TRUE)
DBS.dt.144.2[is.na(count), count := 0]
stopifnot(DBS.dt.144.2$rn == ICAMS::catalog.row.order$DBS144)
DBS.mat.144 <- as.matrix(DBS.dt.144.2[, 2])
rownames(DBS.mat.144) <- DBS.dt.144.2$rn
  colnames(DBS.mat.144) <- sample.id
CheckAndReturnDBSMatrix(vcf = vcf, discarded.variants = discarded.variants,
mat78 = DBS.mat.78, mat136 = DBS.mat.136,
mat144 = DBS.mat.144,
return.annotated.vcf = return.annotated.vcf,
sample.id = sample.id)
}
StrelkaSBSVCFFilesToCatalogAndPlotToPdf <-
function(files,
ref.genome,
trans.ranges = NULL,
region = "unknown",
names.of.VCFs = NULL,
output.file = "",
return.annotated.vcfs = FALSE,
suppress.discarded.variants.warnings = TRUE) {
catalogs0 <-
StrelkaSBSVCFFilesToCatalog(files, ref.genome, trans.ranges,
region, names.of.VCFs,
return.annotated.vcfs,
suppress.discarded.variants.warnings)
catalogs <- catalogs0
catalogs$discarded.variants <- catalogs$annotated.vcfs <- NULL
if (output.file != "") output.file <- paste0(output.file, ".")
for (name in names(catalogs)) {
PlotCatalogToPdf(catalogs[[name]],
file = paste0(output.file, name, ".pdf"))
if (name == "catSBS192") {
PlotCatalogToPdf(catalogs[[name]],
file = paste0(output.file, "SBS12.pdf"),
plot.SBS12 = TRUE)
}
}
return(catalogs)
}
StrelkaIDVCFFilesToCatalogAndPlotToPdf <-
function(files,
ref.genome,
region = "unknown",
names.of.VCFs = NULL,
output.file = "",
flag.mismatches = 0,
return.annotated.vcfs = FALSE,
suppress.discarded.variants.warnings = TRUE) {
list <-
StrelkaIDVCFFilesToCatalog(files, ref.genome, region, names.of.VCFs,
flag.mismatches, return.annotated.vcfs,
suppress.discarded.variants.warnings)
if (output.file != "") output.file <- paste0(output.file, ".")
PlotCatalogToPdf(list$catalog, file = paste0(output.file, "catID", ".pdf"))
return(list)
}
MutectVCFFilesToCatalogAndPlotToPdf <-
function(files,
ref.genome,
trans.ranges = NULL,
region = "unknown",
names.of.VCFs = NULL,
tumor.col.names = NA,
output.file = "",
flag.mismatches = 0,
return.annotated.vcfs = FALSE,
suppress.discarded.variants.warnings = TRUE) {
catalogs0 <-
MutectVCFFilesToCatalog(files, ref.genome, trans.ranges,
region, names.of.VCFs, tumor.col.names,
flag.mismatches, return.annotated.vcfs,
suppress.discarded.variants.warnings)
catalogs <- catalogs0
catalogs$discarded.variants <- catalogs$annotated.vcfs <- NULL
if (output.file != "") output.file <- paste0(output.file, ".")
for (name in names(catalogs)) {
PlotCatalogToPdf(catalogs[[name]],
file = paste0(output.file, name, ".pdf"))
if (name == "catSBS192") {
PlotCatalogToPdf(catalogs[[name]],
file = paste0(output.file, "SBS12.pdf"),
plot.SBS12 = TRUE)
}
}
return(catalogs0)
}
VCFsToCatalogsAndPlotToPdf <-
function(files,
output.dir,
ref.genome,
variant.caller = "unknown",
num.of.cores = 1,
trans.ranges = NULL,
region = "unknown",
names.of.VCFs = NULL,
tumor.col.names = NA,
filter.status = NULL,
get.vaf.function = NULL,
...,
max.vaf.diff = 0.02,
base.filename = "",
return.annotated.vcfs = FALSE,
suppress.discarded.variants.warnings = TRUE) {
num.of.cores <- AdjustNumberOfCores(num.of.cores)
catalogs0 <-
VCFsToCatalogs(files = files,
ref.genome = ref.genome,
variant.caller = variant.caller,
num.of.cores = num.of.cores,
trans.ranges = trans.ranges,
region = region,
names.of.VCFs = names.of.VCFs,
tumor.col.names = tumor.col.names,
filter.status = filter.status,
get.vaf.function = get.vaf.function,
... = ...,
max.vaf.diff = max.vaf.diff,
return.annotated.vcfs = return.annotated.vcfs,
suppress.discarded.variants.warnings =
suppress.discarded.variants.warnings)
catalogs <- catalogs0
catalogs$discarded.variants <- catalogs$annotated.vcfs <- NULL
if (base.filename != "") base.filename <- paste0(base.filename, ".")
for (name in names(catalogs)) {
PlotCatalogToPdf(catalogs[[name]],
file = file.path(output.dir,
paste0(base.filename, name, ".pdf")))
if (name == "catSBS192") {
PlotCatalogToPdf(catalogs[[name]],
file = file.path(output.dir,
paste0(base.filename, "SBS12.pdf")),
plot.SBS12 = TRUE)
}
}
return(catalogs0)
}
CanonicalizeDBS <- function(ref.vec, alt.vec) {
DBS <- paste0(ref.vec, alt.vec)
idx <- which(!(DBS %in% ICAMS::catalog.row.order$DBS78))
if (length(idx) == 0) {
return(DBS)
} else {
out <- paste0(revc(ref.vec[idx]), revc(alt.vec[idx]))
stopifnot(all(out %in% ICAMS::catalog.row.order$DBS78))
DBS[idx] <- out
return(DBS)
}
}
CanonicalizeQUAD <- function(quad) {
idx <- which(!(quad %in% ICAMS::catalog.row.order$DBS136))
if (length(idx) == 0) {
return(quad)
} else {
out <- revc(quad[idx])
stopifnot(all(out %in% ICAMS::catalog.row.order$DBS136))
quad[idx] <- out
return(quad)
}
}
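# Illustrative (hypothetical) calls for the two canonicalization helpers above:
# each returns its input unchanged when it is already one of the canonical
# ICAMS classes (ICAMS::catalog.row.order$DBS78 / $DBS136) and otherwise
# substitutes the reverse complement. Requires ICAMS and the revc() helper.
if (FALSE) {
  CanonicalizeDBS(ref.vec = c("AC", "GT"), alt.vec = c("GA", "TC"))
  CanonicalizeQUAD(c("ACGT", "TTAA"))
}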
CheckNamesOfVCFs <- function(files, names.of.VCFs) {
stopifnot(inherits(names.of.VCFs, "character"))
if (length(files) != length(names.of.VCFs)) {
stop("\nThe number of names in names.of.VCFs does not match ",
"the number of VCF files")
}
}
InferTransRanges <- function(ref.genome, trans.ranges) {
if (!is.null(trans.ranges)) {
return(trans.ranges)
} else {
if (IsGRCh37(ref.genome)) {
return(ICAMS::trans.ranges.GRCh37)
} else if (IsGRCh38(ref.genome)) {
return(ICAMS::trans.ranges.GRCh38)
} else if (IsGRCm38(ref.genome)) {
return(ICAMS::trans.ranges.GRCm38)
} else {
return(trans.ranges)
}
}
} |
objectColorize <- function(mat, color) {
col = rgb2hsv(col2rgb(color))
return(cpp_M_HSV2RGB(mat, h = col[1], s = col[2]))
} |
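# Added note (not from the original source): objectColorize converts the colour
# argument to a single HSV triplet with base R before handing the matrix and
# the hue/saturation values to the compiled helper cpp_M_HSV2RGB.
if (FALSE) {
  rgb2hsv(col2rgb("red"))   # hue, saturation, value, each in [0, 1]
}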
pool_parameters <- function(x,
exponentiate = FALSE,
effects = "fixed",
component = "conditional",
verbose = TRUE,
...) {
original_model <- random_params <- NULL
obj_name <- deparse(substitute(x), width.cutoff = 500)
if (all(sapply(x, insight::is_model)) && all(sapply(x, insight::is_model_supported))) {
original_model <- x[[1]]
x <- lapply(x, model_parameters, effects = effects, component = component, ...)
}
if (!all(sapply(x, inherits, "parameters_model"))) {
stop("'x' must be a list of 'parameters_model' objects, as returned by the 'model_parameters()' function.", call. = FALSE)
}
if (is.null(original_model)) {
original_model <- .get_object(x[[1]])
}
if (isTRUE(attributes(x[[1]])$exponentiate)) {
warning(insight::format_message("Pooling on exponentiated parameters is not recommended. Please call 'model_parameters()' with 'exponentiate = FALSE', and then call 'pool_parameters(..., exponentiate = TRUE)'."), call. = FALSE)
}
original_x <- x
if ("Component" %in% colnames(x[[1]]) && !.is_empty_object(component) && component != "all") {
x <- lapply(x, function(i) {
i <- i[i$Component == component, ]
i$Component <- NULL
i
})
warning(paste0("Pooling applied to the ", component, " model component."), call. = FALSE)
}
params <- do.call(rbind, x)
len <- length(x)
ci <- attributes(original_x[[1]])$ci
if (is.null(ci)) ci <- .95
parameter_values <- x[[1]]$Parameter
if (effects == "all" && "Effects" %in% colnames(params) && "random" %in% params$Effects) {
random_params <- params[params$Effects == "random", ]
params <- params[params$Effects != "random", ]
parameter_values <- x[[1]]$Parameter[x[[1]]$Effects != "random"]
}
estimates <- split(params, factor(params$Parameter, levels = unique(parameter_values)))
pooled_params <- do.call(rbind, lapply(estimates, function(i) {
pooled_estimate <- mean(i$Coefficient)
ubar <- mean(i$SE^2)
tmp <- ubar + (1 + 1 / len) * stats::var(i$Coefficient)
pooled_se <- sqrt(tmp)
df_column <- colnames(i)[grepl("(\\bdf\\b|\\bdf_error\\b)", colnames(i))][1]
    if (length(df_column) && !is.na(df_column)) {
pooled_df <- .barnad_rubin(m = nrow(i), b = stats::var(i$Coefficient), t = tmp, dfcom = unique(i[[df_column]]))
} else {
pooled_df <- Inf
}
pooled_statistic <- pooled_estimate / pooled_se
alpha <- (1 + ci) / 2
fac <- suppressWarnings(stats::qt(alpha, df = pooled_df))
data.frame(
Coefficient = pooled_estimate,
SE = pooled_se,
CI_low = pooled_estimate - pooled_se * fac,
CI_high = pooled_estimate + pooled_se * fac,
Statistic = pooled_statistic,
df_error = pooled_df,
p = 2 * stats::pt(abs(pooled_statistic), df = pooled_df, lower.tail = FALSE)
)
}))
pooled_random <- NULL
if (!is.null(random_params)) {
estimates <- split(random_params, factor(random_params$Parameter, levels = unique(random_params$Parameter)))
pooled_random <- do.call(rbind, lapply(estimates, function(i) {
pooled_estimate <- mean(i$Coefficient, na.rm = TRUE)
data.frame(
Parameter = unique(i$Parameter),
Coefficient = pooled_estimate,
Effects = "random",
stringsAsFactors = FALSE
)
}))
}
pooled_params$Parameter <- parameter_values
pooled_params <- pooled_params[c("Parameter", "Coefficient", "SE", "CI_low", "CI_high", "Statistic", "df_error", "p")]
if (isTRUE(exponentiate) || identical(exponentiate, "nongaussian")) {
pooled_params <- .exponentiate_parameters(pooled_params, NULL, exponentiate)
}
if (!is.null(pooled_random)) {
pooled_params <- merge(pooled_params, pooled_random, all = TRUE, sort = FALSE)
}
pooled_params <- .add_pooled_params_attributes(
pooled_params,
model_params = original_x[[1]],
model = original_model,
ci,
exponentiate,
verbose = verbose
)
attr(pooled_params, "object_name") <- obj_name
sig <- unlist(.compact_list(lapply(original_x, function(i) {
attributes(i)$sigma
})))
if (!.is_empty_object(sig)) {
attr(pooled_params, "sigma") <- mean(sig, na.rm = TRUE)
}
class(pooled_params) <- c("parameters_model", "see_parameters_model", class(pooled_params))
pooled_params
}
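# Hedged usage sketch for pool_parameters() (not from the original source):
# fit the same model to several multiply imputed datasets and pool the
# estimates. The data frames imp1, imp2 and imp3 are hypothetical placeholders.
if (FALSE) {
  models <- lapply(list(imp1, imp2, imp3), function(d) lm(y ~ x, data = d))
  pool_parameters(models)
}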
.barnad_rubin <- function(m, b, t, dfcom = 999999) {
if (is.null(dfcom) || all(is.na(dfcom)) || all(is.infinite(dfcom))) {
return(Inf)
}
lambda <- (1 + 1 / m) * b / t
lambda[lambda < 1e-04] <- 1e-04
dfold <- (m - 1) / lambda^2
dfobs <- (dfcom + 1) / (dfcom + 3) * dfcom * (1 - lambda)
dfold * dfobs / (dfold + dfobs)
}
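# Quick illustrative check of the Barnard-Rubin small-sample degrees-of-freedom
# adjustment above, using made-up inputs (m imputations, between-imputation
# variance b, total variance t, complete-data df dfcom); not from the source.
if (FALSE) {
  .barnad_rubin(m = 5, b = 0.02, t = 0.10, dfcom = 100)  # finite, smaller than dfcom
  .barnad_rubin(m = 5, b = 0.02, t = 0.10, dfcom = NA)   # falls back to Inf
}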
.add_pooled_params_attributes <- function(pooled_params, model_params, model, ci, exponentiate, verbose = TRUE) {
info <- insight::model_info(model, verbose = FALSE)
pretty_names <- attributes(model_params)$pretty_names
if (length(pretty_names) < nrow(model_params)) {
pretty_names <- c(pretty_names, model_params$Parameter[(length(pretty_names) + 1):nrow(model_params)])
}
attr(pooled_params, "ci") <- ci
attr(pooled_params, "exponentiate") <- exponentiate
attr(pooled_params, "pretty_names") <- pretty_names
attr(pooled_params, "verbose") <- verbose
attr(pooled_params, "ordinal_model") <- attributes(pooled_params)$ordinal_model
attr(pooled_params, "model_class") <- attributes(pooled_params)$model_class
attr(pooled_params, "bootstrap") <- attributes(pooled_params)$bootstrap
attr(pooled_params, "iterations") <- attributes(pooled_params)$iterations
attr(pooled_params, "df_method") <- attributes(pooled_params)$df_method
attr(pooled_params, "digits") <- attributes(pooled_params)$digits
attr(pooled_params, "ci_digits") <- attributes(pooled_params)$ci_digits
attr(pooled_params, "p_digits") <- attributes(pooled_params)$p_digits
coef_col <- .find_coefficient_type(info, exponentiate)
attr(pooled_params, "coefficient_name") <- coef_col
attr(pooled_params, "zi_coefficient_name") <- ifelse(isTRUE(exponentiate), "Odds Ratio", "Log-Odds")
attr(pooled_params, "model_formula") <- insight::find_formula(model)
pooled_params
} |
"AdaptNeigh" <-
function(pointsin,X,coeff,nbrs,remove,intercept,neighbours){
mindetails<-NULL
minindices<-NULL
results<-list()
tempres<-list()
newinfo<-list()
nlist<-list()
N<-length(pointsin);
min1<-min(N-1,neighbours)
min2<-min(N-1,2*neighbours)
closest<-FALSE
for (k in 1:min1){
out1<-getnbrs(X,remove,pointsin,k,closest)
nbrs<-out1$nbrs
index<-out1$index
out2<-AdaptPred(pointsin,X,coeff,nbrs,remove,intercept,neighbours)
nlist[[k]]<-out1
tempres[[k]]<-out2
}
closest<-TRUE
for (k in 1:min2){
out1<-getnbrs(X,remove,pointsin,k,closest)
nbrs<-out1$nbrs
index<-out1$index
out2<-AdaptPred(pointsin,X,coeff,nbrs,remove,intercept,neighbours)
nlist[[k+min1]]<-out1
tempres[[k+min1]]<-out2
}
# collect the winning detail coefficient of each configuration, then pick the
# configuration whose detail coefficient is smallest in absolute value
for (i in 1:(min1 + min2)){
minindices[i] <- tempres[[i]]$minindex
mindetails[i] <- tempres[[i]]$details[minindices[i]]
}
totalminindex <- order(abs(mindetails))[1]
pred <- coeff[remove] - mindetails[totalminindex]
coeff[remove] <- mindetails[totalminindex]
clo<-NULL
int<-NULL
scheme<-NULL
if(totalminindex<=neighbours){
clo<-FALSE
}
else{
clo<-TRUE
}
results<-tempres[[totalminindex]]
nbrs<-nlist[[totalminindex]]$nbrs
index<-nlist[[totalminindex]]$index
newinfo<-list(clo=clo,nbrs=nbrs,index=index)
return(list(results=results,newinfo=newinfo))
} |
github_info <- function(remote = "origin") {
remote_url <- get_remote_url(remote)
repo <- extract_repo(remote_url)
get_repo_data(repo)
}
get_remote_url <- function(remote) {
gert::git_remote_info(remote = remote)$url
}
extract_repo <- function(url) {
re <- "github[^/:]*[/:]([^/]+)/(.*?)(?:\\.git)?$"
m <- regexec(re, url)
match <- regmatches(url, m)[[1]]
if (length(match) == 0) {
abort(paste0("Unrecognized repo format: ", url))
}
paste0(match[2], "/", match[3])
}
get_repo_data <- function(repo) {
req <- gh::gh("/repos/:repo", repo = repo)
return(req)
} |
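# Hedged usage sketch (not from the original source): github_info() reads the
# URL of the given git remote in the current repository and queries the GitHub
# API for that repository's metadata. Requires a GitHub remote plus gert and gh.
if (FALSE) {
  info <- github_info("origin")
  info$full_name       # e.g. "owner/repo"
  info$default_branch
}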
importAURNCsv <- function(file = file.choose(), header.at = 5, data.at = 7,
                          na.strings = c("No data", "", "NA"),
                          date.name = "Date", date.break = "-", time.name = "time",
                          misc.info = c(1, 2, 3, 4), is.site = 4, bad.24 = TRUE,
                          correct.time = -3600, output = "final",
                          data.order = c("value", "status", "unit"),
initial.ans <- import(
file = file, header.at = header.at,
na.strings = na.strings, data.at = data.at, date.name = date.name,
date.break = date.break, time.name = time.name, misc.info = misc.info,
is.site = NULL, bad.24 = bad.24, correct.time = correct.time,
output = "working", ...
)
date.name <- make.names(date.name)
time.name <- make.names(time.name)
site.1 <- read.table(
file,
header = FALSE, sep = initial.ans$ops$sep,
skip = (is.site - 1), nrows = 1, colClasses = "character",
col.names = initial.ans$names, fill = TRUE, flush = TRUE
)
site.1 <- site.1[1:length(initial.ans$names)]
names(site.1) <- make.names(initial.ans$names, unique = TRUE)
site.1 <- site.1[!names(site.1) == date.name]
site.1 <- site.1[!names(site.1) == time.name]
site.2 <- as.character(site.1)
site.2 <- c(1:length(site.2))[gsub(" ", "", site.2) != ""]
if (length(site.2) > 1) {
site.3 <- c(site.2[2:length(site.2)] - 1, ncol(site.1))
  } else {
site.3 <- ncol(site.1)
}
site.names <- as.character(as.vector(site.1[site.2]))
site.names <- gsub("(^ +)|( +$)", "", site.names)
initial.ans$data <- lapply(1:(length(site.2)), function(x) {
ans <- initial.ans$data[site.2[x]:site.3[x]]
ans.names <- names(ans)
if (simplify.names == TRUE) {
ans.names[grep("carbon.monoxide", ans.names, ignore.case = TRUE)] <- "co"
ans.names[grep(
"pm10.particulate.matter", ans.names,
ignore.case = TRUE
)] <- "pm10"
ans.names[grep("non.volatile.pm10", ans.names, ignore.case = TRUE)] <- "nv.pm10"
ans.names[grep("volatile.pm10", ans.names, ignore.case = TRUE)] <- "v.pm10"
ans.names[grep(
"pm2.5.particulate.matter", ans.names,
ignore.case = TRUE
)] <- "pm2.5"
ans.names[grep("non.volatile.pm2.5", ans.names, ignore.case = TRUE)] <- "nv.pm2.5"
ans.names[grep("volatile.pm2.5", ans.names, ignore.case = TRUE)] <- "v.pm2.5"
ans.names[grep("nitric.oxide", ans.names, ignore.case = TRUE)] <- "no"
ans.names[grep("nitrogen.oxides", ans.names, ignore.case = TRUE)] <- "nox"
ans.names[grep("nitrogen.dioxide", ans.names, ignore.case = TRUE)] <- "no2"
ans.names[grep("ozone", ans.names, ignore.case = TRUE)] <- "o3"
ans.names[grep("sulphur.dioxide", ans.names, ignore.case = TRUE)] <- "so2"
}
for (i in 1:length(data.order)) {
if (data.order[i] == "value") {
}
else {
ans.names[grep(data.order[i], ans.names, ignore.case = TRUE)] <- paste(
data.order[i],
".", ans.names[(grep(
data.order[i], ans.names,
ignore.case = TRUE
)) - (i - 1)],
sep = ""
)
}
}
names(ans) <- ans.names
site <- rep(site.names[x], nrow(initial.ans$data))
ans <- cbind(date = initial.ans$date, site = site, ans)
})
initial.ans$data <- do.call(bind_rows, initial.ans$data)
if (simplify.names == TRUE) {
initial.ans$misc <- c(initial.ans$misc, "importAURN operation: simplify names applied")
}
if (!output == "working") {
ans <- initial.ans$data
if (!is.null(misc.info)) {
comment(ans) <- initial.ans$misc
}
ids <- which(is.na(ans$date))
if (length(ids) > 0) {
ans <- ans[-ids, ]
warning(paste(
"Missing dates detected, removing",
length(ids), "lines"
))
}
print(unlist(sapply(ans, class)))
return(ans)
  } else {
return(initial.ans)
}
} |
open_in_dbsnp <- function(variant_id) {
  if (!rlang::is_character(variant_id))
stop("variant_id must be a character vector.")
if (interactive()) {
urls <-
glue::glue("https://www.ncbi.nlm.nih.gov/snp/{variant_id}")
purrr::walk(urls, utils::browseURL)
return(invisible(TRUE))
} else {
return(invisible(TRUE))
}
}
open_in_gtex <- function(variant_id) {
  if (!rlang::is_character(variant_id))
stop("variant_id must be a character vector.")
if (interactive()) {
urls <-
glue::glue("https://gtexportal.org/home/snp/{variant_id}")
purrr::walk(urls, utils::browseURL)
return(invisible(TRUE))
} else {
return(invisible(TRUE))
}
} |
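# Hedged usage sketch (not from the original source): both helpers accept a
# character vector of dbSNP rsIDs; in an interactive session they open one
# browser tab per variant, otherwise they return invisibly with no side effect.
if (FALSE) {
  open_in_dbsnp(c("rs12345", "rs56789"))
  open_in_gtex("rs12345")
}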
ergm.etagradmult <- function(theta, v, etamap){
storage.mode(v) <- "double"
.Call("ergm_etagradmult_wrapper", as.numeric(theta), v, etamap, PACKAGE="ergm")
} |
`%instanceof%` <- .jinstanceof <- function( o, cl ){
if( !inherits( o, "jobjRef" ) ){
stop( "o is not a java object" )
}
if( inherits( cl, "jobjRef" ) ){
if( .jclass( cl ) == "java.lang.Class" ){
clazz <- cl
} else {
clazz <- .jcall( cl, "Ljava/lang/Class;", "getClass" )
}
} else if( inherits( cl, "jclassName" ) ) {
clazz <- cl@jobj
} else if( inherits( cl, "character" ) ){
clazz <- .jfindClass(cl)
} else {
return(FALSE)
}
.jcall( clazz , "Z", "isInstance", .jcast(o, "java/lang/Object" ) )
} |
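# Hedged usage sketch (not from the original source): after initializing the
# JVM, `%instanceof%` tests a Java reference against a class given as a class
# name, a jclassName, or a java.lang.Class object.
if (FALSE) {
  library(rJava)
  .jinit()
  s <- .jnew("java/lang/String", "hello")
  s %instanceof% "java.lang.Object"   # TRUE
  s %instanceof% "java.lang.Integer"  # FALSE
}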
XGBoost_QA_Results_MultiClass <- data.table::CJ(
TOF = c(TRUE,FALSE),
GridTune = c(TRUE,FALSE),
Success = "Failure",
PartitionInFunction = c(TRUE,FALSE)
)
XGBoost_QA_Results_MultiClass <- XGBoost_QA_Results_MultiClass[!(TOF & GridTune)]
XGBoost_QA_Results_MultiClass <- XGBoost_QA_Results_MultiClass[!(PartitionInFunction & TOF)]
XGBoost_QA_Results_MultiClass[, RunNumber := seq_len(.N)]
for(run in seq_len(XGBoost_QA_Results_MultiClass[,.N])) {
tof <- XGBoost_QA_Results_MultiClass[run, TOF]
PartitionInFunction <- XGBoost_QA_Results_MultiClass[run, PartitionInFunction]
gridtune <- XGBoost_QA_Results_MultiClass[run, GridTune]
Tar <- "Adrian"
data <- RemixAutoML::FakeDataGenerator(
Correlation = 0.85,
N = 25000L,
ID = 2L,
AddWeightsColumn = TRUE,
ZIP = 0L,
AddDate = TRUE,
Classification = FALSE,
MultiClass = TRUE)
data <- RemixAutoML::AutoDiffLagN(
data = data,
DateVariable = "DateTime",
GroupVariables = c("Factor_2"),
DiffVariables = names(data)[!names(data) %in% c("IDcol_1","IDcol_2","Adrian","DateTime","Factor_1","Factor_2")],
DiffDateVariables = NULL,
DiffGroupVariables = NULL,
NLag1 = 0,
NLag2 = 1,
Sort = TRUE,
RemoveNA = TRUE)
if(!tof && !PartitionInFunction) {
Sets <- RemixAutoML::AutoDataPartition(
data = data,
NumDataSets = 3,
Ratios = c(0.7,0.2,0.1),
PartitionType = "random",
StratifyColumnNames = "Adrian",
TimeColumnName = NULL)
TTrainData <- Sets$TrainData
VValidationData <- Sets$ValidationData
TTestData <- Sets$TestData
rm(Sets)
} else {
TTrainData <- data.table::copy(data)
VValidationData <- NULL
TTestData <- NULL
}
TestModel <- tryCatch({RemixAutoML::AutoXGBoostMultiClass(
TreeMethod = "hist",
NThreads = parallel::detectCores(),
OutputSelection = c("Importances", "EvalPlots", "EvalMetrics", "Score_TrainData"),
model_path = normalizePath("./"),
metadata_path = normalizePath("./"),
ModelID = "Test_Model_1",
ReturnFactorLevels = TRUE,
ReturnModelObjects = TRUE,
SaveModelObjects = FALSE,
EncodingMethod = "credibility",
DebugMode = TRUE,
data = TTrainData,
TrainOnFull = tof,
ValidationData = VValidationData,
TestData = TTestData,
TargetColumnName = "Adrian",
FeatureColNames = names(TTrainData)[!names(TTrainData) %in% c("IDcol_1", "IDcol_2","DateTime",Tar)],
IDcols = c("IDcol_1","IDcol_2","DateTime"),
eval_metric = "merror",
LossFunction = 'multi:softprob',
grid_eval_metric = "accuracy",
NumOfParDepPlots = 3L,
PassInGrid = NULL,
GridTune = gridtune,
BaselineComparison = "default",
MaxModelsInGrid = 10L,
MaxRunsWithoutNewWinner = 20L,
MaxRunMinutes = 24L*60L,
Verbose = 1L,
Trees = if(!gridtune) 50L else c(50,51,52,53,54,55),
eta = if(!gridtune) 0.05 else c(0.05,0.06,0.07,0.08,0.09),
max_depth = if(!gridtune) 4L else c(4,5,6,7,8,9,10),
min_child_weight = if(!gridtune) 1.0 else c(1,2,3,4),
subsample = if(!gridtune) 0.55 else c(0.50,0.55,0.60,0.65),
colsample_bytree = if(!gridtune) 0.55 else c(0.55,0.65,0.7,0.75,0.8))}, error = function(x) NULL)
if(!is.null(TestModel)) XGBoost_QA_Results_MultiClass[run, Success := "Success"]
TestModel <- NULL
Sys.sleep(5)
data.table::fwrite(XGBoost_QA_Results_MultiClass, file = "C:/Users/Bizon/Documents/GitHub/RemixAutoML/tests/Testing_Data/AutoXGBoostMultiClass_QA.csv")
}
rm(list = ls()[!ls() %in% c(
"XGBoost_QA_Results_MultiClass",
"XGBoost_QA_Results_Regression",
"XGBoost_QA_Results_Classifier",
"CatBoost_QA_Results_MultiClass",
"CatBoost_QA_Results_Regression",
"CatBoost_QA_Results_Classifier")]) |
library( "mvProbit" )
options( digits = 4 )
set.seed( 123 )
nObs <- 10
xMat <- cbind(
const = rep( 1, nObs ),
x1 = as.numeric( rnorm( nObs ) > 0 ),
x2 = as.numeric( rnorm( nObs ) > 0 ),
x3 = rnorm( nObs ),
x4 = rnorm( nObs ) )
beta <- cbind( c( 0.8, 1.2, -1.0, 1.4, -0.8 ),
c( -0.6, 1.0, 0.6, -1.2, -1.6 ),
c( 0.5, -0.6, -0.7, 1.1, 1.2 ) )
sigma <- miscTools::symMatrix( c( 1, 0.2, 0.4, 1, -0.1, 1 ) )
allCoef <- c( c( beta ), sigma[ lower.tri( sigma ) ] )
yMatLin <- xMat %*% beta
yMat <- ( yMatLin + rmvnorm( nObs, sigma = sigma, pre0.9_9994 = TRUE ) ) > 0
colnames( yMat ) <- paste( "y", 1:3, sep = "" )
yExp <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ) )
round( yExp, 3 )
yExpA <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat ) )
all.equal( yExp, yExpA )
yExp2 <- pnorm( yMatLin )
all.equal( yExp, as.data.frame( yExp2 ) )
yExpCond <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = GenzBretz() )
round( yExpCond, 3 )
yExpCondA <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat ), cond = TRUE,
algorithm = GenzBretz() )
all.equal( yExpCond, yExpCondA )
yExpCond2 <- matrix( NA, nrow = nObs, ncol = ncol( yMat ) )
for( i in 1:nObs ) {
for( k in 1:ncol( yMat ) ) {
set.seed( 123 )
numerator <- pmvnorm( upper = yMatLin[ i, ], sigma = sigma )
set.seed( 123 )
denominator <- pmvnorm( upper = yMatLin[ i, -k ], sigma = sigma[ -k, -k ] )
yExpCond2[ i, k ] <- numerator / denominator
}
}
all.equal( yExpCond, as.data.frame( yExpCond2 ) )
yExpCond3 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = GenzBretz )
all.equal( yExpCond, yExpCond3 )
identical( yExpCond, yExpCond3 )
yExpCond4 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = Miwa )
all.equal( yExpCond, yExpCond4, tol = 1e-3 )
yExpCond5 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = Miwa( steps = 32 ) )
all.equal( yExpCond4, yExpCond5, tol = 1e-3 )
yExpCond6 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = TVPACK )
all.equal( yExpCond, yExpCond6, tol = 1e-3 )
yExpCond7 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = TVPACK( abseps = 0.5 ) )
all.equal( yExpCond6, yExpCond7, tol = 1e-3 )
yExpCond8 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE )
all.equal( yExpCond, yExpCond8, tol = 1e-3 )
yExpCond9 <- mvProbitExp( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
nGHK = 100 )
all.equal( yExpCond8, yExpCond9, tol = 1e-3 )
yExpCondObs <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = GenzBretz() )
round( yExpCondObs, 3 )
yExpCondObsA <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = allCoef, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = GenzBretz() )
all.equal( yExpCondObs, yExpCondObsA )
yExpCondObs2 <- matrix( NA, nrow = nObs, ncol = ncol( yMat ) )
for( i in 1:nObs ){
for( k in 1:ncol( yMat ) ) {
ySign <- 2 * yMat[ i, ] - 1
ySign[ k ] <- 1
yLinTmp <- yMatLin[ i, ] * ySign
sigmaTmp <- diag( ySign ) %*% sigma %*% diag( ySign )
set.seed( 123 )
numerator <- pmvnorm( upper = yLinTmp, sigma = sigmaTmp )
set.seed( 123 )
denominator <- pmvnorm( upper = yLinTmp[ -k ], sigma = sigmaTmp[ -k, -k ] )
yExpCondObs2[ i, k ] <- numerator / denominator
}
}
all.equal( yExpCondObs, as.data.frame( yExpCondObs2 ) )
yExpCondObs3 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = GenzBretz )
all.equal( yExpCondObs, yExpCondObs3 )
identical( yExpCondObs, yExpCondObs3 )
yExpCondObs4 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = Miwa )
all.equal( yExpCondObs, yExpCondObs4, tol = 1e-3 )
yExpCondObs5 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = Miwa( steps = 32 ) )
all.equal( yExpCondObs4, yExpCondObs5, tol = 1e-3 )
yExpCondObs6 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = TVPACK )
all.equal( yExpCondObs, yExpCondObs6, tol = 1e-3 )
yExpCondObs7 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = TVPACK( abseps = 0.5 ) )
all.equal( yExpCondObs6, yExpCondObs7, tol = 1e-3 )
yExpCondObs8 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE )
all.equal( yExpCondObs, yExpCondObs8, tol = 1e-3 )
yExpCondObs9 <- mvProbitExp( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, nGHK = 100 )
all.equal( yExpCondObs8, yExpCondObs9, tol = 1e-3 )
nSim <- 10000
ySim <- array( NA, c( nObs, ncol( yMat ), nSim ) )
for( s in 1:nSim ) {
ySim[ , , s ] <- ( yMatLin + rmvnorm( nObs, sigma = sigma, pre0.9_9994 = TRUE ) ) > 0
}
yExpSim <- matrix( NA, nrow = nObs, ncol = ncol( yMat ) )
for( i in 1:nObs ) {
yExpSim[ i, ] <- rowSums( ySim[ i, , ] ) / nSim
}
round( yExpSim, 3 )
round( yExpSim - as.matrix( yExp ), 3 )
rnorm( 4 )
logLikVal <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = GenzBretz() )
round( logLikVal, 3 )
logLikValA <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = allCoef, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = GenzBretz() )
all.equal( logLikVal, logLikValA )
logLikVal3 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = GenzBretz )
all.equal( logLikVal, logLikVal3 )
identical( logLikVal, logLikVal3 )
logLikVal4 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = Miwa )
all.equal( logLikVal, logLikVal4, tol = 1e-3 )
logLikVal5 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = Miwa( steps = 32 ) )
all.equal( logLikVal4, logLikVal5, tol = 1e-3 )
logLikVal6 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = TVPACK )
all.equal( logLikVal, logLikVal6, tol = 1e-3 )
logLikVal7 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
algorithm = TVPACK( abseps = 0.5 ) )
all.equal( logLikVal6, logLikVal7, tol = 1e-3 )
logLikVal8 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ) )
all.equal( logLikVal, logLikVal8, tol = 1e-3 )
logLikVal9 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
nGHK = 100 )
all.equal( logLikVal8, logLikVal9, tol = 1e-3 )
logLikValGrad1 <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
oneSidedGrad = TRUE, algorithm = GenzBretz() )
round( c( logLikValGrad1 ), 3 )
round( attr( logLikValGrad1, "gradient" ), 3 )
logLikValGrad1A <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = allCoef, data = as.data.frame( cbind( xMat, yMat ) ),
oneSidedGrad = TRUE, algorithm = GenzBretz() )
all.equal( logLikValGrad1, logLikValGrad1A )
logLikValGrad <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
returnGrad = TRUE, algorithm = GenzBretz() )
round( c( logLikValGrad ), 3 )
round( attr( logLikValGrad, "gradient" ), 3 )
llTmp <- function( coef ) {
betaTmp <- coef[ 1:15 ]
sigmaTmp <- diag( 3 )
sigmaTmp[ lower.tri( sigmaTmp ) ] <- coef[ -(1:15) ]
sigmaTmp[ upper.tri( sigmaTmp ) ] <- t( sigmaTmp )[ upper.tri( sigmaTmp ) ]
result <- mvProbitLogLik( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = betaTmp, sigma = sigmaTmp,
data = as.data.frame( cbind( xMat, yMat ) ), algorithm = GenzBretz() )
return( result )
}
logLikValGrad2 <- numericGradient( llTmp, allCoef )
round( logLikValGrad2, 3 )
all.equal( attr( logLikValGrad1, "gradient" ), logLikValGrad2,
tol = 1e-5, check.attributes = FALSE )
all.equal( attr( logLikValGrad, "gradient" ), logLikValGrad2,
check.attributes = FALSE )
rnorm( 4 )
margEffUnc <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), vcov = diag( 18 ) )
round( margEffUnc, 3 )
round( attr( margEffUnc, "vcov" )[ 1:3, , ], 2 )
round( drop( attr( margEffUnc, "vcov" )[ nObs, , ] ), 2 )
print( summary( margEffUnc ), digits = c( 3, 3, 2, 2, 2 ) )
margEffUncA <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat ), vcov = diag( 18 ) )
all.equal( margEffUnc, margEffUncA )
margEffUncD <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), vcov = diag( 18 ),
dummyVar = c( "x1", "x2" ) )
all.equal( margEffUncD, margEffUnc )
margEffUncD0 <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), vcov = diag( 18 ),
dummyVar = NULL )
print( summary( margEffUncD0 ), digits = c( 3, 3, 2, 2, 2 ) )
margEffUncDA <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), vcov = diag( 18 ),
dummyVar = c( "x1", "x2", "x3", "x4" ) )
print( summary( margEffUncDA ), digits = c( 3, 3, 2, 2, 2 ) )
margEffUncM <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), vcov = diag( 18 ),
addMean = TRUE )
all.equal( margEffUnc, margEffUncM[ 1:nObs, ], check.attributes = FALSE )
round( margEffUncM[ nObs:(nObs+1), ], 3 )
all.equal( attr( margEffUnc, "vcov" ),
attr( margEffUncM, "vcov" )[ 1:nObs, , ] )
round( attr( margEffUncM, "vcov" )[ nObs:(nObs+1), , ], 2 )
round( drop( attr( margEffUncM, "vcov" )[ nObs+1, , ] ), 2 )
all.equal( summary( margEffUnc )[ , ],
summary( margEffUncM )[ 1:( 12 * nObs ), ], check.attributes = FALSE )
printCoefmat( round(
summary( margEffUncM )[ -( 1:( 12 * (nObs-1) ) ), ],
digits = 3 ), digits = 3 )
rnorm( 4 )
margEffCond <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = GenzBretz() )
round( margEffCond, 3 )
margEffCondA <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat ), cond = TRUE,
algorithm = GenzBretz() )
all.equal( margEffCond, margEffCondA )
margEffCondD <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
dummyVars = c( "x1", "x2" ), algorithm = GenzBretz() )
all.equal( margEffCondD, margEffCond )
margEffCondD0 <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
dummyVars = NULL, algorithm = GenzBretz() )
round( margEffCondD0, 3 )
margEffCondDA <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
dummyVars = c( "x1", "x2", "x3", "x4" ), algorithm = GenzBretz() )
round( margEffCondDA, 3 )
margEffCond1 <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
algorithm = Miwa( steps = 32 ) )
round( margEffCond1, 3 )
all.equal( margEffCond, margEffCond1, tol = 1e-3 )
margEffCond2 <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE )
round( margEffCond2, 3 )
all.equal( margEffCond, margEffCond2, tol = 1e-3 )
all.equal( margEffCond1, margEffCond2, tol = 1e-3 )
margEffCond3 <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat ), cond = TRUE,
nGHK = 100 )
round( margEffCond3, 3 )
all.equal( margEffCond, margEffCond3, tol = 1e-3 )
all.equal( margEffCond2, margEffCond3, tol = 1e-3 )
margEffCondV <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat )[ c(1,5,10), ], cond = TRUE,
vcov = diag( 18 ), returnJacobian = TRUE, algorithm = GenzBretz() )
round( attr( margEffCondV, "vcov" ), 2 )
round( drop( attr( margEffCondV, "vcov" )[ 1, , ] ), 2 )
round( attr( margEffCondV, "jacobian" ), 2 )
round( drop( attr( margEffCondV, "jacobian" )[ 1, , ] ), 2 )
print( summary( margEffCondV ), digits = c( 3, 3, 2, 2, 2 ) )
margEffCondVA <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat )[ c(1,5,10), ], cond = TRUE,
vcov = diag( 18 ), returnJacobian = TRUE, algorithm = GenzBretz() )
all.equal( margEffCondV, margEffCondVA, tol = 1e-3 )
margEffCondJac <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = allCoef,
data = as.data.frame( xMat )[ c(1,5,10), ], cond = TRUE,
returnJacobian = TRUE, algorithm = GenzBretz() )
all.equal( attr( margEffCondJac, "jacobian" ), attr( margEffCondV, "jacobian" ) )
margEffCondM <- mvProbitMargEff( ~ x1 + x2 + x3 + x4, coef = c( beta ),
sigma = sigma, data = as.data.frame( xMat )[ c(1,5,10), ], cond = TRUE,
vcov = diag( 18 ), addMean = TRUE, returnJacobian = TRUE,
algorithm = GenzBretz() )
all.equal( margEffCondV, margEffCondM[ 1:3, ], check.attributes = FALSE )
round( margEffCondM, 3 )
all.equal( attr( margEffCondV, "vcov" ),
attr( margEffCondM, "vcov" )[ 1:3, , ] )
round( attr( margEffCondM, "vcov" ), 2 )
round( drop( attr( margEffCondM, "vcov" )[ 4, , ] ), 2 )
all.equal( attr( margEffCondV, "jacobian" ),
attr( margEffCondM, "jacobian" )[ 1:3, , ] )
round( attr( margEffCondM, "jacobian" ), 2 )
round( drop( attr( margEffCondM, "jacobian" )[ 4, , ] ), 2 )
all.equal( summary( margEffCondV )[ , ], summary( margEffCondM )[ 1:36, ],
check.attributes = FALSE )
print( summary( margEffCondM ), digits = c( 3, 3, 2, 2, 2 ) )
rnorm( 4 )
margEffCondObs <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = GenzBretz() )
round( margEffCondObs, 3 )
margEffCondObsA <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = allCoef, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = GenzBretz() )
all.equal( margEffCondObs, margEffCondObsA )
margEffCondObs1 <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE, algorithm = Miwa( steps = 32 ) )
round( margEffCondObs1, 3 )
all.equal( margEffCondObs, margEffCondObs1, tol = 1e-3 )
margEffCondObs2 <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma, data = as.data.frame( cbind( xMat, yMat ) ),
cond = TRUE )
round( margEffCondObs2, 3 )
all.equal( margEffCondObs, margEffCondObs2, tol = 1e-3 )
all.equal( margEffCondObs1, margEffCondObs2, tol = 1e-3 )
margEffCondObsV <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma,
data = as.data.frame( cbind( xMat, yMat ) )[ c(1,5,10), ],
cond = TRUE, vcov = diag( 18 ), returnJacobian = TRUE,
algorithm = GenzBretz() )
round( attr( margEffCondObsV, "vcov" ), 2 )
round( drop( attr( margEffCondObsV, "vcov" )[ 1, , ] ), 2 )
round( attr( margEffCondObsV, "jacobian" ), 2 )
round( drop( attr( margEffCondObsV, "jacobian" )[ 1, , ] ), 2 )
print( summary( margEffCondObsV ), digits = c( 3, 3, 2, 2, 2 ) )
margEffCondObsVA <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = allCoef, data = as.data.frame( cbind( xMat, yMat ) )[ c(1,5,10), ],
cond = TRUE, vcov = diag( 18 ), returnJacobian = TRUE,
algorithm = GenzBretz() )
all.equal( margEffCondObs, margEffCondObsA )
margEffCondObsJac <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma,
data = as.data.frame( cbind( xMat, yMat ) )[ c(1,5,10), ],
cond = TRUE, returnJacobian = TRUE, algorithm = GenzBretz() )
all.equal( attr( margEffCondObsJac, "jacobian" ),
attr( margEffCondObsV, "jacobian" ) )
margEffCondObsM <- mvProbitMargEff( cbind( y1, y2, y3 ) ~ x1 + x2 + x3 + x4,
coef = c( beta ), sigma = sigma,
data = as.data.frame( cbind( xMat, yMat ) )[ c(1,5,10), ],
cond = TRUE, vcov = diag( 18 ), addMean = TRUE, returnJacobian = TRUE,
algorithm = GenzBretz() )
all.equal( margEffCondObsV, margEffCondObsM[ 1:3, ], check.attributes = FALSE )
round( margEffCondObsM, 3 )
all.equal( attr( margEffCondObsV, "vcov" ),
attr( margEffCondObsM, "vcov" )[ 1:3, , ] )
round( attr( margEffCondObsM, "vcov" ), 2 )
round( drop( attr( margEffCondObsM, "vcov" )[ 4, , ] ), 2 )
all.equal( attr( margEffCondObsV, "jacobian" ),
attr( margEffCondObsM, "jacobian" )[ 1:3, , ] )
round( attr( margEffCondObsM, "jacobian" ), 2 )
round( drop( attr( margEffCondObsM, "jacobian" )[ 4, , ] ), 2 )
all.equal( summary( margEffCondObsV )[ , ], summary( margEffCondObsM )[ 1:36, ],
check.attributes = FALSE )
print( summary( margEffCondObsM ), digits = c( 3, 3, 2, 2, 2 ) )
rnorm( 4 ) |
allMissing = function(x) {
.Call(c_all_missing, x)
} |
train.gam <- function (X, y, pars = list()) {
if (!exists("numBasisFcts", pars)) {
pars$numBasisFcts <- 100
}
if (!exists("staysilent", pars)) {
pars$staysilent <- TRUE
}
if (!exists("CV.folds", pars)) {
pars$CV.folds <- NA
}
if (is.null(X) || dim(as.matrix(X))[2] == 0) {
result <- list()
result$Yfit <- as.matrix(rep(mean(y), length(y)))
result$residuals <- as.matrix(y - result$Yfit)
result$model <- NA
result$df <- NA
result$edf <- NA
result$edf1 <- NA
result$p.values <- NA
}
else {
p <- dim(as.matrix(X))
if (!is.na(pars$CV.folds)) {
num.folds <- pars$CV.folds
rmse <- Inf
whichfold <- sample(rep(1:num.folds, length.out = p[1]))
        for (j in 1:length(pars$numBasisFcts)) {
            # fit with the j-th candidate basis dimension on one fold ...
            mod <- train.gam(as.matrix(X)[whichfold == j, ], y[whichfold == j],
                pars = list(numBasisFcts = pars$numBasisFcts[j], CV.folds = NA))
            # ... and score it on the remaining folds; the prediction frame needs
            # the same var2, var3, ... names that the gam formula uses
            datframe <- data.frame(as.matrix(X)[whichfold != j, ])
            names(datframe) <- paste("var", 2:(p[2] + 1), sep = "")
rmse.tmp <- sum((predict(mod$model, datframe) -
y[whichfold != j])^2)
if (rmse.tmp < rmse) {
rmse <- rmse.tmp
final.numBasisFcts <- pars$numBasisFcts[j]
}
}
}
else {
final.numBasisFcts <- pars$numBasisFcts
}
if (p[1]/p[2] < 3 * final.numBasisFcts) {
final.numBasisFcts <- ceiling(p[1]/(3 * p[2]))
if (pars$staysilent == FALSE) {
cat("changed number of basis functions to ",
final.numBasisFcts, " in order to have enough samples per basis function\n")
}
}
dat <- data.frame(as.matrix(y), as.matrix(X))
coln <- rep("null", p[2] + 1)
for (i in 1:(p[2] + 1)) {
coln[i] <- paste("var", i, sep = "")
}
colnames(dat) <- coln
labs <- "var1 ~ "
if (p[2] > 1) {
for (i in 2:p[2]) {
labs <- paste(labs, "s(var", i, ",k = ",
final.numBasisFcts, ") + ", sep = "")
}
}
labs <- paste(labs, "s(var", p[2] + 1, ",k = ",
final.numBasisFcts, ")", sep = "")
mod_gam <- FALSE
try(mod_gam <- gam(formula = formula(labs), data = dat),
silent = TRUE)
if (typeof(mod_gam) == "logical") {
cat("There was some error with gam. The smoothing parameter is set to zero.\n")
labs <- "var1 ~ "
if (p[2] > 1) {
for (i in 2:p[2]) {
labs <- paste(labs, "s(var", i, ",k = ",
final.numBasisFcts, ",sp=0) + ", sep = "")
}
}
labs <- paste(labs, "s(var", p[2] + 1, ",k = ",
final.numBasisFcts, ",sp=0)", sep = "")
mod_gam <- gam(formula = formula(labs), data = dat)
}
result <- list()
result$Yfit <- as.matrix(mod_gam$fitted.values)
result$residuals <- as.matrix(mod_gam$residuals)
result$model <- mod_gam
result$df <- mod_gam$df.residual
result$edf <- mod_gam$edf
result$edf1 <- mod_gam$edf1
result$p.values <- summary.gam(mod_gam)$s.pv
}
return(result)
}
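# Usage sketch (illustrative, not part of the original source): fit a smooth of
# y on a single predictor with simulated data. Assumes mgcv is attached so that
# gam() and summary.gam(), called unqualified above, are found.
if (FALSE) {
  library(mgcv)
  set.seed(1)
  x <- matrix(runif(200), ncol = 1)
  y <- sin(2 * pi * x[, 1]) + rnorm(200, sd = 0.1)
  fit <- train.gam(x, y, pars = list(numBasisFcts = 10))
  head(fit$Yfit)
  fit$p.values
}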
train.xgboost1 <- function (X, y, pars = list()) {
n <- length(y)
if (!exists("nrounds", pars)) {
pars$nrounds <- 50
}
if (!exists("max_depth", pars)) {
pars$max_depth <- c(1, 3, 4, 5, 6)
}
if (!exists("CV.folds", pars)) {
pars$CV.folds <- 10
}
if (!exists("ncores", pars)) {
pars$ncores <- 1
}
if (!exists("early_stopping", pars)) {
pars$early_stopping <- 10
}
if (!exists("silent", pars)) {
pars$silent <- TRUE
}
if (is.null(X) || dim(as.matrix(X))[2] == 0) {
result <- list()
result$Yfit <- as.matrix(rep(mean(y), length(y)))
result$residuals <- as.matrix(y - result$Yfit)
result$model <- NA
result$df <- NA
result$edf <- NA
result$edf1 <- NA
result$p.values <- NA
}
else {
X <- as.matrix(X)
if (!is.na(pars$CV.folds)) {
num.folds <- pars$CV.folds
rmse <- matrix(0, pars$nrounds, length(pars$max_depth))
set.seed(1)
whichfold <- sample(rep(1:num.folds, length.out = n))
for (j in 1:length(pars$max_depth)) {
max_depth <- pars$max_depth[j]
for (i in 1:10) {
dtrain <- xgb.DMatrix(data = data.matrix(X[whichfold !=
i, ]), label = y[whichfold != i])
dtest <- xgb.DMatrix(data = data.matrix(X[whichfold ==
i, ]), label = y[whichfold == i])
watchlist <- list(train = dtrain, test = dtest)
if (pars$ncores > 1) {
bst <- xgb.train(data = dtrain, nthread = pars$ncores,
watchlist = watchlist, nrounds = pars$nrounds,
max_depth = max_depth, verbose = FALSE,
early_stopping_rounds = pars$early_stopping,
callbacks = list(cb.evaluation.log()))
}
else {
bst <- xgb.train(data = dtrain, nthread = 1,
watchlist = watchlist, nrounds = pars$nrounds,
max_depth = max_depth, verbose = FALSE,
early_stopping_rounds = pars$early_stopping,
callbacks = list(cb.evaluation.log()))
}
newscore <- bst$evaluation_log$test_rmse
if (length(newscore) < pars$nrounds) {
newscore <- c(newscore, rep(Inf, pars$nrounds -
length(newscore)))
}
rmse[, j] <- rmse[, j] + newscore
}
}
mins <- arrayInd(which.min(rmse), .dim = dim(rmse))
if (!pars$silent) {
show(rmse)
show(mins)
if ((mins[1] == 1) | (mins[1] == pars$nrounds) |
(mins[2] == 1) | (mins[2] == length(pars$max_depth))) {
show("There have been parameters selected that were the most extreme of the CV values")
show(mins)
}
}
final.nrounds <- mins[1]
final.max_depth <- pars$max_depth[mins[2]]
}
else {
if (length(pars$max_depth) > 1) {
stop("providing a vector of parameters must be used with CV")
}
final.max_depth <- pars$max_depth
final.nrounds <- pars$nrounds
}
dtrain <- xgb.DMatrix(data = data.matrix(X), label = y)
bstY <- xgb.train(data = dtrain, nrounds = final.nrounds,
max_depth = final.max_depth, verbose = !pars$silent)
result <- list()
result$Yfit <- predict(bstY, data.matrix(X))
result$residuals <- as.matrix(y - result$Yfit)
result$model <- bstY
result$df <- NA
result$edf <- NA
result$edf1 <- NA
result$p.values <- NA
}
return(result)
} |
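# Usage sketch (illustrative, not part of the original source): cross-validated
# choice of max_depth on simulated data. Assumes the xgboost 1.x interface that
# the function relies on (xgb.DMatrix(), xgb.train() with 'watchlist',
# cb.evaluation.log()) and that the package is attached.
if (FALSE) {
  library(xgboost)
  set.seed(1)
  x <- matrix(rnorm(500), ncol = 5)
  y <- x[, 1] - 2 * x[, 2] + rnorm(100, sd = 0.2)
  fit <- train.xgboost1(x, y, pars = list(max_depth = c(2, 4), nrounds = 30))
  plot(y, fit$Yfit)
}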
"_PACKAGE"
NULL
libgeos_version <- function() {
.Call(libgeos_geos_version)
} |
importVCF <- function(file, na.seq="./."){
V3 <- NULL
rn <- NULL
con <- file(file)
open(con);
results.list <- list();
headerComplete <- FALSE
headerLines <- 0
header <- c()
while (!headerComplete) {
oneLine <- readLines(con, n = 1, warn = FALSE)
lineStart <- substr(oneLine,1,2)
if(lineStart=="
headerLines <- headerLines + 1
header <- c(header, oneLine)
} else {
headerComplete <- TRUE
}
}
close(con)
vcfBody <- fread(file, skip = headerLines, header=TRUE)
map <- vcfBody[, .SD, .SDcols = c(1,3,2,4,5)]
map[,V3:=0]
setnames(map, c("V1", "snp.names", "V4", "allele.1", "allele.2", "V3"))
setcolorder(map, c(1,2,6,3,4,5))
map[[2]] <- as.character(map[[2]])
missingNames <- map[[2]]=="."
if(sum(missingNames)>0){
newLabels <- paste(map[[1]],map[[4]],sep=".")
tableNames <- table(newLabels)
multLoci <- tableNames>1
if(sum(multLoci)>0){
lociOI <- tableNames[multLoci]
for(locRun in 1:length(lociOI)){
origLoc <- which(newLabels==names(lociOI)[locRun])
for(indRun in 1:length(origLoc)){
newLabels[origLoc[indRun]] <- paste(newLabels[origLoc[indRun]],indRun,sep=".")
}
}
}
map[[2]][missingNames] <- newLabels[missingNames]
}
genotypes <- vcfBody[, .SD, .SDcols = -c(1:9)]
cols = names(genotypes)
genotypes[ , (cols) := lapply(.SD, function(x) {gsub("\\:.*","",x)}), .SDcols = cols]
genotypes[genotypes==na.seq] <- "03"
genotypes[genotypes=="0|0"] <- "00"
genotypes[genotypes=="0|1"] <- "01"
genotypes[genotypes=="1|0"] <- "01"
genotypes[genotypes=="1|1"] <- "02"
genotypes[genotypes=="0/0"] <- "00"
genotypes[genotypes=="0/1"] <- "01"
genotypes[genotypes=="1/1"] <- "02"
genotypesRN <- colnames(genotypes)
genotypes <- genotypes[, data.table(t(.SD), keep.rownames=TRUE)]
genotypes[,rn:=NULL]
setnames(genotypes, map[[2]])
rownames(genotypes) <- genotypesRN
out <- list(header=header, vcfBody, map=map, genotypes=genotypes)
class(out) <- "vcf"
out
} |
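# Usage sketch (illustrative, not part of the original source): read a small
# VCF file into the list structure built above. The file path is hypothetical;
# data.table must be attached because fread(), setnames() and setcolorder()
# are called unqualified.
if (FALSE) {
  library(data.table)
  vcf <- importVCF("variants.vcf", na.seq = "./.")
  vcf$map[1:5, ]
  dim(vcf$genotypes)
}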
PdofCSt.cyc2 <- function(theta, T, d, m=20, tol=1e-8) {
tol.1=tol; tol.2=tol; tol.3=tol; tol.4=tol
y = gauss.quad.prob(m, dist="normal")$nodes
k = length(theta)
cut1 = theta[k-T+1]-d-0.00000001
cut2 = theta[k-T+1]+d
t1 = sum(ifelse(cut2<theta, 1, 0))
t3 = sum(ifelse((cut1<=theta)&(theta<=cut2), 1, 0))
t2 = T-t1
t3 = t3-t2
G = t3
GG=G
if ((k-GG-T) == 0) {lower = NULL
} else lower = theta[1:(k-GG-T)]
i=t2-2; j=0;
count=0
end.cyc.prob=0
mins=t2:3
top.rows=(t2+G):(G+3)
p=0
out=NULL
inds = 1:(GG+t2)
if (t2==1 || t2==2) {
out = PdofCSt.T1or2(theta, T, d, m, tol.4)
new.out = c(choose(t2+G,t2), out, out)
return(new.out)
} else {
if (G == 0) {
PdCSt = PofCSt(theta,T,m,tol.4)
new.out=c(1,PdCSt[1],PdCSt[1])
return(new.out)
} else {
repeat {
PgCSt = 0
for (g in G:0) {
res = matrix(NA, nrow=t2, ncol=(g+1))
res[1:(t2-2),]=top.rows
res[t2-1,]=g+2
res[t2,]=(g+1):1
count = count+res[t2,1]
A = res
Abar = matrix(NA, nrow=GG, ncol=(g+1))
for (ii in 1:(g+1)) {
bool = (inds %in% A[,ii])
Abar[,ii] = inds[!bool]
}
A = t(A); Abar = t(Abar)
A = A+k-GG-T; Abar = Abar+k-GG-T
R = dim(A)[1]
PrCSt = 0
for (r in 1:R) {
if (t1==0) {theta.top = NULL
} else theta.top = theta[(k-t1+1):k]
theta.star = c(lower, theta[Abar[r,]], theta[A[r,]], theta.top)
PrCSt[r] = PofCSt(theta.star,T,m,tol.4)[1]
if (PrCSt[r]<tol.1) break
}
PgCSt[G-g+1] = sum(PrCSt)
if (PgCSt[G-g+1] < tol.2) break
}
PgCSt.sum = sum(PgCSt)
p = p + PgCSt.sum
new.out = c(count, p, PgCSt.sum)
ii = i
repeat {
if ((top.rows[i]-1+j)<mins[i]) {
i=i-1
j=1
top.rows[i:(t2-2)]=(top.rows[i]-1):(top.rows[i]-t2+i+1)
} else {
if (j==0) {top.rows[i]=top.rows[i]-1; break}
break }
}
if (ii>i) {
if (p-end.cyc.prob < tol.3) {break
} else end.cyc.prob=p
}
if (i==1 && top.rows[i]==mins[i]) break
else i=t2-2
j=0
G=top.rows[i]-3
}
res[1,1]=res[1,1]-1
A = res
Abar = matrix(NA, nrow=GG, ncol=(g+1))
for (ii in 1:(g+1)) {
bool = (inds %in% A[,ii])
Abar[,ii] = inds[!bool]
}
A = t(A); Abar = t(Abar)
A = A+k-GG-T; Abar = Abar+k-GG-T
R = dim(A)[1]
PrCSt = 0
if (t1==0) {theta.top = NULL
} else theta.top = theta[(k-t1+1):k]
theta.star = c(lower, theta[Abar[r,]], theta[A[r,]], theta.top)
last.component = PofCSt(theta.star,T,m,tol.4)[1]
p = p + last.component
new.out = c(count+1, p, last.component)
return(new.out)
}
}
}
PdofCSt.T1or2 <- function(theta, T, d, m=20, tol=1e-8) {
y = gauss.quad.prob(m, dist="normal")$nodes
k = length(theta)
cut1 = theta[k-T+1]-d-0.00000001
cut2 = theta[k-T+1]+d
t1 = sum(ifelse(cut2<theta, 1, 0))
t3 = sum(ifelse((cut1<=theta)&(theta<=cut2), 1, 0))
t2 = T-t1
t3 = t3-t2
if (t3 > 0) {
A = combn((t3+t2),t2)
A = t(A)
A = A+k-(t1+t2+t3)
Abar = combn((t3+t2),t3)
Abar = t(Abar)
Abar = Abar+k-(t1+t2+t3)
if ((k-(t1+t2+t3)) == 0) {lower = NULL
} else lower = theta[1:(k-(t1+t2+t3))]
R = dim(A)[1]
PdCSt = 0
for (r in 1:R) {
if (t1==0) {theta.top = NULL
} else theta.top = theta[(k-t1+1):k]
theta.star = c(lower, theta[Abar[R-r+1,]], theta[A[r,]], theta.top)
PdCSt[r] = PofCSt(theta.star,T,m,tol)[1]
}
} else {PdCSt = PofCSt(theta,T,m,tol); PdCSt=PdCSt[2:length(PdCSt)]}
return(sum(PdCSt))
}
PofCSGt <- function(theta, T, Gd, m=20, tol=1e-8) {
y = gauss.quad.prob(m, dist="normal")$nodes
k = length(theta)
if (Gd > 0) {
A = combn((k-T),Gd)
A = t(A)
R = dim(A)[1]
if ((k-Gd-T) == 0) {Abar = NULL
} else {
Abar = matrix(NA, nrow=R, ncol=(k-T-Gd))
inds = 1:(k-T)
for (ii in 1:R) {
bool = (inds %in% A[ii,])
Abar[ii,] = inds[!bool]
}
}
A.top = rep((k-T+1):k,times=R)
A.top = matrix(A.top,R,length((k-T+1):k),byrow=TRUE)
A = cbind(A,A.top)
PdCSt = 0
for (r in 1:R) {
theta.star = c(theta[Abar[R-r+1,]], theta[A[R-r+1,]])
PdCSt[r] = PofCSt(theta.star,(T+Gd),m,tol)[1]
}
} else {PdCSt = PofCSt(theta,T,m,tol); PdCSt=PdCSt[2:length(PdCSt)]}
sum(PdCSt)
}
PofCSt <- function(theta, T, m, tol=1e-7) {
y = gauss.quad.prob(m, dist="normal")$nodes
k = length(theta)
len.y = length(y)
Qti = NA
phi.y = NA
for (i in (k-T):1) {
y.theta = y + theta[i]
if ((i-1) >= 1) {
h = seq(1, (i-1))
len.h = length(h)
y.theta.h = rep(y.theta, each=len.h)
norm.mat = pnorm(y.theta.h-theta[h])
norm.mat = matrix(norm.mat,len.y, len.h, byrow=TRUE)
G1 = apply(norm.mat, 1, prod)
} else {G1 = 1}
if ((i+1)<=(k-T)) {
l = seq((i+1), (k-T))
len.l = length(l)
y.theta.l = rep(y.theta, each=len.l)
norm.mat = pnorm(y.theta.l-theta[l])
norm.mat = matrix(norm.mat, len.y, len.l, byrow=TRUE)
G2 = apply(norm.mat, 1, prod)
} else {G2 = 1}
j = seq((k-T+1), k)
len.j = length(j)
y.theta.j = rep(y.theta, each=len.j)
norm.mat = 1-pnorm(y.theta.j-theta[j])
norm.mat = matrix(norm.mat, len.y, len.j, byrow=TRUE)
G3 = apply(norm.mat, 1, prod)
G123 = cbind(G1,G2,G3)
phi.y = apply(G123, 1, prod)
Qti[(k-T+1-i)] = G.H.Quad(phi.y, m)
if (Qti[(k-T+1-i)] < tol) break
}
PCSt = sum(Qti)
c(PCSt, Qti)
}
G.H.Quad <- function(x, m) {
weight = gauss.quad.prob(m, dist="normal")$weights
res = sum(x*weight)
res
}
PdofCSGt.bootstrap5 <- function(theta, T, D, G, B, SDE, dist=c("normal","t"), df=14, trunc=6, est.names=c("O")) {
beg = Sys.time()
T = sort(T); D = sort(D); G = sort(G)
T.len = length(T); D.len=length(D); G.len=length(G)
name.len = length(est.names)
resD = array(NA, c(D.len,T.len,name.len), list(D,T,est.names))
resG = array(NA, c(G.len,T.len,name.len), list(G,T,est.names))
num.corrD = array(0, c(D.len,T.len,name.len), list(D,T,est.names))
num.corrG = array(0, c(G.len,T.len,name.len), list(G,T,est.names))
bound = T[T.len]
const = 1/SDE
theta = sort(theta)
K = length(theta)
for (b in 1:B) {
if (dist[1]=="normal") {xbar=rnorm(K,0)
} else if (dist[1]=="t") xbar=rt(K,df)
for (est in 1:name.len) {
if (est.names[est]=="O") {Theta=const*theta
}
x=xbar+Theta
x = x[x>(x[K-bound+1]-trunc)]
k = length(x)
Th.i = rank(Theta[(K-k+1):K])
X.i = order(x)
for (j in 1:T.len) {
for (i in 1:D.len) {
t = T[j]; d = D[i]
cut1 = Theta[K-t+1]-d
cut2 = Theta[K-t+1]+d
t1 = sum(ifelse(cut2<Theta, 1, 0))
t3 = sum(ifelse((cut1<=Theta)&(Theta<=cut2), 1, 0))
t2 = t-t1
t3 = t3-t2
if ((t1+t2+t3)>k) {(resD[i:D.len,j,est]=1)&break
} else {
x.i = X.i[(k-t1-t2+1):k]
if (t1==0) {th.i=NULL
} else {
th.i = Th.i[(k-t1+1):k]
dCSt = match(th.i, x.i)
dCSt = dCSt[!is.na(dCSt)]
if (length(dCSt) == t1) {x.i=x.i[-dCSt]
} else (resD[i,j,est]=0)&next
}
th.i = Th.i[(k-t1-t2-t3+1):(k-t1)]
dCSt = x.i %in% th.i
if (sum(dCSt) == t2) {(resD[i:D.len,j,est]=1)&break
} else resD[i,j,est]=0
}
}
for (i in 1:G.len) {
t = T[j]; g = G[i]
if ((t+g)>k) {(resG[i:G.len,j,est]=1)&break}
th.i = Th.i[(k-t+1):k]
x.i = X.i[(k-t-g+1):k]
GCSt = th.i %in% x.i
if (sum(GCSt) == t) {(resG[i:G.len,j,est]=1)&break
} else resG[i,j,est]=0
}
}
num.corrD[,,est] = num.corrD[,,est] + resD[,,est]
num.corrG[,,est] = num.corrG[,,est] + resG[,,est]
}
}
est.PdCSt = num.corrD/B
est.PCSGt = num.corrG/B
end = Sys.time()
print(end-beg)
out = list(est.PdCSt,est.PCSGt)
names(out)=c("d", "G")
return(out)
}
PofCSLt.bootstrap5 <- function(theta, T, L, B, SDE, dist=c("normal", "t"), df=14, trunc=6, est.names=c("O")) {
beg = Sys.time()
T = sort(T); L = sort(L)
T.len = length(T); L.len=length(L)
name.len = length(est.names)
resL = array(NA, c(L.len,T.len,name.len), list(L,T,est.names))
num.corrL = array(0, c(L.len,T.len,name.len), list(L,T,est.names))
bound = T[T.len]
const = 1/SDE
theta = sort(theta)
K = length(theta)
for (b in 1:B) {
if (dist[1]=="normal") {xbar=rnorm(K,0)
} else if (dist[1]=="t") xbar=rt(K,df)
for (est in 1:name.len) {
if (est.names[est]=="O") {Theta=const*theta
}
x=xbar+Theta
x = x[x>(x[K-bound+1]-trunc)]
k = length(x)
Th.i = 1:k
X.i = order(x)
for (i in 1:L.len) {
for (j in 1:T.len) {
t = T[j]; g = L[i]
if (g>t) {(resL[i:L.len,j,est]=NA)&next}
th.i = Th.i[(k-t+1):k]
x.i = X.i[(k-t+1):k]
LCSt = th.i %in% x.i
if (sum(LCSt) >= g) {(resL[i,j:T.len,est]=1)&break
} else resL[i,j,est]=0
}
}
num.corrL[,,est] = num.corrL[,,est] + resL[,,est]
}
}
est.PCSLt = num.corrL/B
end = Sys.time()
print(end-beg)
return(est.PCSLt)
}
PdCSGt.bootstrap.NP2 <- function(X1, X2, T, D, G, N, trunc=6) {
beg = Sys.time()
K = dim(X1)[1]; r1 = dim(X1)[2]; r2 = dim(X2)[2]
theta = abs(tindep(X1, X2)[,1])
ord.i = order(theta)
theta = theta[ord.i]
X1 = X1[ord.i, ]; X2 = X2[ord.i, ];
T = sort(T); D = sort(D); G = sort(G)
T.len = length(T); G.len=length(G); D.len=length(D)
resD = matrix(NA, D.len, T.len)
resG = matrix(NA, G.len, T.len)
num.corrD = matrix(0, D.len, T.len)
num.corrG = matrix(0, G.len, T.len)
colnames(num.corrD)=T; rownames(num.corrD)=D
colnames(num.corrG)=T; rownames(num.corrG)=G
bound = T[T.len]
for (n in 1:N) {
X.C = apply(X1, 1, sample, size=r1, replace=TRUE)
X.C = t(X.C)
X.T1 = apply(X2, 1, sample, size=r2, replace=TRUE)
X.T1 = t(X.T1)
sample = cbind(X.C, X.T1)
tstat = abs(tindep(X.C, X.T1)[,1])
tstat = tstat[tstat>(tstat[K-bound+1]-trunc)]
k = length(tstat)
Th.i = rank(theta[(K-k+1):K])
X.i = order(tstat)
for (j in 1:T.len) {
for (i in 1:D.len) {
t = T[j]; d = D[i]
cut1 = theta[K-t+1]-d
cut2 = theta[K-t+1]+d
t1 = sum(ifelse(cut2<theta, 1, 0))
t3 = sum(ifelse((cut1<=theta)&(theta<=cut2), 1, 0))
t2 = t-t1
t3 = t3-t2
if ((t1+t2+t3)>k) {(resD[i:D.len,j]=1)&break}
x.i = X.i[(k-t1-t2+1):k]
if (t1==0) {th.i=NULL
} else {
th.i = Th.i[(k-t1+1):k]
dCSt = match(th.i, x.i)
dCSt = dCSt[!is.na(dCSt)]
if (length(dCSt) == t1) {x.i=x.i[-dCSt]
} else (resD[i,j]=0)&next
}
th.i = Th.i[(k-t1-t2-t3+1):(k-t1)]
dCSt = x.i %in% th.i
if (sum(dCSt) == t2) {(resD[i:D.len,j]=1)&break
} else resD[i,j]=0
}
for (i in 1:G.len) {
t = T[j]; g = G[i]
if ((t+g)>k) {(resG[i:G.len,j]=1)&break}
th.i = Th.i[(k-t+1):k]
x.i = X.i[(k-t-g+1):k]
GCSt = th.i %in% x.i
if (sum(GCSt) == t) {(resG[i:G.len,j]=1)&break
} else resG[i,j]=0
}
}
num.corrD = num.corrD + resD
num.corrG = num.corrG + resG
}
est.PdCSt = num.corrD/N
est.PCSGt = num.corrG/N
out = list(est.PdCSt,est.PCSGt)
names(out) = c("d","G")
end = Sys.time()
print(end-beg)
return(out)
}
tindep <- function(X, Y, flag=0) {
k = dim(X)[1];
nx = dim(X)[2]; ny = dim(Y)[2]
Xbar = apply(X, 1, mean)
Xse = apply(X, 1, var)/nx
Ybar = apply(Y, 1, mean)
Yse = apply(Y, 1, var)/ny
T = (Xbar-Ybar)/sqrt(Xse+Yse)
df = (Xse + Yse)^2 / (Xse^2/(nx-1) + Yse^2/(ny-1))
Pvalue = 2*(1-pt(abs(T), df= df))
if (flag != 0) {
procs <- c("Bonferroni", "Holm", "Hochberg", "SidakSS", "SidakSD", "BH", "BY")
res <- mt.rawp2adjp(Pvalue, procs)
adjp <- res$adjp[order(res$index), ]
return(cbind(T,adjp))
}
else return(cbind(T,Pvalue))
}
PCS.exact <- function(theta, t=1, g=NULL, d=NULL, m=20, tol=1e-8) {
t = as.integer(t)
if (length(t)>1) stop("t must be a scalar")
if (!is.integer(t) | t<1) stop("t must be a positive integer")
if (is.null(g) & is.null(d)) stop("A value is required for g or d")
if (!is.null(g) & !is.null(d)) stop("Only g or d may be inputted at one time, not both")
if (!is.null(g)) {
if (length(g)>1) stop("g must be a scalar")
g = as.integer(g)
if (g < 0) stop("g must be a non-negative integer")
}
if (!is.null(d)) {
if (length(d)>1) stop("d must be a scalar")
if (d < 0) stop("d must be a non-negative real number")
}
if (m<=0) stop("m (number of nodes in Gaussian quadrature) must be positive")
if (tol>0.01) warning("tol (tolerance parameter) is large and may admit gross errors")
theta = sort(theta)
if (is.null(d)) {
if (length(theta) <= t+g) {out=1; warning("t+g >= length(theta), which implies PCS=1, necessarily"); return(out)}
out = PofCSGt(theta=theta, T=t, Gd=g, m=m, tol=tol)
} else {
if (length(theta) < 2) stop("theta must be a vector of length >= 2")
out = PdofCSt.cyc2(theta=theta, T=t, d=d, m=m, tol=tol)[2]
}
if (out>1) out=1.00
return(out)
}
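# Usage sketch (illustrative, not part of the original source): exact
# probability of correct selection for the top populations of a small mean
# vector. Assumes statmod is attached, since the helpers above call
# gauss.quad.prob() unqualified.
if (FALSE) {
  library(statmod)
  theta <- c(0, 0.5, 1, 2, 3)
  PCS.exact(theta, t = 1, g = 1)    # G-best selection with one extra slot
  PCS.exact(theta, t = 2, d = 0.5)  # d-best selection within half a unit
}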
PCS.boot.par <- function(theta, T=1:1, G=NULL, D=NULL, L=NULL, B=100, SDE=1, dist=c("normal","t"), df=14, trunc=6) {
if (length(theta) < 2) stop("theta must be a vector of length >= 2")
T = as.integer(T)
if (min(T)<1) stop("The elements of T must be positive integers")
if (max(T)>length(theta)) stop("The elements of T must be less than or equal to the length of theta")
if (B<1) stop("B (bootstrap size) must be greater than one")
if (SDE<=0) stop("SDE (standard error) must be positive")
dist=dist[1]
if (dist != "normal" & dist != "t") stop("dist must be either 'normal' or 't'")
if (df<=0) stop("df (degrees of freedom) must be positive")
if (trunc<=0) stop("trunc (truncation parameter) must be positive")
if (trunc<=4) warning("trunc<=4 and may therefore produce inaccuracies in estimating PCS")
if (is.null(G) & is.null(D) & is.null(L)) stop("A vector is required for G or D or L")
if (is.null(G) & !is.null(D) & is.null(L)) {
if (min(D)<0) stop("The elements of D must be non-negative")
out = PdofCSGt.bootstrap5(theta, T, D, G=0, B, SDE, dist, df, trunc)
out = out$d[,,1];
if(length(D)==1) out=matrix(out,nrow=1,ncol=length(T),dimnames=list(D,T)) }
if (!is.null(G) & is.null(D) & is.null(L)) {
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
out = PdofCSGt.bootstrap5(theta, T, D=0, G, B, SDE, dist, df, trunc)
out = out$G[,,1]
if(length(G)==1) out=matrix(out,nrow=1,ncol=length(T),dimnames=list(G,T)) }
if (is.null(G) & is.null(D) & !is.null(L)) {
L = as.integer(L); if (!is.integer(L) | min(L)<0) stop("The elements of L must be non-negative integers")
if (max(L) > max(T)) stop("The maximum element of L must be <= the maximum element of T")
out = PofCSLt.bootstrap5(theta, T, L, B, SDE, dist, df, trunc)
out = out[,,1]
if(length(L)==1) out=matrix(out,nrow=1,ncol=length(T),dimnames=list(L,T)) }
if (!is.null(G) & !is.null(D) & is.null(L)) {
if (min(D)<0) stop("The elements of D must be non-negative")
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
out = PdofCSGt.bootstrap5(theta, T, D, G, B, SDE, dist, df, trunc)
outD = out$d[,,1]; if(length(D)==1) outD=matrix(outD,nrow=1,ncol=length(T),dimnames=list(D,T))
outG = out$G[,,1]; if(length(G)==1) outG=matrix(outG,nrow=1,ncol=length(T),dimnames=list(G,T))
out = list(outG, outD); names(out)=c("G", "D") }
if (!is.null(G) & is.null(D) & !is.null(L)) {
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
L = as.integer(L); if (!is.integer(L) | min(L)<0) stop("The elements of L must be non-negative integers")
if (max(L) > max(T)) stop("The maximum element of L must be <= the maximum element of T")
outG = PdofCSGt.bootstrap5(theta, T, D=0, G, B, SDE, dist, df, trunc)
outL = PofCSLt.bootstrap5(theta, T, L, B, SDE, dist, df, trunc)
outG = outG$G[,,1]; if(length(G)==1) outG=matrix(outG,nrow=1,ncol=length(T),dimnames=list(G,T))
outL = outL[,,1]; if(length(L)==1) outL=matrix(outL,nrow=1,ncol=length(T),dimnames=list(L,T))
out = list(outG, outL); names(out)=c("G", "L") }
if (is.null(G) & !is.null(D) & !is.null(L)) {
if (min(D)<0) stop("The elements of D must be non-negative")
L = as.integer(L); if (!is.integer(L) | min(L)<0) stop("The elements of L must be non-negative integers")
if (max(L) > max(T)) stop("The maximum element of L must be <= the maximum element of T")
outD = PdofCSGt.bootstrap5(theta, T, D, G=0, B, SDE, dist, df, trunc)
outL = PofCSLt.bootstrap5(theta, T, L, B, SDE, dist, df, trunc)
outD = outD$d[,,1]; if(length(D)==1) outD=matrix(outD,nrow=1,ncol=length(T),dimnames=list(D,T))
outL = outL[,,1]; if(length(L)==1) outL=matrix(outL,nrow=1,ncol=length(T),dimnames=list(L,T))
out = list(outD, outL); names(out)=c("D", "L") }
if (!is.null(G) & !is.null(D) & !is.null(L)) {
if (min(D)<0) stop("The elements of D must be non-negative")
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
L = as.integer(L); if (!is.integer(L) | min(L)<0) stop("The elements of L must be non-negative integers")
if (max(L) > max(T)) stop("The maximum element of L must be <= the maximum element of T")
out = PdofCSGt.bootstrap5(theta, T, D, G, B, SDE, dist, df, trunc)
outL = PofCSLt.bootstrap5(theta, T, L, B, SDE, dist, df, trunc)
outD = out$d[,,1]; if(length(D)==1) outD=matrix(outD,nrow=1,ncol=length(T),dimnames=list(D,T))
outG = out$G[,,1]; if(length(G)==1) outG=matrix(outG,nrow=1,ncol=length(T),dimnames=list(G,T))
outL = outL[,,1]; if(length(L)==1) outL=matrix(outL,nrow=1,ncol=length(T),dimnames=list(L,T))
out = list(outG, outD, outL); names(out)=c("G", "D", "L") }
return(out)
}
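# Usage sketch (illustrative, not part of the original source): parametric
# bootstrap estimates of PCS for simulated effect sizes. A bootstrap size
# larger than the B = 200 used here would normally be preferred.
if (FALSE) {
  set.seed(1)
  theta <- rnorm(20)
  PCS.boot.par(theta, T = 1:2, G = 0:1, B = 200, SDE = 1, dist = "normal")
  PCS.boot.par(theta, T = 1, D = c(0, 0.5), B = 200, SDE = 1)
}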
PCS.boot.np <- function(X1, X2, T=1, G=1, D=NULL, B, trunc=6) {
if (!is.matrix(X1) | !is.matrix(X2)) stop("X1 and X2 must be matrices")
if (nrow(X1) != nrow(X2)) stop("The number of rows in X1 and X2 must be the same")
  if (anyNA(X1) || anyNA(X2)) stop("The entries of X1 and X2 cannot be 'NA'")
T = as.integer(T)
if (min(T)<1) stop("The elements of T must be positive integers")
N=B
if (N<1) stop("N (bootstrap size) must be greater than one")
if (trunc<=0) stop("truncation parameter (trunc) must be positive")
if (trunc<=4) warning("trunc<=4 and may therefore produce inaccuracies in estimating PCS")
if (is.null(G) & is.null(D)) stop("A vector is required for G or D")
if (is.null(G) & !is.null(D)) {
if (min(D)<0) stop("The elements of D must be non-negative")
    out = PdCSGt.bootstrap.NP2(X1, X2, T, D, G=0, N, trunc=trunc)
out = out$d }
if (!is.null(G) & is.null(D)) {
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
    out = PdCSGt.bootstrap.NP2(X1, X2, T, D=0, G, N, trunc=trunc)
out = out$G }
if (!is.null(G) & !is.null(D)) {
if (min(D)<0) stop("The elements of D must be non-negative")
G = as.integer(G); if (!is.integer(G) | min(G)<0) stop("The elements of G must be non-negative integers")
    out = PdCSGt.bootstrap.NP2(X1, X2, T, D, G, N, trunc=trunc)
out.d = out$d; out.G = out$G; out = list(out.G,out.d); names(out) = c("G","d") }
return(out)
} |
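# Usage sketch (illustrative, not part of the original source): nonparametric
# bootstrap PCS from two small expression-like matrices (rows = features,
# columns = replicates); the data are simulated.
if (FALSE) {
  set.seed(1)
  X1 <- matrix(rnorm(20 * 10), nrow = 20)
  X2 <- matrix(rnorm(20 * 10, mean = 0.5), nrow = 20)
  PCS.boot.np(X1, X2, T = 2, G = 1, D = NULL, B = 100)
}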
Set <- R6::R6Class(
classname = "Set",
public = list(
initialize = function(attributes, M = NULL, ...) {
private$attributes <- attributes
if (!is.null(M)) {
private$v <- Matrix::Matrix(M, sparse = TRUE)
} else {
private$v <- Matrix::Matrix(0,
nrow = length(attributes),
ncol = 1,
sparse = TRUE)
}
dots <- list(...)
if (length(dots) > 0) {
do.call(self$assign, dots)
}
},
assign = function(attributes = c(),
values = c(),
...) {
dots <- unlist(list(...))
attrs <- names(dots)
vals <- unname(dots)
attributes <- c(attributes, attrs)
values <- c(values, vals)
idx <- match(attributes, private$attributes)
nas <- which(is.na(idx))
if (length(nas) > 0) {
idx <- idx[-nas]
values <- values[-nas]
}
if (length(idx) > 0) {
private$v[idx] <- values
}
},
`[` = function(indices) {
if (is.logical(indices)) {
indices <- which(indices)
}
if (is.character(indices)) {
indices <- match(indices, private$attributes)
indices <- indices[!is.na(indices)]
}
if (is.numeric(indices)) {
indices <- indices[indices <= self$length()]
}
w <- private$v
idx <- setdiff(seq(self$length()), indices)
w[idx] <- 0
S <- Set$new(attributes = private$attributes,
M = w)
return(S)
},
cardinal = function() {
sum(private$v)
},
get_vector = function() {
private$v
},
get_attributes = function() {
private$attributes
},
length = function() {
length(private$attributes)
},
print = function(eol = TRUE) {
if (sum(private$v) > 0) {
cat(stringr::str_wrap(.set_to_string(S = private$v,
attributes = private$attributes),
width = getOption("width"),
exdent = 2))
if (eol) {
cat("\n")
} else {
cat("")
}
} else {
if (eol) {
cat("{}\n")
} else {
cat("{}")
}
}
},
to_latex = function(print = TRUE) {
str <- "\\ensuremath{\\varnothing}"
if (sum(private$v) > 0) {
str <- set_to_latex(S = private$v,
attributes = private$attributes)
}
if (print) {
cat(str)
return(invisible(str))
} else {
return(str)
}
}
),
private = list(
v = NULL,
attributes = NULL
)
) |
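# Usage sketch (illustrative, not part of the original source): build a fuzzy
# set over three attributes and query it. Requires R6 and Matrix; note that
# print() and to_latex() rely on helpers (.set_to_string(), set_to_latex())
# defined elsewhere in the package and not shown here.
if (FALSE) {
  S <- Set$new(attributes = c("a", "b", "c"))
  S$assign(a = 0.5, c = 1)
  S$cardinal()        # 1.5
  S$get_vector()
  S$get_attributes()
}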
boot_sdm <- function(x,
boot.R = 999,
ncpus = 1,
seed = NULL)
{
assertthat::assert_that(
is.numeric(x), length(x) > 1,
assertthat::is.count(boot.R), boot.R > 1,
assertthat::is.count(ncpus))
if (!is.null(seed)) {
set.seed(seed)
}
if(ncpus > 1){
x.boot <- parallel::mclapply(1:boot.R,
function(i){
mean(sample(x,
size = length(x),
replace = TRUE))},
mc.cores = ncpus)
} else {
x.boot <- lapply(1:boot.R,
function(i){
mean(sample(x,
size = length(x),
replace = TRUE))})
}
return(unlist(x.boot))
} |
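# Usage sketch (illustrative, not part of the original source): bootstrap the
# sampling distribution of the mean and derive a percentile interval.
# Requires assertthat; ncpus > 1 uses parallel::mclapply(), which forks and is
# therefore not available on Windows.
if (FALSE) {
  x <- rnorm(30, mean = 10)
  boots <- boot_sdm(x, boot.R = 999, seed = 123)
  quantile(boots, c(0.025, 0.975))
}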
"scIVTmag" |
tidy_toMonitor <- function(data = NULL) {
if (monitor_isTidy(data)) {
metaColumns <-
names(dplyr::select(data, -.data$datetime, -.data$pm25))
wideData <- tidyr::spread(data, .data$datetime, .data$pm25)
meta <- dplyr::select(wideData, metaColumns) %>%
as.data.frame()
rownames(meta) <- meta$monitorID
data_wide <-
dplyr::select(wideData, -metaColumns, .data$monitorID)
data <-
tidyr::gather(data_wide, "datetime", "pm25", -.data$monitorID) %>%
tidyr::spread("monitorID", "pm25") %>%
as.data.frame()
ws_monitor <- structure(list(data = data, meta = meta),
class = c("ws_monitor", "list"))
} else if (monitor_isMonitor(data)) {
message("Data is already a ws_monitor object.")
ws_monitor <- data
} else {
stop("Data is not in a reconized format.")
}
return(ws_monitor)
} |
context('GetRateSummary')
test_that("'getURL' errors are handled gracefully", {
set_zillow_web_service_id('ZWSID')
with_mock(
getURL = function(...) {stop('Cryptic getURL error')},
expect_error(GetRateSummary(), "Zillow API call with request '.+' failed with Error in RCurl::getURL\\(request\\): Cryptic getURL error"),
.env = 'RCurl'
)
}) |
cnsc <- function(...) {
cnscinfun()
}
cnscinfun <- function() {
MC <- match.call(definition = sys.function(sys.parent(1)),
call = sys.call(sys.parent(1)), expand.dots = FALSE,
envir = parent.frame(2L))
mconvert <- function(cla, obj) {
charobj <- as.character(obj)
switch(cla,
"call" = eval(obj),
"name" = if (charobj %in% ls(.GlobalEnv)) eval(obj) else charobj,
c(obj)
)
}
if (length(MC) < 2L) NULL else {
OBJ <- MC[[2]]
CLA <- lapply(OBJ, class)
if (length(OBJ) == 1L) {
mconvert(CLA[[1]], OBJ[[1]])
} else {
unlist(mapply(mconvert, CLA, OBJ, SIMPLIFY = FALSE), recursive = FALSE)
}
}
}
cnscinfun2 <- function(...) {
MC <- match.call(definition = sys.function(sys.parent(2)),
call = sys.call(sys.parent(2)), expand.dots = FALSE,
envir = parent.frame(3L))
mconvert <- function(cla, obj) {
charobj <- as.character(obj)
switch(cla,
"call" = eval(obj),
"name" = if (charobj %in% ls(.GlobalEnv)) eval(obj) else charobj,
c(obj)
)
}
if (length(MC) < 2L) NULL else {
OBJ <- MC[[2]]
CLA <- lapply(OBJ, class)
if (length(OBJ) == 1L) {
mconvert(CLA[[1]], OBJ[[1]])
} else {
unlist(mapply(mconvert, CLA, OBJ, SIMPLIFY = FALSE), recursive = FALSE)
}
}
} |
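# Usage sketch (illustrative, not part of the original source): cnsc() turns
# unquoted names into character strings, while names that match objects in the
# global environment (and quoted strings) are passed through as their values.
if (FALSE) {
  cnsc(stats, graphics, "utils")   # c("stats", "graphics", "utils")
  v <- c("a", "b")
  cnsc(v, z)                       # c("a", "b", "z")
}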
structure(list(url = "https://api.twitter.com/2/tweets?tweet.fields=attachments%2Cauthor_id%2Cconversation_id%2Ccreated_at%2Centities%2Cgeo%2Cid%2Cin_reply_to_user_id%2Clang%2Cpublic_metrics%2Cpossibly_sensitive%2Creferenced_tweets%2Csource%2Ctext%2Cwithheld&user.fields=created_at%2Cdescription%2Centities%2Cid%2Clocation%2Cname%2Cpinned_tweet_id%2Cprofile_image_url%2Cprotected%2Cpublic_metrics%2Curl%2Cusername%2Cverified%2Cwithheld&expansions=author_id%2Centities.mentions.username%2Cgeo.place_id%2Cin_reply_to_user_id%2Creferenced_tweets.id%2Creferenced_tweets.id.author_id&place.fields=contained_within%2Ccountry%2Ccountry_code%2Cfull_name%2Cgeo%2Cid%2Cname%2Cplace_type&ids=1%2C2%2C3%2C4%2C5%2C6%2C7%2C8%2C1266858090143588352%2C1266857669157097473%2C1266856357954756609%2C1266855807699861506%2C1266855344086663169%2C1266854627758276608%2C1266854586188476421",
status_code = 200L, headers = structure(list(date = "Sun, 19 Dec 2021 20:49:16 UTC",
server = "tsa_o", `api-version` = "2.32", `content-type` = "application/json; charset=utf-8",
`cache-control` = "no-cache, no-store, max-age=0", `content-length` = "4576",
`x-access-level` = "read", `x-frame-options` = "SAMEORIGIN",
`content-encoding` = "gzip", `x-xss-protection` = "0",
`x-rate-limit-limit` = "300", `x-rate-limit-reset` = "1639947531",
`content-disposition` = "attachment; filename=json.json",
`x-content-type-options` = "nosniff", `x-rate-limit-remaining` = "268",
`strict-transport-security` = "max-age=631138519", `x-response-time` = "320",
`x-connection-hash` = "a8dd0b91b56c071e60ae4b83ca064c192d64678a1e03c31011044170217df8ed"), class = c("insensitive",
"list")), all_headers = list(list(status = 200L, version = "HTTP/2",
headers = structure(list(date = "Sun, 19 Dec 2021 20:49:16 UTC",
server = "tsa_o", `api-version` = "2.32", `content-type` = "application/json; charset=utf-8",
`cache-control` = "no-cache, no-store, max-age=0",
`content-length` = "4576", `x-access-level` = "read",
`x-frame-options` = "SAMEORIGIN", `content-encoding` = "gzip",
`x-xss-protection` = "0", `x-rate-limit-limit` = "300",
`x-rate-limit-reset` = "1639947531", `content-disposition` = "attachment; filename=json.json",
`x-content-type-options` = "nosniff", `x-rate-limit-remaining` = "268",
`strict-transport-security` = "max-age=631138519",
`x-response-time` = "320", `x-connection-hash` = "a8dd0b91b56c071e60ae4b83ca064c192d64678a1e03c31011044170217df8ed"), class = c("insensitive",
"list")))), cookies = structure(list(domain = c(".twitter.com",
".twitter.com", ".twitter.com", ".twitter.com"), flag = c(TRUE,
TRUE, TRUE, TRUE), path = c("/", "/", "/", "/"), secure = c(TRUE,
TRUE, TRUE, TRUE), expiration = structure(c(1702744284, 1702744284,
1702744284, 1702744284), class = c("POSIXct", "POSIXt")),
name = c("guest_id_marketing", "guest_id_ads", "personalization_id",
"guest_id"), value = c("REDACTED", "REDACTED", "REDACTED",
"REDACTED")), row.names = c(NA, -4L), class = "data.frame"),
content = charToRaw("{\"data\":[{\"possibly_sensitive\":false,\"created_at\":\"2020-05-30T22:24:40.000Z\",\"text\":\"RT @wlkutsch: Gestern hatten wir ein CO2FFEE mit @ICOS_RI über die Arktis: Wissenschaft: wir müssen Emissionen schnell reduzieren. Politik:…\",\"lang\":\"de\",\"id\":\"1266858090143588352\",\"public_metrics\":{\"retweet_count\":2,\"reply_count\":0,\"like_count\":0,\"quote_count\":0},\"source\":\"Twitter for iPhone\",\"referenced_tweets\":[{\"type\":\"retweeted\",\"id\":\"1266779244484575232\"}],\"entities\":{\"mentions\":[{\"start\":3,\"end\":12,\"username\":\"wlkutsch\",\"id\":\"2816669118\"},{\"start\":49,\"end\":57,\"username\":\"ICOS_RI\",\"id\":\"3216719140\"}]},\"author_id\":\"1180995990284853248\",\"conversation_id\":\"1266858090143588352\"},{\"possibly_sensitive\":false,\"created_at\":\"2020-05-30T22:23:00.000Z\",\"text\":\"RT @TheReal32492440:
date = structure(1639946956, class = c("POSIXct", "POSIXt"
), tzone = "GMT"), times = c(redirect = 0, namelookup = 2.9e-05,
connect = 3e-05, pretransfer = 0.000114, starttransfer = 0.334972,
total = 0.335458)), class = "response") |
context("LCM() is working properly")
test_that("LCM works", {
expect_equal(LCM(5L, 7L), 35L)
expect_equal(LCM(5L, 8L), 40L)
expect_equal(LCM(5L, 9L), 45L)
expect_equal(LCM(5L, 10L), 10L)
}) |
pribor <- function(date = Sys.Date() - 1, maturity = "1D") {
cnb <- as.logical(Sys.getenv("CNB_UP", unset = TRUE))
if (!ok_to_proceed("https://www.cnb.cz/en/financial-markets/money-market/pribor/fixing-of-interest-rates-on-interbank-deposits-pribor/year.txt") | !cnb) {
message("Data source broken.")
return(NULL)
}
if(!inherits(date, "Date")) stop("'date' parameter expected as a Date data type!")
if(!all(maturity %in% c("1D", "1W", "2W", "1M", "3M", "6M", "9M", "1Y"))) stop(paste0("'", maturity, "' is not a recognized maturity abbreviation!"))
roky <- format(date, "%Y") %>%
unique()
sazba <- paste0("PRIBOR_", maturity)
res <- lapply(roky, dnl_pribor) %>%
dplyr::bind_rows() %>%
dplyr::filter(date_valid %in% date) %>%
dplyr::select(date_valid, !! sazba) %>%
dplyr::mutate_if(is.numeric, ~ . / 100) %>%
dplyr::arrange(date_valid)
res
}
dnl_pribor <- function(year) {
remote_path <- "https://www.cnb.cz/cs/financni-trhy/penezni-trh/pribor/fixing-urokovych-sazeb-na-mezibankovnim-trhu-depozit-pribor/rok.txt?year="
remote_file <- paste0(remote_path, year)
local_file <- file.path(tempdir(), paste0("pr-", year, ".txt"))
if (!file.exists(local_file)) {
curl::curl_download(url = remote_file, destfile = local_file, quiet = T)
Sys.sleep(1/500)
}
local_df <- readr::read_delim(local_file,
delim = "|", skip = 2,
locale = readr::locale(decimal_mark = ","),
col_names = c(
"date_valid",
"PRIBID_1D", "PRIBOR_1D",
"PRIBID_1W", "PRIBOR_1W",
"PRIBID_2W", "PRIBOR_2W",
"PRIBID_1M", "PRIBOR_1M",
"PRIBID_2M", "PRIBOR_2M",
"PRIBID_3M", "PRIBOR_3M",
"PRIBID_6M", "PRIBOR_6M",
"PRIBID_9M", "PRIBOR_9M",
"PRIBID_1Y", "PRIBOR_1Y"
),
col_types = readr::cols(
date_valid = readr::col_date(format = "%d.%m.%Y"),
PRIBID_1D = readr::col_double(),
PRIBOR_1D = readr::col_double(),
PRIBID_1W = readr::col_double(),
PRIBOR_1W = readr::col_double(),
PRIBID_2W = readr::col_double(),
PRIBOR_2W = readr::col_double(),
PRIBID_1M = readr::col_double(),
PRIBOR_1M = readr::col_double(),
PRIBID_2M = readr::col_double(),
PRIBOR_2M = readr::col_double(),
PRIBID_3M = readr::col_double(),
PRIBOR_3M = readr::col_double(),
PRIBID_6M = readr::col_double(),
PRIBOR_6M = readr::col_double(),
PRIBID_9M = readr::col_double(),
PRIBOR_9M = readr::col_double(),
PRIBID_1Y = readr::col_double(),
PRIBOR_1Y = readr::col_double()
)
)
attr(local_df, 'spec') <- NULL
local_df
} |
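# Usage sketch (illustrative, not part of the original source): fetch PRIBOR
# fixings for a few dates. Needs internet access, an attached pipe operator
# (%>%), and the package's unexported helper ok_to_proceed(), none of which
# are defined in this file.
if (FALSE) {
  pribor(as.Date("2019-06-14"), maturity = "3M")
  pribor(seq(as.Date("2019-03-04"), as.Date("2019-03-08"), by = "day"), "1D")
}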
exhaustiveMP<-function(data,tree=NULL,method="branch.and.bound"){
if(method=="branch.and.bound"){
if(length(data)>15) stop("branch and bound only allowed for n<=15")
if(is.null(tree)){
if(attr(data,"type")=="DNA"){
print("no input tree; starting with NJ tree")
tree<-NJ(dist.dna(as.DNAbin(data)))
} else {
print("no input tree; using random starting tree")
tree<-rtree(n=length(data),tip.label=names(data),br=NULL,rooted=FALSE)
}
}
trees<-branch.and.bound(data,tree)
} else if(method=="exhaustive"){
if(length(data)>10) stop("exhaustive search only allowed for n<=10")
if(!is.null(tree)) print("starting tree not necessary for exhaustive search")
trees<-exhaustive.search(data)
}
if(length(trees)==1) trees<-trees[[1]]
return(trees)
}
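# Usage sketch (illustrative, not part of the original source): branch-and-bound
# maximum parsimony on a small subset of the Laurasiatherian alignment shipped
# with phangorn. Assumes phytools (for add.everywhere()) and phangorn/ape (for
# parsimony(), NJ(), dist.dna(), stree()) are attached.
if (FALSE) {
  library(phytools)
  library(phangorn)
  data(Laurasiatherian)
  small <- subset(Laurasiatherian, 1:8)
  mpTrees <- exhaustiveMP(small, method = "branch.and.bound")
  mpTrees
}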
branch.and.bound<-function(data,tree){
bound<-parsimony(tree,data)
if(is.null(tree$edge.length)){
print("starting with 3 species chosen at random")
new<-list(stree(n=3,tip.label=sample(tree$tip.label,3)))
} else {
print("starting with 3 species chosen to maximize distance")
		# seed with the three tips that maximize total pairwise distance
		mdSp<-names(sort(colSums(cophenetic(tree)),decreasing=TRUE))[1:3]
		print(mdSp)
new<-list(stree(n=3,tip.label=mdSp))
}
class(new)<-"multiPhylo"
added<-new[[1]]$tip.label; remaining<-setdiff(tree$tip.label,added)
while(length(remaining)>0){
old<-new; new<-list()
new.tip<-sample(remaining,1)
pscores<-vector()
for(i in 1:length(old)){
temp<-add.everywhere(old[[i]],new.tip)
score<-parsimony(temp,data)
new<-c(new,temp[score<=bound])
pscores<-c(pscores,score[score<=bound])
}
added<-c(added,new.tip)
print(paste(length(added),"species added;",length(new),"trees retained",collapse=""))
remaining<-setdiff(tree$tip.label,added)
}
trees<-new[pscores==min(pscores)]
for(i in 1:length(trees)) attr(trees[[i]],"pscore")<-min(pscores)
return(trees)
}
exhaustive.search<-function(data){
all.trees<-allTrees(n=length(data),tip.label=names(data),rooted=FALSE)
print(paste("searching",length(all.trees),"trees",collapse=""))
all.trees = .uncompressTipLabel(all.trees)
pscores<-parsimony(all.trees,data)
minscore<-min(pscores); trees<-all.trees[pscores==minscore]
for(i in 1:length(trees)) attr(trees[[i]],"pscore")<-min(pscores)
return(trees)
} |
py_main_thread_func <- function(f) {
tools <- import("rpytools")
tools$thread$main_thread_func(f)
} |
.build_classification=function(species){
.split_lab=function(label){
lab=gsub(".", "_", label, fixed=TRUE)
lab=gsub(" ", "_", lab, fixed=TRUE)
lab=unlist(strsplit(lab, "_", fixed=TRUE))
lab=lab[lab!=""]
lab
}
data.frame(genus=sapply(species, function(s) .split_lab(s)[1]), species=species, stringsAsFactors=FALSE)
}
.build_calibrations=function(dat, scion, scion_desc=NULL, tol=0){
fetch_spanning=function(phy, nd, desc){
if(nd<=Ntip(phy)) return(NULL)
dd=.get.desc.of.node(nd,phy)[1:2]
tt=sapply(dd, function(x) return(desc[[x]][1]))
return(phy$tip.label[sort(tt)])
}
if(is.null(scion_desc)) scion_desc=.cache.descendants(scion)$tips
N=Ntip(scion)
stock_times=dat
scion_hash=scion$hash
df=data.frame(MRCA=scion_hash[(N+1):length(scion_hash)], MaxAge=NA, MinAge=NA, taxonA=NA, taxonB=NA, valid=FALSE, stringsAsFactors=FALSE)
for(i in 1:nrow(df)){
if(!is.na(hash.cur<-df$MRCA[i])){
if(hash.cur%in%stock_times$hash){
node.idx=i+N
df[i,c("MaxAge","MinAge")]<-age.idx<-stock_times$time[match(hash.cur, stock_times$hash)]
df[i,c("taxonA","taxonB")]<-taxa.idx<-fetch_spanning(scion, node.idx, scion_desc)
if(age.idx>tol & all(!is.na(taxa.idx))) df[i,"valid"]=TRUE
}
}
}
df=df[df$valid,]
return(df[,-which(names(df)=="valid")])
}
congruify.phylo=function(reference, target, taxonomy=NULL, tol=0, scale=c(NA, "PATHd8", "treePL"), ncores=NULL){
stock=reference
scion=target
method=match.arg(unname(sapply(scale, toString)), c("NA", "PATHd8", "treePL"))
hashes.mapping <- function (phy, taxa, mapping){
mapping=mapping[names(mapping)%in%phy$tip.label]
if(is.null(taxa)) stop("Must supply 'tips'.")
if(!all(names(mapping)%in%phy$tip.label)) stop("'mapping' must be named list with names in tip labels of 'phy'.")
mapping=mapping[match(names(mapping), phy$tip.label)]
descendants <- .cache.descendants(phy)$tips
hashes <- sapply(descendants, function(desc) .hash.tip(unlist(mapping[desc]), taxa))
empty=.md5(integer(length(taxa)))
hashes[hashes==empty]=NA
phy$hash=hashes
phy=.uniquify_hashes(phy)
return(phy)
}
times.mapping=function(phy, taxa, mapping){
stock=hashes.mapping(phy, taxa, mapping)
tmp=heights.phylo(stock)
tmp$hash=stock$hash
dat=data.frame(time=tmp[,"end"], hash=stock$hash, stringsAsFactors=FALSE)
dat$hash[1:Ntip(stock)]=NA
return(list(stock=stock,dat=dat))
}
smooth_scion=function(stock, scion, scion_desc, taxa, spp, tol=0.01, method=c("PATHd8", NA, "treePL")){
method=match.arg(toString(method), c("NA", "PATHd8", "treePL"))
if(!is.ultrametric(stock, tol=tol)) warning("Supplied 'stock' is non-ultrametric.")
stock_tmp=times.mapping(stock, taxa, spp)
stock=stock_tmp$stock
stock_dat=stock_tmp$dat
calibration=.build_calibrations(stock_dat, scion, scion_desc, tol=tol)
if(!nrow(calibration)) {
warning("No concordant branches reconciled between 'stock' and 'scion'; ensure that 'tax' involves rownames found as tip labels in 'scion'")
return(NA)
}
if(method=="PATHd8") {
phy=PATHd8.phylo(scion, calibration, base=".tmp_PATHd8", rm=FALSE)
phy$hash=c(rep("", Ntip(phy)), phy$node.label)
phy$hash[phy$hash==""]=NA
} else if(method=="treePL") {
phy=treePL.phylo(scion, calibration, base=".tmp_treePL", rm=FALSE)
phy$hash=c(rep("", Ntip(phy)), phy$node.label)
phy$hash[phy$hash==""]=NA
} else if(method=="NA"){
phy=NULL
}
stock$node.label=stock$hash[(Ntip(stock)+1):max(stock$edge)]
stock$node.label[is.na(stock$node.label)]=""
return(list(phy=phy, calibrations=calibration, reference=stock, target=scion))
}
classification=taxonomy
unfurl=FALSE
if(class(stock)=="phylo") {
stock=list(stock)
unfurl=TRUE
}
if(is.null(classification)) {
classification=as.data.frame(unique(as.matrix(.build_classification(scion$tip.label)),MARGIN=2))
}
tips=unique(unlist(lapply(stock, function(x) x$tip.label)))
spp=lapply(tips, function(o) {
x=rownames(classification)[which(classification==o, arr.ind=TRUE)[,1]]
x=x[x%in%scion$tip.label]
})
names(spp)=tips
taxa=unique(unlist(spp))
scion=hashes.phylo(scion, taxa, ncores)
scion_desc=.cache.descendants(scion)$tips
if(is.null(scion$edge.length)) scion$edge.length=numeric(nrow(scion$edge))
f=lapply
results=f(1:length(stock), function(i) {
phy=stock[[i]]
smooth_scion(phy, scion, scion_desc, taxa, spp, tol=tol, method=method)
})
if(unfurl) results=results[[1]]
return(results)
}
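# Usage sketch (illustrative, not part of the original source): the call pattern
# for congruification. 'familyTree' (a dated reference tree), 'speciesTree' (an
# undated target tree) and 'taxonTable' (a data.frame whose rownames are the
# target tip labels) are hypothetical objects; scaling with "PATHd8" further
# requires the PATHd8 binary on the PATH and the hashing helpers called above
# (.cache.descendants(), hashes.phylo(), heights.phylo(), ...).
if (FALSE) {
  res <- congruify.phylo(reference = familyTree, target = speciesTree,
                         taxonomy = taxonTable, tol = 0, scale = "PATHd8")
  res$calibrations
  res$phy
}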
write.treePL=function(phy, calibrations, nsites=10000, min=0.0001, base="", opts=list(smooth=100, nthreads=8, optad=0, opt=1, cvstart=1000, cviter=3, cvend=0.1, thorough=TRUE)){
if(file.exists(inp<-paste(base,"infile",sep="."))) unlink(inp)
if(file.exists(int<-paste(base,"intree",sep="."))) unlink(inp)
poss=list(
cv="numeric",
collapse="boolean",
checkconstraints="boolean",
cvstart="numeric",
cvstop="numeric",
cvmultstep="numeric",
verbose="boolean",
lftemp="numeric",
pltemp="numeric",
plcool="numeric",
lfstoptemp="numeric",
plstoptemp="numeric",
lfrtstep="numeric",
plrtstep="numeric",
thorough="boolean",
lfiter="integer",
pliter="integer",
cviter="integer",
ldfsimaniter="integer",
plsimaniter="integer",
cvsimaniter="integer",
calcgrad="numeric",
paramverbose="boolean",
prime="boolean",
opt="boolean",
optad="boolean",
optcvad="boolean",
moredetail="boolean",
moredetailad="boolean",
moredetailcvad="boolean",
randomcv="boolean",
ftol="numeric",
xtol="numeric",
mapspace="boolean",
nthreads="integer"
)
if(length(opts)==0) {
print(poss)
stop("No 'opts' specified")
}
z=phy$edge.length[which(phy$edge.length>0)]
if(any(z<min)){
scl=min/min(z)
phy$edge.length=phy$edge.length*scl
}
write.tree(phy, file=int)
constraints<-constraintnames<-character(nrow(calibrations))
for(i in 1:nrow(calibrations)){
cal=calibrations[i,]
taxon=cal$MRCA
desc=c(cal$taxonA, cal$taxonB)
txt1=ifelse(!is.na(cal$MinAge), paste("min =", taxon, cal$MinAge, sep=" "), "")
txt2=ifelse(!is.na(cal$MaxAge), paste("max =", taxon, cal$MaxAge, sep=" "), "")
txt=paste(txt1,txt2,sep="\n")
constraints[i]=txt
constraintnames[i]=paste("mrca =", taxon, desc[1], desc[2], sep=" ")
}
infile=list(
tree=paste("treefile = ", int, sep=""),
ns=paste("numsites = ", nsites, sep=""),
names=paste(unlist(constraintnames), collapse="\n"),
mrca=paste(unlist(constraints), collapse="\n"),
out=paste("outfile = ", paste(base, "dated", "tre", sep="."), sep=""),
opt=paste(names(opts), opts, sep="=", collapse="\n")
)
inp=paste(base,"infile",sep=".")
writeLines(paste(infile,collapse="\n\n"), con=inp)
attr(inp, "method")="treePL"
return(inp)
}
write.r8s=function(phy=NULL, calibrations, base="", blformat=c(lengths="persite", nsites=10000, ultrametric="no", round="yes"), divtime=c(method="NPRS", algorithm="POWELL"), describe=c(plot="chrono_description"), cv=c(cvStart=0, cvInc=0.5, cvNum=8), do.cv=FALSE){
if(file.exists(inp<-paste(base,"infile",sep="."))) unlink(inp)
constraints<-constraintnames<-character(nrow(calibrations))
for(i in 1:nrow(calibrations)){
cal=calibrations[i,]
taxon=cal$MRCA
desc=c(cal$taxonA, cal$taxonB)
txt=paste(paste("\tfixage taxon=", taxon,sep=""), paste("age=", cal$MinAge, ";\n", sep=""), sep=" ")
constraints[i]=txt
constraintnames[i]=paste("\tMRCA", taxon, desc[1], paste(desc[2], ";\n", sep=""), sep=" ")
}
cv.code <- ""
if(do.cv) {
cv.code <- paste(paste(names(cv), cv, sep="="), ";\n", sep="", collapse="")
}
infile=paste(c(
"
"begin trees;\n",
paste("tree r8s = ", write.tree(phy), "\n", sep=""),
"end;\n",
"begin r8s;\n",
paste("\tblformat ", paste(names(blformat), blformat, collapse=" ", sep="="), ";\n", sep=""),
names=paste(unlist(constraintnames), collapse=""),
mrca=paste(unlist(constraints), collapse=""),
"\tcollapse;\n",
paste("\tdivtime ", paste(names(divtime), divtime, collapse=" ", sep="="), cv.code, ";\n", sep=""),
paste("\tdescribe ", paste(names(describe), describe, sep="="), ";\n", sep="", collapse=""),
"end;"
),collapse="")
inp=paste(base,"infile",sep=".")
writeLines(paste(infile,collapse="\n\n"), con=inp)
attr(inp, "method")="r8s"
return(inp)
}
write.pathd8=function(phy, calibrations, base=""){
if(file.exists(inp<-paste(base,"infile",sep="."))) unlink(inp)
check=function(t, phy) all(t%in%phy$tip.label)
a=check(calibrations$taxonA, phy)
b=check(calibrations$taxonB, phy)
if(!all(c(a,b))) stop("Some calibrations not encountered in tree.")
calibrations$fixage=ifelse(calibrations$MinAge==calibrations$MaxAge, TRUE, FALSE)
constraints<-constraintnames<-character(nrow(calibrations))
for(i in 1:nrow(calibrations)){
cal=calibrations[i,]
taxon=cal$MRCA
desc=c(cal$taxonA, cal$taxonB)
if(cal$fixage) {
txt=paste("mrca:", paste(desc[1], ", ", desc[2], ", ", sep=""), paste("fixage=", cal$MinAge, ";", sep=""), sep=" ")
} else {
txt1=paste("mrca:", paste(desc[1], ", ", desc[2], ", ", sep=""), paste("minage=", cal$MinAge, ";", sep=""), sep=" ")
txt2=paste("mrca:", paste(desc[1], ", ", desc[2], ", ", sep=""), paste("maxage=", cal$MaxAge, ";", sep=""), sep=" ")
txt=paste(txt1,txt2,sep="\n")
}
constraints[i]=txt
constraintnames[i]=paste("name of mrca: ", paste(desc[1], ", ", desc[2], ", ", sep=""), paste("name=", cal$MRCA, ";", sep=""), sep=" ")
}
phy$node.label=NULL
infile=list(tree=write.tree(phy),
mrca=paste(unlist(constraints), collapse="\n"),
names=paste(unlist(constraintnames), collapse="\n")
)
inp=paste(base,"infile",sep=".")
writeLines(paste(infile,collapse="\n\n"), con=inp)
attr(inp, "method")="pathd8"
return(inp)
}
PATHd8.phylo=function(phy, calibrations=NULL, base="", rm=TRUE){
phy$node.label=NULL
if(!is.null(calibrations)){
infile=write.pathd8(phy, calibrations, base)
} else {
infile=paste(base, "infile", sep=".")
write.tree(phy, infile)
}
smooth.file=paste(base, "smoothed.tre", sep=".")
parsed.outfile=paste(base, "pathd8.out", sep=".")
outfile=paste(base, "pathd8.orig.out", sep=".")
if(file.exists(outfile)) unlink(outfile)
if(!system("which PATHd8", ignore.stdout=TRUE)==0) stop("Install 'PATHd8' before proceeding.")
system(paste("PATHd8 -n", infile, "-pn >", outfile, sep=" "))
system(paste("grep \"d8 tree\" ", outfile, ">", parsed.outfile, sep=" "))
smoothed=read.tree(parsed.outfile)
if(rm & base=="") {
unlink(parsed.outfile)
unlink(smooth.file)
unlink(outfile)
unlink(infile)
}
return(smoothed)
}
treePL.phylo=function(phy, calibrations=NULL, base="", rm=TRUE){
phy$node.label=NULL
if(!is.null(calibrations)){
infile=write.treePL(phy=phy, calibrations=calibrations, base=base)
} else {
infile=paste(base, "infile", sep=".")
write.tree(phy, infile)
}
smooth.file=paste(base, "dated.tre", sep=".")
outfile=paste(base, "treePL.orig.out", sep=".")
if(file.exists(outfile)) unlink(outfile)
if(!system("which treePL", ignore.stdout=TRUE)==0) stop("Install 'treePL' before proceeding.")
system(paste("treePL ", infile, " >", outfile, sep=" "))
smoothed=read.tree(smooth.file)
if(rm & base=="") {
unlink(smooth.file)
unlink(outfile)
unlink(infile)
}
return(smoothed)
}
r8s.phylo=function(phy, calibrations=NULL, base="r8srun", ez.run="none", rm=TRUE, blformat=c(lengths="persite", nsites=10000, ultrametric="no", round="yes"), divtime=c(method="NPRS", algorithm="POWELL"), cv=c(cvStart=0, cvInc=0.5, cvNum=8), do.cv=FALSE){
if(grepl("nprs",ez.run, ignore.case=TRUE)) {
divtime <- c(method="NPRS", algorithm="POWELL")
do.cv <- FALSE
}
if(grepl("pl",ez.run, ignore.case=TRUE)) {
divtime <- c(method="PL", algorithm="qnewt")
cv <- c(cvStart=0, cvInc=0.5, cvNum=8)
do.cv <- TRUE
}
phy$node.label=NULL
if(!is.null(calibrations)){
infile=write.r8s(phy, calibrations, base, blformat=blformat, divtime=divtime, cv=cv, do.cv=do.cv)
} else {
infile=paste(base, "infile", sep=".")
write.tree(phy, infile)
}
smooth.file=paste(base, "smoothed.tre", sep=".")
parsed.outfile=paste(base, "r8s.out", sep=".")
outfile=paste(base, "r8s.orig.out", sep=".")
if(file.exists(outfile)) unlink(outfile)
if(!system("which r8s", ignore.stdout=TRUE)==0) stop("Install 'r8s' before proceeding.")
system(paste("r8s -b -f", infile, " >", outfile, sep=" "))
system(paste("grep \"tree r8s\" ", outfile, ">", parsed.outfile, sep=" "))
smoothed=read.tree(parsed.outfile)
if(rm & base=="") {
unlink(parsed.outfile)
unlink(smooth.file)
unlink(outfile)
unlink(infile)
}
return(smoothed)
} |
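# Usage sketch (added for illustration; 'phy' and the ages below are hypothetical,
# not from the original source). The dating wrappers above expect a 'calibrations'
# data frame with columns MRCA, taxonA, taxonB, MinAge and MaxAge, and require the
# external PATHd8, treePL or r8s binaries to be available on the system PATH.
cal <- data.frame(MRCA = "node1", taxonA = "sp1", taxonB = "sp4",
MinAge = 10, MaxAge = 12, stringsAsFactors = FALSE)
# dated <- PATHd8.phylo(phy, calibrations = cal, base = "example_run")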
"led_engin" |
dist2list <-
function(dist){
if(!class(dist) == "dist"){
stop("the input data must be a dist object.")
}
dat <- as.data.frame(as.matrix(dist))
if(is.null(names(dat))){
rownames(dat) <- paste(1:nrow(dat))
}
value <- stack(dat)$values
rnames <- rownames(dat)
namecol <- expand.grid(rnames,rnames)
colnames(namecol) <- c("col", "row")
res <- data.frame(namecol, value)
return(res)
} |
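# Illustrative usage sketch (added, not part of the original source): convert a
# small 'dist' object into long pairwise format with dist2list() above.
example_dist <- dist(matrix(1:6, nrow = 3,
dimnames = list(c("a", "b", "c"), NULL)))
dist2list(example_dist)
# -> a data frame with columns 'col', 'row' and 'value', one row for each
# ordered pair of observations (self-distances included)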
getNameTestType <- function(testType, parameterName) {
nameChar <- switch(testType,
"oneSample"="Safe One Sample",
"paired"="Safe Paired Sample",
"twoSample"="Safe Two Sample",
"gLogrank"="Safe Gaussian",
"eLogrank"="Safe Exact",
"logrank"="Safe",
"2x2" = "Safe Test of Two Proportions")
testName <- switch(parameterName,
"phiS"="Z-Test",
"deltaS"="T-Test",
"thetaS"="Logrank Test")
return(paste(nameChar, testName))
}
getNameAlternative <- function(alternative=c("two.sided", "greater", "less"), testType, h0=0) {
alternative <- match.arg(alternative)
if (testType == "oneSample") {
trueMeanStatement <- "true mean"
} else if (testType %in% c("paired", "twoSample")) {
trueMeanStatement <- "true difference in means ('x' minus 'y') is"
} else if (testType == "2x2") {
trueMeanStatement <- "true difference between proportions in group a and b is"
} else if (testType %in% c("gLogrank", "eLogrank", "logrank")) {
trueMeanStatement <- "true hazard ratio is"
}
nameChar <- paste(trueMeanStatement, switch(alternative,
"two.sided"= paste("not equal to", h0),
"greater"= paste("greater than", h0),
"less"= paste("less than", h0))
)
return(nameChar)
}
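# Quick illustration (added; argument values are arbitrary) of how the two helpers
# above compose into the strings used by the print methods below.
getNameTestType(testType = "twoSample", parameterName = "deltaS")
# [1] "Safe Two Sample T-Test"
getNameAlternative(alternative = "greater", testType = "twoSample", h0 = 0)
# [1] "true difference in means ('x' minus 'y') is greater than 0"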
print.safeTest <- function (x, digits = getOption("digits"), prefix = "\t", ...) {
designObj <- x[["designObj"]]
if (!is.null(x[["testType"]]) && x[["testType"]] != designObj[["testType"]])
designObj[["testType"]] <- x[["testType"]]
testType <- designObj[["testType"]]
analysisName <- getNameTestType("testType"=testType, "parameterName"=names(designObj[["parameter"]]))
alternativeName <- getNameAlternative("alternative"=designObj[["alternative"]],
"testType"=testType, "h0"=designObj[["h0"]])
cat("\n")
cat(strwrap(analysisName, prefix = prefix), sep = "\n")
cat("\n")
cat("data: ", x[["dataName"]], ". ", sep="")
nObs <- x[["n"]]
if (!is.null(nObs)) {
out <- character()
out <- c(out, paste(names(nObs), "=", format(nObs, digits = max(1L, digits - 2L))))
cat(paste(out, collapse = ", "), sep="\n")
}
estimate <- x[["estimate"]]
if (!is.null(estimate)) {
out <- character()
out <- c(out, paste(names(estimate), "=", format(estimate, digits = max(1L, digits - 2L))))
cat(paste0("estimates: ", paste(out, collapse = ", "), sep="\n"))
}
ciValue <- x[["ciValue"]]
confSeq <- x[["confSeq"]]
if (!is.null(confSeq) && !is.null(ciValue)) {
cat(format(100*(ciValue)), " percent confidence sequence:\n",
" ", paste(format(x[["confSeq"]][1:2], digits = digits),
collapse = " "), "\n", sep = "")
}
cat("\n")
statValue <- x[["statistic"]]
parameter <- designObj[["parameter"]]
eValue <- x[["eValue"]]
alphaString <- format(designObj[["alpha"]], digits = max(1L, digits - 2L))
eValueString <- format(eValue, digits = max(1L, digits - 2L))
eThresholdString <- format(1/designObj[["alpha"]], digits = max(1L, digits - 2L))
out <- character()
if (!is.null(statValue))
out <- c(out, paste(names(statValue), "=", format(statValue, digits = max(1L, digits - 2L))))
out <- c(out, paste(names(parameter), "=", format(parameter, digits = max(1L, digits - 2L))))
cat(paste0("test: ", paste(out, collapse = ", "), sep="\n"))
cat("e-value =", eValueString, "> 1/alpha =", eThresholdString, ":",
eValue > 1/designObj[["alpha"]])
cat("\n")
cat("alternative hypothesis:", alternativeName, "\n")
cat("\n")
cat("design: ")
if (designObj[["pilot"]]) {
cat("the pilot test is based on an exploratory alpha =", alphaString)
cat("\n")
} else {
cat("the test was designed with alpha =", alphaString)
cat("\n")
nPlan <- designObj[["nPlan"]]
if (!is.null(nPlan)) {
out <- paste(names(nPlan), "=", nPlan)
cat(paste0("for experiments with ", paste(out, collapse = ", "), sep="\n"))
}
betaValue <- designObj[["beta"]]
if (!is.null(betaValue)) {
betaString <- format(designObj[["beta"]], digits = max(1L, digits - 2L))
powerString <- format(1-designObj[["beta"]], digits = max(1L, digits - 2L))
cat("to guarantee a power = ", powerString,
" (beta = ", betaString, ")", sep="")
cat("\n")
}
esMin <- designObj[["esMin"]]
if (!is.null(esMin)) {
out <- paste0("minimal relevant ", names(esMin), " = ", format(esMin, digits = max(1L, digits - 2L)),
" (", designObj[["alternative"]], ")")
cat("for", out, "\n")
}
}
}
print.safeDesign <- function(x, digits = getOption("digits"), prefix = "\t", ...) {
designObj <- x
testType <- designObj[["testType"]]
parameterName <- names(designObj[["parameter"]])
note <- designObj[["note"]]
analysisName <- paste(getNameTestType("testType"=testType, "parameterName"=parameterName), "Design")
cat("\n")
cat(strwrap(analysisName, prefix = prefix), sep = "\n")
cat("\n")
designObj[["decision rule"]] <- 1/designObj[["alpha"]]
displayList <- list()
for (item in c("nPlan", "nEvents", "esMin", "alternative","alternativeRestriction", "beta", "parameter",
"alpha", "decision rule", "logImpliedTarget")) {
itemValue <- designObj[[item]]
itemValueString <- format(itemValue, digits=digits)
if (!is.null(itemValue)) {
if (item == "nPlan") {
nPlanTwoSe <- designObj[["nPlanTwoSe"]]
if (!is.null(nPlanTwoSe)) {
tempNeem <- names(designObj[["nPlan"]])
for (i in seq_along(itemValue)) {
if (i==1) {
itemValueString <- paste0(format(itemValue[i], digits=digits), "\U00B1",
format(nPlanTwoSe[i]))
} else {
itemValueString <- paste(itemValueString,
paste0(format(itemValue[i], digits=digits), "\U00B1",
format(nPlanTwoSe[i])),
sep=", ")
}
}
tempNeem <- paste0(names(designObj[["nPlan"]]), "\U00B1", "2se")
displayList[[paste(tempNeem, collapse=", ")]] <- itemValueString
} else {
tempNeem <- names(designObj[["nPlan"]])
displayList[[paste(tempNeem, collapse=", ")]] <- itemValue
}
} else if (item == "nEvents") {
nEventsTwoSe <- designObj[["nEventsTwoSe"]]
tempNeem <- names(designObj[["nEvents"]])
if (!is.null(nEventsTwoSe)) {
tempNeem <- paste0(tempNeem, "\U00B1", "2se")
itemValueString <- paste0(format(itemValue, digits=digits), "\U00B1",
format(nEventsTwoSe))
} else {
itemValueString <- paste0(format(itemValue, digits=digits))
}
displayList[[paste(tempNeem, collapse=", ")]] <- itemValueString
} else if (item=="beta") {
betaTwoSe <- designObj[["betaTwoSe"]]
itemValueString <- format(1-itemValue, digits=digits)
if (!is.null(betaTwoSe)) {
displayList[[paste0("power: (1 - beta)", "\U00B1", "2se")]] <-
paste0(itemValueString, "\U00B1",format(betaTwoSe, digits=digits))
} else {
displayList[["power: 1 - beta"]] <- itemValueString
}
} else if (item=="parameter") {
displayList[[paste("parameter:", names(designObj[["parameter"]]))]] <- itemValueString
} else if (item=="decision rule") {
displayList[["decision rule: e-value > 1/alpha"]] <- itemValueString
} else if (item=="logImpliedTarget") {
tempNeem <- "log(implied target)"
logImpliedTargetTwoSe <- designObj[["logImpliedTargetTwoSe"]]
if (!is.null(logImpliedTargetTwoSe)) {
tempNeem <- paste0(tempNeem, "\U00B1", "2se")
itemValueString <- paste0(itemValueString, "\U00B1",
format(logImpliedTargetTwoSe, digits=digits))
}
displayList[[tempNeem]] <- itemValueString
} else if (item=="esMin") {
displayList[[paste("minimal", names(itemValue))]] <- itemValueString
} else if (item == "alternativeRestriction"){
displayList[["alternative restriction"]] <- itemValueString
} else {
displayList[[item]] <- itemValueString
}
}
}
cat(paste(format(names(displayList), width = 20L, justify = "right"),
format(displayList, digits = digits), sep = " = "), sep = "\n")
someTime <- designObj[["timeStamp"]]
if (!is.null(someTime)) {
cat("\n")
cat(paste("Timestamp:", format(someTime, usetz = TRUE)))
}
if (!is.null(note)) {
cat("\n")
nNotes <- length(note)
if (nNotes == 1) {
cat("\n", "Note: ", note, "\n", sep = "")
} else {
for (i in 1:nNotes) {
cat("\n", "Note ", i, ": ", note[i], "\n", sep = "")
}
}
}
}
print.safeTSim <- function(x, ...) {
analysisName <- getNameTestType("testType" = x[["testType"]], "parameterName"=names(x[["parameter"]]))
if(!is.null(x[["safeSim"]])) {
cat("\n")
cat(" Simulations for", analysisName, "\n")
cat("\n")
}
cat("Based on nSim =", x[["nsim"]], "and ")
cat("if the true effect size is \n")
cat(" deltaTrue =", x[["deltaTrue"]])
cat("\n")
cat("then the safe test optimised to detect an effect size of at least: \n")
cat(" deltaMin =", x[["esMin"]])
cat("\n")
cat("with tolerable type I error rate of ")
cat("\n")
cat(" alpha =", x[["alpha"]], "and power: 1-beta =", 1-x[["beta"]])
cat("\n")
if (length(x[["nPlan"]])==1) {
cat("for experiments with planned sample size: \n")
cat(" n1Plan =", x[["nPlan"]])
} else {
cat("For experiments with planned sample sizes: \n")
cat(" n1Plan =", x[["nPlan"]][1], "and n2Plan =", x[["nPlan"]][2])
}
cat("\n")
cat("\n")
cat("Is estimated to have a null rejection rate of")
cat("\n")
cat(" powerAtNPlan =", x[["safeSim"]][["powerAtN1Plan"]])
cat("\n")
cat("at the planned sample sizes.")
cat("\n")
freqPowerAtN1Plan <- x[["freqSim"]][["powerAtN1Plan"]]
if (!is.null(freqPowerAtN1Plan)) {
cat("For the p-value test: freqPowerAtNPlan =", freqPowerAtN1Plan)
cat("\n")
}
cat("\n")
cat("Is estimated to have a null rejection rate of ")
cat("\n")
cat(" powerOptioStop =", x[["safeSim"]][["powerOptioStop"]])
cat("\n")
cat("under optional stopping, and the average stopping time is:")
cat("\n")
if (length(x[["nPlan"]]==1)) {
cat(" n1Mean =", x[["safeSim"]][["nMean"]])
} else {
cat(" n1Mean =", x[["safeSim"]][["nMean"]], "and n2Mean =", x[["ratio"]]*x[["safeSim"]][["nMean"]])
}
cat("\n")
freqPowerOptioStop <- x[["freqSim"]][["powerOptioStop"]]
if (!is.null(freqPowerOptioStop)) {
cat("For the p-value test: freqPowerOptioStop =", freqPowerOptioStop)
cat("\n")
}
} |
context("Test error handling of epiobs")
form <- dummy ~ 1 + cov
test_that("Wrong class for formula is caught", {
expect_error(obs <- epiobs(formula = "dummy", i2o = 1), regexp = "must have class")
})
test_that("i2o is a non-negative numeric vector", {
expect_error(obs <- epiobs(formula = form, i2o = 1), NA)
expect_warning(obs <- epiobs(formula = form, i2o = numeric()), regexp = "sum")
expect_warning(obs <- epiobs(formula = form, i2o = c(1,1,1)), regexp = "sum")
expect_warning(obs <- epiobs(formula = form, i2o = 0), regexp = "sum")
expect_error(obs <- epiobs(formula = form, i2o = "dummy"), regexp = "numeric")
expect_error(obs <- epiobs(formula = form, i2o = -1), regexp = "non-negative")
})
test_that("family and link are scalar characters in required set", {
expect_error(obs <- epiobs(formula = form, family = "normal", i2o = 1), NA)
expect_error(obs <- epiobs(formula = form, family = na.action, i2o = 1), regexp = "character")
expect_error(obs <- epiobs(formula = form, family = c("normal", "normal"), i2o = 1), regexp = "scalar")
expect_error(obs <- epiobs(formula = form, family = "dummy", i2o = 1), regexp = "neg_binom")
expect_error(obs <- epiobs(formula = form, link = "identity", i2o = 1), NA)
expect_error(obs <- epiobs(formula = form, link = na.action, i2o = 1), regexp = "link")
expect_error(obs <- epiobs(formula = form, link = c("identity", "identity"), i2o = 1), regexp = "scalar")
expect_error(obs <- epiobs(formula = form, link = "dummy", i2o = 1), regexp = "logit")
})
test_that("center handled correctly", {
expect_error(obs <- epiobs(formula = form, center = TRUE, i2o = 1), NA)
expect_error(obs <- epiobs(formula = form, center = 1, i2o = 1), regexp = "logical")
expect_error(obs <- epiobs(formula = form, center = c(TRUE,TRUE), i2o = 1), regexp = "scalar")
})
test_that("prior functions require call to rstanarm prior", {
expect_error(obs <- epiobs(formula = form, i2o = 1, prior = "dummy"), regexp = "rstanarm prior")
expect_error(obs <- epiobs(formula = form, i2o = 1, prior_intercept = "dummy"), regexp = "rstanarm prior")
expect_error(obs <- epiobs(formula = form, i2o = 1, prior_aux = "dummy"), regexp = "rstanarm prior")
})
test_that("priors must be in restricted families", {
expect_error(obs <- epiobs(formula = form, i2o = 1, prior = rstanarm::cauchy()), regexp = "normal")
expect_error(obs <- epiobs(formula = form, i2o = 1, prior_intercept = rstanarm::cauchy()), regexp = "normal")
expect_error(obs <- epiobs(formula = form, i2o = 1, prior_aux = rstanarm::lasso()), regexp = "normal")
}) |
teamERAcrossOvers <- function(match,t1,t2,plot=1) {
team=ball=totalRuns=total=str_extract=type=ER=opposition=NULL
ggplotly=NULL
a <-filter(match,team==t1)
a1 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 0.1, 5.9))
a2 <- select(a1,team,totalRuns)
a3 <- a2 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
a3$ER=ifelse(dim(a3)[1]==0, 0,a3$total/a3$count * 6)
if(dim(a3)[1]!=0){
a3$type="1-Power Play"
a3$opposition=t2
}
b1 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 6.1, 15.9))
b2 <- select(b1,team,totalRuns)
b3 <- b2 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
b3$ER=ifelse(dim(b3)[1]==0, 0,b3$total/b3$count * 6)
if(dim(b3)[1]!=0){
b3$type="2-Middle overs"
b3$opposition=t2
}
c1 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 16.1, 20.0))
c2 <- select(c1,team,totalRuns)
c3 <- c2 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
c3$ER=ifelse(dim(c3)[1]==0, 0,c3$total/c3$count * 6)
if(dim(c3)[1]!=0){
c3$type="3-Death overs"
c3$opposition=t2
}
a <-filter(match,team==t2)
a11 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 0.1, 5.9))
a21 <- select(a11,team,totalRuns)
a31 <- a21 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
a31$ER=ifelse(dim(a31)[1]==0, 0,a31$total/a31$count * 6)
if(dim(a31)[1]!=0){
a31$type="1-Power Play"
a31$opposition=t1
}
b11 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 6.1, 15.9))
b21 <- select(b11,team,totalRuns)
b31 <- b21 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
b31$ER=ifelse(dim(b31)[1]==0, 0,b31$total/b31$count * 6)
if(dim(b31)[1]!=0){
b31$type="2-Middle overs"
b31$opposition=t1
}
c11 <- a %>% filter(between(as.numeric(str_extract(ball, "\\d+(\\.\\d+)?$")), 16.1, 20.0))
c21 <- select(c11,team,totalRuns)
c31 <- c21 %>% group_by(team) %>% summarise(total=sum(totalRuns),count=n())
c31$ER=ifelse(dim(c31)[1]==0, 0,c31$total/c31$count * 6)
if(dim(c31)[1]!=0){
c31$type="3-Death overs"
c31$opposition=t1
}
m=rbind(a3,b3,c3,a31,b31,c31)
plot.title= paste("Economy rate across 20 overs of ",t1, "and", t2, sep=" ")
if(plot ==1){
ggplot(m,aes(x=type, y=ER, fill=opposition)) +
geom_bar(stat="identity", position = "dodge") +
ggtitle(bquote(atop(.(plot.title),
atop(italic("Data source:http://cricsheet.org/"),""))))
}else {
g <- ggplot(m,aes(x=type, y=ER, fill=opposition)) +
geom_bar(stat="identity", position = "dodge") +
ggtitle(plot.title)
ggplotly(g)
}
} |
momentuHierHMM <- function(m)
{
if(!is.momentuHMM(m) || is.null(m$conditions$hierStates) || is.null(m$conditions$hierDist))
stop("Can't construct momentuHierHMM object: fields are missing")
obj <- m
class(obj) <- append(c("momentuHierHMM","hierarchical"),class(obj))
return(obj)
}
is.momentuHierHMM <- function(x)
inherits(x,"momentuHierHMM") |
expected <- eval(parse(text="c(17, 289, 4913, 83521, 1419857, 24137569, 410338673, 6975757441, 118587876497, 2015993900449, 34271896307633, 582622237229761, 9904578032905936, 168377826559400928, 2862423051509815808, 48661191875666870272, 8.27240261886337e+20, 1.40630844520677e+22, 2.39072435685151e+23, 4.06423140664757e+24, 6.90919339130087e+25, 1.17456287652115e+27, 1.99675689008595e+28, 3.39448671314612e+29, 5.7706274123484e+30, 9.81006660099228e+31, 1.66771132216869e+33, 2.83510924768677e+34, 4.81968572106751e+35, 8.19346572581477e+36, 1.39288917338851e+38, 2.36791159476047e+39, 4.02544971109279e+40, 6.84326450885775e+41, 1.16335496650582e+43, 1.97770344305989e+44, 3.36209585320181e+45, 5.71556295044308e+46, 9.71645701575324e+47, 1.65179769267805e+49, 2.80805607755269e+50, 4.77369533183957e+51, 8.11528206412726e+52, 1.37959795090163e+54, 2.34531651653278e+55, 3.98703807810572e+56, 6.77796473277973e+57, 1.15225400457255e+59, 1.95883180777334e+60, 3.33001407321468e+61, 5.66102392446496e+62, 9.62374067159043e+63, 1.63603591417037e+65, 2.78126105408963e+66, 4.72814379195238e+67, 8.03784444631904e+68, 1.36643355587424e+70, 2.3229370449862e+71, 3.94899297647655e+72, 6.71328806001013e+73, 1.14125897020172e+75, 1.94014024934293e+76, 3.29823842388298e+77, 5.60700532060106e+78, 9.5319090450218e+79, 1.62042453765371e+81, 2.7547217140113e+82, 4.68302691381921e+83, 7.96114575349266e+84, 1.35339477809375e+86, 2.30077112275938e+87, 3.91131090869094e+88, 6.6492285447746e+89, 1.13036885261168e+91, 1.92162704943986e+92, 3.26676598404776e+93, 5.5535021728812e+94, 9.44095369389803e+95, 1.60496212796267e+97, 2.72843561753653e+98, 4.6383405498121e+99, 7.88517893468058e+100, 1.3404804188957e+102, 2.27881671212269e+103, 3.87398841060857e+104, 6.58578029803456e+105, 1.11958265066588e+107, 1.90329050613199e+108, 3.23559386042438e+109, 5.50050956272145e+110, 9.35086625662646e+111, 1.5896472636265e+113, 2.70240034816505e+114, 4.59408059188058e+115, 7.80993700619699e+116, 1.32768929105349e+118, 2.25707179479093e+119, 3.83702205114458e+120, 6.52293748694579e+121, 1.10889937278078e+123, 5.5535021728812e+94, 3.33001407321468e+61, 1.95883180777334e+60, 1.15225400457255e+59, 6.77796473277973e+57, 3.98703807810572e+56, 2.34531651653278e+55, 1.37959795090163e+54, 8.11528206412726e+52, 4.77369533183957e+51, 2.80805607755269e+50, 1.65179769267805e+49, 2015993900449)"));
test(id=0, code={
argv <- eval(parse(text="list(17L, c(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L, 11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L, 20L, 21L, 22L, 23L, 24L, 25L, 26L, 27L, 28L, 29L, 30L, 31L, 32L, 33L, 34L, 35L, 36L, 37L, 38L, 39L, 40L, 41L, 42L, 43L, 44L, 45L, 46L, 47L, 48L, 49L, 50L, 51L, 52L, 53L, 54L, 55L, 56L, 57L, 58L, 59L, 60L, 61L, 62L, 63L, 64L, 65L, 66L, 67L, 68L, 69L, 70L, 71L, 72L, 73L, 74L, 75L, 76L, 77L, 78L, 79L, 80L, 81L, 82L, 83L, 84L, 85L, 86L, 87L, 88L, 89L, 90L, 91L, 92L, 93L, 94L, 95L, 96L, 97L, 98L, 99L, 100L, 77L, 50L, 49L, 48L, 47L, 46L, 45L, 44L, 43L, 42L, 41L, 40L, 10L))"));
do.call(`^`, argv);
}, o=expected); |
met2CF.FACE <- function(in.path,in.prefix,outfolder,start_date,end_date,input.id,site,format, ...) {
files <- dir(in.path, in.prefix)
file <- files[grep(pattern = "*.nc", files)]
if (!(length(file) == 1)) {
return(NULL)
}
f <- gsub("//","/",file.path(in.path, file))
for (treatment in c("a", "e")) {
t.outfolder <- paste(unlist(strsplit(outfolder, "FACE")), collapse = paste0("FACE_", treatment))
if (!file.exists(t.outfolder)) {
dir.create(t.outfolder)
}
f.cf <- file.path(t.outfolder, file)
if (!file.exists(f.cf)) {
nc1 <- ncdf4::nc_open(f, write = TRUE)
time_units <- paste0("hours/2", unlist(strsplit(nc1$var$TIMEstp$units, "timesteps"))[2])
time <- ncdf4::ncdim_def(name = "time", units = time_units, vals = nc1$dim$tstep$vals)
lon <- ncdf4::ncdim_def("longitude", "degrees_east", as.numeric(site$lon))
lat <- ncdf4::ncdim_def("latitude", "degrees_north", as.numeric(site$lat))
dim <- list(lat, lon, time)
wd <- 0
ws <- ncdf4::ncvar_get(nc = nc1, varid = "Wind")
ew <- ws * cos(wd * (pi / 180))
nw <- ws * sin(wd * (pi / 180))
var <- ncdf4::ncvar_def(name = "eastward_wind", units = "m/s", dim = dim, missval = -6999, verbose = FALSE)
nc2 <- ncdf4::nc_create(filename = f.cf, vars = var, verbose = FALSE)
ncdf4::ncvar_put(nc = nc2, varid = "eastward_wind", vals = ew)
var <- ncdf4::ncvar_def(name = "northward_wind", units = "m/s", dim = dim, missval = -6999, verbose = FALSE)
nc2 <- ncdf4::ncvar_add(nc = nc2, v = var, verbose = FALSE)
ncdf4::ncvar_put(nc = nc2, varid = "northward_wind", vals = nw)
vars.used.index.all <- setdiff(seq_along(format$vars$variable_id), format$time.row)
nt <- setdiff(c("a","e"), treatment)
exclude.treatment <- paste0(nt,c("CO2","O3"))
vars.used.index <- vars.used.index.all[!(format$vars$input_name[vars.used.index.all] %in% exclude.treatment)]
derp <- grep(paste0(treatment,"CO2"), format$vars$input_name[vars.used.index])
if(length(derp) >1){
for(i in 2:length(derp)){
vars.used.index <- vars.used.index[-derp[i]]
}
}
derp <- grep(paste0(treatment,"O3"), format$vars$input_name[vars.used.index])
if(length(derp) >1){
for(i in 2:length(derp)){
vars.used.index <- vars.used.index[-derp[i]]
}
}
vars_used <- format$vars[vars.used.index, ]
for (i in seq_len(nrow(vars_used))) {
vals <- ncdf4::ncvar_get(nc1, vars_used$input_name[i])
if (vars_used$input_units[i] == vars_used$pecan_units[i]) {
print("match")
} else {
u1 <- vars_used$input_units[i]
u2 <- vars_used$pecan_units[i]
if (udunits2::ud.are.convertible(u1, u2)) {
print(sprintf("convert %s %s to %s %s",
vars_used$input_name[i], vars_used$input_units[i],
vars_used$pecan_name[i], vars_used$pecan_units[i]))
vals <- udunits2::ud.convert(vals, u1, u2)
} else if (PEcAn.utils::misc.are.convertible(u1, u2)) {
print(sprintf("convert %s %s to %s %s",
vars_used$input_name[i], u1,
vars_used$pecan_name[i], u2))
vals <- PEcAn.utils::misc.convert(vals, u1, u2)
} else {
PEcAn.logger::logger.error("Units cannot be converted")
}
}
var <- ncdf4::ncvar_def(name = vars_used$pecan_name[i],
units = vars_used$pecan_units[i],
dim = dim, verbose = FALSE)
nc2 <- ncdf4::ncvar_add(nc = nc2, v = var, verbose = FALSE)
ncdf4::ncvar_put(nc = nc2, varid = vars_used$pecan_name[i], vals = vals)
att <- ncdf4::ncatt_get(nc1,vars_used$input_name[i], "long_name")
if (att$hasatt) {
val <- att$value
ncdf4::ncatt_put(nc = nc2, varid = vars_used$pecan_name[i], attname = "long_name", attval = val)
}
}
ncdf4::nc_close(nc2)
year <- ncdf4::ncvar_get(nc1, "YEAR")
y <- year[1]:year[length(year)]
n <- length(y)
t <- -1
for (j in seq_len(n)) {
new.file <- file.path(t.outfolder, paste(in.prefix, y[j], "nc", sep = "."))
s <- t + 1
e <- t + sum(year == y[j])
if (!file.exists(new.file) && file.exists(f.cf)) {
print(s)
print(e)
system(paste0("ncks -d time,", s, ",", e, " ", f.cf, " ", new.file))
}
t <- e
}
print(paste("Treatment ", treatment, " done"))
} else {
print(paste("Treatment ", treatment, " aleady done"))
}
file.remove(f.cf)
}
} |
topline <- function(df, variable, weight, remove = c(""), n = TRUE,
pct = TRUE, valid_pct = TRUE, cum_pct = TRUE){
d.output <- df %>%
mutate({{variable}} := to_factor({{variable}}, sort_levels = "values"),
{{variable}} := forcats::fct_explicit_na({{variable}})) %>%
mutate(total = sum({{weight}}),
valid.total = sum(({{weight}})[{{variable}} != "(Missing)"])) %>%
group_by({{variable}}) %>%
summarise(pct = (sum({{weight}})/first(total))*100,
valid.pct = (sum({{weight}})/first(valid.total)*100),
n = sum({{weight}})) %>%
ungroup() %>%
mutate(cum = cumsum(valid.pct),
valid.pct = replace(valid.pct, {{variable}} == "(Missing)", NA),
cum = replace(cum, {{variable}} == "(Missing)", NA)) %>%
select(Response = {{variable}}, Frequency = n, Percent = pct,
`Valid Percent` = valid.pct, `Cumulative Percent` = cum) %>%
filter(! str_to_upper(Response) %in% str_to_upper(remove))
if(valid_pct == FALSE){
d.output <- select(d.output, -`Valid Percent`)
}
if(cum_pct == FALSE){
d.output <- select(d.output, -`Cumulative Percent`)
}
if(n == FALSE){
d.output <- select(d.output, -Frequency)
}
if(pct == FALSE){
d.output <- select(d.output, -Percent)
}
d.output %>%
as_tibble()
} |
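# Hypothetical usage sketch (added; 'svy', 'q1' and 'wts' are placeholders, not
# from the original source): for a labelled survey data frame 'svy' with a
# question column 'q1' and a weight column 'wts',
# topline(svy, variable = q1, weight = wts)
# would return a tibble with Response, Frequency, Percent, Valid Percent and
# Cumulative Percent columns, with "(Missing)" excluded from the valid and
# cumulative percentages.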
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
library(dplyr)
library(naniar)
airquality %>%
impute_below_at(vars(Ozone)) %>%
select(Ozone, Solar.R) %>%
head()
impute_mean(oceanbuoys$air_temp_c) %>% head()
impute_mean_at(oceanbuoys, .vars = vars(air_temp_c)) %>% head()
impute_mean_if(oceanbuoys, .predicate = is.integer) %>% head()
impute_mean_all(oceanbuoys) %>% head()
library(simputation)
ocean_imp <- oceanbuoys %>%
bind_shadow() %>%
impute_lm(air_temp_c ~ wind_ew + wind_ns) %>%
impute_lm(humidity ~ wind_ew + wind_ns) %>%
impute_lm(sea_temp_c ~ wind_ew + wind_ns) %>%
add_label_shadow()
library(ggplot2)
ggplot(ocean_imp,
aes(x = air_temp_c,
y = humidity,
color = any_missing)) +
geom_point() +
scale_color_brewer(palette = "Dark2") +
theme(legend.position = "bottom")
ggplot(ocean_imp,
aes(x = air_temp_c,
fill = any_missing)) +
geom_density(alpha = 0.3) +
scale_fill_brewer(palette = "Dark2") +
theme(legend.position = "bottom")
ggplot(ocean_imp,
aes(x = humidity,
fill = any_missing)) +
geom_density(alpha = 0.3) +
scale_fill_brewer(palette = "Dark2") +
theme(legend.position = "bottom")
ocean_imp %>%
group_by(any_missing) %>%
summarise_at(.vars = vars(air_temp_c),
.funs = funs(min, mean, median, max, .args = list(na.rm = TRUE)))
ocean_imp_yr <- oceanbuoys %>%
bind_shadow() %>%
impute_lm(air_temp_c ~ wind_ew + wind_ns + year + longitude + latitude) %>%
impute_lm(humidity ~ wind_ew + wind_ns + year + longitude + latitude) %>%
impute_lm(sea_temp_c ~ wind_ew + wind_ns + year + longitude + latitude) %>%
add_label_shadow()
ggplot(ocean_imp_yr,
aes(x = air_temp_c,
y = humidity,
color = any_missing)) +
geom_point() +
scale_color_brewer(palette = "Dark2") +
theme(legend.position = "bottom")
library(Hmisc)
aq_imp <- aregImpute(~Ozone + Temp + Wind + Solar.R,
n.impute = 1,
type = "pmm",
data = airquality)
aq_imp
aq_nab <- nabular(airquality) %>% add_label_shadow()
aq_nab$Ozone[is.na(aq_nab$Ozone)] <- aq_imp$imputed$Ozone
aq_nab$Solar.R[is.na(aq_nab$Solar.R)] <- aq_imp$imputed$Solar.R
ggplot(aq_nab,
aes(x = Ozone,
y = Solar.R,
colour = any_missing)) +
geom_point() |
context('plot_imp')
test_that('plot_imp'
,{
df = mtcars2[, ! names(mtcars2) %in% 'ids' ]
set.seed(1)
train = caret::train( disp ~ .
, df
, method = 'rf'
, trControl = caret::trainControl( method = 'none' )
, importance = TRUE )
p = alluvial_model_response_caret(train, df, degree = 3)
p_imp = plot_imp(p, df)
expect_doppelganger('plot_imp', p_imp)
})
test_that('add_importance_plot'
,{
df = mtcars2[, ! names(mtcars2) %in% 'ids' ]
train = caret::train( disp ~ .
, df
, method = 'rf'
, trControl = caret::trainControl( method = 'none' )
, importance = TRUE )
pred_train = caret::predict.train(train, df)
expect_warning(p <- alluvial_model_response_caret(train, df, degree = 4, pred_train = pred_train))
p_grid = add_marginal_histograms(p, data_input = df, plot = F)
expect_true( 'gtable' %in% class(p_grid) )
p_grid = add_imp_plot(p_grid, p, data_input = df, plot = F)
p_grid = add_imp_plot(p, data_input = df, plot = F)
}) |
context("v2.0 token other")
tenant <- Sys.getenv("AZ_TEST_TENANT_ID")
app <- Sys.getenv("AZ_TEST_APP_ID")
username <- Sys.getenv("AZ_TEST_USERNAME")
password <- Sys.getenv("AZ_TEST_PASSWORD")
native_app <- Sys.getenv("AZ_TEST_NATIVE_APP_ID")
cert_app <- Sys.getenv("AZ_TEST_CERT_APP_ID")
cert_file <- Sys.getenv("AZ_TEST_CERT_FILE")
web_app <- Sys.getenv("AZ_TEST_WEB_APP_ID")
web_app_pwd <- Sys.getenv("AZ_TEST_WEB_APP_PASSWORD")
userpwd <- Sys.getenv("AZ_TEST_USERPWD")
admin_username <- Sys.getenv("AZ_TEST_ADMINUSERNAME")
if(tenant == "" || app == "" || username == "" || password == "" || native_app == "" ||
cert_app == "" || cert_file == "" || web_app == "" || web_app_pwd == "" || userpwd == "")
skip("Authentication tests skipped: ARM credentials not set")
aut_hash <- Sys.getenv("AZ_TEST_AUT_HASH2")
ccd_hash <- Sys.getenv("AZ_TEST_CCD_HASH2")
dev_hash <- Sys.getenv("AZ_TEST_DEV_HASH2")
if(aut_hash == "" || ccd_hash == "" || dev_hash == "")
skip("Authentication tests skipped: token hashes not set")
if(system.file(package="httpuv") == "")
skip("Authentication tests skipped: httpuv must be installed")
if(!interactive())
skip("Authentication tests skipped: must be an interactive session")
suppressWarnings(file.remove(dir(AzureR_dir(), full.names=TRUE)))
test_that("Providing optional args works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
aut_tok <- get_azure_token(res, tenant, native_app, username=admin_username, auth_type="authorization_code",
version=2)
expect_true(is_azure_token(aut_tok))
expect_identical(resbase, decode_jwt(aut_tok)$payload$aud)
expect_null(
delete_azure_token(res, tenant, native_app, username=admin_username, auth_type="authorization_code", version=2,
confirm=FALSE))
})
test_that("Providing multiple scopes works",
{
scopes <- c(paste0("https://graph.microsoft.com/",
c("User.Read.All", "Directory.Read.All", "Directory.AccessAsUser.All")),
"offline_access")
aut_tok <- get_azure_token(scopes, tenant, native_app, auth_type="authorization_code", version=2)
expect_true(is_azure_token(aut_tok))
expect_identical("https://graph.microsoft.com", decode_jwt(aut_tok)$payload$aud)
})
test_that("Dubious requests handled gracefully",
{
badres <- "resource"
expect_error(get_azure_token(badres, tenant, app, password=password, version=2))
nopath <- "https://management.azure.com"
expect_warning(tok <- get_azure_token(nopath, tenant, app, password=password, version=2))
expect_equal(tok$scope, "https://management.azure.com/.default")
})
test_that("Providing path in aad_host works",
{
res <- "https://management.azure.com/.default"
aad_url <- file.path("https://login.microsoftonline.com", normalize_tenant(tenant), "oauth2/v2.0")
resbase <- "https://management.azure.com"
tok <- get_azure_token(res, tenant, app, password=password, aad_host=aad_url, version=2)
expect_true(is_azure_token(tok))
expect_identical(resbase, decode_jwt(tok)$payload$aud)
})
test_that("On-behalf-of flow works",
{
res <- file.path(app, ".default")
res2 <- "offline_access"
tok0 <- get_azure_token(c(res, res2), tenant, native_app, version=2)
expect_true(is_azure_token(tok0))
name0 <- decode_jwt(tok0$credentials$access_token)$payload$name
expect_type(name0, "character")
tok1 <- get_azure_token("https://graph.microsoft.com/.default", tenant, app, password, on_behalf_of=tok0, version=2)
expect_true(is_azure_token(tok1))
expect_identical("https://graph.microsoft.com", decode_jwt(tok1)$payload$aud)
name1 <- decode_jwt(tok1$credentials$access_token)$payload$name
expect_identical(name0, name1)
expect_silent(tok1$refresh())
})
test_that("Certificate authentication works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
tok <- get_azure_token(res, tenant, cert_app, certificate=cert_file, version=2)
expect_true(is_azure_token(tok))
expect_identical(resbase, decode_jwt(tok)$payload$aud)
})
test_that("Standalone auth works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
auth_uri <- build_authorization_uri(res, tenant, native_app, version=2)
code <- AzureAuth:::listen_for_authcode(auth_uri, "http://localhost:1410")
tok <- get_azure_token(res, tenant, native_app, version=2, auth_code=code, use_cache=FALSE)
expect_identical(tok$hash(), aut_hash)
expect_identical(resbase, decode_jwt(tok)$payload$aud)
creds <- get_device_creds(res, tenant, native_app, version=2)
cat(creds$message, "\n")
tok2 <- get_azure_token(res, tenant, native_app, auth_type="device_code", version=2, device_creds=creds,
use_cache=FALSE)
expect_identical(tok2$hash(), dev_hash)
expect_identical(resbase, decode_jwt(tok2)$payload$aud)
})
test_that("Webapp authentication works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
tok <- get_azure_token(res, tenant, web_app, password=web_app_pwd, auth_type="authorization_code", version=2)
expect_true(is_azure_token(tok))
expect_identical(resbase, decode_jwt(tok)$payload$aud)
tok2 <- get_azure_token(res, tenant, web_app, password=web_app_pwd, version=2)
expect_true(is_azure_token(tok2))
expect_identical(tok2$auth_type, "client_credentials")
expect_identical(resbase, decode_jwt(tok2)$payload$aud)
tok3 <- get_azure_token(res, tenant, web_app, password=web_app_pwd, username=admin_username,
auth_type="authorization_code", version=2)
expect_true(is_azure_token(tok3))
expect_identical(resbase, decode_jwt(tok3)$payload$aud)
expect_error(get_azure_token(res, tenant, web_app, version=2))
})
test_that("Resource owner grant works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
tok <- get_azure_token(res, tenant, native_app, password=userpwd, username=username, auth_type="resource_owner",
version=2)
expect_true(is_azure_token(tok))
expect_identical(resbase, decode_jwt(tok)$payload$aud)
})
test_that("Refreshing with changed resource works",
{
res <- "https://management.azure.com/.default"
resbase <- "https://management.azure.com"
res2 <- "offline_access"
tok <- get_azure_token(c(res, res2), tenant, native_app, version=2)
expect_identical(resbase, decode_jwt(tok)$payload$aud)
tok$scope[1] <- "https://graph.microsoft.com/.default"
tok$refresh()
expect_identical(decode_jwt(tok)$payload$aud, "https://graph.microsoft.com")
})
test_that("Consumers tenant works",
{
res <- "https://graph.microsoft.com/.default"
res2 <- "offline_access"
res3 <- "openid"
tok <- get_azure_token(c(res, res2, res3), "consumers", native_app, version=2)
expect_error(decode_jwt(tok))
expect_identical(decode_jwt(tok, "id")$payload$tid, "9188040d-6c67-4c5b-b112-36a304b66dad")
}) |
parse_table_reference <- function(expr, tidyverse, secure) {
expr <- extract_alias(expr)
expr <- remove_enclosing_parentheses(expr)
table_alias <- names(expr)
expr <- parse_expression(expr, tidyverse = tidyverse, secure = secure)
expr_parts <- strsplit(deparse(expr), "::")[[1]]
if (length(expr_parts) == 2) {
if (!all(vapply(
expr_parts,
is_one_valid_r_name,
TRUE
))) {
stop("Invalid name in FROM clause", call. = FALSE)
}
} else if (length(expr_parts) == 1) {
if (!is_one_valid_r_name(expr_parts)) {
stop("Invalid name in FROM clause", call. = FALSE)
}
} else {
stop("Invalid name in FROM clause", call. = FALSE)
}
output <- list(expr)
names(output) <- table_alias
output
} |
test_that("paper examples", {
y <- warpbreaks$breaks
X <- model.matrix(breaks ~ wool*tension, data=warpbreaks)
N <- 2e2
set.seed(143)
eps_vals <- c(rep(2e-1, 6), 2e-2)
fm1_hmc <- hmc(N, theta.init = c(rep(0, 6), 1),
epsilon = eps_vals, L = 20,
logPOSTERIOR = linear_posterior,
glogPOSTERIOR = g_linear_posterior,
varnames = c(colnames(X), "log_sigma_sq"),
param=list(y=y, X=X), chains=2,
parallel=FALSE)
c1_hmc <- as.vector(round(coef(fm1_hmc), 6))
test1 <- c(43.466057, -7.679181, -12.104186, -12.363732,
15.987310, 4.629290, 4.967581)
expect_equal(c1_hmc, test1)
p1_hmc <- as.vector(round(psrf(fm1_hmc), 6))
test1b <- c(0.999640, 0.997576, 1.004052, 1.008374, 1.033657, 1.003823,
0.998089)
expect_equal(p1_hmc, test1b)
birthwt2 <- MASS::birthwt
birthwt2$race2 <- factor(birthwt2$race, labels = c("white", "black", "other"))
birthwt2$ptd <- ifelse(birthwt2$ptl > 0, 1, 0)
birthwt2$ftv2 <- factor(ifelse(birthwt2$ftv > 2, 2, birthwt2$ftv),
labels = c("0", "1", "2+"))
X <- model.matrix(low ~ age + lwt + race2 + smoke + ptd + ht + ui + ftv2,
data = birthwt2)
y <- birthwt2$low
N <- 3e2
continuous_ind <- c(FALSE, TRUE, TRUE, rep(FALSE, 8))
eps_vals <- ifelse(continuous_ind, 1e-3, 5e-2)
set.seed(143)
fm2_hmc <- hmc(N, theta.init = rep(0, 11),
epsilon = eps_vals, L = 10,
logPOSTERIOR = logistic_posterior,
glogPOSTERIOR = g_logistic_posterior,
param=list(y=y, X=X),
varnames = colnames(X),
chains=2, parallel=FALSE)
c2_hmc <- as.vector(round(coef(fm2_hmc), 6))
test2 <- c(-0.864417, -0.003891, -0.007844, 0.913143, 0.161931,
0.270894, 0.720779, 0.486956, 0.200137, -0.575171,
0.066911)
expect_equal(c2_hmc, test2)
p2_hmc <- as.vector(round(psrf(fm2_hmc), 6))
test2b <- c(1.174999, 1.003712, 1.198086, 0.999307, 1.056735,
1.276597, 1.119958, 1.062370, 1.302114, 1.026340,
1.000074)
expect_equal(p2_hmc, test2b)
library(lme4)
data(Gdat)
Zi.lst <- split(rep(1, nrow(Gdat)), Gdat$Site)
Zi.lst <- lapply(Zi.lst, as.matrix)
Z <- Matrix::bdiag(Zi.lst)
Z <- as.matrix(Z)
X <- model.matrix(~ factor(year), data=Gdat)
X <- cbind(X, Gdat$prev)
colnames(X)[ncol(X)] <- "prev"
colnames(X) <- make.names(colnames(X))
colnames(X)[1] <- "intercept"
y <- Gdat$shells
p <- ncol(X)
N <- 1e2
initvals <- c(rep(0, 4),
rep(0, 10),
0)
eps_vals <- c(3e-2, 3e-2, 3e-2, 1e-3, rep(1e-1, 10), 3e-2)
set.seed(412)
fm3_hmc <- hmc(N = N, theta.init = initvals, epsilon = eps_vals, L = 10,
logPOSTERIOR = glmm_poisson_posterior,
glogPOSTERIOR = g_glmm_poisson_posterior,
varnames=c(colnames(X), paste0("u", 1:ncol(Z)), "xi"),
param=list(y = y, X=X, Z=Z, n=10, nuxi=1, Axi=25),
chains=2, parallel=FALSE)
c3_hmc <- as.vector(round(coef(fm3_hmc), 6))
test3 <- c(-0.144120, -0.567178, -0.216619, 0.020996, -0.911150,
-0.305638, -0.659170, 0.693371, -0.096723, 1.135807,
0.350674, -0.131506, 0.892042, -1.035583, -0.386174)
expect_equal(c3_hmc, test3)
p3_hmc <- as.vector(round(psrf(fm3_hmc), 6))
test3b <- c(1.019176, 0.994988, 1.006717, 1.002968, 0.995020,
0.996206, 1.014475, 0.999683, 1.002627, 1.001639,
1.050004, 1.032394, 1.042095, 1.028268, 1.002090)
expect_equal(p3_hmc, test3b)
}) |
train_model <- function(container,algorithm=c("SVM","SLDA","BOOSTING","BAGGING","RF","GLMNET","TREE","NNET"),
method="C-classification", cross=0, cost=100, kernel="radial",
maxitboost=100,
maxitglm=10^5,
size=1,maxitnnet=1000,MaxNWts=10000,rang=0.1,decay=5e-4,trace=FALSE,
ntree=200,
l1_regularizer=0.0,l2_regularizer=0.0,use_sgd=FALSE,set_heldout=0,verbose=FALSE,
...) {
gc()
if (algorithm=="SVM") {
model <- svm(x=container@training_matrix, y=container@training_codes, method=method, cross=cross, cost=cost, probability=TRUE, kernel=kernel)
} else if (algorithm=="SLDA") {
model <- slda(container.training_codes ~ ., data=data.frame(as.matrix(container@training_matrix),container@training_codes))
} else if (algorithm=="BOOSTING") {
model <- LogitBoost(xlearn=as.matrix(container@training_matrix), ylearn=container@training_codes, nIter=maxitboost)
} else if (algorithm=="BAGGING") {
model <- bagging(container.training_codes ~ ., data=data.frame(as.matrix(container@training_matrix),container@training_codes))
} else if (algorithm=="RF") {
model <- randomForest(x=as.matrix(container@training_matrix), y=container@training_codes, ntree=ntree)
} else if (algorithm=="GLMNET") {
training_matrix <- as(container@training_matrix,"sparseMatrix")
model <- glmnet(x=training_matrix, y=container@training_codes, family="multinomial", maxit=maxitglm)
} else if (algorithm=="TREE") {
model <- tree(container.training_codes ~ ., data=data.frame(as.matrix(container@training_matrix),container@training_codes))
} else if (algorithm=="NNET") {
model <- nnet(container.training_codes ~ ., data=data.frame(as.matrix(container@training_matrix),container@training_codes), size=size, maxit=maxitnnet, MaxNWts=MaxNWts, rang=rang, decay=decay, trace=trace)
} else {
stop("ERROR: Invalid algorithm specified. Type print_algorithms() for a list of available algorithms.")
}
gc()
return(model)
} |
setMethodS3("smoothWSA", "matrix", function(Y, x, w=NULL, kernel=gaussKernel, sd=100e3, na.rm=TRUE, ..., progress=TRUE, verbose=FALSE) {
K <- nrow(Y)
I <- ncol(Y)
if (length(x) != K) {
throw("Argument 'x' has different number of values that rows in 'Y': ",
length(x), " != ", K)
}
if (is.null(w)) {
w <- 1
} else if (is.matrix(w)) {
if (nrow(w) != K) {
throw("Argument 'w' has different number of rows than 'Y': ",
nrow(w), " != ", K)
}
if (ncol(w) != I) {
throw("Argument 'w' has different number of columns than 'Y': ",
ncol(w), " != ", I)
}
} else if (is.vector(w)) {
if (length(w) != K) {
throw("Argument 'w' has different number of values that rows in 'Y': ",
length(w), " != ", K)
}
}
if (any(w < 0))
throw("Argument 'w' contains negative weights.")
if (any(!is.finite(w)))
throw("Argument 'w' contains non-finite weights.")
verbose <- Arguments$getVerbose(verbose)
if (verbose) {
pushState(verbose)
on.exit(popState(verbose))
}
YR <- rowMedians(Y, na.rm=TRUE)
M <- log2(Y) - log2(YR)
if (na.rm) {
nas <- is.na(M)
dim(nas) <- c(K,I)
}
naValue <- NA_real_
theta <- matrix(naValue, nrow=K, ncol=I)
phi <- rep(naValue, times=K)
cat("Progress: ")
for (kk in seq_len(K)) {
if (progress && kk %% 100 == 0)
cat(kk, ", ", sep="")
wK <- kernel(x, mean=x[kk], sd=sd)
wM <- matrix(wK, nrow=K, ncol=I)
wK <- NULL
wM <- w*wM
if (na.rm)
wM[nas] <- 0
wMR <- rowSums(wM)
keep <- which(wMR > 0)
wMR <- NULL
if (length(keep) > 0) {
wM <- wM[keep,,drop=FALSE]
m <- M[keep,,drop=FALSE]
verbose && print(verbose, list(m=m, wM=wM))
wMs <- colSums(wM, na.rm=TRUE)
wM <- wM/wMs
theta[kk,] <- colSums(wM*m)
m <- wMs <- NULL
}
wM <- wMR <- keep <- NULL
}
cat(kk, "\n", sep="")
theta <- theta + log2(YR)
theta <- 2^theta
phi <- 2^phi
list(theta=theta, phi=phi)
}) |
defaults <- function(x, y) c(x, y[setdiff(names(y), names(x))])
unrowname <- function(x) {
if (is.data.frame(x)) {
attr(x, "row.names") <- .set_row_names(.row_names_info(x, 2L))
} else if (is.matrix(x)) {
dimnames(x)[1] <- list(NULL)
} else {
abort("Can only remove rownames from data.frame and matrix objects")
}
x
}
rename <- function(x, replace) {
current_names <- names(x)
old_names <- names(replace)
missing_names <- setdiff(old_names, current_names)
if (length(missing_names) > 0) {
replace <- replace[!old_names %in% missing_names]
old_names <- names(replace)
}
names(x)[match(old_names, current_names)] <- as.vector(replace)
x
}
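# Small illustration (added; inputs are arbitrary) of the two helpers above:
# defaults() keeps x and fills in entries only y provides, while rename()
# maps old names to new ones and silently ignores names not present in x.
defaults(list(a = 1), list(a = 2, b = 3)) # list(a = 1, b = 3)
rename(c(x = 1, y = 2), c(x = "a", z = "c")) # c(a = 1, y = 2)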
id_var <- function(x, drop = FALSE) {
if (length(x) == 0) {
id <- integer()
n = 0L
} else if (!is.null(attr(x, "n")) && !drop) {
return(x)
} else if (is.factor(x) && !drop) {
x <- addNA(x, ifany = TRUE)
id <- as.integer(x)
n <- length(levels(x))
} else {
levels <- sort(unique(x), na.last = TRUE)
id <- match(x, levels)
n <- max(id)
}
attr(id, "n") <- n
id
}
id <- function(.variables, drop = FALSE) {
nrows <- NULL
if (is.data.frame(.variables)) {
nrows <- nrow(.variables)
.variables <- unclass(.variables)
}
lengths <- vapply(.variables, length, integer(1))
.variables <- .variables[lengths != 0]
if (length(.variables) == 0) {
n <- nrows %||% 0L
id <- seq_len(n)
attr(id, "n") <- n
return(id)
}
if (length(.variables) == 1) {
return(id_var(.variables[[1]], drop = drop))
}
ids <- rev(lapply(.variables, id_var, drop = drop))
p <- length(ids)
ndistinct <- vapply(ids, attr, "n", FUN.VALUE = numeric(1), USE.NAMES = FALSE)
n <- prod(ndistinct)
if (n > 2^31) {
char_id <- do.call("paste", c(ids, sep = "\r"))
res <- match(char_id, unique(char_id))
}
else {
combs <- c(1, cumprod(ndistinct[-p]))
mat <- do.call("cbind", ids)
res <- c((mat - 1L) %*% combs + 1L)
}
if (drop) {
id_var(res, drop = TRUE)
}
else {
res <- as.integer(res)
attr(res, "n") <- n
res
}
}
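# Sketch of id() on a toy input (added for illustration): several grouping
# vectors collapse to one integer id per distinct combination, with the
# number of possible combinations stored in the "n" attribute.
grp <- id(list(g1 = c("a", "a", "b"), g2 = c(1, 2, 1)))
as.integer(grp) # 1 2 3
attr(grp, "n") # 4 (2 levels of g1 x 2 levels of g2)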
join_keys <- function(x, y, by) {
joint <- rbind_dfs(list(x[by], y[by]))
keys <- id(joint, drop = TRUE)
n_x <- nrow(x)
n_y <- nrow(y)
list(x = keys[seq_len(n_x)], y = keys[n_x + seq_len(n_y)],
n = attr(keys, "n"))
}
revalue <- function(x, replace) {
if (is.character(x)) {
replace <- replace[names(replace) %in% x]
if (length(replace) == 0) return(x)
x[match(names(replace), x)] <- replace
} else if (is.factor(x)) {
lev <- levels(x)
replace <- replace[names(replace) %in% lev]
if (length(replace) == 0) return(x)
lev[match(names(replace), lev)] <- replace
levels(x) <- lev
} else if (!is.null(x)) {
abort("x is not a factor or character vector")
}
x
}
round_any <- function(x, accuracy, f = round) {
if (!is.numeric(x)) abort("`x` must be numeric")
f(x/accuracy) * accuracy
}
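# Added illustration: round_any() rounds to an arbitrary step size with a
# chosen rounding function (values here are arbitrary).
round_any(137, 25) # 125
round_any(137, 25, f = ceiling) # 150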
rbind_dfs <- function(dfs) {
out <- list()
columns <- unique(unlist(lapply(dfs, names)))
nrows <- vapply(dfs, .row_names_info, integer(1), type = 2L)
total <- sum(nrows)
if (length(columns) == 0) return(new_data_frame(list(), total))
allocated <- rep(FALSE, length(columns))
names(allocated) <- columns
col_levels <- list()
for (df in dfs) {
new_columns <- intersect(names(df), columns[!allocated])
for (col in new_columns) {
if (is.factor(df[[col]])) {
all_factors <- all(vapply(dfs, function(df) {
val <- .subset2(df, col)
is.null(val) || is.factor(val)
}, logical(1)))
if (all_factors) {
col_levels[[col]] <- unique(unlist(lapply(dfs, function(df) levels(.subset2(df, col)))))
}
out[[col]] <- rep(NA_character_, total)
} else {
out[[col]] <- rep(.subset2(df, col)[1][NA], total)
}
}
allocated[new_columns] <- TRUE
if (all(allocated)) break
}
is_date <- lapply(out, inherits, 'Date')
is_time <- lapply(out, inherits, 'POSIXct')
pos <- c(cumsum(nrows) - nrows + 1)
for (i in seq_along(dfs)) {
df <- dfs[[i]]
rng <- seq(pos[i], length.out = nrows[i])
for (col in names(df)) {
date_col <- inherits(df[[col]], 'Date')
time_col <- inherits(df[[col]], 'POSIXct')
if (is_date[[col]] && !date_col) {
out[[col]][rng] <- as.Date(
unclass(df[[col]]),
origin = ggplot_global$date_origin
)
} else if (is_time[[col]] && !time_col) {
out[[col]][rng] <- as.POSIXct(
unclass(df[[col]]),
origin = ggplot_global$time_origin
)
} else if (date_col || time_col || inherits(df[[col]], 'factor')) {
out[[col]][rng] <- as.character(df[[col]])
} else {
out[[col]][rng] <- df[[col]]
}
}
}
for (col in names(col_levels)) {
out[[col]] <- factor(out[[col]], levels = col_levels[[col]])
}
attributes(out) <- list(
class = "data.frame",
names = names(out),
row.names = .set_row_names(total)
)
out
}
dapply <- function(df, by, fun, ..., drop = TRUE) {
grouping_cols <- .subset(df, by)
ids <- id(grouping_cols, drop = drop)
group_rows <- split(seq_len(nrow(df)), ids)
fallback_order <- unique(c(by, names(df)))
rbind_dfs(lapply(seq_along(group_rows), function(i) {
cur_data <- df_rows(df, group_rows[[i]])
res <- fun(cur_data, ...)
if (is.null(res)) return(res)
if (length(res) == 0) return(new_data_frame())
vars <- lapply(setNames(by, by), function(col) .subset2(cur_data, col)[1])
if (is.matrix(res)) res <- split_matrix(res)
if (is.null(names(res))) names(res) <- paste0("V", seq_along(res))
if (all(by %in% names(res))) return(new_data_frame(unclass(res)))
res <- modify_list(unclass(vars), unclass(res))
new_data_frame(res[intersect(c(fallback_order, names(res)), names(res))])
}))
} |