context("models")
test_that("photor", {
expect_equal(ncol(photor(350)), 2)
expect_equal(nrow(photor(350, lambda = seq(300, 700, 1))), 401)
})
test_that("photor", {
expect_equal(ncol(photor(350, beta.band=TRUE)), 2)
expect_equal(nrow(photor(350, lambda = seq(300, 700, 1), beta.band=TRUE)), 401)
})
test_that("logistic", {
expect_equal(ncol(logistic(x0=350,L=60,k=0.08)), 2)
expect_equal(nrow(logistic(x0=350,L=60,k=0.08)), 401)
})
data("bee")
data("D65")
data("Rb")
midpoint<-seq(from = 300, to = 700, 50)
W<-seq(300, 700, 1)
R<-data.frame(W)
for (i in 1:length(midpoint)) {
R[,i+1]<-logistic(x = seq(300, 700, 1), x0=midpoint[[i]], L = 50, k=0.04)[,2]
}
names(R)[2:ncol(R)]<-midpoint
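# Test fixture: R holds nine synthetic logistic reflectance spectra (midpoints
# every 50 nm from 300 to 700 nm) plus the wavelength column, so the colour
# models applied to it below are expected to return nine rows, one per spectrum.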
test_that("CTTKmodel", {
expect_equal(ncol(CTTKmodel(photo=2,
R=R,
I=D65,
Rb=Rb,
C=bee)), 6)
})
test_that("CTTKmodel", {
expect_equal(ncol(CTTKmodel(photo=c("tri"),
R=R,
I=D65,
Rb=Rb,
C=bee)), 9)
})
test_that("CTTKmodel", {
expect_equal(nrow(CTTKmodel(photo=c("tri"),
R=R,
I=D65,
Rb=Rb,
C=bee)), 9)
})
test_that("CTTKmodel", {
expect_equal(ncol(CTTKmodel(photo=c("tetra"),
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))), 12)
})
test_that("CTTKmodel", {
expect_equal(nrow(CTTKmodel(photo=c("tetra"),
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))), 9)
})
test_that("CTTKmodel", {
expect_equal(ncol(CTTKmodel(photo=6,
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,380,420,490,520,560)))), 18)
})
test_that("EMmodel", {
expect_equal(ncol(EMmodel(photo=2,
R=R,
I=D65,
Rb=Rb,
C=bee)), 6)
})
test_that("EMmodel", {
expect_equal(ncol(EMmodel(photo=c("tri"),
R=R,
I=D65,
Rb=Rb,
C=bee)), 9)
})
test_that("EMmodel", {
expect_equal(nrow(EMmodel(photo=c("tri"),
R=R,
I=D65,
Rb=Rb,
C=bee)), 9)
})
test_that("EMmodel", {
expect_equal(ncol(EMmodel(photo=c("tetra"),
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))), 12)
})
test_that("EMmodel", {
expect_equal(nrow(EMmodel(photo=c("tetra"),
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))), 9)
})
test_that("EMmodel", {
expect_equal(ncol(EMmodel(photo=6,
R=R,
I=D65,
Rb=Rb,
C=photor(c(350,380,420,490,520,560)))), 18)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=2,
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))), 13)
expect_equal(nrow(RNLmodel(photo=2,
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=2,
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))), 13)
expect_equal(nrow(RNLmodel(photo=2,
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=2,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))), 13)
expect_equal(nrow(RNLmodel(photo=2,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=2,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))), 13)
expect_equal(nrow(RNLmodel(photo=2,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tri"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))), 20)
expect_equal(nrow(RNLmodel(photo=c("tri"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tri"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))), 20)
expect_equal(nrow(RNLmodel(photo=c("tri"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tri"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))), 20)
expect_equal(nrow(RNLmodel(photo=c("tri"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tri"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))), 20)
expect_equal(nrow(RNLmodel(photo=c("tri"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tetra"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))), 27)
expect_equal(nrow(RNLmodel(photo=c("tetra"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tetra"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))), 27)
expect_equal(nrow(RNLmodel(photo=c("tetra"),
model="linear",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tetra"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))), 27)
expect_equal(nrow(RNLmodel(photo=c("tetra"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=c("tetra"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))), 27)
expect_equal(nrow(RNLmodel(photo=c("tetra"),
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))), 9)
})
test_that("RNLmodel", {
expect_equal(ncol(RNLmodel(photo=5,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560,600)),
noise=F,
v=0.1, n=c(1,1.5,2,2,2))), 34)
expect_equal(nrow(RNLmodel(photo=5,
model="log",
R1=R,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560,600)),
noise=F,
v=0.1, n=c(1,1.5,2,2,2))), 9)
})
test_that("RNLthres", {
expect_equal(ncol(RNLthres(photo=5,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560,600)),
noise=F,
v=0.1, n=c(1,1.5,2,2,2))), 3)
expect_equal(nrow(RNLthres(photo=5,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560,600)),
noise=F,
v=0.1, n=c(1,1.5,2,2,2))), 401)
})
test_that("CTTKmodel", {
expect_equal(CTTKmodel(photo=c("tri"),
R=Rb,
I=D65,
Rb=Rb,
C=bee)$deltaS, 0)
})
test_that("CTTKmodel", {
expect_equal(CTTKmodel(photo=c("tetra"),
R=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))$deltaS, 0)
})
EMRb<-data.frame(300:700, rep(7,401))
test_that("EMmodel", {
expect_equal(round(EMmodel(photo=c("tri"),
R=cbind(EMRb[,1],EMRb[,2]+10),
I=D65,
Rb=EMRb,
C=bee)$deltaS, 2), 0)
})
test_that("EMmodel", {
expect_equal(round(EMmodel(photo=c("tetra"),
R=cbind(EMRb[,1],EMRb[,2]+10),
I=D65,
Rb=EMRb,
C=photor(c(350,420,490,560)))$deltaS, 2), 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=2,
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=2,
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=2,
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=TRUE,
e=c(0.1,0.05))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=2,
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.1, n=c(1,2))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tri"),
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tri"),
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tri"),
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=TRUE,
e=c(0.1,0.07,0.05))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tri"),
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,560)),
noise=F,
v=0.1, n=c(1,1.5,2))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tetra"),
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))$deltaS, 0)
})
test_that("RNLmodel", {
expect_equal(RNLmodel(photo=c("tetra"),
model="linear",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))$deltaS, 0)
})
test_that("RNLmodel, tetrachromatic, log=T, noise=T", {
expect_equal(RNLmodel(photo=c("tetra"),
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=TRUE,
e=c(0.1,0.07,0.05,0.04))$deltaS, 0)
})
test_that("RNLmodel, tetrachromatic, log=F, noise=F", {
expect_equal(RNLmodel(photo=c("tetra"),
model="log",
R1=Rb,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.1, n=c(1,1.5,2,2))$deltaS, 0)
})
r500<-logistic(x0=500,L=50,k=0.04)
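# Single logistic reflectance curve centred at 500 nm. The hard-coded expected
# values in the following tests appear to be reference results computed with
# external software (AVICOL and pavo's vismodel/tcs), as the test names suggest.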
test_that("CTTKmodel, trichromatic, AVICOL", {
model<-CTTKmodel(photo="tri",
R=r500,
I=D65,
Rb=Rb,
C=bee)
expect_equal(round(model$E1, 3), 0.301)
expect_equal(round(model$E2, 3), 0.597)
expect_equal(round(model$E3, 3), 0.786)
})
test_that("CTTKmodel, tetrachromatic, AVICOL", {
model<-CTTKmodel(photo="tetra",
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
expect_equal(round(model$E1, 3), 0.112)
expect_equal(round(model$E2, 3), 0.520)
expect_equal(round(model$E3, 3), 0.755)
expect_equal(round(model$E4, 3), 0.795)
})
test_that("RNLmodel, dichromatic, AVICOL", {
model<-RNLmodel(photo="di",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.05,
n=c(1,1.77))
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.038)
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^0.034),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.588),3))<=0.002, TRUE)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^8.853),3))<=0.002, TRUE)
})
test_that("RNLmodel, dichromatic, AVICOL alternative", {
model<-RNLmodel(photo="di",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=photor(c(420,560)),
noise=F,
v=0.05,
n=c(1,1.77),
coord="alternative")
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.038)
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^0.034),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.588),3))<=0.002, TRUE)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^8.853),3))<=0.002, TRUE)
})
test_that("RNLmodel, trichromatic, AVICOL", {
model<-RNLmodel(photo="tri",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=bee,
noise=F,
v=0.05,
n=c(1,5,10))
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^-0.366),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.171),3))<=0.002, TRUE)
expect_equal(abs(round(model$E3_R1, 3)-round(log(10^0.564),3))<=0.002, TRUE)
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.022)
expect_equal(round(model$e3, 3), 0.016)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^21.094),3))<=0.002, TRUE)
})
test_that("RNLmodel, trichromatic, AVICOL, alternative", {
model<-RNLmodel(photo="tri",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=bee,
noise=F,
v=0.05,
n=c(1,5,10),
coord="alternative")
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^-0.366),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.171),3))<=0.002, TRUE)
expect_equal(abs(round(model$E3_R1, 3)-round(log(10^0.564),3))<=0.002, TRUE)
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.022)
expect_equal(round(model$e3, 3), 0.016)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^21.094),3))<=0.002, TRUE)
})
test_that("RNLmodel, tetrachromatic, AVICOL, alternative", {
model<-RNLmodel(photo="tetra",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.05,
n=c(1, 1.9, 2.2, 2.1))
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^-0.900),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.034),3))<=0.002, TRUE)
expect_equal(abs(round(model$E3_R1, 3)-round(log(10^0.488),3))<=0.002, TRUE)
expect_equal(abs(round(model$E4_R1, 3)-round(log(10^0.588),3))<=0.002, TRUE)
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.036)
expect_equal(round(model$e3, 3), 0.034)
expect_equal(round(model$e4, 3), 0.035)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^26.530),3))<=0.002, TRUE)
})
test_that("RNLmodel, tetrachromatic, AVICOL", {
model<-RNLmodel(photo="tetra",
model="log",
R1=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)),
noise=F,
v=0.05,
n=c(1, 1.9, 2.2, 2.1),
coord="alternative")
expect_equal(abs(round(model$E1_R1, 3)-round(log(10^-0.900),3))<=0.002, TRUE)
expect_equal(abs(round(model$E2_R1, 3)-round(log(10^0.034),3))<=0.002, TRUE)
expect_equal(abs(round(model$E3_R1, 3)-round(log(10^0.488),3))<=0.002, TRUE)
expect_equal(abs(round(model$E4_R1, 3)-round(log(10^0.588),3))<=0.002, TRUE)
expect_equal(round(model$e1, 3), 0.050)
expect_equal(round(model$e2, 3), 0.036)
expect_equal(round(model$e3, 3), 0.034)
expect_equal(round(model$e4, 3), 0.035)
expect_equal(abs(round(model$deltaS, 3)-round(log(10^26.530),3))<=0.002, TRUE)
})
test_that("RNLmodel, tetrachromatic, vismodel and tcs", {
model<-EMmodel(photo="tetra",
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
expect_equal(round(model$E1, 3), -4.284)
expect_equal(round(model$E2, 3), 0.163)
expect_equal(round(model$E3, 3), 2.322)
expect_equal(round(model$E4, 3), 2.799)
expect_equal(round(model$X1, 3), 1.615)
expect_equal(round(model$X2, 3), 0.595)
expect_equal(round(model$X3, 3), -4.534)
expect_equal(round(model$deltaS, 3), 4.850)
})
test_that("CTTKmodel, tetrachromatic, AVICOL, new method", {
photo1<-4
C=photor(c(350,420,490,560))
P<-vector(length=photo1)
for (i in 1:length(P)) {
P[[i]]<-Qr(I=D65, R=r500, Rb=Rb, C=C[,c(1,i+1)], interpolate=TRUE, nm=300:700)
}
E<-P/(P+1)
expect_equal(round(E[[1]], 3), 0.112)
expect_equal(round(E[[2]], 3), 0.520)
expect_equal(round(E[[3]], 3), 0.755)
expect_equal(round(E[[4]], 3), 0.795)
new<-colour_space(type="length", n=photo1, length=1, edge=NA, q=E)
new<-sqrt(sum(new$coordinates^2))
original<-CTTKmodel(photo="tetra",
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))$deltaS
expect_equal(round(new, 3), round(original, 3))
})
test_that("EMmodel, tetrachromatic, new method", {
photo1<-4
C=photor(c(350,420,490,560))
S<-vector(length=photo1)
for (i in 1:photo1) {
S[[i]]<-Qr(I=D65, R=r500, Rb=Rb, C=C[,c(1,1+i)], interpolate=TRUE, nm=300:700)
}
S.log<-log(S)
E<-S.log/sum(S.log)
new<-colour_space(type="length", n=photo1, length=0.75, edge=sqrt(3/2), q=E)
new<-sqrt(sum(new$coordinates^2))
original<-EMmodel(photo="tetra",
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))$deltaS
expect_equal(round(new, 3), round(original, 3))
})
test_that("GENmodel, EMmodel, tetra", {
model1<-EMmodel(photo=4,
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
model2<-GENmodel(photo=4,
type="length",
length=0.75,
unity=TRUE,
func=log,
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
expect_equal(round(model1$deltaS, 3), round(model2$deltaS, 3))
})
test_that("GENmodel, CTTKmodel, tetra",{
model1<-CTTKmodel(photo=4,
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
model2<-GENmodel(photo=4,
type="length",
length=1,
unity=FALSE,
func=function(x){x/(x+1)},
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490,560)))
expect_equal(round(model1$deltaS, 3), round(model2$deltaS, 3))
})
test_that("GENmodel, CTTKmodel, tri", {
model1<-CTTKmodel(photo=3,
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490)))
model2<-GENmodel(photo=3,
type="length",
length=1,
unity=FALSE,
func=function(x){x/(x+1)},
R=r500,
I=D65,
Rb=Rb,
C=photor(c(350,420,490)))
expect_equal(round(model1$deltaS, 3), round(model2$deltaS, 3))
})
R<-data.frame(W)
for (i in 1:length(midpoint)) {
R[,i+1]<-logistic(x = seq(300, 700, 1), x0=midpoint[[i]], L = 50, k=0.04)[,2]
}
names(R)[2:ncol(R)]<-midpoint
test_that("RNL THRES AND RADARPLOT", {
model<-RNLthres(Rb=Rb,I=D65,C=photor(c(400,550)),e=c(0.05,0.01))
expect_error(radarplot(model, item="E"))
})
test_that("deltaS", {
model<-RNLmodel(model="log",R1=R,Rb=Rb,I=D65,C=photor(c(400,550)),noise=TRUE,e=c(0.05,0.01))
expect_equal(deltaS(model)[1,2], sqrt((model[1,"X1_R1"]-model[2,"X1_R1"])^2))
model<-RNLmodel(model="log",R1=R,Rb=Rb,I=D65,C=photor(c(400,500,550)),noise=TRUE,e=c(0.05,0.07,0.01))
expect_equal(deltaS(model)[5,7],
sqrt((model[5,"X1_R1"]-model[7,"X1_R1"])^2+
(model[5,"X2_R1"]-model[7,"X2_R1"])^2))
model<-RNLmodel(model="log",R1=R,Rb=Rb,I=D65,C=photor(c(350,400,500,550)),noise=TRUE,e=c(0.05,0.07,0.01,0.03))
expect_equal(deltaS(model)[5,7],
sqrt((model[5,"X1_R1"]-model[7,"X1_R1"])^2+
(model[5,"X2_R1"]-model[7,"X2_R1"])^2+
(model[5,"X3_R1"]-model[7,"X3_R1"])^2))
model<-RNLmodel(model="log",R1=R,Rb=Rb,I=D65,C=photor(c(350,400,450,500,550)),noise=TRUE,e=c(0.05,0.07,0.07,0.01,0.03))
expect_equal(deltaS(model)[1,2],
sqrt((model[1,"X1_R1"]-model[2,"X1_R1"])^2+
(model[1,"X2_R1"]-model[2,"X2_R1"])^2+
(model[1,"X3_R1"]-model[2,"X3_R1"])^2+
(model[1,"X4_R1"]-model[2,"X4_R1"])^2))
model<-CTTKmodel(R=R,Rb=Rb,I=D65,C=photor(c(350,400,450,500,550)))
expect_equal(deltaS(model)[1,2],
sqrt((model[1,"X1"]-model[2,"X1"])^2+
(model[1,"X2"]-model[2,"X2"])^2+
(model[1,"X3"]-model[2,"X3"])^2+
(model[1,"X4"]-model[2,"X4"])^2))
model<-EMmodel(R=R,Rb=Rb,I=D65,C=photor(c(350,400,450,500,550)))
expect_equal(deltaS(model)[1,2],
sqrt((model[1,"X1"]-model[2,"X1"])^2+
(model[1,"X2"]-model[2,"X2"])^2+
(model[1,"X3"]-model[2,"X3"])^2+
(model[1,"X4"]-model[2,"X4"])^2))
model<-EMmodel(type="edge", R=R,Rb=Rb,I=D65,C=photor(c(350,400,450,500,550)))
expect_equal(deltaS(model)[1,2],
sqrt((model[1,"X1"]-model[2,"X1"])^2+
(model[1,"X2"]-model[2,"X2"])^2+
(model[1,"X3"]-model[2,"X3"])^2+
(model[1,"X4"]-model[2,"X4"])^2))
})
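# effectInfomod8: extracts effect information for a model with a categorical
# exposure X; the coefficients of the non-reference levels are reported as odds
# ratios of Y, i.e. exp(beta), relative to the reference level of X.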
effectInfomod8 <-
function(object)
{
mf <- model.frame(object$model)
aux <- summary(object$model)$coefficients
Xlevels <- levels(mf[, 2])
nlevels <- length(Xlevels)
beta <- aux[2:nlevels, ]
Xincrease <- paste("changing X from its reference, '", Xlevels[1], "', to the alternative level", sep = "")
effecttype <- "odds ratio of Y"
effectsize <- "exp(beta)"
furtherinfo <- "\nFurther details can be obtained using effect() and providing the level for the\nconfidence interval, 'level'."
res <- list(beta = beta, Xincrease = Xincrease, effecttype = effecttype, effectsize = effectsize)
res
}
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
required <- c("maps", "mapproj", "knitr")
if (!all(sapply(required, requireNamespace, quietly = TRUE))) {
knitr::opts_chunk$set(eval = FALSE)
}
ggplot2::ggplot(
data.frame(
y = c(1, 3), group = c("a", "b"),
facet = factor(c(rep("pie", 2)), levels = c("pie", "moon"))
),
ggplot2::aes(y = y, fill = group)
) +
ggplot2::geom_col(ggplot2::aes(x = factor(0)), width = 2, color = "black") +
gggibbous::geom_moon(
data = data.frame(
x = factor(rep(-0.5, 2)), y = rep(0, 2), ratio = c(0.75, 0.25),
right = c(TRUE, FALSE), group = c("b", "a"), facet = c("moon", "moon")
),
ggplot2::aes(x = x, y = y, ratio = ratio, right = right),
size = 36, stroke = 0.5
) +
ggplot2::coord_polar(theta = "y") +
ggplot2::facet_wrap(~facet) +
ggplot2::scale_fill_manual(values = c("white", "black"), guide = "none") +
ggplot2::theme_void(16)
library(gggibbous)
ggplot(data.frame(x = 1:5, y = 1, size = 2^(0:4)), aes(x, y, size = size)) +
geom_moon() +
geom_point(y = 2) +
lims(x = c(0.5, 5.5), y = c(0.5, 2.5)) +
scale_size(range = c(5, 10))
ggplot(data.frame(x = 1:5, y = 0, ratio = 0:4 * 0.25), aes(x = x, y = y)) +
geom_moon(aes(ratio = ratio), size = 20, fill = "black") +
geom_text(aes(y = y + 1, label = ratio)) +
lims(x = c(0.5, 5.5), y = c(-1, 1.4)) +
theme_void()
tidymoons <- data.frame(
x = rep(1:3, 6),
y = rep(rep(3:1, each = 3), 2),
ratio = c(1:9 / 10, 9:1 / 10),
right = rep(c(TRUE, FALSE), each = 9)
)
ggplot(tidymoons) +
geom_moon(aes(x, y, ratio = ratio, right = right, fill = right)) +
lims(x = c(0.5, 3.5), y = c(0.5, 3.5))
ggplot(tidymoons, aes(x, y, ratio = ratio, right = right, size = 2^x)) +
geom_moon(data = subset(tidymoons, right), fill = "violetred") +
geom_moon(
data = subset(tidymoons, !right), fill = "turquoise3",
key_glyph = draw_key_moon_left
) +
lims(x = c(0.5, 3.5), y = c(0.5, 3.5)) +
scale_size("size", range = c(5, 10), breaks = 2^(1:3))
ggplot(tidymoons) +
geom_moon(
aes(x, y, ratio = ratio, right = right, fill = right, size = 2^x),
key_glyph = draw_key_full_moon
) +
lims(x = c(0.5, 3.5), y = c(0.5, 3.5)) +
scale_size("size", range = c(5, 10), breaks = 2^(1:3)) +
scale_fill_manual(values = c("firebrick1", "dodgerblue2")) +
theme(legend.box = "horizontal")
dmeladh_adj <- dmeladh
dmeladh_adj$long <- dmeladh$Longitude + c(
-2, 0, -2, 2, -3, 3, 3, 2, 3, 4, -2.5, -2.5, -1, -2, -2.5, -4, 2.5,
5, 6, 7, 2, -7, -5.5, -3, 0, -7, -2, 3, 5.5, 0.5, -1, -1.5, -3, 2)
dmeladh_adj$lat <- dmeladh$Latitude + c(
-2, 2, 0, 1, 0, 0, 0, 2, 0.5, -1, 1, -1.5, 2, 4, 1.5, 0, 2,
1, -1, -3, -2, 1, -1, -2, -3, -2, -4, -3, -1, 1.5, 2, 2, -2, 0)
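# Map of the dmeladh allele-frequency data: coordinates are nudged by hand so
# that neighbouring moon glyphs do not overlap, and segments connect each glyph
# back to its true sampling location (Longitude/Latitude).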
moonmap <- ggplot(dmeladh_adj, aes(long, lat)) +
geom_polygon(
data = map_data(
"world", region = "(Australia)|(Indonesia)|(Papua New Guinea)"),
aes(group = group),
fill = "gray80"
) +
geom_segment(aes(xend = Longitude, yend = Latitude), color = "gray20") +
geom_point(aes(Longitude, Latitude), size = 0.75, color = "gray20") +
scale_size(range = c(4, 10)) +
coord_map(xlim = c(110, 160), ylim = c(-45, -5)) +
theme_void() +
theme(
legend.position = c(0.05, 0.05),
legend.direction = "horizontal",
legend.justification = c(0, 0)
)
moonmap +
geom_moon(
aes(ratio = AdhS / 100, size = N),
right = FALSE, fill = "gold", color = "gold",
key_glyph = draw_key_moon_left
) +
geom_moon(
aes(ratio = AdhF / 100, size = N),
fill = "forestgreen", color = "forestgreen"
)
tidyadh <- reshape(
dmeladh_adj,
varying = c("AdhF", "AdhS"),
v.names = "percent",
timevar = "allele",
times = c("AdhF", "AdhS"),
idvar = c("Locality", "Latitude", "Longitude", "long", "lat", "N"),
direction = "long"
)
tidyadh$right <- rep(c(TRUE, FALSE), each = nrow(dmeladh_adj))
moonmap +
geom_moon(
data = tidyadh, key_glyph = draw_key_full_moon,
aes(ratio = percent / 100, fill = allele, color = allele, right = right,
size = N)
) +
scale_fill_manual(values = c("forestgreen", "gold")) +
scale_color_manual(values = c("forestgreen", "gold"))
moonphase <- subset(lunardist, !is.na(phase))
moonphase$percent <- ifelse(
moonphase$phase == "new", 0, ifelse(moonphase$phase == "full", 1, 0.5))
ggplot(lunardist, aes(date, distance)) +
geom_line() +
geom_moon(data = moonphase, ratio = 1, size = 5, fill = "black") +
geom_moon(
data = moonphase, aes(ratio = percent),
size = 5, fill = "yellow", right = moonphase$phase == "first quarter"
)
rest_names <- c(
"Anscombe's Luncheonette", "Chai Squared", "Tukey's Honest Southern Diner",
"Bagels ANOVA", "Spearmint Row"
)
restaurants <- data.frame(
Restaurant = factor(rest_names, levels = rest_names),
Food = c(5, 3, 4, 4, 1),
Decor = c(2, 5, 3, 1, 5),
Service = c(4, 2, 3, 3, 5),
Price = c(4, 5, 2, 5, 2)
)
knitr::kable(restaurants, align = "lcccc")
rest_cats <- c("Food", "Decor", "Service", "Price")
tidyrest <- reshape(
restaurants,
varying = rest_cats,
v.names = "Score",
timevar = "Category",
times = factor(rest_cats, levels = rest_cats),
idvar = "Restaurant",
direction = "long"
)
ggplot(tidyrest, aes(0, 0)) +
geom_moon(aes(ratio = (Score - 1) / 4), fill = "black") +
geom_moon(aes(ratio = 1 - (Score - 1) / 4), right = FALSE) +
facet_grid(Restaurant ~ Category, switch = "y") +
theme_minimal() +
theme(
panel.grid = element_blank(),
strip.text.y.left = element_text(angle = 0, hjust = 1),
axis.text = element_blank(),
axis.title = element_blank()
)
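# extract_node_info(): recursively walks a decision tree (left child first, then
# right), appending one row per node to `df` (tree/subtree ids, node type,
# depth, misclassification counts, r_t, p_t, R_t) and collecting not-yet-processed
# internal nodes in `node_bucket` so they can later serve as subtree roots.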
extract_node_info <- function(i, root_node, subtree_count, node_count, df, node_bucket, subtreemode, nodeleftorright){
node_count <- node_count + 1
newrow <- data.frame(Tree_num = i,
SubTree_num = subtree_count,
Node_num = node_count,
Node_type = root_node$node_type,
Misclassified_num = root_node$node_number_misclassified,
Samples_num = root_node$node_tot_samples,
Depth = root_node$node_depth,
Object_id = root_node$node_objectid,
Parent_id = 0,
NodeLeftorRight = nodeleftorright,
r_t = root_node$node_r_t,
p_t = root_node$node_p_t,
R_t = root_node$node_R_t)
if(typeof(root_node$node_parent) == "S4"){
newrow$Parent_id <- root_node$node_parent$node_objectid
}
df <- rbind(df, newrow)
if(!root_node$node_children_left_NA){
ln <- root_node$node_children_left
results <- extract_node_info(i,
ln,
subtree_count,
node_count,
df,
node_bucket,
FALSE,
"L")
node_count <- results[[1]]
df <- results[[2]]
node_bucket <- results[[3]]
if(ln$node_type == "INTERNAL" & !ln$node_processed_as_subtree & !subtreemode){
ln$node_processed_as_subtree <- TRUE
node_bucket <- append(node_bucket, ln)
}
} else {
outp <- list(node_count, df, node_bucket)
return(outp)
}
if(!root_node$node_children_right_NA){
rn <- root_node$node_children_right
results <- extract_node_info(i,
rn,
subtree_count,
node_count,
df,
node_bucket,
FALSE,
"R")
node_count <- results[[1]]
df <- results[[2]]
node_bucket <- results[[3]]
if(rn$node_type == "INTERNAL" & !rn$node_processed_as_subtree & !subtreemode){
rn$node_processed_as_subtree <- TRUE
node_bucket <- append(node_bucket, rn)
}
} else {
outp <- list(node_count, df, node_bucket)
return(outp)
}
outp <- list(node_count, df, node_bucket)
return(outp)
}
perform_cost_complexity_pruning <- function(treeobjs, df){
loop_start <- 1
loop_end <- length(treeobjs)
node_bucket <- list()
for(i in loop_start:loop_end){
subtree_count <- 1
node_count <- 0
root_node <- treeobjs[[i]]
root_node$node_subtree_num <- subtree_count
results <- extract_node_info(i,
root_node,
subtree_count,
node_count,
df,
node_bucket,
FALSE,
"ROOT")
node_count <- results[[1]]
df <- results[[2]]
node_bucket <- results[[3]]
if(length(node_bucket) == 0){
return(df)
}
for(j in 1:length(node_bucket)){
node_count <- 0
subtree_count <- subtree_count + 1
root_node <- node_bucket[[j]]
root_node$node_subtree_num <- subtree_count
results <- extract_node_info(i,
root_node,
subtree_count,
node_count,
df,
node_bucket,
TRUE,
"SUBTREE_ROOT")
node_count <- results[[1]]
df <- results[[2]]
}
}
return(df)
}
perform_ccp_driver <- function(treeobjs){
df <- structure(list(Tree_num = integer(),
SubTree_num = integer(),
Node_num = integer(),
Node_type = character(),
Misclassified_num = integer(),
Samples_num = integer(),
Depth = integer(),
Object_id = integer(),
Parent_id = integer(),
NodeLeftorRight = character(),
r_t = double(),
p_t = double(),
R_t = double()),
class = "data.frame")
x <- c("Tree_num", "SubTree", "Node_num", "Node_type", "Misclassified_num",
"Samples_num",
"Depth", "Object_id", "Parent_id", "Node_Left_Right", "r_t", "p_t", "R_t")
colnames(df) <- x
df <- perform_cost_complexity_pruning(treeobjs, df)
df$R_i <- NA
total_number_samples_in_tree <- df$Samples_num[1]
df$R_i <- df$Misclassified_num / total_number_samples_in_tree
alpha_list <- list()
moretrees <- TRUE
seekalpha_subtreesubset_list <- list()
base_trees <- df
seekalpha_phase <- 1
while(moretrees){
df$R_T_t <- NA
df$Num_Leaves <- NA
df$alpha <- NA
res <- seekalpha(df, base_trees)
df <- res[[1]]
if(typeof(df) == "logical"){
moretrees <- FALSE
break
}
smallest_alpha <- res[[2]]
subtree_with_smallest_alpha <- res[[3]]
collapse_this_node_with_smallest_alpha <- res[[4]]
number_internal_nodes_deleted <- res[[5]]
total_number_nodes_deleted <- res[[6]]
seekalpha_subtreesubset <- res[[7]]
seekalpha_subtreesubset <- cbind(seekalpha_phase,
seekalpha_subtreesubset)
seekalpha_subtreesubset_list <- rbind(seekalpha_subtreesubset_list,
seekalpha_subtreesubset)
seekalpha_phase <- seekalpha_phase + 1
alpha_list <- append(alpha_list,
list(smallest_alpha,
subtree_with_smallest_alpha,
collapse_this_node_with_smallest_alpha,
number_internal_nodes_deleted,
total_number_nodes_deleted))
}
save_ccp_phase_data(seekalpha_subtreesubset_list)
res <- NA
if(length(alpha_list) == 0){
empty_df <- base_trees[FALSE,]
res <- list(empty_df)
} else {
alpha_list_df <- data.frame(matrix(unlist(alpha_list),
nrow = length(alpha_list) / 5,
byrow = T))
colnames(alpha_list_df) <- c("alpha",
"subtree_with_smallest_alpha",
"collapse_this_node",
"number_internal_nodes_deleted",
"total_number_nodes_deleted")
res <- list(alpha_list_df)
}
return(res)
}
compute_R_T_t <- function(df, subtree){
R_i_for_subtree <- df[which(df$Node_type == "TERMINAL" & df$SubTree_num == subtree),]$R_i
Number_leaves_on_subtree <- length(R_i_for_subtree)
R_T_t <- sum(R_i_for_subtree)
res <- list(R_T_t, Number_leaves_on_subtree)
return(res)
}
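# seekalpha(): for every subtree root the weakest-link value
# alpha = (R_t - R(T_t)) / (|leaves(T_t)| - 1) is computed, where R_t is the
# node's resubstitution error and R(T_t) the summed error of the subtree's
# leaves; the subtree with the smallest alpha is then collapsed into a terminal
# node and the pruned tree is returned together with bookkeeping information.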
seekalpha <- function(df, base_trees){
for(i in 1:nrow(df)) {
row <- df[i,]
if(row$Node_type == "INTERNAL" & row$SubTree_num != 1 & row$NodeLeftorRight == "SUBTREE_ROOT"){
res <- compute_R_T_t(df, row$SubTree_num)
row$R_T_t <- res[[1]]
row$Num_Leaves <- res[[2]]
row$alpha <- (row$R_t - row$R_T_t) / (row$Num_Leaves - 1)
df[i,] <- row
}
}
subtreesubset <- df[which(df$NodeLeftorRight == "SUBTREE_ROOT"),]
if(nrow(subtreesubset) == 0){
return(NA)
}
smallest_alpha <- subtreesubset[which.min(subtreesubset$alpha),]
if(nrow(smallest_alpha) > 1){
browser()
}
subtree_with_smallest_alpha <- smallest_alpha$SubTree_num
collapse_this_node_with_smallest_alpha <- smallest_alpha$Object_id
original_tree_df <- df[which(df$SubTree_num == 1),]
collapse_this <- df[which(df$SubTree_num == subtree_with_smallest_alpha),]
remove_these <- collapse_this$Object_id[collapse_this$Object_id != collapse_this_node_with_smallest_alpha]
original_tree_df <- df
for(i in 1:nrow(original_tree_df)){
rowi <- original_tree_df[i,]
if(rowi$Object_id == collapse_this_node_with_smallest_alpha){
if(rowi$Node_type == "INTERNAL"){
original_tree_df[i,]$Node_type <- "TERMINAL"
original_tree_df[i,]$NodeLeftorRight <- NA
}
}
}
new_df <- original_tree_df[which(original_tree_df$Object_id %notin% remove_these),]
number_internal_nodes_deleted <- nrow(base_trees[which(base_trees$SubTree_num == subtree_with_smallest_alpha & base_trees$Node_type == "INTERNAL"),])
number_terminal_nodes_deleted <- nrow(base_trees[which(base_trees$SubTree_num == subtree_with_smallest_alpha & base_trees$Node_type == "TERMINAL"),])
total_number_nodes_deleted <- (number_internal_nodes_deleted - 1) + number_terminal_nodes_deleted
results <- list(new_df,
smallest_alpha$alpha,
subtree_with_smallest_alpha,
collapse_this_node_with_smallest_alpha,
number_internal_nodes_deleted,
total_number_nodes_deleted,
subtreesubset)
return(results)
}
.onLoad <- function(lib, pkg) {
.jpackage(pkg)
}
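# Summary method for bootstrap results: for every numeric column it reports the
# median, mean, standard error, percentile confidence limits at the requested
# alpha level and the CV (se/median), along with the bootstrap success counts.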
summary.dht_bootstrap <- function(object, alpha=0.05, ...){
x <- list()
x$nboot <- attr(object, "nboot")
x$nbootfailures <- attr(object, "failures")
x$nbootsuccess <- x$nboot - x$nbootfailures
x$alpha <- alpha
class(object) <- "list"
object <- as.data.frame(object)
object$bootstrap_ID <- NULL
numcols <- unlist(lapply(object, is.numeric))
sumfun <- function(x){
if(is.numeric(x)){
xx <- data.frame(median = median(x, na.rm=TRUE),
mean = mean(x, na.rm=TRUE),
se = sqrt(var(x, na.rm=TRUE)),
lcl = quantile(x, (alpha/2), na.rm=TRUE),
ucl = quantile(x, 1-(alpha/2), na.rm=TRUE))
xx$cv <- xx$se/xx$median
}else{
xx <- NULL
}
return(xx)
}
tn <- lapply(object, sumfun)
x$tab <- do.call(rbind.data.frame, tn)
class(x) <- "summary.dht_bootstrap"
return(x)
}
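# simIntOcc(): simulates detection/non-detection data for an integrated
# occupancy model. Occupancy is generated on a J.x x J.y grid from covariates X
# and coefficients beta, optionally with a spatial random effect w drawn from
# the chosen covariance model; each of the n.data data sets then observes its
# own subset of sites, with its own replicate-level detection covariates X.p and
# detection coefficients alpha.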
simIntOcc <- function(n.data, J.x, J.y, J.obs, n.rep, beta, alpha,
sp = FALSE, cov.model, sigma.sq, phi, nu, ...) {
formal.args <- names(formals(sys.function(sys.parent())))
elip.args <- names(list(...))
for(i in elip.args){
if(! i %in% formal.args)
warning("'",i, "' is not an argument")
}
if (missing(n.data)) {
stop("error: n.data must be specified")
}
if (length(n.data) != 1) {
stop("error: n.data must be a single numeric value.")
}
if (missing(J.x)) {
stop("error: J.x must be specified")
}
if (length(J.x) != 1) {
stop("error: J.x must be a single numeric value.")
}
if (missing(J.y)) {
stop("error: J.y must be specified")
}
if (length(J.y) != 1) {
stop("error: J.y must be a single numeric value.")
}
J <- J.x * J.y
if (missing(J.obs)) {
stop("error: J.obs must be specified")
}
if (length(J.obs) != n.data) {
stop(paste("error: J.obs must be a vector of length ", n.data, sep = ''))
}
if (missing(n.rep)) {
stop("error: n.rep must be specified.")
}
if (!is.list(n.rep)) {
stop(paste("error: n.rep must be a list of ", n.data, " vectors", sep = ''))
}
if (length(n.rep) != n.data) {
stop(paste("error: n.rep must be a list of ", n.data, " vectors", sep = ''))
}
for (i in 1:n.data) {
if (length(n.rep[[i]]) != J.obs[i]) {
stop(paste("error: n.rep[[", i, "]] must be of length ", J.obs[i], sep = ''))
}
}
if (missing(beta)) {
stop("error: beta must be specified.")
}
if (missing(alpha)) {
stop("error: alpha must be specified.")
}
if (!is.list(alpha)) {
stop(paste("error: alpha must be a list with ", n.data, " vectors", sep = ''))
}
if (sp) {
if(missing(sigma.sq)) {
stop("error: sigma.sq must be specified when sp = TRUE")
}
if(missing(phi)) {
stop("error: phi must be specified when sp = TRUE")
}
if(missing(cov.model)) {
stop("error: cov.model must be specified when sp = TRUE")
}
cov.model.names <- c("exponential", "spherical", "matern", "gaussian")
if(! cov.model %in% cov.model.names){
stop("error: specified cov.model '",cov.model,"' is not a valid option; choose from ",
paste(cov.model.names, collapse=", ", sep="") ,".")
}
if (cov.model == 'matern' & missing(nu)) {
stop("error: nu must be specified when cov.model = 'matern'")
}
}
rmvn <- function(n, mu=0, V = matrix(1)) {
p <- length(mu)
if(any(is.na(match(dim(V),p))))
stop("Dimension problem!")
D <- chol(V)
t(matrix(rnorm(n*p), ncol=p)%*%D + rep(mu,rep(n,p)))
}
logit <- function(theta, a = 0, b = 1){log((theta-a)/(b-theta))}
logit.inv <- function(z, a = 0, b = 1){b-(b-a)/(1+exp(z))}
n.beta <- length(beta)
X <- matrix(1, nrow = J, ncol = n.beta)
if (n.beta > 1) {
for (i in 2:n.beta) {
X[, i] <- rnorm(J)
}
}
sites <- list()
for (i in 1:n.data) {
sites[[i]] <- sort(sample(1:J, J.obs[i], replace = FALSE))
}
X.p <- list()
for (i in 1:n.data) {
n.alpha.curr <- length(alpha[[i]])
K.curr <- n.rep[[i]]
J.curr <- J.obs[[i]]
X.p[[i]] <- array(NA, dim = c(J.curr, max(K.curr), n.alpha.curr))
X.p[[i]][, , 1] <- 1
if (n.alpha.curr > 1) {
for (q in 2:n.alpha.curr) {
for (j in 1:J.curr) {
X.p[[i]][j, 1:K.curr[j], q] <- rnorm(K.curr[j])
}
}
}
}
s.x <- seq(0, 1, length.out = J.x)
s.y <- seq(0, 1, length.out = J.y)
coords <- as.matrix(expand.grid(s.x, s.y))
if (sp) {
if (cov.model == 'matern') {
theta <- c(phi, nu)
} else {
theta <- phi
}
Sigma <- mkSpCov(coords, as.matrix(sigma.sq), as.matrix(0), theta, cov.model)
w <- rmvn(1, rep(0, J), Sigma)
} else {
w <- NA
}
if (sp) {
psi <- logit.inv(X %*% as.matrix(beta) + w)
} else {
psi <- logit.inv(X %*% as.matrix(beta))
}
z <- rbinom(J, 1, psi)
p <- list()
y <- list()
for (i in 1:n.data) {
K.curr <- n.rep[[i]]
J.curr <- J.obs[[i]]
p[[i]] <- matrix(NA, nrow = J.curr, ncol = max(K.curr))
y[[i]] <- matrix(NA, nrow = J.curr, ncol = max(K.curr))
sites.curr <- sites[[i]]
X.p.curr <- X.p[[i]]
alpha.curr <- as.matrix(alpha[[i]])
for (j in 1:J.curr) {
p[[i]][j, 1:K.curr[j]] <- logit.inv(X.p.curr[j, 1:K.curr[j], ] %*% alpha.curr)
y[[i]][j, 1:K.curr[j]] <- rbinom(K.curr[j], 1, p[[i]][j, 1:K.curr[j]] * z[sites.curr[j]])
}
}
sites.obs <- sort(unique(unlist(sites)))
sites.pred <- (1:J)[!(1:J %in% sites.obs)]
X.obs <- X[sites.obs, , drop = FALSE]
X.pred <- X[sites.pred, , drop = FALSE]
z.obs <- z[sites.obs]
z.pred <- z[sites.pred]
coords.obs <- coords[sites.obs,, drop = FALSE]
coords.pred <- coords[sites.pred,, drop = FALSE]
if (sp) {
w.obs <- w[sites.obs, , drop = FALSE]
w.pred <- w[sites.pred, , drop = FALSE]
} else {
w.obs <- NA
w.pred <- NA
}
psi.obs <- psi[sites.obs, , drop = FALSE]
psi.pred <- psi[sites.pred, , drop = FALSE]
sites.vec <- unlist(sites)
sites.new <- rep(0, length(sites.vec))
for (i in 1:length(sites.vec)) {
sites.new[i] <- which(sites.obs == sites.vec[i])
}
sites.return <- list()
indx <- 1
for (i in 1:n.data) {
sites.return[[i]] <- sites.new[indx:(indx + J.obs[i] - 1)]
indx <- indx + J.obs[i]
}
return(
list(X.obs = X.obs, X.pred = X.pred, X.p = X.p,
coords.obs = coords.obs, coords.pred = coords.pred,
w.obs = w.obs, w.pred = w.pred,
psi.obs = psi.obs, psi.pred = psi.pred, z.obs = z.obs,
z.pred = z.pred, p = p, y = y, sites = sites.return
)
)
}
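# Test fixture: expected ("true_") styling options for a mineral-element network
# plot built around the focal element Fe (iron), covering node and edge colours,
# palettes, shapes, sizes and custom element highlighting.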
focal <- "Fe"
true_focal_element_name <- "Iron"
blue <- "
red <- "
yellow <- "
purple <- "
black <- "
pink <- "
orange <- "
true_cluster_palette <- "Set2"
true_n_clusters <- 6
true_cluster_colors <- RColorBrewer::brewer.pal(true_n_clusters, true_cluster_palette)
true_mineral_label_size <- 12
true_mineral_size_scale <- 10
true_element_label_size <- 11
true_element_size_scale <- 20
true_mineral_size <- 7
true_mineral_color <- red
true_mineral_label_color <- orange
true_mineral_palette <- "Blues"
true_element_color <- blue
true_element_label_color <- pink
true_element_palette <- "Reds"
true_edge_color <- purple
true_edge_palette <- "Greens"
true_mineral_by <- "mean_pauling"
true_element_by <- "pauling"
true_edge_by <- "max_age"
true_mineral_shape <- "square"
true_element_shape <- "circle"
true_na_color <- black
true_highlight_color <- yellow
true_special_element_id <- c("P", "O-2", "H+1", "Fe+2", "Fe", "Fe+3")
true_custom_selection_color_1 <- orange
true_custom_selection_color_2 <- black
true_custom_element_colors <- c("P" = true_custom_selection_color_1,
"O-2" = true_custom_selection_color_1,
"H+1" = true_custom_selection_color_2)
true_custom_selection_set_1 <- c("P", "O-2")
true_custom_selection_set_2 <- c("H+1")
true_style_options <- list("color_by_cluster" = FALSE,
"cluster_colors" = true_cluster_colors,
"mineral_color_by" = "singlecolor",
"mineral_color" = true_mineral_color,
"element_color_by" = "singlecolor",
"element_color" = true_element_color,
"mineral_palette" = true_mineral_palette,
"element_palette" = true_element_palette,
"mineral_label_color" = true_mineral_label_color,
"element_label_color" = true_element_label_color,
"mineral_shape" = true_mineral_shape,
"element_shape" = true_element_shape,
"elements_of_interest" = focal,
"elements_by_redox" = TRUE,
"highlight_element" = TRUE,
"highlight_color" = true_highlight_color,
"custom_element_colors" = true_custom_element_colors,
"na_color" = true_na_color,
"element_size_by" = "singlesize",
"element_label_size" = true_element_label_size,
"element_size_scale" = true_element_size_scale,
"mineral_size_by" = "singlesize",
"mineral_size_scale" = true_mineral_size_scale,
"mineral_label_size" = true_mineral_label_size,
"mineral_size" = true_mineral_size,
"edge_color_by" = "singlecolor",
"edge_color" = true_edge_color,
"edge_palette" = true_edge_palette) |
grid2poly <- function(obj, var.name = names(obj)[1], reproject = TRUE, method = c("sp", "raster", "RSAGA")[1], tmp.file = TRUE, saga_lib = "shapes_grid", saga_module = 3, silent = FALSE, ...){
if(length(obj)>1e4){
warning("Operation not recommended for large grids (>>1e4 pixels).", immediate. = TRUE)
}
if(method=="raster"){
r <- raster(obj[var.name])
pol <- rasterToPolygons(r)
names(pol) <- var.name
}
else{
if(method=="RSAGA"){
if(requireNamespace("RSAGA", quietly = TRUE)){
if(!RSAGA::rsaga.env()[["cmd"]]=="NULL"){
if(tmp.file==TRUE){
tf <- tempfile()
} else {
tf <- var.name
}
obj <- as(obj[var.name], "SpatialPixelsDataFrame")
writeGDAL(obj[var.name], paste(tf, ".sdat", sep=""), "SAGA")
if(requireNamespace("RSAGA", quietly = TRUE)){
RSAGA::rsaga.geoprocessor(lib=saga_lib, module=saga_module, param=list(GRIDS=paste(tf, ".sgrd", sep=""), SHAPES=paste(tf, ".shp", sep=""), NODATA=TRUE, TYPE=1), show.output.on.console = silent)
if(requireNamespace("maptools", quietly = TRUE)){
pol <- maptools::readShapePoly(paste(tf, ".shp", sep=""), proj4string=obj@proj4string)
} else {
pol <- readOGR(paste(tf, ".shp", sep=""))
}
}
}
}
else { stop("SAGA GIS path could not be located. See 'rsaga.env()' for more info.") }
}
else {
obj <- as(obj[var.name], "SpatialPixelsDataFrame")
pol = as(obj, "SpatialPolygonsDataFrame")
}
}
prj.check <- check_projection(pol, control = TRUE)
if (!prj.check&reproject==TRUE) { pol <- reproject(pol) }
dm <- data.frame(obj@data[,var.name])
names(dm) <- var.name
pol <- SpatialPolygonsDataFrame(pol, dm, match.ID=FALSE)
return(pol)
}
NULL
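# DeltaB2pc_cat3logit(): maps a vector of coefficient differences DeltaB to a
# grid of candidate probability vectors for a three-category logit, dispatching
# on how many components of c(0, DeltaB) coincide (none, one pair, or all three).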
DeltaB2pc_cat3logit <- function(DeltaB, n = 8, edge = 0.01) {
out <- NULL
DeltaB %>%
c(0,.) %>%
{ outer(., ., '==') } %>%
{ .[lower.tri(.)]} -> xcmp
switch(as.character(sum(xcmp)),
'0' = DeltaB2pc_cat3logit_dim3(DeltaB, n, edge),
'1' = DeltaB2pc_cat3logit_dim2(DeltaB, n, edge),
'3' = DeltaB2pc_cat3logit_dim1(DeltaB, n, edge)
)
}
DeltaB2pc_cat3logit_dim1 <- function(DeltaB, n, edge) {
out <- list(status = 'p', fo = NULL)
{ edge + (0 : (n - 1)) / (n - 1) * (1 - 3 * edge) } %>%
{ expand.grid(p1 = ., p2 = .) } %>%
{ .$p3 <- 1 - .$p1 - .$p2; . } %>%
{ .[which((.$p3 >= 0) & (.$p3 <= 1)), ] } %>%
lapply(I) %>%
{ mapply(c, .$p1, .$p2, .$p3, SIMPLIFY = FALSE) } -> out$pp
out
}
DeltaB2pc_cat3logit_dim2 <- function(DeltaB, n, edge) {
out <-
if ((DeltaB[1] == 0) & (DeltaB[2] > 0)) {
ww <- list(wA = c(1, 0, 0), wB = c(0, 1, 0))
out <- list(status = 'p0')
} else if ((DeltaB[1] > 0) & (DeltaB[2] == 0)) {
ww <- list(wA = c(1, 0, 0), wB = c(0, 0, 1))
out <- list(status = 'p0')
} else if ((DeltaB[1] > 0) & (DeltaB[2] > 0)) {
ww <- list(wA = c(0, 1, 0), wB = c(0, 0, 1))
out <- list(status = 'pc')
} else if ((DeltaB[1] == 0) & (DeltaB[2] < 0)) {
ww <- list(wA = c(1, 0, 0), wB = c(0, 1, 0))
out <- list(status = 'pc')
} else if ((DeltaB[1] < 0) & (DeltaB[2] == 0)) {
ww <- list(wA = c(1, 0, 0), wB = c(0, 0, 1))
out <- list(status = 'pc')
} else if ((DeltaB[1] < 0) & (DeltaB[2] < 0)) {
ww <- list(wA = c(0, 1, 0), wB = c(0, 0, 1))
out <- list(status = 'p0')
}
out['fo'] <- switch(out$status,
'p0' = list(NULL),
'pc' = list(ww$wA + ww$wB))
ww %<>% lapply(v2vedge, edge = edge)
out$pp <- convex_comb((0.5 + (0 : (n - 1))) / n, ww$wA, ww$wB, simplify = FALSE)
out
}
DeltaB2pc_cat3logit_dim3 <- function(DeltaB, n, edge) {
vv <- DeltaB2vroles_cat3logit(DeltaB)
wA <- v2vedge(vv$vt, edge)
wB <- 0.5 * (v2vedge(vv$vo, edge) + v2vedge(vv$vs, edge))
convex_comb((0.5 + (0 : (n - 1))) / n, wA, wB, simplify = FALSE) %>%
{ list(status = 'pc', fo = 1 - vv$vo, pp = .) } %>%
return()
}
DeltaB2pc_ord3logit <- function(DeltaB, alpha, n = 8, edge = 0.01) {
if (DeltaB != 0) {
c(0.5, NA, NA) %>%
linkfun_ord3logit(alpha) %>%
linkinv_ord3logit(alpha) %>%
apply(1, list) %>%
Reduce(c, .) %>%
{ list(status = 'pc', fo = NULL, pp = .) } -> out
if (DeltaB > 0) { out$fo <- c(0, 1, 1) } else { out$fo <- c(1, 1, 0) }
} else {
cbind((0.5 + (0 : (n - 1))) / n, NA, NA) %>%
linkfun_ord3logit(alpha) %>%
linkinv_ord3logit(alpha) %>%
apply(1, list) %>%
Reduce(c, .) %>%
{ list(status = 'p', fo = NULL, pp = .) } -> out
}
out
}
context("Lp")
test_that("Euclidean function is calculated correctly", {
x <- c(1, 2, 3, 4)
y <- c(3, 4, 1, 2)
expect_equal(EuclideanDistance(x, y), 4)
})
test_that("Minkowski function is calculated correctly", {
x <- c(1, 2, 3, 4)
y <- c(3, 4, 1, 2)
expect_equal(MinkowskiDistance(x, y, 3), 32^(1/3))
})
test_that("Infinite norm function is calculated correctly", {
x <- c(1, 2, 3, 4)
y <- c(3, 4, 1, 2)
expect_equal(InfNormDistance(x, y), 2)
})
test_that("Manhattan function is calculated correctly", {
x <- c(1, 2, 3, 4)
y <- c(3, 4, 1, 2)
expect_equal(ManhattanDistance(x, y), 8)
})
test_that("Euclidean. Manhattan, Minkowski and Infinite Norm distances are
calculated correctly using LPDistance function", {
x <- c(1, 2, 3, 4)
y <- c(3, 4, 1, 2)
expect_equal(LPDistance(x, y, "euclidean"), 4)
expect_equal(LPDistance(x, y, "minkowski", p=3), 32^(1/3))
expect_equal(LPDistance(x, y, "manhattan"), 8)
expect_equal(LPDistance(x, y, "infnorm"), 2)
})
test_that("Exceptions in Lp distances", {
x <- c("a", "b", "c", "d")
y <- c(3, 4, 1, 2)
expect_equal(EuclideanDistance(x, y), NA)
expect_equal(MinkowskiDistance(x, y, p=3), NA)
expect_equal(ManhattanDistance(x, y), NA)
expect_equal(InfNormDistance(x, y), NA)
x <- replicate(3, rnorm(3))
expect_equal(EuclideanDistance(x, y), NA)
expect_equal(MinkowskiDistance(x, y, p=3), NA)
expect_equal(ManhattanDistance(x, y), NA)
expect_equal(InfNormDistance(x, y), NA)
x <- as.numeric(c())
expect_equal(EuclideanDistance(x, y), NA)
expect_equal(MinkowskiDistance(x, y, p=3), NA)
expect_equal(ManhattanDistance(x, y), NA)
expect_equal(InfNormDistance(x, y), NA)
x <- c(1, 2)
expect_equal(EuclideanDistance(x, y), NA)
expect_equal(MinkowskiDistance(x, y, p=3), NA)
expect_equal(ManhattanDistance(x, y), NA)
expect_equal(InfNormDistance(x, y), NA)
x <- c(1, 2, NA, 3)
expect_equal(EuclideanDistance(x, y), NA)
expect_equal(MinkowskiDistance(x, y, p=3), NA)
expect_equal(ManhattanDistance(x, y), NA)
expect_equal(InfNormDistance(x, y), NA)
x <- c(1, 2, 2, 3)
p <- 1.2
expect_equal(MinkowskiDistance(x, y, p), NA)
p <- -1
expect_equal(MinkowskiDistance(x, y, p), NA)
})
context(desc = "Testing clean_params")
testthat::test_that(
desc = "negative alphas",
code = {
model <- make_model('X -> Y')
model$parameters_df$priors <- rep(-1, 6)
expect_error(clean_params(model$parameters_df))
}
)
testthat::test_that(
desc = "normalized params warning",
code = {
model <- make_model('X -> Y')
model$parameters_df$param_value <- c(.70, .80, .25, .25, .25, .25)
expect_message(clean_params(model$parameters_df))
}
)
summary.RFopt <- function(object, ...) {
object <- lapply(object, function(z) z[order(names(z))])
object <- object[c(1, 1 + order(names(object[-1])))]
class(object) <- "summary.RFopt"
object
}
print.summary.RFopt <- function(x, ...) {
str(x, give.attr=FALSE, ...)
invisible(x)
}
print.RFopt <- function(x, ...) {
print.summary.RFopt(summary.RFopt(x, ...))
invisible(x)
}
summary.RFoptElmnt <- function(object, ...) {
object <- object[order(names(object))]
class(object) <- "summary.RFoptElmt"
object
}
print.summary.RFoptElmnt <- function(x, ...) {
str(x, give.attr=FALSE, ...)
invisible(x)
}
print.RFoptElmnt <- function(x, ...) {
print.summary.RFoptElmnt(summary.RFoptElmnt(x, ...))
invisible(x)
}
detach_packages <- function(pkgs) {
for (pkg in pkgs) {
pkg <- paste0("package:", pkg)
while(pkg %in% search()) detach(pkg, unload = TRUE, character.only=TRUE)
}
}
libraries <- function(pkgs, control, verbose=FALSE) {
if (length(control) > 0) {
idx <- pmatch(names(control), names(as.list(args(library))))
control <- control[idx[!is.na(idx)]]
}
for (pkg in pkgs) do.call("library", c(list(pkg), control))
if (verbose) message("libraries attached.")
}
OneTo <- function(n)
return(if (length(n) > 1) stop("invalid end of loop") else if (n < 1)
NULL else 1:n)
S <- function(x) if (length(x) > 1) "s" else ""
ARE <- function(x) if (length(x) > 1) "are" else "is"
HAVE <- function(x) if (length(x) > 1) "have" else "has"
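# sources(): searches for installable tar balls of the requested packages in the
# working directory, on CRAN and in the schlather/PACKAGES GitHub repository,
# keeps for each package the newest version that is at least as recent as the
# installed one, and returns where it was found and how it can be installed.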
sources <- function(pkgs, raw=FALSE, repos=NULL, local.only=FALSE) {
gitrepos <- "schlather/PACKAGES"
gitinfo <- "https://github.com/"
gitdownload <- "https://raw.githubusercontent.com/"
debug <- FALSE
ip <- installed.packages()[, "Version"]
ip <- ip[pkgs]
names(ip) <- pkgs
s <- if (local.only) "local" else c("local", "cran", "github")
found <- matrix(FALSE, nrow=length(pkgs), ncol=length(s))
V <- where <- matrix("", nrow=length(pkgs), ncol=length(s))
dimnames(V) <- dimnames(where) <- dimnames(found) <-list(pkgs, s)
for (frm in c("local0", s)) {
from <- frm
if (from == "local0") {
from <- "local"
url <- ""
} else if (from == "local") url <- getwd()
else {
if (from == "cran") {
type <- "source"
if (length(repos) == 0) repos <- getOption("repos")
if (debug) print(repos)
cran <- NULL
url <- try(contrib.url(repos=repos, type="source"))
if (!is(url, "try-error")) {
cran <- try(available.packages(contriburl = url)[pkgs, "Version"])
if (is(cran, "try-error") || length(cran) == 0) next
}
if (length(cran) == 0) next
} else if (from == "github") {
url <- paste0(gitinfo, gitrepos)
github <- try(grep("tar.gz", fixed=TRUE, readLines(url), value = TRUE))
if (is(github, "try-error") || length(github) == 0) next
} else stop("BUG")
}
for (i in 1:length(pkgs)) {
if (from == "cran") {
versions <- cran[i]
} else {
if (from == "local") {
if (url == "") f <- dir(pattern=paste0(pkgs[i], "_.*\\.tar\\.gz"))
else f <- dir(pattern=paste0(pkgs[i], "_.*\\.tar\\.gz"), path=url)
} else {
f <- grep(paste0(pkgs[i],"_"), github, value = TRUE)
}
if (length(f) > 0) {
pkg <- paste0(pkgs[i],"_")
versions <- sapply(strsplit(f, "\\.tar\\.gz"), function(x) {
s <- strsplit(x[1], pkg)[[1]]
s[length(s)]
})
} else versions <- NULL
}
old.version <- ip[i]
where[i, from] <- url
for (j in OneTo(length(versions))) {
cmp <- compareVersion(versions[j], ip[i])
if (cmp >= 0) {
found[i, from] <- TRUE
if (compareVersion(versions[j], old.version) > 0) {
old.version <- versions[j]
V[i, from] <- versions[j]
}
}
}
}
if (frm == "local") {
if (all(anyfound <- apply(found, 1, any))) break;
}
}
if (debug) Print(list(where=where, found=found, newer.version=V, ip=ip))
if (raw) return(list(where=where, found=found, newer.version=V, ip=ip))
failed <- !apply(found, 1, any)
if (any(failed)) {
if (all(failed)) return(list(what=NULL, failed=failed))
where <- where[!failed, , drop=FALSE]
found <- found[!failed, , drop=FALSE]
V <- V[!failed, , drop=FALSE]
ip <- ip[!failed]
pkgs <- pkgs[!failed]
}
what <- matrix("", nrow=length(ip), ncol=4)
dimnames(what) <- list(names(ip), c("how", "where", "version", "call"))
method <- colnames(V)
if (all(apply(V == "", 1, any, na.rm=TRUE))) {
found[V != ""] <- FALSE
dim(found) <- dim(where)
} else if (all(what[, "cran"] != "")) found[, method != "cran"] <- FALSE
for (i in 1:length(ip)) {
if (length(f <- which(found[i,])) == 0) next
newest <- f[1]
for (j in f[-1]) if (compareVersion(V[j], V[newest]) > 0) newest <- j
what[i, 1:3] <- c(method[newest], where[i, newest],
if (V[i, newest] == "") ip[i] else V[i, newest])
}
idx <- what[, "how"] == "local"
path <- what[idx, "where"]
add <- path != "" & substring(path, nchar(path)) != .Platform$file.sep
path[add] <- paste0(path, .Platform$file.sep)
what[idx, "call"] <- paste0(path, pkgs[idx], "_", what[idx, "version"], ".tar.gz")
idx <- what[, "how"] == "github"
what[idx, "call"] <- paste0(gitdownload, gitrepos, "/main/", pkgs[idx], "_",
what[idx, "version"], ".tar.gz")
idx <- what[, "how"] == "cran"
what[idx, "call"] <- pkgs[idx]
if (debug) Print(t(what), failed)
return(list(what=what, failed=failed))
}
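# reinstallPackages(): triggered via RFoptions(install.control = ...); determines
# which packages were built without the desired SIMD/AVX flags, optionally asks
# the user, locates suitable sources via sources(), and re-installs them from
# source with the appropriate configure arguments (CROSS, MEM_IS_ALIGNED,
# SIMD_FLAGS, CXX_FLAGS).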
reinstallPackages <- function(ic, installNrun, install.control) {
install <- installNrun$install
mem_is_aligned <- installNrun$mem_is_aligned
if (is.na(mem_is_aligned)) mem_is_aligned <- TRUE
verbose <- FALSE
force <- quiet <- CROSS <- pkgs.given <- path.given <- local.only <- FALSE
repos <- path <- pkgs <- NULL
if (ic) {
N <- names(install.control)
if ("pkg" %in% N)
stop("'pkg' is an invalid option for 'install.control'. Did you mean 'pkgs'?")
pkgs.given <- "pkgs" %in% N
path.given <- "path" %in% N
path <- install.control$path
delete <- c("repos", "path", "force", "pkgs", "CROSS")
for (arg in c(delete, "verbose", "quiet"))
if (length(install.control[[arg]]) > 0) {
assign(arg, install.control[[arg]])
if (arg %in% delete) install.control[[arg]] <- NULL
}
if (length(install.control$force) > 0 && !force) install <- "ask"
else if (length(install) > 0 && install %in% c("ask", "no installation"))
install <- "install"
if ("MEM_IS_ALIGNED" %in% N) {
mem_is_aligned <- install.control$MEM_IS_ALIGNED
force <- TRUE
}
if ("LOCAL_ONLY" %in% N)
local.only <- install.control$LOCAL_ONLY
}
if (!pkgs.given) pkgs <- .Call(C_getPackagesToBeInstalled, force)
verbose <- verbose && !quiet
if (length(pkgs) == 0) {
.Call(C_SIMDmessages, "all")
cat("See ?RFoptions for options.\n")
if (!quiet)
message(if (!pkgs.given) "No packages found to be installed.",
if (!path.given && !pkgs.given)
" Consider setting, in 'install.control', a path to a local directory.",
if (verbose) " This happens particularly if the the installation process was interrupted. Try it again in the next session or use 'RFoptions(install.control=list(force=TRUE))' for instance.")
return()
}
if (install == "ask") {
if (!quiet)
cat("The package", S(pkgs), " ", paste0("'", pkgs, "'", collapse=", "),
" ", HAVE(pkgs), " been compiled without appropriate SIMD/AVX2 flags. So, calculations can be slow. If the package",
S(pkgs), " ", ARE(pkgs),
" recompiled with the necessary flags, the calculations might be faster.\nR should be restarted after re-compiling. The argument 'install.control' might be used to run the re-compilation without asking and to pass further arguments to 'install.packages', e.g., 'RFoptions(install.control=list(verbose=TRUE))'\nTo avoid this feedback, set 'RFoptions(install=\"no\")' or 'RFoptions(install=\"install\")' before calling any other function of '",
pkgs[length(pkgs)],"'.\n\n", sep="")
omp <- .Call(C_SIMDmessages, pkgs)
}
if (!quiet) cat("Searching for tar balls... ")
s <- sources(pkgs,repos=repos, local.only=local.only)
cat("\n")
if (all(s$failed)) {
if (!quiet) cat("Not a single source found for re-installation.\n")
return()
}
tell.which <- function(s, verbose) {
cat("The following package", S(!s$failed), " will be re-installed:\n",
sep="",
paste0(if (!verbose) "\t",
rownames(s$what), "_", s$what[, "version"],
" from ", s$what[, "how"],
if (verbose) ", ", if (verbose) s$what[, "where"], "\n")
)
if (any(s$failed)) {
cat("No recent tar ball found for ",
paste0("'", names(s$failed)[s$failed], "'", collapse=", ", sep=""),
". ", sep="")
if (verbose)
cat("Consider calling\n\t'RFoptions(install.control=list(path=\"<local directory>\",\n\t\t\tverbose=TRUE))'")
cat("\n")
}
}
neon <- .Call(C_isNEONavailable)
arm32 <- !is.na(neon)
x86_64 <- .Call(C_isX86_64)
CROSS_DEFAULT <- if (arm32) "arm32" else if (x86_64) "avx" else "FALSE"
if ((asked = install == "ask")) {
if (!quiet) tell.which(s, verbose)
repeat {
txt <- paste0("Shall '", rownames(s$what)[1],
"' and all further packages based on 'RandomFieldsUtils' be recompiled (Y/n/h/s)erver/<args>) ? ")
install.control <- readline(txt)
if (install.control %in% c("h", "H")) {
cat("\nHelp info (see ?RFoptions Details..InstallNrun..install for details)\n
====================================================\n")
cat("Y : installation \n")
cat("n : interruption.\n")
cat("s : CROSS=\"", CROSS_DEFAULT, "\".\n")
cat("<args>: arguments for 'install.packages',\n e.g. 'lib = \"~\", quite=TRUE'\n")
cat("\n")
} else break
}
install <-
if (install.control %in% c("n", "N")) "no installation" else "install"
path <- NULL
if (install.control %in% c("s", "S")) CROSS <- CROSS_DEFAULT
if (nchar(install.control) <= 3) install.control <-""
if (verbose) {
if (install == "no installation") .Call(C_SIMDmessages, NULL)
else {
S <- "\t*************************************************\n"
cat("\n", S, "\t*** Do not forget to restart R. ***\n",S)
sleep.milli(1500)
}
}
} else {
omp <- .Call(C_SIMDmessages, "OMP")
if (!quiet) tell.which(s, verbose)
}
if (install != "no installation") {
if (is.character(install.control))
install.control <- eval(parse(text=paste("list(", install.control, ")")))
SIMD_FLAGS <- CXX_FLAGS <- args <- ""
if (length(install.control$configue.args) > 0) {
args <- install.control$configue.args
install.control$configue.args <- NULL
}
if (length(install.control$CXX_FLAGS) > 0) {
CXX_FLAGS <- install.control$CXX_FLAGS
install.control$CXX_FLAGS <- omp <- NULL
}
if (length(install.control$SIMD_FLAGS) > 0) {
SIMD_FLAGS <- install.control$SIMD_FLAGS
install.control$SIMD_FLAGS <- NULL
}
if (length(install.control$USE_GPU) > 0) {
usegpu <- if (install.control$USE_GPU) " USE_GPU=TRUE" else ""
install.control$USE_GPU <- NULL
} else
usegpu <- if (.Call(C_isGPUavailable)) " USE_GPU=try" else ""
idx <- pmatch(names(install.control),names(as.list(args(install.packages))))
install.control <- install.control[which(!is.na(idx))]
args <- paste0(args,
usegpu,
" USERASKED=", asked,
" CROSS=", CROSS,
" MEM_IS_ALIGNED=", mem_is_aligned,
if (length(SIMD_FLAGS) > 0)
paste0(" SIMD_FLAGS='", SIMD_FLAGS, "'"),
if (length(CXX_FLAGS) + length(omp) > 0)
paste0(" CXX_FLAGS='", CXX_FLAGS, " ", omp, "'")
)
if (verbose) Print(install.control, args)
how <- s$what[, "how"]
pkgs <- s$what[, "call"]
for (p in 1:nrow(s$what)) {
z <- Try(do.call("install.packages",
c(list(pkgs=pkgs[p], type="source",
repos = if (how[p] == "cran")
s$what[p, "where"] else NULL),
install.control,
configure.args=args)))
if (is(z, "try-error")) print(z)
}
}
cat("\n\n")
}
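# RFoptions(): thin wrapper around the C-level options handler; when an
# install.control argument is supplied (or a pending re-installation is flagged),
# the re-installation machinery above runs before the options are returned, and
# the result is classed for the print/summary methods defined above.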
RFoptions <- function(..., no.class=FALSE, install.control=NULL) {
opt <- .External(C_RFoptions, ...)
ic <- hasArg("install.control")
if (ic || (length(opt) > 0 && is.list(opt) && is.list(opt$installNrun) &&
opt$installNrun$installPackages && interactive())) {
reinstallPackages(ic=ic, installNrun=opt$installNrun,
install.control=install.control)
if (ic) return(invisible(NULL))
}
if (length(opt) == 0 || no.class) return(invisible(opt))
if (is.list(opt[[1]])) {
opt <- lapply(opt,
function(x) {
class(x) <- "RFoptElmnt"
x
})
class(opt) <- "RFopt"
} else class(opt) <- "RFoptElmnt"
opt
}
"_PACKAGE"
vmr_env <- new.env(parent = emptyenv())
vmr_env$verbose_mode <- 1
vmr_env$vagrant_bin <- "vagrant" |
expected <- eval(parse(text="c(\" 100\", \"-1e-13\", \" Inf\", \"-Inf\", \" NaN\", \"3.14\", \" NA\")"));
test(id=0, code={
argv <- eval(parse(text="list(structure(c(100, -1e-13, Inf, -Inf, NaN, 3.14159265358979, NA), .Names = c(\" 100\", \"-1e-13\", \" Inf\", \"-Inf\", \" NaN\", \"3.14\", \" NA\")))"));
do.call(`names`, argv);
}, o=expected); |
numerical_deriv <- function(par, f, ..., delta = 1e-5, gradient = TRUE, type = 'Richardson'){
forward_difference <- function(par, f, delta, ...){
dots <- list(...)
np <- length(par)
g <- numeric(np)
if(is.null(dots$ObJeCtIvE)) fx <- f(par, ...) else fx <- dots$ObJeCtIvE
for(i in seq_len(np)){
p <- par
p[i] <- p[i] + delta
g[i] <- (f(p, ...) - fx) / delta
}
g
}
forward_difference2 <- function(par, f, delta, ...){
dots <- list(...)
np <- length(par)
hess <- matrix(0, np, np)
if(is.null(dots$ObJeCtIvE)) fx <- f(par, ...) else fx <- dots$ObJeCtIvE
fx1 <- numeric(np)
for(i in seq_len(np)){
tmp <- par
tmp[i] <- tmp[i] + delta
fx1[i] <- f(tmp, ...)
}
for(i in seq_len(np)){
for(j in i:np){
fx1x2 <- par
fx1x2[i] <- fx1x2[i] + delta
fx1x2[j] <- fx1x2[j] + delta
hess[i,j] <- hess[j, i] <- (f(fx1x2, ...) - fx1[i] - fx1[j] + fx) / (delta^2)
}
}
(hess + t(hess))/2
}
central_difference <- function(par, f, delta, ...){
np <- length(par)
g <- numeric(np)
for(i in seq_len(np)){
p1 <- p2 <- par
p1[i] <- p1[i] + delta
p2[i] <- p2[i] - delta
g[i] <- (f(p1, ...) - f(p2, ...)) / (2 * delta)
}
g
}
central_difference2 <- function(par, f, delta, ...){
np <- length(par)
hess <- matrix(0, np, np)
fx <- f(par, ...)
for(i in seq_len(np)){
for(j in i:np){
if(i == j){
p <- par
p[i] <- p[i] + 2 * delta; s1 <- f(p, ...)
p[i] <- p[i] - 4 * delta; s3 <- f(p, ...)
hess[i, i] <- (s1 - 2*fx + s3) / (4 * delta^2)
} else {
p <- par
p[i] <- p[i] + delta; p[j] <- p[j] + delta; s1 <- f(p, ...)
p[j] <- p[j] - 2*delta; s2 <- f(p, ...)
p[i] <- p[i] - 2*delta; s4 <- f(p, ...)
p[j] <- p[j] + 2*delta; s3 <- f(p, ...)
hess[i,j] <- hess[j,i] <- (s1 - s2 - s3 + s4) / (4 * delta^2)
}
}
}
(hess + t(hess))/2
}
richardson <- function(par, f, delta, r = 4L, ...){
R0 <- R1 <- matrix(0, length(par), r)
R0[, 1L] <- central_difference(par=par, f=f, delta=delta, ...)
for(i in 1L:(r-1L)){
delta <- delta/2
R1[ ,1L] <- central_difference(par=par, f=f, delta=delta, ...)
for (j in 1L:i)
R1[ ,j + 1] <- (4^j * R1[ , j] - R0[, j]) / (4^j - 1)
R0 <- R1
}
R1[ , i+1]
}
richardson2 <- function(par, f, delta, r = 4L, ...){
R0 <- R1 <- matrix(0, length(par)^2, r)
R0[, 1L] <- as.vector(central_difference2(par=par, f=f, delta=delta, ...))
for(i in 1L:(r-1L)){
delta <- delta/2
R1[ ,1L] <- as.vector(central_difference2(par=par, f=f, delta=delta, ...))
for (j in 1L:i)
R1[ ,j + 1] <- (4^j * R1[ , j] - R0[, j]) / (4^j - 1)
R0 <- R1
}
hess <- matrix(R1[ , i+1], length(par), length(par))
(hess + t(hess))/2
}
if(!length(par)){
if(gradient) return(numeric())
else return(matrix(numeric()))
}
if(type == 'central'){
ret <- if(gradient) central_difference(par=par, f=f, delta=delta, ...)
else central_difference2(par=par, f=f, delta=delta, ...)
} else if(type == 'forward'){
ret <- if(gradient) forward_difference(par=par, f=f, delta=delta, ...)
else forward_difference2(par=par, f=f, delta=delta, ...)
} else if(type == 'Richardson'){
ret <- if(gradient) richardson(par=par, f=f, delta=delta*10, ...)
else richardson2(par=par, f=f, delta=delta*1000, ...)
}
ret
} |
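## A minimal usage sketch of numerical_deriv(), assuming a simple quadratic
## objective: for f(x) = sum(x^2) the exact gradient is 2*x and the exact
## Hessian is 2*I, which the Richardson-extrapolated approximations recover.
if (FALSE) {
  f_quad <- function(p) sum(p^2)
  numerical_deriv(c(1, -2), f_quad)                    # approx c(2, -4)
  numerical_deriv(c(1, -2), f_quad, gradient = FALSE)  # approx 2 * diag(2)
}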
mean.Bolstad = function(x, ...){
if(any(grepl("mean", names(x))))
return(x$mean)
xVals = x$param.x
fx = approxfun(xVals, xVals * x$posterior)
return(integrate(fx, min(xVals), max(xVals))$value)
} |
getnames<-function(dat,st=3,sep=" "){
fa<-dim(dat)[2]-1
a<-substr(dat[[1]],1,st)
for(i in 2:fa){
a<-paste(a,substr(dat[[i]],1,st),sep=sep)
}
y<-as.matrix(dat[[fa+1]])
rownames(y)<-a
y}
direct.sum <- function(...){
nmat <- nargs()
allmat<- list(...)
C<-allmat[[1]]
for(i in 2:nmat) {
B<-allmat[[i]]
A<-C
C <- rbind(cbind(A, matrix(0, nrow = nrow(A), ncol = ncol(B))),
cbind(matrix(0, nrow = nrow(B), ncol = ncol(A)), B))}
return(C)
}
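## direct.sum() stacks its matrix arguments into a block-diagonal matrix;
## a quick sketch with a 2x2 identity and a 1x2 row of ones:
if (FALSE) {
  direct.sum(diag(2), matrix(1, 1, 2))
  # 3 x 4 matrix: diag(2) in the top-left block, the row of ones in the bottom-right
}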
cocamod<-function(model,Z){
nmarg<-length(model)-3
strata<-c(model$strata)
levs<-c(model$livelli)
if(is.null(model$cocacontr)){model$cocacontr<-as.list(rep(0,length(levs)))}
C<-list()
M<-list()
for (mi in 1:nmarg){
marginal<-model[[mi]]
margindex<-c(marginal$marg)
margset<-marginal$int
types<-c(marginal$types)
nint<-length(margset)
CM<-list()
MM<-list()
for (ii in 1:nint){
margint<-c(margset[[ii]])
matrici<-cocamat(margint,margindex,types,levs,model$cocacontr)
CM[[ii]]<-matrici$CMAT
MM[[ii]]<-matrici$MMAT
}
M[[mi]]<-MM[[1]]
C[[mi]]<-CM[[1]]
if(nint>1){
for(is in 2:nint){
M[[mi]]<-rbind(M[[mi]],MM[[is]])
C[[mi]]<-direct.sum(C[[mi]],CM[[is]])
}
}
remove(list=c("MM","CM"))
}
MG<-M[[1]]
CG<-C[[1]]
if(nmarg>1){
for(i in 2:nmarg){
MG<-rbind(MG,M[[i]])
CG<-direct.sum(CG,C[[i]])
}
}
I<-which(Z==1,arr.ind=TRUE)
IM<-diag(1,dim(Z)[1])
PZZ<-IM[I[,1],]
mmat<-kronecker(diag(1,strata),MG)%*%PZZ
list(CMAT=kronecker(diag(1,strata),CG),MMAT=mmat)
}
cocamat<-function(margint,margindex,types,levs,rmat)
{
for (i in 1:length(levs)){
if(types[i]=="b"){
a<-gl(levs[i],1)
a<-t(contr.treatment(a))
rownames(a)<-NULL
colnames(a)<-NULL
m<-matrix(0,levs[i]-1,levs[i])
m[1:(levs[i]-1),1]<-1
m<-rbind(m,a)
rmat[[i]]<-m
}
}
m<-list()
c<-list()
for(i in 1:length(levs)){
c[[i]]<-1
if (types[i]=="marg" ){ m[[i]]<-matrix(1,1,levs[i])}
else {
if ((types[i]=="r")|(types[i]=="b") ){m[[i]]<-matrix(c(rmat[[i]][1,]),1,levs[i],byrow=TRUE)}
else{m[[i]]<-matrix(c(1,rep(0,levs[i]-1)),1,levs[i])}
}
}
for(i in margint){
c[[i]]<-cbind(-diag(1,levs[i]-1),diag(1,levs[i]-1))
mid<-diag(1,levs[i])
mupper<-mid
mlower<-mid
mupper[upper.tri(mupper,diag=TRUE)]<-1
mlower[lower.tri(mlower,diag=TRUE)]<-1
if (types[i]=="l") {m[[i]]<- t(cbind(mid[,-levs[i]],mid[,-1]))}
if (types[i]=="c") {m[[i]] <-rbind(t(diag(1,levs[i])[,-levs[i]]),t(mlower[,-1]))}
if (types[i]=="rc") {m[[i]]<- rbind(t(mupper[,-levs[i]]),t(diag(1,levs[i])[,-1]))}
if (types[i]=="g")
{m[[i]]<- rbind(t(mupper[,-levs[i]]),t(mlower[,-1])) }
if ((types[i]=="r")|(types[i]=="b")) {m[[i]]<-rmat[[i]]}
}
M<-m[[1]]
C<-c[[1]]
if(length(levs)>1){
for(i in 2:length(levs)){
M<-kronecker(m[[i]],M)
C<-kronecker(c[[i]],C)
}
}
matrici<-list(CMAT=C,MMAT=M)
}
LDMatrix<-function(level,formula,names=NULL){
if (is.null(names)) {names<-LETTERS[1:length(level)]
}
clev<-cumprod(level)
totlev<-prod(level)
C<-gl(level[1],1,totlev)
for (i in 2:length(level)) {
c<-gl(level[i],clev[i-1],totlev)
C<-cbind(C,c)
}
colnames(C)<-names
C<-as.data.frame(C)
C<-data.frame(lapply(C,as.factor))
C<-model.matrix(as.formula(formula),C)
C<-C[,-1]
matrici<-list(IMAT=solve(t(C)%*%C)%*%t(C),DMAT=C)
}
cocadise<-function(Z=NULL,names=NULL,formula=NULL,lev)
{
if (is.null(formula))
{
ncz<-dim(Z)[2]
zi<-Z[,1]
zi<-zi[zi>0]
DD<-diag(c(zi))[,2:length(zi)]
zi<-zi[-1]
DI<-cbind(-zi,diag(c(zi)))
if( ncz>1){
for(i in 2:ncz){
zi<-Z[,i]
zi<-zi[zi>0]
DMATI<-diag(c(zi))[,2:length(zi)]
zi<-zi[-1]
CMATI<-cbind(-zi,diag(c(zi)))
DD<-direct.sum(DD,DMATI)
DI<-direct.sum(DI,CMATI)
}
}
matrici<-list(IMAT=DI,DMAT=DD)
}
else
LDMatrix(lev,formula,names)
}
pop <- function(strata,ncell) {
kronecker(diag(strata),matrix(1,ncell,1))
}
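## pop() builds the default strata matrix Z used by hmmm.model(): one column
## per stratum, each holding a block of ncell ones.
if (FALSE) {
  pop(strata = 2, ncell = 3)
  # 6 x 2 matrix: rows 1-3 are c(1, 0), rows 4-6 are c(0, 1)
}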
make.h.fct<-function(models,E=TRUE)
{
if(all(E)){
function(m){models$matrici$CMAT%*%log(models$matrici$MMAT%*%m)}
}
else{
function(m){models$matrici$E%*%models$matrici$CMAT%*%log(models$matrici$MMAT%*%m)}
}
}
make.d.fct<-function(dismod,D=TRUE)
{
if(is.null(D)){
function(m){dismod$matrici$CMAT%*%log(dismod$matrici$MMAT%*%m)}
}
else{
function(m){dismod$matrici$D%*%
dismod$matrici$CMAT%*%log(dismod$matrici$MMAT%*%m)}
}
}
make.derht.fct<-function(models,E=TRUE)
{
if(all(E)){
function(m){
t(models$matrici$CMAT%*%diag(1/c(models$matrici$MMAT%*%m))%*%models$matrici$MMAT)}
}
else{
function(m){t(models$matrici$E%*%models$matrici$CMAT%*%diag(1/c(models$matrici$MMAT%*%m))%*%models$matrici$MMAT)}
}
}
make.derdt.fct<-function(models,D=TRUE)
{
if(is.null(D)){
function(m){
t(models$matrici$CMAT%*%diag(1/c(models$matrici$MMAT%*%m))
%*%models$matrici$MMAT)}
}
else{
function(m){t(models$matrici$D%*%
models$matrici$CMAT%*%diag(1/c(models$matrici$MMAT%*%m))%*%models$matrici$MMAT)}
}
}
make.L.fct<-function(models,E=TRUE)
{
if(E==TRUE){
function(m){models$matrici$CMAT%*%log(models$matrici$MMAT%*%m)}
}
else{
function(m){t(create.U(models$matrici$X))%*%models$matrici$CMAT%*%log(models$matrici$MMAT%*%m)}
}
}
make.derLt.fct<-function(models,E=TRUE)
{
if(E==TRUE){
function(m){
t(models$matrici$CMAT%*%diag(1/c(models$matrici$MMAT%*%m))%*%models$matrici$MMAT)}
}
else{
function(m){t(t(create.U(models$matrici$X))%*%models$matrici$CMAT%*%
diag(1/c(models$matrici$MMAT%*%m))%*%models$matrici$MMAT)}
}
}
chibar<-function(m,Z,ZF,d.fct=0,h.fct=0,test0=0,test1=0,repli=0,kudo=TRUE,TESTAB=TRUE,alpha=c(0.02,0.03,0),pesi=NULL,
derdt.fct=0,derht.fct=0,formula=NULL,names=NULL,lev){
Zlist<-cocadise(Z,formula=formula,lev=lev,names=names)
p<-m*c(1/Z%*%t(Z)%*%m)
m<-p
Dm <- diag(c(m))
Hmat<- t(Zlist$DMAT)%*%( diag(c(m))-((ZF*c(m))%*%t(ZF*c(p))))%*%Zlist$DMAT
if (is.function(derdt.fct)==F)
{
DH <- num.deriv.fct(d.fct,m)
}
else {
DH <- derdt.fct(m)
}
DH<- t(Zlist$DMAT)%*%Dm%*%DH
D<-t(DH)
if (is.function(h.fct)==TRUE)
{
if (is.function(derht.fct)==F) {
H <- num.deriv.fct(h.fct,m)
}
else {
H <- derht.fct(m)
}
H<-t(Zlist$DMAT)%*%Dm%*%H
E<-t(H)
X<-create.U(t(E))
D<-D%*%X
}
else{X<-diag(1,dim(D)[2])}
Hmat<-t(X)%*%Hmat%*%X
l<-dim(Hmat)[1]
mi<-solve(chol(Hmat))
DD<-D%*%mi
ur<-qr(D)$rank
qq<-qq0<-w<-matrix(0,ur+1,1)
if(is.null(pesi)){
if(!kudo){
w<-pesi.sim(l,ur,DD,repli)}
else{w<-kudo.classic(DD,diag(1,l))}
}
else { w=pesi}
gdl0<-matrix(0,ur+1,1)
if( (test0 >0)){
q1<-matrix(0,ur+1,1)}
else{
q1<-matrix(1,ur+1,1)}
if( (test0 >0)){
for(i in 1:(ur+1)){
gdl0[i]<-ur+1-i
if ((w[i] >0)&(gdl0[i]==0)){
q1[i]=0}
if ((w[i] >0)&(gdl0[i]>0)){
q1[i]<-1-pchisq(test0,gdl0[i])}
}
}
p0<-t(w)%*%q1
gdl1<-matrix(0,ur+1,1)
if( (test1 >0)){
q1<-matrix(0,ur+1,1)}
else{
q1<-matrix(1,ur+1,1)}
if( (test1 >0)){
for(i in 1:(ur+1)){
gdl1[i]<-i-1
if ((w[i] >0)&(gdl1[i]==0)){
q1[i]=0}
if ((w[i] >0)&(gdl1[i]>0)){
q1[i]<-1-pchisq(test1,gdl1[i])}
}
}
p1<-t(w)%*%q1
dec=NULL
if(TESTAB)
{
dec<-hmmm.testAB(testA=test0,testB=test1,alpha=alpha,printflag=FALSE,pesi=w)}
lista<-list(testA=test0,pvalA=p0,testB=test1,pvalB=p1,pesi=cbind(w,gdl0,gdl1),TESTAB.dec=dec)
class(lista)<-"hmmmchibar"
lista
}
pesi.sim<-function(l,ur,DD,repli)
{
Z<-matrix(rnorm(l*repli,0,1),l,repli)
qq<-qq0<-matrix(0,ur+1,1)
pesiw<-function(z){
pw<-matrix(0,ur+1,1)
ddl<-NULL
ddl<-
solve.QP(Dmat=diag(1,l),dvec= z, Amat=t(DD), meq=0, factorized=FALSE)
if (is.null(ddl)){ pw<-matrix(0,ur+1,1)}
else{
d<-NULL
d<-qr(DD[ddl$iact,])$rank
pw[d+1]<-pw[d+1]+1
if (all(!is.na(pw))){
qq[d+1]<-qq[d+1]+t(ddl$solution-z)%*%(ddl$solution-z)
qq0[d+1]<-qq0[d+1]+ur-t(ddl$solution-z)%*%(ddl$solution-z)}
rm(ddl)
if (any(is.na(pw))){
pw<-matrix(0,ur+1,1)
}
}
pw
}
w<-apply(Z,MARGIN=2,FUN=pesiw)
w<-apply(w,1,sum)
qq0<-qq0/w
qq<-qq/w
w<-w/sum(w)
}
pw.set<-function(n){
size<-matrix(0:n)
m<-matrix(0,n,1)
for(i in 1:n){
m<-cbind(m,combn(c(1:n), i, tabulate, nbins=n))
}
m}
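## pw.set(n) enumerates the subsets of {1, ..., n} as 0/1 indicator columns
## (the leading all-zero column is the empty set); kudo.classic() loops over
## these subsets when computing the chi-bar-squared weights.
if (FALSE) {
  pw.set(2)
  # columns: (0,0), (1,0), (0,1), (1,1)
}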
kudo.classic<-function(Dis,Var)
{
Omega<-Dis%*%Var%*%t(Dis)
k<-dim(Omega)[1]
A<-pw.set(k)
a<-colSums(A)
w<-matrix(0,k+1,1)
Mvncdf1<-pmvnorm(lower=rep(0,k),upper=rep(Inf,k),sigma=solve(Omega))
Mvncdf0<-pmvnorm(lower=rep(0,k),upper=rep(Inf,k),sigma=Omega)
if(k==2){
w[1]<- Mvncdf0
w[2]<-1- Mvncdf0 -Mvncdf1
w[3]<-Mvncdf1
}
else{
if(floor((k+1)/2)==(k+1)/2){
J<-(0:k)[-c((k+1)/2,(k+3)/2)]}
else{
J<-(0:k)[-c((k+2)/2,(k+4)/2)]}
oe<-setdiff(0:k,J)
for(j in J){
hj<-which(a==j)
sj<-length(hj)
Aj=matrix(A[,hj],k,sj)
w[j+1]<-0
for (h in 1:sj){
aj<-Aj[,h]
r1<-length(which(aj==1))
r0<-length(which(aj==0))
mvncdf1<-Mvncdf1
mvncdf0<-Mvncdf0
if((r0>0)&(r1>0)){
V<-solve(Omega[which(aj==1),which(aj==1)])
mvncdf1<-pmvnorm(lower=rep(0,r1),upper=rep(Inf,r1),sigma=V)
U<-Omega[which(aj==0),which(aj==0)]-Omega[which(aj==0),which(aj==1)]%*%V%*%Omega[which(aj==1),which(aj==0)]
mvncdf0<-pmvnorm(lower=rep(0,r0),upper=rep(Inf,r0),sigma=U)}
w[j+1]=w[j+1]+(r0>0)*(r1>0)*mvncdf0*mvncdf1+(r0>0)*(r1==0)*mvncdf0+(r0==0)*(r1>0)*mvncdf1
}
}
odd<-sum(w[seq(2,k+1,2)])
even<-sum(w[seq(1,k+1,2)])
oe<-oe+1
w[oe[1]]<-0.5-odd*(floor(oe[1]/2)==oe[1]/2)-even*(!(floor(oe[1]/2)==oe[1]/2))
w[oe[2]]<-0.5-odd*(floor(oe[2]/2)==oe[2]/2)-even*(!(floor(oe[2]/2)==oe[2]/2))
}
w
}
Fchibar2<-function(x=Inf,y=Inf,pesi){
ur<-dim(pesi)[1]-1
test0<-x
test1<-y
gdl0<-matrix(0,ur+1,1)
q0<-matrix(0,ur+1,1)
for(i in 1:(ur+1)){
gdl0[i]<-ur+1-i
if ((pesi[,1][i] >0)&(gdl0[i]==0)){
q0[i]=1}
if ((pesi[,1][i] >0)&(gdl0[i]>0)){
q0[i]<-pchisq(test0,gdl0[i])}
}
p0<-t(pesi[,1])%*%q0
gdl1<-matrix(0,ur+1,1)
q1<-matrix(0,ur+1,1)
for(i in 1:(ur+1)){
gdl1[i]<-i-1
if ((pesi[,1][i] >0)&(gdl1[i]==0)){
q1[i]=1}
if ((pesi[,1][i] >0)&(gdl1[i]>0)){
q1[i]<-pchisq(test1,gdl1[i])}
}
p1<-t(pesi[,1])%*%q1
p01<-t((pesi[,1]))%*%(q1*q0)
c(p0,p1,p01)
}
qchibar<-function(pesi,alphaA=0.05,alphaB=0.05){
if(!is.matrix(pesi)){pesi<-matrix(pesi,ncol=1)}
F1<-function(x){Fchibar2(y=x,pesi=pesi)[2]-1+alphaB}
F0<-function(x){Fchibar2(x=x,pesi=pesi)[1]-1+alphaA}
qA<-uniroot(F0,c(0,999999))$root
qB<-uniroot(F1,c(0,999999))$root
q<-c(qA,qB)
names(q)<-c("qA","qB")
q
}
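## qchibar() inverts the chi-bar-squared distribution implied by the mixing
## weights in 'pesi'. With the textbook 50/50 mixture of a point mass at 0 and
## a chi-squared with 1 df (a single inequality constraint), the 5% critical
## value equals qchisq(0.9, 1):
if (FALSE) {
  qchibar(c(0.5, 0.5))   # qA and qB both approx 2.706
}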
testDF<-function(pesi,alpha){
if(!is.matrix(pesi)){pesi<-matrix(pesi,ncol=1)}
y1<-Inf
if(alpha[2]>alpha[3]){
F1<-function(x){Fchibar2(y=x,pesi=pesi)[2]-1+alpha[2]-alpha[3]}
y1<-uniroot(F1,c(0,qchisq(1-alpha[2]+alpha[3],dim(pesi)[1]-1)
))$root}
Fx<-function(x){Fchibar2(x=x,y=y1,pesi=pesi)[3]-1+alpha[2]+alpha[1]}
xs<-uniroot(Fx,c(0,999999))$root
y0<-y1
if((alpha[3]>0)){
F0<-function(x){
(Fchibar2(y=x,pesi=pesi)[2]-Fchibar2(x=xs,y=x,pesi=pesi)[3])-alpha[1]}
max<-min(y1,999999)
y0<-uniroot(F0,c(0,max))$root
}
cv<-c(y1,y0,xs)
names(cv)<-c("y1","y0","xs")
cv
}
hmmm.testAB<-function(testA=NULL,testB=NULL,P,alpha=c(0.025,0.025,0.001),printflag=FALSE,pesi=NULL,cv=NULL){
if(is.null(pesi)){pesi<-P$pesi}
if(is.null(testA)|is.null(testB)){
testA<-P$testA
testB<-P$testB}
if(is.null(cv)){
cv<-testDF(pesi,alpha)}
if(printflag){
cat("\n")
cat("\n DF PROCEDURE")
cat("\n")
cat("\n")
cat("\n classification errors:")
cat("\n")
cat( " alpha1= ",alpha[1]," alpha2= ",alpha[2]," alpha12= ",alpha[3])
cat("\n")
cat("\n DF critical values:")
cat( " y1= ",cv[1]," y0= ",cv[2]," xs= ",cv[3])
cat("\n")
cat("\n DF DECISION")
dec<-NULL
if((testA< cv[3])&( testB< cv[1])){ cat("\n Maintain the null model")
dec=0}
if((testA> cv[3])&( testB< cv[2])) {
cat("\n Reject the null model versus the inequality model")
dec=1}
if((testA> cv[3])&( testB> cv[2])|( testB> cv[1])){
cat("\n Reject the null model versus the unrestricted model")
cat("\n")
dec=2}
cat("\n")
}
if((testA< cv[3])&( testB< cv[1])){
dec=0}
if((testA> cv[3])&( testB< cv[2])) {
dec=1}
if((testA> cv[3])&( testB> cv[2])|( testB> cv[1])){
dec=2
}
list(dec=dec,testA=testA,testB=testB,cv=cv,alpha=alpha)
}
print.hmmmchibar<-function(x,...){chibar.summary(x)}
summary.hmmmchibar<-function(object,plotflag=1,step=0.01,lsup=0,...){chibar.summary(object,plotflag=plotflag,step=step,lsup=lsup)}
chibar.summary<-function(P,plotflag=0,step=0.01,lsup=0){
results<-
matrix(c(P$testA,P$testB,P$pvalA,P$pvalB),2,2)
rownames(results)<-c("testA","testB")
colnames(results)<-c("test","pvalue")
colnames(P$pesi)<-c("weights","df A","df B")
rownames(P$pesi)<-as.character(P$pesi[,3])
cat("\n CHIBAR P VALUES\n")
cat("\n")
print(results)
cat("\n")
if(!is.null(P$TESTAB.dec))
{
cat("\n")
cat("\n TESTAB PROCEDURE")
cat("\n")
cat("\n")
cat("\n classification errors:")
cat("\n")
cat( " alpha1 = ",P$TESTAB.dec$alpha[1]," alpha2 = ",P$TESTAB.dec$alpha[2]," alpha12 = ",P$TESTAB.dec$alpha[3])
cat("\n")
cat("\n critical values:")
cv<-P$TESTAB.dec$cv
testA<-P$testA
testB<-P$testB
cat( " y2 = ", cv[1]," y12 = ", cv[2], "y1 = ", cv[3])
cat("\n")
cat("\n TESTAB DECISION")
dec<-NULL
if((testA < cv[3])&( testB < cv[1])){ cat("\n Maintain the null model")
dec=0}
if((testA > cv[3])&( testB < cv[2])) {
cat("\n Reject the null model for the inequality model")
dec=1}
if((testA > cv[3])&( testB > cv[2])|( testB > cv[1])){
cat("\n Reject the null model for the unrestricted model")
cat("\n")
dec=2}
cat("\n")
}
if(plotflag>0){
cat("\n weights of the chibar-distribution\n")
cat("\n")
print(P$pesi[,1:3])}
if(plotflag>1){
ur<-length(P$pesi[,1])-1
if(lsup==0){lsup=2*ur}
Z<-seq(from=0,to=lsup,by=step)
Z<-matrix(Z,length(Z),1)
Fchibar<-function(x){
test0<-x
test1<-x
gdl0<-matrix(0,ur+1,1)
if( (test0 >0)){
q1<-matrix(0,ur+1,1)}
else{
q1<-matrix(1,ur+1,1)}
if( (test0 >0)){
for(i in 1:(ur+1)){
gdl0[i]<-ur+1-i
if ((P$pesi[,1][i] >0)&(gdl0[i]==0)){
q1[i]=0}
if ((P$pesi[,1][i] >0)&(gdl0[i]>0)){
q1[i]<-1-pchisq(test0,gdl0[i])}
}
}
p0<-t(P$pesi[,1])%*%q1
gdl1<-matrix(0,ur+1,1)
if( (test1 >0)){
q1<-matrix(0,ur+1,1)}
else{
q1<-matrix(1,ur+1,1)}
if( (test1 >0)){
for(i in 1:(ur+1)){
gdl1[i]<-i-1
if ((P$pesi[,1][i] >0)&(gdl1[i]==0)){
q1[i]=0}
if ((P$pesi[,1][i] >0)&(gdl1[i]>0)){
q1[i]<-1-pchisq(test1,gdl1[i])}
}
}
p1<-t(P$pesi[,1])%*%q1
c(p0,p1)
}
F<-apply(Z,1,FUN=Fchibar)
F<-t(F)
if(plotflag>2){
matplot(Z,F,type="l",ylim=c(0,1))
abline(P$pvalA,0,col="black")
abline(P$pvalB,0,col="red")
if(P$testA<lsup){
abline(v=P$testA,col="black")}
if(P$testB<lsup){
abline(v=P$testB,col="red")}
}
F<-cbind(Z,F)
colnames(F)<-c("x","FA","FB")
F
}
}
bcf.interactions<-function(marglist)
{
nmarg<-length(marglist)
cumintset<-NULL
for(i in 1:nmarg){
if (length(marglist[[i]]$marg)==1){
allintset<-marglist[[i]]$marg}
else{
allintset<-bar.col.for(marglist[[i]]$marg)}
marglist[[i]]$int<-setdiff(allintset,cumintset)
cumintset<-union(cumintset,marglist[[i]]$int)
}
marglist
}
bar.col.for<-function(marg)
{
p<-length(marg)
cifre<-p
bincol<-function(n){
if (n<=1) v=n
else if (n%%2==0) v=c(Recall(n/2),0)
else v=c(Recall((n-1)/2),1)
}
b<-matrix(0,1,p)
s<-2^p-1
for (i in 1:s){
bb<-bincol(i)
n=length(bb)
if (n<p) bb<-c(rep(0,cifre-n),bb)
b<-rbind(b,matrix(bb,1,p))
}
b<-b[-1,]
b<-b[order(rowSums(b),b%*%matrix(1:p,p,1)),]
int<-list()
for(ii in c(1:dim(b)[1])){
int[[ii]]<-marg[which(b[ii,]==1)]}
int
}
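## bar.col.for() returns every non-empty subset of a set of variable indices,
## ordered by interaction order (main effects first, then pairs, and so on);
## this is how the saturated set of interactions is generated.
if (FALSE) {
  bar.col.for(1:2)   # list(1, 2, c(1, 2))
  bar.col.for(1:3)   # 7 subsets: 1, 2, 3, the three pairs, then c(1, 2, 3)
}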
marg.list<-function(all.m,sep="-",mflag="marg") {
n <- length(all.m)
marglist<-list()
for(i in 1:n) {
ca<-all.m[i]
ca<-unlist(strsplit(ca,split=sep))
ca[ca==mflag]<-"marg"
ci<-which(ca!="marg")
marglist[[i]]<-list(marg=ci,types=ca)
}
marglist
}
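## marg.list() converts compact text descriptions of the marginal sets into
## the list format expected by hmmm.model(): each string defines one marginal,
## with "marg" marking marginalised variables and the other codes giving the
## logit type used for each variable.
if (FALSE) {
  marg.list(c("marg-l-l", "l-marg-l"))
  # [[1]]: marg = c(2, 3), types = c("marg", "l", "l")
  # [[2]]: marg = c(1, 3), types = c("l", "marg", "l")
}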
loglin.model<-function(lev,int=NULL,strata=1,dismarg=0,type="b",D=TRUE,
c.gen=TRUE,printflag=FALSE,names=NULL,formula=NULL){
if(!is.null(formula)&is.null(int)&printflag==TRUE){
a<-terms(formula)
m<-attr(a,"factors")
x<-dimnames(m)[1]
x<-unlist(x)
myfun<-function(m){
which(names %in% x[which(m==1)]
)}
int<-apply(m,2,myfun)
}
if(is.null(formula)||(!is.null(int)&printflag==TRUE)){
cocacontr=NULL
if(type=="b"){
cocacontr<-list()
for (i in 1:length(lev)){
a<-gl(lev[i],1)
a<-t(contr.treatment(a))
rownames(a)<-NULL
colnames(a)<-NULL
m<-matrix(0,lev[i]-1,lev[i])
m[1:(lev[i]-1),1]<-1
m<-rbind(m,a)
cocacontr[[i]]<-m
}
}
MARG<-c(1:length(lev))
all.int<-bar.col.for(1:length(lev))
s<-length(all.int)
type.temporary<-rep(type,length(lev))
marg<-list()
marg<-list(marg=MARG,int=all.int,types=type.temporary)
model<-hmmm.model(marg=list(marg),lev=lev,cocacontr=cocacontr,names=names)
dscr<-hmmm.model.summary(model,printflag=FALSE)
XX<-diag(1,prod(lev)-1)
if(!c.gen==TRUE&!is.null(int)){
keep<-list()
for (i in 1:s){
for (ii in 1:length(int)){
a<-intersect(all.int[[i]],int[[ii]])
if(setequal(a,int[[ii]])){keep[[length(keep)+1]]<-all.int[[i]]
}
}
}
int<-setdiff(all.int,keep)
}
if(!is.null(int)){
sel<-0
for(i in 1:length(int)){
if(length(int[[i]]) >=2){
included.index<-bar.col.for(int[[i]])}
else{included.index<-int[[i]]}
for(ii in 1:length(included.index)){
inint<-paste(c(included.index[[ii]]),collapse="")
inint<-as.numeric(dscr[which(dscr[,1]==inint,arr.ind=TRUE),][(dim(dscr)[2]-1):dim(dscr)[2]])
sel<-c(sel,c(inint[1]:inint[2]))}
}
XX<-unique(XX[,sel],MARGIN=2)
}
if(printflag==TRUE){
i<-rowSums(XX)
i<-i[as.numeric(dscr[,dim(dscr)[2]])]
print("included interactions")
print(dscr[which(i==1),],quote=FALSE)
print("exluded interactions")
print(dscr[which(i==0),],quote=FALSE)
}
XX=kronecker(diag(1,strata),XX)
}
if(is.null(formula)){
mod.loglin<-
hmmm.model(marg=list(marg),dismarg=dismarg,lev=lev,strata=strata,X=XX,cocacontr=cocacontr,names=names)}
else{
marginali<-paste(c(rep(type,length(lev))),collapse="-")
marginali<-marg.list(marginali)
hmmm.model(marg=marginali
,lev=lev,
names=names,formula=formula)
}
}
hmmm.model<-
function(marg=NULL,dismarg=0,lev,cocacontr=NULL,strata=1,Z=NULL,ZF=Z,X=NULL,D=NULL,
E=NULL,names=NULL,formula=NULL,sel=NULL)
{if(strata>1){formula<-NULL
dismarg<-0}
if(!is.null(sel)){ E<-t(diag(1,(prod(lev)-1))[,sel])}
if(is.matrix(E)){X<-0}
if((is.null(X))&!is.matrix(E)){ X<-diag(1,(prod(lev)-1)) }
if (is.null(Z)){
Z<-pop(strata,prod(lev))}
if (is.null(ZF)){
ZF<-Z}
if(is.null(marg)){
MARG<-bar.col.for(1:length(lev))
s<-length(MARG)
marg<-list()
for (i in 1:s){
type.temporary<-rep("marg",length(lev))
type.temporary[MARG[[i]]]<-"l"
marg[[i]]<-list(marg=MARG[[i]],types=type.temporary)
}
}
s<-length(marg)
for (i in 1:s){
if (is.null(marg[[i]]$int)){
marg<-bcf.interactions(marg)
break
}
else next
}
marginali<-c(marg,list(livelli=lev,cocacontr=cocacontr,strata=strata))
model<-list(modello=marginali,matrici=cocamod(marginali,Z),formula=formula)
model$matrici$Z<-Z
model$matrici$ZF<-ZF
model$matrici$X<-X
model$matrici$E<-0
if (is.matrix(X)&is.null(E)) {
E<-FALSE
model$matrici$E<-t(create.U(model$matrici$X))
}
if(is.matrix(E)){
model$matrici$E<-E
model$matrici$X<-create.U(t(E))
E<-FALSE }
if ( sum(abs(model$matrici$E)) == 0){
model$functions$h.fct<-0
model$functions$derht.fct<-0}
else{
model$functions$h.fct<-make.h.fct(model,E)
model$functions$derht.fct<-make.derht.fct(model,E)
}
model$functions$L.fct<-make.L.fct(model,TRUE)
model$functions$derLt.fct<-make.derLt.fct(model,TRUE)
if(is.list(dismarg)){
if(!is.list(dismarg[[1]])){dismarg<-list(dismarg)}
model$dismod<-c(dismarg,list(livelli=lev,cocacontr=cocacontr,strata=strata))
model$dismod<-list(modello=model$dismod,matrici=cocamod(model$dismod,Z))
model$dismod$matrici$D<-D
model$functions$d.fct=make.d.fct(model$dismod,D)
model$functions$derdt.fct=make.derdt.fct(model$dismod,D)
}
model$names<-names
model$formula<-formula
model$Formula<-NULL
class(model)<-"hmmmmod"
model
}
hmmm.mlfit<-
function(y,model,noineq=TRUE,maxit=1000,norm.diff.conv=1e-5,norm.score.conv=1e-5,
y.eps=0,chscore.criterion=2,
m.initial=y,mup=1,step=1){
if(noineq){
a <- mphineq.fit(y,Z=model$matrici$Z,ZF=model$matrici$ZF,E=model$matrici$E,
L.fct=model$functions$L.fct,
derLt.fct=model$functions$derLt.fct,
h.fct=model$functions$h.fct,derht.fct=model$functions$derht.fct,
X=model$matrici$X,formula=model$formula,names=model$names,lev=model$modello$livelli,
maxiter=maxit,norm.diff.conv=norm.diff.conv,norm.score.conv=norm.score.conv,
y.eps=y.eps,
chscore.criterion=chscore.criterion,m.initial=m.initial,mup=mup,
step=step)
}
else{
a<-mphineq.fit(y,Z=model$matrici$Z,ZF=model$matrici$ZF,E=model$matrici$E,
L.fct=model$functions$L.fct,
derLt.fct=model$functions$derLt.fct,d.fct=model$functions$d.fct,
h.fct=model$functions$h.fct,derht.fct=model$functions$derht.fct,
derdt.fct=model$functions$derdt.fct,
X=model$matrici$X,formula=model$formula,names=model$names,lev=model$modello$livelli,
maxiter=maxit,norm.diff.conv=norm.diff.conv,norm.score.conv=norm.score.conv,
y.eps=y.eps,
chscore.criterion=chscore.criterion,m.initial=m.initial,mup=mup,step=step)
}
a$model<-model
class(a)<-"hmmmfit"
a
}
hmmm.chibar<-function(nullfit,disfit,satfit,repli=6000,kudo=FALSE,TESTAB=FALSE,alpha=c(0.02,0.03,0),pesi=NULL){
if(class(disfit)=="hmmmfit"){
model<-disfit$model
P<-chibar(m=nullfit$m,Z=model$matrici$Z,ZF=model$matrici$ZF,
d.fct=model$functions$d.fct,derdt.fct=model$functions$derdt.fct,
h.fct=model$functions$h.fct,derht.fct=model$functions$derht.fct,
test0=c(nullfit$Gsq)-c(disfit$Gsq),test1=c(disfit$Gsq)-c(satfit$Gsq),repli=repli,kudo=kudo,TESTAB=TESTAB,alpha=alpha,pesi=pesi,
formula=model$formula,names=model$names,lev=model$modello$livelli)
}
if(class(disfit)=="mphfit"){
P<-chibar(m=disfit$m,Z=disfit$Z,ZF=disfit$ZF,
d.fct=disfit$d.fct,derdt.fct=disfit$derdt.fct,
h.fct=disfit$h.fct,derht.fct=disfit$derht.fct,
test0=c(nullfit$Gsq)-c(disfit$Gsq),test1=c(disfit$Gsq)-c(satfit$Gsq),repli=repli,kudo=kudo,TESTAB=TESTAB,alpha=alpha,pesi=pesi)
}
class(P)<-"hmmmchibar"
P
}
print.hmmmmod<-function(x,...){
hmmm.model.summary(x,printflag=TRUE)}
summary.hmmmmod<-function(object,...){
hmmm.model.summary(object,printflag=TRUE)}
print.hmmmfit<-function(x,aname=" ",printflag=FALSE,...){
hmmm.model.summary(x$model,x,aname=aname,printflag=printflag,printhidden=0)}
hmmm.model.summary<-function(modelfull,fitmod=NULL,printflag=TRUE,aname="modfit",printhidden=0){
names<-modelfull$names
if (!is.null(fitmod)) {
a<-fitmod
a$df=a$df+dim(a$Zlist$DMAT)[1]-dim(a$Zlist$DMAT)[2]-a$model$modello$strata
if(printhidden==0){
cat("\n")
cat("SUMMARY of MODEL:", aname )
cat("\nOVERALL GOODNESS OF FIT:")
cat("\n")
cat(" Likelihood Ratio Stat (df=",a$df,"): Gsq = ",
round(a$Gsq,5))
if (a$df > 0) cat(" (p = ",signif(1-pchisq(a$Gsq,a$df),5),")")
cat("\n")
sm <- 100*length(a$m[a$m < 5])/length(a$m)
if ((sm > 75)&(a$df > 0)) {
cat("\n WARNING:", paste(sm,"%",sep=""),
"of expected counts are less than 5. \n")
cat(" Chi-square approximation may be questionable.")
}
cat("\n")
}
if(printhidden==1){
cat("\n")
cat("LOGLIK OF THE HIDDEN MODEL:")
cat(" Loglik = ",
round(a$Gsq,5))
cat("\n")
cat("\n")
cat("SUMMARY of MODEL:", aname )
cat("
(df=",a$df,")")
cat("\n")
}
if(printhidden==2){
cat("\n")
cat("SUMMARY of MODEL:", aname )
cat("
(df=",a$df,")")
cat("\n")
}
cat("\n")
}
printflagT<-TRUE
if(printflagT){
model<-modelfull$modello
nmarg<-length(model)-3
levs<-c(model$livelli)
np<-prod(levs)-1
C<-matrix("",np,1)
M<-matrix("",np,1)
T<-matrix("",np,1)
npar<-matrix(0,np,1)
iiii<-0
for (mi in 1:nmarg){
marginal<-model[[mi]]
margindex<-c(marginal$marg)
margset<-marginal$int
types<-c(marginal$types)
nint<-length(margset)
for (ii in 1:nint){
margint<-c(margset[[ii]])
npar[ii+iiii,1]<-prod(levs[margint]-1)
M[ii+iiii,1]<-paste(c(marginal$marg),collapse="")
C[ii+iiii,1]<-paste(c(margset[[ii]]),collapse="")
T[ii+iiii,1]<-paste(c(types[margset[[ii]]]),collapse="")
}
iiii<-iiii+nint
}
npar2<-cumsum(npar)
npar1<-npar2-npar+1
if(is.null(fitmod$L)){
MCTL<-cbind(C,M,T,npar,npar1,npar2 )
colnames(MCTL)<-c("inter.","marg.","type","npar","start","end")
MCTL<-MCTL[1:iiii,]
MCTL<-MCTL
if(!is.null(names)){
C<-matrix(MCTL[,1])
M<-matrix(MCTL[,2])
C1<-matrix("",dim(MCTL)[1],1)
M1<-matrix("",dim(MCTL)[1],1)
for(j in 1:dim(MCTL)[1]){
NC<-as.numeric(unlist(strsplit(C[j],split="")))
NM<-as.numeric(unlist(strsplit(M[j],split="")))
C1[j]<-paste(names[NC],collapse=".")
M1[j]<-paste(names[NM],collapse=",")}
MCTL<-cbind(C,C1,M,M1,MCTL[,3:6] )
colnames(MCTL)<-c("inter.","inter.names","marg.","marg.names","type","npar","start","end")
}
if (printflag) {print(MCTL,quote=FALSE)}
MCTL<-MCTL
}
else{
if (model$strata==1){
fitmod<-fitmod$L}
else{
a<-fitmod
fitmod<-matrix(fitmod$L,max(npar2),model$strata)}
if(!is.null(names)){
C1<-matrix("",dim(C)[1],1)
M1<-matrix("",dim(M)[1],1)
for(j in 1:dim(C)[1]){
NC<-as.numeric(unlist(strsplit(C[j],split="")))
NM<-as.numeric(unlist(strsplit(M[j],split="")))
C1[j]<-paste(names[NC],collapse=".")
M1[j]<-paste(names[NM],collapse=",")}
C<-C1
M<-M1
}
C<-rep(C,as.numeric(npar))
M<-rep(M,as.numeric(npar))
T<-rep(T,as.numeric(npar))
MCTL<-cbind(C,M,T,round(fitmod,6))
colnames(MCTL)<-c("inter.","marg.","type",paste("STRATA",(1:model$strata),sep="_"))
if (printflag) {print(MCTL,quote=FALSE)}
}
MCTL<-MCTL
}
}
recursive<-function(...){
n<-nargs()
all.logit<-list(...)
cocacontr<-list()
for(i in 1:n) {
if(!is.matrix(all.logit[[i]])){cocacontr[[i]]=0}
else{
X<-all.logit[[i]]
XX<-X
for(j in 1:nrow(X)){
X[j,]<-as.numeric(all.logit[[i]][j,]==1)
XX[j,]<-as.numeric(all.logit[[i]][j,]==-1)
}
cocacontr[[i]]<-rbind(X,XX)
}
}
cocacontr
}
hmmm.model.X<-function(marg,lev,names,Formula=NULL,strata=1,fnames=NULL,cocacontr=NULL,ncocacontr=NULL,replace=TRUE){
str<-prod(strata)
model<-hmmm.model(marg=marg,lev=lev,names=names,strata=str,cocacontr=cocacontr)
model<-create.XMAT(model,Formula=Formula,strata=strata,fnames=fnames,
cocacontr=cocacontr,ncocacontr=ncocacontr,replace=replace)
}
create.XMAT<-function(model,Formula=NULL,strata=1,fnames=NULL,cocacontr=NULL,ncocacontr=NULL,replace=TRUE){
if(is.null(cocacontr)){
cocacontr<-as.list(rep("contr.treatment",length(strata) ))
ncocacontr<-strata-1
}
if(is.null(ncocacontr)){ncocacontr<-strata-1}
descr<-hmmm.model.summary(model,printflag=FALSE)
if(is.null(model$names)){
intnames<-paste("int",descr[,1],sep="_")
descr[,1]<-intnames}
else{intnames<-descr[,2]
descr[,1]<-intnames}
if(is.null(Formula)){
npar<-as.numeric(descr[,"npar"])
intnames<-descr[,1]
ll<-paste(fnames,collapse="*")
l<-paste(intnames,"*",ll,sep="")
l[npar<2]<-ll
Formula<-as.list(paste(intnames,"=","~",l,sep=""))
names(Formula)<-intnames
}
if(!is.null(names(Formula))){
reo<-match(descr[,1],names(Formula))
Formula<-Formula[reo]}
npar<-as.numeric(descr[,"npar"])
if (is.null(fnames)){
fnames=paste("f",1:length(strata),sep="_")}
factlist<-list()
for (i in 1:dim(descr)[1]){
np<-npar[i]*prod(strata)
intfact<-data.frame(gl(npar[i],1,np))
rep2<-npar[i]
for(ii in 1:length(strata)){
factii<-gl(strata[ii],rep2,np)
contrasts(factii)<-eval(cocacontr[[ii]])
contrasts(factii,ncocacontr[ii])<-contrasts(factii)[,1:ncocacontr[ii]]
rep2<-rep2*strata[ii]
intfact<-cbind(intfact,factii)
}
names(intfact)<-c(intnames[i],fnames)
factlist[[i]]<-intfact
}
XL<-as.list(rep("zero",dim(descr)[1]))
px<-0
Xnames<-"zero"
for (i in 1:dim(descr)[1]){
if (Formula[[i]]=="zero"){
px<-c(px,0)}
else{
XL[[i]]<-model.matrix(as.formula(Formula[[i]]),data=factlist[[i]])
px<-c(px,dim(XL[[i]])[2])
Xnames<-c(Xnames,colnames(XL[[i]]))
}
}
px<-px[-1]
Xnames<-Xnames[-1]
XX<-matrix(0,sum(npar)*prod(strata),sum(px))
or<-rep(rep((1:dim(descr)[1]),times=npar),prod(strata))
pstart<-0
for(i in 1:dim(descr)[1]){
if (Formula[[i]]!="zero"){
XX[or==i,(pstart+1):(pstart+px[i])]<-XL[[i]]
pstart<-pstart+px[i]}
}
if (replace) {
colnames(XX)<-Xnames
model$matrici$X<-XX
model$matrici$E<-t(create.U(model$matrici$X))
if ( sum(abs(model$matrici$E)) == 0){
model$functions$h.fct<-0
model$functions$derht.fct<-0
}
else{
model$functions$h.fct<-make.h.fct(model,E=FALSE)
model$functions$derht.fct<-make.derht.fct(model,E=FALSE)
}
model$functions$L.fct<-make.L.fct(model,TRUE)
model$functions$derLt.fct<-make.derLt.fct(model,TRUE)
model$lev.strata<-strata
model$formula=NULL
model$Formula<-Formula
class(model)<-"hmmmmod"
model}
else{XL}
}
anova.hmmmfit<-function(object,objectlarge,...){t<-
hmmm.hmmm.anova(object,objectlarge)
t}
hmmm.hmmm.anova<-function(modelA,modelB){
a<-modelA
if(class(a)=="hmmmfit"){
modelA$df=a$df+dim(a$Zlist$DMAT)[1]-dim(a$Zlist$DMAT)[2]-a$model$modello$strata}
pA<-signif(1-pchisq(a$Gsq,modelA$df),5)
a<-modelB
if(class(a)=="hmmmfit"){
modelB$df=a$df+dim(a$Zlist$DMAT)[1]-dim(a$Zlist$DMAT)[2]-a$model$modello$strata}
pB<-signif(1-pchisq(a$Gsq,modelB$df),5)
Gsq<-abs(modelA$Gsq-modelB$Gsq)
dof<-abs(modelA$df-modelB$df)
pvalue<-(1-pchisq(abs(Gsq),dof))
anova.table<-matrix(c(modelA$Gsq,modelB$Gsq,Gsq,modelA$df,modelB$df,dof,pA,pB,pvalue),3,3,
dimnames = list(c("model A", "model B","LR test"), c("statistics value",
"df", "pvalue")))
}
summary.hmmmfit<-function(object,cell.stats=TRUE,...){model.summary(object,cell.stats=cell.stats,model.info=FALSE)}
summary.mphfit<-function(object,...){model.summary(object,cell.stats=TRUE,model.info=FALSE)}
print.mphfit<-function(x,...){model.summary(x,cell.stats=FALSE,model.info=FALSE)}
model.summary <- function(mph.out,cell.stats=FALSE,model.info=FALSE) {
a <- mph.out
if(class(a)=="hmmmfit"){
a$df=a$df+dim(a$Zlist$DMAT)[1]-dim(a$Zlist$DMAT)[2]-a$model$modello$strata}
if (cell.stats==TRUE||class(a)=="mphfit") {
cat("\n GOODNESS OF FIT:")
cat("\n")
cat(" Likelihood Ratio Stat (df=",a$df,"): Gsq = ",
round(a$Gsq,5))
if (a$df > 0) cat(" (p = ",signif(1-pchisq(a$Gsq,a$df),5),")")
cat("\n")
cat(" Pearson's Score Stat (df=",a$df,"): Xsq = ",
round(a$Xsq,5))
if (a$df > 0) cat(" (p = ",signif(1-pchisq(a$Xsq,a$df),5),")")
cat("\n")
}
cat("\n")
if (a$L[1] != "NA") {
sbeta <- as.matrix(sqrt(abs(diag(a$covbeta))))
z <- a$beta/sbeta
pval <- 2*(1-pnorm(abs(z)))
dimnames(sbeta)[2] <- "StdErr(BETA)"
dimnames(z)[2] <- "Z-ratio"
dimnames(pval)[2] <- "p-value"
if(class(a)=="mphfit"||a$model$modello$strata >1){
cat("\n COVARIATE EFFECTS...")
cat("\n")
print(cbind(a$beta,sbeta,z,pval))
cat("\n")}
if (cell.stats==TRUE) {
stdL <- as.matrix(sqrt(diag(a$covL)))
dimnames(stdL)[2] <- "StdErr(L)"
LLL<-round(cbind(a$Lobs,a$L,stdL,a$Lresid),4)
if(class(a)=="hmmmfit"){
if(is.null(a$model$names)){
descr<-hmmm.model.summary(a$model,printflag=FALSE)
descr<-rep(descr[,1],descr[,4])}
else{
descr<-hmmm.model.summary(a$model,printflag=FALSE)
descr<-hmmm.model.summary(a$model,printflag=FALSE)
descr<-rep(descr[,2],descr[,6])}
intnames<-descr
rownames(LLL)<-rep(intnames,dim(a$model$matrici$Z)[2])}
print(LLL)
cat("\n")
}}
if (cell.stats==TRUE) {
stdm <- as.matrix(sqrt(diag(a$covm)))
stdp <- as.matrix(sqrt(diag(a$covp)))
dimnames(stdm)[2] <- "StdErr(FV)"
dimnames(stdp)[2] <- "StdErr(PROB)"
cat("\n JOINT PROBABILITIES AND EXPECTED FREQUENCIES")
cat("\n")
print(round(cbind(a$y,a$m,stdm,a$p,stdp,a$adjresid),5))
cat("\n")
}
if (model.info==TRUE) {
print(a$formula)
print(a$Formula)
}
}
num.deriv.fct <- function(f.fct,m) {
eps <- (.Machine$double.eps)^(1/3)
d <- eps * m + eps
lenm <- length(m)
E <- diag(c(d))
f1 <- f.fct(m+E[,1])
lenf <- length(f1)
Ft <- (f1-f.fct(m-E[,1]))/(2*d[1])
for (j in 2:lenm) {
Ft <- cbind(Ft,((f.fct(m+E[,j])-f.fct(m-E[,j]))/(2*d[j])))
}
dimnames(Ft) <- NULL
t(Ft)
}
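## num.deriv.fct() returns the transpose of the central-difference Jacobian of
## f.fct at m (one row per element of m, one column per component of f); a
## small check against known derivatives:
if (FALSE) {
  f_test <- function(m) c(sum(m), prod(m))
  num.deriv.fct(f_test, c(1, 2, 3))
  # approx cbind(c(1, 1, 1), c(6, 3, 2))
}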
create.U <- function(X) {
nrowX <- nrow(X)
u <- nrowX - ncol(X)
if (u == 0) {U <- 0}
else {w.mat <- matrix(runif(nrowX*u,1,10),nrowX,u)
U <- w.mat - X%*%solve(t(X)%*%X)%*%t(X)%*%w.mat
}
U
} |
`print.obsSens` <-
function(x,...){
print(summary(x),...)
invisible(x)
} |
misclassMlogit <- function(Y, X, setM, P,
na.action = na.omit,
control = list(),
par = NULL,
baseoutcome = NULL,
x = FALSE) {
if (!is.matrix(X)) stop("X is no matrix")
if (!is.matrix(setM)) stop("setM is no matrix")
if (!is.matrix(Y)) stop("Y must be a matrix")
if (nrow(Y) != nrow(X)) stop("dimensions of Y and X do not match")
if (is.null(baseoutcome)) baseoutcome <- which.max(colSums(Y))
if (baseoutcome < ncol(Y)) Y <- cbind(Y[, -baseoutcome], Y[, baseoutcome])
if (!is.matrix(P)) stop("P is no matrix")
if (nrow(setM) != ncol(P)) stop("dimensions of P and setM do not match")
if (nrow(P) != nrow(X)) stop("dimensions of P and X do not match")
g <- NULL
control <- do.call("make.control", control)
f <- fmlogitValidation
if (control$method == "BFGS" | control$method == "BFGS2" | control$method == "CG" | control$method == "nlm") {
g <- gmlogitValidation
}
length.par <- (ncol(X) + ncol(setM) + 1) * (ncol(Y) - 1)
if (is.null(par)) {
cat("estimate starting parameters...")
par <- rep.int(0.0, length.par)
} else {
if (length(par) != length.par) stop("starting parameter par is not valid.")
}
cat("optimize...")
erg <- fit.algorithm(par, f, g, Y, X, P, setM, control)
cat("ok.\n")
ret <- list()
ret$baseoutcome <- baseoutcome
ret$family <- family
ret$setM <- setM
p <- ncol(X) + ncol(setM) + 1
ret$coefficients <- erg$par[1:(p * (ncol(Y) - 1))]
for (l in 1:(ncol(Y) - 1)) {
names(ret$coefficients)[(l - 1) * p + 1:p] <- c(paste0("alt", l, ":(Intercept)"),
unlist(lapply(dimnames(X)[[2]], function(n) paste0("alt", l, ":", n))),
unlist(lapply(dimnames(P)[[2]][-1], function(p) paste0("alt", l, ":", p))))
}
ret$y <- Y
ret$na.action <- na.action
ret$df.residual <- nrow(X) - length(par)
ret$optim <- erg
if (!is.null(g)) ret$optim$gradient <- g(erg$par, Y, X, P, setM)
if (control$method == "BFGS2") {
ret$iter <- erg$info[4]
ret$optim$call <- "ucminf"
} else {
ret$optim$call <- "optim"
ret$iter <- erg$counts
}
if (x) ret$x <- cbind(rep.int(1, nrow(X)), X, P[, -1, drop = FALSE])
ret$SEtype <- "standard"
ret$setM <- setM
class(ret) <- c("misclassMlogit", "mlogit")
ret$prior.weights <- rep_len(1, nrow(X))
ret$fitted.values <- predict(ret, X, P, type = "response")
ret$control <- control
ret$logLik <- erg$value
ret$formula <- as.formula(paste0("y ~ ", paste(dimnames(X)[[2]], collapse = " + "), " + M"))
return(ret)
}
predict.misclassMlogit <- function(object, X, P = NULL, type = c("link", "response"),
na.action = na.pass, ...) {
type <- match.arg(type)
k <- length(object$coefficients) / (ncol(X) + 1 + ncol(object$setM))
ret <- list(nrow(object$setM))
for (m in 1:nrow(object$setM)) {
eta <- get.eta(cbind(X, object$setM[m,]), object$coefficients, k)
if (type == "response") {
summen <- rowSums(exp(eta)) + 1
ret[[m]] <- cbind(exp(eta), rep.int(1, nrow(eta))) / summen
dimnames(ret[[m]])[[2]] <- c(paste0("M", 1:k), "M0")
} else {
ret[[m]] <- eta
dimnames(ret[[m]])[[2]] <- paste0("M", 1:k)
}
}
if (type == "response" & !is.null(P)) {
tmp <- ret
ret <- tmp[[1]] * P[, 1]
for (m in 2:nrow(object$setM)) {
ret <- ret + tmp[[m]] * P[, m]
}
}
return(ret)
}
get.eta <- function(X, beta, k) {
d <- ncol(X)
ret <- do.call(cbind, lapply(1:k, function(l) {
X %*% beta[((0:d) * k) + l][-1] + beta[l]
}))
return(ret)
}
boot.misclassMlogit <- function(ret, Y, X, Pmodel, PX,
boot.fraction = 1, repetitions = 1000) {
if (!("misclassMlogit" %in% class(ret))) stop("object is no misclassMlogit result")
baseoutcome <- ret$baseoutcome
if (boot.fraction > 1) stop("boot.fraction must be <1")
if (boot.fraction <= 0) stop("boot.fraction must be >0")
if (is.matrix(Y)) {
k <- ncol(Y) - 1
if (k == 0) k <- 1
} else {
k <- 1
}
if (!is.matrix(X) & (!is.big.matrix(X))) stop("X is no matrix")
if (!is.matrix(Y)) stop("Y must be a matrix")
if (nrow(Y) != nrow(X)) stop("dimensions of Y and X do not match")
if (is.null(baseoutcome)) baseoutcome <- which.max(colSums(Y))
if (baseoutcome < k) Y <- cbind(Y[, -baseoutcome], Y[, baseoutcome])
g <- NULL
f <- cfmlogitValidation
if (ret$control$method == "BFGS" || ret$control$method == "BFGS2" ||
ret$control$method == "CG" || ret$control$method == "nlm") {
g <- cgmlogitValidation
}
return(bootstrapping(ret, Y, X, Pmodel, PX, f, g, boot.fraction, repetitions))
}
summary.misclassMlogit <- function(object, ...) {
b <- coef(object, fixed = TRUE)
std.err <- sqrt(diag(vcov(object)))
z <- b / std.err
p <- 2 * (1 - pnorm(abs(z)))
CoefTable <- cbind(b, std.err, z, p)
  colnames(CoefTable) <- c("Estimate", "Std. Error", "t-value", "Pr(>|t|)")
object$CoefTable <- CoefTable
class(object) <- c("summary.misclassMlogit", "summary.mlogit", "mlogit")
return(object)
}
print.summary.misclassMlogit <- function(x, digits = max(3L, getOption("digits") - 3L), ...) {
class(x) <- setdiff(class(x), "summary.misclassMlogit")
cat("\nCall:\n")
print(x$call)
cat("\n")
cat("Frequencies of alternatives:")
print(prop.table(x$freq), digits = digits)
cat("\n")
print(x$est.stat)
cat("\nCoefficients :\n")
printCoefmat(x$CoefTable, digits = digits)
cat("\n")
cat(paste("Log-Likelihood: ", signif(x$logLik, digits), "\n",
sep = ""))
if (x$SEtype == "robust") {
cat("Results show robust standard errors.\n")
} else if (x$SEtype == "bootstrap") {
cat("Results based on ")
cat(nrow(x$boot.beta))
cat(" bootstrap samples.\n\n")
}
}
vcov.misclassMlogit <- function(object, ...) {
if (is.null(object$CoVar)) {
ok <- try(covmat <- chol2inv(chol(object$optim$hessian)), silent = TRUE)
if (inherits(ok, "try-error")) {
print(ok[1])
return(NULL)
}
} else {
covmat <- object$CoVar
}
return(covmat)
}
mfx.misclassMlogit <- function(w, x.mean = TRUE, rev.dum = TRUE, outcome = 2, baseoutcome = 1,
digits = 3, ...) {
if (outcome == baseoutcome) return(NULL)
if (!("mlogit" %in% class(w))) {
stop("Please provide an object from 'mlogit()'.\n")
}
if (is.null(dim(w$x))) {
stop("Please specify 'x = TRUE' in misclassMlogit().\n")}
x.bar <- as.matrix(colMeans(w$x))
b.est <- as.matrix(coef(w))
K <- nrow(x.bar)
alt <- length(coef(w)) / K
xb <- rep.int(0, alt)
for (i in 1:alt) {
xb[i] <- t(x.bar) %*% b.est[(0:(K - 1)) * alt + i,]
}
xb <- exp(xb)
if (!x.mean) stop('not implemented')
if (outcome < baseoutcome) {
f.xb <- xb[outcome] / ((1 + sum(xb)))^2
me <- f.xb * coef(w)[(0:(K - 1)) * alt + outcome]
} else if (outcome > baseoutcome) {
f.xb <- xb[outcome - 1] / ((1 + sum(xb)))^2
me <- f.xb * coef(w)[(0:(K - 1)) * alt + outcome - 1]
}
bx <- list()
for (i in 1:alt) {
bx[[i]] <- b.est[(0:(K - 1)) * alt + i,] %*% t(x.bar)
}
if (baseoutcome > outcome) {
pg <- xb[outcome] / (1 + sum(xb))
dr <- diag(1, K, K) + (1 - 2 * pg) * bx[[outcome]]
va <- (pg - pg^2)^2 * dr %*% vcov(w)[(0:(K - 1)) * alt + outcome,
(0:(K - 1)) * alt + outcome] %*% t(dr)
} else if (baseoutcome < outcome) {
pg <- xb[outcome - 1] / (1 + sum(xb))
dr <- diag(1, K, K) + (1 - 2 * pg) * bx[[outcome - 1]]
va <- (pg - pg^2)^2 * dr %*% vcov(w)[(0:(K - 1)) * alt + outcome - 1,
(0:(K - 1)) * alt + outcome - 1] %*% t(dr)
}
se <- sqrt(diag(va))
if (rev.dum) {
for (i in 1:ncol(w$x)) {
if (identical(sort(unique(w$x[,i])), c(0, 1)) || (i >= ncol(w$x) - dim(w$setM)[2]) ) {
x.d1 <- x.bar; x.d1[i, 1] <- 1
x.d0 <- x.bar; x.d0[i, 1] <- 0
xb0 <- rep.int(0, alt)
xb1 <- rep.int(0, alt)
for (j in 1:alt) {
xb0[j] <- t(x.d0) %*% b.est[(0:(K - 1)) * alt + j,]
xb1[j] <- t(x.d1) %*% b.est[(0:(K - 1)) * alt + j,]
}
xb1 <- exp(xb1)
xb0 <- exp(xb0)
if (baseoutcome > outcome) {
pr1 <- xb1[outcome] / (1 + sum(xb1))
pr0 <- xb0[outcome] / (1 + sum(xb0))
dr2 <- (pr1 - pr1^2) %*% t(x.d1) - (pr0 - pr0^2) %*% t(x.d0)
va2 <- dr2 %*% vcov(w)[(0:(K - 1)) * alt + outcome,
(0:(K - 1)) * alt + outcome] %*% t(dr2)
} else if (baseoutcome < outcome) {
pr1 <- xb1[outcome - 1] / (1 + sum(xb1))
pr0 <- xb0[outcome - 1] / (1 + sum(xb0))
dr2 <- (pr1 - pr1^2) %*% t(x.d1) - (pr0 - pr0^2) %*% t(x.d0)
va2 <- dr2 %*% vcov(w)[(0:(K - 1)) * alt + outcome - 1,
(0:(K - 1)) * alt + outcome - 1] %*% t(dr2)
}
me[i] <- pr1 - pr0
se[i] <- sqrt(as.numeric(va2))
}
}
}
out <- data.frame(effect = me, error = se)
out$t.value <- out$effect / out$error
out$p.value <- 2 * (1 - pt(abs(out[, 3]), w$df.residual))
out <- round(out, digits = digits)
result <- list(f.xb = f.xb, w = w, out = out)
class(result) <- c("mfx.misclassMlogit", "mfx")
return(result)
}
simulate_mlogit_dataset <- function(n = 1000, const = c(0, 0), alpha = c(1, 2),
beta = -2 * c(1, 2), beta2 = NULL) {
set.seed(30)
X <- rnorm(n, 0, 2)
if (is.null(beta2)) {
Mmisclass <- rbinom(n, 1, 0.5)
M <- rep.int(0, n)
for (i in 1:n) {
temp <- exp(X[i] + Mmisclass[i])
M[i] <- rbinom(1, 1, temp / (1 + temp))
}
Y1 <- (alpha[1] * X) + beta[1] * M + const[1]
Y2 <- (alpha[2] * X) + beta[2] * M + const[2]
  } else {
    M <- matrix(rep.int(0, 2 * n), ncol = 2)
    # Assumed fix: Mmisclass is referenced below but was never defined in this
    # branch; initialise it as Bernoulli(0.5) draws, as in the single-M case.
    Mmisclass <- matrix(rbinom(2 * n, 1, 0.5), ncol = 2)
    for (i in 1:n) {
if (sum(Mmisclass[i,]) == 2) Mmisclass[i,] <- c(0, 0)
temp <- exp(X[i])
misclass <- rbinom(1, 1, temp / (1 + temp))
if (rbinom(1, 1, 0.5)) {
M[i, 1] <- ifelse(misclass, 1 - Mmisclass[i, 1], Mmisclass[i, 1])
} else {
M[i, 2] <- ifelse(misclass, 1 - Mmisclass[i, 2], Mmisclass[i, 2])
}
if (sum(M[i,]) == 2) M[i,] <- c(0, 0)
}
Y1 <- alpha[1] * X + beta[1] * M[, 1] + beta2[1] * M[, 2] + const[1]
Y2 <- alpha[2] * X + beta[2] * M[, 1] + beta2[2] * M[, 2] + const[2]
}
Y1 <- exp(Y1)
Y2 <- exp(Y2)
tmp <- rep.int(1, n) + Y1 + Y2
Y <- matrix(rep.int(0, 3 * n), ncol = 3)
for (i in 1:n) {
prob <- c(Y1[i] / tmp[i], Y2[i] / tmp[i], 1 / tmp[i])
Y[i,] <- t(rmultinom(1, 1, prob))
}
return(data.frame(Y = as.factor(Y[, 1] + 2 * Y[, 2] + 3 * (1 - Y[, 1] - Y[, 2])),
X = X,
M = Mmisclass,
M2 = M))
} |
get.sra <- function(palm_ids, con,
ret_df = FALSE, ret_contig.df = FALSE, qc = TRUE) {
run <- bio_sample <- palm_id <- qc_pass <- NULL
if (qc){
contigs <- tbl(con, "palm_sra") %>%
filter(palm_id %in% palm_ids) %>%
filter(qc_pass == qc) %>%
as.data.frame()
} else {
contigs <- tbl(con, "palm_sra") %>%
filter(palm_id %in% palm_ids) %>%
as.data.frame()
}
if ( ret_df ){
return.df <- contigs[ , c("palm_id", "run_id", "coverage", "q_sequence")]
return(return.df)
} else if ( ret_contig.df ){
return(contigs)
} else {
contig.sra <- unique(as.character(contigs$run_id))
return(contig.sra)
}
} |
FactorLevCorr <- function(x) {
if (NROW(x) <= 1)
stop(paste(NROW(x), "row in data."))
x <- unique(x, MARGIN = 1)
n <- NCOL(x)
nLevels <- rep(NaN, n)
for (i in matlabColon(1, n)) nLevels[i] <- Nlevels(x[, i])
z <- diag(nLevels, nrow = length(nLevels))
for (i in matlabColon(1, n)) for (j in matlabColon(i + 1, n)) {
ni <- nLevels[i]
nj <- nLevels[j]
nij <- Nlevels(x[, c(i, j)])
multij <- ni * nj
maxij <- max(ni, nj)
if (ni <= nj)
one <- 1 else one <- -1
if (nij == maxij)
z[i, j] <- one else z[i, j] <- one * (multij - nij)/(multij - maxij)
z[j, i] <- -z[i, j]
}
colnames(z) <- colnames(x)
rownames(z) <- colnames(x)
z
}
Nlevels = function(x){
NROW(unique(x,MARGIN=1))
}
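## A small sketch of Nlevels()/FactorLevCorr(), assuming the matlabColon()
## helper from the surrounding package is available: the diagonal counts the
## levels of each variable and an off-diagonal +/-1 flags a perfect hierarchy.
if (FALSE) {
  x <- cbind(unit  = c("a", "b", "c", "d"),
             group = c("g1", "g1", "g2", "g2"))
  Nlevels(x)              # 4 distinct rows
  FactorLevCorr(x)        # diag c(4, 2); 'unit' fully determines 'group'
  HierarchicalGroups(x)   # a single hierarchical group holding both columns
}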
HierarchicalGroups <- function(x = NULL, mainName = TRUE, eachName = FALSE, fCorr = FactorLevCorr(x)) {
nLevels <- diag(fCorr)
if (min(nLevels) <= 1)
stop("Number of levels < 2 in a variable")
ix <- order(nLevels, decreasing = TRUE)
if (length(fCorr) > 1)
z <- functionRecursive(fCorr[ix, ix], 1:NCOL(fCorr))$l else z <- list(1)
z1 <- rep(NA, length(z))
for (i in 1:length(z)) {
z[[i]] <- ix[z[[i]]]
z1[i] <- z[[i]][1]
if (mainName)
names(z)[i] <- colnames(fCorr)[z1[i]]
}
z <- SortNrList(z)
for (i in 1:length(z)) {
if (length(unique(nLevels[z[[i]]])) != length(z[[i]]))
warning("There are identical variables")
}
if (eachName) {
for (i in 1:length(z)) z[[i]] <- colnames(fCorr)[z[[i]]]
}
z
}
functionRecursive <- function(fCorr, ind) {
drop <- numeric(0)
x <- vector("list", 0)
for (i in ind) if (!(i %in% drop)) {
z <- functionRecursive(fCorr, (1:NCOL(fCorr))[fCorr[i, ] == -1])
drop <- c(drop, i, z$drop)
l <- z$l
for (k in matlabColon(1, length(l))) l[[k]] <- c(i, l[[k]])
if (!length(l))
l <- list(i)
x <- c(x, l)
}
list(drop = drop, l = x)
}
SortNrList <- function(x, index.return = FALSE) {
m <- matrix(0, length(x), max(sapply(x, length)))
for (i in seq_len(length(x))) m[i, seq_len(length(x[[i]]))] <- x[[i]]
ix <- SortRows(m, index.return = TRUE)
if (index.return)
return(ix)
x[ix]
}
SortRows <- function(m, cols = 1:dim(m)[2], index.return = FALSE) {
ix <- eval(parse(text = paste("order(", paste("m[,", cols, ",drop=TRUE]", sep = "", collapse = ","),
")")))
if (index.return)
return(ix)
m[ix, , drop = FALSE]
}
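## SortRows() orders the rows of a matrix lexicographically by its columns
## (or returns the permutation when index.return = TRUE):
if (FALSE) {
  m <- rbind(c(2, 2), c(1, 3), c(1, 1))
  SortRows(m)                        # rows reordered to (1,1), (1,3), (2,2)
  SortRows(m, index.return = TRUE)   # c(3, 2, 1)
}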
FindTableGroup <- function(x = NULL, findLinked = FALSE, mainName = TRUE, fCorr = FactorLevCorr(x),
CheckHandling = warning) {
hier <- HierarchicalGroups(mainName = mainName, eachName = FALSE, fCorr = fCorr)
table1 <- UniqueNrList(hier, 1)
table2 <- UniqueNrList(hier, -1)
if (identical(table1, table2))
table2 <- NULL
if (is.null(table2)) {
if (length(table1) != length(hier)) {
outside <- seq_len(length(hier))[!(seq_len(length(hier)) %in% table1)]
table2 <- outside[UniqueNrList(hier[outside])]
}
}
if (!findLinked) {
uh <- unlist(hier)
if (length(unique(uh)) == length(uh))
uniqueTable <- TRUE else uniqueTable <- FALSE
if (uniqueTable & !is.null(table2))
stop("Error detected in unique algorithm")
table2 <- NULL
if (!uniqueTable)
CheckHandling("Not a single unique table")
} else {
if (length(unique(c(table1, table2))) != length(hier))
CheckHandling("All variables could not be used")
}
if (is.null(table2))
return(list(groupVarInd = hier, table = list(ind1 = table1)))
return(list(groupVarInd = hier, table = list(ind1 = table1, ind2 = table2)))
}
DimFromHier <- function(x, hier, addName = FALSE, total = "Total") {
for (i in matlabColon(1, length(hier))) hier[[i]] <- DimFromHier1(x, hier[[i]],
addName = addName, total = total)
hier
}
DimFromHier1 <- function(x, indHier = 1:dim(x)[2], addName = FALSE, total = "Total") {
start <- "@@"
add <- "@"
r1 <- data.frame(levels = "@", codes = total, stringsAsFactors = FALSE)
b <- CrossLevels(x[, rev(indHier), drop = FALSE])
m <- NCOL(b)
n <- NROW(b)
symbol <- start
for (i in matlabColon(2, m)) symbol <- c(symbol, paste(symbol[i - 1], add, sep = ""))
symbols <- rep(" ", m * n)
codes <- rep(" ", m * n)
k <- 0
bb <- b[1, , drop = FALSE]
for (i in matlabColon(1, n)) for (j in matlabColon(1, m)) {
newrow <- FALSE
if (i == 1)
newrow <- TRUE else if (bb[1, j] != b[i, j])
newrow <- TRUE
if (newrow) {
k <- k + 1
bb[1, j] <- b[i, j]
symbols[k] <- symbol[j]
if (addName)
codes[k] <- paste(colnames(b)[j], as.character(b[i, j]), sep = ".") else codes[k] <- as.character(b[i, j])
}
}
rbind(r1, data.frame(levels = symbols[matlabColon(1, k)], codes = codes[matlabColon(1,
k)], stringsAsFactors = FALSE))
}
FindDimLists <- function(x, groupVarInd = HierarchicalGroups(x = x), addName = FALSE,
sep = ".", xReturn = FALSE, total = "Total") {
hierGr <- GroupNrList(groupVarInd)
CheckOk <- TRUE
if (!addName)
for (i in seq_len(length(hierGr))) if (!CheckLevels(x, hierGr[[i]], CheckLevelsHandling = warning))
CheckOk <- FALSE
if (!CheckOk) {
warning("Settting addName to TRUE (overriding input)")
addName <- TRUE
}
if (addName) {
addVar <- NULL
for (i in matlabColon(1, length(hierGr))) addVar <- c(addVar, hierGr[[i]][matlabColon(2,
length(hierGr[[i]]))])
addVar <- unique(addVar)
for (i in addVar) x[, i] <- paste(colnames(x)[i], x[, i], sep = sep)
}
if (addName)
for (i in seq_len(length(hierGr))) CheckLevels(x, hierGr[[i]], CheckLevelsHandling = stop)
for (i in seq_len(length(groupVarInd))) CheckLevels(x, groupVarInd[[i]], CheckLevelsHandling = stop,
checkDecreasing = TRUE, total = total)
dimLists <- DimFromHier(x, groupVarInd, addName = FALSE, total = total)
if (!xReturn)
return(dimLists)
for (i in seq_len(NCOL(x))) x[, i] <- as.character(x[, i])
list(x = x, dimLists = dimLists)
}
CheckLevels <- function(data, dimVarInd = 1:NCOL(data), CheckLevelsHandling = warning,
checkDecreasing = FALSE, total = NULL) {
x <- NULL
oldlength <- Inf
for (i in dimVarInd) {
iunique <- unique(as.character(data[, i]))
ilength <- length(iunique)
if (checkDecreasing)
if (ilength > oldlength)
stop("Number of levels not decreasing")
oldlength <- ilength
x <- c(x, iunique)
}
if (!is.null(total)) {
if (total %in% x)
CheckLevelsHandling(paste(total, "is used as a level name ...", paste(colnames(data)[dimVarInd],
collapse = ", ")))
}
if (length(x) == length(unique(x)))
return(TRUE)
CheckLevelsHandling(paste("Levelnames must be different in", paste(colnames(data)[dimVarInd],
collapse = ", ")))
return(FALSE)
}
FindCommonCells <- function(dimList1, dimList2) {
okNames <- TRUE
if (length(unique(names(dimList1))) != length(dimList1))
okNames <- FALSE
if (length(unique(names(dimList2))) != length(dimList2))
okNames <- FALSE
if (!okNames)
stop("Elements of dimLists must be named uniquely.")
commonNames <- names(dimList1)[names(dimList1) %in% names(dimList2)]
niceProblem <- identical(names(dimList1), names(dimList2))
if (!niceProblem)
stop("Only problems where identical(names(dimList1),names(dimList2))=TRUE implemented.")
commonCells <- vector("list", length(commonNames))
names(commonCells) <- commonNames
for (i in seq_len(length(commonNames))) {
okAll <- (niceProblem & identical(dimList1[[i]], dimList2[[i]]))
if (okAll)
commonCells[[i]] <- vector("list", 3) else commonCells[[i]] <- vector("list", 4)
commonCells[[i]][[1]] <- commonNames[i]
commonCells[[i]][[2]] <- commonNames[i]
if (okAll)
commonCells[[i]][[3]] <- "All" else {
i1 = which(names(dimList1) == commonNames[i])
i2 = which(names(dimList2) == commonNames[i])
c1 <- dimList1[[i1]]$codes
c2 <- dimList2[[i2]]$codes
cc <- c1[c1 %in% c2]
commonCells[[i]][[3]] <- DimListReCode(cc, dimList1[[i1]])
commonCells[[i]][[4]] <- DimListReCode(cc, dimList2[[i2]])
}
}
commonCells
}
UniqueNrList <- function(x, sort = 0) {
if (sort == 0)
ix <- seq_len(length(x)) else {
ix <- SortNrList(x, index.return = TRUE)
if (sort < 0)
ix <- rev(ix)
}
z <- NULL
xz <- NULL
for (i in ix) {
if (!any((x[[i]] %in% xz))) {
z <- c(z, i)
xz <- c(xz, x[[i]])
}
}
sort(z)
}
GroupNrList <- function(x) {
n <- length(x)
z <- vector("list", n)
z[[1]] <- x[[1]]
k <- 1
for (i in matlabColon(2, n)) {
a <- x[[i]]
jj <- 0
for (j in seq_len(k)) {
if (any(x[[i]] %in% z[[j]]))
jj <- j
}
if (jj == 0) {
k <- k + 1
z[[k]] <- x[[i]]
} else {
z[[jj]] <- unique(c(z[[jj]], x[[i]]))
}
}
z[seq_len(k)]
}
CrossLevels <- function(x) {
SortRows(unique(x, MARGIN = 1))
}
DimListReCode <- function(codes, dimList) {
hi <- DimList2Hierarchy(dimList)
dupCodes <- unique(hi$mapsTo[duplicated(hi$mapsTo)])
hi <- hi[!(hi$mapsTo %in% dupCodes), ]
if (nrow(hi) == 0)
return(codes)
for (i in 0:nrow(hi)) {
ma <- match(codes, hi$mapsFrom)
isMatch <- !is.na(ma)
if (!any(isMatch))
return(codes)
codes[isMatch] <- hi$mapsTo[ma[isMatch]]
}
stop("Something is wrong. Cyclic hierarchy?")
} |
data(mtcars)
names(mtcars)
hist(mtcars$mpg)
library(ggplot2)
ggplot(mtcars, aes(x = mtcars$mpg))
ggplot(mtcars, aes(x = mtcars$mpg)) + geom_histogram()
housing = read.csv('./data/landdata-states.csv')
head(housing[,c('Home.Value', 'Date')])
plot(Home.Value ~ Date, data=subset(housing, State=='MA'))
points(Home.Value ~ Date, data=subset(housing, State=='TX'), col='red')
legend(1975, 400000, c('MA', 'TX'), title='State', col=c('black', 'red'), pch=c(1,1))
ggplot(subset(housing, State %in% c('MA', 'TX')),
aes(x=Date, y=Home.Value, color=State)) + geom_point()
ggplot(subset(mtcars, gear %in% c(3,4)), aes(x=cyl, y=gear, col=factor(am))) + geom_point()
help.search("geom_", package = "ggplot2")
data("airquality")
str(airquality)
str(faithful)
str(sleep)
sleep
AirPassengers
housing
str(housing)
hp2001Q1 = subset(housing, Date == 2001.25)
dim(hp2001Q1)
ggplot(hp2001Q1, aes(y=Structure.Cost, x=Land.Value)) + geom_point()
hp2001Q1 <- subset(housing, Date == 2001.25)
ggplot(hp2001Q1, aes(y = Structure.Cost, x = Land.Value)) +
geom_point()
ggplot(hp2001Q1, aes(y =Structure.Cost, x = log(Land.Value))) + geom_point()
ggplot(mtcars, aes(y = mpg, x = wt)) +
geom_point()
hp2001Q1$pred.SC <- predict(lm(Structure.Cost ~ log(Land.Value), data = hp2001Q1))
p1 <- ggplot(hp2001Q1, aes(x = log(Land.Value), y = Structure.Cost))
p1 + geom_point(aes(color = Home.Value)) +
geom_line(aes(y = pred.SC))
p1 +
geom_point( aes(color = Home.Value )) +
geom_smooth()
p1 +
geom_text(aes( label=State), size=3)
library(ggrepel)
p1 +
geom_point() +
geom_text_repel( aes(label=State), size=3)
p1 +
geom_point (aes (size=2),
color='red')
p1 +
geom_point(aes (color = Home.Value, shape=region))
library(ggplot2)
ggplot(data = mtcars, aes(x=wt, y=mpg)) +
geom_smooth(method='lm', color='red', linetype=2) +
labs(title = "Automobile Data", x="Weight", y="Miles per Gallon")
mydata=mtcars
mydata$am = factor(mydata$am, levels=c(0,1), labels=c('Automatic', 'Manual'))
mydata$vs = factor(mydata$vs, levels=c(0,1), labels=c('V-Engine', 'Straight Engine'))
levels(mydata$cyl)
mydata$cyl = factor(mydata$cyl)
str(mydata)
ggplot(data=mtcars, aes(x=hp, y=mpg, shape=cyl, col=cyl)) + scale_shape_identity() + geom_point(size=1) + facet_grid(am~vs) + labs(title=' Automobile Data by Engine Type', x ='Horsepower', y='Miles per Gallon')
data(singer, package='lattice')
ggplot(singer, aes(x =height)) + geom_histogram()
ggplot(singer, aes(x=voice.part, y=height)) + geom_boxplot()
data(Salaries, package='carData')
Salaries
ggplot(Salaries, aes(x=rank, y=salary)) +
geom_boxplot( fill='cornflowerblue', color='black', notch=T) +
geom_point(position='jitter', color='blue', alpha=.5) +
geom_rug(sides='l', color='black')
?singer
head(singer)
table(singer)
str(singer)
data('Salaries', package='carData')
str(Salaries)
head(Salaries)
?Salaries
data(Salaries, package='carData' )
ggplot( data=Salaries, aes(x=rank, y=salary, fill=sex)) +
geom_boxplot() +
scale_x_discrete( breaks=c('AsstProf', 'AssocProf', 'Prof'), labels= c('Assistant \n Professor', 'Associate \n Professor', 'Full \n Professor' )) +
  scale_y_continuous(breaks = c(50000, 100000, 150000, 200000), labels =c('Rs 50K', 'Rs 100k', 'Rs 150K', 'Rs 200K')) +
labs(title= 'Faculty Salary by Rank and Sex', y=' Salaries', x=' Ranks', fill='Gender') +
theme(legend.position =c(.1, .8))
ggplot(mtcars, aes(x=wt, y=mpg, size=disp)) +
geom_point(shape=21, color='black', fill='cornsilk') +
labs(x='Weight', y='Miles per Gallon' , title='Bubble Chart', size='Engine \n Displacement')
# Eg1b
data(Salaries, package='carData')
ggplot(data=Salaries, aes(x=yrs.since.phd, y = salary, color=rank)) +
scale_color_brewer(palette='Set1') +
geom_point(size=2)
mytheme <- theme(plot.title=element_text(face="bold.italic",
size="14", color="brown"),
axis.title=element_text(face="bold.italic",
size=10, color="brown"),
axis.text=element_text(face="bold", size=9,
color="darkblue"),
panel.background=element_rect(fill="white",
color="darkblue"),
panel.grid.major.y=element_line(color="grey",
linetype=1),
panel.grid.minor.y=element_line(color="grey",
linetype=2),
panel.grid.minor.x=element_blank(),
legend.position="top")
ggplot(Salaries, aes(x=rank, y=salary, fill=sex)) +
geom_boxplot() +
labs(title="Salary by Rank and Sex",
x="Rank", y="Salary") +
mytheme
data(Salaries, package="car")
p1 <- ggplot(data=Salaries, aes(x=rank)) +
geom_bar()
p2 <- ggplot(data=Salaries, aes(x=sex)) +
geom_bar()
p3 <- ggplot(data=Salaries, aes(x=yrs.since.phd, y=salary)) +
geom_point()
library(gridExtra)
grid.arrange(p1, p2, p3, ncol=3)
ggplot(data=mtcars, aes(x=mpg)) + geom_histogram()
ggsave(file="mygraph.pdf")
library(playwith)
library(lattice)
playwith(
xyplot(mpg~wt|factor(cyl)*factor(am),
data=mtcars, subscripts=TRUE,
type=c("r", "p"))
) |
required_arguments <- list(
pie = c("fill"),
donut = c("fill"),
column = c("x")
)
optional_args <- list(
gg_violin = c("adjust", "alpha"),
gg_density = c("adjust", "alpha", "alpha_densitygroup"),
gg_lollipop = c("gg_lwd", "labels", "gg_size"),
gg_boxplot = c("gg_lwd"),
gg_cumcurve = c("gg_lwd"),
gg_column = c("ordered"),
gg_lollipop2 = c("ordered", "gg_lwd", "gg_size"),
gg_pie = c("ordered"),
gg_donut = c("ordered"),
gg_column2 = c("labels"),
gg_barcode = c("alpha", "gg_barSize"),
gg_dotstrip = c("alpha", "gg_size"),
gg_poppyramid = c("gg_bins"),
gg_freqpolygon = c("gg_lwd", "gg_size"),
gg_barcode2 = c("gg_height", "gg_width", "alpha"),
gg_barcode3 = c("gg_height", "gg_width", "alpha"),
gg_beeswarm = c("gg_size"),
gg_ridgeline = c("alpha", "alpha_densitygroup"),
gg_gridplot = c("gg_perN"),
gg_quasirandom = c("gg_size", "gg_swarmwidth", "gg_method"),
gg_divergingstackedbar = c("gg_cutpoint")
)
replace_data_name <- function(expr, new_name) {
if (is.name(expr[[2]])) {
expr[[2]] <- rlang::sym(new_name)
} else {
expr[[2]] <- replace_data_name(expr[[2]], new_name)
}
expr
}
insert_into_first_place <- function(expr, insert_expr) {
if (is.name(expr[[2]])) {
expr[[2]] <- rlang::expr(!!expr[[2]] %>% !!insert_expr)
} else {
expr[[2]] <- insert_into_first_place(expr[[2]], insert_expr)
}
expr
}
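## Illustrative usage (not from the original source): both helpers above rewrite
## unevaluated expressions by walking down the left-most argument until they reach
## the bare data symbol.
if (FALSE) {
  e <- rlang::expr(ggplot2::ggplot(dat, ggplot2::aes(x)) + ggplot2::geom_bar())
  replace_data_name(e, "plot_data")
  ## ggplot2::ggplot(plot_data, ggplot2::aes(x)) + ggplot2::geom_bar()
  p <- rlang::expr(dat %>% dplyr::group_by(g))
  insert_into_first_place(p, rlang::expr(tidyr::drop_na()))
  ## dat %>% tidyr::drop_na() %>% dplyr::group_by(g)
}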
add_to_group <- function(expr, vars) {
if (expr[[3]][[1]] == "dplyr::group_by") {
expr[[3]] <- as.call(c(as.list(expr[[3]]), vars))
} else if (expr[[3]][[1]] == "dplyr::ungroup") {
expr[[3]] <- as.call(list(rlang::expr(dplyr::group_by), vars))
}
if (is.name(expr[[2]])) {
expr
} else {
expr[[2]] <- add_to_group(expr[[2]], vars)
expr
}
}
rotate_gridplot <- function(expr) {
if (expr[[1]] == "waffle::waffle") {
expr$flip <- TRUE
}
expr
}
apply_palette <- function(expr, palette, type) {
viridis_names <- unname(unlist(viridis_palette_names()))
colour_plots <- c("gg_cumcurve", "gg_lollipop", "gg_freqpolygon", "gg_barcode", "gg_dotstrip", "gg_quasirandom", "gg_lollipop2", "gg_barcode3", "gg_dotstrip")
if (palette %in% viridis_names) {
if (type %in% colour_plots) {
rlang::expr(!!expr + ggplot2::scale_colour_viridis_d(option = !!palette))
} else {
if (type != "gg_heatmap") {
rlang::expr(!!expr + ggplot2::scale_fill_viridis_d(option = !!palette))
} else {
rlang::expr(!!expr + ggplot2::scale_fill_viridis_c(option = !!palette))
}
}
} else if (palette == "greyscale") {
if (type %in% colour_plots) {
rlang::expr(!!expr + ggplot2::scale_colour_grey())
} else {
if (type != "gg_heatmap") {
rlang::expr(!!expr + ggplot2::scale_fill_grey())
} else {
rlang::expr(!!expr + ggplot2::scale_fill_gradient(low = "white", high = "black"))
}
}
} else {
if (type %in% colour_plots) {
rlang::expr(!!expr + ggplot2::scale_colour_brewer(palette = !!palette))
} else {
if (type != "gg_heatmap") {
rlang::expr(!!expr + ggplot2::scale_fill_brewer(palette = !!palette))
} else {
rlang::expr(!!expr + ggplot2::scale_fill_distiller(palette = !!palette))
}
}
}
}
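## Illustrative usage (assumed; not from the original source): apply_palette()
## appends the matching discrete or continuous colour/fill scale to a quoted plot.
if (FALSE) {
  p <- rlang::expr(ggplot2::ggplot(dat, ggplot2::aes(x, fill = g)) + ggplot2::geom_bar())
  apply_palette(p, "greyscale", "gg_column")   # adds + ggplot2::scale_fill_grey()
  apply_palette(p, "greyscale", "gg_heatmap")  # adds + ggplot2::scale_fill_gradient(low = "white", high = "black")
}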
check_nas <- function(data, exprs, data_name, plot_args) {
plot_varnames <- unlist(plot_args[plot_args %in% names(data)])
if (any(vapply(data[, plot_varnames, drop = FALSE], anyNA, logical(1)))) {
complete <- complete.cases(data[, plot_varnames])
plot_varnames <- rlang::syms(plot_varnames)
if (is.null(exprs$data)) {
exprs <- list(
data = rlang::expr(plot_data <- !!rlang::sym(data_name) %>% tidyr::drop_na(!!!plot_varnames)),
plot = replace_data_name(exprs$plot, "plot_data")
)
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::labs(subtitle = !!sprintf("%d Missing Observations Removed", sum(!complete))))
} else {
exprs$data[[3]] <- insert_into_first_place(exprs$data[[3]], rlang::expr(tidyr::drop_na()))
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::labs(subtitle = !!sprintf("%d Missing Observations Removed", sum(!complete))))
}
}
exprs
}
count_nas <- function(data, exprs, data_name, plot_args) {
plot_varnames <- unlist(plot_args[plot_args %in% names(data)])
if (any(vapply(data[, plot_varnames, drop = FALSE], anyNA, logical(1)))) {
complete <- complete.cases(data[, plot_varnames])
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::labs(subtitle = !!sprintf("%d Missing Observations Removed", sum(!complete))))
}
exprs
}
iNZightPlotGG_facet <- function(data, data_name, exprs, g1, g2, g1.level, g2.level) {
if (!is.null(g1) && length(g1) > 0) {
if (!is.null(g1.level) && g1.level != "_MULTI") {
if (is.null(exprs$data)) {
exprs <- list(
data = rlang::expr(plot_data <- !!rlang::sym(data_name) %>% dplyr::filter(!!rlang::sym(g1) == !!g1.level)),
plot = replace_data_name(exprs$plot, "plot_data")
)
} else {
exprs$data[[3]] <- insert_into_first_place(exprs$data[[3]], rlang::expr(dplyr::filter(!!rlang::sym(g1) == !!g1.level)))
exprs$data[[3]] <- add_to_group(exprs$data[[3]], rlang::sym(g1))
}
} else {
if (!is.null(exprs$data)) {
exprs$data[[3]] <- add_to_group(exprs$data[[3]], rlang::sym(g1))
}
}
}
if (!is.null(g2) && length(g2) > 0) {
if (!is.null(g2.level) && g2.level != "_MULTI" && g2.level != "_ALL") {
if (is.null(exprs$data)) {
exprs <- list(
data = rlang::expr(plot_data <- !!rlang::sym(data_name) %>% dplyr::filter(!!rlang::sym(g2) == !!g2.level)),
plot = replace_data_name(exprs$plot, "plot_data")
)
exprs$data[[3]] <- add_to_group(exprs$data[[3]], rlang::sym(g2))
} else {
exprs$data[[3]] <- insert_into_first_place(exprs$data[[3]], rlang::expr(dplyr::filter(!!rlang::sym(g2) == !!g2.level)))
exprs$data[[3]] <- add_to_group(exprs$data[[3]], rlang::sym(g2))
}
} else {
if (!is.null(exprs$data)) {
exprs$data[[3]] <- add_to_group(exprs$data[[3]], rlang::sym(g2))
}
}
}
if (isTRUE(is.null(g2) || length(g2) == 0)) {
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::facet_wrap(ggplot2::vars(!!rlang::sym(g1)), labeller = ggplot2::label_both))
} else {
if (!is.null(g2.level) && g2.level == "_MULTI") {
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::facet_grid(cols = ggplot2::vars(!!rlang::sym(g1)), rows = ggplot2::vars(!!rlang::sym(g2)), labeller = ggplot2::label_both))
} else {
exprs$plot <- rlang::expr(!!exprs$plot + ggplot2::facet_wrap(ggplot2::vars(!!rlang::sym(g1)), labeller = ggplot2::label_both))
}
}
exprs
}
iNZightPlotGG_decide <- function(data, varnames, type, extra_vars) {
varnames <- varnames[grep("\\.level$", names(varnames), invert = TRUE)]
varnames <- varnames[grep("g1", names(varnames), invert = TRUE)]
varnames <- varnames[grep("g2", names(varnames), invert = TRUE)]
non_mapped <- varnames[grep("^(x|y)$", names(varnames), invert = TRUE)]
varnames <- varnames[grep("^(x|y)$", names(varnames))]
varnames <- varnames[varnames != ""]
nullVars <- vapply(data[, varnames, drop = FALSE], is.null, FUN.VALUE = logical(1))
varnames[which(nullVars)] <- NULL
varnames[!varnames %in% colnames(data)] <- NULL
if (type %in% c("gg_pie", "gg_donut")) {
names(varnames) <- replace(names(varnames), names(varnames) == "x", "fill")
} else if (type %in% c("gg_violin", "gg_barcode", "gg_boxplot", "gg_cumcurve", "gg_column2", "gg_lollipop", "gg_dotstrip", "gg_density", "gg_barcode2", "gg_beeswarm", "gg_ridgeline", "gg_quasirandom", "gg_barcode3")) {
if (!("y" %in% names(varnames))) {
names(varnames) <- replace(names(varnames), names(varnames) == "x", "y")
if (isTRUE(!is.null(extra_vars$fill_colour) && extra_vars$fill_colour != "")) {
if (type %in% c("gg_lollipop", "gg_cumcurve", "gg_barcode", "gg_dotstrip", "gg_quasirandom", "gg_barcode3")) {
varnames["colour"] <- extra_vars$fill_colour
} else {
varnames["fill"] <- extra_vars$fill_colour
}
} else if (type != "gg_cumcurve") {
varnames["fill"] <- "darkgreen"
}
} else if (is.numeric(data[[varnames["x"]]])) {
orig_x <- varnames["x"]
varnames["x"] <- varnames["y"]
varnames["y"] <- orig_x
}
} else if (type %in% c("gg_stackedbar", "gg_stackedcolumn")) {
names(varnames) <- replace(names(varnames), names(varnames) == "x", "fill")
if ("y" %in% names(varnames)) {
names(varnames) <- replace(names(varnames), names(varnames) == "y", "x")
}
} else if (type == "gg_poppyramid") {
if (is.numeric(data[[varnames["x"]]])) {
names(varnames) <- replace(names(varnames), names(varnames) == "y", "fill")
} else {
names(varnames) <- replace(names(varnames), names(varnames) == "x", "fill")
names(varnames) <- replace(names(varnames), names(varnames) == "y", "x")
}
} else if (type == "gg_spine") {
names(varnames) <- replace(names(varnames), names(varnames) == "y", "fill")
} else if (type == "gg_freqpolygon") {
names(varnames) <- replace(names(varnames), names(varnames) == "y", "colour")
} else if (type == "gg_column") {
if ("y" %in% names(varnames)) {
names(varnames) <- replace(names(varnames), names(varnames) == "y", "group")
}
}
if (type %in% c("gg_column2", "gg_lollipop")) {
names(varnames) <- replace(names(varnames), names(varnames) == "labels", "x")
}
extra_args <- Filter(Negate(is.null), extra_vars[optional_args[[type]]])
varnames <- as.list(varnames)
if (!is.null(extra_args) && length(extra_args) > 0) {
varnames <- append(as.list(varnames), as.list(extra_args))
names(varnames) <- sub("^gg_", "", names(varnames))
if (type %in% c("gg_barcode")) {
if ("barSize" %in% names(varnames)) {
names(varnames) <- replace(names(varnames), names(varnames) == "barSize", "size")
} else {
varnames[['size']] <- 16
}
}
if (type %in% c("gg_barcode2")) {
if ("width" %in% names(varnames)) {
varnames[['width']] <- as.numeric(varnames[['width']])
}
if ("height" %in% names(varnames)) {
varnames[['height']] <- as.numeric(varnames[['height']])
}
}
if (type %in% c("gg_barcode3")) {
if ("width" %in% names(varnames)) {
varnames[['size']] <- as.numeric(varnames[['width']])
varnames[['width']] <- NULL
}
if ("height" %in% names(varnames)) {
varnames[['radius']] <- as.numeric(varnames[['height']])
varnames[['height']] <- NULL
}
}
if (type %in% c("gg_density", "gg_ridgeline")) {
if ("x" %in% names(varnames)) {
varnames[["alpha"]] <- NULL
varnames[["alpha_density"]] <- NULL
if (!is.null(varnames[["alpha_densitygroup"]])) {
names(varnames) <- replace(names(varnames), names(varnames) == "alpha_densitygroup", "alpha")
} else {
varnames[['alpha']] <- 0.6
}
}
if (!is.null(varnames[["alpha"]])) {
varnames[["alpha"]] <- as.numeric(varnames[["alpha"]])
}
}
if (type %in% c("gg_quasirandom")) {
names(varnames) <- replace(names(varnames), names(varnames) == "swarmwidth", "width")
}
if (type %in% c("gg_lollipop2")) {
if (!("y" %in% names(varnames))) {
if (isTRUE(!is.null(extra_vars$fill_colour) && extra_vars$fill_colour != "")) {
varnames[["colour"]] <- extra_vars$fill_colour
}
}
}
}
if (type %in% c("gg_lollipop", "gg_lollipop2", "gg_freqpolygon", "gg_dotstrip", "gg_beeswarm", "gg_quasirandom")) {
if (!("size" %in% names(varnames))) {
varnames[['size']] <- 6
}
}
append(varnames, as.list(non_mapped))
}
iNZightPlotGG_extraargs <- function(extra_args) {
to.keep <- c(
"shape" = "pch",
"colour" = "col.pt",
"size" = "cex",
"alpha" = "alpha",
"bg" = "bg",
"adjust" = "adjust",
"lwd" = "lwd",
"gg_lwd" = "gg_lwd"
)
extra_args <- extra_args[to.keep]
changed_args <- Filter(function(x) extra_args[[x]] != inzpar()[[x]], names(extra_args))
return_args <- extra_args[changed_args]
names(return_args) <- names(to.keep)[match(names(return_args), to.keep)]
return_args
}
iNZightPlotGG <- function(
data,
type,
data_name = "data",
...,
main = NULL,
xlab = NULL,
ylab = NULL,
caption = NULL,
extra_args = c(),
palette = "default",
gg_theme = "grey"
) {
dots <- list(...)
if (length(extra_args) > 0) {
rotate <- extra_args$rotation
desc <- extra_args$desc
overall_size <- extra_args$cex
rotate_labels <- extra_args$rotate_labels
extra_args$desc <- desc
}
plot_args <- iNZightPlotGG_decide(data, unlist(dots), type, extra_args)
plot_exprs <- do.call(
sprintf("iNZightPlotGG_%s", gsub("^gg_", "", type)),
c(rlang::sym(data_name), main = main, xlab = xlab, ylab = ylab, plot_args)
)
if (!(type %in% c("gg_pie", "gg_donut", "gg_cumcurve"))) {
if (type == "gg_gridplot" && isTRUE(rotate)) {
plot_exprs$plot <- rotate_gridplot(plot_exprs$plot)
} else {
default_rotated <- c("gg_boxplot", "gg_violin", "gg_beeswarm", "gg_quasirandom", "gg_lollipop", "gg_column2", "gg_spine")
if (type %in% default_rotated) {
rotate <- if (!is.null(rotate)) !rotate else TRUE
}
if (isTRUE(rotate)) {
plot_exprs$plot <- rotate(plot_exprs$plot)
}
}
}
if (length(gg_theme) > 0 && gg_theme != "grey") {
theme_fun <- list(
"bw" = rlang::expr(ggplot2::theme_bw()),
"light" = rlang::expr(ggplot2::theme_light()),
"dark" = rlang::expr(ggplot2::theme_dark()),
"minimal" = rlang::expr(ggplot2::theme_minimal()),
"classic" = rlang::expr(ggplot2::theme_classic()),
"void" = rlang::expr(ggplot2::theme_void()),
"stata" = rlang::expr(ggthemes::theme_stata()),
"wsj" = rlang::expr(ggthemes::theme_wsj()),
"tufte" = rlang::expr(ggthemes::theme_tufte()),
"gdocs" = rlang::expr(ggthemes::theme_gdocs()),
"fivethirtyeight" = rlang::expr(ggthemes::theme_fivethirtyeight()),
"excel" = rlang::expr(ggthemes::theme_excel()),
"economist" = rlang::expr(ggthemes::theme_economist())
)[[gg_theme]]
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + !!theme_fun)
}
if (exists("rotate_labels") && !(type %in% c("gg_pie", "gg_donut", "gg_cumcurve", "gg_gridplot"))) {
if (isTRUE(rotate_labels$x)) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::theme(axis.text.x = ggplot2::element_text(angle = 45, vjust = 1, hjust=1)))
}
if (isTRUE(rotate_labels$y)) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::theme(axis.text.y = ggplot2::element_text(angle = 45, vjust = 1, hjust=1)))
}
}
if (exists("overall_size") && !is.null(overall_size) && isTRUE(overall_size != 1)) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::theme(text = ggplot2::element_text(size = !!(as.numeric(overall_size) * 11))))
}
if (isTRUE(!extra_args$bg %in% c("lightgrey"))) { # NOTE: a second colour literal (a hex string) was lost from this condition during extraction
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::theme(panel.background = ggplot2::element_rect(fill = !!extra_args$bg)))
}
if (isTRUE(!is.null(dots$g1) && length(dots$g1) > 0)) {
plot_exprs <- iNZightPlotGG_facet(data, data_name, plot_exprs, dots$g1, dots$g2, dots$g1.level, dots$g2.level)
}
if (isTRUE(!missing(palette) && !is.null(palette) && palette != "default")) {
plot_exprs$plot <- apply_palette(plot_exprs$plot, palette, type)
}
if (!(type %in% c("gg_lollipop", "gg_column2"))) {
plot_exprs <- check_nas(data, plot_exprs, data_name, unname(plot_args))
} else {
plot_exprs <- count_nas(data, plot_exprs, data_name, unname(plot_args))
}
if (isTRUE(!is.null(caption) && caption != "")) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::labs(caption = caption))
}
if (type %in% c("gg_barcode3", "gg_dotstrip", "gg_ridgeline")) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::scale_y_discrete(limits = rev))
} else if (type %in% c("gg_violin", "gg_boxplot", "gg_beeswarm", "gg_quasirandom")) {
plot_exprs$plot <- rlang::expr(!!plot_exprs$plot + ggplot2::scale_x_discrete(limits = rev))
}
eval_env <- rlang::env(!!rlang::sym(data_name) := data)
eval_results <- lapply(plot_exprs, eval, envir = eval_env)
plot_object <- eval_results[[length(eval_results)]]
dev.hold()
tryCatch(
print(plot_object),
finally = dev.flush()
)
attr(plot_object, "code") <- unname(unlist(lapply(plot_exprs, rlang::expr_text)))
attr(plot_object, "code_expr") <- plot_exprs
attr(plot_object, "data_name") <- data_name
attr(plot_object, "plottype") <- c(type)
attr(plot_object, "varnames") <- unlist(dots)
attr(plot_object, "use.plotly") <- !type %in% c("gg_pie", "gg_donut", "gg_gridplot", "gg_barcode2", "gg_barcode", "gg_ridgeline")
if (type %in% c("gg_lollipop", "gg_column2")) {
attr(plot_object, "varnames") <- attr(plot_object, "varnames")[names(attr(plot_object, "varnames")) != "y"]
}
invisible(plot_object)
}
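## Note (descriptive, not part of the original source): the driver above builds the
## quoted data/plot expressions via the matching iNZightPlotGG_* constructor (chosen
## from `type`), post-processes them (rotation, theme, palette, facetting,
## missing-value handling, caption, axis reversal), evaluates them with `data` bound
## to `data_name`, prints the plot between dev.hold()/dev.flush(), and returns the
## ggplot object invisibly with the generated code attached as the "code" attribute.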
iNZightPlotGG_pie <- function(data, fill, main = sprintf("Pie Chart of %s", as.character(fill)), ordered = FALSE, ...) {
fill <- rlang::sym(fill)
if (ordered == "desc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!fill := forcats::fct_infreq(!!fill))
)
data <- rlang::sym("plot_data")
} else if (ordered == "asc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!fill := forcats::fct_rev(forcats::fct_infreq(!!fill)))
)
data <- rlang::sym("plot_data")
}
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = factor(1), fill = !!fill)) +
ggplot2::geom_bar(
ggplot2::aes(
y = !!rlang::sym("..count..") / sum(!!rlang::sym("..count.."))
),
position = "fill"
) +
ggplot2::coord_polar(theta = "y") +
ggplot2::xlab("") +
ggplot2::ylab("") +
ggplot2::scale_y_reverse() +
ggplot2::scale_x_discrete(breaks = NULL) +
ggplot2::ggtitle(!!main) +
ggplot2::theme(
panel.grid.major = ggplot2::element_blank(),
panel.grid.minor = ggplot2::element_blank(),
axis.text.x = ggplot2::element_blank()
)
)
if (ordered %in% c("asc", "desc")) {
list(
data = data_expr,
plot = plot_expr
)
} else {
list(
plot = plot_expr
)
}
}
iNZightPlotGG_donut <- function(data, fill, main = sprintf("Donut Chart of %s", as.character(fill)), ordered = FALSE, ...) {
fill <- rlang::sym(fill)
if (ordered == "desc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!fill := forcats::fct_infreq(!!fill)) %>%
dplyr::group_by(!!fill) %>%
dplyr::summarise(Count = dplyr::n()) %>%
dplyr::ungroup() %>%
dplyr::mutate(Fraction = !!rlang::sym("Count") / sum(!!rlang::sym("Count"))) %>%
dplyr::arrange(dplyr::desc(!!rlang::sym("Fraction"))) %>%
dplyr::mutate(ymax = cumsum(!!rlang::sym("Fraction"))) %>%
dplyr::mutate(ymin = dplyr::lag(!!rlang::sym("ymax"), default = 0))
)
} else if (ordered == "asc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!fill := forcats::fct_rev(forcats::fct_infreq(!!fill))) %>%
dplyr::group_by(!!fill) %>%
dplyr::summarise(Count = dplyr::n()) %>%
dplyr::ungroup() %>%
dplyr::mutate(Fraction = !!rlang::sym("Count") / sum(!!rlang::sym("Count"))) %>%
dplyr::arrange(!!rlang::sym("Fraction")) %>%
dplyr::mutate(ymax = cumsum(!!rlang::sym("Fraction"))) %>%
dplyr::mutate(ymin = dplyr::lag(!!rlang::sym("ymax"), default = 0))
)
} else {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!fill) %>%
dplyr::summarise(Count = dplyr::n()) %>%
dplyr::ungroup() %>%
dplyr::mutate(Fraction = !!rlang::sym("Count") / sum(!!rlang::sym("Count"))) %>%
dplyr::mutate(ymax = cumsum(!!rlang::sym("Fraction"))) %>%
dplyr::mutate(ymin = dplyr::lag(!!rlang::sym("ymax"), default = 0))
)
}
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(fill = !!fill, ymax = !!rlang::sym("ymax"), ymin = !!rlang::sym("ymin"), xmax = 4, xmin = 3)) +
ggplot2::geom_rect() +
ggplot2::coord_polar(theta = "y") +
ggplot2::xlab("") +
ggplot2::ylab("") +
ggplot2::scale_x_continuous(breaks = NULL, limits = c(0, 4)) +
ggplot2::scale_y_continuous(labels = scales::percent) +
ggplot2::ggtitle(!!main) +
ggplot2::theme(
panel.grid.major = ggplot2::element_blank(),
panel.grid.minor = ggplot2::element_blank(),
axis.text.x = ggplot2::element_blank()
)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_column <- function(data, x, group, main = sprintf("Column chart of %s", as.character(x)), xlab = as.character(x), ylab = "Count", ordered = FALSE, ...) {
x <- rlang::sym(x)
if (ordered == "desc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!x := forcats::fct_infreq(!!x))
)
data <- rlang::sym("plot_data")
} else if (ordered == "asc") {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::mutate(!!x := forcats::fct_rev(forcats::fct_infreq(!!x)))
)
data <- rlang::sym("plot_data")
}
if (missing(group)) {
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, fill = !!x)) +
ggplot2::geom_bar() +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
} else {
group <- rlang::sym(group)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, fill = !!group)) +
ggplot2::geom_bar(position = "dodge") +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
if (ordered %in% c("asc", "desc")) {
list(
data = data_expr,
plot = plot_expr
)
} else {
list(
plot = plot_expr
)
}
}
rotate <- function(plot_expr) {
check_for_function <- function(expr, fun, i = 0) {
if (length(expr) == 1) {
as.character(expr) == fun
} else {
if (rlang::call_name(expr[[3]]) == fun) {
TRUE
} else {
check_for_function(expr[[2]], fun)
}
}
}
remove_function <- function(expr, fun, i = 0) {
if (length(expr) == 1) {
if (as.character(expr) == fun) {
expr <- NULL
expr
}
} else {
if (rlang::call_name(expr[[3]]) == fun) {
expr[[2]]
} else {
expr[[2]] <- remove_function(expr[[2]], fun)
expr
}
}
}
if (check_for_function(plot_expr, "coord_flip")) {
remove_function(plot_expr, "coord_flip")
} else {
rlang::expr(!!plot_expr + ggplot2::coord_flip())
}
}
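## Illustrative behaviour (not from the original source): rotate() toggles
## ggplot2::coord_flip() on a quoted plot expression.
if (FALSE) {
  p <- rlang::expr(ggplot2::ggplot(dat, ggplot2::aes(x)) + ggplot2::geom_bar())
  rotate(p)          # appends + ggplot2::coord_flip()
  rotate(rotate(p))  # removes it again
}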
iNZightPlotGG_bar <- function(data, x, main = "Bar chart", ...) {
column_plot <- iNZightPlotGG_column(data, x, main = main, ...)
column_plot$plot <- rotate(column_plot$plot)
column_plot
}
iNZightPlotGG_heatmap <- function(data, x, y, main = sprintf("Heatmap of %s and %s", as.character(x), as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
x <- rlang::sym(x)
y <- rlang::sym(y)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x, !!y) %>%
dplyr::summarise(Count = dplyr::n())
)
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!x, y = !!y)) +
ggplot2::geom_tile(ggplot2::aes(fill = !!rlang::sym("Count"))) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_stackedcolumn <- function(data, fill, main = sprintf("Stacked column of %s", as.character(fill)), x, xlab = as.character(x), ylab = "Percent", ...) {
fill = rlang::sym(fill)
if (missing(x)) {
x <- rlang::expr(factor(1))
was_missing <- TRUE
} else {
x <- rlang::sym(x)
was_missing <- FALSE
}
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, fill = !!fill)) +
ggplot2::geom_bar(
ggplot2::aes(
y = !!rlang::sym("..count..") / sum(!!rlang::sym("..count.."))
), position = "fill"
) +
ggplot2::scale_y_continuous(labels = scales::percent) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
if (isTRUE(was_missing)) {
plot_expr <- rlang::expr(
!!plot_expr +
ggplot2::scale_x_discrete(breaks = NULL) +
ggplot2::xlab("")
)
} else {
plot_expr <- rlang::expr(
!!plot_expr +
ggplot2::xlab(!!xlab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_stackedbar <- function(data, fill, main = "Stacked bar", x, ...) {
column_plot <- iNZightPlotGG_stackedcolumn(data, fill, main, x, ...)
column_plot$plot <- rotate(column_plot$plot)
column_plot
}
iNZightPlotGG_violin <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y)) +
ggplot2::geom_violin(fill = !!fill, !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab("") +
ggplot2::ylab(!!ylab) +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
fill <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y, fill = !!fill)) +
ggplot2::geom_violin(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_barcode <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = as.character(x), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_point(shape = "|", !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab("") +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
colour <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_point(shape = "|", !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_barcode2 <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = as.character(x), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_tile(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab("") +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
colour <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_tile(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_barcode3 <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = as.character(x), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (is.null(dots$radius)) {
radius <- 0.5
dots$radius <- 0.5
} else {
radius <- dots$radius
}
if (is.null(dots$size)) {
dots$size <- 1
}
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_spoke(angle = pi/2, position = ggplot2::position_nudge(y = -!!radius/2), !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab("") +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
colour <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x, colour = !!colour)) +
ggplot2::geom_spoke(angle = pi/2, position = ggplot2::position_nudge(y = -!!radius/2), !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_boxplot <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y)) +
ggplot2::geom_boxplot(fill = !!fill, !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab("") +
ggplot2::ylab(!!ylab) +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
fill <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y, fill = !!fill)) +
ggplot2::geom_boxplot(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_column2 <- function(data, x, y, main = sprintf("Distribution of %s", as.character(y)), xlab = "Index", ylab = as.character(y), desc = FALSE, labels, ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
if (missing(labels) || labels == "") {
x <- rlang::expr(1:nrow(!!rlang::enexpr(data)))
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y)
)
} else {
x <- rlang::sym(labels)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y) %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!y))
)
}
} else {
x <- rlang::sym(x)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y) %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!y))
)
}
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!x, y = !!y)) +
ggplot2::geom_col(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_lollipop <- function(data, x, y, main = sprintf("Distribution of %s", as.character(y)), xlab = "Index", ylab = as.character(y), desc = FALSE, labels, ...) {
y <- rlang::sym(y)
dots <- list(...)
point_dots <- dots[c("size", "colour")]
line_dots <- dots[c("lwd", "colour")]
point_dots <- Filter(Negate(is.null), point_dots)
line_dots <- Filter(Negate(is.null), line_dots)
if (missing(x)) {
if (missing(labels) || labels == "") {
x <- rlang::expr(1:nrow(!!rlang::enexpr(data)))
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y)
)
} else {
x <- rlang::sym(labels)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y) %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!y))
)
}
} else {
x <- rlang::sym(x)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y) %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!y))
)
}
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!x, y = !!y)) +
ggplot2::geom_segment(ggplot2::aes(xend = !!x, yend = 0), !!!line_dots) +
ggplot2::geom_point(!!!point_dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_cumcurve <- function(data, x, y, main = sprintf("Cumulative Curve of %s", as.character(y)), xlab = as.character(y), ylab = "Cumulative Frequency", ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::arrange(!!y) %>%
dplyr::mutate(Observation = 1:dplyr::n())
)
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!y, y = !!rlang::sym("Observation"))) +
ggplot2::geom_step(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
} else {
x <- rlang::sym(x)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x) %>%
dplyr::arrange(!!x, !!y) %>%
dplyr::mutate(Observation = 1:dplyr::n())
)
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!y, y = !!rlang::sym("Observation"), colour = !!x)) +
ggplot2::geom_step(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_poppyramid <- function(data, x, fill, main = sprintf("Count of %s by %s", as.character(x), as.character(fill)), xlab = as.character(x), ylab = "Count", ...) {
x <- rlang::sym(x)
fill <- rlang::sym(fill)
dots <- list(...)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, fill = !!fill)) +
ggplot2::geom_histogram(data = subset(!!rlang::enexpr(data), !!fill == levels(!!fill)[1]), !!!dots) +
ggplot2::geom_histogram(
data = subset(
!!rlang::enexpr(data),
!!fill == levels(!!fill)[2]
),
ggplot2::aes(
y = !!rlang::sym("..count..") * -1
),
!!!dots
) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab) +
ggplot2::scale_y_continuous(labels = abs)
)
list(
plot = plot_expr
)
}
iNZightPlotGG_spine <- function(data, x, fill, main = sprintf("Count of %s by %s", as.character(x), as.character(fill)), xlab = as.character(x), ylab = "Count", ...) {
x <- rlang::sym(x)
fill <- rlang::sym(fill)
dots <- list(...)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, fill = !!fill)) +
ggplot2::geom_bar(data = subset(!!rlang::enexpr(data), !!fill == levels(!!fill)[1]), !!!dots) +
ggplot2::geom_bar(
data = subset(
!!rlang::enexpr(data),
!!fill == levels(!!fill)[2]
),
ggplot2::aes(
y = !!rlang::sym("..count..") * -1
),
!!!dots
) +
ggplot2::coord_flip() +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab) +
ggplot2::scale_y_continuous(labels = abs)
)
list(
plot = plot_expr
)
}
iNZightPlotGG_freqpolygon <- function(data, x, colour, main = sprintf("Count of %s by %s", as.character(x), as.character(colour)), xlab = as.character(x), ylab = "Count", ...) {
x <- rlang::sym(x)
colour <- rlang::sym(colour)
dots <- list(...)
point_dots <- dots[c("size", "colour")]
line_dots <- dots[c("lwd", "colour")]
point_dots <- Filter(Negate(is.null), point_dots)
line_dots <- Filter(Negate(is.null), line_dots)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, colour = !!colour, group = !!colour)) +
ggplot2::geom_line(stat = "count", !!!line_dots) +
ggplot2::geom_point(stat = "count", !!!point_dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
plot = plot_expr
)
}
iNZightPlotGG_dotstrip <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = as.character(x), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x)) +
ggplot2::geom_point(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab("") +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
colour <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x, colour = !!colour)) +
ggplot2::geom_point(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_density <- function(data, x, y, fill = "darkgreen", main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = "Density", ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y)) +
ggplot2::geom_density(fill = !!fill, !!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
} else {
fill <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, fill = !!fill)) +
ggplot2::geom_density(!!!dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_mosaic <- function(data, x, y, main = sprintf("Mosaic plot of %s and %s", as.character(x), as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
x <- rlang::sym(x)
y <- rlang::sym(y)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::select(!!x, !!y) %>%
dplyr::mutate(!!x := factor(!!x)) %>%
dplyr::mutate(!!y := factor(!!y))
)
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data) +
ggmosaic::geom_mosaic(ggplot2::aes(x = ggmosaic::product(!!x), fill = !!y)) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_lollipop2 <- function(data, x, y, main = sprintf("Count of %s", as.character(x)), xlab = as.character(x), ylab = "Count", ordered = FALSE, ...) {
x <- rlang::sym(x)
dots <- list(...)
point_dots <- dots[c("size", "colour")]
line_dots <- dots[c("lwd", "colour")]
point_dots <- Filter(Negate(is.null), point_dots)
line_dots <- Filter(Negate(is.null), line_dots)
if (missing(y)) {
if (ordered %in% c("desc", "asc")) {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x) %>%
dplyr::summarise(Count = dplyr::n()) %>%
dplyr::ungroup() %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!rlang::sym("Count"), .desc = !!(ordered == "desc")))
)
} else {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x) %>%
dplyr::summarise(Count = dplyr::n())
)
}
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(!!x, !!rlang::sym("Count"))) +
ggplot2::geom_point(!!!point_dots) +
ggplot2::geom_segment(ggplot2::aes(xend = !!x, yend = 0), !!!line_dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
} else {
y <- rlang::sym(y)
if (ordered %in% c("desc", "asc")) {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x, !!y) %>%
dplyr::summarise(Count = dplyr::n()) %>%
dplyr::ungroup() %>%
dplyr::mutate(!!x := forcats::fct_reorder(!!x, !!rlang::sym("Count"), .desc = !!(ordered == "desc")))
)
} else {
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x, !!y) %>%
dplyr::summarise(Count = dplyr::n())
)
}
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!x, colour = !!y, y = !!rlang::sym("Count"))) +
ggplot2::geom_point(position = ggplot2::position_dodge(width = 0.5), !!!point_dots) +
ggplot2::geom_linerange(ggplot2::aes(ymin = 0, ymax = !!rlang::sym("Count")), position = ggplot2::position_dodge(width = 0.5), !!!line_dots) +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_gridplot <- function(data, x, main = sprintf("Gridplot of %s", as.character(x)), xlab = sprintf("%s observation/square", perN), perN = 1, ...) {
x <- rlang::sym(x)
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::select(!!x) %>%
table() %>%
magrittr::divide_by_int(!!as.integer(perN))
)
plot_expr <- rlang::expr(
waffle::waffle(plot_data, title = !!main, xlab = !!xlab)
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_divergingstackedbar <- function(data, x, y, main = sprintf("Diverging stacked bar of %s by %s", as.character(y), as.character(x)), xlab = as.character(x), ylab = "Count", cutpoint = NULL,...) {
orig_x <- x
x <- rlang::sym(y)
y <- rlang::sym(orig_x)
if (is.null(cutpoint) || cutpoint == "Default") {
cutpoint <- rlang::expr(floor(nlevels(!!y) / 2))
} else {
cutpoint <- rlang::enexpr(cutpoint)
}
data_expr <- rlang::expr(
plot_data <- !!rlang::enexpr(data) %>%
dplyr::group_by(!!x, !!y) %>%
dplyr::summarise(Count = dplyr::n())
)
plot_expr <- rlang::expr(
ggplot2::ggplot(plot_data, ggplot2::aes(x = !!x, fill = !!y)) +
ggplot2::geom_col(data = subset(plot_data, !!y %in% levels(!!y)[1:!!cutpoint]), ggplot2::aes(y = -!!rlang::sym("Count"))) +
ggplot2::geom_col(data = subset(plot_data, !(!!y %in% levels(!!y)[1:!!cutpoint])), ggplot2::aes(y = !!rlang::sym("Count")), position = ggplot2::position_stack(reverse = TRUE)) +
ggplot2::geom_hline(yintercept = 0) +
ggplot2::coord_flip() +
ggplot2::labs(title = !!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab) +
ggplot2::scale_y_continuous(labels = abs) +
ggplot2::scale_fill_discrete(breaks = levels(plot_data[[!!as.character(y)]]))
)
list(
data = data_expr,
plot = plot_expr
)
}
iNZightPlotGG_beeswarm <- function(data, x, y, main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y)) +
ggbeeswarm::geom_beeswarm(!!!dots) +
ggplot2::ggtitle(!!main) +
ggplot2::xlab("") +
ggplot2::ylab(!!ylab) +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y, colour = !!x)) +
ggbeeswarm::geom_beeswarm(!!!dots) +
ggplot2::ggtitle(!!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
}
iNZightPlotGG_ridgeline <- function(data, x, y, main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(y), ylab = as.character(x), ...) {
x <- rlang::sym(x)
y <- rlang::sym(y)
dots <- list(...)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!y, y = !!x, fill = !!x)) +
ggridges::geom_density_ridges(!!!dots) +
ggplot2::ggtitle(!!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
list(
plot = plot_expr
)
}
iNZightPlotGG_quasirandom <- function(data, x, y, main = sprintf("Distribution of %s", as.character(y)), xlab = as.character(x), ylab = as.character(y), ...) {
y <- rlang::sym(y)
dots <- list(...)
if (missing(x)) {
x <- rlang::expr(factor(1))
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y)) +
ggbeeswarm::geom_quasirandom(!!!dots) +
ggplot2::ggtitle(!!main) +
ggplot2::xlab("") +
ggplot2::ylab(!!ylab) +
ggplot2::theme(
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
)
} else {
x <- rlang::sym(x)
plot_expr <- rlang::expr(
ggplot2::ggplot(!!rlang::enexpr(data), ggplot2::aes(x = !!x, y = !!y, colour = !!x)) +
ggbeeswarm::geom_quasirandom(!!!dots) +
ggplot2::ggtitle(!!main) +
ggplot2::xlab(!!xlab) +
ggplot2::ylab(!!ylab)
)
}
list(
plot = plot_expr
)
} |
vb_cs_fpca = function(formula, data=NULL, verbose = TRUE, Kt=5, Kp=2, alpha = .1,
Aw = NULL, Bw = NULL, Apsi = NULL, Bpsi = NULL, argvals = NULL){
call <- match.call()
tf <- terms.formula(formula, specials = "re")
trmstrings <- attr(tf, "term.labels")
specials <- attr(tf, "specials")
where.re <-specials$re - 1
if(length(where.re)!=0){
mf_fixed <- model.frame(tf[-where.re], data = data)
formula = tf[-where.re]
responsename <- attr(tf, "variables")[2][[1]]
REs = list(NA, NA)
REs[[1]] = names(eval(parse(text=attr(tf[where.re], "term.labels")))$data)
REs[[2]]=paste0("(1|",REs[[1]],")")
formula2 <- paste(responsename, "~", REs[[1]],sep = "")
newfrml <- paste(responsename, "~", REs[[2]],sep = "")
newtrmstrings <- attr(tf[-where.re], "term.labels")
formula2 <- formula(paste(c(formula2, newtrmstrings), collapse = "+"))
newfrml <- formula(paste(c(newfrml, newtrmstrings), collapse = "+"))
mf <- model.frame(formula2, data = data)
if(length(data)==0){Z = lme4::mkReTrms(lme4::findbars(newfrml),fr=mf)$Zt
}else
{Z = lme4::mkReTrms(lme4::findbars(newfrml),fr=data)$Zt}
} else {
mf_fixed <- model.frame(tf, data = data)
}
mt_fixed <- attr(mf_fixed, "terms")
Y <- model.response(mf_fixed, "numeric")
W.des = X <- model.matrix(mt_fixed, mf_fixed, contrasts)
I = dim(X)[1]
D = dim(Y)[2]
p = dim(X)[2]
if (!is.null(argvals)) {warning("Argument <argvals> supplied but not used.")}
argvals = seq(0,1,,D)
Theta = bs(1:D, df=Kt, intercept=TRUE, degree=3)
diff0 = diag(1, D, D)
diff2 = matrix(rep(c(1,-2,1, rep(0, D-2)), D-2)[1:((D-2)*D)], D-2, D, byrow = TRUE)
P0 = t(Theta) %*% t(diff0) %*% diff0 %*% Theta
P2 = t(Theta) %*% t(diff2) %*% diff2 %*% Theta
P.mat = alpha * P0 + (1-alpha) * P2
Y.vec = as.vector(t(Y))
obspts.vec = !is.na(Y.vec)
Y.vec = Y.vec[obspts.vec]
J = sum(obspts.vec)
t.designmat.X = t(kronecker(X, Theta)[obspts.vec,])
XtX = matrix(0, Kt*p, Kt*p)
sumXtX = matrix(0, Kt*p, Kt*p)
for(i in 1:I){
obs.points = which(!is.na(Y[i, ]))
X.cur = kronecker(matrix(X[i,], nrow = 1, ncol = p), Theta)[obs.points,]
XtX = XtX + crossprod(X.cur)
sumXtX = sumXtX + t(X.cur)%*% X.cur
}
vec.BW = solve(kronecker(t(W.des)%*% W.des, t(Theta) %*% Theta)) %*% t(kronecker(W.des, Theta)) %*% Y.vec
mu.q.BW = matrix(vec.BW, Kt, p)
Yhat = as.matrix(W.des %*% t(mu.q.BW) %*% t(Theta))
Aw = ifelse(is.null(Aw), Kt/2, Aw)
if(is.null(Bw)){
Bw = b.q.lambda.BW = sapply(1:p, function(u) max(1, .5*sum(diag( t(mu.q.BW[,u]) %*% P.mat %*% (mu.q.BW[,u])))))
} else {
Bw = b.q.lambda.BW = rep(Bw, p)
}
Apsi = ifelse(is.null(Apsi), Kt/2, Apsi)
Bpsi = ifelse(is.null(Bpsi), Kt/2, Bpsi)
Asig = 1; Bsig = 1
sigma.q.Bpsi = vector("list", Kp)
for(k in 1:Kp){
sigma.q.Bpsi[[k]] = diag(1, Kt)
}
mu.q.Bpsi = matrix(0, nrow = Kt, ncol = Kp)
sigma.q.C = vector("list", I)
for(k in 1:I){
sigma.q.C[[k]] = diag(1, Kp)
}
mu.q.C = matrix(rnorm(I*Kp, 0, .01), I, Kp)
b.q.lambda.Bpsi = rep(Bpsi, Kp)
b.q.sigma.me = Bsig
pcaef.cur = matrix(0, I, D)
lpxq=c(0,1)
j=2
if(verbose) { cat("Beginning Algorithm \n") }
while(j<11){
mean.cur = as.vector(t(pcaef.cur))[obspts.vec]
sigma.q.beta = solve(as.numeric((Asig + I*D/2)/(b.q.sigma.me)) * XtX + kronecker(diag((Aw+Kt/2)/b.q.lambda.BW, p, p), P.mat ))
mu.q.beta = matrix(sigma.q.beta %*% (as.numeric((Asig + I*D/2)/(b.q.sigma.me)) * t.designmat.X %*% (Y.vec - mean.cur)), nrow = Kt, ncol = p)
beta.cur = t(mu.q.beta) %*% t(Theta)
fixef.cur = as.matrix(X %*% beta.cur)
mean.cur = as.vector(t(fixef.cur))[obspts.vec]
designmat = kronecker(mu.q.C, Theta)[obspts.vec,]
sigma.q.Bpsi = solve(
kronecker(diag((Apsi+Kt/2)/b.q.lambda.Bpsi), P.mat ) +
as.numeric((Asig + J/2)/(b.q.sigma.me)) * f_sum(mu.q.c = mu.q.C, sig.q.c = sigma.q.C, theta = t(Theta), obspts.mat = !is.na(Y))
)
mu.q.Bpsi = matrix(((Asig + J/2)/(b.q.sigma.me)) * sigma.q.Bpsi %*% f_sum2(y = Y, fixef = fixef.cur, mu.q.c = mu.q.C, kt = Kt, theta = t(Theta)), nrow = Kt, ncol = Kp)
psi.cur = t(mu.q.Bpsi) %*% t(Theta)
ppT = (psi.cur) %*% t(psi.cur)
for(subj in 1:I){
obs.points = which(!is.na(Y[subj, ]))
Theta_i = t(Theta)[,obs.points]
sigma.q.C[[subj]] = solve(
diag(1, Kp, Kp ) +
((Asig + J/2)/(b.q.sigma.me)) * (f_trace(Theta_i = Theta_i, Sig_q_Bpsi = sigma.q.Bpsi, Kp = Kp, Kt = Kt) +
t(mu.q.Bpsi) %*% Theta_i %*% t(Theta_i) %*% mu.q.Bpsi)
)
mu.q.C[subj,] = ((Asig + J/2)/(b.q.sigma.me)) * sigma.q.C[[subj]] %*% as.matrix(psi.cur[,obs.points]) %*% (Y[subj,obs.points] - fixef.cur[subj,obs.points] )
}
pcaef.cur = as.matrix(mu.q.C %*% psi.cur)
resid = as.vector(Y - fixef.cur - pcaef.cur)
b.q.sigma.me = as.numeric(Bsig + .5 * (crossprod(resid[!is.na(resid)]) +
sum(diag(sumXtX %*% sigma.q.beta)) +
f_sum4(mu.q.c= mu.q.C, sig.q.c = sigma.q.C, mu.q.bpsi = mu.q.Bpsi, sig.q.bpsi = sigma.q.Bpsi, theta= Theta, obspts.mat = !is.na(Y))) )
for(term in 1:dim(W.des)[2]){
b.q.lambda.BW[term] = Bw[term] + .5 * (t(mu.q.BW[,term]) %*% P.mat %*% mu.q.BW[,term] +
sum(diag(P.mat %*% sigma.q.beta[(Kt*(term-1)+1):(Kt*term),(Kt*(term-1)+1):(Kt*term)])))
}
for(K in 1:Kp){
b.q.lambda.Bpsi[K] = Bpsi + .5 * (t(mu.q.Bpsi[,K]) %*% P.mat %*% mu.q.Bpsi[,K] +
sum(diag(P.mat %*% sigma.q.Bpsi[(Kt*(K-1)+1):(Kt*K),(Kt*(K-1)+1):(Kt*K)])))
}
curlpxq = 10
lpxq = c(lpxq, curlpxq)
j=j+1
if(verbose) { cat(".") }
}
Yhat = X %*% beta.cur
sigeps.pm = 1 / as.numeric((Asig + J/2)/(b.q.sigma.me))
beta.sd = beta.LB = beta.UB = matrix(NA, nrow = p, ncol = D)
for(i in 1:p){
beta.sd[i,] = sqrt(diag((Theta) %*% sigma.q.beta[(Kt*(i-1)+1):(Kt*i),(Kt*(i-1)+1):(Kt*i)] %*% t(Theta)))
beta.LB[i,] = beta.cur[i,]-1.96*beta.sd[i,]
beta.UB[i,] = beta.cur[i,]+1.96*beta.sd[i,]
}
w <- quadWeights(argvals)
Wsqrt <- diag(sqrt(w))
Winvsqrt <- diag(1/(sqrt(w)))
V <- Wsqrt %*% t(psi.cur) %*% cov(mu.q.C) %*% (psi.cur) %*% Wsqrt
efunctions = matrix(Winvsqrt %*% eigen(V, symmetric = TRUE)$vectors[, seq(len = Kp)], nrow = D, ncol = Kp)
evalues = eigen(V, symmetric = TRUE, only.values = TRUE)$values[1:Kp]
fpca.obj = list(Yhat = pcaef.cur,
Y = Y - X %*% beta.cur,
scores = mu.q.C %*% psi.cur %*% efunctions %*% solve(t(efunctions) %*% (efunctions)),
mu = apply(Y - X %*% beta.cur, 2, mean, na.rm = TRUE),
efunctions = efunctions,
evalues = evalues,
npc = Kp)
class(fpca.obj) = "fpca"
data = if(is.null(data)) { mf_fixed } else { data }
ret = list(beta.cur, beta.UB, beta.LB, fixef.cur, mt_fixed, data, sigeps.pm, fpca.obj)
names(ret) = c("beta.hat", "beta.UB", "beta.LB", "Yhat", "terms", "data", "sigeps.pm", "fpca.obj")
class(ret) = "fosr"
ret
} |
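## Note (descriptive, not part of the original source): vb_cs_fpca() above fits a
## function-on-scalar regression by variational Bayes, combining a spline-basis
## fixed-effect fit (penalty alpha * P0 + (1 - alpha) * P2) with a Kp-component FPCA of
## the residual curves; it runs a fixed number of iterations (the lpxq bookkeeping is a
## placeholder rather than a convergence check) and relies on helpers f_sum(), f_sum2(),
## f_sum4(), f_trace(), quadWeights() and on bs() from the splines package, all assumed
## to be available elsewhere. It returns an object of class "fosr" with coefficient
## curves (beta.hat plus pointwise 95% bands), fitted values, and an "fpca" object.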
context("model fit using one chemical")
concd <- c(-9, -8, -7, -6, -5, -4)
respd_up <- c(0, 2, 30, 40, 50, 60)
respd_down <- c(0, 2, 30, 40, 50, 60)*-1
test_that("use defaults", {
outp <- fit_modls(concd, respd_up)
expect_length(outp, 2)
})
test_that("use one model", {
outp <- fit_modls(concd, respd_up, modls = c("cnst"))
expect_true(length(outp) == 1 && outp[[1]]$modl == "cnst")
})
test_that("use hill + one direction", {
outp <- fit_modls(concd, respd_up, hill_pdir = -1)
expect_true(is.na(outp$hill$tp))
})
test_that("use hill + a different object function", {
outp1 <- fit_modls(concd, respd_up, modls = "hill", hill_f = "ObjHillnorm")
outp2 <- fit_modls(concd, respd_up, modls = "hill")
expect_true(outp1$hill$ga != outp2$hill$ga)
})
test_that("warnings", {
expect_warning(fit_modls(concd, respd_up, modls = "cnst", hill_f = "ObjHillnorm"))
expect_warning(fit_modls(concd, respd_up, hill_fx = "ObjHillnorm"))
expect_warning(fit_modls(concd, respd_up, xx = "ss"))
})
test_that("errors", {
expect_error(fit_modls(concd, respd_up[-1]))
expect_error(fit_modls(concd[1:3], respd_up[1:3]))
expect_error(fit_modls(concd, respd_up, Mask = rep(1, length(concd)) ))
}) |
context("rounding")
test_that("rounding of scores()", {
expect_message(scores(c(`100m` = 10.822, LJ = 7.1), "male"),
"One or more entries of `marks` have been rounded to the second decimal place",
fixed = TRUE)
expect_equal(scores(c(`100m` = 10.822, LJ = 7.1), "male")$marks, c(`100m` = 10.82, LJ = 7.1))
})
test_that("rounding of marks()", {
expect_message(marks(c(`100m` = 851.4, LJ = 700), "male"),
"One or more entries of `scores` have been rounded to the nearest integer",
fixed = TRUE)
}) |
ezez <-
function(z){exp(z-exp(z))} |
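## Descriptive note (not part of the original source): exp(z - exp(z)) is the density
## of log(X) for X ~ Exp(1), i.e. a reflected standard Gumbel kernel.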
"lc_subject_classification" |
library(psych)
library(car)
library(lsr)
library(ggplot2)
library(reshape2)
setwd("C:/Users/Dima/Documents/R/coursera/")
file <- read.table("Stats1.13.HW.11.txt", header=T)
View(file)
fileSF <- subset(file, file$cond == "fixed")
fileSF
t.test(fileSF$verbal.pre,fileSF$verbal.post, paired=T )
t.test(fileSF$spatial.pre, fileSF$spatial.post, paired=T)
wilcox.test(fileSF$spatial.pre, fileSF$spatial.post, paired=F)
fileSM <- subset(file, file$cond == "malleable")
fileSM
round(cohensD(fileSF$spatial.pre, fileSF$spatial.post, method="paired"),2)
fileSF.gainV <- fileSF$verbal.post- fileSF$verbal.pre
fileSF["fileSF.gainV"] <- fileSF.gainV
fileSF.gainS <- fileSF$spatial.post- fileSF$spatial.pre
fileSF["fileSF.gainS"] <- fileSF.gainS
fileSF.gainI <- fileSF$intel.post- fileSF$intel.pre
fileSF["fileSF.gainI"] <- fileSF.gainI
describe(fileSF)
cohensD(fileSF$verbal.pre, fileSF$verbal.post, method="paired")
cohensD(fileSF$spatial.pre, fileSF$spatial.post, method="paired")
cohensD(fileSF$intel.pre, fileSF$intel.post, method="paired")
fileSM.gainV <- fileSM$verbal.post- fileSM$verbal.pre
fileSM["fileSM.gainV"] <- fileSM.gainV
fileSM.gainS <- fileSM$spatial.post- fileSM$spatial.pre
fileSM["fileSM.gainS"] <- fileSM.gainS
fileSM.gainI <- fileSM$intel.post- fileSM$intel.pre
fileSM["fileSM.gainI"] <- fileSM.gainI
describe(fileSM)
cohensD(fileSM$verbal.pre, fileSM$verbal.post, method="paired")
cohensD(fileSM$spatial.pre, fileSM$spatial.post, method="paired")
cohensD(fileSM$intel.pre, fileSM$intel.post, method="paired")
wilcox.test(file$spatial.pre,file$verbal.pre, paired = F)
wilcox.test(file$spatial.pre,file$intel.pre, paired = F)
wilcox.test(file$verbal.pre,file$intel.pre, paired = F)
pre.m = fileSM$verbal.pre + fileSM$spatial.pre + fileSM$intel.pre
post.m = fileSM$verbal.post + fileSM$spatial.post + fileSM$intel.post
cohensD(pre.m, post.m, method="paired")
pre.f = fileSF$verbal.pre + fileSF$spatial.pre + fileSF$intel.pre
post.f = fileSF$verbal.post + fileSF$spatial.post + fileSF$intel.post
cohensD(pre.f, post.f, method="paired")
cohensD(fileSM$verbal.pre, fileSM$verbal.post, method="paired")
cohensD(fileSM$spatial.pre, fileSM$spatial.post, method="paired")
cohensD(fileSM$intel.pre, fileSM$intel.post, method="paired")
cohensD(fileSF$verbal.pre, fileSF$verbal.post, method="paired")
cohensD(fileSF$spatial.pre, fileSF$spatial.post, method="paired")
cohensD(fileSF$intel.pre, fileSF$intel.post, method="paired")
TukeyHSD(aov.model) |
createBasin <-
function(name, simulation) UseMethod("createBasin") |
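## Descriptive note (not part of the original source): createBasin() is an S3 generic;
## dispatch is on the class of `name`, with methods expected to be defined elsewhere as
## createBasin.<class>().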
library(deBInfer)
context("Testing functions used in the likelihood evaluation for prior and posterior")
test_that("logd_prior calculates correct log densities", {
expect_equal(logd_prior(1, 'norm', hypers=list(mean=2,sd=1)), dnorm(1, mean=2,sd=1,log=TRUE))
expect_equal(logd_prior(1:10, 'norm', hypers=list(mean=2,sd=1)), dnorm(1:10, mean=2,sd=1,log=TRUE))
expect_equal(logd_prior(1:10, 'gamma', hypers=list(shape=2,scale=1)), dgamma(1:10, shape=2,scale=1,log=TRUE))
})
test_that("logd_prior works with truncdist", {
expect_equal(logd_prior(1:10, 'trunc', hypers=list(spec='norm', mean=2,sd=1, a=0.5)), dtrunc(1:10, spec='norm', mean=2, sd=1, a=0.5, log=TRUE))
expect_equal(logd_prior(0:10, 'trunc', hypers=list(spec='norm', mean=2,sd=1, a=0.5)), dtrunc(0:10, spec='norm', mean=2, sd=1, a=0.5, log=TRUE))
}) |
sipd_packages <- function(survey = NULL) {
options(repos=structure(c(CRAN="https://cran.r-project.org/")))
if (is.null(survey) || !is.character(survey)) {
survey <- "SIPD"
}
if (toupper(survey) == "COVID") {
packs <- "COVIDIBGE"
}
else if (toupper(survey) == "PNADC") {
packs <- "PNADcIBGE"
}
else if (toupper(survey) == "PNS") {
packs <- "PNSIBGE"
}
else if (toupper(survey) == "POF") {
packs <- "POFIBGE"
}
else {
packs <- utils::packageDescription("SIPDIBGE")$Depends
packs <- strsplit(packs, ",")[[1]]
packs <- gsub("^\\s+|\\s+$", "", packs)
packs <- vapply(strsplit(packs, "\\s+"), "[[", 1, FUN.VALUE=character(1))
packs <- packs[endsWith(packs, "IBGE")]
}
return(packs)
} |
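## Illustrative usage (based on the branches above):
if (FALSE) {
  sipd_packages("pnadc")   # "PNADcIBGE"
  sipd_packages()          # every *IBGE package listed in SIPDIBGE's Depends field
}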
`spaMM.colors` <- function (n = 64, redshift = 1, adjustcolor_args=NULL) {
orig <- c("
"
"
"
"
"
"
"
"
"
"
"
"
orig[1:20] <- topo.colors(64)[1:20]
if ( ! is.null(adjustcolor_args)) orig <- do.call("adjustcolor",
c(list(col=orig),adjustcolor_args))
if (n == 64 && redshift == 1)
return(orig)
rgb.tim <- t(col2rgb(orig))
temp <- matrix(NA, ncol = 3, nrow = n)
x <- (seq(0, 1, , 64)^(redshift))
xg <- seq(0, 1, , n)
for (k in 1:3) {
colorpts <- data.frame(x=x,y=rgb.tim[,k])
blob <- corrHLfit(y~ Matern(1|x),data=eval(colorpts),ranFix=list(phi=1e-07,nu=4,rho=10))
hold <- predict(blob,newdata=data.frame(x=xg),control=list(fix_predVar=FALSE))[,1]
hold[hold < 0] <- 0
hold[hold > 255] <- 255
temp[, k] <- round(hold)
}
rgb(temp[, 1], temp[, 2], temp[, 3], maxColorValue = 255)
}
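## Illustrative usage (assumed; not from the original source): spaMM.colors() is a
## drop-in colour palette, e.g.
if (FALSE) {
  filled.contour(volcano, color.palette = spaMM.colors)
  spaMM.colors(n = 32, redshift = 2)  # interpolate the 64-colour ramp down to 32 colours
}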
niceLabels <- function (x, log = FALSE, lpos, maxticks = Inf, axis, base = 10L,
...)
{
if (log) {
if (missing(lpos)) {
lposwasmissing <- TRUE
lpos <- c(1, 2, 5)
}
else lposwasmissing <- FALSE
x <- x[x > 0]
fc <- floor(log(min(x), base)):ceiling(log(max(x), base))
tick <- as.vector(outer(lpos, base^fc, "*"))
mintick <- min(tick)
maxtick <- max(tick)
ft <- max(c(mintick, tick[tick < min(x)]))
lt <- min(c(maxtick, tick[tick > max(x)]))
tick <- tick[tick >= ft/1.00000001 & tick <= lt * 1.00000001]
if (lposwasmissing && length(tick) < 4) {
if (axis == 1) {
llpos <- c(1, 1.2, 1.5, 2, 3, 4, 5, 6, 8)
}
else llpos <- c(1, 1.2, 1.5, 2, 3, 4, 5, 6, 7, 8,
9)
tick <- niceLabels(x, log = TRUE, axis = axis, lpos = llpos)
}
if (lposwasmissing && length(tick) > maxticks)
tick <- niceLabels(x, log = TRUE, axis = axis, lpos = c(1,
3))
if (lposwasmissing && length(tick) > maxticks)
tick <- niceLabels(x, log = TRUE, axis = axis, lpos = c(1))
if (lposwasmissing && length(tick) > maxticks) {
base <- base * base
tick <- niceLabels(x, log = TRUE, axis = axis, base = base,
lpos = c(1), maxticks = maxticks)
}
}
else {
tick <- pretty(x, high.u.bias = 0, ...)
if (axis %in% c(1, 3) && length(tick) > 7) {
check <- nchar(tick)
blob <- diff(check)
if (any(blob == 0 & check[-1] == 5)) {
tick <- pretty(x, high.u.bias = 1.5, ...)
}
}
}
return(tick)
}
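## Illustrative usage (assumed): niceLabels() returns "nice" tick positions for a range
## of values, using multiples of `lpos` times powers of `base` when log = TRUE.
if (FALSE) {
  niceLabels(c(0.3, 800), log = TRUE, axis = 1, maxticks = 8)
  niceLabels(c(0, 17), log = FALSE, axis = 2)
}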
makeTicks <- function(x,
axis, maxticks,scalefn=NULL,logticks,
validRange=NULL
) {
labels <- niceLabels(x, log=logticks, axis=axis)
if( ! is.null(validRange)) {
labels <- pmax(validRange[1],labels)
labels <- pmin(validRange[2],labels)
}
at <- labels
if( ! is.null(scalefn)) {
at <- sapply(at,scalefn)
invalidat <- (is.infinite(at) | is.nan(at))
at <- at[ ! invalidat]
labels <- labels[ ! invalidat]
}
return(list(labels=labels, at=at, phantomat=NULL))
}
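## Illustrative usage (assumed): with a scale transform, `labels` stay on the original
## scale while `at` holds their transformed positions.
if (FALSE) {
  makeTicks(c(1, 1000), axis = 1, maxticks = 7, scalefn = log10, logticks = TRUE)
}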
`spaMM.filled.contour` <- function (x = seq(0, 1, length.out = nrow(z)), y = seq(0, 1,
length.out = ncol(z)), z, xrange = range(x, finite = TRUE),
yrange = range(y, finite = TRUE), zrange = range(z, finite = TRUE),
margin=1/20,
levels = pretty(zrange, nlevels), nlevels = 20, color.palette = spaMM.colors,
col = color.palette(length(levels) - 1), plot.title, plot.axes,
key.title=NULL, key.axes=NULL, map.asp = NULL, xaxs = "i", yaxs = "i", las = 1,
axes = TRUE, frame.plot = axes, ...)
{
if (missing(z)) {
if (!missing(x)) {
if (is.list(x)) {
z <- x$z
y <- x$y
x <- x$x
}
else {
z <- x
x <- seq.int(0, 1, length.out = nrow(z))
}
}
else stop("no 'z' matrix specified")
}
else if (is.list(x)) {
y <- x$y
x <- x$x
}
if (any(diff(x) <= 0) || any(diff(y) <= 0))
stop("increasing 'x' and 'y' values expected")
mar.orig <- (par.orig <- par(c("mar", "las", "mfrow")))$mar
on.exit(par(par.orig))
wmaphmap <- .calc_plot_dims(x,y,xrange=xrange,yrange=yrange,margin=margin,map.asp=map.asp)
layout(matrix(c(2, 1), ncol = 2L), widths = c(lcm(wmaphmap[1]),lcm(wmaphmap[3])),heights=c(lcm(wmaphmap[2])),respect=TRUE)
par(las = las)
mar <- mar.orig
mar[4L] <- mar[2L]
mar[2L] <- 1
par(mar = mar)
plot.new()
.plotScale(z,levels,key.axes,key.title,axes,col)
mar <- mar.orig
mar[4L] <- 1
par(mar = mar)
plot.new()
plot.window(xrange, yrange, "", xaxs = xaxs, yaxs = yaxs)
.filled.contour(x, y, z, levels, col)
if (missing(plot.axes)) {
if (axes) {
title(main = "", xlab = "", ylab = "")
Axis(x, side = 1)
Axis(y, side = 2)
}
}
else plot.axes
if (frame.plot)
box()
if (missing(plot.title))
title(...)
else plot.title
invisible()
}
.calc_plot_dims <- function(x,y,xrange=NULL,yrange=NULL,margin=1/20,map.asp=NULL) {
if (is.null(xrange)) {
xrange <- range(x)
}
xspan <- (xrange[2]-xrange[1])
margex <- xspan * margin
xrange <- xrange+margex*c(-1,1)
if (is.null(yrange)) {
yrange <- range(y)
}
yspan <- (yrange[2]-yrange[1])
margey <- yspan * margin
yrange <- yrange+margey*c(-1,1)
wscale <- (3 + par("mar")[2]) * par("csi") * 2.54
wmap <- par("din")[1]*2.54 - wscale
Wmargin <- (par("din")[1]-par("pin")[1])*2.54
wplotmap <- wmap - Wmargin
Hmargin <- (par("din")[2]-par("pin")[2])*2.54
max_map.asp <- ( par("din")[2]*2.54 -Hmargin)/wplotmap
if (is.null(map.asp)) map.asp <- yspan/xspan
map.asp <- min(map.asp,max_map.asp)
if (map.asp>4) map.asp <- 1
hmap <- wplotmap*map.asp + Hmargin
return(c(wmap,hmap,wscale))
}
.plotScale <- function(z,levels,key.axes=NULL,key.title=NULL,axes,col) {
zrange <- range(z)
plot.window(xlim = c(0, 1), ylim = range(z),xaxs = "i",
yaxs = "i")
rect(0, levels[-length(levels)], 1, levels[-1L], col = col)
if (is.null(key.axes)) {
if (axes)
axis(4)
}
else key.axes
box()
if (!is.null(key.title)) key.title
}
spaMMplot2D <- function (x,y,z,
xrange=range(x, finite = TRUE),yrange=range(y, finite = TRUE),
margin=1/20,add.map= FALSE,
nlevels = 20, color.palette = spaMM.colors,
map.asp=NULL,
col = color.palette(length(levels) - 1),
plot.title=NULL, plot.axes=NULL, decorations=NULL,
key.title=NULL, key.axes=NULL, xaxs = "i", yaxs = "i", las = 1,
axes = TRUE, frame.plot = axes,...) {
dotlist <- list(...)
par.orig <- par(c(dotlist[intersect(names(dotlist),names(par()))],c("mar", "las", "mfrow")))
on.exit(par(par.orig))
mar.orig <- par.orig$mar
levels <- pretty(range(z), nlevels)
nlevels <- length(levels)-1
zscaled <- 1 + floor(nlevels*(0.000001+0.999998*(z-min(z))/(max(z)-min(z))))
ZColor <- color.palette(n=nlevels)
wmaphmap <- .calc_plot_dims(x,y,xrange=xrange,yrange=yrange,margin=margin,map.asp=map.asp)
layout(matrix(c(2, 1), ncol = 2L),
widths = c(lcm(wmaphmap[1]),lcm(wmaphmap[3])),
heights=c(lcm(wmaphmap[2])),respect=TRUE)
par(las = las)
mar <- mar.orig
mar[4L] <- mar[2L]
mar[2L] <- 1
par(mar = mar)
plot.new()
.plotScale(z,levels,key.axes,key.title,axes,col)
mar <- mar.orig
mar[4L] <- 1
par(mar = mar)
topontop <- order(zscaled,decreasing=FALSE)
plot(x=x[topontop],y=y[topontop],
xlab="",ylab="",
axes=FALSE,
xlim=xrange,ylim=yrange,xaxs = xaxs, yaxs = yaxs,
col=ZColor[zscaled[topontop]],lwd=2)
if (is.logical(add.map)) {
if(add.map) {
if (requireNamespace("maps",quietly=TRUE)) {
maps::map(,xlim=xrange,ylim=yrange,add=TRUE)
} else message("Package 'maps' not available, 'add.map' is ignored.")
}
} else eval(add.map)
if (is.null(plot.title)) {
do.call(title,dotlist[intersect(names(dotlist),names(formals(title)))])
} else plot.title
if (is.null(plot.axes)) {
if (axes) {
Axis(x, side = 1)
Axis(y, side = 2)
}
} else plot.axes
if ( ! is.null(decorations)) eval(decorations,envir=parent.frame())
if (frame.plot) box()
invisible()
}
mapMM <- function (fitobject,Ztransf=NULL,coordinates,
add.points,decorations=NULL,plot.title=NULL,plot.axes=NULL,envir=-3,...) {
if ( ! missing(add.points)) warning("'add.points' is obsolete, use 'decorations'")
if (missing(coordinates)) {
info_olduniqueGeo <- attr(fitobject,"info.uniqueGeo")
if ( ! is.array(info_olduniqueGeo)) {
coordinates <- unique(unlist(lapply(info_olduniqueGeo,colnames)))
} else coordinates <- colnames(info_olduniqueGeo)
}
if (length(coordinates)!=2L) {
stop(paste0("'mapMM' plots only 2D maps, while coordinates are of length ",length(coordinates)))
}
pred <- predict(fitobject,binding="fitted")
x <- pred[,coordinates[1]]
y <- pred[,coordinates[2]]
Zvalues <- pred[,attr(pred,"fittedName")]
if ( ! is.null(Ztransf)) {Zvalues <- do.call(Ztransf,list(Z=Zvalues))}
spaMMplot2D(x=x,y=y,z=Zvalues,
decorations=eval(decorations,envir),
plot.title=eval(plot.title,envir),
plot.axes=eval(plot.axes,envir),
...)
}
`filled.mapMM` <- function(fitobject, Ztransf=NULL, coordinates, xrange = NULL, yrange = NULL,
margin = 1/20, map.formula, phi = 1e-05, gridSteps = 41,
decorations = quote(points(pred[, coordinates], cex = 1, lwd = 2)),
add.map = FALSE, axes = TRUE, plot.title=NULL, plot.axes=NULL, map.asp = NULL,
variance=NULL,
var.contour.args=list(),
smoothObject=NULL, return.="smoothObject",
...)
{
if (missing(coordinates)) {
info_olduniqueGeo <- attr(fitobject,"info.uniqueGeo")
if ( ! is.array(info_olduniqueGeo)) {
coordinates <- unique(unlist(lapply(info_olduniqueGeo,colnames)))
} else coordinates <- colnames(info_olduniqueGeo)
}
if (length(coordinates) != 2L) {
stop(paste("'map' plots only 2D maps, while coordinates are of length ",
length(coordinates), sep = ""))
}
if (length(variance)==0L) {
variance <- list()
} else {
if (length(variance)>1L) stop("'variance' argument should include a single name")
if (is.character(variance)) variance <- structure(list(TRUE),names=variance)
}
pred <- predict(fitobject, binding="fitted")
if (missing(map.formula)) {
form <- formula.HLfit(fitobject,which="hyper")
map.formula <- as.formula(paste(attr(pred,"fittedName"), " ~ 1 + ", paste(.findSpatial(form),collapse=" + ")))
} else {
map.formula <- as.formula(paste(attr(pred,"fittedName"), " ~", paste(map.formula[length(map.formula)])))
}
if (is.null(smoothObject)) smoothObject <- fitme(map.formula, data = pred, fixed = list(phi = phi),method="REML")
smoo <- predict(smoothObject,binding="dummy")
x <- smoo[, coordinates[1]]
y <- smoo[, coordinates[2]]
if (is.null(xrange)) {
xrange <- range(x)
margex <- (xrange[2] - xrange[1]) * margin
xrange <- xrange + margex * c(-1, 1)
}
if (is.null(yrange)) {
yrange <- range(y)
margey <- (yrange[2] - yrange[1]) * margin
yrange <- yrange + margey * c(-1, 1)
}
if (is.null(plot.axes)) {
if (axes) {
plot.axes <- quote({title(main = "", xlab = "", ylab = "")
Axis(x, side = 1)
Axis(y, side = 2)})
} else plot.axes <- quote(NULL)
}
xGrid <- seq(xrange[1], xrange[2], length.out = gridSteps)
yGrid <- seq(yrange[1], yrange[2], length.out = gridSteps)
newdata <- expand.grid(xGrid, yGrid)
colnames(newdata) <- coordinates
gridpred <- predict(smoothObject, newdata = newdata,variances=variance, control=list(fix_predVar=FALSE))
if (length(variance)==1L) {
pvar <- attr(gridpred,names(variance))
varz <- matrix(pvar,ncol=length(yGrid),nrow=length(xGrid))
contourArgs <- c(list(x=xGrid,y=yGrid,z=varz,add=TRUE),var.contour.args)
add.varcontour <- quote(do.call(contour,contourArgs))
} else add.varcontour <- quote(NULL)
if (is.logical(add.map)) {
if (add.map) {
if (requireNamespace("maps",quietly=TRUE)) {
add.map <- quote(maps::map(, xlim = xrange, ylim = yrange,
add = TRUE))
}
else {
message("Package 'maps' not available, 'add.map' is ignored.")
add.map <- quote(NULL)
}
}
else add.map <- quote(NULL)
}
else add.map <- quote(add.map)
Zvalues <- matrix(gridpred, ncol = gridSteps)
if ( ! is.null(Ztransf)) {
Zvalues <- do.call(Ztransf,list(Z=Zvalues))
if (any(is.nan(Zvalues))) stop("NaN in Ztransf'ormed values: see Details of 'filled.mapMM' documentation.")
if (any(is.infinite(Zvalues))) stop("+/-Inf in Ztransf'ormed values.")
}
spaMM.filled.contour(x = xGrid, y = yGrid, z = Zvalues, margin=margin, plot.axes = {
eval(plot.axes)
eval(add.varcontour)
eval(decorations)
eval(add.map)
}, plot.title=eval(plot.title),map.asp = map.asp, ...)
if (return.=="smoothObject") {
invisible(smoothObject)
} else return(list(x = xGrid, y = yGrid, z = Zvalues))
}
map_ranef <- function(fitobject, re.form, Ztransf=NULL, xrange = NULL, yrange = NULL,
margin = 1/20, gridSteps = 41,
decorations = quote(points(fitobject$data[, coordinates], cex = 1, lwd = 2)),
add.map = FALSE, axes = TRUE, plot.title=NULL, plot.axes=NULL, map.asp = NULL,
...)
{
corr_types <- .get_from_ranef_info(fitobject)$corr_types
spatialone <- which(corr_types %in% c("Matern","Cauchy", "IMRF"))
if (length(spatialone)>1L && missing(re.form)) {
stop("Model includes >1 spatial random effect.\n Please specify one through 're.form'.")
} else re.form <- as.formula(paste(". ~", attr(fitobject$ZAlist,"exp_ranef_strings")[[spatialone]]))
info_olduniqueGeo <- attr(fitobject,"info.uniqueGeo")
coordinates <- colnames(info_olduniqueGeo[[as.character(spatialone)]])
if (length(coordinates) != 2L) {
stop(paste("'map' plots only 2D maps, while coordinates are of length ",
length(coordinates), sep = ""))
}
olddata <- fitobject$data
x <- olddata[, coordinates[1]]
y <- olddata[, coordinates[2]]
if (is.null(xrange)) {
xrange <- range(x)
margex <- (xrange[2] - xrange[1]) * margin
xrange <- xrange + margex * c(-1, 1)
}
if (is.null(yrange)) {
yrange <- range(y)
margey <- (yrange[2] - yrange[1]) * margin
yrange <- yrange + margey * c(-1, 1)
}
if (is.null(plot.axes)) {
if (axes) {
plot.axes <- quote({title(main = "", xlab = "", ylab = "")
Axis(x, side = 1)
Axis(y, side = 2)})
} else plot.axes <- quote(NULL)
}
xGrid <- seq(xrange[1], xrange[2], length.out = gridSteps)
yGrid <- seq(yrange[1], yrange[2], length.out = gridSteps)
newdata <- expand.grid(xGrid, yGrid)
colnames(newdata) <- coordinates
rownames(newdata) <- NULL
template <- olddata[1,setdiff(colnames(olddata),coordinates),drop=FALSE]
newdata <- cbind(template[rep(1,nrow(newdata)),],newdata)
pred_noranef <- predict(fitobject, newdata = newdata, re.form=NA, type="link")
pred_oneranef <- predict(fitobject, newdata = newdata, re.form=re.form, type="link")
if ( ! is.null(allvarsS <- attr(attr(pred_oneranef,"frame"),"allvarsS"))) {
cum_nobs <- cumsum(c(0L, sapply(attr(pred_oneranef,"frame"), nrow)))
submod_it <- which(sapply(allvarsS, length)>0)[1]
pred_noranef <- pred_noranef[.subrange(cum_nobs, submod_it)]
pred_oneranef <- pred_oneranef[.subrange(cum_nobs, submod_it)]
}
gridpred <- pred_oneranef-pred_noranef
if (is.logical(add.map)) {
if (add.map) {
if (requireNamespace("maps",quietly=TRUE)) {
add.map <- quote(maps::map(, xlim = xrange, ylim = yrange,
add = TRUE))
}
else {
message("Package 'maps' not available, 'add.map' is ignored.")
add.map <- quote(NULL)
}
}
else add.map <- quote(NULL)
}
else add.map <- quote(add.map)
Zvalues <- matrix(gridpred, ncol = gridSteps)
if ( ! is.null(Ztransf)) {
Zvalues <- do.call(Ztransf,list(Z=Zvalues))
if (any(is.nan(Zvalues))) stop("NaN in Ztransf'ormed values: see Details of 'filled.mapMM' documentation.")
if (any(is.infinite(Zvalues))) stop("+/-Inf in Ztransf'ormed values.")
}
spaMM.filled.contour(x = xGrid, y = yGrid, z = Zvalues, margin=margin, plot.axes = {
eval(plot.axes)
eval(decorations)
eval(add.map)
}, plot.title=eval(plot.title),map.asp = map.asp, ...)
invisible(cbind(newdata[,coordinates], z=gridpred))
} |
testthat::context("Unit tests for impact_model.R")
test_that("ObservationsAreIllConditioned", {
ObservationsAreIllConditioned <- CausalImpact:::ObservationsAreIllConditioned
expect_error(ObservationsAreIllConditioned())
expect_false(ObservationsAreIllConditioned(c(1, 2, 3)))
expect_false(ObservationsAreIllConditioned(c(1, 2, 3, NA, NA)))
expect_false(ObservationsAreIllConditioned(c(NA, NA, 1, NA, 2, 3, NA, NA)))
expect_warning(ObservationsAreIllConditioned(c(NA, NA, NA, NA, NA)), "all NA")
expect_true(suppressWarnings(
ObservationsAreIllConditioned(c(NA, NA, NA, NA, NA))))
expect_error(ObservationsAreIllConditioned(NULL))
expect_error(ObservationsAreIllConditioned(c()))
expect_warning(ObservationsAreIllConditioned(c(1)),
"fewer than 3 non-NA values")
expect_true(suppressWarnings(ObservationsAreIllConditioned(c(1))))
expect_warning(ObservationsAreIllConditioned(c(1, 2)),
"fewer than 3 non-NA values")
expect_true(suppressWarnings(ObservationsAreIllConditioned(c(1, 2))))
expect_warning(ObservationsAreIllConditioned(c(1, 2, NA)),
"fewer than 3 non-NA values")
expect_true(suppressWarnings(ObservationsAreIllConditioned(c(1, 2, NA))))
expect_warning(ObservationsAreIllConditioned(c(NA, 1, 2, NA)),
"fewer than 3 non-NA values")
expect_true(suppressWarnings(ObservationsAreIllConditioned(c(NA, 1, 2, NA))))
expect_warning(ObservationsAreIllConditioned(c(NA, 1, 2, NA, NA)),
"fewer than 3 non-NA values")
expect_true(suppressWarnings(
ObservationsAreIllConditioned(c(NA, 1, 2, NA, NA))))
})
test_that("FormatInputForConstructModel", {
FormatInputForConstructModel <- CausalImpact:::FormatInputForConstructModel
expect_error(FormatInputForConstructModel())
data <- zoo(cbind(rnorm(1000), rnorm(1000), rnorm(1000)))
names(data) <- c("a", "b", "c")
model.args <- list(niter = 1000,
standardize.data = TRUE,
prior.level.sd = 0.01,
nseasons = 1,
season.duration = 1,
dynamic.regression = FALSE,
max.flips = 100)
expect_equal(FormatInputForConstructModel(data, model.args),
list(data = data, model.args = model.args))
anon.data <- zoo(cbind(rnorm(1000), rnorm(1000), rnorm(1000)))
expected.data <- anon.data
names(expected.data) <- c("y", "x1", "x2")
expect_equal(FormatInputForConstructModel(anon.data, model.args)$data,
expected.data)
expect_error(FormatInputForConstructModel(data, list(foo.extra.arg = 123)))
bad.data <- list(NULL, zoo(cbind(c(1, 2, 3, 4, 5), c(6, 7, 8, NA, NA))), NA)
lapply(bad.data, function(data) {
expect_error(FormatInputForConstructModel(data, model.args)) })
bad.model.args <- list(list(niterFoo = 10), NA, as.numeric(NA), 1, c(1, 2, 3))
lapply(bad.model.args, function(model.args) {
expect_error(FormatInputForConstructModel(data, model.args)) })
bad.niter <- list(NA, as.numeric(NA), -1, 9, 9.1, "foo", c(100, 200))
lapply(bad.niter, function(niter) {
expect_error(FormatInputForConstructModel(data, list(niter = niter))) })
bad.prior.level.sd <- list(NA, as.numeric(NA), -1, 0, "foo", c(100, 200))
lapply(bad.prior.level.sd, function(prior.level.sd) {
expect_error(FormatInputForConstructModel(data, list(prior.level.sd =
prior.level.sd))) })
bad.nseasons <- list(0, NA, as.numeric(NA), -1, 9.1, "foo", c(100, 200))
lapply(bad.nseasons, function(nseasons) {
expect_error(FormatInputForConstructModel(data,
list(nseasons = nseasons)))
})
bad.season.duration <- list(0, NA, as.numeric(NA), -1, 9.1, "foo",
c(100, 200))
lapply(bad.season.duration, function(season.duration) {
expect_error(FormatInputForConstructModel(data, list(season.duration =
season.duration))) })
bad.dynamic.regression <- list(NA, as.numeric(NA), 123, "foo", c(TRUE, FALSE))
lapply(bad.dynamic.regression, function(dynamic.regression) {
expect_error(FormatInputForConstructModel(data,
list(dynamic.regression =
dynamic.regression))) })
bad.max.flips <- list(-2, 9.1, "foo", c(100, 200))
lapply(bad.max.flips, function(max.flips) {
expect_error(FormatInputForConstructModel(data,
list(max.flips = max.flips))) })
})
test_that("ConstructModel", {
ConstructModel <- CausalImpact:::ConstructModel
expect_error(ConstructModel())
set.seed(1)
data0 <- zoo(cbind(rnorm(100), rnorm(100), rnorm(100)))
data1 <- data0
names(data1) <- c("a", "b", "c")
data2 <- data0
names(data2) <- c("y", "x1", "x2")
some.data <- list(data0, data1, data2, as.data.frame(data0),
as.data.frame(data1), as.data.frame(data2),
coredata(data0))
model.args <- list(niter = 100)
lapply(some.data, function(data) {
suppressWarnings(bsts.model <- ConstructModel(data * 0 + 1, model.args))
expect_true(is.null(bsts.model))
})
suppressWarnings(expected.model <- ConstructModel(data0[, 1], model.args))
lapply(some.data, function(data) {
suppressWarnings(bsts.model <- ConstructModel(data[, 1], model.args))
expect_false(is.null(bsts.model))
expect_equal(class(bsts.model), "bsts")
expect_equal(as.numeric(bsts.model$original.series), as.numeric(data[, 1]))
expect_equal(bsts.model$state.contributions,
expected.model$state.contributions)
})
suppressWarnings(expected.model <- ConstructModel(data0, model.args))
lapply(some.data, function(data) {
suppressWarnings(bsts.model <- ConstructModel(data, model.args))
expect_false(is.null(bsts.model))
expect_equal(class(bsts.model), "bsts")
expect_equal(as.numeric(bsts.model$original.series), as.numeric(data[, 1]))
expect_equivalent(bsts.model$predictors[, 1], rep(1, nrow(data)))
expect_equivalent(bsts.model$predictors[, 2], as.numeric(data[, 2]))
expect_equivalent(bsts.model$predictors[, 3], as.numeric(data[, 3]))
expect_equal(bsts.model$state.contributions,
expected.model$state.contributions)
})
suppressWarnings(expected.model <-
ConstructModel(data0, list(niter = 100, dynamic.regression = TRUE)))
lapply(some.data, function(data) {
suppressWarnings(bsts.model <-
ConstructModel(data, list(niter = 100, dynamic.regression = TRUE)))
expect_false(is.null(bsts.model))
expect_equal(class(bsts.model), "bsts")
expect_equal(as.numeric(bsts.model$original.series), as.numeric(data[, 1]))
expect_equal(
as.numeric(bsts.model$state.specification[[2]]$predictors[, 1]),
as.numeric(data[, 2]))
expect_equal(
as.numeric(bsts.model$state.specification[[2]]$predictors[, 2]),
as.numeric(data[, 3]))
expect_equal(bsts.model$state.contributions,
expected.model$state.contributions)
})
}) |
print <- function(ehelp.obj){
UseMethod("print",ehelp.obj)
}
print.ehelp <- function(ehelp.obj,coloring=T) {
lines <- paste0("---------------------------------------------------------------",'\n', sep='')
clrPalette <- ehelp.palette()
cat(lines)
for (obj.line in seq_along(ehelp.obj$code)) {
if (coloring) {
clr <- list("","")
if (ehelp.obj$code[obj.line] %in% clrPalette$codes)
        clr <- clrPalette$color[ clrPalette$codes == ehelp.obj$code[obj.line] ][[1]]
cat( paste0(clr[[1]], ehelp.obj$txt[obj.line], clr[[2]]) )
} else {
cat(ehelp.obj$txt[obj.line])
}
}
cat('\n',lines)
}
processOutput <- function(ehelp.obj, fnName,fnCorpus, output) {
valid.outputFmts <- c("txt", "ascii", "html", "latex", "markdown")
ext.outputs <- c("txt","asc","html","tex","md")
if (output %in% tolower(valid.outputFmts)) {
fileName <- paste0(fnName,"-eHelp",'.',ext.outputs[output == valid.outputFmts])
write.fmt(ehelp.obj,output,filename=fileName)
} else if (output %in% toupper(valid.outputFmts)) {
fileName <- paste0(fnName,"-eHelp",'.',toupper(ext.outputs[tolower(output) == valid.outputFmts]))
write.fmt(ehelp.obj,output,filename=fileName, leaveOpen=T)
fmt <- format.defns(tolower(output),filename=fileName)
write.Fncorpus(fnName, fnCorpus, filename=fileName,
begining=fmt$lst['begining'], ending=fmt$lst['ending']
, EoL=fmt$lst$eol
)
} else if (tolower(output) != "none") {
message("The selected output format <<",output,">> is not supported. \n",
"Valid options are: ",paste0(valid.outputFmts,sep=" "),"--and-- ",
paste0(toupper(valid.outputFmts),sep=" "),'\n')
}
}
write.ehelp <- function(X.obj,filename){
lines <- paste0("-----------------------",'\n')
utils::write.table(X.obj,file=filename,sep="", col.names=FALSE,row.names=FALSE, quote=FALSE, eol="")
write.Info(filename)
message(paste(filename,"written to",getwd()))
}
write.Fncorpus <- function(fnName, fnListing, filename,
lines=paste0("/*
begining="", ending="", EoL="\r\n"){
header <- c(lines,paste0("/* *** FUNCTION LISTING: ",fnName," ****/"),lines)
fnListing[1] <- paste0(fnName, " <- ",fnListing[1])
listing <- c(begining,header,fnListing,lines,ending)
utils::write.table(listing,file=filename,sep=EoL, col.names=FALSE,row.names=FALSE, quote=FALSE, append=TRUE, eol=EoL)
}
write.Info <- function(filename, lines=paste0("-----------------------------------------------"), pre="", post="", EoL="") {
date <- format(Sys.time(), "%a %b %d %X %Y")
username <- Sys.info()["user"]
sysinfo <- Sys.info()["sysname"]
sessionInfo <- sessionInfo()["R.version"]
output <- c(pre)
output <- c(output,'\n',lines,'\n')
output <- c(output,paste0(" Generated using the eHelp package -- ",date))
output <- c(output,paste0('\n'," User: ",username," -- System: ",sysinfo))
output <- c(output,paste0(" ",sessionInfo$R.version$version.string,'\n'))
output <- c(output,post)
utils::write.table(output, file=filename,sep="", col.names=FALSE,row.names=FALSE, quote=FALSE, append=TRUE, eol=EoL)
}
write.fmt <- function(X.obj, format, filename, leaveOpen=FALSE){
format <- tolower(format)
ehelp.txt <- X.obj$txt
formatting.codes <- ehelp.palette()
if (leaveOpen) {
fmt <- format.defns(format,filename,ending="")
} else {
fmt <- format.defns(format,filename)
}
if (format == "ascii") format <- "color"
for (line in seq_along(ehelp.txt)) {
lang.Code <- list("","")
if (X.obj$code[line] %in% formatting.codes$codes)
lang.Code <- formatting.codes[[format]][ formatting.codes$codes == X.obj$code[line] ][[1]]
ehelp.txt[line] <- paste0(lang.Code[[1]], ehelp.txt[line], lang.Code[[2]], fmt$eol)
for (sp.char in fmt$sp.chars)
ehelp.txt[line] <- gsub(sp.char[[1]],sp.char[[2]],ehelp.txt[line])
}
ehelp.txt <- c(fmt$struct, fmt$lines,ehelp.txt)
utils::write.table(ehelp.txt,file=filename,sep="", col.names=FALSE,row.names=FALSE, quote=FALSE, eol=fmt$eol)
write.Info(filename, lines=fmt$lines, pre=fmt$pre, post=fmt$post, EoL=fmt$eol)
message(paste(filename,"written to",getwd(),'\n'))
} |
likTraitPhylo<-function (y, phy, covPIC = TRUE, brCov=NULL)
{
if (is.matrix(y) == FALSE) {
stop("Trait data must be a matrix with taxon names as row names")
}
n <- length(phy$tip.label)
k <- ncol(y)
phy <- reorder(phy, order = "pruningwise")
y <- as.matrix(y[phy$tip.label, ])
contrasts <- apply(y, 2, pic.motmot, phy = phy)
rawVariances <- c(contrasts[[1]]$contr[, 2], contrasts[[1]]$V)
rawContrasts <- sapply(contrasts, function(k) c(k$contr[, 1], 0))
t.mat <- apply(rawContrasts, 2, function(x) x / sqrt(rawVariances))
if(is.null(brCov)) {
brCov <- crossprod(t.mat, t.mat) / (n-1)
} else {
brCov <- as.matrix(brCov)
}
if(covPIC == FALSE) {
brCov[upper.tri(brCov)] <- 0
brCov[lower.tri(brCov)] <- 0
}
iW <- solve(brCov)
addCon.mat <- apply(rawContrasts, 1, function(con) crossprod(con, iW %*% con)) / rawVariances
addCons <- sum(addCon.mat)
logLikelihood <- -0.5 * (n * k * log(2 * pi) + n * log(det(brCov)) + k * sum(log(rawVariances)) + addCons)
return(list(brownianVariance = brCov, logLikelihood = logLikelihood))
} |
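## Usage sketch for likTraitPhylo() above (hypothetical; assumes the 'ape'
## package and the pic.motmot() helper from this package are available):
##   tree <- ape::rcoal(20)
##   y <- matrix(rnorm(40), ncol = 2, dimnames = list(tree$tip.label, NULL))
##   likTraitPhylo(y, tree)$logLikelihood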
.interp <- function(m, fits, slopes, at) {
if(any(is.nan(fits))) {
ind <- !is.nan(fits)
c_interp(m[ind], fits[ind], slopes[ind], at)
} else {
c_interp(m, fits, slopes, at)
}
} |
setClass(
"Maintainer",
slots = list(name = "character",
email = "character"),
contains = "Instruction"
)
Maintainer <- function(name, email = NA_character_) {
.Deprecated("Label_Maintainer")
methods::new("Maintainer", name = name, email = email)
}
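## Usage sketch (hypothetical; assumes the parent class "Instruction" and the
## docker_arguments() generic are defined elsewhere in this package):
##   m <- Maintainer("Jane Doe", "jane@example.com")   # emits a deprecation notice
##   docker_arguments(m)
##   #> "\"Jane Doe\" jane@example.com"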
setClassUnion("NullOrLabelOrMaintainer",
members = c("Maintainer", "NULL", "Label"))
setMethod(
"docker_arguments",
signature = signature(obj = "Maintainer"),
definition = function(obj) {
arg <- paste0("\"", obj@name, "\"")
if (!is.na(obj@email) && length(obj@email) > 0)
arg <- paste(arg, obj@email)
return(arg)
}
) |
reuseMeta <- function(GADSdat, varName, other_GADSdat, other_varName = NULL, missingLabels = NULL, addValueLabels = FALSE) {
UseMethod("reuseMeta")
}
reuseMeta.GADSdat <- function(GADSdat, varName, other_GADSdat, other_varName = NULL, missingLabels = NULL, addValueLabels = FALSE) {
if(!is.null(missingLabels) && !missingLabels %in% c("drop", "leave", "only")) stop("Invalid input for argument missingLabels.")
if(!varName %in% names(GADSdat$dat)) stop("varName is not a variable in the GADSdat.")
if(is.null(other_varName)) other_varName <- varName
new_meta <- extractMeta(other_GADSdat, other_varName)
new_meta <- new_meta[, names(new_meta) != "data_table"]
new_meta[, "varName"] <- varName
if(addValueLabels || identical(missingLabels, "leave")) {
for(i in c("varLabel", "format", "display_width")) {
new_meta[, i] <- GADSdat$labels[GADSdat$labels$varName == varName, i][1]
}
}
remove_rows <- which(GADSdat$labels$varName == varName)
if(identical(missingLabels, "drop")) new_meta <- drop_missing_labels(new_meta)
if(identical(missingLabels, "only")) new_meta <- drop_valid_labels(new_meta)
if(identical(missingLabels, "leave")) {
new_meta <- drop_missing_labels(new_meta)
remove_rows <- which(GADSdat$labels$varName == varName & GADSdat$labels$missings != "miss")
if(identical(new_meta$labeled, "no")) new_meta <- new_meta[-1, ]
}
if(addValueLabels && GADSdat$labels[remove_rows, "labeled"][1] != "no") {
remove_rows <- numeric()
}
if((addValueLabels || identical(missingLabels, "leave")) && nrow(new_meta) > 0 && new_meta[1, "labeled"] == "yes") {
GADSdat$labels[GADSdat$labels$varName == varName, "labeled"] <- "yes"
}
labels <- GADSdat$labels
if(length(remove_rows) > 0) labels <- labels[-remove_rows, ]
labels <- rbind(labels, new_meta)
labels <- labels[order(match(labels$varName,names(GADSdat$dat))), ]
row.names(labels) <- NULL
out <- new_GADSdat(dat = GADSdat$dat, labels = labels)
check_GADSdat(out)
out
}
drop_missing_labels <- function(meta) {
if(length(unique(meta$varName)) != 1) stop("This function only works for meta information of a single variable.")
meta_new <- meta[which(meta$missings == "valid"), ]
if(nrow(meta_new) == 0) {
meta_new <- meta[1, ]
meta_new$missings <- meta_new$valLabel <- NA_character_
meta_new$value <- NA_integer_
meta_new$labeled <- "no"
}
row.names(meta_new) <- NULL
meta_new
}
drop_valid_labels <- function(meta) {
if(length(unique(meta$varName)) != 1) stop("This function only works for meta information of a single variable.")
meta_new <- meta[which(meta$missings == "miss"), ]
if(nrow(meta_new) == 0) {
meta_new <- meta[1, ]
meta_new$missings <- meta_new$valLabel <- NA_character_
meta_new$value <- NA_integer_
meta_new$labeled <- "no"
}
row.names(meta_new) <- NULL
meta_new
} |
sqty= c(4141,3842,3056,3519,4226, 4630,3507,3754, 5000,5120,4011, 5015,1916,675, 3636,3224,2295, 2730,2618,4421, 4113,3746, 3532, 3825,1096, 761,2088,820,2114, 1882,2159,1602,3354,2927)
price = c(59,59,59,59,59,59,59,59,59,59,59,59,79,79,79,79,79,79,79,79,79, 79,79,79,99,99, 99,99,99,99,99,99,99,99)
promotion= c(200,200,200,200,400,400,400,400, 600,600,600,600,200,200,200,200, 400,400,400,400,600,600,600,600, 200,200,200,200,400,400,400,400,600,600)
sales1 = data.frame(sqty, price, promotion)
head(sales1)
str(sales1)
sales2 = read.csv(file.choose())
library(gsheet)
url = "https://docs.google.com/spreadsheets/d/1h7HU0X_Q4T5h5D1Q36qoK40Tplz94x_HZYHOJJC_edU/edit
sales3 = as.data.frame(gsheet::gsheet2tbl(url))
str(sales3)
sales4 = read.csv('./data/salesqty.csv')
str(sales4)
sapply(list(sales1,sales2,sales3, sales4), dim)
sapply(list(sales1,sales2,sales3, sales4), names)
sales = sales1
head(sales)
?lm
slr1 = lm(formula = sqty ~ price, data=sales)
summary(slr1)
slr2 = lm(formula = sqty ~ promotion, data=sales)
summary(slr2)
AIC(slr1, slr2)
mlrmodel1 = lm(sqty ~ price + promotion, sales)
mlrmodel1a = lm( data=sales, formula = sqty ~ price + promotion)
range(sales$sqty)
summary(mlrmodel1)
coef(mlrmodel1)
attributes(mlrmodel1)
head(sales)
dim(sales)
fitted(mlrmodel1)
cbind(sales, fitted(mlrmodel1), residuals(mlrmodel1))
summary(mlrmodel1)
coef(mlrmodel1)
5837 + 59 * -53 + 200 * 3.651
# names(omni)   # 'omni' is not defined in this script; line left over from another example
range(sales$price); range(sales$promotion)
(ndata1 = data.frame(price=c(69,98), promotion=c(500,559)))
predict(mlrmodel1, newdata=ndata1)
cbind(ndata1, Predict=predict(mlrmodel1, newdata=ndata1, type='response'))
names(mlrmodel1)
summary(mlrmodel1)
summary(mlrmodel1)$r.squared
(r2 = summary(mlrmodel1)$r.squared)
k = 2
(n = nrow(sales))
(adjr2 = 1 - ( (1 - r2) * ((n - 1)/ (n - k - 1))))
summary(mlrmodel1)$fstatistic[1]
(df1 = k) ; (df2 = n-k-1)
qf(.95, df1, df2)
fstat = summary(mlrmodel1)$fstatistic
pf(fstat[1], fstat[2], fstat[3], lower.tail=FALSE)
plot(mlrmodel1,1)
plot(mlrmodel1,2)
plot(mlrmodel1,3)
plot(mlrmodel1,4)
fitted(mlrmodel1)
residuals(mlrmodel1)
mlrmodel1$residuals
cbind(sales$sqty, fitted(mlrmodel1), sales$sqt - fitted(mlrmodel1), residuals(mlrmodel1))
names(mlrmodel1)
summary(mlrmodel1)
cbind(fitted(mlrmodel1), residuals(mlrmodel1))
plot(cbind(fitted(mlrmodel1), residuals(mlrmodel1)))
plot(cbind(sales$price, residuals(mlrmodel1)))
plot(cbind(sales$promotion, residuals(mlrmodel1)))
library(Metrics)
mlrmodel1
rmse(sales$sqty, fitted(mlrmodel1))
slr1
rmse(sales$sqty, fitted(slr1))
slr2
rmse(sales$sqty, fitted(slr2))
AIC(mlrmodel1, slr1, slr2)
head(sales) ; names(sales)
mlr2 = lm(sqty ~ price + promotion, data= sales)
summary(mlr2)
new1=data.frame(price=60:70, promotion=400)
predict(mlr2, newdata = new1)
cbind(new1,predict2 = predict(mlr2, newdata = new1) ) |
library(didrooRFM)
?didrooRFM
TransNo = c('10','11','12','13')
CustomerID = c('Cust1','Cust2', 'Cust3','Cust2')
DateofPurch = as.Date(c('2010-11-1','2008-3-25','2017-3-25', '2016-3-25'))
Amount = c(1000,500, 600, 700)
customerData = data.frame(TransNo,CustomerID,DateofPurch,Amount)
customerData
?didrooRFM
rfm1 = findRFM(customerData)
rfm1$FinalScore
rfm1[5:16]
options(dplyr.width = Inf)
rfm1
findRFM(customerData, recencyWeight = 3, frequencyWeight = 12,
monetoryWeight = 2)
data(customerdata)
TransNo = 10001:20000
head(sales1)
customerdata = cbind(TransNo, sales1)
head(customerdata)
str(customerdata)
names(customerdata)
data()
customerdata = customerdata[,c('TransNo','custid', 'sales.dates', 'sales.value')]
dim(customerdata)
head(customerdata,100)
dim(customerdata)
library(didrooRFM)
findRFM(customerdata, recencyWeight = 4, frequencyWeight = 4,
monetoryWeight = 4) |
print.CGP <-
function(x,...){
cat("Call:\n")
print(x$call)
cat("\n Lambda:\n")
print(x$lambda)
cat("\n Theta:\n")
print(x$theta)
cat("\n Alpha:\n")
print(x$alpha)
cat("\n Bandwidth:\n")
print(x$bandwidth)
} |
devtools::load_all(".")
req <- request_generate(
"spreadsheets.get",
list(spreadsheetId = "1xTUxWGcFLtDIHoYJ1WsjQuLmpUtBf--8Bcu5lQ302--"),
token = NULL
)
raw_resp <- request_make(req)
response_process(raw_resp)
ct <- httr::content(raw_resp)
str(ct) |
influ_phylm <-
function(formula,
data,
phy,
model = "lambda",
cutoff = 2,
track = TRUE,
...) {
if (!inherits(formula, "formula"))
stop("formula must be class 'formula'")
if (!inherits(data, "data.frame"))
stop("data must be class 'data.frame'")
if (!inherits(phy, "phylo"))
stop("phy must be class 'phylo'")
if ((model == "trend") && (ape::is.ultrametric(phy)))
stop("Trend is unidentifiable for ultrametric trees., see ?phylolm for details")
else
data_phy <- match_dataphy(formula, data, phy, ...)
full.data <- data_phy$data
phy <- data_phy$phy
N <- nrow(full.data)
mod.0 <- phylolm::phylolm(formula,
data = full.data,
model = model,
phy = phy)
intercept.0 <- mod.0$coefficients[[1]]
estimate.0 <- mod.0$coefficients[[2]]
pval.intercept.0 <-
phylolm::summary.phylolm(mod.0)$coefficients[[1, 4]]
pval.estimate.0 <-
phylolm::summary.phylolm(mod.0)$coefficients[[2, 4]]
optpar.0 <- mod.0$optpar
sensi.estimates <-
data.frame(
"species" = numeric(),
"intercept" = numeric(),
"DIFintercept" = numeric(),
"intercept.perc" = numeric(),
"pval.intercept" = numeric(),
"estimate" = numeric(),
"DIFestimate" = numeric(),
"estimate.perc" = numeric(),
"pval.estimate" = numeric(),
"AIC" = numeric(),
"optpar" = numeric()
)
counter <- 1
errors <- NULL
if (track == TRUE)
pb <- utils::txtProgressBar(min = 0,
max = N,
style = 3)
for (i in 1:N) {
crop.data <- full.data[c(1:N)[-i], ]
crop.phy <- ape::drop.tip(phy, phy$tip.label[i])
mod = try(phylolm::phylolm(
formula,
data = crop.data,
model = model,
phy = crop.phy
),
TRUE)
if (isTRUE(class(mod) == "try-error")) {
error <- i
      names(error) <- rownames(full.data)[i]
errors <- c(errors, error)
next
}
else {
sp <- phy$tip.label[i]
intercept <-
mod$coefficients[[1]]
estimate <-
mod$coefficients[[2]]
DIFintercept <-
intercept - intercept.0
DIFestimate <-
estimate - estimate.0
intercept.perc <-
round((abs(
DIFintercept / intercept.0
)) * 100, digits = 1)
estimate.perc <-
round((abs(
DIFestimate / estimate.0
)) * 100, digits = 1)
pval.intercept <-
phylolm::summary.phylolm(mod)$coefficients[[1, 4]]
pval.estimate <-
phylolm::summary.phylolm(mod)$coefficients[[2, 4]]
aic.mod <- mod$aic
if (model == "BM" | model == "trend") {
optpar <- NA
}
if (model != "BM" & model != "trend") {
optpar <- mod$optpar
}
if (track == TRUE)
utils::setTxtProgressBar(pb, i)
estim.simu <-
data.frame(
sp,
intercept,
DIFintercept,
intercept.perc,
pval.intercept,
estimate,
DIFestimate,
estimate.perc,
pval.estimate,
aic.mod,
optpar,
stringsAsFactors = F
)
sensi.estimates[counter,] <- estim.simu
counter = counter + 1
}
}
if (track == TRUE)
on.exit(close(pb))
sDIFintercept <- sensi.estimates$DIFintercept /
stats::sd(sensi.estimates$DIFintercept)
sDIFestimate <- sensi.estimates$DIFestimate /
stats::sd(sensi.estimates$DIFestimate)
sensi.estimates$sDIFestimate <- sDIFestimate
sensi.estimates$sDIFintercept <- sDIFintercept
param0 <-
list(
coef = phylolm::summary.phylolm(mod.0)$coefficients,
aic = phylolm::summary.phylolm(mod.0)$aic,
optpar = mod.0$optpar
)
reorder.on.estimate <- sensi.estimates[order(abs(sensi.estimates$sDIFestimate), decreasing =
T), c("species", "sDIFestimate")]
influ.sp.estimate <-
as.character(reorder.on.estimate$species[abs(reorder.on.estimate$sDIFestimate) >
cutoff])
reorder.on.intercept <- sensi.estimates[order(abs(sensi.estimates$sDIFintercept), decreasing =
T), c("species", "sDIFintercept")]
influ.sp.intercept <-
as.character(reorder.on.intercept$species[abs(reorder.on.intercept$sDIFintercept) >
cutoff])
res <- list(
call = match.call(),
cutoff = cutoff,
formula = formula,
full.model.estimates = param0,
influential.species = list(
influ.sp.estimate = influ.sp.estimate,
influ.sp.intercept = influ.sp.intercept
),
sensi.estimates = sensi.estimates,
data = full.data,
errors = errors
)
class(res) <- "sensiInflu"
if (length(res$errors) > 0) {
warning("Some species deletion presented errors, please check: output$errors")
}
else {
res$errors <- "No errors found."
}
return(res)
} |
expected <- eval(parse(text="structure(list(y = NULL), .Names = \"y\", class = \"data.frame\", row.names = c(NA, 10L), terms = quote(y ~ 0))"));
test(id=0, code={
argv <- eval(parse(text="list(structure(list(y = c(-0.667819876370237, 0.170711734013213, 0.552921941721332, -0.253162069270378, -0.00786394222146348, 0.0246733498130512, 0.0730305465518564, -1.36919169254062, 0.0881443844426084, -0.0834190388782434)), .Names = \"y\", class = \"data.frame\", row.names = c(NA, 10L), terms = quote(y ~ 0)), structure(list(y = NULL), .Names = \"y\", class = \"data.frame\", row.names = c(NA, 10L), terms = quote(y ~ 0)))"));
.Internal(`copyDFattr`(argv[[1]], argv[[2]]));
}, o=expected); |
ibmclose <- stats::ts(c(460, 457, 452, 459, 462, 459, 463, 479, 493, 490,
492, 498, 499, 497, 496, 490, 489, 478, 487, 491, 487, 482, 479,
478, 479, 477, 479, 475, 479, 476, 476, 478, 479, 477, 476, 475,
475, 473, 474, 474, 474, 465, 466, 467, 471, 471, 467, 473, 481,
488, 490, 489, 489, 485, 491, 492, 494, 499, 498, 500, 497, 494,
495, 500, 504, 513, 511, 514, 510, 509, 515, 519, 523, 519, 523,
531, 547, 551, 547, 541, 545, 549, 545, 549, 547, 543, 540, 539,
532, 517, 527, 540, 542, 538, 541, 541, 547, 553, 559, 557, 557,
560, 571, 571, 569, 575, 580, 584, 585, 590, 599, 603, 599, 596,
585, 587, 585, 581, 583, 592, 592, 596, 596, 595, 598, 598, 595,
595, 592, 588, 582, 576, 578, 589, 585, 580, 579, 584, 581, 581,
577, 577, 578, 580, 586, 583, 581, 576, 571, 575, 575, 573, 577,
582, 584, 579, 572, 577, 571, 560, 549, 556, 557, 563, 564, 567,
561, 559, 553, 553, 553, 547, 550, 544, 541, 532, 525, 542, 555,
558, 551, 551, 552, 553, 557, 557, 548, 547, 545, 545, 539, 539,
535, 537, 535, 536, 537, 543, 548, 546, 547, 548, 549, 553, 553,
552, 551, 550, 553, 554, 551, 551, 545, 547, 547, 537, 539, 538,
533, 525, 513, 510, 521, 521, 521, 523, 516, 511, 518, 517, 520,
519, 519, 519, 518, 513, 499, 485, 454, 462, 473, 482, 486, 475,
459, 451, 453, 446, 455, 452, 457, 449, 450, 435, 415, 398, 399,
361, 383, 393, 385, 360, 364, 365, 370, 374, 359, 335, 323, 306,
333, 330, 336, 328, 316, 320, 332, 320, 333, 344, 339, 350, 351,
350, 345, 350, 359, 375, 379, 376, 382, 370, 365, 367, 372, 373,
363, 371, 369, 376, 387, 387, 376, 385, 385, 380, 373, 382, 377,
376, 379, 386, 387, 386, 389, 394, 393, 409, 411, 409, 408, 393,
391, 388, 396, 387, 383, 388, 382, 384, 382, 383, 383, 388, 395,
392, 386, 383, 377, 364, 369, 355, 350, 353, 340, 350, 349, 358,
360, 360, 366, 359, 356, 355, 367, 357, 361, 355, 348, 343, 330,
340, 339, 331, 345, 352, 346, 352, 357),f=1,s=1) |
create_lcp_density <- function(lcps, raster, rescale = FALSE, rasterize_as_points = TRUE) {
if (!inherits(lcps, c("SpatialLines", "SpatialLinesDataFrame"))) {
stop("lcps expects a SpatialLines* object")
}
if (!inherits(raster, "RasterLayer")) {
stop("raster expects a RasterLayer object")
}
if (rasterize_as_points) {
lcps <- methods::as(lcps, "SpatialPoints")
}
cumulative_pts <- raster::rasterize(x = lcps, y = raster, fun = "count")
cumulative_pts[is.na(cumulative_pts)] <- 0
if (rescale) {
rasterRescale <- function(r) {
((r - raster::cellStats(r, "min"))/(raster::cellStats(r, "max") - raster::cellStats(r, "min")))
}
cumulative_pts <- rasterRescale(cumulative_pts)
}
return(cumulative_pts)
} |
randsplit <- function(n.grid,n.interval,method=c("modunif","random","weighted"),
weights=numeric(0),offset=0,min.internal=2)
{
n.interval <- round(n.interval,0); if ( n.interval <= 0 ) n.interval <- 1
min.internal <- max(1,round(min.internal,0))
if ( n.interval == 1 ) return(c(1,n.grid))
if ( method[1] == "modunif")
{
n.max <- max(floor((n.grid-1)/4),1)
if ( n.interval > n.max ) stop(paste("*** randsplit: number of splits",n.interval-1,"too large for",n.grid,"indices; maximum is",n.max))
l.mean <- (n.grid-1)/n.interval
endpoints <- c(1,round(1+1:(n.interval-1)*l.mean,0),n.grid)
for ( i in 2:n.interval ) endpoints[i] <- round(runif(1,endpoints[i]-l.mean/3,endpoints[i]+l.mean/3),0)
}
else
{
if ( n.grid < (n.interval+1)*(min.internal+1) ) stop(paste("*** randsplit: n.grid[",n.grid,"] < (n.interval[",n.interval,"]+1)",
"*(min.internal[",min.internal,"]+1)",sep=""))
if ( method[1] == "random" )
{
offset <- offset %% (min.internal+1)
subset <- seq(from=1+(min.internal+1)+offset,to=n.grid-(min.internal+1),by=min.internal+1)
if ( length(subset) == 1 )
{
if ( n.interval == 2 ) samp <- subset
else stop("*** randsplit: internal error")
}
else
{
samp <- sort(sample(subset,size=n.interval-1))
}
endpoints <- c(1,samp,n.grid)
}
else
{
if ( method[1] == "weighted" )
{
if ( length(weights)>0 & length(weights) != n.grid )
{
weights <- rep(1,n.grid)
warning("randsplit: weight vector with incorrect length, weights ignored")
}
if ( length(weights) == 0 ) weights <- rep(1,n.grid)
offset <- offset %% (min.internal+1)
subset <- seq(from=1+(min.internal+1)+offset,to=n.grid-(min.internal+1),by=min.internal+1)
if ( length(subset) == 1 )
{
if ( n.interval == 2 ) samp <- subset
else stop("*** randsplit: internal error")
}
else
{
subweights <- weights[subset]
samp <- sort(sample(subset,size=n.interval-1,prob=subweights))
}
endpoints <- c(1,samp,n.grid)
}
else
{
stop(paste("unknown splitting method: \"",method[1],"\"",sep=""))
}
}
}
return(endpoints)
} |
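## Usage sketch for randsplit() above (not part of the original source):
##   set.seed(1)
##   randsplit(n.grid = 50, n.interval = 4, method = "random")
##   # returns 5 endpoints starting at 1 and ending at 50, with the internal
##   # split points sampled on a grid that respects 'min.internal'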
cov_adj <- function(data.blocks, covs, n, dim.names=NULL) {
M <- length(data.blocks)
if(Reduce("+", lapply(c("matrix","character","numeric","array","data.frame"),
inherits,
x=covs)) == 0) {
stop("'covs' must be a vector, matrix, array, or dataframe")
}
if (is.null(dim.names)) {
tmp_row_names <- seq_len(nrow(data.blocks[[1]]))
dim.names <- lapply(data.blocks, function(x) list(tmp_row_names,
seq_len(ncol(x))))
}
if (is.null(dim(covs))) covs <- as.matrix(covs)
  if (nrow(covs) != n) stop("Different number of rows between covariates and omics")
if (dim(covs)[2] > 1 & is.null(colnames(covs))) {
warning("Missing column names in covs")
colnames(covs) <- paste0("Cov",seq_len(ncol(covs)))
}
if (is.null(rownames(covs))) {
warning("Missing (row)names in 'covs'.
Consistency with omics row names cannot be evaluated")
}
else {
for (l in seq_len(M)) {
      if(!isTRUE(all.equal(dim.names[[l]][[1]], rownames(covs)))) {
warning("Row names across omic blocks are inconsistent.")
}
}
}
if(inherits(covs, "data.frame")) Q <-
stats::model.matrix(stats::as.formula(paste0(" ~ -1 ",
paste0(colnames(covs),
collapse = " + "))),
data=covs)
else Q <- stats::model.matrix(~-1 + covs)
SVD.Q <- svd(Q, nv=0)
U.Q <- SVD.Q$u[, SVD.Q$d^2 > 1e-5]
R <- lapply(seq_len(M), function(r) t(U.Q) %*% data.blocks[[r]])
L <- lapply(R, function(x) U.Q %*% x)
block.class <- rep("matrix", M)
block.class[vapply(data.blocks, inherits, TRUE, what = "FBM")] <- "FBM"
data.blocks.adj <- lapply(seq_len(M), function(r) {
if (inherits(data.blocks[[r]], "FBM")) {
CC <- bigstatsr::FBM(nrow(data.blocks[[r]]),
ncol(data.blocks[[r]]),
create_bk = T)
bigstatsr::big_apply(CC, function(x, ind) {
x[, ind] <- data.blocks[[r]][, ind] - L[[r]][, ind]
NULL
},
a.combine = "c",
ind = seq_along(dim.names[[r]][[2]]))
}
else CC <- data.blocks[[r]] - L[[r]]
return(CC)
})
names(data.blocks.adj) <- names(data.blocks)
return(data.blocks.adj)
} |
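## Usage sketch for cov_adj() above (illustrative data only, not part of the
## original source):
##   blocks <- list(A = matrix(rnorm(40), nrow = 10), B = matrix(rnorm(30), nrow = 10))
##   age <- rnorm(10)
##   adj <- cov_adj(blocks, covs = age, n = 10)   # warns about missing row names
##   str(adj)   # two matrices with the covariate effect regressed out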
predict.cclcda2 <- function(
object,
newdata,
...
)
{
if (!inherits(object, "cclcda2"))
stop("object not of class", " 'cclcda2'")
x <- newdata
lca.w <- object$lca.w
lca.theta <- object$lca.theta
lca.wmk <- object$lca.wmk
m <- object$m
r <- object$r
d <- object$d
k <- object$k
prior <- object$prior
n <- ncol(newdata)
theta <- unlist(lca.theta, use.names=FALSE)
theta <- matrix(theta, ncol=m, byrow=TRUE)
x.bin <- function(x=x)
{
res <- numeric(sum(r))
for (i in 1:d)
{
for (j in 1:r[i])
{
if((is.na(x[i])==FALSE & x[i]==j))
res[(sum(r[1:i]))-(r[i]-j)] <- 1
}
}
return(res)
}
posterior <- function(z)
{
temp <- prior*apply(t(t(lca.wmk)*apply(theta^x.bin(z), 2, prod)),1,sum)
return(temp/sum(temp))
}
post <- t(apply(x,1,posterior))
class <- factor(apply(post, 1, which.max))
result <- list(class=class, posterior=post)
return(result)
} |
spp_references <- function(taxon_id, raw = FALSE, token = NULL, verbose = TRUE,
pause = 1, ...) {
if (length(taxon_id) > 1) {
out <- lapply(taxon_id, spp_references, raw = raw, token = token,
verbose = verbose, pause = pause, ...)
out <- rcites_combine_lists(out, taxon_id, raw)
} else {
if (is.null(token))
token <- rcites_getsecret()
if (rcites_checkid(taxon_id)) {
out <- NULL
} else {
if (verbose)
rcites_current_id(taxon_id)
q_url <- rcites_url("taxon_concepts/", taxon_id, "/references.json")
tmp <- rcites_res(q_url, token, raw, verbose, ...)
if (raw) {
out <- tmp
class(out) <- c("list", "spp_raw")
} else {
out <- list()
out$references <- rcites_simplify_decisions(tmp)
class(out) <- c("spp_refs")
}
}
}
Sys.sleep(pause)
out
} |
xfibres = function(infile,
bvecs,
bvals,
mask = NULL,
nfibres = 1,
bet.opts = "",
verbose = TRUE,
njumps = NULL,
burnin = NULL,
burnin_noard = NULL,
sampleevery = NULL,
updateproposalevery = NULL,
seed = NULL,
noard = FALSE,
allard = FALSE,
nospat = FALSE,
nonlinear = FALSE,
cnonlinear = FALSE,
rician = FALSE,
f0 = FALSE,
ardf0 = FALSE,
opts = ""
) {
infile = checkimg(infile)
if (is.null(mask)) {
tfile = tempfile(fileext = ".nii.gz")
bet = fslbet(infile, outfile = tfile,
retimg = FALSE, opts = bet.opts)
mask = tempfile(fileext = ".nii.gz")
res = fslbin(tfile, retimg = FALSE, outfile = mask)
}
mask = checkimg(mask)
parse_args = function(x){
x = paste0(names(x), '="', x, '"')
x = paste(x, collapse = " ")
x
}
parse_num_args = function(x){
x = paste0(names(x), '=', x)
x = paste(x, collapse = " ")
x
}
if (is.matrix(bvecs)) {
stopifnot(ncol(bvecs) == 3)
tfile = tempfile(fileext = ".txt")
bvecs = apply(bvecs, 1, paste, collapse = " ")
writeLines(bvecs, con = tfile)
bvecs = tfile
}
if (is.numeric(bvals)) {
tfile = tempfile(fileext = ".txt")
bvals = as.character(bvals)
writeLines(bvals, con = tfile)
bvals = tfile
}
infile = unname(infile)
mask = unname(mask)
bvecs = unname(bvecs)
bvals = unname(bvals)
vec = c("--data" = infile,
"--mask" = mask,
"--bvecs" = bvecs,
"--bvals" = bvals)
vec = parse_args(vec)
nfibres = unname(nfibres)
njumps = unname(njumps)
burnin = unname(burnin)
burnin_noard = unname(burnin_noard)
sampleevery = unname(sampleevery)
updateproposalevery = unname(updateproposalevery)
num_vec = c(
"--nfibres" = nfibres,
"--njumps" = njumps,
"--burnin" = burnin,
"--burnin_noard" = burnin_noard,
"--sampleevery" = sampleevery,
"--updateproposalevery" = updateproposalevery)
num_vec = parse_num_args(num_vec)
vec = paste(vec, num_vec)
  # append the random seed to the command only when one is supplied
  if (!is.null(seed)) {
    seed = unname(seed)
    vec = paste(vec, parse_num_args(c("--seed" = seed)))
  }
noard = unname(noard)
allard = unname(allard)
nospat = unname(nospat)
nonlinear = unname(nonlinear)
cnonlinear = unname(cnonlinear)
rician = unname(rician)
f0 = unname(f0)
ardf0 = unname(ardf0)
verbose = unname(verbose)
log_vec = c(
"--noard" = noard,
"--allard" = allard,
"--nospat" = nospat,
"--nonlinear" = nonlinear,
"--cnonlinear" = cnonlinear,
"--rician" = rician,
"--f0" = f0,
"--ardf0" = ardf0,
"--verbose" = verbose)
nn = names(log_vec)
log_vec = as.logical(log_vec)
names(log_vec) = nn
log_vec = log_vec[ log_vec ]
nn = names(log_vec)
if (length(nn) > 0) {
nn = paste(nn, collapse = " ")
vec = paste(vec, nn)
}
vec = paste0(vec, " ", opts)
cmd = get.fsl()
s = sprintf('%s %s', "xfibres", vec)
cmd <- paste0(cmd, s)
if (verbose) {
message(cmd, "\n")
}
res = system(cmd)
return(res)
} |
AIC.glmssn <- function(object,...,k=2) {
if(class(object) != "glmssn") return("Not a glmssn object")
if(!missing(k)) cat("This argument has no effect\n")
if(object$args$family == "poisson") return(NA)
if(object$args$family =="binomial") return(NA)
cp <- covparms(object)[,3]
nparmstheta <- length(cp)
rankX <- object$sampinfo$rankX
if(object$args$EstMeth == "REML")
nparms <- nparmstheta
if(object$args$EstMeth == "ML")
nparms <- rankX + nparmstheta
object$estimates$m2LL + k*nparms
} |
plot_MAC=function(models, alpha, con_sets, p,xnames=NULL,color="lightblue"){
predictors=1:p
if (is.null(xnames))xnames=as.character(predictors)
result=list()
for (i in 1:length(alpha)){
bmatrix=t(sapply(con_sets[[i]], function(x)predictors %in%
(models[[x]])))
result[[i]]=bmatrix
row.names(bmatrix)=as.character(1:nrow(bmatrix))
    plot_title=paste(paste(100*(1-alpha[i]),"%",sep=""),"MCS")
    plot(bmatrix, col=c("white",color),main = plot_title,key=NULL,
xlab="predictors", ylab="Models")
axis(1,at=predictors,labels = xnames)
}
return(result)
} |
errcheck_vreq<-function(com,comnull,vr)
{
if (!(is.numeric(com) && is.numeric(comnull) && is.numeric(vr)))
{
stop("Error in vreq class: all slots must be numeric")
}
if (!(length(com)==1 && length(comnull)==1 && length(vr)==1))
{
stop("Error in vreq class: all slots must be single numbers")
}
if (!(is.finite(com) && is.finite(comnull) && is.finite(vr)))
{
stop("Error in vreq class: non-finite values not allowed")
}
if (com<=0 || comnull<=0 || vr<=0)
{
stop("Error in vreq class: only positive values allowed")
}
if (!isTRUE(all.equal(com,comnull*vr)))
{
stop("Error in vreq class: com should equal comnull times vr")
}
} |
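## Usage sketch for errcheck_vreq() above (not part of the original source):
##   errcheck_vreq(com = 6, comnull = 3, vr = 2)      # silent: 6 == 3 * 2
##   ## errcheck_vreq(com = 6, comnull = 3, vr = 1.5) # would stop(): com != comnull * vr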
context("Check select_neighbours() function")
test_that("Output format - select_neighbours",{
expect_is(select_neighbours(apartmentsTest, new_apartment, n = 10), "data.frame")
expect_is(select_neighbours(apartmentsTest, new_apartment, n = NULL, frac = 0.001), "data.frame")
}) |
gvcm.cat <-
function(
formula,
data,
family = gaussian,
method = c("lqa", "AIC", "BIC"),
tuning = list(lambda=TRUE, specific=FALSE, phi=0.5, grouped.fused=0.5, elastic=0.5, vs=0.5, spl=0.5),
weights,
offset,
start,
control,
model = FALSE,
x = FALSE,
y = FALSE,
plot=FALSE,
...
)
{
Call <- match.call()
indx <- match(c("formula", "data"),
names(Call), nomatch = 0)
if (indx[1] == 0)
stop("A formula argument is required. \n")
if (indx[2] == 0)
stop("A data argument is required. \n")
if (missing(control))
control <- cat_control(...)
if (is.character(family))
family <- get(family, mode = "function", envir = parent.frame())
if (is.function(family))
family <- family()
if (is.null(family$family)) {
print(family)
stop("'family' not recognized")
}
if (family$family=="Gamma") family <- Gamma(link="log")
if (!is.logical(model) || !is.logical(x) || !is.logical(y) || !is.logical(plot))
stop ("Error in input arguments. \n")
method <- match.arg(method)
if (!(method %in% c("lqa", "AIC", "BIC")))
stop ("method is incorrect. \n")
if (missing(data))
data <- environment(formula)
data <- na.omit(data)
dsgn <- design(formula,data)
X <- dsgn$X
n <- nrow(X)
Y <- model.extract(dsgn$m, "response")
if (is.factor(Y)==TRUE){Y <- as.numeric(Y)-1}
if (missing(weights))
weights <- rep(1, times=n)
if (length(weights)!=nrow(X) || !is.vector(weights) || !is.numeric(weights))
stop("Error in input weights. ")
if (!is.null(dim(Y)[2]) && family$family=="binomial") {
weights <- (Y[,1]+Y[,2])*weights
Y <- Y[,1]/(Y[,1]+Y[,2])
}
if (family$family=="binomial" && (sum(Y>1) || sum(Y<0)))
stop("No binomial response. \n")
if (family$family=="Gamma" && (sum(Y<=0)))
stop("No Gamma-distributed response. \n")
indices <- index(dsgn, data, formula)
indices["index2b",1] <- if (control$assured.intercept) 0 else 1
not <- c()
if (sum(X[,1])==n || sum(X[,1])==sum(weights)) {not <- 1}
if (any(rowSums(indices)[c(7,10)]!=0)){
sm <- which( c(indices[7,]+ indices[10,]) != 0)
for (i in 1:length(sm)) { not <- c(not, (sum(indices[1, 1:sm[i]])-indices[1,sm[i]]+1):(sum(indices[1, 1:sm[i]])) )}
}
not <- if (length(not)>0) {-not} else {1:ncol(X)}
if(control$center){
centering <- colSums(diag(weights)%*%X)/sum(weights)
X[, not] <- scale(X[, not], center = centering[not], scale = FALSE)
}
if(control$standardize){
scaling <- sqrt(colSums(t((t(X) - colSums(diag(weights)%*%X)/sum(weights))^2*weights))/(sum(weights)-1))
X[, not] <- scale(X[, not], center = FALSE, scale = scaling[not])
}
if (method %in% c("AIC", "BIC")) {
output <- abc(X, Y, indices, family, method, weights, offset, start, control, plot)
} else {
output <- pest(X, Y, indices, family, tuning, weights, offset, start, control, plot)
}
if (!exists("output"))
stop("Error in argument 'method'")
if (control$bootstrap>0) {
bootstrap.errors <- bootstrap(X, Y, indices, family, tuning=output$tuning, weights,
offset, start, control=output$control, method)
} else {
bootstrap.errors <- NULL
}
output$call <- Call
output$formula <- dsgn$formula
output$terms <- dsgn$Terms
output$data <- data
output$x <- if(x==TRUE) X else NULL
output$y <- if(y==TRUE) Y else NULL
output$model <- if(model==TRUE) dsgn$m else NULL
output$xlevels <- .getXlevels(dsgn$Terms, dsgn$m)
output$bootstrap.errors <- bootstrap.errors
output$method <- method
output$scaling <- if(control$standardize == TRUE) scaling else NULL
output$centering <- if(control$center == TRUE) centering else NULL
class(output) <- c("gvcm.cat", "glm", "lm")
output
} |
pc.stable = function(x, cluster, whitelist = NULL, blacklist = NULL,
test = NULL, alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE,
undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "pc.stable", debug = debug,
undirected = undirected)
}
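## Usage sketch (hypothetical; assumes the 'learning.test' data frame shipped
## with bnlearn):
##   data(learning.test)
##   pc.stable(learning.test, alpha = 0.01)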
gs = function(x, cluster, whitelist = NULL, blacklist = NULL, test = NULL,
alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE, undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "gs", debug = debug, undirected = undirected)
}
iamb = function(x, cluster, whitelist = NULL, blacklist = NULL, test = NULL,
alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE, undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "iamb", debug = debug, undirected = undirected)
}
fast.iamb = function(x, cluster, whitelist = NULL, blacklist = NULL,
test = NULL, alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE,
undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "fast.iamb", debug = debug,
undirected = undirected)
}
inter.iamb = function(x, cluster, whitelist = NULL, blacklist = NULL,
test = NULL, alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE,
undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "inter.iamb", debug = debug,
undirected = undirected)
}
iamb.fdr = function(x, cluster, whitelist = NULL, blacklist = NULL, test = NULL,
alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE, undirected = FALSE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "iamb.fdr", debug = debug,
undirected = undirected)
}
mmpc = function(x, cluster, whitelist = NULL, blacklist = NULL, test = NULL,
alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE, undirected = TRUE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "mmpc", debug = debug,
undirected = undirected)
}
si.hiton.pc = function(x, cluster, whitelist = NULL, blacklist = NULL,
test = NULL, alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE,
undirected = TRUE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "si.hiton.pc", debug = debug,
undirected = undirected)
}
hpc = function(x, cluster, whitelist = NULL, blacklist = NULL, test = NULL,
alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE, undirected = TRUE) {
bnlearn(x = x, cluster = cluster, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, method = "hpc", debug = debug, undirected = undirected)
}
aracne = function(x, whitelist = NULL, blacklist = NULL, mi = NULL,
debug = FALSE) {
mi.matrix(x = x, whitelist = whitelist, blacklist = blacklist,
method = "aracne", mi = mi, debug = debug)
}
chow.liu = function(x, whitelist = NULL, blacklist = NULL, mi = NULL,
debug = FALSE) {
mi.matrix(x = x, whitelist = whitelist, blacklist = blacklist,
method = "chow.liu", mi = mi, debug = debug)
}
hc = function(x, start = NULL, whitelist = NULL, blacklist = NULL,
score = NULL, ..., debug = FALSE, restart = 0, perturb = 1,
max.iter = Inf, maxp = Inf, optimized = TRUE) {
greedy.search(x = x, start = start, whitelist = whitelist,
blacklist = blacklist, score = score, heuristic = "hc", debug = debug,
..., restart = restart, perturb = perturb,
max.iter = max.iter, maxp = maxp, optimized = optimized)
}
tabu = function(x, start = NULL, whitelist = NULL, blacklist = NULL,
score = NULL, ..., debug = FALSE, tabu = 10, max.tabu = tabu,
max.iter = Inf, maxp = Inf, optimized = TRUE) {
greedy.search(x = x, start = start, whitelist = whitelist,
blacklist = blacklist, score = score, heuristic = "tabu", debug = debug,
..., max.iter = max.iter, tabu = tabu, max.tabu = max.tabu,
maxp = maxp, optimized = optimized)
}
rsmax2 = function(x, whitelist = NULL, blacklist = NULL, restrict = "si.hiton.pc",
maximize = "hc", restrict.args = list(), maximize.args = list(),
debug = FALSE) {
hybrid.search(x, whitelist = whitelist, blacklist = blacklist,
restrict = restrict, maximize = maximize, restrict.args = restrict.args,
maximize.args = maximize.args, debug = debug)
}
mmhc = function(x, whitelist = NULL, blacklist = NULL, restrict.args = list(),
maximize.args = list(), debug = FALSE) {
hybrid.search(x, whitelist = whitelist, blacklist = blacklist,
restrict = "mmpc", maximize = "hc", restrict.args = restrict.args,
maximize.args = maximize.args, debug = debug)
}
h2pc = function(x, whitelist = NULL, blacklist = NULL, restrict.args = list(),
maximize.args = list(), debug = FALSE) {
hybrid.search(x, whitelist = whitelist, blacklist = blacklist,
restrict = "hpc", maximize = "hc", restrict.args = restrict.args,
maximize.args = maximize.args, debug = debug)
}
learn.mb = function(x, node, method, whitelist = NULL, blacklist = NULL,
start = NULL, test = NULL, alpha = 0.05, B = NULL, max.sx = NULL,
debug = FALSE) {
mb.backend(x, target = node, method = method, whitelist = whitelist,
blacklist = blacklist, start = start, test = test, alpha = alpha,
B = B, max.sx = max.sx, debug = debug)
}
learn.nbr = function(x, node, method, whitelist = NULL, blacklist = NULL,
test = NULL, alpha = 0.05, B = NULL, max.sx = NULL, debug = FALSE) {
nbr.backend(x, target = node, method = method, whitelist = whitelist,
blacklist = blacklist, test = test, alpha = alpha, B = B,
max.sx = max.sx, debug = debug)
}
naive.bayes = function(x, training, explanatory) {
bayesian.classifier(x, training = training, explanatory = explanatory,
method = "naive.bayes", whitelist = NULL, blacklist = NULL, expand = list(),
debug = FALSE)
}
tree.bayes = function(x, training, explanatory, whitelist = NULL,
blacklist = NULL, mi = NULL, root = NULL, debug = FALSE) {
bayesian.classifier(x, training = training, explanatory = explanatory,
method = "tree.bayes", whitelist = whitelist, blacklist = blacklist,
expand = list(estimator = mi, root = root), debug = debug)
} |
overall_abnormality <- function(Subj, Ref, stopping_rule = "Kaiser-Guttman", dist_measure = "MAD", TVE = 1, k = 2){
n_vars <- length(Subj)
n <- nrow(Ref)
refpop_means <- colMeans(Ref)
refpop_sd <- apply(Ref,2,stats::sd)
refpop_sc <- scale(Ref,scale=T,center=T)
refpop_cov<- stats::cov(refpop_sc)
p <- stats::prcomp(Ref, center=T, scale=T, retx=T)
EigenValues <- p$sdev^2
if(stopping_rule=="Kaiser-Guttman"){
numPCs <- sum(EigenValues >= 1)
}
if(stopping_rule=="brStick"){
numPCs <- brStick(EigenValues)
}
if(stopping_rule=="TVE"){
EigenSum <- sum(p$sdev^2)
PC_VAF <- t(matrix(abs(EigenValues/EigenSum)))
cum_PC_VAF <- round(cumsum(PC_VAF),5)
numPCs <- min(which(cum_PC_VAF >= TVE))
}
refpop_projs<- p$x
PCmeans<- colMeans(refpop_projs)
PCsds <- p$sdev
Subj_sc <- (Subj-refpop_means)/refpop_sd
Subj_projs<- Subj_sc%*% p$rotation[,1:numPCs]
dist <- (Subj_projs-PCmeans[1:numPCs])/PCsds[1:numPCs]
if(dist_measure=="Euclidean"){
k=2
dist_squared <- (abs(dist))^k
abnormality <- sum(dist_squared)^(1/k)
}
if(dist_measure=="MAD"){
dist_abs <- abs(dist)
abnormality <- sum(dist_abs)/numPCs
}
if(dist_measure=="Manhattan"){
k=1
dist_squared <- (abs(dist))^k
abnormality <- sum(dist_squared)^(1/k)
}
if(dist_measure=="RMSE"){
n <- length(dist)
sum_dist_squared <- sum(dist^2)
abnormality <- sqrt(sum_dist_squared/n)
}
if(dist_measure=="Lk-Norm"){
dist_squared <- (abs(dist))^k
abnormality <- sum(dist_squared)^(1/k)
}
return(abnormality)
} |
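## Hedged usage sketch for overall_abnormality() above (not part of the original source).
## It only exercises the default "Kaiser-Guttman"/"MAD" path, so the external brStick()
## helper is not required; everything else is base R / stats.
set.seed(42)
Ref_example <- as.data.frame(matrix(rnorm(200 * 5), ncol = 5)) # reference sample, 200 x 5
Subj_example <- rnorm(5, mean = 2) # one subject shifted away from the reference mean
overall_abnormality(Subj_example, Ref_example) # larger values = more abnormal
overall_abnormality(Subj_example, Ref_example, dist_measure = "Euclidean") # alternative metric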
activity_frequency <- function(eventlog, level, append, append_column, ...) {
UseMethod("activity_frequency")
}
activity_frequency.eventlog <- function(eventlog,
level = c("log","trace","activity","case"),
append = F,
append_column = NULL,
sort = TRUE,
...) {
absolute <- NULL
level <- match.arg(level)
level <- deprecated_level(level, ...)
if(is.null(append_column)) {
append_column <- case_when(level == "activity" ~ "absolute",
level == "case" ~ "absolute",
T ~ "NA")
}
FUN <- switch(level,
log = activity_frequency_log,
case = activity_frequency_case,
trace = activity_frequency_trace,
activity = activity_frequency_activity)
output <- FUN(eventlog = eventlog)
if(sort && level %in% c("case", "trace","activity")) {
output %>%
arrange(-absolute) -> output
}
return_metric(eventlog, output, level, append, append_column, "activity_frequency")
}
activity_frequency.grouped_eventlog <- function(eventlog,
level = c("log","trace","activity","case"),
append = F,
append_column = NULL,
sort = TRUE,
...) {
absolute <- NULL
level <- match.arg(level)
level <- deprecated_level(level, ...)
if(is.null(append_column)) {
append_column <- case_when(level == "activity" ~ "absolute",
level == "case" ~ "absolute",
T ~ "NA")
}
FUN <- switch(level,
log = activity_frequency_log,
case = activity_frequency_case,
trace = activity_frequency_trace,
activity = activity_frequency_activity)
if(level != "log") {
grouped_metric(eventlog, FUN) -> output
}
else {
grouped_metric_raw_log(eventlog, FUN) -> output
}
if(sort && level %in% c("case", "trace","activity")) {
output %>%
arrange(-absolute) -> output
}
return_metric(eventlog, output, level, append, append_column, "activity_frequency")
} |
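## Hedged usage sketch for the activity_frequency() methods above (not part of the
## original source). It assumes the edeaR/bupaR ecosystem is installed and that
## eventdataR::patients is available as a demo event log; both package names are
## assumptions made for illustration only.
library(edeaR)
library(eventdataR)
activity_frequency(patients, level = "activity") # absolute/relative counts per activity
activity_frequency(patients, level = "log") # a single summary for the whole log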
predictShape.lm <- function(fit, datamod, PC, mshape)
{
if (!inherits(fit, "lm"))
stop("provide linear model of class 'lm'!")
dims <- dim(mshape)
if (!missing(datamod)) {
mat <- model.matrix(datamod)
pred <- mat%*%fit$coefficients
names <- as.matrix(model.frame(datamod))
names <- apply(names,1, paste,collapse= "_")
names <- gsub(" ","",names)
} else {
pred <- predict(fit)
names <- rownames(pred)
}
predPC <- t(PC%*%t(pred))
if (dim(pred)[1] > 1) {
out <- array(NA,dim=c(dims,dim(pred)[1]))
for (i in 1:dim(out)[3])
out[,,i] <- mshape+matrix(predPC[i,],dims[1],dims[2])
} else {
out <- mshape+matrix(predPC,dims[1],dims[2])
}
if (length(dim(out)) == 3)
dimnames(out)[[3]] <- names
rownames(pred) <- names
return(list(predicted=out,predictedPC=pred))
} |
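## Hedged usage sketch for predictShape.lm() above (not part of the original source).
## Purely synthetic inputs: 10 landmarks in 2 dimensions, scores on 3 "PCs" regressed
## on a size covariate; a multivariate lm fit is passed without a 'datamod'.
set.seed(1)
k <- 10; m <- 2; n <- 30; npc <- 3
mshape_ex <- matrix(rnorm(k * m), k, m) # mean shape, k x m
PC_ex <- matrix(rnorm(k * m * npc), k * m, npc) # loadings, (k*m) x npc
size <- rnorm(n)
scores <- cbind(0.5 * size + rnorm(n, sd = 0.1), # PC scores, first one size-dependent
rnorm(n, sd = 0.1),
rnorm(n, sd = 0.1))
fit_ex <- lm(scores ~ size) # multivariate linear model
pred_ex <- predictShape.lm(fit_ex, PC = PC_ex, mshape = mshape_ex)
dim(pred_ex$predicted) # k x m x n array of predicted shapes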
mv.eeltest2 <- function(y1, y2, tol = 1e-07, R = 0) {
dm <- dim(y1)
n1 <- dm[1] ; d <- dm[2]
n2 <- dim(y2)[1]
eel2 <- function(x, y, n1, n2, d) {
lam1 <- numeric(d)
fx1 <- numeric(n1)
fx2 <- n1
fx2a <- x
fx3 <- colsums( fx2a )
fx4 <- fx3 / fx2
fy1 <- numeric(n2)
fy2 <- n2
fy2a <- y
fy3 <- colsums( fy2a )
fy4 <- fy3 / fy2
f <- fx4 - fy4
der <- - tcrossprod( fx4 ) + crossprod(fx2a, x) / fx2 - tcrossprod( fy4 ) + crossprod(fy2a, y) / fy2
lam2 <- lam1 - solve(der, f)
i <- 2
while ( sum( abs(lam2 - lam1 ) ) > tol ) {
i <- i + 1
lam1 <- lam2
fx1 <- exp( as.vector( x %*% lam1 ) )
fx2 <- sum(fx1)
fx2a <- x * fx1
fx3 <- colsums( fx2a )
fx4 <- fx3 / fx2
fy1 <- exp( as.vector( - y %*% lam1 ) )
fy2 <- sum(fy1)
fy2a <- y * fy1
fy3 <- colsums( fy2a )
fy4 <- fy3 / fy2
f <- fx4 - fy4
der <- - tcrossprod( fx4 ) + crossprod(fx2a, x) / fx2 - tcrossprod( fy4 ) + crossprod(fy2a, y) / fy2
lam2 <- lam1 - solve(der, f)
}
p1 <- fx1 / fx2
p2 <- fy1 / fy2
stat <- - 2 * sum( log( n1 * p1) ) - 2 * sum( log(n2 * p2) )
pvalue <- pchisq(stat, d, lower.tail = FALSE)
info <- c(stat, pvalue, d)
names(info) <- c("statistic", "p-value", "degrees of freedom")
list(p1 = p1, p2 = p2, lambda = lam2, iters = i, info = info)
}
runtime <- proc.time()
res <- try( eel2(y1, y2, n1, n2, d), silent = TRUE )
runtime <- proc.time() - runtime
if ( inherits(res, "try-error") ) {
## the Newton iteration failed: return a placeholder result instead of erroring out
res <- list(p1 = NA, p2 = NA, lambda = NA, iters = NA, info = c(1e10, 0, d))
names(res$info) <- c("statistic", "p-value", "degrees of freedom")
}
res$runtime <- runtime
res$note <- paste("Chi-square approximation")
if ( R == 0 || res$info[1] == 1e+10 ) {
res <- res
} else if ( R == 1 ) {
test <- as.numeric( res$info[1] )
d <- dim(y1)[2]
delta <- Rfast::james(y1, y2, R = 1)$info[3]
stat <- as.numeric( test / delta )
pvalue <- as.numeric( pchisq(stat, d, lower.tail = FALSE) )
res$info[1] <- stat
res$info[2] <- pvalue
res$note <- paste("James corrected chi-square approximation")
} else if ( R == 2 ) {
test <- as.numeric( res$info[1] )
d <- dim(y1)[2]
dof <- Rfast::james(y1, y2, R = 2)$info[5]
v <- dof + d - 1
stat <- dof / (v * d) * test
pvalue <- pf(stat, d, dof, lower.tail = FALSE)
dof <- c(d, v - d + 1)
res$info <- c(stat, pvalue, dof)
names(res$info) <- c("statistic", "p-value", "numer df", "denom df")
res$note <- paste("F approximation")
}
res
} |
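## Hedged usage sketch for mv.eeltest2() above (not part of the original source).
## The function calls colsums() unqualified, so this assumes library(Rfast) is attached
## to provide it; with R = 0 the Rfast::james() correction branches are never reached.
library(Rfast)
set.seed(7)
y1_ex <- matrix(rnorm(100 * 3), ncol = 3) # first sample, 100 x 3
y2_ex <- matrix(rnorm(120 * 3, mean = 0.2), ncol = 3) # second sample, slightly shifted
mv.eeltest2(y1_ex, y2_ex, R = 0)$info # statistic, p-value, degrees of freedom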
jubilee.eqty_ols <- function(dtb, end.frac, lookback.channel, tol.frac=1/6) {
jubilee.assert_column(dtb, c("fraction", "log.tri"))
if (length(end.frac) > 1) {
f <- function(x) {
s <- jubilee.eqty_ols(dtb, x, lookback.channel, tol.frac)
c(x, s)
}
t <- do.call(rbind, parallel::mclapply(end.frac, f))
colnames(t) <- c("fraction", "eqty.lm.a", "eqty.lm.r")
t <- data.table(t)
fraction <- NULL
data.table::setkey(t,fraction)
return(t)
}
start.frac <- end.frac - lookback.channel
I <- which(dtb$fraction >= start.frac & dtb$fraction <= end.frac & !is.na(dtb$log.tri))
if (length(I)==0) return(c(NA,NA))
dtb2 <- dtb[I,]
if (min(dtb2$fraction) > start.frac+tol.frac) return(c(NA,NA))
t <- dtb2$fraction - end.frac
X <- dtb2$log.tri
R <- stats::cov(X,t)/stats::var(t)
a <- mean(X)-R*mean(t)
return(c(a,R))
} |
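## Hedged usage sketch for jubilee.eqty_ols() above (not part of the original source).
## It assumes the jubilee package is loaded and that jubilee.assert_column() only checks
## that the named columns exist; the data below is synthetic and purely illustrative.
library(data.table)
set.seed(3)
frac_ex <- seq(1990, 2020, by = 1 / 12) # monthly index in fractional years
dtb_ex <- data.table(
fraction = frac_ex,
log.tri = 0.06 * (frac_ex - 1990) + rnorm(length(frac_ex), sd = 0.02) # ~6%/yr log total return
)
jubilee.eqty_ols(dtb_ex, end.frac = 2020, lookback.channel = 10) # c(intercept, slope) of the channel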
me_ <- nm_fun("TEST-range_autofit")
test_that("range_autofit() works", {
skip_if_offline()
skip_if_no_token()
dat <- tibble::tribble(
~x, ~y, ~z, ~a, ~b, ~c,
"abcd", "efgh", "ijkl", "mnop", "qrst", "uvwx"
)
ss <- local_ss(me_(), sheets = list(dat = dat))
ssid <- as_sheets_id(ss)
range_autofit(ss)
before <- gs4_get_impl_(
ssid, fields = "sheets.data.columnMetadata.pixelSize"
)
dat2 <- purrr::modify(dat, ~ paste0(.x, "_", .x))
dat4 <- purrr::modify(dat2, ~ paste0(.x, "_", .x))
sheet_append(ss, dat4)
range_autofit(ss)
after <- gs4_get_impl_(
ssid, fields = "sheets.data.columnMetadata.pixelSize"
)
before <- pluck(before, "sheets", 1, "data", 1, "columnMetadata")
after <- pluck(after, "sheets", 1, "data", 1, "columnMetadata")
expect_true(all(unlist(before) < unlist(after)))
})
test_that("A1-style ranges can be turned into a request", {
req <- prepare_auto_resize_request(123, as_range_spec("D:H"))
req <- pluck(req, 1, "autoResizeDimensions", "dimensions")
expect_equal(req$dimension, "COLUMNS")
expect_equal(req$startIndex, cellranger::letter_to_num("D") - 1)
expect_equal(req$endIndex, cellranger::letter_to_num("H"))
req <- prepare_auto_resize_request(123, as_range_spec("3:7"))
req <- pluck(req, 1, "autoResizeDimensions", "dimensions")
expect_equal(req$dimension, "ROWS")
expect_equal(req$startIndex, 3 - 1)
expect_equal(req$endIndex, 7)
})
test_that("cell_limits can be turned into a request", {
req <- prepare_auto_resize_request(
123,
as_range_spec(cell_limits())
)
req <- pluck(req, 1, "autoResizeDimensions", "dimensions")
expect_equal(req$dimension, "COLUMNS")
expect_null(req$startIndex)
expect_null(req$endIndex)
req <- prepare_auto_resize_request(
123,
as_range_spec(cell_cols(c(3, NA)))
)
req <- pluck(req, 1, "autoResizeDimensions", "dimensions")
expect_equal(req$dimension, "COLUMNS")
expect_equal(req$startIndex, 3 - 1)
expect_null(req$endIndex)
req <- prepare_auto_resize_request(
123,
as_range_spec(cell_cols(c(NA, 5)))
)
req <- pluck(req, 1, "autoResizeDimensions", "dimensions")
expect_equal(req$dimension, "COLUMNS")
expect_equal(req$endIndex, 5)
})
test_that("an invalid range is rejected", {
expect_error(
prepare_auto_resize_request(123, as_range_spec("D3:H")),
"only columns or only rows"
)
}) |
wkt2geojson <- function(str, fmt = 16, feature = TRUE, numeric = TRUE,
simplify = FALSE) {
type <- get_type(str, ignore_case = TRUE)
res <- switch(type,
Point = load_point(str, fmt, feature, numeric),
Multipoint = load_multipoint(str, fmt, feature, numeric,
simplify),
Polygon = load_polygon(str, fmt, feature, numeric),
Multipolygon = load_multipolygon(str, fmt, feature, numeric,
simplify),
Linestring = load_linestring(str, fmt, feature, numeric),
Multilinestring = load_multilinestring(str, fmt, feature, numeric,
simplify),
Geometrycollection =
load_geometrycollection(str, fmt, feature, numeric)
)
structure(res, class = "geojson")
}
wellknown_types <- c("POINT",'MULTIPOINT',"POLYGON","MULTIPOLYGON",
"LINESTRING","MULTILINESTRING","GEOMETRYCOLLECTION",
"TRIANGLE","CIRCULARSTRING","COMPOUNDCURVE")
get_type <- function(x, ignore_case = FALSE){
type <- cw(wellknown_types[sapply(wellknown_types, grepl, x = x, ignore.case = ignore_case)],
onlyfirst = TRUE)
if (length(type) > 1) {
grep(tolower(strextract(x, "[A-Za-z]+")), type, ignore.case = TRUE,
value = TRUE)
} else {
type
}
}
load_point <- function(str, fmt = 16, feature = TRUE, numeric = TRUE) {
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("POINT\\s?([ZMzm]+)?\\(| \\)", "", str,
ignore.case = TRUE))
coords <- strsplit(gsub("[[:punct:]]$", "",
str_trim_(str_coord)), "\\s")[[1]]
coords <- nozero(coords)
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = coords[-4],
"m" = coords[-3],
"z" = coords
)
}
iffeat('Point', if (numeric) as.numeric(coords, fmt) else
format_num(coords, fmt), feature)
}
format_num <- function(x, fmt) {
sprintf(paste0("%.", fmt, "f"), as.numeric(x))
}
load_multipoint <- function(str, fmt = 16, feature = TRUE, numeric = TRUE,
simplify = FALSE) {
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("MULTIPOINT\\s?([ZMzm]+)?\\(", "", str,
ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
str_coord <- strsplit(str_coord, "\\),")[[1]]
if (simplify) {
if (length(str_coord) == 1) return(load_point(str_coord, fmt, feature, numeric))
}
coords <- unname(lapply(str_coord, function(z){
pairs <- strsplit(strsplit(gsub("\\(|\\)", "",
str_trim_(z)), ",|,\\s")[[1]], "\\s")
do.call("rbind", lapply(pairs, function(x) {
tmp <- if (numeric) as.numeric(nozero(x), fmt) else format_num(nozero(x), fmt)
matrix(tmp, ncol = length(tmp))
}))
}))
coords <- do.call("rbind", coords)
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = coords[,-4],
"m" = coords[,-3],
"z" = coords
)
}
iffeat('MultiPoint', coords, feature)
}
load_polygon <- function(str, fmt = 16, feature = TRUE, numeric = TRUE) {
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("POLYGON\\s?([ZMzm]+)?\\(", "", str,
ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
str_coord <- strsplit(str_coord, "\\),")[[1]]
coords <- lapply(str_coord, function(z){
pairs <- strsplit(strsplit(gsub("\\(|\\)", "",
str_trim_(z)), ",|,\\s")[[1]], "\\s")
do.call("rbind", lapply(pairs, function(x) {
tmp <- if (numeric) as.numeric(nozero(x), fmt) else format_num(nozero(x), fmt)
matrix(tmp, ncol = length(tmp))
}))
})
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = lapply(coords, function(z) z[,-4]),
"m" = lapply(coords, function(z) z[,-3]),
"z" = coords
)
}
iffeat('Polygon', coords, feature)
}
load_multipolygon <- function(str, fmt = 16, feature = TRUE, numeric = TRUE,
simplify = FALSE) {
str <- gsub("\n", "", str)
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("MULTIPOLYGON\\s?([Zmzm]+)?", "", str, ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
str_coord <- strsplit(str_coord, "\\)),")[[1]]
if (simplify) {
if (length(str_coord) == 1) return(load_polygon(str_coord, fmt, feature, numeric))
}
coords <- lapply(str_coord, function(z){
pairs <- strsplit( gsub("\\(|\\)", "", strsplit(str_trim_(z), "\\),")[[1]]), ",|,\\s")
lapply(pairs, function(zz){
do.call("rbind", unname(lapply(sapply(str_trim_(zz), strsplit, split = "\\s"), function(x) {
tmp <- if (numeric) as.numeric(nozero(x), fmt) else format_num(nozero(x), fmt)
matrix(tmp, ncol = length(tmp))
})))
})
})
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = lapply(coords, function(z) lapply(z, function(w) w[,-4])),
"m" = lapply(coords, function(z) lapply(z, function(w) w[,-3])),
"z" = coords
)
}
iffeat('MultiPolygon', coords, feature)
}
load_linestring <- function(str, fmt = 16, feature = TRUE, numeric = TRUE){
str <- gsub("\n", "", str)
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("LINESTRING\\s?([Zmzm]+)?", "", str, ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
str_coord <- strsplit(str_coord, "\\),")[[1]]
coords <- lapply(str_coord, function(z){
pairs <- strsplit(strsplit(gsub("\\(|\\)", "", str_trim_(z)), ",|,\\s")[[1]], "\\s")
do.call("rbind", lapply(pairs, function(x) {
tmp <- if (numeric) as.numeric(nozero(x), fmt) else format_num(nozero(x), fmt)
matrix(tmp, ncol = length(tmp))
}))
})
coords <- do.call("rbind", coords)
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = coords[,-4],
"m" = coords[,-3],
"z" = coords
)
}
iffeat('LineString', coords, feature)
}
load_multilinestring <- function(str, fmt = 16, feature = TRUE, numeric = TRUE,
simplify = FALSE) {
str <- gsub("\n", "", str)
zm <- tolower(strextract(strextract(str, "[zmZM]+\\("), "[A-Za-z]+")) %||% ""
str_coord <- str_trim_(gsub("MULTILINESTRING\\s?([ZMzm]+)?", "", str, ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
str_coord <- strsplit(str_coord, "\\),|\\)\\(")[[1]]
if (simplify) {
if (length(str_coord) == 1) return(load_linestring(str_coord, fmt, feature, numeric))
}
coords <- lapply(str_coord, function(z){
pairs <- strsplit(strsplit(str_trim_(gsub("\\(|\\)", "", str_trim_(z))), ",|,\\s")[[1]], "\\s")
do.call("rbind", lapply(pairs, function(x) {
tmp <- if (numeric) as.numeric(nozero(x), fmt) else format_num(nozero(x), fmt)
matrix(tmp, ncol = length(tmp))
}))
})
if (nzchar(zm)) {
coords <- switch(
zm,
"zm" = lapply(coords, function(z) z[,-4]),
"m" = lapply(coords, function(z) z[,-3]),
"z" = coords
)
}
iffeat('MultiLineString', coords, feature)
}
load_geometrycollection <- function(str, fmt = 16, feature = TRUE, numeric = TRUE){
str_coord <- str_trim_(gsub("GEOMETRYCOLLECTION\\s?", "",
gsub("\n", "", str), ignore.case = TRUE))
str_coord <- gsub("^\\(|\\)$", "", str_coord)
matches <- noneg(sort(sapply(wellknown_types, regexpr, text = str_coord)))
out <- list()
for (i in seq_along(matches)) {
end <- if (i == length(matches)) nchar(str_coord) else matches[[i + 1]] - 1
strg <- substr(str_coord, matches[[i]], end)
strg <- gsub(",\\s+?$", "", strg)
out[[ i ]] <- get_load_fxn(tolower(names(matches[i])))(strg, fmt, feature, numeric)
}
list(type = 'GeometryCollection', geometries = out)
}
get_load_fxn <- function(type){
switch(type,
point = load_point,
multipoint = load_multipoint,
linestring = load_linestring,
multilinestring = load_multilinestring,
polygon = load_polygon,
multipolygon = load_multipolygon,
geometrycollection = load_geometrycollection)
}
noneg <- function(x) x[!x < 0]
iffeat <- function(type, cd, feature) {
tmp <- list(type = type, coordinates = cd)
if (feature) {
list(type = "Feature", geometry = tmp)
} else {
tmp
}
} |
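## Hedged usage sketch for wkt2geojson() above (not part of the original source).
## The parser also relies on helpers not shown in this excerpt (cw, strextract,
## str_trim_, nozero, `%||%`), so this assumes the full wellknown package is loaded.
library(wellknown)
wkt2geojson("POINT (-116.4 45.2)") # GeoJSON Feature wrapping a Point
wkt2geojson("POINT (-116.4 45.2)", feature = FALSE) # bare geometry, no Feature wrapper
wkt2geojson("LINESTRING (30 10, 10 30, 40 40)") # LineString with a coordinate matrix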
[
{
"title": "Getting My Eye In Around F1 Quali Data – Parallel Coordinate Plots, Sort of…",
"href": "https://blog.ouseful.info/2011/07/30/getting-my-eye-in-around-f1-quali-data-parallel-coordinate-plots-sort-of/"
},
{
"title": "Exploring the robustness of Bayes Factors: A convenient plotting function",
"href": "http://www.nicebread.de/exploring-the-robustness-of-bayes-factors-a-convenient-plotting-function-2/"
},
{
"title": "RStudio: a cut above",
"href": "https://statbandit.wordpress.com/2011/03/01/rstudio-a-cut-above/"
},
{
"title": "table() in R",
"href": "http://jointposterior.blogspot.com/2011/01/table-in-r.html"
},
{
"title": "Fluctuation plot using ggplot2 in R",
"href": "http://www.gettinggeneticsdone.com/2010/01/fluctuation-plot-using-ggplot2-in-r.html"
},
{
"title": "Calculating DV01 using futile.paradigm",
"href": "https://cartesianfaith.com/2011/04/25/calculating-dv01-using-futile-paradigm/"
},
{
"title": "Vienna, Oct 2013 – Advanced R Programming",
"href": "https://www.rmetrics.org/node/144"
},
{
"title": "“Introduction to R” Course February 21-22, 2013",
"href": "http://www.milanor.net/blog/introduction-to-r-course-february-21-22-2013/"
},
{
"title": "Learning R: Project 1, Part 2",
"href": "http://statsadventure.blogspot.com/2011/10/learning-r-project-1-part-2.html"
},
{
"title": "Forecasting: Principles and Practice",
"href": "http://davegiles.blogspot.com/2012/05/forecasting-principles-and-practice.html"
},
{
"title": "rggobi 2.1.3 released (for linux and mac)",
"href": "http://ggobi.blogspot.com/2006/04/rggobi-213-released-for-linux-and-mac.html"
},
{
"title": "R snippets for vim-SnipMate",
"href": "http://www.lindonslog.com/linux-unix/r-snippets-vim-snipmate/"
},
{
"title": "Particle learning [rejoinder]",
"href": "https://xianblog.wordpress.com/2010/11/10/particle-learning%C2%A0rejoinder/"
},
{
"title": "R Tutorial Series: ANOVA Tables",
"href": "https://feedproxy.google.com/~r/RTutorialSeries/~3/_KIxE32tZEw/r-tutorial-series-one-way-analysis-of.html"
},
{
"title": "Using R to Analyze Baseball Games in “Real Time”",
"href": "https://web.archive.org/web/http://www.sigmafield.org/2009/10/04/using-r-to-analyze-baseball-games-in-real-time"
},
{
"title": "Are Consumer Preferences Deep or Shallow?",
"href": "http://joelcadwell.blogspot.com/2014/07/are-consumer-preferences-deep-or-shallow.html"
},
{
"title": "Top 15 Daily Tweeters of
"href": "http://drewconway.com/zia/?p=2642"
},
{
"title": "Getting data from the Infochimps Geo API in R",
"href": "https://dataexcursions.wordpress.com/2011/09/10/getting-data-from-the-infochimps-geo-api-in-r/"
},
{
"title": "Halloween: An Excuse for Plotting with Icons",
"href": "http://citizen-statistician.org/2015/10/27/halloween-an-excuse-for-plotting-with-icons/"
},
{
"title": "R and Java – JRI using eclipse on 64 bit machines",
"href": "http://www.studytrails.com/r/r-and-java-jri-using-eclipse-on-64-bit-machines/"
},
{
"title": "eoda offers courses for data visualization and graphics with R",
"href": "http://blog.eoda.de/2013/09/09/eoda-offers-courses-for-data-visualization-and-graphics-with-r-2/"
},
{
"title": "Shiny https: Securing Shiny Open Source with SSL",
"href": "http://ipub.com/shiny-https/"
},
{
"title": "Hacks for thinking about high-dimensional space",
"href": "http://isomorphism.es/post/120539470124/hacks-for-thinking-about-high-dimensional-space"
},
{
"title": "New R Package – domaintools (access the DomainTools.com WHOIS API)",
"href": "http://datadrivensecurity.info/blog/posts/2015/Aug/new-r-package-domaintools/"
},
{
"title": "La historia detrás del software: el caso de R.",
"href": "https://web.archive.org/web/http://blog.flacso.edu.mx/vmgg/2011/03/18/la-historia-detras-del-software-el-caso-de-r/"
},
{
"title": "R, now a major programming language, sees a 127% growth in book sales",
"href": "http://blog.revolutionanalytics.com/2012/05/r-now-a-major-programming-language-sees-a-127-growth-in-book-sales.html"
},
{
"title": "XLConnect – A platform-independent interface to Excel",
"href": "https://miraisolutions.wordpress.com/2011/08/31/xlconnect-a-platform-independent-interface-to-excel/"
},
{
"title": "Exporting Results of Linear Regression with Robust Standard Errors",
"href": "http://rforpublichealth.blogspot.com/2013/08/exporting-results-of-linear-regression_24.html"
},
{
"title": "Tall big data, wide big data",
"href": "http://www.quantumforest.com/2011/12/tall-big-data-wide-big-data/"
},
{
"title": "Static and dynamic graphics course, July 2007, Salt Lake City",
"href": "http://ggobi.blogspot.com/2007/03/static-and-dynamic-graphics-course.html"
},
{
"title": "Geography and Data",
"href": "http://jaredknowles.com/journal/2009/6/15/geography-and-data.html"
},
{
"title": "swing graph",
"href": "https://web.archive.org/web/http://jackman.stanford.edu/blog/?p=1735"
},
{
"title": "Social Media Interest Maps of Newsnight and BBCQT Twitterers",
"href": "https://blog.ouseful.info/2012/01/26/social-media-interest-maps-of-newsnight-and-bbcqt-twitterers/"
},
{
"title": "Training for the big data era: eoda publishes R Academy programme for 2016",
"href": "http://blog.eoda.de/2016/01/04/training-for-the-big-data-era-eoda-publishes-r-academy-programme-for-2016/"
},
{
"title": "Exercise in REML/Mixed model",
"href": "http://wiekvoet.blogspot.com/2013/08/exercise-in-remlmixed-model.html"
},
{
"title": "Some of Excel’s Finance Functions in R",
"href": "http://factbased.blogspot.com/2013/02/some-of-excel-finance-functions-in-r.html"
},
{
"title": "Happy International Year of Statistics",
"href": "https://web.archive.org/web/http://mpkanalytics.com/2013/01/01/happy-international-year-of-statistics/"
},
{
"title": "Analyzing Monthly Expenses with a Pareto Chart",
"href": "https://qualityandinnovation.com/2013/03/09/analyzing-monthly-expenses-with-a-pareto-chart/"
},
{
"title": "Slopegraphs in R",
"href": "http://rud.is/b/2013/01/11/slopegraphs-in-r/"
},
{
"title": "Dynamic Modeling 3: When the first-order difference model doesn’t cut it",
"href": "https://web.archive.org/web/http://nortalktoowise.com/?p=593"
},
{
"title": "RcppArmadillo 0.3.4.4",
"href": "http://dirk.eddelbuettel.com/blog/2012/11/16/"
},
{
"title": "Moving average/median",
"href": "http://agrarianresearch.org/blog/?p=111"
},
{
"title": "In search of an incredible posterior",
"href": "http://pirategrunt.com/r/2016/06/23/IncrediblePosterior/"
},
{
"title": "Versions of Sweave.sh",
"href": "http://ggorjan.blogspot.com/2009/01/versions-of-sweavesh.html"
},
{
"title": "Answering “How many people use my R package?”",
"href": "https://www.r-statistics.com/2013/06/answering-how-many-people-use-my-r-package/"
},
{
"title": "High Frequency GARCH: The multiplicative component GARCH (mcsGARCH) model",
"href": "http://unstarched.net/2013/03/20/high-frequency-garch-the-multiplicative-component-garch-mcsgarch-model/"
},
{
"title": "Multivariate Joint Models for Multiple Longitudinal Outcomes and a Time-to-Event",
"href": "http://iprogn.blogspot.com/2016/10/multivariate-joint-models-for-multiple.html"
},
{
"title": "Submit a paper to the R/Finance conference",
"href": "http://blog.revolutionanalytics.com/2011/12/submit-a-paper-to-the-rfinance-conference.html"
},
{
"title": "AlienVault Longitudinal Study Part 4",
"href": "http://datadrivensecurity.info/blog/posts/2014/Jun/alienvault-longitudinal-study-part-4/"
},
{
"title": "auto-complete in ESS",
"href": "https://web.archive.org/web/http://ygc.name/2014/12/07/auto-complete-in-ess/"
}
] |
topoplot<-function(erpobj, startmsec=-200, endmsec=1200, win.ini, win.end, exclude = NULL,
elec.coord=NULL, projection="orthographic", palette.col="jet", palette.steps=10, return.coord = FALSE,
zlim=NULL, interpolation = "cubicspline", extrap = TRUE, interp.points = 500, return.notfound=FALSE, mask = TRUE, contour=TRUE, x.rev=FALSE,
draw.elec.pos=TRUE, draw.nose=FALSE, draw.elec.lab=TRUE, elec.lab.adj=c(0.5, NA), head.col="black", head.lwd=1, ...)
{
requireNamespace("akima")
if (!projection%in%c("orthographic", "equalarea")){
stop("Available projectios are: orthographic, equalarea
The projection specified is: ", projection, call.=F)
}
available.palettes=c("jet", "heat")
if ((!palette.col[1]%in%available.palettes)&length(palette.col)==1){
available.palettes=paste("\"",available.palettes,"\"", collapse=", ", sep="")
stop("To create a palette for the plot, at least two colors must be specified.
Type colors() to see a list of available colors.
Two default palettes are available:", available.palettes, ".", call.=F)
}
if (is.null(elec.coord)){
elec.coord=structure(list(el.name = structure(c(81L, 117L, 82L, 68L, 70L,
69L, 10L, 11L, 3L, 1L, 2L, 4L, 7L, 5L, 6L, 8L, 45L, 43L, 41L,
39L, 77L, 40L, 42L, 44L, 46L, 66L, 64L, 62L, 60L, 61L, 63L, 65L,
67L, 74L, 72L, 51L, 49L, 47L, 59L, 48L, 50L, 52L, 73L, 71L, 75L,
57L, 55L, 53L, 54L, 56L, 58L, 76L, 121L, 119L, 17L, 15L, 13L,
14L, 16L, 18L, 120L, 118L, 128L, 23L, 21L, 19L, 20L, 22L, 24L,
129L, 125L, 123L, 30L, 28L, 26L, 38L, 27L, 29L, 31L, 124L, 122L,
126L, 36L, 34L, 32L, 33L, 35L, 37L, 127L, 99L, 97L, 95L, 93L,
90L, 115L, 92L, 94L, 96L, 98L, 91L, 113L, 111L, 112L, 114L, 105L,
103L, 101L, 110L, 102L, 104L, 100L, 106L, 108L, 109L, 85L, 89L,
86L, 107L, 87L, 88L, 78L, 80L, 79L, 12L, 83L, 84L, 9L, 25L, 116L,
130L), .Label = c("AF3", "AF4", "AF7", "AF8", "AFF1h", "AFF2h",
"AFF5h", "AFF6h", "AFp10h", "AFp3", "AFp4", "AFp9h", "C1", "C2",
"C3", "C4", "C5", "C6", "CCP1h", "CCP2h", "CCP3h", "CCP4h", "CCP5h",
"CCP6h", "Centroid", "CP1", "CP2", "CP3", "CP4", "CP5", "CP6",
"CPP1h", "CPP2h", "CPP3h", "CPP4h", "CPP5h", "CPP6h", "CPz",
"F1", "F2", "F3", "F4", "F5", "F6", "F7", "F8", "FC1", "FC2",
"FC3", "FC4", "FC5", "FC6", "FCC1h", "FCC2h", "FCC3h", "FCC4h",
"FCC5h", "FCC6h", "FCz", "FFC1h", "FFC2h", "FFC3h", "FFC4h",
"FFC5h", "FFC6h", "FFT7h", "FFT8h", "Fp1", "Fp2", "Fpz", "FT10",
"FT7", "FT8", "FT9", "FTT7h", "FTT8h", "Fz", "I1", "I2", "Iz",
"Left", "Nasion", "NFp1h", "NFp2h", "O1", "O2", "OI1h", "OI2h",
"Oz", "P1", "P10", "P2", "P3", "P4", "P5", "P6", "P7", "P8",
"P9", "PO10", "PO3", "PO4", "PO7", "PO8", "PO9", "POO1", "POO10h",
"POO2", "POO9h", "POz", "PPO1h", "PPO2h", "PPO3h", "PPO4h", "Pz",
"Ref", "Right", "T10", "T7", "T8", "T9", "TP10", "TP7", "TP8",
"TP9", "TPP7h", "TPP8h", "TTP7h", "TTP8h", "Cz"), class = "factor"),
d = c(108, 114, 110, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69,
69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 99, 120, 69), y = c(0,
0, 0.923, 0.9511, 1, 0.9511, 0.9565, 0.9565, 0.809, 0.892,
0.8919, 0.809, 0.7777, 0.8289, 0.8289, 0.7777, 0.5878, 0.6343,
0.6726, 0.6979, 0.7067, 0.6979, 0.6726, 0.6343, 0.5878, 0.4741,
0.5107, 0.5384, 0.5533, 0.5533, 0.5384, 0.5107, 0.4741, 0.2852,
0.309, 0.3373, 0.3612, 0.377, 0.3826, 0.377, 0.3612, 0.3373,
0.309, 0.2852, 0.164, 0.1779, 0.1887, 0.1944, 0.1944, 0.1887,
0.1779, 0.164, -1e-04, 0, 1e-04, 1e-04, 2e-04, 1e-04, 1e-04,
1e-04, 0, 0, -0.1639, -0.1778, -0.1883, -0.194, -0.194, -0.1884,
-0.1778, -0.1639, -0.2852, -0.309, -0.3372, -0.3609, -0.3767,
-0.3822, -0.3767, -0.3608, -0.3372, -0.309, -0.2853, -0.474,
-0.5106, -0.5384, -0.5532, -0.5532, -0.5384, -0.5106, -0.474,
-0.5429, -0.5878, -0.6342, -0.6724, -0.6975, -0.7063, -0.6975,
-0.6724, -0.6342, -0.5878, -0.5429, -0.8108, -0.8284, -0.8284,
-0.8108, -0.7467, -0.809, -0.8918, -0.923, -0.8918, -0.809,
-0.7467, -0.9739, -0.9739, -0.873, -0.9511, -1, -0.9511,
-0.873, -0.9679, -0.9679, -0.8785, -0.923, -0.8785, 0.8732,
0.9601, 0.9601, 0.8732, 0, 0, 0), x = c(-0.9237, 0.9237,
0, -0.309, 0, 0.3091, -0.2508, 0.2508, -0.5878, -0.3554,
0.3553, 0.5878, -0.5417, -0.122, 0.122, 0.5417, -0.809, -0.721,
-0.5399, -0.2888, 0, 0.2888, 0.5399, 0.721, 0.809, -0.8642,
-0.7218, -0.4782, -0.1672, 0.1672, 0.4782, 0.7218, 0.8642,
-0.8777, -0.9511, -0.8709, -0.6638, -0.3581, 0, 0.3581, 0.6638,
0.8709, 0.9511, 0.8777, -0.9669, -0.8184, -0.5466, -0.1919,
0.1919, 0.5466, 0.8184, 0.9669, -0.9237, -1, -0.9237, -0.7066,
-0.3824, 0.3824, 0.7066, 0.9237, 1, 0.9237, -0.9669, -0.8185,
-0.5465, -0.1918, 0.1918, 0.5465, 0.8185, 0.9669, -0.8777,
-0.9511, -0.8712, -0.6635, -0.358, 0, 0.358, 0.6635, 0.8712,
0.9511, 0.8777, -0.8639, -0.722, -0.4786, -0.1673, 0.1673,
0.4786, 0.722, 0.8638, -0.7472, -0.809, -0.7211, -0.5401,
-0.2889, 0, 0.2889, 0.5401, 0.7211, 0.809, 0.7472, -0.352,
-0.122, 0.122, 0.3519, -0.5425, -0.5878, -0.3549, 0, 0.3549,
0.5878, 0.5425, -0.1285, 0.1286, -0.4448, -0.309, 0, 0.309,
0.4448, -0.1533, 0.1533, -0.2854, 0, 0.2854, -0.4449, -0.1564,
0.1564, 0.4449, 0, 0.9237, 0), z = c(-0.3826, -0.3826, -0.3824,
1e-04, 1e-04, 0, 0.1438, 0.1437, 0, 0.2782, 0.2783, 0, 0.3163,
0.5452, 0.5452, 0.3163, 0, 0.2764, 0.5043, 0.6542, 0.7067,
0.6542, 0.5043, 0.2764, 0, 0.1647, 0.4651, 0.6925, 0.8148,
0.8148, 0.6925, 0.4651, 0.1647, -0.3826, 0, 0.3549, 0.6545,
0.8532, 0.9233, 0.8532, 0.6545, 0.3549, 0, -0.3826, 0.1915,
0.5448, 0.8154, 0.9615, 0.9615, 0.8154, 0.5448, 0.1915, -0.3826,
0, 0.3826, 0.7066, 0.9231, 0.9231, 0.7066, 0.3826, 0, -0.3826,
0.1915, 0.5449, 0.8153, 0.9611, 0.9611, 0.8153, 0.5449, 0.1915,
-0.3826, -1e-04, 0.3552, 0.6543, 0.8534, 0.9231, 0.8534,
0.6543, 0.3552, -1e-04, -0.3826, 0.1646, 0.4653, 0.6933,
0.8155, 0.8155, 0.6933, 0.4653, 0.1646, -0.3826, -1e-04,
0.2764, 0.5045, 0.6545, 0.7065, 0.6545, 0.5045, 0.2764, -1e-04,
-0.3826, 0.4659, 0.5453, 0.5453, 0.4659, -0.3825, 0, 0.2776,
0.3824, 0.2776, 0, -0.3825, 0.1822, 0.1822, -0.195, 0, 0,
0, -0.195, -0.195, -0.195, -0.3824, -0.3823, -0.3824, -0.1949,
-0.1949, -0.1949, -0.1949, 0, -0.5773, 1)), .Names = c("el.name",
"d", "y", "x", "z"), row.names = c(NA, 130L), class = "data.frame")
}
necessary.el.coord.cols=c("el.name", "y", "x")
if (!all(necessary.el.coord.cols%in%names(elec.coord))) {
colsnotfound=necessary.el.coord.cols[!necessary.el.coord.cols%in%names(elec.coord)]
colsnotfound=paste("\"",colsnotfound,"\"", collapse=", ", sep="")
stop("The electrode coordinate object should contain at least these columns:
1) \"el.name\": a column containing electrode names.
2) \"y\": a column containing the y of electrode coordinates.
3) \"x\": a column containing the x of electrode coordinates.
The following column(s) have not been found in the object supplied: ", colsnotfound, call.=FALSE)
}
if (return.coord == TRUE){
return(elec.coord)
} else {
erpobj=erpobj[,!names(erpobj)%in%exclude]
curr.el=names(erpobj[,!names(erpobj)%in%exclude])
up.curr.el=toupper(curr.el)
up.elec.coord.names=toupper(elec.coord$el.name)
found=names(erpobj)[up.curr.el%in%up.elec.coord.names]
notfound=names(erpobj)[!up.curr.el%in%up.elec.coord.names]
if (length(notfound)>0&return.notfound==FALSE){
notfound.list=paste(notfound, "\n", sep="")
stop("the following electrodes have not been found\n", notfound.list, call.=F)
}
if (length(notfound)>0&return.notfound==TRUE&is.null(exclude)){
return(notfound)
}
}
curr.elec.coord=elec.coord[up.elec.coord.names%in%up.curr.el,]
curr.elec.coord=curr.elec.coord[order(as.character(curr.elec.coord$el.name)),]
erpobj=erpobj[,order(names(erpobj))]
if (x.rev==TRUE){
curr.elec.coord$x=-curr.elec.coord$x
}
if(projection=="orthographic"){
x=curr.elec.coord$x
y=curr.elec.coord$y
}
if (projection=="equalarea"){
x=curr.elec.coord$x *sqrt(1/(1 + curr.elec.coord$z))
y=curr.elec.coord$y *sqrt(1/(1 + curr.elec.coord$z))
}
if (palette.col[1]=="jet"){
mypalette <- colorRampPalette(c("
} else {
if (palette.col[1]=="heat"){
mypalette <- colorRampPalette(heat.colors(palette.steps))
} else {
mypalette <-colorRampPalette(palette.col)
}
}
if (win.ini == win.end){
ampl = as.numeric(erpobj[round(msectopoints(win.ini, dim(erpobj)[1], startmsec, endmsec)), ])
} else {
ampl = colMeans(erpobj[round(msectopoints(win.ini, dim(erpobj)[1], startmsec, endmsec)):
round(msectopoints(win.end, dim(erpobj)[1], startmsec, endmsec)), ])
}
xlim=c(-1.3, 1.3)
ylim=c(-1.3, 1.3)
if (interpolation=="cubicspline"){
interp.linear=F
}
if (interpolation=="linear"){
interp.linear=T
extrap=FALSE
}
interp.data=akima::interp(x,y, ampl, xo=seq(xlim[1], xlim[2], length = interp.points), yo=seq(ylim[1], ylim[2], length = interp.points), linear=interp.linear, extrap= extrap)
interp.xlim.up=which(interp.data$x>(xlim[2]-0.1))
interp.xlim.inf=which(interp.data$x<(xlim[1]+0.1))
interp.ylim.up=which(interp.data$y>(ylim[2]-0.1))
interp.ylim.inf=which(interp.data$y<(ylim[1]+0.1))
interp.data$z[c(interp.xlim.up, interp.xlim.inf),]=NA
interp.data$z[,c(interp.ylim.up, interp.ylim.inf)]=NA
range.used=round(range(interp.data$z, na.rm=T),2)
if (is.null(zlim)){
newlim=round(max(abs(range.used)))
zlim=c(-newlim, newlim)
}
if ((min(zlim)>min(range.used))|(max(zlim)<max(range.used))){
cat("WARNING: your data (after interpolation) are out of range as compared to the zlims specified.\n",
"Your data range is:", paste(range.used, collapse= ", "), "\n",
"Your zlims are:", paste(zlim, collapse=", "), "\n")
}
image(interp.data, col=mypalette(palette.steps), xlim=c(xlim[1], xlim[2]), ylim=c(ylim[1], ylim[2]), zlim=c(zlim[1], zlim[2]), frame.plot=FALSE, axes=FALSE, ...)
if (contour == TRUE){
cont_levels=seq(zlim[1], zlim[2], dist(zlim)/palette.steps)
contour(interp.data, xlim=c(xlim[1], xlim[2]), ylim=c(ylim[1], ylim[2]), zlim=c(zlim[1], zlim[2]), frame.plot=FALSE, axes=FALSE, add=TRUE, drawlabels=FALSE, levels=cont_levels)
}
if (mask == TRUE){
plotcircle <- function (x, y, r, ...)
{
angle = seq(0, 2*pi, length = 200)
xc = x + (r * cos(angle))
yc = y + (r * sin(angle))
polygon(xc, yc, ...)
return(list(x = xc, y = yc))
}
circ.coord=plotcircle(0,0,1, border = head.col, lwd = head.lwd)
circle.points=length(circ.coord$x)
pol.x=c(xlim[2], circ.coord$x[1:120], xlim[1], xlim[1], xlim[2], xlim[2])
pol.y=c(0.4, -circ.coord$y[1:120], 0.4, ylim[1], ylim[1], 0.4)
polygon(pol.x, pol.y, col="white", lty="blank")
polygon(pol.x, -pol.y, col="white", lty="blank")
}
if (draw.elec.pos==TRUE){
points(x,y, pch=19, col="black")
}
if (draw.elec.lab==TRUE){
text(x,y+0.1, labels=names(erpobj), adj=elec.lab.adj)
}
if (draw.nose==TRUE){
Nose.left=structure(list(x = c(-0.165709488227992, -0.170777055719452,
-0.160641920736532, -0.120101380804849, -0.0542230034158648,
0), y = c(0.991010207442792, 1.00031714102429, 1.02885836681128,
1.06310783775568, 1.13731502480188, 1.16)), .Names = c("x", "y"
), row.names = 2:7, class = "data.frame")
Nose.right=structure(list(x = c(0.165709488227992, 0.170777055719452, 0.160641920736532,
0.120101380804849, 0.0542230034158648, 0), y = c(0.991010207442792,
1.00031714102429, 1.02885836681128, 1.06310783775568, 1.13731502480188,
1.16)), .Names = c("x", "y"), row.names = 2:7, class = "data.frame")
lines(Nose.left, col=head.col, lwd=head.lwd)
lines(Nose.right, col=head.col, lwd=head.lwd)
}
invisible(list(zlim=zlim, palette=mypalette(palette.steps)))
} |
set.seed(1)
x <- data.frame(
a = rnorm(100),
b = rnorm(100),
c = rnorm(100)
)
y <- data.frame(
d = rnorm(100),
e = rnorm(100),
f = rnorm(100)
)
z <- data.frame(
g = sample(letters, 100, TRUE),
h = rnorm(100),
i = rnorm(100)
)
z$h[1:5] <- NA
test_that("correlation returns s3 object", {
expect_s3_class(correlate(x), "correlate")
expect_s3_class(correlate(x, y), "correlate")
expect_s3_class(correlate(x, z), "correlate")
})
out <- list(
xx = unclass(correlate(x)),
xy = unclass(correlate(x, y)),
xz = unclass(correlate(x, z))
)
test_that("correlation output has expected structure", {
for(m in 1:length(out)) {
expect_type(out[[m]], "list")
expect_length(out[[m]], 5)
expect_named(out[[m]], c("correlation", "p.value", "sample.size", "args", "tiesProblem"))
for(i in 1:3) {
expect_true(inherits(out[[m]][[i]], "matrix"))
}
expect_type(out[[m]][[4]], "character")
expect_type(out[[m]][[5]], "logical")
}
})
test_that("NA values appear in correct slots", {
expect_true(all(is.na(out$xz$correlation[,"g"])))
expect_equal(out$xz$sample.size[1, "g"], NA_integer_)
expect_equal(out$xz$sample.size[1, "h"], 95)
expect_equal(out$xz$sample.size[1, "i"], 100)
}) |
observeEvent(input$sideBarTab, {
if (input$sideBarTab == "hokkaido" && is.null(GLOBAL_VALUE$hokkaidoData)) {
GLOBAL_VALUE$hokkaidoData <- fread(file = paste0(DATA_PATH, "Pref/Hokkaido/covid19_data.csv"))
GLOBAL_VALUE$hokkaidoDataUpdateTime <- file.info(paste0(DATA_PATH, "Pref/Hokkaido/covid19_data.csv"))$mtime
GLOBAL_VALUE$hokkaidoData$date <- as.Date(paste0(GLOBAL_VALUE$hokkaidoData$年, "/", GLOBAL_VALUE$hokkaidoData$月, "/", GLOBAL_VALUE$hokkaidoData$日))
GLOBAL_VALUE$hokkaidoPatients <- fread(file = paste0(DATA_PATH, "Pref/Hokkaido/patients.csv"))
}
})
hokkaidoData <- reactive({
return(list(
data = GLOBAL_VALUE$hokkaidoData,
dataUpdateTime = GLOBAL_VALUE$hokkaidoDataUpdateTime,
patient = GLOBAL_VALUE$hokkaidoPatients
))
})
output$hokkaidoValueBoxes <- renderUI({
data <- hokkaidoData()$data
positiveRate <- paste0(round(tail(data$陽性累計, n = 1) / tail(data$検査累計, n = 1) * 100, 2), "%")
dischargeRate <- paste0(round(tail(data$治療終了累計, n = 1) / tail(data$陽性累計, n = 1) * 100, 2), "%")
deathRate <- paste0(round(tail(data$死亡累計, n = 1) / tail(data$陽性累計, n = 1) * 100, 2), "%")
return(
tagList(
fluidRow(
createValueBox(
value = tail(data$検査累計, n = 1),
subValue = paste0(i18n$t("陽性率:"), precentage),
sparkline = createSparklineInValueBox(data, "日検査数"),
subtitle = i18n$t("検査数"),
icon = "vials",
color = "yellow",
diff = tail(data$日検査数, n = 1)
),
createValueBox(
value = tail(data$陽性累計, n = 1),
subValue = paste0(i18n$t("速報:"), sum(byDate[, 2, with = T], na.rm = T)),
sparkline = createSparklineInValueBox(data, "日陽性数"),
subtitle = i18n$t("陽性者数"),
icon = "procedures",
color = "red",
diff = tail(data$日陽性数, n = 1)
)
),
fluidRow(
createValueBox(
value = tail(data$治療終了累計, n = 1),
subValue = dischargeRate,
sparkline = createSparklineInValueBox(data, "日治療終了数"),
subtitle = i18n$t("回復者数"),
icon = "user-shield",
color = "green",
diff = tail(data$日治療終了数, n = 1)
),
createValueBox(
value = tail(data$死亡累計, n = 1),
subValue = deathRate,
sparkline = createSparklineInValueBox(data, "日死亡数"),
subtitle = i18n$t("死亡者数"),
icon = "bible",
color = "navy",
diff = tail(data$日死亡数, n = 1)
)
)
)
)
})
output$hokkaidoSummaryGraph <- renderEcharts4r({
data <- hokkaidoData()$data
dataUpdateTime <- hokkaidoData()$dataUpdateTime
latestUpdateDuration <- difftime(Sys.time(), dataUpdateTime)
LATEST_UPDATE <- paste0(
round(latestUpdateDuration[[1]], 0),
convertUnit2Ja(latestUpdateDuration)
)
data %>%
e_chart(date) %>%
e_line(陽性累計, color = middleRed, symbolSize = 0) %>%
e_mark_line(data = list(xAxis = "2020-02-28", label = list(formatter = "2月28日\n緊急事態宣言")), symbol = "circle") %>%
e_mark_line(data = list(xAxis = "2020-03-19", label = list(formatter = "3月19日\n緊急事態終了")), symbol = "circle") %>%
e_line(患者累計, color = middleYellow, symbolSize = 0) %>%
e_line(死亡累計, color = darkNavy, symbolSize = 0) %>%
e_bar(日陽性数, color = darkRed, name = "日次陽性者数", y_index = 1) %>%
e_y_axis(splitLine = list(show = F), index = 1, max = 3 * max(data$日陽性数, na.rm = T)) %>%
e_x_axis(splitLine = list(show = F)) %>%
e_grid(left = "8%", right = "5%", bottom = "10%") %>%
e_legend(orient = "vertical", top = "15%", left = "8%") %>%
e_tooltip(trigger = "axis") %>%
e_title(text = "検査数・陽性者数推移", subtext = paste(i18n$t("更新時刻:"), LATEST_UPDATE)) %>%
e_group("hokkaidoSummary")
})
output$hokkaidoStackGraph <- renderEcharts4r({
data <- hokkaidoData()$data
dataUpdateTime <- hokkaidoData()$dataUpdateTime
latestUpdateDuration <- difftime(Sys.time(), dataUpdateTime)
LATEST_UPDATE <- paste0(
round(latestUpdateDuration[[1]], 0),
convertUnit2Ja(latestUpdateDuration)
)
data %>%
e_chart(date) %>%
e_bar(検査累計, color = middleYellow, stack = 1) %>%
e_bar(陽性累計, color = middleRed, stack = 2, z = 2, barGap = "-100%") %>%
e_mark_line(data = list(xAxis = "2020-02-28", label = list(formatter = "2月28日\n緊急事態宣言")), symbol = "circle") %>%
e_mark_line(data = list(xAxis = "2020-03-19", label = list(formatter = "3月19日\n緊急事態終了")), symbol = "circle") %>%
e_bar(治療終了累計, color = middleGreen, stack = 3) %>%
e_bar(死亡累計, color = darkNavy, stack = 3) %>%
e_x_axis(splitLine = list(show = F)) %>%
e_grid(left = "8%", right = "5%", bottom = "10%") %>%
e_legend(orient = "vertical", top = "15%", left = "8%") %>%
e_tooltip(trigger = "axis") %>%
e_title(text = "検査数・罹患者内訳推移", subtext = paste(i18n$t("更新時刻:"), LATEST_UPDATE)) %>%
e_group("hokkaidoSummary") %>%
e_connect_group("hokkaidoSummary")
})
output$hokkaidoConfirmedMap <- renderLeaflet({
data <- hokkaidoData()$patient
clusterData <- GLOBAL_VALUE$signatePlace[カテゴリ != "市区町村"]
Icons <- iconList(
"男性" = makeIcon(iconUrl = "www/Icon/male.png", iconRetinaUrl = "www/Icon/male.png", 24, 24),
"女性" = makeIcon(iconUrl = "www/Icon/female.png", iconRetinaUrl = "www/Icon/female.png", 24, 24)
)
leaflet(data) %>%
addTiles() %>%
addProviderTiles(providers$Wikimedia) %>%
addMarkers(
lng = ~居住地経度,
lat = ~居住地緯度,
layerId = ~No,
label = mapply(function(No, age, gender) {
HTML(sprintf("<b>%s番:</b>%s %s", No, age, gender))
},
data$No, data$年代.x, data$性別.y,
SIMPLIFY = F
),
icon = ~ Icons[性別.y],
clusterOptions = markerClusterOptions(), labelOptions = labelOptions(direction = "top")
) %>%
addMarkers(
lat = clusterData$`緯度(世界測地系)`,
lng = clusterData$`経度(世界測地系)`,
popup = clusterData$mapPopup,
label = clusterData$接触場所,
labelOptions = labelOptions(direction = "top")
) %>%
setView(
lng = provinceCode[id == 1]$lng,
lat = provinceCode[id == 1]$lat,
zoom = 7
) %>%
addMiniMap(
tiles = providers$Wikimedia,
toggleDisplay = TRUE
)
})
hokkaidoProfile <- reactive({
id <- hokkaidoValue$profileId
data <- hokkaidoData()$patient
if (!is.null(data) && !is.null(id)) {
return(data[No == id])
} else {
return(NULL)
}
})
hokkaidoValue <- reactiveValues(profileId = NULL)
observeEvent(input$hokkaidoConfirmedMap_marker_click$id, {
hokkaidoValue$profileId <- input$hokkaidoConfirmedMap_marker_click$id
})
observeEvent(input$hokkaidoPatientTable_rows_selected, {
hokkaidoValue$profileId <- input$hokkaidoPatientTable_rows_selected
})
output$hokkaidoProfile <- renderUI({
profile <- hokkaidoProfile()
if (!is.null(profile)) {
outerLinks <- strsplit(profile$情報源, split = ";")[[1]]
outerLinkTags <- tagList(lapply(1:length(outerLinks), function(i) {
tags$a(href = outerLinks[i], icon("link"), "外部リンク", style = "float: right!important;")
}))
activityLog <- ifelse(profile$行動歴 == "", "詳細なし", gsub("\n", "<br>", profile$行動歴))
box(
title = tagList(icon("id-card"), "公開された感染者情報"),
width = 12,
closable = F,
boxProfile(
title = paste0("北海道", profile$No),
src = ifelse(profile$性別.x == "男性", "Icon/male.png", "Icon/female.png"),
subtitle = tagList(profile$性別.x),
bordered = TRUE,
boxProfileItem(
title = tagList(icon("user-clock"), "年代"),
description = profile$年代.x
),
boxProfileItem(
title = tagList(icon("bullhorn"), "公表日"),
description = as.Date(profile$リリース日)
),
boxProfileItem(
title = tagList(icon("user-tie"), "職業"),
description = profile$属性
),
boxProfileItem(
title = tagList(icon("home"), "居住地"),
description = profile$居住地.x
),
boxProfileItem(
title = tagList(icon("external-link-alt"), "情報源"),
description = outerLinkTags
)
),
footer = tagList(
tags$b(icon("handshake"), "濃厚接触者状況"),
tags$p(tags$small(HTML(gsub("\n", "<br>", profile$濃厚接触者状況)))),
tags$hr(),
tags$b(icon("procedures"), "症状・経過"),
tags$p(tags$small(HTML(gsub("\n", "<br>", profile$`症状・経過`)))),
tags$hr(),
tags$b(icon("walking"), "行動歴"),
tags$p(tags$small(HTML(activityLog)))
)
)
} else {
return(
box(
title = tagList(icon("id-card"), "公開された感染者情報"),
width = 12,
closable = F,
tags$b("マップ上のマークをクリックすると感染者詳細がみれます。")
)
)
}
})
output$hokkaidoPatientTable <- renderDataTable({
data <- hokkaidoData()$patient
showCols <- c("No", "リリース日", "居住地.x", "年代.x", "性別.x", "濃厚接触者状況")
dataForShow <- data[, showCols, with = F]
colnames(dataForShow) <- c("自治体番号", "公表日", "居住地", "年代", "性別", "濃厚接触者状況")
dataForShow$公表日 <- as.Date(dataForShow$公表日)
dataForShow$居住地 <- as.factor(dataForShow$居住地)
dataForShow$年代 <- as.factor(dataForShow$年代)
dataForShow$性別 <- as.factor(dataForShow$性別)
DT::datatable(
dataForShow,
rownames = F,
filter = "top",
extensions = c("Responsive"),
selection = "single",
options = list(
dom = "tf",
paging = F,
filter = "top",
scrollX = T,
scrollY = "300px"
)
)
}) |
knitr::opts_chunk$set(
collapse = TRUE,
comment = "
)
library(r2r)
m <- hashmap()
m[["key"]] <- "value"
m[c(1, 2, 3)] <- c("a", "b", "c")
m[[c(4, 5, 6)]] <- c("d", "e", "f")
m[["key"]]
m[c(1, 2, 3)]
m[[c(1, 2, 3)]]
m[c(4, 5, 6)]
m[[c(4, 5, 6)]]
insert(m, "user", "vgherard")
query(m, "user")
s <- hashset()
insert(s, 1)
s[[2]] <- T
s[c(1, 2, 3)]
m[[ lm(wt ~ mpg, mtcars) ]] <- list("This is my fit!", 840)
m[[ lm(wt ~ mpg, mtcars) ]]
m[[ lm(cyl ~ mpg, mtcars) ]]
m <- hashmap(default = 0)
objects <- list(1, 1, "1", FALSE, "1", 1)
for (object in objects)
m[[object]] <- m[[object]] + 1
m[["1"]]
m <- hashmap(on_missing_key = "throw")
tryCatch(m[["Missing key"]], error = function(cnd) "Oops!")
m <- hashmap()
m[[1]] <- "double"
m[["1"]] <- "character"
m[[1]]
m <- hashmap(key_preproc_fn = Arg)
m[list(1, 1 + 1i, 1i)] <- list("EAST", "NORTH-EAST", "NORTH")
m[[10]]
m[[100i]]
m[[2 + 2i]] |
fetch_officer_object = function (obnd, verbose=TRUE){
isgood = TRUE
msgs = c()
rpt = NULL
if(obnd[["isgood"]]){
rpt = obnd[["rpt"]]
} else {
isgood = FALSE
msgs = c(msgs, "Bad onbrand object supplied")
}
if(!isgood){
obnd[["isgood"]] = FALSE
msgs = c(msgs, "onbrand::fetch_officer_object()")
}
if(verbose & !is.null(msgs)){
message(paste(msgs, collapse="\n"))
}
res = list(isgood = isgood,
rpt = rpt,
msgs = msgs)
res
}
freqC <- function(x, w, data, digits=2, rowlabs, printC=FALSE, plot=TRUE,
main, xlab, ylab, bar.col, ...)
{
if(missing(x)) stop("Oops. You need to specify the variable to analyze using the x argument. To see how to use this function, try example(freqC) or help(freqC).")
if(plot!=FALSE)
{
old.par <- graphics::par(no.readonly = TRUE)
on.exit(graphics::par(old.par))
}
x.name = deparse(substitute(x))
if(!missing(w)) w.name = deparse(substitute(w))
check.value(digits, valuetype="numeric")
if(!missing(data))
{
if(is.matrix(data)) data <- data.frame(data)
if(!missing(x)) x <- vector.from.data(substitute(x), data)
if(!missing(w)) w <- vector.from.data(substitute(w), data)
}
check.variable(x)
if(!is.null(attr(x, "label"))) x.label <- attr(x, "label")
if(!missing(w))
{
check.variable(w, vartype="numeric")
weighted=TRUE
}
if(missing(w))
{
w <- rep(1, length(x))
weighted=FALSE
}
if(!missing(rowlabs))
{
if(!is.null(levels(x))) x.values <- levels(x) else x.values <- unique(stats::na.omit(x))
if(length(rowlabs) != length(x.values)) stop("Oops. The x variable's labels aren't the right length for this x variable's values. There are ", length(rowlabs), " labels for ", length(x.values), " different values. To see how to use the rowlabs argument, try help(freqC).")
}
k <- grep(FALSE, (is.na(x) | is.na(w)))
x <- x[k]
w <- w[k]
if(plot==TRUE) main.heading <- headingbox("Describing Distribution of Values with Frequency Table and Bar Chart", width=75, marker="=")
if(plot==FALSE) main.heading <- headingbox("Describing Distribution of Values with Frequency Table", width=75, marker="=")
if(printC==TRUE) printC(main.heading)
caption <- paste("Frequency Distribution of", x.name)
if(exists("x.label")) caption <- paste(caption, " (", x.label, ")", sep="")
if(weighted==TRUE) caption <- paste(caption, ", Weighted by ", w.name, sep="")
if(("ordered" %in% class(x)) & ("factor" %in% class(x))) class(x) <- c("ordered", "factor")
obj1 <- descr::freq(x, w, y.axis="percent", cex.names=0.75, las=2, plot=FALSE, ...)
obj1 <- round(data.frame(obj1), digits)
if(!missing(rowlabs)) row.names(obj1) <- c(rowlabs, "Total")
else rowlabs <- row.names(obj1)[-nrow(obj1)]
if(all(obj1[, "Frequency"]%%1==0)) n.drop.decimals <- TRUE else n.drop.decimals <- FALSE
obj1[, "Frequency"] <- format(obj1[, "Frequency"], drop0trailing=n.drop.decimals, nsmall=digits, digits=digits)
print(knitr::kable(format(obj1, drop0trailing=F, nsmall=digits, digits=digits), caption=caption, format="simple", align="r"))
if(printC==TRUE) printC(knitr::kable(format(obj1, drop0trailing=F, nsmall=digits, digits=digits),
caption=printCaption(caption), format="html", align="r"))
cat("\n")
if(length(unique(x)) > 15)
{
tryhistmessage <- paste("Suggestion:", x.name, "has a lot of unique values. Try making a histogram with histC function for clearer description of its distribution.")
message(tryhistmessage)
}
if(plot==TRUE)
{
for(k in 1:(1+as.numeric(printC)))
{
if(printC==TRUE & k==2)
{
imagename <- paste("freqC.plot.", unclass(Sys.time()), ".png", sep="")
grDevices::png(filename=imagename, width=4, height=3, units="in", type="cairo", pointsize=8, res=300, antialias="default")
class(imagename) <- "image"
printC(imagename)
}
maxaxislabelsize <- max(nchar(dimnames(obj1)[[1]]))
nticklabels <- length(dimnames(obj1)[[1]]) - 1
restoreask <- graphics::par("ask")
if(printC==TRUE & k==2) graphics::par(ask=FALSE) else graphics::par(ask=TRUE)
restoremar <- graphics::par("mar")
if((maxaxislabelsize*nticklabels) > 50)
{
graphics::par(mar=c(4.1 + sqrt(maxaxislabelsize + 2), 4.1, 4.1, 1.1))
cex.ticklabels <- 0.75
las.ticklabels <- 2
mtext.lines <- (4.1 + sqrt(maxaxislabelsize)) - 1.5
if(k<2) message(paste("One or more text labels for bars is long. Consider using rowlabs argument to abbreviate."))
}
else
{
graphics::par(mar=c(4.1, 4.1, 3.6, 1.1))
cex.ticklabels <- 0.9
las.ticklabels <- 1
mtext.lines <- 2.5
}
if(missing(main)) main <- caption
if(missing(ylab)) ylab <- "Percentage"
if(missing(xlab)) xlab <- paste(x.name, "values")
if(missing(bar.col)) bar.col <- "gray80"
main <- strwrap(main, width=50)
obj2 <- descr::freq(x, w, y.axis="percent", cex.names=cex.ticklabels, las=las.ticklabels, plot=plot,
ylab=ylab, main=main, col=bar.col, names.arg=rowlabs)
graphics::mtext(text = xlab, side = 1, line = mtext.lines)
graphics::par(ask=restoreask, mar=restoremar)
if(printC==TRUE & k==2) grDevices::dev.off()
}
}
if(printC==T) printC(match.call(expand.dots = FALSE))
invisible(obj1)
} |
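## Hedged usage sketch for freqC() above (not part of the original source). freqC()
## calls several unexported helpers (check.value, check.variable, headingbox, printC,
## printCaption, vector.from.data), so this assumes the package defining them, plus
## its descr and knitr dependencies, is loaded.
set.seed(11)
region_ex <- factor(sample(c("North", "South", "East", "West"), size = 200,
replace = TRUE, prob = c(0.4, 0.3, 0.2, 0.1)))
freqC(x = region_ex, plot = FALSE) # frequency table only
freqC(x = region_ex) # table plus bar chart of percentages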
predict_Nsurv <- function(object, ...){
UseMethod("predict_Nsurv")
}
predict_Nsurv.survFit <- function(object,
data_predict = NULL,
spaghetti = FALSE,
mcmc_size = NULL,
hb_value = TRUE,
hb_valueFORCED = NA,
extend_time = 100,
...) {
x <- object
if(!("Nsurv" %in% colnames(data_predict))){
warning("Please provide a column 'Nsurv' in the 'data_predict' argument to have
prediction on the Number of survivor.")
}
message("Note that computing can be quite long (several minutes).
Tips: To reduce that time you can reduce Number of MCMC chains (default mcmc_size is set to 1000).")
mcmc <- x$mcmc
model_type <- x$model_type
if(is.null(data_predict)){
if("survFitVarExp" %in% class(x)){
x_interpolate = data.frame(
time = x$jags.data$time_long,
conc = x$jags.data$conc_long,
replicate = x$jags.data$replicate_long)
} else{
data_predict = data.frame(
time = x$jags.data$time,
conc = x$jags.data$conc,
replicate = x$jags.data$replicate,
Nsurv = x$jags.data$Nsurv)
x_interpolate <- predict_interpolate(data_predict, extend_time = extend_time) %>%
dplyr::arrange(replicate, time)
}
}
if(!is.null(data_predict)){
x_interpolate <- predict_interpolate(data_predict, extend_time = extend_time) %>%
dplyr::arrange(replicate, time)
}
df <- data.frame(
time = x_interpolate$time,
conc = x_interpolate$conc,
replicate = x_interpolate$replicate)
unique_replicate <- unique(df$replicate)
ls_time <- list()
ls_conc <- list()
for(i in 1:length(unique_replicate)){
ls_time[[i]] <- dplyr::filter(df, replicate == unique_replicate[i])$time
ls_conc[[i]] <- dplyr::filter(df, replicate == unique_replicate[i])$conc
}
mcmc.samples = mcmc
if(!is.null(mcmc_size)){
reduc_tab = lapply(mcmc.samples, "[",
seq(1, nrow(mcmc.samples[[1]]), length = mcmc_size),
1:ncol(mcmc.samples[[1]]))
mcmc.samples = reduc_tab
}
mctot = do.call("rbind", mcmc.samples)
kd = 10^mctot[, "kd_log10"]
if(hb_value == TRUE){
if("hb" %in% colnames(mctot)){
hb <- mctot[, "hb"]
} else{ hb <- 10^mctot[, "hb_log10"] }
} else if(hb_value == FALSE){
if(is.na(hb_valueFORCED)){
if(is.na(x$hb_valueFIXED)){
stop("Please provide value for `hb` using `hb_valueFORCED`.")
} else{
hb <- rep(x$hb_valueFIXED, nrow(mctot))
}
} else{
hb <- rep(hb_valueFORCED, nrow(mctot))
}
}
k = 1:length(unique_replicate)
if(model_type == "SD"){
kk <- 10^mctot[, "kk_log10"]
z <- 10^mctot[, "z_log10"]
dtheo = lapply(k, function(kit) {
Surv.SD_Cext(Cw = ls_conc[[kit]],
time = ls_time[[kit]],
kk=kk,
kd=kd,
hb=hb,
z=z)
})
}
if(model_type == "IT"){
alpha <- 10^mctot[, "alpha_log10"]
beta <- 10^mctot[, "beta_log10"]
dtheo = lapply(k, function(kit) {
Surv.IT_Cext(Cw = ls_conc[[kit]],
time = ls_time[[kit]],
kd = kd,
hb = hb,
alpha = alpha,
beta = beta)
})
}
dtheo <- do.call("rbind", lapply(dtheo, t))
df_mcmc <- as_tibble(do.call("rbind", x$mcmc))
NsurvPred_valid <- select(df_mcmc, contains("Nsurv_sim"))
NsurvPred_check <- select(df_mcmc, contains("Nsurv_ppc"))
if(is.null(data_predict) &
ncol(NsurvPred_valid) > 0 &
ncol(NsurvPred_check) > 0){
df_quantile <- data.frame(
time = data_predict$time,
conc = data_predict$conc,
replicate = data_predict$replicate,
Nsurv = data_predict$Nsurv,
Nsurv_q50_check = apply(NsurvPred_check, 1, quantile, probs = 0.5, na.rm = TRUE),
Nsurv_qinf95_check = apply(NsurvPred_check, 1, quantile, probs = 0.025, na.rm = TRUE),
Nsurv_qsup95_check = apply(NsurvPred_check, 1, quantile, probs = 0.975, na.rm = TRUE),
Nsurv_q50_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.5, na.rm = TRUE),
Nsurv_qinf95_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.025, na.rm = TRUE),
Nsurv_qsup95_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.975, na.rm = TRUE))
} else{
df_psurv <- as_tibble(dtheo) %>%
mutate(time = df$time,
replicate = df$replicate)
df_filter <- dplyr::inner_join(df_psurv, data_predict, by = c("replicate", "time")) %>%
filter(!is.na(Nsurv)) %>%
group_by(replicate) %>%
arrange(replicate, time) %>%
mutate(Nprec = ifelse(time == min(time), Nsurv, lag(Nsurv)),
iter = row_number(),
iter_prec = ifelse(time == min(time), iter, lag(iter))) %>%
ungroup()
mat_psurv <- df_filter %>%
select(contains("V"), - Nsurv) %>%
as.matrix()
ncol_NsurvPred <- ncol(mat_psurv)
nrow_NsurvPred <- nrow(mat_psurv)
iter = df_filter$iter
iter_prec = df_filter$iter_prec
NsurvPred_valid <- matrix(ncol = ncol_NsurvPred, nrow = nrow(mat_psurv))
Nprec <- cbind(df_filter$Nprec)[, rep(1,ncol_NsurvPred)]
mat_psurv_prec = matrix(ncol = ncol_NsurvPred, nrow = nrow_NsurvPred)
for(i in 1:nrow_NsurvPred){
if(iter[i] == iter_prec[i]){
mat_psurv_prec[i,] = mat_psurv[i,]
} else{
mat_psurv_prec[i,] = mat_psurv[i-1,]
}
}
mat_pSurv_ratio = mat_psurv / mat_psurv_prec
NsurvPred_check_vector = rbinom(ncol_NsurvPred*nrow_NsurvPred,
size = Nprec,
prob = mat_pSurv_ratio)
NsurvPred_check = matrix(NsurvPred_check_vector, byrow = FALSE, nrow = nrow_NsurvPred)
NsurvPred_valid[1, ] = rep(Nprec[1], ncol_NsurvPred)
for(i in 2:nrow(mat_psurv)){
if(iter[i] == iter_prec[i]){
NsurvPred_valid[i,] = NsurvPred_check[i,]
} else{
NsurvPred_valid[i,] = rbinom(ncol_NsurvPred,
size = NsurvPred_valid[i-1,],
prob = mat_pSurv_ratio[i,])
}
}
df_quantile <- data.frame(time = df_filter$time,
conc = df_filter$conc,
replicate = df_filter$replicate,
Nsurv = df_filter$Nsurv,
Nsurv_q50_check = apply(NsurvPred_check, 1, quantile, probs = 0.5, na.rm = TRUE),
Nsurv_qinf95_check = apply(NsurvPred_check, 1, quantile, probs = 0.025, na.rm = TRUE),
Nsurv_qsup95_check = apply(NsurvPred_check, 1, quantile, probs = 0.975, na.rm = TRUE),
Nsurv_q50_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.5, na.rm = TRUE),
Nsurv_qinf95_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.025, na.rm = TRUE),
Nsurv_qsup95_valid = apply(NsurvPred_valid, 1, quantile, probs = 0.975, na.rm = TRUE))
}
if(spaghetti == TRUE){
random_column <- sample(1:ncol(NsurvPred_valid), size = round(10/100 * ncol(NsurvPred_valid)))
df_spaghetti <- as_tibble(NsurvPred_valid[, random_column]) %>%
mutate(time = data_predict$time,
conc = data_predict$conc,
replicate = data_predict$replicate,
Nsurv = data_predict$Nsurv)
} else df_spaghetti <- NULL
return_object <- list(df_quantile = df_quantile,
df_spaghetti = df_spaghetti)
class(return_object) <- c(class(return_object), "survFitPredict_Nsurv")
return(return_object)
} |
test_that("minmax ctx3", {
r <- minmax(0:100)
expect_true(is.ctx3(r))
expect_equal(as.vector(r), c(0, 50, 100))
expect_equal(names(r), c('low', 'center', 'high'))
})
test_that("minmax ctx3 with custom value", {
r <- minmax(0:100, high=80)
expect_true(is.ctx3(r))
expect_equal(as.vector(r), c(0, 40, 80))
expect_equal(names(r), c('low', 'center', 'high'))
})
test_that("minmax ctx3 with custom value 2", {
r <- minmax(0:100, relCenter=0.4)
expect_true(is.ctx3(r))
expect_equal(as.vector(r), c(0, 40, 100))
expect_equal(names(r), c('low', 'center', 'high'))
})
test_that("minmax ctx3bilat", {
r <- minmax(0:100, type='ctx3bilat')
expect_true(is.ctx3bilat(r))
expect_equal(as.vector(r), c(0, 25, 50, 75, 100))
expect_equal(names(r), c('negMax', 'negCenter', 'origin', 'center', 'max'))
})
test_that("minmax ctx5", {
r <- minmax(0:100, type='ctx5')
expect_true(is.ctx5(r))
expect_equal(as.vector(r), c(0, 25, 50, 75, 100))
expect_equal(names(r), c('low', 'lowerCenter', 'center', 'upperCenter', 'high'))
})
test_that("minmax ctx5bilat", {
r <- minmax(0:100, type='ctx5bilat')
expect_true(is.ctx5bilat(r))
expect_equal(as.vector(r), c(0, 12.5, 25, 37.5, 50, 62.5, 75, 87.5, 100))
expect_equal(names(r), c('negMax', 'negUpperCenter', 'negCenter', 'negLowerCenter', 'origin', 'lowerCenter', 'center', 'upperCenter', 'max'))
}) |
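# Model-averaged effective doses: ED estimates from the fitted model and each
# model in 'fctList' are combined using information-criterion weights
# (exp(-IC/2) from mselect), with optional "buckland" or "kang" confidence
# intervals and an optional simple linear regression ('linreg') in the set.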
"maED" <- function(object, fctList = NULL, respLev, interval = c("none", "buckland", "kang"), linreg = FALSE,
clevel = NULL, level = 0.95, type = c("relative", "absolute"), display = TRUE, na.rm = FALSE, extended = FALSE)
{
interval <- match.arg(interval)
type <- match.arg(type)
ncolPM <- ncol(object$"parmMat")
if ((!identical(ncolPM, 1)) && (is.null(clevel)))
{
retMat <- NULL
for (i in 1:ncolPM)
{
curveId <- (colnames(object$"parmMat"))[i]
retMat <- rbind(retMat,
maED(object, fctList, respLev, interval, linreg = linreg, clevel = curveId, level = level,
type = type, display = display, na.rm = na.rm, extended = extended))
}
return(retMat)
} else {
interval <- match.arg(interval)
msMat <- do.call("mselect", list(object = object, fctList = fctList, sorted = "no"))
lenfl <- length(fctList)
lenrl <- length(respLev)
numRows <- lenfl + 1
numCols <- lenrl
edEst <- matrix(NA, numRows + linreg, numCols)
edSe <- matrix(NA, numRows + linreg, numCols)
if (identical(interval, "kang"))
{
interval2 <- "delta"
} else {
interval2 <- "none"
}
edMat <- ED(object, respLev, interval2, clevel, type = type, display = FALSE)
edEst[1, ] <- as.vector((edMat)[, 1])
edSe[1, ] <- as.vector((edMat)[, 2])
if (identical(interval2, "delta"))
{
edCll <- matrix(NA, numRows, numCols)
edClu <- matrix(NA, numRows, numCols)
edCll[1, ] <- as.vector((edMat)[, 3])
edClu[1, ] <- as.vector((edMat)[, 4])
}
for (i in 1:lenfl)
{
edMati <- try(ED(update(object, fct = fctList[[i]]), respLev, interval2, clevel,
type = type, display = FALSE), silent = TRUE)
if (inherits(edMati, "try-error"))
{
edMati <- matrix(NA, length(respLev), 4)
}
edEst[i + 1, ] <- as.vector((edMati)[, 1])
edSe[i + 1, ] <- as.vector((edMati)[, 2])
if (identical(interval2, "delta"))
{
edCll[i + 1, ] <- as.vector((edMati)[, 3])
edClu[i + 1, ] <- as.vector((edMati)[, 4])
}
}
if (linreg)
{
linFit1 <- lm(object$"data"[, 2:1])
edLin <- ED.lin(linFit1, respLev)
edEst[lenfl + 2, ] <- unlist((edLin)[, 1])
edSe[lenfl + 2, ] <- unlist((edLin)[, 2])
expVec <- as.vector(exp(-c(msMat[, 2], AIC(linFit1)) / 2))
} else {
expVec <- as.vector(exp(-msMat[, 2] / 2))
}
wVec <- expVec / sum(expVec, na.rm = na.rm)
edVec <- apply(edEst * wVec, 2, sum, na.rm = na.rm)
if (identical(interval, "none"))
{
retMat <- as.matrix(cbind(edVec))
colnames(retMat) <- colnames(edMat)[1]
}
if (identical(interval, "buckland"))
{
seVec <- apply(sqrt(edSe^2 + (t(t(edEst) - apply(edEst, 2, mean, na.rm = na.rm)))^2) * wVec, 2,
sum, na.rm = na.rm)
quantVal <- qnorm(1 - (1 - level)/2) * seVec
retMat <- as.matrix(cbind(edVec, seVec, edVec - quantVal, edVec + quantVal))
colnames(retMat) <- c(colnames(edMat)[c(1, 2)], "Lower", "Upper")
}
if (identical(interval, "kang"))
{
retMat <- as.matrix(cbind(apply(edEst * wVec, 2, sum, na.rm = na.rm),
apply(edCll * wVec, 2, sum, na.rm = na.rm),
apply(edClu * wVec, 2, sum, na.rm = na.rm)))
colnames(retMat) <- colnames(edMat)[c(1,3,4)]
}
rownames(retMat) <- rownames(edMat)
disMat <- as.matrix(cbind(edEst, wVec))
colnames(disMat) <- c(paste("ED", respLev, sep = ""), "Weight")
if (linreg)
{
rownames(disMat) <- c(rownames(msMat), "Lin")
} else {
rownames(disMat) <- rownames(msMat)
}
if (display)
{
print(disMat)
cat("\n")
}
if (extended)
{
return(list(estimates = retMat, fits = disMat))
} else {
retMat
}
}
} |
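# Dispatcher for automated hyperparameter tuning: detects binary classification
# vs. regression from 'y', builds a "type.method.optimizer" key, and calls the
# matching tuning routine (ada/gbm/svm/en combined with the 'ga' or 'hjn'
# optimizer).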
eztune <- function(x, y, method = "svm", optimizer = "hjn", fast = TRUE,
cross = NULL, loss = "default") {
nms <- colnames(x)
if(length(unique(y)) == 2) {
lev <- levels(as.factor(y))
y <- as.numeric(as.factor(y)) - 1
type <- "bin"
if(loss == "default") loss = "class"
} else {
y <- as.numeric(as.character(y))
type <- "reg"
if(loss == "default") loss = "mse"
}
if(fast > 1) {
fast <- round(fast)
}
if(!is.null(cross)) {
cross <- round(cross)
}
command <- paste(type, method, optimizer, sep = ".")
ezt <- switch(command,
bin.ada.ga = ada.bin.ga(x, y, cross = cross, fast = fast, loss = loss),
bin.ada.hjn = ada.bin.hjn(x, y, cross = cross, fast = fast, loss = loss),
bin.gbm.ga = gbm.bin.ga(x, y, cross = cross, fast = fast, loss = loss),
bin.gbm.hjn = gbm.bin.hjn(x, y, cross = cross, fast = fast, loss = loss),
bin.svm.ga = svm.bin.ga(x, y, cross = cross, fast = fast, loss = loss),
bin.svm.hjn = svm.bin.hjn(x, y, cross = cross, fast = fast, loss = loss),
bin.en.ga = en.bin.ga(x, y, cross = cross, fast = fast, loss = loss),
bin.en.hjn = en.bin.hjn(x, y, cross = cross, fast = fast, loss = loss),
reg.gbm.ga = gbm.reg.ga(x, y, cross = cross, fast = fast, loss = loss),
reg.gbm.hjn = gbm.reg.hjn(x, y, cross = cross, fast = fast, loss = loss),
reg.svm.ga = svm.reg.ga(x, y, cross = cross, fast = fast, loss = loss),
reg.svm.hjn = svm.reg.hjn(x, y, cross = cross, fast = fast, loss = loss),
reg.en.ga = en.reg.ga(x, y, cross = cross, fast = fast, loss = loss),
reg.en.hjn = en.reg.hjn(x, y, cross = cross, fast = fast, loss = loss)
)
ezt$variables <- nms
if(grepl("bin.", command)) ezt$levels <- lev
class(ezt) <- "eztune"
ezt
} |
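# Checks that the GRASS vector maps created by import_data() and calc_edges()
# exist, prepares the observation sites and any prediction-site maps via
# prepare_sites(), and drops the temporary 'cat_edge' column from 'sites'.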
calc_sites <- function(locid_col = NULL, pid_col = NULL, predictions = NULL, maxdist = NULL) {
vect <- execGRASS("g.list",
parameters = list(
type = "vect"
),
intern = TRUE)
rast <- execGRASS("g.list",
parameters = list(
type = "rast"
),
intern = TRUE)
if (!"sites_o" %in% vect)
stop("Sites not found. Did you run import_data()?")
if (!"edges" %in% vect)
stop("Edges not found. Did you run calc_edges()?")
if(!is.null(predictions)){
i <- grep("_o$",predictions)
if(length(i) > 0){
predictions[-i] <- paste0(predictions[-i],"_o")
} else
predictions <- paste0(predictions,"_o")
if (any(!predictions %in% vect))
stop("Prediction sites not found. Did you run import_data() on them?")
}
site_maps <- c("sites", predictions)
site_maps <- sub("_o$","", site_maps)
s <- sapply(site_maps, prepare_sites, locid_c = locid_col, pid_c = pid_col, maxdist = maxdist)
execGRASS("v.db.dropcolumn",
map = "sites",
columns = "cat_edge")
}
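# Snaps one site map to the nearest stream edge (v.distance), optionally drops
# sites farther away than 'maxdist', assigns pid, locID, netID and rid, and
# computes the upstream distance (upDist) and the position ratio along the edge.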
prepare_sites <- function(sites_map, locid_c = NULL, pid_c = NULL, maxdist = NULL){
execGRASS("g.copy",
flags = c("overwrite", "quiet"),
parameters = list(
vector = paste0(paste0(sites_map,"_o"), ",",sites_map)))
message(paste0("Preparing sites '", sites_map, "' ..."))
message("Snapping sites to streams ...")
cnames <- execGRASS("db.columns", flags = "quiet",
parameters = list(
table = sites_map
), intern = TRUE)
if(any(i <- which(c("cat_edge","str_edge","dist","NEAR_X", "NEAR_Y") %in% cnames))){
execGRASS("v.db.dropcolumn", flags = "quiet",
map = "sites",
columns = paste0(c("cat_edge","str_edge","dist","NEAR_X", "NEAR_Y")[i], collapse = ","))
}
execGRASS("v.db.addcolumn",
parameters = list(
map = sites_map,
columns = "cat_edge int,str_edge int,dist double precision,NEAR_X double precision,NEAR_Y double precision"
))
execGRASS("v.distance",
flags = c("overwrite", "quiet"),
parameters = list(from = sites_map,
to = "edges",
upload = "cat,dist,to_x,to_y",
column = "cat_edge,dist,NEAR_X,NEAR_Y"))
sites <- readVECT(sites_map, type = "point", ignore.stderr = TRUE)
proj4 <- proj4string(sites)
sites <- as(sites, "data.frame")
sp::coordinates(sites) <- ~ NEAR_X + NEAR_Y
proj4string(sites) <- proj4
names(sites)[names(sites) %in% c( "coords.x1", "coords.x2")] <- c("NEAR_X", "NEAR_Y")
sites$cat_ <- NULL
mdist <- max(sites@data$dist)
message(paste("Maximum snapping distance found:", round(mdist,3), "m"))
if(!is.null(maxdist)){
if(mdist > maxdist){
i <- which(sites@data$dist >= maxdist)
sites <- sites[-i,]
message(paste0("There were ", length(i), " sites with snapping distance > maxdist (", maxdist," m). Sites were deleted."))
}
}
message("Setting pid and locID ...")
if(!is.null(locid_c) && locid_c %in% colnames(sites@data) ){
sites@data$locID <- as.numeric(as.factor(sites@data[,locid_c]))
} else {
sites@data$locID <- sites@data$cat
}
if(!is.null(pid_c) && pid_c %in% colnames(sites@data)){
sites@data$pid <- as.numeric(as.factor(sites@data[,pid_c]))
} else {
sites@data$pid <- sites@data$locID
}
i <- which(colnames(sites@data) %in% c("cat", "cat_"))
sites@data <- sites@data[,-i]
sink("temp.txt")
writeVECT(sites, vname = sites_map,
v.in.ogr_flags = c("overwrite", "quiet", "o"),
ignore.stderr = TRUE)
rm(sites)
sink()
message("Assigning netID and rid ...")
execGRASS("v.db.addcolumn",
flags = c("quiet"),
parameters = list(map = sites_map,
columns = "netID int, rid int"))
execGRASS("db.execute",
parameters = list(
sql=paste0('UPDATE ', sites_map, ' SET rid=(SELECT rid FROM edges WHERE ', sites_map, '.cat_edge=edges.cat)')
))
execGRASS("db.execute",
parameters = list(
sql=paste0('UPDATE ', sites_map, ' SET netID=(SELECT netID FROM edges WHERE ', sites_map, '.cat_edge=edges.cat)')
))
execGRASS("db.execute",
parameters = list(
sql=paste0('UPDATE ', sites_map, ' SET str_edge=(SELECT stream FROM edges WHERE ', sites_map, '.cat_edge=edges.cat)')
))
message("Calculating upDist ...")
execGRASS("v.db.addcolumn",
parameters = list(map = sites_map,
columns = "upDist double precision, distalong double precision"))
execGRASS("v.distance", flags = c("quiet"),
parameters =list(
from = sites_map,
to = "edges",
to_type = "line",
upload = "to_along",
column = "distalong"
))
sql_str <- paste0('UPDATE ', sites_map, ' SET upDist=',
'round(((SELECT upDist FROM edges WHERE edges.cat=',
sites_map, '.cat_edge)-distalong),2)')
execGRASS("db.execute",
parameters = list(
sql=sql_str
))
execGRASS("v.db.addcolumn",
flags = c("quiet"),
parameters = list(
map = sites_map,
columns = "ratio double precision"
))
sql_str <- paste0('UPDATE ', sites_map, ' SET ratio=1-',
'distalong/',
'(SELECT Length FROM edges WHERE edges.cat=', sites_map, '.cat_edge)')
execGRASS("db.execute",
parameters = list(
sql=sql_str
))
try(unlink("temp.txt"), silent = TRUE)
} |
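# Aggregates a daily precipitation object (day columns 3:33) into monthly
# totals; monthly input is returned unchanged.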
precintcon.monthly.aggregation <- function(object) {
if ((is.element("precintcon.daily", class(object)))) {
sum <- rowSums(object[3:33], na.rm=TRUE)
data <- data.frame(object[1], object[2], sum)
colnames(data) <- c("year", "month", "precipitation")
class(data) <- c("data.frame", "precintcon.monthly")
return(data)
} else if ((is.element("precintcon.monthly", class(object)))) {
return(object)
} else
stop("Invalid data. Please, check your input object.")
} |
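# Builds a library of candidate polynomial/ratio terms and matching rescaling
# factors for 2, 3 or 4 indicators, selects the best-fitting term subsets via
# bestfitmod()/combs()/polyfitreg(), and prints the resulting difference
# equations for the top 'nmodels' models of each model size.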
dysymod <- function(indnr, paramnr, var1, var2, chVar1, chVar2, mvar1, mvar2, var3, chVar3, mvar3, var4, chVar4, mvar4)
{
if (indnr == 2)
{
nterms = 17;
nmodelterms = paramnr;
nmodels = 3;
term = vector('list', nterms)
term[[1]] = ''
term[[2]] = ('/x')
term[[3]] = ('/y')
term[[4]] = ('x')
term[[5]] = ('y')
term[[6]] = ('/(x*y)')
term[[7]] = ('x/y')
term[[8]] = ('y/x')
term[[9]] = ('x*y')
term[[10]] = ('x^2')
term[[11]] = ('/x^2')
term[[12]] = ('y^2')
term[[13]] = ('/y^2')
term[[14]] = ('x^3')
term[[15]] = ('y^3')
term[[16]] = ('/x^3')
term[[17]] = ('/y^3')
scaling = vector('list', nterms)
scaling[[1]] = 1
scaling[[2]] = mvar1
scaling[[3]] = mvar2
scaling[[4]] = 1/mvar1
scaling[[5]] = 1/mvar2
scaling[[6]] = mvar1*mvar2
scaling[[7]] = mvar2/mvar1
scaling[[8]] = mvar1/mvar2
scaling[[9]] = 1/(mvar1*mvar2)
scaling[[10]] = 1/(mvar1*mvar1)
scaling[[11]] = mvar1*mvar1
scaling[[12]] = 1/(mvar2*mvar2)
scaling[[13]] = mvar2*mvar2
scaling[[14]] = 1/(mvar1*mvar1*mvar1)
scaling[[15]] = 1/(mvar2*mvar2*mvar2)
scaling[[16]] = mvar1*mvar1*mvar1
scaling[[17]] = mvar2*mvar2*mvar2
scaling <- unlist(scaling)
selmod1 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod2 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
SEtestx <- (bestfitmod(indnr, paramnr, var1, var2, chVar1))
SEtesty <- (bestfitmod(indnr, paramnr, var1, var2, chVar2))
for (ii in 1:nmodelterms)
{
M <- combs(1:nterms, ii)
idx1 = order(SEtestx[ii,], na.last = TRUE, decreasing = FALSE)
idx2 = order(SEtesty[ii,], na.last = TRUE, decreasing = FALSE)
for (jj in 1:nmodels)
{
selmod1[ii, jj, 1:ii] <- M[idx1[jj],]
selmod2[ii, jj, 1:ii] <- M[idx2[jj],]
}
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod1[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar1, modsel)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsx <- meansqerr*(mvar1*mvar1)
B_out_x <- array(list(), dim=c(nmodelterms, nmodels))
B_out_x[[i, j]] <- matrix(0, nterms, 1)
B_out_x[[i, j]][modsel] <- B
B_out_x[[i, j]] <- B_out_x[[i, j]]*(mvar1*scaling)
B <- B*mvar1*scaling[modsel]
out_text_x <- array(list(), dim=c(i, j))
out_text_x[[i, j]] <- cbind('dx', '=')
for (k in 1:i)
out_text_x[[i, j]] <- cbind(out_text_x[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_x[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod2[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar2, modsel)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsy = meansqerr*(mvar2*mvar2)
B_out_y <- array(list(), dim=c(nmodelterms, nmodels))
B_out_y[[i, j]] <- matrix(0, nterms, 1)
B_out_y[[i, j]][modsel] <- B
B_out_y[[i, j]] <- B_out_y[[i, j]]*(mvar2*scaling)
B <- B*mvar2*scaling[modsel]
out_text_y <- array(list(), dim=c(i, j))
out_text_y[[i, j]] <- cbind('dy', '=')
for (k in 1:i)
out_text_y[[i, j]] <- cbind(out_text_y[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_y[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
return(list(B_out_x=B_out_x, B_out_y=B_out_y, out_text_x=out_text_x, out_text_y=out_text_y,
scaling=scaling, SEtestx=SEtestx, SEtesty=SEtesty))
}
if (indnr == 3)
{
nterms = 39;
nmodelterms = paramnr;
nmodels = 3;
term = vector('list', nterms)
term[[1]] = ''
term[[2]] = ('/x')
term[[3]] = ('/y')
term[[4]] = ('/z')
term[[5]] = ('x')
term[[6]] = ('y')
term[[7]] = ('z')
term[[8]] = ('/(x*y)')
term[[9]] = ('/(y*z)')
term[[10]] = ('/(x*z)')
term[[11]] = ('x*y')
term[[12]] = ('y*z')
term[[13]] = ('x*z')
term[[14]] = ('x/y')
term[[15]] = ('y/x')
term[[16]] = ('x/z')
term[[17]] = ('z/x')
term[[18]] = ('y/z')
term[[19]] = ('z/y')
term[[20]] = ('x/(y*z)')
term[[21]] = ('y/(x*z)')
term[[22]] = ('z/(x*y)')
term[[23]] = ('(x*y)/z')
term[[24]] = ('(y*z)/x')
term[[25]] = ('(z*x)/y')
term[[26]] = ('x*y*z')
term[[27]] = ('1/(x*y*z)')
term[[28]] = ('x^2')
term[[29]] = ('/x^2')
term[[30]] = ('y^2')
term[[31]] = ('/y^2')
term[[32]] = ('z^2')
term[[33]] = ('/z^2')
term[[34]] = ('x^3')
term[[35]] = ('y^3')
term[[36]] = ('z^3')
term[[37]] = ('/x^3')
term[[38]] = ('/y^3')
term[[39]] = ('/z^3')
scaling = vector('list', nterms)
scaling[[1]] = 1
scaling[[2]] = mvar1
scaling[[3]] = mvar2
scaling[[4]] = mvar3
scaling[[5]] = 1/mvar1
scaling[[6]] = 1/mvar2
scaling[[7]] = 1/mvar3
scaling[[8]] = mvar1*mvar2
scaling[[9]] = mvar2*mvar3
scaling[[10]] = mvar1*mvar3
scaling[[11]] = 1/(mvar1*mvar2)
scaling[[12]] = 1/(mvar2*mvar3)
scaling[[13]] = 1/(mvar1*mvar3)
scaling[[14]] = mvar2/mvar1
scaling[[15]] = mvar1/mvar2
scaling[[16]] = mvar3/mvar1
scaling[[17]] = mvar1/mvar3
scaling[[18]] = mvar3/mvar2
scaling[[19]] = mvar2/mvar3
scaling[[20]] = (mvar2*mvar3)/mvar1
scaling[[21]] = (mvar1*mvar3)/mvar2
scaling[[22]] = (mvar1*mvar2)/mvar3
scaling[[23]] = mvar3/(mvar1*mvar2)
scaling[[24]] = mvar1/(mvar2*mvar3)
scaling[[25]] = mvar2/(mvar1*mvar3)
scaling[[26]] = 1/(mvar1*mvar2*mvar3)
scaling[[27]] = mvar1*mvar2*mvar3
scaling[[28]] = mvar1^(-2)
scaling[[29]] = mvar1^2
scaling[[30]] = mvar2^(-2)
scaling[[31]] = mvar2^2
scaling[[32]] = mvar3^(-2)
scaling[[33]] = mvar3^2
scaling[[34]] = 1/(mvar1*mvar1*mvar1)
scaling[[35]] = 1/(mvar2*mvar2*mvar2)
scaling[[36]] = 1/(mvar3*mvar3*mvar3)
scaling[[37]] = mvar1*mvar1*mvar1
scaling[[38]] = mvar2*mvar2*mvar2
scaling[[39]] = mvar3*mvar3*mvar3
scaling <- unlist(scaling)
selmod1 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod2 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod3 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
SEtestx <- (bestfitmod(indnr, paramnr, var1, var2, chVar1, var3))
SEtesty <- (bestfitmod(indnr, paramnr, var1, var2, chVar2, var3))
SEtestz <- (bestfitmod(indnr, paramnr, var1, var2, chVar3, var3))
for (ii in 1:nmodelterms)
{
M <- combs(1:nterms, ii)
idx1 = order(SEtestx[ii,], na.last = TRUE, decreasing = FALSE)
idx2 = order(SEtesty[ii,], na.last = TRUE, decreasing = FALSE)
idx3 = order(SEtestz[ii,], na.last = TRUE, decreasing = FALSE)
for (jj in 1:nmodels)
{
selmod1[ii, jj, 1:ii] <- M[idx1[jj],]
selmod2[ii, jj, 1:ii] <- M[idx2[jj],]
selmod3[ii, jj, 1:ii] <- M[idx3[jj],]
}
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod1[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar1, modsel, var3)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsx <- meansqerr*(mvar1*mvar1)
B_out_x <- array(list(), dim=c(nmodelterms, nmodels))
B_out_x[[i, j]] <- matrix(0, nterms, 1)
B_out_x[[i, j]][modsel] <- B
B_out_x[[i, j]] <- B_out_x[[i, j]]*(mvar1*scaling)
B <- B*mvar1*scaling[modsel]
out_text_x <- array(list(), dim=c(i, j))
out_text_x[[i, j]] <- cbind('dx', '=')
for (k in 1:i)
out_text_x[[i, j]] <- cbind(out_text_x[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_x[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod2[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar2, modsel, var3)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsy = meansqerr*(mvar2*mvar2)
B_out_y <- array(list(), dim=c(nmodelterms, nmodels))
B_out_y[[i, j]] <- matrix(0, nterms, 1)
B_out_y[[i, j]][modsel] <- B
B_out_y[[i, j]] <- B_out_y[[i, j]]*(mvar2*scaling)
B <- B*mvar2*scaling[modsel]
out_text_y <- array(list(), dim=c(i, j))
out_text_y[[i, j]] <- cbind('dy', '=')
for (k in 1:i)
out_text_y[[i, j]] <- cbind(out_text_y[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_y[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod3[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar3, modsel, var3)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsz = meansqerr*(mvar3*mvar3)
B_out_z <- array(list(), dim=c(nmodelterms, nmodels))
B_out_z[[i, j]] <- matrix(0, nterms, 1)
B_out_z[[i, j]][modsel] <- B
B_out_z[[i, j]] <- B_out_z[[i, j]]*(mvar3*scaling)
B <- B*mvar3*scaling[modsel]
out_text_z <- array(list(), dim=c(i, j))
out_text_z[[i, j]] <- cbind('dz', '=')
for (k in 1:i)
out_text_z[[i, j]] <- cbind(out_text_z[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_z[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
return(list(B_out_x=B_out_x, B_out_y=B_out_y, B_out_z=B_out_z, out_text_x=out_text_x, out_text_y=out_text_y,
out_text_z=out_text_z, scaling=scaling, SEtestx=SEtestx, SEtesty=SEtesty, SEtestz=SEtestz))
}
if (indnr == 4)
{
nterms = 97;
nmodelterms = paramnr;
nmodels = 3;
term = vector('list', nterms)
term[[1]] = ''
term[[2]] = ('/x')
term[[3]] = ('/y')
term[[4]] = ('/z')
term[[5]] = ('/v')
term[[6]] = ('x')
term[[7]] = ('y')
term[[8]] = ('z')
term[[9]] = ('v')
term[[10]] = ('/(x*y)')
term[[11]] = ('/(y*z)')
term[[12]] = ('/(x*z)')
term[[13]] = ('/(x*v)')
term[[14]] = ('/(y*v)')
term[[15]] = ('/(z*v)')
term[[16]] = ('x*y')
term[[17]] = ('y*z')
term[[18]] = ('x*z')
term[[19]] = ('x*v')
term[[20]] = ('y*v')
term[[21]] = ('z*v')
term[[22]] = ('x/y')
term[[23]] = ('y/x')
term[[24]] = ('x/z')
term[[25]] = ('z/x')
term[[26]] = ('y/z')
term[[27]] = ('z/y')
term[[28]] = ('x/v')
term[[29]] = ('v/x')
term[[30]] = ('y/v')
term[[31]] = ('v/y')
term[[32]] = ('z/v')
term[[33]] = ('v/z')
term[[34]] = ('x/(y*z)')
term[[35]] = ('y/(x*z)')
term[[36]] = ('z/(x*y)')
term[[37]] = ('v/(x*y)')
term[[38]] = ('v/(x*z)')
term[[39]] = ('v/(y*z)')
term[[40]] = ('x/(y*v)')
term[[41]] = ('x/(z*v)')
term[[42]] = ('y/(x*v)')
term[[43]] = ('y/(z*v)')
term[[44]] = ('z/(x*v)')
term[[45]] = ('z/(y*v)')
term[[46]] = ('(x*y)/z')
term[[47]] = ('(y*z)/x')
term[[48]] = ('(z*x)/y')
term[[49]] = ('(x*y)/v')
term[[50]] = ('(y*z)/v')
term[[51]] = ('(z*x)/v')
term[[52]] = ('(x*v)/z')
term[[53]] = ('(y*v)/z')
term[[54]] = ('(y*v)/x')
term[[55]] = ('(z*v)/x')
term[[56]] = ('(v*x)/y')
term[[57]] = ('(v*z)/y')
term[[58]] = ('x*y*z')
term[[59]] = ('x*y*v')
term[[60]] = ('x*v*z')
term[[61]] = ('v*y*z')
term[[62]] = ('1/(x*y*z)')
term[[63]] = ('1/(x*y*v)')
term[[64]] = ('1/(x*v*z)')
term[[65]] = ('1/(v*y*z)')
term[[66]] = ('x/(v*y*z)')
term[[67]] = ('y/(x*v*z)')
term[[68]] = ('z/(x*y*v)')
term[[69]] = ('v/(x*y*z)')
term[[70]] = ('(x*y*z)/v')
term[[71]] = ('(x*y*v)/z')
term[[72]] = ('(x*v*z)/y')
term[[73]] = ('(v*y*z)/x')
term[[74]] = ('(x*y)/(v*z)')
term[[75]] = ('(x*z)/(v*y)')
term[[76]] = ('(x*v)/(y*z)')
term[[77]] = ('(y*z)/(v*x)')
term[[78]] = ('(y*v)/(z*x)')
term[[79]] = ('(z*v)/(x*y)')
term[[80]] = ('1/(x*y*z*v)')
term[[81]] = ('x*y*z*v')
term[[82]] = ('x^2')
term[[83]] = ('/x^2')
term[[84]] = ('y^2')
term[[85]] = ('/y^2')
term[[86]] = ('z^2')
term[[87]] = ('/z^2')
term[[88]] = ('v^2')
term[[89]] = ('/v^2')
term[[90]] = ('x^3')
term[[91]] = ('y^3')
term[[92]] = ('z^3')
term[[93]] = ('v^3')
term[[94]] = ('/x^3')
term[[95]] = ('/y^3')
term[[96]] = ('/z^3')
term[[97]] = ('/v^3')
scaling = vector('list', nterms)
scaling[[1]] = 1
scaling[[2]] = mvar1
scaling[[3]] = mvar2
scaling[[4]] = mvar3
scaling[[5]] = mvar4
scaling[[6]] = 1/mvar1
scaling[[7]] = 1/mvar2
scaling[[8]] = 1/mvar3
scaling[[9]] = 1/mvar4
scaling[[10]] = mvar1*mvar2
scaling[[11]] = mvar2*mvar3
scaling[[12]] = mvar1*mvar3
scaling[[13]] = mvar1*mvar4
scaling[[14]] = mvar2*mvar4
scaling[[15]] = mvar3*mvar4
scaling[[16]] = 1/(mvar1*mvar2)
scaling[[17]] = 1/(mvar2*mvar3)
scaling[[18]] = 1/(mvar1*mvar3)
scaling[[19]] = 1/(mvar1*mvar4)
scaling[[20]] = 1/(mvar2*mvar4)
scaling[[21]] = 1/(mvar3*mvar4)
scaling[[22]] = mvar2/mvar1
scaling[[23]] = mvar1/mvar2
scaling[[24]] = mvar3/mvar1
scaling[[25]] = mvar1/mvar3
scaling[[26]] = mvar3/mvar2
scaling[[27]] = mvar2/mvar3
scaling[[28]] = mvar4/mvar1
scaling[[29]] = mvar1/mvar4
scaling[[30]] = mvar4/mvar2
scaling[[31]] = mvar2/mvar4
scaling[[32]] = mvar4/mvar3
scaling[[33]] = mvar3/mvar4
scaling[[34]] = (mvar2*mvar3)/mvar1
scaling[[35]] = (mvar1*mvar3)/mvar2
scaling[[36]] = (mvar1*mvar2)/mvar3
scaling[[37]] = (mvar1*mvar2)/mvar4
scaling[[38]] = (mvar1*mvar3)/mvar4
scaling[[39]] = (mvar2*mvar3)/mvar4
scaling[[40]] = (mvar2*mvar4)/mvar1
scaling[[41]] = (mvar3*mvar4)/mvar1
scaling[[42]] = (mvar1*mvar4)/mvar2
scaling[[43]] = (mvar3*mvar4)/mvar2
scaling[[44]] = (mvar1*mvar4)/mvar3
scaling[[45]] = (mvar2*mvar4)/mvar3
scaling[[46]] = mvar3/(mvar1*mvar2)
scaling[[47]] = mvar1/(mvar2*mvar3)
scaling[[48]] = mvar2/(mvar1*mvar3)
scaling[[49]] = mvar4/(mvar1*mvar2)
scaling[[50]] = mvar4/(mvar2*mvar3)
scaling[[51]] = mvar4/(mvar1*mvar3)
scaling[[52]] = mvar3/(mvar1*mvar4)
scaling[[53]] = mvar3/(mvar2*mvar4)
scaling[[54]] = mvar1/(mvar2*mvar4)
scaling[[55]] = mvar1/(mvar3*mvar4)
scaling[[56]] = mvar2/(mvar1*mvar4)
scaling[[57]] = mvar2/(mvar3*mvar4)
scaling[[58]] = 1/(mvar1*mvar2*mvar3)
scaling[[59]] = 1/(mvar1*mvar2*mvar4)
scaling[[60]] = 1/(mvar1*mvar4*mvar3)
scaling[[61]] = 1/(mvar4*mvar2*mvar3)
scaling[[62]] = mvar1*mvar2*mvar3
scaling[[63]] = mvar1*mvar2*mvar4
scaling[[64]] = mvar1*mvar4*mvar3
scaling[[65]] = mvar4*mvar2*mvar3
scaling[[66]] = (mvar4*mvar2*mvar3)/mvar1
scaling[[67]] = (mvar1*mvar4*mvar3)/mvar2
scaling[[68]] = (mvar1*mvar2*mvar4)/mvar3
scaling[[69]] = (mvar1*mvar2*mvar3)/mvar4
scaling[[70]] = mvar4/(mvar1*mvar2*mvar3)
scaling[[71]] = mvar3/(mvar1*mvar2*mvar4)
scaling[[72]] = mvar2/(mvar1*mvar4*mvar3)
scaling[[73]] = mvar1/(mvar4*mvar2*mvar3)
scaling[[74]] = (mvar4*mvar3)/(mvar1*mvar2)
scaling[[75]] = (mvar4*mvar2)/(mvar1*mvar3)
scaling[[76]] = (mvar2*mvar3)/(mvar1*mvar4)
scaling[[77]] = (mvar4*mvar1)/(mvar2*mvar3)
scaling[[78]] = (mvar3*mvar1)/(mvar2*mvar4)
scaling[[79]] = (mvar1*mvar2)/(mvar3*mvar4)
scaling[[80]] = mvar1*mvar2*mvar3*mvar4
scaling[[81]] = 1/(mvar1*mvar2*mvar3*mvar4)
scaling[[82]] = mvar1^(-2)
scaling[[83]] = mvar1^2
scaling[[84]] = mvar2^(-2)
scaling[[85]] = mvar2^2
scaling[[86]] = mvar3^(-2)
scaling[[87]] = mvar3^2
scaling[[88]] = mvar4^(-2)
scaling[[89]] = mvar4^2
scaling[[90]] = 1/(mvar1*mvar1*mvar1)
scaling[[91]] = 1/(mvar2*mvar2*mvar2)
scaling[[92]] = 1/(mvar3*mvar3*mvar3)
scaling[[93]] = 1/(mvar4*mvar4*mvar4)
scaling[[94]] = mvar1*mvar1*mvar1
scaling[[95]] = mvar2*mvar2*mvar2
scaling[[96]] = mvar3*mvar3*mvar3
scaling[[97]] = mvar4*mvar4*mvar4
scaling <- unlist(scaling)
selmod1 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod2 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod3 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
selmod4 <- array(0, dim=c(nmodelterms, nmodels, nmodelterms))
SEtestx <- (bestfitmod(indnr, paramnr, var1, var2, chVar1, var3, var4))
SEtesty <- (bestfitmod(indnr, paramnr, var1, var2, chVar2, var3, var4))
SEtestz <- (bestfitmod(indnr, paramnr, var1, var2, chVar3, var3, var4))
SEtestv <- (bestfitmod(indnr, paramnr, var1, var2, chVar4, var3, var4))
for (ii in 1:nmodelterms)
{
M <- combs(1:nterms, ii)
idx1 = order(SEtestx[ii,], na.last = TRUE, decreasing = FALSE)
idx2 = order(SEtesty[ii,], na.last = TRUE, decreasing = FALSE)
idx3 = order(SEtestz[ii,], na.last = TRUE, decreasing = FALSE)
idx4 = order(SEtestv[ii,], na.last = TRUE, decreasing = FALSE)
for (jj in 1:nmodels)
{
selmod1[ii, jj, 1:ii] <- M[idx1[jj],]
selmod2[ii, jj, 1:ii] <- M[idx2[jj],]
selmod3[ii, jj, 1:ii] <- M[idx3[jj],]
selmod4[ii, jj, 1:ii] <- M[idx4[jj],]
}
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod1[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar1, modsel, var3, var4)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsx <- meansqerr*(mvar1*mvar1)
B_out_x <- array(list(), dim=c(nmodelterms, nmodels))
B_out_x[[i, j]] <- matrix(0, nterms, 1)
B_out_x[[i, j]][modsel] <- B
B_out_x[[i, j]] <- B_out_x[[i, j]]*(mvar1*scaling)
B <- B*mvar1*scaling[modsel]
out_text_x <- array(list(), dim=c(i, j))
out_text_x[[i, j]] <- cbind('dx', '=')
for (k in 1:i)
out_text_x[[i, j]] <- cbind(out_text_x[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_x[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod2[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar2, modsel, var3, var4)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsy = meansqerr*(mvar2*mvar2)
B_out_y <- array(list(), dim=c(nmodelterms, nmodels))
B_out_y[[i, j]] <- matrix(0, nterms, 1)
B_out_y[[i, j]][modsel] <- B
B_out_y[[i, j]] <- B_out_y[[i, j]]*(mvar2*scaling)
B <- B*mvar2*scaling[modsel]
out_text_y <- array(list(), dim=c(i, j))
out_text_y[[i, j]] <- cbind('dy', '=')
for (k in 1:i)
out_text_y[[i, j]] <- cbind(out_text_y[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_y[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod3[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar3, modsel, var3, var4)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsz = meansqerr*(mvar3*mvar3)
B_out_z <- array(list(), dim=c(nmodelterms, nmodels))
B_out_z[[i, j]] <- matrix(0, nterms, 1)
B_out_z[[i, j]][modsel] <- B
B_out_z[[i, j]] <- B_out_z[[i, j]]*(mvar3*scaling)
B <- B*mvar3*scaling[modsel]
out_text_z <- array(list(), dim=c(i, j))
out_text_z[[i, j]] <- cbind('dz', '=')
for (k in 1:i)
out_text_z[[i, j]] <- cbind(out_text_z[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_z[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
for (i in 1:nmodelterms)
{
for (j in 1:nmodels)
{
modsel = matrix(0, i, 1)
modsel[1:i] <- selmod4[i, j, 1:i]
tmp = polyfitreg(indnr, var1, var2, chVar4, modsel, var3, var4)
B <- tmp[[1]]
meansqerr <- tmp[[2]]
rmsv = meansqerr*(mvar4*mvar4)
B_out_v <- array(list(), dim=c(nmodelterms, nmodels))
B_out_v[[i, j]] <- matrix(0, nterms, 1)
B_out_v[[i, j]][modsel] <- B
B_out_v[[i, j]] <- B_out_v[[i, j]]*(mvar4*scaling)
B <- B*mvar4*scaling[modsel]
out_text_v <- array(list(), dim=c(i, j))
out_text_v[[i, j]] <- cbind('dv', '=')
for (k in 1:i)
out_text_v[[i, j]] <- cbind(out_text_v[[i, j]], '+', format(B[k], digits = 2), term[[modsel[k]]])
print(c('Model', i, j))
write.table(format(out_text_v[[i, j]], justify="left", width=1), row.names=FALSE, col.names=FALSE, quote=FALSE)
}
print('--------------------------------------------')
}
return(list(B_out_x=B_out_x, B_out_y=B_out_y, B_out_z=B_out_z, B_out_v=B_out_v, out_text_x=out_text_x,
out_text_y=out_text_y, out_text_z=out_text_z, out_text_v=out_text_v, scaling=scaling,
SEtestx=SEtestx, SEtesty=SEtesty, SEtestz=SEtestz, SEtestv=SEtestv))
}
} |
NULL
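# Signed Box-Cox transformation of a numeric vector; lambda = "auto" estimates
# the parameter with forecast::BoxCox.lambda() (Guerrero method by default),
# and box_cox_inv_vec() applies the inverse transformation.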
box_cox_vec <- function(x, lambda = "auto", silent = FALSE) {
if (is.null(lambda) || lambda[1] == "auto") {
lambda <- auto_lambda(x)
if (!silent) message("box_cox_vec(): Using value for lambda: ", lambda)
}
if (lambda < 0) {
x[x < 0] <- NA
}
if (lambda == 0) {
log(x)
} else {
(sign(x) * abs(x) ^ lambda - 1) / lambda
}
}
box_cox_inv_vec <- function(x, lambda) {
if (rlang::is_missing(lambda)) {
rlang::abort("box_cox_inv_vec(lambda): Is missing. Please provide a value for lambda.")
}
if (lambda < 0) {
x[x > -1 / lambda] <- NA
}
if (lambda == 0) {
exp(x)
} else {
x <- x * lambda + 1
sign(x) * abs(x) ^ (1 / lambda)
}
}
auto_lambda <- function(x, method = c("guerrero", "loglik"), lambda_lower = -1, lambda_upper = 2) {
forecast::BoxCox.lambda(x, method = method[1], lower = lambda_lower, upper = lambda_upper)
} |
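# Estimates the noise standard deviation by sample splitting: lasso selection
# (cv.glmnet) on one half of the data, OLS refit on the other half, averaged
# over 20 random splits.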
estimate_sigma_data_spliting = function(X,y, verbose=FALSE){
nrep = 20
sigma_est = 0
nest = 0
for (i in 1:nrep){
n=nrow(X)
m=floor(n/2)
subsample = sample(1:n, m, replace=FALSE)
leftover = setdiff(1:n, subsample)
CV = cv.glmnet(X[subsample,], y[subsample], standardize=FALSE, intercept=FALSE, family="gaussian")
beta_hat = coef(CV, s="lambda.min")[-1]
selected = which(beta_hat!=0)
if (verbose){
print(c("nselected",length(selected)))
}
if (length(selected)>0){
LM = lm(y[leftover]~X[leftover,][,selected])
sigma_est = sigma_est+sigma(LM)
nest = nest+1
}
}
return(sigma_est/nest)
}
selective.plus.BH = function(beta, selected.vars, pvalues, q, verbose=FALSE){
if (is.null(selected.vars)){
return(list(power=NA, FDR=NA, pvalues=NULL, null.pvalues=NULL, ci=NULL, nselected=0))
}
nselected = length(selected.vars)
p.adjust.BH = p.adjust(pvalues, method = "BH", n = nselected)
rejected = selected.vars[which(p.adjust.BH<q)]
nrejected=length(rejected)
if (verbose){
print(paste("sel+BH rejected", nrejected, "vars:",toString(rejected)))
}
true.nonzero = which(beta!=0)
true.nulls = which(beta==0)
if (verbose){
print(paste("true nonzero", length(true.nonzero), "vars:", toString(true.nonzero)))
}
TP = length(intersect(rejected, true.nonzero))
s = length(true.nonzero)
if (s==0){
power = NA
} else{
power = TP/s
}
FDR = (nrejected-TP)/max(1, nrejected)
selected.nulls = NULL
for (i in 1:nselected){
if (any(true.nulls==selected.vars[i])){
selected.nulls = c(selected.nulls, i)
}
}
null.pvalues=NA
if (length(selected.nulls)>0){
null.pvalues = pvalues[selected.nulls]
if (verbose){
print(paste("selected nulls", length(selected.nulls), "vars:",toString(selected.vars[selected.nulls])))
}
}
return(list(power=power,
FDR=FDR,
pvalues=pvalues,
null.pvalues=null.pvalues,
nselected=nselected,
nrejected=nrejected))
}
AR_design = function(n, p, rho, scale=FALSE){
times = c(1:p)
cov_mat <- rho^abs(outer(times, times, "-"))
chol_mat = chol(cov_mat)
X=matrix(rnorm(n*p), nrow=n) %*% t(chol_mat)
if (scale==TRUE){
X = scale(X)
X = X/sqrt(n)
}
return(X)
}
equicorrelated_design = function(n, p, rho, scale=FALSE){
X = sqrt(1-rho)*matrix(rnorm(n*p),n) + sqrt(rho)*matrix(rep(rnorm(n), p), nrow = n)
if (scale==TRUE){
X = scale(X)
X = X/sqrt(n)
}
return(X)
}
gaussian_instance = function(n, p, s, rho, sigma, snr, random_signs=TRUE, scale=FALSE, design="AR"){
if (design=="AR"){
X=AR_design(n,p,rho, scale)
} else if (design=="equicorrelated"){
X=equicorrelated_design(n,p, rho, scale)
}
beta = rep(0, p)
beta[1:s]=snr
if (random_signs==TRUE && s>0){
signs = sample(c(-1,1), s, replace = TRUE)
beta[1:s] = beta[1:s] * signs
}
beta=sample(beta)
y = X %*% beta + rnorm(n)*sigma
result <- list(X=X,y=y,beta=beta)
return(result)
}
logistic_instance = function(n, p, s, rho, snr, random_signs=TRUE, scale=FALSE, design="AR"){
if (design=="AR"){
X=AR_design(n,p,rho, scale)
} else if (design=="equicorrelated"){
X=equicorrelated_design(n,p, rho, scale)
}
beta = rep(0, p)
beta[1:s]=snr
if (random_signs==TRUE && s>0){
signs = sample(c(-1,1), s, replace = TRUE)
beta[1:s] = beta[1:s] * signs
}
beta=sample(beta)
mu = X %*% beta
prob = exp(mu)/(1+exp(mu))
y = rbinom(n, 1, prob)
result <- list(X=X,y=y,beta=beta)
return(result)
}
estimate_sigma = function(X, y, beta_hat_cv){
n=nrow(X)
p=ncol(X)
if (n<p){
residuals = y-X%*% beta_hat_cv
nactive=length(which(beta_hat_cv!=0))
sigma_est_sq = drop(t(residuals) %*% residuals/(nrow(X)-nactive))
sigma_est=sqrt(sigma_est_sq)
} else{
m = lm(y~X-1)
sigma_est = summary(m)$sigma
}
return(sigma_est)
}
theoretical.lambda = function(X, loss="ls", sigma=1){
n = nrow(X); p = ncol(X)
nsamples= 1000
if (loss=="ls"){
empirical = apply(abs(t(X) %*% matrix(rnorm(n*nsamples), nrow=n)), 2, max)
} else if (loss=="logit"){
empirical = apply(abs(t(X) %*% matrix(sample(c(-0.5,0.5), n*nsamples, replace = TRUE), nrow=n)), 2, max)
}
lam = mean(empirical)*sigma/n
return(lam)
} |
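# Reads precipitation and potential evapotranspiration forcing either from
# GeoTIFF/NetCDF rasters or from CSV tables and returns both as point data in
# a list(PET = ..., Prec = ...).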
upForcing <- function(path_p = tempdir(), path_pet = tempdir(), file_type = "raster", format = "GTiff"){
if (!exists("path_pet")){
stop("Not filepath to read evapotranspiration data")
} else if (!exists("path_p")){
stop("Not filepath to read precipitation data")
}
if (file_type == "raster"){
if (format == "GTiff"){
if( length(list.files(path_pet, pattern = ".tif")) == 0 | length( list.files(path_p, pattern = ".tif")) == 0){
stop("Not avaliable data of precipitation or evapotranspiration")
}
pet_files <- list.files(path_pet)
pet <- raster::stack(paste(path_pet, pet_files, sep = ""))
p_files <- list.files(path_p)
p <- raster::stack(paste(path_p, p_files, sep = ""))
} else if(format == "NCDF"){
if( length( list.files(path_pet, pattern = ".nc")) == 0 | length( list.files(path_p, pattern = ".nc")) == 0){
stop("Not avaliable data of precipitation or evapotranspiration")
}
pet <- raster::brick(path_pet)
p <- raster::brick(path_p)
}
p_v <- raster::rasterToPoints(p)
pet_v <- raster::rasterToPoints(pet)
} else {
pet_files <- list.files(path_pet)
p_files <- list.files(path_p)
p_v <- read.csv(paste(path_p, p_files,sep = ""))
pet_v <- read.csv(paste(path_pet, pet_files,sep = ""))
}
meteo <- list(PET = pet_v, Prec = p_v)
return(meteo)
} |
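# Builds a lavaan-style model syntax string ('lavsyn') from a fitted TAM model,
# fixing the item loadings (B), thresholds (AXsi), latent means and
# (co)variances at their estimated values.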
tam2mirt_fix <- function( D, factors, B, dat, AXsi,
mean.trait, cov.trait, tamobj )
{
lavsyn <- NULL
for (dd in 1:D){
fac.dd <- factors[dd]
B2.dd <- round( B[,2,dd], 4)
syn0 <- paste0( paste0( B2.dd[ B2.dd!=0], "*", colnames(dat)[ B2.dd!=0] ), collapse="+" )
syn0 <- paste0( fac.dd, "=~ ", syn0, "\n")
lavsyn <- paste0( lavsyn, syn0 )
}
maxK <- ncol(AXsi) - 1
for (kk in 1:maxK){
t1 <- round( AXsi[,kk+1], 4 )
string1 <- paste0("t", kk )
syn0 <- paste0( colnames(dat), " | ", t1, "*", string1)
syn0 <- paste0( syn0, collapse="\n")
hh <- ""
if (kk !=maxK){ hh <- "\n" }
lavsyn <- paste0( lavsyn, syn0, hh)
}
itemg <- colnames(dat)[ maxK==1 ]
if ( length(itemg) > 0 ){
lavsyn <- paste0( lavsyn, "\n", paste0( paste0( itemg, " ?=0*g1" ), collapse="\n") )
lavsyn <- paste0( lavsyn, "\n", paste0( paste0( itemg, " ?=0*s1" ), collapse="\n") )
}
syn0 <- paste0( factors, " ~ ", round(as.vector(mean.trait),4), "*1" )
syn0 <- paste0( syn0, collapse="\n")
lavsyn <- paste0( lavsyn, "\n", syn0 )
syn0 <- paste0( factors, " ~~ ", round( as.vector(diag(cov.trait)),4), "*",factors )
syn0 <- paste0( syn0, collapse="\n")
lavsyn <- paste0( lavsyn, "\n", syn0 )
if (D>1){
for (dd in 1:(D-1)){
for (ee in (dd+1):D ){
syn0 <- paste0( factors[dd], " ~~ ",
round( cov.trait[dd,ee],4), "*",factors[ee] )
syn0 <- paste0( syn0, collapse="\n")
lavsyn <- paste0( lavsyn, "\n", syn0 )
}
}
}
lavsyn <- paste0( lavsyn, " \n")
return(lavsyn)
} |
residuals.loopsummary <- function(object,...){
g <- object
resid.x <-g$pred.x-g$x
resid.y <-g$pred.y-g$y
resid.geometric <- sqrt(resid.x^2+resid.y^2)
return(data.frame("input"=resid.x,"output"=resid.y,"geometric"=resid.geometric))
} |
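# Arithmetic methods ("*", "/", "^", "+", "-", sign, sqrt, Math) for
# distributions mixing absolutely continuous and discrete parts: operands are
# decomposed into positive and negative parts and recombined on the exp/log
# scale; the .finSupport flags track whether the discrete support stays finite.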
setMethod("*", c("AcDcLcDistribution","AcDcLcDistribution"),
function(e1,e2){
e1 <- .ULC.cast(e1)
e2 <- .ULC.cast(e2)
ep <- getdistrOption("TruncQuantile")
e1DC <- decomposePM(e1)
e2DC <- decomposePM(e2)
w12pp <- e1DC$pos$w*e2DC$pos$w
w12mm <- e1DC$neg$w*e2DC$neg$w
w12pm <- e1DC$pos$w*e2DC$neg$w
w12mp <- e1DC$neg$w*e2DC$pos$w
mixCoeff <- c(w12pp,w12mm,w12pm,w12mp)
mixCoeff <- c(mixCoeff,1-sum(mixCoeff))
e12pp <- if(w12pp>ep)
as(exp(log(e1DC$pos$D)+log(e2DC$pos$D)),
"UnivarLebDecDistribution")
else as(Dirac(1), "UnivarLebDecDistribution")
e12pp.f <- discretePart(e1DC$pos$D)@.finSupport[2] &
discretePart(e2DC$pos$D)@.finSupport[2]
d12pp <- discretePart(e12pp)
d12pp@.finSupport <- e12pp.f
discretePart(e12pp) <- d12pp
e12mm <- if(w12mm>ep)
as(exp(log(-e1DC$neg$D)+log(-e2DC$neg$D)),
"UnivarLebDecDistribution")
else as(Dirac(1), "UnivarLebDecDistribution")
e12mm.f <- discretePart(e1DC$neg$D)@.finSupport[1]&
discretePart(e2DC$neg$D)@.finSupport[1]
d12mm <- discretePart(e12mm)
d12mm@.finSupport <- e12mm.f
discretePart(e12mm) <- d12mm
e12pm <- if(w12pm>ep)
as(-exp(log(e1DC$pos$D)+log(-e2DC$neg$D)),
"UnivarLebDecDistribution")
else as(Dirac(-1), "UnivarLebDecDistribution")
e12pm.f <- discretePart(e1DC$pos$D)@.finSupport[2] &
discretePart(e2DC$neg$D)@.finSupport[1]
d12pm <- discretePart(e12pm)
d12pm@.finSupport <- e12pm.f
discretePart(e12pm) <- d12pm
if(identical(e1,e2)){
e12mp <- e12pm
e12mp.f <- e12pm.f
}else{ e12mp <- if(w12mp>ep)
as( -exp(log(-e1DC$neg$D)+log(e2DC$pos$D)),
"UnivarLebDecDistribution")
else as(Dirac(-1), "UnivarLebDecDistribution")
e12mp.f <- discretePart(e1DC$neg$D)@.finSupport[1] &
discretePart(e2DC$pos$D)@.finSupport[2]
d12mp <- discretePart(e12mp)
d12mp@.finSupport <- e12mp.f
discretePart(e12mp) <- d12mp
}
e12pm <- .del0dmixfun(e12pm)
e12mp <- .del0dmixfun(e12mp)
obj <- flat.LCD(mixCoeff = mixCoeff,
e12pp, e12mm, e12pm, e12mp,
as(Dirac(0),"UnivarLebDecDistribution"))
dP <- discretePart(obj)
dP@.finSupport <- c((w12pm+w12mp<ep^2)|(e12pm.f&e12mp.f),
                    (w12pp+w12mm<ep^2)|(e12pp.f&e12mm.f))
discretePart(obj) <- dP
if(getdistrOption("simplifyD"))
obj <- simplifyD(obj)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1(n, ...) * g2(n, ...) },
list(g1 = e1@r, g2 = e2@r))
obj@r <- rnew
if(is(e1@Symmetry,"SphericalSymmetry")&&
is(e2@Symmetry,"SphericalSymmetry"))
if(.isEqual(SymmCenter(e1@Symmetry),0) &&
.isEqual(SymmCenter(e2@Symmetry),0))
obj@Symmetry <- SphericalSymmetry(0)
return(obj)
})
setMethod("/", c("numeric",
"AcDcLcDistribution"),
function(e1,e2){
if (is((e2s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e2))), "try-error"))
e2s <- "e2"
e2 <- .ULC.cast(e2)
if (discreteWeight(e2)>getdistrOption("TruncQuantile"))
if (d.discrete(e2)(0)>getdistrOption("TruncQuantile"))
stop(gettextf("1 / %s is not well-defined with positive probability ", e2s))
e2DC <- decomposePM(e2)
w2p <- e2DC$pos$w
w2m <- e2DC$neg$w
e2p <- as(exp(-log(e2DC$pos$D)), "UnivarLebDecDistribution")
d2p <- discretePart(e2p)
d2p@.finSupport <- c(TRUE,TRUE)
discretePart(e2p) <- d2p
e2m <- as(-exp(-log(-e2DC$neg$D)), "UnivarLebDecDistribution")
d2m <- discretePart(e2m)
d2m@.finSupport <- c(TRUE,TRUE)
discretePart(e2m) <- d2m
e2D <- flat.LCD(mixCoeff = c(w2p, w2m), e2p, e2m)
dP <- discretePart(e2D)
dP@.finSupport <- c(TRUE,TRUE)
discretePart(e2D) <- dP
if(getdistrOption("simplifyD"))
e2D <- simplifyD(e2D)
obj <- e1*e2D
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1 / g2(n, ...) },
list(g1 = e1, g2 = e2@r))
obj@r <- rnew
if(is(e2@Symmetry,"SphericalSymmetry"))
if(.isEqual(SymmCenter(e2@Symmetry),0))
obj@Symmetry <- SphericalSymmetry(0)
return(obj)
})
setMethod("/", c("AcDcLcDistribution",
"AcDcLcDistribution"),
function(e1,e2){
if (is((e2s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e2))), "try-error"))
e2s <- "e2"
e2 <- .ULC.cast(e2)
if (discreteWeight(e2)>getdistrOption("TruncQuantile"))
if (d.discrete(e2)(0)>getdistrOption("TruncQuantile"))
stop(gettextf("1 / %s is not well-defined with positive probability ", e2s))
obj <- e1 * (1/e2)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1(n, ...) / g2(n, ...) },
list(g1 = e1@r, g2 = e2@r))
obj@r <- rnew
if(is(e1@Symmetry,"SphericalSymmetry")&&
is(e2@Symmetry,"SphericalSymmetry"))
if(.isEqual(SymmCenter(e1@Symmetry),0) &&
.isEqual(SymmCenter(e2@Symmetry),0))
obj@Symmetry <- SphericalSymmetry(0)
return(obj)
})
setMethod("^", c("AcDcLcDistribution","Integer"),
function(e1,e2){
if(.isEqual(e2,0)) return(Dirac(1))
if(.isEqual(e2,1)) return(e1)
ep <- getdistrOption("TruncQuantile")
d00 <- discretePart(e1)@d(0)
d0 <- discreteWeight(e1)*d00
if(d0 > ep){
if(.isEqual(d00,1)){
e1 <- acPart(e1)
}else{
su <- support(discretePart(e1))
pr <- d(discretePart(e1))(su)
acW <- acWeight(e1)
discreteP <- DiscreteDistribution(
supp = su[su!=0],
prob = pr[su!=0]/(1-d00))
e1 <- UnivarLebDecDistribution(acPart = acPart(e1),
discretePart = discreteP, acWeight = acW)
}
}
f.0 <- discretePart(e1)@.finSupport
if(e2 > 0){
e1pf <- f.0[2] & (f.0[1]|(e2%%2 == 1L))
e1mf <- f.0[1] | (e2%%2 == 0L)
}else{
e1pf <- e1mf <- TRUE
}
e1DC <- decomposePM(e1)
mixCoeff <- c(e1DC$pos$w,e1DC$neg$w)
mixCoeff <- mixCoeff/sum(mixCoeff)
e1p <- if(mixCoeff[1]>ep)
as(exp(e2*log(e1DC$pos$D)),"UnivarLebDecDistribution")
else as(Dirac(1), "UnivarLebDecDistribution")
d1p <- discretePart(e1p)
d1p@.finSupport <- c(TRUE,e1pf)
discretePart(e1p) <- d1p
e1m <- if(mixCoeff[2]>ep)
as((-1)^e2*exp(e2*log(-e1DC$neg$D)),"UnivarLebDecDistribution")
else as(Dirac((-1)^e2), "UnivarLebDecDistribution")
d1m <- discretePart(e1m)
d1m@.finSupport <- c(e1mf,TRUE)
discretePart(e1m) <- d1m
erg <- flat.LCD(mixCoeff = mixCoeff, e1p, e1m)
if(d0 > ep){
if(.isEqual(d00,1)){
erg <- UnivarLebDecDistribution(acPart = acPart(erg),
discretePart = Dirac(0), acWeight = acW)
}else{
su <- support(discretePart(erg))
su0 <- c(su,0)
o <- order(su0)
pr <- c(d(discretePart(erg))(su) * (1-d00), d00)
suo <- su0[o]
pro <- pr[o]
discreteP <- DiscreteDistribution(supp = suo, prob = pro)
discreteP@.finSupport <- c(e1mf,e1pf)
erg <- UnivarLebDecDistribution(acPart = acPart(erg),
discretePart = discreteP, acWeight = acW)
}
}
if(getdistrOption("simplifyD"))
erg <- simplifyD(erg)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1(n, ...)^g2 },
list(g1 = e1@r, g2 = e2))
erg@r <- rnew
return(erg)
})
setMethod("^", c("AcDcLcDistribution","numeric"),
function(e1,e2){
if (is(try(mc <- match.call(call = sys.call(sys.parent(1))),
silent=TRUE), "try-error"))
{e1s <- "e1"; e2s <- "e2"}
else {e1s <- as.character(deparse(mc$e1))
e2s <- as.character(deparse(mc$e2))}
if (length(e2)>1) stop("length of operator must be 1")
if (isTRUE(all.equal(e2,1))) return(e1)
if (isTRUE(all.equal(e2,0))) return(Dirac(1))
e1 <- .ULC.cast(e1)
if (e2<0) return((1/e1)^(-e2))
if (.isNatural(e2, tol = 1e-10))
return(get("^")(e1 = e1, e2 = as(e2,"Integer")))
ep <- getdistrOption("TruncQuantile")
d00 <- discretePart(e1)@d(0)
d0 <- discreteWeight(e1)*d00
p0 <- p(e1)(0)
if ((p0 > ep && e2 < 0) || (p0 > d0+ep))
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
d1 <- discretePart(e1)
if(d0 > ep){
if(.isEqual(d00,1)){
e1 <- acPart(e1)
}else{
su <- support(d1)
pr <- d(d1)(su)
acW <- acWeight(e1)
discreteP <- DiscreteDistribution(
supp = su[su!=0],
prob = pr[su!=0]/(1-d00))
discreteP@.finSupport <- c(TRUE, d1@.finSupport[2])
e1 <- UnivarLebDecDistribution(acPart = acPart(e1),
discretePart = discreteP, acWeight = acW)
}
}
erg <- exp( e2 * log(e1))
if(d0 > ep){
if(.isEqual(d00,1)){
erg <- UnivarLebDecDistribution(acPart = acPart(erg),
discretePart = Dirac(0), acWeight = acW)
}else{
acW <- acWeight(erg)
erg.d <- discretePart(erg)
su <- support(erg.d)
su0 <- c(su,0)
o <- order(su0)
pr <- c(d(erg.d)(su) * (1-d00), d00)
suo <- su0[o]
pro <- pr[o]
discreteP <- DiscreteDistribution(supp = suo, prob = pro)
discreteP@.finSupport <- c(TRUE, erg.d@.finSupport[2])
erg <- UnivarLebDecDistribution(acPart = acPart(erg),
discretePart = discreteP, acWeight = acW)
}
}
if(getdistrOption("simplifyD"))
erg <- simplifyD(erg)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1(n, ...)^g2 },
list(g1 = e1@r, g2 = e2))
erg@r <- rnew
return(erg)
}
)
setMethod("^", c("AcDcLcDistribution","Dirac"),
function(e1,e2)e1^location(e2))
setMethod("^", c("AcDcLcDistribution","AcDcLcDistribution"),
function(e1,e2){
if (is((e1s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e1))), "try-error"))
e1s <- "e1"
if (is((e2s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e2))), "try-error"))
e2s <- "e2"
e1 <- .ULC.cast(e1)
e2 <- .ULC.cast(e2)
ep <- getdistrOption("TruncQuantile")
if(p(e2)(0)-discreteWeight(e2)*d.discrete(e2)(0)>ep)
{
if (d.discrete(e1)(0)*discreteWeight(e1) > ep)
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
if ((discreteWeight(e2)>1-ep) && all(.isInteger(support(e2)))){
Dlist <- lapply(support(e2), function(x)
as(do.call("^",list(e1=e1,e2=x)), "UnivarLebDecDistribution"))
erg <- as(simplifyD( do.call(flat.LCD,
c(Dlist, alist(mixCoeff = d.discrete(e2)(support(e2)))))),
"UnivarLebDecDistribution")
if(getdistrOption("simplifyD")) erg <- simplifyD(erg)
return(erg)
}
if (p(e1)(0) > ep)
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
}
if(p(e1)(0)>ep)
{
if ((discreteWeight(e2)>1-ep) && all(.isInteger(support(e2)))){
Dlist <- lapply(support(e2), function(x)
as(do.call("^",list(e1=e1,e2=x)), "UnivarLebDecDistribution"))
erg <- as(simplifyD( do.call(flat.LCD,
c(Dlist, alist(mixCoeff = d.discrete(e2)(support(e2)))))),
"UnivarLebDecDistribution")
if(getdistrOption("simplifyD")) erg <- simplifyD(erg)
return(erg)
}
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
}
le1 <- log(e1)
le <- le1 * e2
erg <- exp(le)
if(getdistrOption("simplifyD")) erg <- simplifyD(erg)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1(n, ...)^g2(n, ...) },
list(g1 = e1@r, g2 = e2@r))
erg@r <- rnew
return(erg)
})
setMethod("^", c("numeric","AcDcLcDistribution"),
function(e1,e2){
if (is((e1s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e1))), "try-error"))
e1s <- "e1"
if (is((e2s <- as.character(deparse(match.call(
call = sys.call(sys.parent(1)))$e2))), "try-error"))
e2s <- "e2"
e2 <- .ULC.cast(e2)
ep <- getdistrOption("TruncQuantile")
if(p(e2)(0)-discreteWeight(e2)*d.discrete(e2)(0)>ep)
{
if (abs(e1) < ep)
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
if ((discreteWeight(e2)>1-ep) && all(.isInteger(support(e2)))){
erg <- DiscreteDistribution(e1^support(e2),
d.discrete(e2)(support(e2)))
erg@.finSupport <- c(TRUE, discretePart(e2)@.finSupport[2])
if(e1<0){
de2 <- discretePart(e2)
su <- support(e2)
oddS <- su[su%%2==1L]
podd <- sum(d.discrete(e2)(oddS))
peven <- 1-podd
erg@.finSupport <- c(de2@.finSupport[2]|(podd<ep^2),
                     de2@.finSupport[2]|(peven<ep^2))
}
if(!getdistrOption("simplifyD"))
erg <- as(erg,"UnivarLebDecDistribution")
return(erg)
}
if (e1 < -ep)
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
}
if(e1< -ep)
{
if ((discreteWeight(e2)>1-ep) && all(.isInteger(support(e2)))){
erg <- DiscreteDistribution(e1^support(e2),
d.discrete(e2)(support(e2)))
erg@.finSupport <- c(TRUE, discretePart(e2)@.finSupport[2])
if(e1<0){
de2 <- discretePart(e2)
su <- support(e2)
oddS <- su[su%%2==1L]
podd <- sum(d.discrete(e2)(oddS))
peven <- 1-podd
erg@.finSupport <- c(de2@.finSupport[2]|(podd<ep^2),
                     de2@.finSupport[2]|(peven<ep^2))
}
if(!getdistrOption("simplifyD"))
erg <- as(erg,"UnivarLebDecDistribution")
return(erg)
}
stop(gettextf("%s^%s is not well-defined with positive probability ",
e1s, e2s))
}
le1 <- log(e1)
le <- le1 * e2
erg <- exp(le)
if(getdistrOption("simplifyD")) erg <- simplifyD(erg)
rnew <- function(n, ...){}
body(rnew) <- substitute({ g1^g2(n, ...) },
list(g1 = e1, g2 = e2@r))
erg@r <- rnew
return(erg)
})
setMethod("+", signature(e1="AcDcLcDistribution", e2="AcDcLcDistribution"),
function(e1,e2)(.ULC.cast(e1)+(-.ULC.cast(e2))))
setMethod("-", signature(e1="AcDcLcDistribution", e2="AcDcLcDistribution"),
function(e1,e2)(.ULC.cast(e1)+(-.ULC.cast(e2))))
setMethod("sign", "AcDcLcDistribution",
function(x){
if(is(x,"AbscontDistribution")) d0 <-0
else if(is(x,"DiscreteDistribution")) d0 <- d(x)(0)
else d0 <- d.discrete(as(x, "UnivarLebDecDistribution"))(0)
pm <- p(x)(-getdistrOption("TruncQuantile"))
pp <- p(x)(getdistrOption("TruncQuantile"), lower=FALSE)
Symmetry <- NoSymmetry()
if(.isEqual(pm,pp))
Symmetry <- SphericalSymmetry(0)
DiscreteDistribution(supp = c(-1,0,1), prob = c(pm, d0, pp))
})
setMethod("sqrt", "AcDcLcDistribution",
function(x) x^0.5)
setMethod("Math", "AcDcLcDistribution",
function(x) callGeneric(.ULC.cast(x))) |
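# Returns all direct dominance pathways (nonzero entries of the conflict
# matrix) and all indirect pathways of length up to 'maxLength', with indices
# translated back to individual names.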
findAllPaths = function(conf, maxLength = 2){
if (!("conf.mat" %in% class(conf))){
conf = as.conflictmat(conf)
}
if (maxLength < 2) stop("'maxLength' should be no smaller than 2.")
if (maxLength > 6) stop("'maxLength' should be no greater than 6.")
if(maxLength %% as.integer(maxLength) != 0) {
stop("'maxLength' needs to be an integer.")
}
allPathsOutput <- allPaths(conf, maxLength)
paths <- allPathsOutput[[2]]
pathOutput <- paths
for (i in 1:length(paths)){
for (j in 1:length(paths[[i]]))
pathOutput[[i]][j] <- row.names(conf)[paths[[i]][j]]
}
pathOutputAll <- list(which(conf > 0, arr.ind = TRUE), pathOutput)
names(pathOutputAll)[1] <- "direct pathways"
names(pathOutputAll)[2] <- "indirect pathways"
return(pathOutputAll)
} |
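# Weighted point estimate of an area-correction value: the proportion searched
# at each distance is multiplied by the probability mass of a (possibly
# truncated) distance distribution; if 'varcovVec' is supplied, parametric
# bootstrap draws of the parameters give confidence bounds at 'ciLevel'.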
calcAC <- function(distribution,paramVec,varcovVec=NULL,proportionSearchDF,distanceCol,
proportionCol,additionalCol=NULL,nBoot=NULL,truncBounds=NULL,
ciLevel=0.9,randomSeed=NULL,...){
if(missing(distribution) || length(distribution)!=1){
stop('argument distribution must be a single character string')
}
distn <- tolower(distribution)
if(missing(paramVec) || !is.numeric(paramVec) || any(is.na(paramVec))){
stop('argument paramVec needs to be numeric')
}
param <- paramVec
if(!is.null(varcovVec)){
if(missing(varcovVec) || !is.numeric(varcovVec) || any(is.na(varcovVec))){
stop('argument varcovVec needs to be numeric or NULL')
}
}
if(!is.null(nBoot)&&!is.numeric(nBoot)){
stop('argument nBoot needs to be numeric')
}
nBoot <- ifelse(is.null(nBoot),0,nBoot)
numBoot <- ifelse(nBoot<0,0,ceiling(nBoot))
if(is.null(varcovVec)){
numBoot <- 0
}
if(numBoot>0){
if(!is.numeric(ciLevel)){
stop('argument ciLevel needs to be numeric')
}
if(ciLevel>1|ciLevel<0){
stop('argument ciLevel needs to be between 0 and 1')
}
}
if(numBoot>0){
S <- matrix(nrow=length(param),ncol=length(param))
S[lower.tri(S,diag=TRUE)] <- varcovVec
S[upper.tri(S)] <- S[lower.tri(S)]
if(any(is.na(S))| any(diag(S)<=0)){
stop('Problem creating the variance-covariance matrix from varcovVec')
}
}else{
S <- NULL
}
if(missing(proportionSearchDF) || !is.data.frame(proportionSearchDF)){
stop('proportionSearchDF must be a data.frame')
}
if(missing(distanceCol) || length(distanceCol)!=1){
stop('distanceCol must be a single string')
}
if(missing(proportionCol) || length(proportionCol)!=1){
stop('proportionCol must be a single string')
}
propCols <- c(distanceCol,proportionCol,additionalCol)
if(!is.character(propCols)){
stop('distanceCol, proportionCol, and additionalCol must be class character')
}
unknownCols <- propCols[!propCols%in%names(proportionSearchDF)]
if(length(unknownCols)>0){
stop('The following columns are not in proportionSearchDF: ',paste0(unknownCols,collapse=', '))
}
probCol <- 'prob'
while(probCol%in%propCols){
probCol <- paste0(probCol,'Z')
}
probCol
acCol <- 'pointEst'
while(acCol%in%propCols){
acCol <- paste0(acCol,'Z')
}
acCol
repCol <- 'rep'
while(repCol%in%propCols){
repCol <- paste0(repCol,'Z')
}
repCol
if(!is.null(truncBounds)&&!is.numeric(truncBounds)){
stop('argument truncBounds needs to be numeric')
}
if(is.null(truncBounds)){
tUp <- Inf
tLow <- 0
}else{
tUp <- max(truncBounds,na.rm=TRUE)
tLow <- min(truncBounds,na.rm=TRUE)
if(length(truncBounds)==1){
tLow <- 0
}
if(length(truncBounds)>2){
warning(paste('The smallest value and largest value of truncBounds',
'will be used as the bounds of the truncation.'),
immediate. = TRUE)
}
}
if (!is.null(randomSeed)) {
if (!is.numeric(randomSeed)) {
stop('argument randomSeed needs to be numeric')
}
}
if(numBoot>0){
dotify = function(fn, ...){
do.call(fn, as.list(match.call()[names(match.call()) %in% names(formals(fn))]))
}
set.seed(randomSeed)
rparam <- dotify(fn=mvtnorm::rmvnorm,n=numBoot,mean=param,sigma=S,...)
paramMatrix <- rbind(param,rparam)
}else{
paramMatrix <- matrix(param,ncol=length(param))
}
if(ncol(paramMatrix)<2){
paramMatrix <- cbind(paramMatrix,NA)
}
if(!is.null(additionalCol)){
agFormula <- stats::formula(paste0(acCol,'~',paste0(additionalCol,collapse='+')))
}else{
agFormula <- stats::formula(paste0(acCol,'~',1))
}
acValues <- data.frame()
for(i in 1:nrow(paramMatrix)){
allDat <- proportionSearchDF[,propCols]
allDat[,probCol] <- getDistanceProbability(q=allDat[,distanceCol],distribution = distn,
param1 = paramMatrix[i, 1],
param2 = paramMatrix[i, 2],
tbound = c(tLow, tUp),...)
allDat[,acCol] <- allDat[,proportionCol]*allDat[,probCol]
thisRep <- stats::aggregate(formula=agFormula,FUN=sum,data=allDat)
thisRep[,repCol] <- i-1
acValues <- rbind(acValues,thisRep)
}
pointEst <- acValues[acValues[,repCol]==0,]
pointEst[,repCol] <- NULL
bootReps <- acValues[acValues[,repCol]>0,]
if(nrow(bootReps)==0){
bootReps <- NULL
}
if(numBoot>0){
bootConf <- stats::aggregate(formula=agFormula,FUN=stats::quantile,data=bootReps,probs=c((1-ciLevel)/2,1-(1-ciLevel)/2))
bootBounds <- as.data.frame(bootConf[,acCol])
names(bootBounds) <- paste0(c('L','U'),ciLevel*100)
bootCI <- cbind(bootConf[,additionalCol,drop=FALSE],bootBounds)
outSummary <- merge(pointEst,bootCI,by=additionalCol)
}else{
outSummary <- pointEst
}
out <- list()
out$summary <- outSummary
out$distribution <- distn
out$parameters <- param
out$paramVarCov <- S
out$bootstrap <- bootReps
class(out) <- c('windAC','list')
return(out)
} |
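# Simulates the random consistency index for AHP: the mean consistency index
# (max eigenvalue - dim)/(dim - 1) of 'nsims' random reciprocal pairwise
# comparison matrices filled from the Saaty scale.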
ahp.ri <- function(nsims, dim, seed = 42) {
if (dim%%1!=0){
stop("Number of dimensions must be an integer.")
}
if (dim < 3){
stop("Number of dimensions cannot be lower than 3.")
}
set.seed(seed)
genri <- function(dim){
saatyscale <- c(1/9,1/8,1/7,1/6,1/5,1/4,1/3,1/2,1:9)
draws <- sample(saatyscale, 0.5*dim*(dim-1), replace = TRUE)
.mat <- matrix(data = NA, nrow = dim, ncol = dim)
.mat[row(.mat) == col(.mat)] <- 1
.mat[lower.tri(.mat, diag = FALSE)] <- draws[1:(0.5*dim*(dim-1))]
for (i in 1:nrow(.mat)) {
for (j in 1:ncol(.mat)) {
if (is.na(.mat[i, j])) {
.mat[i, j] <- 1/.mat[j,i]
}
}
}
max_lambda <- max(Re(eigen(.mat)$values))
ri <- (max_lambda - dim)/(dim - 1)
return(ri)
}
mean(replicate(nsims, genri(dim)))
} |
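## Hedged usage sketch for ahp.ri() defined above (not part of the original source):
## it estimates Saaty's random consistency index by averaging the consistency
## index of `nsims` randomly filled reciprocal pairwise-comparison matrices.
## ri4 <- ahp.ri(nsims = 500, dim = 4, seed = 42)
## ri4  # expected to land near the commonly tabulated RI of roughly 0.9 for n = 4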
context("Cadence Histogram")
library(activPAL)
test_that("generate_cadence_histograme", {
input_directory <- paste(system.file("extdata", "", package = "activPAL"),"/",sep="")
output_directory <- paste(tempdir(),"/",sep="")
file_data <- stepping.cadence.bands.folder.two.stepping.groups(input_directory,output_directory)
expect_equal(nrow(file_data), 1)
expect_equal(ncol(file_data), 3)
file_data <- stepping.cadence.bands.folder.four.stepping.groups(input_directory,output_directory)
expect_equal(nrow(file_data), 1)
expect_equal(ncol(file_data), 3)
file_data <- list.files(output_directory,"Test_-cadence-histogram.png")
expect_equal(length(file_data), 1)
expect_equal(file_data, "Test_-cadence-histogram.png")
file.remove(paste(output_directory,file_data,sep=""))
file_data <- list.files(output_directory,"median_cadence_summary*.csv")
expect_equal(length(file_data), 1)
expect_equal(file_data, "median_cadence_summary.csv")
file.remove(paste(output_directory,file_data,sep=""))
}) |
"Gamlist" <-
function(...)
{
gl <- list(...)
oldClass(gl) <- c("Gamlist", "glmlist")
gl
} |
ciTest_ordinal <- function(x, set=NULL, statistic="dev", N=0, ...){
statistic <- match.arg(statistic, c("deviance","wilcoxon","kruskal","jt"))
if (inherits(x, "data.frame")){
dataNames <- names(x)
} else {
if (inherits(x, "table")){
dataNames <- names(dimnames(x))
} else {
stop("'x' must be either a table or a dataframe")
}
}
if (is.null(set)){
set <- dataNames
set.idx <- 1:length(set)
} else {
if (inherits(set, "numeric")){
set.idx <- set
set <- dataNames[set.idx]
} else
if (inherits(set,c("formula", "character"))){
set <- unlist(rhsFormula2list(set))
set.idx <- match(set, dataNames)
}
}
.CI.ordinal(set.idx, set, dataset=x, test=statistic, N=N)
}
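## Hedged usage sketch (illustrative only; UCBAdmissions is nominal rather than
## ordinal, so this shows the calling convention, not a sensible analysis).
## With set = NULL the first two variables of the table are tested conditionally
## on the remaining ones, and N > 0 adds a Monte Carlo p-value:
## ciTest_ordinal(UCBAdmissions, statistic = "kruskal", N = 1000)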
.CI.ordinal <- function(set.idx, set, dataset, test="deviance", N=0) {
c1 <- set.idx[1]
c2 <- set.idx[2]
if (length(set.idx) > 2)
S <- set.idx[-(1:2)]
else
S <- NULL
LRT <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2)
t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]
rm <- t1[1:d1,d2+1]
N <- t1[d1+1,d2+1]
df <- (sum(cm>0)-1)*(sum(rm>0)-1)
dev <- 0
if (df>0) {
fv <- (rm %o% cm)/N
dev <- 2*sum(t*log(t/fv), na.rm=T)
}
return(c(df=df, dev=dev))
}
ans <- apply(m, 1, oneslice, d1, d2)
obs.deviance <- sum(ans[2,])
df <- sum(ans[1,])
P <- 1 - pchisq(obs.deviance, df)
return(list(deviance=obs.deviance, df=df, P=P))
}
wilcoxon <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2)
t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]
rm <- t1[1:d1,d2+1]
N <- t1[d1+1,d2+1]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
W <- sum(r*t[1,])
EW <- (rm[1]/N)*sum(r*cm)
VW <- (rm[1]*rm[2]/(N*(N-1)))*sum(((r-EW/rm[1])^2)*cm)
return(c(W, EW, VW))
}
ans <- apply(m, 1, oneslice, d1, d2)
W <- sum(ans[1,])
EW <- sum(ans[2,])
VW <- sum(ans[3,])
P <- 2*(1 - pnorm(abs(W-EW), sd=sqrt(VW)))
return(list(W=W, EW=EW, P=P))
}
kruskal <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2)
t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]
rm <- t1[1:d1,d2+1]
N <- t1[d1+1,d2+1]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
T <- sum(cm[1:d2]^3-cm[1:d2])/(N^3-N)
f <- 12*((N*(N+1)*(1-T))^(-1))
KW <- f*sum(((t%*%r-rm[1:d1]*(N+1)/2)^2)/rm[1:d1])
df <- (sum(rm>0)-1)
return(c(df=df, KW=KW))
}
ans <- apply(m, 1, oneslice, d1, d2)
obs.KW <- sum(ans[2,])
df <- sum(ans[1,])
P <- 1 - pchisq(obs.KW,df)
return(list(KW=obs.KW, df=df, P=P))
}
jt <- function(m, d1, d2) {
oneslice<-function(t,d1,d2){
dim(t)<-c(d1,d2)
t1 <-addmargins(t)
cm <-t1[d1+1,1:d2]
rm <-t1[1:d1,d2+1]
N <-t1[d1+1,d2+1]
W <-c()
T <-c()
R <-c()
for(i in c(2:d1)){
for(j in c(1:(i-1))){
if(i != j){
T<-c(t[i,],T)
R<-c((rm[i]*(rm[i]+1)/2),R)
W<-c(c(cumsum(c(0,t[i,-d2]+t[j,-d2])))+ c((((t[i,1:d2]+t[j,1:d2])+1)/2)),W)
}
W<-c(W)
T<-c(T)
R<-c(R)
}}
JT <-sum(W*T)-sum(R)
EJT <-sum(N^2-sum(rm^2))/4
U1 <-N*(N-1)*(2*N+5)-sum(rm*(rm-1)*(2*rm+5))-sum(cm*(cm-1)*(2*cm+5))
U2 <-sum(rm*(rm-1)*(rm-2))*sum((cm)*(cm-1)*(cm-2))
U3 <-sum((rm)*(rm-1))*sum((cm)*(cm-1))
t1 <-72
t2 <-36*N*(N-1)*(N-2)
t3 <-8*N*(N-1)
VJT <-(U1/t1)+(U2/t2)+(U3/t3)
return(c(JT, EJT,VJT))
}
ans <- apply(m, 1, oneslice, d1, d2)
JT <- sum(ans[1,])
EJT <- sum(ans[2,])
VJT <- sum(ans[3,])
P <- 2*(1 - pnorm(abs(JT-EJT), sd=sqrt(VJT)))
return(list(JT=JT, EJT=EJT, P=P))
}
rcsum <- function(t,d1,d2) {
dim(t) <- c(d1,d2)
t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]
rm <- t1[1:d1,d2+1]
return(c(rm,cm))
}
rdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]
cm <- tots[(d1+1):(d1+d2)]
N <- sum(rm)
fv <- (rm %o% cm)/N
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) 2*sum(t*log(t/fv), na.rm=T)))
}
wdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]
cm <- tots[(d1+1):(d1+d2)]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) sum(r*t[1,1:d2])))
}
kdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]
cm <- tots[(d1+1):(d1+d2)]
N <- sum(rm)
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
T <- sum(cm[1:d2]^3-cm[1:d2])/(N^3-N)
f <- 12*((N*(N+1)*(1-T))^(-1))
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) f*sum(((t[,1:d2]%*%r-rm[1:d1]*(N+1)/2)^2)/rm[1:d1])))
}
jtdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]
cm <- tots[(d1+1):(d1+d2)]
N <- sum(rm)
U<-function(t,d1,d2){
W<-c()
T<-c()
R<-c()
for(i in c(2:d1)){
for(j in c(1:(i-1))){
if(i != j){
T<-c(t[i,],T)
R<-c((rm[i]*(rm[i]+1)/2),R)
W<-c(c(cumsum(c(0,t[i,-d2]+t[j,-d2])))+ c((((t[i,1:d2]+t[j,1:d2])+1)/2)),W)
}
W<-c(W)
T<-c(T)
R<-c(R)
}}
return(sum(W*T)-sum(R))
}
tablist <- r2dtable(Nsim, rm, cm)
ans<-sapply(tablist, U,d1,d2)
return(c(ans))
}
  if (!inherits(dataset, c("data.frame", "table", "array", "matrix"))){
stop("dataset incorrectly specified")
}
if (!(test %in% c("deviance", "wilcoxon", "kruskal", "jt"))){
stop("test incorrectly specified")
}
  if (inherits(dataset, "data.frame")) {
d1 <- nlevels(dataset[,c1])
d2 <- nlevels(dataset[,c2])
ds <- dataset[,c(c1,c2,S)]
} else {
d1 <- dim(dataset)[c1]
d2 <- dim(dataset)[c2]
ds <- apply(dataset, c(c1,c2,S), sum)
}
if ((d1<=1) | (d2<=1))
stop("invalid factor(s)")
if (is.null(S))
rv <- NULL
else
rv <- 3:(length(S)+2)
  ft <- ftable(ds, col.vars=2:1, row.vars=rv)
dim(ft) <- c(length(ft)/(d1*d2), d1*d2)
switch(test,
"deviance"={obs <- LRT(ft, d1, d2)},
"wilcoxon"={obs <- wilcoxon(ft, d1, d2)},
"kruskal" ={obs <- kruskal(ft,d1,d2)},
"jt" ={obs <- jt(ft,d1,d2)})
if (N>0){
rcsums <- apply(ft, 1, rcsum, d1, d2)
switch(test,
"deviance"={
strata.stats <- apply(rcsums, 2, rdev, d1, d2, N)
mc.P <- sum(rowSums(strata.stats) >= obs$deviance)/N
obs <- c(obs, montecarlo.P=mc.P)
},
"wilcoxon"={
strata.stats <- apply(rcsums, 2, wdev, d1, d2, N)
mc.P <- sum(abs(rowSums(strata.stats)-obs$EW) >= abs(obs$W-obs$EW))/N
obs <- c(obs, montecarlo.P=mc.P)
},
"kruskal" ={
strata.stats <- apply(rcsums, 2, kdev, d1, d2, N)
mc.P <- sum((rowSums(strata.stats) >= obs$KW))/N
obs <- c(obs, montecarlo.P=mc.P)
},
"jt" ={
strata.stats <- apply(rcsums, 2, jtdev, d1, d2, N)
mc.P <- sum((rowSums(strata.stats)-obs$EJT) >= abs(obs$JT-obs$EJT))/N
obs <- c(obs, montecarlo.P=mc.P)
})
}
obs$set <- set
obs
}
.CI.exact <- function(c1,c2, S=NULL, dataset, test="deviance", N=0) {
LRT <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2); t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]; rm <- t1[1:d1,d2+1]; N<- t1[d1+1,d2+1]
df <- (sum(cm>0)-1)*(sum(rm>0)-1)
dev <- 0
if (df>0) {fv <- (rm %o% cm)/N; dev <- 2*sum(t*log(t/fv), na.rm=T)}
return(c(df=df, dev=dev))
}
ans <- apply(m, 1, oneslice, d1, d2)
obs.deviance <- sum(ans[2,])
df <- sum(ans[1,])
P <- 1 - pchisq(obs.deviance, df)
return(list(deviance=obs.deviance, df=df, P=P))
}
wilcoxon <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2); t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]; rm <- t1[1:d1,d2+1]; N<- t1[d1+1,d2+1]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
W <- sum(r*t[1,])
EW <- (rm[1]/N)*sum(r*cm)
VW <- (rm[1]*rm[2]/(N*(N-1)))*sum(((r-EW/rm[1])^2)*cm)
return(c(W, EW, VW))
}
ans <- apply(m, 1, oneslice, d1, d2)
W <- sum(ans[1,])
EW <- sum(ans[2,])
VW <- sum(ans[3,])
P <- 2*(1 - pnorm(abs(W-EW), sd=sqrt(VW)))
return(list(W=W, EW=EW, P=P))
}
kruskal <- function(m, d1, d2) {
oneslice <- function(t,d1,d2) {
dim(t) <- c(d1,d2); t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]; rm <- t1[1:d1,d2+1]; N<- t1[d1+1,d2+1]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
T <- sum(cm[1:d2]^3-cm[1:d2])/(N^3-N)
f <- 12*((N*(N+1)*(1-T))^(-1))
KW <- f*sum(((t%*%r-rm[1:d1]*(N+1)/2)^2)/rm[1:d1])
df <- (sum(rm>0)-1)
return(c(df=df, KW=KW))
}
ans <- apply(m, 1, oneslice, d1, d2)
obs.KW <- sum(ans[2,])
df<- sum(ans[1,])
P <- 1 - pchisq(obs.KW,df)
return(list(KW=obs.KW, df=df, P=P))
}
jt <- function(m, d1, d2) {
oneslice<-function(t,d1,d2){
dim(t)<-c(d1,d2); t1<-addmargins(t)
cm<-t1[d1+1,1:d2]; rm<-t1[1:d1,d2+1]; N<-t1[d1+1,d2+1]
W<-c()
T<-c()
R<-c()
for(i in c(2:d1)){
for(j in c(1:(i-1))){
if(i != j){
T<-c(t[i,],T)
R<-c((rm[i]*(rm[i]+1)/2),R)
W<-c(c(cumsum(c(0,t[i,-d2]+t[j,-d2])))+ c((((t[i,1:d2]+t[j,1:d2])+1)/2)),W)
}
W<-c(W)
T<-c(T)
R<-c(R)
}}
JT<-sum(W*T)-sum(R)
EJT<-sum(N^2-sum(rm^2))/4
U1<-N*(N-1)*(2*N+5)-sum(rm*(rm-1)*(2*rm+5))-sum(cm*(cm-1)*(2*cm+5))
U2<-sum(rm*(rm-1)*(rm-2))*sum((cm)*(cm-1)*(cm-2))
U3<-sum((rm)*(rm-1))*sum((cm)*(cm-1))
t1<-72
t2<-36*N*(N-1)*(N-2)
t3<-8*N*(N-1)
VJT<-(U1/t1)+(U2/t2)+(U3/t3)
return(c(JT, EJT,VJT))
}
ans <- apply(m, 1, oneslice, d1, d2)
JT <- sum(ans[1,])
EJT <- sum(ans[2,])
VJT <- sum(ans[3,])
P<- 2*(1 - pnorm(abs(JT-EJT), sd=sqrt(VJT)))
return(list(JT=JT, EJT=EJT, P=P))
}
rcsum <- function(t,d1,d2) {
dim(t) <- c(d1,d2); t1 <- addmargins(t)
cm <- t1[d1+1,1:d2]; rm <- t1[1:d1,d2+1]
return(c(rm,cm))
}
rdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]; cm <- tots[(d1+1):(d1+d2)]; N <- sum(rm)
fv <- (rm %o% cm)/N
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) 2*sum(t*log(t/fv), na.rm=T)))
}
wdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]; cm <- tots[(d1+1):(d1+d2)]
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) sum(r*t[1,1:d2])))
}
kdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]; cm <- tots[(d1+1):(d1+d2)]; N <- sum(rm)
r <- cumsum(c(0, cm[-d2]))+(1+cm)/2
T <- sum(cm[1:d2]^3-cm[1:d2])/(N^3-N)
f <- 12*((N*(N+1)*(1-T))^(-1))
tablist <- r2dtable(Nsim, rm, cm)
return(sapply(tablist, function(t) f*sum(((t[,1:d2]%*%r-rm[1:d1]*(N+1)/2)^2)/rm[1:d1])))
}
jtdev <- function(tots, d1, d2, Nsim) {
rm <- tots[1:d1]; cm <- tots[(d1+1):(d1+d2)]; N <- sum(rm)
U<-function(t,d1,d2){
W<-c()
T<-c()
R<-c()
for(i in c(2:d1)){
for(j in c(1:(i-1))){
if(i != j){
T<-c(t[i,],T)
R<-c((rm[i]*(rm[i]+1)/2),R)
W<-c(c(cumsum(c(0,t[i,-d2]+t[j,-d2])))+ c((((t[i,1:d2]+t[j,1:d2])+1)/2)),W)
}
W<-c(W)
T<-c(T)
R<-c(R)
}}
return(sum(W*T)-sum(R))
}
tablist <- r2dtable(Nsim, rm, cm)
ans<-sapply(tablist, U,d1,d2)
return(c(ans))
}
  if (!inherits(dataset, c("data.frame", "table"))) stop("dataset incorrectly specified")
if (!(test %in% c("deviance", "wilcoxon", "kruskal", "jt"))) stop("test incorrectly specified")
  if (inherits(dataset, "data.frame")) {
d1 <- nlevels(dataset[,c1]); d2 <- nlevels(dataset[,c2]); ds <- dataset[,c(c1,c2,S)]
} else { d1 <- dim(dataset)[c1]; d2 <- dim(dataset)[c2]; ds <- apply(dataset, c(c1,c2,S), sum)}
if ((d1<=1) | (d2<=1)) stop("invalid factor(s)")
if (is.null(S)) rv <- NULL else rv <- 3:(length(S)+2)
  ft <- ftable(ds, col.vars=2:1, row.vars=rv)
dim(ft) <- c(length(ft)/(d1*d2), d1*d2)
if (test=="deviance") obs <- LRT(ft, d1, d2) else {
if (test=="wilcoxon") obs <- wilcoxon(ft, d1, d2) else {
if (test=="kruskal") obs <- kruskal(ft,d1,d2) else obs <- jt(ft,d1,d2)}}
if (N==0) return(obs) else {
rcsums <- apply(ft, 1, rcsum, d1, d2)
if (test=="deviance") {
strata.stats <- apply(rcsums, 2, rdev, d1, d2, N)
mc.P <- sum(rowSums(strata.stats) >= obs$deviance)/N
return(c(obs, montecarlo.P=mc.P))
} else {
if (test=="wilcoxon") {
strata.stats <- apply(rcsums, 2, wdev, d1, d2, N)
mc.P <- sum(abs(rowSums(strata.stats)-obs$EW) >= abs(obs$W-obs$EW))/N
return(c(obs, montecarlo.P=mc.P))
} else {
if (test=="kruskal") {
strata.stats <- apply(rcsums, 2, kdev, d1, d2, N)
mc.P <- sum((rowSums(strata.stats) >= obs$KW))/N
return(c(obs, montecarlo.P=mc.P))
} else {
strata.stats <- apply(rcsums, 2, jtdev, d1, d2, N)
mc.P <- sum((rowSums(strata.stats)-obs$EJT) >= abs(obs$JT-obs$EJT))/N
return(c(obs, montecarlo.P=mc.P))
}}
}}
} |
trace_calls <- function (x, parent_functions = NULL, parent_ref = NULL) {
count <- function(key, val) {
call("if", TRUE,
call("{",
as.call(list(call(":::", as.symbol("covr"), as.symbol("count")), key)),
val
)
)
}
if (is.null(parent_functions)) {
parent_functions <- deparse(substitute(x))
}
recurse <- function(y) {
lapply(y, trace_calls, parent_functions = parent_functions)
}
if (is.atomic(x) || is.name(x)) {
if (is.null(parent_ref)) {
x
}
else {
if (is_na(x) || is_brace(x)) {
x
} else {
key <- new_counter(parent_ref, parent_functions)
count(key, x)
}
}
}
else if (is.call(x)) {
src_ref <- attr(x, "srcref") %||% impute_srcref(x, parent_ref)
if ((identical(x[[1]], as.name("<-")) || identical(x[[1]], as.name("="))) &&
(is.call(x[[3]]) && identical(x[[3]][[1]], as.name("function")))) {
parent_functions <- c(parent_functions, as.character(x[[2]]))
}
if (identical(x[[1]], as.name("{")) && length(x) == 2 && is.call(x[[2]]) && identical(x[[2]][[1]], as.name("{"))) {
as.call(x)
} else if (!is.null(src_ref)) {
as.call(Map(trace_calls, x, src_ref, MoreArgs = list(parent_functions = parent_functions)))
} else if (!is.null(parent_ref)) {
key <- new_counter(parent_ref, parent_functions)
count(key, as.call(recurse(x)))
} else {
as.call(recurse(x))
}
}
else if (is.function(x)) {
if (is.primitive(x)) {
return(x)
}
fun_body <- body(x)
if (!is.null(attr(x, "srcref")) &&
(is.symbol(fun_body) || !identical(fun_body[[1]], as.name("{")))) {
src_ref <- attr(x, "srcref")
key <- new_counter(src_ref, parent_functions)
fun_body <- count(key, trace_calls(fun_body, parent_functions))
} else {
fun_body <- trace_calls(fun_body, parent_functions)
}
new_formals <- trace_calls(formals(x), parent_functions)
if (is.null(new_formals)) new_formals <- list()
formals(x) <- new_formals
body(x) <- fun_body
x
}
else if (is.pairlist(x)) {
as.pairlist(recurse(x))
}
else if (is.expression(x)) {
as.expression(recurse(x))
}
else if (is.list(x)) {
recurse(x)
}
else {
message("Unknown language class: ", paste(class(x), collapse = "/"),
call. = FALSE)
x
}
}
.counters <- new.env(parent = emptyenv())
new_counter <- function(src_ref, parent_functions) {
key <- key(src_ref)
.counters[[key]]$value <- 0
.counters[[key]]$srcref <- src_ref
.counters[[key]]$functions <- parent_functions
key
}
count <- function(key) {
.counters[[key]]$value <- .counters[[key]]$value + 1
}
clear_counters <- function() {
rm(envir = .counters, list = ls(envir = .counters))
}
key <- function(x) {
paste(collapse = ":", c(get_source_filename(x), x))
}
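## Hedged illustration of the counter machinery above (the key below is made up;
## not part of the original source). trace_calls() rewrites each traced expression
## as `if (TRUE) { covr:::count(key); <expr> }`, so evaluating instrumented code
## increments the matching entry in `.counters`:
## k <- "example.R:1:1:1:10:1:10:1:1"
## .counters[[k]]$value <- 0
## count(k); count(k)
## .counters[[k]]$value   # 2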
f1 <- function() {
f2 <- function() {
2
}
f2()
} |
prediction.gee <- function(model, calculate_se = FALSE, ...) {
pred <- make_data_frame(fitted = predict(model, ...))
pred[["se.fitted"]] <- NA_real_
vc <- NA_real_
structure(pred,
class = c("prediction", "data.frame"),
at = NULL,
type = NA_character_,
call = if ("call" %in% names(model)) model[["call"]] else NULL,
model_class = class(model),
row.names = seq_len(nrow(pred)),
vcov = vc,
jacobian = NULL,
weighted = FALSE)
} |
NULL
plate_types[['wildtype_mutant_pnpp']] <- "wildtype_mutant_pnpp"
parent_plate_type.wildtype_mutant_pnpp <- function(plate) {
"pnpp_experiment"
}
define_params.wildtype_mutant_pnpp <- function(plate) {
params <- NextMethod("define_params")
new_params <- list(
'GENERAL' = list(
'POSITIVE_NAME' = 'wildtype',
'NEGATIVE_NAME' = 'mutant'
)
)
params %<>% utils::modifyList(new_params)
params
}
wells_wildtype <- function(plate) {
stopifnot(plate %>% inherits("wildtype_mutant_pnpp"))
wells_positive(plate)
}
wells_mutant <- function(plate) {
stopifnot(plate %>% inherits("wildtype_mutant_pnpp"))
wells_negative(plate)
}
plot.wildtype_mutant_pnpp <- function(
x,
wells, samples,
...,
col_drops_mutant = "purple3", col_drops_wildtype = "green3",
col_drops_rain = "black",
show_mutant_freq = TRUE, text_size_mutant_freq = 4,
alpha_drops_low_mutant_freq = 0.5,
show_low_high_mut_freq = TRUE,
bg_mutant = "purple3", bg_wildtype = "green3",
alpha_bg_low_high_mut_freq = 0.1
)
{
dots <- list(...)
if (missing(col_drops_mutant) && !is.null(dots[["col_drops_negative"]])) {
col_drops_mutant <- dots[["col_drops_negative"]]
}
if (missing(col_drops_wildtype) && !is.null(dots[["col_drops_positive"]])) {
col_drops_wildtype <- dots[["col_drops_positive"]]
}
NextMethod("plot", x,
col_drops_negative = col_drops_mutant,
col_drops_positive = col_drops_wildtype,
col_drops_rain = col_drops_rain,
show_negative_freq = show_mutant_freq,
text_size_negative_freq = text_size_mutant_freq,
alpha_drops_low_negative_freq = alpha_drops_low_mutant_freq,
show_low_high_neg_freq = show_low_high_mut_freq,
bg_negative = bg_mutant, bg_positive = bg_wildtype,
alpha_bg_low_high_neg_freq = alpha_bg_low_high_mut_freq)
} |
"hkagepop19" |
prConvertDfFactors <- function(x) {
if (!"data.frame" %in% class(x)) {
return(x)
}
i <- sapply(x, function(col) {
(
(
!is.numeric(col) &&
!is.character(col)
) ||
(
inherits(col, "times")
)
)
})
if (any(i)) {
x[i] <- lapply(x[i], as.character)
}
return(x)
} |
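## Hedged usage sketch for prConvertDfFactors() defined above (not part of the
## original source): columns that are neither numeric nor character (factors,
## Dates, "times" objects, ...) are coerced to character; numeric columns are
## left untouched.
## df <- data.frame(grp = factor(c("a", "b")), n = 1:2, day = Sys.Date() + 0:1)
## str(prConvertDfFactors(df))  # grp and day become character, n stays integer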
spin = function(
hair, knit = TRUE, report = TRUE, text = NULL, envir = parent.frame(),
format = c('Rmd', 'Rnw', 'Rhtml', 'Rtex', 'Rrst'),
doc = "^
comment = c("^[
) {
format = match.arg(format)
x = if (nosrc <- is.null(text)) read_utf8(hair) else split_lines(text)
stopifnot(length(comment) == 2L)
c1 = grep(comment[1], x); c2 = grep(comment[2], x)
if (length(c1) != length(c2))
stop('comments must be put in pairs of start and end delimiters')
if (length(c1)) x = x[-unique(unlist(mapply(seq, c1, c2, SIMPLIFY = FALSE)))]
parsed_data = getParseData(parse(text = x, keep.source = TRUE))
is_matchable = seq_along(x) %in% unique(parsed_data[parsed_data$col1 == 1, 'line1'])
p = if (identical(tolower(format), 'rmd')) .fmt.rmd(x) else .fmt.pat[[tolower(format)]]
if (any(i <- is_matchable & grepl(inline, x))) x[i] = gsub(inline, p[4], x[i])
r = rle((is_matchable & grepl(doc, x)) | i)
n = length(r$lengths); txt = vector('list', n); idx = c(0L, cumsum(r$lengths))
p1 = gsub('\\{', '\\\\{', paste0('^', p[1L], '.*', p[2L], '$'))
for (i in seq_len(n)) {
block = x[seq(idx[i] + 1L, idx[i + 1])]
txt[[i]] = if (r$values[i]) {
sub(doc, '', block)
} else {
block = strip_white(block)
if (!length(block)) next
        if (length(opt <- grep(rc <- '^(#|--)+(\\+|-)', block))) {
block[opt] = paste0(p[1L], gsub(paste0(rc, '\\s*|-*\\s*$'), '', block[opt]), p[2L])
if (any(opt > 1)) {
j = opt[opt > 1]
block[j] = paste(p[3L], block[j], sep = '\n')
}
}
if (!grepl(p1, block[1L])) {
block = c(paste0(p[1L], p[2L]), block)
}
c('', block, p[3L], '')
}
}
txt = unlist(txt)
  if (report && format %in% c('Rnw', 'Rtex') && !any(grepl('^\\s*\\\\documentclass', txt))) {
txt = c('\\documentclass{article}', '\\begin{document}', txt, '\\end{document}')
}
if (nosrc) {
outsrc = with_ext(hair, format)
write_utf8(txt, outsrc)
txt = NULL
} else outsrc = NULL
if (!knit) return(txt %n% outsrc)
out = if (report) {
if (format == 'Rmd') {
knit2html(outsrc, text = txt, envir = envir)
} else if (!is.null(outsrc) && (format %in% c('Rnw', 'Rtex'))) {
knit2pdf(outsrc, envir = envir)
}
} else knit(outsrc, text = txt, envir = envir)
if (!precious && !is.null(outsrc)) file.remove(outsrc)
invisible(out)
}
.fmt.pat = list(
rnw = c('<<', '>>=', '@', '\\\\Sexpr{\\1}'),
rhtml = c('<!--begin.rcode ', '', 'end.rcode-->', '<!--rinline \\1 -->'),
rtex = c('% begin.rcode ', '', '% end.rcode', '\\\\rinline{\\1}'),
rrst = c('.. {r ', '}', '.. ..', ':r:`\\1`')
)
.fmt.rmd = function(x) {
x = one_string(x)
l = attr(gregexpr('`+', x)[[1]], 'match.length')
l = max(l, 0)
if (length(l) > 0) {
i = highr:::spaces(l + 1, '`')
b = highr:::spaces(max(l + 1, 3), '`')
} else {
i = '`'
b = '```'
}
c(paste0(b, '{r '), '}', b, paste0(i, 'r \\1 ', i))
}
spin_child = function(input, format) {
if (!isTRUE(getOption('knitr.in.progress')))
return(sys.source(input, parent.frame()))
fmt = if (missing(format)) {
if (is.null(fmt <- out_format()))
stop('spin_child() must be called in a knitting process')
.spin.fmt = c(
'latex' = 'Rnw', 'sweave' = 'Rnw', 'listings' = 'Rnw',
'html' = 'Rhtml', 'markdown' = 'Rmd'
)
    if (is.na(.spin.fmt[fmt]))
      stop('the document format ', fmt, ' is not supported yet')
    unname(.spin.fmt[fmt])
} else format
asis_output(knit_child(
text = spin(text = read_utf8(input), knit = FALSE, report = FALSE, format = fmt),
quiet = TRUE
))
} |
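## Hedged usage sketch for spin() defined above (the file name is made up; not
## part of the original source): a script that uses #' lines for prose and
## #+ lines for chunk headers is converted to a literate document and, by
## default, knitted.
## spin("analysis.R", knit = FALSE)    # writes analysis.Rmd and returns its path
## spin("analysis.R", format = "Rnw")  # Rnw route; compiled via knit2pdf()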
in_range <- function(target, low, high) {
return(low < target & target < high)
} |
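## Hedged usage sketch for in_range() defined above (not part of the original
## source): both bounds are exclusive and the check is vectorised over `target`.
## in_range(c(1, 5, 10), low = 1, high = 10)  # FALSE TRUE FALSE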
setGeneric(name = "dephase_chrom",
def = function(Object, rel_dephase)
standardGeneric("dephase_chrom"))
setMethod(f = "dephase_chrom",
signature = c("GCxGC"),
definition = function(Object, rel_dephase) {
if (rel_dephase < 0 | rel_dephase > 100)
stop("A relative value from 0 to 100 must be provided")
chrom_rows <- nrow(Object@chromatogram)
dephase_index <- seq(1, chrom_rows * rel_dephase / 100)
chrom1 <- Object@chromatogram[dephase_index, ]
chrom2 <- Object@chromatogram[-dephase_index, ]
chrom <- rbind(chrom2, chrom1)
Object@chromatogram <- chrom
return(Object)
}) |
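## Hedged usage sketch for dephase_chrom() defined above (read_chrom() and the
## file name are assumptions about the surrounding package; not part of the
## original source): the first rel_dephase percent of chromatogram rows is moved
## to the bottom of the matrix.
## chrom <- read_chrom("sample.cdf", mod_time = 5)
## shifted <- dephase_chrom(chrom, rel_dephase = 25)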